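"""
Build the playout schedule: a map of playout events fetched from the LibreTime
API, keyed by their start time formatted with EVENT_KEY_FORMAT.
"""
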
from datetime import datetime, time, timedelta
from operator import itemgetter
from typing import Dict

from dateutil.parser import isoparse

from libretime_api_client.v2 import ApiClient
from libretime_shared.datetime import time_in_milliseconds, time_in_seconds

from ..liquidsoap.models import StreamPreferences
from .events import EventKind

EVENT_KEY_FORMAT = "%Y-%m-%d-%H-%M-%S"


def datetime_to_event_key(value: datetime) -> str:
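    """
    Format a datetime into an event key.

    >>> datetime_to_event_key(datetime(2022, 9, 5, 11, 0))
    '2022-09-05-11-00-00'
    """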
    return value.strftime(EVENT_KEY_FORMAT)


def insert_event(events: dict, event_key: str, event: dict):
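    """
    Insert an event into the events map.

    An event identical to one already stored under the same key is ignored; a
    different event colliding on an existing key is stored under an indexed
    "{event_key}_{index}" key.

    >>> events = {}
    >>> insert_event(events, "2022-09-05-11-00-00", {"id": 1})
    >>> insert_event(events, "2022-09-05-11-00-00", {"id": 1})
    >>> insert_event(events, "2022-09-05-11-00-00", {"id": 2})
    >>> sorted(events)
    ['2022-09-05-11-00-00', '2022-09-05-11-00-00_0']
    """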
    key = event_key

    # Search for an empty slot
    index = 0
    while key in events:
        # Ignore duplicate event
        if event == events[key]:
            return

        key = f"{event_key}_{index}"
        index += 1

    events[key] = event


def get_schedule(api_client: ApiClient):
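    """
    Fetch the schedule for the next 24 hours and generate the associated events.

    Returns a payload of the form {"media": {event_key: event, ...}}, with the
    events sorted by key.
    """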
    stream_preferences = StreamPreferences(
        **api_client.get_stream_preferences().json()
    )

    current_time = datetime.utcnow()
    end_time = current_time + timedelta(days=1)

    current_time_str = current_time.isoformat(timespec="seconds")
    end_time_str = end_time.isoformat(timespec="seconds")

    schedule = api_client.list_schedule(
        params={
            "ends_after": f"{current_time_str}Z",
            "ends_before": f"{end_time_str}Z",
            "overbooked": False,
            "position_status__gt": 0,
        }
    ).json()

    events: Dict[str, dict] = {}

    for item in sorted(schedule, key=itemgetter("starts_at")):
        item["starts_at"] = isoparse(item["starts_at"])
        item["ends_at"] = isoparse(item["ends_at"])

        show_instance = api_client.get_show_instance(item["instance"]).json()
        show = api_client.get_show(show_instance["show"]).json()

        if show["live_enabled"]:
            show_instance["starts_at"] = isoparse(show_instance["starts_at"])
            show_instance["ends_at"] = isoparse(show_instance["ends_at"])
            generate_live_events(
                events,
                show_instance,
                stream_preferences.input_fade_transition,
            )

        if item["file"]:
            file = api_client.get_file(item["file"]).json()
            generate_file_events(events, item, file, show)

        elif item["stream"]:
            webstream = api_client.get_webstream(item["stream"]).json()
            generate_webstream_events(events, item, webstream, show)

    return {"media": dict(sorted(events.items()))}


def generate_live_events(
    events: dict,
    show_instance: dict,
    input_fade_transition: float,
):
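    """
    Generate the switch off and kick out events ending a live show.

    When an input fade transition is configured, the switch off event is placed
    one transition before the end of the show instance, so the input source can
    be faded out before being kicked out.
    """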
    transition = timedelta(seconds=input_fade_transition)

    switch_off_event_key = datetime_to_event_key(show_instance["ends_at"] - transition)
    kick_out_event_key = datetime_to_event_key(show_instance["ends_at"])

    # If enabled, fade the input source out
    if switch_off_event_key != kick_out_event_key:
        switch_off_event = {
            "type": EventKind.ACTION,
            "event_type": "switch_off",
            "start": switch_off_event_key,
            "end": switch_off_event_key,
        }
        insert_event(events, switch_off_event_key, switch_off_event)

    # Then kick the source out
    kick_out_event = {
        "type": EventKind.ACTION,
        "event_type": "kick_out",
        "start": kick_out_event_key,
        "end": kick_out_event_key,
    }
    insert_event(events, kick_out_event_key, kick_out_event)


def generate_file_events(
    events: dict,
    schedule: dict,
    file: dict,
    show: dict,
):
    """
    Generate events for a scheduled file.
    """
    schedule_start_event_key = datetime_to_event_key(schedule["starts_at"])
    schedule_end_event_key = datetime_to_event_key(schedule["ends_at"])

    event = {
        "type": EventKind.FILE,
        "row_id": schedule["id"],
        "start": schedule_start_event_key,
        "end": schedule_end_event_key,
        "uri": file["url"],
        "id": file["id"],
        # Show data
        "show_name": show["name"],
        # Extra data
        "fade_in": time_in_milliseconds(time.fromisoformat(schedule["fade_in"])),
        "fade_out": time_in_milliseconds(time.fromisoformat(schedule["fade_out"])),
        "cue_in": time_in_seconds(time.fromisoformat(schedule["cue_in"])),
        "cue_out": time_in_seconds(time.fromisoformat(schedule["cue_out"])),
        "metadata": {
            "track_title": file["track_title"],
            "artist_name": file["artist_name"],
            "mime": file["mime"],
        },
        "replay_gain": file["replay_gain"],
        "filesize": file["size"],
    }
    insert_event(events, schedule_start_event_key, event)


def generate_webstream_events(
    events: dict,
    schedule: dict,
    webstream: dict,
    show: dict,
):
    """
    Generate events for a scheduled webstream.

    The webstream buffer is started 5 seconds before the scheduled start; both
    the buffer and the output are stopped at the scheduled end.
    """
    schedule_start_event_key = datetime_to_event_key(schedule["starts_at"])
    schedule_end_event_key = datetime_to_event_key(schedule["ends_at"])

    stream_buffer_start_event = {
        "type": EventKind.WEB_STREAM_BUFFER_START,
        "row_id": schedule["id"],
        "start": datetime_to_event_key(schedule["starts_at"] - timedelta(seconds=5)),
        "end": datetime_to_event_key(schedule["starts_at"] - timedelta(seconds=5)),
        "uri": webstream["url"],
        "id": webstream["id"],
    }
    insert_event(events, schedule_start_event_key, stream_buffer_start_event)

    stream_output_start_event = {
        "type": EventKind.WEB_STREAM_OUTPUT_START,
        "row_id": schedule["id"],
        "start": schedule_start_event_key,
        "end": schedule_end_event_key,
        "uri": webstream["url"],
        "id": webstream["id"],
        # Show data
        "show_name": show["name"],
    }
    insert_event(events, schedule_start_event_key, stream_output_start_event)

    # NOTE: stream_*_end were previously triggered 1 second before
    # the schedule end.
    stream_buffer_end_event = {
        "type": EventKind.WEB_STREAM_BUFFER_END,
        "row_id": schedule["id"],
        "start": schedule_end_event_key,
        "end": schedule_end_event_key,
        "uri": webstream["url"],
        "id": webstream["id"],
    }
    insert_event(events, schedule_end_event_key, stream_buffer_end_event)

    stream_output_end_event = {
        "type": EventKind.WEB_STREAM_OUTPUT_END,
        "row_id": schedule["id"],
        "start": schedule_end_event_key,
        "end": schedule_end_event_key,
        "uri": webstream["url"],
        "id": webstream["id"],
    }
    insert_event(events, schedule_end_event_key, stream_output_end_event)