feat(playout): replace schedule event dicts with objects

This commit is contained in:
jo 2023-03-04 21:50:12 +01:00 committed by Kyle Robbertze
parent 684e7a6f24
commit a1db2a157a
11 changed files with 646 additions and 667 deletions
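In short: the playout schedule events that were previously passed around as plain dicts (TypedDicts) become Pydantic models (BaseEvent, FileEvent, WebStreamEvent, ActionEvent). The nested metadata dict is flattened into the event, runtime keys such as dst/file_ext are replaced by computed properties, and incoming payloads are validated through parse_any_event. A rough sketch of what that means for callers, not part of the commit itself (it assumes the libretime_playout package from this tree is importable; the payload values are invented for illustration):

    from datetime import datetime

    from libretime_playout.player.events import EventKind, FileEvent, parse_any_event

    # Before this commit: an event travelled as a plain dict keyed by strings.
    raw_event = {
        "type": "file",
        "row_id": 1,
        "start": "2022-09-05-11-00-00",
        "end": "2022-09-05-11-05-02",
        "uri": None,
        "id": 2,
        "show_name": "Show 1",
        "fade_in": 500.0,
        "fade_out": 500.0,
        "cue_in": 13.7008,
        "cue_out": 315.845,
        "track_title": "My Friend the Forest",
        "artist_name": "Nils Frahm",
        "mime": "audio/flac",
        "replay_gain": 11.46,
        "filesize": 10000,
    }

    # After this commit: the same payload is validated into a typed object.
    event = parse_any_event(raw_event)
    assert isinstance(event, FileEvent)
    assert event.type == EventKind.FILE
    assert event.start == datetime(2022, 9, 5, 11, 0)  # event-key string parsed by validator
    print(event.file_ext)        # ".flac", derived from the mime type
    print(event.local_filepath)  # CACHE_DIR / "2.flac"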

View File

@@ -891,11 +891,9 @@ SQL;
             $schedule_item = [
                 'id' => $media_id,
                 'type' => 'file',
-                'metadata' => [
-                    'track_title' => $fileMetadata['track_title'],
-                    'artist_name' => $fileMetadata['artist_name'],
-                    'mime' => $fileMetadata['mime'],
-                ],
+                'track_title' => $fileMetadata['track_title'],
+                'artist_name' => $fileMetadata['artist_name'],
+                'mime' => $fileMetadata['mime'],
                 'row_id' => $item['id'],
                 'uri' => $uri,
                 'fade_in' => Application_Model_Schedule::WallTimeToMillisecs($item['fade_in']),
@@ -926,25 +924,26 @@ SQL;
         $stream_buffer_start = self::AirtimeTimeToPypoTime($buffer_start->format(DEFAULT_TIMESTAMP_FORMAT));

-        $schedule_item = [
+        $schedule_common = [
+            'row_id' => $item['id'],
+            'id' => $media_id,
+            'uri' => $uri,
+            'show_name' => $item['show_name'],
+        ];
+
+        $schedule_item = array_merge($schedule_common, [
             'start' => $stream_buffer_start,
             'end' => $stream_buffer_start,
-            'uri' => $uri,
-            'row_id' => $item['id'],
             'type' => 'stream_buffer_start',
-        ];
+        ]);
         self::appendScheduleItem($data, $start, $schedule_item);

-        $schedule_item = [
-            'id' => $media_id,
-            'type' => 'stream_output_start',
-            'row_id' => $item['id'],
-            'uri' => $uri,
+        $schedule_item = array_merge($schedule_common, [
             'start' => $start,
             'end' => $end,
-            'show_name' => $item['show_name'],
-        ];
+            'type' => 'stream_output_start',
+        ]);
         self::appendScheduleItem($data, $start, $schedule_item);

         // since a stream never ends we have to insert an additional "kick stream" event. The "start"
@@ -954,21 +953,18 @@ SQL;
         $stream_end = self::AirtimeTimeToPypoTime($dt->format(DEFAULT_TIMESTAMP_FORMAT));

-        $schedule_item = [
+        $schedule_item = array_merge($schedule_common, [
             'start' => $stream_end,
             'end' => $stream_end,
-            'uri' => $uri,
             'type' => 'stream_buffer_end',
-            'row_id' => $item['id'],
-        ];
+        ]);
         self::appendScheduleItem($data, $stream_end, $schedule_item);

-        $schedule_item = [
+        $schedule_item = array_merge($schedule_common, [
             'start' => $stream_end,
             'end' => $stream_end,
-            'uri' => $uri,
             'type' => 'stream_output_end',
-        ];
+        ]);
         self::appendScheduleItem($data, $stream_end, $schedule_item);
     }

View File

@@ -1,22 +1,35 @@
 from datetime import datetime
 from enum import Enum
-from typing import Dict, Literal, Optional, TypedDict, Union
+from pathlib import Path
+from typing import TYPE_CHECKING, Dict, Literal, Optional, Union

-from typing_extensions import NotRequired
+from dateutil.parser import isoparse
+from pydantic import BaseModel, Field, parse_obj_as, validator
+from typing_extensions import Annotated
+
+from ..config import CACHE_DIR
+from ..utils import mime_guess_extension
+
+if TYPE_CHECKING:
+    from pydantic.typing import AnyClassMethod

 EVENT_KEY_FORMAT = "%Y-%m-%d-%H-%M-%S"


 def event_key_to_datetime(value: Union[str, datetime]) -> datetime:
-    if isinstance(value, datetime):
-        return value
-    return datetime.strptime(value, EVENT_KEY_FORMAT)
+    if isinstance(value, str):
+        value = datetime.strptime(value, EVENT_KEY_FORMAT)
+    return value


 def datetime_to_event_key(value: Union[str, datetime]) -> str:
-    if isinstance(value, str):
-        return value
-    return value.strftime(EVENT_KEY_FORMAT)
+    if isinstance(value, datetime):
+        value = value.strftime(EVENT_KEY_FORMAT)
+    return value
+
+
+def event_isoparse(value: str) -> datetime:
+    return isoparse(value).replace(tzinfo=None).replace(microsecond=0)


 class EventKind(str, Enum):
@@ -28,16 +41,24 @@ class EventKind(str, Enum):
     WEB_STREAM_OUTPUT_END = "stream_output_end"


-class BaseEvent(TypedDict):
-    # TODO: Only use datetime
-    start: Union[str, datetime]
-    end: Union[str, datetime]
+def event_datetime_validator(prop: str) -> "AnyClassMethod":
+    return validator(prop, pre=True, allow_reuse=True)(event_key_to_datetime)


-class FileEventMetadata(TypedDict):
-    track_title: str
-    artist_name: str
-    mime: str
+class BaseEvent(BaseModel):
+    start: datetime
+    end: datetime
+
+    _start_validator = event_datetime_validator("start")
+    _end_validator = event_datetime_validator("end")
+
+    @property
+    def start_key(self) -> str:
+        return datetime_to_event_key(self.start)
+
+    @property
+    def end_key(self) -> str:
+        return datetime_to_event_key(self.end)


 class FileEvent(BaseEvent):
@@ -45,7 +66,7 @@ class FileEvent(BaseEvent):

     # Schedule
     row_id: int
-    uri: Optional[str]
+    uri: Optional[str] = None
     id: int

     # Show data
@@ -57,16 +78,22 @@ class FileEvent(BaseEvent):
     cue_in: float
     cue_out: float

-    # TODO: Flatten this metadata dict
-    metadata: FileEventMetadata
-    replay_gain: float
+    track_title: Optional[str] = None
+    artist_name: Optional[str] = None
+    mime: str
+    replay_gain: Optional[float] = None
     filesize: int

-    # Runtime
-    dst: NotRequired[str]
-    file_ready: NotRequired[bool]
-    file_ext: NotRequired[str]
+    file_ready: bool = False
+
+    @property
+    def file_ext(self) -> str:
+        return mime_guess_extension(self.mime)
+
+    @property
+    def local_filepath(self) -> Path:
+        return CACHE_DIR / f"{self.id}{self.file_ext}"


 class WebStreamEvent(BaseEvent):
@@ -83,7 +110,7 @@ class WebStreamEvent(BaseEvent):
     id: int

     # Show data
-    show_name: NotRequired[str]
+    show_name: str


 class ActionEventKind(str, Enum):
@@ -97,7 +124,15 @@ class ActionEvent(BaseEvent):
     event_type: str


-AnyEvent = Union[FileEvent, WebStreamEvent, ActionEvent]
+AnyEvent = Annotated[
+    Union[FileEvent, WebStreamEvent, ActionEvent],
+    Field(discriminator="type"),
+]
+
+
+def parse_any_event(value: dict) -> AnyEvent:
+    return parse_obj_as(AnyEvent, value)  # type: ignore
+

 FileEvents = Dict[str, FileEvent]
 Events = Dict[str, AnyEvent]
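As an aside (not part of the commit): the key helpers above are symmetric, so the pre-commit string keys and the new datetime fields round-trip cleanly, which is what the start_key/end_key properties rely on. A minimal sketch, assuming the package is importable:

    from datetime import datetime

    from libretime_playout.player.events import (
        datetime_to_event_key,
        event_key_to_datetime,
    )

    when = datetime(2022, 9, 5, 11, 0, 0)
    key = datetime_to_event_key(when)           # "2022-09-05-11-00-00"
    assert event_key_to_datetime(key) == when   # str -> datetime
    assert event_key_to_datetime(when) == when  # datetime passes through unchanged
    assert datetime_to_event_key(key) == key    # str passes through unchanged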

View File

@@ -16,16 +16,14 @@ from ..config import CACHE_DIR, POLL_INTERVAL, Config
 from ..liquidsoap.client import LiquidsoapClient
 from ..liquidsoap.models import Info, MessageFormatKind, StreamPreferences, StreamState
 from ..timeout import ls_timeout
-from .events import EventKind, Events, FileEvent, FileEvents, event_key_to_datetime
+from .events import Events, FileEvent, FileEvents
 from .liquidsoap import PypoLiquidsoap
-from .schedule import get_schedule
+from .schedule import get_schedule, receive_schedule

 logger = logging.getLogger(__name__)

 here = Path(__file__).parent

-from ..utils import mime_guess_extension

 # pylint: disable=too-many-instance-attributes
 class PypoFetch(Thread):
@@ -73,7 +71,7 @@ class PypoFetch(Thread):
         logger.debug("handling event %s: %s", command, message)

         if command == "update_schedule":
-            self.schedule_data = message["schedule"]["media"]
+            self.schedule_data = receive_schedule(message["schedule"]["media"])
             self.process_schedule(self.schedule_data)
         elif command == "reset_liquidsoap_bootstrap":
             self.set_bootstrap_variables()
@@ -209,15 +207,8 @@ class PypoFetch(Thread):
         try:
             for key in events:
                 item = events[key]
-                if item["type"] == EventKind.FILE:
-                    file_ext = self.sanity_check_media_item(item)
-                    dst = os.path.join(self.cache_dir, f'{item["id"]}{file_ext}')
-                    item["dst"] = dst
-                    item["file_ready"] = False
+                if isinstance(item, FileEvent):
                     file_events[key] = item

-                item["start"] = event_key_to_datetime(item["start"])
-                item["end"] = event_key_to_datetime(item["end"])
                 all_events[key] = item

             self.media_prepare_queue.put(copy.copy(file_events))
@@ -234,25 +225,6 @@ class PypoFetch(Thread):
         except Exception as exception:  # pylint: disable=broad-exception-caught
             logger.exception(exception)

-    # do basic validation of file parameters. Useful for debugging
-    # purposes
-    def sanity_check_media_item(self, event: FileEvent):
-        start = event_key_to_datetime(event["start"])
-        end = event_key_to_datetime(event["end"])
-
-        file_ext = mime_guess_extension(event["metadata"]["mime"])
-        event["file_ext"] = file_ext
-
-        length1 = (end - start).total_seconds()
-        length2 = event["cue_out"] - event["cue_in"]
-
-        if abs(length2 - length1) > 1:
-            logger.error("end - start length: %s", length1)
-            logger.error("cue_out - cue_in length: %s", length2)
-            logger.error("Two lengths are not equal!!!")
-
-        return file_ext
-
     def is_file_opened(self, path: str) -> bool:
         result = run(["lsof", "--", path], stdout=PIPE, stderr=DEVNULL, check=False)
         return bool(result.stdout)
@@ -269,10 +241,8 @@ class PypoFetch(Thread):
         for key in events:
             item = events[key]
-            if item["type"] == EventKind.FILE:
-                if "file_ext" not in item:
-                    item["file_ext"] = mime_guess_extension(item["metadata"]["mime"])
-                scheduled_file_set.add(f'{item["id"]}{item["file_ext"]}')
+            if isinstance(item, FileEvent):
+                scheduled_file_set.add(item.local_filepath.name)

         expired_files = cached_file_set - scheduled_file_set

View File

@@ -1,7 +1,6 @@
 import hashlib
 import logging
 import os
-import stat
 import time
 from queue import Empty, Queue
 from threading import Thread
@@ -36,64 +35,50 @@ class PypoFile(Thread):
         """
         Copy file_event from local library directory to local cache directory.
         """
-        file_id = file_event["id"]
-        dst = file_event["dst"]
-
-        dst_exists = True
-        try:
-            dst_size = os.path.getsize(dst)
-            if dst_size == 0:
-                dst_exists = False
-        except Exception:  # pylint: disable=broad-exception-caught
-            dst_exists = False
-
-        do_copy = False
-        if dst_exists:
-            # TODO: Check if the locally cached variant of the file is sane.
-            # This used to be a filesize check that didn't end up working.
-            # Once we have watched folders updated files from them might
-            # become an issue here... This needs proper cache management.
-            # https://github.com/libretime/libretime/issues/756#issuecomment-477853018
-            # https://github.com/libretime/libretime/pull/845
-            logger.debug("found file %s in cache %s, skipping copy...", file_id, dst)
-        else:
-            do_copy = True
-
-        file_event["file_ready"] = not do_copy
-
-        if do_copy:
-            logger.info("copying file %s to cache %s", file_id, dst)
-            try:
-                with open(dst, "wb") as handle:
-                    logger.info(file_event)
-                    try:
-                        response = self.api_client.download_file(file_id, stream=True)
-                        for chunk in response.iter_content(chunk_size=2048):
-                            handle.write(chunk)
-                    except requests.exceptions.HTTPError as exception:
-                        raise RuntimeError(
-                            f"could not download file {file_event['id']}"
-                        ) from exception
-
-                # make file world readable and owner writable
-                os.chmod(dst, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
-
-                if file_event["filesize"] == 0:
-                    file_size = self.report_file_size_and_md5_to_api(
-                        dst, file_event["id"]
-                    )
-                    file_event["filesize"] = file_size
-
-                file_event["file_ready"] = True
-            except Exception as exception:  # pylint: disable=broad-exception-caught
-                logger.exception(
-                    "could not copy file %s to %s: %s",
-                    file_id,
-                    dst,
-                    exception,
-                )
+        if file_event.local_filepath.is_file():
+            logger.debug(
+                "found file %s in cache %s",
+                file_event.id,
+                file_event.local_filepath,
+            )
+            file_event.file_ready = True
+            return
+
+        logger.info(
+            "copying file %s to cache %s",
+            file_event.id,
+            file_event.local_filepath,
+        )
+        try:
+            with file_event.local_filepath.open("wb") as file_fd:
+                try:
+                    response = self.api_client.download_file(file_event.id, stream=True)
+                    for chunk in response.iter_content(chunk_size=2048):
+                        file_fd.write(chunk)
+                except requests.exceptions.HTTPError as exception:
+                    raise RuntimeError(
+                        f"could not download file {file_event.id}"
+                    ) from exception
+
+            # make file world readable and owner writable
+            file_event.local_filepath.chmod(0o644)
+
+            if file_event.filesize == 0:
+                file_event.filesize = self.report_file_size_and_md5_to_api(
+                    str(file_event.local_filepath),
+                    file_event.id,
+                )
+
+            file_event.file_ready = True
+        except Exception as exception:  # pylint: disable=broad-exception-caught
+            logger.exception(
+                "could not copy file %s to %s: %s",
+                file_event.id,
+                file_event.local_filepath,
+                exception,
+            )

     def report_file_size_and_md5_to_api(self, file_path: str, file_id: int) -> int:
         try:
             file_size = os.path.getsize(file_path)

View File

@@ -5,14 +5,7 @@ from typing import Dict, List, Optional, Set

 from ..liquidsoap.client import LiquidsoapClient
 from ..utils import seconds_between
-from .events import (
-    ActionEvent,
-    AnyEvent,
-    EventKind,
-    FileEvent,
-    WebStreamEvent,
-    event_key_to_datetime,
-)
+from .events import ActionEvent, AnyEvent, EventKind, FileEvent, WebStreamEvent
 from .liquidsoap_gateway import TelnetLiquidsoap

 logger = logging.getLogger(__name__)
@@ -33,65 +26,66 @@ class PypoLiquidsoap:
             list(self.liq_queue_tracker.keys()),
         )

-    def play(self, media_item: AnyEvent) -> None:
-        if media_item["type"] == EventKind.FILE:
-            self.handle_file_type(media_item)
-        elif media_item["type"] == EventKind.ACTION:
-            self.handle_event_type(media_item)
-        elif media_item["type"] == EventKind.WEB_STREAM_BUFFER_START:
-            self.telnet_liquidsoap.start_web_stream_buffer(media_item)
-        elif media_item["type"] == EventKind.WEB_STREAM_OUTPUT_START:
-            if (
-                media_item["row_id"]
-                != self.telnet_liquidsoap.current_prebuffering_stream_id
-            ):
-                # this is called if the stream wasn't scheduled sufficiently ahead of
-                # time so that the prebuffering stage could take effect. Let's do the
-                # prebuffering now.
-                self.telnet_liquidsoap.start_web_stream_buffer(media_item)
-            self.telnet_liquidsoap.start_web_stream()
-        elif media_item["type"] == EventKind.WEB_STREAM_BUFFER_END:
-            self.telnet_liquidsoap.stop_web_stream_buffer()
-        elif media_item["type"] == EventKind.WEB_STREAM_OUTPUT_END:
-            self.telnet_liquidsoap.stop_web_stream_output()
+    def play(self, event: AnyEvent) -> None:
+        if isinstance(event, FileEvent):
+            self.handle_file_type(event)
+        elif isinstance(event, ActionEvent):
+            self.handle_event_type(event)
+        elif isinstance(event, WebStreamEvent):
+            self.handle_web_stream_type(event)
         else:
-            raise UnknownMediaItemType(str(media_item))
+            raise UnknownEvent(str(event))

-    def handle_file_type(self, media_item: FileEvent) -> None:
+    def handle_file_type(self, file_event: FileEvent) -> None:
         """
         Wait 200 seconds (2000 iterations) for file to become ready,
         otherwise give up on it.
         """
         iter_num = 0
-        while not media_item.get("file_ready", False) and iter_num < 2000:
+        while not file_event.file_ready and iter_num < 2000:
             time.sleep(0.1)
             iter_num += 1

-        if media_item.get("file_ready", False):
+        if file_event.file_ready:
             available_queue = self.find_available_queue()

             try:
-                self.telnet_liquidsoap.queue_push(available_queue, media_item)
-                self.liq_queue_tracker[available_queue] = media_item
+                self.telnet_liquidsoap.queue_push(available_queue, file_event)
+                self.liq_queue_tracker[available_queue] = file_event
             except Exception as exception:
                 logger.exception(exception)
                 raise exception
         else:
             logger.warning(
                 "File %s did not become ready in less than 5 seconds. Skipping...",
-                media_item["dst"],
+                file_event.local_filepath,
             )

-    def handle_event_type(self, media_item: ActionEvent) -> None:
-        if media_item["event_type"] == "kick_out":
+    def handle_web_stream_type(self, event: WebStreamEvent) -> None:
+        if event.type == EventKind.WEB_STREAM_BUFFER_START:
+            self.telnet_liquidsoap.start_web_stream_buffer(event)
+        elif event.type == EventKind.WEB_STREAM_OUTPUT_START:
+            if event.row_id != self.telnet_liquidsoap.current_prebuffering_stream_id:
+                # this is called if the stream wasn't scheduled sufficiently ahead of
+                # time so that the prebuffering stage could take effect. Let's do the
+                # prebuffering now.
+                self.telnet_liquidsoap.start_web_stream_buffer(event)
+            self.telnet_liquidsoap.start_web_stream()
+        elif event.type == EventKind.WEB_STREAM_BUFFER_END:
+            self.telnet_liquidsoap.stop_web_stream_buffer()
+        elif event.type == EventKind.WEB_STREAM_OUTPUT_END:
+            self.telnet_liquidsoap.stop_web_stream_output()
+
+    def handle_event_type(self, event: ActionEvent) -> None:
+        if event.event_type == "kick_out":
             self.telnet_liquidsoap.disconnect_source("live_dj")
-        elif media_item["event_type"] == "switch_off":
+        elif event.event_type == "switch_off":
             self.telnet_liquidsoap.switch_source("live_dj", "off")

     def is_media_item_finished(self, media_item: Optional[AnyEvent]) -> bool:
         if media_item is None:
             return True

-        return datetime.utcnow() > event_key_to_datetime(media_item["end"])
+        return datetime.utcnow() > media_item.end

     def find_available_queue(self) -> int:
         available_queue = None
@@ -131,25 +125,25 @@ class PypoLiquidsoap:
         try:
             scheduled_now_files: List[FileEvent] = [
-                x for x in scheduled_now if x["type"] == EventKind.FILE
+                x for x in scheduled_now if x.type == EventKind.FILE  # type: ignore
             ]

             scheduled_now_webstream: List[WebStreamEvent] = [
-                x
+                x  # type: ignore
                 for x in scheduled_now
-                if x["type"] == EventKind.WEB_STREAM_OUTPUT_START
+                if x.type == EventKind.WEB_STREAM_OUTPUT_START
             ]

-            schedule_ids: Set[int] = {x["row_id"] for x in scheduled_now_files}
+            schedule_ids: Set[int] = {x.row_id for x in scheduled_now_files}

-            row_id_map = {}
+            row_id_map: Dict[int, FileEvent] = {}
             liq_queue_ids: Set[int] = set()
             for queue_item in self.liq_queue_tracker.values():
                 if queue_item is not None and not self.is_media_item_finished(
                     queue_item
                 ):
-                    liq_queue_ids.add(queue_item["row_id"])
-                    row_id_map[queue_item["row_id"]] = queue_item
+                    liq_queue_ids.add(queue_item.row_id)
+                    row_id_map[queue_item.row_id] = queue_item

             to_be_removed: Set[int] = set()
             to_be_added: Set[int] = set()
@@ -159,21 +153,18 @@ class PypoLiquidsoap:
             # have different attributes. Ff replay gain changes, it shouldn't change the
             # amplification of the currently playing song
             for item in scheduled_now_files:
-                if item["row_id"] in row_id_map:
-                    queue_item = row_id_map[item["row_id"]]
-                    assert queue_item is not None
+                if item.row_id in row_id_map:
+                    queue_item = row_id_map[item.row_id]

-                    correct = (
-                        queue_item["start"] == item["start"]
-                        and queue_item["end"] == item["end"]
-                        and queue_item["row_id"] == item["row_id"]
-                    )
-
-                    if not correct:
+                    if not (
+                        queue_item.start == item.start
+                        and queue_item.end == item.end
+                        and queue_item.row_id == item.row_id
+                    ):
                         # need to re-add
                         logger.info("Track %s found to have new attr.", item)
-                        to_be_removed.add(item["row_id"])
-                        to_be_added.add(item["row_id"])
+                        to_be_removed.add(item.row_id)
+                        to_be_added.add(item.row_id)

             to_be_removed.update(liq_queue_ids - schedule_ids)
             to_be_added.update(schedule_ids - liq_queue_ids)
@@ -183,17 +174,14 @@ class PypoLiquidsoap:
             # remove files from Liquidsoap's queue
             for queue_id, queue_item in self.liq_queue_tracker.items():
-                if (
-                    queue_item is not None
-                    and queue_item.get("row_id") in to_be_removed
-                ):
+                if queue_item is not None and queue_item.row_id in to_be_removed:
                     self.stop(queue_id)

             if to_be_added:
                 logger.info("Need to add items to Liquidsoap *now*: %s", to_be_added)

                 for item in scheduled_now_files:
-                    if item["row_id"] in to_be_added:
+                    if item.row_id in to_be_added:
                         self.modify_cue_point(item)
                         self.play(item)
@@ -204,7 +192,7 @@ class PypoLiquidsoap:
             logger.debug("scheduled now webstream: %s", scheduled_now_webstream)

             if scheduled_now_webstream:
-                if int(current_stream_id) != int(scheduled_now_webstream[0]["row_id"]):
+                if int(current_stream_id) != int(scheduled_now_webstream[0].row_id):
                     self.play(scheduled_now_webstream[0])
             elif current_stream_id != "-1":
                 # something is playing and it shouldn't be.
@@ -217,31 +205,25 @@ class PypoLiquidsoap:
             self.telnet_liquidsoap.queue_remove(queue_id)
             self.liq_queue_tracker[queue_id] = None

-    def is_file(self, event: AnyEvent) -> bool:
-        return event["type"] == EventKind.FILE
-
     def clear_queue_tracker(self) -> None:
         for queue_id in self.liq_queue_tracker:
             self.liq_queue_tracker[queue_id] = None

-    def modify_cue_point(self, link: FileEvent) -> None:
-        assert self.is_file(link)
+    def modify_cue_point(self, file_event: FileEvent) -> None:
+        assert file_event.type == EventKind.FILE

-        lateness = seconds_between(
-            event_key_to_datetime(link["start"]),
-            datetime.utcnow(),
-        )
+        lateness = seconds_between(file_event.start, datetime.utcnow())

         if lateness > 0:
             logger.debug("media item was supposed to start %ss ago", lateness)
-            cue_in_orig = timedelta(seconds=float(link["cue_in"]))
-            link["cue_in"] = cue_in_orig.total_seconds() + lateness
+            cue_in_orig = timedelta(seconds=file_event.cue_in)
+            file_event.cue_in = cue_in_orig.total_seconds() + lateness

     def clear_all_queues(self) -> None:
         self.telnet_liquidsoap.queue_clear_all()


-class UnknownMediaItemType(Exception):
+class UnknownEvent(Exception):
     pass

View File

@@ -1,9 +1,9 @@
 import logging
-from typing import List
+from typing import List, Optional

 from ..liquidsoap.client import LiquidsoapClient
 from ..timeout import ls_timeout
-from .events import FileEvent
+from .events import FileEvent, WebStreamEvent

 logger = logging.getLogger(__name__)
@@ -12,36 +12,35 @@ def create_liquidsoap_annotation(file_event: FileEvent) -> str:
     # We need liq_start_next value in the annotate. That is the value that controls
     # overlap duration of crossfade.
     annotations = {
-        "media_id": file_event["id"],
+        "media_id": file_event.id,
+        "schedule_table_id": file_event.row_id,
         "liq_start_next": "0",
-        "liq_fade_in": float(file_event["fade_in"]) / 1000,
-        "liq_fade_out": float(file_event["fade_out"]) / 1000,
-        "liq_cue_in": float(file_event["cue_in"]),
-        "liq_cue_out": float(file_event["cue_out"]),
-        "schedule_table_id": file_event["row_id"],
-        "replay_gain": f"{file_event['replay_gain']} dB",
+        "liq_fade_in": file_event.fade_in / 1000,
+        "liq_fade_out": file_event.fade_out / 1000,
+        "liq_cue_in": file_event.cue_in,
+        "liq_cue_out": file_event.cue_out,
     }

+    if file_event.replay_gain is not None:
+        annotations["replay_gain"] = f"{file_event.replay_gain} dB"
+
     # Override the the artist/title that Liquidsoap extracts from a file's metadata with
     # the metadata we get from Airtime. (You can modify metadata in Airtime's library,
     # which doesn't get saved back to the file.)
-    if "metadata" in file_event:
-        if "artist_name" in file_event["metadata"]:
-            artist_name = file_event["metadata"]["artist_name"]
-            if artist_name:
-                annotations["artist"] = artist_name.replace('"', '\\"')
+    if file_event.artist_name:
+        annotations["artist"] = file_event.artist_name.replace('"', '\\"')

-        if "track_title" in file_event["metadata"]:
-            track_title = file_event["metadata"]["track_title"]
-            if track_title:
-                annotations["title"] = track_title.replace('"', '\\"')
+    if file_event.track_title:
+        annotations["title"] = file_event.track_title.replace('"', '\\"')

     annotations_str = ",".join(f'{key}="{value}"' for key, value in annotations.items())

-    return "annotate:" + annotations_str + ":" + file_event["dst"]
+    return "annotate:" + annotations_str + ":" + str(file_event.local_filepath)


 class TelnetLiquidsoap:
+    current_prebuffering_stream_id: Optional[int] = None
+
     def __init__(
         self,
         liq_client: LiquidsoapClient,
@@ -49,7 +48,6 @@ class TelnetLiquidsoap:
     ):
         self.liq_client = liq_client
         self.queues = queues
-        self.current_prebuffering_stream_id = None

     @ls_timeout
     def queue_clear_all(self):
@@ -66,10 +64,10 @@ class TelnetLiquidsoap:
             logger.exception(exception)

     @ls_timeout
-    def queue_push(self, queue_id: int, media_item: FileEvent):
+    def queue_push(self, queue_id: int, file_event: FileEvent):
         try:
-            annotation = create_liquidsoap_annotation(media_item)
-            self.liq_client.queue_push(queue_id, annotation, media_item["show_name"])
+            annotation = create_liquidsoap_annotation(file_event)
+            self.liq_client.queue_push(queue_id, annotation, file_event.show_name)
         except (ConnectionError, TimeoutError) as exception:
             logger.exception(exception)
@@ -96,13 +94,10 @@ class TelnetLiquidsoap:
             logger.exception(exception)

     @ls_timeout
-    def start_web_stream_buffer(self, media_item):
+    def start_web_stream_buffer(self, event: WebStreamEvent):
         try:
-            self.liq_client.web_stream_start_buffer(
-                media_item["row_id"],
-                media_item["uri"],
-            )
-            self.current_prebuffering_stream_id = media_item["row_id"]
+            self.liq_client.web_stream_start_buffer(event.row_id, event.uri)
+            self.current_prebuffering_stream_id = event.row_id
         except (ConnectionError, TimeoutError) as exception:
             logger.exception(exception)

View File

@@ -7,21 +7,13 @@ from threading import Thread
 from typing import List, Tuple

 from ..config import PUSH_INTERVAL, Config
-from .events import AnyEvent, EventKind, Events, event_key_to_datetime
+from .events import AnyEvent, Events, FileEvent
 from .liquidsoap import PypoLiquidsoap
 from .queue import PypoLiqQueue

 logger = logging.getLogger(__name__)


-def is_stream(media_item: AnyEvent) -> bool:
-    return media_item["type"] == "stream_output_start"
-
-
-def is_file(media_item: AnyEvent) -> bool:
-    return media_item["type"] == "file"
-
-
 class PypoPush(Thread):
     name = "push"
     daemon = True
@@ -81,14 +73,11 @@ class PypoPush(Thread):
             item = events[key]

             # Ignore track that already ended
-            if (
-                item["type"] == EventKind.FILE
-                and event_key_to_datetime(item["end"]) < now
-            ):
+            if isinstance(item, FileEvent) and item.end < now:
                 logger.debug("ignoring ended media_item: %s", item)
                 continue

-            diff_sec = (now - event_key_to_datetime(item["start"])).total_seconds()
+            diff_sec = (now - item.start).total_seconds()

             if diff_sec >= 0:
                 logger.debug("adding media_item to present: %s", item)

View File

@@ -6,7 +6,7 @@ from threading import Thread
 from typing import Any, Dict

 from ..utils import seconds_between
-from .events import AnyEvent, event_key_to_datetime
+from .events import AnyEvent
 from .liquidsoap import PypoLiquidsoap

 logger = logging.getLogger(__name__)
@@ -49,7 +49,7 @@ class PypoLiqQueue(Thread):
         if len(schedule_deque):
             time_until_next_play = seconds_between(
                 datetime.utcnow(),
-                event_key_to_datetime(schedule_deque[0]["start"]),
+                schedule_deque[0].start,
             )
         else:
             time_until_next_play = None
@@ -66,7 +66,7 @@ class PypoLiqQueue(Thread):
                 if len(keys):
                     time_until_next_play = seconds_between(
                         datetime.utcnow(),
-                        media_schedule[keys[0]]["start"],
+                        media_schedule[keys[0]].start,
                     )
                 else:

View File

@@ -2,7 +2,6 @@ from datetime import datetime, time, timedelta
 from operator import itemgetter
 from typing import Dict

-from dateutil.parser import isoparse
 from libretime_api_client.v2 import ApiClient
 from libretime_shared.datetime import time_in_milliseconds, time_in_seconds
@@ -15,6 +14,8 @@ from .events import (
     FileEvent,
     WebStreamEvent,
     datetime_to_event_key,
+    event_isoparse,
+    parse_any_event,
 )
@@ -54,15 +55,15 @@ def get_schedule(api_client: ApiClient) -> Events:

     events: Dict[str, AnyEvent] = {}
     for item in sorted(schedule, key=itemgetter("starts_at")):
-        item["starts_at"] = isoparse(item["starts_at"])
-        item["ends_at"] = isoparse(item["ends_at"])
+        item["starts_at"] = event_isoparse(item["starts_at"])
+        item["ends_at"] = event_isoparse(item["ends_at"])

         show_instance = api_client.get_show_instance(item["instance"]).json()
         show = api_client.get_show(show_instance["show"]).json()

         if show["live_enabled"]:
-            show_instance["starts_at"] = isoparse(show_instance["starts_at"])
-            show_instance["ends_at"] = isoparse(show_instance["ends_at"])
+            show_instance["starts_at"] = event_isoparse(show_instance["starts_at"])
+            show_instance["ends_at"] = event_isoparse(show_instance["ends_at"])
             generate_live_events(
                 events,
                 show_instance,
@@ -87,26 +88,28 @@ def generate_live_events(
 ):
     transition = timedelta(seconds=input_fade_transition)

-    switch_off_event_key = datetime_to_event_key(show_instance["ends_at"] - transition)
-    kick_out_event_key = datetime_to_event_key(show_instance["ends_at"])
+    switch_off = show_instance["ends_at"] - transition
+    kick_out = show_instance["ends_at"]
+    switch_off_event_key = datetime_to_event_key(switch_off)
+    kick_out_event_key = datetime_to_event_key(kick_out)

     # If enabled, fade the input source out
-    if switch_off_event_key != kick_out_event_key:
-        switch_off_event: ActionEvent = {
-            "type": EventKind.ACTION,
-            "event_type": "switch_off",
-            "start": switch_off_event_key,
-            "end": switch_off_event_key,
-        }
+    if switch_off != kick_out:
+        switch_off_event = ActionEvent(
+            type=EventKind.ACTION,
+            event_type="switch_off",
+            start=switch_off,
+            end=switch_off,
+        )

         insert_event(events, switch_off_event_key, switch_off_event)

     # Then kick the source out
-    kick_out_event: ActionEvent = {
-        "type": EventKind.ACTION,
-        "event_type": "kick_out",
-        "start": kick_out_event_key,
-        "end": kick_out_event_key,
-    }
+    kick_out_event = ActionEvent(
+        type=EventKind.ACTION,
+        event_type="kick_out",
+        start=kick_out,
+        end=kick_out,
+    )

     insert_event(events, kick_out_event_key, kick_out_event)
@@ -119,32 +122,28 @@ def generate_file_events(
     """
     Generate events for a scheduled file.
     """
-    schedule_start_event_key = datetime_to_event_key(schedule["starts_at"])
-    schedule_end_event_key = datetime_to_event_key(schedule["ends_at"])
-
-    event: FileEvent = {
-        "type": EventKind.FILE,
-        "row_id": schedule["id"],
-        "start": schedule_start_event_key,
-        "end": schedule_end_event_key,
-        "uri": file["url"],
-        "id": file["id"],
+    event = FileEvent(
+        type=EventKind.FILE,
+        row_id=schedule["id"],
+        start=schedule["starts_at"],
+        end=schedule["ends_at"],
+        uri=file["url"],
+        id=file["id"],
         # Show data
-        "show_name": show["name"],
+        show_name=show["name"],
         # Extra data
-        "fade_in": time_in_milliseconds(time.fromisoformat(schedule["fade_in"])),
-        "fade_out": time_in_milliseconds(time.fromisoformat(schedule["fade_out"])),
-        "cue_in": time_in_seconds(time.fromisoformat(schedule["cue_in"])),
-        "cue_out": time_in_seconds(time.fromisoformat(schedule["cue_out"])),
-        "metadata": {
-            "track_title": file["track_title"],
-            "artist_name": file["artist_name"],
-            "mime": file["mime"],
-        },
-        "replay_gain": file["replay_gain"],
-        "filesize": file["size"],
-    }
-    insert_event(events, schedule_start_event_key, event)
+        fade_in=time_in_milliseconds(time.fromisoformat(schedule["fade_in"])),
+        fade_out=time_in_milliseconds(time.fromisoformat(schedule["fade_out"])),
+        cue_in=time_in_seconds(time.fromisoformat(schedule["cue_in"])),
+        cue_out=time_in_seconds(time.fromisoformat(schedule["cue_out"])),
+        # File data
+        track_title=file.get("track_title"),
+        artist_name=file.get("artist_name"),
+        mime=file["mime"],
+        replay_gain=file["replay_gain"],
+        filesize=file["size"],
+    )
+    insert_event(events, event.start_key, event)


 def generate_webstream_events(
@@ -159,46 +158,61 @@ def generate_webstream_events(
     schedule_start_event_key = datetime_to_event_key(schedule["starts_at"])
     schedule_end_event_key = datetime_to_event_key(schedule["ends_at"])

-    stream_buffer_start_event: WebStreamEvent = {
-        "type": EventKind.WEB_STREAM_BUFFER_START,
-        "row_id": schedule["id"],
-        "start": datetime_to_event_key(schedule["starts_at"] - timedelta(seconds=5)),
-        "end": datetime_to_event_key(schedule["starts_at"] - timedelta(seconds=5)),
-        "uri": webstream["url"],
-        "id": webstream["id"],
-    }
+    stream_buffer_start_event = WebStreamEvent(
+        type=EventKind.WEB_STREAM_BUFFER_START,
+        row_id=schedule["id"],
+        start=schedule["starts_at"] - timedelta(seconds=5),
+        end=schedule["starts_at"] - timedelta(seconds=5),
+        uri=webstream["url"],
+        id=webstream["id"],
+        # Show data
+        show_name=show["name"],
+    )
     insert_event(events, schedule_start_event_key, stream_buffer_start_event)

-    stream_output_start_event: WebStreamEvent = {
-        "type": EventKind.WEB_STREAM_OUTPUT_START,
-        "row_id": schedule["id"],
-        "start": schedule_start_event_key,
-        "end": schedule_end_event_key,
-        "uri": webstream["url"],
-        "id": webstream["id"],
+    stream_output_start_event = WebStreamEvent(
+        type=EventKind.WEB_STREAM_OUTPUT_START,
+        row_id=schedule["id"],
+        start=schedule["starts_at"],
+        end=schedule["ends_at"],
+        uri=webstream["url"],
+        id=webstream["id"],
         # Show data
-        "show_name": show["name"],
-    }
+        show_name=show["name"],
+    )
     insert_event(events, schedule_start_event_key, stream_output_start_event)

     # NOTE: stream_*_end were previously triggered 1 second before
     # the schedule end.
-    stream_buffer_end_event: WebStreamEvent = {
-        "type": EventKind.WEB_STREAM_BUFFER_END,
-        "row_id": schedule["id"],
-        "start": schedule_end_event_key,
-        "end": schedule_end_event_key,
-        "uri": webstream["url"],
-        "id": webstream["id"],
-    }
+    stream_buffer_end_event = WebStreamEvent(
+        type=EventKind.WEB_STREAM_BUFFER_END,
+        row_id=schedule["id"],
+        start=schedule["ends_at"],
+        end=schedule["ends_at"],
+        uri=webstream["url"],
+        id=webstream["id"],
+        # Show data
+        show_name=show["name"],
+    )
     insert_event(events, schedule_end_event_key, stream_buffer_end_event)

-    stream_output_end_event: WebStreamEvent = {
-        "type": EventKind.WEB_STREAM_OUTPUT_END,
-        "row_id": schedule["id"],
-        "start": schedule_end_event_key,
-        "end": schedule_end_event_key,
-        "uri": webstream["url"],
-        "id": webstream["id"],
-    }
+    stream_output_end_event = WebStreamEvent(
+        type=EventKind.WEB_STREAM_OUTPUT_END,
+        row_id=schedule["id"],
+        start=schedule["ends_at"],
+        end=schedule["ends_at"],
+        uri=webstream["url"],
+        id=webstream["id"],
+        # Show data
+        show_name=show["name"],
+    )
     insert_event(events, schedule_end_event_key, stream_output_end_event)
+
+
+def receive_schedule(schedule: Dict[str, dict]) -> Events:
+    events: Dict[str, AnyEvent] = {}
+    for event_key, event in schedule.items():
+        events[event_key] = parse_any_event(event)
+
+    return events

View File

@@ -1,33 +1,45 @@
+from datetime import datetime
+from pathlib import Path
+from unittest import mock
+
+from dateutil.tz import tzutc
+
 from libretime_playout.player.events import EventKind, FileEvent
 from libretime_playout.player.liquidsoap_gateway import create_liquidsoap_annotation


+@mock.patch("libretime_playout.player.events.CACHE_DIR", Path("/fake"))
 def test_create_liquidsoap_annotation():
-    file_event: FileEvent = {
-        "type": EventKind.FILE,
-        "row_id": 1,
-        "start": "2022-09-05-11-00-00",
-        "end": "2022-09-05-11-05-02",
-        "uri": None,
-        "id": 2,
-        "show_name": "Show 1",
-        "fade_in": 500.0,
-        "fade_out": 500.0,
-        "cue_in": 13.7008,
-        "cue_out": 315.845,
-        "metadata": {
-            "track_title": 'My Friend the "Forest"',
-            "artist_name": "Nils Frahm",
-            "mime": "audio/flac",
-        },
-        "replay_gain": "11.46",
-        "filesize": 10000,
-        "dst": "fake/path.flac",
-    }
+    file_event = FileEvent(
+        type=EventKind.FILE,
+        row_id=1,
+        start=datetime(2022, 9, 5, 11, tzinfo=tzutc()),
+        end=datetime(2022, 9, 5, 11, 5, 2, tzinfo=tzutc()),
+        uri=None,
+        id=2,
+        show_name="Show 1",
+        fade_in=500.0,
+        fade_out=500.0,
+        cue_in=13.7008,
+        cue_out=315.845,
+        track_title='My Friend the "Forest"',
+        artist_name="Nils Frahm",
+        mime="audio/flac",
+        replay_gain=11.46,
+        filesize=10000,
+    )

     assert create_liquidsoap_annotation(file_event) == (
-        """annotate:media_id="2",liq_start_next="0",liq_fade_in="0.5","""
-        """liq_fade_out="0.5",liq_cue_in="13.7008",liq_cue_out="315.845","""
-        """schedule_table_id="1",replay_gain="11.46 dB",artist="Nils Frahm","""
-        """title="My Friend the \\"Forest\\"":fake/path.flac"""
+        "annotate:"
+        'media_id="2",'
+        'schedule_table_id="1",'
+        'liq_start_next="0",'
+        'liq_fade_in="0.5",'
+        'liq_fade_out="0.5",'
+        'liq_cue_in="13.7008",'
+        'liq_cue_out="315.845",'
+        'replay_gain="11.46 dB",'
+        'artist="Nils Frahm",'
+        'title="My Friend the \\"Forest\\""'
+        ":/fake/2.flac"
     )

View File

@@ -1,11 +1,16 @@
 import random
-from datetime import timedelta
+from datetime import datetime

 import pytest
-from dateutil.parser import isoparse
 from libretime_api_client.v2 import ApiClient

-from libretime_playout.player.events import EventKind
+from libretime_playout.player.events import (
+    ActionEvent,
+    EventKind,
+    FileEvent,
+    WebStreamEvent,
+    event_isoparse,
+)
 from libretime_playout.player.schedule import (
     generate_file_events,
     generate_live_events,
@@ -268,110 +273,112 @@ SCHEDULE = [
 def test_generate_live_events():
     show_instance_3 = SHOW_INSTANCE_3.copy()
-    show_instance_3["starts_at"] = isoparse(show_instance_3["starts_at"])
-    show_instance_3["ends_at"] = isoparse(show_instance_3["ends_at"])
+    show_instance_3["starts_at"] = event_isoparse(show_instance_3["starts_at"])
+    show_instance_3["ends_at"] = event_isoparse(show_instance_3["ends_at"])

     result = {}
     generate_live_events(result, show_instance_3, 0.0)
     assert result == {
-        "2022-09-05-13-00-00": {
-            "type": EventKind.ACTION,
-            "event_type": "kick_out",
-            "start": "2022-09-05-13-00-00",
-            "end": "2022-09-05-13-00-00",
-        }
+        "2022-09-05-13-00-00": ActionEvent(
+            start=datetime(2022, 9, 5, 13, 0),
+            end=datetime(2022, 9, 5, 13, 0),
+            type=EventKind.ACTION,
+            event_type="kick_out",
+        ),
     }

     result = {}
     generate_live_events(result, show_instance_3, 2.0)
     assert result == {
-        "2022-09-05-12-59-58": {
-            "type": EventKind.ACTION,
-            "event_type": "switch_off",
-            "start": "2022-09-05-12-59-58",
-            "end": "2022-09-05-12-59-58",
-        },
-        "2022-09-05-13-00-00": {
-            "type": EventKind.ACTION,
-            "event_type": "kick_out",
-            "start": "2022-09-05-13-00-00",
-            "end": "2022-09-05-13-00-00",
-        },
+        "2022-09-05-12-59-58": ActionEvent(
+            start=datetime(2022, 9, 5, 12, 59, 58),
+            end=datetime(2022, 9, 5, 12, 59, 58),
+            type=EventKind.ACTION,
+            event_type="switch_off",
+        ),
+        "2022-09-05-13-00-00": ActionEvent(
+            start=datetime(2022, 9, 5, 13, 0),
+            end=datetime(2022, 9, 5, 13, 0),
+            type=EventKind.ACTION,
+            event_type="kick_out",
+        ),
     }


 def test_generate_file_events():
     schedule_1 = SCHEDULE_1.copy()
-    schedule_1["starts_at"] = isoparse(schedule_1["starts_at"])
-    schedule_1["ends_at"] = isoparse(schedule_1["ends_at"])
+    schedule_1["starts_at"] = event_isoparse(schedule_1["starts_at"])
+    schedule_1["ends_at"] = event_isoparse(schedule_1["ends_at"])

     result = {}
     generate_file_events(result, schedule_1, FILE_2, SHOW_1)
     assert result == {
-        "2022-09-05-11-00-00": {
-            "type": EventKind.FILE,
-            "row_id": 1,
-            "start": "2022-09-05-11-00-00",
-            "end": "2022-09-05-11-05-02",
-            "uri": None,
-            "id": 2,
-            "show_name": "Show 1",
-            "fade_in": 500.0,
-            "fade_out": 500.0,
-            "cue_in": 13.7008,
-            "cue_out": 315.845,
-            "metadata": {
-                "track_title": "My Friend the Forest",
-                "artist_name": "Nils Frahm",
-                "mime": "audio/flac",
-            },
-            "replay_gain": "11.46",
-            "filesize": 10000,
-        }
+        "2022-09-05-11-00-00": FileEvent(
+            start=datetime(2022, 9, 5, 11, 0),
+            end=datetime(2022, 9, 5, 11, 5, 2),
+            type=EventKind.FILE,
+            row_id=1,
+            uri=None,
+            id=2,
+            show_name="Show 1",
+            fade_in=500.0,
+            fade_out=500.0,
+            cue_in=13.7008,
+            cue_out=315.845,
+            track_title="My Friend the Forest",
+            artist_name="Nils Frahm",
+            mime="audio/flac",
+            replay_gain=11.46,
+            filesize=10000,
+            file_ready=False,
+        )
     }


 def test_generate_webstream_events():
     schedule_5 = SCHEDULE_5.copy()
-    schedule_5["starts_at"] = isoparse(schedule_5["starts_at"])
-    schedule_5["ends_at"] = isoparse(schedule_5["ends_at"])
+    schedule_5["starts_at"] = event_isoparse(schedule_5["starts_at"])
+    schedule_5["ends_at"] = event_isoparse(schedule_5["ends_at"])

     result = {}
     generate_webstream_events(result, schedule_5, WEBSTREAM_1, SHOW_3)
     assert result == {
-        "2022-09-05-12-10-00": {
-            "type": EventKind.WEB_STREAM_BUFFER_START,
-            "row_id": 5,
-            "start": "2022-09-05-12-09-55",
-            "end": "2022-09-05-12-09-55",
-            "uri": "http://stream.radio.org/main.ogg",
-            "id": 1,
-        },
-        "2022-09-05-12-10-00_0": {
-            "type": EventKind.WEB_STREAM_OUTPUT_START,
-            "row_id": 5,
-            "start": "2022-09-05-12-10-00",
-            "end": "2022-09-05-12-40-00",
-            "uri": "http://stream.radio.org/main.ogg",
-            "id": 1,
-            "show_name": "Show 3",
-        },
-        "2022-09-05-12-40-00": {
-            "type": EventKind.WEB_STREAM_BUFFER_END,
-            "row_id": 5,
-            "start": "2022-09-05-12-40-00",
-            "end": "2022-09-05-12-40-00",
-            "uri": "http://stream.radio.org/main.ogg",
-            "id": 1,
-        },
-        "2022-09-05-12-40-00_0": {
-            "type": EventKind.WEB_STREAM_OUTPUT_END,
-            "row_id": 5,
-            "start": "2022-09-05-12-40-00",
-            "end": "2022-09-05-12-40-00",
-            "uri": "http://stream.radio.org/main.ogg",
-            "id": 1,
-        },
+        "2022-09-05-12-10-00": WebStreamEvent(
+            start=datetime(2022, 9, 5, 12, 9, 55),
+            end=datetime(2022, 9, 5, 12, 9, 55),
+            type=EventKind.WEB_STREAM_BUFFER_START,
+            row_id=5,
+            uri="http://stream.radio.org/main.ogg",
+            id=1,
+            show_name="Show 3",
+        ),
+        "2022-09-05-12-10-00_0": WebStreamEvent(
+            start=datetime(2022, 9, 5, 12, 10),
+            end=datetime(2022, 9, 5, 12, 40),
+            type=EventKind.WEB_STREAM_OUTPUT_START,
+            row_id=5,
+            uri="http://stream.radio.org/main.ogg",
+            id=1,
+            show_name="Show 3",
+        ),
+        "2022-09-05-12-40-00": WebStreamEvent(
+            start=datetime(2022, 9, 5, 12, 40),
+            end=datetime(2022, 9, 5, 12, 40),
+            type=EventKind.WEB_STREAM_BUFFER_END,
+            row_id=5,
+            uri="http://stream.radio.org/main.ogg",
+            id=1,
+            show_name="Show 3",
+        ),
+        "2022-09-05-12-40-00_0": WebStreamEvent(
+            start=datetime(2022, 9, 5, 12, 40),
+            end=datetime(2022, 9, 5, 12, 40),
+            type=EventKind.WEB_STREAM_OUTPUT_END,
+            row_id=5,
+            uri="http://stream.radio.org/main.ogg",
+            id=1,
+            show_name="Show 3",
+        ),
     }
@@ -412,229 +419,223 @@ def test_get_schedule(schedule, requests_mock, api_client: ApiClient):
     requests_mock.get(f"{base_url}/api/v2/webstreams/1", json=WEBSTREAM_1)

     assert get_schedule(api_client) == {
-        "2022-09-05-11-00-00": {
-            "type": EventKind.FILE,
-            "row_id": 1,
-            "start": "2022-09-05-11-00-00",
-            "end": "2022-09-05-11-05-02",
-            "uri": None,
-            "id": 2,
-            "show_name": "Show 1",
-            "fade_in": 500.0,
-            "fade_out": 500.0,
-            "cue_in": 13.7008,
-            "cue_out": 315.845,
-            "metadata": {
-                "track_title": "My Friend the Forest",
-                "artist_name": "Nils Frahm",
-                "mime": "audio/flac",
-            },
-            "replay_gain": "11.46",
-            "filesize": 10000,
-        },
+        "2022-09-05-11-00-00": FileEvent(
+            start=datetime(2022, 9, 5, 11, 0),
+            end=datetime(2022, 9, 5, 11, 5, 2),
+            type=EventKind.FILE,
+            row_id=1,
+            uri=None,
+            id=2,
+            show_name="Show 1",
+            fade_in=500.0,
+            fade_out=500.0,
+            cue_in=13.7008,
+            cue_out=315.845,
+            track_title="My Friend the Forest",
+            artist_name="Nils Frahm",
+            mime="audio/flac",
+            replay_gain=11.46,
+            filesize=10000,
+            file_ready=False,
+        ),
-        "2022-09-05-11-05-02": {
-            "type": EventKind.FILE,
-            "row_id": 2,
-            "start": "2022-09-05-11-05-02",
-            "end": "2022-09-05-11-10-00",
-            "uri": None,
-            "id": 4,
-            "show_name": "Show 1",
-            "fade_in": 500.0,
-            "fade_out": 500.0,
-            "cue_in": 0.0,
-            "cue_out": 297.8558,
-            "metadata": {
-                "track_title": "#2",
-                "artist_name": "Nils Frahm",
-                "mime": "audio/flac",
-            },
-            "replay_gain": "-1.65",
-            "filesize": 10000,
-        },
+        "2022-09-05-11-05-02": FileEvent(
+            start=datetime(2022, 9, 5, 11, 5, 2),
+            end=datetime(2022, 9, 5, 11, 10),
+            type=EventKind.FILE,
+            row_id=2,
+            uri=None,
+            id=4,
+            show_name="Show 1",
+            fade_in=500.0,
+            fade_out=500.0,
+            cue_in=0.0,
+            cue_out=297.8558,
+            track_title="#2",
+            artist_name="Nils Frahm",
+            mime="audio/flac",
+            replay_gain=-1.65,
+            filesize=10000,
+            file_ready=False,
+        ),
-        "2022-09-05-11-10-00": {
-            "type": EventKind.FILE,
-            "row_id": 3,
-            "start": "2022-09-05-11-10-00",
-            "end": "2022-09-05-12-08-59",
-            "uri": None,
-            "id": 5,
-            "show_name": "Show 2",
-            "fade_in": 500.0,
-            "fade_out": 500.0,
-            "cue_in": 0.0,
-            "cue_out": 3539.13,
-            "metadata": {
-                "track_title": "Democracy Now! 2022-09-05 Monday",
-                "artist_name": "Democracy Now! Audio",
-                "mime": "audio/mp3",
-            },
-            "replay_gain": "-1.39",
-            "filesize": 10000,
-        },
+        "2022-09-05-11-10-00": FileEvent(
+            start=datetime(2022, 9, 5, 11, 10),
+            end=datetime(2022, 9, 5, 12, 8, 59),
+            type=EventKind.FILE,
+            row_id=3,
+            uri=None,
+            id=5,
+            show_name="Show 2",
+            fade_in=500.0,
+            fade_out=500.0,
+            cue_in=0.0,
+            cue_out=3539.13,
+            track_title="Democracy Now! 2022-09-05 Monday",
+            artist_name="Democracy Now! Audio",
+            mime="audio/mp3",
+            replay_gain=-1.39,
+            filesize=10000,
+            file_ready=False,
+        ),
-        "2022-09-05-12-08-59": {
-            "type": EventKind.FILE,
-            "row_id": 4,
-            "start": "2022-09-05-12-08-59",
-            "end": "2022-09-05-12-10-00",
-            "uri": None,
-            "id": 4,
-            "show_name": "Show 2",
-            "fade_in": 500.0,
-            "fade_out": 500.0,
-            "cue_in": 0.0,
-            "cue_out": 61.0,
-            "metadata": {
-                "track_title": "#2",
-                "artist_name": "Nils Frahm",
-                "mime": "audio/flac",
-            },
-            "replay_gain": "-1.65",
-            "filesize": 10000,
-        },
+        "2022-09-05-12-08-59": FileEvent(
+            start=datetime(2022, 9, 5, 12, 8, 59),
+            end=datetime(2022, 9, 5, 12, 10),
+            type=EventKind.FILE,
+            row_id=4,
+            uri=None,
+            id=4,
+            show_name="Show 2",
+            fade_in=500.0,
+            fade_out=500.0,
+            cue_in=0.0,
+            cue_out=61.0,
+            track_title="#2",
+            artist_name="Nils Frahm",
+            mime="audio/flac",
+            replay_gain=-1.65,
+            filesize=10000,
+            file_ready=False,
+        ),
-        "2022-09-05-12-10-00": {
-            "type": EventKind.WEB_STREAM_BUFFER_START,
-            "row_id": 5,
-            "start": "2022-09-05-12-09-55",
-            "end": "2022-09-05-12-09-55",
-            "uri": "http://stream.radio.org/main.ogg",
-            "id": 1,
-        },
+        "2022-09-05-12-10-00": WebStreamEvent(
+            start=datetime(2022, 9, 5, 12, 9, 55),
+            end=datetime(2022, 9, 5, 12, 9, 55),
+            type=EventKind.WEB_STREAM_BUFFER_START,
+            row_id=5,
+            uri="http://stream.radio.org/main.ogg",
+            id=1,
+            show_name="Show 3",
+        ),
-        "2022-09-05-12-10-00_0": {
-            "type": EventKind.WEB_STREAM_OUTPUT_START,
-            "row_id": 5,
-            "start": "2022-09-05-12-10-00",
-            "end": "2022-09-05-12-40-00",
-            "uri": "http://stream.radio.org/main.ogg",
-            "id": 1,
-            "show_name": "Show 3",
-        },
+        "2022-09-05-12-10-00_0": WebStreamEvent(
+            start=datetime(2022, 9, 5, 12, 10),
+            end=datetime(2022, 9, 5, 12, 40),
+            type=EventKind.WEB_STREAM_OUTPUT_START,
+            row_id=5,
+            uri="http://stream.radio.org/main.ogg",
+            id=1,
+            show_name="Show 3",
+        ),
-        "2022-09-05-12-40-00": {
-            "type": EventKind.WEB_STREAM_BUFFER_END,
-            "row_id": 5,
-            "start": "2022-09-05-12-40-00",
-            "end": "2022-09-05-12-40-00",
-            "uri": "http://stream.radio.org/main.ogg",
-            "id": 1,
-        },
+        "2022-09-05-12-40-00": WebStreamEvent(
+            start=datetime(2022, 9, 5, 12, 40),
+            end=datetime(2022, 9, 5, 12, 40),
+            type=EventKind.WEB_STREAM_BUFFER_END,
+            row_id=5,
+            uri="http://stream.radio.org/main.ogg",
+            id=1,
+            show_name="Show 3",
+        ),
-        "2022-09-05-12-40-00_0": {
-            "type": EventKind.WEB_STREAM_OUTPUT_END,
-            "row_id": 5,
-            "start": "2022-09-05-12-40-00",
-            "end": "2022-09-05-12-40-00",
-            "uri": "http://stream.radio.org/main.ogg",
-            "id": 1,
-        },
+        "2022-09-05-12-40-00_0": WebStreamEvent(
+            start=datetime(2022, 9, 5, 12, 40),
+            end=datetime(2022, 9, 5, 12, 40),
+            type=EventKind.WEB_STREAM_OUTPUT_END,
+            row_id=5,
+            uri="http://stream.radio.org/main.ogg",
+            id=1,
+            show_name="Show 3",
+        ),
-        "2022-09-05-12-40-00_1": {
-            "type": EventKind.FILE,
-            "row_id": 6,
-            "start": "2022-09-05-12-40-00",
-            "end": "2022-09-05-12-53-23",
-            "uri": None,
-            "id": 3,
-            "show_name": "Show 3",
-            "fade_in": 500.0,
-            "fade_out": 500.0,
-            "cue_in": 55.1211,
-            "cue_out": 858.4,
-            "metadata": {
-                "track_title": "All Melody",
-                "artist_name": "Nils Frahm",
-                "mime": "audio/flac",
-            },
-            "replay_gain": "-2.13",
-            "filesize": 10000,
-        },
+        "2022-09-05-12-40-00_1": FileEvent(
+            start=datetime(2022, 9, 5, 12, 40),
+            end=datetime(2022, 9, 5, 12, 53, 23),
+            type=EventKind.FILE,
+            row_id=6,
+            uri=None,
+            id=3,
+            show_name="Show 3",
+            fade_in=500.0,
+            fade_out=500.0,
+            cue_in=55.1211,
+            cue_out=858.4,
+            track_title="All Melody",
+            artist_name="Nils Frahm",
+            mime="audio/flac",
+            replay_gain=-2.13,
+            filesize=10000,
+            file_ready=False,
+        ),
-        "2022-09-05-12-53-23": {
-            "type": EventKind.FILE,
-            "row_id": 7,
-            "start": "2022-09-05-12-53-23",
-            "end": "2022-09-05-12-58-25",
-            "uri": None,
-            "id": 2,
-            "show_name": "Show 3",
-            "fade_in": 500.0,
-            "fade_out": 500.0,
-            "cue_in": 13.7008,
-            "cue_out": 315.845,
-            "metadata": {
-                "track_title": "My Friend the Forest",
-                "artist_name": "Nils Frahm",
-                "mime": "audio/flac",
-            },
-            "replay_gain": "11.46",
-            "filesize": 10000,
-        },
+        "2022-09-05-12-53-23": FileEvent(
+            start=datetime(2022, 9, 5, 12, 53, 23),
+            end=datetime(2022, 9, 5, 12, 58, 25),
+            type=EventKind.FILE,
+            row_id=7,
+            uri=None,
+            id=2,
+            show_name="Show 3",
+            fade_in=500.0,
+            fade_out=500.0,
+            cue_in=13.7008,
+            cue_out=315.845,
+            track_title="My Friend the Forest",
+            artist_name="Nils Frahm",
+            mime="audio/flac",
+            replay_gain=11.46,
+            filesize=10000,
+            file_ready=False,
+        ),
-        "2022-09-05-12-58-25": {
-            "type": EventKind.FILE,
-            "row_id": 8,
-            "start": "2022-09-05-12-58-25",
-            "end": "2022-09-05-13-00-00",
-            "uri": None,
-            "id": 1,
-            "show_name": "Show 3",
-            "fade_in": 500.0,
-            "fade_out": 500.0,
-            "cue_in": 8.25245,
-            "cue_out": 95.0,
-            "metadata": {
-                "track_title": "The Dane",
-                "artist_name": "Nils Frahm",
-                "mime": "audio/flac",
-            },
-            "replay_gain": "4.52",
-            "filesize": 10000,
-        },
+        "2022-09-05-12-58-25": FileEvent(
+            start=datetime(2022, 9, 5, 12, 58, 25),
+            end=datetime(2022, 9, 5, 13, 0),
+            type=EventKind.FILE,
+            row_id=8,
+            uri=None,
+            id=1,
+            show_name="Show 3",
+            fade_in=500.0,
+            fade_out=500.0,
+            cue_in=8.25245,
+            cue_out=95.0,
+            track_title="The Dane",
+            artist_name="Nils Frahm",
+            mime="audio/flac",
+            replay_gain=4.52,
+            filesize=10000,
+            file_ready=False,
+        ),
-        "2022-09-05-12-59-58": {
-            "type": EventKind.ACTION,
-            "event_type": "switch_off",
-            "start": "2022-09-05-12-59-58",
-            "end": "2022-09-05-12-59-58",
-        },
+        "2022-09-05-12-59-58": ActionEvent(
+            start=datetime(2022, 9, 5, 12, 59, 58),
+            end=datetime(2022, 9, 5, 12, 59, 58),
+            type=EventKind.ACTION,
+            event_type="switch_off",
+        ),
-        "2022-09-05-13-00-00": {
-            "type": EventKind.ACTION,
-            "event_type": "kick_out",
-            "start": "2022-09-05-13-00-00",
-            "end": "2022-09-05-13-00-00",
-        },
+        "2022-09-05-13-00-00": ActionEvent(
+            start=datetime(2022, 9, 5, 13, 0),
+            end=datetime(2022, 9, 5, 13, 0),
+            type=EventKind.ACTION,
+            event_type="kick_out",
+        ),
-        "2022-09-05-13-00-00_0": {
-            "type": EventKind.FILE,
-            "row_id": 9,
-            "start": "2022-09-05-13-00-00",
-            "end": "2022-09-05-13-05-02",
-            "uri": None,
-            "id": 2,
-            "show_name": "Show 4",
-            "fade_in": 500.0,
-            "fade_out": 500.0,
-            "cue_in": 13.7008,
-            "cue_out": 315.845,
-            "metadata": {
-                "track_title": "My Friend the Forest",
-                "artist_name": "Nils Frahm",
-                "mime": "audio/flac",
-            },
-            "replay_gain": "11.46",
-            "filesize": 10000,
-        },
+        "2022-09-05-13-00-00_0": FileEvent(
+            start=datetime(2022, 9, 5, 13, 0),
+            end=datetime(2022, 9, 5, 13, 5, 2),
+            type=EventKind.FILE,
+            row_id=9,
+            uri=None,
+            id=2,
+            show_name="Show 4",
+            fade_in=500.0,
+            fade_out=500.0,
+            cue_in=13.7008,
+            cue_out=315.845,
+            track_title="My Friend the Forest",
+            artist_name="Nils Frahm",
+            mime="audio/flac",
+            replay_gain=11.46,
+            filesize=10000,
+            file_ready=False,
+        ),
-        "2022-09-05-13-05-02": {
-            "type": EventKind.FILE,
-            "row_id": 10,
-            "start": "2022-09-05-13-05-02",
-            "end": "2022-09-05-13-10-00",
-            "uri": None,
-            "id": 4,
-            "show_name": "Show 4",
-            "fade_in": 500.0,
-            "fade_out": 500.0,
-            "cue_in": 0.0,
-            "cue_out": 297.8558,
-            "metadata": {
-                "track_title": "#2",
-                "artist_name": "Nils Frahm",
-                "mime": "audio/flac",
-            },
-            "replay_gain": "-1.65",
-            "filesize": 10000,
-        },
+        "2022-09-05-13-05-02": FileEvent(
+            start=datetime(2022, 9, 5, 13, 5, 2),
+            end=datetime(2022, 9, 5, 13, 10),
+            type=EventKind.FILE,
+            row_id=10,
+            uri=None,
+            id=4,
+            show_name="Show 4",
+            fade_in=500.0,
+            fade_out=500.0,
+            cue_in=0.0,
+            cue_out=297.8558,
+            track_title="#2",
+            artist_name="Nils Frahm",
+            mime="audio/flac",
+            replay_gain=-1.65,
+            filesize=10000,
+            file_ready=False,
+        ),
     }