refactor: don't use f-string on logging statements
The strings are now only formatted if the logging level is enabled.
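For context, a minimal sketch of why this matters (not part of the patch; the logger name and `payload` value are illustrative): an f-string is rendered before `logging` even sees the call, while %-style arguments are stored on the log record and only interpolated if a handler actually emits it.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

payload = {"tracks": list(range(1000))}  # stand-in for an expensive value

# Eager: the f-string is built even though DEBUG is disabled.
logger.debug(f"payload: {payload}")

# Lazy: the format string and argument are stored as-is; interpolation
# only happens when a handler emits the record.
logger.debug("payload: %s", payload)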
This commit is contained in:
parent c414068c16
commit 861698987c
@@ -100,7 +100,7 @@ class MessageListener:
         Here we parse the message, spin up an analyzer process, and report the
         metadata back to the Airtime web application (or report an error).
         """
-        logger.info(f" - Received '{body}' on routing_key '{method_frame.routing_key}'")
+        logger.info("Received '%s' on routing_key '%s'", body, method_frame.routing_key)
 
         audio_file_path = ""
         # final_file_path = ""
@@ -180,7 +180,7 @@ class MessageListener:
             )
             metadata = queue.get()
         except Exception as exception:
-            logger.exception(f"Analyzer pipeline exception: {exception}")
+            logger.exception("Analyzer pipeline exception: %s", exception)
             metadata["import_status"] = PipelineStatus.FAILED
 
         # Ensure our queue doesn't fill up and block due to unexpected behavior. Defensive code.
@@ -16,7 +16,7 @@ def run_(*args, **kwargs) -> CompletedProcess:
 
     except OSError as exception:  # executable was not found
         cmd = args[0]
-        logger.warning(f"Failed to run: {cmd} - {exception}. Is {cmd} installed?")
+        logger.warning("Failed to run: %s - %s. Is %s installed?", cmd, exception, cmd)
         raise exception
 
     except CalledProcessError as exception:  # returned an error code
@@ -26,7 +26,7 @@ def analyze_metadata(filepath_: str, metadata: Dict[str, Any]):
     # Get audio file metadata
     extracted = mutagen.File(filepath, easy=True)
     if extracted is None:
-        logger.warning(f"no metadata were extracted for {filepath}")
+        logger.warning("no metadata were extracted for %s", filepath)
         return metadata
 
     metadata["mime"] = extracted.mime[0]
@@ -27,6 +27,6 @@ def analyze_playability(filename: str, metadata: Dict[str, Any]):
         raise UnplayableFileError() from exception
 
     except OSError as exception:  # liquidsoap was not found
-        logger.warning(f"Failed to run: {exception}. Is liquidsoap installed?")
+        logger.warning("Failed to run: %s. Is liquidsoap installed?", exception)
 
     return metadata
@@ -43,12 +43,12 @@ def organise_file(
             return metadata
 
         dest_path = dest_path.with_name(f"{dest_path.stem}_{uuid4()}{dest_path.suffix}")
-        logger.warning(f"found existing file, using new filepath {dest_path}")
+        logger.warning("found existing file, using new filepath %s", dest_path)
 
     # Import
     dest_path.parent.mkdir(parents=True, exist_ok=True)
 
-    logger.debug(f"moving {filepath} to {dest_path}")
+    logger.debug("moving %s to %s", filepath, dest_path)
     shutil.move(filepath, dest_path)
 
     metadata["full_path"] = str(dest_path)
@@ -57,7 +57,7 @@ def process_http_requests(ipc_queue, http_retry_queue_path):
         # If we fail to unpickle a saved queue of failed HTTP requests, then we'll just log an error
         # and continue because those HTTP requests are lost anyways. The pickled file will be
         # overwritten the next time the analyzer is shut down too.
-        logger.error(f"Failed to unpickle {http_retry_queue_path}. Continuing...")
+        logger.error("Failed to unpickle %s. Continuing...", http_retry_queue_path)
 
     while True:
         try:
@@ -93,7 +93,7 @@ def process_http_requests(ipc_queue, http_retry_queue_path):
         ) as exception:  # Terrible top-level exception handler to prevent the thread from dying, just in case.
             if shutdown:
                 return
-            logger.exception(f"Unhandled exception in StatusReporter {exception}")
+            logger.exception("Unhandled exception in StatusReporter %s", exception)
             logger.info("Restarting StatusReporter thread")
             time.sleep(2)  # Throttle it
@@ -118,7 +118,7 @@ def send_http_request(picklable_request: PicklableHttpRequest, retry_queue):
         # The request failed with an error 500 probably, so let's check if Airtime and/or
         # the web server are broken. If not, then our request was probably causing an
         # error 500 in the media API (ie. a bug), so there's no point in retrying it.
-        logger.exception(f"HTTP request failed: {exception}")
+        logger.exception("HTTP request failed: %s", exception)
         parsed_url = urlparse(exception.response.request.url)
         if is_web_server_broken(parsed_url.scheme + "://" + parsed_url.netloc):
             # If the web server is having problems, retry the request later:
@@ -128,11 +128,12 @@ def send_http_request(picklable_request: PicklableHttpRequest, retry_queue):
         # notified by sentry.
     except requests.exceptions.ConnectionError as exception:
         logger.exception(
-            f"HTTP request failed due to a connection error. Retrying later. {exception}"
+            "HTTP request failed due to a connection error, retrying later: %s",
+            exception,
         )
         retry_queue.append(picklable_request)  # Retry it later
     except Exception as exception:
-        logger.exception(f"HTTP request failed with unhandled exception. {exception}")
+        logger.exception("HTTP request failed with unhandled exception. %s", exception)
         # Don't put the request into the retry queue, just give up on this one.
         # I'm doing this to protect against us getting some pathological request
         # that breaks our code. I don't want us pickling data that potentially
@@ -214,7 +215,7 @@ class StatusReporter:
         audio_metadata["import_status"] = import_status
         audio_metadata["comment"] = reason  # hack attack
         put_payload = json.dumps(audio_metadata)
-        # logger.debug("sending http put with payload: " + put_payload)
+        # logger.debug("sending http put with payload: %s", put_payload)
 
         StatusReporter._send_http_request(
             PicklableHttpRequest(
@@ -101,16 +101,16 @@ class ApiClient:
 
         if api_version[0:3] != AIRTIME_API_VERSION[0:3]:
             if verbose:
-                logger.info("Airtime API version found: " + str(api_version))
+                logger.info("Airtime API version found: %s", str(api_version))
                 logger.info(
-                    "pypo is only compatible with API version: " + AIRTIME_API_VERSION
+                    "pypo is only compatible with API version: %s", AIRTIME_API_VERSION
                 )
             return False
 
         if verbose:
-            logger.info("Airtime API version found: " + str(api_version))
+            logger.info("Airtime API version found: %s", str(api_version))
             logger.info(
-                "pypo is only compatible with API version: " + AIRTIME_API_VERSION
+                "pypo is only compatible with API version: %s", AIRTIME_API_VERSION
             )
         return True
 
@@ -183,11 +183,11 @@ class ApiClient:
                 break
 
             except requests.exceptions.HTTPError as exception:
-                logger.error(f"Http error code: {exception.response.status_code}")
+                logger.error("Http error code: %s", exception.response.status_code)
                 logger.exception(exception)
 
             except requests.exceptions.ConnectionError as exception:
-                logger.exception(f"Server is down: {exception}")
+                logger.exception("Server is down: %s", exception)
 
             except Exception as exception:
                 logger.exception(exception)
@@ -102,17 +102,17 @@ class Importer:
             raise RuntimeError(f"could not upload {filepath}") from exception
 
     def _delete_file(self, filepath: Path) -> None:
-        logger.info(f"deleting {filepath}")
+        logger.info("deleting %s", filepath)
         filepath.unlink()
 
     def _handle_file(self, filepath: Path, library: Optional[str]) -> None:
-        logger.debug(f"handling file {filepath}")
+        logger.debug("handling file %s", filepath)
 
         if not filepath.is_file():
             raise ValueError(f"provided path {filepath} is not a file")
 
         if self._check_file_md5(filepath):
-            logger.info(f"found similar md5sum, ignoring {filepath}")
+            logger.info("found similar md5sum, ignoring %s", filepath)
             if self.delete_if_exists:
                 self._delete_file(filepath)
             return
@@ -153,7 +153,7 @@ class StatsCollectorThread(Thread):
         self._collector = StatsCollector(legacy_client)
 
     def run(self):
-        logger.info(f"starting {self.name}")
+        logger.info("starting %s", self.name)
         while True:
             try:
                 self._collector.collect(self._config.stream.outputs.merged)
@@ -57,10 +57,10 @@ class LiquidsoapClient:
         while timeout > 0:
             try:
                 version = self.version()
-                logger.info(f"found version {version}")
+                logger.info("found version %s", version)
                 return version
             except (ConnectionError, TimeoutError) as exception:
-                logger.warning(f"could not get version: {exception}")
+                logger.warning("could not get version: %s", exception)
                 timeout -= 1
                 sleep(1)
 
@@ -65,7 +65,7 @@ class LiquidsoapConnection:
         logger.debug("trying to acquire lock")
         # pylint: disable=consider-using-with
         self._lock.acquire()
-        logger.debug(f"connecting to {self.address()}")
+        logger.debug("connecting to %s", self.address())
 
         if self._path is not None:
             self._sock = socket(AF_UNIX, SOCK_STREAM)
@@ -79,7 +79,7 @@ class LiquidsoapConnection:
 
     def close(self):
         if self._sock is not None:
-            logger.debug(f"closing connection to {self.address()}")
+            logger.debug("closing connection to %s", self.address())
 
             self.write("exit")
             # Reading for clean exit
@@ -97,7 +97,7 @@ class LiquidsoapConnection:
             raise InvalidConnection()
 
         for message in messages:
-            logger.debug(f"sending {message}")
+            logger.debug("sending %s", message)
             buffer = message.encode(encoding="utf-8")
             buffer += b"\n"
 
@@ -127,5 +127,5 @@ class LiquidsoapConnection:
         buffer = buffer.strip(b"\n")
         message = buffer.decode("utf-8")
 
-        logger.debug(f"received {message}")
+        logger.debug("received %s", message)
         return message
@@ -58,5 +58,5 @@ def cli(log_level: str, log_filepath: Optional[Path], config_filepath: Optional[
     if log_level == "debug":
         exec_args.append("--debug")
 
-    logger.debug(f"liquidsoap {version} using script: {entrypoint_filepath}")
+    logger.debug("liquidsoap %s using script: %s", version, entrypoint_filepath)
     os.execl(*exec_args)
@@ -56,8 +56,8 @@ def cli(log_level: str, log_filepath: Optional[Path], config_filepath: Optional[
     # Although all of our calculations are in UTC, it is useful to know what timezone
     # the local machine is, so that we have a reference for what time the actual
     # log entries were made
-    logger.info("Timezone: %s" % str(time.tzname))
-    logger.info("UTC time: %s" % str(datetime.utcnow()))
+    logger.info("Timezone: %s", time.tzname)
+    logger.info("UTC time: %s", datetime.utcnow())
 
     legacy_client = LegacyClient()
     api_client = ApiClient(
@@ -36,7 +36,7 @@ class MessageHandler(ConsumerMixin):
         ]
 
     def on_message(self, body, message):
-        logger.debug(f"received message: {body}")
+        logger.debug("received message: %s", body)
         try:
             try:
                 body = body.decode()
@@ -45,7 +45,7 @@ class MessageHandler(ConsumerMixin):
 
             payload = json.loads(body)
             command = payload["event_type"]
-            logger.info(f"handling command: {command}")
+            logger.info("handling command: %s", command)
 
             if command in (
                 "update_schedule",
@@ -66,7 +66,7 @@ class MessageHandler(ConsumerMixin):
                 self.recorder_queue.put(message.payload)
 
             else:
-                logger.warning(f"invalid command: {command}")
+                logger.warning("invalid command: %s", command)
 
         except Exception as exception:
             logger.exception(exception)
@@ -46,7 +46,7 @@ def media(media_id):
 
     Replaces: notify --media-id=#{m['schedule_table_id']}
     """
-    logger.info(f"Sending currently playing media id '{media_id}'")
+    logger.info("Sending currently playing media id '%s'", media_id)
     api_client().notify_media_item_start_playing(media_id)
 
 
@@ -59,7 +59,7 @@ def webstream(media_id, data):
 
     Replaces: notify --webstream='#{json_str}' --media-id=#{!current_dyn_id}
     """
-    logger.info(f"Sending currently playing webstream '{media_id}' data '{data}'")
+    logger.info("Sending currently playing webstream '%s' data '%s'", media_id, data)
     api_client().notify_webstream_data(data, media_id)
 
 
@@ -72,7 +72,7 @@ def live(name, status):
 
     Replaces: notify --source-name=#{sourcename} --source-status=#{status}
     """
-    logger.info(f"Sending currently playing live source '{name}' status '{status}'")
+    logger.info("Sending currently playing live source '%s' status '%s'", name, status)
     api_client().notify_source_status(name, status)
 
 
@@ -91,7 +91,7 @@ def stream(stream_id, time, error):
     if error is not None:
         status = error
 
-    logger.info(f"Sending output stream '{stream_id}' status '{status}'")
+    logger.info("Sending output stream '%s' status '%s'", stream_id, status)
     api_client().notify_liquidsoap_status(status, stream_id, time)
 
 
@@ -64,7 +64,7 @@ class PypoFetch(Thread):
 
     def handle_message(self, message):
         try:
-            logger.info("Received event from Pypo Message Handler: %s" % message)
+            logger.info("Received event from Pypo Message Handler: %s", message)
 
             try:
                 message = message.decode()
@@ -72,7 +72,7 @@ class PypoFetch(Thread):
                 pass
             m = json.loads(message)
             command = m["event_type"]
-            logger.info("Handling command: " + command)
+            logger.info("Handling command: %s", command)
 
             if command == "update_schedule":
                 self.schedule_data = m["schedule"]
@@ -102,7 +102,7 @@ class PypoFetch(Thread):
                     m["sourcename"]
                 )
             else:
                logger.info("Unknown command: %s" % command)
+                logger.info("Unknown command: %s", command)
 
             # update timeout value
             if command == "update_schedule":
@@ -113,7 +113,7 @@ class PypoFetch(Thread):
                 )
                 if self.listener_timeout < 0:
                     self.listener_timeout = 0
-                logger.info("New timeout: %s" % self.listener_timeout)
+                logger.info("New timeout: %s", self.listener_timeout)
         except Exception as exception:
             logger.exception(exception)
 
@@ -128,11 +128,11 @@ class PypoFetch(Thread):
             state = StreamState(**self.api_client.get_stream_state().json())
 
         except RequestException as exception:
-            logger.exception(f"Unable to get stream settings: {exception}")
+            logger.exception("Unable to get stream settings: %s", exception)
 
-        logger.debug(f"info: {info}")
-        logger.debug(f"preferences: {preferences}")
-        logger.debug(f"state: {state}")
+        logger.debug("info: %s", info)
+        logger.debug("preferences: %s", preferences)
+        logger.debug("state: %s", state)
 
         try:
             self.pypo_liquidsoap.liq_client.settings_update(
@@ -293,31 +293,31 @@ class PypoFetch(Thread):
 
         expired_files = cached_file_set - scheduled_file_set
 
-        logger.debug("Files to remove " + str(expired_files))
+        logger.debug("Files to remove %s", str(expired_files))
         for f in expired_files:
             try:
                 path = os.path.join(self.cache_dir, f)
-                logger.debug("Removing %s" % path)
+                logger.debug("Removing %s", path)
 
                 # check if this file is opened (sometimes Liquidsoap is still
                 # playing the file due to our knowledge of the track length
                 # being incorrect!)
                 if not self.is_file_opened(path):
                     os.remove(path)
-                    logger.info("File '%s' removed" % path)
+                    logger.info("File '%s' removed", path)
                 else:
-                    logger.info("File '%s' not removed. Still busy!" % path)
+                    logger.info("File '%s' not removed. Still busy!", path)
             except Exception as exception:
-                logger.exception(f"Problem removing file '{f}': {exception}")
+                logger.exception("Problem removing file '%s': %s", f, exception)
 
     def manual_schedule_fetch(self):
         try:
             self.schedule_data = get_schedule(self.api_client)
-            logger.debug(f"Received event from API client: {self.schedule_data}")
+            logger.debug("Received event from API client: %s", self.schedule_data)
             self.process_schedule(self.schedule_data)
             return True
         except Exception as exception:
-            logger.exception(f"Unable to fetch schedule: {exception}")
+            logger.exception("Unable to fetch schedule: %s", exception)
             return False
 
     def persistent_manual_schedule_fetch(self, max_attempts=1):
@@ -358,7 +358,7 @@ class PypoFetch(Thread):
 
         loops = 1
         while True:
-            logger.info(f"Loop #{loops}")
+            logger.info("Loop #%s", loops)
             manual_fetch_needed = False
             try:
                 # our simple_queue.get() requires a timeout, in which case we
@@ -388,7 +388,7 @@ class PypoFetch(Thread):
                 if manual_fetch_needed:
                     self.persistent_manual_schedule_fetch(max_attempts=5)
             except Exception as exception:
-                logger.exception(f"Failed to manually fetch the schedule: {exception}")
+                logger.exception("Failed to manually fetch the schedule: %s", exception)
 
             loops += 1
 
@@ -50,14 +50,14 @@ class PypoFile(Thread):
             # become an issue here... This needs proper cache management.
             # https://github.com/libretime/libretime/issues/756#issuecomment-477853018
             # https://github.com/libretime/libretime/pull/845
-            logger.debug(f"found file {file_id} in cache {dst}, skipping copy...")
+            logger.debug("found file %s in cache %s, skipping copy...", file_id, dst)
         else:
             do_copy = True
 
         media_item["file_ready"] = not do_copy
 
         if do_copy:
-            logger.info(f"copying file {file_id} to cache {dst}")
+            logger.info("copying file %s to cache %s", file_id, dst)
             try:
                 with open(dst, "wb") as handle:
                     logger.info(media_item)
@@ -82,7 +82,12 @@ class PypoFile(Thread):
 
                 media_item["file_ready"] = True
             except Exception as exception:
-                logger.exception(f"could not copy file {file_id} to {dst}: {exception}")
+                logger.exception(
+                    "could not copy file %s to %s: %s",
+                    file_id,
+                    dst,
+                    exception,
+                )
 
     def report_file_size_and_md5_to_api(self, file_path, file_id):
         try:
@@ -99,7 +104,9 @@ class PypoFile(Thread):
         except OSError as exception:
             file_size = 0
             logger.exception(
-                f"Error getting file size and md5 hash for file id {file_id}: {exception}"
+                "Error getting file size and md5 hash for file id %s: %s",
+                file_id,
+                exception,
             )
 
         # Make PUT request to LibreTime to update the file size and hash
@@ -112,7 +119,7 @@ class PypoFile(Thread):
         except (ConnectionError, Timeout):
             logger.exception(error_msg)
         except Exception as exception:
-            logger.exception(f"{error_msg}: {exception}")
+            logger.exception("%s: %s", error_msg, exception)
 
         return file_size
 
@@ -132,7 +139,7 @@ class PypoFile(Thread):
         highest_priority = sorted_keys[0]
         media_item = schedule[highest_priority]
 
-        logger.debug("Highest priority item: %s" % highest_priority)
+        logger.debug("Highest priority item: %s", highest_priority)
 
         # Remove this media_item from the dictionary. On the next iteration
         # (from the main function) we won't consider it for prioritization
@@ -157,7 +157,7 @@ class PypoLiquidsoap:
 
             if not correct:
                 # need to re-add
-                logger.info("Track %s found to have new attr." % i)
+                logger.info("Track %s found to have new attr.", i)
                 to_be_removed.add(i["row_id"])
                 to_be_added.add(i["row_id"])
 
@@ -165,7 +165,7 @@ class PypoLiquidsoap:
         to_be_added.update(schedule_ids - liq_queue_ids)
 
         if to_be_removed:
-            logger.info("Need to remove items from Liquidsoap: %s" % to_be_removed)
+            logger.info("Need to remove items from Liquidsoap: %s", to_be_removed)
 
             # remove files from Liquidsoap's queue
             for i in self.liq_queue_tracker:
@@ -174,7 +174,7 @@ class PypoLiquidsoap:
                     self.stop(i)
 
         if to_be_added:
-            logger.info("Need to add items to Liquidsoap *now*: %s" % to_be_added)
+            logger.info("Need to add items to Liquidsoap *now*: %s", to_be_added)
 
             for i in scheduled_now_files:
                 if i["row_id"] in to_be_added:
@@ -183,7 +183,7 @@ class PypoLiquidsoap:
 
         # handle webstreams
        current_stream_id = self.telnet_liquidsoap.get_current_stream_id()
-        logger.debug(f"scheduled now webstream: {scheduled_now_webstream}")
+        logger.debug("scheduled now webstream: %s", scheduled_now_webstream)
         if scheduled_now_webstream:
             if int(current_stream_id) != int(scheduled_now_webstream[0]["row_id"]):
                 self.play(scheduled_now_webstream[0])
@@ -192,7 +192,7 @@ class PypoLiquidsoap:
                 self.telnet_liquidsoap.stop_web_stream_buffer()
                 self.telnet_liquidsoap.stop_web_stream_output()
         except KeyError as exception:
-            logger.exception(f"Malformed event in schedule: {exception}")
+            logger.exception("Malformed event in schedule: %s", exception)
 
     def stop(self, queue):
         self.telnet_liquidsoap.queue_remove(queue)
@@ -211,7 +211,7 @@ class PypoLiquidsoap:
             lateness = seconds_between(link["start"], datetime.utcnow())
 
             if lateness > 0:
-                logger.debug(f"media item was supposed to start {lateness}s ago")
+                logger.debug("media item was supposed to start %ss ago", lateness)
                 cue_in_orig = timedelta(seconds=float(link["cue_in"]))
                 link["cue_in"] = cue_in_orig.total_seconds() + lateness
 
@@ -86,16 +86,16 @@ class PypoPush(Thread):
 
             # Ignore track that already ended
             if media_item["type"] == "file" and media_item["end"] < tnow:
-                logger.debug(f"ignoring ended media_item: {media_item}")
+                logger.debug("ignoring ended media_item: %s", media_item)
                 continue
 
             diff_sec = (tnow - media_item["start"]).total_seconds()
 
             if diff_sec >= 0:
-                logger.debug(f"adding media_item to present: {media_item}")
+                logger.debug("adding media_item to present: %s", media_item)
                 present.append(media_item)
             else:
-                logger.debug(f"adding media_item to future: {media_item}")
+                logger.debug("adding media_item to future: %s", media_item)
                 future[mkey] = media_item
 
         return present, future
@@ -36,7 +36,7 @@ class PypoLiqQueue(Thread):
                 media_schedule = self.queue.get(block=True)
             else:
                 logger.info(
-                    "waiting %ss until next scheduled item" % time_until_next_play
+                    "waiting %ss until next scheduled item", time_until_next_play
                 )
                 media_schedule = self.queue.get(
                     block=True, timeout=time_until_next_play
@@ -89,7 +89,7 @@ class ShowRecorder(Thread):
         args = command.split(" ")
 
         logger.info("starting record")
-        logger.info("command " + command)
+        logger.info("command %s", command)
 
         self.p = Popen(args, stdout=PIPE, stderr=PIPE)
 
@@ -139,7 +139,7 @@ class ShowRecorder(Thread):
         full_date, full_time = self.start_time.split(" ", 1)
         # No idea why we translated - to : before
         # full_time = full_time.replace(":","-")
-        logger.info("time: %s" % full_time)
+        logger.info("time: %s", full_time)
         artist = "Airtime Show Recorder"
         # set some metadata for our file daemon
         recorded_file = mutagen.File(filepath, easy=True)
@@ -160,7 +160,7 @@ class ShowRecorder(Thread):
 
         if code == 0:
             try:
-                logger.info("Preparing to upload %s" % filepath)
+                logger.info("Preparing to upload %s", filepath)
 
                 self.set_metadata_and_save(filepath)
 
@@ -61,13 +61,15 @@ class BaseConfig(BaseModel):
             return {}
 
         if not filepath.is_file():
-            logger.warning(f"provided config filepath '{filepath}' is not a file")
+            logger.warning("provided config filepath '%s' is not a file", filepath)
             return {}
 
         try:
             return safe_load(filepath.read_text(encoding="utf-8"))
-        except YAMLError as error:
-            logger.error(f"config file '{filepath}' is not a valid yaml file: {error}")
+        except YAMLError as exception:
+            logger.error(
+                "config file '%s' is not a valid yaml file: %s", filepath, exception
+            )
             return {}
 
@@ -55,7 +55,7 @@ def podcast_download(
                 tmp_file.write(chunk)
 
     except RequestException as exception:
-        logger.exception(f"could not download podcast episode {episode_id}")
+        logger.exception("could not download podcast episode %s", episode_id)
         raise exception
 
     # Save metadata to podcast episode file
@@ -67,20 +67,20 @@ def podcast_download(
         )
 
         if override_album:
-            logger.debug(f"overriding album name with podcast name {podcast_name}")
+            logger.debug("overriding album name with podcast name %s", podcast_name)
             metadata["artist"] = podcast_name
             metadata["album"] = podcast_name
             metadata["title"] = episode_title
 
         elif "album" not in metadata:
-            logger.debug(f"setting album name to podcast name {podcast_name}")
+            logger.debug("setting album name to podcast name %s", podcast_name)
             metadata["album"] = podcast_name
 
         metadata.save()
-        logger.debug(f"saved metadata {metadata}")
+        logger.debug("saved metadata %s", metadata)
 
     except MutagenError as exception:
-        logger.exception(f"could not save episode {episode_id} metadata")
+        logger.exception("could not save episode %s metadata", episode_id)
         raise exception
 
     # Upload podcast episode file
@@ -98,7 +98,7 @@ def podcast_download(
         result["status"] = 1
 
     except RequestException as exception:
-        logger.exception(f"could not upload episode {episode_id}")
+        logger.exception("could not upload episode %s", episode_id)
         raise exception
 
     except (RequestException, MutagenError) as exception: