chore: rename api_clients dir to libretime_api_client
This commit is contained in:
parent
e84137a8ee
commit
be09f74af6
4 changed files with 0 additions and 0 deletions
1
api_client/libretime_api_client/__init__.py
Normal file
1
api_client/libretime_api_client/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
__all__ = ["version1"]
|
224
api_client/libretime_api_client/utils.py
Normal file
224
api_client/libretime_api_client/utils.py
Normal file
|
@ -0,0 +1,224 @@
|
|||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import socket
|
||||
from time import sleep
|
||||
|
||||
import requests
|
||||
from requests.auth import AuthBase
|
||||
|
||||
|
||||
def get_protocol(config):
    """Determine the scheme ("http" or "https") for api requests.

    Precedence: the ``force_ssl`` flag wins, then an explicit
    ``protocol`` setting, and finally the port number (443 implies
    https, anything else http).
    """
    truthy = ("Yes", "yes", "True", "true", True)
    general = config["general"]
    if general.get("force_ssl", False) in truthy:
        return "https"
    # Explicit protocol setting, when present and non-empty, is used as-is.
    configured = general.get("protocol")
    if configured:
        return configured
    # Fall back to guessing from the configured port.
    return "https" if int(general.get("base_port", 80)) == 443 else "http"
|
||||
|
||||
|
||||
class UrlParamDict(dict):
    """Mapping for ``str.format_map`` that tolerates missing keys.

    Unknown placeholders are rendered back as ``{key}`` so a url
    template can be filled in several passes.
    """

    def __missing__(self, key):
        # Re-wrap the key so the placeholder survives the substitution.
        return "{%s}" % key
|
||||
|
||||
|
||||
class UrlException(Exception):
    """Base class for errors raised while building api urls."""
|
||||
|
||||
|
||||
class IncompleteUrl(UrlException):
    """Raised when a url still contains unfilled "{param}" placeholders."""

    def __init__(self, url):
        # Keep the offending url for the error message.
        self.url = url

    def __str__(self):
        return "Incomplete url: '{}'".format(self.url)
|
||||
|
||||
|
||||
class UrlBadParam(UrlException):
    """Raised when a parameter has no matching placeholder in the url."""

    def __init__(self, url, param):
        # Remember both pieces so the message can show what went wrong.
        self.url = url
        self.param = param

    def __str__(self):
        return "Bad param '{}' passed into url: '{}'".format(self.param, self.url)
|
||||
|
||||
|
||||
class KeyAuth(AuthBase):
    """requests auth hook that adds an "Api-Key" Authorization header."""

    def __init__(self, key):
        self.key = key

    def __call__(self, r):
        # requests calls this with the prepared request before sending it.
        r.headers["Authorization"] = "Api-Key {}".format(self.key)
        return r
|
||||
|
||||
|
||||
class ApcUrl:
    """A safe abstraction and testable for filling in parameters in
    api_client.cfg urls.

    Instances are immutable: ``params`` returns a new ApcUrl with the
    given placeholders substituted, and ``url`` yields the final string
    only once every "{...}" placeholder has been filled.
    """

    def __init__(self, base_url):
        self.base_url = base_url

    def params(self, **params):
        """Return a new ApcUrl with the given placeholders filled in.

        Raises UrlBadParam if any keyword has no matching "{name}"
        placeholder in the current url.
        """
        # Validate every parameter against the *unsubstituted* template
        # first, then substitute once. (Substituting inside the loop
        # would make the check fail for every key after the first, since
        # its placeholder would already have been replaced.)
        for name in params:
            if "{" + name + "}" not in self.base_url:
                raise UrlBadParam(self.base_url, name)
        # UrlParamDict keeps any placeholders we did not fill intact.
        filled = self.base_url.format_map(UrlParamDict(**params))
        return ApcUrl(filled)

    def url(self):
        """Return the final url string; raise IncompleteUrl if any
        placeholder is still unfilled."""
        if "{" in self.base_url:
            raise IncompleteUrl(self.base_url)
        return self.base_url
|
||||
|
||||
|
||||
class ApiRequest:
    """A single named api endpoint, callable to perform the HTTP request."""

    API_HTTP_REQUEST_TIMEOUT = 30  # 30 second HTTP request timeout

    def __init__(self, name, url, logger=None, api_key=None):
        # name: endpoint name (for bookkeeping); url: ApcUrl template.
        self.name = name
        self.url = url
        # Deferred request callable, set by req() and replayed by retry().
        self.__req = None
        if logger is None:
            # The logging module itself exposes the same debug/error
            # callables as a Logger instance, so it works as a fallback.
            self.logger = logging
        else:
            self.logger = logger
        self.auth = KeyAuth(api_key)

    def __call__(self, *, _post_data=None, _put_data=None, params=None, **kwargs):
        """Fill the url template with **kwargs and issue the request.

        POST when _post_data is given, PUT when _put_data is given,
        otherwise GET (with optional query ``params``). Returns the
        decoded JSON body when the response is JSON, else the raw
        response object. Re-raises on timeout or bad HTTP status.
        """
        final_url = self.url.params(**kwargs).url()
        self.logger.debug(final_url)
        try:
            if _post_data is not None:
                res = requests.post(
                    final_url,
                    data=_post_data,
                    auth=self.auth,
                    timeout=ApiRequest.API_HTTP_REQUEST_TIMEOUT,
                )
            elif _put_data is not None:
                res = requests.put(
                    final_url,
                    data=_put_data,
                    auth=self.auth,
                    timeout=ApiRequest.API_HTTP_REQUEST_TIMEOUT,
                )
            else:
                res = requests.get(
                    final_url,
                    params=params,
                    auth=self.auth,
                    timeout=ApiRequest.API_HTTP_REQUEST_TIMEOUT,
                )

            # Check for bad HTTP status code
            res.raise_for_status()

            if "application/json" in res.headers["content-type"]:
                return res.json()
            return res
        except requests.exceptions.Timeout:
            self.logger.error("HTTP request to %s timed out", final_url)
            raise
        except requests.exceptions.HTTPError:
            # res is always bound here: HTTPError can only come from
            # raise_for_status(), which runs after res is assigned.
            self.logger.error(
                f"{res.request.method} {res.request.url} request failed '{res.status_code}':"
                f"\nPayload: {res.request.body}"
                f"\nResponse: {res.text}"
            )
            raise

    def req(self, *args, **kwargs):
        # Capture the call (without executing it) so retry() can replay it.
        self.__req = lambda: self(*args, **kwargs)
        return self

    def retry(self, n, delay=5):
        """Try to send request n times. If after n times it fails then
        we finally raise exception"""
        # n-1 guarded attempts with a pause between them...
        for i in range(0, n - 1):
            try:
                return self.__req()
            except Exception:
                sleep(delay)
        # ...and a final attempt that propagates any exception.
        return self.__req()
|
||||
|
||||
|
||||
class RequestProvider:
    """Creates the available ApiRequest instance that can be read from
    a config file"""

    def __init__(self, cfg, endpoints):
        # cfg: ConfigObj-style mapping with a "general" section plus
        # "api_base"; endpoints: mapping of action name -> url template.
        self.config = cfg
        self.requests = {}
        # Normalize base_dir so it splices cleanly into the url template.
        if self.config["general"]["base_dir"].startswith("/"):
            self.config["general"]["base_dir"] = self.config["general"]["base_dir"][1:]

        protocol = get_protocol(self.config)
        base_port = self.config["general"]["base_port"]
        base_url = self.config["general"]["base_url"]
        base_dir = self.config["general"]["base_dir"]
        api_base = self.config["api_base"]
        # Fill everything except {action}; UrlParamDict leaves the
        # unfilled {action} placeholder intact for later substitution.
        api_url = "{protocol}://{base_url}:{base_port}/{base_dir}{api_base}/{action}".format_map(
            UrlParamDict(
                protocol=protocol,
                base_url=base_url,
                base_port=base_port,
                base_dir=base_dir,
                api_base=api_base,
            )
        )
        self.url = ApcUrl(api_url)

        # Now we must discover the possible actions
        for action_name, action_value in endpoints.items():
            new_url = self.url.params(action=action_value)
            # Templates that embed {api_key} get it baked in up front.
            if "{api_key}" in action_value:
                new_url = new_url.params(api_key=self.config["general"]["api_key"])
            self.requests[action_name] = ApiRequest(
                action_name, new_url, api_key=self.config["general"]["api_key"]
            )

    def available_requests(self):
        # Names of every configured action.
        return list(self.requests.keys())

    def __contains__(self, request):
        return request in self.requests

    def __getattr__(self, attr):
        # Expose each configured ApiRequest as an attribute, so callers
        # can write e.g. provider.version_url(); anything else falls
        # through to normal attribute lookup (and its AttributeError).
        if attr in self:
            return self.requests[attr]
        else:
            return super(RequestProvider, self).__getattribute__(attr)
|
||||
|
||||
|
||||
def time_in_seconds(value):
    """Convert a datetime.time-like value to seconds as a float."""
    whole_seconds = (value.hour * 60 + value.minute) * 60 + value.second
    return whole_seconds + value.microsecond / 1000000.0
|
||||
|
||||
|
||||
def time_in_milliseconds(value):
    """Convert a datetime.time-like value to milliseconds as a float."""
    return 1000 * time_in_seconds(value)
|
||||
|
||||
|
||||
def fromisoformat(time_string):
    """
    This is required for Python 3.6 support. datetime.time.fromisoformat was
    only added in Python 3.7. Until LibreTime drops Python 3.6 support, this
    wrapper uses the old way of doing it.
    """
    # Prefer the fractional-second form; fall back to whole seconds.
    try:
        parsed = datetime.datetime.strptime(time_string, "%H:%M:%S.%f")
    except ValueError:
        parsed = datetime.datetime.strptime(time_string, "%H:%M:%S")
    return parsed.time()
|
538
api_client/libretime_api_client/version1.py
Normal file
538
api_client/libretime_api_client/version1.py
Normal file
|
@ -0,0 +1,538 @@
|
|||
###############################################################################
|
||||
# This file holds the implementations for all the API clients.
|
||||
#
|
||||
# If you want to develop a new client, here are some suggestions: Get the fetch
|
||||
# methods working first, then the push, then the liquidsoap notifier. You will
|
||||
# probably want to create a script on your server side to automatically
|
||||
# schedule a playlist one minute from the current time.
|
||||
###############################################################################
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
import urllib.parse
|
||||
|
||||
import requests
|
||||
from configobj import ConfigObj
|
||||
|
||||
from .utils import ApiRequest, RequestProvider, get_protocol
|
||||
|
||||
AIRTIME_API_VERSION = "1.1"


# Static client configuration merged into the loaded config file.
api_config = {
    "api_base": "api",
    "bin_dir": "/usr/lib/airtime/api_clients/",
}

# Url templates for every action exposed by the version 1 api.
# Placeholders in "{...}" are filled by RequestProvider / ApiRequest.
api_endpoints = {
    # URL to get the version number of the server API
    "version_url": "version/api_key/{api_key}",
    # URL to register a components IP Address with the central web server
    "register_component": "register-component/format/json/api_key/{api_key}/component/{component}",
    # media-monitor
    "media_setup_url": "media-monitor-setup/format/json/api_key/{api_key}",
    "upload_recorded": "upload-recorded/format/json/api_key/{api_key}/fileid/{fileid}/showinstanceid/{showinstanceid}",
    "update_media_url": "reload-metadata/format/json/api_key/{api_key}/mode/{mode}",
    "list_all_db_files": "list-all-files/format/json/api_key/{api_key}/dir_id/{dir_id}/all/{all}",
    "list_all_watched_dirs": "list-all-watched-dirs/format/json/api_key/{api_key}",
    "add_watched_dir": "add-watched-dir/format/json/api_key/{api_key}/path/{path}",
    "remove_watched_dir": "remove-watched-dir/format/json/api_key/{api_key}/path/{path}",
    "set_storage_dir": "set-storage-dir/format/json/api_key/{api_key}/path/{path}",
    "update_fs_mount": "update-file-system-mount/format/json/api_key/{api_key}",
    "reload_metadata_group": "reload-metadata-group/format/json/api_key/{api_key}",
    "handle_watched_dir_missing": "handle-watched-dir-missing/format/json/api_key/{api_key}/dir/{dir}",
    # show-recorder
    "show_schedule_url": "recorded-shows/format/json/api_key/{api_key}",
    "upload_file_url": "rest/media",
    "upload_retries": "3",
    "upload_wait": "60",
    # pypo
    "export_url": "schedule/api_key/{api_key}",
    "get_media_url": "get-media/file/{file}/api_key/{api_key}",
    "update_item_url": "notify-schedule-group-play/api_key/{api_key}/schedule_id/{schedule_id}",
    "update_start_playing_url": "notify-media-item-start-play/api_key/{api_key}/media_id/{media_id}/",
    "get_stream_setting": "get-stream-setting/format/json/api_key/{api_key}/",
    "update_liquidsoap_status": "update-liquidsoap-status/format/json/api_key/{api_key}/msg/{msg}/stream_id/{stream_id}/boot_time/{boot_time}",
    "update_source_status": "update-source-status/format/json/api_key/{api_key}/sourcename/{sourcename}/status/{status}",
    "check_live_stream_auth": "check-live-stream-auth/format/json/api_key/{api_key}/username/{username}/password/{password}/djtype/{djtype}",
    "get_bootstrap_info": "get-bootstrap-info/format/json/api_key/{api_key}",
    "get_files_without_replay_gain": "get-files-without-replay-gain/api_key/{api_key}/dir_id/{dir_id}",
    "update_replay_gain_value": "update-replay-gain-value/format/json/api_key/{api_key}",
    "notify_webstream_data": "notify-webstream-data/api_key/{api_key}/media_id/{media_id}/format/json",
    "notify_liquidsoap_started": "rabbitmq-do-push/api_key/{api_key}/format/json",
    "get_stream_parameters": "get-stream-parameters/api_key/{api_key}/format/json",
    "push_stream_stats": "push-stream-stats/api_key/{api_key}/format/json",
    "update_stream_setting_table": "update-stream-setting-table/api_key/{api_key}/format/json",
    "get_files_without_silan_value": "get-files-without-silan-value/api_key/{api_key}",
    "update_cue_values_by_silan": "update-cue-values-by-silan/api_key/{api_key}",
    "update_metadata_on_tunein": "update-metadata-on-tunein/api_key/{api_key}",
}
|
||||
|
||||
|
||||
################################################################################
|
||||
# Airtime API Version 1 Client
|
||||
################################################################################
|
||||
class AirtimeApiClient(object):
    """Client for version 1 of the Airtime/LibreTime api.

    Thin wrapper around a RequestProvider: most methods call the
    matching named endpoint and shield callers from failures by logging
    and returning a neutral value (None, {}, [] or -1).
    """

    def __init__(self, logger=None, config_path="/etc/airtime/airtime.conf"):
        if logger is None:
            # The logging module itself offers the same debug/info/error
            # callables as a Logger instance, so it works as a fallback.
            self.logger = logging
        else:
            self.logger = logger

        # loading config file
        try:
            self.config = ConfigObj(config_path)
            self.config.update(api_config)
            self.services = RequestProvider(self.config, api_endpoints)
        except Exception:
            self.logger.exception("Error loading config file: %s", config_path)
            # A client without a valid config is unusable; abort the process.
            sys.exit(1)

    def __get_airtime_version(self):
        """Return the server's Airtime version string, or -1 on any failure."""
        try:
            return self.services.version_url()["airtime_version"]
        except Exception:
            return -1

    def __get_api_version(self):
        """Return the server's api version string, or -1 on any failure."""
        try:
            return self.services.version_url()["api_version"]
        except Exception as e:
            self.logger.exception(e)
            return -1

    def is_server_compatible(self, verbose=True):
        """Return True when the server's api version prefix ("x.y")
        matches AIRTIME_API_VERSION, False otherwise."""
        logger = self.logger
        api_version = self.__get_api_version()
        # logger.info('Airtime version found: ' + str(version))
        if api_version == -1:
            if verbose:
                logger.info("Unable to get Airtime API version number.\n")
            return False
        elif api_version[0:3] != AIRTIME_API_VERSION[0:3]:
            if verbose:
                logger.info("Airtime API version found: " + str(api_version))
                logger.info(
                    "pypo is only compatible with API version: " + AIRTIME_API_VERSION
                )
            return False
        else:
            if verbose:
                logger.info("Airtime API version found: " + str(api_version))
                logger.info(
                    "pypo is only compatible with API version: " + AIRTIME_API_VERSION
                )
            return True

    def get_schedule(self):
        # TODO : properly refactor this routine
        # For now the return type is a little messed up for compatibility reasons
        try:
            return (True, self.services.export_url())
        except Exception:  # was a bare except; narrowed so SystemExit etc. pass through
            return (False, None)

    def notify_liquidsoap_started(self):
        """Tell the server liquidsoap has started; errors are logged and swallowed."""
        try:
            self.services.notify_liquidsoap_started()
        except Exception as e:
            self.logger.exception(e)

    def notify_media_item_start_playing(self, media_id):
        """This is a callback from liquidsoap, we use this to notify
        about the currently playing *song*. We get passed a JSON string
        which we handed to liquidsoap in get_liquidsoap_data()."""
        try:
            return self.services.update_start_playing_url(media_id=media_id)
        except Exception as e:
            self.logger.exception(e)
            return None

    def get_shows_to_record(self):
        """Return the recorded-shows schedule, or None on failure."""
        try:
            return self.services.show_schedule_url()
        except Exception as e:
            self.logger.exception(e)
            return None

    def upload_recorded_show(self, files, show_id):
        """Upload a recorded show, retrying on failure.

        files: multipart mapping suitable for requests.post(files=...).
        Returns the decoded JSON of the successful attempt, or "" when
        every attempt failed.
        """
        logger = self.logger
        response = ""

        retries = int(self.config["upload_retries"])
        retries_wait = int(self.config["upload_wait"])

        url = self.construct_rest_url("upload_file_url")

        logger.debug(url)

        for i in range(0, retries):
            logger.debug("Upload attempt: %s", i + 1)
            logger.debug(files)
            logger.debug(ApiRequest.API_HTTP_REQUEST_TIMEOUT)

            try:
                request = requests.post(
                    url, files=files, timeout=float(ApiRequest.API_HTTP_REQUEST_TIMEOUT)
                )
                response = request.json()
                logger.debug(response)

                """
                FIXME: We need to tell LibreTime that the uploaded track was recorded for a specific show

                My issue here is that response does not yet have an id. The id gets generated at the point
                where analyzer is done with it's work. We probably need to do what is below in analyzer
                and also make sure that the show instance id is routed all the way through.

                It already gets uploaded by this but the RestController does not seem to care about it. In
                the end analyzer doesn't have the info in it's rabbitmq message and imports the show as a
                regular track.

                logger.info("uploaded show result as file id %s", response.id)

                url = self.construct_url("upload_recorded")
                url = url.replace('%%fileid%%', response.id)
                url = url.replace('%%showinstanceid%%', show_id)
                request.get(url)
                logger.info("associated uploaded file %s with show instance %s", response.id, show_id)
                """
                break

            except requests.exceptions.HTTPError as e:
                # requests' HTTPError carries the status on its response
                # attribute; e.code was a leftover from urllib2 and would
                # itself raise AttributeError here.
                logger.error("Http error code: %s", e.response.status_code)
                logger.error("traceback: %s", traceback.format_exc())
            except requests.exceptions.ConnectionError as e:
                logger.error("Server is down: %s", e.args)
                logger.error("traceback: %s", traceback.format_exc())
            except Exception as e:
                self.logger.exception(e)

            # wait some time before next retry
            time.sleep(retries_wait)

        return response

    def check_live_stream_auth(self, username, password, dj_type):
        """Validate dj credentials for live streaming; {} on failure."""
        try:
            return self.services.check_live_stream_auth(
                username=username, password=password, djtype=dj_type
            )
        except Exception as e:
            self.logger.exception(e)
            return {}

    def construct_url(self, config_action_key):
        """Constructs the base url for every request"""
        # TODO : Make other methods in this class use this method.
        if self.config["general"]["base_dir"].startswith("/"):
            self.config["general"]["base_dir"] = self.config["general"]["base_dir"][1:]
        protocol = get_protocol(self.config)
        # Note: base_dir and api_base are joined with no separator; the
        # normalized base_dir is expected to end with "/" (or be empty).
        url = "%s://%s:%s/%s%s/%s" % (
            protocol,
            self.config["general"]["base_url"],
            str(self.config["general"]["base_port"]),
            self.config["general"]["base_dir"],
            self.config["api_base"],
            self.config[config_action_key],
        )
        url = url.replace("%%api_key%%", self.config["general"]["api_key"])
        return url

    def construct_rest_url(self, config_action_key):
        """Constructs the base url for RESTful requests"""
        if self.config["general"]["base_dir"].startswith("/"):
            self.config["general"]["base_dir"] = self.config["general"]["base_dir"][1:]
        protocol = get_protocol(self.config)
        # The api key is embedded as the userinfo part of the url
        # (http://key:@host:port/...) for basic-auth style access.
        url = "%s://%s:@%s:%s/%s/%s" % (
            protocol,
            self.config["general"]["api_key"],
            self.config["general"]["base_url"],
            str(self.config["general"]["base_port"]),
            self.config["general"]["base_dir"],
            self.config[config_action_key],
        )
        return url

    """
    Caller of this method needs to catch any exceptions such as
    ValueError thrown by json.loads or URLError by urllib2.urlopen
    """

    def setup_media_monitor(self):
        return self.services.media_setup_url()

    def send_media_monitor_requests(self, action_list, dry=False):
        """
        Send a gang of media monitor events at a time. actions_list is a
        list of dictionaries where every dictionary is representing an
        action. Every action dict must contain a 'mode' key that says
        what kind of action it is and an optional 'is_record' key that
        says whether the show was recorded or not. The value of this key
        does not matter, only if it's present or not.
        """
        # We are assuming that action_list is a list of dictionaries such
        # that every dictionary represents the metadata of a file along
        # with a special mode key that is the action to be executed by the
        # controller.
        valid_actions = []
        # We could get a list of valid_actions in a much shorter way using
        # filter but here we prefer a little more verbosity to help
        # debugging
        for action in action_list:
            if "mode" not in action:
                self.logger.debug(
                    "Warning: Trying to send a request element without a 'mode'"
                )
                self.logger.debug("Here is the the request: '%s'" % str(action))
            else:
                # We alias the value of is_record to true or false no
                # matter what it is based on if it's absent in the action
                if "is_record" not in action:
                    action["is_record"] = 0
                valid_actions.append(action)
        # Note that we must prefix every key with: mdX where x is a number
        md_list = dict(
            (("md%d" % i), json.dumps(md)) for i, md in enumerate(valid_actions)
        )
        # For testing we add the following "dry" parameter to tell the
        # controller not to actually do any changes
        if dry:
            md_list["dry"] = 1
        self.logger.info("Pumping out %d requests..." % len(valid_actions))
        return self.services.reload_metadata_group(_post_data=md_list)

    # returns a list of all db files for a given directory in JSON format:
    # {"files":["path/to/file1", "path/to/file2"]}
    # Note that these are relative paths to the given directory. The full
    # path is not returned.
    def list_all_db_files(self, dir_id, all_files=True):
        logger = self.logger
        try:
            all_files = "1" if all_files else "0"
            response = self.services.list_all_db_files(dir_id=dir_id, all=all_files)
        except Exception as e:
            response = {}
            logger.error("Exception: %s", e)
        try:
            return response["files"]
        except KeyError:
            self.logger.error(
                "Could not find index 'files' in dictionary: %s", str(response)
            )
            return []

    """
    Caller of this method needs to catch any exceptions such as
    ValueError thrown by json.loads or URLError by urllib2.urlopen
    """

    def list_all_watched_dirs(self):
        return self.services.list_all_watched_dirs()

    """
    Caller of this method needs to catch any exceptions such as
    ValueError thrown by json.loads or URLError by urllib2.urlopen
    """

    def add_watched_dir(self, path):
        # b64encode requires bytes on Python 3; encode the path and
        # decode the result so the url placeholder receives a str.
        return self.services.add_watched_dir(
            path=base64.b64encode(path.encode("utf-8")).decode("ascii")
        )

    """
    Caller of this method needs to catch any exceptions such as
    ValueError thrown by json.loads or URLError by urllib2.urlopen
    """

    def remove_watched_dir(self, path):
        # See add_watched_dir for the encode/decode rationale.
        return self.services.remove_watched_dir(
            path=base64.b64encode(path.encode("utf-8")).decode("ascii")
        )

    """
    Caller of this method needs to catch any exceptions such as
    ValueError thrown by json.loads or URLError by urllib2.urlopen
    """

    def set_storage_dir(self, path):
        # See add_watched_dir for the encode/decode rationale.
        return self.services.set_storage_dir(
            path=base64.b64encode(path.encode("utf-8")).decode("ascii")
        )

    """
    Caller of this method needs to catch any exceptions such as
    ValueError thrown by json.loads or URLError by urllib2.urlopen
    """

    def get_stream_setting(self):
        return self.services.get_stream_setting()

    def register_component(self, component):
        """Purpose of this method is to contact the server with a "Hey its
        me!" message. This will allow the server to register the component's
        (component = media-monitor, pypo etc.) ip address, and later use it
        to query monit via monit's http service, or download log files via a
        http server."""
        return self.services.register_component(component=component)

    def notify_liquidsoap_status(self, msg, stream_id, time):
        """Push the current liquidsoap status line to the server, retrying
        up to 5 times; errors are logged and swallowed."""
        logger = self.logger
        try:
            # encoded_msg is no longer used server_side!!
            encoded_msg = urllib.parse.quote("dummy")

            self.services.update_liquidsoap_status.req(
                _post_data={"msg_post": msg},
                msg=encoded_msg,
                stream_id=stream_id,
                boot_time=time,
            ).retry(5)
        except Exception as e:
            self.logger.exception(e)

    def notify_source_status(self, sourcename, status):
        """Report a source's status, retrying up to 5 times."""
        try:
            return self.services.update_source_status.req(
                sourcename=sourcename, status=status
            ).retry(5)
        except Exception as e:
            self.logger.exception(e)

    def get_bootstrap_info(self):
        """Retrieve infomations needed on bootstrap time"""
        return self.services.get_bootstrap_info()

    def get_files_without_replay_gain_value(self, dir_id):
        """
        Download a list of files that need to have their ReplayGain value
        calculated. This list of files is downloaded into a file and the path
        to this file is the return value.
        """
        # http://localhost/api/get-files-without-replay-gain/dir_id/1
        try:
            return self.services.get_files_without_replay_gain(dir_id=dir_id)
        except Exception as e:
            self.logger.exception(e)
            return []

    def get_files_without_silan_value(self):
        """
        Download a list of files that need to have their cue in/out value
        calculated. This list of files is downloaded into a file and the path
        to this file is the return value.
        """
        try:
            return self.services.get_files_without_silan_value()
        except Exception as e:
            self.logger.exception(e)
            return []

    def update_replay_gain_values(self, pairs):
        """
        'pairs' is a list of pairs in (x, y), where x is the file's database
        row id and y is the file's replay_gain value in dB
        """
        self.logger.debug(
            self.services.update_replay_gain_value(
                _post_data={"data": json.dumps(pairs)}
            )
        )

    def update_cue_values_by_silan(self, pairs):
        """
        'pairs' is a list of pairs in (x, y), where x is the file's database
        row id and y is the file's cue values in dB
        """
        return self.services.update_cue_values_by_silan(
            _post_data={"data": json.dumps(pairs)}
        )

    def notify_webstream_data(self, data, media_id):
        """
        Update the server with the latest metadata we've received from the
        external webstream
        """
        self.logger.info(
            self.services.notify_webstream_data.req(
                _post_data={"data": data}, media_id=str(media_id)
            ).retry(5)
        )

    def get_stream_parameters(self):
        response = self.services.get_stream_parameters()
        self.logger.debug(response)
        return response

    def push_stream_stats(self, data):
        # TODO : users of this method should do their own error handling
        response = self.services.push_stream_stats(
            _post_data={"data": json.dumps(data)}
        )
        return response

    def update_stream_setting_table(self, data):
        """Upload new stream settings; returns the response, or None on failure."""
        try:
            response = self.services.update_stream_setting_table(
                _post_data={"data": json.dumps(data)}
            )
            return response
        except Exception as e:
            self.logger.exception(e)

    def update_metadata_on_tunein(self):
        self.services.update_metadata_on_tunein()
|
||||
|
||||
|
||||
class InvalidContentType(Exception):
    """Raised when a response arrives with an unexpected content type."""
|
139
api_client/libretime_api_client/version2.py
Normal file
139
api_client/libretime_api_client/version2.py
Normal file
|
@ -0,0 +1,139 @@
|
|||
###############################################################################
|
||||
# This file holds the implementations for all the API clients.
|
||||
#
|
||||
# If you want to develop a new client, here are some suggestions: Get the fetch
|
||||
# methods working first, then the push, then the liquidsoap notifier. You will
|
||||
# probably want to create a script on your server side to automatically
|
||||
# schedule a playlist one minute from the current time.
|
||||
###############################################################################
|
||||
import logging
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from configobj import ConfigObj
|
||||
from dateutil.parser import isoparse
|
||||
|
||||
from .utils import RequestProvider, fromisoformat, time_in_milliseconds, time_in_seconds
|
||||
|
||||
LIBRETIME_API_VERSION = "2.0"


# Url templates for the version 2 REST endpoints; "{id}" placeholders
# are filled per-request by RequestProvider / ApiRequest.
api_endpoints = {
    "version_url": "version/",
    "schedule_url": "schedule/",
    "webstream_url": "webstreams/{id}/",
    "show_instance_url": "show-instances/{id}/",
    "show_url": "shows/{id}/",
    "file_url": "files/{id}/",
    "file_download_url": "files/{id}/download/",
}

# Static client configuration merged into the loaded config file.
api_config = {
    "api_base": "api/v2",
}
|
||||
|
||||
|
||||
class AirtimeApiClient:
    """Client for version 2 (REST) of the LibreTime api."""

    def __init__(self, logger=None, config_path="/etc/airtime/airtime.conf"):
        if logger is None:
            # The logging module itself works as a logger-like fallback.
            self.logger = logging
        else:
            self.logger = logger

        try:
            self.config = ConfigObj(config_path)
            self.config.update(api_config)
            self.services = RequestProvider(self.config, api_endpoints)
        except Exception as e:
            self.logger.exception("Error loading config file: %s", config_path)
            # A client without a valid config is unusable; abort the process.
            sys.exit(1)

    def get_schedule(self):
        """Fetch the next 24h of schedule and convert it to the legacy
        pypo format: {"media": {key: event}} plus extra stream start/end
        pseudo-events stored at the top level of the result."""
        current_time = datetime.utcnow()
        end_time = current_time + timedelta(days=1)

        str_current = current_time.isoformat(timespec="seconds")
        str_end = end_time.isoformat(timespec="seconds")
        # NOTE(review): naive utcnow() with a literal "Z" appended —
        # assumes the server interprets these timestamps as UTC; confirm.
        data = self.services.schedule_url(
            params={
                "ends__range": ("{}Z,{}Z".format(str_current, str_end)),
                "is_valid": True,
                "playout_status__gt": 0,
            }
        )
        result = {"media": {}}
        for item in data:
            start = isoparse(item["starts"])
            # Events are keyed by their start time in pypo's flat format.
            key = start.strftime("%Y-%m-%d-%H-%M-%S")
            end = isoparse(item["ends"])

            # Resolve the show name through the instance -> show chain.
            show_instance = self.services.show_instance_url(id=item["instance_id"])
            show = self.services.show_url(id=show_instance["show_id"])

            result["media"][key] = {
                "start": start.strftime("%Y-%m-%d-%H-%M-%S"),
                "end": end.strftime("%Y-%m-%d-%H-%M-%S"),
                "row_id": item["id"],
                "show_name": show["name"],
            }
            current = result["media"][key]
            if item["file"]:
                # File-backed track: fades are in milliseconds, cues in
                # seconds (as consumed downstream).
                current["independent_event"] = False
                current["type"] = "file"
                current["id"] = item["file_id"]

                fade_in = time_in_milliseconds(fromisoformat(item["fade_in"]))
                fade_out = time_in_milliseconds(fromisoformat(item["fade_out"]))

                cue_in = time_in_seconds(fromisoformat(item["cue_in"]))
                cue_out = time_in_seconds(fromisoformat(item["cue_out"]))

                current["fade_in"] = fade_in
                current["fade_out"] = fade_out
                current["cue_in"] = cue_in
                current["cue_out"] = cue_out

                info = self.services.file_url(id=item["file_id"])
                current["metadata"] = info
                current["uri"] = item["file"]
                current["replay_gain"] = info["replay_gain"]
                current["filesize"] = info["filesize"]
            elif item["stream"]:
                # Webstream: synthesize the buffer/output start and end
                # pseudo-events the player expects around a stream.
                current["independent_event"] = True
                current["id"] = item["stream_id"]
                info = self.services.webstream_url(id=item["stream_id"])
                current["uri"] = info["url"]
                current["type"] = "stream_buffer_start"
                # Stream events are instantaneous
                current["end"] = current["start"]

                # NOTE(review): these pseudo-events are stored on the
                # top-level result, not under "media" — verify consumers
                # expect this layout.
                result["{}_0".format(key)] = {
                    "id": current["id"],
                    "type": "stream_output_start",
                    "start": current["start"],
                    "end": current["start"],
                    "uri": current["uri"],
                    "row_id": current["row_id"],
                    "independent_event": current["independent_event"],
                }

                result[end.isoformat()] = {
                    "type": "stream_buffer_end",
                    "start": current["end"],
                    "end": current["end"],
                    "uri": current["uri"],
                    "row_id": current["row_id"],
                    "independent_event": current["independent_event"],
                }

                result["{}_0".format(end.isoformat())] = {
                    "type": "stream_output_end",
                    "start": current["end"],
                    "end": current["end"],
                    "uri": current["uri"],
                    "row_id": current["row_id"],
                    "independent_event": current["independent_event"],
                }
        return result

    def update_file(self, file_id, payload):
        """Merge payload into the file's current record and PUT it back."""
        data = self.services.file_url(id=file_id)
        data.update(payload)
        return self.services.file_url(id=file_id, _put_data=data)
|
Loading…
Add table
Add a link
Reference in a new issue