2021-05-27 15:21:02 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2010-11-30 00:34:22 +01:00
|
|
|
###############################################################################
|
|
|
|
# This file holds the implementations for all the API clients.
|
|
|
|
#
|
2012-08-09 18:15:45 +02:00
|
|
|
# If you want to develop a new client, here are some suggestions: Get the fetch
|
|
|
|
# methods working first, then the push, then the liquidsoap notifier. You will
|
|
|
|
# probably want to create a script on your server side to automatically
|
2010-11-30 00:34:22 +01:00
|
|
|
# schedule a playlist one minute from the current time.
|
|
|
|
###############################################################################
|
2010-11-08 22:54:54 +01:00
|
|
|
import sys
|
|
|
|
import time
|
2020-01-30 14:47:36 +01:00
|
|
|
import urllib.parse
|
2017-03-24 15:12:06 +01:00
|
|
|
import requests
|
2010-11-08 22:54:54 +01:00
|
|
|
import logging
|
2011-02-16 02:59:04 +01:00
|
|
|
import json
|
2011-07-08 23:14:01 +02:00
|
|
|
import base64
|
2013-04-03 23:57:29 +02:00
|
|
|
import traceback
|
2011-07-20 01:35:39 +02:00
|
|
|
from configobj import ConfigObj
|
2010-11-19 00:00:13 +01:00
|
|
|
|
2020-01-30 14:47:36 +01:00
|
|
|
from .utils import RequestProvider, ApiRequest, get_protocol
|
|
|
|
|
2014-02-14 22:22:15 +01:00
|
|
|
# Version of the server HTTP API this client is compatible with.
AIRTIME_API_VERSION = "1.1"


# Client-side settings merged into the ConfigObj configuration at startup.
api_config = {
    'api_base': 'api',
    'bin_dir': '/usr/lib/airtime/api_clients/',
}

# Route templates for every server action, keyed by action name.  The
# {placeholders} are filled in by the request machinery from .utils.
api_endpoints = {
    # URL to get the version number of the server API
    'version_url': 'version/api_key/{api_key}',
    # URL to register a components IP Address with the central web server
    'register_component': 'register-component/format/json/api_key/{api_key}/component/{component}',

    # media-monitor
    'media_setup_url': 'media-monitor-setup/format/json/api_key/{api_key}',
    'upload_recorded': 'upload-recorded/format/json/api_key/{api_key}/fileid/{fileid}/showinstanceid/{showinstanceid}',
    'update_media_url': 'reload-metadata/format/json/api_key/{api_key}/mode/{mode}',
    'list_all_db_files': 'list-all-files/format/json/api_key/{api_key}/dir_id/{dir_id}/all/{all}',
    'list_all_watched_dirs': 'list-all-watched-dirs/format/json/api_key/{api_key}',
    'add_watched_dir': 'add-watched-dir/format/json/api_key/{api_key}/path/{path}',
    'remove_watched_dir': 'remove-watched-dir/format/json/api_key/{api_key}/path/{path}',
    'set_storage_dir': 'set-storage-dir/format/json/api_key/{api_key}/path/{path}',
    'update_fs_mount': 'update-file-system-mount/format/json/api_key/{api_key}',
    'reload_metadata_group': 'reload-metadata-group/format/json/api_key/{api_key}',
    'handle_watched_dir_missing': 'handle-watched-dir-missing/format/json/api_key/{api_key}/dir/{dir}',

    # show-recorder
    'show_schedule_url': 'recorded-shows/format/json/api_key/{api_key}',
    'upload_file_url': 'rest/media',
    'upload_retries': '3',
    'upload_wait': '60',

    # pypo
    'export_url': 'schedule/api_key/{api_key}',
    'get_media_url': 'get-media/file/{file}/api_key/{api_key}',
    'update_item_url': 'notify-schedule-group-play/api_key/{api_key}/schedule_id/{schedule_id}',
    'update_start_playing_url': 'notify-media-item-start-play/api_key/{api_key}/media_id/{media_id}/',
    'get_stream_setting': 'get-stream-setting/format/json/api_key/{api_key}/',
    'update_liquidsoap_status': 'update-liquidsoap-status/format/json/api_key/{api_key}/msg/{msg}/stream_id/{stream_id}/boot_time/{boot_time}',
    'update_source_status': 'update-source-status/format/json/api_key/{api_key}/sourcename/{sourcename}/status/{status}',
    'check_live_stream_auth': 'check-live-stream-auth/format/json/api_key/{api_key}/username/{username}/password/{password}/djtype/{djtype}',
    'get_bootstrap_info': 'get-bootstrap-info/format/json/api_key/{api_key}',
    'get_files_without_replay_gain': 'get-files-without-replay-gain/api_key/{api_key}/dir_id/{dir_id}',
    'update_replay_gain_value': 'update-replay-gain-value/format/json/api_key/{api_key}',
    'notify_webstream_data': 'notify-webstream-data/api_key/{api_key}/media_id/{media_id}/format/json',
    'notify_liquidsoap_started': 'rabbitmq-do-push/api_key/{api_key}/format/json',
    'get_stream_parameters': 'get-stream-parameters/api_key/{api_key}/format/json',
    'push_stream_stats': 'push-stream-stats/api_key/{api_key}/format/json',
    'update_stream_setting_table': 'update-stream-setting-table/api_key/{api_key}/format/json',
    'get_files_without_silan_value': 'get-files-without-silan-value/api_key/{api_key}',
    'update_cue_values_by_silan': 'update-cue-values-by-silan/api_key/{api_key}',
    'update_metadata_on_tunein': 'update-metadata-on-tunein/api_key/{api_key}',
}
|
2013-04-19 00:02:54 +02:00
|
|
|
|
|
|
|
|
2010-11-30 00:34:22 +01:00
|
|
|
################################################################################
|
2020-01-30 14:47:36 +01:00
|
|
|
# Airtime API Version 1 Client
|
2010-11-30 00:34:22 +01:00
|
|
|
################################################################################
|
2012-10-29 16:43:39 +01:00
|
|
|
class AirtimeApiClient(object):
|
2014-12-16 18:24:41 +01:00
|
|
|
def __init__(self, logger=None,config_path='/etc/airtime/airtime.conf'):
|
2012-10-30 20:36:39 +01:00
|
|
|
if logger is None: self.logger = logging
|
|
|
|
else: self.logger = logger
|
2012-07-12 22:51:11 +02:00
|
|
|
|
2011-07-20 01:35:39 +02:00
|
|
|
# loading config file
|
|
|
|
try:
|
2012-07-17 22:16:06 +02:00
|
|
|
self.config = ConfigObj(config_path)
|
2013-04-19 00:02:54 +02:00
|
|
|
self.config.update(api_config)
|
2020-01-30 14:47:36 +01:00
|
|
|
self.services = RequestProvider(self.config, api_endpoints)
|
2020-01-16 15:32:51 +01:00
|
|
|
except Exception as e:
|
2020-01-23 11:37:49 +01:00
|
|
|
self.logger.exception('Error loading config file: %s', config_path)
|
2011-07-20 01:35:39 +02:00
|
|
|
sys.exit(1)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
|
|
|
def __get_airtime_version(self):
|
2020-01-16 15:32:51 +01:00
|
|
|
try: return self.services.version_url()['airtime_version']
|
2014-02-14 22:22:15 +01:00
|
|
|
except Exception: return -1
|
2021-01-19 15:23:50 +01:00
|
|
|
|
2014-02-14 22:22:15 +01:00
|
|
|
def __get_api_version(self):
|
2020-01-30 14:47:36 +01:00
|
|
|
try:
|
|
|
|
return self.services.version_url()['api_version']
|
|
|
|
except Exception as e:
|
|
|
|
self.logger.exception(e)
|
|
|
|
return -1
|
2011-02-23 23:03:27 +01:00
|
|
|
|
2012-07-12 23:58:29 +02:00
|
|
|
def is_server_compatible(self, verbose=True):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2014-02-14 22:22:15 +01:00
|
|
|
api_version = self.__get_api_version()
|
2012-11-07 17:43:49 +01:00
|
|
|
# logger.info('Airtime version found: ' + str(version))
|
2014-02-14 22:22:15 +01:00
|
|
|
if api_version == -1:
|
|
|
|
if verbose:
|
|
|
|
logger.info('Unable to get Airtime API version number.\n')
|
2011-05-16 21:33:31 +02:00
|
|
|
return False
|
2014-02-14 22:22:15 +01:00
|
|
|
elif api_version[0:3] != AIRTIME_API_VERSION[0:3]:
|
2013-02-04 22:05:58 +01:00
|
|
|
if verbose:
|
2014-02-14 22:22:15 +01:00
|
|
|
logger.info('Airtime API version found: ' + str(api_version))
|
|
|
|
logger.info('pypo is only compatible with API version: ' + AIRTIME_API_VERSION)
|
2011-05-16 21:33:31 +02:00
|
|
|
return False
|
2011-02-23 23:03:27 +01:00
|
|
|
else:
|
2013-02-04 22:05:58 +01:00
|
|
|
if verbose:
|
2014-02-14 22:22:15 +01:00
|
|
|
logger.info('Airtime API version found: ' + str(api_version))
|
|
|
|
logger.info('pypo is only compatible with API version: ' + AIRTIME_API_VERSION)
|
2011-02-23 23:03:27 +01:00
|
|
|
return True
|
|
|
|
|
|
|
|
|
2012-07-12 23:58:29 +02:00
|
|
|
def get_schedule(self):
|
2012-10-30 22:35:10 +01:00
|
|
|
# TODO : properly refactor this routine
|
2013-09-23 17:16:51 +02:00
|
|
|
# For now the return type is a little messed up for compatibility reasons
|
2012-10-31 15:47:08 +01:00
|
|
|
try: return (True, self.services.export_url())
|
2013-01-25 17:30:36 +01:00
|
|
|
except: return (False, None)
|
2011-02-23 23:03:27 +01:00
|
|
|
|
2012-08-28 21:00:02 +02:00
|
|
|
def notify_liquidsoap_started(self):
|
2013-02-03 06:40:41 +01:00
|
|
|
try:
|
|
|
|
self.services.notify_liquidsoap_started()
|
2020-01-16 15:32:51 +01:00
|
|
|
except Exception as e:
|
2020-01-23 11:37:49 +01:00
|
|
|
self.logger.exception(e)
|
2012-08-28 21:00:02 +02:00
|
|
|
|
2012-08-30 18:02:26 +02:00
|
|
|
def notify_media_item_start_playing(self, media_id):
|
2012-10-31 15:48:03 +01:00
|
|
|
""" This is a callback from liquidsoap, we use this to notify
|
|
|
|
about the currently playing *song*. We get passed a JSON string
|
|
|
|
which we handed to liquidsoap in get_liquidsoap_data(). """
|
2013-02-03 06:40:41 +01:00
|
|
|
try:
|
|
|
|
return self.services.update_start_playing_url(media_id=media_id)
|
2020-01-16 15:32:51 +01:00
|
|
|
except Exception as e:
|
2020-01-23 11:37:49 +01:00
|
|
|
self.logger.exception(e)
|
2013-02-03 06:40:41 +01:00
|
|
|
return None
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-03-25 04:07:13 +01:00
|
|
|
def get_shows_to_record(self):
|
2013-02-03 06:40:41 +01:00
|
|
|
try:
|
|
|
|
return self.services.show_schedule_url()
|
2020-01-16 15:32:51 +01:00
|
|
|
except Exception as e:
|
2020-01-23 11:37:49 +01:00
|
|
|
self.logger.exception(e)
|
2013-02-03 06:40:41 +01:00
|
|
|
return None
|
2011-03-25 04:07:13 +01:00
|
|
|
|
2017-03-24 15:12:06 +01:00
|
|
|
def upload_recorded_show(self, files, show_id):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-03-25 04:07:13 +01:00
|
|
|
response = ''
|
2011-03-29 22:10:00 +02:00
|
|
|
|
2011-03-29 22:32:31 +02:00
|
|
|
retries = int(self.config["upload_retries"])
|
|
|
|
retries_wait = int(self.config["upload_wait"])
|
2011-03-25 04:07:13 +01:00
|
|
|
|
2017-03-24 15:12:06 +01:00
|
|
|
url = self.construct_rest_url("upload_file_url")
|
2011-03-25 04:07:13 +01:00
|
|
|
|
2011-03-29 22:32:31 +02:00
|
|
|
logger.debug(url)
|
2011-03-25 04:07:13 +01:00
|
|
|
|
2011-03-29 22:32:31 +02:00
|
|
|
for i in range(0, retries):
|
2012-07-12 23:58:29 +02:00
|
|
|
logger.debug("Upload attempt: %s", i + 1)
|
2017-03-24 15:12:06 +01:00
|
|
|
logger.debug(files)
|
|
|
|
logger.debug(ApiRequest.API_HTTP_REQUEST_TIMEOUT)
|
2011-03-25 04:07:13 +01:00
|
|
|
|
2011-05-16 21:33:31 +02:00
|
|
|
try:
|
2017-03-24 15:12:06 +01:00
|
|
|
request = requests.post(url, files=files, timeout=float(ApiRequest.API_HTTP_REQUEST_TIMEOUT))
|
|
|
|
response = request.json()
|
|
|
|
logger.debug(response)
|
2011-03-29 22:32:31 +02:00
|
|
|
|
2017-03-24 15:12:06 +01:00
|
|
|
"""
|
|
|
|
FIXME: We need to tell LibreTime that the uploaded track was recorded for a specific show
|
|
|
|
|
|
|
|
My issue here is that response does not yet have an id. The id gets generated at the point
|
|
|
|
where analyzer is done with it's work. We probably need to do what is below in analyzer
|
|
|
|
and also make sure that the show instance id is routed all the way through.
|
|
|
|
|
|
|
|
It already gets uploaded by this but the RestController does not seem to care about it. In
|
|
|
|
the end analyzer doesn't have the info in it's rabbitmq message and imports the show as a
|
|
|
|
regular track.
|
|
|
|
|
|
|
|
logger.info("uploaded show result as file id %s", response.id)
|
|
|
|
|
|
|
|
url = self.construct_url("upload_recorded")
|
|
|
|
url = url.replace('%%fileid%%', response.id)
|
|
|
|
url = url.replace('%%showinstanceid%%', show_id)
|
|
|
|
request.get(url)
|
|
|
|
logger.info("associated uploaded file %s with show instance %s", response.id, show_id)
|
|
|
|
"""
|
2011-03-29 22:32:31 +02:00
|
|
|
break
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2020-01-16 15:32:51 +01:00
|
|
|
except requests.exceptions.HTTPError as e:
|
2011-03-29 22:32:31 +02:00
|
|
|
logger.error("Http error code: %s", e.code)
|
2017-03-24 15:12:06 +01:00
|
|
|
logger.error("traceback: %s", traceback.format_exc())
|
2020-01-16 15:32:51 +01:00
|
|
|
except requests.exceptions.ConnectionError as e:
|
2011-03-29 22:32:31 +02:00
|
|
|
logger.error("Server is down: %s", e.args)
|
2017-03-24 15:12:06 +01:00
|
|
|
logger.error("traceback: %s", traceback.format_exc())
|
2020-01-16 15:32:51 +01:00
|
|
|
except Exception as e:
|
2020-01-23 11:37:49 +01:00
|
|
|
self.logger.exception(e)
|
2011-03-29 22:32:31 +02:00
|
|
|
|
|
|
|
#wait some time before next retry
|
|
|
|
time.sleep(retries_wait)
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-03-25 04:07:13 +01:00
|
|
|
return response
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-03-02 22:55:11 +01:00
|
|
|
def check_live_stream_auth(self, username, password, dj_type):
|
2013-02-03 06:40:41 +01:00
|
|
|
try:
|
|
|
|
return self.services.check_live_stream_auth(
|
|
|
|
username=username, password=password, djtype=dj_type)
|
2020-01-16 15:32:51 +01:00
|
|
|
except Exception as e:
|
2020-01-23 11:37:49 +01:00
|
|
|
self.logger.exception(e)
|
2013-02-03 06:40:41 +01:00
|
|
|
return {}
|
2011-04-25 18:49:01 +02:00
|
|
|
|
2012-07-13 22:32:17 +02:00
|
|
|
def construct_url(self,config_action_key):
|
|
|
|
"""Constructs the base url for every request"""
|
|
|
|
# TODO : Make other methods in this class use this this method.
|
2014-12-17 01:42:07 +01:00
|
|
|
if self.config["general"]["base_dir"].startswith("/"):
|
|
|
|
self.config["general"]["base_dir"] = self.config["general"]["base_dir"][1:]
|
2021-01-19 15:23:50 +01:00
|
|
|
protocol = get_protocol(self.config)
|
2017-02-11 13:52:15 +01:00
|
|
|
url = "%s://%s:%s/%s%s/%s" % \
|
2021-01-19 15:23:50 +01:00
|
|
|
(protocol,
|
2017-02-11 13:52:15 +01:00
|
|
|
self.config["general"]["base_url"], str(self.config["general"]["base_port"]),
|
2014-12-17 01:42:07 +01:00
|
|
|
self.config["general"]["base_dir"], self.config["api_base"],
|
2012-10-29 22:46:47 +01:00
|
|
|
self.config[config_action_key])
|
2014-12-17 01:42:07 +01:00
|
|
|
url = url.replace("%%api_key%%", self.config["general"]["api_key"])
|
2012-07-13 22:32:17 +02:00
|
|
|
return url
|
|
|
|
|
2017-03-24 15:12:06 +01:00
|
|
|
def construct_rest_url(self,config_action_key):
|
|
|
|
"""Constructs the base url for RESTful requests"""
|
|
|
|
if self.config["general"]["base_dir"].startswith("/"):
|
|
|
|
self.config["general"]["base_dir"] = self.config["general"]["base_dir"][1:]
|
2021-01-19 15:23:50 +01:00
|
|
|
protocol = get_protocol(self.config)
|
2017-03-24 15:12:06 +01:00
|
|
|
url = "%s://%s:@%s:%s/%s/%s" % \
|
2021-01-19 15:23:50 +01:00
|
|
|
(protocol, self.config["general"]["api_key"],
|
2017-03-24 15:12:06 +01:00
|
|
|
self.config["general"]["base_url"], str(self.config["general"]["base_port"]),
|
|
|
|
self.config["general"]["base_dir"],
|
|
|
|
self.config[config_action_key])
|
|
|
|
return url
|
|
|
|
|
|
|
|
|
2013-02-04 22:05:58 +01:00
|
|
|
"""
|
|
|
|
Caller of this method needs to catch any exceptions such as
|
|
|
|
ValueError thrown by json.loads or URLError by urllib2.urlopen
|
|
|
|
"""
|
2011-06-13 12:10:25 +02:00
|
|
|
def setup_media_monitor(self):
|
2013-02-04 22:05:58 +01:00
|
|
|
return self.services.media_setup_url()
|
2011-06-13 12:10:25 +02:00
|
|
|
|
2012-07-17 22:16:06 +02:00
|
|
|
def send_media_monitor_requests(self, action_list, dry=False):
|
2012-07-16 22:43:48 +02:00
|
|
|
"""
|
2012-10-29 16:40:41 +01:00
|
|
|
Send a gang of media monitor events at a time. actions_list is a
|
|
|
|
list of dictionaries where every dictionary is representing an
|
|
|
|
action. Every action dict must contain a 'mode' key that says
|
|
|
|
what kind of action it is and an optional 'is_record' key that
|
|
|
|
says whether the show was recorded or not. The value of this key
|
|
|
|
does not matter, only if it's present or not.
|
2012-07-16 22:43:48 +02:00
|
|
|
"""
|
2012-10-29 16:40:41 +01:00
|
|
|
# We are assuming that action_list is a list of dictionaries such
|
|
|
|
# that every dictionary represents the metadata of a file along
|
|
|
|
# with a special mode key that is the action to be executed by the
|
|
|
|
# controller.
|
|
|
|
valid_actions = []
|
|
|
|
# We could get a list of valid_actions in a much shorter way using
|
|
|
|
# filter but here we prefer a little more verbosity to help
|
|
|
|
# debugging
|
|
|
|
for action in action_list:
|
|
|
|
if not 'mode' in action:
|
|
|
|
self.logger.debug("Warning: Trying to send a request element without a 'mode'")
|
|
|
|
self.logger.debug("Here is the the request: '%s'" % str(action) )
|
|
|
|
else:
|
|
|
|
# We alias the value of is_record to true or false no
|
|
|
|
# matter what it is based on if it's absent in the action
|
|
|
|
if 'is_record' not in action:
|
|
|
|
action['is_record'] = 0
|
|
|
|
valid_actions.append(action)
|
|
|
|
# Note that we must prefix every key with: mdX where x is a number
|
|
|
|
# Is there a way to format the next line a little better? The
|
|
|
|
# parenthesis make the code almost unreadable
|
2020-01-20 13:44:17 +01:00
|
|
|
md_list = dict((("md%d" % i), json.dumps(md)) \
|
2012-10-29 16:40:41 +01:00
|
|
|
for i,md in enumerate(valid_actions))
|
|
|
|
# For testing we add the following "dry" parameter to tell the
|
|
|
|
# controller not to actually do any changes
|
|
|
|
if dry: md_list['dry'] = 1
|
|
|
|
self.logger.info("Pumping out %d requests..." % len(valid_actions))
|
2012-10-31 16:22:04 +01:00
|
|
|
return self.services.reload_metadata_group(_post_data=md_list)
|
2012-07-13 20:51:43 +02:00
|
|
|
|
2011-07-07 23:41:08 +02:00
|
|
|
#returns a list of all db files for a given directory in JSON format:
|
|
|
|
#{"files":["path/to/file1", "path/to/file2"]}
|
2011-07-21 12:12:37 +02:00
|
|
|
#Note that these are relative paths to the given directory. The full
|
2011-07-07 23:41:08 +02:00
|
|
|
#path is not returned.
|
2012-09-10 22:29:17 +02:00
|
|
|
def list_all_db_files(self, dir_id, all_files=True):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-07-04 20:29:09 +02:00
|
|
|
try:
|
2020-01-16 15:32:51 +01:00
|
|
|
all_files = "1" if all_files else "0"
|
2012-10-30 22:17:02 +01:00
|
|
|
response = self.services.list_all_db_files(dir_id=dir_id,
|
|
|
|
all=all_files)
|
2020-01-16 15:32:51 +01:00
|
|
|
except Exception as e:
|
2012-07-16 23:41:36 +02:00
|
|
|
response = {}
|
2011-07-04 20:29:09 +02:00
|
|
|
logger.error("Exception: %s", e)
|
2012-07-11 22:48:46 +02:00
|
|
|
try:
|
2012-07-11 23:09:17 +02:00
|
|
|
return response["files"]
|
|
|
|
except KeyError:
|
2012-09-10 22:29:17 +02:00
|
|
|
self.logger.error("Could not find index 'files' in dictionary: %s",
|
|
|
|
str(response))
|
2012-07-11 22:48:46 +02:00
|
|
|
return []
|
2013-02-04 22:05:58 +01:00
|
|
|
"""
|
|
|
|
Caller of this method needs to catch any exceptions such as
|
|
|
|
ValueError thrown by json.loads or URLError by urllib2.urlopen
|
|
|
|
"""
|
2011-07-04 21:08:02 +02:00
|
|
|
def list_all_watched_dirs(self):
|
2013-02-04 22:05:58 +01:00
|
|
|
return self.services.list_all_watched_dirs()
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2013-02-04 22:05:58 +01:00
|
|
|
"""
|
|
|
|
Caller of this method needs to catch any exceptions such as
|
|
|
|
ValueError thrown by json.loads or URLError by urllib2.urlopen
|
|
|
|
"""
|
2011-07-08 23:14:01 +02:00
|
|
|
def add_watched_dir(self, path):
|
2013-02-04 22:05:58 +01:00
|
|
|
return self.services.add_watched_dir(path=base64.b64encode(path))
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2013-02-04 22:05:58 +01:00
|
|
|
"""
|
|
|
|
Caller of this method needs to catch any exceptions such as
|
|
|
|
ValueError thrown by json.loads or URLError by urllib2.urlopen
|
|
|
|
"""
|
2011-07-08 23:14:01 +02:00
|
|
|
def remove_watched_dir(self, path):
|
2013-02-04 22:05:58 +01:00
|
|
|
return self.services.remove_watched_dir(path=base64.b64encode(path))
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2013-02-04 22:05:58 +01:00
|
|
|
"""
|
|
|
|
Caller of this method needs to catch any exceptions such as
|
|
|
|
ValueError thrown by json.loads or URLError by urllib2.urlopen
|
|
|
|
"""
|
2011-07-08 23:14:01 +02:00
|
|
|
def set_storage_dir(self, path):
|
2012-10-30 21:23:35 +01:00
|
|
|
return self.services.set_storage_dir(path=base64.b64encode(path))
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2013-02-04 22:05:58 +01:00
|
|
|
"""
|
|
|
|
Caller of this method needs to catch any exceptions such as
|
|
|
|
ValueError thrown by json.loads or URLError by urllib2.urlopen
|
|
|
|
"""
|
2011-08-12 21:19:30 +02:00
|
|
|
def get_stream_setting(self):
|
2013-02-04 22:05:58 +01:00
|
|
|
return self.services.get_stream_setting()
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-09-16 23:51:28 +02:00
|
|
|
def register_component(self, component):
|
2012-10-31 15:54:02 +01:00
|
|
|
""" Purpose of this method is to contact the server with a "Hey its
|
|
|
|
me!" message. This will allow the server to register the component's
|
|
|
|
(component = media-monitor, pypo etc.) ip address, and later use it
|
|
|
|
to query monit via monit's http service, or download log files via a
|
|
|
|
http server. """
|
2013-02-04 22:05:58 +01:00
|
|
|
return self.services.register_component(component=component)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2011-12-24 16:59:09 +01:00
|
|
|
def notify_liquidsoap_status(self, msg, stream_id, time):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-10-11 02:14:27 +02:00
|
|
|
try:
|
2013-04-04 21:12:52 +02:00
|
|
|
post_data = {"msg_post": msg}
|
|
|
|
|
|
|
|
#encoded_msg is no longer used server_side!!
|
2020-01-16 15:32:51 +01:00
|
|
|
encoded_msg = urllib.parse.quote('dummy')
|
2013-04-03 23:57:29 +02:00
|
|
|
self.services.update_liquidsoap_status.req(post_data,
|
2013-04-04 21:12:52 +02:00
|
|
|
msg=encoded_msg,
|
2013-04-03 23:57:29 +02:00
|
|
|
stream_id=stream_id,
|
|
|
|
boot_time=time).retry(5)
|
2020-01-16 15:32:51 +01:00
|
|
|
except Exception as e:
|
2020-01-23 11:37:49 +01:00
|
|
|
self.logger.exception(e)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-03-08 23:42:38 +01:00
|
|
|
def notify_source_status(self, sourcename, status):
|
|
|
|
try:
|
2012-10-31 17:48:11 +01:00
|
|
|
return self.services.update_source_status.req(sourcename=sourcename,
|
|
|
|
status=status).retry(5)
|
2020-01-16 15:32:51 +01:00
|
|
|
except Exception as e:
|
2020-01-23 11:37:49 +01:00
|
|
|
self.logger.exception(e)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-03-20 21:41:15 +01:00
|
|
|
def get_bootstrap_info(self):
|
2013-02-03 06:40:41 +01:00
|
|
|
""" Retrieve infomations needed on bootstrap time """
|
2013-02-04 22:05:58 +01:00
|
|
|
return self.services.get_bootstrap_info()
|
2012-07-12 23:58:29 +02:00
|
|
|
|
|
|
|
def get_files_without_replay_gain_value(self, dir_id):
|
|
|
|
"""
|
2012-08-08 23:56:46 +02:00
|
|
|
Download a list of files that need to have their ReplayGain value
|
|
|
|
calculated. This list of files is downloaded into a file and the path
|
|
|
|
to this file is the return value.
|
2012-07-12 23:58:29 +02:00
|
|
|
"""
|
|
|
|
#http://localhost/api/get-files-without-replay-gain/dir_id/1
|
2013-02-03 06:40:41 +01:00
|
|
|
try:
|
|
|
|
return self.services.get_files_without_replay_gain(dir_id=dir_id)
|
2020-01-16 15:32:51 +01:00
|
|
|
except Exception as e:
|
2020-01-23 11:37:49 +01:00
|
|
|
self.logger.exception(e)
|
2013-02-04 22:05:58 +01:00
|
|
|
return []
|
2013-01-25 17:30:36 +01:00
|
|
|
|
2013-01-18 17:53:26 +01:00
|
|
|
def get_files_without_silan_value(self):
|
|
|
|
"""
|
|
|
|
Download a list of files that need to have their cue in/out value
|
|
|
|
calculated. This list of files is downloaded into a file and the path
|
|
|
|
to this file is the return value.
|
|
|
|
"""
|
2013-02-03 06:40:41 +01:00
|
|
|
try:
|
|
|
|
return self.services.get_files_without_silan_value()
|
2020-01-16 15:32:51 +01:00
|
|
|
except Exception as e:
|
2020-01-23 11:37:49 +01:00
|
|
|
self.logger.exception(e)
|
2013-02-04 22:05:58 +01:00
|
|
|
return []
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-07-16 02:57:40 +02:00
|
|
|
def update_replay_gain_values(self, pairs):
|
|
|
|
"""
|
2012-08-09 18:15:45 +02:00
|
|
|
'pairs' is a list of pairs in (x, y), where x is the file's database
|
|
|
|
row id and y is the file's replay_gain value in dB
|
2012-07-16 02:57:40 +02:00
|
|
|
"""
|
2013-02-04 22:05:58 +01:00
|
|
|
self.logger.debug(self.services.update_replay_gain_value(
|
|
|
|
_post_data={'data': json.dumps(pairs)}))
|
2013-02-03 06:40:41 +01:00
|
|
|
|
2013-01-25 17:30:36 +01:00
|
|
|
|
2013-01-18 17:53:26 +01:00
|
|
|
def update_cue_values_by_silan(self, pairs):
|
|
|
|
"""
|
|
|
|
'pairs' is a list of pairs in (x, y), where x is the file's database
|
|
|
|
row id and y is the file's cue values in dB
|
|
|
|
"""
|
2013-02-04 22:05:58 +01:00
|
|
|
return self.services.update_cue_values_by_silan(_post_data={'data': json.dumps(pairs)})
|
2012-08-15 21:12:44 +02:00
|
|
|
|
|
|
|
|
|
|
|
def notify_webstream_data(self, data, media_id):
|
|
|
|
"""
|
|
|
|
Update the server with the latest metadata we've received from the
|
|
|
|
external webstream
|
|
|
|
"""
|
2013-02-04 22:05:58 +01:00
|
|
|
self.logger.info( self.services.notify_webstream_data.req(
|
|
|
|
_post_data={'data':data}, media_id=str(media_id)).retry(5))
|
2012-11-05 20:02:55 +01:00
|
|
|
|
2012-11-02 22:50:43 +01:00
|
|
|
def get_stream_parameters(self):
|
2013-02-04 22:05:58 +01:00
|
|
|
response = self.services.get_stream_parameters()
|
|
|
|
self.logger.debug(response)
|
|
|
|
return response
|
2012-11-21 18:26:37 +01:00
|
|
|
|
2012-11-02 22:50:43 +01:00
|
|
|
def push_stream_stats(self, data):
|
2012-11-07 23:20:12 +01:00
|
|
|
# TODO : users of this method should do their own error handling
|
2013-02-04 22:05:58 +01:00
|
|
|
response = self.services.push_stream_stats(_post_data={'data': json.dumps(data)})
|
|
|
|
return response
|
2013-01-08 23:32:27 +01:00
|
|
|
|
|
|
|
def update_stream_setting_table(self, data):
|
2013-02-03 06:40:41 +01:00
|
|
|
try:
|
|
|
|
response = self.services.update_stream_setting_table(_post_data={'data': json.dumps(data)})
|
|
|
|
return response
|
2020-01-16 15:32:51 +01:00
|
|
|
except Exception as e:
|
2020-01-23 11:37:49 +01:00
|
|
|
self.logger.exception(e)
|
2013-04-27 00:27:40 +02:00
|
|
|
|
2015-05-25 21:37:45 +02:00
|
|
|
def update_metadata_on_tunein(self):
|
|
|
|
self.services.update_metadata_on_tunein()
|
|
|
|
|
2013-04-27 00:27:40 +02:00
|
|
|
|
|
|
|
class InvalidContentType(Exception):
    """Exception raised for an invalid content type."""
|