add API v2
parent f809c3a8ff
commit 2df0189a90
71 changed files with 2740 additions and 315 deletions
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
__all__ = ["api_client"]
__all__ = ["version1"]

python_apps/api_clients/api_clients/utils.py (new file, 166 lines)
@@ -0,0 +1,166 @@
import json
import logging
import socket
import time  # needed by ApiRequest.retry(), which calls time.sleep()

import requests
from requests.auth import AuthBase


def get_protocol(config):
    positive_values = ['Yes', 'yes', 'True', 'true', True]
    port = config['general'].get('base_port', 80)
    force_ssl = config['general'].get('force_ssl', False)
    if force_ssl in positive_values:
        protocol = 'https'
    else:
        protocol = config['general'].get('protocol')
        if not protocol:
            protocol = str(("http", "https")[int(port) == 443])
    return protocol


class UrlParamDict(dict):
    def __missing__(self, key):
        return '{' + key + '}'


class UrlException(Exception): pass


class IncompleteUrl(UrlException):
    def __init__(self, url):
        self.url = url

    def __str__(self):
        return "Incomplete url: '{}'".format(self.url)


class UrlBadParam(UrlException):
    def __init__(self, url, param):
        self.url = url
        self.param = param

    def __str__(self):
        return "Bad param '{}' passed into url: '{}'".format(self.param, self.url)


class KeyAuth(AuthBase):
    def __init__(self, key):
        self.key = key

    def __call__(self, r):
        r.headers['Authorization'] = "Api-Key {}".format(self.key)
        return r


class ApcUrl:
    """A safe, testable abstraction for filling in parameters in
    api_client.cfg."""
    def __init__(self, base_url):
        self.base_url = base_url

    def params(self, **params):
        temp_url = self.base_url
        for k, v in params.items():
            wrapped_param = "{" + k + "}"
            if not wrapped_param in temp_url:
                raise UrlBadParam(self.base_url, k)
        temp_url = temp_url.format_map(UrlParamDict(**params))
        return ApcUrl(temp_url)

    def url(self):
        if '{' in self.base_url:
            raise IncompleteUrl(self.base_url)
        else:
            return self.base_url


class ApiRequest:
    API_HTTP_REQUEST_TIMEOUT = 30  # 30 second HTTP request timeout

    def __init__(self, name, url, logger=None, api_key=None):
        self.name = name
        self.url = url
        self.__req = None
        if logger is None:
            self.logger = logging
        else:
            self.logger = logger
        self.auth = KeyAuth(api_key)

    def __call__(self, _post_data=None, params=None, **kwargs):
        final_url = self.url.params(**kwargs).url()
        self.logger.debug(final_url)
        try:
            if _post_data:
                response = requests.post(final_url,
                                         data=_post_data, auth=self.auth,
                                         timeout=ApiRequest.API_HTTP_REQUEST_TIMEOUT)
            else:
                response = requests.get(final_url, params=params, auth=self.auth,
                                        timeout=ApiRequest.API_HTTP_REQUEST_TIMEOUT)
            if 'application/json' in response.headers['content-type']:
                return response.json()
            return response
        except requests.exceptions.Timeout:
            self.logger.error('HTTP request to %s timed out', final_url)
            raise

    def req(self, *args, **kwargs):
        self.__req = lambda : self(*args, **kwargs)
        return self

    def retry(self, n, delay=5):
        """Try to send the request up to n times; if every attempt fails,
        the exception from the final attempt is raised."""
        for i in range(0, n - 1):
            try:
                return self.__req()
            except Exception:
                time.sleep(delay)
        return self.__req()


class RequestProvider:
    """Creates the available ApiRequest instances from a config file and a
    map of endpoint templates."""
    def __init__(self, cfg, endpoints):
        self.config = cfg
        self.requests = {}
        if self.config["general"]["base_dir"].startswith("/"):
            self.config["general"]["base_dir"] = self.config["general"]["base_dir"][1:]

        protocol = get_protocol(self.config)
        base_port = self.config['general']['base_port']
        base_url = self.config['general']['base_url']
        base_dir = self.config['general']['base_dir']
        api_base = self.config['api_base']
        api_url = "{protocol}://{base_url}:{base_port}/{base_dir}{api_base}/{action}".format_map(
            UrlParamDict(protocol=protocol,
                         base_url=base_url,
                         base_port=base_port,
                         base_dir=base_dir,
                         api_base=api_base
                         ))
        self.url = ApcUrl(api_url)

        # Now we must discover the possible actions
        for action_name, action_value in endpoints.items():
            new_url = self.url.params(action=action_value)
            if '{api_key}' in action_value:
                new_url = new_url.params(api_key=self.config["general"]['api_key'])
            self.requests[action_name] = ApiRequest(action_name,
                                                    new_url,
                                                    api_key=self.config['general']['api_key'])

    def available_requests(self):
        return list(self.requests.keys())

    def __contains__(self, request):
        return request in self.requests

    def __getattr__(self, attr):
        if attr in self:
            return self.requests[attr]
        else:
            return super(RequestProvider, self).__getattribute__(attr)


def time_in_seconds(time):
    return time.hour * 60 * 60 + \
        time.minute * 60 + \
        time.second + \
        time.microsecond / 1000000.0


def time_in_milliseconds(time):
    return time_in_seconds(time) * 1000
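
For orientation, the helpers above compose roughly as follows: RequestProvider builds one ApiRequest per endpoint template, ApcUrl fills in the {param} placeholders, and KeyAuth attaches the Api-Key header when the request is actually sent. This snippet is not part of the commit, and the config values in it are invented:

    # Hypothetical usage sketch of the utils module above (values are invented).
    from api_clients.utils import RequestProvider

    config = {
        'general': {'base_url': 'localhost', 'base_port': 80,
                    'base_dir': '/', 'api_key': 'SECRET'},
        'api_base': 'api/v2',
    }
    endpoints = {'version_url': 'version/'}

    provider = RequestProvider(config, endpoints)
    print(provider.available_requests())  # -> ['version_url']
    # Calling provider.version_url() would GET http://localhost:80/api/v2/version/
    # with an "Authorization: Api-Key SECRET" header and return the parsed JSON body.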

@@ -8,202 +8,71 @@
###############################################################################
import sys
import time
import urllib.request, urllib.error, urllib.parse
import urllib.parse
import requests
import socket
import logging
import json
import base64
import traceback
from configobj import ConfigObj

from .utils import RequestProvider, ApiRequest, get_protocol

AIRTIME_API_VERSION = "1.1"


api_config = {}
api_endpoints = {}

# URL to get the version number of the server API
api_config['version_url'] = 'version/api_key/%%api_key%%'
api_endpoints['version_url'] = 'version/api_key/{api_key}'
#URL to register a component's IP address with the central web server
api_config['register_component'] = 'register-component/format/json/api_key/%%api_key%%/component/%%component%%'
api_endpoints['register_component'] = 'register-component/format/json/api_key/{api_key}/component/{component}'

#media-monitor
api_config['media_setup_url'] = 'media-monitor-setup/format/json/api_key/%%api_key%%'
api_config['upload_recorded'] = 'upload-recorded/format/json/api_key/%%api_key%%/fileid/%%fileid%%/showinstanceid/%%showinstanceid%%'
api_config['update_media_url'] = 'reload-metadata/format/json/api_key/%%api_key%%/mode/%%mode%%'
api_config['list_all_db_files'] = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%/all/%%all%%'
api_config['list_all_watched_dirs'] = 'list-all-watched-dirs/format/json/api_key/%%api_key%%'
api_config['add_watched_dir'] = 'add-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'
api_config['remove_watched_dir'] = 'remove-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'
api_config['set_storage_dir'] = 'set-storage-dir/format/json/api_key/%%api_key%%/path/%%path%%'
api_config['update_fs_mount'] = 'update-file-system-mount/format/json/api_key/%%api_key%%'
api_config['reload_metadata_group'] = 'reload-metadata-group/format/json/api_key/%%api_key%%'
api_config['handle_watched_dir_missing'] = 'handle-watched-dir-missing/format/json/api_key/%%api_key%%/dir/%%dir%%'
api_endpoints['media_setup_url'] = 'media-monitor-setup/format/json/api_key/{api_key}'
api_endpoints['upload_recorded'] = 'upload-recorded/format/json/api_key/{api_key}/fileid/{fileid}/showinstanceid/{showinstanceid}'
api_endpoints['update_media_url'] = 'reload-metadata/format/json/api_key/{api_key}/mode/{mode}'
api_endpoints['list_all_db_files'] = 'list-all-files/format/json/api_key/{api_key}/dir_id/{dir_id}/all/{all}'
api_endpoints['list_all_watched_dirs'] = 'list-all-watched-dirs/format/json/api_key/{api_key}'
api_endpoints['add_watched_dir'] = 'add-watched-dir/format/json/api_key/{api_key}/path/{path}'
api_endpoints['remove_watched_dir'] = 'remove-watched-dir/format/json/api_key/{api_key}/path/{path}'
api_endpoints['set_storage_dir'] = 'set-storage-dir/format/json/api_key/{api_key}/path/{path}'
api_endpoints['update_fs_mount'] = 'update-file-system-mount/format/json/api_key/{api_key}'
api_endpoints['reload_metadata_group'] = 'reload-metadata-group/format/json/api_key/{api_key}'
api_endpoints['handle_watched_dir_missing'] = 'handle-watched-dir-missing/format/json/api_key/{api_key}/dir/{dir}'
#show-recorder
api_config['show_schedule_url'] = 'recorded-shows/format/json/api_key/%%api_key%%'
api_config['upload_file_url'] = 'rest/media'
api_config['upload_retries'] = '3'
api_config['upload_wait'] = '60'
api_endpoints['show_schedule_url'] = 'recorded-shows/format/json/api_key/{api_key}'
api_endpoints['upload_file_url'] = 'rest/media'
api_endpoints['upload_retries'] = '3'
api_endpoints['upload_wait'] = '60'
#pypo
api_config['export_url'] = 'schedule/api_key/%%api_key%%'
api_config['get_media_url'] = 'get-media/file/%%file%%/api_key/%%api_key%%'
api_config['update_item_url'] = 'notify-schedule-group-play/api_key/%%api_key%%/schedule_id/%%schedule_id%%'
api_config['update_start_playing_url'] = 'notify-media-item-start-play/api_key/%%api_key%%/media_id/%%media_id%%/'
api_config['get_stream_setting'] = 'get-stream-setting/format/json/api_key/%%api_key%%/'
api_config['update_liquidsoap_status'] = 'update-liquidsoap-status/format/json/api_key/%%api_key%%/msg/%%msg%%/stream_id/%%stream_id%%/boot_time/%%boot_time%%'
api_config['update_source_status'] = 'update-source-status/format/json/api_key/%%api_key%%/sourcename/%%sourcename%%/status/%%status%%'
api_config['check_live_stream_auth'] = 'check-live-stream-auth/format/json/api_key/%%api_key%%/username/%%username%%/password/%%password%%/djtype/%%djtype%%'
api_config['get_bootstrap_info'] = 'get-bootstrap-info/format/json/api_key/%%api_key%%'
api_config['get_files_without_replay_gain'] = 'get-files-without-replay-gain/api_key/%%api_key%%/dir_id/%%dir_id%%'
api_config['update_replay_gain_value'] = 'update-replay-gain-value/format/json/api_key/%%api_key%%'
api_config['notify_webstream_data'] = 'notify-webstream-data/api_key/%%api_key%%/media_id/%%media_id%%/format/json'
api_config['notify_liquidsoap_started'] = 'rabbitmq-do-push/api_key/%%api_key%%/format/json'
api_config['get_stream_parameters'] = 'get-stream-parameters/api_key/%%api_key%%/format/json'
api_config['push_stream_stats'] = 'push-stream-stats/api_key/%%api_key%%/format/json'
api_config['update_stream_setting_table'] = 'update-stream-setting-table/api_key/%%api_key%%/format/json'
api_config['get_files_without_silan_value'] = 'get-files-without-silan-value/api_key/%%api_key%%'
api_config['update_cue_values_by_silan'] = 'update-cue-values-by-silan/api_key/%%api_key%%'
api_endpoints['export_url'] = 'schedule/api_key/{api_key}'
api_endpoints['get_media_url'] = 'get-media/file/{file}/api_key/{api_key}'
api_endpoints['update_item_url'] = 'notify-schedule-group-play/api_key/{api_key}/schedule_id/{schedule_id}'
api_endpoints['update_start_playing_url'] = 'notify-media-item-start-play/api_key/{api_key}/media_id/{media_id}/'
api_endpoints['get_stream_setting'] = 'get-stream-setting/format/json/api_key/{api_key}/'
api_endpoints['update_liquidsoap_status'] = 'update-liquidsoap-status/format/json/api_key/{api_key}/msg/{msg}/stream_id/{stream_id}/boot_time/{boot_time}'
api_endpoints['update_source_status'] = 'update-source-status/format/json/api_key/{api_key}/sourcename/{sourcename}/status/{status}'
api_endpoints['check_live_stream_auth'] = 'check-live-stream-auth/format/json/api_key/{api_key}/username/{username}/password/{password}/djtype/{djtype}'
api_endpoints['get_bootstrap_info'] = 'get-bootstrap-info/format/json/api_key/{api_key}'
api_endpoints['get_files_without_replay_gain'] = 'get-files-without-replay-gain/api_key/{api_key}/dir_id/{dir_id}'
api_endpoints['update_replay_gain_value'] = 'update-replay-gain-value/format/json/api_key/{api_key}'
api_endpoints['notify_webstream_data'] = 'notify-webstream-data/api_key/{api_key}/media_id/{media_id}/format/json'
api_endpoints['notify_liquidsoap_started'] = 'rabbitmq-do-push/api_key/{api_key}/format/json'
api_endpoints['get_stream_parameters'] = 'get-stream-parameters/api_key/{api_key}/format/json'
api_endpoints['push_stream_stats'] = 'push-stream-stats/api_key/{api_key}/format/json'
api_endpoints['update_stream_setting_table'] = 'update-stream-setting-table/api_key/{api_key}/format/json'
api_endpoints['get_files_without_silan_value'] = 'get-files-without-silan-value/api_key/{api_key}'
api_endpoints['update_cue_values_by_silan'] = 'update-cue-values-by-silan/api_key/{api_key}'
api_endpoints['update_metadata_on_tunein'] = 'update-metadata-on-tunein/api_key/{api_key}'
api_config['api_base'] = 'api'
api_config['bin_dir'] = '/usr/lib/airtime/api_clients/'
api_config['update_metadata_on_tunein'] = 'update-metadata-on-tunein/api_key/%%api_key%%'

def get_protocol(config):
    positive_values = ['Yes', 'yes', 'True', 'true', True]
    port = config['general'].get('base_port', 80)
    force_ssl = config['general'].get('force_ssl', False)
    if force_ssl in positive_values:
        protocol = 'https'
    else:
        protocol = config['general'].get('protocol')
        if not protocol:
            protocol = str(("http", "https")[int(port) == 443])
    return protocol


################################################################################
# Airtime API Client
# Airtime API Version 1 Client
################################################################################

class UrlException(Exception): pass

class IncompleteUrl(UrlException):
    def __init__(self, url): self.url = url
    def __str__(self): return "Incomplete url: '%s'" % self.url

class UrlBadParam(UrlException):
    def __init__(self, url, param):
        self.url = url
        self.param = param
    def __str__(self):
        return "Bad param '%s' passed into url: '%s'" % (self.param, self.url)

class ApcUrl(object):
    """ A safe abstraction and testable for filling in parameters in
    api_client.cfg"""
    def __init__(self, base_url): self.base_url = base_url

    def params(self, **params):
        temp_url = self.base_url
        for k, v in params.items():
            wrapped_param = "%%" + k + "%%"
            if wrapped_param in temp_url:
                temp_url = temp_url.replace(wrapped_param, str(v))
            else: raise UrlBadParam(self.base_url, k)
        return ApcUrl(temp_url)

    def url(self):
        if '%%' in self.base_url: raise IncompleteUrl(self.base_url)
        else: return self.base_url

class ApiRequest(object):

    API_HTTP_REQUEST_TIMEOUT = 30 # 30 second HTTP request timeout

    def __init__(self, name, url, logger=None):
        self.name = name
        self.url = url
        self.__req = None
        if logger is None: self.logger = logging
        else: self.logger = logger

    def __call__(self,_post_data=None, **kwargs):
        final_url = self.url.params(**kwargs).url()
        if _post_data is not None:
            _post_data = urllib.parse.urlencode(_post_data).encode('utf-8')
        self.logger.debug(final_url)
        try:
            req = urllib.request.Request(final_url, _post_data)
            f = urllib.request.urlopen(req, timeout=ApiRequest.API_HTTP_REQUEST_TIMEOUT)
            content_type = f.info().get_content_type()
            response = f.read()
        #Everything that calls an ApiRequest should be catching URLError explicitly
        #(according to the other comments in this file and a cursory grep through the code)
        #Note that URLError can occur for timeouts as well as socket.timeout
        except socket.timeout:
            self.logger.error('HTTP request to %s timed out', final_url)
            raise
        except Exception as e:
            #self.logger.exception(e)
            raise

        try:
            if content_type == 'application/json':
                try:
                    response = response.decode()
                except (UnicodeDecodeError, AttributeError):
                    pass
                data = json.loads(response)
                return data
            else:
                raise InvalidContentType()
        except Exception:
            #self.logger.exception(e)
            raise

    def req(self, *args, **kwargs):
        self.__req = lambda : self(*args, **kwargs)
        return self

    def retry(self, n, delay=5):
        """Try to send request n times. If after n times it fails then
        we finally raise exception"""
        for i in range(0,n-1):
            try: return self.__req()
            except Exception: time.sleep(delay)
        return self.__req()

class RequestProvider(object):
    """ Creates the available ApiRequest instance that can be read from
    a config file """
    def __init__(self, cfg):
        self.config = cfg
        self.requests = {}
        if self.config["general"]["base_dir"].startswith("/"):
            self.config["general"]["base_dir"] = self.config["general"]["base_dir"][1:]
        protocol = get_protocol(self.config)

        self.url = ApcUrl("%s://%s:%s/%s%s/%s" \
            % (protocol, self.config["general"]["base_url"],
               str(self.config["general"]["base_port"]),
               self.config["general"]["base_dir"], self.config["api_base"],
               '%%action%%'))
        # Now we must discover the possible actions
        actions = dict( (k,v) for k,v in cfg.items() if '%%api_key%%' in v)
        for action_name, action_value in actions.items():
            new_url = self.url.params(action=action_value).params(
                api_key=self.config["general"]['api_key'])
            self.requests[action_name] = ApiRequest(action_name, new_url)

    def available_requests(self) : return list(self.requests.keys())
    def __contains__(self, request) : return request in self.requests

    def __getattr__(self, attr):
        if attr in self:
            return self.requests[attr]
        else:
            return super(RequestProvider, self).__getattribute__(attr)


class AirtimeApiClient(object):
    def __init__(self, logger=None,config_path='/etc/airtime/airtime.conf'):
        if logger is None: self.logger = logging

@@ -213,7 +82,7 @@ class AirtimeApiClient(object):
        try:
            self.config = ConfigObj(config_path)
            self.config.update(api_config)
            self.services = RequestProvider(self.config)
            self.services = RequestProvider(self.config, api_endpoints)
        except Exception as e:
            self.logger.exception('Error loading config file: %s', config_path)
            sys.exit(1)

@@ -223,8 +92,11 @@ class AirtimeApiClient(object):
        except Exception: return -1

    def __get_api_version(self):
        try: return self.services.version_url()['api_version']
        except Exception: return -1
        try:
            return self.services.version_url()['api_version']
        except Exception as e:
            self.logger.exception(e)
            return -1

    def is_server_compatible(self, verbose=True):
        logger = self.logger
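
The core mechanical change in the hunks above is the placeholder style: the old v1 client filled %%param%% markers with str.replace, while the shared utils module now fills {param} markers with str.format_map. A minimal sketch of that difference, not code from the commit (UrlParamDict here mirrors the one defined in utils.py):

    # Minimal sketch of the placeholder migration (not part of the commit).
    old_template = 'version/api_key/%%api_key%%'
    new_template = 'version/api_key/{api_key}'

    # Old style: literal search-and-replace on the template string.
    old_url = old_template.replace('%%api_key%%', 'SECRET')

    # New style: format_map with a dict that leaves unknown keys untouched,
    # so partially-filled URLs can be completed in a later params() call.
    class UrlParamDict(dict):
        def __missing__(self, key):
            return '{' + key + '}'

    new_url = new_template.format_map(UrlParamDict(api_key='SECRET'))

    assert old_url == new_url == 'version/api_key/SECRET'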

python_apps/api_clients/api_clients/version2.py (new file, 124 lines)
@@ -0,0 +1,124 @@
###############################################################################
# This file holds the implementations for all the API clients.
#
# If you want to develop a new client, here are some suggestions: Get the fetch
# methods working first, then the push, then the liquidsoap notifier. You will
# probably want to create a script on your server side to automatically
# schedule a playlist one minute from the current time.
###############################################################################
import datetime
from dateutil.parser import isoparse
import logging
import sys  # needed for the sys.exit(1) call in the config-error path below
from configobj import ConfigObj
from .utils import RequestProvider, time_in_seconds, time_in_milliseconds

LIBRETIME_API_VERSION = "2.0"

api_config = {}
api_endpoints = {}

api_endpoints['version_url'] = 'version/'
api_endpoints['schedule_url'] = 'schedule/'
api_endpoints['webstream_url'] = 'webstreams/{id}/'
api_endpoints['show_instance_url'] = 'show-instances/{id}/'
api_endpoints['show_url'] = 'shows/{id}/'
api_endpoints['file_url'] = 'files/{id}/'
api_endpoints['file_download_url'] = 'files/{id}/download/'
api_config['api_base'] = 'api/v2'


class AirtimeApiClient:
    def __init__(self, logger=None, config_path='/etc/airtime/airtime.conf'):
        if logger is None:
            self.logger = logging
        else:
            self.logger = logger

        try:
            self.config = ConfigObj(config_path)
            self.config.update(api_config)
            self.services = RequestProvider(self.config, api_endpoints)
        except Exception as e:
            self.logger.exception('Error loading config file: %s', config_path)
            sys.exit(1)

    def get_schedule(self):
        current_time = datetime.datetime.utcnow()
        end_time = current_time + datetime.timedelta(hours=1)

        str_current = current_time.isoformat(timespec='seconds')
        str_end = end_time.isoformat(timespec='seconds')
        data = self.services.schedule_url(params={
            'ends__range': (f'{str_current}Z,{str_end}Z'),
        })
        result = {'media': {} }
        for item in data:
            start = isoparse(item['starts'])
            key = start.strftime('%Y-%m-%d-%H-%M-%S')
            end = isoparse(item['ends'])

            show_instance = self.services.show_instance_url(id=item['instance_id'])
            show = self.services.show_url(id=show_instance['show_id'])

            result['media'][key] = {
                'start': start.strftime('%Y-%m-%d-%H-%M-%S'),
                'end': end.strftime('%Y-%m-%d-%H-%M-%S'),
                'row_id': item['id']
            }
            current = result['media'][key]
            if item['file']:
                current['independent_event'] = False
                current['type'] = 'file'
                current['id'] = item['file_id']

                fade_in = time_in_milliseconds(datetime.time.fromisoformat(item['fade_in']))
                fade_out = time_in_milliseconds(datetime.time.fromisoformat(item['fade_out']))

                cue_in = time_in_seconds(datetime.time.fromisoformat(item['cue_in']))
                cue_out = time_in_seconds(datetime.time.fromisoformat(item['cue_out']))

                current['fade_in'] = fade_in
                current['fade_out'] = fade_out
                current['cue_in'] = cue_in
                current['cue_out'] = cue_out

                info = self.services.file_url(id=item['file_id'])
                current['metadata'] = info
                current['uri'] = item['file']
                current['filesize'] = info['filesize']
            elif item['stream']:
                current['independent_event'] = True
                current['id'] = item['stream_id']
                info = self.services.webstream_url(id=item['stream_id'])
                current['uri'] = info['url']
                current['type'] = 'stream_buffer_start'
                # Stream events are instantaneous
                current['end'] = current['start']

                result[f'{key}_0'] = {
                    'id': current['id'],
                    'type': 'stream_output_start',
                    'start': current['start'],
                    'end': current['start'],
                    'uri': current['uri'],
                    'row_id': current['row_id'],
                    'independent_event': current['independent_event'],
                }

                result[end.isoformat()] = {
                    'type': 'stream_buffer_end',
                    'start': current['end'],
                    'end': current['end'],
                    'uri': current['uri'],
                    'row_id': current['row_id'],
                    'independent_event': current['independent_event'],
                }

                result[f'{end.isoformat()}_0'] = {
                    'type': 'stream_output_end',
                    'start': current['end'],
                    'end': current['end'],
                    'uri': current['uri'],
                    'row_id': current['row_id'],
                    'independent_event': current['independent_event'],
                }
        return result
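
Because pypo consumes this dictionary directly, it helps to see the rough shape get_schedule() produces. The following is an illustrative sketch only; the keys mirror the code above, but every value is invented:

    # Hypothetical example of the get_schedule() return value (values invented).
    example_schedule = {
        'media': {
            '2020-01-01-12-00-00': {
                'start': '2020-01-01-12-00-00',
                'end': '2020-01-01-12-05-00',
                'row_id': 17,
                'type': 'file',
                'independent_event': False,
                'id': 42,                            # file_id
                'fade_in': 500.0,                    # milliseconds
                'fade_out': 500.0,                   # milliseconds
                'cue_in': 0.0,                       # seconds
                'cue_out': 300.0,                    # seconds
                'uri': '/srv/airtime/stor/imported/42.mp3',
                'metadata': {'track_title': '...'},  # full record from files/{id}/
                'filesize': 4815162,
            },
        },
        # Webstream items additionally add stream_output_start, stream_buffer_end
        # and stream_output_end entries as siblings of the 'media' key.
    }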

@@ -9,15 +9,16 @@ print(script_path)
os.chdir(script_path)

setup(name='api_clients',
      version='1.0',
      description='Airtime API Client',
      url='http://github.com/sourcefabric/Airtime',
      author='sourcefabric',
      version='2.0.0',
      description='LibreTime API Client',
      url='http://github.com/LibreTime/Libretime',
      author='LibreTime Contributors',
      license='AGPLv3',
      packages=['api_clients'],
      scripts=[],
      install_requires=[
          'configobj'
          'configobj',
          'python-dateutil',
      ],
      zip_safe=False,
      data_files=[])

@@ -1,5 +1,5 @@
import unittest
from api_clients.api_client import ApcUrl, UrlBadParam, IncompleteUrl
from api_clients.utils import ApcUrl, UrlBadParam, IncompleteUrl

class TestApcUrl(unittest.TestCase):
    def test_init(self):

@@ -8,16 +8,16 @@ class TestApcUrl(unittest.TestCase):
        self.assertEqual(u.base_url, url)

    def test_params_1(self):
        u = ApcUrl("/testing/%%key%%")
        u = ApcUrl("/testing/{key}")
        self.assertEqual(u.params(key='val').url(), '/testing/val')

    def test_params_2(self):
        u = ApcUrl('/testing/%%key%%/%%api%%/more_testing')
        u = ApcUrl('/testing/{key}/{api}/more_testing')
        full_url = u.params(key="AAA",api="BBB").url()
        self.assertEqual(full_url, '/testing/AAA/BBB/more_testing')

    def test_params_ex(self):
        u = ApcUrl("/testing/%%key%%")
        u = ApcUrl("/testing/{key}")
        with self.assertRaises(UrlBadParam):
            u.params(bad_key='testing')

@@ -26,5 +26,5 @@ class TestApcUrl(unittest.TestCase):
        self.assertEqual( ApcUrl(u).url(), u )

    def test_url_ex(self):
        u = ApcUrl('/%%one%%/%%two%%/three').params(two='testing')
        u = ApcUrl('/{one}/{two}/three').params(two='testing')
        with self.assertRaises(IncompleteUrl): u.url()

@@ -1,26 +1,41 @@
import unittest
import json
from mock import MagicMock, patch
from api_clients.api_client import ApcUrl, ApiRequest
from api_clients.utils import ApcUrl, ApiRequest

class ResponseInfo:
    def get_content_type(self):
        return 'application/json'
    @property
    def headers(self):
        return {'content-type': 'application/json'}

    def json(self):
        return {'ok', 'ok'}

class TestApiRequest(unittest.TestCase):
    def test_init(self):
        u = ApiRequest('request_name', ApcUrl('/test/ing'))
        self.assertEqual(u.name, "request_name")

    def test_call(self):
        ret = json.dumps( {'ok':'ok'} )
    def test_call_json(self):
        ret = {'ok':'ok'}
        read = MagicMock()
        read.read = MagicMock(return_value=ret)
        read.info = MagicMock(return_value=ResponseInfo())
        read.headers = {'content-type': 'application/json'}
        read.json = MagicMock(return_value=ret)
        u = 'http://localhost/testing'
        with patch('urllib.request.urlopen') as mock_method:
        with patch('requests.get') as mock_method:
            mock_method.return_value = read
            request = ApiRequest('mm', ApcUrl(u))()
            self.assertEqual(request, json.loads(ret))
            self.assertEqual(request, ret)

    def test_call_html(self):
        ret = '<html><head></head><body></body></html>'
        read = MagicMock()
        read.headers = {'content-type': 'application/html'}
        read.text = MagicMock(return_value=ret)
        u = 'http://localhost/testing'
        with patch('requests.get') as mock_method:
            mock_method.return_value = read
            request = ApiRequest('mm', ApcUrl(u))()
            self.assertEqual(request.text(), ret)

if __name__ == '__main__': unittest.main()
@@ -2,7 +2,8 @@ import unittest
import json
from mock import patch, MagicMock
from configobj import ConfigObj
from api_clients.api_client import RequestProvider, api_config
from api_clients.version1 import api_config
from api_clients.utils import RequestProvider

class TestRequestProvider(unittest.TestCase):
    def setUp(self):

@@ -18,13 +19,17 @@ class TestRequestProvider(unittest.TestCase):
        self.assertTrue('general' in self.cfg)

    def test_init(self):
        rp = RequestProvider(self.cfg)
        self.assertTrue( len( rp.available_requests() ) > 0 )
        rp = RequestProvider(self.cfg, {})
        self.assertEqual(len(rp.available_requests()), 0)

    def test_contains(self):
        rp = RequestProvider(self.cfg)
        methods = ['upload_recorded', 'update_media_url', 'list_all_db_files']
        methods = {
            'upload_recorded': '/1/',
            'update_media_url': '/2/',
            'list_all_db_files': '/3/',
        }
        rp = RequestProvider(self.cfg, methods)
        for meth in methods:
            self.assertTrue( meth in rp.requests )
            self.assertTrue(meth in rp.requests)

if __name__ == '__main__': unittest.main()

@@ -1,6 +1,7 @@
import unittest
import datetime
import configparser
from api_clients.api_client import get_protocol
import unittest
from api_clients import utils

def get_force_ssl(value, useConfigParser):
    config = {}

@@ -10,12 +11,23 @@ def get_force_ssl(value, useConfigParser):
        'base_port': 80,
        'force_ssl': value,
    }
    return get_protocol(config)
    return utils.get_protocol(config)


class TestTime(unittest.TestCase):
    def test_time_in_seconds(self):
        time = datetime.time(hour=0, minute=3, second=34, microsecond=649600)
        self.assertTrue(abs(utils.time_in_seconds(time) - 214.65) < 0.009)

    def test_time_in_milliseconds(self):
        time = datetime.time(hour=0, minute=0, second=0, microsecond=500000)
        self.assertEqual(utils.time_in_milliseconds(time), 500)


class TestGetProtocol(unittest.TestCase):
    def test_dict_config_empty_http(self):
        config = {'general': {}}
        protocol = get_protocol(config)
        protocol = utils.get_protocol(config)
        self.assertEqual(protocol, 'http')

    def test_dict_config_http(self):

@@ -24,7 +36,7 @@ class TestGetProtocol(unittest.TestCase):
                'base_port': 80,
            },
        }
        protocol = get_protocol(config)
        protocol = utils.get_protocol(config)
        self.assertEqual(protocol, 'http')

    def test_dict_config_https(self):

@@ -33,7 +45,7 @@ class TestGetProtocol(unittest.TestCase):
                'base_port': 443,
            },
        }
        protocol = get_protocol(config)
        protocol = utils.get_protocol(config)
        self.assertEqual(protocol, 'https')

    def test_dict_config_force_https(self):

@@ -47,7 +59,7 @@ class TestGetProtocol(unittest.TestCase):
    def test_configparser_config_empty_http(self):
        config = configparser.ConfigParser()
        config['general'] = {}
        protocol = get_protocol(config)
        protocol = utils.get_protocol(config)
        self.assertEqual(protocol, 'http')

    def test_configparser_config_http(self):

@@ -55,7 +67,7 @@ class TestGetProtocol(unittest.TestCase):
        config['general'] = {
            'base_port': 80,
        }
        protocol = get_protocol(config)
        protocol = utils.get_protocol(config)
        self.assertEqual(protocol, 'http')

    def test_configparser_config_https(self):

@@ -63,7 +75,7 @@ class TestGetProtocol(unittest.TestCase):
        config['general'] = {
            'base_port': 443,
        }
        protocol = get_protocol(config)
        protocol = utils.get_protocol(config)
        self.assertEqual(protocol, 'https')

    def test_configparser_config_force_https(self):

@@ -73,3 +85,5 @@ class TestGetProtocol(unittest.TestCase):
            self.assertEqual(get_force_ssl(value, True), 'https')
        for value in negative_values:
            self.assertEqual(get_force_ssl(value, True), 'http')

if __name__ == '__main__': unittest.main()
@@ -28,7 +28,7 @@ from configobj import ConfigObj

# custom imports
#from util import *
from api_clients import *
from api_clients import version1 as api_client

LOG_LEVEL = logging.INFO
LOG_PATH = '/var/log/airtime/pypo/notify.log'

@@ -4,7 +4,7 @@ import os
import sys
import time
import traceback
from api_clients.api_client import AirtimeApiClient
from api_clients.version1 import AirtimeApiClient

def generate_liquidsoap_config(ss):
    data = ss['msg']

@@ -1,5 +1,5 @@

from api_clients import *
from api_clients import version1 as api_client
import sys

api_clients = api_client.AirtimeApiClient()

@@ -12,7 +12,7 @@ import sys
import telnetlib
import time

from api_clients import api_client
from api_clients import version1 as api_client
from configobj import ConfigObj
from datetime import datetime
from optparse import OptionParser

@@ -7,7 +7,7 @@ import traceback
import logging
import time

from api_clients import api_client
from api_clients import version1 as api_client

class ListenerStat(Thread):

@@ -16,7 +16,8 @@ from queue import Empty
from threading import Thread, Timer
from subprocess import Popen, PIPE

from api_clients import api_client
from api_clients import version1 as v1_api_client
from api_clients import version2 as api_client
from .timeout import ls_timeout


@@ -28,7 +29,7 @@ signal.signal(signal.SIGINT, keyboardInterruptHandler)

logging.captureWarnings(True)

POLL_INTERVAL = 480
POLL_INTERVAL = 400

class PypoFetch(Thread):

@@ -38,6 +39,7 @@ class PypoFetch(Thread):
        #Hacky...
        PypoFetch.ref = self

        self.v1_api_client = v1_api_client.AirtimeApiClient()
        self.api_client = api_client.AirtimeApiClient()
        self.fetch_queue = pypoFetch_q
        self.push_queue = pypoPush_q

@@ -150,7 +152,7 @@ class PypoFetch(Thread):
    def set_bootstrap_variables(self):
        self.logger.debug('Getting information needed on bootstrap from Airtime')
        try:
            info = self.api_client.get_bootstrap_info()
            info = self.v1_api_client.get_bootstrap_info()
        except Exception as e:
            self.logger.exception('Unable to get bootstrap info.. Exiting pypo...')

@@ -255,7 +257,7 @@ class PypoFetch(Thread):
            stream_id = info[0]
            status = info[1]
            if(status == "true"):
                self.api_client.notify_liquidsoap_status("OK", stream_id, str(fake_time))
                self.v1_api_client.notify_liquidsoap_status("OK", stream_id, str(fake_time))


    @ls_timeout

@@ -343,7 +345,7 @@ class PypoFetch(Thread):
            media_item = media[key]
            if (media_item['type'] == 'file'):
                fileExt = self.sanity_check_media_item(media_item)
                dst = os.path.join(download_dir, "{}{}".format(media_item['id'], fileExt))
                dst = os.path.join(download_dir, f'{media_item["id"]}{fileExt}')
                media_item['dst'] = dst
                media_item['file_ready'] = False
                media_filtered[key] = media_item

@@ -434,10 +436,14 @@ class PypoFetch(Thread):
                self.logger.exception("Problem removing file '%s'" % f)

    def manual_schedule_fetch(self):
        success, self.schedule_data = self.api_client.get_schedule()
        if success:
        try:
            self.schedule_data = self.api_client.get_schedule()
            self.process_schedule(self.schedule_data)
        return success
            return True
        except Exception as e:
            self.logger.error('Unable to fetch schedule')
            self.logger.exception(e)
            return False

    def persistent_manual_schedule_fetch(self, max_attempts=1):
        success = False

@@ -452,7 +458,7 @@ class PypoFetch(Thread):
    # push metadata to TuneIn. We have to do this because TuneIn turns
    # off metadata if it does not receive a request every 5 minutes.
    def update_metadata_on_tunein(self):
        self.api_client.update_metadata_on_tunein()
        self.v1_api_client.update_metadata_on_tunein()
        Timer(120, self.update_metadata_on_tunein).start()

    def main(self):

@@ -16,6 +16,7 @@ import configparser
import json
import hashlib
from requests.exceptions import ConnectionError, HTTPError, Timeout
from api_clients import version2 as api_client

CONFIG_PATH = '/etc/airtime/airtime.conf'

@@ -31,6 +32,7 @@ class PypoFile(Thread):
        self.media = None
        self.cache_dir = os.path.join(config["cache_dir"], "scheduler")
        self._config = self.read_config_file(CONFIG_PATH)
        self.api_client = api_client.AirtimeApiClient()

    def copy_file(self, media_item):
        """

@@ -44,6 +46,8 @@ class PypoFile(Thread):
            dst_exists = True
            try:
                dst_size = os.path.getsize(dst)
                if dst_size == 0:
                    dst_exists = False
            except Exception as e:
                dst_exists = False

@@ -63,41 +67,16 @@ class PypoFile(Thread):

        if do_copy:
            self.logger.info("copying from %s to local cache %s" % (src, dst))

            CONFIG_SECTION = 'general'
            username = self._config[CONFIG_SECTION].get('api_key')
            baseurl = self._config[CONFIG_SECTION].get('base_url')
            port = self._config[CONFIG_SECTION].get('base_port', 80)
            positive_values = ['Yes', 'yes', 'True', 'true', True]
            force_ssl = self._config[CONFIG_SECTION].get('force_ssl', False)
            if force_ssl in positive_values:
                protocol = 'https'
                self.logger.debug('protocol set to https from force_ssl configuration setting')
            else:
                try:
                    protocol = self._config[CONFIG_SECTION]['protocol']
                    self.logger.debug('protocol set to %s from configuration setting' % (protocol))
                except (NoOptionError, KeyError) as e:
                    protocol = str(("http", "https")[int(port) == 443])
                    self.logger.debug('guessing protocol as %s from port configuration' % (protocol))

            try:
                host = [protocol, baseurl, port]
                url = "%s://%s:%s/rest/media/%s/download" % (host[0],
                                                             host[1],
                                                             host[2],
                                                             media_item["id"])
                with open(dst, "wb") as handle:
                    response = requests.get(url, auth=requests.auth.HTTPBasicAuth(username, ''), stream=True, verify=False)
                    self.logger.info(media_item)
                    response = self.api_client.services.file_download_url(id=media_item['id'])

                    if not response.ok:
                        self.logger.error(response)
                        raise Exception("%s - Error occurred downloading file" % response.status_code)

                    for chunk in response.iter_content(1024):
                        if not chunk:
                            break

                    for chunk in response.iter_content(chunk_size=1024):
                        handle.write(chunk)

            #make file world readable and owner writable

@@ -20,7 +20,7 @@ from queue import Empty, Queue

from threading import Thread

from api_clients import api_client
from api_clients import version1 as api_client
from .timeout import ls_timeout

logging.captureWarnings(True)

@@ -21,14 +21,15 @@ from threading import Thread

import mutagen

from api_clients import api_client as apc
from api_clients import version1 as v1_api_client
from api_clients import version2 as api_client

def api_client(logger):
    """
    api_client returns the correct instance of AirtimeApiClient. Although there is only one
    instance to choose from at the moment.
    """
    return apc.AirtimeApiClient(logger)
    return v1_api_client.AirtimeApiClient(logger)

# loading config file
try:
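
Taken together, the hunks above split the pypo consumers across the two clients rather than replacing one with the other. A hypothetical illustration of the resulting pattern (not code from the commit; it assumes a readable /etc/airtime/airtime.conf, as both constructors do):

    # Hypothetical illustration of the v1/v2 split used by the pypo modules above.
    from api_clients import version1 as v1_api_client
    from api_clients import version2 as api_client

    v1 = v1_api_client.AirtimeApiClient()  # legacy actions: bootstrap info, liquidsoap status, TuneIn metadata
    v2 = api_client.AirtimeApiClient()     # new REST resources: schedule, files, webstreams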