2010-11-30 00:34:22 +01:00
|
|
|
###############################################################################
|
|
|
|
# This file holds the implementations for all the API clients.
|
|
|
|
#
|
2012-08-09 18:15:45 +02:00
|
|
|
# If you want to develop a new client, here are some suggestions: Get the fetch
|
|
|
|
# methods working first, then the push, then the liquidsoap notifier. You will
|
|
|
|
# probably want to create a script on your server side to automatically
|
2010-11-30 00:34:22 +01:00
|
|
|
# schedule a playlist one minute from the current time.
|
|
|
|
###############################################################################
|
2010-11-08 22:54:54 +01:00
|
|
|
import sys
|
|
|
|
import time
|
|
|
|
import urllib
|
2011-03-25 04:07:13 +01:00
|
|
|
import urllib2
|
2010-11-08 22:54:54 +01:00
|
|
|
import logging
|
2011-02-16 02:59:04 +01:00
|
|
|
import json
|
2010-11-19 00:00:13 +01:00
|
|
|
from urlparse import urlparse
|
2011-07-08 23:14:01 +02:00
|
|
|
import base64
|
2011-07-20 01:35:39 +02:00
|
|
|
from configobj import ConfigObj
|
2012-01-11 18:17:48 +01:00
|
|
|
import string
|
2012-07-12 22:51:11 +02:00
|
|
|
import traceback
|
2010-11-19 00:00:13 +01:00
|
|
|
|
2012-08-22 16:39:26 +02:00
|
|
|
# Client-side Airtime version; compared (major.minor prefix) against the
# server's reported version in is_server_compatible().
AIRTIME_VERSION = "2.2.0"
|
2010-11-08 22:54:54 +01:00
|
|
|
|
2012-10-29 16:42:24 +01:00
|
|
|
|
|
|
|
# TODO : Place these functions in some common module. Right now, media
|
|
|
|
# monitor uses the same functions and it would be better to reuse them
|
|
|
|
# instead of copy pasting them around
|
|
|
|
|
2011-08-05 18:35:50 +02:00
|
|
|
def to_unicode(obj, encoding='utf-8'):
    """Return *obj* as a unicode string.

    Byte strings are decoded with *encoding*; unicode strings and
    non-string objects are returned unchanged.
    """
    if isinstance(obj, basestring) and not isinstance(obj, unicode):
        return unicode(obj, encoding)
    return obj
|
2011-08-05 20:40:03 +02:00
|
|
|
|
|
|
|
def encode_to(obj, encoding='utf-8'):
    """Return *obj* as a byte string.

    Unicode strings are encoded with *encoding*; everything else is
    returned unchanged.
    """
    if isinstance(obj, unicode):
        return obj.encode(encoding)
    return obj
|
2012-07-12 22:51:11 +02:00
|
|
|
|
2011-09-29 20:45:56 +02:00
|
|
|
def convert_dict_value_to_utf8(md):
    """Return a copy of *md* with every value encoded to utf-8 bytes.

    Keys are left untouched; only values pass through encode_to().
    """
    return dict((key, encode_to(value, "utf-8")) for key, value in md.items())
|
2011-08-05 20:40:03 +02:00
|
|
|
|
2010-11-30 00:34:22 +01:00
|
|
|
################################################################################
|
2011-01-05 18:31:49 +01:00
|
|
|
# Airtime API Client
|
2010-11-30 00:34:22 +01:00
|
|
|
################################################################################
|
|
|
|
|
2012-10-29 16:43:39 +01:00
|
|
|
class AirtimeApiClient(object):
    """HTTP client for the Airtime server API.

    Wraps the REST endpoints used by the Airtime components (schedule
    export, recorded-show upload, liquidsoap notifications, watched-dir
    management, ...). Endpoint URL templates and the api key are read
    from a ConfigObj-style config file.
    """

    # This is a little hacky fix so that I don't have to pass the config object
    # everywhere where AirtimeApiClient needs to be initialized
    # Config path remembered across calls to create_right_config().
    default_config = None
|
|
|
|
# the purpose of this custom constructor is to remember which config file
|
|
|
|
# it was called with. So that after the initial call:
|
|
|
|
# AirtimeApiClient.create_right_config('/path/to/config')
|
|
|
|
# All subsequence calls to create_right_config will be with that config
|
|
|
|
# file
|
|
|
|
@staticmethod
|
|
|
|
def create_right_config(log=None,config_path=None):
|
2012-07-27 16:27:30 +02:00
|
|
|
if config_path: AirtimeApiClient.default_config = config_path
|
2012-08-09 18:13:44 +02:00
|
|
|
elif (not AirtimeApiClient.default_config):
|
|
|
|
raise ValueError("Cannot slip config_path attribute when it has \
|
|
|
|
never been passed yet")
|
2012-08-09 18:15:45 +02:00
|
|
|
return AirtimeApiClient( logger=None,
|
|
|
|
config_path=AirtimeApiClient.default_config )
|
2012-07-26 21:49:41 +02:00
|
|
|
|
2012-07-17 22:16:06 +02:00
|
|
|
    def __init__(self, logger=None,config_path='/etc/airtime/api_client.cfg'):
        # Fall back to the root logging module so log calls always work
        # even when no logger object is injected.
        if logger is None:
            self.logger = logging
        else:
            self.logger = logger

        # loading config file
        try:
            self.config = ConfigObj(config_path)
        except Exception, e:
            self.logger.error('Error loading config file: %s', e)
            # Without the config no endpoint can be built; bail out hard.
            sys.exit(1)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-08-30 21:52:47 +02:00
|
|
|
def get_response_from_server(self, url, attempts=-1):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-09-19 18:44:43 +02:00
|
|
|
successful_response = False
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2011-09-19 18:44:43 +02:00
|
|
|
while not successful_response:
|
|
|
|
try:
|
2012-04-27 22:05:26 +02:00
|
|
|
response = urllib2.urlopen(url).read()
|
2011-09-19 18:44:43 +02:00
|
|
|
successful_response = True
|
|
|
|
except IOError, e:
|
|
|
|
logger.error('Error Authenticating with remote server: %s', e)
|
2012-08-15 21:12:44 +02:00
|
|
|
if isinstance(url, urllib2.Request):
|
|
|
|
logger.debug(url.get_full_url())
|
|
|
|
else:
|
|
|
|
logger.debug(url)
|
2011-09-19 18:44:43 +02:00
|
|
|
except Exception, e:
|
|
|
|
logger.error('Couldn\'t connect to remote server. Is it running?')
|
|
|
|
logger.error("%s" % e)
|
2012-08-15 21:12:44 +02:00
|
|
|
if isinstance(url, urllib2.Request):
|
|
|
|
logger.debug(url.get_full_url())
|
|
|
|
else:
|
|
|
|
logger.debug(url)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-08-30 21:52:47 +02:00
|
|
|
#If the user passed in a positive attempts number then that means
|
|
|
|
#attempts will roll over 0 and we stop. If attempts was initially negative,
|
|
|
|
#then we have unlimited attempts
|
|
|
|
if attempts > 0:
|
|
|
|
attempts = attempts - 1
|
|
|
|
if attempts == 0:
|
|
|
|
successful_response = True
|
|
|
|
|
2011-09-19 18:44:43 +02:00
|
|
|
if not successful_response:
|
2012-04-27 22:05:26 +02:00
|
|
|
logger.error("Error connecting to server, waiting 5 seconds and trying again.")
|
2011-09-19 18:44:43 +02:00
|
|
|
time.sleep(5)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-04-27 22:05:26 +02:00
|
|
|
return response
|
2012-07-12 23:58:29 +02:00
|
|
|
|
|
|
|
    def get_response_into_file(self, url, block=True):
        """
        This function will query the server and download its response directly
        into a temporary file. This is useful in the situation where the
        response from the server can be huge and we don't want to store it into
        memory (potentially causing Python to use hundreds of MB's of memory).
        By writing into a file we can then open this file later, and read data
        a little bit at a time and be very mem efficient.

        The return value of this function is the path of the temporary file.
        Unless specified using block = False, this function will block until a
        successful HTTP 200 response is received.
        """

        logger = self.logger
        successful_response = False

        while not successful_response:
            try:
                # urlretrieve streams the body into a temp file and returns
                # (path, headers); only the path is needed here.
                path = urllib.urlretrieve(url)[0]
                successful_response = True
            except IOError, e:
                logger.error('Error Authenticating with remote server: %s', e)
                # Non-blocking mode: surface the failure to the caller.
                if not block:
                    raise
            except Exception, e:
                logger.error('Couldn\'t connect to remote server. Is it running?')
                logger.error("%s" % e)
                if not block:
                    raise

            if not successful_response:
                logger.error("Error connecting to server, waiting 5 seconds and trying again.")
                time.sleep(5)

        return path
|
|
|
|
|
|
|
|
|
2011-02-23 23:03:27 +01:00
|
|
|
|
2012-07-12 23:58:29 +02:00
|
|
|
def __get_airtime_version(self):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2012-08-09 18:15:45 +02:00
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"],
|
|
|
|
str(self.config["base_port"]), self.config["api_base"],
|
|
|
|
self.config["version_url"])
|
2011-03-08 22:44:47 +01:00
|
|
|
logger.debug("Trying to contact %s", url)
|
2011-02-23 23:03:27 +01:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
|
2011-05-13 22:08:04 +02:00
|
|
|
version = -1
|
2011-02-23 23:03:27 +01:00
|
|
|
try:
|
2011-09-19 18:44:43 +02:00
|
|
|
data = self.get_response_from_server(url)
|
2011-02-23 23:03:27 +01:00
|
|
|
logger.debug("Data: %s", data)
|
|
|
|
response_json = json.loads(data)
|
|
|
|
version = response_json['version']
|
2011-05-16 21:33:31 +02:00
|
|
|
logger.debug("Airtime Version %s detected", version)
|
2011-06-02 00:21:13 +02:00
|
|
|
except Exception, e:
|
2011-09-19 18:44:43 +02:00
|
|
|
logger.error("Unable to detect Airtime Version - %s", e)
|
2011-06-02 00:21:13 +02:00
|
|
|
return -1
|
2011-02-23 23:03:27 +01:00
|
|
|
|
|
|
|
return version
|
|
|
|
|
|
|
|
    def test(self):
        """Smoke test: fetch the schedule and download every media item in it."""
        logger = self.logger
        # get_schedule() returns (success, response); we only want the payload.
        items = self.get_schedule()[1]
        schedule = items["playlists"]
        logger.debug("Number of playlists found: %s", str(len(schedule)))
        count = 1
        for pkey in sorted(schedule.iterkeys()):
            logger.debug("Playlist #%s", str(count))
            count += 1
            playlist = schedule[pkey]
            for item in playlist["medias"]:
                filename = urlparse(item["uri"])
                # Drop the first 5 chars of the query to get the bare name.
                # NOTE(review): assumes the query looks like "file=<name>"
                # -- confirm against the server's export format.
                filename = filename.query[5:]
                self.get_media(item["uri"], filename)
|
|
|
|
|
|
|
|
|
2012-07-12 23:58:29 +02:00
|
|
|
def is_server_compatible(self, verbose=True):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2012-07-12 23:58:29 +02:00
|
|
|
version = self.__get_airtime_version()
|
2011-05-13 22:08:04 +02:00
|
|
|
if (version == -1):
|
2011-02-23 23:03:27 +01:00
|
|
|
if (verbose):
|
2011-06-01 18:32:42 +02:00
|
|
|
logger.info('Unable to get Airtime version number.\n')
|
2011-05-16 21:33:31 +02:00
|
|
|
return False
|
2011-05-17 16:36:14 +02:00
|
|
|
elif (version[0:3] != AIRTIME_VERSION[0:3]):
|
2011-02-23 23:03:27 +01:00
|
|
|
if (verbose):
|
2011-06-01 18:32:42 +02:00
|
|
|
logger.info('Airtime version found: ' + str(version))
|
2012-08-09 18:15:45 +02:00
|
|
|
logger.info('pypo is at version ' + AIRTIME_VERSION +
|
|
|
|
' and is not compatible with this version of Airtime.\n')
|
2011-05-16 21:33:31 +02:00
|
|
|
return False
|
2011-02-23 23:03:27 +01:00
|
|
|
else:
|
|
|
|
if (verbose):
|
2011-06-01 18:32:42 +02:00
|
|
|
logger.info('Airtime version: ' + str(version))
|
2012-07-12 23:58:29 +02:00
|
|
|
logger.info('pypo is at version ' + AIRTIME_VERSION + ' and is compatible with this version of Airtime.')
|
2011-02-23 23:03:27 +01:00
|
|
|
return True
|
|
|
|
|
|
|
|
|
2012-07-12 23:58:29 +02:00
|
|
|
def get_schedule(self):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-02-23 23:03:27 +01:00
|
|
|
# Construct the URL
|
2011-03-30 00:32:53 +02:00
|
|
|
export_url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["export_url"])
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-02-23 23:03:27 +01:00
|
|
|
logger.info("Fetching schedule from %s", export_url)
|
2011-03-08 22:44:47 +01:00
|
|
|
export_url = export_url.replace('%%api_key%%', self.config["api_key"])
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-02-23 23:03:27 +01:00
|
|
|
response = ""
|
|
|
|
try:
|
2011-09-19 18:44:43 +02:00
|
|
|
response_json = self.get_response_from_server(export_url)
|
2011-02-23 23:03:27 +01:00
|
|
|
response = json.loads(response_json)
|
2012-02-27 19:52:35 +01:00
|
|
|
success = True
|
2011-02-23 23:03:27 +01:00
|
|
|
except Exception, e:
|
2011-06-01 18:32:42 +02:00
|
|
|
logger.error(e)
|
2012-02-27 19:52:35 +01:00
|
|
|
success = False
|
2011-02-23 23:03:27 +01:00
|
|
|
|
2012-02-27 19:52:35 +01:00
|
|
|
return success, response
|
2011-02-23 23:03:27 +01:00
|
|
|
|
|
|
|
|
2011-03-29 22:10:00 +02:00
|
|
|
    def get_media(self, uri, dst):
        """Download the media file at *uri* to local path *dst*.

        The api key is appended to the URI for authentication. Failures
        are logged and swallowed (best-effort download, no return value).
        """
        logger = self.logger

        try:
            src = uri + "/api_key/%%api_key%%"
            # Logged before key substitution so the api key stays out of logs.
            logger.info("try to download from %s to %s", src, dst)
            src = src.replace("%%api_key%%", self.config["api_key"])
            # check if file exists already before downloading again
            headers = urllib.urlretrieve(src, dst)[1]
            logger.info(headers)
        except Exception, e:
            logger.error("%s", e)
|
|
|
|
|
2012-08-28 21:00:02 +02:00
|
|
|
def notify_liquidsoap_started(self):
|
|
|
|
logger = self.logger
|
|
|
|
|
|
|
|
try:
|
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], \
|
|
|
|
str(self.config["base_port"]), \
|
|
|
|
self.config["api_base"], \
|
|
|
|
self.config["notify_liquidsoap_started"])
|
|
|
|
|
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
|
2012-08-30 21:52:47 +02:00
|
|
|
self.get_response_from_server(url, attempts=5)
|
2012-08-28 21:00:02 +02:00
|
|
|
except Exception, e:
|
|
|
|
logger.error("Exception: %s", str(e))
|
|
|
|
|
|
|
|
|
2011-02-23 23:03:27 +01:00
|
|
|
"""
|
|
|
|
This is a callback from liquidsoap, we use this to notify about the
|
|
|
|
currently playing *song*. We get passed a JSON string which we handed to
|
|
|
|
liquidsoap in get_liquidsoap_data().
|
|
|
|
"""
|
2012-08-30 18:02:26 +02:00
|
|
|
def notify_media_item_start_playing(self, media_id):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-02-23 23:03:27 +01:00
|
|
|
response = ''
|
|
|
|
try:
|
2011-03-30 00:32:53 +02:00
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_start_playing_url"])
|
2011-02-23 23:03:27 +01:00
|
|
|
url = url.replace("%%media_id%%", str(media_id))
|
|
|
|
logger.debug(url)
|
2011-03-08 22:44:47 +01:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-08-30 21:52:47 +02:00
|
|
|
response = self.get_response_from_server(url, attempts = 5)
|
2012-04-27 22:05:26 +02:00
|
|
|
response = json.loads(response)
|
2011-02-23 23:03:27 +01:00
|
|
|
logger.info("API-Status %s", response['status'])
|
|
|
|
logger.info("API-Message %s", response['message'])
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-02-23 23:03:27 +01:00
|
|
|
except Exception, e:
|
2011-03-10 22:41:41 +01:00
|
|
|
logger.error("Exception: %s", e)
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-02-23 23:03:27 +01:00
|
|
|
return response
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-02-23 23:03:27 +01:00
|
|
|
def get_liquidsoap_data(self, pkey, schedule):
|
|
|
|
playlist = schedule[pkey]
|
|
|
|
data = dict()
|
|
|
|
try:
|
|
|
|
data["schedule_id"] = playlist['id']
|
2012-07-12 23:58:29 +02:00
|
|
|
except Exception:
|
2011-02-23 23:03:27 +01:00
|
|
|
data["schedule_id"] = 0
|
|
|
|
return data
|
2011-03-25 04:07:13 +01:00
|
|
|
|
|
|
|
def get_shows_to_record(self):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-03-29 22:32:31 +02:00
|
|
|
response = None
|
2011-03-25 04:07:13 +01:00
|
|
|
try:
|
2011-03-30 00:32:53 +02:00
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["show_schedule_url"])
|
2011-03-25 04:07:13 +01:00
|
|
|
logger.debug(url)
|
2011-03-28 18:10:51 +02:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
2011-09-19 18:44:43 +02:00
|
|
|
response = self.get_response_from_server(url)
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-09-19 18:44:43 +02:00
|
|
|
response = json.loads(response)
|
2011-03-25 04:07:13 +01:00
|
|
|
logger.info("shows %s", response)
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-03-25 04:07:13 +01:00
|
|
|
except Exception, e:
|
|
|
|
logger.error("Exception: %s", e)
|
2011-06-02 17:36:30 +02:00
|
|
|
response = None
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-03-29 22:32:31 +02:00
|
|
|
return response
|
2011-03-25 04:07:13 +01:00
|
|
|
|
|
|
|
    def upload_recorded_show(self, data, headers):
        """POST a recorded show to the server (pre-encoded *data*/*headers*).

        Retries up to config["upload_retries"] times, sleeping
        config["upload_wait"] seconds between attempts. Returns the
        stripped response body of the first successful attempt, or ''
        when every attempt failed.
        """
        logger = self.logger
        response = ''

        retries = int(self.config["upload_retries"])
        retries_wait = int(self.config["upload_wait"])

        url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["upload_file_url"])

        # Logged before key substitution so the api key stays out of logs.
        logger.debug(url)
        url = url.replace("%%api_key%%", self.config["api_key"])

        for i in range(0, retries):
            logger.debug("Upload attempt: %s", i + 1)

            try:
                request = urllib2.Request(url, data, headers)
                response = urllib2.urlopen(request).read().strip()

                logger.info("uploaded show result %s", response)
                # Success: break before the retry sleep below.
                break

            except urllib2.HTTPError, e:
                logger.error("Http error code: %s", e.code)
            except urllib2.URLError, e:
                logger.error("Server is down: %s", e.args)
            except Exception, e:
                logger.error("Exception: %s", e)

            #wait some time before next retry
            time.sleep(retries_wait)

        return response
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-03-02 22:55:11 +01:00
|
|
|
    def check_live_stream_auth(self, username, password, dj_type):
        """
        Hit the check_live_stream_auth endpoint with the given username,
        password and dj_type substituted into the URL. Returns the decoded
        JSON reply, or None on failure.

        TODO: Why are we using print statements here? Possibly use logger that
        is directed to stdout. -MK
        """

        response = ''
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["check_live_stream_auth"])

            url = url.replace("%%api_key%%", self.config["api_key"])
            url = url.replace("%%username%%", username)
            url = url.replace("%%djtype%%", dj_type)
            url = url.replace("%%password%%", password)

            response = self.get_response_from_server(url)
            response = json.loads(response)
        except Exception, e:
            print "Exception: %s", e
            print "traceback: %s", traceback.format_exc()
            response = None

        return response
|
2011-04-25 18:49:01 +02:00
|
|
|
|
2012-07-13 22:32:17 +02:00
|
|
|
def construct_url(self,config_action_key):
|
|
|
|
"""Constructs the base url for every request"""
|
|
|
|
# TODO : Make other methods in this class use this this method.
|
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config[config_action_key])
|
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
return url
|
|
|
|
|
2011-06-13 12:10:25 +02:00
|
|
|
def setup_media_monitor(self):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-06-13 12:10:25 +02:00
|
|
|
response = None
|
|
|
|
try:
|
2012-07-13 22:32:17 +02:00
|
|
|
url = self.construct_url("media_setup_url")
|
2011-09-19 18:44:43 +02:00
|
|
|
response = self.get_response_from_server(url)
|
|
|
|
response = json.loads(response)
|
2011-07-11 20:44:23 +02:00
|
|
|
logger.info("Connected to Airtime Server. Json Media Storage Dir: %s", response)
|
2011-06-13 12:10:25 +02:00
|
|
|
except Exception, e:
|
|
|
|
response = None
|
|
|
|
logger.error("Exception: %s", e)
|
|
|
|
return response
|
|
|
|
|
2011-06-23 21:14:09 +02:00
|
|
|
    def update_media_metadata(self, md, mode, is_record=False):
        """POST the metadata dict *md* for one media file to the server.

        *mode* is substituted into the update_media_url template. When
        *is_record* is true and the decoded reply has no "error" key, a
        second request associates the new file id with its show instance
        (taken from md['MDATA_KEY_TRACKNUMBER']). Returns the server reply
        (json-decoded when possible), or None on error.
        """
        logger = self.logger
        response = None
        try:
            url = self.construct_url("update_media_url")
            url = url.replace("%%mode%%", mode)

            self.logger.info("Requesting url %s" % url)

            # Values must be utf-8 byte strings before url-encoding.
            md = convert_dict_value_to_utf8(md)

            data = urllib.urlencode(md)
            req = urllib2.Request(url, data)

            response = self.get_response_from_server(req)
            logger.info("update media %s, filepath: %s, mode: %s", response, md['MDATA_KEY_FILEPATH'], mode)
            self.logger.info("Received response:")
            self.logger.info(response)
            # A non-JSON reply is logged but kept as the raw string.
            try: response = json.loads(response)
            except ValueError:
                logger.info("Could not parse json from response: '%s'" % response)

            if("error" not in response and is_record):
                url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["upload_recorded"])
                url = url.replace("%%fileid%%", str(response[u'id']))
                url = url.replace("%%showinstanceid%%", str(md['MDATA_KEY_TRACKNUMBER']))
                url = url.replace("%%api_key%%", self.config["api_key"])

                response = self.get_response_from_server(url)
                response = json.loads(response)
                logger.info("associate recorded %s", response)
        except Exception, e:
            response = None
            logger.error('Exception: %s', e)
            logger.error("traceback: %s", traceback.format_exc())

        return response
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2012-07-17 22:16:06 +02:00
|
|
|
def send_media_monitor_requests(self, action_list, dry=False):
|
2012-07-16 22:43:48 +02:00
|
|
|
"""
|
2012-10-29 16:40:41 +01:00
|
|
|
Send a gang of media monitor events at a time. actions_list is a
|
|
|
|
list of dictionaries where every dictionary is representing an
|
|
|
|
action. Every action dict must contain a 'mode' key that says
|
|
|
|
what kind of action it is and an optional 'is_record' key that
|
|
|
|
says whether the show was recorded or not. The value of this key
|
|
|
|
does not matter, only if it's present or not.
|
2012-07-16 22:43:48 +02:00
|
|
|
"""
|
2012-10-29 16:40:41 +01:00
|
|
|
url = self.construct_url('reload_metadata_group')
|
|
|
|
# We are assuming that action_list is a list of dictionaries such
|
|
|
|
# that every dictionary represents the metadata of a file along
|
|
|
|
# with a special mode key that is the action to be executed by the
|
|
|
|
# controller.
|
|
|
|
valid_actions = []
|
|
|
|
# We could get a list of valid_actions in a much shorter way using
|
|
|
|
# filter but here we prefer a little more verbosity to help
|
|
|
|
# debugging
|
|
|
|
for action in action_list:
|
|
|
|
if not 'mode' in action:
|
|
|
|
self.logger.debug("Warning: Trying to send a request element without a 'mode'")
|
|
|
|
self.logger.debug("Here is the the request: '%s'" % str(action) )
|
|
|
|
else:
|
|
|
|
# We alias the value of is_record to true or false no
|
|
|
|
# matter what it is based on if it's absent in the action
|
|
|
|
if 'is_record' not in action:
|
|
|
|
action['is_record'] = 0
|
|
|
|
valid_actions.append(action)
|
|
|
|
# Note that we must prefix every key with: mdX where x is a number
|
|
|
|
# Is there a way to format the next line a little better? The
|
|
|
|
# parenthesis make the code almost unreadable
|
|
|
|
md_list = dict((("md%d" % i), json.dumps(convert_dict_value_to_utf8(md))) \
|
|
|
|
for i,md in enumerate(valid_actions))
|
|
|
|
# For testing we add the following "dry" parameter to tell the
|
|
|
|
# controller not to actually do any changes
|
|
|
|
if dry: md_list['dry'] = 1
|
|
|
|
self.logger.info("Pumping out %d requests..." % len(valid_actions))
|
|
|
|
data = urllib.urlencode(md_list)
|
|
|
|
req = urllib2.Request(url, data)
|
|
|
|
response = self.get_response_from_server(req)
|
|
|
|
response = json.loads(response)
|
|
|
|
return response
|
2012-07-13 20:51:43 +02:00
|
|
|
|
2011-07-07 23:41:08 +02:00
|
|
|
#returns a list of all db files for a given directory in JSON format:
|
|
|
|
#{"files":["path/to/file1", "path/to/file2"]}
|
2011-07-21 12:12:37 +02:00
|
|
|
#Note that these are relative paths to the given directory. The full
|
2011-07-07 23:41:08 +02:00
|
|
|
#path is not returned.
|
2012-09-10 22:29:17 +02:00
|
|
|
def list_all_db_files(self, dir_id, all_files=True):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-07-04 20:29:09 +02:00
|
|
|
try:
|
2012-09-10 23:42:59 +02:00
|
|
|
all_files = u"1" if all_files else u"0"
|
2012-07-20 23:13:00 +02:00
|
|
|
url = self.construct_url("list_all_db_files")
|
2011-07-04 23:37:05 +02:00
|
|
|
url = url.replace("%%dir_id%%", dir_id)
|
2012-09-10 22:29:17 +02:00
|
|
|
url = url.replace("%%all%%", all_files)
|
2012-04-27 22:05:26 +02:00
|
|
|
response = self.get_response_from_server(url)
|
2011-07-04 20:29:09 +02:00
|
|
|
response = json.loads(response)
|
|
|
|
except Exception, e:
|
2012-07-16 23:41:36 +02:00
|
|
|
response = {}
|
2011-07-04 20:29:09 +02:00
|
|
|
logger.error("Exception: %s", e)
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2012-07-11 22:48:46 +02:00
|
|
|
try:
|
2012-07-11 23:09:17 +02:00
|
|
|
return response["files"]
|
|
|
|
except KeyError:
|
2012-09-10 22:29:17 +02:00
|
|
|
self.logger.error("Could not find index 'files' in dictionary: %s",
|
|
|
|
str(response))
|
2012-07-11 22:48:46 +02:00
|
|
|
return []
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2011-07-04 21:08:02 +02:00
|
|
|
def list_all_watched_dirs(self):
|
2012-07-24 23:12:29 +02:00
|
|
|
# Does this include the stor directory as well?
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-07-04 21:08:02 +02:00
|
|
|
try:
|
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["list_all_watched_dirs"])
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2011-07-04 21:08:02 +02:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-04-27 22:05:26 +02:00
|
|
|
response = self.get_response_from_server(url)
|
2011-07-04 21:08:02 +02:00
|
|
|
response = json.loads(response)
|
|
|
|
except Exception, e:
|
|
|
|
response = None
|
|
|
|
logger.error("Exception: %s", e)
|
2012-08-09 18:13:44 +02:00
|
|
|
self.logger.debug(traceback.format_exc())
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2011-07-04 21:08:02 +02:00
|
|
|
return response
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2011-07-08 23:14:01 +02:00
|
|
|
def add_watched_dir(self, path):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-07-08 23:14:01 +02:00
|
|
|
try:
|
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["add_watched_dir"])
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2011-07-08 23:14:01 +02:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
url = url.replace("%%path%%", base64.b64encode(path))
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-04-27 22:05:26 +02:00
|
|
|
response = self.get_response_from_server(url)
|
2011-07-11 20:00:31 +02:00
|
|
|
response = json.loads(response)
|
2011-07-08 23:14:01 +02:00
|
|
|
except Exception, e:
|
|
|
|
response = None
|
|
|
|
logger.error("Exception: %s", e)
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2011-07-08 23:14:01 +02:00
|
|
|
return response
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2011-07-08 23:14:01 +02:00
|
|
|
def remove_watched_dir(self, path):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-07-08 23:14:01 +02:00
|
|
|
try:
|
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["remove_watched_dir"])
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2011-07-08 23:14:01 +02:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
url = url.replace("%%path%%", base64.b64encode(path))
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-04-27 22:05:26 +02:00
|
|
|
response = self.get_response_from_server(url)
|
2011-07-11 20:00:31 +02:00
|
|
|
response = json.loads(response)
|
2011-07-08 23:14:01 +02:00
|
|
|
except Exception, e:
|
|
|
|
response = None
|
|
|
|
logger.error("Exception: %s", e)
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2011-07-08 23:14:01 +02:00
|
|
|
return response
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2011-07-08 23:14:01 +02:00
|
|
|
def set_storage_dir(self, path):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-07-08 23:14:01 +02:00
|
|
|
try:
|
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["set_storage_dir"])
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2011-07-08 23:14:01 +02:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
url = url.replace("%%path%%", base64.b64encode(path))
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2012-04-27 22:05:26 +02:00
|
|
|
response = self.get_response_from_server(url)
|
2011-07-11 20:00:31 +02:00
|
|
|
response = json.loads(response)
|
2011-07-08 23:14:01 +02:00
|
|
|
except Exception, e:
|
|
|
|
response = None
|
|
|
|
logger.error("Exception: %s", e)
|
2011-07-21 12:12:37 +02:00
|
|
|
|
2011-07-08 23:14:01 +02:00
|
|
|
return response
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2011-08-12 21:19:30 +02:00
|
|
|
def get_stream_setting(self):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-08-12 21:19:30 +02:00
|
|
|
try:
|
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["get_stream_setting"])
|
2012-07-12 23:58:29 +02:00
|
|
|
|
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
2012-04-27 22:05:26 +02:00
|
|
|
response = self.get_response_from_server(url)
|
2011-08-12 21:19:30 +02:00
|
|
|
response = json.loads(response)
|
|
|
|
except Exception, e:
|
|
|
|
response = None
|
2011-08-15 22:10:46 +02:00
|
|
|
logger.error("Exception: %s", e)
|
2011-08-12 21:19:30 +02:00
|
|
|
|
|
|
|
return response
|
2011-05-16 21:33:31 +02:00
|
|
|
|
2011-09-16 23:51:28 +02:00
|
|
|
"""
|
2012-08-09 18:15:45 +02:00
|
|
|
Purpose of this method is to contact the server with a "Hey its me!"
|
|
|
|
message. This will allow the server to register the component's (component
|
|
|
|
= media-monitor, pypo etc.) ip address, and later use it to query monit via
|
|
|
|
monit's http service, or download log files via a http server.
|
2011-09-16 23:51:28 +02:00
|
|
|
"""
|
|
|
|
def register_component(self, component):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-09-16 23:51:28 +02:00
|
|
|
try:
|
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["register_component"])
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2011-09-16 23:51:28 +02:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
url = url.replace("%%component%%", component)
|
2012-04-27 22:20:40 +02:00
|
|
|
self.get_response_from_server(url)
|
2011-09-16 23:51:28 +02:00
|
|
|
except Exception, e:
|
|
|
|
logger.error("Exception: %s", e)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2011-12-24 16:59:09 +01:00
|
|
|
def notify_liquidsoap_status(self, msg, stream_id, time):
|
2012-02-24 19:12:50 +01:00
|
|
|
logger = self.logger
|
2011-10-11 02:14:27 +02:00
|
|
|
try:
|
2011-12-24 16:59:09 +01:00
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_liquidsoap_status"])
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2011-10-11 02:14:27 +02:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
2011-12-24 16:59:09 +01:00
|
|
|
msg = msg.replace('/', ' ')
|
|
|
|
encoded_msg = urllib.quote(msg, '')
|
|
|
|
url = url.replace("%%msg%%", encoded_msg)
|
2011-10-11 02:14:27 +02:00
|
|
|
url = url.replace("%%stream_id%%", stream_id)
|
2011-12-02 22:57:18 +01:00
|
|
|
url = url.replace("%%boot_time%%", time)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-08-30 21:52:47 +02:00
|
|
|
self.get_response_from_server(url, attempts = 5)
|
2011-10-11 02:14:27 +02:00
|
|
|
except Exception, e:
|
|
|
|
logger.error("Exception: %s", e)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-03-08 23:42:38 +01:00
|
|
|
def notify_source_status(self, sourcename, status):
|
|
|
|
logger = self.logger
|
|
|
|
try:
|
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_source_status"])
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-03-08 23:42:38 +01:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
url = url.replace("%%sourcename%%", sourcename)
|
|
|
|
url = url.replace("%%status%%", status)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-08-30 21:52:47 +02:00
|
|
|
self.get_response_from_server(url, attempts = 5)
|
2012-03-08 23:42:38 +01:00
|
|
|
except Exception, e:
|
|
|
|
logger.error("Exception: %s", e)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-01-11 18:17:48 +01:00
|
|
|
"""
|
|
|
|
This function updates status of mounted file system information on airtime
|
|
|
|
"""
|
2012-01-12 23:55:05 +01:00
|
|
|
def update_file_system_mount(self, added_dir, removed_dir):
|
2012-03-14 15:22:41 +01:00
|
|
|
logger = self.logger
|
2012-01-11 18:17:48 +01:00
|
|
|
try:
|
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_fs_mount"])
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-01-11 18:17:48 +01:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
|
2012-01-12 23:55:05 +01:00
|
|
|
added_data_string = string.join(added_dir, ',')
|
|
|
|
removed_data_string = string.join(removed_dir, ',')
|
2012-07-12 23:58:29 +02:00
|
|
|
|
|
|
|
map = [("added_dir", added_data_string), ("removed_dir", removed_data_string)]
|
|
|
|
|
2012-01-11 18:17:48 +01:00
|
|
|
data = urllib.urlencode(map)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-01-11 18:17:48 +01:00
|
|
|
req = urllib2.Request(url, data)
|
2012-04-27 22:18:23 +02:00
|
|
|
response = self.get_response_from_server(req)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-04-27 22:05:26 +02:00
|
|
|
logger.info("update file system mount: %s", json.loads(response))
|
2012-01-11 18:17:48 +01:00
|
|
|
except Exception, e:
|
|
|
|
logger.error('Exception: %s', e)
|
2012-07-12 23:58:29 +02:00
|
|
|
logger.error("traceback: %s", traceback.format_exc())
|
|
|
|
|
2012-01-11 18:17:48 +01:00
|
|
|
"""
|
2012-08-09 18:15:45 +02:00
|
|
|
When watched dir is missing(unplugged or something) on boot up, this
|
|
|
|
function will get called and will call appropriate function on Airtime.
|
2012-01-11 18:17:48 +01:00
|
|
|
"""
|
|
|
|
def handle_watched_dir_missing(self, dir):
|
2012-03-14 15:22:41 +01:00
|
|
|
logger = self.logger
|
2012-01-11 18:17:48 +01:00
|
|
|
try:
|
|
|
|
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["handle_watched_dir_missing"])
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-01-11 18:17:48 +01:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
url = url.replace("%%dir%%", base64.b64encode(dir))
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-04-27 22:05:26 +02:00
|
|
|
response = self.get_response_from_server(url)
|
|
|
|
logger.info("update file system mount: %s", json.loads(response))
|
2012-01-11 18:17:48 +01:00
|
|
|
except Exception, e:
|
|
|
|
logger.error('Exception: %s', e)
|
2012-07-12 23:58:29 +02:00
|
|
|
logger.error("traceback: %s", traceback.format_exc())
|
|
|
|
|
2012-03-20 21:41:15 +01:00
|
|
|
def get_bootstrap_info(self):
|
2012-07-20 21:37:13 +02:00
|
|
|
"""
|
|
|
|
Retrive infomations needed on bootstrap time
|
|
|
|
"""
|
2012-03-14 15:22:41 +01:00
|
|
|
logger = self.logger
|
|
|
|
try:
|
2012-07-20 21:37:13 +02:00
|
|
|
url = self.construct_url("get_bootstrap_info")
|
2012-04-27 22:05:26 +02:00
|
|
|
response = self.get_response_from_server(url)
|
2012-04-27 22:18:23 +02:00
|
|
|
response = json.loads(response)
|
|
|
|
logger.info("Bootstrap info retrieved %s", response)
|
2012-03-14 15:22:41 +01:00
|
|
|
except Exception, e:
|
2012-03-26 23:44:07 +02:00
|
|
|
response = None
|
2012-03-14 15:22:41 +01:00
|
|
|
logger.error('Exception: %s', e)
|
2012-07-12 23:58:29 +02:00
|
|
|
logger.error("traceback: %s", traceback.format_exc())
|
2012-03-14 15:22:41 +01:00
|
|
|
return response
|
2012-07-12 23:58:29 +02:00
|
|
|
|
|
|
|
def get_files_without_replay_gain_value(self, dir_id):
|
|
|
|
"""
|
2012-08-08 23:56:46 +02:00
|
|
|
Download a list of files that need to have their ReplayGain value
|
|
|
|
calculated. This list of files is downloaded into a file and the path
|
|
|
|
to this file is the return value.
|
2012-07-12 23:58:29 +02:00
|
|
|
"""
|
|
|
|
|
|
|
|
#http://localhost/api/get-files-without-replay-gain/dir_id/1
|
|
|
|
|
|
|
|
logger = self.logger
|
|
|
|
try:
|
2012-07-16 02:57:40 +02:00
|
|
|
url = "http://%(base_url)s:%(base_port)s/%(api_base)s/%(get_files_without_replay_gain)s/" % (self.config)
|
2012-07-12 23:58:29 +02:00
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
url = url.replace("%%dir_id%%", dir_id)
|
2012-07-15 05:36:33 +02:00
|
|
|
response = self.get_response_from_server(url)
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-07-15 05:36:33 +02:00
|
|
|
logger.info("update file system mount: %s", response)
|
|
|
|
response = json.loads(response)
|
|
|
|
#file_path = self.get_response_into_file(url)
|
2012-07-12 23:58:29 +02:00
|
|
|
except Exception, e:
|
2012-07-15 05:36:33 +02:00
|
|
|
response = None
|
2012-07-12 23:58:29 +02:00
|
|
|
logger.error('Exception: %s', e)
|
|
|
|
logger.error("traceback: %s", traceback.format_exc())
|
|
|
|
|
2012-07-15 05:36:33 +02:00
|
|
|
return response
|
2012-07-12 23:58:29 +02:00
|
|
|
|
2012-07-16 02:57:40 +02:00
|
|
|
def update_replay_gain_values(self, pairs):
|
|
|
|
"""
|
2012-08-09 18:15:45 +02:00
|
|
|
'pairs' is a list of pairs in (x, y), where x is the file's database
|
|
|
|
row id and y is the file's replay_gain value in dB
|
2012-07-16 02:57:40 +02:00
|
|
|
"""
|
|
|
|
|
|
|
|
#http://localhost/api/update-replay-gain-value/
|
|
|
|
try:
|
|
|
|
url = "http://%(base_url)s:%(base_port)s/%(api_base)s/%(update_replay_gain_value)s/" % (self.config)
|
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
data = urllib.urlencode({'data': json.dumps(pairs)})
|
|
|
|
request = urllib2.Request(url, data)
|
|
|
|
|
2012-08-15 21:12:44 +02:00
|
|
|
self.logger.debug(self.get_response_from_server(request))
|
2012-07-16 02:57:40 +02:00
|
|
|
except Exception, e:
|
2012-07-16 16:31:00 +02:00
|
|
|
self.logger.error("Exception: %s", e)
|
|
|
|
raise
|
2012-08-15 21:12:44 +02:00
|
|
|
|
|
|
|
|
|
|
|
def notify_webstream_data(self, data, media_id):
|
|
|
|
"""
|
|
|
|
Update the server with the latest metadata we've received from the
|
|
|
|
external webstream
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
url = "http://%(base_url)s:%(base_port)s/%(api_base)s/%(notify_webstream_data)s/" % (self.config)
|
|
|
|
url = url.replace("%%media_id%%", str(media_id))
|
|
|
|
url = url.replace("%%api_key%%", self.config["api_key"])
|
|
|
|
data = urllib.urlencode({'data': data})
|
|
|
|
self.logger.debug(url)
|
|
|
|
request = urllib2.Request(url, data)
|
|
|
|
|
2012-08-30 21:52:47 +02:00
|
|
|
self.logger.info(self.get_response_from_server(request, attempts = 5))
|
2012-08-15 21:12:44 +02:00
|
|
|
except Exception, e:
|
|
|
|
self.logger.error("Exception: %s", e)
|