Merge branch 'devel' into 2.3.x-saas
Conflicts:
	airtime_mvc/application/controllers/PreferenceController.php
	airtime_mvc/application/forms/AddShowWhen.php
	airtime_mvc/application/forms/GeneralPreferences.php
	airtime_mvc/application/forms/LiveStreamingPreferences.php
	airtime_mvc/application/forms/SoundcloudPreferences.php
	airtime_mvc/application/forms/SupportSettings.php
	airtime_mvc/application/views/scripts/form/preferences.phtml
	airtime_mvc/application/views/scripts/form/preferences_email_server.phtml
	airtime_mvc/application/views/scripts/form/preferences_general.phtml
	airtime_mvc/application/views/scripts/form/preferences_livestream.phtml
	airtime_mvc/application/views/scripts/form/support-setting.phtml
	airtime_mvc/application/views/scripts/schedule/add-show-form.phtml
	airtime_mvc/public/js/airtime/preferences/preferences.js
	python_apps/api_clients/api_client.py
	python_apps/pypo/listenerstat.py

commit 8cd6bd9aa4
346 changed files with 48955 additions and 11856 deletions
@@ -17,8 +17,9 @@ version_url = 'version/api_key/%%api_key%%'
register_component = 'register-component/format/json/api_key/%%api_key%%/component/%%component%%'

# Hostname
base_url = 'localhost'
host = 'localhost'
base_port = 80
base_dir = ''

#############################
## Config for Media Monitor

@@ -123,3 +124,5 @@ notify_liquidsoap_started = 'rabbitmq-do-push/api_key/%%api_key%%/format/json'
get_stream_parameters = 'get-stream-parameters/api_key/%%api_key%%/format/json'

push_stream_stats = 'push-stream-stats/api_key/%%api_key%%/format/json'

update_stream_setting_table = 'update-stream-setting-table/api_key/%%api_key%%/format/json'
@@ -12,13 +12,10 @@ import urllib
import urllib2
import logging
import json
from urlparse import urlparse
import base64
from configobj import ConfigObj
import string
import traceback

AIRTIME_VERSION = "2.2.1"
AIRTIME_VERSION = "2.3.0"


# TODO : Place these functions in some common module. Right now, media
@@ -44,153 +41,114 @@ def convert_dict_value_to_utf8(md):
# Airtime API Client
################################################################################

class UrlException(Exception): pass

class IncompleteUrl(UrlException):
    def __init__(self, url): self.url = url
    def __str__(self): return "Incomplete url: '%s'" % self.url

class UrlBadParam(UrlException):
    def __init__(self, url, param):
        self.url = url
        self.param = param
    def __str__(self):
        return "Bad param '%s' passed into url: '%s'" % (self.param, self.url)

class ApcUrl(object):
    """ A safe abstraction and testable for filling in parameters in
    api_client.cfg"""
    def __init__(self, base_url): self.base_url = base_url

    def params(self, **params):
        temp_url = self.base_url
        for k, v in params.iteritems():
            wrapped_param = "%%" + k + "%%"
            if wrapped_param in temp_url:
                temp_url = temp_url.replace(wrapped_param, str(v))
            else: raise UrlBadParam(self.base_url, k)
        return ApcUrl(temp_url)

    def url(self):
        if '%%' in self.base_url: raise IncompleteUrl(self.base_url)
        else: return self.base_url
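As a quick illustration of the ApcUrl contract above (the URL string here is made up for the example, not taken from the config):

    # Placeholders are filled one call at a time; url() refuses to render
    # until every %%param%% is gone, raising IncompleteUrl otherwise.
    u = ApcUrl('http://%%host%%/api/%%action%%/api_key/%%api_key%%')
    u = u.params(host='localhost', action='version')
    # u.url() at this point raises IncompleteUrl (%%api_key%% is unfilled)
    print u.params(api_key='XXXX').url()
    # -> http://localhost/api/version/api_key/XXXX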
class ApiRequest(object):
    def __init__(self, name, url):
        self.name = name
        self.url = url
        self.__req = None
    def __call__(self, _post_data=None, **kwargs):
        # TODO : get rid of god damn urllib and replace everything with
        # grequests or requests at least
        final_url = self.url.params(**kwargs).url()
        if _post_data is not None: _post_data = urllib.urlencode(_post_data)
        req = urllib2.Request(final_url, _post_data)
        response = urllib2.urlopen(req).read()
        # Ghetto hack for now because we don't know the content type we are
        # getting (pointless to look at mime since it's not being set
        # correctly always)
        try: return json.loads(response)
        except ValueError: return response

    def req(self, *args, **kwargs):
        self.__req = lambda: self(*args, **kwargs)
        return self

    def retry(self, n, delay=5):
        """Try to send the request n times. If it still fails after n tries,
        the last exception is raised."""
        for i in range(0, n-1):
            try: return self.__req()
            except Exception: time.sleep(delay)
        return self.__req()
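The req/retry pair above separates building a request from sending it; a minimal sketch of the calling pattern (mirroring notify_liquidsoap_status further down; `api` stands for an assumed AirtimeApiClient instance):

    # Capture the arguments now, send later with up to 5 attempts,
    # sleeping 5 seconds between failures.
    request = api.services.update_liquidsoap_status.req(
        msg='OK', stream_id='1', boot_time='12345')
    response = request.retry(5)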
class RequestProvider(object):
    """ Creates the available ApiRequest instance that can be read from
    a config file """
    def __init__(self, cfg):
        self.config = cfg
        self.requests = {}
        self.url = ApcUrl("http://%s:%s/%s/%s/%s" \
            % (self.config["host"], str(self.config["base_port"]),
               self.config["base_dir"], self.config["api_base"],
               '%%action%%'))
        # Now we must discover the possible actions
        actions = dict( (k,v) for k,v in cfg.iteritems() if '%%api_key%%' in v)
        for action_name, action_value in actions.iteritems():
            new_url = self.url.params(action=action_value).params(
                api_key=self.config['api_key'])
            self.requests[action_name] = ApiRequest(action_name, new_url)

    def available_requests(self)   : return self.requests.keys()
    def __contains__(self, request) : return request in self.requests

    def __getattr__(self, attr):
        if attr in self: return self.requests[attr]
        else: return super(RequestProvider, self).__getattribute__(attr)
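In effect, every config entry whose value contains %%api_key%% becomes an attribute on the provider; a short sketch of the dispatch (the config path is illustrative):

    cfg = ConfigObj('/etc/airtime/api_client.cfg')
    services = RequestProvider(cfg)
    print services.available_requests()  # e.g. ['version_url', 'export_url', ...]
    # __getattr__ hands back the prepared ApiRequest, so this issues
    # GET http://<host>:<base_port>/<base_dir>/api/version/api_key/<key>
    print services.version_url()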
class AirtimeApiClient(object):

    # This is a little hacky fix so that I don't have to pass the config object
    # everywhere where AirtimeApiClient needs to be initialized
    default_config = None
    # The purpose of this custom constructor is to remember which config file
    # it was called with, so that after the initial call:
    #   AirtimeApiClient.create_right_config('/path/to/config')
    # all subsequent calls to create_right_config will be with that config
    # file
    @staticmethod
    def create_right_config(log=None,config_path=None):
        if config_path: AirtimeApiClient.default_config = config_path
        elif (not AirtimeApiClient.default_config):
            raise ValueError("Cannot skip config_path attribute when it has \
                never been passed yet")
        return AirtimeApiClient( logger=None,
                config_path=AirtimeApiClient.default_config )

    def __init__(self, logger=None,config_path='/etc/airtime/api_client.cfg'):
        if logger is None:
            self.logger = logging
        else:
            self.logger = logger
        if logger is None: self.logger = logging
        else: self.logger = logger

        # loading config file
        try:
            self.config = ConfigObj(config_path)
            self.services = RequestProvider(self.config)
        except Exception, e:
            self.logger.error('Error loading config file: %s', e)
            self.logger.error('Error loading config file: %s', config_path)
            self.logger.error('Exception: %s', str(e))
            sys.exit(1)

    def get_response_from_server(self, url, attempts=-1):
        logger = self.logger
        successful_response = False

        while not successful_response:
            try:
                response = urllib2.urlopen(url).read()
                successful_response = True
            except IOError, e:
                logger.error('Error Authenticating with remote server: %s', e)
                if isinstance(url, urllib2.Request):
                    logger.debug(url.get_full_url())
                else:
                    logger.debug(url)
            except Exception, e:
                logger.error('Couldn\'t connect to remote server. Is it running?')
                logger.error("%s" % e)
                if isinstance(url, urllib2.Request):
                    logger.debug(url.get_full_url())
                else:
                    logger.debug(url)

            # If the user passed in a positive attempts number then attempts
            # will roll over 0 and we stop. If attempts was initially negative,
            # then we have unlimited attempts.
            if attempts > 0:
                attempts = attempts - 1
                if attempts == 0:
                    successful_response = True

            if not successful_response:
                logger.error("Error connecting to server, waiting 5 seconds and trying again.")
                time.sleep(5)

        return response

    def get_response_into_file(self, url, block=True):
        """
        This function will query the server and download its response directly
        into a temporary file. This is useful in the situation where the
        response from the server can be huge and we don't want to store it in
        memory (potentially causing Python to use hundreds of MBs of memory).
        By writing into a file we can then open this file later and read data
        a little bit at a time to stay memory efficient.

        The return value of this function is the path of the temporary file.
        Unless specified using block = False, this function will block until a
        successful HTTP 200 response is received.
        """

        logger = self.logger
        successful_response = False

        while not successful_response:
            try:
                path = urllib.urlretrieve(url)[0]
                successful_response = True
            except IOError, e:
                logger.error('Error Authenticating with remote server: %s', e)
                if not block:
                    raise
            except Exception, e:
                logger.error('Couldn\'t connect to remote server. Is it running?')
                logger.error("%s" % e)
                if not block:
                    raise

            if not successful_response:
                logger.error("Error connecting to server, waiting 5 seconds and trying again.")
                time.sleep(5)

        return path



    def __get_airtime_version(self):
        logger = self.logger
        url = "http://%s:%s/%s/%s" % (self.config["base_url"],
                str(self.config["base_port"]), self.config["api_base"],
                self.config["version_url"])
        logger.debug("Trying to contact %s", url)
        url = url.replace("%%api_key%%", self.config["api_key"])

        version = -1
        try:
            data = self.get_response_from_server(url)
            logger.debug("Data: %s", data)
            response_json = json.loads(data)
            version = response_json['version']
            logger.debug("Airtime Version %s detected", version)
        except Exception, e:
            logger.error("Unable to detect Airtime Version - %s", e)
            return -1

        return version

    def test(self):
        logger = self.logger
        items = self.get_schedule()[1]
        schedule = items["playlists"]
        logger.debug("Number of playlists found: %s", str(len(schedule)))
        count = 1
        for pkey in sorted(schedule.iterkeys()):
            logger.debug("Playlist #%s", str(count))
            count += 1
            playlist = schedule[pkey]
            for item in playlist["medias"]:
                filename = urlparse(item["uri"])
                filename = filename.query[5:]
                self.get_media(item["uri"], filename)

        # TODO : maybe fix this function to drop an exception?
        try: return self.services.version_url()[u'version']
        except Exception: return -1

    def is_server_compatible(self, verbose=True):
        logger = self.logger
        version = self.__get_airtime_version()
        # logger.info('Airtime version found: ' + str(version))
        if (version == -1):
            if (verbose):
                logger.info('Unable to get Airtime version number.\n')
@@ -209,105 +167,30 @@ class AirtimeApiClient(object):

    def get_schedule(self):
        logger = self.logger

        # Construct the URL
        export_url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["export_url"])

        logger.info("Fetching schedule from %s", export_url)
        export_url = export_url.replace('%%api_key%%', self.config["api_key"])

        response = ""
        try:
            response_json = self.get_response_from_server(export_url)
            response = json.loads(response_json)
            success = True
        except Exception, e:
            logger.error(e)
            success = False

        return success, response


    def get_media(self, uri, dst):
        logger = self.logger

        try:
            src = uri + "/api_key/%%api_key%%"
            logger.info("try to download from %s to %s", src, dst)
            src = src.replace("%%api_key%%", self.config["api_key"])
            # check if file exists already before downloading again
            headers = urllib.urlretrieve(src, dst)[1]
            logger.info(headers)
        except Exception, e:
            logger.error("%s", e)
        # TODO : properly refactor this routine
        # For now the return type is a little fucked for compatibility reasons
        try: return (True, self.services.export_url())
        except: (False, "")

    def notify_liquidsoap_started(self):
        logger = self.logger
        return self.services.notify_liquidsoap_started()

        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], \
                    str(self.config["base_port"]), \
                    self.config["api_base"], \
                    self.config["notify_liquidsoap_started"])

            url = url.replace("%%api_key%%", self.config["api_key"])

            self.get_response_from_server(url, attempts=5)
        except Exception, e:
            logger.error("Exception: %s", str(e))


    """
    This is a callback from liquidsoap, we use this to notify about the
    currently playing *song*. We get passed a JSON string which we handed to
    liquidsoap in get_liquidsoap_data().
    """
    def notify_media_item_start_playing(self, media_id):
        logger = self.logger
        response = ''
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_start_playing_url"])
            url = url.replace("%%media_id%%", str(media_id))
            logger.debug(url)
            url = url.replace("%%api_key%%", self.config["api_key"])

            response = self.get_response_from_server(url, attempts = 5)
            response = json.loads(response)
            logger.info("API-Status %s", response['status'])
            logger.info("API-Message %s", response['message'])

        except Exception, e:
            logger.error("Exception: %s", e)

        return response
        """ This is a callback from liquidsoap, we use this to notify
        about the currently playing *song*. We get passed a JSON string
        which we handed to liquidsoap in get_liquidsoap_data(). """
        return self.services.update_start_playing_url(media_id=media_id)

    # TODO : get this routine out of here, it doesn't belong here at all
    def get_liquidsoap_data(self, pkey, schedule):
        playlist = schedule[pkey]
        data = dict()
        try:
            data["schedule_id"] = playlist['id']
        except Exception:
            data["schedule_id"] = 0
        try: data["schedule_id"] = playlist['id']
        except Exception: data["schedule_id"] = 0
        return data

    def get_shows_to_record(self):
        logger = self.logger
        response = None
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["show_schedule_url"])
            logger.debug(url)
            url = url.replace("%%api_key%%", self.config["api_key"])
            response = self.get_response_from_server(url)

            response = json.loads(response)
            logger.info("shows %s", response)

        except Exception, e:
            logger.error("Exception: %s", e)
            response = None

        return response
        return self.services.show_schedule_url()

    def upload_recorded_show(self, data, headers):
        logger = self.logger
@@ -316,10 +199,9 @@ class AirtimeApiClient(object):
        retries = int(self.config["upload_retries"])
        retries_wait = int(self.config["upload_wait"])

        url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["upload_file_url"])
        url = self.construct_url("upload_file_url")

        logger.debug(url)
        url = url.replace("%%api_key%%", self.config["api_key"])

        for i in range(0, retries):
            logger.debug("Upload attempt: %s", i + 1)
@@ -344,86 +226,21 @@ class AirtimeApiClient(object):
        return response

    def check_live_stream_auth(self, username, password, dj_type):
        """
        TODO: Why are we using print statements here? Possibly use a logger
        that is directed to stdout. -MK
        """

        response = ''
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["check_live_stream_auth"])

            url = url.replace("%%api_key%%", self.config["api_key"])
            url = url.replace("%%username%%", username)
            url = url.replace("%%djtype%%", dj_type)
            url = url.replace("%%password%%", password)

            response = self.get_response_from_server(url)
            response = json.loads(response)
        except Exception, e:
            print "Exception: %s", e
            print "traceback: %s", traceback.format_exc()
            response = None

        return response
        return self.services.check_live_stream_auth(
            username=username, password=password, djtype=dj_type)

    def construct_url(self, config_action_key):
        """Constructs the base url for every request"""
        # TODO : Make other methods in this class use this method.
        url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config[config_action_key])
        url = "http://%s:%s/%s/%s/%s" % \
            (self.config["host"], str(self.config["base_port"]),
             self.config["base_dir"], self.config["api_base"],
             self.config[config_action_key])
        url = url.replace("%%api_key%%", self.config["api_key"])
        return url
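A worked example of what the new construct_url produces, assuming the default values shipped in this commit's api_client.cfg (host 'localhost', base_port 80, empty base_dir, api_base 'api'; `client` stands for an assumed AirtimeApiClient instance):

    # config['upload_file_url'] = 'upload-file/format/json/api_key/%%api_key%%'
    url = client.construct_url("upload_file_url")
    # -> 'http://localhost:80//api/upload-file/format/json/api_key/<api_key>'
    # (note the doubled slash that an empty base_dir leaves behind)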
    def setup_media_monitor(self):
        logger = self.logger
        response = None
        try:
            url = self.construct_url("media_setup_url")
            response = self.get_response_from_server(url)
            response = json.loads(response)
            logger.info("Connected to Airtime Server. Json Media Storage Dir: %s", response)
        except Exception, e:
            response = None
            logger.error("Exception: %s", e)
        return response

    def update_media_metadata(self, md, mode, is_record=False):
        logger = self.logger
        response = None
        try:
            url = self.construct_url("update_media_url")
            url = url.replace("%%mode%%", mode)

            self.logger.info("Requesting url %s" % url)

            md = convert_dict_value_to_utf8(md)

            data = urllib.urlencode(md)
            req = urllib2.Request(url, data)

            response = self.get_response_from_server(req)
            logger.info("update media %s, filepath: %s, mode: %s", response, md['MDATA_KEY_FILEPATH'], mode)
            self.logger.info("Received response:")
            self.logger.info(response)
            try: response = json.loads(response)
            except ValueError:
                logger.info("Could not parse json from response: '%s'" % response)

            if("error" not in response and is_record):
                url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["upload_recorded"])
                url = url.replace("%%fileid%%", str(response[u'id']))
                url = url.replace("%%showinstanceid%%", str(md['MDATA_KEY_TRACKNUMBER']))
                url = url.replace("%%api_key%%", self.config["api_key"])

                response = self.get_response_from_server(url)
                response = json.loads(response)
                logger.info("associate recorded %s", response)
        except Exception, e:
            response = None
            logger.error('Exception: %s', e)
            logger.error("traceback: %s", traceback.format_exc())

        return response
        return self.services.media_setup_url()

    def send_media_monitor_requests(self, action_list, dry=False):
        """
@@ -434,7 +251,6 @@ class AirtimeApiClient(object):
        says whether the show was recorded or not. The value of this key
        does not matter, only whether it's present or not.
        """
        url = self.construct_url('reload_metadata_group')
        # We are assuming that action_list is a list of dictionaries such
        # that every dictionary represents the metadata of a file along
        # with a special mode key that is the action to be executed by the
@@ -462,11 +278,7 @@ class AirtimeApiClient(object):
        # controller not to actually do any changes
        if dry: md_list['dry'] = 1
        self.logger.info("Pumping out %d requests..." % len(valid_actions))
        data = urllib.urlencode(md_list)
        req = urllib2.Request(url, data)
        response = self.get_response_from_server(req)
        response = json.loads(response)
        return response
        return self.services.reload_metadata_group(_post_data=md_list)

    # returns a list of all db files for a given directory in JSON format:
    # {"files":["path/to/file1", "path/to/file2"]}
@@ -476,15 +288,11 @@ class AirtimeApiClient(object):
        logger = self.logger
        try:
            all_files = u"1" if all_files else u"0"
            url = self.construct_url("list_all_db_files")
            url = url.replace("%%dir_id%%", dir_id)
            url = url.replace("%%all%%", all_files)
            response = self.get_response_from_server(url)
            response = json.loads(response)
            response = self.services.list_all_db_files(dir_id=dir_id,
                                                       all=all_files)
        except Exception, e:
            response = {}
            logger.error("Exception: %s", e)

        try:
            return response["files"]
        except KeyError:
@@ -493,188 +301,52 @@ class AirtimeApiClient(object):
        return []

    def list_all_watched_dirs(self):
        # Does this include the stor directory as well?
        logger = self.logger
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["list_all_watched_dirs"])

            url = url.replace("%%api_key%%", self.config["api_key"])

            response = self.get_response_from_server(url)
            response = json.loads(response)
        except Exception, e:
            response = None
            logger.error("Exception: %s", e)
            self.logger.debug(traceback.format_exc())

        return response
        return self.services.list_all_watched_dirs()

    def add_watched_dir(self, path):
        logger = self.logger
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["add_watched_dir"])

            url = url.replace("%%api_key%%", self.config["api_key"])
            url = url.replace("%%path%%", base64.b64encode(path))

            response = self.get_response_from_server(url)
            response = json.loads(response)
        except Exception, e:
            response = None
            logger.error("Exception: %s", e)

        return response
        return self.services.add_watched_dir(path=base64.b64encode(path))

    def remove_watched_dir(self, path):
        logger = self.logger
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["remove_watched_dir"])

            url = url.replace("%%api_key%%", self.config["api_key"])
            url = url.replace("%%path%%", base64.b64encode(path))

            response = self.get_response_from_server(url)
            response = json.loads(response)
        except Exception, e:
            response = None
            logger.error("Exception: %s", e)

        return response
        return self.services.remove_watched_dir(path=base64.b64encode(path))

    def set_storage_dir(self, path):
        logger = self.logger
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["set_storage_dir"])

            url = url.replace("%%api_key%%", self.config["api_key"])
            url = url.replace("%%path%%", base64.b64encode(path))

            response = self.get_response_from_server(url)
            response = json.loads(response)
        except Exception, e:
            response = None
            logger.error("Exception: %s", e)

        return response
        return self.services.set_storage_dir(path=base64.b64encode(path))

    def get_stream_setting(self):
        logger = self.logger
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["get_stream_setting"])

            url = url.replace("%%api_key%%", self.config["api_key"])
            response = self.get_response_from_server(url)
            response = json.loads(response)
        try: return self.services.get_stream_setting()
        except Exception, e:
            response = None
            logger.error("Exception: %s", e)
            return None

        return response

    """
    The purpose of this method is to contact the server with a "Hey, it's me!"
    message. This allows the server to register the component's (component =
    media-monitor, pypo, etc.) IP address, and later use it to query monit via
    monit's HTTP service, or download log files via an HTTP server.
    """
    def register_component(self, component):
        logger = self.logger
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["register_component"])

            url = url.replace("%%api_key%%", self.config["api_key"])
            url = url.replace("%%component%%", component)
            self.get_response_from_server(url)
        except Exception, e:
            logger.error("Exception: %s", e)
        """ The purpose of this method is to contact the server with a "Hey,
        it's me!" message. This allows the server to register the component's
        (component = media-monitor, pypo, etc.) IP address, and later use it
        to query monit via monit's HTTP service, or download log files via an
        HTTP server. """
        return self.services.register_component(component=component)

    def notify_liquidsoap_status(self, msg, stream_id, time):
        logger = self.logger
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_liquidsoap_status"])

            url = url.replace("%%api_key%%", self.config["api_key"])
            msg = msg.replace('/', ' ')
            encoded_msg = urllib.quote(msg, '')
            url = url.replace("%%msg%%", encoded_msg)
            url = url.replace("%%stream_id%%", stream_id)
            url = url.replace("%%boot_time%%", time)

            self.get_response_from_server(url, attempts = 5)
            self.services.update_liquidsoap_status.req(msg=encoded_msg, stream_id=stream_id,
                                                       boot_time=time).retry(5)
        except Exception, e:
            logger.error("Exception: %s", e)

    def notify_source_status(self, sourcename, status):
        logger = self.logger
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_source_status"])

            url = url.replace("%%api_key%%", self.config["api_key"])
            url = url.replace("%%sourcename%%", sourcename)
            url = url.replace("%%status%%", status)

            self.get_response_from_server(url, attempts = 5)
            logger = self.logger
            return self.services.update_source_status.req(sourcename=sourcename,
                                                          status=status).retry(5)
        except Exception, e:
            logger.error("Exception: %s", e)

    """
    This function updates the status of mounted file system information on Airtime.
    """
    def update_file_system_mount(self, added_dir, removed_dir):
        logger = self.logger
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_fs_mount"])

            url = url.replace("%%api_key%%", self.config["api_key"])

            added_data_string = string.join(added_dir, ',')
            removed_data_string = string.join(removed_dir, ',')

            map = [("added_dir", added_data_string), ("removed_dir", removed_data_string)]

            data = urllib.urlencode(map)

            req = urllib2.Request(url, data)
            response = self.get_response_from_server(req)

            logger.info("update file system mount: %s", json.loads(response))
        except Exception, e:
            logger.error('Exception: %s', e)
            logger.error("traceback: %s", traceback.format_exc())

    """
    When a watched dir is missing (unplugged or something) on boot-up, this
    function will get called and will call the appropriate function on Airtime.
    """
    def handle_watched_dir_missing(self, dir):
        logger = self.logger
        try:
            url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["handle_watched_dir_missing"])

            url = url.replace("%%api_key%%", self.config["api_key"])
            url = url.replace("%%dir%%", base64.b64encode(dir))

            response = self.get_response_from_server(url)
            logger.info("update file system mount: %s", json.loads(response))
        except Exception, e:
            logger.error('Exception: %s', e)
            logger.error("traceback: %s", traceback.format_exc())

    def get_bootstrap_info(self):
        """
        Retrieve information needed at bootstrap time
        """
        logger = self.logger
        try:
            url = self.construct_url("get_bootstrap_info")
            response = self.get_response_from_server(url)
            response = json.loads(response)
            logger.info("Bootstrap info retrieved %s", response)
        except Exception, e:
            response = None
            logger.error('Exception: %s', e)
            logger.error("traceback: %s", traceback.format_exc())
        return response
        """ Retrieve information needed at bootstrap time """
        return self.services.get_bootstrap_info()

    def get_files_without_replay_gain_value(self, dir_id):
        """
@@ -682,43 +354,16 @@ class AirtimeApiClient(object):
        calculated. This list of files is downloaded into a file and the path
        to this file is the return value.
        """

        #http://localhost/api/get-files-without-replay-gain/dir_id/1

        logger = self.logger
        try:
            url = "http://%(base_url)s:%(base_port)s/%(api_base)s/%(get_files_without_replay_gain)s/" % (self.config)
            url = url.replace("%%api_key%%", self.config["api_key"])
            url = url.replace("%%dir_id%%", dir_id)
            response = self.get_response_from_server(url)

            logger.info("update file system mount: %s", response)
            response = json.loads(response)
            #file_path = self.get_response_into_file(url)
        except Exception, e:
            response = None
            logger.error('Exception: %s', e)
            logger.error("traceback: %s", traceback.format_exc())

        return response
        return self.services.get_files_without_replay_gain(dir_id=dir_id)

    def update_replay_gain_values(self, pairs):
        """
        'pairs' is a list of pairs in (x, y), where x is the file's database
        row id and y is the file's replay_gain value in dB
        """

        #http://localhost/api/update-replay-gain-value/
        try:
            url = "http://%(base_url)s:%(base_port)s/%(api_base)s/%(update_replay_gain_value)s/" % (self.config)
            url = url.replace("%%api_key%%", self.config["api_key"])
            data = urllib.urlencode({'data': json.dumps(pairs)})
            request = urllib2.Request(url, data)

            self.logger.debug(self.get_response_from_server(request))
        except Exception, e:
            self.logger.error("Exception: %s", e)
            raise
        self.logger.debug(self.services.update_replay_gain_value(
            _post_data={'data': json.dumps(pairs)}))


    def notify_webstream_data(self, data, media_id):
@@ -726,46 +371,19 @@ class AirtimeApiClient(object):
        Update the server with the latest metadata we've received from the
        external webstream
        """
        try:
            url = "http://%(base_url)s:%(base_port)s/%(api_base)s/%(notify_webstream_data)s/" % (self.config)
            url = url.replace("%%media_id%%", str(media_id))
            url = url.replace("%%api_key%%", self.config["api_key"])
            data = urllib.urlencode({'data': data})
            self.logger.debug(url)
            request = urllib2.Request(url, data)

            self.logger.info(self.get_response_from_server(request, attempts = 5))
        except Exception, e:
            self.logger.error("Exception: %s", e)

        self.logger.info( self.services.notify_webstream_data.req(
            _post_data={'data':data}, media_id=str(media_id)).retry(5))

    def get_stream_parameters(self):
        response = None
        try:
            url = "http://%(base_url)s:%(base_port)s/%(api_base)s/%(get_stream_parameters)s/" % (self.config)
            url = url.replace("%%api_key%%", self.config["api_key"])
            self.logger.debug(url)
            request = urllib2.Request(url)

            response = self.get_response_from_server(request, attempts = 5)
            self.logger.debug(response)

            response = json.loads(response)
        except Exception, e:
            self.logger.error("Exception: %s", e)

        response = self.services.get_stream_parameters()
        self.logger.debug(response)
        return response

    def push_stream_stats(self, data):
        try:
            url = "http://%(base_url)s:%(base_port)s/%(api_base)s/%(push_stream_stats)s/" \
                % (self.config)
            url = url.replace("%%api_key%%", self.config["api_key"])
            json_data = json.dumps(data)
            encoded_data = urllib.urlencode({'data': json_data})
            request = urllib2.Request(url, encoded_data)
            print self.get_response_from_server(request, attempts = 1)

        except Exception, e:
            self.logger.error("Exception: %s", e)
        # TODO : users of this method should do their own error handling
        response = self.services.push_stream_stats(_post_data={'data': json.dumps(data)})
        return response

    def update_stream_setting_table(self, data):
        response = self.services.update_stream_setting_table(_post_data={'data': json.dumps(data)})
        return response
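For callers such as pypo's listener-stat code, the new entry points take plain Python structures and do the JSON wrapping themselves; a hedged sketch (the dict fields and logger are illustrative, not taken from this diff):

    client = AirtimeApiClient(logger=my_logger)  # my_logger: assumed logger object
    stats = [{'mount_name': 'airtime_128', 'num_listeners': 12}]  # illustrative
    client.push_stream_stats(stats)  # POSTed as {'data': json.dumps(stats)}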
python_apps/api_clients/tests/test_apcurl.py (new file, 30 lines)
@@ -0,0 +1,30 @@
import unittest
from .. api_client import ApcUrl, UrlBadParam, IncompleteUrl

class TestApcUrl(unittest.TestCase):
    def test_init(self):
        url = "/testing"
        u = ApcUrl(url)
        self.assertEquals( u.base_url, url)

    def test_params_1(self):
        u = ApcUrl("/testing/%%key%%")
        self.assertEquals(u.params(key='val').url(), '/testing/val')

    def test_params_2(self):
        u = ApcUrl('/testing/%%key%%/%%api%%/more_testing')
        full_url = u.params(key="AAA",api="BBB").url()
        self.assertEquals(full_url, '/testing/AAA/BBB/more_testing')

    def test_params_ex(self):
        u = ApcUrl("/testing/%%key%%")
        with self.assertRaises(UrlBadParam):
            u.params(bad_key='testing')

    def test_url(self):
        u = "one/two/three"
        self.assertEquals( ApcUrl(u).url(), u )

    def test_url_ex(self):
        u = ApcUrl('/%%one%%/%%two%%/three').params(two='testing')
        with self.assertRaises(IncompleteUrl): u.url()
python_apps/api_clients/tests/test_apirequest.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import unittest
import json
from mock import MagicMock, patch
from .. api_client import ApcUrl, ApiRequest

class TestApiRequest(unittest.TestCase):
    def test_init(self):
        u = ApiRequest('request_name', ApcUrl('/test/ing'))
        self.assertEquals(u.name, "request_name")

    def test_call(self):
        ret = json.dumps( {u'ok':u'ok'} )
        read = MagicMock()
        read.read = MagicMock(return_value=ret)
        u = '/testing'
        with patch('urllib2.urlopen') as mock_method:
            mock_method.return_value = read
            request = ApiRequest('mm', ApcUrl(u))()
            self.assertEquals(request, json.loads(ret))

if __name__ == '__main__': unittest.main()
python_apps/api_clients/tests/test_requestprovider.py (new file, 32 lines)
@@ -0,0 +1,32 @@
import unittest
import json
from mock import patch, MagicMock
from configobj import ConfigObj
from .. api_client import RequestProvider

class TestRequestProvider(unittest.TestCase):
    def setUp(self):
        self.cfg = ConfigObj('api_client.cfg')
    def test_test(self):
        self.assertTrue('api_key' in self.cfg)
    def test_init(self):
        rp = RequestProvider(self.cfg)
        self.assertTrue( len( rp.available_requests() ) > 0 )
    def test_contains(self):
        rp = RequestProvider(self.cfg)
        methods = ['upload_recorded', 'update_media_url', 'list_all_db_files']
        for meth in methods:
            self.assertTrue( meth in rp )

    def test_notify_webstream_data(self):
        ret = json.dumps( {u'testing' : u'123' } )
        rp = RequestProvider(self.cfg)
        read = MagicMock()
        read.read = MagicMock(return_value=ret)
        with patch('urllib2.urlopen') as mock_method:
            mock_method.return_value = read
            response = rp.notify_webstream_data(media_id=123)
            mock_method.called_once_with(media_id=123)
            self.assertEquals(json.loads(ret), response)

if __name__ == '__main__': unittest.main()
python_apps/media-monitor2/baby.py (new file, 48 lines)
@@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-
import re
from media.saas.launcher import setup_logger, setup_global, MM2
from media.saas.airtimeinstance import AirtimeInstance
from os.path import isdir, join, abspath, exists
from os import listdir

def list_dirs(d): return (x for x in listdir(d) if isdir(join(d,x)))

def filter_instance(d): return bool(re.match('.+\d+$',d))

def get_name(p): return re.match('.+/(\d+)$',p).group(1)

def filter_instances(l): return (x for x in l if filter_instance(x))

def autoscan_instances(main_cfg):
    root = main_cfg['instance_root']
    instances = []
    for instance_machine in list_dirs(root):
        instance_machine = join(root, instance_machine)
        for instance_root in filter_instances(list_dirs(instance_machine)):
            full_path = abspath(join(instance_machine,instance_root))
            ai = AirtimeInstance.root_make(get_name(full_path), full_path)
            instances.append(ai)
    return instances

def verify_exists(p):
    if not exists(p): raise Exception("%s must exist" % p)

def main(main_cfg):
    log_config, log_path = main_cfg['log_config'], main_cfg['log_path']
    verify_exists(log_config)
    log = setup_logger(log_config, log_path)
    setup_global(log)
    for instance in autoscan_instances(main_cfg):
        print("Launching instance: %s" % str(instance))
        MM2(instance).start()
    print("Launched all instances")

if __name__ == '__main__':
    root = '/home/rudi/reps/Airtime/python_apps/media-monitor2'
    default = {
        'log_path'   : join(root, 'test.log'),            # where to log
        'log_config' : join(root, 'configs/logging.cfg'), # config for log
        # root dir of all instances
        'instance_root' : join(root, 'saas_stub')
    }
    main(default)
python_apps/media-monitor2/configs/airtime.conf (new file, 32 lines)
@@ -0,0 +1,32 @@
[database]
host = localhost
dbname = airtime
dbuser = airtime
dbpass = airtime

[rabbitmq]
host = 127.0.0.1
port = 5672
user = guest
password = guest
vhost = /

[general]
api_key = I6EUOJM0D1EIGSMZ9T70
web_server_user = www-data
airtime_dir = /usr/share/airtime
base_url = localhost
base_port = 80
base_dir = ''

;How many hours ahead of time should Airtime playout engine (PYPO)
;cache scheduled media files.
cache_ahead_hours = 1

[monit]
monit_user = guest
monit_password = airtime

[soundcloud]
connection_retries = 3
time_between_retries = 60
python_apps/media-monitor2/configs/api_client.cfg (new file, 126 lines)
@@ -0,0 +1,126 @@
bin_dir = "/usr/lib/airtime/api_clients"

#############################
## Common
#############################

# Value needed to access the API
api_key = 'I6EUOJM0D1EIGSMZ9T70'

# Path to the base of the API
api_base = 'api'

# URL to get the version number of the server API
version_url = 'version/api_key/%%api_key%%'

# URL to register a component's IP address with the central web server
register_component = 'register-component/format/json/api_key/%%api_key%%/component/%%component%%'

# Hostname
host = 'localhost'
base_port = 80
base_dir = ''

#############################
## Config for Media Monitor
#############################

# URL to set up the media monitor
media_setup_url = 'media-monitor-setup/format/json/api_key/%%api_key%%'

# Tell Airtime the file id associated with a show instance.
upload_recorded = 'upload-recorded/format/json/api_key/%%api_key%%/fileid/%%fileid%%/showinstanceid/%%showinstanceid%%'

# URL to tell Airtime to update a file's metadata
update_media_url = 'reload-metadata/format/json/api_key/%%api_key%%/mode/%%mode%%'

# URL to tell Airtime we want a listing of all files it knows about
list_all_db_files = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%/all/%%all%%'

# URL to tell Airtime we want a listing of all dirs it is watching (including the stor dir)
list_all_watched_dirs = 'list-all-watched-dirs/format/json/api_key/%%api_key%%'

# URL to tell Airtime we want to add a watched directory
add_watched_dir = 'add-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'

# URL to tell Airtime we want to remove a watched directory
remove_watched_dir = 'remove-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'

# URL to tell Airtime we want to set the storage directory
set_storage_dir = 'set-storage-dir/format/json/api_key/%%api_key%%/path/%%path%%'

# URL to tell Airtime about a file system mount change
update_fs_mount = 'update-file-system-mount/format/json/api_key/%%api_key%%'

# URL to commit multiple updates from media monitor at the same time

reload_metadata_group = 'reload-metadata-group/format/json/api_key/%%api_key%%'

# URL to tell Airtime a watched directory is missing
handle_watched_dir_missing = 'handle-watched-dir-missing/format/json/api_key/%%api_key%%/dir/%%dir%%'

#############################
## Config for Recorder
#############################

# URL to get the schedule of shows set to record
show_schedule_url = 'recorded-shows/format/json/api_key/%%api_key%%'

# URL to upload the recorded show's file to Airtime
upload_file_url = 'upload-file/format/json/api_key/%%api_key%%'

# URL to commit multiple updates from media monitor at the same time

# number of retries to upload file if connection problem
upload_retries = 3

# time to wait between attempts to upload file if connection problem (in seconds)
upload_wait = 60

################################################################################
# Uncomment *one of the sets* of values from the API clients below, and comment
# out all the others.
################################################################################

#############################
## Config for Pypo
#############################

# Schedule export path.
# %%from%% - starting date/time in the form YYYY-MM-DD-hh-mm
# %%to%% - ending date/time in the form YYYY-MM-DD-hh-mm
export_url = 'schedule/api_key/%%api_key%%'

get_media_url = 'get-media/file/%%file%%/api_key/%%api_key%%'

# Update whether a schedule group has begun playing.
update_item_url = 'notify-schedule-group-play/api_key/%%api_key%%/schedule_id/%%schedule_id%%'

# Update whether an audio clip is currently playing.
update_start_playing_url = 'notify-media-item-start-play/api_key/%%api_key%%/media_id/%%media_id%%/'

# URL to tell Airtime we want to get the stream setting
get_stream_setting = 'get-stream-setting/format/json/api_key/%%api_key%%/'

# URL to update liquidsoap status
update_liquidsoap_status = 'update-liquidsoap-status/format/json/api_key/%%api_key%%/msg/%%msg%%/stream_id/%%stream_id%%/boot_time/%%boot_time%%'

# URL to check live stream auth
check_live_stream_auth = 'check-live-stream-auth/format/json/api_key/%%api_key%%/username/%%username%%/password/%%password%%/djtype/%%djtype%%'

# URL to update source status
update_source_status = 'update-source-status/format/json/api_key/%%api_key%%/sourcename/%%sourcename%%/status/%%status%%'

get_bootstrap_info = 'get-bootstrap-info/format/json/api_key/%%api_key%%'

get_files_without_replay_gain = 'get-files-without-replay-gain/api_key/%%api_key%%/dir_id/%%dir_id%%'

update_replay_gain_value = 'update-replay-gain-value/api_key/%%api_key%%'

notify_webstream_data = 'notify-webstream-data/api_key/%%api_key%%/media_id/%%media_id%%/format/json'

notify_liquidsoap_started = 'rabbitmq-do-push/api_key/%%api_key%%/format/json'

get_stream_parameters = 'get-stream-parameters/api_key/%%api_key%%/format/json'

push_stream_stats = 'push-stream-stats/api_key/%%api_key%%/format/json'
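Tying this file back to RequestProvider in api_client.py above: every key whose value contains %%api_key%% becomes a callable request, assembled against host/base_port/base_dir/api_base. A worked example using the values above:

    # Base template: http://<host>:<base_port>/<base_dir>/<api_base>/%%action%%
    # With host='localhost', base_port=80, base_dir='', api_base='api', calling
    #     services.media_setup_url()
    # issues a GET to
    #     http://localhost:80//api/media-monitor-setup/format/json/api_key/I6EUOJM0D1EIGSMZ9T70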
python_apps/media-monitor2/configs/logging.cfg (new file, 32 lines)
@@ -0,0 +1,32 @@
[loggers]
keys= root,notifier,metadata

[handlers]
keys=fileOutHandler

[formatters]
keys=simpleFormatter

[logger_root]
level=DEBUG
handlers=fileOutHandler

[logger_notifier]
level=DEBUG
handlers=fileOutHandler
qualname=notifier

[logger_metadata]
level=DEBUG
handlers=fileOutHandler
qualname=metadata

[handler_fileOutHandler]
class=logging.handlers.RotatingFileHandler
level=DEBUG
formatter=simpleFormatter
args=("/var/log/airtime/media-monitor/media-monitor.log", 'a', 10000000, 5,)

[formatter_simpleFormatter]
format=%(asctime)s %(levelname)s - [%(threadName)s] [%(filename)s : %(funcName)s()] : LINE %(lineno)d - %(message)s
datefmt=
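This is a standard fileConfig-format logging config; a minimal sketch of loading it (setup_logger in media.saas.launcher presumably does the equivalent; the path is illustrative):

    import logging
    import logging.config

    # Parse the INI-style config and install the declared handlers/formatters.
    logging.config.fileConfig('configs/logging.cfg')
    log = logging.getLogger('notifier')  # one of the qualnames declared above
    log.debug('media monitor logging initialized')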
python_apps/media-monitor2/configs/media-monitor.cfg (new file, 31 lines)
@@ -0,0 +1,31 @@
api_client = "airtime"

# where the binary files live
bin_dir = '/usr/lib/airtime/media-monitor'

# where the logging files live
log_dir = '/var/log/airtime/media-monitor'


############################################
# RabbitMQ settings                        #
############################################
rabbitmq_host = 'localhost'
rabbitmq_user = 'guest'
rabbitmq_password = 'guest'
rabbitmq_vhost = '/'

############################################
# Media-Monitor preferences                #
############################################
check_filesystem_events = 5  # how long to queue up events performed on the files themselves.
check_airtime_events = 30    # how long to queue metadata input from airtime.

# MM2 only:
touch_interval = 5
chunking_number = 450
request_max_wait = 3.0
rmq_event_wait = 0.1
logpath = '/var/log/airtime/media-monitor/media-monitor.log'
index_path = '/var/tmp/airtime/media-monitor/last_index'
@@ -16,6 +16,16 @@ def load_definitions():
        t.default(u'0.0')
        t.depends('length')
        t.translate(lambda k: format_length(k['length']))

    with md.metadata('MDATA_KEY_CUE_IN') as t:
        t.default(u'0.0')
        t.depends('cuein')
        t.translate(lambda k: format_length(k['cuein']))

    with md.metadata('MDATA_KEY_CUE_OUT') as t:
        t.default(u'0.0')
        t.depends('cueout')
        t.translate(lambda k: format_length(k['cueout']))

    with md.metadata('MDATA_KEY_MIME') as t:
        t.default(u'')
@@ -7,6 +7,9 @@ from media.monitor.log import Loggable
import media.monitor.pure as mmp
from collections import namedtuple
import mutagen
import subprocess
import json
import logging

class FakeMutagen(dict):
    """

@@ -94,7 +97,6 @@ class MetadataElement(Loggable):
        # If value is present and normalized then we only check if it's
        # normalized or not. We normalize if it's not normalized already


        if self.name in original:
            v = original[self.name]
            if self.__is_normalized(v): return v

@@ -167,6 +169,19 @@ def normalize_mutagen(path):
    md['sample_rate'] = getattr(m.info, 'sample_rate', 0)
    md['mime'] = m.mime[0] if len(m.mime) > 0 else u''
    md['path'] = normpath(path)

    # silence detection (set default cue in and cue out)
    try:
        command = ['silan', '-f', 'JSON', md['path']]
        proc = subprocess.Popen(command, stdout=subprocess.PIPE)
        out = proc.stdout.read()
        info = json.loads(out)
        md['cuein'] = info['sound'][0][0]
        md['cueout'] = info['sound'][-1][1]
    except Exception:
        logger = logging.getLogger()
        logger.info('silan is missing')

    if 'title' not in md: md['title'] = u''
    return md
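For context on the cuein/cueout indexing in the hunk above: the try block assumes silan's JSON output lists non-silent [start, end] ranges under a 'sound' key, roughly like this (the values are illustrative):

    # info = json.loads(out) is expected to look like:
    #     {"sound": [[0.52, 187.30], [190.10, 243.70]]}
    # so that:
    #     md['cuein']  = info['sound'][0][0]   # start of first sound -> 0.52
    #     md['cueout'] = info['sound'][-1][1]  # end of last sound    -> 243.70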
@@ -13,9 +13,8 @@ from media.monitor.log import Loggable
from media.monitor.syncdb import AirtimeDB
from media.monitor.exceptions import DirectoryIsNotListed
from media.monitor.bootstrap import Bootstrapper
from media.monitor.listeners import FileMediator

from api_clients import api_client as apc
from media.saas.thread import apc, user

class AirtimeNotifier(Loggable):
    """

@@ -98,7 +97,7 @@ class AirtimeMessageReceiver(Loggable):
        if (not directory_id) and (not directory):
            raise ValueError("You must provide either directory_id or \
                directory")
        sdb = AirtimeDB(apc.AirtimeApiClient.create_right_config())
        sdb = AirtimeDB(apc())
        if directory : directory = os.path.normpath(directory)
        if directory_id == None : directory_id = sdb.to_id(directory)
        if directory == None : directory = sdb.to_directory(directory_id)

@@ -192,7 +191,7 @@ class AirtimeMessageReceiver(Loggable):
        # request that we'd normally get from pyinotify. But right
        # now the event contractor would take care of this sort of
        # thing anyway so this might not be necessary after all
        FileMediator.ignore(msg['filepath'])
        #user().file_mediator.ignore(msg['filepath'])
        os.unlink(msg['filepath'])
        # Verify deletion:
        if not os.path.exists(msg['filepath']):
@@ -2,6 +2,7 @@ import os
from pydispatch import dispatcher
from media.monitor.events import NewFile, DeleteFile, ModifyFile
from media.monitor.log import Loggable
from media.saas.thread import getsig
import media.monitor.pure as mmp

class Bootstrapper(Loggable):

@@ -16,7 +17,7 @@ class Bootstrapper(Loggable):
        watch_signal - the signals should send events for every file on.
        """
        self.db = db
        self.watch_signal = watch_signal
        self.watch_signal = getsig(watch_signal)

    def flush_all(self, last_ran):
        """
@@ -12,25 +12,21 @@ class MMConfig(object):
        self.cfg = ConfigObj(path)

    def __getitem__(self, key):
        """
        We always return a copy of the config item to prevent callers from
        doing any modifications through the returned objects methods
        """
        """ We always return a copy of the config item to prevent
        callers from doing any modifications through the returned
        objects methods """
        return copy.deepcopy(self.cfg[key])

    def __setitem__(self, key, value):
        """
        We use this method not to allow anybody to mess around with config file
        any settings made should be done through MMConfig's instance methods
        """
        """ We use this method not to allow anybody to mess around with
        config file any settings made should be done through MMConfig's
        instance methods """
        raise ConfigAccessViolation(key)

    def save(self): self.cfg.write()

    def last_ran(self):
        """
        Returns the last time media monitor was ran by looking at the time when
        the file at 'index_path' was modified
        """
        """ Returns the last time media monitor was ran by looking at
        the time when the file at 'index_path' was modified """
        return mmp.last_modified(self.cfg['index_path'])
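A small sketch of the contract MMConfig enforces (ConfigAccessViolation comes from the media-monitor exceptions module; the path is illustrative):

    cfg = MMConfig('configs/media-monitor.cfg')
    n = cfg['chunking_number']         # a deep copy; mutating it can't touch cfg
    try:
        cfg['chunking_number'] = 9000  # all writes are rejected by __setitem__
    except ConfigAccessViolation:
        pass                           # expected: the config is read-only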
@@ -3,52 +3,40 @@ import os
import abc
import re
import media.monitor.pure as mmp
import media.monitor.owners as owners
from media.monitor.pure import LazyProperty
from media.monitor.metadata import Metadata
from media.monitor.log import Loggable
from media.monitor.exceptions import BadSongFile
from media.saas.thread import getsig, user

class PathChannel(object):
    """
    Simple struct to hold a 'signal' string and a related 'path'. Basically
    used as a named tuple
    """
    """ Simple struct to hold a 'signal' string and a related 'path'.
    Basically used as a named tuple """
    def __init__(self, signal, path):
        self.signal = signal
        self.signal = getsig(signal)
        self.path = path

# TODO : Move this to its own file. Also possibly unsingleton it and use it
# as a simple module just like m.m.owners
class EventRegistry(object):
    """
    This class's main use is to keep track of all events with a cookie attribute.
    This is done mainly because some events must be 'morphed' into other events
    because we later detect that they are move events instead of delete events.
    """
    registry = {}
    @staticmethod
    def register(evt): EventRegistry.registry[evt.cookie] = evt
    @staticmethod
    def unregister(evt): del EventRegistry.registry[evt.cookie]
    @staticmethod
    def registered(evt): return evt.cookie in EventRegistry.registry
    @staticmethod
    def matching(evt):
        event = EventRegistry.registry[evt.cookie]
    """ This class's main use is to keep track of all events with a cookie
    attribute. This is done mainly because some events must be 'morphed'
    into other events because we later detect that they are move events
    instead of delete events. """
    def __init__(self):
        self.registry = {}
    def register(self,evt): self.registry[evt.cookie] = evt
    def unregister(self,evt): del self.registry[evt.cookie]
    def registered(self,evt): return evt.cookie in self.registry
    def matching(self,evt):
        event = self.registry[evt.cookie]
        # Want to disallow accessing the same event twice
        EventRegistry.unregister(event)
        self.unregister(event)
        return event
    def __init__(self,*args,**kwargs):
        raise Exception("You cannot instantiate this class. Must only use class \
            methods")
class EventProxy(Loggable):
|
||||
"""
|
||||
A container object for instances of BaseEvent (or it's subclasses) used for
|
||||
event contractor
|
||||
"""
|
||||
""" A container object for instances of BaseEvent (or it's
|
||||
subclasses) used for event contractor """
|
||||
def __init__(self, orig_evt):
|
||||
self.orig_evt = orig_evt
|
||||
self.evt = orig_evt
|
||||
|
@ -81,12 +69,10 @@ class EventProxy(Loggable):
|
|||
|
||||
|
||||
class HasMetaData(object):
|
||||
"""
|
||||
Any class that inherits from this class gains the metadata attribute that
|
||||
loads metadata from the class's 'path' attribute. This is done lazily so
|
||||
there is no performance penalty to inheriting from this and subsequent
|
||||
calls to metadata are cached
|
||||
"""
|
||||
""" Any class that inherits from this class gains the metadata
|
||||
attribute that loads metadata from the class's 'path' attribute.
|
||||
This is done lazily so there is no performance penalty to inheriting
|
||||
from this and subsequent calls to metadata are cached """
|
||||
__metaclass__ = abc.ABCMeta
|
||||
@LazyProperty
|
||||
def metadata(self): return Metadata(self.path)
|
||||
|
@ -101,7 +87,7 @@ class BaseEvent(Loggable):
|
|||
self._raw_event = raw_event
|
||||
self.path = os.path.normpath(raw_event.pathname)
|
||||
else: self.path = raw_event
|
||||
self.owner = owners.get_owner(self.path)
|
||||
self.owner = user().owner.get_owner(self.path)
|
||||
owner_re = re.search('stor/imported/(?P<owner>\d+)/', self.path)
|
||||
if owner_re:
|
||||
self.logger.info("matched path: %s" % self.path)
|
||||
|
@ -113,11 +99,9 @@ class BaseEvent(Loggable):
|
|||
|
||||
# TODO : delete this method later
|
||||
def reset_hook(self):
|
||||
"""
|
||||
Resets the hook that is called after an event is packed. Before
|
||||
resetting the hook we execute it to make sure that whatever cleanup
|
||||
operations were queued are executed.
|
||||
"""
|
||||
""" Resets the hook that is called after an event is packed.
|
||||
Before resetting the hook we execute it to make sure that
|
||||
whatever cleanup operations were queued are executed. """
|
||||
self._pack_hook()
|
||||
self._pack_hook = lambda: None
|
||||
|
||||
|
@ -131,10 +115,8 @@ class BaseEvent(Loggable):
|
|||
|
||||
# TODO : delete this method later
|
||||
def add_safe_pack_hook(self,k):
|
||||
"""
|
||||
adds a callable object (function) that will be called after the event
|
||||
has been "safe_packed"
|
||||
"""
|
||||
""" adds a callable object (function) that will be called after
|
||||
the event has been "safe_packed" """
|
||||
self._pack_hook = k
|
||||
|
||||
def proxify(self):
|
||||
|
@ -142,17 +124,15 @@ class BaseEvent(Loggable):
|
|||
|
||||
# As opposed to unsafe_pack...
|
||||
def safe_pack(self):
|
||||
"""
|
||||
returns exceptions instead of throwing them to be consistent with
|
||||
events that must catch their own BadSongFile exceptions since generate
|
||||
a set of exceptions instead of a single one
|
||||
"""
|
||||
""" returns exceptions instead of throwing them to be consistent
|
||||
with events that must catch their own BadSongFile exceptions
|
||||
since generate a set of exceptions instead of a single one """
|
||||
try:
|
||||
self._pack_hook()
|
||||
ret = self.pack()
|
||||
# Remove owner of this file only after packing. Otherwise packing
|
||||
# will not serialize the owner correctly into the airtime request
|
||||
owners.remove_file_owner(self.path)
|
||||
user().owner.remove_file_owner(self.path)
|
||||
return ret
|
||||
except BadSongFile as e: return [e]
|
||||
except Exception as e:
|
||||
|
@ -171,42 +151,33 @@ class BaseEvent(Loggable):
|
|||
return self
|
||||
|
||||
def assign_owner(self,req):
|
||||
"""
|
||||
Packs self.owner to req if the owner is valid. I.e. it's not -1. This
|
||||
method is used by various events that would like to pass owner as a
|
||||
parameter. NewFile for example.
|
||||
"""
|
||||
""" Packs self.owner to req if the owner is valid. I.e. it's not
|
||||
-1. This method is used by various events that would like to
|
||||
pass owner as a parameter. NewFile for example. """
|
||||
if self.owner != -1: req['MDATA_KEY_OWNER_ID'] = self.owner
|
||||
|
||||
class FakePyinotify(object):
|
||||
"""
|
||||
sometimes we must create our own pyinotify like objects to
|
||||
""" sometimes we must create our own pyinotify like objects to
|
||||
instantiate objects from the classes below whenever we want to turn
|
||||
a single event into multiple events
|
||||
"""
|
||||
a single event into multiple events """
|
||||
def __init__(self, path): self.pathname = path
|
||||
|
||||
class OrganizeFile(BaseEvent, HasMetaData):
|
||||
"""
|
||||
The only kind of event that does support the pack protocol. It's used
|
||||
internally with mediamonitor to move files in the organize directory.
|
||||
"""
|
||||
""" The only kind of event that does support the pack protocol. It's
|
||||
used internally with mediamonitor to move files in the organize
|
||||
directory. """
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(OrganizeFile, self).__init__(*args, **kwargs)
|
||||
def pack(self):
|
||||
raise AttributeError("You can't send organize events to airtime!!!")
|
||||
|
||||
class NewFile(BaseEvent, HasMetaData):
|
||||
"""
|
||||
NewFile events are the only events that contain MDATA_KEY_OWNER_ID metadata
|
||||
in them.
|
||||
"""
|
||||
""" NewFile events are the only events that contain
|
||||
MDATA_KEY_OWNER_ID metadata in them. """
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(NewFile, self).__init__(*args, **kwargs)
|
||||
def pack(self):
|
||||
"""
|
||||
packs turns an event into a media monitor request
|
||||
"""
|
||||
""" packs turns an event into a media monitor request """
|
||||
req_dict = self.metadata.extract()
|
||||
req_dict['mode'] = u'create'
|
||||
req_dict['is_record'] = self.metadata.is_recorded()
|
||||
|
@ -215,11 +186,9 @@ class NewFile(BaseEvent, HasMetaData):
|
|||
return [req_dict]
|
||||
|
||||
class DeleteFile(BaseEvent):
|
||||
"""
|
||||
DeleteFile event only contains the path to be deleted. No other metadata
|
||||
can be or is included. (This is because this event is fired after the
|
||||
deletion occurs).
|
||||
"""
|
||||
""" DeleteFile event only contains the path to be deleted. No other
|
||||
metadata can be or is included. (This is because this event is fired
|
||||
after the deletion occurs). """
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(DeleteFile, self).__init__(*args, **kwargs)
|
||||
def pack(self):
|
||||
|
@ -229,9 +198,7 @@ class DeleteFile(BaseEvent):
|
|||
return [req_dict]
|
||||
|
||||
class MoveFile(BaseEvent, HasMetaData):
|
||||
"""
|
||||
Path argument should be the new path of the file that was moved
|
||||
"""
|
||||
""" Path argument should be the new path of the file that was moved """
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(MoveFile, self).__init__(*args, **kwargs)
|
||||
def old_path(self):
|
||||
|
@ -255,10 +222,8 @@ class ModifyFile(BaseEvent, HasMetaData):
|
|||
return [req_dict]
|
||||
|
||||
def map_events(directory, constructor):
|
||||
"""
|
||||
Walks 'directory' and creates an event using 'constructor'. Returns a list
|
||||
of the constructed events.
|
||||
"""
|
||||
""" Walks 'directory' and creates an event using 'constructor'.
|
||||
Returns a list of the constructed events. """
|
||||
# -unknown-path should not appear in the path here but more testing
|
||||
# might be necessary
|
||||
for f in mmp.walk_supported(directory, clean_empties=False):
|
||||
|
@ -267,30 +232,25 @@ def map_events(directory, constructor):
|
|||
except BadSongFile as e: yield e
|
||||
|
||||
class DeleteDir(BaseEvent):
|
||||
"""
|
||||
A DeleteDir event unfolds itself into a list of DeleteFile events for every
|
||||
file in the directory.
|
||||
"""
|
||||
""" A DeleteDir event unfolds itself into a list of DeleteFile
|
||||
events for every file in the directory. """
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(DeleteDir, self).__init__(*args, **kwargs)
|
||||
def pack(self):
|
||||
return map_events( self.path, DeleteFile )
|
||||
|
||||
class MoveDir(BaseEvent):
|
||||
"""
|
||||
A MoveDir event unfolds itself into a list of MoveFile events for every
|
||||
file in the directory.
|
||||
"""
|
||||
""" A MoveDir event unfolds itself into a list of MoveFile events
|
||||
for every file in the directory. """
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(MoveDir, self).__init__(*args, **kwargs)
|
||||
def pack(self):
|
||||
return map_events( self.path, MoveFile )
|
||||
|
||||
class DeleteDirWatch(BaseEvent):
|
||||
"""
|
||||
Deleting a watched directory is different from deleting any other
|
||||
directory. Hence we must have a separate event to handle this case
|
||||
"""
|
||||
""" Deleting a watched directory is different from deleting any
|
||||
other directory. Hence we must have a separate event to handle this
|
||||
case """
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(DeleteDirWatch, self).__init__(*args, **kwargs)
|
||||
def pack(self):
|
||||
|
|
|
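Review note: the heart of this hunk is EventRegistry moving from class-level state behind @staticmethod to ordinary instance state, so each Airtime instance can own its own registry (see media/saas/airtimeinstance.py below). A hedged sketch of the cookie-matching flow it implements; FakeEvent is invented here, pyinotify supplies real events whose IN_MOVED_FROM/IN_MOVED_TO pairs share a cookie:

    class FakeEvent(object):
        def __init__(self, cookie): self.cookie = cookie

    registry = EventRegistry()
    registry.register(FakeEvent(42))            # the IN_MOVED_FROM half
    moved_to = FakeEvent(42)                    # the IN_MOVED_TO half
    if registry.registered(moved_to):
        original = registry.matching(moved_to)  # returns and unregisters it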
python_apps/media-monitor2/media/monitor/exceptions.py

@@ -23,7 +23,7 @@ class FailedToObtainLocale(Exception):

 class CouldNotCreateIndexFile(Exception):
     """exception whenever index file cannot be created"""
-    def __init__(self, path, cause):
+    def __init__(self, path, cause=None):
         self.path = path
         self.cause = cause
     def __str__(self): return "Failed to create touch file '%s'" % self.path
python_apps/media-monitor2/media/monitor/handler.py

@@ -3,6 +3,7 @@ from pydispatch import dispatcher
 import abc

 from media.monitor.log import Loggable
+from media.saas.thread import getsig
 import media.monitor.pure as mmp

 # Defines the handle interface

@@ -21,10 +22,10 @@ class ReportHandler(Handles):
     """
     __metaclass__ = abc.ABCMeta
     def __init__(self, signal, weak=False):
-        self.signal = signal
-        self.report_signal = "badfile"
+        self.signal = getsig(signal)
+        self.report_signal = getsig("badfile")
         def dummy(sender, event): self.handle(sender,event)
-        dispatcher.connect(dummy, signal=signal, sender=dispatcher.Any,
+        dispatcher.connect(dummy, signal=self.signal, sender=dispatcher.Any,
                 weak=weak)

     def report_problem_file(self, event, exception=None):

@@ -38,7 +39,7 @@ class ProblemFileHandler(Handles, Loggable):
     """
     def __init__(self, channel, **kwargs):
         self.channel = channel
-        self.signal = self.channel.signal
+        self.signal = getsig(self.channel.signal)
         self.problem_dir = self.channel.path
         def dummy(sender, event, exception):
             self.handle(sender, event, exception)
python_apps/media-monitor2/media/monitor/listeners.py

@@ -6,38 +6,33 @@ from functools import wraps
 import media.monitor.pure as mmp
 from media.monitor.pure import IncludeOnly
 from media.monitor.events import OrganizeFile, NewFile, MoveFile, DeleteFile, \
-                                 DeleteDir, EventRegistry, MoveDir,\
+                                 DeleteDir, MoveDir,\
                                  DeleteDirWatch
-from media.monitor.log import Loggable, get_logger
-
+from media.monitor.log import Loggable
+from media.saas.thread import getsig, user
 # Note: Because of the way classes that inherit from pyinotify.ProcessEvent
 # interact with constructors. you should only instantiate objects from them
 # using keyword arguments. For example:
 # OrganizeListener('watch_signal') <= wrong
 # OrganizeListener(signal='watch_signal') <= right

-class FileMediator(object):
-    """
-    FileMediator is used an intermediate mechanism that filters out certain
-    events.
-    """
-    ignored_set = set([]) # for paths only
-    logger = get_logger()
-
-    @staticmethod
-    def is_ignored(path): return path in FileMediator.ignored_set
-    @staticmethod
-    def ignore(path): FileMediator.ignored_set.add(path)
-    @staticmethod
-    def unignore(path): FileMediator.ignored_set.remove(path)
+class FileMediator(Loggable):
+    # TODO : this class is not actually used. remove all references to it
+    # everywhere (including tests).
+    """ FileMediator is used an intermediate mechanism that filters out
+    certain events. """
+    def __init__(self)        : self.ignored_set = set([]) # for paths only
+    def is_ignored(self,path) : return path in self.ignored_set
+    def ignore(self, path)    : self.ignored_set.add(path)
+    def unignore(self, path)  : self.ignored_set.remove(path)

 def mediate_ignored(fn):
     @wraps(fn)
     def wrapped(self, event, *args,**kwargs):
         event.pathname = unicode(event.pathname, "utf-8")
-        if FileMediator.is_ignored(event.pathname):
-            FileMediator.logger.info("Ignoring: '%s' (once)" % event.pathname)
-            FileMediator.unignore(event.pathname)
+        if user().file_mediator.is_ignored(event.pathname):
+            user().file_mediator.logger.info("Ignoring: '%s' (once)" % event.pathname)
+            user().file_mediator.unignore(event.pathname)
         else: return fn(self, event, *args, **kwargs)
     return wrapped

@@ -45,7 +40,7 @@ class BaseListener(object):
     def __str__(self):
         return "Listener(%s), Signal(%s)" % \
             (self.__class__.__name__, self.signal)
-    def my_init(self, signal): self.signal = signal
+    def my_init(self, signal): self.signal = getsig(signal)

 class OrganizeListener(BaseListener, pyinotify.ProcessEvent, Loggable):
     def process_IN_CLOSE_WRITE(self, event):

@@ -66,25 +61,25 @@ class OrganizeListener(BaseListener, pyinotify.ProcessEvent, Loggable):
             self.logger.info("Bootstrapping: File in 'organize' directory: \
                     '%s'" % f)
             if not mmp.file_locked(f):
-                dispatcher.send(signal=self.signal, sender=self,
+                dispatcher.send(signal=getsig(self.signal), sender=self,
                     event=OrganizeFile(f))
             flushed += 1
         #self.logger.info("Flushed organized directory with %d files" % flushed)

     @IncludeOnly(mmp.supported_extensions)
     def process_to_organize(self, event):
-        dispatcher.send(signal=self.signal, sender=self,
+        dispatcher.send(signal=getsig(self.signal), sender=self,
                 event=OrganizeFile(event))

 class StoreWatchListener(BaseListener, Loggable, pyinotify.ProcessEvent):
     def process_IN_CLOSE_WRITE(self, event):
         self.process_create(event)
     def process_IN_MOVED_TO(self, event):
-        if EventRegistry.registered(event):
+        if user().event_registry.registered(event):
             # We need this trick because we don't how to "expand" dir events
             # into file events until we know for sure if we deleted or moved
             morph = MoveDir(event) if event.dir else MoveFile(event)
-            EventRegistry.matching(event).morph_into(morph)
+            user().event_registry.matching(event).morph_into(morph)
         else: self.process_create(event)
     def process_IN_MOVED_FROM(self, event):
         # Is either delete dir or delete file

@@ -92,7 +87,7 @@ class StoreWatchListener(BaseListener, Loggable, pyinotify.ProcessEvent):
         # evt can be none whenever event points that a file that would be
         # ignored by @IncludeOnly
         if hasattr(event,'cookie') and (evt != None):
-            EventRegistry.register(evt)
+            user().event_registry.register(evt)
     def process_IN_DELETE(self,event): self.process_delete(event)
     def process_IN_MOVE_SELF(self, event):
         if '-unknown-path' in event.pathname:

@@ -101,14 +96,14 @@ class StoreWatchListener(BaseListener, Loggable, pyinotify.ProcessEvent):

     def delete_watch_dir(self, event):
         e = DeleteDirWatch(event)
-        dispatcher.send(signal='watch_move', sender=self, event=e)
-        dispatcher.send(signal=self.signal, sender=self, event=e)
+        dispatcher.send(signal=getsig('watch_move'), sender=self, event=e)
+        dispatcher.send(signal=getsig(self.signal), sender=self, event=e)

     @mediate_ignored
     @IncludeOnly(mmp.supported_extensions)
     def process_create(self, event):
         evt = NewFile(event)
-        dispatcher.send(signal=self.signal, sender=self, event=evt)
+        dispatcher.send(signal=getsig(self.signal), sender=self, event=evt)
         return evt

     @mediate_ignored

@@ -117,13 +112,13 @@ class StoreWatchListener(BaseListener, Loggable, pyinotify.ProcessEvent):
         evt = None
         if event.dir : evt = DeleteDir(event)
         else         : evt = DeleteFile(event)
-        dispatcher.send(signal=self.signal, sender=self, event=evt)
+        dispatcher.send(signal=getsig(self.signal), sender=self, event=evt)
         return evt

     @mediate_ignored
     def process_delete_dir(self, event):
         evt = DeleteDir(event)
-        dispatcher.send(signal=self.signal, sender=self, event=evt)
+        dispatcher.send(signal=getsig(self.signal), sender=self, event=evt)
         return evt

     def flush_events(self, path):

@@ -138,6 +133,6 @@ class StoreWatchListener(BaseListener, Loggable, pyinotify.ProcessEvent):
         added = 0
         for f in mmp.walk_supported(path, clean_empties=False):
             added += 1
-            dispatcher.send( signal=self.signal, sender=self, event=NewFile(f) )
+            dispatcher.send( signal=getsig(self.signal), sender=self, event=NewFile(f) )
         self.logger.info( "Flushed watch directory. added = %d" % added )
python_apps/media-monitor2/media/monitor/manager.py

@@ -1,5 +1,4 @@
 import pyinotify
-import threading
 import time
 from pydispatch import dispatcher

@@ -9,19 +8,19 @@ from media.monitor.log import Loggable
 from media.monitor.listeners import StoreWatchListener, OrganizeListener
 from media.monitor.handler import ProblemFileHandler
 from media.monitor.organizer import Organizer
+from media.saas.thread import InstanceInheritingThread, getsig
 import media.monitor.pure as mmp


-class ManagerTimeout(threading.Thread,Loggable):
-    """
-    The purpose of this class is to flush the organize directory every 3
-    secnods. This used to be just a work around for cc-4235 but recently
-    became a permanent solution because it's "cheap" and reliable
-    """
+class ManagerTimeout(InstanceInheritingThread,Loggable):
+    """ The purpose of this class is to flush the organize directory
+    every 3 secnods. This used to be just a work around for cc-4235
+    but recently became a permanent solution because it's "cheap" and
+    reliable """
     def __init__(self, manager, interval=1.5):
         # TODO : interval should be read from config and passed here instead
         # of just using the hard coded value
-        threading.Thread.__init__(self)
+        super(ManagerTimeout, self).__init__()
         self.manager = manager
         self.interval = interval
     def run(self):

@@ -30,19 +29,17 @@ class ManagerTimeout(threading.Thread,Loggable):
         self.manager.flush_organize()

 class Manager(Loggable):
-    """
-    An abstraction over media monitors core pyinotify functions. These
-    include adding watched,store, organize directories, etc. Basically
-    composes over WatchManager from pyinotify
-    """
+    # NOTE : this massive class is a source of many problems of mm and
+    # is in dire need of breaking up and refactoring.
+    """ An abstraction over media monitors core pyinotify functions.
+    These include adding watched,store, organize directories, etc.
+    Basically composes over WatchManager from pyinotify """
     def __init__(self):
         self.wm = pyinotify.WatchManager()
         # These two instance variables are assumed to be constant
-        self.watch_channel = 'watch'
-        self.organize_channel = 'organize'
+        self.watch_channel = getsig('watch')
+        self.organize_channel = getsig('organize')
         self.watch_listener = StoreWatchListener(signal = self.watch_channel)
-        # TODO : change this to a weak ref
+        # TODO : get rid of this hack once cc-4235 is fixed
         self.__timeout_thread = ManagerTimeout(self)
         self.__timeout_thread.daemon = True
         self.__timeout_thread.start()

@@ -57,11 +54,11 @@ class Manager(Loggable):
                     self.organize_channel),
         }
         def dummy(sender, event): self.watch_move( event.path, sender=sender )
-        dispatcher.connect(dummy, signal='watch_move', sender=dispatcher.Any,
-                weak=False)
+        dispatcher.connect(dummy, signal=getsig('watch_move'),
+                sender=dispatcher.Any, weak=False)
         def subwatch_add(sender, directory):
             self.__add_watch(directory, self.watch_listener)
-        dispatcher.connect(subwatch_add, signal='add_subwatch',
+        dispatcher.connect(subwatch_add, signal=getsig('add_subwatch'),
                 sender=dispatcher.Any, weak=False)
         # A private mapping path => watch_descriptor
         # we use the same dictionary for organize, watch, store wd events.

@@ -76,23 +73,19 @@ class Manager(Loggable):
     # through dedicated handler objects. Because we must have access to a
     # manager instance. Hence we must slightly break encapsulation.
     def watch_move(self, watch_dir, sender=None):
-        """
-        handle 'watch move' events directly sent from listener
-        """
+        """ handle 'watch move' events directly sent from listener """
         self.logger.info("Watch dir '%s' has been renamed (hence removed)" %
                 watch_dir)
         self.remove_watch_directory(normpath(watch_dir))

     def watch_signal(self):
-        """
-        Return the signal string our watch_listener is reading events from
-        """
-        return self.watch_listener.signal
+        """ Return the signal string our watch_listener is reading
+        events from """
+        return getsig(self.watch_listener.signal)

     def __remove_watch(self,path):
-        """
-        Remove path from being watched (first will check if 'path' is watched)
-        """
+        """ Remove path from being watched (first will check if 'path'
+        is watched) """
         # only delete if dir is actually being watched
         if path in self.__wd_path:
             wd = self.__wd_path[path]

@@ -100,10 +93,8 @@ class Manager(Loggable):
             del(self.__wd_path[path])

     def __add_watch(self,path,listener):
-        """
-        Start watching 'path' using 'listener'. First will check if directory
-        is being watched before adding another watch
-        """
+        """ Start watching 'path' using 'listener'. First will check if
+        directory is being watched before adding another watch """

         self.logger.info("Attempting to add listener to path '%s'" % path)
         self.logger.info( 'Listener: %s' % str(listener) )

@@ -114,9 +105,8 @@ class Manager(Loggable):
         if wd: self.__wd_path[path] = wd.values()[0]

     def __create_organizer(self, target_path, recorded_path):
-        """
-        creates an organizer at new destination path or modifies the old one
-        """
+        """ creates an organizer at new destination path or modifies the
+        old one """
         # TODO : find a proper fix for the following hack
         # We avoid creating new instances of organize because of the way
         # it interacts with pydispatch. We must be careful to never have

@@ -134,23 +124,18 @@ class Manager(Loggable):
                 recorded_path=recorded_path)

     def get_problem_files_path(self):
-        """
-        returns the path where problem files should go
-        """
+        """ returns the path where problem files should go """
         return self.organize['problem_files_path']

     def set_problem_files_path(self, new_path):
-        """
-        Set the path where problem files should go
-        """
+        """ Set the path where problem files should go """
         self.organize['problem_files_path'] = new_path
         self.organize['problem_handler'] = \
-            ProblemFileHandler( PathChannel(signal='badfile',path=new_path) )
+            ProblemFileHandler( PathChannel(signal=getsig('badfile'),
+                path=new_path) )

     def get_recorded_path(self):
-        """
-        returns the path of the recorded directory
-        """
+        """ returns the path of the recorded directory """
         return self.organize['recorded_path']

     def set_recorded_path(self, new_path):

@@ -160,17 +145,14 @@ class Manager(Loggable):
         self.__add_watch(new_path, self.watch_listener)

     def get_organize_path(self):
-        """
-        returns the current path that is being watched for organization
-        """
+        """ returns the current path that is being watched for
+        organization """
         return self.organize['organize_path']

     def set_organize_path(self, new_path):
-        """
-        sets the organize path to be new_path. Under the current scheme there
-        is only one organize path but there is no reason why more cannot be
-        supported
-        """
+        """ sets the organize path to be new_path. Under the current
+        scheme there is only one organize path but there is no reason
+        why more cannot be supported """
         # if we are already organizing a particular directory we remove the
         # watch from it first before organizing another directory
         self.__remove_watch(self.organize['organize_path'])

@@ -188,19 +170,15 @@ class Manager(Loggable):
         return self.organize['imported_path']

     def set_imported_path(self,new_path):
-        """
-        set the directory where organized files go to.
-        """
+        """ set the directory where organized files go to. """
         self.__remove_watch(self.organize['imported_path'])
         self.organize['imported_path'] = new_path
         self.__create_organizer( new_path, self.organize['recorded_path'])
         self.__add_watch(new_path, self.watch_listener)

     def change_storage_root(self, store):
-        """
-        hooks up all the directories for you. Problem, recorded, imported,
-        organize.
-        """
+        """ hooks up all the directories for you. Problem, recorded,
+        imported, organize. """
         store_paths = mmp.expand_storage(store)
         # First attempt to make sure that all paths exist before adding any
         # watches

@@ -217,18 +195,14 @@ class Manager(Loggable):
             mmp.create_dir(p)

     def has_watch(self, path):
-        """
-        returns true if the path is being watched or not. Any kind of watch:
-        organize, store, watched.
-        """
+        """ returns true if the path is being watched or not. Any kind
+        of watch: organize, store, watched. """
         return path in self.__wd_path

     def add_watch_directory(self, new_dir):
-        """
-        adds a directory to be "watched". "watched" directories are
+        """ adds a directory to be "watched". "watched" directories are
         those that are being monitored by media monitor for airtime in
-        this context and not directories pyinotify calls watched
-        """
+        this context and not directories pyinotify calls watched """
         if self.has_watch(new_dir):
             self.logger.info("Cannot add '%s' to watched directories. It's \
                     already being watched" % new_dir)

@@ -237,9 +211,8 @@ class Manager(Loggable):
         self.__add_watch(new_dir, self.watch_listener)

     def remove_watch_directory(self, watch_dir):
-        """
-        removes a directory from being "watched". Undoes add_watch_directory
-        """
+        """ removes a directory from being "watched". Undoes
+        add_watch_directory """
         if self.has_watch(watch_dir):
             self.logger.info("Removing watched directory: '%s'", watch_dir)
             self.__remove_watch(watch_dir)

@@ -250,9 +223,14 @@ class Manager(Loggable):
         self.logger.info( self.__wd_path )

     def loop(self):
-        """
-        block until we receive pyinotify events
-        """
+        """ block until we receive pyinotify events """
         notifier = pyinotify.Notifier(self.wm)
         notifier.coalesce_events()
         notifier.loop()
+        #notifier = pyinotify.ThreadedNotifier(self.wm, read_freq=1)
+        #notifier.coalesce_events()
+        #notifier.start()
+        #return notifier
+        #import asyncore
+        #notifier = pyinotify.AsyncNotifier(self.wm)
+        #asyncore.loop()
python_apps/media-monitor2/media/monitor/metadata.py

@@ -45,6 +45,8 @@ airtime2mutagen = {
     "MDATA_KEY_URL"       : "website",
     "MDATA_KEY_ISRC"      : "isrc",
     "MDATA_KEY_COPYRIGHT" : "copyright",
+    "MDATA_KEY_CUE_IN"    : "cuein",
+    "MDATA_KEY_CUE_OUT"   : "cueout",
 }
python_apps/media-monitor2/media/monitor/organizer.py

@@ -1,12 +1,12 @@
 # -*- coding: utf-8 -*-
 import media.monitor.pure as mmp
-import media.monitor.owners as owners
 from media.monitor.handler import ReportHandler
 from media.monitor.log import Loggable
 from media.monitor.exceptions import BadSongFile
 from media.monitor.events import OrganizeFile
 from pydispatch import dispatcher
 from os.path import dirname
+from media.saas.thread import getsig, user
 import os.path

 class Organizer(ReportHandler,Loggable):

@@ -36,7 +36,7 @@ class Organizer(ReportHandler,Loggable):
         self.channel = channel
         self.target_path = target_path
         self.recorded_path = recorded_path
-        super(Organizer, self).__init__(signal=self.channel, weak=False)
+        super(Organizer, self).__init__(signal=getsig(self.channel), weak=False)

     def handle(self, sender, event):
         """ Intercept events where a new file has been added to the

@@ -63,7 +63,7 @@ class Organizer(ReportHandler,Loggable):
         def new_dir_watch(d):
             # TODO : rewrite as return lambda : dispatcher.send(...
             def cb():
-                dispatcher.send(signal="add_subwatch", sender=self,
+                dispatcher.send(signal=getsig("add_subwatch"), sender=self,
                         directory=d)
             return cb

@@ -74,7 +74,7 @@ class Organizer(ReportHandler,Loggable):
             # backwards way is bewcause we are unable to encode the owner id
             # into the file itself so that the StoreWatchListener listener can
             # detect it from the file
-            owners.add_file_owner(new_path, owner_id )
+            user().owner.add_file_owner(new_path, owner_id )

             self.logger.info('Organized: "%s" into "%s"' %
                     (event.path, new_path))
python_apps/media-monitor2/media/monitor/owners.py

@@ -1,44 +1,40 @@
 # -*- coding: utf-8 -*-
-from media.monitor.log import get_logger
-log = get_logger()
-# hash: 'filepath' => owner_id
-owners = {}
+from media.monitor.log import Loggable

-def reset_owners():
-    """ Wipes out all file => owner associations """
-    global owners
-    owners = {}
+class Owner(Loggable):
+    def __init__(self):
+        # hash: 'filepath' => owner_id
+        self.owners = {}

+    def get_owner(self,f):
+        """ Get the owner id of the file 'f' """
+        o = self.owners[f] if f in self.owners else -1
+        self.logger.info("Received owner for %s. Owner: %s" % (f, o))
+        return o

-def get_owner(f):
-    """ Get the owner id of the file 'f' """
-    o = owners[f] if f in owners else -1
-    log.info("Received owner for %s. Owner: %s" % (f, o))
-    return o
-
-def add_file_owner(f,owner):
-    """ Associate file f with owner. If owner is -1 then do we will not record
-    it because -1 means there is no owner. Returns True if f is being stored
-    after the function. False otherwise. """
-    if owner == -1: return False
-    if f in owners:
-        if owner != owners[f]: # check for fishiness
-            log.info("Warning ownership of file '%s' changed from '%d' to '%d'"
-                    % (f, owners[f], owner))
-        else: return True
-    owners[f] = owner
-    return True
-
-def has_owner(f):
-    """ True if f is owned by somebody. False otherwise. """
-    return f in owners
-
-def remove_file_owner(f):
-    """ Try and delete any association made with file f. Returns true if
-    the the association was actually deleted. False otherwise. """
-    if f in owners:
-        del owners[f]
+    def add_file_owner(self,f,owner):
+        """ Associate file f with owner. If owner is -1 then do we will not record
+        it because -1 means there is no owner. Returns True if f is being stored
+        after the function. False otherwise. """
+        if owner == -1: return False
+        if f in self.owners:
+            if owner != self.owners[f]: # check for fishiness
+                self.logger.info("Warning ownership of file '%s' changed from '%d' to '%d'"
+                        % (f, self.owners[f], owner))
+            else: return True
+        self.owners[f] = owner
+        return True
+
+    def has_owner(self,f):
+        """ True if f is owned by somebody. False otherwise. """
+        return f in self.owners
+
+    def remove_file_owner(self,f):
+        """ Try and delete any association made with file f. Returns true if
+        the the association was actually deleted. False otherwise. """
+        if f in self.owners:
+            del self.owners[f]
         return True
     else: return False
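Review note: the module-level functions become methods on Owner, so ownership state is now per instance instead of process-global. A small usage sketch assuming only the methods in the hunk above (paths and ids are invented; self.logger comes from the Loggable mixin used throughout media monitor):

    owner = Owner()
    owner.add_file_owner('/srv/stor/imported/7/song.mp3', 7)   # True
    owner.get_owner('/srv/stor/imported/7/song.mp3')           # 7
    owner.get_owner('/srv/stor/unknown.mp3')                   # -1, no owner recorded
    owner.remove_file_owner('/srv/stor/imported/7/song.mp3')   # True, association gone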
python_apps/media-monitor2/media/monitor/pure.py

@@ -6,9 +6,10 @@ import os
 import math
 import wave
 import contextlib
-import shutil
+import shutil, pipes
 import re
 import sys
 import stat
 import hashlib
 import locale
 import operator as op

@@ -411,17 +412,26 @@ def owner_id(original_path):
 def file_playable(pathname):
     """ Returns True if 'pathname' is playable by liquidsoap. False
     otherwise. """
+
+    return True
     #remove all write permissions. This is due to stupid taglib library bug
     #where all files are opened in write mode. The only way around this is to
     #modify the file permissions
     os.chmod(pathname, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
+
     # when there is an single apostrophe inside of a string quoted by
     # apostrophes, we can only escape it by replace that apostrophe with
     # '\''. This breaks the string into two, and inserts an escaped
-    # single quote in between them. We run the command as pypo because
-    # otherwise the target file is opened with write permissions, and
-    # this causes an inotify ON_CLOSE_WRITE event to be fired :/
+    # single quote in between them.
     command = ("airtime-liquidsoap -c 'output.dummy" + \
             "(audio_to_stereo(single(\"%s\")))' > /dev/null 2>&1") % \
             pathname.replace("'", "'\\''")
-    return True
     return_code = subprocess.call(command, shell=True)
+
     #change/restore permissions to acceptable
     os.chmod(pathname, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | \
             stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
     return (return_code == 0)

@@ -460,7 +470,7 @@ def format_length(mutagen_length):
     m = int(math.floor(t / 60))
     s = t % 60
     # will be ss.uuu
-    s = str(s)
+    s = str('{0:f}'.format(s))
     seconds = s.split(".")
     s = seconds[0]
     # have a maximum of 6 subseconds.
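Review note: the hunk shortens the quoting comment but keeps the manual '\'' escape, even though it also adds import pipes at the top of the file; pipes.quote covers the same single-apostrophe case. A hedged sketch of the two spellings (the filename is invented):

    import pipes
    path = "it's a song.mp3"
    # manual escape used above: close the quote, emit an escaped ', reopen the quote
    manual = "'%s'" % path.replace("'", "'\\''")
    # library spelling; the quoting style differs, but the shell sees the same word
    safe = pipes.quote(path)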
python_apps/media-monitor2/media/monitor/request.py

@@ -1,14 +1,12 @@
 # -*- coding: utf-8 -*-

-import threading
-
 from media.monitor.exceptions import BadSongFile
 from media.monitor.log import Loggable
 import api_clients.api_client as ac
+from media.saas.thread import apc, InstanceInheritingThread

-class ThreadedRequestSync(threading.Thread, Loggable):
+class ThreadedRequestSync(InstanceInheritingThread, Loggable):
     def __init__(self, rs):
-        threading.Thread.__init__(self)
+        super(ThreadedRequestSync, self).__init__()
         self.rs = rs
         self.daemon = True
         self.start()

@@ -22,7 +20,7 @@ class RequestSync(Loggable):
     for some number of times """
     @classmethod
    def create_with_api_client(cls, watcher, requests):
-        apiclient = ac.AirtimeApiClient.create_right_config()
+        apiclient = apc()
         self = cls(watcher, requests, apiclient)
         return self
python_apps/media-monitor2/media/monitor/syncdb.py

@@ -2,7 +2,8 @@
 import os
 from media.monitor.log import Loggable
 from media.monitor.exceptions import NoDirectoryInAirtime
-from os.path import normpath
+from media.saas.thread import user
+from os.path import normpath, join
 import media.monitor.pure as mmp

 class AirtimeDB(Loggable):

@@ -11,17 +12,20 @@ class AirtimeDB(Loggable):
         if reload_now: self.reload_directories()

     def reload_directories(self):
-        """
-        this is the 'real' constructor, should be called if you ever want the
-        class reinitialized. there's not much point to doing it yourself
-        however, you should just create a new AirtimeDB instance.
-        """
+        """ this is the 'real' constructor, should be called if you ever
+        want the class reinitialized. there's not much point to doing
+        it yourself however, you should just create a new AirtimeDB
+        instance. """
+
+        saas = user().root_path

         # dirs_setup is a dict with keys:
         # u'watched_dirs' and u'stor' which point to lists of corresponding
         # dirs
         dirs_setup = self.apc.setup_media_monitor()
-        dirs_setup[u'stor'] = normpath( dirs_setup[u'stor'] )
-        dirs_setup[u'watched_dirs'] = map(normpath, dirs_setup[u'watched_dirs'])
+        dirs_setup[u'stor'] = normpath( join(saas, dirs_setup[u'stor'] ) )
+        dirs_setup[u'watched_dirs'] = map(lambda p: normpath(join(saas,p)),
+                dirs_setup[u'watched_dirs'])
         dirs_with_id = dict([ (k,normpath(v)) for k,v in
             self.apc.list_all_watched_dirs()['dirs'].iteritems() ])

@@ -42,15 +46,11 @@ class AirtimeDB(Loggable):
             dirs_setup[u'watched_dirs'] ])

     def to_id(self, directory):
-        """
-        directory path -> id
-        """
+        """ directory path -> id """
         return self.dir_to_id[ directory ]

     def to_directory(self, dir_id):
-        """
-        id -> directory path
-        """
+        """ id -> directory path """
         return self.id_to_dir[ dir_id ]

     def storage_path(self) : return self.base_storage

@@ -60,37 +60,31 @@ class AirtimeDB(Loggable):
     def recorded_path(self) : return self.storage_paths['recorded']

     def list_watched(self):
-        """
-        returns all watched directories as a list
-        """
+        """ returns all watched directories as a list """
         return list(self.watched_directories)

     def list_storable_paths(self):
-        """
-        returns a list of all the watched directories in the datatabase.
-        (Includes the imported directory and the recorded directory)
-        """
+        """ returns a list of all the watched directories in the
+        datatabase. (Includes the imported directory and the recorded
+        directory) """
         l = self.list_watched()
         l.append(self.import_path())
         l.append(self.recorded_path())
         return l

     def dir_id_get_files(self, dir_id, all_files=True):
-        """
-        Get all files in a directory with id dir_id
-        """
+        """ Get all files in a directory with id dir_id """
         base_dir = self.id_to_dir[ dir_id ]
-        return set(( os.path.join(base_dir,p) for p in
+        return set(( join(base_dir,p) for p in
             self.apc.list_all_db_files( dir_id, all_files ) ))

     def directory_get_files(self, directory, all_files=True):
-        """
-        returns all the files(recursively) in a directory. a directory is an
-        "actual" directory path instead of its id. This is super hacky because
-        you create one request for the recorded directory and one for the
-        imported directory even though they're the same dir in the database so
-        you get files for both dirs in 1 request...
-        """
+        """ returns all the files(recursively) in a directory. a
+        directory is an "actual" directory path instead of its id. This
+        is super hacky because you create one request for the recorded
+        directory and one for the imported directory even though they're
+        the same dir in the database so you get files for both dirs in 1
+        request... """
         normal_dir = os.path.normpath(unicode(directory))
         if normal_dir not in self.dir_to_id:
             raise NoDirectoryInAirtime( normal_dir, self.dir_to_id )
python_apps/media-monitor2/media/monitor/toucher.py

@@ -3,6 +3,7 @@ import media.monitor.pure as mmp
 import os
 from media.monitor.log import Loggable
 from media.monitor.exceptions import CouldNotCreateIndexFile
+from media.saas.thread import InstanceInheritingThread

 class Toucher(Loggable):
     """

@@ -17,56 +18,23 @@ class Toucher(Loggable):
                 self.path)
             self.logger.info(str(e))

-#http://code.activestate.com/lists/python-ideas/8982/
-from datetime import datetime
 import time

-import threading

-class RepeatTimer(threading.Thread):
-    def __init__(self, interval, callable, args=[], kwargs={}):
-        threading.Thread.__init__(self)
-        # interval_current shows number of milliseconds in currently triggered
-        # <tick>
-        self.interval_current = interval
-        # interval_new shows number of milliseconds for next <tick>
-        self.interval_new = interval
+class RepeatTimer(InstanceInheritingThread):
+    def __init__(self, interval, callable, *args, **kwargs):
+        super(RepeatTimer, self).__init__()
+        self.interval = interval
         self.callable = callable
         self.args = args
         self.kwargs = kwargs
-        self.event = threading.Event()
-        self.event.set()
-        self.activation_dt = None
-        self.__timer = None

     def run(self):
-        while self.event.is_set():
-            self.activation_dt = datetime.utcnow()
-            self.__timer = threading.Timer(self.interval_new,
-                                           self.callable,
-                                           self.args,
-                                           self.kwargs)
-            self.interval_current = self.interval_new
-            self.__timer.start()
-            self.__timer.join()
-
-    def cancel(self):
-        self.event.clear()
-        if self.__timer is not None:
-            self.__timer.cancel()
-
-    def trigger(self):
-        self.callable(*self.args, **self.kwargs)
-        if self.__timer is not None:
-            self.__timer.cancel()
-
-    def change_interval(self, value):
-        self.interval_new = value
+        while True:
+            time.sleep(self.interval)
+            self.callable(*self.args, **self.kwargs)

 class ToucherThread(Loggable):
-    """
-    Creates a thread that touches a file 'path' every 'interval' seconds
-    """
+    """ Creates a thread that touches a file 'path' every 'interval'
+    seconds """
     def __init__(self, path, interval=5):
         if not os.path.exists(path):
             try:
python_apps/media-monitor2/media/monitor/watchersyncer.py

@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-import threading
 import time
 import copy

@@ -9,15 +8,16 @@ from media.monitor.exceptions import BadSongFile
 from media.monitor.eventcontractor import EventContractor
 from media.monitor.events import EventProxy
 from media.monitor.request import ThreadedRequestSync, RequestSync
+from media.saas.thread import InstanceInheritingThread, getsig

-class TimeoutWatcher(threading.Thread,Loggable):
+class TimeoutWatcher(InstanceInheritingThread,Loggable):
     """
     The job of this thread is to keep an eye on WatchSyncer and force a
     request whenever the requests go over time out
     """
     def __init__(self, watcher, timeout=5):
         self.logger.info("Created timeout thread...")
-        threading.Thread.__init__(self)
+        super(TimeoutWatcher, self).__init__()
         self.watcher = watcher
         self.timeout = timeout

@@ -52,7 +52,7 @@ class WatchSyncer(ReportHandler,Loggable):
         tc = TimeoutWatcher(self, self.timeout)
         tc.daemon = True
         tc.start()
-        super(WatchSyncer, self).__init__(signal=signal)
+        super(WatchSyncer, self).__init__(signal=getsig(signal))

     def handle(self, sender, event):
         """
python_apps/media-monitor2/media/saas/__init__.py (new file, 0 lines)

python_apps/media-monitor2/media/saas/airtimeinstance.py (new file, 76 lines)
@@ -0,0 +1,76 @@
+import os
+from os.path import join, basename, dirname
+
+from media.monitor.exceptions import NoConfigFile
+from media.monitor.pure import LazyProperty
+from media.monitor.config import MMConfig
+from media.monitor.owners import Owner
+from media.monitor.events import EventRegistry
+from media.monitor.listeners import FileMediator
+from api_clients.api_client import AirtimeApiClient
+
+# poor man's phantom types...
+class SignalString(str): pass
+
+class AirtimeInstance(object):
+    """ AirtimeInstance is a class that abstracts away every airtime
+    instance by providing all the necessary objects required to interact
+    with the instance. ApiClient, configs, root_directory """
+
+    @classmethod
+    def root_make(cls, name, root):
+        cfg = {
+            'api_client'    : join(root, 'etc/airtime/api_client.cfg'),
+            'media_monitor' : join(root, 'etc/airtime/media-monitor.cfg'),
+        }
+        return cls(name, root, cfg)
+
+    def __init__(self,name, root_path, config_paths):
+        """ name is an internal name only """
+        for cfg in ['api_client','media_monitor']:
+            if cfg not in config_paths: raise NoConfigFile(config_paths)
+            elif not os.path.exists(config_paths[cfg]):
+                raise NoConfigFile(config_paths[cfg])
+        self.name = name
+        self.config_paths = config_paths
+        self.root_path = root_path
+
+    def signal(self, sig):
+        if isinstance(sig, SignalString): return sig
+        else: return SignalString("%s_%s" % (self.name, sig))
+
+    def touch_file_path(self):
+        """ Get the path of the touch file for every instance """
+        touch_base_path = self.mm_config['index_path']
+        touch_base_name = basename(touch_base_path)
+        new_base_name = self.name + touch_base_name
+        return join(dirname(touch_base_path), new_base_name)
+
+    def __str__(self):
+        return "%s,%s(%s)" % (self.name, self.root_path, self.config_paths)
+
+    @LazyProperty
+    def api_client(self):
+        return AirtimeApiClient(config_path=self.config_paths['api_client'])
+
+    @LazyProperty
+    def mm_config(self):
+        return MMConfig(self.config_paths['media_monitor'])
+
+    # NOTE to future code monkeys:
+    # I'm well aware that I'm using the shitty service locator pattern
+    # instead of normal constructor injection as I should be. The reason
+    # for this is that I found these issues a little too close to the
+    # end of my tenure. It's highly recommended to rewrite this crap
+    # using proper constructor injection if you ever have the time
+
+    @LazyProperty
+    def owner(self): return Owner()
+
+    @LazyProperty
+    def event_registry(self): return EventRegistry()
+
+    @LazyProperty
+    def file_mediator(self): return FileMediator()
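Review note: a short sketch of how one of these per-customer instances is meant to be built; the 'customer1' name and /srv/customer1 root are invented:

    inst = AirtimeInstance.root_make('customer1', '/srv/customer1')
    # expects /srv/customer1/etc/airtime/api_client.cfg and media-monitor.cfg to exist
    sig = inst.signal('watch')     # SignalString 'customer1_watch'
    inst.signal(sig) is sig        # True: already-wrapped signals pass through unchanged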
python_apps/media-monitor2/media/saas/launcher.py (new file, 125 lines)
@@ -0,0 +1,125 @@
+import os, sys
+import logging
+import logging.config
+
+import media.monitor.pure as mmp
+
+from media.monitor.exceptions import FailedToObtainLocale, FailedToSetLocale
+from media.monitor.log import get_logger, setup_logging
+from std_err_override import LogWriter
+from media.saas.thread import InstanceThread, user, apc, getsig
+from media.monitor.log import Loggable
+from media.monitor.exceptions import CouldNotCreateIndexFile
+from media.monitor.toucher import ToucherThread
+from media.monitor.airtime import AirtimeNotifier, AirtimeMessageReceiver
+from media.monitor.watchersyncer import WatchSyncer
+from media.monitor.eventdrainer import EventDrainer
+from media.monitor.manager import Manager
+from media.monitor.syncdb import AirtimeDB
+from media.saas.airtimeinstance import AirtimeInstance
+
+class MM2(InstanceThread, Loggable):
+
+    def index_create(self, index_create_attempt=False):
+        config = user().mm_config
+        if not index_create_attempt:
+            if not os.path.exists(config['index_path']):
+                self.logger.info("Attempting to create index file:...")
+                try:
+                    with open(config['index_path'], 'w') as f: f.write(" ")
+                except Exception as e:
+                    self.logger.info("Failed to create index file with exception: %s" \
+                            % str(e))
+                else:
+                    self.logger.info("Created index file, reloading configuration:")
+                    self.index_create(index_create_attempt=True)
+        else:
+            self.logger.info("Already tried to create index. Will not try again ")
+
+        if not os.path.exists(config['index_path']):
+            raise CouldNotCreateIndexFile(config['index_path'])
+
+    def run(self):
+        self.index_create()
+        manager = Manager()
+        apiclient = apc()
+        config = user().mm_config
+        WatchSyncer(signal=getsig('watch'),
+                    chunking_number=config['chunking_number'],
+                    timeout=config['request_max_wait'])
+        airtime_receiver = AirtimeMessageReceiver(config,manager)
+        airtime_notifier = AirtimeNotifier(config, airtime_receiver)
+
+        adb = AirtimeDB(apiclient)
+        store = {
+            u'stor'         : adb.storage_path(),
+            u'watched_dirs' : adb.list_watched(),
+        }
+
+        self.logger.info("initializing mm with directories: %s" % str(store))
+
+        self.logger.info(
+            "Initing with the following airtime response:%s" % str(store))
+
+        airtime_receiver.change_storage({ 'directory':store[u'stor'] })
+
+        for watch_dir in store[u'watched_dirs']:
+            if not os.path.exists(watch_dir):
+                # Create the watch_directory here
+                try: os.makedirs(watch_dir)
+                except Exception:
+                    self.logger.error("Could not create watch directory: '%s' \
+                            (given from the database)." % watch_dir)
+            if os.path.exists(watch_dir):
+                airtime_receiver.new_watch({ 'directory':watch_dir }, restart=True)
+            else: self.logger.info("Failed to add watch on %s" % str(watch_dir))
+
+        EventDrainer(airtime_notifier.connection,
+                interval=float(config['rmq_event_wait']))
+
+        # Launch the toucher that updates the last time when the script was
+        # ran every n seconds.
+        # TODO : verify that this does not interfere with bootstrapping because the
+        # toucher thread might update the last_ran variable too fast
+        ToucherThread(path=user().touch_file_path(),
+                interval=int(config['touch_interval']))
+
+        apiclient.register_component('media-monitor')
+
+        manager.loop()
+
+def launch_instance(name, root, global_cfg, apc_cfg):
+    cfg = {
+        'api_client'    : apc_cfg,
+        'media_monitor' : global_cfg,
+    }
+    ai = AirtimeInstance(name, root, cfg)
+    MM2(ai).start()
+
+def setup_global(log):
+    """ setup unicode and other stuff """
+    log.info("Attempting to set the locale...")
+    try: mmp.configure_locale(mmp.get_system_locale())
+    except FailedToSetLocale as e:
+        log.info("Failed to set the locale...")
+        sys.exit(1)
+    except FailedToObtainLocale as e:
+        log.info("Failed to obtain the locale form the default path: \
+                '/etc/default/locale'")
+        sys.exit(1)
+    except Exception as e:
+        log.info("Failed to set the locale for unknown reason. \
+                Logging exception.")
+        log.info(str(e))
+
+def setup_logger(log_config, logpath):
+    logging.config.fileConfig(log_config)
+    #need to wait for Python 2.7 for this..
+    #logging.captureWarnings(True)
+    logger = logging.getLogger()
+    LogWriter.override_std_err(logger)
+    logfile = unicode(logpath)
+    setup_logging(logfile)
+    log = get_logger()
+    return log
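Review note: launch_instance is the seam a multi-tenant wrapper would call once per customer; the rewritten main() in mm2.py below uses it exactly once for the hosted install. A hedged sketch of the multi-instance case (names and paths invented):

    from os.path import join
    for name, root in [('customer1', '/srv/c1'), ('customer2', '/srv/c2')]:
        launch_instance(name, root,
                join(root, 'etc/airtime/media-monitor.cfg'),   # global_cfg
                join(root, 'etc/airtime/api_client.cfg'))      # apc_cfg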
python_apps/media-monitor2/media/saas/thread.py (new file, 28 lines)
@@ -0,0 +1,28 @@
+import threading
+
+class UserlessThread(Exception):
+    def __str__(self):
+        return "Current thread: %s is not an instance of InstanceThread \
+                of InstanceInheritingThread" % str(threading.current_thread())
+
+class HasUser(object):
+    def user(self): return self._user
+    def assign_user(self): self._user = threading.current_thread().user()
+
+class InstanceThread(threading.Thread, HasUser):
+    def __init__(self,user, *args, **kwargs):
+        super(InstanceThread, self).__init__(*args, **kwargs)
+        self._user = user
+
+class InstanceInheritingThread(threading.Thread, HasUser):
+    def __init__(self, *args, **kwargs):
+        self.assign_user()
+        super(InstanceInheritingThread, self).__init__(*args, **kwargs)
+
+def user():
+    try: return threading.current_thread().user()
+    except AttributeError: raise UserlessThread()
+
+def apc(): return user().api_client
+
+def getsig(s): return user().signal(s)
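Review note: this module is the service locator the rest of the commit leans on. user() looks no further than threading.current_thread(), so it only works on an InstanceThread or on a thread spawned from one; anywhere else it raises UserlessThread. A hedged sketch of the propagation rule (DemoInstance stands in for a real AirtimeInstance):

    class DemoInstance(object):
        def signal(self, s): return 'demo_' + s

    def work():
        print(getsig('watch'))   # 'demo_watch', taken from the thread's user

    InstanceThread(DemoInstance(), target=work).start()
    # an InstanceInheritingThread created inside work() picks up the same user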
@@ -1,140 +1,15 @@
 # -*- coding: utf-8 -*-
 import sys
 import os
-import logging
-import logging.config
+from media.saas.launcher import setup_global, launch_instance, setup_logger
+from media.monitor.config import MMConfig
 
-from media.monitor.manager import Manager
-from media.monitor.bootstrap import Bootstrapper
-from media.monitor.log import get_logger, setup_logging
-from media.monitor.config import MMConfig
-from media.monitor.toucher import ToucherThread
-from media.monitor.syncdb import AirtimeDB
-from media.monitor.exceptions import FailedToObtainLocale, \
-                                     FailedToSetLocale, \
-                                     NoConfigFile
-from media.monitor.airtime import AirtimeNotifier, \
-                                  AirtimeMessageReceiver
-from media.monitor.watchersyncer import WatchSyncer
-from media.monitor.eventdrainer import EventDrainer
-from media.update.replaygainupdater import ReplayGainUpdater
-from std_err_override import LogWriter
-
-import media.monitor.pure as mmp
-from api_clients import api_client as apc
-
-
-def main(global_config, api_client_config, log_config,
-         index_create_attempt=False):
-    for cfg in [global_config, api_client_config]:
-        if not os.path.exists(cfg): raise NoConfigFile(cfg)
-    # MMConfig is a proxy around ConfigObj instances. It does not allow
-    # users of MMConfig instances to modify any config options directly
-    # through the dictionary. Users of this object must use the correct
-    # methods designated for modification.
-    try: config = MMConfig(global_config)
-    except NoConfigFile as e:
-        print("Cannot run mediamonitor2 without configuration file.")
-        print("Current config path: '%s'" % global_config)
-        sys.exit(1)
-    except Exception as e:
-        print("Unknown error reading configuration file: '%s'" % global_config)
-        print(str(e))
-
-    logging.config.fileConfig(log_config)
-
-    #need to wait for Python 2.7 for this..
-    #logging.captureWarnings(True)
-
-    logger = logging.getLogger()
-    LogWriter.override_std_err(logger)
-    logfile = unicode( config['logpath'] )
-    setup_logging(logfile)
-    log = get_logger()
-
-    if not index_create_attempt:
-        if not os.path.exists(config['index_path']):
-            log.info("Attempting to create index file...")
-            try:
-                with open(config['index_path'], 'w') as f: f.write(" ")
-            except Exception as e:
-                log.info("Failed to create index file with exception: %s" \
-                         % str(e))
-            else:
-                log.info("Created index file, reloading configuration:")
-                main( global_config, api_client_config, log_config,
-                      index_create_attempt=True )
-    else:
-        log.info("Already tried to create index. Will not try again")
-
-    if not os.path.exists(config['index_path']):
-        log.info("Index file does not exist. Terminating")
-
-    log.info("Attempting to set the locale...")
-
-    try:
-        mmp.configure_locale(mmp.get_system_locale())
-    except FailedToSetLocale as e:
-        log.info("Failed to set the locale...")
-        sys.exit(1)
-    except FailedToObtainLocale as e:
-        log.info("Failed to obtain the locale from the default path: \
-                 '/etc/default/locale'")
-        sys.exit(1)
-    except Exception as e:
-        log.info("Failed to set the locale for unknown reason. \
-                 Logging exception.")
-        log.info(str(e))
-
-    watch_syncer = WatchSyncer(signal='watch',
-                               chunking_number=config['chunking_number'],
-                               timeout=config['request_max_wait'])
-
-    apiclient = apc.AirtimeApiClient.create_right_config(log=log,
-                                                         config_path=api_client_config)
-
-    ReplayGainUpdater.start_reply_gain(apiclient)
-
-    sdb = AirtimeDB(apiclient)
-
-    manager = Manager()
-
-    airtime_receiver = AirtimeMessageReceiver(config, manager)
-    airtime_notifier = AirtimeNotifier(config, airtime_receiver)
-
-    store = apiclient.setup_media_monitor()
-
-    log.info("Initing with the following airtime response: %s" % str(store))
-
-    airtime_receiver.change_storage({ 'directory':store[u'stor'] })
-
-    for watch_dir in store[u'watched_dirs']:
-        if not os.path.exists(watch_dir):
-            # Create the watch_directory here
-            try: os.makedirs(watch_dir)
-            except Exception as e:
-                log.error("Could not create watch directory: '%s' \
-                          (given from the database)." % watch_dir)
-        if os.path.exists(watch_dir):
-            airtime_receiver.new_watch({ 'directory':watch_dir }, restart=True)
-        else: log.info("Failed to add watch on %s" % str(watch_dir))
-
-    bs = Bootstrapper( db=sdb, watch_signal='watch' )
-
-    ed = EventDrainer(airtime_notifier.connection,
-                      interval=float(config['rmq_event_wait']))
-
-    # Launch the toucher that updates the last time when the script was
-    # run, every n seconds.
-    # TODO : verify that this does not interfere with bootstrapping because the
-    # toucher thread might update the last_ran variable too fast
-    tt = ToucherThread(path=config['index_path'],
-                       interval=int(config['touch_interval']))
-
-    apiclient.register_component('media-monitor')
-
-    manager.loop()
+def main(global_config, api_client_config, log_config):
+    """ function to run hosted install """
+    mm_config = MMConfig(global_config)
+    log = setup_logger( log_config, mm_config['logpath'] )
+    setup_global(log)
+    launch_instance('hosted_install', '/', global_config, api_client_config)
 
 __doc__ = """
 Usage:
@@ -156,3 +31,4 @@ if __name__ == '__main__':
             sys.exit(0)
+    print("Running mm1.99")
     main(args['--config'], args['--apiclient'], args['--log'])
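The comment in the removed main() above describes MMConfig as a proxy around ConfigObj that refuses direct mutation and forces callers through designated modification methods. A minimal sketch of that pattern, hypothetical and not the actual MMConfig source; only ConfigObj's dict-style access is assumed:

from configobj import ConfigObj

class ReadOnlyConfig(object):
    def __init__(self, path):
        self._cfg = ConfigObj(path)

    def __getitem__(self, key):
        # reads pass straight through to the underlying ConfigObj
        return self._cfg[key]

    def __setitem__(self, key, value):
        # direct writes are refused, as the MMConfig comment describes
        raise TypeError("config is read-only; use the designated setter methods")

    def save_setting(self, key, value):
        # an example of a sanctioned modification path
        self._cfg[key] = value
        self._cfg.write()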

21 python_apps/media-monitor2/tests/test_instance.py Normal file

@@ -0,0 +1,21 @@
+import unittest
+from copy import deepcopy
+from media.saas.airtimeinstance import AirtimeInstance, NoConfigFile
+
+class TestAirtimeInstance(unittest.TestCase):
+    def setUp(self):
+        self.cfg = {
+            'api_client'    : 'tests/test_instance.py',
+            'media_monitor' : 'tests/test_instance.py',
+            'logging'       : 'tests/test_instance.py',
+        }
+
+    def test_init_good(self):
+        AirtimeInstance("/root", self.cfg)
+        self.assertTrue(True)
+
+    def test_init_bad(self):
+        cfg = deepcopy(self.cfg)
+        cfg['api_client'] = 'bs'
+        with self.assertRaises(NoConfigFile):
+            AirtimeInstance("/root", cfg)

64 python_apps/media-monitor2/tests/test_thread.py Normal file

@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+import unittest
+import time
+from media.saas.thread import InstanceThread, InstanceInheritingThread
+
+# ugly but necessary for 2.7
+signal = False
+signal2 = False
+
+class TestInstanceThread(unittest.TestCase):
+    def test_user_inject(self):
+        global signal
+        signal = False
+        u = "rudi"
+        class T(InstanceThread):
+            def run(me):
+                global signal
+                super(T, me).run()
+                signal = True
+                self.assertEquals(u, me.user())
+        t = T(u, name="test_user_inject")
+        t.daemon = True
+        t.start()
+        time.sleep(0.2)
+        self.assertTrue(signal)
+
+    def test_inheriting_thread(utest):
+        global signal2
+        u = "testing..."
+
+        class TT(InstanceInheritingThread):
+            def run(self):
+                global signal2
+                utest.assertEquals(self.user(), u)
+                signal2 = True
+
+        class T(InstanceThread):
+            def run(self):
+                super(T, self).run()
+                child_thread = TT(name="child thread")
+                child_thread.daemon = True
+                child_thread.start()
+
+        parent_thread = T(u, name="Parent instance thread")
+        parent_thread.daemon = True
+        parent_thread.start()
+
+        time.sleep(0.2)
+        utest.assertTrue(signal2)
+
+    def test_different_user(utest):
+        u1, u2 = "ru", "di"
+        class T(InstanceThread):
+            def run(self):
+                super(T, self).run()
+
+        for u in [u1, u2]:
+            t = T(u)
+            t.daemon = True
+            t.start()
+            utest.assertEquals(t.user(), u)
+
+
+if __name__ == '__main__': unittest.main()
@@ -38,27 +38,6 @@ def get_os_codename():
 
     return ("unknown", "unknown")
 
-def generate_liquidsoap_config(ss):
-    data = ss['msg']
-    fh = open('/etc/airtime/liquidsoap.cfg', 'w')
-    fh.write("################################################\n")
-    fh.write("# THIS FILE IS AUTO GENERATED. DO NOT CHANGE!! #\n")
-    fh.write("################################################\n")
-    for d in data:
-        buffer = d[u'keyname'] + " = "
-        if(d[u'type'] == 'string'):
-            temp = d[u'value']
-            buffer += '"%s"' % temp
-        else:
-            temp = d[u'value']
-            if(temp == ""):
-                temp = "0"
-            buffer += temp
-        buffer += "\n"
-        fh.write(api_client.encode_to(buffer))
-    fh.write('log_file = "/var/log/airtime/pypo-liquidsoap/<script>.log"\n')
-    fh.close()
-
 PATH_INI_FILE = '/etc/airtime/pypo.cfg'
 PATH_LIQUIDSOAP_BIN = '/usr/lib/airtime/pypo/bin/liquidsoap_bin'
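For reference, the generator removed above writes /etc/airtime/liquidsoap.cfg in the following shape: string-typed settings are quoted, every other value is written bare with empty values coerced to "0", and a fixed log_file line is appended. The keynames and values below are illustrative only, not taken from a real database:

################################################
# THIS FILE IS AUTO GENERATED. DO NOT CHANGE!! #
################################################
s1_output = "icecast"
s1_port = 8000
s1_enable = "true"
log_file = "/var/log/airtime/pypo-liquidsoap/<script>.log"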
@@ -407,7 +407,7 @@ end
 # fade using both cross() and switch().
 def input.http_restart(~id,~initial_url="http://dummy/url")
 
-  source = input.http(buffer=5.,max=15.,id=id,autostart=false,initial_url)
+  source = audio_to_stereo(input.http(buffer=5.,max=15.,id=id,autostart=false,initial_url))
 
   def stopped()
     "stopped" == list.hd(server.execute("#{id}.status"))
@@ -35,10 +35,6 @@ just_switched = ref false
 
 %include "ls_lib.liq"
 
-#web_stream = input.harbor("test-harbor", port=8999, password=stream_harbor_pass)
-#web_stream = on_metadata(notify_stream, web_stream)
-#output.dummy(fallible=true, web_stream)
-
 queue = audio_to_stereo(id="queue_src", request.equeue(id="queue", length=0.5))
 queue = cue_cut(queue)
 queue = amplify(1., override="replay_gain", queue)
@@ -51,7 +47,8 @@ output.dummy(fallible=true, queue)
 
 http = input.http_restart(id="http")
 http = cross_http(http_input_id="http",http)
-stream_queue = http_fallback(http_input_id="http",http=http,default=queue)
+output.dummy(fallible=true, http)
+stream_queue = http_fallback(http_input_id="http", http=http, default=queue)
 
 ignore(output.dummy(stream_queue, fallible=true))
 
@@ -120,7 +117,11 @@ server.register(namespace="dynamic_source",
 # fun (s) -> begin log("dynamic_source.read_stop") destroy_dynamic_source_all() end)
 
 default = amplify(id="silence_src", 0.00001, noise())
-default = rewrite_metadata([("artist","Airtime"), ("title", "offline")], default)
+ref_off_air_meta = ref off_air_meta
+if !ref_off_air_meta == "" then
+  ref_off_air_meta := "Airtime - offline"
+end
+default = rewrite_metadata([("title", !ref_off_air_meta)], default)
 ignore(output.dummy(default, fallible=true))
 
 master_dj_enabled = ref false
@@ -31,15 +31,15 @@ class ListenerStat(Thread):
         return self.api_client.get_stream_parameters()
 
 
-    def get_icecast_xml(self, ip):
-        #encoded = base64.b64encode("%(admin_user)s:%(admin_password)s" % ip)
-
-        #header = {"Authorization":"Basic %s" % encoded}
-        self.logger.debug(ip)
-        url = 'http://%(host)s:%(port)s/stats.xsl' % ip
-        self.logger.debug(url)
-        req = urllib2.Request(url=url)
-                #headers=header)
+    def get_stream_server_xml(self, ip, url):
+        encoded = base64.b64encode("%(admin_user)s:%(admin_pass)s" % ip)
+
+        header = {"Authorization":"Basic %s" % encoded}
+        req = urllib2.Request(
+            #assuming that the icecast stats path is /admin/stats.xml
+            #need to fix this
+            url=url,
+            headers=header)
 
         f = urllib2.urlopen(req)
         document = f.read()
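The rewritten method above reduces to an HTTP GET with a Basic auth header built from the stream's admin credentials. A standalone sketch of that request path, assuming ip is a dict carrying admin_user and admin_pass keys, as the stream parameters above do:

import base64
import urllib2

def fetch_stats_xml(ip, url):
    # mirrors get_stream_server_xml(): Basic auth from the admin credentials
    encoded = base64.b64encode("%(admin_user)s:%(admin_pass)s" % ip)
    req = urllib2.Request(url=url,
                          headers={"Authorization": "Basic %s" % encoded})
    return urllib2.urlopen(req).read()

The callers then point this at /admin/stats.xml for Icecast and /admin.cgi?sid=1&mode=viewxml for Shoutcast, as the next hunks show.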
@@ -48,7 +48,8 @@ class ListenerStat(Thread):
 
 
     def get_icecast_stats(self, ip):
-        document = self.get_icecast_xml(ip)
+        url = 'http://%(host)s:%(port)s/admin/stats.xml' % ip
+        document = self.get_stream_server_xml(ip, url)
         dom = xml.dom.minidom.parseString(document)
         sources = dom.getElementsByTagName("source")
 
@@ -66,6 +67,24 @@ class ListenerStat(Thread):
         mount_stats = {"timestamp":timestamp, \
                        "num_listeners": num_listeners, \
                        "mount_name": mount_name}
 
         return mount_stats
 
+    def get_shoutcast_stats(self, ip):
+        url = 'http://%(host)s:%(port)s/admin.cgi?sid=1&mode=viewxml' % ip
+        document = self.get_stream_server_xml(ip, url)
+        dom = xml.dom.minidom.parseString(document)
+        current_listeners = dom.getElementsByTagName("CURRENTLISTENERS")
+
+        timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+        num_listeners = 0
+        if len(current_listeners):
+            num_listeners = self.get_node_text(current_listeners[0].childNodes)
+
+        mount_stats = {"timestamp":timestamp, \
+                       "num_listeners": num_listeners, \
+                       "mount_name": "shoutcast"}
+
+        return mount_stats
+
     def get_stream_stats(self, stream_parameters):
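get_shoutcast_stats() above pulls the listener count out of the server's XML status page with xml.dom.minidom. A tiny self-contained sketch of that parse, run against a made-up response document:

import xml.dom.minidom

sample = "<SHOUTCASTSERVER><CURRENTLISTENERS>7</CURRENTLISTENERS></SHOUTCASTSERVER>"
dom = xml.dom.minidom.parseString(sample)
nodes = dom.getElementsByTagName("CURRENTLISTENERS")
# same guard as above: default to 0 when the tag is missing
num_listeners = nodes[0].firstChild.data if nodes else 0
print(num_listeners)  # prints: 7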
@@ -77,16 +96,27 @@ class ListenerStat(Thread):
         #streams are the same server, we will still initiate 3 separate
         #connections
         for k, v in stream_parameters.items():
-            #v["admin_user"] = "admin"
-            #v["admin_password"] = "hackme"
             if v["enable"] == 'true':
-                stats.append(self.get_icecast_stats(v))
+                try:
+                    if v["output"] == "icecast":
+                        stats.append(self.get_icecast_stats(v))
+                    else:
+                        stats.append(self.get_shoutcast_stats(v))
+                    self.update_listener_stat_error(v["mount"], 'OK')
+                except Exception, e:
+                    self.logger.error('Exception: %s', e)
+                    self.update_listener_stat_error(v["mount"], str(e))
 
         return stats
 
     def push_stream_stats(self, stats):
         self.api_client.push_stream_stats(stats)
 
+    def update_listener_stat_error(self, stream_id, error):
+        keyname = '%s_listener_stat_error' % stream_id
+        data = {keyname: error}
+        self.api_client.update_stream_setting_table(data)
+
     def run(self):
         #Wake up every 120 seconds and gather icecast statistics. Note that we
         #are currently querying the server every 2 minutes for list of
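update_listener_stat_error() above records per-stream health under a derived keyname, so one update_stream_setting_table() call can flag each mount independently. Illustrative payloads, with a made-up stream id "s1":

# after a failed poll of stream "s1":
data = {"s1_listener_stat_error": "HTTP Error 401: Unauthorized"}
# after the next successful poll the flag is reset:
data = {"s1_listener_stat_error": "OK"}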
@@ -99,10 +129,12 @@ class ListenerStat(Thread):
                 stats = self.get_stream_stats(stream_parameters["stream_params"])
                 self.logger.debug(stats)
 
-                self.push_stream_stats(stats)
+                if not stats:
+                    self.logger.error("Not able to get listener stats")
+                else:
+                    self.push_stream_stats(stats)
             except Exception, e:
-                top = traceback.format_exc()
-                self.logger.error('Exception: %s', top)
+                self.logger.error('Exception: %s', e)
 
             time.sleep(120)
 

0 python_apps/pypo/media/__init__.py Normal file

0 python_apps/pypo/media/update/__init__.py Normal file

@@ -3,12 +3,11 @@ from threading import Thread
 import traceback
 import os
 import time
+import logging
 
 from media.update import replaygain
-from media.monitor.log import Loggable
 
 
-class ReplayGainUpdater(Thread, Loggable):
+class ReplayGainUpdater(Thread):
     """
     The purpose of the class is to query the server for a list of files which
     do not have a ReplayGain value calculated. This class will iterate over the
@@ -30,6 +29,7 @@ class ReplayGainUpdater(Thread):
     def __init__(self, apc):
         Thread.__init__(self)
         self.api_client = apc
+        self.logger = logging.getLogger()
 
     def main(self):
         raw_response = self.api_client.list_all_watched_dirs()
@@ -56,7 +56,10 @@ class ReplayGainUpdater(Thread):
                 full_path = os.path.join(dir_path, f['fp'])
                 processed_data.append((f['id'], replaygain.calculate_replay_gain(full_path)))
 
-            self.api_client.update_replay_gain_values(processed_data)
+            try:
+                self.api_client.update_replay_gain_values(processed_data)
+            except Exception as e: self.unexpected_exception(e)
+
             if len(files) == 0: break
         self.logger.info("Processed: %d songs" % total)
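The try/except added above keeps one failed API push from aborting the whole ReplayGain pass; the loop simply moves on to the next batch of files. The same pattern in isolation: push_batches and the logger wiring are hypothetical, while update_replay_gain_values is the real client call used above:

def push_batches(api_client, batches, logger):
    # each batch is a list of (file_id, replay_gain) pairs, as built above
    for batch in batches:
        try:
            api_client.update_replay_gain_values(batch)
        except Exception as e:
            logger.error("ReplayGain push failed, continuing: %s" % e)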
@@ -24,6 +24,8 @@ from recorder import Recorder
 from listenerstat import ListenerStat
 from pypomessagehandler import PypoMessageHandler
 
+from media.update.replaygainupdater import ReplayGainUpdater
+
 from configobj import ConfigObj
 
 # custom imports
@@ -174,6 +176,9 @@ if __name__ == '__main__':
         sys.exit()
 
     api_client = api_client.AirtimeApiClient()
+
+    ReplayGainUpdater.start_reply_gain(api_client)
+
     api_client.register_component("pypo")
 
     pypoFetch_q = Queue()
@@ -188,28 +188,6 @@ class PypoFetch(Thread):
         self.update_liquidsoap_station_name(info['station_name'])
         self.update_liquidsoap_transition_fade(info['transition_fade'])
 
-    def write_liquidsoap_config(self, setting):
-        fh = open('/etc/airtime/liquidsoap.cfg', 'w')
-        self.logger.info("Rewriting liquidsoap.cfg...")
-        fh.write("################################################\n")
-        fh.write("# THIS FILE IS AUTO GENERATED. DO NOT CHANGE!! #\n")
-        fh.write("################################################\n")
-        for k, d in setting:
-            buffer_str = d[u'keyname'] + " = "
-            if d[u'type'] == 'string':
-                temp = d[u'value']
-                buffer_str += '"%s"' % temp
-            else:
-                temp = d[u'value']
-                if temp == "":
-                    temp = "0"
-                buffer_str += temp
-
-            buffer_str += "\n"
-            fh.write(api_client.encode_to(buffer_str))
-        fh.write("log_file = \"/var/log/airtime/pypo-liquidsoap/<script>.log\"\n");
-        fh.close()
-
     def restart_liquidsoap(self):
 
         self.telnet_lock.acquire()
@@ -296,10 +274,10 @@ class PypoFetch(Thread):
                 dump, stream = s[u'keyname'].split('_', 1)
                 state_change_restart[stream] = False
                 # This is the case where restart is required no matter what
-                if (existing[s[u'keyname']] != s[u'value']):
+                if (existing[s[u'keyname']] != str(s[u'value'])):
                     self.logger.info("'Need-to-restart' state detected for %s...", s[u'keyname'])
                     restart = True;
-            elif "master_live_stream_port" in s[u'keyname'] or "master_live_stream_mp" in s[u'keyname'] or "dj_live_stream_port" in s[u'keyname'] or "dj_live_stream_mp" in s[u'keyname']:
+            elif "master_live_stream_port" in s[u'keyname'] or "master_live_stream_mp" in s[u'keyname'] or "dj_live_stream_port" in s[u'keyname'] or "dj_live_stream_mp" in s[u'keyname'] or "off_air_meta" in s[u'keyname']:
                 if (existing[s[u'keyname']] != s[u'value']):
                     self.logger.info("'Need-to-restart' state detected for %s...", s[u'keyname'])
                     restart = True;
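The str() coercion added above matters because values read back from the existing config arrive as strings, while a freshly fetched setting may be an int, so a naive != would flag a restart on every comparison. A minimal illustration, assuming that type mismatch:

existing_value = "8000"        # read back from the stored config (string)
incoming_value = 8000          # fresh value from the API (int)
print(existing_value != incoming_value)       # True  -> spurious restart
print(existing_value != str(incoming_value))  # False -> correctly unchanged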
@@ -209,7 +209,8 @@ class PypoPush(Thread):
             else:
                 correct = liquidsoap_queue_approx[0]['start'] == media_item['start'] and \
                           liquidsoap_queue_approx[0]['row_id'] == media_item['row_id'] and \
-                          liquidsoap_queue_approx[0]['end'] == media_item['end']
+                          liquidsoap_queue_approx[0]['end'] == media_item['end'] and \
+                          liquidsoap_queue_approx[0]['replay_gain'] == media_item['replay_gain']
         elif is_stream(media_item):
             correct = liquidsoap_stream_id == str(media_item['row_id'])
 
@@ -1,100 +0,0 @@
-<?php
-
-// Define path to application directory
-define('APPLICATION_PATH', realpath(dirname(__FILE__) . '/../../../application'));
-echo APPLICATION_PATH.PHP_EOL;
-
-// Ensure library/ is on include_path
-set_include_path(get_include_path() . PATH_SEPARATOR . realpath(APPLICATION_PATH . '/../library'));
-
-set_include_path(get_include_path() . PATH_SEPARATOR . APPLICATION_PATH . '/models');
-echo get_include_path().PHP_EOL;
-
-//Controller plugins.
-set_include_path(APPLICATION_PATH . get_include_path() . PATH_SEPARATOR . '/controllers/plugins');
-
-
-require_once APPLICATION_PATH.'/configs/conf.php';
-require_once(APPLICATION_PATH.'/../library/propel/runtime/lib/Propel.php');
-
-require_once 'Soundcloud.php';
-require_once 'Playlist.php';
-require_once 'StoredFile.php';
-require_once 'Schedule.php';
-require_once 'Shows.php';
-require_once 'User.php';
-require_once 'RabbitMq.php';
-require_once 'Preference.php';
-//require_once APPLICATION_PATH.'/controllers/plugins/RabbitMqPlugin.php';
-
-// Initialize Propel with the runtime configuration
-Propel::init(__DIR__."/../../../application/configs/airtime-conf.php");
-
-$playlistName = "pypo_playlist_test";
-$secondsFromNow = 30;
-
-echo " ************************************************************** \n";
-echo " This script schedules a playlist to play $secondsFromNow second(s) from now.\n";
-echo " This is a utility to help you debug the scheduler.\n";
-echo " ************************************************************** \n";
-echo "\n";
-echo "Deleting playlists with the name '$playlistName'...";
-// Delete any old playlists
-$pl2 = Playlist::findPlaylistByName($playlistName);
-foreach ($pl2 as $playlist) {
-    //var_dump($playlist);
-    $playlist->delete();
-}
-echo "done.\n";
-
-// Create a new playlist
-echo "Creating new playlist '$playlistName'...";
-$pl = new Playlist();
-$pl->create($playlistName);
-
-
-$mediaFile = Application_Model_StoredFile::findByOriginalName("Peter_Rudenko_-_Opening.mp3");
-if (is_null($mediaFile)) {
-    echo "Adding test audio clip to the database.\n";
-    $v = array("filepath" => __DIR__."/../../../audio_samples/vorbis.com/Hydrate-Kenny_Beltrey.ogg");
-    $mediaFile = Application_Model_StoredFile::Insert($v);
-}
-$pl->addAudioClip($mediaFile->getId());
-echo "done.\n";
-
-
-//$pl2 = Playlist::findPlaylistByName("pypo_playlist_test");
-//var_dump($pl2);
-
-// Get current time
-// In the format YYYY-MM-DD HH:MM:SS.nnnnnn
-$startTime = date("Y-m-d H:i:s");
-$endTime = date("Y-m-d H:i:s", time()+(60*60));
-
-echo "Removing everything from the scheduler between $startTime and $endTime...";
-
-
-// Check for success
-$scheduleClear = Schedule::isScheduleEmptyInRange($startTime, "01:00:00");
-if (!$scheduleClear) {
-    echo "\nERROR: Schedule could not be cleared.\n\n";
-    var_dump(Schedule::getItems($startTime, $endTime));
-    exit;
-}
-echo "done.\n";
-
-// Schedule the playlist a few seconds from now
-echo "Scheduling new playlist...\n";
-//$playTime = date("Y-m-d H:i:s", time()+(60*$minutesFromNow));
-$playTime = date("Y-m-d H:i:s", time()+($secondsFromNow));
-
-//$scheduleGroup = new ScheduleGroup();
-//$scheduleGroup->add($playTime, null, $pl->getId());
-
-//$show = new Application_Model_ShowInstance($showInstanceId);
-//$show->scheduleShow(array($pl->getId()));
-
-//$show->setShowStart();
-//$show->setShowEnd();
-
-echo " SUCCESS: Playlist scheduled at $playTime\n\n";
@@ -1,59 +0,0 @@
-[loggers]
-keys=root
-
-[handlers]
-keys=consoleHandler,fileHandlerERROR,fileHandlerDEBUG,nullHandler
-
-[formatters]
-keys=simpleFormatter
-
-[logger_root]
-level=DEBUG
-handlers=consoleHandler,fileHandlerERROR,fileHandlerDEBUG
-
-[logger_libs]
-handlers=nullHandler
-level=CRITICAL
-qualname="process"
-propagate=0
-
-[handler_consoleHandler]
-class=StreamHandler
-level=CRITICAL
-formatter=simpleFormatter
-args=(sys.stdout,)
-
-[handler_fileHandlerERROR]
-class=FileHandler
-level=CRITICAL
-formatter=simpleFormatter
-args=("./error-unit-test.log",)
-
-[handler_fileHandlerDEBUG]
-class=FileHandler
-level=CRITICAL
-formatter=simpleFormatter
-args=("./debug-unit-test.log",)
-
-[handler_nullHandler]
-class=FileHandler
-level=CRITICAL
-formatter=simpleFormatter
-args=("./log-null-unit-test.log",)
-
-
-[formatter_simpleFormatter]
-format=%(asctime)s %(levelname)s - [%(filename)s : %(funcName)s() : line %(lineno)d] - %(message)s
-datefmt=
-
-
-## multitail color scheme
-## pyml / python
-# colorscheme:pyml:www.obp.net
-# cs_re:blue:\[[^ ]*\]
-# cs_re:red:CRITICAL:*
-# cs_re:red,black,blink:ERROR:*
-# cs_re:blue:NOTICE:*
-# cs_re:cyan:INFO:*
-# cs_re:green:DEBUG:*
@@ -1,74 +0,0 @@
-import time
-import os
-import traceback
-from optparse import *
-import sys
-import time
-import datetime
-import logging
-import logging.config
-import shutil
-import urllib
-import urllib2
-import pickle
-import telnetlib
-import random
-import string
-import operator
-import inspect
-
-# additional modules (should be checked)
-from configobj import ConfigObj
-
-# custom imports
-from util import *
-from api_clients import *
-
-import random
-import unittest
-
-# configure logging
-logging.config.fileConfig("logging-api-validator.cfg")
-
-try:
-    config = ConfigObj('/etc/airtime/pypo.cfg')
-except Exception, e:
-    print 'Error loading config file: ', e
-    sys.exit()
-
-
-class TestApiFunctions(unittest.TestCase):
-
-    def setUp(self):
-        self.api_client = api_client.api_client_factory(config)
-
-    def test_is_server_compatible(self):
-        self.assertTrue(self.api_client.is_server_compatible(False))
-
-    def test_get_schedule(self):
-        status, response = self.api_client.get_schedule()
-        self.assertTrue(response.has_key("status"))
-        self.assertTrue(response.has_key("playlists"))
-        self.assertTrue(response.has_key("check"))
-        self.assertTrue(status == 1)
-
-    def test_get_media(self):
-        self.assertTrue(True)
-
-    def test_notify_scheduled_item_start_playing(self):
-        arr = dict()
-        arr["x"] = dict()
-        arr["x"]["schedule_id"] = 1
-
-        response = self.api_client.notify_scheduled_item_start_playing("x", arr)
-        self.assertTrue(response.has_key("status"))
-        self.assertTrue(response.has_key("message"))
-
-    def test_notify_media_item_start_playing(self):
-        response = self.api_client.notify_media_item_start_playing('{"schedule_id":1}', 5)
-        self.assertTrue(response.has_key("status"))
-        self.assertTrue(response.has_key("message"))
-
-
-if __name__ == '__main__':
-    unittest.main()
Binary file not shown.