Merge branch '2.2.x'

Conflicts:
	install_minimal/airtime-install
	python_apps/media-monitor/airtimefilemonitor/airtimemetadata.py
Martin Konecny 2012-10-26 00:31:12 -04:00
commit 6500c3312d
750 changed files with 45683 additions and 140897 deletions

View file

@@ -34,7 +34,7 @@ upload_recorded = 'upload-recorded/format/json/api_key/%%api_key%%/fileid/%%file
update_media_url = 'reload-metadata/format/json/api_key/%%api_key%%/mode/%%mode%%'
# URL to tell Airtime we want a listing of all files it knows about
list_all_db_files = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%'
list_all_db_files = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%/all/%%all%%'
# URL to tell Airtime we want a listing of all dirs it's watching (including the stor dir)
list_all_watched_dirs = 'list-all-watched-dirs/format/json/api_key/%%api_key%%'
@@ -51,6 +51,10 @@ set_storage_dir = 'set-storage-dir/format/json/api_key/%%api_key%%/path/%%path%%
# URL to tell Airtime about file system mount change
update_fs_mount = 'update-file-system-mount/format/json/api_key/%%api_key%%'
# URL to commit multiple updates from media monitor at the same time
reload_metadata_group = 'reload-metadata-group/format/json/api_key/%%api_key%%'
# URL to tell Airtime about file system mount change
handle_watched_dir_missing = 'handle-watched-dir-missing/format/json/api_key/%%api_key%%/dir/%%dir%%'
@@ -64,6 +68,8 @@ show_schedule_url = 'recorded-shows/format/json/api_key/%%api_key%%'
# URL to upload the recorded show's file to Airtime
upload_file_url = 'upload-file/format/json/api_key/%%api_key%%'
# URL to commit multiple updates from media monitor at the same time
#number of retries to upload file if connection problem
upload_retries = 3
@@ -90,10 +96,7 @@ get_media_url = 'get-media/file/%%file%%/api_key/%%api_key%%'
update_item_url = 'notify-schedule-group-play/api_key/%%api_key%%/schedule_id/%%schedule_id%%'
# Update whether an audio clip is currently playing.
update_start_playing_url = 'notify-media-item-start-play/api_key/%%api_key%%/media_id/%%media_id%%/schedule_id/%%schedule_id%%'
# ???
generate_range_url = 'generate_range_dp.php'
update_start_playing_url = 'notify-media-item-start-play/api_key/%%api_key%%/media_id/%%media_id%%/'
# URL to tell Airtime we want to get stream setting
get_stream_setting = 'get-stream-setting/format/json/api_key/%%api_key%%/'
@@ -109,3 +112,11 @@ update_source_status = 'update-source-status/format/json/api_key/%%api_key%%/sou
get_bootstrap_info = 'get-bootstrap-info/format/json/api_key/%%api_key%%'
get_files_without_replay_gain = 'get-files-without-replay-gain/api_key/%%api_key%%/dir_id/%%dir_id%%'
update_replay_gain_value = 'update-replay-gain-value/api_key/%%api_key%%'
notify_webstream_data = 'notify-webstream-data/api_key/%%api_key%%/media_id/%%media_id%%/format/json'
notify_liquidsoap_started = 'rabbitmq-do-push/api_key/%%api_key%%/format/json'

View file

@@ -1,39 +1,25 @@
###############################################################################
# This file holds the implementations for all the API clients.
#
# If you want to develop a new client, here are some suggestions:
# Get the fetch methods working first, then the push, then the liquidsoap notifier.
# You will probably want to create a script on your server side to automatically
# If you want to develop a new client, here are some suggestions: Get the fetch
# methods working first, then the push, then the liquidsoap notifier. You will
# probably want to create a script on your server side to automatically
# schedule a playlist one minute from the current time.
###############################################################################
import sys
import time
import urllib
import urllib2
import logging
import json
import os
from urlparse import urlparse
import base64
from configobj import ConfigObj
import string
import hashlib
import traceback
AIRTIME_VERSION = "2.1.3"
AIRTIME_VERSION = "2.2.0"
def api_client_factory(config, logger=None):
if logger != None:
temp_logger = logger
else:
temp_logger = logging.getLogger()
if config["api_client"] == "airtime":
return AirTimeApiClient(temp_logger)
else:
temp_logger.info('API Client "'+config["api_client"]+'" not supported. Please check your config file.\n')
sys.exit()
def to_unicode(obj, encoding='utf-8'):
if isinstance(obj, basestring):
if not isinstance(obj, unicode):
@@ -44,156 +30,131 @@ def encode_to(obj, encoding='utf-8'):
if isinstance(obj, unicode):
obj = obj.encode(encoding)
return obj
def convert_dict_value_to_utf8(md):
#list comprehension to convert all values of md to utf-8
return dict([(item[0], encode_to(item[1], "utf-8")) for item in md.items()])
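# A quick illustration (values hypothetical): keys are left untouched and
# every value comes back as a utf-8 byte string, e.g.
# convert_dict_value_to_utf8({u'MDATA_KEY_TITLE': u'Café'})
# returns {u'MDATA_KEY_TITLE': 'Caf\xc3\xa9'}.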
class ApiClientInterface:
# Implementation: optional
#
# Called from: beginning of all scripts
#
# Should exit the program if this version of pypo is not compatible with
# 3rd party software.
def is_server_compatible(self, verbose = True):
pass
# Implementation: Required
#
# Called from: fetch loop
#
# This is the main method you need to implement when creating a new API client.
# start and end are for testing purposes.
# start and end are strings in the format YYYY-DD-MM-hh-mm-ss
def get_schedule(self, start=None, end=None):
return 0, []
# Implementation: Required
#
# Called from: fetch loop
#
# This downloads the media from the server.
def get_media(self, src, dst):
pass
# Implementation: optional
# You don't actually have to implement this function for the liquidsoap playout to work.
#
# Called from: pypo_notify.py
#
# This is a callback from liquidsoap, we use this to notify about the
# currently playing *song*. We get passed a JSON string which we handed to
# liquidsoap in get_liquidsoap_data().
def notify_media_item_start_playing(self, data, media_id):
pass
# Implementation: optional
# You don't actually have to implement this function for the liquidsoap playout to work.
def generate_range_dp(self):
pass
# Implementation: optional
#
# Called from: push loop
#
# Return a dict of extra info you want to pass to liquidsoap
# You will be able to use this data in update_start_playing
def get_liquidsoap_data(self, pkey, schedule):
pass
def get_shows_to_record(self):
pass
def upload_recorded_show(self):
pass
def check_media_status(self, md5):
pass
def update_media_metadata(self, md):
pass
def list_all_db_files(self, dir_id):
pass
def list_all_watched_dirs(self):
pass
def add_watched_dir(self):
pass
def remove_watched_dir(self):
pass
def set_storage_dir(self):
pass
def register_component(self):
pass
def notify_liquidsoap_error(self, error_msg, stream_id):
pass
def notify_liquidsoap_connection(self, stream_id):
pass
# Put here whatever tests you want to run to make sure your API is working
def test(self):
pass
#def get_media_type(self, playlist):
# nil
################################################################################
# Airtime API Client
################################################################################
class AirTimeApiClient(ApiClientInterface):
class AirtimeApiClient():
def __init__(self, logger=None):
if logger != None:
self.logger = logger
# This is a little hacky fix so that I don't have to pass the config object
# everywhere AirtimeApiClient needs to be initialized
default_config = None
# The purpose of this custom constructor is to remember which config file
# it was called with, so that after the initial call:
# AirtimeApiClient.create_right_config('/path/to/config')
# all subsequent calls to create_right_config will use that config
# file
@staticmethod
def create_right_config(log=None,config_path=None):
if config_path: AirtimeApiClient.default_config = config_path
elif (not AirtimeApiClient.default_config):
raise ValueError("Cannot slip config_path attribute when it has \
never been passed yet")
return AirtimeApiClient( logger=None,
config_path=AirtimeApiClient.default_config )
def __init__(self, logger=None,config_path='/etc/airtime/api_client.cfg'):
if logger is None:
self.logger = logging
else:
self.logger = logging.getLogger()
self.logger = logger
# loading config file
try:
self.config = ConfigObj('/etc/airtime/api_client.cfg')
self.config = ConfigObj(config_path)
except Exception, e:
self.logger.error('Error loading config file: %s', e)
sys.exit(1)
def get_response_from_server(self, url):
def get_response_from_server(self, url, attempts=-1):
logger = self.logger
successful_response = False
while not successful_response:
try:
response = urllib2.urlopen(url).read()
successful_response = True
except IOError, e:
logger.error('Error Authenticating with remote server: %s', e)
if isinstance(url, urllib2.Request):
logger.debug(url.get_full_url())
else:
logger.debug(url)
except Exception, e:
logger.error('Couldn\'t connect to remote server. Is it running?')
logger.error("%s" % e)
if isinstance(url, urllib2.Request):
logger.debug(url.get_full_url())
else:
logger.debug(url)
#If the user passed in a positive attempts number then it will count down
#to 0 and we stop. If attempts was initially negative, then we retry
#without limit.
if attempts > 0:
attempts = attempts - 1
if attempts == 0:
successful_response = True
if not successful_response:
logger.error("Error connecting to server, waiting 5 seconds and trying again.")
time.sleep(5)
return response
def __get_airtime_version(self, verbose = True):
return response
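# Usage note (illustrative): the default attempts=-1 retries forever, one try
# every 5 seconds, while a bounded call such as
#     response = self.get_response_from_server(url, attempts=5)
# gives up after five failed tries; call sites wrap this in try/except since
# no response exists in that case.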
def get_response_into_file(self, url, block=True):
"""
This function will query the server and download its response directly
into a temporary file. This is useful in the situation where the
response from the server can be huge and we don't want to store it into
memory (potentially causing Python to use hundreds of MBs of memory).
By writing into a file we can then open this file later, and read data
a little bit at a time and be very memory efficient.
The return value of this function is the path of the temporary file.
Unless specified using block = False, this function will block until a
successful HTTP 200 response is received.
"""
logger = self.logger
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["version_url"])
successful_response = False
while not successful_response:
try:
path = urllib.urlretrieve(url)[0]
successful_response = True
except IOError, e:
logger.error('Error Authenticating with remote server: %s', e)
if not block:
raise
except Exception, e:
logger.error('Couldn\'t connect to remote server. Is it running?')
logger.error("%s" % e)
if not block:
raise
if not successful_response:
logger.error("Error connecting to server, waiting 5 seconds and trying again.")
time.sleep(5)
return path
def __get_airtime_version(self):
logger = self.logger
url = "http://%s:%s/%s/%s" % (self.config["base_url"],
str(self.config["base_port"]), self.config["api_base"],
self.config["version_url"])
logger.debug("Trying to contact %s", url)
url = url.replace("%%api_key%%", self.config["api_key"])
version = -1
response = None
try:
data = self.get_response_from_server(url)
logger.debug("Data: %s", data)
@@ -208,13 +169,13 @@ class AirTimeApiClient(ApiClientInterface):
def test(self):
logger = self.logger
status, items = self.get_schedule('2010-01-01-00-00-00', '2011-01-01-00-00-00')
items = self.get_schedule()[1]
schedule = items["playlists"]
logger.debug("Number of playlists found: %s", str(len(schedule)))
count = 1
for pkey in sorted(schedule.iterkeys()):
logger.debug("Playlist #%s",str(count))
count+=1
logger.debug("Playlist #%s", str(count))
count += 1
playlist = schedule[pkey]
for item in playlist["medias"]:
filename = urlparse(item["uri"])
@@ -222,9 +183,9 @@ class AirTimeApiClient(ApiClientInterface):
self.get_media(item["uri"], filename)
def is_server_compatible(self, verbose = True):
def is_server_compatible(self, verbose=True):
logger = self.logger
version = self.__get_airtime_version(verbose)
version = self.__get_airtime_version()
if (version == -1):
if (verbose):
logger.info('Unable to get Airtime version number.\n')
@@ -232,16 +193,17 @@ class AirTimeApiClient(ApiClientInterface):
elif (version[0:3] != AIRTIME_VERSION[0:3]):
if (verbose):
logger.info('Airtime version found: ' + str(version))
logger.info('pypo is at version ' +AIRTIME_VERSION+' and is not compatible with this version of Airtime.\n')
logger.info('pypo is at version ' + AIRTIME_VERSION +
' and is not compatible with this version of Airtime.\n')
return False
else:
if (verbose):
logger.info('Airtime version: ' + str(version))
logger.info('pypo is at version ' +AIRTIME_VERSION+' and is compatible with this version of Airtime.')
logger.info('pypo is at version ' + AIRTIME_VERSION + ' and is compatible with this version of Airtime.')
return True
def get_schedule(self, start=None, end=None):
def get_schedule(self):
logger = self.logger
# Construct the URL
@@ -270,28 +232,42 @@ class AirTimeApiClient(ApiClientInterface):
logger.info("try to download from %s to %s", src, dst)
src = src.replace("%%api_key%%", self.config["api_key"])
# check if file exists already before downloading again
filename, headers = urllib.urlretrieve(src, dst)
headers = urllib.urlretrieve(src, dst)[1]
logger.info(headers)
except Exception, e:
logger.error("%s", e)
def notify_liquidsoap_started(self):
logger = self.logger
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], \
str(self.config["base_port"]), \
self.config["api_base"], \
self.config["notify_liquidsoap_started"])
url = url.replace("%%api_key%%", self.config["api_key"])
self.get_response_from_server(url, attempts=5)
except Exception, e:
logger.error("Exception: %s", str(e))
"""
This is a callback from liquidsoap, we use this to notify about the
currently playing *song*. We get passed a JSON string which we handed to
liquidsoap in get_liquidsoap_data().
"""
def notify_media_item_start_playing(self, data, media_id):
def notify_media_item_start_playing(self, media_id):
logger = self.logger
response = ''
try:
schedule_id = data
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_start_playing_url"])
url = url.replace("%%media_id%%", str(media_id))
url = url.replace("%%schedule_id%%", str(schedule_id))
logger.debug(url)
url = url.replace("%%api_key%%", self.config["api_key"])
response = self.get_response_from_server(url)
response = self.get_response_from_server(url, attempts = 5)
response = json.loads(response)
logger.info("API-Status %s", response['status'])
logger.info("API-Message %s", response['message'])
@@ -302,12 +278,11 @@ class AirTimeApiClient(ApiClientInterface):
return response
def get_liquidsoap_data(self, pkey, schedule):
logger = self.logger
playlist = schedule[pkey]
data = dict()
try:
data["schedule_id"] = playlist['id']
except Exception, e:
except Exception:
data["schedule_id"] = 0
return data
@@ -342,7 +317,7 @@ class AirTimeApiClient(ApiClientInterface):
url = url.replace("%%api_key%%", self.config["api_key"])
for i in range(0, retries):
logger.debug("Upload attempt: %s", i+1)
logger.debug("Upload attempt: %s", i + 1)
try:
request = urllib2.Request(url, data, headers)
@@ -362,62 +337,72 @@ class AirTimeApiClient(ApiClientInterface):
time.sleep(retries_wait)
return response
def check_live_stream_auth(self, username, password, dj_type):
#logger = logging.getLogger()
"""
TODO: Why are we using print statements here? Possibly use logger that
is directed to stdout. -MK
"""
response = ''
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["check_live_stream_auth"])
url = url.replace("%%api_key%%", self.config["api_key"])
url = url.replace("%%username%%", username)
url = url.replace("%%djtype%%", dj_type)
url = url.replace("%%password%%", password)
response = self.get_response_from_server(url)
response = json.loads(response)
except Exception, e:
import traceback
top = traceback.format_exc()
print "Exception: %s", e
print "traceback: %s", top
print "traceback: %s", traceback.format_exc()
response = None
return response
def construct_url(self,config_action_key):
"""Constructs the base url for every request"""
# TODO : Make other methods in this class use this method.
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config[config_action_key])
url = url.replace("%%api_key%%", self.config["api_key"])
return url
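# Example (hypothetical config values): with base_url='localhost',
# base_port='80', api_base='api' and the list_all_watched_dirs entry shown
# earlier, construct_url("list_all_watched_dirs") returns
# "http://localhost:80/api/list-all-watched-dirs/format/json/api_key/<api_key>".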
def setup_media_monitor(self):
logger = self.logger
response = None
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["media_setup_url"])
url = url.replace("%%api_key%%", self.config["api_key"])
url = self.construct_url("media_setup_url")
response = self.get_response_from_server(url)
response = json.loads(response)
logger.info("Connected to Airtime Server. Json Media Storage Dir: %s", response)
except Exception, e:
response = None
logger.error("Exception: %s", e)
return response
def update_media_metadata(self, md, mode, is_record=False):
logger = self.logger
response = None
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_media_url"])
url = url.replace("%%api_key%%", self.config["api_key"])
url = self.construct_url("update_media_url")
url = url.replace("%%mode%%", mode)
self.logger.info("Requesting url %s" % url)
md = convert_dict_value_to_utf8(md)
data = urllib.urlencode(md)
req = urllib2.Request(url, data)
response = self.get_response_from_server(req)
logger.info("update media %s, filepath: %s, mode: %s", response, md['MDATA_KEY_FILEPATH'], mode)
response = json.loads(response)
self.logger.info("Received response:")
self.logger.info(response)
try: response = json.loads(response)
except ValueError:
logger.info("Could not parse json from response: '%s'" % response)
if("error" not in response and is_record):
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["upload_recorded"])
@@ -430,45 +415,99 @@ class AirTimeApiClient(ApiClientInterface):
logger.info("associate recorded %s", response)
except Exception, e:
response = None
import traceback
top = traceback.format_exc()
logger.error('Exception: %s', e)
logger.error("traceback: %s", top)
logger.error("traceback: %s", traceback.format_exc())
return response
def send_media_monitor_requests(self, action_list, dry=False):
"""
Send a gang of media monitor events at a time. action_list is a list
of dictionaries where every dictionary represents an action. Every
action dict must contain a 'mode' key that says what kind of action it
is, and an optional 'is_record' key that says whether the show was
recorded or not. The value of this key does not matter; only its
presence or absence does.
"""
logger = self.logger
try:
url = self.construct_url('reload_metadata_group')
# We are assuming that action_list is a list of dictionaries such
# that every dictionary represents the metadata of a file along
# with a special mode key that is the action to be executed by the
# controller.
valid_actions = []
# We could get a list of valid_actions in a much shorter way using
# filter but here we prefer a little more verbosity to help
# debugging
for action in action_list:
if not 'mode' in action:
self.logger.debug("Warning: Trying to send a request element without a 'mode'")
self.logger.debug("Here is the the request: '%s'" % str(action) )
else:
# Default 'is_record' to 0 whenever the key is absent from the
# action
if 'is_record' not in action:
action['is_record'] = 0
valid_actions.append(action)
# Note that we must prefix every key with: mdX where x is a number
# Is there a way to format the next line a little better? The
# parentheses make the code almost unreadable
md_list = dict((("md%d" % i), json.dumps(convert_dict_value_to_utf8(md))) \
for i,md in enumerate(valid_actions))
# For testing we add the following "dry" parameter to tell the
# controller not to actually do any changes
if dry: md_list['dry'] = 1
self.logger.info("Pumping out %d requests..." % len(valid_actions))
data = urllib.urlencode(md_list)
req = urllib2.Request(url, data)
response = self.get_response_from_server(req)
response = json.loads(response)
return response
except ValueError: raise
except Exception, e:
logger.error('Exception: %s', e)
logger.error("traceback: %s", traceback.format_exc())
raise
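# Sketch of a call (metadata values hypothetical), following the docstring
# above:
#     actions = [
#         {'mode': 'create', 'MDATA_KEY_FILEPATH': '/srv/airtime/stor/a.mp3'},
#         {'mode': 'delete', 'MDATA_KEY_FILEPATH': '/srv/airtime/stor/b.mp3',
#          'is_record': 1},
#     ]
#     api_client.send_media_monitor_requests(actions, dry=True)
# posts md0=..., md1=... (each value a JSON-encoded action) plus dry=1.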
#returns a list of all db files for a given directory in JSON format:
#{"files":["path/to/file1", "path/to/file2"]}
#Note that these are relative paths to the given directory. The full
#path is not returned.
def list_all_db_files(self, dir_id):
def list_all_db_files(self, dir_id, all_files=True):
logger = self.logger
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["list_all_db_files"])
url = url.replace("%%api_key%%", self.config["api_key"])
all_files = u"1" if all_files else u"0"
url = self.construct_url("list_all_db_files")
url = url.replace("%%dir_id%%", dir_id)
url = url.replace("%%all%%", all_files)
response = self.get_response_from_server(url)
response = json.loads(response)
except Exception, e:
response = None
response = {}
logger.error("Exception: %s", e)
return response
try:
return response["files"]
except KeyError:
self.logger.error("Could not find index 'files' in dictionary: %s",
str(response))
return []
def list_all_watched_dirs(self):
# Does this include the stor directory as well?
logger = self.logger
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["list_all_watched_dirs"])
url = url.replace("%%api_key%%", self.config["api_key"])
response = self.get_response_from_server(url)
response = json.loads(response)
except Exception, e:
response = None
logger.error("Exception: %s", e)
self.logger.debug(traceback.format_exc())
return response
@@ -479,7 +518,7 @@ class AirTimeApiClient(ApiClientInterface):
url = url.replace("%%api_key%%", self.config["api_key"])
url = url.replace("%%path%%", base64.b64encode(path))
response = self.get_response_from_server(url)
response = json.loads(response)
except Exception, e:
@@ -495,7 +534,7 @@ class AirTimeApiClient(ApiClientInterface):
url = url.replace("%%api_key%%", self.config["api_key"])
url = url.replace("%%path%%", base64.b64encode(path))
response = self.get_response_from_server(url)
response = json.loads(response)
except Exception, e:
@@ -519,13 +558,13 @@ class AirTimeApiClient(ApiClientInterface):
logger.error("Exception: %s", e)
return response
def get_stream_setting(self):
logger = self.logger
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["get_stream_setting"])
url = url.replace("%%api_key%%", self.config["api_key"])
url = url.replace("%%api_key%%", self.config["api_key"])
response = self.get_response_from_server(url)
response = json.loads(response)
except Exception, e:
@@ -535,51 +574,51 @@ class AirTimeApiClient(ApiClientInterface):
return response
"""
Purpose of this method is to contact the server with a "Hey it's me!" message.
This will allow the server to register the component's (component = media-monitor, pypo etc.)
ip address, and later use it to query monit via monit's http service, or download log files
via a http server.
Purpose of this method is to contact the server with a "Hey it's me!"
message. This will allow the server to register the component's (component
= media-monitor, pypo etc.) ip address, and later use it to query monit via
monit's http service, or download log files via a http server.
"""
def register_component(self, component):
logger = self.logger
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["register_component"])
url = url.replace("%%api_key%%", self.config["api_key"])
url = url.replace("%%component%%", component)
self.get_response_from_server(url)
except Exception, e:
logger.error("Exception: %s", e)
def notify_liquidsoap_status(self, msg, stream_id, time):
logger = self.logger
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_liquidsoap_status"])
url = url.replace("%%api_key%%", self.config["api_key"])
msg = msg.replace('/', ' ')
encoded_msg = urllib.quote(msg, '')
url = url.replace("%%msg%%", encoded_msg)
url = url.replace("%%stream_id%%", stream_id)
url = url.replace("%%boot_time%%", time)
response = self.get_response_from_server(url)
self.get_response_from_server(url, attempts = 5)
except Exception, e:
logger.error("Exception: %s", e)
def notify_source_status(self, sourcename, status):
logger = self.logger
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_source_status"])
url = url.replace("%%api_key%%", self.config["api_key"])
url = url.replace("%%sourcename%%", sourcename)
url = url.replace("%%status%%", status)
response = self.get_response_from_server(url)
self.get_response_from_server(url, attempts = 5)
except Exception, e:
logger.error("Exception: %s", e)
"""
This function updates status of mounted file system information on airtime
"""
@@ -587,64 +626,116 @@ class AirTimeApiClient(ApiClientInterface):
logger = self.logger
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["update_fs_mount"])
url = url.replace("%%api_key%%", self.config["api_key"])
added_data_string = string.join(added_dir, ',')
removed_data_string = string.join(removed_dir, ',')
map = [("added_dir", added_data_string),("removed_dir",removed_data_string)]
map = [("added_dir", added_data_string), ("removed_dir", removed_data_string)]
data = urllib.urlencode(map)
req = urllib2.Request(url, data)
response = self.get_response_from_server(req)
logger.info("update file system mount: %s", json.loads(response))
except Exception, e:
import traceback
top = traceback.format_exc()
logger.error('Exception: %s', e)
logger.error("traceback: %s", top)
logger.error("traceback: %s", traceback.format_exc())
"""
When a watched dir is missing (unplugged or something) on boot up, this function will get called
and will call the appropriate function on Airtime.
When a watched dir is missing (unplugged or something) on boot up, this
function will get called and will call the appropriate function on Airtime.
"""
def handle_watched_dir_missing(self, dir):
logger = self.logger
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["handle_watched_dir_missing"])
url = url.replace("%%api_key%%", self.config["api_key"])
url = url.replace("%%dir%%", base64.b64encode(dir))
response = self.get_response_from_server(url)
logger.info("update file system mount: %s", json.loads(response))
except Exception, e:
import traceback
top = traceback.format_exc()
logger.error('Exception: %s', e)
logger.error("traceback: %s", top)
"""
Retrieve information needed at bootstrap time
"""
logger.error("traceback: %s", traceback.format_exc())
def get_bootstrap_info(self):
"""
Retrieve information needed at bootstrap time
"""
logger = self.logger
try:
url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["get_bootstrap_info"])
url = url.replace("%%api_key%%", self.config["api_key"])
url = self.construct_url("get_bootstrap_info")
response = self.get_response_from_server(url)
response = json.loads(response)
logger.info("Bootstrap info retrieved %s", response)
except Exception, e:
response = None
import traceback
top = traceback.format_exc()
logger.error('Exception: %s', e)
logger.error("traceback: %s", top)
logger.error("traceback: %s", traceback.format_exc())
return response
def get_files_without_replay_gain_value(self, dir_id):
"""
Download a list of files that need to have their ReplayGain value
calculated. The return value is the parsed JSON response (the older
approach of downloading the list into a temporary file is commented out
below).
"""
#http://localhost/api/get-files-without-replay-gain/dir_id/1
logger = self.logger
try:
url = "http://%(base_url)s:%(base_port)s/%(api_base)s/%(get_files_without_replay_gain)s/" % (self.config)
url = url.replace("%%api_key%%", self.config["api_key"])
url = url.replace("%%dir_id%%", dir_id)
response = self.get_response_from_server(url)
logger.info("update file system mount: %s", response)
response = json.loads(response)
#file_path = self.get_response_into_file(url)
except Exception, e:
response = None
logger.error('Exception: %s', e)
logger.error("traceback: %s", traceback.format_exc())
return response
def update_replay_gain_values(self, pairs):
"""
'pairs' is a list of pairs (x, y), where x is the file's database
row id and y is the file's replay_gain value in dB
"""
#http://localhost/api/update-replay-gain-value/
try:
url = "http://%(base_url)s:%(base_port)s/%(api_base)s/%(update_replay_gain_value)s/" % (self.config)
url = url.replace("%%api_key%%", self.config["api_key"])
data = urllib.urlencode({'data': json.dumps(pairs)})
request = urllib2.Request(url, data)
self.logger.debug(self.get_response_from_server(request))
except Exception, e:
self.logger.error("Exception: %s", e)
raise
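# Example call (row ids and gain values hypothetical):
#     api_client.update_replay_gain_values([(5012, -3.2), (5013, 1.7)])
# which POSTs data=json.dumps([[5012, -3.2], [5013, 1.7]]) to the server.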
def notify_webstream_data(self, data, media_id):
"""
Update the server with the latest metadata we've received from the
external webstream
"""
try:
url = "http://%(base_url)s:%(base_port)s/%(api_base)s/%(notify_webstream_data)s/" % (self.config)
url = url.replace("%%media_id%%", str(media_id))
url = url.replace("%%api_key%%", self.config["api_key"])
data = urllib.urlencode({'data': data})
self.logger.debug(url)
request = urllib2.Request(url, data)
self.logger.info(self.get_response_from_server(request, attempts = 5))
except Exception, e:
self.logger.error("Exception: %s", e)

View file

@@ -14,12 +14,12 @@ def copy_dir(src_dir, dest_dir):
shutil.copytree(src_dir, dest_dir)
PATH_INI_FILE = '/etc/airtime/api_client.cfg'
current_script_dir = get_current_script_dir()
if not os.path.exists(PATH_INI_FILE):
shutil.copy('%s/../api_client.cfg'%current_script_dir, PATH_INI_FILE)
"""load config file"""
try:
config = ConfigObj("%s/../api_client.cfg" % current_script_dir)

View file

@@ -8,7 +8,7 @@ virtualenv_bin="/usr/lib/airtime/airtime_virtualenv/bin/"
media_monitor_path="/usr/lib/airtime/media-monitor/"
media_monitor_script="media_monitor.py"
api_client_path="/usr/lib/airtime/"
api_client_path="/usr/lib/airtime/:/usr/lib/airtime/media-monitor/mm2/"
cd ${media_monitor_path}
@@ -25,7 +25,6 @@ fi
export PYTHONPATH=${api_client_path}
export LC_ALL=`cat /etc/default/locale | grep "LANG=" | cut -d= -f2 | tr -d "\n\""`
# Note the -u when calling python! we need it to get unbuffered binary stdout and stderr
exec python -u ${media_monitor_path}${media_monitor_script} > /var/log/airtime/media-monitor/py-interpreter.log 2>&1
exec python ${media_monitor_path}${media_monitor_script} > /var/log/airtime/media-monitor/py-interpreter.log 2>&1
# EOF

View file

@@ -5,11 +5,8 @@ import time
import pyinotify
import shutil
from subprocess import Popen, PIPE
from api_clients import api_client
class AirtimeMediaMonitorBootstrap():
"""AirtimeMediaMonitorBootstrap constructor
Keyword Arguments:
@@ -29,11 +26,11 @@ class AirtimeMediaMonitorBootstrap():
self.curr_mtab_file = "/var/tmp/airtime/media-monitor/currMtab"
self.logger.info("Adding %s on watch list...", self.mount_file)
self.wm.add_watch(self.mount_file, pyinotify.ALL_EVENTS, rec=False, auto_add=False)
tmp_dir = os.path.dirname(self.curr_mtab_file)
if not os.path.exists(tmp_dir):
os.makedirs(tmp_dir)
# create currMtab file if it's the first time
if not os.path.exists(self.curr_mtab_file):
shutil.copy('/etc/mtab', self.curr_mtab_file)
@@ -43,8 +40,7 @@ class AirtimeMediaMonitorBootstrap():
went offline.
"""
def scan(self):
directories = self.get_list_of_watched_dirs();
directories = self.get_list_of_watched_dirs()
self.logger.info("watched directories found: %s", directories)
for id, dir in directories.iteritems():
@@ -60,12 +56,21 @@ class AirtimeMediaMonitorBootstrap():
return self.api_client.list_all_db_files(dir_id)
"""
returns the path and the database row id for this path for all watched directories. Also
returns the path and its corresponding database row id for all watched directories. Also
returns the Stor directory, which can be identified by its row id (always has value of "1")
Return type is a dictionary similar to:
{"1":"/srv/airtime/stor/"}
"""
def get_list_of_watched_dirs(self):
json = self.api_client.list_all_watched_dirs()
return json["dirs"]
try:
return json["dirs"]
except KeyError as e:
self.logger.error("Could not find index 'dirs' in dictionary: %s", str(json))
self.logger.error(e)
return {}
"""
This function takes in a path name provided by the database (and its corresponding row id)
@@ -77,10 +82,8 @@ class AirtimeMediaMonitorBootstrap():
dir -- pathname of the directory
"""
def sync_database_to_filesystem(self, dir_id, dir):
# TODO: is this line even necessary?
dir = os.path.normpath(dir)+"/"
"""
set to hold new and/or modified files. We use a set to make it ok if files are added
twice. This is because some of the tests for new files return result sets that are not
@@ -91,41 +94,39 @@ class AirtimeMediaMonitorBootstrap():
db_known_files_set = set()
files = self.list_db_files(dir_id)
for file in files['files']:
db_known_files_set.add(file)
for f in files:
db_known_files_set.add(f)
all_files = self.mmc.scan_dir_for_new_files(dir)
all_files = self.mmc.clean_dirty_file_paths( self.mmc.scan_dir_for_new_files(dir) )
all_files_set = set()
for file_path in all_files:
file_path = file_path.strip(" \n")
if len(file_path) > 0 and self.config.problem_directory not in file_path:
if self.config.problem_directory not in file_path:
all_files_set.add(file_path[len(dir):])
# if dir doesn't exists, update db
if not os.path.exists(dir):
self.pe.handle_watched_dir_missing(dir)
self.pe.handle_stdout_files(dir)
if os.path.exists(self.mmc.timestamp_file):
"""find files that have been modified since the last time media-monitor process started."""
time_diff_sec = time.time() - os.path.getmtime(self.mmc.timestamp_file)
command = "find '%s' -iname '*.ogg' -o -iname '*.mp3' -type f -readable -mmin -%d" % (dir, time_diff_sec/60+1)
command = self.mmc.find_command(directory=dir, extra_arguments=("-type f -readable -mmin -%d" % (time_diff_sec/60+1)))
else:
command = "find '%s' -iname '*.ogg' -o -iname '*.mp3' -type f -readable" % dir
command = self.mmc.find_command(directory=dir, extra_arguments="-type f -readable")
self.logger.debug(command)
stdout = self.mmc.exec_command(command)
if stdout is None:
self.logger.error("Unrecoverable error when syncing db to filesystem.")
return
new_files = stdout.splitlines()
if stdout is None:
new_files = []
else:
new_files = stdout.splitlines()
new_and_modified_files = set()
for file_path in new_files:
file_path = file_path.strip(" \n")
if len(file_path) > 0 and self.config.problem_directory not in file_path:
if self.config.problem_directory not in file_path:
new_and_modified_files.add(file_path[len(dir):])
"""
@@ -156,16 +157,12 @@ class AirtimeMediaMonitorBootstrap():
self.logger.debug(full_file_path)
self.pe.handle_removed_file(False, full_file_path)
for file_path in new_files_set:
self.logger.debug("new file")
full_file_path = os.path.join(dir, file_path)
self.logger.debug(full_file_path)
if os.path.exists(full_file_path):
self.pe.handle_created_file(False, full_file_path, os.path.basename(full_file_path))
for file_path in modified_files_set:
self.logger.debug("modified file")
full_file_path = "%s%s" % (dir, file_path)
self.logger.debug(full_file_path)
if os.path.exists(full_file_path):
self.pe.handle_modified_file(False, full_file_path, os.path.basename(full_file_path))
for file_set, debug_message, handle_attribute in [(new_files_set, "new file", "handle_created_file"),
(modified_files_set, "modified file", "handle_modified_file")]:
for file_path in file_set:
self.logger.debug(debug_message)
full_file_path = os.path.join(dir, file_path)
self.logger.debug(full_file_path)
if os.path.exists(full_file_path):
getattr(self.pe,handle_attribute)(False,full_file_path, os.path.basename(full_file_path))
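# Note: getattr(self.pe, handle_attribute) resolves to the same bound method
# as writing self.pe.handle_created_file / self.pe.handle_modified_file
# directly, so the loop dispatches each file set to its handler without
# duplicating the body.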

View file

@@ -1,65 +1,82 @@
# -*- coding: utf-8 -*-
import replaygain
import os
import hashlib
import mutagen
import logging
import math
import re
import traceback
from api_clients import api_client
"""
list of supported easy tags in mutagen version 1.20
['albumartistsort', 'musicbrainz_albumstatus', 'lyricist', 'releasecountry', 'date', 'performer', 'musicbrainz_albumartistid', 'composer', 'encodedby', 'tracknumber', 'musicbrainz_albumid', 'album', 'asin', 'musicbrainz_artistid', 'mood', 'copyright', 'author', 'media', 'length', 'version', 'artistsort', 'titlesort', 'discsubtitle', 'website', 'musicip_fingerprint', 'conductor', 'compilation', 'barcode', 'performer:*', 'composersort', 'musicbrainz_discid', 'musicbrainz_albumtype', 'genre', 'isrc', 'discnumber', 'musicbrainz_trmid', 'replaygain_*_gain', 'musicip_puid', 'artist', 'title', 'bpm', 'musicbrainz_trackid', 'arranger', 'albumsort', 'replaygain_*_peak', 'organization']
['albumartistsort', 'musicbrainz_albumstatus', 'lyricist', 'releasecountry',
'date', 'performer', 'musicbrainz_albumartistid', 'composer', 'encodedby',
'tracknumber', 'musicbrainz_albumid', 'album', 'asin', 'musicbrainz_artistid',
'mood', 'copyright', 'author', 'media', 'length', 'version', 'artistsort',
'titlesort', 'discsubtitle', 'website', 'musicip_fingerprint', 'conductor',
'compilation', 'barcode', 'performer:*', 'composersort', 'musicbrainz_discid',
'musicbrainz_albumtype', 'genre', 'isrc', 'discnumber', 'musicbrainz_trmid',
'replaygain_*_gain', 'musicip_puid', 'artist', 'title', 'bpm', 'musicbrainz_trackid',
'arranger', 'albumsort', 'replaygain_*_peak', 'organization']
"""
class AirtimeMetadata:
def __init__(self):
self.airtime2mutagen = {\
"MDATA_KEY_TITLE": "title",\
"MDATA_KEY_CREATOR": "artist",\
"MDATA_KEY_SOURCE": "album",\
"MDATA_KEY_GENRE": "genre",\
"MDATA_KEY_MOOD": "mood",\
"MDATA_KEY_TRACKNUMBER": "tracknumber",\
"MDATA_KEY_BPM": "bpm",\
"MDATA_KEY_LABEL": "organization",\
"MDATA_KEY_COMPOSER": "composer",\
"MDATA_KEY_ENCODER": "encodedby",\
"MDATA_KEY_CONDUCTOR": "conductor",\
"MDATA_KEY_YEAR": "date",\
"MDATA_KEY_URL": "website",\
"MDATA_KEY_ISRC": "isrc",\
"MDATA_KEY_COPYRIGHT": "copyright",\
"MDATA_KEY_TITLE": "title", \
"MDATA_KEY_CREATOR": "artist", \
"MDATA_KEY_SOURCE": "album", \
"MDATA_KEY_GENRE": "genre", \
"MDATA_KEY_MOOD": "mood", \
"MDATA_KEY_TRACKNUMBER": "tracknumber", \
"MDATA_KEY_BPM": "bpm", \
"MDATA_KEY_LABEL": "organization", \
"MDATA_KEY_COMPOSER": "composer", \
"MDATA_KEY_ENCODER": "encodedby", \
"MDATA_KEY_CONDUCTOR": "conductor", \
"MDATA_KEY_YEAR": "date", \
"MDATA_KEY_URL": "website", \
"MDATA_KEY_ISRC": "isrc", \
"MDATA_KEY_COPYRIGHT": "copyright", \
}
self.mutagen2airtime = {\
"title": "MDATA_KEY_TITLE",\
"artist": "MDATA_KEY_CREATOR",\
"album": "MDATA_KEY_SOURCE",\
"genre": "MDATA_KEY_GENRE",\
"mood": "MDATA_KEY_MOOD",\
"tracknumber": "MDATA_KEY_TRACKNUMBER",\
"bpm": "MDATA_KEY_BPM",\
"organization": "MDATA_KEY_LABEL",\
"composer": "MDATA_KEY_COMPOSER",\
"encodedby": "MDATA_KEY_ENCODER",\
"conductor": "MDATA_KEY_CONDUCTOR",\
"date": "MDATA_KEY_YEAR",\
"website": "MDATA_KEY_URL",\
"isrc": "MDATA_KEY_ISRC",\
"copyright": "MDATA_KEY_COPYRIGHT",\
"title": "MDATA_KEY_TITLE", \
"artist": "MDATA_KEY_CREATOR", \
"album": "MDATA_KEY_SOURCE", \
"genre": "MDATA_KEY_GENRE", \
"mood": "MDATA_KEY_MOOD", \
"tracknumber": "MDATA_KEY_TRACKNUMBER", \
"bpm": "MDATA_KEY_BPM", \
"organization": "MDATA_KEY_LABEL", \
"composer": "MDATA_KEY_COMPOSER", \
"encodedby": "MDATA_KEY_ENCODER", \
"conductor": "MDATA_KEY_CONDUCTOR", \
"date": "MDATA_KEY_YEAR", \
"website": "MDATA_KEY_URL", \
"isrc": "MDATA_KEY_ISRC", \
"copyright": "MDATA_KEY_COPYRIGHT", \
}
self.logger = logging.getLogger()
def get_md5(self, filepath):
f = open(filepath, 'rb')
m = hashlib.md5()
m.update(f.read())
md5 = m.hexdigest()
"""
Returns an md5 of the file located at filepath. Returns an empty string
if there was an error reading the file.
"""
try:
f = open(filepath, 'rb')
m = hashlib.md5()
m.update(f.read())
md5 = m.hexdigest()
f.close()
except Exception, e:
return ""
return md5
@@ -67,9 +84,9 @@ class AirtimeMetadata:
## return format hh:mm:ss.uuu
def format_length(self, mutagen_length):
t = float(mutagen_length)
h = int(math.floor(t/3600))
h = int(math.floor(t / 3600))
t = t % 3600
m = int(math.floor(t/60))
m = int(math.floor(t / 60))
s = t % 60
# will be ss.uuu
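# Worked example: mutagen_length = 3725.46 gives h = 1, t = 125.46, m = 2,
# s = 5.46, i.e. "01:02:05.46" once formatted.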
@@ -94,12 +111,12 @@ class AirtimeMetadata:
for key in m:
if key in self.airtime2mutagen:
value = m[key]
if value is not None:
value = unicode(value)
else:
value = unicode('');
#if len(value) > 0:
self.logger.debug("Saving key '%s' with value '%s' to file", key, value)
airtime_file[self.airtime2mutagen[key]] = value
@@ -120,35 +137,44 @@ class AirtimeMetadata:
return item
def get_md_from_file(self, filepath):
"""
Returns None if there was an error retrieving the metadata. Otherwise
returns a dictionary representing the file's metadata
"""
self.logger.info("getting info from filepath %s", filepath)
md = {}
replay_gain_val = replaygain.calculate_replay_gain(filepath)
self.logger.info('ReplayGain calculated as %s for %s' % (replay_gain_val, filepath))
md['MDATA_KEY_REPLAYGAIN'] = replay_gain_val
try:
md = {}
md5 = self.get_md5(filepath)
md['MDATA_KEY_MD5'] = md5
file_info = mutagen.File(filepath, easy=True)
except Exception, e:
self.logger.error("failed getting metadata from %s", filepath)
self.logger.error("Exception %s", e)
return None
#check if file has any metadata
if file_info is None:
return None
#check if file has any metadata
if file_info is not None:
for key in file_info.keys() :
if key in self.mutagen2airtime:
val = file_info[key]
try:
if val is not None and len(val) > 0 and val[0] is not None and len(val[0]) > 0:
md[self.mutagen2airtime[key]] = val[0]
except Exception, e:
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", traceback.format_exc())
for key in file_info.keys() :
if key in self.mutagen2airtime:
val = file_info[key]
try:
if val is not None and len(val) > 0 and val[0] is not None and len(val[0]) > 0:
md[self.mutagen2airtime[key]] = val[0]
except Exception, e:
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", traceback.format_exc())
if 'MDATA_KEY_TITLE' not in md:
#get rid of file extension from original name, name might have more than 1 '.' in it.
original_name = os.path.basename(filepath)
@@ -165,8 +191,6 @@ class AirtimeMetadata:
pass
if isinstance(md['MDATA_KEY_TRACKNUMBER'], basestring):
match = re.search('^(\d*/\d*)?', md['MDATA_KEY_TRACKNUMBER'])
try:
md['MDATA_KEY_TRACKNUMBER'] = int(md['MDATA_KEY_TRACKNUMBER'].split("/")[0], 10)
except Exception, e:
@@ -222,28 +246,23 @@ class AirtimeMetadata:
#end of db truncation checks.
try:
md['MDATA_KEY_BITRATE'] = getattr(file_info.info, "bitrate", "0")
except Exception as e:
self.logger.warn("Could not get Bitrate")
md['MDATA_KEY_BITRATE'] = "0"
try:
md['MDATA_KEY_SAMPLERATE'] = getattr(file_info.info, "sample_rate", "0")
except Exception as e:
self.logger.warn("Could not get Samplerate")
md['MDATA_KEY_SAMPLERATE'] = "0"
self.logger.info("Bitrate: %s , Samplerate: %s", md['MDATA_KEY_BITRATE'], md['MDATA_KEY_SAMPLERATE'])
try: md['MDATA_KEY_DURATION'] = self.format_length(file_info.info.length)
except Exception as e: self.logger.warn("File: '%s' raises: %s", filepath, str(e))
md['MDATA_KEY_BITRATE'] = getattr(file_info.info, "bitrate", 0)
md['MDATA_KEY_SAMPLERATE'] = getattr(file_info.info, "sample_rate", 0)
try: md['MDATA_KEY_MIME'] = file_info.mime[0]
except Exception as e: self.logger.warn("File: '%s' has no mime type", filepath, str(e))
md['MDATA_KEY_DURATION'] = self.format_length(getattr(file_info.info, "length", 0.0))
md['MDATA_KEY_MIME'] = ""
if len(file_info.mime) > 0:
md['MDATA_KEY_MIME'] = file_info.mime[0]
except Exception as e:
self.logger.warn(e)
if "mp3" in md['MDATA_KEY_MIME']:
md['MDATA_KEY_FTYPE'] = "audioclip"
elif "vorbis" in md['MDATA_KEY_MIME']:
md['MDATA_KEY_FTYPE'] = "audioclip"
else:
self.logger.error("File %s of mime type %s does not appear to be a valid vorbis or mp3 file." % (filepath, md['MDATA_KEY_MIME']))
return None
return md
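# Illustration (exact mime strings are mutagen's, assumed here): a file whose
# reported mime contains "mp3" (e.g. "audio/mp3") or "vorbis" (e.g.
# "audio/vorbis") gets md['MDATA_KEY_FTYPE'] = "audioclip"; any other mime
# makes get_md_from_file return None.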

View file

@@ -8,13 +8,11 @@ import traceback
# For RabbitMQ
from kombu.connection import BrokerConnection
from kombu.messaging import Exchange, Queue, Consumer, Producer
from kombu.messaging import Exchange, Queue, Consumer
import pyinotify
from pyinotify import Notifier
#from api_clients import api_client
from api_clients import api_client
from airtimemetadata import AirtimeMetadata
class AirtimeNotifier(Notifier):
@@ -38,6 +36,11 @@ class AirtimeNotifier(Notifier):
time.sleep(5)
def init_rabbit_mq(self):
"""
This function will attempt to connect to the RabbitMQ server and, if
successful, return 'True'. Returns 'False' otherwise.
"""
self.logger.info("Initializing RabbitMQ stuff")
try:
schedule_exchange = Exchange("airtime-media-monitor", "direct", durable=True, auto_delete=True)
@@ -53,13 +56,13 @@ class AirtimeNotifier(Notifier):
return True
"""
Messages received from RabbitMQ are handled here. These messages
instruct media-monitor of events such as a new directory being watched,
file metadata has been changed, or any other changes to the config of
media-monitor via the web UI.
"""
def handle_message(self, body, message):
"""
Messages received from RabbitMQ are handled here. These messages
instruct media-monitor of events such as a new directory being watched,
file metadata has been changed, or any other changes to the config of
media-monitor via the web UI.
"""
# ACK the message to take it off the queue
message.ack()
@@ -101,16 +104,12 @@ class AirtimeNotifier(Notifier):
self.bootstrap.sync_database_to_filesystem(new_storage_directory_id, new_storage_directory)
self.config.storage_directory = os.path.normpath(new_storage_directory)
self.config.imported_directory = os.path.normpath(new_storage_directory + '/imported')
self.config.organize_directory = os.path.normpath(new_storage_directory + '/organize')
self.config.imported_directory = os.path.normpath(os.path.join(new_storage_directory, 'imported'))
self.config.organize_directory = os.path.normpath(os.path.join(new_storage_directory, 'organize'))
self.mmc.ensure_is_dir(self.config.storage_directory)
self.mmc.ensure_is_dir(self.config.imported_directory)
self.mmc.ensure_is_dir(self.config.organize_directory)
self.mmc.is_readable(self.config.storage_directory, True)
self.mmc.is_readable(self.config.imported_directory, True)
self.mmc.is_readable(self.config.organize_directory, True)
for directory in [self.config.storage_directory, self.config.imported_directory, self.config.organize_directory]:
self.mmc.ensure_is_dir(directory)
self.mmc.is_readable(directory, True)
self.watch_directory(new_storage_directory)
elif m['event_type'] == "file_delete":
@@ -129,31 +128,29 @@ class AirtimeNotifier(Notifier):
self.logger.error("traceback: %s", traceback.format_exc())
"""
Update airtime with information about files discovered in our
watched directories.
event: a dict() object with the following attributes:
-filepath
-mode
-data
-is_recorded_show
"""
def update_airtime(self, event):
"""
Update airtime with information about files discovered in our
watched directories.
event: a dict() object with the following attributes:
-filepath
-mode
-data
-is_recorded_show
"""
try:
self.logger.info("updating filepath: %s ", event['filepath'])
filepath = event['filepath']
mode = event['mode']
md = {}
md['MDATA_KEY_FILEPATH'] = filepath
md['MDATA_KEY_FILEPATH'] = os.path.normpath(filepath)
if 'data' in event:
file_md = event['data']
md.update(file_md)
else:
file_md = None
data = None
if (os.path.exists(filepath) and (mode == self.config.MODE_CREATE)):
if file_md is None:
@@ -184,7 +181,7 @@ class AirtimeNotifier(Notifier):
self.api_client.update_media_metadata(md, mode)
elif (mode == self.config.MODE_DELETE):
self.api_client.update_media_metadata(md, mode)
elif (mode == self.config.MODE_DELETE_DIR):
self.api_client.update_media_metadata(md, mode)

View file

@@ -9,7 +9,6 @@ import difflib
import traceback
from subprocess import Popen, PIPE
import pyinotify
from pyinotify import ProcessEvent
from airtimemetadata import AirtimeMetadata
@@ -59,10 +58,10 @@ class AirtimeProcessEvent(ProcessEvent):
if "-unknown-path" in path:
unknown_path = path
pos = path.find("-unknown-path")
path = path[0:pos]+"/"
path = path[0:pos] + "/"
list = self.api_client.list_all_watched_dirs()
# case where the dir that is being watched is moved to somewhere
# case where the dir that is being watched is moved to somewhere
if path in list[u'dirs'].values():
self.logger.info("Requesting the airtime server to remove '%s'", path)
res = self.api_client.remove_watched_dir(path)
@@ -81,14 +80,14 @@ class AirtimeProcessEvent(ProcessEvent):
self.logger.info("Removing watch on: %s wd %s", unknown_path, wd)
self.wm.rm_watch(wd, rec=True)
self.file_events.append({'mode': self.config.MODE_DELETE_DIR, 'filepath': path})
def process_IN_DELETE_SELF(self, event):
#we only care about files that have been moved away from imported/ or organize/ dir
if event.path in self.config.problem_directory or event.path in self.config.organize_directory:
return
self.logger.info("event: %s", event)
path = event.path + '/'
if event.dir:
@@ -103,7 +102,7 @@ class AirtimeProcessEvent(ProcessEvent):
self.logger.info("%s removed from watch folder list successfully.", path)
else:
self.logger.info("Removing the watch folder failed: %s", res['msg']['error'])
def process_IN_CREATE(self, event):
if event.path in self.mount_file_dir:
return
@@ -111,18 +110,18 @@ class AirtimeProcessEvent(ProcessEvent):
if not event.dir:
# record the timestamp of the time on IN_CREATE event
self.create_dict[event.pathname] = time.time()
#event.dir: True if the event was raised against a directory.
#event.name: filename
#event.pathname: pathname (str): Concatenation of 'path' and 'name'.
# we used to use IN_CREATE event, but the IN_CREATE event gets fired before the
# copy was done. Hence, IN_CLOSE_WRITE is the correct one to handle.
# copy was done. Hence, IN_CLOSE_WRITE is the correct one to handle.
def process_IN_CLOSE_WRITE(self, event):
if event.path in self.mount_file_dir:
return
self.logger.info("event: %s", event)
self.logger.info("create_dict: %s", self.create_dict)
try:
del self.create_dict[event.pathname]
self.handle_created_file(event.dir, event.pathname, event.name)
@@ -130,8 +129,8 @@ class AirtimeProcessEvent(ProcessEvent):
pass
#self.logger.warn("%s does not exist in create_dict", event.pathname)
#Uncomment the above warning when we fix CC-3830 for 2.1.1
def handle_created_file(self, dir, pathname, name):
if not dir:
self.logger.debug("PROCESS_IN_CLOSE_WRITE: %s, name: %s, pathname: %s ", dir, name, pathname)
@@ -145,12 +144,12 @@ class AirtimeProcessEvent(ProcessEvent):
self.temp_files[pathname] = None
elif self.mmc.is_audio_file(name):
if self.mmc.is_parent_directory(pathname, self.config.organize_directory):
#file was created in /srv/airtime/stor/organize. Need to process and move
#to /srv/airtime/stor/imported
file_md = self.md_manager.get_md_from_file(pathname)
playable = self.mmc.test_file_playability(pathname)
if file_md and playable:
self.mmc.organize_new_file(pathname, file_md)
else:
@@ -182,7 +181,7 @@ class AirtimeProcessEvent(ProcessEvent):
if self.mmc.is_audio_file(name):
is_recorded = self.mmc.is_parent_directory(pathname, self.config.recorded_directory)
self.file_events.append({'filepath': pathname, 'mode': self.config.MODE_MODIFY, 'is_recorded_show': is_recorded})
# if change is detected on /etc/mtab, we check what mount (file system) was added/removed
# and act accordingly
def handle_mount_change(self):
@@ -192,41 +191,41 @@ class AirtimeProcessEvent(ProcessEvent):
shutil.move(self.curr_mtab_file, self.prev_mtab_file)
# create the file
shutil.copy(self.mount_file, self.curr_mtab_file)
d = difflib.Differ()
curr_fh = open(self.curr_mtab_file, 'r')
prev_fh = open(self.prev_mtab_file, 'r')
diff = list(d.compare(prev_fh.readlines(), curr_fh.readlines()))
added_mount_points = []
removed_mount_points = []
for dir in diff:
info = dir.split(' ')
if info[0] == '+':
added_mount_points.append(info[2])
elif info[0] == '-':
removed_mount_points.append(info[2])
self.logger.info("added: %s", added_mount_points)
self.logger.info("removed: %s", removed_mount_points)
# send current mount information to Airtime
self.api_client.update_file_system_mount(added_mount_points, removed_mount_points);
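# Illustration (mtab lines hypothetical): difflib prefixes changed lines with
# '+ ' or '- ', so "+ /dev/sdb1 /media/usb vfat rw 0 0".split(' ') yields
# ['+', '/dev/sdb1', '/media/usb', ...] and info[2] is the mount point.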
def handle_watched_dir_missing(self, dir):
self.api_client.handle_watched_dir_missing(dir);
#if a file is moved somewhere, this callback is run with details about
#where the file is being moved from. The corresponding process_IN_MOVED_TO
#callback is only called if the destination of the file is also in a watched
#directory.
def process_IN_MOVED_FROM(self, event):
#we don't care about files that have been moved from problem_directory
if event.path in self.config.problem_directory:
return
self.logger.info("process_IN_MOVED_FROM: %s", event)
if not event.dir:
if event.pathname in self.temp_files:
@@ -241,10 +240,10 @@ class AirtimeProcessEvent(ProcessEvent):
def process_IN_MOVED_TO(self, event):
self.logger.info("process_IN_MOVED_TO: %s", event)
# if /etc/mtab is modified
filename = self.mount_file_dir +"/mtab"
filename = self.mount_file_dir + "/mtab"
if event.pathname in filename:
self.handle_mount_change()
if event.path in self.config.problem_directory:
return
@@ -265,15 +264,15 @@ class AirtimeProcessEvent(ProcessEvent):
#to /srv/airtime/stor/imported
file_md = self.md_manager.get_md_from_file(pathname)
playable = self.mmc.test_file_playability(pathname)
if file_md and playable:
filepath = self.mmc.organize_new_file(pathname, file_md)
else:
#move to problem_files
self.mmc.move_to_problem_dir(pathname)
else:
filepath = event.pathname
@@ -283,23 +282,23 @@ class AirtimeProcessEvent(ProcessEvent):
#file's original location is from outside an inotify watched dir.
pathname = event.pathname
if self.mmc.is_parent_directory(pathname, self.config.organize_directory):
#file was created in /srv/airtime/stor/organize. Need to process and move
#to /srv/airtime/stor/imported
file_md = self.md_manager.get_md_from_file(pathname)
playable = self.mmc.test_file_playability(pathname)
if file_md and playable:
self.mmc.organize_new_file(pathname, file_md)
else:
#move to problem_files
self.mmc.move_to_problem_dir(pathname)
else:
#show moved from unwatched folder into a watched folder. Do not "organize".
@@ -309,33 +308,33 @@ class AirtimeProcessEvent(ProcessEvent):
#When we move a directory into a watched_dir, we only get a notification that the dir was created,
#and no additional information about files that came along with that directory.
#We need to scan the entire directory for files.
if event.cookie in self.cookies_IN_MOVED_FROM:
del self.cookies_IN_MOVED_FROM[event.cookie]
mode = self.config.MODE_MOVED
else:
mode = self.config.MODE_CREATE
files = self.mmc.scan_dir_for_new_files(event.pathname)
if self.mmc.is_parent_directory(event.pathname, self.config.organize_directory):
for pathname in files:
#file was created in /srv/airtime/stor/organize. Need to process and move
#to /srv/airtime/stor/imported
file_md = self.md_manager.get_md_from_file(pathname)
playable = self.mmc.test_file_playability(pathname)
if file_md and playable:
self.mmc.organize_new_file(pathname, file_md)
#self.file_events.append({'mode': mode, 'filepath': filepath, 'is_recorded_show': False})
else:
#move to problem_files
self.mmc.move_to_problem_dir(pathname)
else:
for file in files:
self.file_events.append({'mode': mode, 'filepath': file, 'is_recorded_show': False})
@@ -368,12 +367,12 @@ class AirtimeProcessEvent(ProcessEvent):
for event in self.file_events:
self.multi_queue.put(event)
self.mmc.touch_index_file()
self.file_events = []
#yield to worker thread
time.sleep(0)
#use items() because we are going to be modifying this
#dictionary while iterating over it.
for k, pair in self.cookies_IN_MOVED_FROM.items():
@@ -390,7 +389,7 @@ class AirtimeProcessEvent(ProcessEvent):
#it from the Airtime directory.
del self.cookies_IN_MOVED_FROM[k]
self.handle_removed_file(False, event.pathname)
# we don't want create_dict to grow infinitely
# this part is like a garbage collector
for k, t in self.create_dict.items():
@@ -402,16 +401,16 @@ class AirtimeProcessEvent(ProcessEvent):
# handling those cases. We are manually calling handle_created_file
# function.
if os.path.exists(k):
# check if file is open
# check if file is open
try:
command = "lsof "+k
command = "lsof " + k
#f = os.popen(command)
f = Popen(command, shell=True, stdout=PIPE).stdout
except Exception, e:
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", traceback.format_exc())
continue
if not f.readlines():
self.logger.info("Handling file: %s", k)
self.handle_created_file(False, k, os.path.basename(k))

View file

@ -10,20 +10,31 @@ import traceback
from subprocess import Popen, PIPE
from airtimemetadata import AirtimeMetadata
from api_clients import api_client
import pyinotify
class MediaMonitorCommon:
timestamp_file = "/var/tmp/airtime/media-monitor/last_index"
supported_file_formats = ['mp3', 'ogg']
def __init__(self, airtime_config, wm=None):
self.supported_file_formats = ['mp3', 'ogg']
self.logger = logging.getLogger()
self.config = airtime_config
self.md_manager = AirtimeMetadata()
self.wm = wm
def clean_dirty_file_paths(self, dirty_files):
""" clean dirty file paths by removing blanks and removing trailing/leading whitespace"""
return filter(lambda e: len(e) > 0, [ f.strip(" \n") for f in dirty_files ])
def find_command(self, directory, extra_arguments=""):
""" Builds a find command that respects supported_file_formats list
Note: Use single quotes to quote arguments """
ext_globs = [ "-iname '*.%s'" % ext for ext in self.supported_file_formats ]
find_glob = ' -o '.join(ext_globs)
return "find '%s' %s %s" % (directory, find_glob, extra_arguments)
def is_parent_directory(self, filepath, directory):
filepath = os.path.normpath(filepath)
directory = os.path.normpath(directory)
@ -31,7 +42,6 @@ class MediaMonitorCommon:
def is_temp_file(self, filename):
info = filename.split(".")
# if file doesn't have any extension, info[-2] throws exception
# Hence, checking length of info before we do anything
if(len(info) >= 2):
@ -41,20 +51,19 @@ class MediaMonitorCommon:
def is_audio_file(self, filename):
info = filename.split(".")
if len(info) < 2: return False # handle cases like filename="mp3"
return info[-1].lower() in self.supported_file_formats
#check if file is readable by "nobody"
def is_user_readable(self, filepath, euid='nobody', egid='nogroup'):
f = None
try:
uid = pwd.getpwnam(euid)[2]
gid = grp.getgrnam(egid)[2]
#drop root permissions and become "nobody"
os.setegid(gid)
os.seteuid(uid)
open(filepath)
f = open(filepath)
readable = True
except IOError:
self.logger.warn("File does not have correct permissions: '%s'", filepath)
@ -65,17 +74,16 @@ class MediaMonitorCommon:
self.logger.error("traceback: %s", traceback.format_exc())
finally:
#reset effective user to root
if f: f.close()
os.seteuid(0)
os.setegid(0)
return readable
# the function only changes the permission if it's not readable by www-data
def is_readable(self, item, is_dir):
try:
return self.is_user_readable(item, 'www-data', 'www-data') \
and self.is_user_readable(item, 'pypo', 'pypo')
except Exception, e:
return self.is_user_readable(item, 'www-data', 'www-data')
except Exception:
self.logger.warn(u"Failed to check owner/group/permissions for %s", item)
return False
@ -93,7 +101,7 @@ class MediaMonitorCommon:
will attempt to make the file world readable by modifying the file's permissions
as well as the file's parent directory permissions. We should only call this function
on files in Airtime's stor directory, not watched directories!
Returns True if we were able to make the file world readable. False otherwise.
"""
original_file = pathname
@ -110,7 +118,7 @@ class MediaMonitorCommon:
else:
pathname = dirname
is_dir = True
except Exception, e:
except Exception:
#something went wrong while we were trying to make the file world readable.
return False
@ -154,7 +162,7 @@ class MediaMonitorCommon:
try:
os.rmdir(dir)
self.cleanup_empty_dirs(os.path.dirname(dir))
except Exception, e:
except Exception:
#non-critical exception because we probably tried to delete a non-empty dir.
#Don't need to log this, let's just "return"
pass
@ -194,7 +202,7 @@ class MediaMonitorCommon:
break
except Exception, e:
self.logger.error("Exception %s", e)
self.logger.error("Exception %s", e)
return filepath
@ -202,7 +210,6 @@ class MediaMonitorCommon:
def create_file_path(self, original_path, orig_md):
storage_directory = self.config.storage_directory
try:
#will be in the format .ext
file_ext = os.path.splitext(original_path)[1].lower()
@ -242,7 +249,7 @@ class MediaMonitorCommon:
show_name = '-'.join(title[3:])
new_md = {}
new_md["MDATA_KEY_FILEPATH"] = original_path
new_md['MDATA_KEY_FILEPATH'] = os.path.normpath(original_path)
new_md['MDATA_KEY_TITLE'] = '%s-%s-%s:%s:%s' % (show_name, orig_md['MDATA_KEY_YEAR'], show_hour, show_min, show_sec)
self.md_manager.save_md_to_file(new_md)
@ -270,21 +277,24 @@ class MediaMonitorCommon:
try:
"""
File name charset encoding is UTF-8.
File name charset encoding is UTF-8.
"""
stdout = stdout.decode("UTF-8")
except Exception, e:
except Exception:
self.logger.error("Could not decode %s using UTF-8" % stdout)
stdout = None
return stdout
def scan_dir_for_new_files(self, dir):
command = 'find "%s" -iname "*.ogg" -o -iname "*.mp3" -type f -readable' % dir.replace('"', '\\"')
command = self.find_command(directory=dir, extra_arguments="-type f -readable")
self.logger.debug(command)
stdout = self.exec_command(command)
return stdout.splitlines()
if stdout is None:
return []
else:
return stdout.splitlines()
def touch_index_file(self):
dirname = os.path.dirname(self.timestamp_file)
@ -316,13 +326,10 @@ class MediaMonitorCommon:
if return_code != 0:
#print pathname for py-interpreter.log
print pathname
return (return_code == 0)
def move_to_problem_dir(self, source):
dest = os.path.join(self.config.problem_directory, os.path.basename(source))
try:
omask = os.umask(0)
os.rename(source, dest)

View file

@ -0,0 +1,132 @@
from subprocess import Popen, PIPE
import re
import os
import sys
import shutil
import tempfile
import logging
logger = logging.getLogger()
def get_process_output(command):
"""
Run subprocess and return stdout
"""
#logger.debug(command)
p = Popen(command, shell=True, stdout=PIPE)
return p.communicate()[0].strip()
def run_process(command):
"""
Run subprocess and return "return code"
"""
p = Popen(command, shell=True)
return os.waitpid(p.pid, 0)[1]
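A quick usage sketch of the two helpers above; note that run_process returns the raw status from os.waitpid, which is 0 only on a clean exit:

print get_process_output("echo hello")   # hello
print run_process("true")                # 0
print run_process("false") != 0          # True (non-zero exit status)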
def get_mime_type(file_path):
"""
Attempts to get the mime type but will return prematurely if the process
takes longer than 5 seconds. Note that this function should only be called
for files which do not have a mp3/ogg/flac extension.
"""
return get_process_output("timeout 5 file -b --mime-type %s" % file_path)
def duplicate_file(file_path):
"""
Makes a duplicate of the file and returns the path of this duplicate file.
"""
fsrc = open(file_path, 'r')
fdst = tempfile.NamedTemporaryFile(delete=False)
#logger.info("Copying %s to %s" % (file_path, fdst.name))
shutil.copyfileobj(fsrc, fdst)
fsrc.close()
fdst.close()
return fdst.name
def get_file_type(file_path):
file_type = None
if re.search(r'mp3$', file_path, re.IGNORECASE):
file_type = 'mp3'
elif re.search(r'og(g|a)$', file_path, re.IGNORECASE):
file_type = 'vorbis'
elif re.search(r'flac$', file_path, re.IGNORECASE):
file_type = 'flac'
else:
mime_type = get_mime_type(file_path)
if 'mpeg' in mime_type:
file_type = 'mp3'
elif 'ogg' in mime_type:
file_type = 'vorbis'
elif 'flac' in mime_type:
file_type = 'flac'
return file_type
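Usage sketch for the detection above (filenames are hypothetical): extension matching runs first and is case-insensitive; only unknown or extensionless names fall through to the mime-type probe.

print get_file_type("show.MP3")      # 'mp3'    (extension match)
print get_file_type("intro.oga")     # 'vorbis'
print get_file_type("/tmp/mystery")  # falls back to the 'file' mime probe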
def calculate_replay_gain(file_path):
"""
This function accepts files of type mp3/ogg/flac and returns a calculated
ReplayGain value in dB. If the value cannot be calculated for some reason,
then we default to 0 (Unity Gain).
http://wiki.hydrogenaudio.org/index.php?title=ReplayGain_1.0_specification
"""
try:
"""
Making a duplicate is required because the ReplayGain extraction
utilities we use make unwanted modifications to the file.
"""
search = None
temp_file_path = duplicate_file(file_path)
file_type = get_file_type(file_path)
if file_type:
if file_type == 'mp3':
if run_process("which mp3gain > /dev/null") == 0:
out = get_process_output('mp3gain -q "%s" 2> /dev/null' % temp_file_path)
search = re.search(r'Recommended "Track" dB change: (.*)', out)
else:
logger.warn("mp3gain not found")
elif file_type == 'vorbis':
if run_process("which vorbisgain > /dev/null && which ogginfo > /dev/null") == 0:
run_process('vorbisgain -q -f "%s" 2>/dev/null >/dev/null' % temp_file_path)
out = get_process_output('ogginfo "%s"' % temp_file_path)
search = re.search(r'REPLAYGAIN_TRACK_GAIN=(.*) dB', out)
else:
logger.warn("vorbisgain/ogginfo not found")
elif file_type == 'flac':
if run_process("which metaflac > /dev/null") == 0:
out = get_process_output('metaflac --show-tag=REPLAYGAIN_TRACK_GAIN "%s"' % temp_file_path)
search = re.search(r'REPLAYGAIN_TRACK_GAIN=(.*) dB', out)
else:
logger.warn("metaflac not found")
else:
pass
#no longer need the temp file, simply remove it.
os.remove(temp_file_path)
except Exception, e:
logger.error(str(e))
replay_gain = 0
if search:
matches = search.groups()
if len(matches) == 1:
replay_gain = matches[0]
return replay_gain
# Example of running from command line:
# python replay_gain.py /path/to/filename.mp3
if __name__ == "__main__":
print calculate_replay_gain(sys.argv[1])

View file

@ -1,11 +1,10 @@
# -*- coding: utf-8 -*-
from mediaconfig import AirtimeMediaConfig
import traceback
import os
class MediaMonitorWorkerProcess:
def __init__(self, config, mmc):
self.config = config
self.mmc = mmc

View file

@ -9,34 +9,35 @@ from configobj import ConfigObj
if os.geteuid() != 0:
print "Please run this as root."
sys.exit(1)
def get_current_script_dir():
current_script_dir = os.path.realpath(__file__)
index = current_script_dir.rindex('/')
return current_script_dir[0:index]
def copy_dir(src_dir, dest_dir):
if (os.path.exists(dest_dir)) and (dest_dir != "/"):
shutil.rmtree(dest_dir)
if not (os.path.exists(dest_dir)):
#print "Copying directory "+os.path.realpath(src_dir)+" to "+os.path.realpath(dest_dir)
shutil.copytree(src_dir, dest_dir)
def create_dir(path):
try:
os.makedirs(path)
# TODO : fix this, at least print the error
except Exception, e:
pass
def get_rand_string(length=10):
return ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(length))
PATH_INI_FILE = '/etc/airtime/media-monitor.cfg'
try:
# Absolute path this script is in
current_script_dir = get_current_script_dir()
if not os.path.exists(PATH_INI_FILE):
shutil.copy('%s/../media-monitor.cfg'%current_script_dir, PATH_INI_FILE)
@ -46,26 +47,24 @@ try:
except Exception, e:
print 'Error loading config file: ', e
sys.exit(1)
#copy monit files
shutil.copy('%s/../../monit/monit-airtime-generic.cfg'%current_script_dir, '/etc/monit/conf.d/')
subprocess.call('sed -i "s/\$admin_pass/%s/g" /etc/monit/conf.d/monit-airtime-generic.cfg' % get_rand_string(), shell=True)
shutil.copy('%s/../monit-airtime-media-monitor.cfg'%current_script_dir, '/etc/monit/conf.d/')
#create log dir
create_dir(config['log_dir'])
os.system("chown -R pypo:pypo "+config["log_dir"])
#copy python files
copy_dir("%s/.."%current_script_dir, config["bin_dir"])
#set executable permissions on python files
os.system("chown -R pypo:pypo "+config["bin_dir"])
# mm2
mm2_source = os.path.realpath(os.path.join(current_script_dir,
"../../media-monitor2"))
copy_dir(mm2_source, os.path.join( config["bin_dir"], "mm2" ))
#copy init.d script
shutil.copy(config["bin_dir"]+"/airtime-media-monitor-init-d", "/etc/init.d/airtime-media-monitor")
except Exception, e:
print e

View file

@ -16,7 +16,16 @@ rabbitmq_password = 'guest'
rabbitmq_vhost = '/'
############################################
# Media-Monitor preferences #
# Media-Monitor preferences #
############################################
check_filesystem_events = 5 #how long to queue up events performed on the files themselves.
check_airtime_events = 30 #how long to queue metadata input from airtime.
# MM2 only:
touch_interval = 5
chunking_number = 450
request_max_wait = 3.0
rmq_event_wait = 0.1
logpath = '/var/log/airtime/media-monitor/media-monitor.log'
index_path = '/var/tmp/airtime/media-monitor/last_index'
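ConfigObj hands these values back as strings, so the numeric settings need casting on the consumer side; a minimal sketch:

from configobj import ConfigObj
cfg = ConfigObj('/etc/airtime/media-monitor.cfg')
chunk = int(cfg['chunking_number'])      # '450' -> 450
wait  = float(cfg['request_max_wait'])   # '3.0' -> 3.0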

View file

@ -1,144 +1,11 @@
# -*- coding: utf-8 -*-
import time
import logging
import logging.config
import time
import sys
import os
import signal
import traceback
import locale
from configobj import ConfigObj
from api_clients import api_client as apc
import mm2.mm2 as mm2
from std_err_override import LogWriter
from multiprocessing import Process, Queue as mpQueue
global_cfg = '/etc/airtime/media-monitor.cfg'
api_client_cfg = '/etc/airtime/api_client.cfg'
logging_cfg = '/usr/lib/airtime/media-monitor/logging.cfg'
from threading import Thread
from pyinotify import WatchManager
from airtimefilemonitor.airtimenotifier import AirtimeNotifier
from airtimefilemonitor.mediamonitorcommon import MediaMonitorCommon
from airtimefilemonitor.airtimeprocessevent import AirtimeProcessEvent
from airtimefilemonitor.mediaconfig import AirtimeMediaConfig
from airtimefilemonitor.workerprocess import MediaMonitorWorkerProcess
from airtimefilemonitor.airtimemediamonitorbootstrap import AirtimeMediaMonitorBootstrap
def configure_locale():
logger.debug("Before %s", locale.nl_langinfo(locale.CODESET))
current_locale = locale.getlocale()
if current_locale[1] is None:
logger.debug("No locale currently set. Attempting to get default locale.")
default_locale = locale.getdefaultlocale()
if default_locale[1] is None:
logger.debug("No default locale exists. Let's try loading from /etc/default/locale")
if os.path.exists("/etc/default/locale"):
config = ConfigObj('/etc/default/locale')
lang = config.get('LANG')
new_locale = lang
else:
logger.error("/etc/default/locale could not be found! Please run 'sudo update-locale' from command-line.")
sys.exit(1)
else:
new_locale = default_locale
logger.info("New locale set to: %s", locale.setlocale(locale.LC_ALL, new_locale))
reload(sys)
sys.setdefaultencoding("UTF-8")
current_locale_encoding = locale.getlocale()[1].lower()
logger.debug("sys default encoding %s", sys.getdefaultencoding())
logger.debug("After %s", locale.nl_langinfo(locale.CODESET))
if current_locale_encoding not in ['utf-8', 'utf8']:
logger.error("Need a UTF-8 locale. Currently '%s'. Exiting..." % current_locale_encoding)
sys.exit(1)
# configure logging
try:
logging.config.fileConfig("logging.cfg")
#need to wait for Python 2.7 for this..
#logging.captureWarnings(True)
logger = logging.getLogger()
LogWriter.override_std_err(logger)
except Exception, e:
print 'Error configuring logging: ', e
sys.exit(1)
logger.info("\n\n*** Media Monitor bootup ***\n\n")
try:
configure_locale()
config = AirtimeMediaConfig(logger)
api_client = apc.api_client_factory(config.cfg)
api_client.register_component("media-monitor")
logger.info("Setting up monitor")
response = None
while response is None:
response = api_client.setup_media_monitor()
time.sleep(5)
storage_directory = response["stor"]
watched_dirs = response["watched_dirs"]
logger.info("Storage Directory is: %s", storage_directory)
config.storage_directory = os.path.normpath(storage_directory)
config.imported_directory = os.path.normpath(os.path.join(storage_directory, 'imported'))
config.organize_directory = os.path.normpath(os.path.join(storage_directory, 'organize'))
config.recorded_directory = os.path.normpath(os.path.join(storage_directory, 'recorded'))
config.problem_directory = os.path.normpath(os.path.join(storage_directory, 'problem_files'))
dirs = [config.imported_directory, config.organize_directory, config.recorded_directory, config.problem_directory]
for d in dirs:
if not os.path.exists(d):
os.makedirs(d, 02775)
multi_queue = mpQueue()
logger.info("Initializing event processor")
wm = WatchManager()
mmc = MediaMonitorCommon(config, wm=wm)
pe = AirtimeProcessEvent(queue=multi_queue, airtime_config=config, wm=wm, mmc=mmc, api_client=api_client)
bootstrap = AirtimeMediaMonitorBootstrap(logger, pe, api_client, mmc, wm, config)
bootstrap.scan()
notifier = AirtimeNotifier(wm, pe, read_freq=0, timeout=0, airtime_config=config, api_client=api_client, bootstrap=bootstrap, mmc=mmc)
notifier.coalesce_events()
#create 5 worker threads
wp = MediaMonitorWorkerProcess(config, mmc)
for i in range(5):
threadName = "Thread #%d" % i
t = Thread(target=wp.process_file_events, name=threadName, args=(multi_queue, notifier))
t.start()
wdd = notifier.watch_directory(storage_directory)
logger.info("Added watch to %s", storage_directory)
logger.info("wdd result %s", wdd[storage_directory])
for dir in watched_dirs:
wdd = notifier.watch_directory(dir)
logger.info("Added watch to %s", dir)
logger.info("wdd result %s", wdd[dir])
notifier.loop(callback=pe.notifier_loop_callback)
except KeyboardInterrupt:
notifier.stop()
logger.info("Keyboard Interrupt")
except Exception, e:
logger.error('Exception: %s', e)
logger.error("traceback: %s", traceback.format_exc())
mm2.main( global_cfg, api_client_cfg, logging_cfg )

View file

@ -0,0 +1,142 @@
# -*- coding: utf-8 -*-
import time
import logging.config
import sys
import os
import traceback
import locale
from configobj import ConfigObj
from api_clients import api_client as apc
from std_err_override import LogWriter
from multiprocessing import Queue as mpQueue
from threading import Thread
from pyinotify import WatchManager
from airtimefilemonitor.airtimenotifier import AirtimeNotifier
from airtimefilemonitor.mediamonitorcommon import MediaMonitorCommon
from airtimefilemonitor.airtimeprocessevent import AirtimeProcessEvent
from airtimefilemonitor.mediaconfig import AirtimeMediaConfig
from airtimefilemonitor.workerprocess import MediaMonitorWorkerProcess
from airtimefilemonitor.airtimemediamonitorbootstrap import AirtimeMediaMonitorBootstrap
def configure_locale():
logger.debug("Before %s", locale.nl_langinfo(locale.CODESET))
current_locale = locale.getlocale()
if current_locale[1] is None:
logger.debug("No locale currently set. Attempting to get default locale.")
default_locale = locale.getdefaultlocale()
if default_locale[1] is None:
logger.debug("No default locale exists. Let's try loading from /etc/default/locale")
if os.path.exists("/etc/default/locale"):
config = ConfigObj('/etc/default/locale')
lang = config.get('LANG')
new_locale = lang
else:
logger.error("/etc/default/locale could not be found! Please run 'sudo update-locale' from command-line.")
sys.exit(1)
else:
new_locale = default_locale
logger.info("New locale set to: %s", locale.setlocale(locale.LC_ALL, new_locale))
reload(sys)
sys.setdefaultencoding("UTF-8")
current_locale_encoding = locale.getlocale()[1].lower()
logger.debug("sys default encoding %s", sys.getdefaultencoding())
logger.debug("After %s", locale.nl_langinfo(locale.CODESET))
if current_locale_encoding not in ['utf-8', 'utf8']:
logger.error("Need a UTF-8 locale. Currently '%s'. Exiting..." % current_locale_encoding)
sys.exit(1)
# configure logging
try:
logging.config.fileConfig("logging.cfg")
#need to wait for Python 2.7 for this..
#logging.captureWarnings(True)
logger = logging.getLogger()
LogWriter.override_std_err(logger)
except Exception, e:
print 'Error configuring logging: ', e
sys.exit(1)
logger.info("\n\n*** Media Monitor bootup ***\n\n")
try:
configure_locale()
config = AirtimeMediaConfig(logger)
api_client = apc.AirtimeApiClient()
api_client.register_component("media-monitor")
logger.info("Setting up monitor")
response = None
while response is None:
response = api_client.setup_media_monitor()
time.sleep(5)
storage_directory = response["stor"]
watched_dirs = response["watched_dirs"]
logger.info("Storage Directory is: %s", storage_directory)
config.storage_directory = os.path.normpath(storage_directory)
config.imported_directory = os.path.normpath(os.path.join(storage_directory, 'imported'))
config.organize_directory = os.path.normpath(os.path.join(storage_directory, 'organize'))
config.recorded_directory = os.path.normpath(os.path.join(storage_directory, 'recorded'))
config.problem_directory = os.path.normpath(os.path.join(storage_directory, 'problem_files'))
dirs = [config.imported_directory, config.organize_directory, config.recorded_directory, config.problem_directory]
for d in dirs:
if not os.path.exists(d):
os.makedirs(d, 02775)
multi_queue = mpQueue()
logger.info("Initializing event processor")
wm = WatchManager()
mmc = MediaMonitorCommon(config, wm=wm)
pe = AirtimeProcessEvent(queue=multi_queue, airtime_config=config, wm=wm, mmc=mmc, api_client=api_client)
bootstrap = AirtimeMediaMonitorBootstrap(logger, pe, api_client, mmc, wm, config)
bootstrap.scan()
notifier = AirtimeNotifier(wm, pe, read_freq=0, timeout=0, airtime_config=config, api_client=api_client, bootstrap=bootstrap, mmc=mmc)
notifier.coalesce_events()
#create 5 worker threads
wp = MediaMonitorWorkerProcess(config, mmc)
for i in range(5):
threadName = "Thread #%d" % i
t = Thread(target=wp.process_file_events, name=threadName, args=(multi_queue, notifier))
t.start()
wdd = notifier.watch_directory(storage_directory)
logger.info("Added watch to %s", storage_directory)
logger.info("wdd result %s", wdd[storage_directory])
for dir in watched_dirs:
wdd = notifier.watch_directory(dir)
logger.info("Added watch to %s", dir)
logger.info("wdd result %s", wdd[dir])
notifier.loop(callback=pe.notifier_loop_callback)
except KeyboardInterrupt:
notifier.stop()
logger.info("Keyboard Interrupt")
except Exception, e:
logger.error('Exception: %s', e)
logger.error("traceback: %s", traceback.format_exc())

View file

@ -0,0 +1 @@

View file

@ -0,0 +1 @@

View file

@ -0,0 +1,111 @@
# -*- coding: utf-8 -*-
import media.monitor.process as md
from os.path import normpath
from media.monitor.pure import format_length, file_md5
with md.metadata('MDATA_KEY_DURATION') as t:
t.default(u'0.0')
t.depends('length')
t.translate(lambda k: format_length(k['length']))
with md.metadata('MDATA_KEY_MIME') as t:
t.default(u'')
t.depends('mime')
t.translate(lambda k: k['mime'].replace('-','/'))
with md.metadata('MDATA_KEY_BITRATE') as t:
t.default(u'')
t.depends('bitrate')
t.translate(lambda k: k['bitrate'])
with md.metadata('MDATA_KEY_SAMPLERATE') as t:
t.default(u'0')
t.depends('sample_rate')
t.translate(lambda k: k['sample_rate'])
with md.metadata('MDATA_KEY_FTYPE') as t:
t.depends('ftype') # i don't think this field even exists
t.default(u'audioclip')
t.translate(lambda k: k['ftype']) # but just in case
with md.metadata("MDATA_KEY_CREATOR") as t:
t.depends("artist")
# A little kludge to make sure that we have some value for when we parse
# MDATA_KEY_TITLE
t.default(u"")
t.max_length(512)
with md.metadata("MDATA_KEY_SOURCE") as t:
t.depends("album")
t.max_length(512)
with md.metadata("MDATA_KEY_GENRE") as t:
t.depends("genre")
t.max_length(64)
with md.metadata("MDATA_KEY_MOOD") as t:
t.depends("mood")
t.max_length(64)
with md.metadata("MDATA_KEY_TRACKNUMBER") as t:
t.depends("tracknumber")
with md.metadata("MDATA_KEY_BPM") as t:
t.depends("bpm")
t.max_length(8)
with md.metadata("MDATA_KEY_LABEL") as t:
t.depends("organization")
t.max_length(512)
with md.metadata("MDATA_KEY_COMPOSER") as t:
t.depends("composer")
t.max_length(512)
with md.metadata("MDATA_KEY_ENCODER") as t:
t.depends("encodedby")
t.max_length(512)
with md.metadata("MDATA_KEY_CONDUCTOR") as t:
t.depends("conductor")
t.max_length(512)
with md.metadata("MDATA_KEY_YEAR") as t:
t.depends("date")
t.max_length(16)
with md.metadata("MDATA_KEY_URL") as t:
t.depends("website")
with md.metadata("MDATA_KEY_ISRC") as t:
t.depends("isrc")
t.max_length(512)
with md.metadata("MDATA_KEY_COPYRIGHT") as t:
t.depends("copyright")
t.max_length(512)
with md.metadata("MDATA_KEY_FILEPATH") as t:
t.depends('path')
t.translate(lambda k: normpath(k['path']))
with md.metadata("MDATA_KEY_MD5") as t:
t.depends('path')
t.optional(False)
t.translate(lambda k: file_md5(k['path'], max_length=100))
# owner is handled differently (by events.py)
with md.metadata('MDATA_KEY_ORIGINAL_PATH') as t:
t.depends('original_path')
# MDATA_KEY_TITLE is the annoying special case
with md.metadata('MDATA_KEY_TITLE') as t:
# Need to know MDATA_KEY_CREATOR to know if show was recorded. Value is
# defaulted to "" from definitions above
t.depends('title','MDATA_KEY_CREATOR')
t.max_length(512)
with md.metadata('MDATA_KEY_LABEL') as t:
t.depends('label')
t.max_length(512)
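Registering an additional field would follow the same pattern; a hedged sketch with a hypothetical key and tag name:

with md.metadata('MDATA_KEY_COMMENT') as t:
    t.depends('comment')   # hypothetical easy-mutagen tag
    t.default(u'')
    t.max_length(512)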

View file

@ -0,0 +1,140 @@
# -*- coding: utf-8 -*-
from contextlib import contextmanager
from media.monitor.pure import truncate_to_length, toposort
import mutagen
class MetadataAbsent(Exception):
def __init__(self, name): self.name = name
def __str__(self): return "Could not obtain element '%s'" % self.name
class MetadataElement(object):
def __init__(self,name):
self.name = name
# "Sane" defaults
self.__deps = set()
self.__normalizer = lambda x: x
self.__optional = True
self.__default = None
self.__is_normalized = lambda _ : True
self.__max_length = -1
def max_length(self,l):
self.__max_length = l
def optional(self, setting):
self.__optional = setting
def is_optional(self):
return self.__optional
def depends(self, *deps):
self.__deps = set(deps)
def dependencies(self):
return self.__deps
def translate(self, f):
self.__translator = f
def is_normalized(self, f):
self.__is_normalized = f
def normalize(self, f):
self.__normalizer = f
def default(self,v):
self.__default = v
def get_default(self):
if hasattr(self.__default, '__call__'): return self.__default()
else: return self.__default
def has_default(self):
return self.__default is not None
def path(self):
return self.__path
def __slice_deps(self, d):
return dict( (k,v) for k,v in d.iteritems() if k in self.__deps)
def __str__(self):
return "%s(%s)" % (self.name, ' '.join(list(self.__deps)))
def read_value(self, path, original, running={}):
# If value is present and normalized then we don't touch it
if self.name in original:
v = original[self.name]
if self.__is_normalized(v): return v
else: return self.__normalizer(v)
# A dictionary slice with all the dependencies and their values
dep_slice_orig = self.__slice_deps(original)
dep_slice_running = self.__slice_deps(running)
full_deps = dict( dep_slice_orig.items()
+ dep_slice_running.items() )
# check if any dependencies are absent
if len(full_deps) != len(self.__deps) or len(self.__deps) == 0:
# If we have a default value then use that. Otherwise throw an
# exception
if self.has_default(): return self.get_default()
else: raise MetadataAbsent(self.name)
# We have all dependencies. Now for the actual parsing
r = self.__normalizer( self.__translator(full_deps) )
if self.__max_length != -1:
r = truncate_to_length(r, self.__max_length)
return r
def normalize_mutagen(path):
"""
Consumes a path and reads the metadata using mutagen. Normalizes some of
the metadata that isn't read through the mutagen hash
"""
m = mutagen.File(path, easy=True)
md = {}
for k,v in m.iteritems():
if type(v) is list: md[k] = v[0]
else: md[k] = v
# populate special metadata values
md['length'] = getattr(m.info, u'length', 0.0)
md['bitrate'] = getattr(m.info, 'bitrate', u'')
md['sample_rate'] = getattr(m.info, 'sample_rate', 0)
md['mime'] = m.mime[0] if len(m.mime) > 0 else u''
md['path'] = path
return md
class MetadataReader(object):
def __init__(self):
self.clear()
def register_metadata(self,m):
self.__mdata_name_map[m.name] = m
d = dict( (name,m.dependencies()) for name,m in
self.__mdata_name_map.iteritems() )
new_list = list( toposort(d) )
self.__metadata = [ self.__mdata_name_map[name] for name in new_list
if name in self.__mdata_name_map]
def clear(self):
self.__mdata_name_map = {}
self.__metadata = []
def read(self, path, muta_hash):
normalized_metadata = {}
for mdata in self.__metadata:
try:
normalized_metadata[mdata.name] = mdata.read_value(
path, muta_hash, normalized_metadata)
except MetadataAbsent:
if not mdata.is_optional(): raise
return normalized_metadata
global_reader = MetadataReader()
@contextmanager
def metadata(name):
t = MetadataElement(name)
yield t
global_reader.register_metadata(t)
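A minimal sketch of the reader machinery in isolation, using a hypothetical key and a hand-built hash in place of normalize_mutagen's output (the order of reads depends on the toposort helper from media.monitor.pure):

with metadata('EXAMPLE_KEY_BITRATE') as t:
    t.depends('bitrate')
    t.default(u'')
    t.translate(lambda k: k['bitrate'])

muta_hash = { 'bitrate': 128000, 'path': u'/tmp/x.mp3' }
print global_reader.read(u'/tmp/x.mp3', muta_hash)
# {'EXAMPLE_KEY_BITRATE': 128000}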

View file

@ -0,0 +1,207 @@
# -*- coding: utf-8 -*-
from kombu.messaging import Exchange, Queue, Consumer
from kombu.connection import BrokerConnection
from os.path import normpath
import json
import os
import copy
from media.monitor.exceptions import BadSongFile, InvalidMetadataElement
from media.monitor.metadata import Metadata
from media.monitor.log import Loggable
from media.monitor.syncdb import AirtimeDB
from media.monitor.exceptions import DirectoryIsNotListed
from media.monitor.bootstrap import Bootstrapper
from media.monitor.listeners import FileMediator
from api_clients import api_client as apc
class AirtimeNotifier(Loggable):
"""
AirtimeNotifier is responsible for intercepting RabbitMQ messages and
feeding them to the event_handler object it was initialized with. The only
thing it does to the messages is parse them from json
"""
def __init__(self, cfg, message_receiver):
self.cfg = cfg
try:
self.handler = message_receiver
self.logger.info("Initializing RabbitMQ message consumer...")
schedule_exchange = Exchange("airtime-media-monitor", "direct",
durable=True, auto_delete=True)
schedule_queue = Queue("media-monitor", exchange=schedule_exchange,
key="filesystem")
self.connection = BrokerConnection(cfg["rabbitmq_host"],
cfg["rabbitmq_user"], cfg["rabbitmq_password"],
cfg["rabbitmq_vhost"])
channel = self.connection.channel()
consumer = Consumer(channel, schedule_queue)
consumer.register_callback(self.handle_message)
consumer.consume()
self.logger.info("Initialized RabbitMQ consumer.")
except Exception as e:
self.logger.info("Failed to initialize RabbitMQ consumer")
self.logger.error(e)
def handle_message(self, body, message):
"""
Messages received from RabbitMQ are handled here. These messages
instruct media-monitor of events such as a new directory being watched,
file metadata has been changed, or any other changes to the config of
media-monitor via the web UI.
"""
message.ack()
self.logger.info("Received md from RabbitMQ: %s" % str(body))
m = json.loads(message.body)
# TODO : normalize any other keys that could be used to pass
# directories
if 'directory' in m: m['directory'] = normpath(m['directory'])
self.handler.message(m)
class AirtimeMessageReceiver(Loggable):
def __init__(self, cfg, manager):
self.dispatch_table = {
'md_update' : self.md_update,
'new_watch' : self.new_watch,
'remove_watch' : self.remove_watch,
'rescan_watch' : self.rescan_watch,
'change_stor' : self.change_storage,
'file_delete' : self.file_delete,
}
self.cfg = cfg
self.manager = manager
def message(self, msg):
"""
This method is called by an AirtimeNotifier instance that
consumes the RabbitMQ events that trigger this. The method
returns True when the event was executed and False when it wasn't.
"""
msg = copy.deepcopy(msg)
if msg['event_type'] in self.dispatch_table:
evt = msg['event_type']
del msg['event_type']
self.logger.info("Handling RabbitMQ message: '%s'" % evt)
self._execute_message(evt,msg)
return True
else:
self.logger.info("Received invalid message with 'event_type': '%s'"
% msg['event_type'])
self.logger.info("Message details: %s" % str(msg))
return False
def _execute_message(self,evt,message):
self.dispatch_table[evt](message)
def __request_now_bootstrap(self, directory_id=None, directory=None,
all_files=True):
if (not directory_id) and (not directory):
raise ValueError("You must provide either directory_id or \
directory")
sdb = AirtimeDB(apc.AirtimeApiClient.create_right_config())
if directory : directory = os.path.normpath(directory)
if directory_id == None : directory_id = sdb.to_id(directory)
if directory == None : directory = sdb.to_directory(directory_id)
try:
bs = Bootstrapper( sdb, self.manager.watch_signal() )
bs.flush_watch( directory=directory, last_ran=self.cfg.last_ran() )
except Exception as e:
self.fatal_exception("Exception bootstrapping: (dir,id)=(%s,%s)" %
(directory, directory_id), e)
raise DirectoryIsNotListed(directory, cause=e)
def md_update(self, msg):
self.logger.info("Updating metadata for: '%s'" %
msg['MDATA_KEY_FILEPATH'])
md_path = msg['MDATA_KEY_FILEPATH']
try: Metadata.write_unsafe(path=md_path, md=msg)
except BadSongFile as e:
self.logger.info("Cannot find metadata file: '%s'" % e.path)
except InvalidMetadataElement as e:
self.logger.info("Metadata instance not supported for this file '%s'" \
% e.path)
self.logger.info(str(e))
except Exception as e:
# TODO : add md_path to problem path or something?
self.fatal_exception("Unknown error when writing metadata to: '%s'"
% md_path, e)
def new_watch(self, msg, restart=False):
msg['directory'] = normpath(msg['directory'])
self.logger.info("Creating watch for directory: '%s'" %
msg['directory'])
if not os.path.exists(msg['directory']):
try: os.makedirs(msg['directory'])
except Exception as e:
self.fatal_exception("Failed to create watched dir '%s'" %
msg['directory'],e)
else:
self.logger.info("Created new watch directory: '%s'" %
msg['directory'])
self.new_watch(msg)
else:
self.__request_now_bootstrap( directory=msg['directory'],
all_files=restart)
self.manager.add_watch_directory(msg['directory'])
def remove_watch(self, msg):
msg['directory'] = normpath(msg['directory'])
self.logger.info("Removing watch from directory: '%s'" %
msg['directory'])
self.manager.remove_watch_directory(msg['directory'])
def rescan_watch(self, msg):
self.logger.info("Trying to rescan watched directory: '%s'" %
msg['directory'])
try:
# id is always an integer but in the dictionary the key is always a
# string
self.__request_now_bootstrap( unicode(msg['id']) )
except DirectoryIsNotListed as e:
self.fatal_exception("Bad rescan request", e)
except Exception as e:
self.fatal_exception("Bad rescan request. Unknown error.", e)
else:
self.logger.info("Successfully re-scanned: '%s'" % msg['directory'])
def change_storage(self, msg):
new_storage_directory = msg['directory']
self.manager.change_storage_root(new_storage_directory)
for to_bootstrap in [ self.manager.get_recorded_path(),
self.manager.get_imported_path() ]:
self.__request_now_bootstrap( directory=to_bootstrap )
def file_delete(self, msg):
# Deletes should be requested only from imported folder but we
# don't verify that. Security risk perhaps?
# we only delete if we are passed the special delete flag that is
# necessary with every "delete_file" request
if not msg['delete']:
self.logger.info("No clippy confirmation, ignoring event. \
Out of curiosity we will print some details.")
self.logger.info(msg)
return
# TODO : Add validation that we are deleting a file that's under our
# surveillance. We don't want to delete some random system file.
if os.path.exists(msg['filepath']):
try:
self.logger.info("Attempting to delete '%s'" %
msg['filepath'])
# We use FileMediator to ignore any paths with
# msg['filepath'] so that we do not send a duplicate delete
# request that we'd normally get from pyinotify. But right
# now event contractor would take care of this sort of
# thing anyway so this might not be necessary after all
FileMediator.ignore(msg['filepath'])
os.unlink(msg['filepath'])
# Verify deletion:
if not os.path.exists(msg['filepath']):
self.logger.info("Successfully deleted: '%s'" %
msg['filepath'])
except Exception as e:
self.fatal_exception("Failed to delete '%s'" % msg['filepath'],
e)
else: # validation for filepath existence failed
self.logger.info("Attempting to delete file '%s' that does not \
exist. Full request:" % msg['filepath'])
self.logger.info(msg)
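A hedged sketch of the dispatch flow above: the receiver looks up 'event_type' in its table and hands the rest of the payload to the matching handler (the manager stub here is hypothetical):

class StubManager(object):
    def remove_watch_directory(self, d): print "unwatching", d

recv = AirtimeMessageReceiver(cfg=None, manager=StubManager())
recv.message({ 'event_type' : u'remove_watch',
               'directory'  : u'/srv/music/' })
# logs "Handling RabbitMQ message: 'remove_watch'" and then calls
# StubManager.remove_watch_directory(u'/srv/music')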

View file

@ -0,0 +1,62 @@
import os
from pydispatch import dispatcher
from media.monitor.events import NewFile, DeleteFile, ModifyFile
from media.monitor.log import Loggable
import media.monitor.pure as mmp
class Bootstrapper(Loggable):
"""
Bootstrapper reads all the info in the filesystem, flushes organize events
and watch events
"""
def __init__(self,db,watch_signal):
"""
db - AirtimeDB object; a small layer over the api client
last_ran - last time the program was run.
watch_signal - the signal to send events on for every file.
"""
self.db = db
self.watch_signal = watch_signal
def flush_all(self, last_ran):
"""
Bootstrap every single watched directory. Only useful at startup. Note
that because of the way list_directories works we also flush the import
directory as well.
"""
for d in self.db.list_storable_paths(): self.flush_watch(d, last_ran)
def flush_watch(self, directory, last_ran, all_files=False):
"""
Flush a single watch/imported directory. Useful when wanting to
rescan, or add a watched/imported directory
"""
songs = set([])
added = modded = deleted = 0
for f in mmp.walk_supported(directory, clean_empties=False):
songs.add(f)
# We decide whether to update a file's metadata by checking its
# system modification date. If it's above the value last_ran,
# which is passed to us, that means media monitor wasn't aware when
# these changes occurred in the filesystem, hence it will send the
# correct events to sync the database with the filesystem
if os.path.getmtime(f) > last_ran:
modded += 1
dispatcher.send(signal=self.watch_signal, sender=self,
event=ModifyFile(f))
db_songs = set(( song for song in self.db.directory_get_files(directory,
all_files)
if mmp.sub_path(directory,song) ))
# Get all the files that are in the database but not in the file
# system. These are the files marked for deletion
for to_delete in db_songs.difference(songs):
dispatcher.send(signal=self.watch_signal, sender=self,
event=DeleteFile(to_delete))
deleted += 1
for to_add in songs.difference(db_songs):
dispatcher.send(signal=self.watch_signal, sender=self,
event=NewFile(to_add))
added += 1
self.logger.info( "Flushed watch directory (%s). \
(added, modified, deleted) = (%d, %d, %d)"
% (directory, added, modded, deleted) )
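The add/delete decision above is plain set arithmetic; a toy illustration with hypothetical paths:

fs_songs = set([u'/watch/a.mp3', u'/watch/b.mp3'])   # walked from disk
db_songs = set([u'/watch/b.mp3', u'/watch/c.mp3'])   # listed by Airtime
print db_songs.difference(fs_songs)   # c.mp3 -> DeleteFile event
print fs_songs.difference(db_songs)   # a.mp3 -> NewFile event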

View file

@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
import os
import copy
from configobj import ConfigObj
from media.monitor.exceptions import NoConfigFile, ConfigAccessViolation
import media.monitor.pure as mmp
class MMConfig(object):
def __init__(self, path):
if not os.path.exists(path): raise NoConfigFile(path)
self.cfg = ConfigObj(path)
def __getitem__(self, key):
"""
We always return a copy of the config item to prevent callers from
doing any modifications through the returned object's methods
"""
return copy.deepcopy(self.cfg[key])
def __setitem__(self, key, value):
"""
This method exists to prevent anybody from messing with the config file;
any settings made should be done through MMConfig's instance methods
"""
raise ConfigAccessViolation(key)
def save(self): self.cfg.write()
def last_ran(self):
"""
Returns the last time media monitor was run by looking at the time when
the file at 'index_path' was modified
"""
return mmp.last_modified(self.cfg['index_path'])
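Read access hands out deep copies and direct writes raise; a quick sketch using the stock config path from the packaging scripts above:

cfg = MMConfig('/etc/airtime/media-monitor.cfg')
print cfg['index_path']                    # a copy, safe to mutate
try:
    cfg['index_path'] = '/tmp/elsewhere'   # direct writes are forbidden
except ConfigAccessViolation, e:
    print e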

View file

@ -0,0 +1,40 @@
from media.monitor.log import Loggable
from media.monitor.events import DeleteFile
class EventContractor(Loggable):
def __init__(self):
self.store = {}
def event_registered(self, evt):
"""
Returns True if the event is registered, which means that there is
another "unpacked" event somewhere out there with the same path
"""
return evt.path in self.store
def get_old_event(self, evt):
"""
get the previously registered event with the same path as 'evt'
"""
return self.store[ evt.path ]
def register(self, evt):
if self.event_registered(evt):
ev_proxy = self.get_old_event(evt)
if ev_proxy.same_event(evt):
ev_proxy.merge_proxy(evt)
return False
# delete overrides any other event
elif evt.is_event(DeleteFile):
ev_proxy.merge_proxy(evt)
return False
else:
ev_proxy.run_hook()
ev_proxy.reset_hook()
self.store[ evt.path ] = evt
evt.set_pack_hook( lambda : self.__unregister(evt) )
return True
def __unregister(self, evt):
del self.store[evt.path]
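A sketch of the merging behaviour using the event classes from events.py (the path is hypothetical, and owner lookups are assumed to fall back to a default):

ec = EventContractor()
e1 = ModifyFile(u'/srv/music/track.mp3').proxify()
e2 = ModifyFile(u'/srv/music/track.mp3').proxify()
print ec.register(e1)   # True  -- first event for this path is stored
print ec.register(e2)   # False -- same event type, merged into e1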

View file

@ -0,0 +1,20 @@
import socket
from media.monitor.log import Loggable
from media.monitor.toucher import RepeatTimer
class EventDrainer(Loggable):
"""
Flushes events that are sent from Airtime over RabbitMQ every
certain amount of time
"""
def __init__(self, connection, interval=1):
def cb():
# TODO : make 0.3 parameter configurable
try : connection.drain_events(timeout=0.3)
except socket.timeout : pass
except Exception as e :
self.fatal_exception("Error flushing events", e)
t = RepeatTimer(interval, cb)
t.daemon = True
t.start()
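Wiring it up looks roughly like this (broker parameters are illustrative):

from kombu.connection import BrokerConnection
conn = BrokerConnection('localhost', 'guest', 'guest', '/')
EventDrainer(conn, interval=1)   # drain pending messages once a second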

View file

@ -0,0 +1,291 @@
# -*- coding: utf-8 -*-
import os
import abc
import media.monitor.pure as mmp
import media.monitor.owners as owners
from media.monitor.pure import LazyProperty
from media.monitor.metadata import Metadata
from media.monitor.log import Loggable
from media.monitor.exceptions import BadSongFile
class PathChannel(object):
"""
Simple struct to hold a 'signal' string and a related 'path'. Basically
used as a named tuple
"""
def __init__(self, signal, path):
self.signal = signal
self.path = path
# TODO : Move this to its own file. Also possibly unsingleton it and use it as
# a simple module just like m.m.owners
class EventRegistry(object):
"""
This class's main use is to keep track of all events with a cookie attribute.
This is done mainly because some events must be 'morphed' into other events
because we later detect that they are move events instead of delete events.
"""
registry = {}
@staticmethod
def register(evt): EventRegistry.registry[evt.cookie] = evt
@staticmethod
def unregister(evt): del EventRegistry.registry[evt.cookie]
@staticmethod
def registered(evt): return evt.cookie in EventRegistry.registry
@staticmethod
def matching(evt):
event = EventRegistry.registry[evt.cookie]
# Want to disallow accessing the same event twice
EventRegistry.unregister(event)
return event
def __init__(self,*args,**kwargs):
raise Exception("You can instantiate this class. Must only use class \
methods")
class EventProxy(Loggable):
"""
A container object for instances of BaseEvent (or its subclasses) used by
the event contractor
"""
def __init__(self, orig_evt):
self.orig_evt = orig_evt
self.evt = orig_evt
self.reset_hook()
if hasattr(orig_evt, 'path'): self.path = orig_evt.path
def set_pack_hook(self, l):
self._pack_hook = l
def reset_hook(self):
self._pack_hook = lambda : None
def run_hook(self):
self._pack_hook()
def safe_pack(self):
self.run_hook()
# make sure that cleanup hook is never called twice for the same event
self.reset_hook()
return self.evt.safe_pack()
def merge_proxy(self, proxy):
self.evt = proxy.evt
def is_event(self, real_event):
return isinstance(self.evt, real_event)
def same_event(self, proxy):
return self.evt.__class__ == proxy.evt.__class__
class HasMetaData(object):
"""
Any class that inherits from this class gains the metadata attribute that
loads metadata from the class's 'path' attribute. This is done lazily so
there is no performance penalty to inheriting from this and subsequent
calls to metadata are cached
"""
__metaclass__ = abc.ABCMeta
@LazyProperty
def metadata(self): return Metadata(self.path)
class BaseEvent(Loggable):
__metaclass__ = abc.ABCMeta
def __init__(self, raw_event):
# TODO : clean up this idiotic hack
# we should use keyword constructors instead of this behaviour checking
# bs to initialize BaseEvent
if hasattr(raw_event,"pathname"):
self._raw_event = raw_event
self.path = os.path.normpath(raw_event.pathname)
else: self.path = raw_event
self.owner = owners.get_owner(self.path)
self._pack_hook = lambda: None # no op
# TODO : delete this method later
def reset_hook(self):
"""
Resets the hook that is called after an event is packed. Before
resetting the hook we execute it to make sure that whatever cleanup
operations were queued are executed.
"""
self._pack_hook()
self._pack_hook = lambda: None
def exists(self): return os.path.exists(self.path)
@LazyProperty
def cookie(self): return getattr( self._raw_event, 'cookie', None )
def __str__(self):
return "Event(%s). Path(%s)" % ( self.path, self.__class__.__name__)
# TODO : delete this method later
def add_safe_pack_hook(self,k):
"""
adds a callable object (function) that will be called after the event
has been "safe_packed"
"""
self._pack_hook = k
def proxify(self):
return EventProxy(self)
# As opposed to unsafe_pack...
def safe_pack(self):
"""
returns exceptions instead of throwing them to be consistent with
events that must catch their own BadSongFile exceptions since they
generate a set of exceptions instead of a single one
"""
try:
self._pack_hook()
ret = self.pack()
# Remove owner of this file only after packing. Otherwise packing
# will not serialize the owner correctly into the airtime request
owners.remove_file_owner(self.path)
return ret
except BadSongFile as e: return [e]
# nothing to see here, please move along
def morph_into(self, evt):
self.logger.info("Morphing %s into %s" % ( str(self), str(evt) ) )
self._raw_event = evt._raw_event
self.path = evt.path
self.__class__ = evt.__class__
# Clean up old hook and transfer the new events hook
self.reset_hook()
self.add_safe_pack_hook( evt._pack_hook )
return self
def assign_owner(self,req):
"""
Packs self.owner to req if the owner is valid. I.e. it's not -1. This
method is used by various events that would like to pass owner as a
parameter. NewFile for example.
"""
if self.owner != -1: req['MDATA_KEY_OWNER_ID'] = self.owner
class FakePyinotify(object):
"""
sometimes we must create our own pyinotify like objects to
instantiate objects from the classes below whenever we want to turn
a single event into multiple events
"""
def __init__(self, path): self.pathname = path
class OrganizeFile(BaseEvent, HasMetaData):
"""
The only kind of event that does not support the pack protocol. It's used
internally with mediamonitor to move files in the organize directory.
"""
def __init__(self, *args, **kwargs):
super(OrganizeFile, self).__init__(*args, **kwargs)
def pack(self):
raise AttributeError("You can't send organize events to airtime!!!")
class NewFile(BaseEvent, HasMetaData):
"""
NewFile events are the only events that contain MDATA_KEY_OWNER_ID metadata
in them.
"""
def __init__(self, *args, **kwargs):
super(NewFile, self).__init__(*args, **kwargs)
def pack(self):
"""
pack turns an event into a media monitor request
"""
req_dict = self.metadata.extract()
req_dict['mode'] = u'create'
req_dict['is_record'] = self.metadata.is_recorded()
self.assign_owner(req_dict)
req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
return [req_dict]
class DeleteFile(BaseEvent):
"""
DeleteFile event only contains the path to be deleted. No other metadata
can be or is included. (This is because this event is fired after the
deletion occurs).
"""
def __init__(self, *args, **kwargs):
super(DeleteFile, self).__init__(*args, **kwargs)
def pack(self):
req_dict = {}
req_dict['mode'] = u'delete'
req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
return [req_dict]
class MoveFile(BaseEvent, HasMetaData):
"""
Path argument should be the new path of the file that was moved
"""
def __init__(self, *args, **kwargs):
super(MoveFile, self).__init__(*args, **kwargs)
def old_path(self):
return self._raw_event.src_pathname
def pack(self):
req_dict = {}
req_dict['mode'] = u'moved'
req_dict['MDATA_KEY_ORIGINAL_PATH'] = self.old_path()
req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
req_dict['MDATA_KEY_MD5'] = self.metadata.extract()['MDATA_KEY_MD5']
return [req_dict]
class ModifyFile(BaseEvent, HasMetaData):
def __init__(self, *args, **kwargs):
super(ModifyFile, self).__init__(*args, **kwargs)
def pack(self):
req_dict = self.metadata.extract()
req_dict['mode'] = u'modify'
# path of the file that was modified
req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
return [req_dict]
def map_events(directory, constructor):
"""
Walks 'directory' and creates an event using 'constructor'. Returns a list
of the constructed events.
"""
# -unknown-path should not appear in the path here but more testing
# might be necessary
for f in mmp.walk_supported(directory, clean_empties=False):
try:
for e in constructor( FakePyinotify(f) ).pack(): yield e
except BadSongFile as e: yield e
class DeleteDir(BaseEvent):
"""
A DeleteDir event unfolds itself into a list of DeleteFile events for every
file in the directory.
"""
def __init__(self, *args, **kwargs):
super(DeleteDir, self).__init__(*args, **kwargs)
def pack(self):
return map_events( self.path, DeleteFile )
class MoveDir(BaseEvent):
"""
A MoveDir event unfolds itself into a list of MoveFile events for every
file in the directory.
"""
def __init__(self, *args, **kwargs):
super(MoveDir, self).__init__(*args, **kwargs)
def pack(self):
return map_events( self.path, MoveFile )
class DeleteDirWatch(BaseEvent):
"""
Deleting a watched directory is different from deleting any other
directory. Hence we must have a separate event to handle this case
"""
def __init__(self, *args, **kwargs):
super(DeleteDirWatch, self).__init__(*args, **kwargs)
def pack(self):
req_dict = {}
req_dict['mode'] = u'delete_dir'
req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path + "/" )
return [req_dict]
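To make the cookie/morph machinery above concrete, here is a hedged sketch of a pyinotify move pair (the event stub is hypothetical; real cookies come from pyinotify, and owner lookups are assumed to fall back to a default):

class FakeMove(object):   # minimal stand-in for a pyinotify move event
    def __init__(self, path, cookie):
        self.pathname, self.cookie = path, cookie

frm = DeleteFile(FakeMove(u'/watch/old.mp3', 42))
EventRegistry.register(frm)                  # IN_MOVED_FROM side
to = MoveFile(FakeMove(u'/watch/new.mp3', 42))
print EventRegistry.registered(to)           # True -- cookies match
EventRegistry.matching(to).morph_into(to)    # the delete becomes a move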

View file

@ -0,0 +1,60 @@
# -*- coding: utf-8 -*-
class BadSongFile(Exception):
def __init__(self, path): self.path = path
def __str__(self): return "Can't read %s" % self.path
class NoConfigFile(Exception):
def __init__(self, path): self.path = path
def __str__(self):
return "Path '%s' for config file does not exit" % self.path
class ConfigAccessViolation(Exception):
def __init__(self,key): self.key = key
def __str__(self): return "You must not access key '%s' directly" % self.key
class FailedToSetLocale(Exception):
def __str__(self): return "Failed to set locale"
class FailedToObtainLocale(Exception):
def __init__(self, path, cause):
self.path = path
self.cause = cause
def __str__(self): return "Failed to obtain locale from '%s'" % self.path
class CouldNotCreateIndexFile(Exception):
"""exception whenever index file cannot be created"""
def __init__(self, path, cause):
self.path = path
self.cause = cause
def __str__(self): return "Failed to create touch file '%s'" % self.path
class DirectoryIsNotListed(Exception):
def __init__(self,dir_id,cause=None):
self.dir_id = dir_id
self.cause = cause
def __str__(self):
return "%d was not listed as a directory in the database" % self.dir_id
class FailedToCreateDir(Exception):
def __init__(self,path, parent):
self.path = path
self.parent = parent
def __str__(self): return "Failed to create path '%s'" % self.path
class NoDirectoryInAirtime(Exception):
def __init__(self,path, does_exist):
self.path = path
self.does_exist = does_exist
def __str__(self):
return "Directory '%s' does not exist in Airtime.\n \
However: %s do exist." % (self.path, self.does_exist)
class InvalidMetadataElement(Exception):
def __init__(self, parent, key, path):
self.parent = parent
self.key = key
self.path = path
def __str__(self):
return "InvalidMetadataElement: (key,path) = (%s,%s)" \
% (self.key, self.path)

View file

@ -0,0 +1,59 @@
# -*- coding: utf-8 -*-
from pydispatch import dispatcher
import abc
from media.monitor.log import Loggable
import media.monitor.pure as mmp
# Defines the handle interface
class Handles(object):
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def handle(self, sender, event, *args, **kwargs): pass
# TODO : Investigate whether weak reffing in dispatcher.connect could possibly
# cause a memory leak
class ReportHandler(Handles):
"""
A handler that can also report problem files when things go wrong
through the report_problem_file routine
"""
__metaclass__ = abc.ABCMeta
def __init__(self, signal, weak=False):
self.signal = signal
self.report_signal = "badfile"
def dummy(sender, event): self.handle(sender,event)
dispatcher.connect(dummy, signal=signal, sender=dispatcher.Any,
weak=weak)
def report_problem_file(self, event, exception=None):
dispatcher.send(signal=self.report_signal, sender=self, event=event,
exception=exception)
class ProblemFileHandler(Handles, Loggable):
"""
Responsible for answering to events passed through the 'badfile'
signal. Moves the problem file passed to the designated directory.
"""
def __init__(self, channel, **kwargs):
self.channel = channel
self.signal = self.channel.signal
self.problem_dir = self.channel.path
def dummy(sender, event, exception):
self.handle(sender, event, exception)
dispatcher.connect(dummy, signal=self.signal, sender=dispatcher.Any,
weak=False)
mmp.create_dir( self.problem_dir )
self.logger.info("Initialized problem file handler. Problem dir: '%s'" %
self.problem_dir)
def handle(self, sender, event, exception=None):
# TODO : use the exception parameter for something
self.logger.info("Received problem file: '%s'. Supposed to move it to \
problem dir", event.path)
try: mmp.move_to_dir(dir_path=self.problem_dir, file_path=event.path)
except Exception as e:
self.logger.info("Could not move file: '%s' to problem dir: '%s'" %
(event.path, self.problem_dir))
self.logger.info("Exception: %s" % str(e))

View file

@ -0,0 +1,141 @@
# -*- coding: utf-8 -*-
import pyinotify
from pydispatch import dispatcher
import media.monitor.pure as mmp
from media.monitor.pure import IncludeOnly
from media.monitor.events import OrganizeFile, NewFile, MoveFile, DeleteFile, \
DeleteDir, EventRegistry, MoveDir,\
DeleteDirWatch
from media.monitor.log import Loggable, get_logger
# Note: Because of the way classes that inherit from pyinotify.ProcessEvent
# interact with constructors, you should only instantiate objects from them
# using keyword arguments. For example:
# OrganizeListener('watch_signal') <= wrong
# OrganizeListener(signal='watch_signal') <= right
class FileMediator(object):
"""
FileMediator is used as an intermediate mechanism that filters out certain
events.
"""
ignored_set = set([]) # for paths only
logger = get_logger()
@staticmethod
def is_ignored(path): return path in FileMediator.ignored_set
@staticmethod
def ignore(path): FileMediator.ignored_set.add(path)
@staticmethod
def unignore(path): FileMediator.ignored_set.remove(path)
def mediate_ignored(fn):
def wrapped(self, event, *args,**kwargs):
event.pathname = unicode(event.pathname, "utf-8")
if FileMediator.is_ignored(event.pathname):
FileMediator.logger.info("Ignoring: '%s' (once)" % event.pathname)
FileMediator.unignore(event.pathname)
else: return fn(self, event, *args, **kwargs)
return wrapped
class BaseListener(object):
def __str__(self):
return "Listener(%s), Signal(%s)" % \
(self.__class__.__name__, self.signal)
def my_init(self, signal): self.signal = signal
class OrganizeListener(BaseListener, pyinotify.ProcessEvent, Loggable):
def process_IN_CLOSE_WRITE(self, event):
#self.logger.info("===> handling: '%s'" % str(event))
self.process_to_organize(event)
def process_IN_MOVED_TO(self, event):
#self.logger.info("===> handling: '%s'" % str(event))
self.process_to_organize(event)
def flush_events(self, path):
"""
Organize the whole directory at path (pretty much by doing what
handle does to every file)
"""
flushed = 0
for f in mmp.walk_supported(path, clean_empties=True):
self.logger.info("Bootstrapping: File in 'organize' directory: \
'%s'" % f)
if not mmp.file_locked(f):
dispatcher.send(signal=self.signal, sender=self,
event=OrganizeFile(f))
flushed += 1
#self.logger.info("Flushed organized directory with %d files" % flushed)
@IncludeOnly(mmp.supported_extensions)
def process_to_organize(self, event):
dispatcher.send(signal=self.signal, sender=self,
event=OrganizeFile(event))
class StoreWatchListener(BaseListener, Loggable, pyinotify.ProcessEvent):
def process_IN_CLOSE_WRITE(self, event):
self.process_create(event)
def process_IN_MOVED_TO(self, event):
if EventRegistry.registered(event):
# We need this trick because we don't know how to "expand" dir events
# into file events until we know for sure if we deleted or moved
morph = MoveDir(event) if event.dir else MoveFile(event)
EventRegistry.matching(event).morph_into(morph)
else: self.process_create(event)
def process_IN_MOVED_FROM(self, event):
# Is either delete dir or delete file
evt = self.process_delete(event)
# evt can be None whenever the event points to a file that would be
# ignored by @IncludeOnly
if hasattr(event,'cookie') and (evt != None):
EventRegistry.register(evt)
def process_IN_DELETE(self,event): self.process_delete(event)
def process_IN_MOVE_SELF(self, event):
if '-unknown-path' in event.pathname:
event.pathname = event.pathname.replace('-unknown-path','')
self.delete_watch_dir(event)
def delete_watch_dir(self, event):
e = DeleteDirWatch(event)
dispatcher.send(signal='watch_move', sender=self, event=e)
dispatcher.send(signal=self.signal, sender=self, event=e)
@mediate_ignored
@IncludeOnly(mmp.supported_extensions)
def process_create(self, event):
evt = NewFile(event)
dispatcher.send(signal=self.signal, sender=self, event=evt)
return evt
@mediate_ignored
@IncludeOnly(mmp.supported_extensions)
def process_delete(self, event):
evt = None
if event.dir : evt = DeleteDir(event)
else : evt = DeleteFile(event)
dispatcher.send(signal=self.signal, sender=self, event=evt)
return evt
@mediate_ignored
def process_delete_dir(self, event):
evt = DeleteDir(event)
dispatcher.send(signal=self.signal, sender=self, event=evt)
return evt
def flush_events(self, path):
"""
Walk over path and send a NewFile event for every file in this
directory. Not to be confused with bootstrapping, which is a more
careful process that involves figuring out what's in the database
first.
"""
# Songs is a dictionary where every key is the watched directory
# and the value is a set with all the files in that directory.
added = 0
for f in mmp.walk_supported(path, clean_empties=False):
added += 1
dispatcher.send( signal=self.signal, sender=self, event=NewFile(f) )
self.logger.info( "Flushed watch directory. added = %d" % added )

View file

@ -0,0 +1,43 @@
import logging
import abc
import traceback
from media.monitor.pure import LazyProperty
appname = 'root'
def setup_logging(log_path):
"""
Setup logging by writing log to 'log_path'
"""
#logger = logging.getLogger(appname)
logging.basicConfig(filename=log_path, level=logging.DEBUG)
def get_logger():
"""
in case we want to use the common logger from a procedural interface
"""
return logging.getLogger()
class Loggable(object):
"""
Any class that wants to log can inherit from this class and automatically
get a logger attribute that can be used like: self.logger.info(...) etc.
"""
__metaclass__ = abc.ABCMeta
@LazyProperty
def logger(self): return get_logger()
def unexpected_exception(self,e):
"""
Default message for 'unexpected' exceptions
"""
self.fatal_exception("'Unexpected' exception has occurred:", e)
def fatal_exception(self, message, e):
"""
Prints an exception 'e' with 'message'. Also outputs the traceback.
"""
self.logger.error( message )
self.logger.error( str(e) )
self.logger.error( traceback.format_exc() )

View file

@ -0,0 +1,252 @@
import pyinotify
import threading
import time
from pydispatch import dispatcher
from os.path import normpath
from media.monitor.events import PathChannel
from media.monitor.log import Loggable
from media.monitor.listeners import StoreWatchListener, OrganizeListener
from media.monitor.handler import ProblemFileHandler
from media.monitor.organizer import Organizer
import media.monitor.pure as mmp
class ManagerTimeout(threading.Thread,Loggable):
"""
The purpose of this class is to flush the organize directory every few
seconds (see 'interval'). This used to be just a workaround for cc-4235
but recently became a permanent solution because it's "cheap" and
reliable
"""
def __init__(self, manager, interval=1.5):
# TODO : interval should be read from config and passed here instead
# of just using the hard coded value
threading.Thread.__init__(self)
self.manager = manager
self.interval = interval
def run(self):
while True:
time.sleep(self.interval)
self.manager.flush_organize()
class Manager(Loggable):
"""
An abstraction over media monitor's core pyinotify functions. These
include adding watched, store, and organize directories, etc. Basically
composes over WatchManager from pyinotify
"""
def __init__(self):
self.wm = pyinotify.WatchManager()
# These two instance variables are assumed to be constant
self.watch_channel = 'watch'
self.organize_channel = 'organize'
self.watch_listener = StoreWatchListener(signal = self.watch_channel)
# TODO : change this to a weak ref
# TODO : get rid of this hack once cc-4235 is fixed
self.__timeout_thread = ManagerTimeout(self)
self.__timeout_thread.daemon = True
self.__timeout_thread.start()
self.organize = {
'organize_path' : None,
'imported_path' : None,
'recorded_path' : None,
'problem_files_path' : None,
'organizer' : None,
'problem_handler' : None,
'organize_listener' : OrganizeListener(signal=
self.organize_channel),
}
def dummy(sender, event): self.watch_move( event.path, sender=sender )
dispatcher.connect(dummy, signal='watch_move', sender=dispatcher.Any,
weak=False)
def subwatch_add(sender, directory):
self.__add_watch(directory, self.watch_listener)
dispatcher.connect(subwatch_add, signal='add_subwatch',
sender=dispatcher.Any, weak=False)
# A private mapping path => watch_descriptor
# we use the same dictionary for organize, watch, store wd events.
# this is a little hacky because we are unable to have multiple wd's
# on the same path.
self.__wd_path = {}
# The following set isn't really necessary anymore. Should be
# removed...
self.watched_directories = set([])
# This is the only event that we are unable to process "normally", i.e.
# through dedicated handler objects, because we must have access to a
# manager instance. Hence we must slightly break encapsulation.
def watch_move(self, watch_dir, sender=None):
"""
handle 'watch move' events directly sent from listener
"""
self.logger.info("Watch dir '%s' has been renamed (hence removed)" %
watch_dir)
self.remove_watch_directory(normpath(watch_dir))
def watch_signal(self):
"""
Return the signal string our watch_listener is reading events from
"""
return self.watch_listener.signal
def __remove_watch(self,path):
"""
Remove path from being watched (first will check if 'path' is watched)
"""
# only delete if dir is actually being watched
if path in self.__wd_path:
wd = self.__wd_path[path]
self.wm.rm_watch(wd, rec=True)
del(self.__wd_path[path])
def __add_watch(self,path,listener):
"""
Start watching 'path' using 'listener'. First will check if directory
is being watched before adding another watch
"""
self.logger.info("Attempting to add listener to path '%s'" % path)
self.logger.info( 'Listener: %s' % str(listener) )
if not self.has_watch(path):
wd = self.wm.add_watch(path, pyinotify.ALL_EVENTS, rec=True,
auto_add=True, proc_fun=listener)
if wd: self.__wd_path[path] = wd.values()[0]
def __create_organizer(self, target_path, recorded_path):
"""
creates an organizer at new destination path or modifies the old one
"""
# TODO : find a proper fix for the following hack
# We avoid creating new instances of organize because of the way
# it interacts with pydispatch. We must be careful to never have
# more than one instance of OrganizeListener but this is not so
# easy. (The singleton hack in Organizer) doesn't work. This is
# the only thing that seems to work.
if self.organize['organizer']:
o = self.organize['organizer']
o.channel = self.organize_channel
o.target_path = target_path
o.recorded_path = recorded_path
else:
self.organize['organizer'] = Organizer(channel=
self.organize_channel, target_path=target_path,
recorded_path=recorded_path)
def get_problem_files_path(self):
"""
returns the path where problem files should go
"""
return self.organize['problem_files_path']
def set_problem_files_path(self, new_path):
"""
Set the path where problem files should go
"""
self.organize['problem_files_path'] = new_path
self.organize['problem_handler'] = \
ProblemFileHandler( PathChannel(signal='badfile',path=new_path) )
def get_recorded_path(self):
"""
returns the path of the recorded directory
"""
return self.organize['recorded_path']
def set_recorded_path(self, new_path):
self.__remove_watch(self.organize['recorded_path'])
self.organize['recorded_path'] = new_path
self.__create_organizer( self.organize['imported_path'], new_path)
self.__add_watch(new_path, self.watch_listener)
def get_organize_path(self):
"""
returns the current path that is being watched for organization
"""
return self.organize['organize_path']
def set_organize_path(self, new_path):
"""
sets the organize path to be new_path. Under the current scheme there
is only one organize path but there is no reason why more cannot be
supported
"""
# if we are already organizing a particular directory we remove the
# watch from it first before organizing another directory
self.__remove_watch(self.organize['organize_path'])
self.organize['organize_path'] = new_path
# the OrganizeListener instance will walk path and dispatch an organize
# event for every file in that directory
self.organize['organize_listener'].flush_events(new_path)
#self.__add_watch(new_path, self.organize['organize_listener'])
def flush_organize(self):
path = self.organize['organize_path']
self.organize['organize_listener'].flush_events(path)
def get_imported_path(self):
return self.organize['imported_path']
def set_imported_path(self,new_path):
"""
set the directory where organized files go to.
"""
self.__remove_watch(self.organize['imported_path'])
self.organize['imported_path'] = new_path
self.__create_organizer( new_path, self.organize['recorded_path'])
self.__add_watch(new_path, self.watch_listener)
def change_storage_root(self, store):
"""
hooks up all the directories for you. Problem, recorded, imported,
organize.
"""
store_paths = mmp.expand_storage(store)
self.set_problem_files_path(store_paths['problem_files'])
self.set_imported_path(store_paths['imported'])
self.set_recorded_path(store_paths['recorded'])
self.set_organize_path(store_paths['organize'])
mmp.create_dir(store)
for p in store_paths.values():
mmp.create_dir(p)
def has_watch(self, path):
"""
Returns true if the path is being watched. This includes any kind of
watch: organize, store, watched.
"""
return path in self.__wd_path
def add_watch_directory(self, new_dir):
"""
Adds a directory to be "watched". Here "watched" directories are those
that media monitor is monitoring for Airtime, not directories that
pyinotify calls watched
"""
if self.has_watch(new_dir):
self.logger.info("Cannot add '%s' to watched directories. It's \
already being watched" % new_dir)
else:
self.logger.info("Adding watched directory: '%s'" % new_dir)
self.__add_watch(new_dir, self.watch_listener)
def remove_watch_directory(self, watch_dir):
"""
removes a directory from being "watched". Undoes add_watch_directory
"""
if self.has_watch(watch_dir):
self.logger.info("Removing watched directory: '%s'", watch_dir)
self.__remove_watch(watch_dir)
else:
self.logger.info("'%s' is not being watched, hence cannot be \
removed" % watch_dir)
self.logger.info("The directories we are watching now are:")
self.logger.info( self.__wd_path )
def loop(self):
"""
block until we receive pyinotify events
"""
notifier = pyinotify.Notifier(self.wm)
notifier.coalesce_events()
notifier.loop()

View file

@ -0,0 +1,230 @@
# -*- coding: utf-8 -*-
import mutagen
import os
import copy
from collections import namedtuple
from mutagen.easymp4 import EasyMP4KeyError
from mutagen.easyid3 import EasyID3KeyError
from media.monitor.exceptions import BadSongFile, InvalidMetadataElement
from media.monitor.log import Loggable
from media.monitor.pure import format_length, truncate_to_length
import media.monitor.pure as mmp
"""
list of supported easy tags in mutagen version 1.20
['albumartistsort', 'musicbrainz_albumstatus', 'lyricist', 'releasecountry',
'date', 'performer', 'musicbrainz_albumartistid', 'composer', 'encodedby',
'tracknumber', 'musicbrainz_albumid', 'album', 'asin', 'musicbrainz_artistid',
'mood', 'copyright', 'author', 'media', 'length', 'version', 'artistsort',
'titlesort', 'discsubtitle', 'website', 'musicip_fingerprint', 'conductor',
'compilation', 'barcode', 'performer:*', 'composersort', 'musicbrainz_discid',
'musicbrainz_albumtype', 'genre', 'isrc', 'discnumber', 'musicbrainz_trmid',
'replaygain_*_gain', 'musicip_puid', 'artist', 'title', 'bpm',
'musicbrainz_trackid', 'arranger', 'albumsort', 'replaygain_*_peak',
'organization']
"""
airtime2mutagen = {
"MDATA_KEY_TITLE" : "title",
"MDATA_KEY_CREATOR" : "artist",
"MDATA_KEY_SOURCE" : "album",
"MDATA_KEY_GENRE" : "genre",
"MDATA_KEY_MOOD" : "mood",
"MDATA_KEY_TRACKNUMBER" : "tracknumber",
"MDATA_KEY_BPM" : "bpm",
"MDATA_KEY_LABEL" : "label",
"MDATA_KEY_COMPOSER" : "composer",
"MDATA_KEY_ENCODER" : "encodedby",
"MDATA_KEY_CONDUCTOR" : "conductor",
"MDATA_KEY_YEAR" : "date",
"MDATA_KEY_URL" : "website",
"MDATA_KEY_ISRC" : "isrc",
"MDATA_KEY_COPYRIGHT" : "copyright",
}
class FakeMutagen(dict):
"""
Need this fake mutagen object so that airtime_special functions
return a proper default value instead of throwing exceptions for
files that mutagen doesn't recognize
"""
FakeInfo = namedtuple('FakeInfo','length bitrate')
def __init__(self,path):
self.path = path
self.mime = ['audio/wav']
self.info = FakeMutagen.FakeInfo(0.0, '')
dict.__init__(self)
def set_length(self,l):
old_bitrate = self.info.bitrate
self.info = FakeMutagen.FakeInfo(l, old_bitrate)
# Some airtime attributes are special because they must use the mutagen object
# itself to calculate the value that they need. The lambda associated with each
# key should attempt to extract the corresponding value from the mutagen object
itself, passed as 'm'. In the case when nothing can be extracted the lambda
# should return some default value to be assigned anyway or None so that the
# airtime metadata object will skip the attribute outright.
airtime_special = {
"MDATA_KEY_DURATION" :
lambda m: format_length(getattr(m.info, u'length', 0.0)),
"MDATA_KEY_BITRATE" :
lambda m: getattr(m.info, "bitrate", ''),
"MDATA_KEY_SAMPLERATE" :
lambda m: getattr(m.info, u'sample_rate', 0),
"MDATA_KEY_MIME" :
lambda m: m.mime[0] if len(m.mime) > 0 else u'',
}
mutagen2airtime = dict( (v,k) for k,v in airtime2mutagen.iteritems()
if isinstance(v, str) )
truncate_table = {
'MDATA_KEY_GENRE' : 64,
'MDATA_KEY_TITLE' : 512,
'MDATA_KEY_CREATOR' : 512,
'MDATA_KEY_SOURCE' : 512,
'MDATA_KEY_MOOD' : 64,
'MDATA_KEY_LABEL' : 512,
'MDATA_KEY_COMPOSER' : 512,
'MDATA_KEY_ENCODER' : 255,
'MDATA_KEY_CONDUCTOR' : 512,
'MDATA_KEY_YEAR' : 16,
'MDATA_KEY_URL' : 512,
'MDATA_KEY_ISRC' : 512,
'MDATA_KEY_COPYRIGHT' : 512,
}
class Metadata(Loggable):
# TODO : refactor the way metadata is being handled. Right now things are a
# little bit messy. Some of the handling is in m.m.pure while the rest is
# here. Also interface is not very consistent
@staticmethod
def fix_title(path):
# If the file at 'path' has no title tag we set one from the filename
# TODO : this is very hacky so make sure to fix it
m = mutagen.File(path, easy=True)
if u'title' not in m:
new_title = unicode( mmp.no_extension_basename(path) )
m[u'title'] = new_title
m.save()
@staticmethod
def airtime_dict(d):
"""
Converts mutagen dictionary 'd' into airtime dictionary
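Illustrative example (the metadata values below are made up):
>>> Metadata.airtime_dict({'title': [u'Hello']})
{'MDATA_KEY_TITLE': u'Hello'}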
"""
temp_dict = {}
for m_key, m_val in d.iteritems():
# TODO : some files have multiple fields for the same metadata.
# genre is one example. In that case mutagen will return a list
# of values
if isinstance(m_val, list):
# TODO : does it make more sense to just skip the element in
# this case?
if len(m_val) == 0: assign_val = ''
else: assign_val = m_val[0]
else: assign_val = m_val
temp_dict[ m_key ] = assign_val
airtime_dictionary = {}
for muta_k, muta_v in temp_dict.iteritems():
# We must check if we can actually translate the mutagen key into
# an airtime key before doing the conversion
if muta_k in mutagen2airtime:
airtime_key = mutagen2airtime[muta_k]
# Apply truncation in the case where airtime_key is in our
# truncation table
muta_v = \
truncate_to_length(muta_v, truncate_table[airtime_key])\
if airtime_key in truncate_table else muta_v
airtime_dictionary[ airtime_key ] = muta_v
return airtime_dictionary
@staticmethod
def write_unsafe(path,md):
"""
Writes 'md' metadata into 'path' through mutagen. Converts all
dictionary values to strings because mutagen will not write anything
else
"""
if not os.path.exists(path): raise BadSongFile(path)
song_file = mutagen.File(path, easy=True)
exceptions = [] # for bad keys
for airtime_k, airtime_v in md.iteritems():
if airtime_k in airtime2mutagen:
# The unicode cast here is mostly for integers that need to be
# strings
try:
song_file[ airtime2mutagen[airtime_k] ] = unicode(airtime_v)
except (EasyMP4KeyError, EasyID3KeyError) as e:
exceptions.append(InvalidMetadataElement(e, airtime_k,
path))
song_file.save()
# bubble dem up so that user knows that something is wrong
for e in exceptions: raise e
def __init__(self, fpath):
# Forcing the unicode through
try : fpath = fpath.decode("utf-8")
except : pass
if not mmp.file_playable(fpath): raise BadSongFile(fpath)
try : full_mutagen = mutagen.File(fpath, easy=True)
except Exception : raise BadSongFile(fpath)
self.path = fpath
if not os.path.exists(self.path):
self.logger.info("Attempting to read metadata of file \
that does not exist. Setting metadata to {}")
self.__metadata = {}
return
# TODO : Simplify the way all of these rules are handled right now it's
# extremely unclear and needs to be refactored.
#if full_mutagen is None: raise BadSongFile(fpath)
if full_mutagen is None: full_mutagen = FakeMutagen(fpath)
self.__metadata = Metadata.airtime_dict(full_mutagen)
# Now we extract the special values that are calculated from the mutagen
# object itself:
if mmp.extension(fpath) == 'wav':
full_mutagen.set_length(mmp.read_wave_duration(fpath))
for special_key,f in airtime_special.iteritems():
try:
new_val = f(full_mutagen)
if new_val is not None:
self.__metadata[special_key] = new_val
except Exception as e:
self.logger.info("Could not get special key %s for %s" %
(special_key, fpath))
self.logger.info(str(e))
# Finally, we "normalize" all the metadata here:
self.__metadata = mmp.normalized_metadata(self.__metadata, fpath)
# Now we must load the md5:
# TODO : perhaps we shouldn't hard code how many bytes we're reading
# from the file?
self.__metadata['MDATA_KEY_MD5'] = mmp.file_md5(fpath,max_length=100)
def is_recorded(self):
"""
returns true if the file has been created by airtime through recording
"""
return mmp.is_airtime_recorded( self.__metadata )
def extract(self):
"""
returns a copy of the metadata that was loaded when object was
constructed
"""
return copy.deepcopy(self.__metadata)
def utf8(self):
"""
Returns a unicode aware representation of the data that is compatible
with what is sent to airtime
"""
return mmp.convert_dict_value_to_utf8(self.extract())

View file

@ -0,0 +1,91 @@
# -*- coding: utf-8 -*-
import media.monitor.pure as mmp
import media.monitor.owners as owners
from media.monitor.handler import ReportHandler
from media.monitor.log import Loggable
from media.monitor.exceptions import BadSongFile
from media.monitor.events import OrganizeFile
from pydispatch import dispatcher
from os.path import dirname
import os.path
class Organizer(ReportHandler,Loggable):
"""
Organizer is responsible for listening to OrganizeListener events
and committing the appropriate changes to the filesystem. It does
not in any way interact with WatchSyncer, even when the WatchSyncer
is a "storage directory". The "storage" directory picks up all of
its events through pyinotify. (These events are fed to it through
StoreWatchListener)
"""
# Commented out making this class a singleton because it's just a band aid
# for the real issue. The real issue being making multiple Organizer
# instances with pydispatch
#_instance = None
#def __new__(cls, channel, target_path, recorded_path):
#if cls._instance:
#cls._instance.channel = channel
#cls._instance.target_path = target_path
#cls._instance.recorded_path = recorded_path
#else:
#cls._instance = super(Organizer, cls).__new__( cls, channel,
#target_path, recorded_path)
#return cls._instance
def __init__(self, channel, target_path, recorded_path):
self.channel = channel
self.target_path = target_path
self.recorded_path = recorded_path
super(Organizer, self).__init__(signal=self.channel, weak=False)
def handle(self, sender, event):
"""
Intercept events where a new file has been added to the organize
directory and place it in the correct path (starting with
self.target_path)
"""
# Only handle this event type
assert isinstance(event, OrganizeFile), \
"Organizer can only handle OrganizeFile events.Given '%s'" % event
try:
# We must select the target_path based on whether file was recorded
# by airtime or not.
# Do we need to "massage" the path using mmp.organized_path?
target_path = self.recorded_path if event.metadata.is_recorded() \
else self.target_path
# nasty hack; do this properly
owner_id = mmp.owner_id(event.path)
if owner_id != -1:
target_path = os.path.join(target_path, unicode(owner_id))
mdata = event.metadata.extract()
new_path = mmp.organized_path(event.path, target_path, mdata)
# See hack in mmp.magic_move
def new_dir_watch(d):
# TODO : rewrite as return lambda : dispatcher.send(...
def cb():
dispatcher.send(signal="add_subwatch", sender=self,
directory=d)
return cb
mmp.magic_move(event.path, new_path,
after_dir_make=new_dir_watch(dirname(new_path)))
# The reason we need to go around saving the owner in this roundabout
# way is because we are unable to encode the owner id
# into the file itself so that the StoreWatchListener listener can
# detect it from the file
owners.add_file_owner(new_path, owner_id )
self.logger.info('Organized: "%s" into "%s"' %
(event.path, new_path))
except BadSongFile as e:
self.report_problem_file(event=event, exception=e)
# probably general error in mmp.magic_move...
except Exception as e:
self.unexpected_exception( e )
self.report_problem_file(event=event, exception=e)

View file

@ -0,0 +1,52 @@
# -*- coding: utf-8 -*-
from media.monitor.log import get_logger
log = get_logger()
# hash: 'filepath' => owner_id
owners = {}
def reset_owners():
"""
Wipes out all file => owner associations
"""
global owners
owners = {}
def get_owner(f):
"""
Get the owner id of the file 'f'
"""
return owners[f] if f in owners else -1
def add_file_owner(f,owner):
"""
Associate file f with owner. If owner is -1 then we will not record it,
because -1 means there is no owner. Returns True if f is being stored after
the function, False otherwise.
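Illustrative example (the path and ids below are made up):
>>> reset_owners()
>>> add_file_owner('/m/song.mp3', -1)
False
>>> add_file_owner('/m/song.mp3', 42)
True
>>> get_owner('/m/song.mp3')
42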
"""
if owner == -1: return False
if f in owners:
if owner != owners[f]: # check for fishiness
log.info("Warning ownership of file '%s' changed from '%d' to '%d'"
% (f, owners[f], owner))
else: return True
owners[f] = owner
return True
def has_owner(f):
"""
True if f is owned by somebody. False otherwise.
"""
return f in owners
def remove_file_owner(f):
"""
Try and delete any association made with file f. Returns true if the
association was actually deleted. False otherwise.
"""
if f in owners:
del owners[f]
return True
else: return False

View file

@ -0,0 +1,552 @@
# -*- coding: utf-8 -*-
import copy
from subprocess import Popen, PIPE
import subprocess
import os
import math
import wave
import contextlib
import shutil
import re
import sys
import hashlib
import locale
import operator as op
from os.path import normpath
from itertools import takewhile
# you need to import reduce in python 3
try: from functools import reduce
except: pass
from configobj import ConfigObj
from media.monitor.exceptions import FailedToSetLocale, FailedToCreateDir
#supported_extensions = [u"mp3", u"ogg", u"oga"]
supported_extensions = [u"mp3", u"ogg", u"oga", u"flac", u"wav",
u'm4a', u'mp4']
unicode_unknown = u'unknown'
path_md = ['MDATA_KEY_TITLE', 'MDATA_KEY_CREATOR', 'MDATA_KEY_SOURCE',
'MDATA_KEY_TRACKNUMBER', 'MDATA_KEY_BITRATE']
class LazyProperty(object):
"""
meant to be used for lazy evaluation of an object attribute.
property should represent non-mutable data, as it replaces itself.
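A minimal doctest sketch (the Example class below is made up for this
illustration): the first access runs the getter, then the computed value
replaces the descriptor on the instance.
>>> class Example(object):
...     @LazyProperty
...     def answer(self):
...         print "computing"
...         return 42
>>> e = Example()
>>> e.answer
computing
42
>>> e.answer
42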
"""
def __init__(self,fget):
self.fget = fget
self.func_name = fget.__name__
def __get__(self,obj,cls):
if obj is None: return None
value = self.fget(obj)
setattr(obj,self.func_name,value)
return value
class IncludeOnly(object):
"""
A little decorator to help listeners only be called on extensions
they support
NOTE: this decorator only works on methods and not functions. Maybe
fix this?
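Illustrative doctest (FakeEvent and Listener below are made up for this
sketch): only events with supported extensions reach the method.
>>> class FakeEvent(object):
...     def __init__(self, pathname, dir=False):
...         self.pathname = pathname
...         self.dir = dir
>>> class Listener(object):
...     @IncludeOnly([u'mp3'])
...     def handle(self, event): return u'handled'
>>> Listener().handle(FakeEvent(u'/x/song.mp3'))
u'handled'
>>> Listener().handle(FakeEvent(u'/x/notes.txt')) is None
True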
"""
def __init__(self, *deco_args):
self.exts = set([])
for arg in deco_args:
if isinstance(arg,str): self.exts.add(arg)
elif hasattr(arg, '__iter__'):
for x in arg: self.exts.add(x)
def __call__(self, func):
def _wrap(moi, event, *args, **kwargs):
ext = extension(event.pathname)
# Checking for emptiness b/c we don't want to skip directories
if (ext.lower() in self.exts) or event.dir:
return func(moi, event, *args, **kwargs)
return _wrap
def partition(f, alist):
"""
Partition is very similar to filter, except that it also returns the
elements for which f returns false, as the second element of a tuple.
>>> partition(lambda x : x > 3, [1,2,3,4,5,6])
([4, 5, 6], [1, 2, 3])
"""
return (filter(f, alist), filter(lambda x: not f(x), alist))
def is_file_supported(path):
"""
Checks if a file path's (filename's) extension matches the kinds that we
support. Note that this is case insensitive.
>>> is_file_supported("test.mp3")
True
>>> is_file_supported("/bs/path/test.mP3")
True
>>> is_file_supported("test.txt")
False
"""
return extension(path).lower() in supported_extensions
# TODO : In the future we would like a better way to find out whether a show
# has been recorded
def is_airtime_recorded(md):
"""
Takes a metadata dictionary and returns True if it belongs to a file that
was recorded by Airtime.
"""
if not 'MDATA_KEY_CREATOR' in md: return False
return md['MDATA_KEY_CREATOR'] == u'Airtime Show Recorder'
def read_wave_duration(path):
with contextlib.closing(wave.open(path,'r')) as f:
frames = f.getnframes()
rate = f.getframerate()
duration = frames/float(rate)
return duration
def clean_empty_dirs(path):
"""
walks path and deletes every empty directory it finds
"""
# TODO : test this function
if path.endswith('/'): clean_empty_dirs(path[0:-1])
else:
for root, dirs, _ in os.walk(path, topdown=False):
full_paths = ( os.path.join(root, d) for d in dirs )
for d in full_paths:
if os.path.exists(d):
if not os.listdir(d): os.removedirs(d)
def extension(path):
"""
return extension of path, empty string otherwise. Prefer to return empty
string instead of None because of bad handling of "maybe" types in python.
I.e. interpreter won't enforce None checks on the programmer
>>> extension("testing.php")
'php'
>>> extension("a.b.c.d.php")
'php'
>>> extension('/no/extension')
''
>>> extension('/path/extension.ml')
'ml'
"""
ext = path.split(".")
if len(ext) < 2: return ""
else: return ext[-1]
def no_extension_basename(path):
"""
returns the extensionless basename of a filepath
>>> no_extension_basename("/home/test.mp3")
u'test'
>>> no_extension_basename("/home/test")
u'test'
>>> no_extension_basename('blah.ml')
u'blah'
>>> no_extension_basename('a.b.c.d.mp3')
u'a.b.c.d'
"""
base = unicode(os.path.basename(path))
if extension(base) == "": return base
else: return '.'.join(base.split(".")[0:-1])
def walk_supported(directory, clean_empties=False):
"""
A small generator wrapper around os.walk to only give us files whose
extensions we support. When clean_empties is True we
recursively delete empty directories left over in directory after the walk.
"""
for root, dirs, files in os.walk(directory):
full_paths = ( os.path.join(root, name) for name in files
if is_file_supported(name) )
for fp in full_paths: yield fp
if clean_empties: clean_empty_dirs(directory)
def file_locked(path):
cmd = "lsof %s" % path
f = Popen(cmd, shell=True, stdout=PIPE).stdout
return bool(f.readlines())
def magic_move(old, new, after_dir_make=lambda : None):
"""
Moves path old to new and constructs the necessary directories for new
along the way
"""
new_dir = os.path.dirname(new)
if not os.path.exists(new_dir): os.makedirs(new_dir)
# We need this crusty hack because anytime a directory is created we must
# re-add it with add_watch otherwise putting files in it will not trigger
# pyinotify events
after_dir_make()
shutil.move(old,new)
def move_to_dir(dir_path,file_path):
"""
moves a file at file_path into dir_path/basename(file_path)
"""
bs = os.path.basename(file_path)
magic_move(file_path, os.path.join(dir_path, bs))
def apply_rules_dict(d, rules):
"""
Consumes a dictionary of rules that maps some keys to lambdas which it
applies to every matching element in d and returns a new dictionary with
the rules applied. If a rule returns None then it's not applied.
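Illustrative doctest (the keys and rules below are made up):
>>> apply_rules_dict({'a': u' x '}, {'a': lambda v: v.strip()})
{'a': u'x'}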
"""
new_d = copy.deepcopy(d)
for k, rule in rules.iteritems():
if k in d:
new_val = rule(d[k])
if new_val is not None: new_d[k] = new_val
return new_d
def default_to_f(dictionary, keys, default, condition):
new_d = copy.deepcopy(dictionary)
for k in keys:
if condition(dictionary=new_d, key=k): new_d[k] = default
return new_d
def default_to(dictionary, keys, default):
"""
Checks if each of the keys in 'keys' exists in 'dictionary'. If not, it
returns a new dictionary with all the missing keys defaulted to 'default'.
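Illustrative doctest:
>>> default_to({}, ['x'], u'unknown')
{'x': u'unknown'}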
"""
cnd = lambda dictionary, key: key not in dictionary
return default_to_f(dictionary, keys, default, cnd)
def remove_whitespace(dictionary):
"""
Removes entries from the dictionary whose values are empty or whitespace-only.
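Illustrative doctest:
>>> remove_whitespace({'a': u'   ', 'b': u'x'})
{'b': u'x'}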
"""
nd = copy.deepcopy(dictionary)
bad_keys = []
for k,v in nd.iteritems():
if hasattr(v,'strip'):
stripped = v.strip()
# ghetto and maybe unnecessary
if stripped == '' or stripped == u'': bad_keys.append(k)
for bad_key in bad_keys: del nd[bad_key]
return nd
def parse_int(s):
"""
Tries very hard to parse a leading integer out of s, returned as a string
of digits. Returns None when it fails.
>>> parse_int("123")
'123'
>>> parse_int("123saf")
'123'
>>> parse_int("asdf") is None
True
"""
if s.isdigit(): return s
else:
try : return str(reduce(op.add, takewhile(lambda x: x.isdigit(), s)))
except: return None
def normalized_metadata(md, original_path):
"""
consumes a dictionary of metadata and returns a new dictionary with the
formatted meta data. We also consume original_path because we must
sometimes set MDATA_KEY_TITLE based on it
"""
new_md = copy.deepcopy(md)
# replace all slashes with dashes
#for k,v in new_md.iteritems(): new_md[k] = unicode(v).replace('/','-')
# Specific rules that are applied in a per attribute basis
format_rules = {
'MDATA_KEY_TRACKNUMBER' : parse_int,
'MDATA_KEY_FILEPATH' : lambda x: os.path.normpath(x),
'MDATA_KEY_BPM' : lambda x: x[0:8],
'MDATA_KEY_MIME' : lambda x: x.replace('audio/vorbis','audio/ogg'),
# Whenever 0 is reported we change it to empty
#'MDATA_KEY_BITRATE' : lambda x: '' if str(x) == '0' else x
}
new_md = remove_whitespace(new_md) # remove whitespace fields
# Format all the fields in format_rules
new_md = apply_rules_dict(new_md, format_rules)
# set filetype to audioclip by default
new_md = default_to(dictionary=new_md, keys=['MDATA_KEY_FTYPE'],
default=u'audioclip')
# Try to parse bpm but delete the whole key if that fails
if 'MDATA_KEY_BPM' in new_md:
new_md['MDATA_KEY_BPM'] = parse_int(new_md['MDATA_KEY_BPM'])
if new_md['MDATA_KEY_BPM'] is None:
del new_md['MDATA_KEY_BPM']
if not is_airtime_recorded(new_md):
# Read title from filename if it does not exist
default_title = no_extension_basename(original_path)
default_title = re.sub(r'__\d+\.',u'.', default_title)
if re.match(".+-%s-.+$" % unicode_unknown, default_title):
default_title = u''
new_md = default_to(dictionary=new_md, keys=['MDATA_KEY_TITLE'],
default=default_title)
new_md['MDATA_KEY_TITLE'] = re.sub(r'-\d+kbps$', u'',
new_md['MDATA_KEY_TITLE'])
# TODO : wtf is this for again?
new_md['MDATA_KEY_TITLE'] = re.sub(r'-?%s-?' % unicode_unknown, u'',
new_md['MDATA_KEY_TITLE'])
return new_md
def organized_path(old_path, root_path, orig_md):
"""
old_path - path where the file is stored at the moment <= maybe not necessary?
root_path - the parent directory where all organized files go
orig_md - original meta data of the file as given by mutagen AFTER being
normalized
return value: new file path
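Illustrative doctest (the paths and metadata values below are made up):
>>> organized_path('/tmp/foo.mp3', '/srv/stor/imported',
...     dict(MDATA_KEY_TITLE=u'T', MDATA_KEY_CREATOR=u'C',
...          MDATA_KEY_SOURCE=u'S', MDATA_KEY_TRACKNUMBER=u'1',
...          MDATA_KEY_BITRATE=u'128000'))
u'/srv/stor/imported/C/S/1-T-128kbps.mp3'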
"""
filepath = None
ext = extension(old_path)
def default_f(dictionary, key):
if key in dictionary: return len(str(dictionary[key])) == 0
else: return True
# We set some metadata elements to a default "unknown" value because we
# use these fields to create a path, hence they cannot be empty. Here
# "normal" means normalized only for the organized path.
# MDATA_KEY_BITRATE is in bits/second, i.e. 256000, and we want to turn
# this into '256kbps'.
normal_md = default_to_f(orig_md, path_md, unicode_unknown, default_f)
try:
formatted = str(int(normal_md['MDATA_KEY_BITRATE']) / 1000)
normal_md['MDATA_KEY_BITRATE'] = formatted + 'kbps'
except:
normal_md['MDATA_KEY_BITRATE'] = unicode_unknown
if is_airtime_recorded(normal_md):
# normal_md['MDATA_KEY_TITLE'] = 'show_name-yyyy-mm-dd-hh:mm:ss'
r = "(?P<show>.+)-(?P<date>\d+-\d+-\d+)-(?P<time>\d+:\d+:\d+)$"
title_re = re.match(r, normal_md['MDATA_KEY_TITLE'])
show_name = title_re.group('show')
#date = title_re.group('date')
yyyy, mm, _ = normal_md['MDATA_KEY_YEAR'].split('-',2)
fname_base = '%s-%s-%s.%s' % \
(title_re.group('time'), show_name,
normal_md['MDATA_KEY_BITRATE'], ext)
filepath = os.path.join(root_path, yyyy, mm, fname_base)
elif len(normal_md['MDATA_KEY_TRACKNUMBER']) == 0:
fname = u'%s-%s.%s' % (normal_md['MDATA_KEY_TITLE'],
normal_md['MDATA_KEY_BITRATE'], ext)
path = os.path.join(root_path, normal_md['MDATA_KEY_CREATOR'],
normal_md['MDATA_KEY_SOURCE'] )
filepath = os.path.join(path, fname)
else: # The "normal" case
fname = u'%s-%s-%s.%s' % (normal_md['MDATA_KEY_TRACKNUMBER'],
normal_md['MDATA_KEY_TITLE'],
normal_md['MDATA_KEY_BITRATE'], ext)
path = os.path.join(root_path, normal_md['MDATA_KEY_CREATOR'],
normal_md['MDATA_KEY_SOURCE'])
filepath = os.path.join(path, fname)
return filepath
# TODO : Get rid of this function and every one of its uses. We no longer use
# the md5 signature of a song for anything
def file_md5(path,max_length=100):
"""
Get md5 of file path (if it exists). Use only the first max_length bytes to
save time and memory. Pass max_length=-1 to read the whole file (like in mm1)
"""
if os.path.exists(path):
with open(path, 'rb') as f:
m = hashlib.md5()
# If a file is shorter than "max_length" python will just return
# whatever it was able to read which is acceptable behaviour
m.update(f.read(max_length))
return m.hexdigest()
else: raise ValueError("'%s' must exist to find its md5" % path)
def encode_to(obj, encoding='utf-8'):
# TODO : add documentation + unit tests for this function
if isinstance(obj, unicode): obj = obj.encode(encoding)
return obj
def convert_dict_value_to_utf8(md):
"""
formats a dictionary to send as a request to api client
"""
return dict([(item[0], encode_to(item[1], "utf-8")) for item in md.items()])
def get_system_locale(locale_path='/etc/default/locale'):
"""
Returns the configuration object for the system's default locale. Normally
requires root access.
"""
if os.path.exists(locale_path):
try:
config = ConfigObj(locale_path)
return config
except Exception as e: raise FailedToSetLocale(locale_path,cause=e)
else: raise ValueError("locale path '%s' does not exist. \
permissions issue?" % locale_path)
def configure_locale(config):
"""
sets the locale according to the system's locale.
"""
current_locale = locale.getlocale()
if current_locale[1] is None:
default_locale = locale.getdefaultlocale()
if default_locale[1] is None:
lang = config.get('LANG')
new_locale = lang
else: new_locale = default_locale
locale.setlocale(locale.LC_ALL, new_locale)
reload(sys)
sys.setdefaultencoding("UTF-8")
current_locale_encoding = locale.getlocale()[1].lower()
if current_locale_encoding not in ['utf-8', 'utf8']:
raise FailedToSetLocale()
def fondle(path,times=None):
# TODO : write unit tests for this
"""
touch a file to change the last modified date. Beware of calling this
function on the same file from multiple threads.
"""
with file(path, 'a'): os.utime(path, times)
def last_modified(path):
"""
return the time of the last time mm2 was run. path refers to the index file
whose date modified attribute contains this information. In the case when
the file does not exist we return 0 so that every file on the
filesystem counts as modified after it
"""
if os.path.exists(path): return os.path.getmtime(path)
else: return 0
def expand_storage(store):
"""
A storage directory usually consists of 4 different subdirectories. This
function returns their paths
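Illustrative doctest:
>>> expand_storage('/srv/stor')['organize']
'/srv/stor/organize'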
"""
store = os.path.normpath(store)
return {
'organize' : os.path.join(store, 'organize'),
'recorded' : os.path.join(store, 'recorded'),
'problem_files' : os.path.join(store, 'problem_files'),
'imported' : os.path.join(store, 'imported'),
}
def create_dir(path):
"""
Will try to make sure that path exists, at all costs. Raises an exception
if it fails at this task.
"""
if not os.path.exists(path):
try : os.makedirs(path)
except Exception as e : raise FailedToCreateDir(path, e)
else: # if no error occurs we still need to check that dir exists
if not os.path.exists(path): raise FailedToCreateDir(path)
def sub_path(directory,f):
"""
returns true if 'f' is in the tree of files under directory.
NOTE: does not look at any symlinks or anything like that, just looks at
the paths.
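Illustrative doctest (the paths below are made up):
>>> sub_path('/home/user/stor', '/home/user/stor/imported/a.mp3')
True
>>> sub_path('/home/user/stor', '/home/user/stor2/a.mp3')
False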
"""
# NOTE: a character-level common prefix (os.path.commonprefix) would also
# match sibling directories such as '/stor2' for '/stor', so we compare
# against the directory path with a trailing separator instead.
normalized = normpath(directory)
target = normpath(f)
return target == normalized or target.startswith(normalized + os.sep)
def owner_id(original_path):
"""
Given 'original_path', find the corresponding '.identifier' file and
return the id that is contained in it. If no file is found or nothing is
read then -1 is returned. The file is deleted after the number has been read
"""
fname = "%s.identifier" % original_path
owner_id = -1
try:
f = open(fname)
for line in f:
owner_id = int(line)
break
f.close()
except Exception: pass
else:
try: os.unlink(fname)
except Exception: raise
return owner_id
def file_playable(pathname):
"""
Returns True if 'pathname' is playable by liquidsoap. False otherwise.
"""
# when there is an single apostrophe inside of a string quoted by
# apostrophes, we can only escape it by replace that apostrophe with
# '\''. This breaks the string into two, and inserts an escaped
# single quote in between them. We run the command as pypo because
# otherwise the target file is opened with write permissions, and
# this causes an inotify ON_CLOSE_WRITE event to be fired :/
command = ("airtime-liquidsoap -c 'output.dummy" + \
"(audio_to_stereo(single(\"%s\")))' > /dev/null 2>&1") % \
pathname.replace("'", "'\\''")
# NOTE: the actual liquidsoap check below is currently bypassed; every
# file is reported as playable.
return True
return_code = subprocess.call(command, shell=True)
return (return_code == 0)
def toposort(data):
"""
Topological sort on 'data' where 'data' is of the form:
data = {
'one' : set(['two','three']),
'two' : set(['three']),
'three' : set()
}
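Illustrative doctest:
>>> list(toposort({ 'one' : set(['two']), 'two' : set() }))
['two', 'one']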
"""
for k, v in data.items():
v.discard(k) # Ignore self dependencies
extra_items_in_deps = reduce(set.union, data.values()) - set(data.keys())
data.update(dict((item,set()) for item in extra_items_in_deps))
while True:
ordered = set(item for item,dep in data.items() if not dep)
if not ordered: break
for e in sorted(ordered): yield e
data = dict((item,(dep - ordered)) for item,dep in data.items()
if item not in ordered)
assert not data, "A cyclic dependency exists amongst %r" % data
def truncate_to_length(item, length):
"""
Truncates 'item' to 'length'
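Illustrative doctest:
>>> truncate_to_length(u'abcdef', 3)
u'abc'
>>> truncate_to_length(123456, 3)
'123'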
"""
if isinstance(item, int): item = str(item)
if isinstance(item, basestring):
if len(item) > length: return item[0:length]
else: return item
def format_length(mutagen_length):
"""
Convert mutagen length to airtime length
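Illustrative doctest:
>>> format_length(3660.5)
'1:1:0.5'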
"""
t = float(mutagen_length)
h = int(math.floor(t / 3600))
t = t % 3600
m = int(math.floor(t / 60))
s = t % 60
# will be ss.uuu
s = str(s)
seconds = s.split(".")
s = seconds[0]
# have a maximum of 6 subseconds.
if len(seconds[1]) >= 6: ss = seconds[1][0:6]
else: ss = seconds[1][0:]
return "%s:%s:%s.%s" % (h, m, s, ss)
if __name__ == '__main__':
import doctest
doctest.testmod()

View file

@ -0,0 +1,110 @@
# -*- coding: utf-8 -*-
import os
from media.monitor.log import Loggable
from media.monitor.exceptions import NoDirectoryInAirtime
from os.path import normpath
import media.monitor.pure as mmp
class AirtimeDB(Loggable):
def __init__(self, apc, reload_now=True):
self.apc = apc
if reload_now: self.reload_directories()
def reload_directories(self):
"""
This is the 'real' constructor; it should be called if you ever want the
class reinitialized. There's not much point in doing it yourself,
however; you should just create a new AirtimeDB instance.
"""
# dirs_setup is a dict with keys:
# u'watched_dirs' and u'stor' which point to lists of corresponding
# dirs
dirs_setup = self.apc.setup_media_monitor()
dirs_setup[u'stor'] = normpath( dirs_setup[u'stor'] )
dirs_setup[u'watched_dirs'] = map(normpath, dirs_setup[u'watched_dirs'])
dirs_with_id = dict([ (k,normpath(v)) for k,v in
self.apc.list_all_watched_dirs()['dirs'].iteritems() ])
self.id_to_dir = dirs_with_id
self.dir_to_id = dict([ (v,k) for k,v in dirs_with_id.iteritems() ])
self.base_storage = dirs_setup[u'stor']
self.storage_paths = mmp.expand_storage( self.base_storage )
self.base_id = self.dir_to_id[self.base_storage]
# hack to get around annoying schema of airtime db
self.dir_to_id[ self.recorded_path() ] = self.base_id
self.dir_to_id[ self.import_path() ] = self.base_id
# We don't know from the x_to_y dict which directory is watched or
# store...
self.watched_directories = set([ os.path.normpath(p) for p in
dirs_setup[u'watched_dirs'] ])
def to_id(self, directory):
"""
directory path -> id
"""
return self.dir_to_id[ directory ]
def to_directory(self, dir_id):
"""
id -> directory path
"""
return self.id_to_dir[ dir_id ]
def storage_path(self): return self.base_storage
def organize_path(self): return self.storage_paths['organize']
def problem_path(self): return self.storage_paths['problem_files']
def import_path(self): return self.storage_paths['imported']
def recorded_path(self): return self.storage_paths['recorded']
def list_watched(self):
"""
returns all watched directories as a list
"""
return list(self.watched_directories)
def list_storable_paths(self):
"""
returns a list of all the watched directories in the database.
(Includes the imported directory and the recorded directory)
"""
l = self.list_watched()
l.append(self.import_path())
l.append(self.recorded_path())
return l
def dir_id_get_files(self, dir_id, all_files=True):
"""
Get all files in a directory with id dir_id
"""
base_dir = self.id_to_dir[ dir_id ]
return set(( os.path.join(base_dir,p) for p in
self.apc.list_all_db_files( dir_id, all_files ) ))
def directory_get_files(self, directory, all_files=True):
"""
Returns all the files (recursively) in a directory. A directory is an
"actual" directory path instead of its id. This is super hacky because
you create one request for the recorded directory and one for the
imported directory even though they're the same dir in the database, so
you get files for both dirs in one request...
"""
normal_dir = os.path.normpath(unicode(directory))
if normal_dir not in self.dir_to_id:
raise NoDirectoryInAirtime( normal_dir, self.dir_to_id )
all_files = self.dir_id_get_files( self.dir_to_id[normal_dir],
all_files )
if normal_dir == self.recorded_path():
all_files = [ p for p in all_files if
mmp.sub_path( self.recorded_path(), p ) ]
elif normal_dir == self.import_path():
all_files = [ p for p in all_files if
mmp.sub_path( self.import_path(), p ) ]
elif normal_dir == self.storage_path():
self.logger.info("Warning, you're getting all files in '%s' which \
includes imported + record" % normal_dir)
return set(all_files)

View file

@ -0,0 +1,83 @@
# -*- coding: utf-8 -*-
import media.monitor.pure as mmp
import os
from media.monitor.log import Loggable
from media.monitor.exceptions import CouldNotCreateIndexFile
class Toucher(Loggable):
"""
Class responsible for touching a file at a certain path when called
"""
def __init__(self,path):
self.path = path
def __call__(self):
try: mmp.fondle(self.path)
except Exception as e:
self.logger.info("Failed to touch file: '%s'. Logging exception." %
self.path)
self.logger.info(str(e))
#http://code.activestate.com/lists/python-ideas/8982/
from datetime import datetime
import threading
class RepeatTimer(threading.Thread):
def __init__(self, interval, callable, args=[], kwargs={}):
threading.Thread.__init__(self)
# interval_current shows the number of seconds in the currently
# triggered <tick>
self.interval_current = interval
# interval_new shows the number of seconds for the next <tick>
self.interval_new = interval
self.callable = callable
self.args = args
self.kwargs = kwargs
self.event = threading.Event()
self.event.set()
self.activation_dt = None
self.__timer = None
def run(self):
while self.event.is_set():
self.activation_dt = datetime.utcnow()
self.__timer = threading.Timer(self.interval_new,
self.callable,
self.args,
self.kwargs)
self.interval_current = self.interval_new
self.__timer.start()
self.__timer.join()
def cancel(self):
self.event.clear()
if self.__timer is not None:
self.__timer.cancel()
def trigger(self):
self.callable(*self.args, **self.kwargs)
if self.__timer is not None:
self.__timer.cancel()
def change_interval(self, value):
self.interval_new = value
class ToucherThread(Loggable):
"""
Creates a thread that touches a file 'path' every 'interval' seconds
"""
def __init__(self, path, interval=5):
if not os.path.exists(path):
try:
# TODO : rewrite using with?
f = open(path,'w')
f.write('')
f.close()
except Exception as e:
raise CouldNotCreateIndexFile(path,e)
cb = Toucher(path)
t = RepeatTimer(interval, cb)
t.daemon = True # thread terminates once process is done
t.start()

View file

@ -0,0 +1,226 @@
# -*- coding: utf-8 -*-
import threading
import time
import copy
from media.monitor.handler import ReportHandler
from media.monitor.log import Loggable
from media.monitor.exceptions import BadSongFile
from media.monitor.pure import LazyProperty
from media.monitor.eventcontractor import EventContractor
from media.monitor.events import EventProxy
import api_clients.api_client as ac
class RequestSync(threading.Thread,Loggable):
"""
This class is responsible for making the api call to send a request
to airtime. In the process it packs the requests and retries for
some number of times
"""
def __init__(self, watcher, requests):
threading.Thread.__init__(self)
self.watcher = watcher
self.requests = requests
self.retries = 1
self.request_wait = 0.3
@LazyProperty
def apiclient(self):
return ac.AirtimeApiClient.create_right_config()
def run(self):
self.logger.info("Attempting request with %d items." %
len(self.requests))
# Note that we must attach the appropriate mode to every
# response. Also do not forget to attach the 'is_record' to any
# requests that are related to recorded shows
# TODO : recorded shows aren't flagged right
# Is this retry shit even necessary? Consider getting rid of this.
packed_requests = []
for request_event in self.requests:
try:
for request in request_event.safe_pack():
if isinstance(request, BadSongFile):
self.logger.info("Bad song file: '%s'" % request.path)
else: packed_requests.append(request)
except Exception as e:
self.unexpected_exception( e )
if hasattr(request_event, 'path'):
self.logger.info("Possibly related to path: '%s'" %
request_event.path)
def make_req():
self.apiclient.send_media_monitor_requests( packed_requests )
for try_index in range(0,self.retries):
try: make_req()
# most likely we did not get json response as we expected
except ValueError:
self.logger.info("ApiController.php probably crashed, we \
diagnose this from the fact that it did not return \
valid json")
self.logger.info("Trying again after %f seconds" %
self.request_wait)
time.sleep( self.request_wait )
except Exception as e: self.unexpected_exception(e)
else:
self.logger.info("Request worked on the '%d' try" %
(try_index + 1))
break
else: self.logger.info("Failed to send request after '%d' tries..." %
self.retries)
self.watcher.flag_done()
class TimeoutWatcher(threading.Thread,Loggable):
"""
The job of this thread is to keep an eye on WatchSyncer and force a
request whenever the queued requests go over the timeout
"""
def __init__(self, watcher, timeout=5):
self.logger.info("Created timeout thread...")
threading.Thread.__init__(self)
self.watcher = watcher
self.timeout = timeout
def run(self):
# We try to launch a new thread every self.timeout seconds
# so that the people do not have to wait for the queue to fill up
while True:
time.sleep(self.timeout)
# If there are any requests left we launch them. Note that this
# isn't strictly necessary since RequestSync threads already
# chain themselves
if self.watcher.requests_in_queue():
self.logger.info("We have %d requests waiting to be launched" %
self.watcher.requests_left_count())
self.watcher.request_do()
# Same for events, this behaviour is mandatory however.
if self.watcher.events_in_queue():
self.logger.info("We have %d events that are unflushed" %
self.watcher.events_left_count())
self.watcher.flush_events()
class WatchSyncer(ReportHandler,Loggable):
def __init__(self, signal, chunking_number = 100, timeout=15):
self.timeout = float(timeout)
self.chunking_number = int(chunking_number)
self.request_running = False
self.__current_thread = None
self.__requests = []
self.contractor = EventContractor()
self.__reset_queue()
tc = TimeoutWatcher(self, self.timeout)
tc.daemon = True
tc.start()
super(WatchSyncer, self).__init__(signal=signal)
def handle(self, sender, event):
"""
We implement this abstract method from ReportHandler
"""
if hasattr(event, 'pack'):
# We push this event into queue
self.logger.info("Received event '%s'. Path: '%s'" % \
( event.__class__.__name__,
getattr(event,'path','No path exists') ))
try:
# If there is a strange bug anywhere in the code the next line
# should be a suspect
ev = EventProxy(event)
if self.contractor.register(ev): self.push_queue(ev)
#self.push_queue( event )
except BadSongFile as e:
self.fatal_exception("Received bas song file '%s'" % e.path, e)
except Exception as e:
self.unexpected_exception(e)
else:
self.logger.info("Received event that does not implement packing.\
Printing its representation:")
self.logger.info( repr(event) )
def requests_left_count(self):
"""
returns the number of requests left in the queue. requests are
functions that create RequestSync threads
"""
return len(self.__requests)
def events_left_count(self):
"""
Returns the number of events left in the queue to create a request
"""
return len(self.__queue)
def push_queue(self, elem):
"""
Add 'elem' to the event queue and launch a request if we are
over the chunking number
"""
self.logger.info("Added event into queue")
if self.events_left_count() >= self.chunking_number:
self.push_request()
self.request_do() # Launch the request if nothing is running
self.__queue.append(elem)
def flush_events(self):
"""
Force flush the current events held in the queue
"""
self.logger.info("Force flushing events...")
self.push_request()
self.request_do()
def events_in_queue(self):
"""
returns true if there are events in the queue that haven't been
processed yet
"""
return len(self.__queue) > 0
def requests_in_queue(self):
"""
Returns true if there are any requests in the queue. False otherwise.
"""
return len(self.__requests) > 0
def flag_done(self):
"""
called by request thread when it finishes operating
"""
self.request_running = False
self.__current_thread = None
# This call might not be necessary but we would like to get the
# ball running with the requests as soon as possible
if self.requests_in_queue(): self.request_do()
def request_do(self):
"""
launches a request thread only if one is not running right now
"""
if not self.request_running:
self.request_running = True
self.__requests.pop()()
def push_request(self):
"""
Create a request from the current events in the queue and schedule it
"""
self.logger.info("WatchSyncer : Unleashing request")
# want to do the request asynchronously and empty the queue
requests = copy.copy(self.__queue)
def launch_request():
# Need shallow copy here
t = RequestSync(watcher=self, requests=requests)
t.start()
self.__current_thread = t
self.__requests.append(launch_request)
self.__reset_queue()
def __reset_queue(self): self.__queue = []
def __del__(self):
# Ideally we would like to do a little more to ensure safe shutdown
if self.events_in_queue():
self.logger.warn("Terminating with events still in the queue...")
if self.requests_in_queue():
self.logger.warn("Terminating with http requests still pending...")

View file

@ -0,0 +1,133 @@
from subprocess import Popen, PIPE
import re
import os
import sys
import shutil
import tempfile
import logging
logger = logging.getLogger()
def get_process_output(command):
"""
Run subprocess and return stdout
"""
logger.debug(command)
p = Popen(command, shell=True, stdout=PIPE)
return p.communicate()[0].strip()
def run_process(command):
"""
Run subprocess and return "return code"
"""
p = Popen(command, shell=True)
return os.waitpid(p.pid, 0)[1]
def get_mime_type(file_path):
"""
Attempts to get the mime type but will return prematurely if the process
takes longer than 5 seconds. Note that this function should only be called
for files which do not have a mp3/ogg/flac extension.
"""
return get_process_output('timeout 5 file -b --mime-type "%s"' % file_path)
def duplicate_file(file_path):
"""
Makes a duplicate of the file and returns the path of this duplicate file.
"""
# open in binary mode so the audio data is copied verbatim
fsrc = open(file_path, 'rb')
fdst = tempfile.NamedTemporaryFile(delete=False)
logger.info("Copying %s to %s" % (file_path, fdst.name))
shutil.copyfileobj(fsrc, fdst)
fsrc.close()
fdst.close()
return fdst.name
def get_file_type(file_path):
file_type = None
if re.search(r'mp3$', file_path, re.IGNORECASE):
file_type = 'mp3'
elif re.search(r'og(g|a)$', file_path, re.IGNORECASE):
file_type = 'vorbis'
elif re.search(r'flac$', file_path, re.IGNORECASE):
file_type = 'flac'
else:
mime_type = get_mime_type(file_path)
if 'mpeg' in mime_type:
file_type = 'mp3'
elif 'ogg' in mime_type:
file_type = 'vorbis'
elif 'flac' in mime_type:
file_type = 'flac'
return file_type
def calculate_replay_gain(file_path):
"""
This function accepts files of type mp3/ogg/flac and returns a calculated ReplayGain value in dB.
If the value cannot be calculated for some reason, then we default to 0 (Unity Gain).
http://wiki.hydrogenaudio.org/index.php?title=ReplayGain_1.0_specification
"""
try:
"""
Making a duplicate is required because the ReplayGain extraction utilities we use
make unwanted modifications to the file.
"""
search = None
temp_file_path = duplicate_file(file_path)
file_type = get_file_type(file_path)
nice_level = '15'
if file_type:
if file_type == 'mp3':
if run_process("which mp3gain > /dev/null") == 0:
out = get_process_output('nice -n %s mp3gain -q "%s" 2> /dev/null' % (nice_level, temp_file_path))
search = re.search(r'Recommended "Track" dB change: (.*)', out)
else:
logger.warn("mp3gain not found")
elif file_type == 'vorbis':
if run_process("which vorbisgain > /dev/null && which ogginfo > /dev/null") == 0:
run_process('nice -n %s vorbisgain -q -f "%s" 2>/dev/null >/dev/null' % (nice_level,temp_file_path))
out = get_process_output('ogginfo "%s"' % temp_file_path)
search = re.search(r'REPLAYGAIN_TRACK_GAIN=(.*) dB', out)
else:
logger.warn("vorbisgain/ogginfo not found")
elif file_type == 'flac':
if run_process("which metaflac > /dev/null") == 0:
out = get_process_output('nice -n %s metaflac --show-tag=REPLAYGAIN_TRACK_GAIN "%s"' % (nice_level, temp_file_path))
search = re.search(r'REPLAYGAIN_TRACK_GAIN=(.*) dB', out)
else: logger.warn("metaflac not found")
except Exception, e:
logger.error(str(e))
finally:
# no longer need the temp file, simply remove it.
try: os.remove(temp_file_path)
except: pass
replay_gain = 0
if search:
matches = search.groups()
if len(matches) == 1:
replay_gain = matches[0]
else:
logger.warn("Received more than 1 match in: '%s'" % str(matches))
return replay_gain
# Example of running from command line:
# python replay_gain.py /path/to/filename.mp3
if __name__ == "__main__":
print calculate_replay_gain(sys.argv[1])

View file

@ -0,0 +1,79 @@
from threading import Thread
import traceback
import os
import time
from media.update import replaygain
from media.monitor.log import Loggable
class ReplayGainUpdater(Thread, Loggable):
"""
The purpose of the class is to query the server for a list of files which
do not have a ReplayGain value calculated. This class will iterate over the
list, calculate the values, update the server and repeat the process until
the server reports there are no files left.
This class will see heavy activity right after a 2.1->2.2 upgrade since 2.2
introduces ReplayGain normalization. A fresh install of Airtime 2.2 will
see this class not used at all since a file imported in 2.2 will
automatically have its ReplayGain value calculated.
"""
@staticmethod
def start_reply_gain(apc):
me = ReplayGainUpdater(apc)
me.daemon = True
me.start()
def __init__(self,apc):
Thread.__init__(self)
self.api_client = apc
def main(self):
raw_response = self.api_client.list_all_watched_dirs()
if 'dirs' not in raw_response:
self.logger.error("Could not get a list of watched directories \
with a dirs attribute. Printing full request:")
self.logger.error( raw_response )
return
directories = raw_response['dirs']
for dir_id, dir_path in directories.iteritems():
try:
# keep getting a few rows at a time for the current music_dir (stor
# or watched folder).
total = 0
while True:
# return a list of pairs where the first value is the
# file's database row id and the second value is the
# filepath
files = self.api_client.get_files_without_replay_gain_value(dir_id)
processed_data = []
for f in files:
full_path = os.path.join(dir_path, f['fp'])
processed_data.append((f['id'], replaygain.calculate_replay_gain(full_path)))
self.api_client.update_replay_gain_values(processed_data)
total += len(files)
if len(files) == 0: break
self.logger.info("Processed: %d songs" % total)
except Exception, e:
self.logger.error(e)
self.logger.debug(traceback.format_exc())
def run(self):
try:
while True:
self.logger.info("Runnning replaygain updater")
self.main()
# Sleep for 5 minutes in case new files have been added
time.sleep(60 * 5)
except Exception, e:
self.logger.error('ReplayGainUpdater Exception: %s', traceback.format_exc())
self.logger.error(e)
if __name__ == "__main__":
# Note: ReplayGainUpdater requires an api client instance; when run
# standalone we build one from the default configuration.
from api_clients import api_client as apc
rgu = ReplayGainUpdater(apc.AirtimeApiClient.create_right_config())
rgu.main()

View file

@ -0,0 +1,153 @@
# -*- coding: utf-8 -*-
import sys
import os
import logging
import logging.config
from media.monitor.manager import Manager
from media.monitor.bootstrap import Bootstrapper
from media.monitor.log import get_logger, setup_logging
from media.monitor.config import MMConfig
from media.monitor.toucher import ToucherThread
from media.monitor.syncdb import AirtimeDB
from media.monitor.exceptions import FailedToObtainLocale, \
FailedToSetLocale, \
NoConfigFile
from media.monitor.airtime import AirtimeNotifier, \
AirtimeMessageReceiver
from media.monitor.watchersyncer import WatchSyncer
from media.monitor.eventdrainer import EventDrainer
from media.update.replaygainupdater import ReplayGainUpdater
from std_err_override import LogWriter
import media.monitor.pure as mmp
from api_clients import api_client as apc
def main(global_config, api_client_config, log_config,
index_create_attempt=False):
for cfg in [global_config, api_client_config]:
if not os.path.exists(cfg): raise NoConfigFile(cfg)
# MMConfig is a proxy around ConfigObj instances. It does not allow
# users of MMConfig instances to modify any config options directly
# through the dictionary. Users of this object must use the methods
# designated for modification.
try: config = MMConfig(global_config)
except NoConfigFile as e:
print("Cannot run mediamonitor2 without configuration file.")
print("Current config path: '%s'" % global_config)
sys.exit(1)
except Exception as e:
print("Unknown error reading configuration file: '%s'" % global_config)
print(str(e))
sys.exit(1)
logging.config.fileConfig(log_config)
#need to wait for Python 2.7 for this..
#logging.captureWarnings(True)
logger = logging.getLogger()
LogWriter.override_std_err(logger)
logfile = unicode( config['logpath'] )
setup_logging(logfile)
log = get_logger()
if not index_create_attempt:
if not os.path.exists(config['index_path']):
log.info("Attempting to create index file:...")
try:
with open(config['index_path'], 'w') as f: f.write(" ")
except Exception as e:
log.info("Failed to create index file with exception: %s" % str(e))
else:
log.info("Created index file, reloading configuration:")
main( global_config, api_client_config, log_config,
index_create_attempt=True )
else:
log.info("Already tried to create index. Will not try again ")
if not os.path.exists(config['index_path']):
log.info("Index file does not exist. Terminating")
log.info("Attempting to set the locale...")
try:
mmp.configure_locale(mmp.get_system_locale())
except FailedToSetLocale as e:
log.info("Failed to set the locale...")
sys.exit(1)
except FailedToObtainLocale as e:
log.info("Failed to obtain the locale form the default path: \
'/etc/default/locale'")
sys.exit(1)
except Exception as e:
log.info("Failed to set the locale for unknown reason. \
Logging exception.")
log.info(str(e))
watch_syncer = WatchSyncer(signal='watch',
chunking_number=config['chunking_number'],
timeout=config['request_max_wait'])
apiclient = apc.AirtimeApiClient.create_right_config(log=log,
config_path=api_client_config)
ReplayGainUpdater.start_replay_gain(apiclient)
sdb = AirtimeDB(apiclient)
manager = Manager()
airtime_receiver = AirtimeMessageReceiver(config,manager)
airtime_notifier = AirtimeNotifier(config, airtime_receiver)
store = apiclient.setup_media_monitor()
airtime_receiver.change_storage({ 'directory':store[u'stor'] })
for watch_dir in store[u'watched_dirs']:
if not os.path.exists(watch_dir):
# Create the watch_directory here
try: os.makedirs(watch_dir)
except Exception as e:
log.error("Could not create watch directory: '%s' \
(given from the database)." % watch_dir)
if os.path.exists(watch_dir):
airtime_receiver.new_watch({ 'directory':watch_dir }, restart=True)
bs = Bootstrapper( db=sdb, watch_signal='watch' )
ed = EventDrainer(airtime_notifier.connection,
interval=float(config['rmq_event_wait']))
# Launch the toucher thread that, every n seconds, updates the timestamp
# recording when this script last ran.
# TODO : verify that this does not interfere with bootstrapping because the
# toucher thread might update the last_ran variable too fast
tt = ToucherThread(path=config['index_path'],
interval=int(config['touch_interval']))
apiclient.register_component('media-monitor')
manager.loop()
__doc__ = """
Usage:
mm2.py --config=<path> --apiclient=<path> --log=<path>
Options:
-h --help Show this screen
--config=<path> path to mm2 config
--apiclient=<path> path to apiclient config
--log=<path> log config at <path>
"""
if __name__ == '__main__':
from docopt import docopt
args = docopt(__doc__,version="mm1.99")
for k in ['--apiclient','--config','--log']:
if not os.path.exists(args[k]):
print("'%s' must exist" % args[k])
sys.exit(0)
print("Running mm1.99")
main(args['--config'],args['--apiclient'],args['--log'])
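
For reference, a typical invocation would look like the following (the config paths are illustrative, not taken from this commit):

python mm2.py --config=/etc/airtime/media-monitor.cfg --apiclient=/etc/airtime/api_client.cfg --log=mm2_logging.cfg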


@@ -0,0 +1,30 @@
#!/usr/bin/python
import sys
import os
import getopt
import pyinotify
import pprint
# a little shit script to test out pyinotify events
class AT(pyinotify.ProcessEvent):
def process_default(self, event):
pprint.pprint(event)
def main():
optlist, arguments = getopt.getopt(sys.argv[1:], '', ["dir="])
ldir = ""
for k,v in optlist:
if k == '--dir':
ldir = v
break
if not os.path.exists(ldir):
print("can't pyinotify dir: '%s'. it don't exist" % ldir)
sys.exit(0)
wm = pyinotify.WatchManager()
notifier = pyinotify.Notifier(wm)
print("Watching: '%s'" % ldir)
wm.add_watch(ldir, pyinotify.ALL_EVENTS, auto_add=True, rec=True, proc_fun=AT())
notifier.loop()
if __name__ == '__main__': main()


@@ -0,0 +1 @@


@@ -0,0 +1,115 @@
bin_dir = "/usr/lib/airtime/api_clients"
#############################
## Common
#############################
# Value needed to access the API
api_key = '3MP2IUR45E6KYQ01CUYK'
# Path to the base of the API
api_base = 'api'
# URL to get the version number of the server API
version_url = 'version/api_key/%%api_key%%'
#URL to register a components IP Address with the central web server
register_component = 'register-component/format/json/api_key/%%api_key%%/component/%%component%%'
# Hostname
base_url = 'localhost'
base_port = 80
#############################
## Config for Media Monitor
#############################
# URL to setup the media monitor
media_setup_url = 'media-monitor-setup/format/json/api_key/%%api_key%%'
# Tell Airtime the file id associated with a show instance.
upload_recorded = 'upload-recorded/format/json/api_key/%%api_key%%/fileid/%%fileid%%/showinstanceid/%%showinstanceid%%'
# URL to tell Airtime to update file's meta data
update_media_url = 'reload-metadata/format/json/api_key/%%api_key%%/mode/%%mode%%'
# URL to tell Airtime we want a listing of all files it knows about
list_all_db_files = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%'
# URL to tell Airtime we want a listing of all dirs its watching (including the stor dir)
list_all_watched_dirs = 'list-all-watched-dirs/format/json/api_key/%%api_key%%'
# URL to tell Airtime we want to add watched directory
add_watched_dir = 'add-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'
# URL to tell Airtime we want to add watched directory
remove_watched_dir = 'remove-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'
# URL to tell Airtime we want to add watched directory
set_storage_dir = 'set-storage-dir/format/json/api_key/%%api_key%%/path/%%path%%'
# URL to tell Airtime about file system mount change
update_fs_mount = 'update-file-system-mount/format/json/api_key/%%api_key%%'
# URL to tell Airtime about file system mount change
handle_watched_dir_missing = 'handle-watched-dir-missing/format/json/api_key/%%api_key%%/dir/%%dir%%'
#############################
## Config for Recorder
#############################
# URL to get the schedule of shows set to record
show_schedule_url = 'recorded-shows/format/json/api_key/%%api_key%%'
# URL to upload the recorded show's file to Airtime
upload_file_url = 'upload-file/format/json/api_key/%%api_key%%'
# URL to commit multiple updates from media monitor at the same time
reload_metadata_group = 'reload-metadata-group/format/json/api_key/%%api_key%%'
#number of retries to upload file if connection problem
upload_retries = 3
#time to wait between attempts to upload file if connection problem (in seconds)
upload_wait = 60
################################################################################
# Uncomment *one of the sets* of values from the API clients below, and comment
# out all the others.
################################################################################
#############################
## Config for Pypo
#############################
# Schedule export path.
# %%from%% - starting date/time in the form YYYY-MM-DD-hh-mm
# %%to%% - starting date/time in the form YYYY-MM-DD-hh-mm
export_url = 'schedule/api_key/%%api_key%%'
get_media_url = 'get-media/file/%%file%%/api_key/%%api_key%%'
# Update whether a schedule group has begun playing.
update_item_url = 'notify-schedule-group-play/api_key/%%api_key%%/schedule_id/%%schedule_id%%'
# Update whether an audio clip is currently playing.
update_start_playing_url = 'notify-media-item-start-play/api_key/%%api_key%%/media_id/%%media_id%%/schedule_id/%%schedule_id%%'
# URL to tell Airtime we want to get stream setting
get_stream_setting = 'get-stream-setting/format/json/api_key/%%api_key%%/'
#URL to update liquidsoap status
update_liquidsoap_status = 'update-liquidsoap-status/format/json/api_key/%%api_key%%/msg/%%msg%%/stream_id/%%stream_id%%/boot_time/%%boot_time%%'
#URL to check live stream auth
check_live_stream_auth = 'check-live-stream-auth/format/json/api_key/%%api_key%%/username/%%username%%/password/%%password%%/djtype/%%djtype%%'
#URL to update source status
update_source_status = 'update-source-status/format/json/api_key/%%api_key%%/sourcename/%%sourcename%%/status/%%status%%'
get_bootstrap_info = 'get-bootstrap-info/format/json/api_key/%%api_key%%'
get_files_without_replay_gain = 'get-files-without-replay-gain/api_key/%%api_key%%/dir_id/%%dir_id%%'
update_replay_gain_value = 'update-replay-gain-value/api_key/%%api_key%%'
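
A minimal sketch (not part of this commit) of how the %%param%% placeholders in the URL templates above get expanded into a full request URL; the real expansion lives in the api_clients code:

url = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%'
params = {'api_key': '3MP2IUR45E6KYQ01CUYK', 'dir_id': '1'}
for key, value in params.iteritems():
    url = url.replace('%%' + key + '%%', value)
full_url = 'http://%s:%d/%s/%s' % ('localhost', 80, 'api', url)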


@@ -0,0 +1,138 @@
bin_dir = "/usr/lib/airtime/api_clients"
############################################
# RabbitMQ settings #
############################################
rabbitmq_host = 'localhost'
rabbitmq_user = 'guest'
rabbitmq_password = 'guest'
rabbitmq_vhost = '/'
############################################
# Media-Monitor preferences #
############################################
check_filesystem_events = 5 #how long to queue up events performed on the files themselves.
check_airtime_events = 30 #how long to queue metadata input from airtime.
touch_interval = 5
chunking_number = 450
request_max_wait = 3.0
rmq_event_wait = 0.5
logpath = '/home/rudi/throwaway/mm2.log'
#############################
## Common
#############################
index_path = '/home/rudi/Airtime/python_apps/media-monitor2/sample_post.txt'
# Value needed to access the API
api_key = '5LF5D953RNS3KJSHN6FF'
# Path to the base of the API
api_base = 'api'
# URL to get the version number of the server API
version_url = 'version/api_key/%%api_key%%'
#URL to register a components IP Address with the central web server
register_component = 'register-component/format/json/api_key/%%api_key%%/component/%%component%%'
# Hostname
base_url = 'localhost'
base_port = 80
#############################
## Config for Media Monitor
#############################
# URL to setup the media monitor
media_setup_url = 'media-monitor-setup/format/json/api_key/%%api_key%%'
# Tell Airtime the file id associated with a show instance.
upload_recorded = 'upload-recorded/format/json/api_key/%%api_key%%/fileid/%%fileid%%/showinstanceid/%%showinstanceid%%'
# URL to tell Airtime to update file's meta data
update_media_url = 'reload-metadata/format/json/api_key/%%api_key%%/mode/%%mode%%'
# URL to tell Airtime we want a listing of all files it knows about
list_all_db_files = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%'
# URL to tell Airtime we want a listing of all dirs its watching (including the stor dir)
list_all_watched_dirs = 'list-all-watched-dirs/format/json/api_key/%%api_key%%'
# URL to tell Airtime we want to add watched directory
add_watched_dir = 'add-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'
# URL to tell Airtime we want to add watched directory
remove_watched_dir = 'remove-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'
# URL to tell Airtime we want to add watched directory
set_storage_dir = 'set-storage-dir/format/json/api_key/%%api_key%%/path/%%path%%'
# URL to tell Airtime about file system mount change
update_fs_mount = 'update-file-system-mount/format/json/api_key/%%api_key%%'
# URL to tell Airtime about file system mount change
handle_watched_dir_missing = 'handle-watched-dir-missing/format/json/api_key/%%api_key%%/dir/%%dir%%'
#############################
## Config for Recorder
#############################
# URL to get the schedule of shows set to record
show_schedule_url = 'recorded-shows/format/json/api_key/%%api_key%%'
# URL to upload the recorded show's file to Airtime
upload_file_url = 'upload-file/format/json/api_key/%%api_key%%'
# URL to commit multiple updates from media monitor at the same time
reload_metadata_group = 'reload-metadata-group/format/json/api_key/%%api_key%%'
#number of retries to upload file if connection problem
upload_retries = 3
#time to wait between attempts to upload file if connection problem (in seconds)
upload_wait = 60
################################################################################
# Uncomment *one of the sets* of values from the API clients below, and comment
# out all the others.
################################################################################
#############################
## Config for Pypo
#############################
# Schedule export path.
# %%from%% - starting date/time in the form YYYY-MM-DD-hh-mm
# %%to%% - starting date/time in the form YYYY-MM-DD-hh-mm
export_url = 'schedule/api_key/%%api_key%%'
get_media_url = 'get-media/file/%%file%%/api_key/%%api_key%%'
# Update whether a schedule group has begun playing.
update_item_url = 'notify-schedule-group-play/api_key/%%api_key%%/schedule_id/%%schedule_id%%'
# Update whether an audio clip is currently playing.
update_start_playing_url = 'notify-media-item-start-play/api_key/%%api_key%%/media_id/%%media_id%%/schedule_id/%%schedule_id%%'
# URL to tell Airtime we want to get stream setting
get_stream_setting = 'get-stream-setting/format/json/api_key/%%api_key%%/'
#URL to update liquidsoap status
update_liquidsoap_status = 'update-liquidsoap-status/format/json/api_key/%%api_key%%/msg/%%msg%%/stream_id/%%stream_id%%/boot_time/%%boot_time%%'
#URL to check live stream auth
check_live_stream_auth = 'check-live-stream-auth/format/json/api_key/%%api_key%%/username/%%username%%/password/%%password%%/djtype/%%djtype%%'
#URL to update source status
update_source_status = 'update-source-status/format/json/api_key/%%api_key%%/sourcename/%%sourcename%%/status/%%status%%'
get_bootstrap_info = 'get-bootstrap-info/format/json/api_key/%%api_key%%'
get_files_without_replay_gain = 'get-files-without-replay-gain/api_key/%%api_key%%/dir_id/%%dir_id%%'
update_replay_gain_value = 'update-replay-gain-value/api_key/%%api_key%%'


@@ -0,0 +1,15 @@
# The tests rely on a lot of absolute paths and other garbage so this file
# configures all of that
music_folder = u'/home/rudi/music'
o_path = u'/home/rudi/throwaway/ACDC_-_Back_In_Black-sample-64kbps.ogg'
watch_path = u'/home/rudi/throwaway/fucking_around/watch/'
real_path1 = u'/home/rudi/throwaway/fucking_around/watch/unknown/unknown/ACDC_-_Back_In_Black-sample-64kbps-64kbps.ogg'
opath = u"/home/rudi/Airtime/python_apps/media-monitor2/tests/"
ppath = u"/home/rudi/Airtime/python_apps/media-monitor2/media/"
api_client_path = '/etc/airtime/api_client.cfg'
# holdover from the time we had a special config for testing
sample_config = api_client_path
real_config = api_client_path


@@ -0,0 +1,7 @@
#!/usr/bin/perl
use strict;
use warnings;
foreach my $file (glob "*.py") {
system("python $file") unless $file =~ /prepare_tests.py/;
}


@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
import unittest
import os
import sys
from api_clients import api_client as apc
import prepare_tests
class TestApiClient(unittest.TestCase):
def setUp(self):
test_path = prepare_tests.api_client_path
print("Running from api_config: %s" % test_path)
if not os.path.exists(test_path):
print("path for config does not exist: '%s' % test_path")
# TODO : is there a cleaner way to exit the unit testing?
sys.exit(1)
self.apc = apc.AirtimeApiClient(config_path=test_path)
self.apc.register_component("api-client-tester")
# All of the following requests should error out in some way
self.bad_requests = [
{ 'mode' : 'dang it', 'is_record' : 0 },
{ 'mode' : 'damn frank', 'is_record' : 1 },
{ 'no_mode' : 'at_all' }, ]
def test_bad_requests(self):
responses = self.apc.send_media_monitor_requests(self.bad_requests, dry=True)
for response in responses:
self.assertTrue( 'key' in response )
self.assertTrue( 'error' in response )
print( "Response: '%s'" % response )
# We don't actually test any well formed requests because it is more
# involved
if __name__ == '__main__': unittest.main()


@@ -0,0 +1,24 @@
api_client = 'airtime'
# where the binary files live
bin_dir = '/usr/lib/airtime/media-monitor'
# where the logging files live
log_dir = '/var/log/airtime/media-monitor'
############################################
# RabbitMQ settings #
############################################
rabbitmq_host = 'localhost'
rabbitmq_user = 'guest'
rabbitmq_password = 'guest'
rabbitmq_vhost = '/'
############################################
# Media-Monitor preferences #
############################################
check_filesystem_events = '5'
check_airtime_events = '30'
list_value_testing = 'val1', 'val2', 'val3'


@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*-
import unittest
import pprint
from media.monitor.config import MMConfig
from media.monitor.exceptions import NoConfigFile, ConfigAccessViolation
pp = pprint.PrettyPrinter(indent=4)
class TestMMConfig(unittest.TestCase):
def setUp(self):
self.real_config = MMConfig("./test_config.cfg")
#pp.pprint(self.real_config.cfg.dict)
def test_bad_config(self):
self.assertRaises( NoConfigFile, lambda : MMConfig("/fake/stuff/here") )
def test_no_set(self):
def myf(): self.real_config['bad'] = 'change'
self.assertRaises( ConfigAccessViolation, myf )
def test_copying(self):
k = 'list_value_testing'
mycopy = self.real_config[k]
mycopy.append("another element")
self.assertEqual( len(mycopy), len(self.real_config[k]) + 1 )
if __name__ == '__main__': unittest.main()
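
Taken together, the tests above pin down MMConfig's contract: reads return copies, writes raise. A short sketch of that behaviour:

from media.monitor.config import MMConfig
cfg = MMConfig('./test_config.cfg')
vals = cfg['list_value_testing']    # ['val1', 'val2', 'val3'] -- a copy
vals.append('val4')                 # the underlying config is unchanged
cfg['list_value_testing'] = []      # raises ConfigAccessViolation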


@@ -0,0 +1,58 @@
import unittest
from media.monitor.eventcontractor import EventContractor
#from media.monitor.exceptions import BadSongFile
from media.monitor.events import FakePyinotify, NewFile, MoveFile, \
DeleteFile
class TestMMP(unittest.TestCase):
def test_event_registered(self):
ev = EventContractor()
e1 = NewFile( FakePyinotify('bullshit.mp3') ).proxify()
e2 = MoveFile( FakePyinotify('bullshit.mp3') ).proxify()
ev.register(e1)
self.assertTrue( ev.event_registered(e2) )
def test_get_old_event(self):
ev = EventContractor()
e1 = NewFile( FakePyinotify('bullshit.mp3') ).proxify()
e2 = MoveFile( FakePyinotify('bullshit.mp3') ).proxify()
ev.register(e1)
self.assertEqual( ev.get_old_event(e2), e1 )
def test_register(self):
ev = EventContractor()
e1 = NewFile( FakePyinotify('bullshit.mp3') ).proxify()
e2 = DeleteFile( FakePyinotify('bullshit.mp3') ).proxify()
self.assertTrue( ev.register(e1) )
self.assertFalse( ev.register(e2) )
self.assertEqual( len(ev.store.keys()), 1 )
delete_ev = e1.safe_pack()[0]
self.assertEqual( delete_ev['mode'], u'delete')
self.assertEqual( len(ev.store.keys()), 0 )
e3 = DeleteFile( FakePyinotify('horseshit.mp3') ).proxify()
self.assertTrue( ev.register(e3) )
self.assertTrue( ev.register(e2) )
def test_register2(self):
ev = EventContractor()
p = 'bullshit.mp3'
events = [
NewFile( FakePyinotify(p) ),
NewFile( FakePyinotify(p) ),
DeleteFile( FakePyinotify(p) ),
NewFile( FakePyinotify(p) ),
NewFile( FakePyinotify(p) ), ]
events = map(lambda x: x.proxify(), events)
actual_events = []
for e in events:
if ev.register(e):
actual_events.append(e)
self.assertEqual( len(ev.store.keys()), 1 )
#packed = [ x.safe_pack() for x in actual_events ]
if __name__ == '__main__': unittest.main()


@@ -0,0 +1,77 @@
import os, shutil
import time
import pyinotify
import unittest
from pydispatch import dispatcher
from media.monitor.listeners import OrganizeListener
from media.monitor.events import OrganizeFile
from os.path import join, normpath, abspath
def create_file(p):
with open(p, 'w') as f: f.write(" ")
class TestOrganizeListener(unittest.TestCase):
def setUp(self):
self.organize_path = 'test_o'
self.sig = 'org'
def my_abs_path(x):
return normpath(join(os.getcwd(), x))
self.sample_files = [ my_abs_path(join(self.organize_path, f))
for f in [ "gogi.mp3",
"gio.mp3",
"mimino.ogg" ] ]
os.mkdir(self.organize_path)
def test_flush_events(self):
org = self.create_org()
self.create_sample_files()
received = [0]
def pass_event(sender, event):
if isinstance(event, OrganizeFile):
received[0] += 1
self.assertTrue( abspath(event.path) in self.sample_files )
dispatcher.connect(pass_event, signal=self.sig, sender=dispatcher.Any,
weak=True)
org.flush_events( self.organize_path )
self.assertEqual( received[0], len(self.sample_files) )
self.delete_sample_files()
def test_process(self):
org = self.create_org()
received = [0]
def pass_event(sender, event):
if isinstance(event, OrganizeFile):
self.assertTrue( event.path in self.sample_files )
received[0] += 1
dispatcher.connect(pass_event, signal=self.sig, sender=dispatcher.Any,
weak=True)
wm = pyinotify.WatchManager()
def stopper(notifier):
return received[0] == len(self.sample_files)
tn = pyinotify.ThreadedNotifier(wm, default_proc_fun=org)
tn.daemon = True
tn.start()
wm.add_watch(self.organize_path, pyinotify.ALL_EVENTS, rec=True,
auto_add=True)
time.sleep(0.5)
self.create_sample_files()
time.sleep(1)
self.assertEqual( len(self.sample_files), received[0] )
self.delete_sample_files()
def tearDown(self):
shutil.rmtree(self.organize_path)
def create_sample_files(self):
for f in self.sample_files: create_file(f)
def delete_sample_files(self):
for f in self.sample_files: os.remove(f)
def create_org(self):
return OrganizeListener( signal=self.sig )
if __name__ == '__main__': unittest.main()


@@ -0,0 +1,41 @@
import unittest
from media.monitor.manager import Manager
def add_paths(m,paths):
for path in paths:
m.add_watch_directory(path)
class TestManager(unittest.TestCase):
def setUp(self):
self.opath = "/home/rudi/Airtime/python_apps/media-monitor2/tests/"
self.ppath = "/home/rudi/Airtime/python_apps/media-monitor2/media/"
self.paths = [self.opath, self.ppath]
def test_init(self):
man = Manager()
self.assertTrue( len(man.watched_directories) == 0 )
self.assertTrue( man.watch_channel is not None )
self.assertTrue( man.organize_channel is not None )
def test_organize_path(self):
man = Manager()
man.set_organize_path( self.opath )
self.assertEqual( man.get_organize_path(), self.opath )
man.set_organize_path( self.ppath )
self.assertEqual( man.get_organize_path(), self.ppath )
def test_add_watch_directory(self):
man = Manager()
add_paths(man, self.paths)
for path in self.paths:
self.assertTrue( man.has_watch(path) )
def test_remove_watch_directory(self):
man = Manager()
add_paths(man, self.paths)
for path in self.paths:
self.assertTrue( man.has_watch(path) )
man.remove_watch_directory( path )
self.assertTrue( not man.has_watch(path) )
if __name__ == '__main__': unittest.main()


@@ -0,0 +1,51 @@
# -*- coding: utf-8 -*-
import os
import unittest
import sys
import media.monitor.metadata as mmm
class TestMetadata(unittest.TestCase):
def setUp(self):
self.music_folder = u'/home/rudi/music'
def test_got_music_folder(self):
t = os.path.exists(self.music_folder)
if not t:
print("'%s' must exist for this test to run." % self.music_folder)
sys.exit(1)
self.assertTrue(t)
def test_metadata(self):
full_paths = (os.path.join(self.music_folder,filename) for filename in os.listdir(self.music_folder))
i = 0
for full_path in full_paths:
if os.path.isfile(full_path):
md_full = mmm.Metadata(full_path)
md = md_full.extract()
if i < 3:
i += 1
print("Sample metadata: '%s'" % md)
self.assertTrue( len( md.keys() ) > 0 )
self.assertTrue( 'MDATA_KEY_MD5' in md )
utf8 = md_full.utf8()
for k,v in md.iteritems():
if hasattr(utf8[k], 'decode'):
self.assertEqual( utf8[k].decode('utf-8'), md[k] )
else: print("Skipping '%s' because it's a directory" % full_path)
def test_airtime_mutagen_dict(self):
for muta,airtime in mmm.mutagen2airtime.iteritems():
self.assertEqual( mmm.airtime2mutagen[airtime], muta )
def test_format_length(self):
# TODO : add some real tests for this function
x1 = 123456
print("Formatting '%s' to '%s'" % (x1, mmm.format_length(x1)))
def test_truncate_to_length(self):
s1 = "testing with non string literal"
s2 = u"testing with unicode literal"
self.assertEqual( len(mmm.truncate_to_length(s1, 5)), 5)
self.assertEqual( len(mmm.truncate_to_length(s2, 8)), 8)
if __name__ == '__main__': unittest.main()


@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
import unittest
import json
from media.monitor.airtime import AirtimeNotifier, AirtimeMessageReceiver
from mock import patch, Mock
from media.monitor.config import MMConfig
from media.monitor.manager import Manager
def filter_ev(d): return { i : j for i,j in d.iteritems() if i != 'event_type' }
class TestReceiver(unittest.TestCase):
def setUp(self):
# TODO : properly mock this later
cfg = {}
self.amr = AirtimeMessageReceiver(cfg, Manager())
def test_supported(self):
# Every supported message should fire something
for event_type in self.amr.dispatch_table.keys():
msg = { 'event_type' : event_type, 'extra_param' : 123 }
filtered = filter_ev(msg)
# There should be a better way to test the following without
# patching private methods
with patch.object(self.amr, '_execute_message') as mock_method:
mock_method.side_effect = None
ret = self.amr.message(msg)
self.assertTrue(ret)
mock_method.assert_called_with(event_type, filtered)
def test_no_mod_message(self):
ev = { 'event_type' : 'new_watch', 'directory' : 'something here' }
filtered = filter_ev(ev)
with patch.object(self.amr, '_execute_message') as mock_method:
mock_method.return_value = "tested"
ret = self.amr.message(ev)
self.assertTrue( ret ) # message passing worked
mock_method.assert_called_with(ev['event_type'], filtered)
# test that our copy of the message does not get modified
self.assertTrue( 'event_type' in ev )
class TestAirtimeNotifier(unittest.TestCase):
def test_handle_message(self):
#from configobj import ConfigObj
test_cfg = MMConfig('./test_config.cfg')
ran = [False]
class MockReceiver(object):
def message(me,m):
self.assertTrue( 'event_type' in m )
self.assertEqual( m['path'], '/bs/path' )
ran[0] = True
airtime = AirtimeNotifier(cfg=test_cfg, message_receiver=MockReceiver())
m1 = Mock()
m1.ack = "ack'd message"
m2 = Mock()
m2.body = json.dumps({ 'event_type' : 'file_delete', 'path' : '/bs/path' })
airtime.handle_message(body=m1,message=m2)
self.assertTrue( ran[0] )
if __name__ == '__main__': unittest.main()


@@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
import unittest
import media.monitor.owners as owners
class TestMMP(unittest.TestCase):
def setUp(self):
self.f = "test.mp3"
def test_has_owner(self):
owners.reset_owners()
o = 12345
self.assertTrue( owners.add_file_owner(self.f,o) )
self.assertTrue( owners.has_owner(self.f) )
def test_add_file_owner(self):
owners.reset_owners()
self.assertFalse( owners.add_file_owner('testing', -1) )
self.assertTrue( owners.add_file_owner(self.f, 123) )
self.assertTrue( owners.add_file_owner(self.f, 123) )
self.assertTrue( owners.add_file_owner(self.f, 456) )
def test_remove_file_owner(self):
owners.reset_owners()
self.assertTrue( owners.add_file_owner(self.f, 123) )
self.assertTrue( owners.remove_file_owner(self.f) )
self.assertFalse( owners.remove_file_owner(self.f) )
def test_get_owner(self):
owners.reset_owners()
self.assertTrue( owners.add_file_owner(self.f, 123) )
self.assertEqual( owners.get_owner(self.f), 123, "file is owned" )
self.assertEqual( owners.get_owner("random_stuff.txt"), -1,
"file is not owned" )
if __name__ == '__main__': unittest.main()


@@ -0,0 +1,131 @@
# -*- coding: utf-8 -*-
import unittest
import os
import media.monitor.pure as mmp
from media.monitor.metadata import Metadata
class TestMMP(unittest.TestCase):
def setUp(self):
self.md1 = {'MDATA_KEY_MD5': '71185323c2ab0179460546a9d0690107',
'MDATA_KEY_FTYPE': 'audioclip',
'MDATA_KEY_MIME': 'audio/vorbis',
'MDATA_KEY_DURATION': '0:0:25.000687',
'MDATA_KEY_SAMPLERATE': 48000,
'MDATA_KEY_BITRATE': 64000,
'MDATA_KEY_REPLAYGAIN': 0,
'MDATA_KEY_TITLE': u'ACDC_-_Back_In_Black-sample-64kbps'}
def test_apply_rules(self):
sample_dict = {
'key' : 'val',
'test' : 'IT',
}
rules = {
'key' : lambda x : x.upper(),
'test' : lambda y : y.lower()
}
sample_dict = mmp.apply_rules_dict(sample_dict, rules)
self.assertEqual(sample_dict['key'], 'VAL')
self.assertEqual(sample_dict['test'], 'it')
def test_default_to(self):
sd = { }
def_keys = ['one','two','three']
sd = mmp.default_to(dictionary=sd, keys=def_keys, default='DEF')
for k in def_keys: self.assertEqual( sd[k], 'DEF' )
def test_normalized_metadata(self):
#Recorded show test first
orig = Metadata.airtime_dict({
'date' : [u'2012-08-21'],
'tracknumber' : [u'2'],
'title' : [u'record-2012-08-21-11:29:00'],
'artist' : [u'Airtime Show Recorder']
})
orga = Metadata.airtime_dict({
'date' : [u'2012-08-21'],
'tracknumber' : [u'2'],
'artist' : [u'Airtime Show Recorder'],
'title' : [u'record-2012-08-21-11:29:00']
})
orga['MDATA_KEY_FTYPE'] = u'audioclip'
orig['MDATA_KEY_BITRATE'] = u'256000'
orga['MDATA_KEY_BITRATE'] = u'256000'
old_path = "/home/rudi/recorded/2012-08-21-11:29:00.ogg"
normalized = mmp.normalized_metadata(orig, old_path)
normalized['MDATA_KEY_BITRATE'] = u'256000'
self.assertEqual( orga, normalized )
organized_base_name = "11:29:00-record-256kbps.ogg"
base = "/srv/airtime/stor/"
organized_path = mmp.organized_path(old_path,base, normalized)
self.assertEqual(os.path.basename(organized_path), organized_base_name)
def test_normalized_metadata2(self):
"""
cc-4305
"""
orig = Metadata.airtime_dict({
'date' : [u'2012-08-27'],
'tracknumber' : [u'3'],
'title' : [u'18-11-00-Untitled Show'],
'artist' : [u'Airtime Show Recorder']
})
old_path = "/home/rudi/recorded/doesnt_really_matter.ogg"
normalized = mmp.normalized_metadata(orig, old_path)
normalized['MDATA_KEY_BITRATE'] = u'256000'
opath = mmp.organized_path(old_path, "/srv/airtime/stor/",
normalized)
# TODO : add a better test than this...
self.assertTrue( len(opath) > 0 )
def test_normalized_metadata3(self):
"""
Test the case where the metadata is empty
"""
orig = Metadata.airtime_dict({})
paths_unknown_title = [
("/testin/unknown-unknown-unknown.mp3",""),
("/testin/01-unknown-123kbps.mp3",""),
("/testin/02-unknown-140kbps.mp3",""),
("/testin/unknown-unknown-123kbps.mp3",""),
("/testin/unknown-bibimbop-unknown.mp3","bibimbop"),
]
for p,res in paths_unknown_title:
normalized = mmp.normalized_metadata(orig, p)
self.assertEqual( normalized['MDATA_KEY_TITLE'], res)
def test_file_md5(self):
p = os.path.realpath(__file__)
m1 = mmp.file_md5(p)
m2 = mmp.file_md5(p,10)
self.assertTrue( m1 != m2 )
self.assertRaises( ValueError, lambda : mmp.file_md5('/bull/shit/path') )
self.assertTrue( m1 == mmp.file_md5(p) )
def test_sub_path(self):
f1 = "/home/testing/123.mp3"
d1 = "/home/testing"
d2 = "/home/testing/"
self.assertTrue( mmp.sub_path(d1, f1) )
self.assertTrue( mmp.sub_path(d2, f1) )
def test_parse_int(self):
self.assertEqual( mmp.parse_int("123"), "123" )
self.assertEqual( mmp.parse_int("123asf"), "123" )
self.assertEqual( mmp.parse_int("asdf"), None )
def test_owner_id(self):
start_path = "testing.mp3"
id_path = "testing.mp3.identifier"
o_id = 123
f = open(id_path, 'w')
f.write("123")
f.close()
possible_id = mmp.owner_id(start_path)
self.assertFalse( os.path.exists(id_path) )
self.assertEqual( possible_id, o_id )
self.assertEqual( -1, mmp.owner_id("something.random") )
if __name__ == '__main__': unittest.main()
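
The owner_id test above implies a small hand-off protocol: the owner of an imported file is communicated through a "<path>.identifier" sidecar file, which mmp.owner_id() reads and then deletes, returning -1 when no sidecar exists. A sketch:

import media.monitor.pure as mmp
with open('song.mp3.identifier', 'w') as f:
    f.write('42')
print(mmp.owner_id('song.mp3'))     # -> 42; the sidecar file is consumed
print(mmp.owner_id('other.mp3'))    # -> -1, nothing claims ownership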


@@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-
import unittest
import os
from media.monitor.syncdb import AirtimeDB
from media.monitor.log import get_logger
from media.monitor.pure import partition
import api_clients.api_client as ac
import prepare_tests
class TestAirtimeDB(unittest.TestCase):
def setUp(self):
self.ac = ac.AirtimeApiClient(logger=get_logger(),
config_path=prepare_tests.real_config)
def test_syncdb_init(self):
sdb = AirtimeDB(self.ac)
self.assertTrue( len(sdb.list_storable_paths()) > 0 )
def test_list(self):
self.sdb = AirtimeDB(self.ac)
for watch_dir in self.sdb.list_storable_paths():
self.assertTrue( os.path.exists(watch_dir) )
def test_directory_get_files(self):
sdb = AirtimeDB(self.ac)
print(sdb.list_storable_paths())
for wdir in sdb.list_storable_paths():
files = sdb.directory_get_files(wdir)
print( "total files: %d" % len(files) )
self.assertTrue( len(files) >= 0 )
self.assertTrue( isinstance(files, set) )
exist, deleted = partition(os.path.exists, files)
print("(exist, deleted) = (%d, %d)" % ( len(exist), len(deleted) ) )
if __name__ == '__main__': unittest.main()


@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
import unittest
import time
import media.monitor.pure as mmp
from media.monitor.toucher import Toucher, ToucherThread
class BaseTest(unittest.TestCase):
def setUp(self):
self.p = "api_client.cfg"
class TestToucher(BaseTest):
def test_toucher(self):
t1 = mmp.last_modified(self.p)
t = Toucher(self.p)
t()
t2 = mmp.last_modified(self.p)
print("(t1,t2) = (%d, %d) diff => %d" % (t1, t2, t2 - t1))
self.assertTrue( t2 > t1 )
class TestToucherThread(BaseTest):
def test_thread(self):
t1 = mmp.last_modified(self.p)
ToucherThread(self.p, interval=1)
time.sleep(2)
t2 = mmp.last_modified(self.p)
print("(t1,t2) = (%d, %d) diff => %d" % (t1, t2, t2 - t1))
self.assertTrue( t2 > t1 )
if __name__ == '__main__': unittest.main()


@@ -6,7 +6,7 @@ virtualenv_bin="/usr/lib/airtime/airtime_virtualenv/bin/"
ls_user="pypo"
export HOME="/var/tmp/airtime/pypo/"
api_client_path="/usr/lib/airtime/"
ls_path="/usr/bin/airtime-liquidsoap --verbose"
ls_path="/usr/bin/airtime-liquidsoap --verbose -f"
ls_param="/usr/lib/airtime/pypo/bin/liquidsoap_scripts/ls_script.liq"
exec 2>&1


@@ -0,0 +1,85 @@
#!/bin/bash
### BEGIN INIT INFO
# Provides: airtime-liquidsoap
# Required-Start: $local_fs $remote_fs $network $syslog
# Required-Stop: $local_fs $remote_fs $network $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Liquidsoap daemon
### END INIT INFO
USERID=pypo
GROUPID=pypo
NAME="Liquidsoap Playout Engine"
DAEMON=/usr/lib/airtime/pypo/bin/airtime-liquidsoap
PIDFILE=/var/run/airtime-liquidsoap.pid
start () {
chown pypo:pypo /var/log/airtime/pypo
chown pypo:pypo /var/log/airtime/pypo-liquidsoap
start-stop-daemon --start --background --quiet --chuid $USERID:$GROUPID \
--nicelevel -15 --make-pidfile --pidfile $PIDFILE --startas $DAEMON
monit monitor airtime-liquidsoap >/dev/null 2>&1
}
stop () {
monit unmonitor airtime-liquidsoap >/dev/null 2>&1
/usr/lib/airtime/airtime_virtualenv/bin/python /usr/lib/airtime/pypo/bin/liquidsoap_scripts/liquidsoap_prepare_terminate.py
# Send TERM after 5 seconds, wait at most 30 seconds.
start-stop-daemon --stop --oknodo --retry 5 --quiet --pidfile $PIDFILE
rm -f $PIDFILE
}
start_no_monit() {
start-stop-daemon --start --background --quiet --chuid $USERID:$USERID --make-pidfile --pidfile $PIDFILE --startas $DAEMON
}
case "${1:-''}" in
'stop')
echo -n "Stopping Liquidsoap: "
stop
echo "Done."
;;
'start')
echo -n "Starting Liquidsoap: "
start
echo "Done."
;;
'restart')
# restart commands here
echo -n "Restarting Liquidsoap: "
stop
start
echo "Done."
;;
'status')
if [ -f "/var/run/airtime-liquidsoap.pid" ]; then
pid=`cat /var/run/airtime-liquidsoap.pid`
if [ -d "/proc/$pid" ]; then
echo "Liquidsoap is running"
exit 0
fi
fi
echo "Liquidsoap is not running"
exit 1
;;
'start-no-monit')
# restart commands here
echo -n "Starting $NAME: "
start_no_monit
echo "Done."
;;
*) # no parameter specified
echo "Usage: $SELF start|stop|restart"
exit 1
;;
esac


@@ -25,7 +25,7 @@ export PYTHONPATH=${api_client_path}:$PYTHONPATH
export LC_ALL=`cat /etc/default/locale | grep "LANG=" | cut -d= -f2 | tr -d "\n\""`
export TERM=xterm
# Note the -u when calling python! we need it to get unbuffered binary stdout and stderr
exec python -u ${pypo_path}${pypo_script} > /var/log/airtime/pypo/py-interpreter.log 2>&1
exec python ${pypo_path}${pypo_script} > /var/log/airtime/pypo/py-interpreter.log 2>&1
# EOF


@@ -9,80 +9,30 @@
# Short-Description: Manage airtime-playout daemon
### END INIT INFO
USERID=pypo
ROOTUSERID=root
GROUPID=pypo
NAME=Airtime\ Playout
USERID=root
NAME="Airtime Scheduler Engine"
DAEMON0=/usr/lib/airtime/pypo/bin/airtime-playout
PIDFILE0=/var/run/airtime-playout.pid
DAEMON1=/usr/lib/airtime/pypo/bin/airtime-liquidsoap
PIDFILE1=/var/run/airtime-liquidsoap.pid
liquidsoap_start () {
start-stop-daemon --start --background --quiet --chuid $USERID:$GROUPID \
--nicelevel -15 --make-pidfile --pidfile $PIDFILE1 --startas $DAEMON1
monit monitor airtime-liquidsoap >/dev/null 2>&1
}
liquidsoap_stop () {
monit unmonitor airtime-liquidsoap >/dev/null 2>&1
/usr/lib/airtime/airtime_virtualenv/bin/python /usr/lib/airtime/pypo/bin/liquidsoap_scripts/liquidsoap_prepare_terminate.py
# Send TERM after 5 seconds, wait at most 30 seconds.
start-stop-daemon --stop --oknodo --retry TERM/5/0/30 --quiet --pidfile $PIDFILE1
rm -f $PIDFILE1
}
stop_pypo () {
monit unmonitor airtime-playout >/dev/null 2>&1
# Send TERM after 5 seconds, wait at most 30 seconds.
start-stop-daemon --stop --oknodo --retry TERM/5/0/30 --quiet --pidfile $PIDFILE0
rm -f $PIDFILE0
}
DAEMON=/usr/lib/airtime/pypo/bin/airtime-playout
PIDFILE=/var/run/airtime-playout.pid
start () {
chown pypo:pypo /etc/airtime
chown pypo:pypo /etc/airtime/liquidsoap.cfg
start-stop-daemon --start --background --quiet --chuid $ROOTUSERID:$ROOTUSERID --make-pidfile --pidfile $PIDFILE0 --startas $DAEMON0
start-stop-daemon --start --background --quiet --chuid $USERID:$USERID --make-pidfile --pidfile $PIDFILE --startas $DAEMON
monit monitor airtime-playout >/dev/null 2>&1
liquidsoap_start
}
stop () {
# Send TERM after 5 seconds, wait at most 30 seconds.
monit unmonitor airtime-playout >/dev/null 2>&1
start-stop-daemon --stop --oknodo --retry TERM/5/0/30 --quiet --pidfile $PIDFILE0
rm -f $PIDFILE0
liquidsoap_stop
}
monit_restart() {
start-stop-daemon --stop --oknodo --retry TERM/5/0/30 --quiet --pidfile $PIDFILE0
rm -f $PIDFILE0
/usr/lib/airtime/airtime_virtualenv/bin/python /usr/lib/airtime/pypo/bin/liquidsoap_scripts/liquidsoap_prepare_terminate.py
start-stop-daemon --stop --oknodo --retry TERM/5/0/30 --quiet --pidfile $PIDFILE1
rm -f $PIDFILE1
start-stop-daemon --start --background --quiet --chuid $USERID:$GROUPID --make-pidfile --pidfile $PIDFILE0 --startas $DAEMON0
start-stop-daemon --start --background --quiet --chuid $USERID:$GROUPID \
--nicelevel -15 --make-pidfile --pidfile $PIDFILE1 --startas $DAEMON1
start-stop-daemon --stop --oknodo --retry TERM/5/0/30 --quiet --pidfile $PIDFILE
rm -f $PIDFILE
}
start_no_monit() {
start-stop-daemon --start --background --quiet --chuid $ROOTUSERID:$ROOTUSERID --make-pidfile --pidfile $PIDFILE0 --startas $DAEMON0
liquidsoap_start
start-stop-daemon --start --background --quiet --chuid $USERID:$USERID --make-pidfile --pidfile $PIDFILE --startas $DAEMON
}
case "${1:-''}" in
@@ -111,41 +61,13 @@ case "${1:-''}" in
start_no_monit
echo "Done."
;;
'monit-restart')
# restart commands here
echo -n "Monit Restarting $NAME: "
monit_restart
echo "Done."
;;
'status')
# status commands here
/usr/bin/airtime-check-system
;;
'stop-liquidsoap')
echo -n "Stopping Liquidsoap: "
liquidsoap_stop
echo "Done."
;;
'start-liquidsoap')
echo -n "Starting Liquidsoap: "
liquidsoap_start
echo "Done."
;;
'restart-liquidsoap')
# restart commands here
echo -n "Restarting Liquidsoap: "
liquidsoap_stop
liquidsoap_start
echo "Done."
;;
'pypo-stop')
# restart commands here
echo -n "Restarting Pypo: "
stop_pypo
echo "Done."
;;
*) # no parameter specified
echo "Usage: $SELF start|stop|restart|status"
exit 1
;;
esac


@@ -98,6 +98,7 @@ try:
#copy init.d script
shutil.copy(config["bin_dir"]+"/bin/airtime-playout-init-d", "/etc/init.d/airtime-playout")
shutil.copy(config["bin_dir"]+"/bin/airtime-liquidsoap-init-d", "/etc/init.d/airtime-liquidsoap")
#copy log rotate script
shutil.copy(config["bin_dir"]+"/bin/liquidsoap_scripts/airtime-liquidsoap.logrotate", "/etc/logrotate.d/airtime-liquidsoap")


@@ -23,15 +23,15 @@ def get_os_codename():
try:
p = Popen("which lsb_release > /dev/null", shell=True)
sts = os.waitpid(p.pid, 0)[1]
if (sts == 0):
#lsb_release is available on this system. Let's get the os codename
p = Popen("lsb_release -sc", shell=True, stdout=PIPE)
codename = p.communicate()[0].strip('\r\n')
p = Popen("lsb_release -sd", shell=True, stdout=PIPE)
fullname = p.communicate()[0].strip('\r\n')
return (codename, fullname)
except Exception, e:
pass
@@ -58,7 +58,7 @@ def generate_liquidsoap_config(ss):
fh.write(api_client.encode_to(buffer))
fh.write('log_file = "/var/log/airtime/pypo-liquidsoap/<script>.log"\n')
fh.close()
PATH_INI_FILE = '/etc/airtime/pypo.cfg'
PATH_LIQUIDSOAP_BIN = '/usr/lib/airtime/pypo/bin/liquidsoap_bin'
@@ -71,54 +71,46 @@ try:
except Exception, e:
print 'Error loading config file: ', e
sys.exit(1)
try:
try:
#select appropriate liquidsoap file for given system os/architecture
architecture = platform.architecture()[0]
arch = arch_map[architecture]
print "* Detecting OS: ...",
(codename, fullname) = get_os_codename()
print " Found %s (%s) on %s architecture" % (fullname, codename, arch)
print " * Installing Liquidsoap binary"
binary_path = os.path.join(PATH_LIQUIDSOAP_BIN, "liquidsoap_%s_%s" % (codename, arch))
try:
open(binary_path)
print " Found %s (%s) on %s architecture" % (fullname, codename, arch)
print " * Creating symlink to Liquidsoap binary"
p = Popen("which liquidsoap", shell=True, stdout=PIPE)
liq_path = p.communicate()[0].strip()
symlink_path = "/usr/bin/airtime-liquidsoap"
if p.returncode == 0:
try:
os.remove("/usr/bin/airtime-liquidsoap")
except OSError, e:
#only get here if it doesn't exist
os.unlink(symlink_path)
except Exception:
#liq_path DNE, which is OK.
pass
os.symlink(binary_path, "/usr/bin/airtime-liquidsoap")
except IOError, e:
"""
shutil.copy can throw this exception for two reasons. First reason is that it cannot open the source file.
This is when the liquidsoap file we requested does not exist, and therefore tells the user we don't support
their OS/System architecture. The second reason for this exception is the shutil.copy cannot open the target file.
Since this script is being run as root (and we cannot install to a read-only device), this should never happen. So
it is safe to assume this exception is a result of the first case.
Note: We cannot simply use os.path.exists before this, since it sometimes gives us "false" incorrectly
"""
print "Unsupported OS/system architecture."
print e
os.symlink(liq_path, symlink_path)
else:
print " * Liquidsoap binary not found!"
sys.exit(1)
#initialize init.d scripts
subprocess.call("update-rc.d airtime-playout defaults >/dev/null 2>&1", shell=True)
subprocess.call("update-rc.d airtime-liquidsoap defaults >/dev/null 2>&1", shell=True)
#clear out an previous pypo cache
print "* Clearing previous pypo cache"
print "* Clearing previous pypo cache"
subprocess.call("rm -rf /var/tmp/airtime/pypo/cache/scheduler/* >/dev/null 2>&1", shell=True)
if "airtime_service_start" in os.environ and os.environ["airtime_service_start"] == "t":
print "* Waiting for pypo processes to start..."
print "* Waiting for pypo processes to start..."
subprocess.call("invoke-rc.d airtime-playout start-no-monit > /dev/null 2>&1", shell=True)
subprocess.call("invoke-rc.d airtime-liquidsoap start-no-monit > /dev/null 2>&1", shell=True)
except Exception, e:
print e


@@ -30,6 +30,7 @@ try:
#remove init.d script
print " * Removing Pypo init.d Script"
remove_file("/etc/init.d/airtime-playout")
remove_file("/etc/init.d/airtime-liquidsoap")
#remove bin, cache, tmp and file dir
print " * Removing Pypo Program Directory"


@@ -8,7 +8,7 @@ if os.geteuid() != 0:
try:
#stop pypo and liquidsoap processes
print "Waiting for pypo processes to stop...",
print "Waiting for Pypo process to stop...",
try:
os.remove("/usr/bin/airtime-liquidsoap")
except Exception, e:
@@ -18,5 +18,12 @@ try:
print "OK"
else:
print "Wasn't running"
print "Waiting for Liquidsoap process to stop...",
if (os.path.exists('/etc/init.d/airtime-liquidsoap')):
subprocess.call("invoke-rc.d airtime-liquidsoap stop", shell=True)
print "OK"
else:
print "Wasn't running"
except Exception, e:
print e


@@ -31,8 +31,13 @@ try:
sys.exit(1)
os.system("invoke-rc.d airtime-playout stop")
os.system("invoke-rc.d airtime-liquidsoap stop")
os.system("rm -f /etc/init.d/airtime-playout")
os.system("rm -f /etc/init.d/airtime-liquidsoap")
os.system("update-rc.d -f airtime-playout remove >/dev/null 2>&1")
os.system("update-rc.d -f airtime-liquidsoap remove >/dev/null 2>&1")
#remove logrotate script
os.system("rm -f /etc/logrotate.d/airtime-liquidsoap")
@@ -44,7 +49,7 @@ try:
remove_path(config["cache_base_dir"])
print "Removing symlinks"
os.system("rm -f /usr/bin/airtime-playout")
os.system("rm -f /usr/bin/airtime-liquidsoap")
print "Removing pypo files"
remove_path(config["bin_dir"])


@@ -1,7 +1,6 @@
import logging
import sys
from api_clients import api_client
from configobj import ConfigObj
from api_clients.api_client import AirtimeApiClient
def generate_liquidsoap_config(ss):
data = ss['msg']
@@ -9,31 +8,24 @@ def generate_liquidsoap_config(ss):
fh.write("################################################\n")
fh.write("# THIS FILE IS AUTO GENERATED. DO NOT CHANGE!! #\n")
fh.write("################################################\n")
for d in data:
buffer = d[u'keyname'] + " = "
if(d[u'type'] == 'string'):
temp = d[u'value']
buffer += '"%s"' % temp
key = d['keyname']
str_buffer = d[u'keyname'] + " = "
if d[u'type'] == 'string':
val = '"%s"' % d['value']
else:
temp = d[u'value']
if(temp == ""):
temp = "0"
buffer += temp
buffer += "\n"
fh.write(api_client.encode_to(buffer))
val = d[u'value']
val = val if len(val) > 0 else "0"
str_buffer = "%s = %s\n" % (key, val)
fh.write(str_buffer.encode('utf-8'))
fh.write('log_file = "/var/log/airtime/pypo-liquidsoap/<script>.log"\n')
fh.close()
PATH_INI_FILE = '/etc/airtime/pypo.cfg'
try:
config = ConfigObj(PATH_INI_FILE)
except Exception, e:
print 'Error loading config file: ', e
sys.exit(1)
logging.basicConfig(format='%(message)s')
ac = api_client.api_client_factory(config, logging.getLogger())
ac = AirtimeApiClient(logging.getLogger())
ss = ac.get_stream_setting()
if ss is not None:


@@ -1,27 +1,18 @@
from api_clients import *
from configobj import ConfigObj
import sys
import json
try:
config = ConfigObj('/etc/airtime/pypo.cfg')
except Exception, e:
print 'error: ', e
sys.exit()
api_clients = api_client.api_client_factory(config)
api_clients = api_client.AirtimeApiClient()
dj_type = sys.argv[1]
username = sys.argv[2]
password = sys.argv[3]
type = ''
source_type = ''
if dj_type == '--master':
type = 'master'
source_type = 'master'
elif dj_type == '--dj':
type = 'dj'
response = api_clients.check_live_stream_auth(username, password, type)
source_type = 'dj'
print response['msg']
response = api_clients.check_live_stream_auth(username, password, source_type)
print response['msg']
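
This script is invoked with three positional arguments; an assumed invocation (the script's filename is not shown in this hunk) would be:

python <auth_script>.py --dj some_username some_password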


@@ -1,21 +1,33 @@
def notify(m)
log("/usr/lib/airtime/pypo/bin/liquidsoap_scripts/notify.sh --data='#{!pypo_data}' --media-id=#{m['schedule_table_id']} &")
system("/usr/lib/airtime/pypo/bin/liquidsoap_scripts/notify.sh --data='#{!pypo_data}' --media-id=#{m['schedule_table_id']} &")
#current_media_id := string_of(m['schedule_table_id'])
command = "/usr/lib/airtime/pypo/bin/liquidsoap_scripts/notify.sh --data='#{!pypo_data}' --media-id=#{m['schedule_table_id']} &"
log(command)
system(command)
end
def notify_stream(m)
json_str = string.replace(pattern="\n",(fun (s) -> ""), json_of(m))
#if a string has a single apostrophe in it, escape it by ending the quoted string
#right before the apostrophe, emitting an escaped apostrophe, and then starting a
#new quoted string right after it. This is why the replacement uses 3 apostrophes.
json_str = string.replace(pattern="'",(fun (s) -> "'\''"), json_str)
command = "/usr/lib/airtime/pypo/bin/liquidsoap_scripts/notify.sh --webstream='#{json_str}' --media-id=#{!current_dyn_id} &"
log(command)
system(command)
end
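
For example, a metadata value such as it's is rewritten to it'\''s, which survives being wrapped in single quotes on the notify.sh command line.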
# A function applied to each metadata chunk
def append_title(m) =
log("Using stream_format #{!stream_metadata_type}")
if !stream_metadata_type == 1 then
[("artist","#{!show_name} - #{m['artist']}")]
[("title", "#{!show_name} - #{m['artist']} - #{m['title']}")]
elsif !stream_metadata_type == 2 then
[("artist",!station_name), ("title", !show_name)]
[("title", "#{!station_name} - #{!show_name}")]
else
[]
[("title", "#{m['artist']} - #{m['title']}")]
end
end
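
For example, with artist "AC/DC" and title "Back In Black", stream_metadata_type == 1 now produces a stream title of "<show name> - AC/DC - Back In Black", type 2 produces "<station name> - <show name>", and the default case produces "AC/DC - Back In Black".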
def crossfade(s)
def crossfade_airtime(s)
#duration is automatically overwritten by metadata fields passed in
#with audio
s = fade.in(type="log", duration=0., s)
@@ -62,7 +74,8 @@ def to_live(old,new) =
end
def output_to(output_type, type, bitrate, host, port, pass, mount_point, url, description, genre, user, s, stream, connected, name) =
def output_to(output_type, type, bitrate, host, port, pass, mount_point, url, description, genre, user, s, stream, connected, name, channels) =
source = ref s
def on_error(msg)
connected := "false"
system("/usr/lib/airtime/pypo/bin/liquidsoap_scripts/notify.sh --error='#{msg}' --stream-id=#{stream} --time=#{!time} &")
@@ -74,74 +87,166 @@ def output_to(output_type, type, bitrate, host, port, pass, mount_point, url, de
system("/usr/lib/airtime/pypo/bin/liquidsoap_scripts/notify.sh --connect --stream-id=#{stream} --time=#{!time} &")
log("/usr/lib/airtime/pypo/bin/liquidsoap_scripts/notify.sh --connect --stream-id=#{stream} --time=#{!time} &")
end
stereo = (channels == "stereo")
if output_type == "icecast" then
user_ref = ref user
if user == "" then
user_ref := "source"
end
output = output.icecast(host = host,
output_mono = output.icecast(host = host,
port = port,
password = pass,
mount = mount_point,
fallible = true,
url = url,
description = description,
name = name,
genre = genre,
user = !user_ref,
on_error = on_error,
on_connect = on_connect,
name = name)
on_connect = on_connect)
output_stereo = output.icecast(host = host,
port = port,
password = pass,
mount = mount_point,
fallible = true,
url = url,
description = description,
name = name,
genre = genre,
user = !user_ref,
on_error = on_error,
on_connect = on_connect)
if type == "mp3" then
if bitrate == 24 then
ignore(output(%mp3(bitrate = 24),s))
if stereo then
ignore(output_stereo(%mp3(bitrate = 24, stereo = true), !source))
else
ignore(output_mono(%mp3(bitrate = 24, stereo = false), mean(!source)))
end
elsif bitrate == 32 then
ignore(output(%mp3(bitrate = 32),s))
if stereo then
ignore(output_stereo(%mp3(bitrate = 32, stereo = true), !source))
else
ignore(output_mono(%mp3(bitrate = 32, stereo = false), mean(!source)))
end
elsif bitrate == 48 then
ignore(output(%mp3(bitrate = 48),s))
if stereo then
ignore(output_stereo(%mp3(bitrate = 48, stereo = true), !source))
else
ignore(output_mono(%mp3(bitrate = 48, stereo = false), mean(!source)))
end
elsif bitrate == 64 then
ignore(output(%mp3(bitrate = 64),s))
if stereo then
ignore(output_stereo(%mp3(bitrate = 64, stereo = true), !source))
else
ignore(output_mono(%mp3(bitrate = 64, stereo = false), mean(!source)))
end
elsif bitrate == 96 then
ignore(output(%mp3(bitrate = 96),s))
if stereo then
ignore(output_stereo(%mp3(bitrate = 96, stereo = true), !source))
else
ignore(output_mono(%mp3(bitrate = 96, stereo = false), mean(!source)))
end
elsif bitrate == 128 then
ignore(output(%mp3(bitrate = 128),s))
if stereo then
ignore(output_stereo(%mp3(bitrate = 128, stereo = true), !source))
else
ignore(output_mono(%mp3(bitrate = 128, stereo = false), mean(!source)))
end
elsif bitrate == 160 then
ignore(output(%mp3(bitrate = 160),s))
if stereo then
ignore(output_stereo(%mp3(bitrate = 160, stereo = true), !source))
else
ignore(output_mono(%mp3(bitrate = 160, stereo = false), mean(!source)))
end
elsif bitrate == 192 then
ignore(output(%mp3(bitrate = 192),s))
if stereo then
ignore(output_stereo(%mp3(bitrate = 192, stereo = true), !source))
else
ignore(output_mono(%mp3(bitrate = 192, stereo = false), mean(!source)))
end
elsif bitrate == 224 then
ignore(output(%mp3(bitrate = 224),s))
if stereo then
ignore(output_stereo(%mp3(bitrate = 224, stereo = true), !source))
else
ignore(output_mono(%mp3(bitrate = 224, stereo = false), mean(!source)))
end
elsif bitrate == 256 then
ignore(output(%mp3(bitrate = 256),s))
if stereo then
ignore(output_stereo(%mp3(bitrate = 256, stereo = true), !source))
else
ignore(output_mono(%mp3(bitrate = 256, stereo = false), mean(!source)))
end
elsif bitrate == 320 then
ignore(output(%mp3(bitrate = 320),s))
if stereo then
ignore(output_stereo(%mp3(bitrate = 320, stereo = true), !source))
else
ignore(output_mono(%mp3(bitrate = 320, stereo = false), mean(!source)))
end
end
else
source = ref s
if not icecast_vorbis_metadata then
source := add(normalize=false, [amplify(0.00001, noise()),s])
source := add(normalize=false, [amplify(0.00001, noise()), !source])
end
if bitrate == 24 then
ignore(output(%vorbis(quality=-0.1),!source))
elsif bitrate == 32 then
ignore(output(%vorbis(quality=-0.1),!source))
elsif bitrate == 48 then
ignore(output(%vorbis(quality=-0.1),!source))
if bitrate == 24 or bitrate == 32 or bitrate == 48 then
if stereo then
ignore(output_stereo(%vorbis(quality=-0.1, channels = 2), !source))
else
ignore(output_mono(%vorbis(quality=-0.1, channels = 1), mean(!source)))
end
elsif bitrate == 64 then
ignore(output(%vorbis(quality=0),!source))
if stereo then
ignore(output_stereo(%vorbis(quality=0, channels = 2), !source))
else
ignore(output_mono(%vorbis(quality=0, channels = 1), mean(!source)))
end
elsif bitrate == 96 then
ignore(output(%vorbis(quality=0.2),!source))
if stereo then
ignore(output_stereo(%vorbis(quality=0.2, channels = 2), !source))
else
ignore(output_mono(%vorbis(quality=0.2, channels = 1), mean(!source)))
end
elsif bitrate == 128 then
ignore(output(%vorbis(quality=0.4),!source))
if stereo then
ignore(output_stereo(%vorbis(quality=0.4, channels = 2), !source))
else
ignore(output_mono(%vorbis(quality=0.4, channels = 1), mean(!source)))
end
elsif bitrate == 160 then
ignore(output(%vorbis(quality=0.5),!source))
if stereo then
ignore(output_stereo(%vorbis(quality=0.5, channels = 2), !source))
else
ignore(output_mono(%vorbis(quality=0.5, channels = 1), mean(!source)))
end
elsif bitrate == 192 then
ignore(output(%vorbis(quality=0.6),!source))
if stereo then
ignore(output_stereo(%vorbis(quality=0.6, channels = 2), !source))
else
ignore(output_mono(%vorbis(quality=0.6, channels = 1), mean(!source)))
end
elsif bitrate == 224 then
ignore(output(%vorbis(quality=0.7),!source))
if stereo then
ignore(output_stereo(%vorbis(quality=0.7, channels = 2), !source))
else
ignore(output_mono(%vorbis(quality=0.7, channels = 1), mean(!source)))
end
elsif bitrate == 256 then
ignore(output(%vorbis(quality=0.8),!source))
if stereo then
ignore(output_stereo(%vorbis(quality=0.8, channels = 2), !source))
else
ignore(output_mono(%vorbis(quality=0.8, channels = 1), mean(!source)))
end
elsif bitrate == 320 then
ignore(output(%vorbis(quality=0.9),!source))
if stereo then
ignore(output_stereo(%vorbis(quality=0.9, channels = 2), !source))
else
ignore(output_mono(%vorbis(quality=0.9, channels = 1), mean(!source)))
end
end
end
else
@@ -164,7 +269,7 @@ def output_to(output_type, type, bitrate, host, port, pass, mount_point, url, de
if url == "" then
url_ref := "N/A"
end
output.shoutcast = output.shoutcast(id = "shoutcast_stream_#{stream}",
output.shoutcast_mono = output.shoutcast(id = "shoutcast_stream_#{stream}",
host = host,
port = port,
password = pass,
@@ -175,28 +280,85 @@ def output_to(output_type, type, bitrate, host, port, pass, mount_point, url, de
user = !user_ref,
on_error = on_error,
on_connect = on_connect)
output.shoutcast_stereo = output.shoutcast(id = "shoutcast_stream_#{stream}",
host = host,
port = port,
password = pass,
fallible = true,
url = !url_ref,
genre = !genre_ref,
name = !description_ref,
user = !user_ref,
on_error = on_error,
on_connect = on_connect)
if bitrate == 24 then
ignore(output.shoutcast(%mp3(bitrate = 24),s))
if stereo then
ignore(output.shoutcast_stereo(%mp3(bitrate = 24, stereo = true), !source))
else
ignore(output.shoutcast_mono(%mp3(bitrate = 24, stereo = false), mean(!source)))
end
elsif bitrate == 32 then
ignore(output.shoutcast(%mp3(bitrate = 32),s))
if stereo then
ignore(output.shoutcast_stereo(%mp3(bitrate = 32, stereo = true), !source))
else
ignore(output.shoutcast_mono(%mp3(bitrate = 32, stereo = false), mean(!source)))
end
elsif bitrate == 48 then
ignore(output.shoutcast(%mp3(bitrate = 48),s))
if stereo then
ignore(output.shoutcast_stereo(%mp3(bitrate = 48, stereo = true), !source))
else
ignore(output.shoutcast_mono(%mp3(bitrate = 48, stereo = false), mean(!source)))
end
elsif bitrate == 64 then
ignore(output.shoutcast(%mp3(bitrate = 64),s))
if stereo then
ignore(output.shoutcast_stereo(%mp3(bitrate = 64, stereo = true), !source))
else
ignore(output.shoutcast_mono(%mp3(bitrate = 64, stereo = false), mean(!source)))
end
elsif bitrate == 96 then
ignore(output.shoutcast(%mp3(bitrate = 96),s))
if stereo then
ignore(output.shoutcast_stereo(%mp3(bitrate = 96, stereo = true), !source))
else
ignore(output.shoutcast_mono(%mp3(bitrate = 96, stereo = false), mean(!source)))
end
elsif bitrate == 128 then
ignore(output.shoutcast(%mp3(bitrate = 128),s))
if stereo then
ignore(output.shoutcast_stereo(%mp3(bitrate = 128, stereo = true), !source))
else
ignore(output.shoutcast_mono(%mp3(bitrate = 128, stereo = false), mean(!source)))
end
elsif bitrate == 160 then
ignore(output.shoutcast(%mp3(bitrate = 160),s))
if stereo then
ignore(output.shoutcast_stereo(%mp3(bitrate = 160, stereo = true), !source))
else
ignore(output.shoutcast_mono(%mp3(bitrate = 160, stereo = false), mean(!source)))
end
elsif bitrate == 192 then
ignore(output.shoutcast(%mp3(bitrate = 192),s))
if stereo then
ignore(output.shoutcast_stereo(%mp3(bitrate = 192, stereo = true), !source))
else
ignore(output.shoutcast_mono(%mp3(bitrate = 192, stereo = false), mean(!source)))
end
elsif bitrate == 224 then
ignore(output.shoutcast(%mp3(bitrate = 224),s))
if stereo then
ignore(output.shoutcast_stereo(%mp3(bitrate = 224, stereo = true), !source))
else
ignore(output.shoutcast_mono(%mp3(bitrate = 224, stereo = false), mean(!source)))
end
elsif bitrate == 256 then
ignore(output.shoutcast(%mp3(bitrate = 256),s))
if stereo then
ignore(output.shoutcast_stereo(%mp3(bitrate = 256, stereo = true), !source))
else
ignore(output.shoutcast_mono(%mp3(bitrate = 256, stereo = false), mean(!source)))
end
elsif bitrate == 320 then
ignore(output.shoutcast(%mp3(bitrate = 320),s))
if stereo then
ignore(output.shoutcast_stereo(%mp3(bitrate = 320, stereo = true), !source))
else
ignore(output.shoutcast_mono(%mp3(bitrate = 320, stereo = false), mean(!source)))
end
end
end
end
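For reference, the Vorbis branches near the top of output_to map each requested bitrate to a fixed encoder quality. A minimal Python sketch of the mapping implied by those branches (the dict and function names are illustrative, not part of the script):

# Vorbis quality level chosen for each requested bitrate (from the branches above).
VORBIS_QUALITY = {
    24: -0.1, 32: -0.1, 48: -0.1,
    64: 0.0,  96: 0.2,  128: 0.4,
    160: 0.5, 192: 0.6, 224: 0.7,
    256: 0.8, 320: 0.9,
}

def vorbis_quality(bitrate):
    # Mirrors the if/elsif chain in the Liquidsoap script.
    return VORBIS_QUALITY[bitrate]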
@ -225,5 +387,142 @@ def add_skip_command(s)
server.register(namespace="source",
usage="skip",
description="Skip the current song.",
"skip",skip)
"skip",fun(s) -> begin log("source.skip") skip(s) end)
end
dyn_out = output.icecast(%wav,
host="localhost",
port=8999,
password=stream_harbor_pass,
mount="test-harbor",
fallible=true)
def set_dynamic_source_id(id) =
current_dyn_id := id
string_of(!current_dyn_id)
end
def get_dynamic_source_id() =
string_of(!current_dyn_id)
end
# Function to create a playlist source and output it.
def create_dynamic_source(uri) =
# The playlist source
s = audio_to_stereo(input.http(buffer=2., max=12., uri))
# The output
active_dyn_out = dyn_out(s)
# We register both source and output
# in the list of sources
dyn_sources :=
list.append([(!current_dyn_id, s),(!current_dyn_id, active_dyn_out)], !dyn_sources)
notify([("schedule_table_id", !current_dyn_id)])
"Done!"
end
# A function to destroy a dynamic source
def destroy_dynamic_source(id) =
# We need to find the source in the list,
# remove it and destroy it. Currently, the language
# lacks some nice operators for that so we do it
# the functional way
# This function is executed on every item in the list
# of dynamic sources
def parse_list(ret, current_element) =
# ret is of the form: (matching_sources, remaining_sources)
# We extract those two:
matching_sources = fst(ret)
remaining_sources = snd(ret)
# current_element is of the form: ("uri", source) so
# we check the first element
current_id = fst(current_element)
if current_id == id then
# In this case, we add the source to the list of
# matched sources
(list.append( [snd(current_element)],
matching_sources),
remaining_sources)
else
# In this case, we put the element in the list of remaining
# sources
(matching_sources,
list.append([current_element],
remaining_sources))
end
end
# Now we execute the function:
result = list.fold(parse_list, ([], []), !dyn_sources)
matching_sources = fst(result)
remaining_sources = snd(result)
# We store the remaining sources in dyn_sources
dyn_sources := remaining_sources
# If no source matched, we return an error
if list.length(matching_sources) == 0 then
"Error: no matching sources!"
else
# We stop all sources
list.iter(source.shutdown, matching_sources)
# And return
"Done!"
end
end
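The fold above is a standard functional partition: each element lands either in the matching list or the remaining list. A rough Python equivalent of the same accumulator shape, for readers less used to the style (names are illustrative):

from functools import reduce

def partition_by_id(dyn_sources, wanted_id):
    # ret is (matching_sources, remaining_sources), as in parse_list above.
    def parse_list(ret, current_element):
        matching, remaining = ret
        current_id, source = current_element
        if current_id == wanted_id:
            return (matching + [source], remaining)
        return (matching, remaining + [current_element])
    return reduce(parse_list, dyn_sources, ([], []))

# partition_by_id([(1, "s1"), (2, "s2")], 1) == (["s1"], [(2, "s2")])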
# A function to destroy all dynamic sources
def destroy_dynamic_source_all() =
# We need to find the source in the list,
# remove it and destroy it. Currently, the language
# lacks some nice operators for that so we do it
# the functional way
# This function is executed on every item in the list
# of dynamic sources
def parse_list(ret, current_element) =
# ret is of the form: (matching_sources, remaining_sources)
# We extract those two:
matching_sources = fst(ret)
remaining_sources = snd(ret)
# current_element is of the form: ("uri", source)
current_uri = fst(current_element)
# when destroying all sources, every element matches,
# so we add each source to the list of matched sources
(list.append( [snd(current_element)],
matching_sources),
remaining_sources)
end
# now we execute the function:
result = list.fold(parse_list, ([], []), !dyn_sources)
matching_sources = fst(result)
remaining_sources = snd(result)
# we store the remaining sources in dyn_sources
dyn_sources := remaining_sources
# if no source matched, we return an error
if list.length(matching_sources) == 0 then
"error: no matching sources!"
else
# we stop all sources
list.iter(source.shutdown, matching_sources)
# And return
"Done!"
end
end
View file
@ -6,13 +6,22 @@ set("log.stdout", true)
set("server.telnet", true)
set("server.telnet.port", 1234)
time = ref string_of(gettimeofday())
#Dynamic source list
dyn_sources = ref []
webstream_enabled = ref false
time = ref string_of(gettimeofday())
queue = audio_to_stereo(id="queue_src", request.equeue(id="queue", length=0.5))
queue = cue_cut(queue)
queue = amplify(1., override="replay_gain", queue)
#fallback between queue and input.harbor (for restreaming other web-streams)
#live stream setup
set("harbor.bind_addr", "0.0.0.0")
current_dyn_id = ref '-1'
pypo_data = ref '0'
web_stream_enabled = ref false
stream_metadata_type = ref 0
default_dj_fade = ref 0.
station_name = ref ''
@ -26,26 +35,92 @@ s2_namespace = ref ''
s3_namespace = ref ''
just_switched = ref false
stream_harbor_pass = list.hd(get_process_lines('pwgen -s -N 1 -n 20'))
%include "ls_lib.liq"
queue = on_metadata(notify, queue)
queue = map_metadata(append_title, queue)
web_stream = input.harbor("test-harbor", port=8999, password=stream_harbor_pass)
web_stream = on_metadata(notify_stream, web_stream)
output.dummy(fallible=true, web_stream)
# the crossfade function controls fade in/out
queue = crossfade(queue)
ignore(output.dummy(queue, fallible=true))
queue = crossfade_airtime(queue)
queue = on_metadata(notify, queue)
queue = map_metadata(update=false, append_title, queue)
output.dummy(fallible=true, queue)
server.register(namespace="vars", "pypo_data", fun (s) -> begin pypo_data := s "Done" end)
server.register(namespace="vars", "web_stream_enabled", fun (s) -> begin web_stream_enabled := (s == "true") string_of(!web_stream_enabled) end)
server.register(namespace="vars", "stream_metadata_type", fun (s) -> begin stream_metadata_type := int_of_string(s) s end)
server.register(namespace="vars", "show_name", fun (s) -> begin show_name := s s end)
server.register(namespace="vars", "station_name", fun (s) -> begin station_name := s s end)
server.register(namespace="vars", "bootup_time", fun (s) -> begin time := s s end)
server.register(namespace="streams", "connection_status", fun (s) -> begin "1:#{!s1_connected},2:#{!s2_connected},3:#{!s3_connected}" end)
server.register(namespace="vars", "default_dj_fade", fun (s) -> begin default_dj_fade := float_of_string(s) s end)
stream_queue = switch(id="stream_queue_switch", track_sensitive=false,
transitions=[transition, transition],
[({!webstream_enabled},web_stream),
({true}, queue)])
ignore(output.dummy(stream_queue, fallible=true))
server.register(namespace="vars",
"pypo_data",
fun (s) -> begin log("vars.pypo_data") pypo_data := s "Done" end)
server.register(namespace="vars",
"stream_metadata_type",
fun (s) -> begin log("vars.stream_metadata_type") stream_metadata_type := int_of_string(s) s end)
server.register(namespace="vars",
"show_name",
fun (s) -> begin log("vars.show_name") show_name := s s end)
server.register(namespace="vars",
"station_name",
fun (s) -> begin log("vars.station_name") station_name := s s end)
server.register(namespace="vars",
"bootup_time",
fun (s) -> begin log("vars.bootup_time") time := s s end)
server.register(namespace="streams",
"connection_status",
fun (s) -> begin log("streams.connection_status") "1:#{!s1_connected},2:#{!s2_connected},3:#{!s3_connected}" end)
server.register(namespace="vars",
"default_dj_fade",
fun (s) -> begin log("vars.default_dj_fade") default_dj_fade := float_of_string(s) s end)
server.register(namespace="dynamic_source",
description="Enable webstream output",
usage='start',
"output_start",
fun (s) -> begin log("dynamic_source.output_start") webstream_enabled := true "enabled" end)
server.register(namespace="dynamic_source",
description="Enable webstream output",
usage='stop',
"output_stop",
fun (s) -> begin log("dynamic_source.output_stop") webstream_enabled := false "disabled" end)
server.register(namespace="dynamic_source",
description="Set the cc_schedule row id",
usage="id <id>",
"id",
fun (s) -> begin log("dynamic_source.id") set_dynamic_source_id(s) end)
server.register(namespace="dynamic_source",
description="Get the cc_schedule row id",
usage="get_id",
"get_id",
fun (s) -> begin log("dynamic_source.get_id") get_dynamic_source_id() end)
server.register(namespace="dynamic_source",
description="Start a new dynamic source.",
usage="start <uri>",
"read_start",
fun (uri) -> begin log("dynamic_source.read_start") create_dynamic_source(uri) end)
server.register(namespace="dynamic_source",
description="Stop a dynamic source.",
usage="stop <id>",
"read_stop",
fun (s) -> begin log("dynamic_source.read_stop") destroy_dynamic_source(s) end)
server.register(namespace="dynamic_source",
description="Stop a dynamic source.",
usage="stop <id>",
"read_stop_all",
fun (s) -> begin log("dynamic_source.read_stop") destroy_dynamic_source_all() end)
default = amplify(id="silence_src", 0.00001, noise())
default = rewrite_metadata([("artist","Airtime"), ("title", "offline")],default)
default = rewrite_metadata([("artist","Airtime"), ("title", "offline")], default)
ignore(output.dummy(default, fallible=true))
master_dj_enabled = ref false
@ -53,33 +128,30 @@ live_dj_enabled = ref false
scheduled_play_enabled = ref false
def make_master_dj_available()
master_dj_enabled := true
master_dj_enabled := true
end
def make_master_dj_unavailable()
master_dj_enabled := false
master_dj_enabled := false
end
def make_live_dj_available()
live_dj_enabled := true
live_dj_enabled := true
end
def make_live_dj_unavailable()
live_dj_enabled := false
live_dj_enabled := false
end
def make_scheduled_play_available()
scheduled_play_enabled := true
just_switched := true
scheduled_play_enabled := true
just_switched := true
end
def make_scheduled_play_unavailable()
scheduled_play_enabled := false
scheduled_play_enabled := false
end
#live stream setup
set("harbor.bind_addr", "0.0.0.0")
def update_source_status(sourcename, status) =
system("/usr/lib/airtime/pypo/bin/liquidsoap_scripts/notify.sh --source-name=#{sourcename} --source-status=#{status} &")
log("/usr/lib/airtime/pypo/bin/liquidsoap_scripts/notify.sh --source-name=#{sourcename} --source-status=#{status} &")
@ -89,7 +161,7 @@ def live_dj_connect(header) =
update_source_status("live_dj", true)
end
def live_dj_disconnect() =
def live_dj_disconnect() =
update_source_status("live_dj", false)
end
@ -97,43 +169,37 @@ def master_dj_connect(header) =
update_source_status("master_dj", true)
end
def master_dj_disconnect() =
def master_dj_disconnect() =
update_source_status("master_dj", false)
end
#auth function for live stream
def check_master_dj_client(user,password) =
#get the output of the php script
ret = get_process_lines("python /usr/lib/airtime/pypo/bin/liquidsoap_scripts/liquidsoap_auth.py --master #{user} #{password}")
#ret now holds the name of the live client (dj1, dj2, or djx), or "ERROR"/"unknown" ...
ret = list.hd(ret)
#return true to let the client transmit data, or false to tell harbor to decline
if (ret == "True") then
true
else
false
end
log("master connected")
#get the output of the php script
ret = get_process_lines("python /usr/lib/airtime/pypo/bin/liquidsoap_scripts/liquidsoap_auth.py --master #{user} #{password}")
#ret now holds the name of the live client (dj1, dj2, or djx), or "ERROR"/"unknown" ...
ret = list.hd(ret)
#return true to let the client transmit data, or false to tell harbor to decline
ret == "True"
end
def check_dj_client(user,password) =
#get the output of the php script
ret = get_process_lines("python /usr/lib/airtime/pypo/bin/liquidsoap_scripts/liquidsoap_auth.py --dj #{user} #{password}")
#ret now holds the name of the live client (dj1, dj2, or djx), or "ERROR"/"unknown" ...
ret = list.hd(ret)
#return true to let the client transmit data, or false to tell harbor to decline
if (ret == "True") then
true
else
false
end
log("live dj connected")
#get the output of the php script
ret = get_process_lines("python /usr/lib/airtime/pypo/bin/liquidsoap_scripts/liquidsoap_auth.py --dj #{user} #{password}")
#ret now holds the name of the live client (dj1, dj2, or djx), or "ERROR"/"unknown" ...
hd = list.hd(ret)
hd == "True"
end
def append_dj_inputs(master_harbor_input_port, master_harbor_input_mount_point, dj_harbor_input_port, dj_harbor_input_mount_point, s) =
if master_harbor_input_port != 0 and master_harbor_input_mount_point != "" and dj_harbor_input_port != 0 and dj_harbor_input_mount_point != "" then
master_dj = mksafe(audio_to_stereo(input.harbor(id="master_harbor", master_harbor_input_mount_point, port=master_harbor_input_port, auth=check_master_dj_client,
master_dj = mksafe(audio_to_stereo(input.harbor(id="master_harbor", master_harbor_input_mount_point, port=master_harbor_input_port, auth=check_master_dj_client,
max=40., on_connect=master_dj_connect, on_disconnect=master_dj_disconnect)))
dj_live = mksafe(audio_to_stereo(input.harbor(id="live_dj_harbor", dj_harbor_input_mount_point, port=dj_harbor_input_port, auth=check_dj_client,
max=40., on_connect=live_dj_connect, on_disconnect=live_dj_disconnect)))
max=40., on_connect=live_dj_connect, on_disconnect=live_dj_disconnect)))
master_dj = rewrite_metadata([("artist","Airtime"), ("title", "Master Dj")],master_dj)
dj_live = rewrite_metadata([("artist","Airtime"), ("title", "Live Dj")],dj_live)
@ -149,9 +215,9 @@ def append_dj_inputs(master_harbor_input_port, master_harbor_input_mount_point,
switch(id="master_dj_switch", track_sensitive=false, transitions=[transition, transition], [({!master_dj_enabled},master_dj), ({true}, s)])
elsif dj_harbor_input_port != 0 and dj_harbor_input_mount_point != "" then
dj_live = mksafe(audio_to_stereo(input.harbor(id="live_dj_harbor", dj_harbor_input_mount_point, port=dj_harbor_input_port, auth=check_dj_client,
max=40., on_connect=live_dj_connect, on_disconnect=live_dj_disconnect)))
dj_live = rewrite_metadata([("artist","Airtime"), ("title", "Live Dj")],dj_live)
max=40., on_connect=live_dj_connect, on_disconnect=live_dj_disconnect)))
dj_live = rewrite_metadata([("artist","Airtime"), ("title", "Live Dj")],dj_live)
ignore(output.dummy(dj_live, fallible=true))
switch(id="live_dj_switch", track_sensitive=false, transitions=[transition, transition], [({!live_dj_enabled},dj_live), ({true}, s)])
@ -160,103 +226,91 @@ def append_dj_inputs(master_harbor_input_port, master_harbor_input_mount_point,
end
end
s = switch(id="default_switch", track_sensitive=false, transitions=[transition_default, transition], [({!scheduled_play_enabled},queue),({true},default)])
s = append_dj_inputs(master_live_stream_port, master_live_stream_mp, dj_live_stream_port, dj_live_stream_mp, s)
s = switch(id="default_switch", track_sensitive=false,
transitions=[transition_default, transition],
[({!scheduled_play_enabled}, stream_queue),({true},default)])
s = append_dj_inputs(master_live_stream_port, master_live_stream_mp,
dj_live_stream_port, dj_live_stream_mp, s)
# Attach a skip command to the source s:
#web_stream_source = input.http(id="web_stream", autostart = false, buffer=0.5, max=20., "")
#once the stream is started, give it a sink so that liquidsoap doesn't
#create buffer overflow warnings in the log file.
#output.dummy(fallible=true, web_stream_source)
#s = switch(track_sensitive = false,
# transitions=[to_live,to_live],
# [
# ({ !web_stream_enabled }, web_stream_source),
# ({ true }, s)
# ]
#)
add_skip_command(s)
server.register(namespace="streams",
description="Stop Master DJ source.",
usage="master_dj_stop",
"master_dj_stop",
fun (s) -> begin make_master_dj_unavailable() "Done." end)
description="Stop Master DJ source.",
usage="master_dj_stop",
"master_dj_stop",
fun (s) -> begin log("streams.master_dj_stop") make_master_dj_unavailable() "Done." end)
server.register(namespace="streams",
description="Start Master DJ source.",
usage="master_dj_start",
"master_dj_start",
fun (s) -> begin make_master_dj_available() "Done." end)
description="Start Master DJ source.",
usage="master_dj_start",
"master_dj_start",
fun (s) -> begin log("streams.master_dj_start") make_master_dj_available() "Done." end)
server.register(namespace="streams",
description="Stop Live DJ source.",
usage="live_dj_stop",
"live_dj_stop",
fun (s) -> begin make_live_dj_unavailable() "Done." end)
description="Stop Live DJ source.",
usage="live_dj_stop",
"live_dj_stop",
fun (s) -> begin log("streams.live_dj_stop") make_live_dj_unavailable() "Done." end)
server.register(namespace="streams",
description="Start Live DJ source.",
usage="live_dj_start",
"live_dj_start",
fun (s) -> begin make_live_dj_available() "Done." end)
description="Start Live DJ source.",
usage="live_dj_start",
"live_dj_start",
fun (s) -> begin log("streams.live_dj_start") make_live_dj_available() "Done." end)
server.register(namespace="streams",
description="Stop Scheduled Play source.",
usage="scheduled_play_stop",
"scheduled_play_stop",
fun (s) -> begin make_scheduled_play_unavailable() "Done." end)
description="Stop Scheduled Play source.",
usage="scheduled_play_stop",
"scheduled_play_stop",
fun (s) -> begin log("streams.scheduled_play_stop") make_scheduled_play_unavailable() "Done." end)
server.register(namespace="streams",
description="Start Scheduled Play source.",
usage="scheduled_play_start",
"scheduled_play_start",
fun (s) -> begin make_scheduled_play_available() "Done." end)
description="Start Scheduled Play source.",
usage="scheduled_play_start",
"scheduled_play_start",
fun (s) -> begin log("streams.scheduled_play_start") make_scheduled_play_available() "Done." end)
if output_sound_device then
success = ref false
log(output_sound_device_type)
%ifdef output.alsa
if output_sound_device_type == "ALSA" then
ignore(output.alsa(s))
success := true
end
%endif
%ifdef output.ao
if output_sound_device_type == "AO" then
ignore(output.ao(s))
success := true
end
%endif
%ifdef output.oss
if output_sound_device_type == "OSS" then
ignore(output.oss(s))
success := true
end
%endif
%ifdef output.portaudio
if output_sound_device_type == "Portaudio" then
ignore(output.portaudio(s))
success := true
end
%endif
%ifdef output.pulseaudio
if output_sound_device_type == "Pulseaudio" then
ignore(output.pulseaudio(s))
success := true
end
%endif
if (!success == false) then
ignore(output.prefered(s))
end
end
if s1_enable == true then
@ -265,8 +319,10 @@ if s1_enable == true then
else
s1_namespace := s1_mount
end
server.register(namespace=!s1_namespace, "connected", fun (s) -> begin !s1_connected end)
output_to(s1_output, s1_type, s1_bitrate, s1_host, s1_port, s1_pass, s1_mount, s1_url, s1_description, s1_genre, s1_user, s, "1", s1_connected, s1_name)
server.register(namespace=!s1_namespace, "connected", fun (s) -> begin log("#{!s1_namespace}.connected") !s1_connected end)
output_to(s1_output, s1_type, s1_bitrate, s1_host, s1_port, s1_pass,
s1_mount, s1_url, s1_description, s1_genre, s1_user, s, "1",
s1_connected, s1_name, s1_channels)
end
if s2_enable == true then
@ -275,9 +331,11 @@ if s2_enable == true then
else
s2_namespace := s2_mount
end
server.register(namespace=!s2_namespace, "connected", fun (s) -> begin !s2_connected end)
output_to(s2_output, s2_type, s2_bitrate, s2_host, s2_port, s2_pass, s2_mount, s2_url, s2_description, s2_genre, s2_user, s, "2", s2_connected, s2_name)
server.register(namespace=!s2_namespace, "connected", fun (s) -> begin log("#{!s2_namespace}.connected") !s2_connected end)
output_to(s2_output, s2_type, s2_bitrate, s2_host, s2_port, s2_pass,
s2_mount, s2_url, s2_description, s2_genre, s2_user, s, "2",
s2_connected, s2_name, s2_channels)
end
if s3_enable == true then
@ -286,8 +344,12 @@ if s3_enable == true then
else
s3_namespace := s3_mount
end
server.register(namespace=!s3_namespace, "connected", fun (s) -> begin !s3_connected end)
output_to(s3_output, s3_type, s3_bitrate, s3_host, s3_port, s3_pass, s3_mount, s3_url, s3_description, s3_genre, s3_user, s, "3", s3_connected, s3_name)
server.register(namespace=!s3_namespace, "connected", fun (s) -> begin log("#{!s3_namespace}.connected") !s3_connected end)
output_to(s3_output, s3_type, s3_bitrate, s3_host, s3_port, s3_pass,
s3_mount, s3_url, s3_description, s3_genre, s3_user, s, "3",
s3_connected, s3_name, s3_channels)
end
ignore(output.dummy(blank()))
command = "/usr/lib/airtime/pypo/bin/liquidsoap_scripts/notify.sh --liquidsoap-started &"
log(command)
system(command)
View file
@ -10,4 +10,5 @@ SCRIPT=`readlink -f $0`
# Absolute path this script is in
SCRIPTPATH=`dirname $SCRIPT`
cd ${SCRIPTPATH}/../ && python pyponotify.py "$@"
cd ${SCRIPTPATH}/../
timeout 45 python pyponotify.py "$@"
View file
@ -5,5 +5,5 @@
check process airtime-liquidsoap
with pidfile "/var/run/airtime-liquidsoap.pid"
start program = "/etc/init.d/airtime-playout monit-restart" with timeout 5 seconds
stop program = "/etc/init.d/airtime-playout stop"
start program = "/etc/init.d/airtime-liquidsoap start" with timeout 5 seconds
stop program = "/etc/init.d/airtime-liquidsoap stop"
View file
@ -107,10 +107,10 @@ except Exception, e:
class Global:
def __init__(self):
self.api_client = api_client.api_client_factory(config)
self.api_client = api_client.AirtimeApiClient()
def selfcheck(self):
self.api_client = api_client.api_client_factory(config)
self.api_client = api_client.AirtimeApiClient()
return self.api_client.is_server_compatible()
def test_api(self):
@ -130,7 +130,7 @@ def liquidsoap_running_test(telnet_lock, host, port, logger):
msg = "version\n"
tn.write(msg)
tn.write("exit\n")
logger.info("Liquidsoap version %s", tn.read_all())
logger.info("Found: %s", tn.read_all())
except Exception, e:
logger.error(str(e))
success = False
@ -172,7 +172,7 @@ if __name__ == '__main__':
g.test_api()
sys.exit()
api_client = api_client.api_client_factory(config)
api_client = api_client.AirtimeApiClient()
api_client.register_component("pypo")
pypoFetch_q = Queue()
@ -210,7 +210,7 @@ if __name__ == '__main__':
recorder.start()
# all join() are commented out because we want to exit entire pypo
# if pypofetch is exiting
# if pypofetch is exiting
#pmh.join()
#recorder.join()
#pp.join()
View file
@ -8,6 +8,7 @@ import json
import telnetlib
import copy
from threading import Thread
import subprocess
from Queue import Empty
@ -40,7 +41,7 @@ except Exception, e:
class PypoFetch(Thread):
def __init__(self, pypoFetch_q, pypoPush_q, media_q, telnet_lock):
Thread.__init__(self)
self.api_client = api_client.api_client_factory(config)
self.api_client = api_client.AirtimeApiClient()
self.fetch_queue = pypoFetch_q
self.push_queue = pypoPush_q
self.media_prepare_queue = media_q
@ -58,7 +59,7 @@ class PypoFetch(Thread):
if not os.path.isdir(dir):
"""
We get here if path does not exist, or path does exist but
is a file. We are not handling the second case, but don't
is a file. We are not handling the second case, but don't
think we actually care about handling it.
"""
self.logger.debug("Cache dir does not exist. Creating...")
@ -84,9 +85,11 @@ class PypoFetch(Thread):
if command == 'update_schedule':
self.schedule_data = m['schedule']
self.process_schedule(self.schedule_data)
elif command == 'reset_liquidsoap_bootstrap':
self.set_bootstrap_variables()
elif command == 'update_stream_setting':
self.logger.info("Updating stream setting...")
self.regenerateLiquidsoapConf(m['setting'])
self.regenerate_liquidsoap_conf(m['setting'])
elif command == 'update_stream_format':
self.logger.info("Updating stream format...")
self.update_liquidsoap_stream_format(m['stream_format'])
@ -102,6 +105,8 @@ class PypoFetch(Thread):
elif command == 'disconnect_source':
self.logger.info("disconnect_on_source show command received...")
self.disconnect_source(self.logger, self.telnet_lock, m['sourcename'])
else:
self.logger.info("Unknown command: %s" % command)
# update timeout value
if command == 'update_schedule':
@ -142,14 +147,14 @@ class PypoFetch(Thread):
def switch_source(logger, lock, sourcename, status):
logger.debug('Switching source: %s to "%s" status', sourcename, status)
command = "streams."
if(sourcename == "master_dj"):
if sourcename == "master_dj":
command += "master_dj_"
elif(sourcename == "live_dj"):
elif sourcename == "live_dj":
command += "live_dj_"
elif(sourcename == "scheduled_play"):
elif sourcename == "scheduled_play":
command += "scheduled_play_"
if(status == "on"):
if status == "on":
command += "start\n"
else:
command += "stop\n"
@ -172,7 +177,7 @@ class PypoFetch(Thread):
def set_bootstrap_variables(self):
self.logger.debug('Getting information needed on bootstrap from Airtime')
info = self.api_client.get_bootstrap_info()
if info == None:
if info is None:
self.logger.error('Unable to get bootstrap info.. Exiting pypo...')
sys.exit(1)
else:
@ -204,12 +209,41 @@ class PypoFetch(Thread):
fh.write(api_client.encode_to(buffer_str))
fh.write("log_file = \"/var/log/airtime/pypo-liquidsoap/<script>.log\"\n");
fh.close()
# restarting pypo.
# we could just restart liquidsoap, but that somehow takes more time.
self.logger.info("Restarting pypo...")
sys.exit(0)
def regenerateLiquidsoapConf(self, setting):
def restart_liquidsoap(self):
self.telnet_lock.acquire()
try:
self.logger.info("Restarting Liquidsoap")
subprocess.call('/etc/init.d/airtime-liquidsoap restart', shell=True)
#Wait here and poll Liquidsoap until it has started up
self.logger.info("Waiting for Liquidsoap to start")
while True:
try:
tn = telnetlib.Telnet(LS_HOST, LS_PORT)
tn.write("exit\n")
tn.read_all()
self.logger.info("Liquidsoap is up and running")
break
except Exception, e:
#sleep 0.5 seconds and try again
time.sleep(0.5)
except Exception, e:
self.logger.error(e)
finally:
self.telnet_lock.release()
try:
self.set_bootstrap_variables()
#get the most up-to-date schedule, which will initiate the process
#of making sure Liquidsoap is playing the schedule
self.manual_schedule_fetch()
except Exception, e:
self.logger.error(str(e))
def regenerate_liquidsoap_conf(self, setting):
existing = {}
# create a temp file
@ -218,7 +252,8 @@ class PypoFetch(Thread):
fh = open('/etc/airtime/liquidsoap.cfg', 'r')
except IOError, e:
#file does not exist
self.write_liquidsoap_config(setting)
self.restart_liquidsoap()
return
self.logger.info("Reading existing config...")
# read existing conf file and build dict
@ -246,10 +281,10 @@ class PypoFetch(Thread):
existing[key] = value
fh.close()
# dict flag for any change in cofig
# dict flag for any change in config
change = {}
# this flag is to detect disable -> disable change
# in that case, we don't want to restart even if there are chnges.
# in that case, we don't want to restart even if there are changes.
state_change_restart = {}
#restart flag
restart = False
@ -284,7 +319,7 @@ class PypoFetch(Thread):
if stream not in change:
change[stream] = False
if not (s[u'value'] == existing[s[u'keyname']]):
self.logger.info("Keyname: %s, Curent value: %s, New Value: %s", s[u'keyname'], existing[s[u'keyname']], s[u'value'])
self.logger.info("Keyname: %s, Current value: %s, New Value: %s", s[u'keyname'], existing[s[u'keyname']], s[u'value'])
change[stream] = True
# always set the change flag for sound_device to True
@ -298,21 +333,21 @@ class PypoFetch(Thread):
restart = True
# rewrite
if restart:
self.write_liquidsoap_config(setting)
self.restart_liquidsoap()
else:
self.logger.info("No change detected in setting...")
self.update_liquidsoap_connection_status()
def update_liquidsoap_connection_status(self):
"""
updates the status of liquidsoap connection to the streaming server
This fucntion updates the bootup time variable in liquidsoap script
updates the status of Liquidsoap connection to the streaming server
This function updates the bootup time variable in Liquidsoap script
"""
self.telnet_lock.acquire()
try:
tn = telnetlib.Telnet(LS_HOST, LS_PORT)
# update the boot up time of liquidsoap. Since liquidsoap is not restarting,
# update the boot up time of Liquidsoap. Since Liquidsoap is not restarting,
# we are manually adjusting the bootup time variable so the status msg will get
# updated.
current_time = time.time()
@ -425,9 +460,9 @@ class PypoFetch(Thread):
for key in media:
media_item = media[key]
if(media_item['type'] == 'file'):
if (media_item['type'] == 'file'):
fileExt = os.path.splitext(media_item['uri'])[1]
dst = os.path.join(download_dir, media_item['id'] + fileExt)
dst = os.path.join(download_dir, unicode(media_item['id']) + fileExt)
media_item['dst'] = dst
media_item['file_ready'] = False
media_filtered[key] = media_item
@ -448,30 +483,39 @@ class PypoFetch(Thread):
"""
Get list of all files in the cache dir and remove them if they aren't being used anymore.
The input dict 'media' lists all files that are scheduled or currently playing. Not being in this
dict() means the file is safe to remove.
dict() means the file is safe to remove.
"""
cached_file_set = set(os.listdir(self.cache_dir))
scheduled_file_set = set()
for mkey in media:
media_item = media[mkey]
fileExt = os.path.splitext(media_item['uri'])[1]
scheduled_file_set.add(media_item["id"] + fileExt)
if media_item['type'] == 'file':
fileExt = os.path.splitext(media_item['uri'])[1]
scheduled_file_set.add(unicode(media_item["id"]) + fileExt)
unneeded_files = cached_file_set - scheduled_file_set
expired_files = cached_file_set - scheduled_file_set
self.logger.debug("Files to remove " + str(unneeded_files))
for f in unneeded_files:
self.logger.debug("Removing %s" % os.path.join(self.cache_dir, f))
os.remove(os.path.join(self.cache_dir, f))
self.logger.debug("Files to remove " + str(expired_files))
for f in expired_files:
try:
self.logger.debug("Removing %s" % os.path.join(self.cache_dir, f))
os.remove(os.path.join(self.cache_dir, f))
except Exception, e:
self.logger.error(e)
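The cleanup above is a plain set difference between the files on disk and the files the schedule still references. A tiny worked example of the same computation:

cached_file_set = set(["1.mp3", "2.ogg", "3.mp3"])   # what is in the cache dir
scheduled_file_set = set(["1.mp3", "3.mp3"])         # what the schedule still needs

expired_files = cached_file_set - scheduled_file_set
# expired_files == set(["2.ogg"]) -- safe to remove from the cache dir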
def manual_schedule_fetch(self):
success, self.schedule_data = self.api_client.get_schedule()
if success:
self.process_schedule(self.schedule_data)
return success
def main(self):
# Bootstrap: since we are just starting up, we need to grab the
# most recent schedule. After that we can just wait for updates.
success, self.schedule_data = self.api_client.get_schedule()
# most recent schedule. After that we can just wait for updates.
success = self.manual_schedule_fetch()
if success:
self.logger.info("Bootstrap schedule received: %s", self.schedule_data)
self.process_schedule(self.schedule_data)
self.set_bootstrap_variables()
loops = 1
@ -481,14 +525,14 @@ class PypoFetch(Thread):
"""
our simple_queue.get() uses a timeout; when it expires, we
fetch the Airtime schedule manually. It is important to fetch
the schedule periodically because if we didn't, we would only
get schedule updates via RabbitMq if the user was constantly
using the Airtime interface.
the schedule periodically because if we didn't, we would only
get schedule updates via RabbitMq if the user was constantly
using the Airtime interface.
If the user is not using the interface, RabbitMq messages are not
sent, and we will have very stale (or non-existent!) data about the
sent, and we will have very stale (or non-existent!) data about the
schedule.
Currently we are checking every POLL_INTERVAL seconds
"""
@ -497,9 +541,7 @@ class PypoFetch(Thread):
self.handle_message(message)
except Empty, e:
self.logger.info("Queue timeout. Fetching schedule manually")
success, self.schedule_data = self.api_client.get_schedule()
if success:
self.process_schedule(self.schedule_data)
self.manual_schedule_fetch()
except Exception, e:
import traceback
top = traceback.format_exc()
View file
@ -68,6 +68,9 @@ class PypoMessageHandler(Thread):
if command == 'update_schedule':
self.logger.info("Updating schdule...")
self.pypo_queue.put(message)
elif command == 'reset_liquidsoap_bootstrap':
self.logger.info("Resetting bootstrap vars...")
self.pypo_queue.put(message)
elif command == 'update_stream_setting':
self.logger.info("Updating stream setting...")
self.pypo_queue.put(message)
@ -90,6 +93,8 @@ class PypoMessageHandler(Thread):
self.recorder_queue.put(message)
elif command == 'cancel_recording':
self.recorder_queue.put(message)
else:
self.logger.info("Unknown command: %s" % command)
except Exception, e:
self.logger.error("Exception in handling RabbitMQ message: %s", e)
View file
@ -6,10 +6,10 @@ Python part of radio playout (pypo)
This function acts as a gateway between liquidsoap and the server API.
Mainly used to tell the platform what pypo/liquidsoap does.
Main case:
Main case:
- whenever LS starts playing a new track, its on_metadata callback calls
a function in ls (notify(m)) which then calls the python script here
with the currently starting filename as parameter
with the currently starting filename as parameter
- this python script takes this parameter, tries to extract the actual
media id from it, and then calls back to the API to tell it about it.
@ -33,14 +33,17 @@ usage = "%prog [options]" + " - notification gateway"
parser = OptionParser(usage=usage)
# Options
parser.add_option("-d", "--data", help="Pass JSON data from liquidsoap into this script.", metavar="data")
parser.add_option("-d", "--data", help="Pass JSON data from Liquidsoap into this script.", metavar="data")
parser.add_option("-m", "--media-id", help="ID of the file that is currently playing.", metavar="media_id")
parser.add_option("-e", "--error", action="store", dest="error", type="string", help="liquidsoap error msg.", metavar="error_msg")
parser.add_option("-e", "--error", action="store", dest="error", type="string", help="Liquidsoap error msg.", metavar="error_msg")
parser.add_option("-s", "--stream-id", help="ID stream", metavar="stream_id")
parser.add_option("-c", "--connect", help="liquidsoap connected", action="store_true", metavar="connect")
parser.add_option("-t", "--time", help="liquidsoap boot up time", action="store", dest="time", metavar="time", type="string")
parser.add_option("-c", "--connect", help="Liquidsoap connected", action="store_true", metavar="connect")
parser.add_option("-t", "--time", help="Liquidsoap boot up time", action="store", dest="time", metavar="time", type="string")
parser.add_option("-x", "--source-name", help="source connection name", metavar="source_name")
parser.add_option("-y", "--source-status", help="source connection stauts", metavar="source_status")
parser.add_option("-y", "--source-status", help="source connection status", metavar="source_status")
parser.add_option("-w", "--webstream", help="JSON metadata associated with webstream", metavar="json_data")
parser.add_option("-n", "--liquidsoap-started", help="notify liquidsoap started", metavar="json_data", action="store_true", default=True)
# parse options
(options, args) = parser.parse_args()
@ -64,22 +67,21 @@ except Exception, e:
class Notify:
def __init__(self):
self.api_client = api_client.api_client_factory(config)
self.api_client = api_client.AirtimeApiClient(logger=logger)
def notify_media_start_playing(self, data, media_id):
logger = logging.getLogger("notify")
def notify_liquidsoap_started(self):
logger.debug("Notifying server that Liquidsoap has started")
self.api_client.notify_liquidsoap_started()
def notify_media_start_playing(self, media_id):
logger.debug('#################################################')
logger.debug('# Calling server to update about what\'s playing #')
logger.debug('#################################################')
logger.debug('data = ' + str(data))
response = self.api_client.notify_media_item_start_playing(data, media_id)
response = self.api_client.notify_media_item_start_playing(media_id)
logger.debug("Response: " + json.dumps(response))
# @param time: time that LS started
def notify_liquidsoap_status(self, msg, stream_id, time):
logger = logging.getLogger("notify")
logger.debug('#################################################')
logger.debug('# Calling server to update liquidsoap status #')
logger.debug('#################################################')
@ -88,8 +90,6 @@ class Notify:
logger.debug("Response: " + json.dumps(response))
def notify_source_status(self, source_name, status):
logger = logging.getLogger("notify")
logger.debug('#################################################')
logger.debug('# Calling server to update source status #')
logger.debug('#################################################')
@ -97,6 +97,13 @@ class Notify:
response = self.api_client.notify_source_status(source_name, status)
logger.debug("Response: " + json.dumps(response))
def notify_webstream_data(self, data, media_id):
logger.debug('#################################################')
logger.debug('# Calling server to update webstream data #')
logger.debug('#################################################')
response = self.api_client.notify_webstream_data(data, media_id)
if __name__ == '__main__':
print
print '#########################################'
@ -105,7 +112,6 @@ if __name__ == '__main__':
print '#########################################'
# initialize
logger = logging.getLogger("notify")
if options.error and options.stream_id:
try:
n = Notify()
@ -124,17 +130,23 @@ if __name__ == '__main__':
n.notify_source_status(options.source_name, options.source_status)
except Exception, e:
print e
else:
if not options.data:
print "NOTICE: 'data' command-line argument not given."
sys.exit()
if not options.media_id:
print "NOTICE: 'media_id' command-line argument not given."
sys.exit()
elif options.webstream:
try:
n = Notify()
n.notify_webstream_data(options.webstream, options.media_id)
except Exception, e:
print e
elif options.media_id:
try:
n = Notify()
n.notify_media_start_playing(options.data, options.media_id)
n.notify_media_start_playing(options.media_id)
except Exception, e:
print e
elif options.liquidsoap_started:
try:
n = Notify()
n.notify_liquidsoap_started()
except Exception, e:
print e
View file
@ -39,16 +39,23 @@ except Exception, e:
logger.error('Error loading config file %s', e)
sys.exit()
def is_stream(media_item):
return media_item['type'] == 'stream_output_start'
def is_file(media_item):
return media_item['type'] == 'file'
class PypoPush(Thread):
def __init__(self, q, telnet_lock):
Thread.__init__(self)
self.api_client = api_client.api_client_factory(config)
self.api_client = api_client.AirtimeApiClient()
self.queue = q
self.telnet_lock = telnet_lock
self.pushed_objects = {}
self.logger = logging.getLogger('push')
self.current_prebuffering_stream_id = None
def main(self):
loops = 0
@ -70,41 +77,43 @@ class PypoPush(Thread):
#We get to the following lines only if a schedule was received.
liquidsoap_queue_approx = self.get_queue_items_from_liquidsoap()
liquidsoap_stream_id = self.get_current_stream_id_from_liquidsoap()
tnow = datetime.utcnow()
current_event_chain, original_chain = self.get_current_chain(chains, tnow)
if len(current_event_chain) > 0 and len(liquidsoap_queue_approx) == 0:
#Something is scheduled but Liquidsoap is not playing anything!
#Need to schedule it immediately..this might happen if Liquidsoap crashed.
if len(current_event_chain) > 0:
try:
chains.remove(original_chain)
except ValueError, e:
self.logger.error(str(e))
self.modify_cue_point(current_event_chain[0])
next_media_item_chain = current_event_chain
time_until_next_play = 0
#sleep for 0.2 seconds to give pypo-file time to copy.
time.sleep(0.2)
#At this point we know that Liquidsoap is playing something, and that something
#is scheduled. We need to verify whether the schedule we just received matches
#what Liquidsoap is playing, and if not, correct it.
self.handle_new_schedule(media_schedule, liquidsoap_queue_approx, liquidsoap_stream_id, current_event_chain)
#At this point everything in the present has been taken care of and Liquidsoap
#is playing whatever is scheduled.
#Now we need to prepare ourselves for future scheduled events.
#
next_media_item_chain = self.get_next_schedule_chain(chains, tnow)
self.logger.debug("Next schedule chain: %s", next_media_item_chain)
if next_media_item_chain is not None:
try:
chains.remove(next_media_item_chain)
except ValueError, e:
self.logger.error(str(e))
chain_start = datetime.strptime(next_media_item_chain[0]['start'], "%Y-%m-%d-%H-%M-%S")
time_until_next_play = self.date_interval_to_seconds(chain_start - datetime.utcnow())
self.logger.debug("Blocking %s seconds until show start", time_until_next_play)
else:
media_chain = filter(lambda item: (item["type"] == "file"), current_event_chain)
self.handle_new_media_schedule(media_schedule, liquidsoap_queue_approx, media_chain)
next_media_item_chain = self.get_next_schedule_chain(chains, tnow)
self.logger.debug("Next schedule chain: %s", next_media_item_chain)
if next_media_item_chain is not None:
try:
chains.remove(next_media_item_chain)
except ValueError, e:
self.logger.error(str(e))
chain_start = datetime.strptime(next_media_item_chain[0]['start'], "%Y-%m-%d-%H-%M-%S")
time_until_next_play = self.date_interval_to_seconds(chain_start - datetime.utcnow())
self.logger.debug("Blocking %s seconds until show start", time_until_next_play)
else:
self.logger.debug("Blocking indefinitely since no show scheduled")
time_until_next_play = None
self.logger.debug("Blocking indefinitely since no show scheduled")
time_until_next_play = None
except Empty, e:
#We only get here when a new chain of tracks are ready to be played.
self.push_to_liquidsoap(next_media_item_chain)
@ -125,6 +134,25 @@ class PypoPush(Thread):
loops = 0
loops += 1
def get_current_stream_id_from_liquidsoap(self):
response = "-1"
try:
self.telnet_lock.acquire()
tn = telnetlib.Telnet(LS_HOST, LS_PORT)
msg = 'dynamic_source.get_id\n'
tn.write(msg)
response = tn.read_until("\r\n").strip(" \r\n")
tn.write('exit\n')
tn.read_all()
except Exception, e:
self.logger.error("Error connecting to Liquidsoap: %s", e)
response = "-1"
finally:
self.telnet_lock.release()
return response
def get_queue_items_from_liquidsoap(self):
"""
This function connects to Liquidsoap to find what media items are in its queue.
@ -157,7 +185,7 @@ class PypoPush(Thread):
else:
"""
We should only reach here if Pypo crashed and restarted (because self.pushed_objects was reset). In this case
let's clear the entire Liquidsoap queue.
let's clear the entire Liquidsoap queue.
"""
self.logger.error("ID exists in liquidsoap queue that does not exist in our pushed_objects queue: " + item)
self.clear_liquidsoap_queue()
@ -166,43 +194,98 @@ class PypoPush(Thread):
return liquidsoap_queue_approx
def handle_new_media_schedule(self, media_schedule, liquidsoap_queue_approx, media_chain):
def is_correct_current_item(self, media_item, liquidsoap_queue_approx, liquidsoap_stream_id):
correct = False
if media_item is None:
correct = (len(liquidsoap_queue_approx) == 0 and liquidsoap_stream_id == "-1")
else:
if is_file(media_item):
if len(liquidsoap_queue_approx) == 0:
correct = False
else:
correct = liquidsoap_queue_approx[0]['start'] == media_item['start'] and \
liquidsoap_queue_approx[0]['row_id'] == media_item['row_id'] and \
liquidsoap_queue_approx[0]['end'] == media_item['end']
elif is_stream(media_item):
correct = liquidsoap_stream_id == str(media_item['row_id'])
self.logger.debug("Is current item correct?: %s", str(correct))
return correct
#clear all webstreams and files from Liquidsoap
def clear_all_liquidsoap_items(self):
self.remove_from_liquidsoap_queue(0, None)
self.stop_web_stream_all()
def handle_new_schedule(self, media_schedule, liquidsoap_queue_approx, liquidsoap_stream_id, current_event_chain):
"""
This function's purpose is to gracefully handle situations where
Liquidsoap already has a track in its queue, but the schedule
Liquidsoap already has a track in its queue, but the schedule
has changed. If the schedule has changed, this function's job is to
call other functions that will connect to Liquidsoap and alter its
queue.
"""
file_chain = filter(lambda item: (item["type"] == "file"), current_event_chain)
stream_chain = filter(lambda item: (item["type"] == "stream_output_start"), current_event_chain)
problem_at_iteration = self.find_removed_items(media_schedule, liquidsoap_queue_approx)
self.logger.debug(current_event_chain)
if problem_at_iteration is not None:
#Items that are in Liquidsoap's queue aren't scheduled anymore. We need to connect
#and remove these items.
self.logger.debug("Change in link %s of current chain", problem_at_iteration)
self.remove_from_liquidsoap_queue(problem_at_iteration, liquidsoap_queue_approx[problem_at_iteration:])
#Take care of the case where the current playing may be incorrect
if len(current_event_chain) > 0:
if problem_at_iteration is None and len(media_chain) > len(liquidsoap_queue_approx):
self.logger.debug("New schedule has longer current chain.")
problem_at_iteration = len(liquidsoap_queue_approx)
current_item = current_event_chain[0]
if not self.is_correct_current_item(current_item, liquidsoap_queue_approx, liquidsoap_stream_id):
self.clear_all_liquidsoap_items()
if is_stream(current_item):
if current_item['row_id'] != self.current_prebuffering_stream_id:
#this is called if the stream wasn't scheduled sufficiently ahead of time
#so that the prebuffering stage could take effect. Let's do the prebuffering now.
self.start_web_stream_buffer(current_item)
self.start_web_stream(current_item)
if is_file(current_item):
self.modify_cue_point(file_chain[0])
self.push_to_liquidsoap(file_chain)
#we've changed the queue, so let's refetch it
liquidsoap_queue_approx = self.get_queue_items_from_liquidsoap()
if problem_at_iteration is not None:
self.logger.debug("Change in chain at link %s", problem_at_iteration)
elif not self.is_correct_current_item(None, liquidsoap_queue_approx, liquidsoap_stream_id):
#Liquidsoap is playing something even though it shouldn't be
self.clear_all_liquidsoap_items()
#If the current item scheduled is a file, then files come in chains, and
#therefore we need to make sure the entire chain is correct.
if len(current_event_chain) > 0 and is_file(current_event_chain[0]):
problem_at_iteration = self.find_removed_items(media_schedule, liquidsoap_queue_approx)
if problem_at_iteration is not None:
#Items that are in Liquidsoap's queue aren't scheduled anymore. We need to connect
#and remove these items.
self.logger.debug("Change in link %s of current chain", problem_at_iteration)
self.remove_from_liquidsoap_queue(problem_at_iteration, liquidsoap_queue_approx[problem_at_iteration:])
if problem_at_iteration is None and len(file_chain) > len(liquidsoap_queue_approx):
self.logger.debug("New schedule has longer current chain.")
problem_at_iteration = len(liquidsoap_queue_approx)
if problem_at_iteration is not None:
self.logger.debug("Change in chain at link %s", problem_at_iteration)
chain_to_push = file_chain[problem_at_iteration:]
if len(chain_to_push) > 0:
self.modify_cue_point(chain_to_push[0])
self.push_to_liquidsoap(chain_to_push)
chain_to_push = media_chain[problem_at_iteration:]
if len(chain_to_push) > 0:
self.modify_cue_point(chain_to_push[0])
self.push_to_liquidsoap(chain_to_push)
"""
Compare what's in the liquidsoap_queue to the new schedule we just
received in media_schedule. This function only iterates over liquidsoap_queue_approx
and finds if every item in that list is still scheduled in "media_schedule". It doesn't
and finds if every item in that list is still scheduled in "media_schedule". It doesn't
take care of the case where media_schedule has more items than liquidsoap_queue_approx
"""
def find_removed_items(self, media_schedule, liquidsoap_queue_approx):
#iterate through the items we got from the liquidsoap queue and
#iterate through the items we got from the liquidsoap queue and
#see if they are the same as the newly received schedule
iteration = 0
problem_at_iteration = None
@ -219,12 +302,12 @@ class PypoPush(Thread):
else:
#A different item has been scheduled at the same time! Need to remove
#all tracks from the Liquidsoap queue starting at this point, and re-add
#them.
#them.
problem_at_iteration = iteration
break
else:
#There are no more items scheduled for this time! The user has shortened
#the playlist, so we simply need to remove tracks from the queue.
#the playlist, so we simply need to remove tracks from the queue.
problem_at_iteration = iteration
break
iteration += 1
@ -241,8 +324,12 @@ class PypoPush(Thread):
for mkey in sorted_keys:
media_item = media_schedule[mkey]
if media_item['type'] == "event":
if media_item['independent_event']:
if len(current_chain) > 0:
chains.append(current_chain)
chains.append([media_item])
current_chain = []
elif len(current_chain) == 0:
current_chain.append(media_item)
elif media_item['start'] == current_chain[-1]['end']:
@ -274,16 +361,16 @@ class PypoPush(Thread):
"""
Returns two chains, original chain and current_chain. current_chain is a subset of
original_chain but can also be equal to original chain.
We return original chain because the user of this function may want to clean
up the input 'chains' list
chain, original = get_current_chain(chains)
and
and
chains.remove(chain) can throw a ValueError exception
but
but
chains.remove(original) won't
"""
def get_current_chain(self, chains, tnow):
@ -307,7 +394,7 @@ class PypoPush(Thread):
"""
The purpose of this function is to take a look at the last received schedule from
pypo-fetch and return the next chain of media_items. A chain is defined as a sequence
pypo-fetch and return the next chain of media_items. A chain is defined as a sequence
of media_items where the end time of media_item 'n' is the start time of media_item
'n+1'
"""
@ -327,7 +414,15 @@ class PypoPush(Thread):
def date_interval_to_seconds(self, interval):
return (interval.microseconds + (interval.seconds + interval.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
"""
Convert timedelta object into int representing the number of seconds. If
number of seconds is less than 0, then return 0.
"""
seconds = (interval.microseconds + \
(interval.seconds + interval.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
if seconds < 0: seconds = 0
return seconds
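A standalone version of the same conversion, showing the clamping behaviour:

from datetime import timedelta

def interval_to_seconds(interval):
    # Same arithmetic as date_interval_to_seconds above, clamped at zero.
    seconds = (interval.microseconds +
               (interval.seconds + interval.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
    return max(seconds, 0)

assert interval_to_seconds(timedelta(days=1)) == 86400.0
assert interval_to_seconds(timedelta(seconds=-5)) == 0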
def push_to_liquidsoap(self, event_chain):
@ -353,9 +448,133 @@ class PypoPush(Thread):
PypoFetch.disconnect_source(self.logger, self.telnet_lock, "live_dj")
elif media_item['event_type'] == "switch_off":
PypoFetch.switch_source(self.logger, self.telnet_lock, "live_dj", "off")
elif media_item['type'] == 'stream_buffer_start':
self.start_web_stream_buffer(media_item)
elif media_item['type'] == "stream_output_start":
if media_item['row_id'] != self.current_prebuffering_stream_id:
#this is called if the stream wasn't scheduled sufficiently ahead of time
#so that the prebuffering stage could take effect. Let's do the prebuffering now.
self.start_web_stream_buffer(media_item)
self.start_web_stream(media_item)
elif media_item['type'] == "stream_buffer_end":
self.stop_web_stream_buffer(media_item)
elif media_item['type'] == "stream_output_end":
self.stop_web_stream_output(media_item)
except Exception, e:
self.logger.error('Pypo Push Exception: %s', e)
def start_web_stream_buffer(self, media_item):
try:
self.telnet_lock.acquire()
tn = telnetlib.Telnet(LS_HOST, LS_PORT)
msg = 'dynamic_source.id %s\n' % media_item['row_id']
self.logger.debug(msg)
tn.write(msg)
#example: dynamic_source.read_start http://87.230.101.24:80/top100station.mp3
msg = 'dynamic_source.read_start %s\n' % media_item['uri'].encode('latin-1')
self.logger.debug(msg)
tn.write(msg)
tn.write("exit\n")
self.logger.debug(tn.read_all())
self.current_prebuffering_stream_id = media_item['row_id']
except Exception, e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
def start_web_stream(self, media_item):
try:
self.telnet_lock.acquire()
tn = telnetlib.Telnet(LS_HOST, LS_PORT)
#TODO: Do we need this?
msg = 'streams.scheduled_play_start\n'
tn.write(msg)
msg = 'dynamic_source.output_start\n'
self.logger.debug(msg)
tn.write(msg)
tn.write("exit\n")
self.logger.debug(tn.read_all())
self.current_prebuffering_stream_id = None
except Exception, e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
def stop_web_stream_all(self):
try:
self.telnet_lock.acquire()
tn = telnetlib.Telnet(LS_HOST, LS_PORT)
msg = 'dynamic_source.read_stop_all xxx\n'
self.logger.debug(msg)
tn.write(msg)
msg = 'dynamic_source.output_stop\n'
self.logger.debug(msg)
tn.write(msg)
msg = 'dynamic_source.id -1\n'
self.logger.debug(msg)
tn.write(msg)
tn.write("exit\n")
self.logger.debug(tn.read_all())
except Exception, e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
def stop_web_stream_buffer(self, media_item):
try:
self.telnet_lock.acquire()
tn = telnetlib.Telnet(LS_HOST, LS_PORT)
#example: dynamic_source.read_stop <row_id>
msg = 'dynamic_source.read_stop %s\n' % media_item['row_id']
self.logger.debug(msg)
tn.write(msg)
msg = 'dynamic_source.id -1\n'
self.logger.debug(msg)
tn.write(msg)
tn.write("exit\n")
self.logger.debug(tn.read_all())
except Exception, e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
def stop_web_stream_output(self, media_item):
try:
self.telnet_lock.acquire()
tn = telnetlib.Telnet(LS_HOST, LS_PORT)
#example: dynamic_source.output_stop
msg = 'dynamic_source.output_stop\n'
self.logger.debug(msg)
tn.write(msg)
tn.write("exit\n")
self.logger.debug(tn.read_all())
except Exception, e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
def clear_liquidsoap_queue(self):
self.logger.debug("Clearing Liquidsoap queue")
try:
@ -474,9 +693,9 @@ class PypoPush(Thread):
self.telnet_lock.release()
def create_liquidsoap_annotation(self, media):
# we need lia_start_next value in the annotate. That is the value that controlls overlap duration of crossfade.
return 'annotate:media_id="%s",liq_start_next="0",liq_fade_in="%s",liq_fade_out="%s",liq_cue_in="%s",liq_cue_out="%s",schedule_table_id="%s":%s' \
% (media['id'], float(media['fade_in']) / 1000, float(media['fade_out']) / 1000, float(media['cue_in']), float(media['cue_out']), media['row_id'], media['dst'])
# We need liq_start_next value in the annotate. That is the value that controls overlap duration of crossfade.
return 'annotate:media_id="%s",liq_start_next="0",liq_fade_in="%s",liq_fade_out="%s",liq_cue_in="%s",liq_cue_out="%s",schedule_table_id="%s",replay_gain="%s dB":%s' \
% (media['id'], float(media['fade_in']) / 1000, float(media['fade_out']) / 1000, float(media['cue_in']), float(media['cue_out']), media['row_id'], media['replay_gain'], media['dst'])
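With hypothetical values (a 500 ms fade, a 180 s cue-out, an illustrative replay gain and cache path), the annotation produced above would look like:

annotate:media_id="42",liq_start_next="0",liq_fade_in="0.5",liq_fade_out="0.5",liq_cue_in="0.0",liq_cue_out="180.0",schedule_table_id="7",replay_gain="-3.2 dB":/var/tmp/pypo/cache/42.mp3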
def run(self):
try: self.main()

View file
import pytz
import signal
import math
import traceback
from configobj import ConfigObj
@ -20,7 +21,14 @@ from threading import Thread
import mutagen
from api_clients import api_client
from api_clients import api_client as apc
def api_client(logger):
"""
api_client returns the correct instance of AirtimeApiClient, although there is only one
implementation to choose from at the moment.
"""
return apc.AirtimeApiClient(logger)
# loading config file
try:
@ -29,15 +37,20 @@ except Exception, e:
print ('Error loading config file: %s', e)
sys.exit()
# TODO : add docstrings everywhere in this module
def getDateTimeObj(time):
# TODO : clean up this function later.
# - use tuples to parse result from split (instead of indices)
# - perhaps validate the input before doing dangerous casts?
# - rename this function to follow the standard convention
# - rename time to something else so that the module name does not get
# shadowed
# - add docstring to document all behaviour of this function
timeinfo = time.split(" ")
date = timeinfo[0].split("-")
time = timeinfo[1].split(":")
date = map(int, date)
time = map(int, time)
return datetime.datetime(date[0], date[1], date[2], time[0], time[1], time[2], 0, None)
date = [ int(x) for x in timeinfo[0].split("-") ]
my_time = [ int(x) for x in timeinfo[1].split(":") ]
return datetime.datetime(date[0], date[1], date[2], my_time[0], my_time[1], my_time[2], 0, None)
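# Expected behaviour, sketched with an assumed timestamp:
# >>> getDateTimeObj("2012-10-26 00:31:12")
# datetime.datetime(2012, 10, 26, 0, 31, 12)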
PUSH_INTERVAL = 2
@@ -45,16 +58,16 @@ class ShowRecorder(Thread):
def __init__ (self, show_instance, show_name, filelength, start_time):
Thread.__init__(self)
self.logger = logging.getLogger('recorder')
self.api_client = api_client.api_client_factory(config, self.logger)
self.filelength = filelength
self.start_time = start_time
self.logger = logging.getLogger('recorder')
self.api_client = api_client(self.logger)
self.filelength = filelength
self.start_time = start_time
self.show_instance = show_instance
self.show_name = show_name
self.p = None
self.show_name = show_name
self.p = None
def record_show(self):
length = str(self.filelength) + ".0"
length = str(self.filelength) + ".0"
filename = self.start_time
filename = filename.replace(" ", "-")
@@ -63,16 +76,18 @@
else:
filetype = "ogg";
filepath = "%s%s.%s" % (config["base_recorded_files"], filename, filetype)
joined_path = os.path.join(config["base_recorded_files"], filename)
filepath = "%s.%s" % (joined_path, filetype)
br = config["record_bitrate"]
sr = config["record_samplerate"]
c = config["record_channels"]
c = config["record_channels"]
ss = config["record_sample_size"]
#-f:16,2,44100
#-b:256
command = "ecasound -f:%s,%s,%s -i alsa -o %s,%s000 -t:%s" % (ss, c, sr, filepath, br, length)
command = "ecasound -f:%s,%s,%s -i alsa -o %s,%s000 -t:%s" % \
(ss, c, sr, filepath, br, length)
args = command.split(" ")
self.logger.info("starting record")
@@ -121,27 +136,27 @@
self.api_client.upload_recorded_show(datagen, headers)
def set_metadata_and_save(self, filepath):
"""
Writes metadata tags to the recorded file at 'filepath', using
self.start_time, self.show_name and self.show_instance.
"""
try:
date = self.start_time
md = date.split(" ")
time = md[1].replace(":", "-")
self.logger.info("time: %s" % time)
name = time + "-" + self.show_name
full_date, full_time = self.start_time.split(" ",1)
# No idea why we translated ":" to "-" before
#full_time = full_time.replace(":","-")
self.logger.info("time: %s" % full_time)
artist = "Airtime Show Recorder"
#set some metadata for our file daemon
recorded_file = mutagen.File(filepath, easy=True)
recorded_file['title'] = name
recorded_file = mutagen.File(filepath, easy = True)
recorded_file['artist'] = artist
recorded_file['date'] = md[0]
#recorded_file['date'] = md[0].split("-")[0]
recorded_file['date'] = full_date
recorded_file['title'] = "%s-%s-%s" % (self.show_name,
full_date, full_time)
#You cannot pass ints into the metadata of a file. Even tracknumber needs to be a string
recorded_file['tracknumber'] = unicode(self.show_instance)
recorded_file.save()
except Exception, e:
import traceback
top = traceback.format_exc()
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", top)
@@ -167,20 +182,20 @@ class ShowRecorder(Thread):
class Recorder(Thread):
def __init__(self, q):
Thread.__init__(self)
self.logger = logging.getLogger('recorder')
self.api_client = api_client.api_client_factory(config, self.logger)
self.api_client.register_component("show-recorder")
self.sr = None
self.logger = logging.getLogger('recorder')
self.api_client = api_client(self.logger)
self.sr = None
self.shows_to_record = {}
self.server_timezone = ''
self.queue = q
self.queue = q
self.loops = 0
self.api_client.register_component("show-recorder")
self.logger.info("RecorderFetch: init complete")
self.loops = 0
def handle_message(self):
if not self.queue.empty():
message = self.queue.get()
msg = json.loads(message)
msg = json.loads(message)
command = msg["event_type"]
self.logger.info("Received msg from Pypo Message Handler: %s", msg)
if command == 'cancel_recording':
@@ -199,10 +214,11 @@ class Recorder(Thread):
shows = m['shows']
for show in shows:
show_starts = getDateTimeObj(show[u'starts'])
show_end = getDateTimeObj(show[u'ends'])
time_delta = show_end - show_starts
show_end = getDateTimeObj(show[u'ends'])
time_delta = show_end - show_starts
temp_shows_to_record[show[u'starts']] = [time_delta, show[u'instance_id'], show[u'name'], m['server_timezone']]
temp_shows_to_record[show[u'starts']] = [time_delta,
show[u'instance_id'], show[u'name'], m['server_timezone']]
self.shows_to_record = temp_shows_to_record
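# The resulting dict maps a UTC start-time string to the show's details,
# e.g. (assumed values):
# { u'2012-10-26 00:31:12':
#       [datetime.timedelta(0, 3600), 42, u'Morning Show', u'UTC'] }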
def get_time_till_next_show(self):
@@ -210,10 +226,10 @@ class Recorder(Thread):
tnow = datetime.datetime.utcnow()
sorted_show_keys = sorted(self.shows_to_record.keys())
start_time = sorted_show_keys[0]
next_show = getDateTimeObj(start_time)
start_time = sorted_show_keys[0]
next_show = getDateTimeObj(start_time)
delta = next_show - tnow
delta = next_show - tnow
s = '%s.%s' % (delta.seconds, delta.microseconds)
out = float(s)
@@ -224,43 +240,42 @@ class Recorder(Thread):
return out
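# Sketch with assumed times: a show starting 90.5 s from now gives
# delta.seconds == 90 and delta.microseconds == 500000, so out == 90.5.
# Note the '%s.%s' trick does not zero-pad microseconds (5000 us would
# render as '.5000', i.e. half a second); delta.total_seconds() is the
# safer equivalent on Python >= 2.7.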
def start_record(self):
if len(self.shows_to_record) != 0:
try:
delta = self.get_time_till_next_show()
if delta < 5:
self.logger.debug("sleeping %s seconds until show", delta)
time.sleep(delta)
if len(self.shows_to_record) == 0: return None
try:
delta = self.get_time_till_next_show()
if delta < 5:
self.logger.debug("sleeping %s seconds until show", delta)
time.sleep(delta)
sorted_show_keys = sorted(self.shows_to_record.keys())
start_time = sorted_show_keys[0]
show_length = self.shows_to_record[start_time][0]
show_instance = self.shows_to_record[start_time][1]
show_name = self.shows_to_record[start_time][2]
server_timezone = self.shows_to_record[start_time][3]
sorted_show_keys = sorted(self.shows_to_record.keys())
start_time = sorted_show_keys[0]
show_length = self.shows_to_record[start_time][0]
show_instance = self.shows_to_record[start_time][1]
show_name = self.shows_to_record[start_time][2]
server_timezone = self.shows_to_record[start_time][3]
T = pytz.timezone(server_timezone)
start_time_on_UTC = getDateTimeObj(start_time)
start_time_on_server = start_time_on_UTC.replace(tzinfo=pytz.utc).astimezone(T)
start_time_formatted = '%(year)d-%(month)02d-%(day)02d %(hour)02d:%(min)02d:%(sec)02d' % \
{'year': start_time_on_server.year, 'month': start_time_on_server.month, 'day': start_time_on_server.day, \
'hour': start_time_on_server.hour, 'min': start_time_on_server.minute, 'sec': start_time_on_server.second}
self.sr = ShowRecorder(show_instance, show_name, show_length.seconds, start_time_formatted)
self.sr.start()
#remove show from shows to record.
del self.shows_to_record[start_time]
#self.time_till_next_show = self.get_time_till_next_show()
except Exception, e :
import traceback
top = traceback.format_exc()
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", top)
T = pytz.timezone(server_timezone)
start_time_on_UTC = getDateTimeObj(start_time)
start_time_on_server = start_time_on_UTC.replace(tzinfo=pytz.utc).astimezone(T)
start_time_formatted = '%(year)d-%(month)02d-%(day)02d %(hour)02d:%(min)02d:%(sec)02d' % \
{'year': start_time_on_server.year, 'month': start_time_on_server.month, 'day': start_time_on_server.day, \
'hour': start_time_on_server.hour, 'min': start_time_on_server.minute, 'sec': start_time_on_server.second}
self.sr = ShowRecorder(show_instance, show_name, show_length.seconds, start_time_formatted)
self.sr.start()
#remove show from shows to record.
del self.shows_to_record[start_time]
#self.time_till_next_show = self.get_time_till_next_show()
except Exception, e :
top = traceback.format_exc()
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", top)
"""
Main loop of the thread:
Wait for schedule updates from RabbitMQ, but in case there aren't any,
poll the server to get the upcoming schedule.
"""
def run(self):
"""
Main loop of the thread:
Wait for schedule updates from RabbitMQ, but in case there aren't any,
poll the server to get the upcoming schedule.
"""
try:
self.logger.info("Started...")
# Bootstrap: since we are just starting up, we need to grab the
@@ -271,6 +286,7 @@ class Recorder(Thread):
self.process_recorder_schedule(temp)
self.logger.info("Bootstrap recorder schedule received: %s", temp)
except Exception, e:
self.logger.error( traceback.format_exc() )
self.logger.error(e)
self.logger.info("Bootstrap complete: got initial copy of the schedule")
@@ -292,14 +308,15 @@ class Recorder(Thread):
self.process_recorder_schedule(temp)
self.logger.info("updated recorder schedule received: %s", temp)
except Exception, e:
self.logger.error( traceback.format_exc() )
self.logger.error(e)
try: self.handle_message()
except Exception, e:
self.logger.error( traceback.format_exc() )
self.logger.error('Pypo Recorder Exception: %s', e)
time.sleep(PUSH_INTERVAL)
self.loops += 1
except Exception, e :
import traceback
top = traceback.format_exc()
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", top)

View file

@@ -78,7 +78,7 @@ echo "Removing everything from the scheduler between $startTime and $endTime..."
$scheduleClear = Schedule::isScheduleEmptyInRange($startTime, "01:00:00");
if (!$scheduleClear) {
echo "\nERROR: Schedule could not be cleared.\n\n";
var_dump(Schedule::GetItems($startTime, $endTime));
var_dump(Schedule::getItems($startTime, $endTime));
exit;
}
echo "done.\n";

View file

@@ -0,0 +1,12 @@
argparse==1.2.1
amqplib==1.0.2
PyDispatcher==2.0.3
anyjson==0.3.3
kombu==2.2.6
pyinotify==0.9.3
poster==0.8.1
pytz==2011k
wsgiref==0.1.2
configobj==4.7.2
mutagen==1.20
docopt==0.4.2