Merge branch 'devel' of dev.sourcefabric.org:airtime into devel
commit 0322e6b0c6
@@ -389,17 +389,17 @@ class ApiController extends Zend_Controller_Action
        }
    }

    public function uploadRecordAction() {
    public function uploadRecordedAction() {
        $show_instance_id = $this->_getParam('showinstanceid');
        $file_id = $this->_getParam('fileid');
        $this->view->fileid = $file_id;
        $this->view->showinstanceid = $show_instance_id;
        $this->uploadRecordActionParam($show_instance_id, $file_id);
        $this->uploadRecordedActionParam($show_instance_id, $file_id);
    }

    // The paramterized version of the uploadRecordAction controller. We want this controller's action
    // The paramterized version of the uploadRecordedAction controller. We want this controller's action
    // to be invokable from other controllers instead being of only through http
    public function uploadRecordActionParam($show_instance_id, $file_id)
    public function uploadRecordedActionParam($show_instance_id, $file_id)
    {
        $showCanceled = false;
        $file = Application_Model_StoredFile::Recall($file_id);
@@ -447,8 +447,17 @@ class ApiController extends Zend_Controller_Action
        $this->view->watched_dirs = $watchedDirsPath;
    }

    public function dispatchMetaDataAction($md, $mode)
    public function dispatchMetadataAction($md, $mode, $dry_run=false)
    {
        // Replace this compound result in a hash with proper error handling later on
        $return_hash = array();
        if ( $dry_run ) { // for debugging we return garbage not to screw around with the db
            return array(
                'md' => $md,
                'mode' => $mode,
                'fileid' => 123456
            );
        }
        Application_Model_Preference::SetImportTimestamp();
        if ($mode == "create") {
            $filepath = $md['MDATA_KEY_FILEPATH'];
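The new $dry_run flag short-circuits dispatchMetadataAction() before any database work: the caller gets back a fixed hash that echoes the metadata and mode plus a placeholder file id. A rough sketch of the per-request response shape this implies, with placeholder values that are not taken from the diff:

# Hypothetical illustration of the dry-run response shape suggested by the
# dispatchMetadataAction() change above; all values are placeholders.
dry_run_response = {
    'md':     {'MDATA_KEY_FILEPATH': '/srv/airtime/stor/example.mp3'},  # echoed metadata
    'mode':   'create',                                                 # echoed mode
    'fileid': 123456,                                                   # fixed dummy id
}
assert dry_run_response['fileid'] == 123456  # nothing was written to the database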
@@ -460,8 +469,8 @@ class ApiController extends Zend_Controller_Action
            // path already exist
            if ($file->getFileExistsFlag()) {
                // file marked as exists
                $this->view->error = "File already exists in Airtime.";
                return;
                $return_hash['error'] = "File already exists in Airtime.";
                return $return_hash;
            } else {
                // file marked as not exists
                $file->setFileExistsFlag(true);
@@ -475,8 +484,8 @@ class ApiController extends Zend_Controller_Action

            //File is not in database anymore.
            if (is_null($file)) {
                $this->view->error = "File does not exist in Airtime.";
                return;
                $return_hash['error'] = "File does not exist in Airtime.";
                return $return_hash;
            }
            //Updating a metadata change.
            else {
@@ -488,8 +497,7 @@ class ApiController extends Zend_Controller_Action
            $file = Application_Model_StoredFile::RecallByMd5($md5);

            if (is_null($file)) {
                $this->view->error = "File doesn't exist in Airtime.";
                return;
                return "File doesn't exist in Airtime.";
            }
            else {
                $filepath = $md['MDATA_KEY_FILEPATH'];
@@ -503,8 +511,8 @@ class ApiController extends Zend_Controller_Action
            $file = Application_Model_StoredFile::RecallByFilepath($filepath);

            if (is_null($file)) {
                $this->view->error = "File doesn't exist in Airtime.";
                return;
                $return_hash['error'] = "File doesn't exist in Airtime.";
                return $return_hash;
            }
            else {
                $file->deleteByMediaMonitor();
@@ -518,9 +526,11 @@ class ApiController extends Zend_Controller_Action
                foreach($files as $file){
                    $file->deleteByMediaMonitor();
                }
                return;
                $return_hash['success'] = 1;
                return $return_hash;
            }
            return $file->getId();
            $return_hash['fileid'] = $file->getId();
            return $return_hash;
    }

    public function reloadMetadataGroupAction()
@@ -528,28 +538,51 @@ class ApiController extends Zend_Controller_Action
        $request = $this->getRequest();
        // extract all file metadata params from the request.
        // The value is a json encoded hash that has all the information related to this action
        // The key does not have any meaning as of yet but it could potentially correspond
        // The key(mdXXX) does not have any meaning as of yet but it could potentially correspond
        // to some unique id.
        $responses = array();
        $dry = $request->getParam('dry') || false;
        $params = $request->getParams();
        $valid_modes = array('delete_dir', 'delete', 'moved', 'modify', 'create');
        foreach ($request->getParams() as $k => $raw_json) {
            // Valid requests must start with mdXXX where XXX represents at least 1 digit
            if( !preg_match('/^md\d+$/', $k) ) { continue; }
            $info_json = json_decode($raw_json, $assoc=true);
            if( !array_key_exists('mode', $info_json) ) { // Log invalid requests
                Logging::log("Received bad request(key=$k), no 'mode' parameter. Bad request is:");
                Logging::log( $info_json );
                array_push( $responses, array(
                    'error' => "Bad request. no 'mode' parameter passed.",
                    'key' => $k));
                continue;
            } elseif ( !in_array($info_json['mode'], $valid_modes) ) {
                // A request still has a chance of being invalid even if it exists but it's validated
                // by $valid_modes array
                $mode = $info_json['mode'];
                Logging::log("Received bad request(key=$k). 'mode' parameter was invalid with value: '$mode'. Request:");
                Logging::log( $info_json );
                array_push( $responses, array(
                    'error' => "Bad request. 'mode' parameter is invalid",
                    'key' => $k,
                    'mode' => $mode ) );
                continue;
            }
            // Removing 'mode' key from $info_json might not be necessary...
            $mode = $info_json['mode'];
            unset( $info_json['mode'] );
            // TODO : uncomment the following line to actually do something
            $response = $this->dispatchMetaDataAction($info_json, $info_json['mode']);
            array_push($responses, $this->dispatchMetaDataAction($info_json, $info_json['mode']));
            // Like wise, remove the following line when done
            $response = $this->dispatchMetadataAction($info_json, $mode, $dry_run=$dry);
            // We attack the 'key' back to every request in case the would like to associate
            // his requests with particular responses
            $response['key'] = $k;
            array_push($responses, $response);
            // On recorded show requests we do some extra work here. Not sure what it actually is and it
            // was usually called from the python api
            if( $info_json['is_record'] ) {
                // TODO : must check for error in $response before proceeding...
                $this->uploadRecordActionParam($info_json['showinstanceid'],$info_json['fileid']);
            // was usually called from the python api client. Now we just call it straight from the controller to
            // save the http roundtrip
            if( $info_json['is_record'] and !array_key_exists('error', $response) ) {
                $this->uploadRecordedActionParam($info_json['showinstanceid'],$info_json['fileid'],$dry_run=$dry);
            }
            // TODO : Remove this line when done debugging
            Logging::log( $info_json );

        }
        die(json_encode( array('successes' => 19, 'fails' => 123) ));
        die( json_encode($responses) );
    }

    public function reloadMetadataAction()
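reloadMetadataGroupAction() now expects a batch of requests: every POST parameter named mdN carries a JSON-encoded hash with a 'mode' key, and the action answers with one response hash per key, echoing that key back. A rough sketch of how a client might build such a request; the endpoint path mirrors reload_metadata_group from the api_client.cfg added later in this commit, while the host and api key below are placeholders:

# Sketch only: every parameter is named mdN and holds a JSON-encoded action hash.
import json
import urllib

actions = [
    {'mode': 'create', 'MDATA_KEY_FILEPATH': '/srv/example.mp3', 'is_record': 0},
    {'mode': 'delete', 'MDATA_KEY_FILEPATH': '/srv/gone.mp3',    'is_record': 0},
]
post_body = dict(("md%d" % i, json.dumps(a)) for i, a in enumerate(actions))
post_body['dry'] = 1   # optional: ask the controller not to touch the database
data = urllib.urlencode(post_body)
url = "http://localhost:80/api/reload-metadata-group/format/json/api_key/XXXX"
# urllib2.urlopen(url, data) would return a JSON list with one response per mdN key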
@@ -41,7 +41,7 @@ def convert_dict_value_to_utf8(md):

class AirtimeApiClient():

    def __init__(self, logger=None):
    def __init__(self, logger=None,config_path='/etc/airtime/api_client.cfg'):
        if logger is None:
            self.logger = logging
        else:
@@ -49,7 +49,7 @@ class AirtimeApiClient():

        # loading config file
        try:
            self.config = ConfigObj('/etc/airtime/api_client.cfg')
            self.config = ConfigObj(config_path)
        except Exception, e:
            self.logger.error('Error loading config file: %s', e)
            sys.exit(1)
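The constructor now takes a config_path argument that defaults to the old hard-coded location, so tests can point the client at a local config file. A minimal usage sketch, assuming a local test path of your own:

# The path below is an assumed local test file, not something defined by this commit.
from api_clients import api_client as apc

client = apc.AirtimeApiClient(config_path='/tmp/my_api_client.cfg')
# Omitting the argument keeps the old behaviour:
default_client = apc.AirtimeApiClient()   # reads /etc/airtime/api_client.cfg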
@@ -366,7 +366,7 @@ class AirtimeApiClient():

        return response

    def send_media_monitor_requests(self, action_list):
    def send_media_monitor_requests(self, action_list, dry=False):
        """
        Send a gang of media monitor events at a time. actions_list is a list of dictionaries
        where every dictionary is representing an action. Every action dict must contain a 'mode'
@@ -394,28 +394,22 @@ class AirtimeApiClient():
                # matter what it is based on if it's absent in the action
                if 'is_record' in action:
                    self.logger.debug("Sending a 'recorded' action")
                    action['is_record'] = True
                else: action['is_record'] = False
                    action['is_record'] = 1
                else: action['is_record'] = 0
                valid_actions.append(action)

            md_list = dict((i, json.dumps(convert_dict_value_to_utf8(md))) for i,md in enumerate(valid_actions))
            # Note that we must prefix every key with: mdX where x is a number
            # Is there a way to format the next line a little better? The
            # parenthesis make the code almost unreadable
            md_list = dict((("md%d" % i), json.dumps(convert_dict_value_to_utf8(md))) \
                    for i,md in enumerate(valid_actions))
            # For testing we add the following "dry" parameter to tell the
            # controller not to actually do any changes
            if dry: md_list['dry'] = 1
            self.logger.info("Pumping out %d requests..." % len(valid_actions))
            data = urllib.urlencode(md_list)
            req = urllib2.Request(url, data)
            response = self.get_response_from_server(req)
            response = json.loads(response)
            # TODO : this request returns a more detailed response of what
            # happened through a json array. Hence we should handle errors
            # differently
            # we would like to move all of this to the controller since we are
            # not doing anything here
            #if("error" not in response and is_record):
                #url = "http://%s:%s/%s/%s" % (self.config["base_url"], str(self.config["base_port"]), self.config["api_base"], self.config["upload_recorded"])
                #url = url.replace("%%fileid%%", str(response[u'id']))
                #url = url.replace("%%showinstanceid%%", str(md_list['MDATA_KEY_TRACKNUMBER']))
                #url = url.replace("%%api_key%%", self.config["api_key"])

                #response = self.get_response_from_server(url)
                #response = json.loads(response)
                #logger.info("associate recorded %s", response)
            return response
        except Exception, e:
            logger.error('Exception: %s', e)
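With this change send_media_monitor_requests() prefixes every key with md, encodes is_record as 0/1, and accepts dry=True for testing against the new controller action. A usage sketch mirroring the unit test added later in this commit; the config path is an assumption for illustration:

from api_clients import api_client as apc

client = apc.AirtimeApiClient(config_path='/etc/airtime/api_client.cfg')
actions = [
    {'mode': 'create', 'MDATA_KEY_FILEPATH': '/srv/example.mp3'},
    {'mode': 'no_such_mode'},   # will come back with an 'error' entry
]
responses = client.send_media_monitor_requests(actions, dry=True)
for r in responses:
    # each response echoes its mdN key and carries either an error or a fileid
    print("%s -> %s" % (r.get('key'), r.get('error', r.get('fileid'))))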
@@ -25,7 +25,6 @@ fi
export PYTHONPATH=${api_client_path}
export LC_ALL=`cat /etc/default/locale | grep "LANG=" | cut -d= -f2 | tr -d "\n\""`

# Note the -u when calling python! we need it to get unbuffered binary stdout and stderr
exec python -u ${media_monitor_path}${media_monitor_script} > /var/log/airtime/media-monitor/py-interpreter.log 2>&1
exec python ${media_monitor_path}${media_monitor_script} > /var/log/airtime/media-monitor/py-interpreter.log 2>&1

# EOF
@@ -0,0 +1,101 @@
# -*- coding: utf-8 -*-
from kombu.messaging import Exchange, Queue, Consumer
from kombu.connection import BrokerConnection
import json
import copy

from media.monitor.log import Loggable

# Do not confuse with media monitor 1's AirtimeNotifier class that more related
# to pyinotify's Notifier class. AirtimeNotifier just notifies when events come
# from Airtime itself. I.E. changes made in the web UI that must be updated
# through media monitor

class AirtimeNotifier(Loggable):
    """
    AirtimeNotifier is responsible for interecepting RabbitMQ messages and feeding them to the
    event_handler object it was initialized with. The only thing it does to the messages is parse
    them from json
    """
    def __init__(self, cfg, message_receiver):
        self.cfg = cfg
        try:
            self.handler = message_receiver
            self.logger.info("Initializing RabbitMQ message consumer...")
            schedule_exchange = Exchange("airtime-media-monitor", "direct", durable=True, auto_delete=True)
            schedule_queue = Queue("media-monitor", exchange=schedule_exchange, key="filesystem")
            #self.connection = BrokerConnection(cfg["rabbitmq_host"], cfg["rabbitmq_user"],
            #cfg["rabbitmq_password"], cfg["rabbitmq_vhost"])
            connection = BrokerConnection(cfg["rabbitmq_host"], cfg["rabbitmq_user"],
                    cfg["rabbitmq_password"], cfg["rabbitmq_vhost"])
            channel = connection.channel()
            consumer = Consumer(channel, schedule_queue)
            consumer.register_callback(self.handle_message)
            consumer.consume()
        except Exception as e:
            self.logger.info("Failed to initialize RabbitMQ consumer")
            self.logger.error(e)
            raise

    def handle_message(self, body, message):
        """
        Messages received from RabbitMQ are handled here. These messages
        instruct media-monitor of events such as a new directory being watched,
        file metadata has been changed, or any other changes to the config of
        media-monitor via the web UI.
        """
        message.ack()
        self.logger.info("Received md from RabbitMQ: %s" % str(body))
        m = json.loads(message.body)
        self.handler.message(m)


class AirtimeMessageReceiver(Loggable):
    def __init__(self, cfg):
        self.dispatch_table = {
            'md_update'    : self.md_update,
            'new_watch'    : self.new_watch,
            'remove_watch' : self.remove_watch,
            'rescan_watch' : self.rescan_watch,
            'change_stor'  : self.change_storage,
            'file_delete'  : self.file_delete,
        }
        self.cfg = cfg
    def message(self, msg):
        """
        This method is called by an AirtimeNotifier instance that consumes the Rabbit MQ events
        that trigger this. The method return true when the event was executed and false when it
        wasn't
        """
        msg = copy.deepcopy(msg)
        if msg['event_type'] in self.dispatch_table:
            evt = msg['event_type']
            del msg['event_type']
            self.logger.info("Handling RabbitMQ message: '%s'" % evt)
            self.execute_message(evt,msg)
            return True
        else:
            self.logger.info("Received invalid message with 'event_type': '%s'" % msg['event_type'])
            self.logger.info("Message details: %s" % str(msg))
            return False
    def execute_message(self,evt,message):
        self.dispatch_table[evt](message)

    def supported_messages(self):
        return self.dispatch_table.keys()

    # Handler methods - Should either fire the events directly with
    # pydispatcher or do the necessary changes on the filesystem that will fire
    # the events
    def md_update(self, msg):
        pass
    def new_watch(self, msg):
        pass
    def remove_watch(self, msg):
        pass
    def rescan_watch(self, msg):
        pass
    def change_storage(self, msg):
        pass
    def file_delete(self, msg):
        pass
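AirtimeMessageReceiver dispatches on the message's event_type, strips that key, and hands the rest to the matching handler (currently all stubs). A small sketch of that behaviour, assuming an empty cfg dict is acceptable for a standalone receiver, as the unit tests added later do:

from media.monitor.airtime import AirtimeMessageReceiver

amr = AirtimeMessageReceiver(cfg={})
print(amr.supported_messages())   # ['md_update', 'new_watch', ...]
handled = amr.message({'event_type': 'file_delete', 'filepath': '/srv/old.mp3'})
print(handled)                    # True; file_delete() is currently a no-op stub
bogus = amr.message({'event_type': 'not_a_real_event'})
print(bogus)                      # False; unknown events are only logged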
@@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
import os
from configobj import ConfigObj
import copy

from media.monitor.log import Loggable
from media.monitor.exceptions import NoConfigFile, ConfigAccessViolation

class MMConfig(Loggable):
    def __init__(self, path):
        if not os.path.exists(path):
            self.logger.error("Configuration file does not exist. Path: '%s'" % path)
            raise NoConfigFile(path)
        self.cfg = ConfigObj(path)

    def __getitem__(self, key):
        """
        We always return a copy of the config item to prevent callers from doing any modifications
        through the returned objects methods
        """
        return copy.deepcopy(self.cfg[key])

    def __setitem__(self, key, value):
        """
        We use this method not to allow anybody to mess around with config file
        any settings made should be done through MMConfig's instance methods
        """
        raise ConfigAccessViolation(key)

    def save(self): self.cfg.write()

    # Remove this after debugging...
    def haxxor_set(self, key, value): self.cfg[key] = value
    def haxxor_get(self, key): return self.cfg[key]
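MMConfig hands out deep copies on reads and refuses direct writes. A short sketch of that contract, assuming a config file like the tests/test_config.cfg added in this commit:

from media.monitor.config import MMConfig
from media.monitor.exceptions import ConfigAccessViolation

cfg = MMConfig('./test_config.cfg')
host = cfg['rabbitmq_host']               # a deep copy, safe to mangle locally
try:
    cfg['rabbitmq_host'] = 'elsewhere'    # direct writes are forbidden
except ConfigAccessViolation as e:
    print(e)                              # "You must not access key 'rabbitmq_host' directly"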
@@ -1,7 +1,14 @@
# -*- coding: utf-8 -*-

class BadSongFile(Exception):
    def __init__(self, path):
        self.path = path
    def __init__(self, path): self.path = path
    def __str__(self): return "Can't read %s" % self.path

class NoConfigFile(Exception):
    def __init__(self, path): self.path = path
    def __str__(self):
        return "Can't read %s" % self.path
        return "Path '%s' for config file does not exit" % self.path

class ConfigAccessViolation(Exception):
    def __init__(self,key): self.key = key
    def __str__(self): return "You must not access key '%s' directly" % self.key
@@ -6,6 +6,26 @@ import media.monitor.pure as mmp
from media.monitor.pure import IncludeOnly
from media.monitor.events import OrganizeFile, NewFile, DeleteFile

# We attempt to document a list of all special cases and hacks that the
# following classes should be able to handle.
# TODO : implement all of the following special cases
#
# - Recursive directories being added to organized dirs are not handled
# properly as they only send a request for the dir and not for every file. Also
# more hacks are needed to check that the directory finished moving/copying?
#
# - In the case when a 'watched' directory's subdirectory is delete we should
# send a special request telling ApiController to delete a whole dir. This is
# done becasue pyinotify will not send an individual file delete event for
# every file in that directory
#
# - Special move events are required whenever a file is moved from a 'watched'
# directory into another 'watched' directory (or subdirectory). In this case we
# must identify the file by its md5 signature instead of it's filepath like we
# usually do. Maybe it's best to always identify a file based on its md5
# signature?. Of course that's not possible for some modification events
# because the md5 signature will change...


class BaseListener(object):
    def my_init(self, signal):
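The comment block above proposes identifying moved files by their md5 signature rather than by path. Nothing in this diff implements that yet; a hypothetical helper for such a signature could look like this:

# Hypothetical helper illustrating md5-based identification; not part of this commit.
import hashlib

def file_signature(path, chunk_size=8192):
    """Return the md5 hex digest of a file, read in chunks to avoid loading it whole."""
    digest = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()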
@@ -24,7 +24,11 @@ class RequestSync(threading.Thread,Loggable):
    def run(self):
        # TODO : implement proper request sending
        self.logger.info("launching request with %d items." % len(self.requests))
        #self.apiclient.update_media_metadata(
        # Note that we must attach the appropriate mode to every response. Also
        # Not forget to attach the 'is_record' to any requests that are related
        # to recorded shows
        # A simplistic request would like:
        # self.apiclient.send_media_monitor_requests(requests)
        self.watcher.flag_done()

class TimeoutWatcher(threading.Thread,Loggable):
@@ -0,0 +1,115 @@
bin_dir = "/usr/lib/airtime/api_clients"

#############################
## Common
#############################

# Value needed to access the API
api_key = '3MP2IUR45E6KYQ01CUYK'

# Path to the base of the API
api_base = 'api'

# URL to get the version number of the server API
version_url = 'version/api_key/%%api_key%%'

#URL to register a components IP Address with the central web server
register_component = 'register-component/format/json/api_key/%%api_key%%/component/%%component%%'

# Hostname
base_url = 'localhost'
base_port = 80

#############################
## Config for Media Monitor
#############################

# URL to setup the media monitor
media_setup_url = 'media-monitor-setup/format/json/api_key/%%api_key%%'

# Tell Airtime the file id associated with a show instance.
upload_recorded = 'upload-recorded/format/json/api_key/%%api_key%%/fileid/%%fileid%%/showinstanceid/%%showinstanceid%%'

# URL to tell Airtime to update file's meta data
update_media_url = 'reload-metadata/format/json/api_key/%%api_key%%/mode/%%mode%%'

# URL to tell Airtime we want a listing of all files it knows about
list_all_db_files = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%'

# URL to tell Airtime we want a listing of all dirs its watching (including the stor dir)
list_all_watched_dirs = 'list-all-watched-dirs/format/json/api_key/%%api_key%%'

# URL to tell Airtime we want to add watched directory
add_watched_dir = 'add-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'

# URL to tell Airtime we want to add watched directory
remove_watched_dir = 'remove-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'

# URL to tell Airtime we want to add watched directory
set_storage_dir = 'set-storage-dir/format/json/api_key/%%api_key%%/path/%%path%%'

# URL to tell Airtime about file system mount change
update_fs_mount = 'update-file-system-mount/format/json/api_key/%%api_key%%'

# URL to tell Airtime about file system mount change
handle_watched_dir_missing = 'handle-watched-dir-missing/format/json/api_key/%%api_key%%/dir/%%dir%%'

#############################
## Config for Recorder
#############################

# URL to get the schedule of shows set to record
show_schedule_url = 'recorded-shows/format/json/api_key/%%api_key%%'

# URL to upload the recorded show's file to Airtime
upload_file_url = 'upload-file/format/json/api_key/%%api_key%%'

# URL to commit multiple updates from media monitor at the same time

reload_metadata_group = 'reload-metadata-group/format/json/api_key/%%api_key%%'

#number of retries to upload file if connection problem
upload_retries = 3

#time to wait between attempts to upload file if connection problem (in seconds)
upload_wait = 60

################################################################################
# Uncomment *one of the sets* of values from the API clients below, and comment
# out all the others.
################################################################################

#############################
## Config for Pypo
#############################

# Schedule export path.
# %%from%% - starting date/time in the form YYYY-MM-DD-hh-mm
# %%to%% - starting date/time in the form YYYY-MM-DD-hh-mm
export_url = 'schedule/api_key/%%api_key%%'

get_media_url = 'get-media/file/%%file%%/api_key/%%api_key%%'

# Update whether a schedule group has begun playing.
update_item_url = 'notify-schedule-group-play/api_key/%%api_key%%/schedule_id/%%schedule_id%%'

# Update whether an audio clip is currently playing.
update_start_playing_url = 'notify-media-item-start-play/api_key/%%api_key%%/media_id/%%media_id%%/schedule_id/%%schedule_id%%'

# URL to tell Airtime we want to get stream setting
get_stream_setting = 'get-stream-setting/format/json/api_key/%%api_key%%/'

#URL to update liquidsoap status
update_liquidsoap_status = 'update-liquidsoap-status/format/json/api_key/%%api_key%%/msg/%%msg%%/stream_id/%%stream_id%%/boot_time/%%boot_time%%'

#URL to check live stream auth
check_live_stream_auth = 'check-live-stream-auth/format/json/api_key/%%api_key%%/username/%%username%%/password/%%password%%/djtype/%%djtype%%'

#URL to update source status
update_source_status = 'update-source-status/format/json/api_key/%%api_key%%/sourcename/%%sourcename%%/status/%%status%%'

get_bootstrap_info = 'get-bootstrap-info/format/json/api_key/%%api_key%%'

get_files_without_replay_gain = 'get-files-without-replay-gain/api_key/%%api_key%%/dir_id/%%dir_id%%'

update_replay_gain_value = 'update-replay-gain-value/api_key/%%api_key%%'
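Each *_url entry above is a template whose %%name%% tokens are substituted before the request goes out, in the same way the commented-out upload_recorded code in api_client does. A sketch of that substitution with placeholder values:

# Illustration only; the api key and ids below are placeholders.
config = {
    'base_url': 'localhost', 'base_port': 80, 'api_base': 'api',
    'api_key': 'XXXXXXXX',
    'upload_recorded': 'upload-recorded/format/json/api_key/%%api_key%%/fileid/%%fileid%%/showinstanceid/%%showinstanceid%%',
}
url = "http://%s:%s/%s/%s" % (config['base_url'], config['base_port'],
                              config['api_base'], config['upload_recorded'])
url = url.replace('%%api_key%%', config['api_key'])
url = url.replace('%%fileid%%', str(42))
url = url.replace('%%showinstanceid%%', str(7))
# -> http://localhost:80/api/upload-recorded/format/json/api_key/XXXXXXXX/fileid/42/showinstanceid/7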
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
import unittest
import os
import sys
from api_clients import api_client as apc

class TestApiClient(unittest.TestCase):
    def setUp(self):
        test_path = '/home/rudi/Airtime/python_apps/media-monitor2/tests/api_client.cfg'
        if not os.path.exists(test_path):
            print("path for config does not exist: '%s' % test_path")
            # TODO : is there a cleaner way to exit the unit testing?
            sys.exit(1)
        self.apc = apc.AirtimeApiClient(config_path=test_path)
        self.apc.register_component("api-client-tester")
        # All of the following requests should error out in some way
        self.bad_requests = [
            { 'mode' : 'dang it', 'is_record' : 0 },
            { 'mode' : 'damn frank', 'is_record' : 1 },
            { 'no_mode' : 'at_all' }, ]

    def test_bad_requests(self):
        responses = self.apc.send_media_monitor_requests(self.bad_requests, dry=True)
        for response in responses:
            self.assertTrue( 'key' in response )
            self.assertTrue( 'error' in response )
            print( "Response: '%s'" % response )

    # We don't actually test any well formed requests because it is more
    # involved

if __name__ == '__main__': unittest.main()
@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*-
import unittest
import pprint

from media.monitor.config import MMConfig
from media.monitor.exceptions import NoConfigFile, ConfigAccessViolation

pp = pprint.PrettyPrinter(indent=4)

class TestMMConfig(unittest.TestCase):
    def setUp(self):
        self.real_config = MMConfig("./test_config.cfg")
        #pp.pprint(self.real_config.cfg.dict)

    def test_bad_config(self):
        self.assertRaises( NoConfigFile, lambda : MMConfig("/fake/stuff/here") )

    def test_no_set(self):
        def myf(): self.real_config['bad'] = 'change'
        self.assertRaises( ConfigAccessViolation, myf )

    def test_copying(self):
        k = 'list_value_testing'
        mycopy = self.real_config[k]
        mycopy.append("another element")
        self.assertTrue( len(mycopy) , len(self.real_config[k]) + 1 )

if __name__ == '__main__': unittest.main()
@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*-
import unittest
import json

from media.monitor.airtime import AirtimeNotifier, AirtimeMessageReceiver
from mock import patch, Mock
from media.monitor.config import MMConfig

def filter_ev(d): return { i : j for i,j in d.iteritems() if i != 'event_type' }

class TestReceiver(unittest.TestCase):
    def setUp(self):
        # TODO : properly mock this later
        cfg = {}
        self.amr = AirtimeMessageReceiver(cfg)

    def test_supported_messages(self):
        self.assertTrue( len(self.amr.supported_messages()) > 0 )

    def test_supported(self):
        # Every supported message should fire something
        for event_type in self.amr.supported_messages():
            msg = { 'event_type' : event_type, 'extra_param' : 123 }
            filtered = filter_ev(msg)
            with patch.object(self.amr, 'execute_message') as mock_method:
                mock_method.side_effect = None
                ret = self.amr.message(msg)
                self.assertTrue(ret)
                mock_method.assert_called_with(event_type, filtered)

    def test_no_mod_message(self):
        ev = { 'event_type' : 'new_watch', 'directory' : 'something here' }
        filtered = filter_ev(ev)
        with patch.object(self.amr, 'execute_message') as mock_method:
            mock_method.return_value = "tested"
            ret = self.amr.message(ev)
            self.assertTrue( ret ) # message passing worked
            mock_method.assert_called_with(ev['event_type'], filtered)
            # test that our copy of the message does not get modified
            self.assertTrue( 'event_type' in ev )

class TestAirtimeNotifier(unittest.TestCase):
    def test_handle_message(self):
        #from configobj import ConfigObj
        test_cfg = MMConfig('./test_config.cfg')
        ran = [False]
        class MockReceiver(object):
            def message(me,m):
                self.assertTrue( 'event_type' in m )
                self.assertEqual( m['path'], '/bs/path' )
                ran[0] = True
        airtime = AirtimeNotifier(cfg=test_cfg, message_receiver=MockReceiver())
        m1 = Mock()
        m1.ack = "ack'd message"
        m2 = Mock()
        m2.body = json.dumps({ 'event_type' : 'file_delete', 'path' : '/bs/path' })
        airtime.handle_message(body=m1,message=m2)
        self.assertTrue( ran[0] )

if __name__ == '__main__': unittest.main()
@@ -0,0 +1,24 @@
api_client = 'airtime'

# where the binary files live
bin_dir = '/usr/lib/airtime/media-monitor'

# where the logging files live
log_dir = '/var/log/airtime/media-monitor'


############################################
# RabbitMQ settings                        #
############################################
rabbitmq_host = 'localhost'
rabbitmq_user = 'guest'
rabbitmq_password = 'guest'
rabbitmq_vhost = '/'

############################################
# Media-Monitor preferences                #
############################################
check_filesystem_events = '5'
check_airtime_events = '30'

list_value_testing = 'val1', 'val2', 'val3'
@@ -25,7 +25,6 @@ export PYTHONPATH=${api_client_path}:$PYTHONPATH
export LC_ALL=`cat /etc/default/locale | grep "LANG=" | cut -d= -f2 | tr -d "\n\""`
export TERM=xterm

# Note the -u when calling python! we need it to get unbuffered binary stdout and stderr
exec python -u ${pypo_path}${pypo_script} > /var/log/airtime/pypo/py-interpreter.log 2>&1
exec python ${pypo_path}${pypo_script} > /var/log/airtime/pypo/py-interpreter.log 2>&1

# EOF
@@ -42,7 +42,7 @@ def getDateTimeObj(time):
    # - perhaps validate the input before doing dangerous casts?
    # - rename this function to follow the standard convention
    # - rename time to something else so that the module name does not get
    # shadowed
    # shadowed
    # - add docstring to document all behaviour of this function
    timeinfo = time.split(" ")
    date = [ int(x) for x in timeinfo[0].split("-") ]
@@ -134,6 +134,9 @@ class ShowRecorder(Thread):
        try:
            date = self.start_time
            md = date.split(" ")
            # TODO : rename 'time' variable to something better so that there
            # is no naming conflicts with the time module that is being
            # imported
            time = md[1].replace(":", "-")
            self.logger.info("time: %s" % time)
