cc-4105: Made the config object persist its data when it is garbage collected. Added more tests
This commit is contained in:
parent 76cac68fe7
commit 12aa76b0a8
@@ -2,6 +2,7 @@
 from kombu.messaging import Exchange, Queue, Consumer
 from kombu.connection import BrokerConnection
 import json
+import copy

 from media.monitor.log import Loggable

@@ -63,15 +64,25 @@ class AirtimeMessageReceiver(Loggable):
     def message(self, msg):
         """
         This method is called by an AirtimeNotifier instance that consumes the Rabbit MQ events
-        that trigger this.
+        that trigger this. The method return true when the event was executed and false when it
+        wasn't
         """
+        msg = copy.deepcopy(msg)
         if msg['event_type'] in self.dispatch_table:
-            # Perhaps we should get rid of the event_type key?
-            self.logger.info("Handling RabbitMQ message: '%s'" % msg['event_type'])
-            self.dispatch_table['event_type'](msg)
+            evt = msg['event_type']
+            del msg['event_type']
+            self.logger.info("Handling RabbitMQ message: '%s'" % evt)
+            self.execute_message(evt,msg)
+            return True
         else:
             self.logger.info("Received invalid message with 'event_type': '%s'" % msg['event_type'])
             self.logger.info("Message details: %s" % str(msg))
+            return False
+    def execute_message(self,evt,message):
+        self.dispatch_table[evt](message)
+
+    def supported_messages(self):
+        return self.dispatch_table.keys()
+
     # Handler methods - Should either fire the events directly with
     # pydispatcher or do the necessary changes on the filesystem that will fire
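The reworked message() above deep-copies the incoming dict before mutating it, so deleting the 'event_type' key cannot leak back into whatever dict the RabbitMQ consumer still holds, and the actual dispatch now goes through execute_message(), which gives the tests a single seam to patch. A minimal standalone sketch of that flow follows; the handler name and sample payload are made up for illustration and are not part of the commit:

    import copy

    class ToyReceiver(object):
        def __init__(self):
            # hypothetical handler keyed by event type
            self.dispatch_table = {'md_update': self.md_update}

        def message(self, msg):
            msg = copy.deepcopy(msg)           # never mutate the caller's dict
            if msg['event_type'] in self.dispatch_table:
                evt = msg['event_type']
                del msg['event_type']          # handlers only see the payload
                self.execute_message(evt, msg)
                return True
            return False

        def execute_message(self, evt, message):
            self.dispatch_table[evt](message)  # single seam that tests can patch

        def md_update(self, payload):
            print("md_update got %s" % payload)

    receiver = ToyReceiver()
    original = {'event_type': 'md_update', 'path': '/srv/music/track.mp3'}
    assert receiver.message(original) is True
    assert 'event_type' in original            # caller's dict is untouched

This also fixes the old code path, which looked up the literal key 'event_type' in the dispatch table instead of the event's actual name.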
@@ -33,4 +33,10 @@ class MMConfig(Loggable):
     def haxxor_set(self, key, value): self.cfg[key] = value
     def haxxor_get(self, key): return self.cfg[key]

+    def __del__(self):
+        """
+        persist any changes made whenever this object is garbage collected
+        """
+        self.save()
+
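The __del__ hook added above is what the commit title refers to: whatever was changed through haxxor_set gets written back when the config object is reclaimed. A small sketch of the pattern, with a stand-in save() (MMConfig's real save() is not shown in this diff, so the file format below is an assumption):

    class ToyConfig(object):
        def __init__(self, path):
            self.path = path
            self.cfg = {}

        def haxxor_set(self, key, value): self.cfg[key] = value

        def save(self):
            # stand-in for MMConfig.save(); the real implementation is not in this diff
            with open(self.path, 'w') as f:
                for k, v in sorted(self.cfg.items()):
                    f.write("%s = %r\n" % (k, v))

        def __del__(self):
            # persist pending changes when the object is garbage collected
            self.save()

    c = ToyConfig('/tmp/toy_config.cfg')
    c.haxxor_set('index_path', '/tmp/index')
    del c   # refcount drops to zero; CPython calls __del__ and the file is written

Worth noting as general CPython behaviour (not something this commit changes): __del__ is not guaranteed to run for objects still alive at interpreter shutdown, so an explicit save() on clean exit remains the safer path.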
@@ -0,0 +1,115 @@
+bin_dir = "/usr/lib/airtime/api_clients"
+
+#############################
+## Common
+#############################
+
+# Value needed to access the API
+api_key = '3MP2IUR45E6KYQ01CUYK'
+
+# Path to the base of the API
+api_base = 'api'
+
+# URL to get the version number of the server API
+version_url = 'version/api_key/%%api_key%%'
+
+#URL to register a components IP Address with the central web server
+register_component = 'register-component/format/json/api_key/%%api_key%%/component/%%component%%'
+
+# Hostname
+base_url = 'localhost'
+base_port = 80
+
+#############################
+## Config for Media Monitor
+#############################
+
+# URL to setup the media monitor
+media_setup_url = 'media-monitor-setup/format/json/api_key/%%api_key%%'
+
+# Tell Airtime the file id associated with a show instance.
+upload_recorded = 'upload-recorded/format/json/api_key/%%api_key%%/fileid/%%fileid%%/showinstanceid/%%showinstanceid%%'
+
+# URL to tell Airtime to update file's meta data
+update_media_url = 'reload-metadata/format/json/api_key/%%api_key%%/mode/%%mode%%'
+
+# URL to tell Airtime we want a listing of all files it knows about
+list_all_db_files = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%'
+
+# URL to tell Airtime we want a listing of all dirs its watching (including the stor dir)
+list_all_watched_dirs = 'list-all-watched-dirs/format/json/api_key/%%api_key%%'
+
+# URL to tell Airtime we want to add watched directory
+add_watched_dir = 'add-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'
+
+# URL to tell Airtime we want to add watched directory
+remove_watched_dir = 'remove-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'
+
+# URL to tell Airtime we want to add watched directory
+set_storage_dir = 'set-storage-dir/format/json/api_key/%%api_key%%/path/%%path%%'
+
+# URL to tell Airtime about file system mount change
+update_fs_mount = 'update-file-system-mount/format/json/api_key/%%api_key%%'
+
+# URL to tell Airtime about file system mount change
+handle_watched_dir_missing = 'handle-watched-dir-missing/format/json/api_key/%%api_key%%/dir/%%dir%%'
+
+#############################
+## Config for Recorder
+#############################
+
+# URL to get the schedule of shows set to record
+show_schedule_url = 'recorded-shows/format/json/api_key/%%api_key%%'
+
+# URL to upload the recorded show's file to Airtime
+upload_file_url = 'upload-file/format/json/api_key/%%api_key%%'
+
+# URL to commit multiple updates from media monitor at the same time
+
+reload_metadata_group = 'reload-metadata-group/format/json/api_key/%%api_key%%'
+
+#number of retries to upload file if connection problem
+upload_retries = 3
+
+#time to wait between attempts to upload file if connection problem (in seconds)
+upload_wait = 60
+
+################################################################################
+# Uncomment *one of the sets* of values from the API clients below, and comment
+# out all the others.
+################################################################################
+
+#############################
+## Config for Pypo
+#############################
+
+# Schedule export path.
+# %%from%% - starting date/time in the form YYYY-MM-DD-hh-mm
+# %%to%% - starting date/time in the form YYYY-MM-DD-hh-mm
+export_url = 'schedule/api_key/%%api_key%%'
+
+get_media_url = 'get-media/file/%%file%%/api_key/%%api_key%%'
+
+# Update whether a schedule group has begun playing.
+update_item_url = 'notify-schedule-group-play/api_key/%%api_key%%/schedule_id/%%schedule_id%%'
+
+# Update whether an audio clip is currently playing.
+update_start_playing_url = 'notify-media-item-start-play/api_key/%%api_key%%/media_id/%%media_id%%/schedule_id/%%schedule_id%%'
+
+# URL to tell Airtime we want to get stream setting
+get_stream_setting = 'get-stream-setting/format/json/api_key/%%api_key%%/'
+
+#URL to update liquidsoap status
+update_liquidsoap_status = 'update-liquidsoap-status/format/json/api_key/%%api_key%%/msg/%%msg%%/stream_id/%%stream_id%%/boot_time/%%boot_time%%'
+
+#URL to check live stream auth
+check_live_stream_auth = 'check-live-stream-auth/format/json/api_key/%%api_key%%/username/%%username%%/password/%%password%%/djtype/%%djtype%%'
+
+#URL to update source status
+update_source_status = 'update-source-status/format/json/api_key/%%api_key%%/sourcename/%%sourcename%%/status/%%status%%'
+
+get_bootstrap_info = 'get-bootstrap-info/format/json/api_key/%%api_key%%'
+
+get_files_without_replay_gain = 'get-files-without-replay-gain/api_key/%%api_key%%/dir_id/%%dir_id%%'
+
+update_replay_gain_value = 'update-replay-gain-value/api_key/%%api_key%%'
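Every URL in this new api_client config is a relative template whose %%name%% placeholders the API client fills in before issuing a request against base_url/base_port/api_base. The helper below only illustrates that substitution convention (the real api_client code is not part of this commit, and the argument values are made up):

    import re

    def expand(template, **params):
        # replace each %%name%% placeholder with the corresponding parameter
        return re.sub(r'%%(\w+)%%', lambda m: str(params[m.group(1)]), template)

    update_media_url = 'reload-metadata/format/json/api_key/%%api_key%%/mode/%%mode%%'
    print(expand(update_media_url, api_key='3MP2IUR45E6KYQ01CUYK', mode='modify'))
    # reload-metadata/format/json/api_key/3MP2IUR45E6KYQ01CUYK/mode/modify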
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+import unittest
+import pprint
+
+from media.monitor.config import MMConfig
+from media.monitor.exceptions import NoConfigFile, ConfigAccessViolation
+
+pp = pprint.PrettyPrinter(indent=4)
+
+class TestMMConfig(unittest.TestCase):
+    def setUp(self):
+        self.real_config = MMConfig("./test_config.cfg")
+        #pp.pprint(self.real_config.cfg.dict)
+
+    def test_bad_config(self):
+        self.assertRaises( NoConfigFile, lambda : MMConfig("/fake/stuff/here") )
+
+    def test_no_set(self):
+        def myf(): self.real_config['bad'] = 'change'
+        self.assertRaises( ConfigAccessViolation, myf )
+
+    def test_copying(self):
+        k = 'list_value_testing'
+        mycopy = self.real_config[k]
+        mycopy.append("another element")
+        self.assertTrue( len(mycopy) , len(self.real_config[k]) + 1 )
+
+if __name__ == '__main__': unittest.main()
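test_no_set and test_copying pin down MMConfig's read-only, copy-on-read behaviour: plain item assignment raises ConfigAccessViolation (only haxxor_set may write), and item access hands back a copy so callers cannot mutate the stored value. MMConfig's actual accessors are not in this diff, so the sketch below is an assumption about what such behaviour could look like, with a local stand-in for the ConfigAccessViolation that really lives in media.monitor.exceptions:

    import copy

    class ConfigAccessViolation(Exception): pass

    class ToyReadOnlyConfig(object):
        def __init__(self, d):
            self.cfg = d

        def __getitem__(self, key):
            # hand out a copy so callers cannot mutate the stored value
            return copy.deepcopy(self.cfg[key])

        def __setitem__(self, key, value):
            # writes must go through an explicit haxxor_set
            raise ConfigAccessViolation("use haxxor_set to modify '%s'" % key)

        def haxxor_set(self, key, value): self.cfg[key] = value

    cfg = ToyReadOnlyConfig({'list_value_testing': ['val1', 'val2', 'val3']})
    lst = cfg['list_value_testing']
    lst.append('another element')
    assert len(cfg['list_value_testing']) == 3   # stored list is untouched

(As committed, test_copying passes the second length as assertTrue's msg argument; assertEqual is what would actually compare the two lengths.)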
@ -0,0 +1,23 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from media.monitor.airtime import AirtimeNotifier, AirtimeMessageReceiver
|
||||||
|
from mock import patch
|
||||||
|
|
||||||
|
class TestReceiver(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
# TODO : properly mock this later
|
||||||
|
cfg = {}
|
||||||
|
self.amr = AirtimeMessageReceiver(cfg)
|
||||||
|
|
||||||
|
def test_message(self):
|
||||||
|
for event_type in self.amr.supported_messages():
|
||||||
|
msg = { 'event_type' : event_type, 'extra_param' : 123 }
|
||||||
|
filtered = { i : j for i,j in msg.iteritems() if i != 'event_type' }
|
||||||
|
with patch.object(self.amr, 'execute_message') as mock_method:
|
||||||
|
mock_method.side_effect = None
|
||||||
|
ret = self.amr.message(msg)
|
||||||
|
self.assertTrue(ret)
|
||||||
|
mock_method.assert_called_with(event_type, filtered)
|
||||||
|
|
||||||
|
if __name__ == '__main__': unittest.main()
|
|
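The loop above drives message() once per key in the receiver's dispatch table and patches execute_message so no real handler runs; the assertion then checks that 'event_type' has been stripped from the payload before dispatch. The test works because message() resolves self.execute_message at call time, so patch.object can swap the attribute on the instance for the duration of the with block and restore it afterwards. A tiny generic sketch of that mechanic, with invented names, assuming only the mock package the test already imports:

    from mock import patch

    class Toy(object):
        def work(self, x): return self.step(x)       # attribute resolved at call time
        def step(self, x): raise RuntimeError("real handler not wired up")

    t = Toy()
    with patch.object(t, 'step') as step:            # swap the instance attribute
        step.return_value = 'handled'
        assert t.work(41) == 'handled'
        step.assert_called_with(41)
    assert t.step.__name__ == 'step'                 # original restored after the block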
@@ -0,0 +1,24 @@
+api_client = "airtime"
+
+# where the binary files live
+bin_dir = '/usr/lib/airtime/media-monitor'
+
+# where the logging files live
+log_dir = '/var/log/airtime/media-monitor'
+
+
+############################################
+# RabbitMQ settings #
+############################################
+rabbitmq_host = 'localhost'
+rabbitmq_user = 'guest'
+rabbitmq_password = 'guest'
+rabbitmq_vhost = '/'
+
+############################################
+# Media-Monitor preferences #
+############################################
+check_filesystem_events = 5 #how long to queue up events performed on the files themselves.
+check_airtime_events = 30 #how long to queue metadata input from airtime.
+
+list_value_testing = 'val1', 'val2', 'val3'
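The list_value_testing key at the bottom is what test_copying reads, which suggests this new file is the ./test_config.cfg fixture that TestMMConfig loads. The commented-out self.real_config.cfg.dict in that test also hints that a ConfigObj-style parser backs MMConfig; assuming that (it is not confirmed anywhere in this diff), the fixture's values would come out roughly like this:

    # a sketch, assuming the configobj package is what parses these files
    from configobj import ConfigObj

    cfg = ConfigObj('test_config.cfg')
    print(cfg['rabbitmq_host'])               # 'localhost' -- quotes are stripped
    print(cfg['list_value_testing'])          # ['val1', 'val2', 'val3'] -- comma-separated
                                              # values parse as a list, which is what
                                              # test_copying mutates a copy of
    print(int(cfg['check_airtime_events']))   # scalar values come back as strings; cast as needed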