Merge branch 'devel' of dev.sourcefabric.org:airtime into devel
commit 0cbd6165c4
@@ -17,7 +17,8 @@ var AIRTIME = (function(AIRTIME){
     width;
 
     function isTimeValid(time) {
-        var regExpr = new RegExp("^\\d{2}[:]\\d{2}[:]\\d{2}([.]\\d{1,6})?$");
+        //var regExpr = new RegExp("^\\d{2}[:]\\d{2}[:]\\d{2}([.]\\d{1,6})?$");
+        var regExpr = new RegExp("^\\d{2}[:]([0-5]){1}([0-9]){1}[:]([0-5]){1}([0-9]){1}([.]\\d{1,6})?$");
 
        return regExpr.test(time);
    }
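
The tightened pattern caps the minute and second fields at 59, where the old \d{2} fields accepted values like 61. A side-by-side check of the two patterns (a standalone Python sketch; the production code is the JavaScript above):

    import re

    # Old pattern: any two digits per field. New pattern: minutes and
    # seconds restricted to 00-59; hours remain unconstrained in both.
    OLD = re.compile(r"^\d{2}:\d{2}:\d{2}(\.\d{1,6})?$")
    NEW = re.compile(r"^\d{2}:[0-5][0-9]:[0-5][0-9](\.\d{1,6})?$")

    for t in ["01:30:00", "01:61:00", "99:59:59.123456"]:
        print(t, bool(OLD.match(t)), bool(NEW.match(t)))
    # 01:30:00         True True
    # 01:61:00         True False  <- invalid minutes now rejected
    # 99:59:59.123456  True True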
@@ -65,15 +65,16 @@ class BaseListener(object):
 
 class OrganizeListener(BaseListener, pyinotify.ProcessEvent, Loggable):
     def process_IN_CLOSE_WRITE(self, event):
-        self.logger.info("===> handling: '%s'" % str(event))
+        #self.logger.info("===> handling: '%s'" % str(event))
         self.process_to_organize(event)
     # got cookie
     def process_IN_MOVED_TO(self, event):
-        self.logger.info("===> handling: '%s'" % str(event))
+        #self.logger.info("===> handling: '%s'" % str(event))
         self.process_to_organize(event)
 
     def process_default(self, event):
-        self.logger.info("===> Not handling: '%s'" % str(event))
+        pass
+        #self.logger.info("===> Not handling: '%s'" % str(event))
 
     def flush_events(self, path):
         """
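
pyinotify dispatches by method name: a ProcessEvent subclass handles an event type by defining process_<EVENT_NAME>, and process_default catches anything else in the watch mask. A minimal standalone sketch of that convention (hypothetical watcher, not part of this commit):

    import pyinotify

    class Watcher(pyinotify.ProcessEvent):
        # Fired when a file opened for writing is closed.
        def process_IN_CLOSE_WRITE(self, event):
            print("closed after write:", event.pathname)

        # Fired when a file is moved into the watched directory.
        def process_IN_MOVED_TO(self, event):
            print("moved in:", event.pathname)

        # Fallback for any other event in the mask.
        def process_default(self, event):
            pass

    wm = pyinotify.WatchManager()
    wm.add_watch('/tmp', pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO)
    pyinotify.Notifier(wm, Watcher()).loop()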
@@ -133,8 +134,8 @@ class StoreWatchListener(BaseListener, Loggable, pyinotify.ProcessEvent):
     @IncludeOnly(mmp.supported_extensions)
     def process_delete(self, event):
         evt = None
-        if event.dir: evt = DeleteDir(event)
-        else: evt = DeleteFile(event)
+        if event.dir : evt = DeleteDir(event)
+        else : evt = DeleteFile(event)
         dispatcher.send(signal=self.signal, sender=self, event=evt)
         return evt
 
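
process_delete republishes the inotify event over PyDispatcher: receivers subscribe with dispatcher.connect and are invoked synchronously by dispatcher.send, with extra keyword arguments (here event) forwarded to each receiver. A minimal sketch of that flow (standalone, with a hypothetical channel name):

    from pydispatch import dispatcher

    SIGNAL = 'watch'  # hypothetical channel, standing in for self.signal

    def on_event(sender, event):
        print("got %r from %r" % (event, sender))

    dispatcher.connect(on_event, signal=SIGNAL, sender=dispatcher.Any)
    dispatcher.send(signal=SIGNAL, sender='demo', event='DeleteFile(...)')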
@@ -37,6 +37,7 @@ class Manager(Loggable):
         self.organize_channel = 'organize'
         self.watch_listener = StoreWatchListener(signal = self.watch_channel)
         # TODO : change this to a weak ref
+        # TODO : get rid of this hack once cc-4235 is fixed
         self.__timeout_thread = ManagerTimeout(self)
         self.__timeout_thread.daemon = True
         self.__timeout_thread.start()
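
The order of the two lines above start() matters: a thread's daemon flag must be set before the thread starts (setting it afterwards raises RuntimeError), and daemon threads do not keep the process alive at shutdown, which is what the timeout hack wants. A minimal sketch:

    import threading, time

    def heartbeat():
        while True:
            time.sleep(1)  # stand-in for ManagerTimeout's periodic work

    t = threading.Thread(target=heartbeat)
    t.daemon = True   # must precede start(); daemon threads die with the process
    t.start()
    # the main thread can now exit without joining the heartbeat loop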
@@ -229,20 +229,13 @@ def normalized_metadata(md, original_path):
     for k,v in new_md.iteritems(): new_md[k] = unicode(v).replace('/','-')
     # Specific rules that are applied in a per attribute basis
     format_rules = {
-        # It's very likely that the following isn't strictly necessary. But the
-        # old code would cast MDATA_KEY_TRACKNUMBER to an integer as a
-        # byproduct of formatting the track number to 2 digits.
         'MDATA_KEY_TRACKNUMBER' : parse_int,
         'MDATA_KEY_BITRATE' : lambda x: str(int(x) / 1000) + "kbps",
         'MDATA_KEY_FILEPATH' : lambda x: os.path.normpath(x),
         'MDATA_KEY_MIME' : lambda x: x.replace('-','/'),
         'MDATA_KEY_BPM' : lambda x: x[0:8],
     }
-    # note that we could have saved a bit of code by rewriting new_md using
-    # defaultdict(lambda x: "unknown"). But it seems to be too implicit and
-    # could possibly lead to subtle bugs down the road. Plus the following
-    # approach gives us the flexibility to use different defaults for different
-    # attributes
+
     new_md = remove_whitespace(new_md)
     new_md = apply_rules_dict(new_md, format_rules)
     new_md = default_to(dictionary=new_md, keys=['MDATA_KEY_TITLE'],
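
format_rules maps metadata keys to one-argument normalizers that apply_rules_dict then applies key by key. The helper itself is not part of this diff; a plausible reimplementation, assuming it leaves unlisted keys untouched:

    def apply_rules_dict(d, rules):
        # Return a copy of d with each rule applied to its key, when present.
        new_d = dict(d)
        for key, rule in rules.items():
            if key in new_d:
                new_d[key] = rule(new_d[key])
        return new_d

    md = {'MDATA_KEY_MIME': 'audio-mpeg', 'MDATA_KEY_BPM': '120.00000000'}
    print(apply_rules_dict(md, {
        'MDATA_KEY_MIME' : lambda x: x.replace('-', '/'),  # -> 'audio/mpeg'
        'MDATA_KEY_BPM'  : lambda x: x[0:8],               # -> '120.0000'
    }))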
@@ -287,7 +280,7 @@ def organized_path(old_path, root_path, orig_md):
         yyyy, mm, _ = normal_md['MDATA_KEY_YEAR'].split('-',3)
         path = os.path.join(root_path, yyyy, mm)
         filepath = os.path.join(path,fname)
-    elif normal_md['MDATA_KEY_TRACKNUMBER'] == unicode_unknown:
+    elif len(normal_md['MDATA_KEY_TRACKNUMBER']) == 0:
         fname = u'%s-%s.%s' % (normal_md['MDATA_KEY_TITLE'],
                 normal_md['MDATA_KEY_BITRATE'], ext)
         path = os.path.join(root_path, normal_md['MDATA_KEY_CREATOR'],
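
The changed guard means a missing track number now reaches organized_path as an empty string rather than as the unicode_unknown sentinel, presumably matching the cleanup in normalized_metadata above. An illustration of the new predicate (hypothetical helper, for clarity only):

    def has_tracknumber(normal_md):
        # Post-change: "no track number" is u'', not a sentinel string.
        return len(normal_md['MDATA_KEY_TRACKNUMBER']) > 0

    print(has_tracknumber({'MDATA_KEY_TRACKNUMBER': u''}))    # False: title-bitrate name
    print(has_tracknumber({'MDATA_KEY_TRACKNUMBER': u'07'}))  # True: numbered name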
@@ -1,4 +1,4 @@
 #export PYTHONPATH="/home/rudi/Airtime/python_apps/:/home/rudi/Airtime/python_apps/media-monitor2/"
 PYTHONPATH='/home/rudi/Airtime/python_apps/:/home/rudi/Airtime/python_apps/media-monitor2/'
 export PYTHONPATH
-python ./mm2.py --config="/home/rudi/Airtime/python_apps/media-monitor2/tests/live_client.cfg" --apiclient="/home/rudi/Airtime/python_apps/media-monitor2/tests/live_client.cfg" --log="/home/rudi/Airtime/python_apps/media-monitor/logging.cfg"
+python ./mm2.py --config="/etc/airtime/media-monitor.cfg" --apiclient="/etc/airtime/api_client.cfg" --log="/home/rudi/Airtime/python_apps/media-monitor/logging.cfg"
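
The launcher now points --config and --apiclient at the installed /etc/airtime/ files instead of the test configs in the developer checkout; PYTHONPATH is still exported so mm2.py can import the media package from the source tree. A quick standalone check that the export is visible to Python:

    import os, sys

    # Each PYTHONPATH entry should appear on sys.path of a fresh interpreter.
    for entry in os.environ.get('PYTHONPATH', '').split(os.pathsep):
        if entry:
            print(entry, entry in sys.path)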
@@ -57,7 +57,7 @@ def main(global_config, api_client_config, log_config,
     if not os.path.exists(config['index_path']):
         log.info("Attempting to create index file:...")
         try:
-            with open(config['index_path']) as f: f.write(" ")
+            with open(config['index_path'], 'w') as f: f.write(" ")
         except Exception as e:
             log.info("Failed to create index file with exception: %s" % str(e))
         else:
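
The one-character fix is the point of this hunk: open() defaults to read mode, so when index_path does not exist the old line raises IOError before f.write ever runs, and the except branch logs a failure on every start. Mode 'w' creates the file. A minimal reproduction:

    import os, tempfile

    path = os.path.join(tempfile.mkdtemp(), 'index')
    try:
        with open(path) as f:  # default mode 'r': the file must already exist
            f.write(" ")
    except IOError as e:
        print("old code fails:", e)

    with open(path, 'w') as f:  # 'w' creates the file and permits writing
        f.write(" ")
    print("created:", os.path.exists(path))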
@@ -0,0 +1,77 @@
+import os, shutil
+import time
+import pyinotify
+import unittest
+from pydispatch import dispatcher
+
+from media.monitor.listeners import OrganizeListener
+from media.monitor.events import OrganizeFile
+
+from os.path import join, normpath, abspath
+
+def create_file(p):
+    with open(p, 'w') as f: f.write(" ")
+
+class TestOrganizeListener(unittest.TestCase):
+    def setUp(self):
+        self.organize_path = 'test_o'
+        self.sig = 'org'
+        def my_abs_path(x):
+            return normpath(join(os.getcwd(), x))
+        self.sample_files = [ my_abs_path(join(self.organize_path, f))
+                              for f in [ "gogi.mp3",
+                                         "gio.mp3",
+                                         "mimino.ogg" ] ]
+        os.mkdir(self.organize_path)
+
+    def test_flush_events(self):
+        org = self.create_org()
+        self.create_sample_files()
+        received = [0]
+        def pass_event(sender, event):
+            if isinstance(event, OrganizeFile):
+                received[0] += 1
+                self.assertTrue( abspath(event.path) in self.sample_files )
+        dispatcher.connect(pass_event, signal=self.sig, sender=dispatcher.Any,
+                           weak=True)
+        org.flush_events( self.organize_path )
+        self.assertEqual( received[0], len(self.sample_files) )
+        self.delete_sample_files()
+
+    def test_process(self):
+        org = self.create_org()
+        received = [0]
+        def pass_event(sender, event):
+            if isinstance(event, OrganizeFile):
+                self.assertTrue( event.path in self.sample_files )
+                received[0] += 1
+        dispatcher.connect(pass_event, signal=self.sig, sender=dispatcher.Any,
+                           weak=True)
+        wm = pyinotify.WatchManager()
+        def stopper(notifier):
+            return received[0] == len(self.sample_files)
+        tn = pyinotify.ThreadedNotifier(wm, default_proc_fun=org)
+        tn.daemon = True
+        tn.start()
+        wm.add_watch(self.organize_path, pyinotify.ALL_EVENTS, rec=True,
+                     auto_add=True)
+        time.sleep(0.5)
+        self.create_sample_files()
+        time.sleep(1)
+        self.assertEqual( len(self.sample_files), received[0] )
+        self.delete_sample_files()
+
+    def tearDown(self):
+        shutil.rmtree(self.organize_path)
+
+    def create_sample_files(self):
+        for f in self.sample_files: create_file(f)
+
+    def delete_sample_files(self):
+        for f in self.sample_files: os.remove(f)
+
+    def create_org(self):
+        return OrganizeListener( signal=self.sig )
+
+if __name__ == '__main__': unittest.main()
+
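
test_process relies on fixed time.sleep calls to give the ThreadedNotifier time to deliver events, which can be flaky on a loaded machine. A common hardening, sketched under the same assumptions as the test above, is to poll with a deadline instead:

    import time

    def wait_until(predicate, timeout=5.0, interval=0.05):
        # Poll until predicate() holds or the timeout elapses.
        deadline = time.time() + timeout
        while time.time() < deadline:
            if predicate():
                return True
            time.sleep(interval)
        return predicate()

    # e.g. replacing time.sleep(1) before the assertion:
    #     self.assertTrue(wait_until(lambda: received[0] == len(self.sample_files)))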