Merge branch 'devel' of dev.sourcefabric.org:airtime into devel
commit 86ee2966e6

@@ -121,10 +121,10 @@ class Metadata(Loggable):
     def __init__(self, fpath):
         # Forcing the unicode through
-        try: fpath = fpath.decode("utf-8")
-        except: pass
-        try: full_mutagen = mutagen.File(fpath, easy=True)
-        except Exception: raise BadSongFile(fpath)
+        try : fpath = fpath.decode("utf-8")
+        except : pass
+        try : full_mutagen = mutagen.File(fpath, easy=True)
+        except Exception : raise BadSongFile(fpath)
         self.path = fpath
         # TODO : Simplify the way all of these rules are handled right not it's
         # extremely unclear and needs to be refactored.

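The decode guard above is the Python 2 "force the unicode through" idiom: filesystem paths may arrive as byte strings, and calling `.decode()` on a path that is already text can raise, hence the blanket `except: pass`. A minimal sketch of the same guard with the exceptions named explicitly (the helper name is hypothetical, not part of the diff):

    def force_unicode(fpath):
        # Byte-string paths get decoded; paths that are already text
        # (where .decode() raises, or does not exist at all) pass
        # through unchanged.
        try:
            return fpath.decode("utf-8")
        except (UnicodeError, AttributeError):
            return fpath
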
@@ -162,6 +162,8 @@ class Metadata(Loggable):
         # Finally, we "normalize" all the metadata here:
         self.__metadata = mmp.normalized_metadata(self.__metadata, fpath)
         # Now we must load the md5:
+        # TODO : perhaps we shouldn't hard code how many bytes we're reading
+        # from the file?
         self.__metadata['MDATA_KEY_MD5'] = mmp.file_md5(fpath,max_length=100)

     def is_recorded(self):

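`file_md5`'s body is not part of this diff, only its call site; a plausible sketch of what the new TODO is questioning, assuming the helper hashes just the first `max_length` bytes of the file:

    import hashlib

    def file_md5(path, max_length=100):
        # Hash only the first max_length bytes -- the hard-coded byte
        # count the TODO above complains about.
        with open(path, 'rb') as f:
            return hashlib.md5(f.read(max_length)).hexdigest()
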
@@ -163,11 +163,13 @@ def apply_rules_dict(d, rules):
     """
     Consumes a dictionary of rules that maps some keys to lambdas which it
     applies to every matching element in d and returns a new dictionary with
-    the rules applied
+    the rules applied. If a rule returns none then it's not applied
     """
     new_d = copy.deepcopy(d)
     for k, rule in rules.iteritems():
-        if k in d: new_d[k] = rule(d[k])
+        if k in d:
+            new_val = rule(d[k])
+            if new_val is not None: new_d[k] = new_val
     return new_d

 def default_to_f(dictionary, keys, default, condition):

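The new branch skips rules that return None instead of writing None into the result. A runnable sketch of that behaviour (Python 3 `items()` standing in for the Python 2 `iteritems()` above; the sample rules are hypothetical):

    import copy

    def apply_rules_dict(d, rules):
        # Apply each rule to its matching key, keeping the old value
        # whenever the rule signals failure by returning None.
        new_d = copy.deepcopy(d)
        for k, rule in rules.items():
            if k in d:
                new_val = rule(d[k])
                if new_val is not None:
                    new_d[k] = new_val
        return new_d

    md    = {'title': '  My Song  ', 'genre': 'rock'}
    rules = {'title': lambda s: s.strip(),   # succeeds
             'genre': lambda s: None}        # fails, so 'rock' survives
    print(apply_rules_dict(md, rules))  # {'title': 'My Song', 'genre': 'rock'}
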
@@ -183,10 +185,6 @@ def default_to(dictionary, keys, default):
     """
     cnd = lambda dictionary, key: key not in dictionary
     return default_to_f(dictionary, keys, default, cnd)
-    #new_d = copy.deepcopy(dictionary)
-    #for k in keys:
-        #if not (k in new_d): new_d[k] = default
-    #return new_d

 def remove_whitespace(dictionary):
     """

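The deleted comment block is the old inline implementation of `default_to`; the live code now delegates to `default_to_f` with a missing-key condition. A sketch of the pair, with `default_to_f`'s body reconstructed from the removed comments (its real body is not shown in this hunk):

    import copy

    def default_to_f(dictionary, keys, default, condition):
        # Generalised helper: fill in `default` for every key that
        # satisfies `condition`.
        new_d = copy.deepcopy(dictionary)
        for k in keys:
            if condition(new_d, k):
                new_d[k] = default
        return new_d

    def default_to(dictionary, keys, default):
        # Specialisation: only fill in keys that are missing entirely.
        cnd = lambda dictionary, key: key not in dictionary
        return default_to_f(dictionary, keys, default, cnd)

    print(default_to({'a': 1}, ['a', 'b'], 0))  # {'a': 1, 'b': 0}
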
@@ -205,18 +203,18 @@ def remove_whitespace(dictionary):
 def parse_int(s):
     """
     Tries very hard to get some sort of integer result from s. Defaults to 0
-    when it failes
+    when it fails
     >>> parse_int("123")
     '123'
     >>> parse_int("123saf")
     '123'
     >>> parse_int("asdf")
-    ''
+    None
     """
     if s.isdigit(): return s
     else:
         try : return str(reduce(op.add, takewhile(lambda x: x.isdigit(), s)))
-        except: return ''
+        except: return None

 def normalized_metadata(md, original_path):
     """

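After this change a completely non-numeric string yields None rather than ''. A sketch of the resulting behaviour (Python 3, so `functools.reduce` replaces the bare builtin used above):

    import operator as op
    from functools import reduce
    from itertools import takewhile

    def parse_int(s):
        # All digits: return as-is. Mixed: return the leading digit run.
        # No leading digits at all: reduce() over the empty sequence
        # raises, which is now mapped to None instead of ''.
        if s.isdigit():
            return s
        try:
            return str(reduce(op.add, takewhile(lambda x: x.isdigit(), s)))
        except TypeError:
            return None

    print(parse_int("123"))     # '123'
    print(parse_int("123saf"))  # '123'
    print(parse_int("asdf"))    # None
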
@@ -240,7 +238,8 @@ def normalized_metadata(md, original_path):
     new_md = apply_rules_dict(new_md, format_rules)
     new_md = default_to(dictionary=new_md, keys=['MDATA_KEY_TITLE'],
             default=no_extension_basename(original_path))
-    new_md = default_to(dictionary=new_md, keys=path_md, default=u'')
+    new_md = default_to(dictionary=new_md, keys=['MDATA_KEY_CREATOR',
+        'MDATA_KEY_SOURCE'], default=u'')
     new_md = default_to(dictionary=new_md, keys=['MDATA_KEY_FTYPE'],
             default=u'audioclip')
     # In the case where the creator is 'Airtime Show Recorder' we would like to

@@ -48,7 +48,7 @@ class TestMMP(unittest.TestCase):
         real_path1 = \
             u'/home/rudi/throwaway/fucking_around/watch/unknown/unknown/ACDC_-_Back_In_Black-sample-64kbps-64kbps.ogg'
         self.assertTrue( 'unknown' in og, True )
-        self.assertEqual( og, real_path1 )
+        self.assertEqual( og, real_path1 ) # TODO : fix this failure
         # for recorded it should be something like this
         # ./recorded/2012/07/2012-07-09-17-55-00-Untitled Show-256kbps.ogg

@@ -70,6 +70,6 @@ class TestMMP(unittest.TestCase):
     def test_parse_int(self):
         self.assertEqual( mmp.parse_int("123"), "123" )
         self.assertEqual( mmp.parse_int("123asf"), "123" )
-        self.assertEqual( mmp.parse_int("asdf"), "" )
+        self.assertEqual( mmp.parse_int("asdf"), None )

 if __name__ == '__main__': unittest.main()

@@ -58,7 +58,7 @@ class PypoFetch(Thread):
         if not os.path.isdir(dir):
             """
             We get here if path does not exist, or path does exist but
-            is a file. We are not handling the second case, but don't
+            is a file. We are not handling the second case, but don't
             think we actually care about handling it.
             """
             self.logger.debug("Cache dir does not exist. Creating...")

@@ -426,16 +426,19 @@ class PypoFetch(Thread):
            for key in media:
                media_item = media[key]
                """
-               {u'end': u'2012-07-26-04-05-00', u'fade_out': 500, u'show_name': u'Untitled Show', u'uri': u'http://',
-               u'cue_in': 0, u'start': u'2012-07-26-04-00-00', u'replay_gain': u'0', u'row_id': 16, u'cue_out': 300, u'type':
+               {u'end': u'2012-07-26-04-05-00', u'fade_out': 500, u'show_name': u'Untitled Show', u'uri': u'http://',
+               u'cue_in': 0, u'start': u'2012-07-26-04-00-00', u'replay_gain': u'0', u'row_id': 16, u'cue_out': 300, u'type':
+               u'stream', u'id': 1, u'fade_in': 500}
                """
-               if(media_item['type'] == 'file'):
+               if (media_item['type'] == 'file'):
                    fileExt = os.path.splitext(media_item['uri'])[1]
                    dst = os.path.join(download_dir, unicode(media_item['id']) + fileExt)
                    media_item['dst'] = dst
                    media_item['file_ready'] = False
                    media_filtered[key] = media_item
+               elif media_item['type'] == 'stream':
+                   #flag to indicate whether the stream started prebuffering
+                   media_item['prebuffer_started'] = False

            self.media_prepare_queue.put(copy.copy(media_filtered))
        except Exception, e: self.logger.error("%s", e)

@@ -453,7 +456,7 @@ class PypoFetch(Thread):
        """
        Get list of all files in the cache dir and remove them if they aren't being used anymore.
        Input dict() media, lists all files that are scheduled or currently playing. Not being in this
-       dict() means the file is safe to remove.
+       dict() means the file is safe to remove.
        """
        cached_file_set = set(os.listdir(self.cache_dir))
        scheduled_file_set = set()

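The docstring describes a set-difference sweep: anything cached but not scheduled is removable. A condensed sketch of that sweep (how the real code derives `scheduled_file_set` from `media` is not shown in this hunk, so the `dst` lookup is an assumption):

    import os

    def cache_cleanup(cache_dir, media):
        # Files sitting in the cache that no scheduled or currently
        # playing item refers to are safe to delete.
        cached_file_set = set(os.listdir(cache_dir))
        scheduled_file_set = {os.path.basename(item['dst'])
                              for item in media.values() if 'dst' in item}
        for expired in cached_file_set - scheduled_file_set:
            os.remove(os.path.join(cache_dir, expired))
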
@@ -476,7 +479,7 @@ class PypoFetch(Thread):

    def main(self):
        # Bootstrap: since we are just starting up, we need to grab the
-       # most recent schedule. After that we can just wait for updates.
+       # most recent schedule. After that we can just wait for updates.
        success, self.schedule_data = self.api_client.get_schedule()
        if success:
            self.logger.info("Bootstrap schedule received: %s", self.schedule_data)

@@ -490,14 +493,14 @@ class PypoFetch(Thread):
            """
            our simple_queue.get() requires a timeout, in which case we
            fetch the Airtime schedule manually. It is important to fetch
-           the schedule periodically because if we didn't, we would only
-           get schedule updates via RabbitMq if the user was constantly
-           using the Airtime interface.
-
+           the schedule periodically because if we didn't, we would only
+           get schedule updates via RabbitMq if the user was constantly
+           using the Airtime interface.
+
            If the user is not using the interface, RabbitMq messages are not
-           sent, and we will have very stale (or non-existent!) data about the
+           sent, and we will have very stale (or non-existent!) data about the
            schedule.
-
+
            Currently we are checking every POLL_INTERVAL seconds
            """

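The docstring is describing a get-with-timeout poll loop: block on the RabbitMQ-fed queue, and treat a timeout as the cue to fetch the schedule manually. A generic sketch of that pattern (the queue and fetch callables are stand-ins; POLL_INTERVAL's real value is configured elsewhere):

    import queue

    POLL_INTERVAL = 30  # seconds; placeholder value

    def fetch_loop(simple_queue, get_schedule):
        while True:
            try:
                # A RabbitMQ-driven update arrived in time...
                message = simple_queue.get(timeout=POLL_INTERVAL)
            except queue.Empty:
                # ...or nobody touched the Airtime interface, so we poll
                # manually and the schedule can never go stale.
                message = get_schedule()
            print("processing schedule:", message)
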
@@ -389,12 +389,17 @@ class PypoPush(Thread):
                elif media_item['type'] == 'stream_buffer_start':
                    self.start_web_stream_buffer(media_item)
                elif media_item['type'] == "stream":
+                   if not media_item['prebuffer_started']:
+                       #this is called if the stream wasn't scheduled sufficiently ahead of time
+                       #so that the prebuffering stage could take effect. Let's do the prebuffering now.
+                       self.start_web_stream_buffer(media_item)
                    self.start_web_stream(media_item)
                elif media_item['type'] == "stream_end":
                    self.stop_web_stream(media_item)
            except Exception, e:
                self.logger.error('Pypo Push Exception: %s', e)


    def start_web_stream_buffer(self, media_item):
        try:
            self.telnet_lock.acquire()

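Together with the fetch-side change that seeds `prebuffer_started = False`, this is a lazy-initialisation handshake: if the dedicated `stream_buffer_start` event never fired because the stream was scheduled too late, the push thread prebuffers on the spot. A condensed sketch with the telnet plumbing replaced by prints:

    def start_web_stream_buffer(media_item):
        # Stands in for the real telnet-driven method; like the next
        # hunk, it records that prebuffering has happened.
        print("prebuffering stream", media_item['id'])
        media_item['prebuffer_started'] = True

    def start_web_stream(media_item):
        print("playing stream", media_item['id'])

    def handle(media_item):
        if media_item['type'] == 'stream':
            if not media_item['prebuffer_started']:
                start_web_stream_buffer(media_item)
            start_web_stream(media_item)

    # Seeded by the fetch side when the item enters the schedule:
    item = {'type': 'stream', 'id': 1, 'prebuffer_started': False}
    handle(item)  # prebuffering stream 1, then playing stream 1
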
@@ -410,6 +415,8 @@ class PypoPush(Thread):

            tn.write("exit\n")
            self.logger.debug(tn.read_all())
+
+           media_item['prebuffer_started'] = True
        except Exception, e:
            self.logger.error(str(e))
        finally:

@@ -425,6 +432,8 @@ class PypoPush(Thread):
            msg = 'streams.scheduled_play_start\n'
            tn.write(msg)

+
+
            msg = 'dynamic_source.output_start\n'
            self.logger.debug(msg)
            tn.write(msg)

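These `tn.write` calls drive Liquidsoap's telnet interface. A minimal sketch of the same exchange with the standard-library `telnetlib` (host and port are hypothetical; the real connection setup happens earlier in this method):

    import telnetlib

    def send_play_commands(host='localhost', port=1234):
        tn = telnetlib.Telnet(host, port)
        # Same sequence as the diff: announce the scheduled play start,
        # then start the dynamic source's output.
        for msg in ('streams.scheduled_play_start\n',
                    'dynamic_source.output_start\n'):
            tn.write(msg.encode('utf-8'))
        tn.write(b'exit\n')
        print(tn.read_all().decode('utf-8', 'replace'))
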