Merge branch 'devel' of dev.sourcefabric.org:airtime into devel

denise 2012-09-05 14:50:34 -04:00
commit cf0fc13c81
9 changed files with 408 additions and 72 deletions

View File

@@ -461,7 +461,7 @@ EOT;
             $pos = $pos + 1;
         }
     } catch (Exception $e) {
-        Logging::log($e->getMessage());
+        Logging::info($e->getMessage());
     }
 }

View File

@@ -25,7 +25,8 @@ class Application_Model_RabbitMq
             $CC_CONFIG["rabbitmq"]["password"],
             $CC_CONFIG["rabbitmq"]["vhost"]);
         $channel = $conn->channel();
-        $channel->access_request($CC_CONFIG["rabbitmq"]["vhost"], false, false, true, true);
+        $channel->access_request($CC_CONFIG["rabbitmq"]["vhost"], false, false,
+            true, true);
         $EXCHANGE = 'airtime-pypo';
         $channel->exchange_declare($EXCHANGE, 'direct', false, true);
@@ -50,7 +51,8 @@ class Application_Model_RabbitMq
             $CC_CONFIG["rabbitmq"]["password"],
             $CC_CONFIG["rabbitmq"]["vhost"]);
         $channel = $conn->channel();
-        $channel->access_request($CC_CONFIG["rabbitmq"]["vhost"], false, false, true, true);
+        $channel->access_request($CC_CONFIG["rabbitmq"]["vhost"], false, false,
+            true, true);
         $EXCHANGE = 'airtime-media-monitor';
         $channel->exchange_declare($EXCHANGE, 'direct', false, true);
@@ -73,7 +75,8 @@ class Application_Model_RabbitMq
             $CC_CONFIG["rabbitmq"]["password"],
             $CC_CONFIG["rabbitmq"]["vhost"]);
         $channel = $conn->channel();
-        $channel->access_request($CC_CONFIG["rabbitmq"]["vhost"], false, false, true, true);
+        $channel->access_request($CC_CONFIG["rabbitmq"]["vhost"], false, false,
+            true, true);
         $EXCHANGE = 'airtime-pypo';
         $channel->exchange_declare($EXCHANGE, 'direct', false, true);
@@ -84,7 +87,8 @@ class Application_Model_RabbitMq
         $temp['event_type'] = $event_type;
         $temp['server_timezone'] = Application_Model_Preference::GetTimezone();
         if ($event_type == "update_recorder_schedule") {
-            $temp['shows'] = Application_Model_Show::getShows($now, $end_timestamp, $excludeInstance=NULL, $onlyRecord=TRUE);
+            $temp['shows'] = Application_Model_Show::getShows($now,
+                $end_timestamp, $excludeInstance=NULL, $onlyRecord=TRUE);
         }
         $data = json_encode($temp);
         $msg = new AMQPMessage($data, array('content_type' => 'text/plain'));

View File

@@ -322,10 +322,21 @@ class Application_Model_StoredFile
     {
         global $CC_CONFIG;
         $con = Propel::getConnection();
         $sql = "SELECT playlist_id "
-            ." FROM ".$CC_CONFIG['playistTable']
-            ." WHERE file_id='{$this->id}'";
-        $ids = $con->query($sql)->fetchAll();
+            ." FROM cc_playlist"
+            ." WHERE file_id = :file_id";
+
+        $stmt = $con->prepare($sql);
+        $stmt->bindParam(':file_id', $this->id, PDO::PARAM_INT);
+
+        if ($stmt->execute()) {
+            $ids = $stmt->fetchAll();
+        } else {
+            $msg = implode(',', $stmt->errorInfo());
+            throw new Exception("Error: $msg");
+        }
+
         $playlists = array();
         if (is_array($ids) && count($ids) > 0) {
             foreach ($ids as $id) {
@@ -394,12 +405,16 @@ class Application_Model_StoredFile
      */
    public function getFileExtension()
    {
+        // TODO : what's the point of having this function? Can we not just use
+        // the extension from the file_path column from cc_files?
        $mime = $this->_file->getDbMime();
        if ($mime == "audio/vorbis" || $mime == "application/ogg") {
            return "ogg";
        } elseif ($mime == "audio/mp3" || $mime == "audio/mpeg") {
            return "mp3";
+        } elseif ($mime == "audio/x/flac") {
+            return "flac";
        }
    }
@@ -951,7 +966,7 @@ class Application_Model_StoredFile
            $uid = $user->getId();
        }
        $id_file = "$audio_stor.identifier";
-        if (file_put_contents($id_file,$uid) === false) {
+        if (file_put_contents($id_file, $uid) === false) {
            Logging::info("Could not write file to identify user: '$uid'");
            Logging::info("Id file path: '$id_file'");
            Logging::info("Defaulting to admin (no identification file was
@@ -1003,7 +1018,7 @@ class Application_Model_StoredFile
        global $CC_CONFIG;
        $con = Propel::getConnection();
-        $sql = "SELECT count(*) as cnt FROM ".$CC_CONFIG["filesTable"]." WHERE file_exists";
+        $sql = "SELECT count(*) as cnt FROM cc_files WHERE file_exists";
        return $con->query($sql)->fetchColumn(0);
    }
@@ -1012,53 +1027,59 @@ class Application_Model_StoredFile
     *
     * Enter description here ...
     * @param $dir_id - if this is not provided, it returns all files with full path constructed.
-     * @param $propelObj - if this is true, it returns array of proepl obj
     */
-    public static function listAllFiles($dir_id=null, $all, $propelObj=false)
+    public static function listAllFiles($dir_id=null, $all)
    {
        $con = Propel::getConnection();
-        $file_exists = $all ? "" : "and f.file_exists = 'TRUE'";
-        if ($propelObj) {
-            $sql = "SELECT m.directory || f.filepath as fp"
-                ." FROM CC_MUSIC_DIRS m"
-                ." LEFT JOIN CC_FILES f"
-                ." ON m.id = f.directory WHERE m.id = $dir_id $file_exists";
-        } else {
        $sql = "SELECT filepath as fp"
            ." FROM CC_FILES as f"
-            ." WHERE f.directory = $dir_id $file_exists";
+            ." WHERE f.directory = :dir_id";
+
+        if (!$all) {
+            $sql .= " AND f.file_exists = 'TRUE'";
+        }
+
+        $stmt = $con->prepare($sql);
+        $stmt->bindParam(':dir_id', $dir_id);
+
+        if ($stmt->execute()) {
+            $rows = $stmt->fetchAll();
+        } else {
+            $msg = implode(',', $stmt->errorInfo());
+            throw new Exception("Error: $msg");
        }
-        $rows = $con->query($sql)->fetchAll();
        $results = array();
        foreach ($rows as $row) {
-            if ($propelObj) {
-                $results[] = Application_Model_StoredFile::RecallByFilepath($row["fp"]);
-            } else {
            $results[] = $row["fp"];
-            }
        }
-        }
        return $results;
    }

    //TODO: MERGE THIS FUNCTION AND "listAllFiles" -MK
-    public static function listAllFiles2($dir_id=null, $limit=null)
+    public static function listAllFiles2($dir_id=null, $limit="ALL")
    {
        $con = Propel::getConnection();
        $sql = "SELECT id, filepath as fp"
            ." FROM CC_FILES"
-            ." WHERE directory = $dir_id"
+            ." WHERE directory = :dir_id"
            ." AND file_exists = 'TRUE'"
-            ." AND replay_gain is NULL";
-        if (!is_null($limit) && is_int($limit)) {
-            $sql .= " LIMIT $limit";
-        }
-        $rows = $con->query($sql, PDO::FETCH_ASSOC)->fetchAll();
+            ." AND replay_gain is NULL"
+            ." LIMIT :lim";
+
+        $stmt = $con->prepare($sql);
+        $stmt->bindParam(':dir_id', $dir_id);
+        $stmt->bindParam(':lim', $limit);
+
+        if ($stmt->execute()) {
+            $rows = $stmt->fetchAll(PDO::FETCH_ASSOC);
+        } else {
+            $msg = implode(',', $stmt->errorInfo());
+            throw new Exception("Error: $msg");
+        }
        return $rows;
    }

View File

@@ -0,0 +1 @@

View File

@@ -0,0 +1,103 @@
# -*- coding: utf-8 -*-
import media.monitor.process as md
from os.path import normpath
from media.monitor.pure import format_length, file_md5

with md.metadata('MDATA_KEY_DURATION') as t:
    t.default(u'0.0')
    t.depends('length')
    t.translate(lambda k: format_length(k['length']))

with md.metadata('MDATA_KEY_MIME') as t:
    t.default(u'')
    t.depends('mime')
    t.translate(lambda k: k['mime'].replace('-','/'))

with md.metadata('MDATA_KEY_BITRATE') as t:
    t.default(u'')
    t.depends('bitrate')
    t.translate(lambda k: k['bitrate'])

with md.metadata('MDATA_KEY_SAMPLERATE') as t:
    t.default(u'0')
    t.depends('sample_rate')
    t.translate(lambda k: k['sample_rate'])

with md.metadata('MDATA_KEY_FTYPE') as t:
    t.depends('ftype') # i don't think this field even exists
    t.default(u'audioclip')
    t.translate(lambda k: k['ftype']) # but just in case

with md.metadata("MDATA_KEY_CREATOR") as t:
    t.depends("artist")
    # A little kludge to make sure that we have some value for when we parse
    # MDATA_KEY_TITLE
    t.default(u"")
    t.max_length(512)

with md.metadata("MDATA_KEY_SOURCE") as t:
    t.depends("album")
    t.max_length(512)

with md.metadata("MDATA_KEY_GENRE") as t:
    t.depends("genre")
    t.max_length(64)

with md.metadata("MDATA_KEY_MOOD") as t:
    t.depends("mood")
    t.max_length(64)

with md.metadata("MDATA_KEY_TRACKNUMBER") as t:
    t.depends("tracknumber")

with md.metadata("MDATA_KEY_BPM") as t:
    t.depends("bpm")
    t.max_length(8)

with md.metadata("MDATA_KEY_LABEL") as t:
    t.depends("organization")
    t.max_length(512)

with md.metadata("MDATA_KEY_COMPOSER") as t:
    t.depends("composer")
    t.max_length(512)

with md.metadata("MDATA_KEY_ENCODER") as t:
    t.depends("encodedby")
    t.max_length(512)

with md.metadata("MDATA_KEY_CONDUCTOR") as t:
    t.depends("conductor")
    t.max_length(512)

with md.metadata("MDATA_KEY_YEAR") as t:
    t.depends("date")
    t.max_length(16)

with md.metadata("MDATA_KEY_URL") as t:
    t.depends("website")

with md.metadata("MDATA_KEY_ISRC") as t:
    t.depends("isrc")
    t.max_length(512)

with md.metadata("MDATA_KEY_COPYRIGHT") as t:
    t.depends("copyright")
    t.max_length(512)

with md.metadata("MDATA_KEY_FILEPATH") as t:
    t.depends('path')
    t.translate(lambda k: normpath(k['path']))

with md.metadata("MDATA_KEY_MD5") as t:
    t.depends('path')
    t.optional(False)
    t.translate(lambda k: file_md5(k['path'], max_length=100))

# MDATA_KEY_TITLE is the annoying special case
with md.metadata('MDATA_KEY_TITLE') as t:
    # Need to know MDATA_KEY_CREATOR to know if show was recorded. Value is
    # defaulted to "" from definitions above
    t.depends('title','MDATA_KEY_CREATOR')
    t.max_length(512)
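
Each with-block above builds one MetadataElement: the block body sets defaults, dependencies, translation and maximum length, and the element is registered with the global reader when the block exits (see the metadata() context manager in the next file). A hypothetical extra declaration, not part of this commit, just to show the same pattern:

with md.metadata('MDATA_KEY_COMMENT') as t:   # hypothetical key, not defined in this commit
    t.depends('comment')    # mutagen easy-tag field it would read from
    t.default(u'')          # value used when the tag is absent
    t.max_length(512)       # long values are truncated via truncate_to_length()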

View File

@@ -0,0 +1,140 @@
# -*- coding: utf-8 -*-
from contextlib import contextmanager
from media.monitor.pure import truncate_to_length, toposort
import mutagen

class MetadataAbsent(Exception):
    def __init__(self, name): self.name = name
    def __str__(self): return "Could not obtain element '%s'" % self.name

class MetadataElement(object):
    def __init__(self, name):
        self.name = name
        # "Sane" defaults
        self.__deps = set()
        self.__normalizer = lambda x: x
        self.__optional = True
        self.__default = None
        self.__is_normalized = lambda _ : True
        self.__max_length = -1

    def max_length(self, l):
        self.__max_length = l

    def optional(self, setting):
        self.__optional = setting

    def is_optional(self):
        return self.__optional

    def depends(self, *deps):
        self.__deps = set(deps)

    def dependencies(self):
        return self.__deps

    def translate(self, f):
        self.__translator = f

    def is_normalized(self, f):
        self.__is_normalized = f

    def normalize(self, f):
        self.__normalizer = f

    def default(self, v):
        self.__default = v

    def get_default(self):
        if hasattr(self.__default, '__call__'): return self.__default()
        else: return self.__default

    def has_default(self):
        return self.__default is not None

    def path(self):
        return self.__path

    def __slice_deps(self, d):
        return dict( (k,v) for k,v in d.iteritems() if k in self.__deps )

    def __str__(self):
        return "%s(%s)" % (self.name, ' '.join(list(self.__deps)))

    def read_value(self, path, original, running={}):
        # If value is present and normalized then we don't touch it
        if self.name in original:
            v = original[self.name]
            if self.__is_normalized(v): return v
            else: return self.__normalizer(v)

        # A dictionary slice with all the dependencies and their values
        dep_slice_orig = self.__slice_deps(original)
        dep_slice_running = self.__slice_deps(running)
        full_deps = dict( dep_slice_orig.items()
                        + dep_slice_running.items() )

        # check if any dependencies are absent
        if len(full_deps) != len(self.__deps) or len(self.__deps) == 0:
            # If we have a default value then use that. Otherwise throw an
            # exception
            if self.has_default(): return self.get_default()
            else: raise MetadataAbsent(self.name)

        # We have all dependencies. Now for the actual parsing
        r = self.__normalizer( self.__translator(full_deps) )
        if self.__max_length != -1:
            r = truncate_to_length(r, self.__max_length)
        return r

def normalize_mutagen(path):
    """
    Consumes a path and reads the metadata using mutagen. Normalizes some of
    the metadata that isn't read through the mutagen hash.
    """
    m = mutagen.File(path, easy=True)
    md = {}
    for k,v in m.iteritems():
        if type(v) is list: md[k] = v[0]
        else: md[k] = v
    # populate special metadata values
    md['length'] = getattr(m.info, u'length', 0.0)
    md['bitrate'] = getattr(m.info, 'bitrate', u'')
    md['sample_rate'] = getattr(m.info, 'sample_rate', 0)
    md['mime'] = m.mime[0] if len(m.mime) > 0 else u''
    md['path'] = path
    return md

class MetadataReader(object):
    def __init__(self):
        self.clear()

    def register_metadata(self, m):
        self.__mdata_name_map[m.name] = m
        d = dict( (name, m.dependencies()) for name, m in
                self.__mdata_name_map.iteritems() )
        new_list = list( toposort(d) )
        self.__metadata = [ self.__mdata_name_map[name] for name in new_list
                if name in self.__mdata_name_map ]

    def clear(self):
        self.__mdata_name_map = {}
        self.__metadata = []

    def read(self, path, muta_hash):
        normalized_metadata = {}
        for mdata in self.__metadata:
            try:
                normalized_metadata[mdata.name] = mdata.read_value(
                        path, muta_hash, normalized_metadata)
            except MetadataAbsent:
                if not mdata.is_optional(): raise
        return normalized_metadata

global_reader = MetadataReader()

@contextmanager
def metadata(name):
    t = MetadataElement(name)
    yield t
    global_reader.register_metadata(t)
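
A rough sketch of how the reader is meant to be driven once the definitions in the previous file have been registered; the file path is made up, and using the names directly like this assumes the code runs in a module that imports them (the definitions file imports this module as media.monitor.process):

path = '/srv/airtime/stor/imported/example.mp3'   # made-up path
raw_tags = normalize_mutagen(path)                # flat dict: mutagen tags plus length/bitrate/sample_rate/mime/path
clean = global_reader.read(path, raw_tags)        # each element resolved in dependency (toposort) order
print clean.get('MDATA_KEY_DURATION'), clean.get('MDATA_KEY_TITLE')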

View File

@@ -2,6 +2,7 @@
 import copy
 import subprocess
 import os
+import math
 import shutil
 import re
 import sys
@@ -11,6 +12,9 @@ import operator as op
 from os.path import normpath
 from itertools import takewhile
+# you need to import reduce in python 3
+try: from functools import reduce
+except: pass
 
 from configobj import ConfigObj
 
 from media.monitor.exceptions import FailedToSetLocale, FailedToCreateDir
@@ -84,6 +88,10 @@ def is_file_supported(path):
 # TODO : In the future we would like a better way to find out whether a show
 # has been recorded
 def is_airtime_recorded(md):
+    """
+    Takes a metadata dictionary and returns True if it belongs to a file that
+    was recorded by Airtime.
+    """
     if not 'MDATA_KEY_CREATOR' in md: return False
     return md['MDATA_KEY_CREATOR'] == u'Airtime Show Recorder'
 
@@ -253,11 +261,13 @@ def normalized_metadata(md, original_path):
     if new_md['MDATA_KEY_BPM'] is None:
         del new_md['MDATA_KEY_BPM']
 
     if is_airtime_recorded(new_md):
-        hour,minute,second,name = new_md['MDATA_KEY_TITLE'].split("-",3)
-        new_md['MDATA_KEY_TITLE'] = u'%s-%s-%s:%s:%s' % \
-            (name, new_md['MDATA_KEY_YEAR'], hour, minute, second)
+        #hour,minute,second,name = new_md['MDATA_KEY_TITLE'].split("-",3)
+        #new_md['MDATA_KEY_TITLE'] = u'%s-%s-%s:%s:%s' % \
+            #(name, new_md['MDATA_KEY_YEAR'], hour, minute, second)
+        # We changed show recorder to output correct metadata for recorded
+        # shows
+        pass
     else:
         # Read title from filename if it does not exist
         default_title = no_extension_basename(original_path)
@@ -265,9 +275,14 @@ def normalized_metadata(md, original_path):
             default_title = u''
         new_md = default_to(dictionary=new_md, keys=['MDATA_KEY_TITLE'],
                             default=default_title)
+    new_md['MDATA_KEY_TITLE'] = re.sub(r'-\d+kbps$', u'',
+                                       new_md['MDATA_KEY_TITLE'])
 
+    # TODO : wtf is this for again?
     new_md['MDATA_KEY_TITLE'] = re.sub(r'-?%s-?' % unicode_unknown, u'',
                                        new_md['MDATA_KEY_TITLE'])
+    # ugly mother fucking band aid until enterprise metadata framework is
+    # working
 
     return new_md
 
 def organized_path(old_path, root_path, orig_md):
@@ -280,8 +295,6 @@ def organized_path(old_path, root_path, orig_md):
     """
     filepath = None
     ext = extension(old_path)
-    # The blocks for each if statement look awfully similar. Perhaps there is a
-    # way to simplify this code
     def default_f(dictionary, key):
         if key in dictionary: return len(dictionary[key]) == 0
         else: return True
@@ -291,6 +304,8 @@ def organized_path(old_path, root_path, orig_md):
     # MDATA_KEY_BITRATE is in bytes/second i.e. (256000) we want to turn this
     # into 254kbps
+    # Some metadata elements cannot be empty, hence we default them to some
+    # value just so that we can create a correct path
     normal_md = default_to_f(orig_md, path_md, unicode_unknown, default_f)
     try:
         formatted = str(int(normal_md['MDATA_KEY_BITRATE']) / 1000)
@@ -299,13 +314,15 @@ def organized_path(old_path, root_path, orig_md):
         normal_md['MDATA_KEY_BITRATE'] = unicode_unknown
 
     if is_airtime_recorded(normal_md):
-        title_re = re.match("(?P<show>.+)-(?P<date>\d+-\d+-\d+-\d+:\d+:\d+)$",
-                normal_md['MDATA_KEY_TITLE'])
+        # normal_md['MDATA_KEY_TITLE'] = 'show_name-yyyy-mm-dd-hh:mm:ss'
+        r = "(?P<show>.+)-(?P<date>\d+-\d+-\d+)-(?P<time>\d+:\d+:\d+)$"
+        title_re = re.match(r, normal_md['MDATA_KEY_TITLE'])
         show_name = title_re.group('show')
-        date = title_re.group('date').replace(':','-')
+        #date = title_re.group('date')
         yyyy, mm, _ = normal_md['MDATA_KEY_YEAR'].split('-',2)
         fname_base = '%s-%s-%s.%s' % \
-            (date, show_name, normal_md['MDATA_KEY_BITRATE'], ext)
+            (title_re.group('time'), show_name,
+             normal_md['MDATA_KEY_BITRATE'], ext)
         filepath = os.path.join(root_path, yyyy, mm, fname_base)
     elif len(normal_md['MDATA_KEY_TRACKNUMBER']) == 0:
         fname = u'%s-%s.%s' % (normal_md['MDATA_KEY_TITLE'],
@@ -451,7 +468,9 @@ def owner_id(original_path):
     return owner_id
 
 def file_playable(pathname):
+    """
+    Returns True if 'pathname' is playable by liquidsoap. False otherwise.
+    """
     #when there is an single apostrophe inside of a string quoted by
     #apostrophes, we can only escape it by replace that apostrophe with '\''.
     #This breaks the string into two, and inserts an escaped single quote in
@@ -465,6 +484,54 @@
     return_code = subprocess.call(command, shell=True)
     return (return_code == 0)
 
+def toposort(data):
+    """
+    Topological sort on 'data' where 'data' is of the form:
+        data = {
+            'one'   : set(['two','three']),
+            'two'   : set(['three']),
+            'three' : set(),
+        }
+    """
+    for k, v in data.items():
+        v.discard(k) # Ignore self dependencies
+    extra_items_in_deps = reduce(set.union, data.values()) - set(data.keys())
+    data.update({item:set() for item in extra_items_in_deps})
+    while True:
+        ordered = set(item for item,dep in data.items() if not dep)
+        if not ordered: break
+        for e in sorted(ordered): yield e
+        data = dict((item,(dep - ordered)) for item,dep in data.items()
+                if item not in ordered)
+    assert not data, "A cyclic dependency exists amongst %r" % data
+
+def truncate_to_length(item, length):
+    """
+    Truncates 'item' to 'length'
+    """
+    if isinstance(item, int): item = str(item)
+    if isinstance(item, basestring):
+        if len(item) > length: return item[0:length]
+        else: return item
+
+def format_length(mutagen_length):
+    """
+    Convert mutagen length to airtime length
+    """
+    t = float(mutagen_length)
+    h = int(math.floor(t / 3600))
+    t = t % 3600
+    m = int(math.floor(t / 60))
+    s = t % 60
+    # will be ss.uuu
+    s = str(s)
+    seconds = s.split(".")
+    s = seconds[0]
+    # have a maximum of 6 subseconds.
+    if len(seconds[1]) >= 6: ss = seconds[1][0:6]
+    else: ss = seconds[1][0:]
+    return "%s:%s:%s.%s" % (h, m, s, ss)
+
 if __name__ == '__main__':
     import doctest
     doctest.testmod()
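
A quick sanity check of the three helpers added above (toposort, truncate_to_length, format_length), for example dropped into a throwaway script that imports this module; the input values are made up:

deps = {
    'one'   : set(['two', 'three']),
    'two'   : set(['three']),
    'three' : set(),
}
print list(toposort(deps))               # ['three', 'two', 'one'] -- items with no deps come out first
print truncate_to_length('abcdefgh', 3)  # 'abc'
print format_length(3661.5)              # '1:1:1.5'  (hours:minutes:seconds.subseconds)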

View File

@@ -37,6 +37,8 @@ except Exception, e:
     print ('Error loading config file: %s', e)
     sys.exit()
 
+# TODO : add docstrings everywhere in this module
 def getDateTimeObj(time):
+    # TODO : clean up for this function later.
+    # - use tuples to parse result from split (instead of indices)
@@ -139,20 +141,17 @@ class ShowRecorder(Thread):
         self.start_time, self.show_name, self.show_instance
         """
         try:
-            date = self.start_time
-            md = date.split(" ")
-            record_time = md[1].replace(":", "-")
-            self.logger.info("time: %s" % record_time)
+            full_date, full_time = self.start_time.split(" ",1)
+            # No idea why we translated - to : before
+            #full_time = full_time.replace(":","-")
+            self.logger.info("time: %s" % full_time)
 
             artist = "Airtime Show Recorder"
             #set some metadata for our file daemon
             recorded_file = mutagen.File(filepath, easy = True)
-            recorded_file['title'] = record_time + "-" + self.show_name
             recorded_file['artist'] = artist
-            recorded_file['date'] = md[0]
-            #recorded_file['date'] = md[0].split("-")[0]
+            recorded_file['date'] = full_date
+            recorded_file['title'] = "%s-%s-%s" % (self.show_name,
+                    full_date, full_time)
             #You cannot pass ints into the metadata of a file. Even tracknumber needs to be a string
             recorded_file['tracknumber'] = unicode(self.show_instance)
             recorded_file.save()
@@ -218,7 +217,8 @@ class Recorder(Thread):
                 show_end = getDateTimeObj(show[u'ends'])
                 time_delta = show_end - show_starts
-                temp_shows_to_record[show[u'starts']] = [time_delta, show[u'instance_id'], show[u'name'], m['server_timezone']]
+                temp_shows_to_record[show[u'starts']] = [time_delta,
+                        show[u'instance_id'], show[u'name'], m['server_timezone']]
             self.shows_to_record = temp_shows_to_record
 
     def get_time_till_next_show(self):
@@ -270,12 +270,12 @@
             self.logger.error('Exception: %s', e)
             self.logger.error("traceback: %s", top)
 
-    def run(self):
         """
         Main loop of the thread:
         Wait for schedule updates from RabbitMQ, but in case there arent any,
         poll the server to get the upcoming schedule.
         """
+    def run(self):
         try:
             self.logger.info("Started...")
             # Bootstrap: since we are just starting up, we need to grab the
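
The title written here is the same shape that organized_path() in the pure.py hunk above now parses for recorded shows; a small illustration of the round trip, with made-up show data:

import re

show_name, full_date, full_time = "Morning Show", "2012-09-05", "08:00:00"   # made-up values
title = "%s-%s-%s" % (show_name, full_date, full_time)   # what the recorder now writes into the tag

r = "(?P<show>.+)-(?P<date>\d+-\d+-\d+)-(?P<time>\d+:\d+:\d+)$"   # pattern from the pure.py hunk
m = re.match(r, title)
print m.group('show'), m.group('date'), m.group('time')   # Morning Show 2012-09-05 08:00:00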

View File

@@ -1,16 +1,16 @@
 #!/bin/bash
 
-#Hack to parse rabbitmq pid and place it into the correct directory. This is also
-#done in our rabbitmq init.d script, but placing it here so that monit recognizes
-# it faster (in time for the upcoming airtime-check-system)
-codename=`lsb_release -cs`
-if [ "$codename" = "lucid" -o "$codename" = "maverick" -o "$codename" = "natty" -o "$codename" = "squeeze" ]
-then
-    rabbitmqpid=`sed "s/.*,\(.*\)\}.*/\1/" /var/lib/rabbitmq/pids`
-else
-    #RabbitMQ in Ubuntu Oneiric and newer have a different way of storing the PID.
+/etc/init.d/rabbitmq-server status | grep "\[{pid"
+pid_found="$?"
+
+if [ "$pid_found" == "0" ]; then
+    #PID is available in the status message
     rabbitmqstatus=`/etc/init.d/rabbitmq-server status | grep "\[{pid"`
     rabbitmqpid=`echo $rabbitmqstatus | sed "s/.*,\(.*\)\}.*/\1/"`
+else
+    #PID should be available from file
+    rabbitmqpid=`sed "s/.*,\(.*\)\}.*/\1/" /var/lib/rabbitmq/pids`
 fi
 
 echo "RabbitMQ PID: $rabbitmqpid"
 echo "$rabbitmqpid" > /var/run/rabbitmq.pid