CC-3336: Refactor schedule API used by pypo

- removed export_source
- rewrote GetScheduledPlaylists()

commit da012af6ed, parent c5c1dce4d6
@@ -466,6 +466,19 @@ class Application_Model_Schedule {
         return $rows;
     }
 
+    /*
+     "2012-02-23-01-00-00":{
+        "row_id":"1",
+        "id":"caf951f6d8f087c3a90291a9622073f9",
+        "uri":"http:\/\/localhost:80\/api\/get-media\/file\/caf951f6d8f087c3a90291a9622073f9.mp3",
+        "fade_in":0,
+        "fade_out":0,
+        "cue_in":0,
+        "cue_out":199.798,
+        "start":"2012-02-23-01-00-00",
+        "end":"2012-02-23-01-03-19"
+     }
+    * */
 
     public static function GetScheduledPlaylists($p_fromDateTime = null, $p_toDateTime = null){
 
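The docblock added above documents the payload GetScheduledPlaylists() now hands to pypo: a map keyed by each item's start time in "YYYY-MM-DD-HH-mm-ss" form. A minimal Python sketch of a consumer of that payload, using the sample values from the docblock; the payload dict and the parse_pypo_time() helper are illustrative and not part of this commit:

    from datetime import datetime

    # Sample payload in the shape documented above (values taken from the
    # docblock, not real schedule data).
    payload = {
        "2012-02-23-01-00-00": {
            "row_id": "1",
            "id": "caf951f6d8f087c3a90291a9622073f9",
            "uri": "http://localhost:80/api/get-media/file/caf951f6d8f087c3a90291a9622073f9.mp3",
            "fade_in": 0,
            "fade_out": 0,
            "cue_in": 0,
            "cue_out": 199.798,
            "start": "2012-02-23-01-00-00",
            "end": "2012-02-23-01-03-19",
        }
    }

    def parse_pypo_time(s):
        # Keys use the "YYYY-MM-DD-HH-mm-ss" layout produced by AirtimeTimeToPypoTime().
        return datetime.strptime(s, "%Y-%m-%d-%H-%M-%S")

    # Iterate in playback order and report each item's scheduled length.
    for start_key in sorted(payload):
        item = payload[start_key]
        length = (parse_pypo_time(item["end"]) - parse_pypo_time(item["start"])).total_seconds()
        print("%s plays for %.3f seconds from %s" % (item["id"], length, item["uri"]))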
@@ -488,10 +501,38 @@ class Application_Model_Schedule {
         }
 
         // Scheduler wants everything in a playlist
-        $data = Application_Model_Schedule::GetItems($range_start, $range_end);
+        $items = Application_Model_Schedule::GetItems($range_start, $range_end);
 
-        Logging::log(print_r($data, true));
+        $data = array();
+        $utcTimeZone = new DateTimeZone("UTC");
+
+        foreach ($items as $item){
+
+            $storedFile = Application_Model_StoredFile::Recall($item["file_id"]);
+            $uri = $storedFile->getFileUrlUsingConfigAddress();
+
+            $showEndDateTime = new DateTime($item["show_end"], $utcTimeZone);
+            $trackEndDateTime = new DateTime($item["ends"], $utcTimeZone);
+
+            if ($trackEndDateTime->getTimestamp() > $showEndDateTime->getTimestamp()){
+                $diff = $trackEndDateTime->getTimestamp() - $showEndDateTime->getTimestamp();
+                //assuming "ends" already takes cue_out into account
+                $item["cue_out"] = $item["cue_out"] - $diff;
+            }
+
+            $starts = Application_Model_Schedule::AirtimeTimeToPypoTime($item["starts"]);
+            $data[$starts] = array(
+                'id' => $storedFile->getGunid(),
+                'uri' => $uri,
+                'fade_in' => Application_Model_Schedule::WallTimeToMillisecs($item["fade_in"]),
+                'fade_out' => Application_Model_Schedule::WallTimeToMillisecs($item["fade_out"]),
+                'cue_in' => Application_Model_DateHelper::CalculateLengthInSeconds($item["cue_in"]),
+                'cue_out' => Application_Model_DateHelper::CalculateLengthInSeconds($item["cue_out"]),
+                'start' => $starts,
+                'end' => Application_Model_Schedule::AirtimeTimeToPypoTime($item["ends"])
+            );
+        }
+
         return $data;
     }
 
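The loop above also clamps cue_out when a track would run past the end of its show: if the track's end timestamp is later than the show's, the overrun is subtracted from cue_out. A small Python sketch of that arithmetic with made-up timestamps (the clamp_cue_out() helper is illustrative; the real check is the PHP above, comparing UTC DateTime objects):

    from datetime import datetime

    def clamp_cue_out(cue_out, track_end, show_end):
        # Mirror of the PHP above: shorten cue_out by however many seconds the
        # track overruns the show. Assumes "ends" already accounts for cue_out.
        overrun = (track_end - show_end).total_seconds()
        if overrun > 0:
            return cue_out - overrun
        return cue_out

    # Example: a 199.798 s cue_out on a track that ends 20 s after its show does.
    show_end  = datetime(2012, 2, 23, 1, 3, 0)
    track_end = datetime(2012, 2, 23, 1, 3, 20)
    print(clamp_cue_out(199.798, track_end, show_end))   # -> 179.798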
@@ -564,7 +605,6 @@ class Application_Model_Schedule {
 
             $starts = Application_Model_Schedule::AirtimeTimeToPypoTime($item["starts"]);
             $medias[$starts] = array(
-                'row_id' => $item["id"],
                 'id' => $storedFile->getGunid(),
                 'uri' => $uri,
                 'fade_in' => Application_Model_Schedule::WallTimeToMillisecs($item["fade_in"]),
@@ -572,7 +612,6 @@ class Application_Model_Schedule {
                 'fade_cross' => 0,
                 'cue_in' => Application_Model_DateHelper::CalculateLengthInSeconds($item["cue_in"]),
                 'cue_out' => Application_Model_DateHelper::CalculateLengthInSeconds($item["cue_out"]),
-                'export_source' => 'scheduler',
                 'start' => $starts,
                 'end' => Application_Model_Schedule::AirtimeTimeToPypoTime($item["ends"])
             );
@@ -54,23 +54,16 @@ except Exception, e:
 class Global:
     def __init__(self):
         self.api_client = api_client.api_client_factory(config)
-        self.set_export_source('scheduler')
 
     def selfcheck(self):
         self.api_client = api_client.api_client_factory(config)
         return self.api_client.is_server_compatible()
 
-    def set_export_source(self, export_source):
-        self.export_source = export_source
-        self.cache_dir = config["cache_dir"] + self.export_source + '/'
-        self.schedule_file = self.cache_dir + 'schedule.pickle'
-        self.schedule_tracker_file = self.cache_dir + "schedule_tracker.pickle"
-
     def test_api(self):
         self.api_client.test()
 
     """
-    def check_schedule(self, export_source):
+    def check_schedule(self):
         logger = logging.getLogger()
 
         try:
@@ -45,7 +45,10 @@ class PypoFetch(Thread):
     def __init__(self, q):
         Thread.__init__(self)
         self.api_client = api_client.api_client_factory(config)
-        self.set_export_source('scheduler')
+        self.cache_dir = os.path.join(config["cache_dir"], "scheduler")
+        logger.info("Creating cache directory at %s", self.cache_dir)
+
         self.queue = q
         self.schedule_data = []
         logger = logging.getLogger('fetch')
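With set_export_source() gone, PypoFetch now points at a single fixed "scheduler" cache folder and builds the path with os.path.join instead of string concatenation. A quick sketch of the difference; the config value shown is only an illustration:

    import os

    config = {"cache_dir": "/var/tmp/airtime/pypo/cache/"}   # illustrative value

    # Old style (removed): the trailing slash had to be concatenated in by hand.
    old_cache_dir = config["cache_dir"] + "scheduler" + "/"

    # New style: os.path.join inserts separators as needed, but leaves no trailing slash.
    new_cache_dir = os.path.join(config["cache_dir"], "scheduler")

    print(old_cache_dir)   # /var/tmp/airtime/pypo/cache/scheduler/
    print(new_cache_dir)   # /var/tmp/airtime/pypo/cache/scheduler

Because the new value has no trailing slash, the code that builds per-file destination paths switches to os.path.join as well (see the dst change further down).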
@@ -245,15 +248,6 @@ class PypoFetch(Thread):
                 if(status == "true"):
                     self.api_client.notify_liquidsoap_status("OK", stream_id, str(fake_time))
 
-
-
-    def set_export_source(self, export_source):
-        logger = logging.getLogger('fetch')
-        self.export_source = export_source
-        self.cache_dir = config["cache_dir"] + self.export_source + '/'
-        logger.info("Creating cache directory at %s", self.cache_dir)
-
-
     def update_liquidsoap_stream_format(self, stream_format):
         # Push stream metadata to liquidsoap
         # TODO: THIS LIQUIDSOAP STUFF NEEDS TO BE MOVED TO PYPO-PUSH!!!
@@ -294,7 +288,7 @@ class PypoFetch(Thread):
       to the cache dir (Folder-structure: cache/YYYY-MM-DD-hh-mm-ss)
     - runs the cleanup routine, to get rid of unused cached files
     """
-    def process_schedule(self, schedule_data, export_source, bootstrapping):
+    def process_schedule(self, schedule_data, bootstrapping):
         logger = logging.getLogger('fetch')
         playlists = schedule_data["playlists"]
 
@@ -310,7 +304,7 @@ class PypoFetch(Thread):
             self.queue.put(scheduled_data)
 
             # cleanup
-            try: self.cleanup(self.export_source)
+            try: self.cleanup()
             except Exception, e: logger.error("%s", e)
 
 
@@ -392,7 +386,7 @@ class PypoFetch(Thread):
 
             fileExt = os.path.splitext(media['uri'])[1]
             try:
-                dst = "%s%s/%s%s" % (self.cache_dir, pkey, media['id'], fileExt)
+                dst = os.path.join(self.cache_dir, pkey, media['id']+fileExt)
 
                 # download media file
                 self.handle_remote_file(media, dst)
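Same idea for the per-file destination path above: the old "%s%s/%s%s" format only produced a valid path because self.cache_dir used to end in a slash, while os.path.join makes that assumption unnecessary. A tiny comparison with placeholder values:

    import os

    cache_dir = "/var/tmp/airtime/pypo/cache/scheduler"   # illustrative, no trailing slash
    pkey = "2012-02-23-01-00-00"
    media_id = "caf951f6d8f087c3a90291a9622073f9"
    file_ext = ".mp3"

    # Old: silently glues cache_dir and pkey together when there is no trailing slash.
    old_dst = "%s%s/%s%s" % (cache_dir, pkey, media_id, file_ext)

    # New: separators are inserted where needed.
    new_dst = os.path.join(cache_dir, pkey, media_id + file_ext)

    print(old_dst)   # .../cache/scheduler2012-02-23-01-00-00/caf...mp3  (broken)
    print(new_dst)   # .../cache/scheduler/2012-02-23-01-00-00/caf...mp3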
@@ -406,8 +400,8 @@ class PypoFetch(Thread):
 
                 if fsize > 0:
                     pl_entry = \
-                    'annotate:export_source="%s",media_id="%s",liq_start_next="%s",liq_fade_in="%s",liq_fade_out="%s",liq_cue_in="%s",liq_cue_out="%s",schedule_table_id="%s":%s' \
-                    % (media['export_source'], media['id'], 0, \
+                    'annotate:media_id="%s",liq_start_next="%s",liq_fade_in="%s",liq_fade_out="%s",liq_cue_in="%s",liq_cue_out="%s",schedule_table_id="%s":%s' \
+                    % (media['id'], 0, \
                     float(media['fade_in']) / 1000, \
                     float(media['fade_out']) / 1000, \
                     float(media['cue_in']), \
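Dropping export_source also shortens the liquidsoap annotate entry pypo writes into its playlist files. A sketch of what a single entry might look like after this change, reusing the sample values from the docblock earlier; the schedule_table_id value and local path are made up for illustration:

    media = {
        "id": "caf951f6d8f087c3a90291a9622073f9",
        "fade_in": 0,        # milliseconds, per WallTimeToMillisecs()
        "fade_out": 0,
        "cue_in": 0.0,       # seconds, per CalculateLengthInSeconds()
        "cue_out": 199.798,
    }
    schedule_table_id = 1    # hypothetical value
    dst = "/var/tmp/airtime/pypo/cache/scheduler/2012-02-23-01-00-00/caf951f6d8f087c3a90291a9622073f9.mp3"

    pl_entry = \
        'annotate:media_id="%s",liq_start_next="%s",liq_fade_in="%s",liq_fade_out="%s",liq_cue_in="%s",liq_cue_out="%s",schedule_table_id="%s":%s' \
        % (media['id'], 0,
           float(media['fade_in']) / 1000,
           float(media['fade_out']) / 1000,
           float(media['cue_in']),
           float(media['cue_out']),
           schedule_table_id, dst)

    print(pl_entry)

Everything between "annotate:" and the final colon is metadata liquidsoap attaches to the queued file; the trailing path is what actually gets played.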
@@ -452,7 +446,7 @@ class PypoFetch(Thread):
     Cleans up folders in cache_dir. Look for modification date older than "now - CACHE_FOR"
     and deletes them.
     """
-    def cleanup(self, export_source):
+    def cleanup(self):
         logger = logging.getLogger('fetch')
 
         offset = 3600 * int(config["cache_for"])
@@ -34,7 +34,6 @@ class PypoPush(Thread):
     def __init__(self, q):
         Thread.__init__(self)
         self.api_client = api_client.api_client_factory(config)
-        self.set_export_source('scheduler')
         self.queue = q
 
         self.schedule = dict()
@@ -42,11 +41,6 @@ class PypoPush(Thread):
 
         self.liquidsoap_state_play = True
         self.push_ahead = 10
 
-    def set_export_source(self, export_source):
-        self.export_source = export_source
-        self.cache_dir = config["cache_dir"] + self.export_source + '/'
-        self.schedule_tracker_file = self.cache_dir + "schedule_tracker.pickle"
-
     """
     The Push Loop - the push loop periodically checks if there is a playlist
@@ -54,7 +48,7 @@ class PypoPush(Thread):
     If yes, the current liquidsoap playlist gets replaced with the corresponding one,
     then liquidsoap is asked (via telnet) to reload and immediately play it.
     """
-    def push(self, export_source):
+    def push(self):
         logger = logging.getLogger('push')
 
         timenow = time.time()