commit dc00528cd5
Merged baseUrl related conflicts

62 changed files with 1155 additions and 1234 deletions

CREDITS | 29
@@ -1,3 +1,32 @@
+=======
+CREDITS
+=======
+Version 2.2.0
+-------------
+Martin Konecny (martin.konecny@sourcefabric.org)
+Role: Developer Team Lead
+
+Naomi Aro (naomi.aro@sourcefabric.org)
+Role: Software Developer
+
+James Moon (james.moon@sourcefabric.org)
+Role: Software Developer
+
+Denise Rigato (denise.rigato@sourcefabric.org)
+Role: Software Developer
+
+Rudi Grinberg (rudi.grinberg@sourcefabric.org)
+Role: Software Developer
+
+Cliff Wang (cliff.wang@sourcefabric.org)
+Role: QA
+
+Mikayel Karapetian (michael.karapetian@sourcefabric.org)
+Role: QA
+
+Daniel James (daniel.james@sourcefabric.org)
+Role: Documentor & QA
+
 =======
 CREDITS
 =======

@@ -1,40 +1,40 @@
 <?php
 
-define('AIRTIME_COPYRIGHT_DATE', '2010-2012');
-define('AIRTIME_REST_VERSION', '1.1');
-define('AIRTIME_API_VERSION', '1.1');
+define('AIRTIME_COPYRIGHT_DATE' , '2010-2012');
+define('AIRTIME_REST_VERSION' , '1.1');
+define('AIRTIME_API_VERSION' , '1.1');
 
 // Metadata Keys for files
-define('MDATA_KEY_FILEPATH', 'filepath');
-define('MDATA_KEY_DIRECTORY', 'directory');
-define('MDATA_KEY_MD5', 'md5');
-define('MDATA_KEY_TITLE', 'track_title');
-define('MDATA_KEY_CREATOR', 'artist_name');
-define('MDATA_KEY_SOURCE', 'album_title');
-define('MDATA_KEY_DURATION', 'length');
-define('MDATA_KEY_MIME', 'mime');
-define('MDATA_KEY_FTYPE', 'ftype');
-define('MDATA_KEY_URL', 'info_url');
-define('MDATA_KEY_GENRE', 'genre');
-define('MDATA_KEY_MOOD', 'mood');
-define('MDATA_KEY_LABEL', 'label');
-define('MDATA_KEY_COMPOSER', 'composer');
-define('MDATA_KEY_DESCRIPTION', 'description');
-define('MDATA_KEY_SAMPLERATE', 'sample_rate');
-define('MDATA_KEY_BITRATE', 'bit_rate');
-define('MDATA_KEY_ENCODER', 'encoded_by');
-define('MDATA_KEY_ISRC', 'isrc_number');
-define('MDATA_KEY_COPYRIGHT', 'copyright');
-define('MDATA_KEY_YEAR', 'year');
-define('MDATA_KEY_BPM', 'bpm');
-define('MDATA_KEY_TRACKNUMBER', 'track_number');
-define('MDATA_KEY_CONDUCTOR', 'conductor');
-define('MDATA_KEY_LANGUAGE', 'language');
-define('MDATA_KEY_REPLAYGAIN', 'replay_gain');
-define('MDATA_KEY_OWNER_ID', 'owner_id');
+define('MDATA_KEY_FILEPATH' , 'filepath');
+define('MDATA_KEY_DIRECTORY' , 'directory');
+define('MDATA_KEY_MD5' , 'md5');
+define('MDATA_KEY_TITLE' , 'track_title');
+define('MDATA_KEY_CREATOR' , 'artist_name');
+define('MDATA_KEY_SOURCE' , 'album_title');
+define('MDATA_KEY_DURATION' , 'length');
+define('MDATA_KEY_MIME' , 'mime');
+define('MDATA_KEY_FTYPE' , 'ftype');
+define('MDATA_KEY_URL' , 'info_url');
+define('MDATA_KEY_GENRE' , 'genre');
+define('MDATA_KEY_MOOD' , 'mood');
+define('MDATA_KEY_LABEL' , 'label');
+define('MDATA_KEY_COMPOSER' , 'composer');
+define('MDATA_KEY_DESCRIPTION' , 'description');
+define('MDATA_KEY_SAMPLERATE' , 'sample_rate');
+define('MDATA_KEY_BITRATE' , 'bit_rate');
+define('MDATA_KEY_ENCODER' , 'encoded_by');
+define('MDATA_KEY_ISRC' , 'isrc_number');
+define('MDATA_KEY_COPYRIGHT' , 'copyright');
+define('MDATA_KEY_YEAR' , 'year');
+define('MDATA_KEY_BPM' , 'bpm');
+define('MDATA_KEY_TRACKNUMBER' , 'track_number');
+define('MDATA_KEY_CONDUCTOR' , 'conductor');
+define('MDATA_KEY_LANGUAGE' , 'language');
+define('MDATA_KEY_REPLAYGAIN' , 'replay_gain');
+define('MDATA_KEY_OWNER_ID' , 'owner_id');
 
-define('UI_MDATA_VALUE_FORMAT_FILE', 'File');
-define('UI_MDATA_VALUE_FORMAT_STREAM', 'live stream');
+define('UI_MDATA_VALUE_FORMAT_FILE' , 'File');
+define('UI_MDATA_VALUE_FORMAT_STREAM' , 'live stream');
 
 // Session Keys
 define('UI_PLAYLISTCONTROLLER_OBJ_SESSNAME', 'PLAYLISTCONTROLLER_OBJ');

@@ -43,6 +43,6 @@ define('UI_BLOCK_SESSNAME', 'BLOCK');*/
 
 
 // Soundcloud contants
-define('SOUNDCLOUD_NOT_UPLOADED_YET', -1);
-define('SOUNDCLOUD_PROGRESS', -2);
-define('SOUNDCLOUD_ERROR', -3);
+define('SOUNDCLOUD_NOT_UPLOADED_YET' , -1);
+define('SOUNDCLOUD_PROGRESS' , -2);
+define('SOUNDCLOUD_ERROR' , -3);

@@ -490,6 +490,10 @@ class ApiController extends Zend_Controller_Action
                 $file->setFileExistsFlag(true);
                 $file->setMetadata($md);
             }
+            if ($md['is_record'] != 0) {
+                $this->uploadRecordedActionParam($md['MDATA_KEY_TRACKNUMBER'], $file->getId());
+            }
+
         } elseif ($mode == "modify") {
             $filepath = $md['MDATA_KEY_FILEPATH'];
             $file = Application_Model_StoredFile::RecallByFilepath($filepath);

@@ -562,7 +566,6 @@ class ApiController extends Zend_Controller_Action
             // least 1 digit
             if ( !preg_match('/^md\d+$/', $k) ) { continue; }
             $info_json = json_decode($raw_json, $assoc = true);
-            unset( $info_json["is_record"] );
             // Log invalid requests
             if ( !array_key_exists('mode', $info_json) ) {
                 Logging::info("Received bad request(key=$k), no 'mode' parameter. Bad request is:");

@@ -454,7 +454,7 @@ class Application_Form_SmartBlockCriteria extends Zend_Form_SubForm
             $column = CcFilesPeer::getTableMap()->getColumnByPhpName($criteria2PeerMap[$d['sp_criteria_field']]);
             // validation on type of column
             if ($d['sp_criteria_field'] == 'length') {
-                if (!preg_match("/(\d{2}):(\d{2}):(\d{2})/", $d['sp_criteria_value'])) {
+                if (!preg_match("/^(\d{2}):(\d{2}):(\d{2})/", $d['sp_criteria_value'])) {
                     $element->addError("'Length' should be in '00:00:00' format");
                     $isValid = false;
                 }

@@ -263,6 +263,15 @@ SQL;
         global $CC_CONFIG;
         $con = Propel::getConnection();
 
+        $p_start_str = $p_start->format("Y-m-d H:i:s");
+        $p_end_str = $p_end->format("Y-m-d H:i:s");
+
+
+        //We need to search 24 hours before and after the show times so that that we
+        //capture all of the show's contents.
+        $p_track_start= $p_start->sub(new DateInterval("PT24H"))->format("Y-m-d H:i:s");
+        $p_track_end = $p_end->add(new DateInterval("PT24H"))->format("Y-m-d H:i:s");
+
         $templateSql = <<<SQL
 SELECT DISTINCT sched.starts AS sched_starts,
     sched.ends AS sched_ends,

@@ -287,7 +296,14 @@ SQL;
 SQL;
         $filesJoin = <<<SQL
       cc_schedule AS sched
-      JOIN cc_files AS ft ON (sched.file_id = ft.id)
+      JOIN cc_files AS ft ON (sched.file_id = ft.id
+          AND ((sched.starts >= '{$p_track_start}'
+              AND sched.starts < '{$p_track_end}')
+          OR (sched.ends > '{$p_track_start}'
+              AND sched.ends <= '{$p_track_end}')
+          OR (sched.starts <= '{$p_track_start}'
+              AND sched.ends >= '{$p_track_end}'))
+      )
 SQL;
 
 

@@ -307,7 +323,14 @@ SQL;
 SQL;
         $streamJoin = <<<SQL
       cc_schedule AS sched
-      JOIN cc_webstream AS ws ON (sched.stream_id = ws.id)
+      JOIN cc_webstream AS ws ON (sched.stream_id = ws.id
+          AND ((sched.starts >= '{$p_track_start}'
+              AND sched.starts < '{$p_track_end}')
+          OR (sched.ends > '{$p_track_start}'
+              AND sched.ends <= '{$p_track_end}')
+          OR (sched.starts <= '{$p_track_start}'
+              AND sched.ends >= '{$p_track_end}'))
+      )
       LEFT JOIN cc_subjs AS sub ON (ws.creator_id = sub.id)
 SQL;
 

@@ -344,12 +367,12 @@ SELECT showt.name AS show_name,
 JOIN cc_show AS showt ON (showt.id = si.show_id)
 WHERE si.modified_instance = FALSE
     $showPredicate
-    AND ((si.starts >= '{$p_start}'
-        AND si.starts < '{$p_end}')
-    OR (si.ends > '{$p_start}'
-        AND si.ends <= '{$p_end}')
-    OR (si.starts <= '{$p_start}'
-        AND si.ends >= '{$p_end}'))
+    AND ((si.starts >= '{$p_start_str}'
+        AND si.starts < '{$p_end_str}')
+    OR (si.ends > '{$p_start_str}'
+        AND si.ends <= '{$p_end_str}')
+    OR (si.starts <= '{$p_start_str}'
+        AND si.ends >= '{$p_end_str}'))
 ORDER BY si_starts,
     sched_starts;
 SQL;
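
Note: the three OR-ed range checks repeated in the joins above are a standard interval-overlap test: an item is kept when it starts inside the window, ends inside the window, or spans the window entirely. A minimal standalone sketch of the same predicate (the table and columns are from this schema, the window bounds are hypothetical):

    SELECT sched.id
    FROM cc_schedule AS sched
    WHERE (sched.starts >= '2012-10-24 00:00:00' AND sched.starts < '2012-10-26 00:00:00')
       OR (sched.ends > '2012-10-24 00:00:00' AND sched.ends <= '2012-10-26 00:00:00')
       OR (sched.starts <= '2012-10-24 00:00:00' AND sched.ends >= '2012-10-26 00:00:00');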

@@ -713,6 +736,7 @@ SQL;
                 'end' => $stream_end,
                 'uri' => $uri,
                 'type' => 'stream_buffer_end',
+                'row_id' => $item["id"],
                 'independent_event' => true
             );
             self::appendScheduleItem($data, $stream_end, $schedule_item);

@@ -1127,7 +1151,6 @@ SQL;
             }
         } else {
             if ($isAdminOrPM) {
-                Logging::info( $data );
                 Application_Model_Show::create($data);
             }
 

@@ -366,10 +366,9 @@ class Application_Model_Scheduler
      * @param array $fileIds
      * @param array $playlistIds
      */
-    private function insertAfter($scheduleItems, $schedFiles, $adjustSched = true)
+    private function insertAfter($scheduleItems, $schedFiles, $adjustSched = true, $mediaItems = null)
     {
         try {
-
             $affectedShowInstances = array();
 
             //dont want to recalculate times for moved items.

@@ -385,6 +384,16 @@ class Application_Model_Scheduler
             foreach ($scheduleItems as $schedule) {
                 $id = intval($schedule["id"]);
 
+                // if mediaItmes is passed in, we want to create contents
+                // at the time of insert. This is for dyanmic blocks or
+                // playlist that contains dynamic blocks
+                if ($mediaItems != null) {
+                    $schedFiles = array();
+                    foreach ($mediaItems as $media) {
+                        $schedFiles = array_merge($schedFiles, $this->retrieveMediaFiles($media["id"], $media["type"]));
+                    }
+                }
+
                 if ($id !== 0) {
                     $schedItem = CcScheduleQuery::create()->findPK($id, $this->con);
                     $instance = $schedItem->getCcShowInstances($this->con);

@@ -527,10 +536,32 @@ class Application_Model_Scheduler
 
             $this->validateRequest($scheduleItems);
 
+            $requireDynamicContentCreation = false;
+
+            foreach ($mediaItems as $media) {
+                if ($media['type'] == "playlist") {
+                    $pl = new Application_Model_Playlist($media['id']);
+                    if ($pl->hasDynamicBlock()) {
+                        $requireDynamicContentCreation = true;
+                        break;
+                    }
+                } else if ($media['type'] == "block") {
+                    $bl = new Application_Model_Block($media['id']);
+                    if (!$bl->isStatic()) {
+                        $requireDynamicContentCreation = true;
+                        break;
+                    }
+                }
+            }
+
+            if ($requireDynamicContentCreation) {
+                $this->insertAfter($scheduleItems, $schedFiles, $adjustSched, $mediaItems);
+            } else {
                 foreach ($mediaItems as $media) {
                     $schedFiles = array_merge($schedFiles, $this->retrieveMediaFiles($media["id"], $media["type"]));
                 }
                 $this->insertAfter($scheduleItems, $schedFiles, $adjustSched);
+            }
 
             $this->con->commit();
 

@@ -1743,7 +1743,8 @@ SQL;
         $days = $interval->format('%a');
         $shows = Application_Model_Show::getShows($p_start, $p_end);
         $nowEpoch = time();
+        $content_count = Application_Model_ShowInstance::getContentCount(
+            $p_start, $p_end);
         $timezone = date_default_timezone_get();
 
         foreach ($shows as $show) {

@@ -1789,9 +1790,9 @@ SQL;
 
             $showInstance = new Application_Model_ShowInstance(
                 $show["instance_id"]);
-            //$showContent = $showInstance->getShowListContent();
 
-            $options["show_empty"] = ($showInstance->showEmpty()) ? 1 : 0;
+            $options["show_empty"] = (array_key_exists($show['instance_id'],
+                $content_count)) ? 0 : 1;
 
             $events[] = &self::makeFullCalendarEvent($show, $options,
                 $startsDT, $endsDT, $startsEpochStr, $endsEpochStr);

@@ -198,7 +198,9 @@ class Application_Model_ShowBuilder
         } elseif (intval($p_item["si_record"]) === 1) {
             $row["record"] = true;
 
-            if (Application_Model_Preference::GetUploadToSoundcloudOption()) {
+            // at the time of creating on show, the recorded file is not in the DB yet.
+            // therefore, 'si_file_id' is null. So we need to check it.
+            if (Application_Model_Preference::GetUploadToSoundcloudOption() && isset($p_item['si_file_id'])) {
                 $file = Application_Model_StoredFile::Recall(
                     $p_item['si_file_id']);
                 if (isset($file)) {

@@ -421,8 +423,7 @@ class Application_Model_ShowBuilder
         }
 
         $scheduled_items = Application_Model_Schedule::GetScheduleDetailItems(
-            $this->startDT->format("Y-m-d H:i:s"), $this->endDT->format(
-                "Y-m-d H:i:s"), $shows);
+            $this->startDT, $this->endDT, $shows);
 
         for ($i = 0, $rows = count($scheduled_items); $i < $rows; $i++) {
 

@@ -662,6 +662,31 @@ SQL;
     }
 
+
+    public static function getContentCount($p_start, $p_end)
+    {
+        $sql = <<<SQL
+SELECT instance_id,
+       count(*) AS instance_count
+FROM cc_schedule
+WHERE ends > :p_start::TIMESTAMP
+  AND starts < :p_end::TIMESTAMP
+GROUP BY instance_id
+SQL;
+
+        $counts = Application_Common_Database::prepareAndExecute($sql, array(
+            ':p_start' => $p_start->format("Y-m-d G:i:s"),
+            ':p_end' => $p_end->format("Y-m-d G:i:s"))
+        , 'all');
+
+        $real_counts = array();
+        foreach ($counts as $c) {
+            $real_counts[$c['instance_id']] = $c['instance_count'];
+        }
+        return $real_counts;
+
+    }
+
     public function showEmpty()
     {
         $sql = <<<SQL
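
Note: getContentCount() fetches the per-instance item counts for the whole date range in one grouped query, so the calendar code above no longer calls showEmpty() once per show instance; any instance_id missing from the result is treated as empty. A sketch of the query with the bind parameters filled in (the timestamps are hypothetical):

    SELECT instance_id,
           count(*) AS instance_count
    FROM cc_schedule
    WHERE ends > '2012-10-25 00:00:00'::TIMESTAMP
      AND starts < '2012-10-26 00:00:00'::TIMESTAMP
    GROUP BY instance_id;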

@@ -26,7 +26,10 @@ class Application_Model_Soundcloud
     public function uploadTrack($filepath, $filename, $description,
         $tags=array(), $release=null, $genre=null)
     {
-        if ($this->getToken()) {
+        if (!$this->getToken()) {
+            throw new NoSoundCloundToken();
+        }
         if (count($tags)) {
             $tags = join(" ", $tags);
             $tags = $tags." ".Application_Model_Preference::GetSoundCloudTags();

@@ -82,9 +85,7 @@ class Application_Model_Soundcloud
         );
 
         return $response;
-        } else {
-            throw new NoSoundCloundToken();
-        }
     }
 
     public static function uploadSoundcloud($id)

@@ -127,9 +127,9 @@ class CcSchedule extends BaseCcSchedule {
         }
 
         if ($microsecond == 0) {
-            $this->fadein = $dt->format('H:i:s.u');
+            $this->fade_in = $dt->format('H:i:s.u');
         } else {
-            $this->fadein = $dt->format('H:i:s').".".$microsecond;
+            $this->fade_in = $dt->format('H:i:s').".".$microsecond;
         }
         $this->modifiedColumns[] = CcSchedulePeer::FADE_IN;
 

@@ -164,9 +164,9 @@ class CcSchedule extends BaseCcSchedule {
         }
 
         if ($microsecond == 0) {
-            $this->fadeout = $dt->format('H:i:s.u');
+            $this->fade_out = $dt->format('H:i:s.u');
         } else {
-            $this->fadeout = $dt->format('H:i:s').".".$microsecond;
+            $this->fade_out = $dt->format('H:i:s').".".$microsecond;
         }
         $this->modifiedColumns[] = CcSchedulePeer::FADE_OUT;
 

@@ -53,7 +53,7 @@ class CcPlaylistTableMap extends TableMap {
      */
     public function buildRelations()
     {
-        $this->addRelation('CcSubjs', 'CcSubjs', RelationMap::MANY_TO_ONE, array('creator_id' => 'id', ), null, null);
+        $this->addRelation('CcSubjs', 'CcSubjs', RelationMap::MANY_TO_ONE, array('creator_id' => 'id', ), 'CASCADE', null);
         $this->addRelation('CcPlaylistcontents', 'CcPlaylistcontents', RelationMap::ONE_TO_MANY, array('id' => 'playlist_id', ), 'CASCADE', null);
     } // buildRelations()
 

@@ -63,7 +63,7 @@ class CcSubjsTableMap extends TableMap {
         $this->addRelation('CcFilesRelatedByDbEditedby', 'CcFiles', RelationMap::ONE_TO_MANY, array('id' => 'editedby', ), null, null);
         $this->addRelation('CcPerms', 'CcPerms', RelationMap::ONE_TO_MANY, array('id' => 'subj', ), 'CASCADE', null);
         $this->addRelation('CcShowHosts', 'CcShowHosts', RelationMap::ONE_TO_MANY, array('id' => 'subjs_id', ), 'CASCADE', null);
-        $this->addRelation('CcPlaylist', 'CcPlaylist', RelationMap::ONE_TO_MANY, array('id' => 'creator_id', ), null, null);
+        $this->addRelation('CcPlaylist', 'CcPlaylist', RelationMap::ONE_TO_MANY, array('id' => 'creator_id', ), 'CASCADE', null);
         $this->addRelation('CcBlock', 'CcBlock', RelationMap::ONE_TO_MANY, array('id' => 'creator_id', ), null, null);
         $this->addRelation('CcPref', 'CcPref', RelationMap::ONE_TO_MANY, array('id' => 'subjid', ), 'CASCADE', null);
         $this->addRelation('CcSess', 'CcSess', RelationMap::ONE_TO_MANY, array('id' => 'userid', ), 'CASCADE', null);

@@ -404,6 +404,9 @@ abstract class BaseCcSubjsPeer {
         // Invalidate objects in CcShowHostsPeer instance pool,
         // since one or more of them may be deleted by ON DELETE CASCADE/SETNULL rule.
         CcShowHostsPeer::clearInstancePool();
+        // Invalidate objects in CcPlaylistPeer instance pool,
+        // since one or more of them may be deleted by ON DELETE CASCADE/SETNULL rule.
+        CcPlaylistPeer::clearInstancePool();
         // Invalidate objects in CcPrefPeer instance pool,
         // since one or more of them may be deleted by ON DELETE CASCADE/SETNULL rule.
         CcPrefPeer::clearInstancePool();

@@ -49,7 +49,7 @@
     </dd>
     <dt id="master_username-label">
         <label class="optional" for="master_username"><?php echo $this->element->getElement('master_username')->getLabel() ?> :
-            <span class='stream_username_help_icon'></span>
+            <span class='master_username_help_icon'></span>
         </label>
     </dt>
     <dd id="master_username-element">

@@ -91,9 +91,6 @@
   <index name="cc_files_name_idx">
    <index-column name="name"/>
   </index>
-  <index name="cc_files_file_exists_idx">
-   <index-column name="file_exists"/>
-  </index>
  </table>
  <table name="cc_perms" phpName="CcPerms">
   <column name="permid" phpName="Permid" type="INTEGER" primaryKey="true" required="true"/>

@@ -206,7 +203,7 @@
   <parameter name="foreign_table" value="cc_playlistcontents" />
   <parameter name="expression" value="SUM(cliplength)" />
  </behavior>
- <foreign-key foreignTable="cc_subjs" name="cc_playlist_createdby_fkey">
+ <foreign-key foreignTable="cc_subjs" name="cc_playlist_createdby_fkey" onDelete="CASCADE">
   <reference local="creator_id" foreign="id"/>
  </foreign-key>
 </table>

@@ -332,6 +329,9 @@
  <foreign-key foreignTable="cc_webstream" name="cc_show_stream_fkey" onDelete="CASCADE">
   <reference local="stream_id" foreign="id"/>
  </foreign-key>
+ <index name="cc_schedule_instance_id_idx">
+  <index-column name="instance_id"/>
+ </index>
 </table>
 <table name="cc_sess" phpName="CcSess">
  <column name="sessid" phpName="Sessid" type="CHAR" size="32" primaryKey="true" required="true"/>

@@ -105,8 +105,6 @@ CREATE INDEX "cc_files_md5_idx" ON "cc_files" ("md5");
 
 CREATE INDEX "cc_files_name_idx" ON "cc_files" ("name");
 
-CREATE INDEX "cc_files_file_exists_idx" ON "cc_files" ("file_exists");
-
 -----------------------------------------------------------------------------
 -- cc_perms
 -----------------------------------------------------------------------------

@@ -429,6 +427,8 @@ COMMENT ON TABLE "cc_schedule" IS '';
 
 
 SET search_path TO public;
+CREATE INDEX "cc_schedule_instance_id_idx" ON "cc_schedule" ("instance_id");
+
 -----------------------------------------------------------------------------
 -- cc_sess
 -----------------------------------------------------------------------------

@@ -689,7 +689,7 @@ ALTER TABLE "cc_show_hosts" ADD CONSTRAINT "cc_perm_show_fkey" FOREIGN KEY ("sho
 
 ALTER TABLE "cc_show_hosts" ADD CONSTRAINT "cc_perm_host_fkey" FOREIGN KEY ("subjs_id") REFERENCES "cc_subjs" ("id") ON DELETE CASCADE;
 
-ALTER TABLE "cc_playlist" ADD CONSTRAINT "cc_playlist_createdby_fkey" FOREIGN KEY ("creator_id") REFERENCES "cc_subjs" ("id");
+ALTER TABLE "cc_playlist" ADD CONSTRAINT "cc_playlist_createdby_fkey" FOREIGN KEY ("creator_id") REFERENCES "cc_subjs" ("id") ON DELETE CASCADE;
 
 ALTER TABLE "cc_playlistcontents" ADD CONSTRAINT "cc_playlistcontents_file_id_fkey" FOREIGN KEY ("file_id") REFERENCES "cc_files" ("id") ON DELETE CASCADE;
 
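
Note: the new cc_schedule_instance_id_idx index matches the queries this commit adds that filter or group cc_schedule rows by instance_id (see getContentCount() above); presumably it was added so those lookups avoid a full table scan. A hypothetical query it serves:

    -- counts the items scheduled in one show instance (id 42 is made up)
    SELECT count(*) FROM cc_schedule WHERE instance_id = 42;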

@@ -104,7 +104,8 @@ select {
     line-height:16px !important;
 }
 
-.airtime_auth_help_icon, .custom_auth_help_icon, .stream_username_help_icon, .playlist_type_help_icon {
+.airtime_auth_help_icon, .custom_auth_help_icon, .stream_username_help_icon,
+.playlist_type_help_icon, .master_username_help_icon {
     cursor: help;
     position: relative;
     display:inline-block; zoom:1;

@@ -53,8 +53,10 @@ function open_audio_preview(type, id, audioFileTitle, audioFileArtist) {
     if(index != -1){
        audioFileTitle = audioFileTitle.substring(0,index);
     }
-    openPreviewWindow(baseUrl+'audiopreview/audio-preview/audioFileID/'+id+'/audioFileArtist/'+encodeURIComponent(audioFileArtist)+'/audioFileTitle/'+encodeURIComponent(audioFileTitle)+'/type/'+type);
+    // The reason that we need to encode artist and title string is that
+    // sometime they contain '/' or '\' and apache reject %2f or %5f
+    // so the work around is to encode it twice.
+    openPreviewWindow(baseUrl+'audiopreview/audio-preview/audioFileID/'+id+'/audioFileArtist/'+encodeURIComponent(encodeURIComponent(audioFileArtist))+'/audioFileTitle/'+encodeURIComponent(encodeURIComponent(audioFileTitle))+'/type/'+type);
     _preview_window.focus();
 }
 

@@ -333,6 +333,27 @@ $(document).ready(function() {
     })
 
     $(".stream_username_help_icon").qtip({
+        content: {
+            text: "If your Icecast server expects a username of 'source', this field can be left blank."
+        },
+        hide: {
+            delay: 500,
+            fixed: true
+        },
+        style: {
+            border: {
+                width: 0,
+                radius: 4
+            },
+            classes: "ui-tooltip-dark ui-tooltip-rounded"
+        },
+        position: {
+            my: "left bottom",
+            at: "right center"
+        },
+    })
+
+    $(".master_username_help_icon").qtip({
         content: {
             text: "If your live streaming client does not ask for a username, this field should be 'source'."
         },

changelog | 15

@@ -1,3 +1,14 @@
+2.2.0 - October 25th, 2012
+ * New features
+   * Smart Playlists
+   * Webstream rebroadcasts
+   * Replaygain support
+   * FLAC + WAV support (AAC if you compile your own Liquidsoap)
+   * Huge performance increase on library import
+   * User ownership of files
+   * Stereo/mono streams
+   * Rescan watched folders button (useful for network drives where keeping in sync is more difficult)
+
 2.1.3 - July 4th, 2012
  * Changes
    * Clarify inputs and output labels under stream settings

@@ -56,7 +67,7 @@
    * Removing a watched directory and adding it again preserves playlists & shows with those files.
    * An icon in the playlist shows whether a file is missing on disk, warning the user that the playlist will not go according to plan.
    * Media monitor detects add and removal of watched temporary local storage (USB disks for example) and network drives.
    * Broadcast Log - export play count of tracks within a given time range. Useful for royalty reporting purposes.
  * Minor Improvements:
    * Ability to turn off the broadcast.
    * Editing metadata in the library will update the metadata on disk.

@@ -67,7 +78,7 @@
    * Repeating shows default to "No End"
    * Ability to "View on Soundcloud" for recorded shows in the calendar
    * "Listen" preview player no longer falls behind the broadcast (you can only mute the stream now, not stop it)
-   * Tracks that cannot be played will be rejected on upload and put in to the directory "/srv/airtime/store/problem_files" (but currently it will not tell you that it rejected them - sorry\!)
+   * Tracks that cannot be played will be rejected on upload and put in to the directory "/srv/airtime/stor/problem_files" (but currently it will not tell you that it rejected them - sorry\!)
    * Library is automatically refreshed when media import is finished
    * Show "Disk Full" message when trying to upload a file that wont fit on the disk
    * Reduced CPU utilization for OGG streams

@@ -181,7 +181,8 @@ libmad-ocaml-dev libtaglib-ocaml-dev libalsa-ocaml-dev libtaglib-ocaml-dev libvo
 libspeex-dev libspeexdsp-dev speex libladspa-ocaml-dev festival festival-dev \
 libsamplerate-dev libxmlplaylist-ocaml-dev libxmlrpc-light-ocaml-dev libflac-dev \
 libxml-dom-perl libxml-dom-xpath-perl patch autoconf libmp3lame-dev \
-libcamomile-ocaml-dev libcamlimages-ocaml-dev libtool libpulse-dev libjack-dev camlidl libfaad-dev''')
+libcamomile-ocaml-dev libcamlimages-ocaml-dev libtool libpulse-dev libjack-dev
+camlidl libfaad-dev libpcre-ocaml-dev''')
 
 root = '/home/martin/src'
 do_run('mkdir -p %s' % root)

@@ -111,6 +111,10 @@ class UpgradeCommon{
         $old = "list_all_db_files = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%'";
         $new = "list_all_db_files = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%/all/%%all%%'";
         exec("sed -i \"s#$old#$new#g\" /etc/airtime/api_client.cfg");
+
+        $old = "update_start_playing_url = 'notify-media-item-start-play/api_key/%%api_key%%/media_id/%%media_id%%/schedule_id/%%schedule_id%%'";
+        $new = "update_start_playing_url = 'notify-media-item-start-play/api_key/%%api_key%%/media_id/%%media_id%%/'";
+        exec("sed -i \"s#$old#$new#g\" /etc/airtime/api_client.cfg");
     }
 
     /**

@@ -14,6 +14,13 @@ INSERT INTO cc_stream_setting (keyname, value, type) VALUES ('s2_channels', 'ste
 INSERT INTO cc_stream_setting (keyname, value, type) VALUES ('s3_channels', 'stereo', 'string');
 
 
+CREATE FUNCTION airtime_to_int(chartoconvert character varying) RETURNS integer
+AS
+'SELECT CASE WHEN trim($1) SIMILAR TO ''[0-9]+'' THEN CAST(trim($1) AS integer) ELSE NULL END;'
+LANGUAGE SQL
+IMMUTABLE
+RETURNS NULL ON NULL INPUT;
+
 --clean up database of scheduled items that weren't properly deleted in 2.1.x
 --due to a bug
 DELETE

@@ -27,14 +34,9 @@ WHERE id IN
 ALTER TABLE cc_files
 DROP CONSTRAINT cc_files_gunid_idx;
 
-DROP TABLE cc_access;
+DROP INDEX cc_files_file_exists_idx;
 
-CREATE FUNCTION airtime_to_int(chartoconvert character varying) RETURNS integer
-AS
-'SELECT CASE WHEN trim($1) SIMILAR TO ''[0-9]+'' THEN CAST(trim($1) AS integer) ELSE NULL END;'
-LANGUAGE SQL
-IMMUTABLE
-RETURNS NULL ON NULL INPUT;
+DROP TABLE cc_access;
 
 CREATE SEQUENCE cc_block_id_seq
 START WITH 1
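
Note: airtime_to_int() appears to be moved earlier in the upgrade script so it is defined before the statements that use it further down. Per its body, it casts strings made entirely of digits and yields NULL for anything else (including NULL input, via RETURNS NULL ON NULL INPUT). A hypothetical check:

    SELECT airtime_to_int('128');   -- 128
    SELECT airtime_to_int('12x8');  -- NULL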

@@ -140,6 +142,12 @@ ALTER TABLE cc_playlistcontents
 ALTER TABLE cc_schedule
 ADD COLUMN stream_id integer;
 
+CREATE INDEX cc_schedule_instance_id_idx
+  ON cc_schedule
+  USING btree
+  (instance_id);
+
+
 ALTER TABLE cc_subjs
 ADD COLUMN cell_phone character varying(255);
 

@@ -179,6 +187,33 @@ ALTER TABLE cc_schedule
 ALTER TABLE cc_webstream_metadata
 ADD CONSTRAINT cc_schedule_inst_fkey FOREIGN KEY (instance_id) REFERENCES cc_schedule(id) ON DELETE CASCADE;
 
+
+
+ALTER TABLE cc_playlist
+DROP CONSTRAINT cc_playlist_createdby_fkey;
+
+ALTER SEQUENCE cc_block_id_seq
+OWNED BY cc_block.id;
+
+ALTER SEQUENCE cc_blockcontents_id_seq
+OWNED BY cc_blockcontents.id;
+
+ALTER SEQUENCE cc_blockcriteria_id_seq
+OWNED BY cc_blockcriteria.id;
+
+ALTER SEQUENCE cc_webstream_id_seq
+OWNED BY cc_webstream.id;
+
+ALTER SEQUENCE cc_webstream_metadata_id_seq
+OWNED BY cc_webstream_metadata.id;
+
+ALTER TABLE cc_playlist
+ADD CONSTRAINT cc_playlist_createdby_fkey FOREIGN KEY (creator_id) REFERENCES cc_subjs(id) ON DELETE CASCADE;
+
+
+
 DROP FUNCTION airtime_to_int(chartoconvert character varying);
 
 UPDATE cc_files
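
Note: dropping and re-adding cc_playlist_createdby_fkey with ON DELETE CASCADE brings existing databases in line with the schema.xml and Propel map changes above: deleting a cc_subjs row now also removes that user's playlists instead of being rejected with a foreign-key violation. A minimal sketch of the new behaviour (the id is hypothetical):

    DELETE FROM cc_subjs WHERE id = 7;
    -- before this upgrade: rejected if user 7 owns rows in cc_playlist
    -- after: user 7's cc_playlist rows are deleted as well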

@@ -117,3 +117,5 @@ get_files_without_replay_gain = 'get-files-without-replay-gain/api_key/%%api_key
 update_replay_gain_value = 'update-replay-gain-value/api_key/%%api_key%%'
 
 notify_webstream_data = 'notify-webstream-data/api_key/%%api_key%%/media_id/%%media_id%%/format/json'
+
+notify_liquidsoap_started = 'rabbitmq-do-push/api_key/%%api_key%%/format/json'

@@ -20,6 +20,11 @@ import traceback
 
 AIRTIME_VERSION = "2.2.0"
 
+
+# TODO : Place these functions in some common module. Right now, media
+# monitor uses the same functions and it would be better to reuse them
+# instead of copy pasting them around
+
 def to_unicode(obj, encoding='utf-8'):
     if isinstance(obj, basestring):
         if not isinstance(obj, unicode):

@@ -39,7 +44,7 @@ def convert_dict_value_to_utf8(md):
 # Airtime API Client
 ################################################################################
 
-class AirtimeApiClient():
+class AirtimeApiClient(object):
 
     # This is a little hacky fix so that I don't have to pass the config object
     # everywhere where AirtimeApiClient needs to be initialized

@@ -408,15 +413,13 @@ class AirtimeApiClient():
 
     def send_media_monitor_requests(self, action_list, dry=False):
        """
-        Send a gang of media monitor events at a time. actions_list is a list
-        of dictionaries where every dictionary is representing an action. Every
-        action dict must contain a 'mode' key that says what kind of action it
-        is and an optional 'is_record' key that says whether the show was
-        recorded or not. The value of this key does not matter, only if it's
-        present or not.
+        Send a gang of media monitor events at a time. actions_list is a
+        list of dictionaries where every dictionary is representing an
+        action. Every action dict must contain a 'mode' key that says
+        what kind of action it is and an optional 'is_record' key that
+        says whether the show was recorded or not. The value of this key
+        does not matter, only if it's present or not.
        """
-        logger = self.logger
-        try:
        url = self.construct_url('reload_metadata_group')
        # We are assuming that action_list is a list of dictionaries such
        # that every dictionary represents the metadata of a file along

@@ -450,11 +453,6 @@ class AirtimeApiClient():
        response = self.get_response_from_server(req)
        response = json.loads(response)
        return response
-        except ValueError: raise
-        except Exception, e:
-            logger.error('Exception: %s', e)
-            logger.error("traceback: %s", traceback.format_exc())
-            raise
 
     #returns a list of all db files for a given directory in JSON format:
     #{"files":["path/to/file1", "path/to/file2"]}

@@ -1,111 +1,143 @@
 # -*- coding: utf-8 -*-
-import media.monitor.process as md
+import media.metadata.process as md
+import re
 from os.path import normpath
-from media.monitor.pure import format_length, file_md5
+from media.monitor.pure import format_length, file_md5, is_airtime_recorded, \
+                               no_extension_basename
+
+defs_loaded = False
 
-with md.metadata('MDATA_KEY_DURATION') as t:
+def is_defs_loaded():
+    global defs_loaded
+    return defs_loaded
+
+def load_definitions():
+    with md.metadata('MDATA_KEY_DURATION') as t:
         t.default(u'0.0')
         t.depends('length')
         t.translate(lambda k: format_length(k['length']))
 
     with md.metadata('MDATA_KEY_MIME') as t:
         t.default(u'')
         t.depends('mime')
-        t.translate(lambda k: k['mime'].replace('-','/'))
+        # Is this necessary?
+        t.translate(lambda k: k['mime'].replace('audio/vorbis','audio/ogg'))
 
     with md.metadata('MDATA_KEY_BITRATE') as t:
         t.default(u'')
         t.depends('bitrate')
         t.translate(lambda k: k['bitrate'])
 
     with md.metadata('MDATA_KEY_SAMPLERATE') as t:
         t.default(u'0')
         t.depends('sample_rate')
         t.translate(lambda k: k['sample_rate'])
 
-    with md.metadata('MDATA_KEY_FTYPE'):
+    with md.metadata('MDATA_KEY_FTYPE') as t:
         t.depends('ftype') # i don't think this field even exists
         t.default(u'audioclip')
         t.translate(lambda k: k['ftype']) # but just in case
 
     with md.metadata("MDATA_KEY_CREATOR") as t:
         t.depends("artist")
         # A little kludge to make sure that we have some value for when we parse
         # MDATA_KEY_TITLE
         t.default(u"")
         t.max_length(512)
 
     with md.metadata("MDATA_KEY_SOURCE") as t:
         t.depends("album")
         t.max_length(512)
 
     with md.metadata("MDATA_KEY_GENRE") as t:
         t.depends("genre")
         t.max_length(64)
 
     with md.metadata("MDATA_KEY_MOOD") as t:
         t.depends("mood")
         t.max_length(64)
 
     with md.metadata("MDATA_KEY_TRACKNUMBER") as t:
         t.depends("tracknumber")
 
     with md.metadata("MDATA_KEY_BPM") as t:
         t.depends("bpm")
         t.max_length(8)
 
     with md.metadata("MDATA_KEY_LABEL") as t:
         t.depends("organization")
         t.max_length(512)
 
     with md.metadata("MDATA_KEY_COMPOSER") as t:
         t.depends("composer")
         t.max_length(512)
 
     with md.metadata("MDATA_KEY_ENCODER") as t:
         t.depends("encodedby")
         t.max_length(512)
 
     with md.metadata("MDATA_KEY_CONDUCTOR") as t:
         t.depends("conductor")
         t.max_length(512)
 
     with md.metadata("MDATA_KEY_YEAR") as t:
         t.depends("date")
         t.max_length(16)
 
     with md.metadata("MDATA_KEY_URL") as t:
         t.depends("website")
 
     with md.metadata("MDATA_KEY_ISRC") as t:
         t.depends("isrc")
         t.max_length(512)
 
     with md.metadata("MDATA_KEY_COPYRIGHT") as t:
         t.depends("copyright")
         t.max_length(512)
 
-    with md.metadata("MDATA_KEY_FILEPATH") as t:
+    with md.metadata("MDATA_KEY_ORIGINAL_PATH") as t:
         t.depends('path')
-        t.translate(lambda k: normpath(k['path']))
+        t.translate(lambda k: unicode(normpath(k['path'])))
 
     with md.metadata("MDATA_KEY_MD5") as t:
         t.depends('path')
         t.optional(False)
         t.translate(lambda k: file_md5(k['path'], max_length=100))
 
     # owner is handled differently by (by events.py)
 
-with md.metadata('MDATA_KEY_ORIGINAL_PATH') as t:
-    t.depends('original_path')
-
-# MDATA_KEY_TITLE is the annoying special case
-with md.metadata('MDATA_KEY_TITLE') as t:
+    # MDATA_KEY_TITLE is the annoying special case b/c we sometimes read it
+    # from file name
+
+    # must handle 3 cases:
+    # 1. regular case (not recorded + title is present)
+    # 2. title is absent (read from file)
+    # 3. recorded file
+    def tr_title(k):
+        #unicode_unknown = u"unknown"
+        new_title = u""
+        if is_airtime_recorded(k) or k['title'] != u"":
+            new_title = k['title']
+        else:
+            default_title = no_extension_basename(k['path'])
+            default_title = re.sub(r'__\d+\.',u'.', default_title)
+
+            # format is: track_number-title-123kbps.mp3
+            m = re.match(".+?-(?P<title>.+)-(\d+kbps|unknown)$", default_title)
+            if m: new_title = m.group('title')
+            else: new_title = re.sub(r'-\d+kbps$', u'', default_title)
+
+        return new_title
+
+    with md.metadata('MDATA_KEY_TITLE') as t:
         # Need to know MDATA_KEY_CREATOR to know if show was recorded. Value is
         # defaulted to "" from definitions above
-        t.depends('title','MDATA_KEY_CREATOR')
+        t.depends('title','MDATA_KEY_CREATOR','path')
+        t.optional(False)
+        t.translate(tr_title)
         t.max_length(512)
 
     with md.metadata('MDATA_KEY_LABEL') as t:
         t.depends('label')
         t.max_length(512)
@ -1,14 +1,36 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
from media.monitor.pure import truncate_to_length, toposort
|
from media.monitor.pure import truncate_to_length, toposort
|
||||||
|
from os.path import normpath
|
||||||
|
from media.monitor.exceptions import BadSongFile
|
||||||
|
from media.monitor.log import Loggable
|
||||||
|
import media.monitor.pure as mmp
|
||||||
|
from collections import namedtuple
|
||||||
import mutagen
|
import mutagen
|
||||||
|
|
||||||
|
class FakeMutagen(dict):
|
||||||
|
"""
|
||||||
|
Need this fake mutagen object so that airtime_special functions
|
||||||
|
return a proper default value instead of throwing an exceptions for
|
||||||
|
files that mutagen doesn't recognize
|
||||||
|
"""
|
||||||
|
FakeInfo = namedtuple('FakeInfo','length bitrate')
|
||||||
|
def __init__(self,path):
|
||||||
|
self.path = path
|
||||||
|
self.mime = ['audio/wav']
|
||||||
|
self.info = FakeMutagen.FakeInfo(0.0, '')
|
||||||
|
dict.__init__(self)
|
||||||
|
def set_length(self,l):
|
||||||
|
old_bitrate = self.info.bitrate
|
||||||
|
self.info = FakeMutagen.FakeInfo(l, old_bitrate)
|
||||||
|
|
||||||
|
|
||||||
 class MetadataAbsent(Exception):
     def __init__(self, name): self.name = name
     def __str__(self): return "Could not obtain element '%s'" % self.name

-class MetadataElement(object):
+class MetadataElement(Loggable):

     def __init__(self,name):
         self.name = name
         # "Sane" defaults
@@ -18,6 +40,7 @@ class MetadataElement(object):
         self.__default = None
         self.__is_normalized = lambda _ : True
         self.__max_length = -1
+        self.__translator = None

     def max_length(self,l):
         self.__max_length = l
@@ -57,31 +80,64 @@ class MetadataElement(object):
         return self.__path

     def __slice_deps(self, d):
+        """
+        returns a dictionary of all the key value pairs in d that are also
+        present in self.__deps
+        """
         return dict( (k,v) for k,v in d.iteritems() if k in self.__deps)

     def __str__(self):
         return "%s(%s)" % (self.name, ' '.join(list(self.__deps)))

     def read_value(self, path, original, running={}):
-        # If value is present and normalized then we don't touch it
+        # If value is present and normalized then we only check if it's
+        # normalized or not. We normalize if it's not normalized already

         if self.name in original:
             v = original[self.name]
             if self.__is_normalized(v): return v
             else: return self.__normalizer(v)

-        # A dictionary slice with all the dependencies and their values
+        # We slice out only the dependencies that are required for the metadata
+        # element.
         dep_slice_orig = self.__slice_deps(original)
         dep_slice_running = self.__slice_deps(running)
+        # TODO : remove this later
+        dep_slice_special = self.__slice_deps({'path' : path})
+        # We combine all required dependencies into a single dictionary
+        # that we will pass to the translator
         full_deps = dict( dep_slice_orig.items()
-                        + dep_slice_running.items() )
+                        + dep_slice_running.items()
+                        + dep_slice_special.items())

         # check if any dependencies are absent
-        if len(full_deps) != len(self.__deps) or len(self.__deps) == 0:
+        # note: there is no point checking the case that len(full_deps) >
+        # len(self.__deps) because we make sure to "slice out" any supefluous
+        # dependencies above.
+        if len(full_deps) != len(self.dependencies()) or \
+                len(self.dependencies()) == 0:
             # If we have a default value then use that. Otherwise throw an
             # exception
             if self.has_default(): return self.get_default()
             else: raise MetadataAbsent(self.name)

         # We have all dependencies. Now for actual for parsing
+        def def_translate(dep):
+            def wrap(k):
+                e = [ x for x in dep ][0]
+                return k[e]
+            return wrap
+
+        # Only case where we can select a default translator
+        if self.__translator is None:
+            self.translate(def_translate(self.dependencies()))
+            if len(self.dependencies()) > 2: # dependencies include themselves
+                self.logger.info("Ignoring some dependencies in translate %s"
+                        % self.name)
+                self.logger.info(self.dependencies())
+
         r = self.__normalizer( self.__translator(full_deps) )
         if self.__max_length != -1:
             r = truncate_to_length(r, self.__max_length)
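A note on the resolution order above, since it is the heart of the new element-based reader: a value already present in the original dict wins (after normalization), otherwise the element's declared dependencies are sliced out and handed to its translator, with the default as a last resort. A minimal standalone sketch of the same idea (names here are illustrative, not the module's API):

    # Toy version of dependency-driven resolution, assuming a dict of
    # already-read values and a translator over the dependency slice.
    class SimpleElement(object):
        def __init__(self, name, deps, translator, default=None):
            self.name, self.deps = name, set(deps)
            self.translator, self.default = translator, default

        def read_value(self, original):
            if self.name in original: return original[self.name]
            sliced = dict( (k,v) for k,v in original.items()
                           if k in self.deps )
            if len(sliced) != len(self.deps):  # a dependency is absent
                if self.default is not None: return self.default
                raise KeyError(self.name)
            return self.translator(sliced)

    elem = SimpleElement('length_fmt', ['hours','mins'],
            lambda kw: "%02d:%02d" % (kw['hours'], kw['mins']),
            default='00:00')
    print elem.read_value({'hours': 1, 'mins': 30})  # prints 01:30
    print elem.read_value({'hours': 1})              # prints 00:00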
@@ -92,24 +148,40 @@ def normalize_mutagen(path):
     Consumes a path and reads the metadata using mutagen. normalizes some of
     the metadata that isn't read through the mutagen hash
     """
-    m = mutagen.File(path, easy=True)
+    if not mmp.file_playable(path): raise BadSongFile(path)
+    try : m = mutagen.File(path, easy=True)
+    except Exception : raise BadSongFile(path)
+    if m is None: m = FakeMutagen(path)
+    try:
+        if mmp.extension(path) == 'wav':
+            m.set_length(mmp.read_wave_duration(path))
+    except Exception: raise BadSongFile(path)
     md = {}
     for k,v in m.iteritems():
-        if type(v) is list: md[k] = v[0]
+        if type(v) is list:
+            if len(v) > 0: md[k] = v[0]
         else: md[k] = v
     # populate special metadata values
-    md['length'] = getattr(m.info, u'length', 0.0)
+    md['length'] = getattr(m.info, 'length', 0.0)
     md['bitrate'] = getattr(m.info, 'bitrate', u'')
     md['sample_rate'] = getattr(m.info, 'sample_rate', 0)
     md['mime'] = m.mime[0] if len(m.mime) > 0 else u''
-    md['path'] = path
+    md['path'] = normpath(path)
+    if 'title' not in md: md['title'] = u''
     return md


+class OverwriteMetadataElement(Exception):
+    def __init__(self, m): self.m = m
+    def __str__(self): return "Trying to overwrite: %s" % self.m

 class MetadataReader(object):
     def __init__(self):
         self.clear()

     def register_metadata(self,m):
+        if m in self.__mdata_name_map:
+            raise OverwriteMetadataElement(m)
         self.__mdata_name_map[m.name] = m
         d = dict( (name,m.dependencies()) for name,m in
                 self.__mdata_name_map.iteritems() )
@@ -131,6 +203,9 @@ class MetadataReader(object):
                 if not mdata.is_optional(): raise
         return normalized_metadata

+    def read_mutagen(self, path):
+        return self.read(path, normalize_mutagen(path))
+
 global_reader = MetadataReader()

 @contextmanager
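The list-flattening loop in normalize_mutagen deserves a second look, because mutagen's "easy" interface hands every tag back as a list, and an empty list would have crashed the old one-liner with an IndexError on v[0]. A standalone illustration of the rule now applied:

    # mutagen easy-tags map each key to a list; empty lists are now
    # dropped instead of raising IndexError on v[0].
    raw = {'title': [u'My Song'], 'genre': []}
    md = {}
    for k, v in raw.iteritems():
        if type(v) is list:
            if len(v) > 0: md[k] = v[0]
        else: md[k] = v
    assert md == {'title': u'My Song'}  # 'genre' is simply omitted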
@@ -199,6 +199,7 @@ class NewFile(BaseEvent, HasMetaData):
         """
         req_dict = self.metadata.extract()
         req_dict['mode'] = u'create'
+        req_dict['is_record'] = self.metadata.is_recorded()
         self.assign_owner(req_dict)
         req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
         return [req_dict]
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 import pyinotify
 from pydispatch import dispatcher
+from functools import wraps

 import media.monitor.pure as mmp
 from media.monitor.pure import IncludeOnly
@@ -31,6 +32,7 @@ class FileMediator(object):
     def unignore(path): FileMediator.ignored_set.remove(path)

 def mediate_ignored(fn):
+    @wraps(fn)
     def wrapped(self, event, *args,**kwargs):
         event.pathname = unicode(event.pathname, "utf-8")
         if FileMediator.is_ignored(event.pathname):
@@ -49,15 +51,11 @@ class OrganizeListener(BaseListener, pyinotify.ProcessEvent, Loggable):
     def process_IN_CLOSE_WRITE(self, event):
         #self.logger.info("===> handling: '%s'" % str(event))
         self.process_to_organize(event)
-        # got cookie

     def process_IN_MOVED_TO(self, event):
         #self.logger.info("===> handling: '%s'" % str(event))
         self.process_to_organize(event)

-    def process_default(self, event):
-        pass
-        #self.logger.info("===> Not handling: '%s'" % str(event))

     def flush_events(self, path):
         """
         organize the whole directory at path. (pretty much by doing what
@@ -67,6 +65,7 @@ class OrganizeListener(BaseListener, pyinotify.ProcessEvent, Loggable):
         for f in mmp.walk_supported(path, clean_empties=True):
             self.logger.info("Bootstrapping: File in 'organize' directory: \
                     '%s'" % f)
+            if not mmp.file_locked(f):
                 dispatcher.send(signal=self.signal, sender=self,
                         event=OrganizeFile(f))
             flushed += 1
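One small but worthwhile detail in this hunk is the @wraps(fn) added to mediate_ignored: without it, every decorated pyinotify handler would report the name "wrapped" in logs and tracebacks. A minimal standalone demonstration:

    from functools import wraps

    def mediate(fn):
        @wraps(fn)                      # preserves fn's name/docstring
        def wrapped(*args, **kwargs):
            # the ignore-list check would happen here
            return fn(*args, **kwargs)
        return wrapped

    @mediate
    def process_IN_CLOSE_WRITE(event): return event

    print process_IN_CLOSE_WRITE.__name__  # 'process_IN_CLOSE_WRITE'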
@@ -6,37 +6,30 @@ from media.monitor.pure import LazyProperty
 appname = 'root'

 def setup_logging(log_path):
-    """
-    Setup logging by writing log to 'log_path'
-    """
+    """ Setup logging by writing log to 'log_path' """
     #logger = logging.getLogger(appname)
     logging.basicConfig(filename=log_path, level=logging.DEBUG)

 def get_logger():
-    """
-    in case we want to use the common logger from a procedural interface
-    """
+    """ in case we want to use the common logger from a procedural
+    interface """
     return logging.getLogger()

 class Loggable(object):
-    """
-    Any class that wants to log can inherit from this class and automatically
-    get a logger attribute that can be used like: self.logger.info(...) etc.
-    """
+    """ Any class that wants to log can inherit from this class and
+    automatically get a logger attribute that can be used like:
+    self.logger.info(...) etc. """
     __metaclass__ = abc.ABCMeta
     @LazyProperty
     def logger(self): return get_logger()

     def unexpected_exception(self,e):
-        """
-        Default message for 'unexpected' exceptions
-        """
+        """ Default message for 'unexpected' exceptions """
         self.fatal_exception("'Unexpected' exception has occured:", e)

     def fatal_exception(self, message, e):
-        """
-        Prints an exception 'e' with 'message'. Also outputs the traceback.
-        """
+        """ Prints an exception 'e' with 'message'. Also outputs the
+        traceback. """
         self.logger.error( message )
         self.logger.error( str(e) )
         self.logger.error( traceback.format_exc() )
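The logger attribute above is wrapped in LazyProperty (imported at the top of this file), so the logging handle is built on first access and then cached on the instance. The real descriptor lives in media.monitor.pure; a minimal equivalent, for illustration only:

    class LazyProperty(object):
        """ Compute the wrapped method once, then cache the result as a
        plain instance attribute that shadows the descriptor. """
        def __init__(self, f): self.f = f
        def __get__(self, obj, cls):
            if obj is None: return None
            val = self.f(obj)
            setattr(obj, self.f.__name__, val)
            return val

    class Thing(object):
        @LazyProperty
        def logger(self):
            print "built once"
            return object()

    t = Thing()
    t.logger; t.logger  # "built once" is printed a single time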
@@ -18,7 +18,7 @@ class ManagerTimeout(threading.Thread,Loggable):
     secnods. This used to be just a work around for cc-4235 but recently
     became a permanent solution because it's "cheap" and reliable
     """
-    def __init__(self, manager, interval=3):
+    def __init__(self, manager, interval=1.5):
         # TODO : interval should be read from config and passed here instead
         # of just using the hard coded value
         threading.Thread.__init__(self)
@@ -26,7 +26,7 @@ class ManagerTimeout(threading.Thread,Loggable):
         self.interval = interval
     def run(self):
         while True:
-            time.sleep(self.interval) # every 3 seconds
+            time.sleep(self.interval)
             self.manager.flush_organize()

 class Manager(Loggable):
@@ -178,7 +178,7 @@ class Manager(Loggable):
         # the OrganizeListener instance will walk path and dispatch an organize
         # event for every file in that directory
         self.organize['organize_listener'].flush_events(new_path)
-        self.__add_watch(new_path, self.organize['organize_listener'])
+        #self.__add_watch(new_path, self.organize['organize_listener'])

     def flush_organize(self):
         path = self.organize['organize_path']
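For context, ManagerTimeout is nothing more than a daemon thread that wakes up every interval seconds and calls back into the manager; the change here only halves the default from 3 to 1.5 seconds (and drops the now-stale "# every 3 seconds" comment). The whole pattern fits in a few lines (hypothetical names, trimmed to the essentials):

    import threading, time

    class PeriodicFlusher(threading.Thread):
        def __init__(self, flush, interval=1.5):
            threading.Thread.__init__(self)
            self.daemon = True      # don't keep the process alive
            self.flush, self.interval = flush, interval
        def run(self):
            while True:
                time.sleep(self.interval)
                self.flush()

    # PeriodicFlusher(manager.flush_organize).start() would mirror the
    # ManagerTimeout wiring, minus the logging.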
@@ -2,15 +2,19 @@
 import mutagen
 import os
 import copy
-from collections import namedtuple
 from mutagen.easymp4 import EasyMP4KeyError
 from mutagen.easyid3 import EasyID3KeyError

 from media.monitor.exceptions import BadSongFile, InvalidMetadataElement
 from media.monitor.log import Loggable
-from media.monitor.pure import format_length, truncate_to_length
+from media.monitor.pure import format_length
 import media.monitor.pure as mmp

+# emf related stuff
+from media.metadata.process import global_reader
+import media.metadata.definitions as defs
+defs.load_definitions()

 """
 list of supported easy tags in mutagen version 1.20
 ['albumartistsort', 'musicbrainz_albumstatus', 'lyricist', 'releasecountry',
@@ -43,21 +47,6 @@ airtime2mutagen = {
     "MDATA_KEY_COPYRIGHT" : "copyright",
 }

-class FakeMutagen(dict):
-    """
-    Need this fake mutagen object so that airtime_special functions
-    return a proper default value instead of throwing an exceptions for
-    files that mutagen doesn't recognize
-    """
-    FakeInfo = namedtuple('FakeInfo','length bitrate')
-    def __init__(self,path):
-        self.path = path
-        self.mime = ['audio/wav']
-        self.info = FakeMutagen.FakeInfo(0.0, '')
-        dict.__init__(self)
-    def set_length(self,l):
-        old_bitrate = self.info.bitrate
-        self.info = FakeMutagen.FakeInfo(l, old_bitrate)

 # Some airtime attributes are special because they must use the mutagen object
 # itself to calculate the value that they need. The lambda associated with each
@@ -100,6 +89,7 @@ class Metadata(Loggable):
     # little bit messy. Some of the handling is in m.m.pure while the rest is
     # here. Also interface is not very consistent

+    # TODO : what is this shit? maybe get rid of it?
     @staticmethod
     def fix_title(path):
         # If we have no title in path we will format it
@@ -110,39 +100,6 @@ class Metadata(Loggable):
             m[u'title'] = new_title
             m.save()

-    @staticmethod
-    def airtime_dict(d):
-        """
-        Converts mutagen dictionary 'd' into airtime dictionary
-        """
-        temp_dict = {}
-        for m_key, m_val in d.iteritems():
-            # TODO : some files have multiple fields for the same metadata.
-            # genre is one example. In that case mutagen will return a list
-            # of values
-
-            if isinstance(m_val, list):
-                # TODO : does it make more sense to just skip the element in
-                # this case?
-                if len(m_val) == 0: assign_val = ''
-                else: assign_val = m_val[0]
-            else: assign_val = m_val
-
-            temp_dict[ m_key ] = assign_val
-        airtime_dictionary = {}
-        for muta_k, muta_v in temp_dict.iteritems():
-            # We must check if we can actually translate the mutagen key into
-            # an airtime key before doing the conversion
-            if muta_k in mutagen2airtime:
-                airtime_key = mutagen2airtime[muta_k]
-                # Apply truncation in the case where airtime_key is in our
-                # truncation table
-                muta_v = \
-                    truncate_to_length(muta_v, truncate_table[airtime_key])\
-                        if airtime_key in truncate_table else muta_v
-                airtime_dictionary[ airtime_key ] = muta_v
-        return airtime_dictionary

     @staticmethod
     def write_unsafe(path,md):
         """
@@ -157,6 +114,7 @@ class Metadata(Loggable):
             if airtime_k in airtime2mutagen:
                 # The unicode cast here is mostly for integers that need to be
                 # strings
+                if airtime_v is None: continue
                 try:
                     song_file[ airtime2mutagen[airtime_k] ] = unicode(airtime_v)
                 except (EasyMP4KeyError, EasyID3KeyError) as e:
@@ -170,44 +128,7 @@ class Metadata(Loggable):
         # Forcing the unicode through
         try : fpath = fpath.decode("utf-8")
         except : pass
+        self.__metadata = global_reader.read_mutagen(fpath)
-        if not mmp.file_playable(fpath): raise BadSongFile(fpath)
-
-        try : full_mutagen = mutagen.File(fpath, easy=True)
-        except Exception : raise BadSongFile(fpath)
-
-        self.path = fpath
-        if not os.path.exists(self.path):
-            self.logger.info("Attempting to read metadata of file \
-                    that does not exist. Setting metadata to {}")
-            self.__metadata = {}
-            return
-        # TODO : Simplify the way all of these rules are handled right now it's
-        # extremely unclear and needs to be refactored.
-        #if full_mutagen is None: raise BadSongFile(fpath)
-        if full_mutagen is None: full_mutagen = FakeMutagen(fpath)
-        self.__metadata = Metadata.airtime_dict(full_mutagen)
-        # Now we extra the special values that are calculated from the mutagen
-        # object itself:
-
-        if mmp.extension(fpath) == 'wav':
-            full_mutagen.set_length(mmp.read_wave_duration(fpath))
-
-        for special_key,f in airtime_special.iteritems():
-            try:
-                new_val = f(full_mutagen)
-                if new_val is not None:
-                    self.__metadata[special_key] = new_val
-            except Exception as e:
-                self.logger.info("Could not get special key %s for %s" %
-                        (special_key, fpath))
-                self.logger.info(str(e))
-        # Finally, we "normalize" all the metadata here:
-        self.__metadata = mmp.normalized_metadata(self.__metadata, fpath)
-        # Now we must load the md5:
-        # TODO : perhaps we shouldn't hard code how many bytes we're reading
-        # from the file?
-        self.__metadata['MDATA_KEY_MD5'] = mmp.file_md5(fpath,max_length=100)

     def is_recorded(self):
         """
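This hunk is the payoff of the whole commit: roughly forty lines of ad-hoc reading, special-casing and normalization collapse into one call to the element-based reader. Note that MDATA_KEY_MD5 is no longer populated here, which is why the corresponding assertion disappears from tests/test_metadata.py further down. The new read path, as exercised by tests/test_emf.py below (the song path is invented for the example):

    from media.metadata.process import global_reader
    import media.metadata.definitions as defs
    defs.load_definitions()

    md = global_reader.read_mutagen(u'/path/to/song.mp3')
    # The old facade still works and now delegates to the same reader:
    #     md == Metadata(u'/path/to/song.mp3').extract()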
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 import media.monitor.pure as mmp
 import media.monitor.owners as owners
 from media.monitor.handler import ReportHandler
@@ -11,14 +10,12 @@ from os.path import dirname
 import os.path

 class Organizer(ReportHandler,Loggable):
-    """
-    Organizer is responsible to to listening to OrganizeListener events
-    and committing the appropriate changes to the filesystem. It does
-    not in any interact with WatchSyncer's even when the the WatchSyncer
-    is a "storage directory". The "storage" directory picks up all of
-    its events through pyinotify. (These events are fed to it through
-    StoreWatchListener)
-    """
+    """ Organizer is responsible to to listening to OrganizeListener
+    events and committing the appropriate changes to the filesystem.
+    It does not in any interact with WatchSyncer's even when the the
+    WatchSyncer is a "storage directory". The "storage" directory picks
+    up all of its events through pyinotify. (These events are fed to it
+    through StoreWatchListener) """

     # Commented out making this class a singleton because it's just a band aid
     # for the real issue. The real issue being making multiple Organizer
@@ -42,11 +39,9 @@ class Organizer(ReportHandler,Loggable):
         super(Organizer, self).__init__(signal=self.channel, weak=False)

     def handle(self, sender, event):
-        """
-        Intercept events where a new file has been added to the organize
-        directory and place it in the correct path (starting with
-        self.target_path)
-        """
+        """ Intercept events where a new file has been added to the
+        organize directory and place it in the correct path (starting
+        with self.target_path) """
         # Only handle this event type
         assert isinstance(event, OrganizeFile), \
             "Organizer can only handle OrganizeFile events.Given '%s'" % event
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 import copy
+from subprocess import Popen, PIPE
 import subprocess
 import os
 import math
@@ -21,7 +22,6 @@ from configobj import ConfigObj

 from media.monitor.exceptions import FailedToSetLocale, FailedToCreateDir

-#supported_extensions = [u"mp3", u"ogg", u"oga"]
 supported_extensions = [u"mp3", u"ogg", u"oga", u"flac", u"wav",
         u'm4a', u'mp4']

@@ -66,7 +66,6 @@ class IncludeOnly(object):
             return func(moi, event, *args, **kwargs)
         return _wrap

-
 def partition(f, alist):
     """
     Partition is very similar to filter except that it also returns the
@@ -92,14 +91,13 @@ def is_file_supported(path):
 # TODO : In the future we would like a better way to find out whether a show
 # has been recorded
 def is_airtime_recorded(md):
-    """
-    Takes a metadata dictionary and returns True if it belongs to a file that
-    was recorded by Airtime.
-    """
+    """ Takes a metadata dictionary and returns True if it belongs to a
+    file that was recorded by Airtime. """
     if not 'MDATA_KEY_CREATOR' in md: return False
     return md['MDATA_KEY_CREATOR'] == u'Airtime Show Recorder'

 def read_wave_duration(path):
+    """ Read the length of .wav file (mutagen does not handle this) """
     with contextlib.closing(wave.open(path,'r')) as f:
         frames = f.getnframes()
         rate = f.getframerate()
@@ -107,9 +105,7 @@ def read_wave_duration(path):
     return duration

 def clean_empty_dirs(path):
-    """
-    walks path and deletes every empty directory it finds
-    """
+    """ walks path and deletes every empty directory it finds """
     # TODO : test this function
     if path.endswith('/'): clean_empty_dirs(path[0:-1])
     else:
@@ -154,22 +150,25 @@ def no_extension_basename(path):
     else: return '.'.join(base.split(".")[0:-1])

 def walk_supported(directory, clean_empties=False):
-    """
-    A small generator wrapper around os.walk to only give us files that support
-    the extensions we are considering. When clean_empties is True we
-    recursively delete empty directories left over in directory after the walk.
-    """
+    """ A small generator wrapper around os.walk to only give us files
+    that support the extensions we are considering. When clean_empties
+    is True we recursively delete empty directories left over in
+    directory after the walk. """
     for root, dirs, files in os.walk(directory):
         full_paths = ( os.path.join(root, name) for name in files
                 if is_file_supported(name) )
         for fp in full_paths: yield fp
     if clean_empties: clean_empty_dirs(directory)

+def file_locked(path):
+    cmd = "lsof %s" % path
+    f = Popen(cmd, shell=True, stdout=PIPE).stdout
+    return bool(f.readlines())
+
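The new file_locked helper shells out to lsof(1): any line of output means some process still holds the file open, so the organize bootstrap skips it (see flush_events in listeners.py above). Two caveats worth noting: the path is interpolated into a shell command unquoted, so names containing spaces or shell metacharacters will misbehave, and lsof must be installed. Usage sketch with an invented path:

    import media.monitor.pure as mmp

    for f in mmp.walk_supported(u'/srv/airtime/stor/organize',
            clean_empties=True):
        if not mmp.file_locked(f):
            pass  # safe to dispatch an OrganizeFile event for f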
 def magic_move(old, new, after_dir_make=lambda : None):
-    """
-    Moves path old to new and constructs the necessary to directories for new
-    along the way
-    """
+    """ Moves path old to new and constructs the necessary to
+    directories for new along the way """
     new_dir = os.path.dirname(new)
     if not os.path.exists(new_dir): os.makedirs(new_dir)
     # We need this crusty hack because anytime a directory is created we must
@@ -179,18 +178,15 @@ def magic_move(old, new, after_dir_make=lambda : None):
     shutil.move(old,new)

 def move_to_dir(dir_path,file_path):
-    """
-    moves a file at file_path into dir_path/basename(filename)
-    """
+    """ moves a file at file_path into dir_path/basename(filename) """
     bs = os.path.basename(file_path)
     magic_move(file_path, os.path.join(dir_path, bs))

 def apply_rules_dict(d, rules):
-    """
-    Consumes a dictionary of rules that maps some keys to lambdas which it
-    applies to every matching element in d and returns a new dictionary with
-    the rules applied. If a rule returns none then it's not applied
-    """
+    """ Consumes a dictionary of rules that maps some keys to lambdas
+    which it applies to every matching element in d and returns a new
+    dictionary with the rules applied. If a rule returns none then it's
+    not applied """
     new_d = copy.deepcopy(d)
     for k, rule in rules.iteritems():
         if k in d:
@@ -205,17 +201,14 @@ def default_to_f(dictionary, keys, default, condition):
     return new_d

 def default_to(dictionary, keys, default):
-    """
-    Checks if the list of keys 'keys' exists in 'dictionary'. If not then it
-    returns a new dictionary with all those missing keys defaults to 'default'
-    """
+    """ Checks if the list of keys 'keys' exists in 'dictionary'. If
+    not then it returns a new dictionary with all those missing keys
+    defaults to 'default' """
     cnd = lambda dictionary, key: key not in dictionary
     return default_to_f(dictionary, keys, default, cnd)

 def remove_whitespace(dictionary):
-    """
-    Remove values that empty whitespace in the dictionary
-    """
+    """ Remove values that empty whitespace in the dictionary """
     nd = copy.deepcopy(dictionary)
     bad_keys = []
     for k,v in nd.iteritems():
@@ -227,6 +220,7 @@ def remove_whitespace(dictionary):
     return nd

 def parse_int(s):
+    # TODO : this function isn't used anywhere yet but it may useful for emf
     """
     Tries very hard to get some sort of integer result from s. Defaults to 0
     when it fails
@@ -242,53 +236,6 @@ def parse_int(s):
     try : return str(reduce(op.add, takewhile(lambda x: x.isdigit(), s)))
     except: return None

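The docstring cleanups above double as a reminder of how these small dict helpers compose; the removed normalized_metadata below was essentially a pipeline of them. A short, self-contained example of that composition (values invented):

    import media.monitor.pure as mmp

    md = {'MDATA_KEY_BPM': u'120.5-extra'}
    # truncate bpm to 8 chars, as the old format_rules did
    md = mmp.apply_rules_dict(md, {'MDATA_KEY_BPM': lambda x: x[0:8]})
    # fill in the default file type for any missing keys
    md = mmp.default_to(dictionary=md, keys=['MDATA_KEY_FTYPE'],
            default=u'audioclip')
    # md == {'MDATA_KEY_BPM': u'120.5-ex', 'MDATA_KEY_FTYPE': u'audioclip'}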
-def normalized_metadata(md, original_path):
-    """
-    consumes a dictionary of metadata and returns a new dictionary with the
-    formatted meta data. We also consume original_path because we must set
-    MDATA_KEY_CREATOR based on in it sometimes
-    """
-    new_md = copy.deepcopy(md)
-    # replace all slashes with dashes
-    #for k,v in new_md.iteritems(): new_md[k] = unicode(v).replace('/','-')
-    # Specific rules that are applied in a per attribute basis
-    format_rules = {
-        'MDATA_KEY_TRACKNUMBER' : parse_int,
-        'MDATA_KEY_FILEPATH'    : lambda x: os.path.normpath(x),
-        'MDATA_KEY_BPM'         : lambda x: x[0:8],
-        'MDATA_KEY_MIME'        : lambda x: x.replace('audio/vorbis','audio/ogg'),
-        # Whenever 0 is reported we change it to empty
-        #'MDATA_KEY_BITRATE'     : lambda x: '' if str(x) == '0' else x
-    }
-
-    new_md = remove_whitespace(new_md) # remove whitespace fields
-    # Format all the fields in format_rules
-    new_md = apply_rules_dict(new_md, format_rules)
-    # set filetype to audioclip by default
-    new_md = default_to(dictionary=new_md, keys=['MDATA_KEY_FTYPE'],
-                        default=u'audioclip')
-
-    # Try to parse bpm but delete the whole key if that fails
-    if 'MDATA_KEY_BPM' in new_md:
-        new_md['MDATA_KEY_BPM'] = parse_int(new_md['MDATA_KEY_BPM'])
-        if new_md['MDATA_KEY_BPM'] is None:
-            del new_md['MDATA_KEY_BPM']
-
-    if not is_airtime_recorded(new_md):
-        # Read title from filename if it does not exist
-        default_title = no_extension_basename(original_path)
-        default_title = re.sub(r'__\d+\.',u'.', default_title)
-        if re.match(".+-%s-.+$" % unicode_unknown, default_title):
-            default_title = u''
-        new_md = default_to(dictionary=new_md, keys=['MDATA_KEY_TITLE'],
-                            default=default_title)
-        new_md['MDATA_KEY_TITLE'] = re.sub(r'-\d+kbps$', u'',
-                new_md['MDATA_KEY_TITLE'])
-
-    # TODO : wtf is this for again?
-    new_md['MDATA_KEY_TITLE'] = re.sub(r'-?%s-?' % unicode_unknown, u'',
-            new_md['MDATA_KEY_TITLE'])
-    return new_md

 def organized_path(old_path, root_path, orig_md):
     """
@@ -348,10 +295,9 @@ def organized_path(old_path, root_path, orig_md):
 # TODO : Get rid of this function and every one of its uses. We no longer use
 # the md5 signature of a song for anything
 def file_md5(path,max_length=100):
-    """
-    Get md5 of file path (if it exists). Use only max_length characters to save
-    time and memory. Pass max_length=-1 to read the whole file (like in mm1)
-    """
+    """ Get md5 of file path (if it exists). Use only max_length
+    characters to save time and memory. Pass max_length=-1 to read the
+    whole file (like in mm1) """
     if os.path.exists(path):
         with open(path, 'rb') as f:
             m = hashlib.md5()
@@ -367,16 +313,12 @@ def encode_to(obj, encoding='utf-8'):
     return obj

 def convert_dict_value_to_utf8(md):
-    """
-    formats a dictionary to send as a request to api client
-    """
+    """ formats a dictionary to send as a request to api client """
     return dict([(item[0], encode_to(item[1], "utf-8")) for item in md.items()])

 def get_system_locale(locale_path='/etc/default/locale'):
-    """
-    Returns the configuration object for the system's default locale. Normally
-    requires root access.
-    """
+    """ Returns the configuration object for the system's default
+    locale. Normally requires root access. """
     if os.path.exists(locale_path):
         try:
             config = ConfigObj(locale_path)
@@ -386,9 +328,7 @@ def get_system_locale(locale_path='/etc/default/locale'):
             permissions issue?" % locale_path)

 def configure_locale(config):
-    """
-    sets the locale according to the system's locale.
-    """
+    """ sets the locale according to the system's locale. """
     current_locale = locale.getlocale()
     if current_locale[1] is None:
         default_locale = locale.getdefaultlocale()
@@ -405,27 +345,21 @@ def configure_locale(config):

 def fondle(path,times=None):
     # TODO : write unit tests for this
-    """
-    touch a file to change the last modified date. Beware of calling this
-    function on the same file from multiple threads.
-    """
+    """ touch a file to change the last modified date. Beware of calling
+    this function on the same file from multiple threads. """
     with file(path, 'a'): os.utime(path, times)

 def last_modified(path):
-    """
-    return the time of the last time mm2 was ran. path refers to the index file
-    whose date modified attribute contains this information. In the case when
-    the file does not exist we set this time 0 so that any files on the
-    filesystem were modified after it
-    """
+    """ return the time of the last time mm2 was ran. path refers to the
+    index file whose date modified attribute contains this information.
+    In the case when the file does not exist we set this time 0 so that
+    any files on the filesystem were modified after it """
     if os.path.exists(path): return os.path.getmtime(path)
     else: return 0

 def expand_storage(store):
-    """
-    A storage directory usually consists of 4 different subdirectories. This
-    function returns their paths
-    """
+    """ A storage directory usually consists of 4 different
+    subdirectories. This function returns their paths """
     store = os.path.normpath(store)
     return {
         'organize' : os.path.join(store, 'organize'),
@@ -435,10 +369,8 @@ def expand_storage(store):
     }

 def create_dir(path):
-    """
-    will try and make sure that path exists at all costs. raises an exception
-    if it fails at this task.
-    """
+    """ will try and make sure that path exists at all costs. raises an
+    exception if it fails at this task. """
     if not os.path.exists(path):
         try : os.makedirs(path)
         except Exception as e : raise FailedToCreateDir(path, e)
@@ -456,11 +388,10 @@ def sub_path(directory,f):
     return common == normalized

 def owner_id(original_path):
-    """
-    Given 'original_path' return the file name of the of 'identifier' file.
-    return the id that is contained in it. If no file is found or nothing is
-    read then -1 is returned. File is deleted after the number has been read
-    """
+    """ Given 'original_path' return the file name of the of
+    'identifier' file. return the id that is contained in it. If no file
+    is found or nothing is read then -1 is returned. File is deleted
+    after the number has been read """
     fname = "%s.identifier" % original_path
     owner_id = -1
     try:
@@ -476,9 +407,8 @@ def owner_id(original_path):
     return owner_id

 def file_playable(pathname):
-    """
-    Returns True if 'pathname' is playable by liquidsoap. False otherwise.
-    """
+    """ Returns True if 'pathname' is playable by liquidsoap. False
+    otherwise. """
     # when there is an single apostrophe inside of a string quoted by
     # apostrophes, we can only escape it by replace that apostrophe with
     # '\''. This breaks the string into two, and inserts an escaped
@@ -514,18 +444,14 @@ def toposort(data):
     assert not data, "A cyclic dependency exists amongst %r" % data

 def truncate_to_length(item, length):
-    """
-    Truncates 'item' to 'length'
-    """
+    """ Truncates 'item' to 'length' """
     if isinstance(item, int): item = str(item)
     if isinstance(item, basestring):
         if len(item) > length: return item[0:length]
         else: return item

 def format_length(mutagen_length):
-    """
-    Convert mutagen length to airtime length
-    """
+    """ Convert mutagen length to airtime length """
     t = float(mutagen_length)
     h = int(math.floor(t / 3600))
     t = t % 3600
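Carrying the arithmetic in format_length to completion makes the intent obvious; the string formatting itself sits outside this hunk, so only the split is shown (worked example):

    import math

    t = float(123456)              # mutagen length, in seconds
    h = int(math.floor(t / 3600))  # 34
    t = t % 3600                   # 1056.0
    m = int(math.floor(t / 60))    # 17
    s = t % 60                     # 36.0  -> 34:17:36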
60
python_apps/media-monitor2/media/monitor/request.py
Normal file
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+
+import threading
+
+from media.monitor.exceptions import BadSongFile
+from media.monitor.log import Loggable
+import api_clients.api_client as ac
+
+class ThreadedRequestSync(threading.Thread, Loggable):
+    def __init__(self, rs):
+        threading.Thread.__init__(self)
+        self.rs = rs
+        self.daemon = True
+        self.start()
+
+    def run(self):
+        self.rs.run_request()
+
+class RequestSync(Loggable):
+    """ This class is responsible for making the api call to send a
+    request to airtime. In the process it packs the requests and retries
+    for some number of times """
+    @classmethod
+    def create_with_api_client(cls, watcher, requests):
+        apiclient = ac.AirtimeApiClient.create_right_config()
+        self = cls(watcher, requests, apiclient)
+        return self
+
+    def __init__(self, watcher, requests, apiclient):
+        self.watcher = watcher
+        self.requests = requests
+        self.apiclient = apiclient
+
+    def run_request(self):
+        self.logger.info("Attempting request with %d items." %
+                len(self.requests))
+        packed_requests = []
+        for request_event in self.requests:
+            try:
+                for request in request_event.safe_pack():
+                    if isinstance(request, BadSongFile):
+                        self.logger.info("Bad song file: '%s'" % request.path)
+                    else: packed_requests.append(request)
+            except Exception as e:
+                self.unexpected_exception( e )
+                if hasattr(request_event, 'path'):
+                    self.logger.info("Possibly related to path: '%s'" %
+                            request_event.path)
+        try: self.apiclient.send_media_monitor_requests( packed_requests )
+        # most likely we did not get json response as we expected
+        except ValueError:
+            self.logger.info("ApiController.php probably crashed, we \
+                    diagnose this from the fact that it did not return \
+                    valid json")
+            self.logger.info("Trying again after %f seconds" %
+                    self.request_wait)
+        except Exception as e: self.unexpected_exception(e)
+        else: self.logger.info("Request was successful")
+        self.watcher.flag_done() # poor man's condition variable
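Splitting RequestSync away from threading (ThreadedRequestSync now owns the thread, RequestSync owns the logic, and the api client is injected through create_with_api_client) makes the request path testable without spawning threads, which tests/test_requestsync.py below takes advantage of. One loose end: the ValueError branch still logs self.request_wait, an attribute the new __init__ no longer sets, so that path would raise AttributeError if it were ever hit. A synchronous, test-style usage sketch:

    from mock import MagicMock
    from media.monitor.request import RequestSync

    watcher, apiclient = MagicMock(), MagicMock()
    rs = RequestSync(watcher, requests=[], apiclient=apiclient)
    rs.run_request()  # runs on the calling thread, no Thread involved
    apiclient.send_media_monitor_requests.assert_called_with([])
    watcher.flag_done.assert_called_with()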
@@ -53,11 +53,11 @@ class AirtimeDB(Loggable):
         """
         return self.id_to_dir[ dir_id ]

-    def storage_path(self): return self.base_storage
-    def organize_path(self): return self.storage_paths['organize']
-    def problem_path(self): return self.storage_paths['problem_files']
-    def import_path(self): return self.storage_paths['imported']
-    def recorded_path(self): return self.storage_paths['recorded']
+    def storage_path(self)  : return self.base_storage
+    def organize_path(self) : return self.storage_paths['organize']
+    def problem_path(self)  : return self.storage_paths['problem_files']
+    def import_path(self)   : return self.storage_paths['imported']
+    def recorded_path(self) : return self.storage_paths['recorded']

     def list_watched(self):
         """
@@ -6,69 +6,9 @@ import copy
 from media.monitor.handler import ReportHandler
 from media.monitor.log import Loggable
 from media.monitor.exceptions import BadSongFile
-from media.monitor.pure import LazyProperty
 from media.monitor.eventcontractor import EventContractor
 from media.monitor.events import EventProxy
-import api_clients.api_client as ac
+from media.monitor.request import ThreadedRequestSync, RequestSync

-
-class RequestSync(threading.Thread,Loggable):
-    """
-    This class is responsible for making the api call to send a request
-    to airtime. In the process it packs the requests and retries for
-    some number of times
-    """
-    def __init__(self, watcher, requests):
-        threading.Thread.__init__(self)
-        self.watcher = watcher
-        self.requests = requests
-        self.retries = 1
-        self.request_wait = 0.3
-
-    @LazyProperty
-    def apiclient(self):
-        return ac.AirtimeApiClient.create_right_config()
-
-    def run(self):
-        self.logger.info("Attempting request with %d items." %
-                len(self.requests))
-        # Note that we must attach the appropriate mode to every
-        # response. Also Not forget to attach the 'is_record' to any
-        # requests that are related to recorded shows
-        # TODO : recorded shows aren't flagged right
-        # Is this retry shit even necessary? Consider getting rid of this.
-        packed_requests = []
-        for request_event in self.requests:
-            try:
-                for request in request_event.safe_pack():
-                    if isinstance(request, BadSongFile):
-                        self.logger.info("Bad song file: '%s'" % request.path)
-                    else: packed_requests.append(request)
-            except Exception as e:
-                self.unexpected_exception( e )
-                if hasattr(request_event, 'path'):
-                    self.logger.info("Possibly related to path: '%s'" %
-                            request_event.path)
-        def make_req():
-            self.apiclient.send_media_monitor_requests( packed_requests )
-        for try_index in range(0,self.retries):
-            try: make_req()
-            # most likely we did not get json response as we expected
-            except ValueError:
-                self.logger.info("ApiController.php probably crashed, we \
-                        diagnose this from the fact that it did not return \
-                        valid json")
-                self.logger.info("Trying again after %f seconds" %
-                        self.request_wait)
-                time.sleep( self.request_wait )
-            except Exception as e: self.unexpected_exception(e)
-            else:
-                self.logger.info("Request worked on the '%d' try" %
-                        (try_index + 1))
-                break
-        else: self.logger.info("Failed to send request after '%d' tries..." %
-                self.retries)
-        self.watcher.flag_done()

 class TimeoutWatcher(threading.Thread,Loggable):
     """
@@ -131,8 +71,7 @@ class WatchSyncer(ReportHandler,Loggable):
                 #self.push_queue( event )
             except BadSongFile as e:
                 self.fatal_exception("Received bas song file '%s'" % e.path, e)
-            except Exception as e:
-                self.unexpected_exception(e)
+            except Exception as e: self.unexpected_exception(e)
         else:
             self.logger.info("Received event that does not implement packing.\
                     Printing its representation:")
@@ -209,8 +148,8 @@ class WatchSyncer(ReportHandler,Loggable):
         requests = copy.copy(self.__queue)
         def launch_request():
             # Need shallow copy here
-            t = RequestSync(watcher=self, requests=requests)
-            t.start()
+            t = ThreadedRequestSync( RequestSync.create_with_api_client(
+                watcher=self, requests=requests) )
             self.__current_thread = t
         self.__requests.append(launch_request)
         self.__reset_queue()
@@ -218,7 +157,8 @@ class WatchSyncer(ReportHandler,Loggable):
     def __reset_queue(self): self.__queue = []

     def __del__(self):
-        # Ideally we would like to do a little more to ensure safe shutdown
+        #this destructor is completely untested and it's unclear whether
+        #it's even doing anything useful. consider removing it
         if self.events_in_queue():
             self.logger.warn("Terminating with events still in the queue...")
         if self.requests_in_queue():
@@ -60,7 +60,8 @@ def main(global_config, api_client_config, log_config,
     try:
         with open(config['index_path'], 'w') as f: f.write(" ")
     except Exception as e:
-        log.info("Failed to create index file with exception: %s" % str(e))
+        log.info("Failed to create index file with exception: %s" \
+                % str(e))
     else:
         log.info("Created index file, reloading configuration:")
         main( global_config, api_client_config, log_config,
@@ -19,8 +19,8 @@ class TestApiClient(unittest.TestCase):
         self.apc.register_component("api-client-tester")
         # All of the following requests should error out in some way
         self.bad_requests = [
-            { 'mode' : 'dang it', 'is_record' : 0 },
-            { 'mode' : 'damn frank', 'is_record' : 1 },
+            { 'mode' : 'foo', 'is_record' : 0 },
+            { 'mode' : 'bar', 'is_record' : 1 },
             { 'no_mode' : 'at_all' }, ]

     def test_bad_requests(self):
31
python_apps/media-monitor2/tests/test_emf.py
Normal file
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+import unittest
+#from pprint import pprint as pp
+
+from media.metadata.process import global_reader
+from media.monitor.metadata import Metadata
+
+import media.metadata.definitions as defs
+defs.load_definitions()
+
+class TestMMP(unittest.TestCase):
+
+    def setUp(self):
+        self.maxDiff = None
+
+    def metadatas(self,f):
+        return global_reader.read_mutagen(f), Metadata(f).extract()
+
+    def test_old_metadata(self):
+        path = "/home/rudi/music/Nightingale.mp3"
+        m = global_reader.read_mutagen(path)
+        self.assertTrue( len(m) > 0 )
+        n = Metadata(path)
+        self.assertEqual(n.extract(), m)
+
+    def test_recorded(self):
+        recorded_file = "./15:15:00-Untitled Show-256kbps.ogg"
+        emf, old = self.metadatas(recorded_file)
+        self.assertEqual(emf, old)
+
+if __name__ == '__main__': unittest.main()
@@ -26,7 +26,6 @@ class TestMetadata(unittest.TestCase):
             i += 1
             print("Sample metadata: '%s'" % md)
         self.assertTrue( len( md.keys() ) > 0 )
-        self.assertTrue( 'MDATA_KEY_MD5' in md )
         utf8 = md_full.utf8()
         for k,v in md.iteritems():
             if hasattr(utf8[k], 'decode'):
@@ -42,10 +41,4 @@ class TestMetadata(unittest.TestCase):
         x1 = 123456
         print("Formatting '%s' to '%s'" % (x1, mmm.format_length(x1)))

-    def test_truncate_to_length(self):
-        s1 = "testing with non string literal"
-        s2 = u"testing with unicode literal"
-        self.assertEqual( len(mmm.truncate_to_length(s1, 5)), 5)
-        self.assertEqual( len(mmm.truncate_to_length(s2, 8)), 8)
-
 if __name__ == '__main__': unittest.main()
44
python_apps/media-monitor2/tests/test_metadata_def.py
Normal file
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+import unittest
+
+import media.metadata.process as md
+
+class TestMetadataDef(unittest.TestCase):
+    def test_simple(self):
+
+        with md.metadata('MDATA_TESTING') as t:
+            t.optional(True)
+            t.depends('ONE','TWO')
+            t.default('unknown')
+            t.translate(lambda kw: kw['ONE'] + kw['TWO'])
+
+        h = { 'ONE' : "testing", 'TWO' : "123" }
+        result = md.global_reader.read('test_path',h)
+        self.assertTrue( 'MDATA_TESTING' in result )
+        self.assertEqual( result['MDATA_TESTING'], 'testing123' )
+        h1 = { 'ONE' : 'big testing', 'two' : 'nothing' }
+        result1 = md.global_reader.read('bs path', h1)
+        self.assertEqual( result1['MDATA_TESTING'], 'unknown' )
+
+    def test_topo(self):
+        with md.metadata('MDATA_TESTING') as t:
+            t.depends('shen','sheni')
+            t.default('megitzda')
+            t.translate(lambda kw: kw['shen'] + kw['sheni'])
+
+        with md.metadata('shen') as t:
+            t.default('vaxo')
+
+        with md.metadata('sheni') as t:
+            t.default('gio')
+
+        with md.metadata('vaxo') as t:
+            t.depends('shevetsi')
+
+        v = md.global_reader.read('bs mang', {})
+        self.assertEqual(v['MDATA_TESTING'], 'vaxogio')
+        self.assertTrue( 'vaxo' not in v )
+
+        md.global_reader.clear()
+
+if __name__ == '__main__': unittest.main()
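These tests double as the only documentation of the definition DSL: each with-block configures one element, registration happens when the block exits, and global_reader.read then resolves elements in dependency order (hence the toposort helper in pure.py). A plausible shape for the context manager, hedged as an assumption since its implementation is not part of this diff:

    from contextlib import contextmanager

    @contextmanager
    def metadata(name):
        # assumed: MetadataElement and global_reader as defined in the
        # metadata hunks earlier in this commit
        elem = MetadataElement(name)
        yield elem                         # the with-block mutates it
        global_reader.register_metadata(elem)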
@ -2,7 +2,6 @@
|
||||||
import unittest
|
import unittest
|
||||||
import os
|
import os
|
||||||
import media.monitor.pure as mmp
|
import media.monitor.pure as mmp
|
||||||
from media.monitor.metadata import Metadata
|
|
||||||
|
|
||||||
class TestMMP(unittest.TestCase):
|
class TestMMP(unittest.TestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
|
@@ -34,68 +33,6 @@ class TestMMP(unittest.TestCase):
         sd = mmp.default_to(dictionary=sd, keys=def_keys, default='DEF')
         for k in def_keys: self.assertEqual( sd[k], 'DEF' )
 
-    def test_normalized_metadata(self):
-        #Recorded show test first
-        orig = Metadata.airtime_dict({
-            'date'        : [u'2012-08-21'],
-            'tracknumber' : [u'2'],
-            'title'       : [u'record-2012-08-21-11:29:00'],
-            'artist'      : [u'Airtime Show Recorder']
-        })
-        orga = Metadata.airtime_dict({
-            'date'        : [u'2012-08-21'],
-            'tracknumber' : [u'2'],
-            'artist'      : [u'Airtime Show Recorder'],
-            'title'       : [u'record-2012-08-21-11:29:00']
-        })
-        orga['MDATA_KEY_FTYPE'] = u'audioclip'
-        orig['MDATA_KEY_BITRATE'] = u'256000'
-        orga['MDATA_KEY_BITRATE'] = u'256000'
-        old_path = "/home/rudi/recorded/2012-08-21-11:29:00.ogg"
-        normalized = mmp.normalized_metadata(orig, old_path)
-        normalized['MDATA_KEY_BITRATE'] = u'256000'
-
-        self.assertEqual( orga, normalized )
-
-        organized_base_name = "11:29:00-record-256kbps.ogg"
-        base = "/srv/airtime/stor/"
-        organized_path = mmp.organized_path(old_path,base, normalized)
-        self.assertEqual(os.path.basename(organized_path), organized_base_name)
-
-    def test_normalized_metadata2(self):
-        """
-        cc-4305
-        """
-        orig = Metadata.airtime_dict({
-            'date'        : [u'2012-08-27'],
-            'tracknumber' : [u'3'],
-            'title'       : [u'18-11-00-Untitled Show'],
-            'artist'      : [u'Airtime Show Recorder']
-        })
-        old_path = "/home/rudi/recorded/doesnt_really_matter.ogg"
-        normalized = mmp.normalized_metadata(orig, old_path)
-        normalized['MDATA_KEY_BITRATE'] = u'256000'
-        opath = mmp.organized_path(old_path, "/srv/airtime/stor/",
-                normalized)
-        # TODO : add a better test than this...
-        self.assertTrue( len(opath) > 0 )
-
-    def test_normalized_metadata3(self):
-        """
-        Test the case where the metadata is empty
-        """
-        orig = Metadata.airtime_dict({})
-        paths_unknown_title = [
-            ("/testin/unknown-unknown-unknown.mp3",""),
-            ("/testin/01-unknown-123kbps.mp3",""),
-            ("/testin/02-unknown-140kbps.mp3",""),
-            ("/testin/unknown-unknown-123kbps.mp3",""),
-            ("/testin/unknown-bibimbop-unknown.mp3","bibimbop"),
-        ]
-        for p,res in paths_unknown_title:
-            normalized = mmp.normalized_metadata(orig, p)
-            self.assertEqual( normalized['MDATA_KEY_TITLE'], res)
-
     def test_file_md5(self):
         p = os.path.realpath(__file__)
         m1 = mmp.file_md5(p)
@@ -116,6 +53,13 @@ class TestMMP(unittest.TestCase):
         self.assertEqual( mmp.parse_int("123asf"), "123" )
         self.assertEqual( mmp.parse_int("asdf"), None )
 
+    def test_truncate_to_length(self):
+        s1 = "testing with non string literal"
+        s2 = u"testing with unicode literal"
+        self.assertEqual( len(mmp.truncate_to_length(s1, 5)), 5)
+        self.assertEqual( len(mmp.truncate_to_length(s2, 8)), 8)
+
+
     def test_owner_id(self):
         start_path = "testing.mp3"
         id_path    = "testing.mp3.identifier"
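truncate_to_length itself does not appear in this commit; judging from the assertions just moved here, its contract is simply that the result is at most the requested number of characters for both byte strings and unicode strings. A minimal sketch consistent with the test (not Airtime's actual implementation):

    # Hypothetical sketch matching the moved test; not the real mmp.truncate_to_length.
    def truncate_to_length(item, length):
        # basestring covers both str and unicode in this Python 2 era codebase;
        # non-string inputs are assumed to be stringified first.
        if not isinstance(item, basestring):
            item = str(item)
        return item[0:length]   # slicing never exceeds `length`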
python_apps/media-monitor2/tests/test_requestsync.py (new file, 48 lines)
@@ -0,0 +1,48 @@
+import unittest
+from mock import MagicMock
+
+from media.monitor.request import RequestSync
+
+class TestRequestSync(unittest.TestCase):
+
+    def apc_mock(self):
+        fake_apc = MagicMock()
+        fake_apc.send_media_monitor_requests = MagicMock()
+        return fake_apc
+
+    def watcher_mock(self):
+        fake_watcher = MagicMock()
+        fake_watcher.flag_done = MagicMock()
+        return fake_watcher
+
+    def request_mock(self):
+        fake_request = MagicMock()
+        fake_request.safe_pack = MagicMock(return_value=[])
+        return fake_request
+
+    def test_send_media_monitor(self):
+        fake_apc = self.apc_mock()
+        fake_requests = [ self.request_mock() for x in range(1,5) ]
+        fake_watcher = self.watcher_mock()
+        rs = RequestSync(fake_watcher, fake_requests, fake_apc)
+        rs.run_request()
+        self.assertEquals(fake_apc.send_media_monitor_requests.call_count, 1)
+
+    def test_flag_done(self):
+        fake_apc = self.apc_mock()
+        fake_requests = [ self.request_mock() for x in range(1,5) ]
+        fake_watcher = self.watcher_mock()
+        rs = RequestSync(fake_watcher, fake_requests, fake_apc)
+        rs.run_request()
+        self.assertEquals(fake_watcher.flag_done.call_count, 1)
+
+    def test_safe_pack(self):
+        fake_apc = self.apc_mock()
+        fake_requests = [ self.request_mock() for x in range(1,5) ]
+        fake_watcher = self.watcher_mock()
+        rs = RequestSync(fake_watcher, fake_requests, fake_apc)
+        rs.run_request()
+        for req in fake_requests:
+            self.assertEquals(req.safe_pack.call_count, 1)
+
+if __name__ == '__main__': unittest.main()
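These tests pin RequestSync's behaviour entirely through MagicMock call counts: run_request must call safe_pack once per request, push the batch through the API client exactly once, and finally flag the watcher. A minimal object that would satisfy all three tests looks like the sketch below; the real media.monitor.request.RequestSync is not reproduced here.

    # Minimal sketch of an object that would pass the three tests above.
    # The actual RequestSync in media.monitor.request may differ.
    class RequestSyncSketch(object):
        def __init__(self, watcher, requests, apc):
            self.watcher, self.requests, self.apc = watcher, requests, apc
        def run_request(self):
            packed = []
            for request in self.requests:
                packed.extend(request.safe_pack())        # one safe_pack per request
            self.apc.send_media_monitor_requests(packed)  # exactly one API call
            self.watcher.flag_done()                      # exactly one completion flag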
@@ -1,314 +0,0 @@
-# These operators need to be updated..
-
-
-# Stream data from mplayer
-# @category Source / Input
-# @param s data URI.
-# @param ~restart restart on exit.
-# @param ~restart_on_error restart on exit with error.
-# @param ~buffer Duration of the pre-buffered data.
-# @param ~max Maximum duration of the buffered data.
-def input.mplayer(~id="input.mplayer",
-                  ~restart=true,~restart_on_error=false,
-                  ~buffer=0.2,~max=10.,s) =
-  input.external(id=id,restart=restart,
-                 restart_on_error=restart_on_error,
-                 buffer=buffer,max=max,
-                 "mplayer -really-quiet -ao pcm:file=/dev/stdout \
-                  -vc null -vo null #{quote(s)} 2>/dev/null")
-end
-
-
-# Output the stream using aplay.
-# Using this turns "root.sync" to false
-# since aplay will do the synchronisation
-# @category Source / Output
-# @param ~id Output's ID
-# @param ~device Alsa pcm device name
-# @param ~restart_on_crash Restart external process on crash. If false, liquidsoap will stop.
-# @param ~fallible Allow the child source to fail, in which case the output will be (temporarily) stopped.
-# @param ~on_start Callback executed when outputting starts.
-# @param ~on_stop Callback executed when outputting stops.
-# @param s Source to play
-def output.aplay(~id="output.aplay",~device="default",
-                 ~fallible=false,~on_start={()},~on_stop={()},
-                 ~restart_on_crash=false,s)
-  def aplay_p(m) =
-    "aplay -D #{device}"
-  end
-  log(label=id,level=3,"Setting root.sync to false")
-  set("root.sync",false)
-  output.pipe.external(id=id,
-                       fallible=fallible,on_start=on_start,on_stop=on_stop,
-                       restart_on_crash=restart_on_crash,
-                       restart_on_new_track=false,
-                       process=aplay_p,s)
-end
-
-%ifdef output.icecast.external
-# Output to icecast using the lame command line encoder.
-# @category Source / Output
-# @param ~id Output's ID
-# @param ~start Start output threads on operator initialization.
-# @param ~restart Restart output after a failure. By default, liquidsoap will stop if the output failed.
-# @param ~restart_delay Delay, in seconds, before attempting new connection, if restart is enabled.
-# @param ~restart_on_crash Restart external process on crash. If false, liquidsoap will stop.
-# @param ~restart_on_new_track Restart encoder upon new track.
-# @param ~restart_encoder_delay Restart the encoder after this delay, in seconds.
-# @param ~user User for shout source connection. Useful only in special cases, like with per-mountpoint users.
-# @param ~lame The lame binary
-# @param ~bitrate Encoder bitrate
-# @param ~swap Swap audio samples. Depends on local machine's endianess and lame's version. Test this parameter if you experience garbaged mp3 audio data. On intel 32 and 64 architectures, the parameter should be "true" for lame version >= 3.98.
-# @param ~dumpfile Dump stream to file, for debugging purpose. Disabled if empty.
-# @param ~protocol Protocol of the streaming server: 'http' for Icecast, 'icy' for Shoutcast.
-# @param ~fallible Allow the child source to fail, in which case the output will be (temporarily) stopped.
-# @param ~on_start Callback executed when outputting starts.
-# @param ~on_stop Callback executed when outputting stops.
-# @param s The source to output
-def output.icecast.lame(
-       ~id="output.icecast.lame",~start=true,
-       ~restart=false,~restart_delay=3,
-       ~host="localhost",~port=8000,
-       ~user="source",~password="hackme",
-       ~genre="Misc",~url="http://savonet.sf.net/",
-       ~description="Liquidsoap Radio!",~public=true,
-       ~dumpfile="",~mount="Use [name]",
-       ~name="Use [mount]",~protocol="http",
-       ~lame="lame",~bitrate=128,~swap=false,
-       ~fallible=false,~on_start={()},~on_stop={()},
-       ~restart_on_crash=false,~restart_on_new_track=false,
-       ~restart_encoder_delay=3600,~headers=[],s)
-  samplerate = get(default=44100,"frame.samplerate")
-  samplerate = float_of_int(samplerate) / 1000.
-  channels = get(default=2,"frame.channels")
-  swap = if swap then "-x" else "" end
-  mode =
-    if channels == 2 then
-      "j" # Encoding in joint stereo..
-    else
-      "m"
-    end
-  # Metadata update is set by ICY with icecast
-  def lame_p(m)
-    "#{lame} -b #{bitrate} -r --bitwidth 16 -s #{samplerate} \
-     --signed -m #{mode} --nores #{swap} -t - -"
-  end
-  output.icecast.external(id=id,
-    process=lame_p,bitrate=bitrate,start=start,
-    restart=restart,restart_delay=restart_delay,
-    host=host,port=port,user=user,password=password,
-    genre=genre,url=url,description=description,
-    public=public,dumpfile=dumpfile,restart_encoder_delay=restart_encoder_delay,
-    name=name,mount=mount,protocol=protocol,
-    header=false,restart_on_crash=restart_on_crash,
-    restart_on_new_track=restart_on_new_track,headers=headers,
-    fallible=fallible,on_start=on_start,on_stop=on_stop,
-    s)
-end
-
-# Output to shoutcast using the lame encoder.
-# @category Source / Output
-# @param ~id Output's ID
-# @param ~start Start output threads on operator initialization.
-# @param ~restart Restart output after a failure. By default, liquidsoap will stop if the output failed.
-# @param ~restart_delay Delay, in seconds, before attempting new connection, if restart is enabled.
-# @param ~restart_on_crash Restart external process on crash. If false, liquidsoap will stop.
-# @param ~restart_on_new_track Restart encoder upon new track.
-# @param ~restart_encoder_delay Restart the encoder after this delay, in seconds.
-# @param ~user User for shout source connection. Useful only in special cases, like with per-mountpoint users.
-# @param ~lame The lame binary
-# @param ~bitrate Encoder bitrate
-# @param ~icy_reset Reset shoutcast source buffer upon connecting (necessary for NSV).
-# @param ~dumpfile Dump stream to file, for debugging purpose. Disabled if empty.
-# @param ~fallible Allow the child source to fail, in which case the output will be (temporarily) stopped.
-# @param ~on_start Callback executed when outputting starts.
-# @param ~on_stop Callback executed when outputting stops.
-# @param s The source to output
-def output.shoutcast.lame(
-       ~id="output.shoutcast.mp3",~start=true,
-       ~restart=false,~restart_delay=3,
-       ~host="localhost",~port=8000,
-       ~user="source",~password="hackme",
-       ~genre="Misc",~url="http://savonet.sf.net/",
-       ~description="Liquidsoap Radio!",~public=true,
-       ~dumpfile="",~name="Use [mount]",~icy_reset=true,
-       ~lame="lame",~aim="",~icq="",~irc="",
-       ~fallible=false,~on_start={()},~on_stop={()},
-       ~restart_on_crash=false,~restart_on_new_track=false,
-       ~restart_encoder_delay=3600,~bitrate=128,s) =
-  icy_reset = if icy_reset then "1" else "0" end
-  headers = [("icy-aim",aim),("icy-irc",irc),
-             ("icy-icq",icq),("icy-reset",icy_reset)]
-  output.icecast.lame(
-    id=id, headers=headers, lame=lame,
-    bitrate=bitrate, start=start,
-    restart=restart, restart_encoder_delay=restart_encoder_delay,
-    host=host, port=port, user=user, password=password,
-    genre=genre, url=url, description=description,
-    public=public, dumpfile=dumpfile,
-    restart_on_crash=restart_on_crash,
-    restart_on_new_track=restart_on_new_track,
-    name=name, mount="/", protocol="icy",
-    fallible=fallible,on_start=on_start,on_stop=on_stop,
-    s)
-end
-
-# Output to icecast using the flac command line encoder.
-# @category Source / Output
-# @param ~id Output's ID
-# @param ~start Start output threads on operator initialization.
-# @param ~restart Restart output after a failure. By default, liquidsoap will stop if the output failed.
-# @param ~restart_delay Delay, in seconds, before attempting new connection, if restart is enabled.
-# @param ~restart_on_crash Restart external process on crash. If false, liquidsoap will stop.
-# @param ~restart_on_new_track Restart encoder upon new track. If false, the resulting stream will have a single track.
-# @param ~restart_encoder_delay Restart the encoder after this delay, in seconds.
-# @param ~user User for shout source connection. Useful only in special cases, like with per-mountpoint users.
-# @param ~flac The flac binary
-# @param ~quality Encoder quality (0..8)
-# @param ~dumpfile Dump stream to file, for debugging purpose. Disabled if empty.
-# @param ~protocol Protocol of the streaming server: 'http' for Icecast, 'icy' for Shoutcast.
-# @param ~fallible Allow the child source to fail, in which case the output will be (temporarily) stopped.
-# @param ~on_start Callback executed when outputting starts.
-# @param ~on_stop Callback executed when outputting stops.
-# @param s The source to output
-def output.icecast.flac(
-       ~id="output.icecast.flac",~start=true,
-       ~restart=false,~restart_delay=3,
-       ~host="localhost",~port=8000,
-       ~user="source",~password="hackme",
-       ~genre="Misc",~url="http://savonet.sf.net/",
-       ~description="Liquidsoap Radio!",~public=true,
-       ~dumpfile="",~mount="Use [name]",
-       ~name="Use [mount]",~protocol="http",
-       ~flac="flac",~quality=6,
-       ~restart_on_crash=false,
-       ~restart_on_new_track=true,
-       ~restart_encoder_delay=(-1),
-       ~fallible=false,~on_start={()},~on_stop={()},
-       s)
-  # We will use raw format, to
-  # bypass input length value in WAV
-  # header (input length is not known)
-  channels = get(default=2,"frame.channels")
-  samplerate = get(default=44100,"frame.samplerate")
-  def flac_p(m)=
-    def option(x) =
-      "-T #{quote(fst(x))}=#{quote(snd(x))}"
-    end
-    m = list.map(option,m)
-    m = string.concat(separator=" ",m)
-    "#{flac} --force-raw-format --endian=little --channels=#{channels} \
-     --bps=16 --sample-rate=#{samplerate} --sign=signed #{m} \
-     -#{quality} --ogg -c -"
-  end
-  output.icecast.external(id=id,
-    process=flac_p,bitrate=(-1),start=start,
-    restart=restart,restart_delay=restart_delay,
-    host=host,port=port,user=user,password=password,
-    genre=genre,url=url,description=description,
-    public=public,dumpfile=dumpfile,
-    name=name,mount=mount,protocol=protocol,
-    fallible=fallible,on_start=on_start,on_stop=on_stop,
-    restart_on_new_track=restart_on_new_track,
-    format="ogg",header=false,icy_metadata=false,
-    restart_on_crash=restart_on_crash,
-    restart_encoder_delay=restart_encoder_delay,
-    s)
-end
-
-# Output to icecast using the aacplusenc command line encoder.
-# @category Source / Output
-# @param ~id Output's ID
-# @param ~start Start output threads on operator initialization.
-# @param ~restart Restart output after a failure. By default, liquidsoap will stop if the output failed.
-# @param ~restart_delay Delay, in seconds, before attempting new connection, if restart is enabled.
-# @param ~restart_on_crash Restart external process on crash. If false, liquidsoap will stop.
-# @param ~restart_on_new_track Restart encoder upon new track.
-# @param ~restart_encoder_delay Restart the encoder after this delay, in seconds.
-# @param ~user User for shout source connection. Useful only in special cases, like with per-mountpoint users.
-# @param ~aacplusenc The aacplusenc binary
-# @param ~bitrate Encoder bitrate
-# @param ~dumpfile Dump stream to file, for debugging purpose. Disabled if empty.
-# @param ~protocol Protocol of the streaming server: 'http' for Icecast, 'icy' for Shoutcast.
-# @param ~fallible Allow the child source to fail, in which case the output will be (temporarily) stopped.
-# @param ~on_start Callback executed when outputting starts.
-# @param ~on_stop Callback executed when outputting stops.
-# @param s The source to output
-def output.icecast.aacplusenc(
-       ~id="output.icecast.aacplusenc",~start=true,
-       ~restart=false,~restart_delay=3,
-       ~host="localhost",~port=8000,
-       ~user="source",~password="hackme",
-       ~genre="Misc",~url="http://savonet.sf.net/",
-       ~description="Liquidsoap Radio!",~public=true,
-       ~dumpfile="",~mount="Use [name]",
-       ~name="Use [mount]",~protocol="http",
-       ~aacplusenc="aacplusenc",~bitrate=64,
-       ~fallible=false,~on_start={()},~on_stop={()},
-       ~restart_on_crash=false,~restart_on_new_track=false,
-       ~restart_encoder_delay=3600,~headers=[],s)
-  # Metadata update is set by ICY with icecast
-  def aacplusenc_p(m)
-    "#{aacplusenc} - - #{bitrate}"
-  end
-  output.icecast.external(id=id,
-    process=aacplusenc_p,bitrate=bitrate,start=start,
-    restart=restart,restart_delay=restart_delay,
-    host=host,port=port,user=user,password=password,
-    genre=genre,url=url,description=description,
-    public=public,dumpfile=dumpfile,
-    name=name,mount=mount,protocol=protocol,
-    fallible=fallible,on_start=on_start,on_stop=on_stop,
-    header=true,restart_on_crash=restart_on_crash,
-    restart_on_new_track=restart_on_new_track,headers=headers,
-    restart_encoder_delay=restart_encoder_delay,format="audio/aacp",s)
-end
-
-# Output to shoutcast using the aacplusenc encoder.
-# @category Source / Output
-# @param ~id Output's ID
-# @param ~start Start output threads on operator initialization.
-# @param ~restart Restart output after a failure. By default, liquidsoap will stop if the output failed.
-# @param ~restart_delay Delay, in seconds, before attempting new connection, if restart is enabled.
-# @param ~restart_on_crash Restart external process on crash. If false, liquidsoap will stop.
-# @param ~restart_on_new_track Restart encoder upon new track.
-# @param ~restart_encoder_delay Restart the encoder after this delay, in seconds.
-# @param ~user User for shout source connection. Useful only in special cases, like with per-mountpoint users.
-# @param ~aacplusenc The aacplusenc binary
-# @param ~bitrate Encoder bitrate
-# @param ~icy_reset Reset shoutcast source buffer upon connecting (necessary for NSV).
-# @param ~dumpfile Dump stream to file, for debugging purpose. Disabled if empty.
-# @param ~fallible Allow the child source to fail, in which case the output will be (temporarily) stopped.
-# @param ~on_start Callback executed when outputting starts.
-# @param ~on_stop Callback executed when outputting stops.
-# @param s The source to output
-def output.shoutcast.aacplusenc(
-       ~id="output.shoutcast.aacplusenc",~start=true,
-       ~restart=false,~restart_delay=3,
-       ~host="localhost",~port=8000,
-       ~user="source",~password="hackme",
-       ~genre="Misc",~url="http://savonet.sf.net/",
-       ~description="Liquidsoap Radio!",~public=true,
-       ~fallible=false,~on_start={()},~on_stop={()},
-       ~dumpfile="",~name="Use [mount]",~icy_reset=true,
-       ~aim="",~icq="",~irc="",~aacplusenc="aacplusenc",
-       ~restart_on_crash=false,~restart_on_new_track=false,
-       ~restart_encoder_delay=3600,~bitrate=64,s) =
-  icy_reset = if icy_reset then "1" else "0" end
-  headers = [("icy-aim",aim),("icy-irc",irc),
-             ("icy-icq",icq),("icy-reset",icy_reset)]
-  output.icecast.aacplusenc(
-    id=id, headers=headers, aacplusenc=aacplusenc,
-    bitrate=bitrate, start=start,
-    restart=restart, restart_delay=restart_delay,
-    host=host, port=port, user=user, password=password,
-    genre=genre, url=url, description=description,
-    public=public, dumpfile=dumpfile,
-    fallible=fallible,on_start=on_start,on_stop=on_stop,
-    restart_on_crash=restart_on_crash, restart_encoder_delay=restart_encoder_delay,
-    restart_on_new_track=restart_on_new_track,
-    name=name, mount="/", protocol="icy",
-    s)
-end
-%endif
@@ -4,6 +4,7 @@
 # Enable external Musepack decoder. Requires the
 # mpcdec binary in the path. Does not work on
 # Win32.
+# @category Liquidsoap
 def enable_external_mpc_decoder() =
   # A list of know extensions and content-type for Musepack.
   # Values from http://en.wikipedia.org/wiki/Musepack
@@ -13,18 +13,12 @@ def http_response(~protocol="HTTP/1.1",
                   ~headers=[],
                   ~data="") =
   status = http_codes[string_of(code)]
-  # Set content-length if needed and not set by the
-  # user.
-  headers =
-    if data != "" and
-       not list.mem_assoc("Content-Length",headers)
-    then
-      list.append([("Content-Length",
-                    "#{string.length(data)}")],
-                  headers)
-    else
-      headers
-    end
+  # Set content-length and connection: close
+  headers =
+    list.append(headers,
+                [("Content-Length", "#{string.length(data)}"),
+                 ("Connection", "close")])
   headers = list.map(fun (x) -> "#{fst(x)}: #{snd(x)}",headers)
   headers = string.concat(separator="\r\n",headers)
   # If no headers are provided, we should avoid
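The new version stops checking whether the caller already supplied Content-Length and simply appends both headers. In Python terms the assembly reduces to the sketch below; the status-line format is assumed, since the hunk only shows the header handling.

    # Illustrative sketch of the header assembly in the new http_response.
    def build_response(protocol, code, status, headers, data):
        headers = list(headers) + [
            ("Content-Length", str(len(data))),  # always set, even when data == ""
            ("Connection", "close"),
        ]
        header_lines = "\r\n".join("%s: %s" % (k, v) for k, v in headers)
        return "%s %d %s\r\n%s\r\n\r\n%s" % (protocol, code, status,
                                             header_lines, data)

    # e.g. build_response("HTTP/1.1", 200, "OK", [("Content-Type", "text/plain")], "hi")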
@@ -5,3 +5,4 @@
 %include "flows.liq"
 %include "http.liq"
 %include "video_text.liq"
+%include "gstreamer.liq"
@@ -21,10 +21,14 @@ end
 # @param a Key to look for
 # @param l List of pairs (key,value)
 def list.mem_assoc(a,l)
-  v = list.assoc(a,l)
-  # We check for existence, since "" may indicate
-  # either a binding (a,"") or no binding..
-  list.mem((a,v),l)
+  def f(cur, el) =
+    if not cur then
+      fst(el) == a
+    else
+      cur
+    end
+  end
+  list.fold(f, false, l)
 end
 
 # Remove a pair from an associative list
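The old implementation looked up the value with list.assoc and then re-checked membership of the (key, value) pair, which made an empty-string binding indistinguishable from no binding. The rewrite folds over the list and inspects keys only. The same logic in Python, for comparison (illustrative, not repository code):

    # Python equivalent of the new fold-based list.mem_assoc.
    from functools import reduce

    def mem_assoc(a, l):
        # `l` is a list of (key, value) pairs; only keys are examined,
        # so a binding like (a, "") is correctly reported as present.
        return reduce(lambda cur, el: cur or el[0] == a, l, False)

    assert mem_assoc("x", [("x", "")]) is True
    assert mem_assoc("y", [("x", "1")]) is False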
@@ -164,8 +168,7 @@ def out(s)
   output.prefered(mksafe(s))
 end
 
-# Special track insensitive fallback that
-# always skip current song before switching.
+# Special track insensitive fallback that always skips current song before switching.
 # @category Source / Track Processing
 # @param ~input The input source
 # @param f The fallback source
@@ -212,14 +215,17 @@ end
 # Simple crossfade.
 # @category Source / Track Processing
 # @param ~start_next Duration in seconds of the crossed end of track.
-# @param ~fade_in Duration of the fade in for next track
-# @param ~fade_out Duration of the fade out for previous track
-# @param s The source to use
-def crossfade(~id="",~start_next,~fade_in,~fade_out,s)
+# @param ~fade_in Duration of the fade in for next track.
+# @param ~fade_out Duration of the fade out for previous track.
+# @param ~conservative Always prepare for a premature end-of-track.
+# @param s The source to use.
+def crossfade(~id="",~conservative=true,
+              ~start_next=5.,~fade_in=3.,~fade_out=3.,
+              s)
   s = fade.in(duration=fade_in,s)
   s = fade.out(duration=fade_out,s)
   fader = fun (a,b) -> add(normalize=false,[b,a])
-  cross(id=id,conservative=true,duration=start_next,fader,s)
+  cross(id=id,conservative=conservative,duration=start_next,fader,s)
 end
 
 # Append speech-synthesized tracks reading the metadata.
@@ -242,8 +248,7 @@ def helium(s)
 end
 %endif
 
-# Return true if process exited with 0 code.
-# Command should return quickly.
+# Return true if process exited with 0 code. Command should return quickly.
 # @category System
 # @param command Command to test
 def test_process(command)
@@ -277,12 +282,9 @@ def url.split(uri) =
   end
 end
 
-# Register a server/telnet command to
-# update a source's metadata. Returns
-# a new source, which will receive the
-# updated metadata. It behaves just like
-# the pre-1.0 insert_metadata() operator,
-# i.e. insert key1="val1",key2="val2",...
+# Register a server/telnet command to update a source's metadata. Returns
+# a new source, which will receive the updated metadata. The command has
+# the following format: insert key1="val1",key2="val2",...
 # @category Source / Track Processing
 # @param ~id Force the value of the source ID.
 def server.insert_metadata(~id="",s) =
|
||||||
# @param ~conservative Always prepare for a premature end-of-track.
|
# @param ~conservative Always prepare for a premature end-of-track.
|
||||||
# @param ~default Transition used when no rule applies \
|
# @param ~default Transition used when no rule applies \
|
||||||
# (default: sequence).
|
# (default: sequence).
|
||||||
# @param ~high Value, in dB, for loud sound level
|
# @param ~high Value, in dB, for loud sound level.
|
||||||
# @param ~medium Value, in dB, for medium sound level
|
# @param ~medium Value, in dB, for medium sound level.
|
||||||
# @param ~margin Margin to detect sources that have too different \
|
# @param ~margin Margin to detect sources that have too different \
|
||||||
# sound level for crossing.
|
# sound level for crossing.
|
||||||
# @param s The input source.
|
# @param s The input source.
|
||||||
def smart_crossfade (~start_next=5.,~fade_in=3.,~fade_out=3.,
|
def smart_crossfade (~start_next=5.,~fade_in=3.,~fade_out=3.,
|
||||||
~default=(fun (a,b) -> sequence([a, b])),
|
~default=(fun (a,b) -> sequence([a, b])),
|
||||||
~high=-15., ~medium=-32., ~margin=4.,
|
~high=-15., ~medium=-32., ~margin=4.,
|
||||||
~width=2.,~conservative=false,s)
|
~width=2.,~conservative=true,s)
|
||||||
fade.out = fade.out(type="sin",duration=fade_out)
|
fade.out = fade.out(type="sin",duration=fade_out)
|
||||||
fade.in = fade.in(type="sin",duration=fade_in)
|
fade.in = fade.in(type="sin",duration=fade_in)
|
||||||
add = fun (a,b) -> add(normalize=false,[b, a])
|
add = fun (a,b) -> add(normalize=false,[b, a])
|
||||||
|
@ -549,7 +551,18 @@ def playlist.reloadable(~id="",~random=false,~on_done={()},uri)
|
||||||
if request.resolve(playlist) then
|
if request.resolve(playlist) then
|
||||||
playlist = request.filename(playlist)
|
playlist = request.filename(playlist)
|
||||||
files = playlist.parse(playlist)
|
files = playlist.parse(playlist)
|
||||||
list.map(snd,files)
|
def file_request(el) =
|
||||||
|
meta = fst(el)
|
||||||
|
file = snd(el)
|
||||||
|
s = list.fold(fun (cur, el) ->
|
||||||
|
"#{cur},#{fst(el)}=#{string.escape(snd(el))}", "", meta)
|
||||||
|
if s == "" then
|
||||||
|
file
|
||||||
|
else
|
||||||
|
"annotate:#{s}:#{file}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
list.map(file_request,files)
|
||||||
else
|
else
|
||||||
log(label=id,"Couldn't read playlist: request resolution failed.")
|
log(label=id,"Couldn't read playlist: request resolution failed.")
|
||||||
[]
|
[]
|
||||||
|
|
|
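The new file_request helper folds each file's metadata into a key=value annotation and wraps the path in an annotate: URI so the metadata travels with the request. A Python rendering of the string it builds follows; the escape helper here is a stand-in for Liquidsoap's string.escape, whose exact quoting is assumed.

    # Sketch of the annotate: URI construction done by file_request above.
    def escape(value):
        # Rough stand-in for Liquidsoap's string.escape (assumed behaviour).
        return '"%s"' % str(value).replace('"', '\\"')

    def file_request(meta, filename):
        # Mirror of the Liquidsoap fold: the "" seed means the annotation
        # string starts with a separator, exactly as in the original.
        s = ""
        for key, value in meta:
            s = "%s,%s=%s" % (s, key, escape(value))
        if s == "":
            return filename
        return "annotate:%s:%s" % (s, filename)

    # file_request([("artist", "Me"), ("title", "Song")], "/srv/a.mp3")
    # -> 'annotate:,artist="Me",title="Song":/srv/a.mp3'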
@@ -1,5 +1,6 @@
 %ifdef video.add_text.gd
 # Add a scrolling line of text on video frames.
+# @category Source / Video Processing
 # @param ~id Force the value of the source ID.
 # @param ~color Text color (in 0xRRGGBB format).
 # @param ~cycle Cycle text.
@@ -22,6 +23,7 @@ end
 
 %ifdef video.add_text.sdl
 # Add a scrolling line of text on video frames.
+# @category Source / Video Processing
 # @param ~id Force the value of the source ID.
 # @param ~color Text color (in 0xRRGGBB format).
 # @param ~cycle Cycle text.
@@ -27,7 +27,7 @@ def append_title(m) =
   end
 end
 
-def crossfade(s)
+def crossfade_airtime(s)
   #duration is automatically overwritten by metadata fields passed in
   #with audio
   s = fade.in(type="log", duration=0., s)
@@ -402,6 +402,11 @@ def set_dynamic_source_id(id) =
   string_of(!current_dyn_id)
 end
 
+def get_dynamic_source_id() =
+  string_of(!current_dyn_id)
+end
+
+
 # Function to create a playlist source and output it.
 def create_dynamic_source(uri) =
   # The playlist source
@@ -413,7 +418,7 @@ def create_dynamic_source(uri) =
   # We register both source and output
   # in the list of sources
   dyn_sources :=
-    list.append([(uri,s),(uri,active_dyn_out)], !dyn_sources)
+    list.append([(!current_dyn_id, s),(!current_dyn_id, active_dyn_out)], !dyn_sources)
 
   notify([("schedule_table_id", !current_dyn_id)])
   "Done!"
@@ -421,7 +426,62 @@ end
 
 
 # A function to destroy a dynamic source
-def destroy_dynamic_source_all(uri) =
+def destroy_dynamic_source(id) =
+  # We need to find the source in the list,
+  # remove it and destroy it. Currently, the language
+  # lacks some nice operators for that so we do it
+  # the functional way
+
+  # This function is executed on every item in the list
+  # of dynamic sources
+  def parse_list(ret, current_element) =
+    # ret is of the form: (matching_sources, remaining_sources)
+    # We extract those two:
+    matching_sources = fst(ret)
+    remaining_sources = snd(ret)
+
+    # current_element is of the form: ("uri", source) so
+    # we check the first element
+    current_id = fst(current_element)
+    if current_id == id then
+      # In this case, we add the source to the list of
+      # matched sources
+      (list.append( [snd(current_element)],
+                    matching_sources),
+       remaining_sources)
+    else
+      # In this case, we put the element in the list of remaining
+      # sources
+      (matching_sources,
+       list.append([current_element],
+                   remaining_sources))
+    end
+  end
+
+  # Now we execute the function:
+  result = list.fold(parse_list, ([], []), !dyn_sources)
+  matching_sources = fst(result)
+  remaining_sources = snd(result)
+
+  # We store the remaining sources in dyn_sources
+  dyn_sources := remaining_sources
+
+  # If no source matched, we return an error
+  if list.length(matching_sources) == 0 then
+    "Error: no matching sources!"
+  else
+    # We stop all sources
+    list.iter(source.shutdown, matching_sources)
+    # And return
+    "Done!"
+  end
+end
+
+
+
+
+# A function to destroy a dynamic source
+def destroy_dynamic_source_all() =
   # We need to find the source in the list,
   # remove it and destroy it. Currently, the language
   # lacks some nice operators for that so we do it
@@ -466,57 +526,3 @@ end
 
 
 
-
-# A function to destroy a dynamic source
-def destroy_dynamic_source(uri) =
-  # We need to find the source in the list,
-  # remove it and destroy it. Currently, the language
-  # lacks some nice operators for that so we do it
-  # the functional way
-
-  # This function is executed on every item in the list
-  # of dynamic sources
-  def parse_list(ret, current_element) =
-    # ret is of the form: (matching_sources, remaining_sources)
-    # We extract those two:
-    matching_sources = fst(ret)
-    remaining_sources = snd(ret)
-
-    # current_element is of the form: ("uri", source) so
-    # we check the first element
-    current_uri = fst(current_element)
-    if current_uri == uri then
-      # In this case, we add the source to the list of
-      # matched sources
-      (list.append( [snd(current_element)],
-                    matching_sources),
-       remaining_sources)
-    else
-      # In this case, we put the element in the list of remaining
-      # sources
-      (matching_sources,
-       list.append([current_element],
-                   remaining_sources))
-    end
-  end
-
-  # Now we execute the function:
-  result = list.fold(parse_list, ([], []), !dyn_sources)
-  matching_sources = fst(result)
-  remaining_sources = snd(result)
-
-  # We store the remaining sources in dyn_sources
-  dyn_sources := remaining_sources
-
-  # If no source matched, we return an error
-  if list.length(matching_sources) == 0 then
-    "Error: no matching sources!"
-  else
-    # We stop all sources
-    list.iter(source.shutdown, matching_sources)
-    # And return
-    "Done!"
-  end
-end
-
-
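Old and new destroy functions share one idiom: fold over dyn_sources, partitioning it into sources whose key matches and everything else, then shut down the matches. The change is only in the key, from the uri to the schedule row id. A compact Python rendering of that partition, for clarity:

    # Sketch of the partition-by-id logic used by destroy_dynamic_source.
    def destroy_dynamic_source(dyn_sources, wanted_id, shutdown):
        matching  = [src for key, src in dyn_sources if key == wanted_id]
        remaining = [(key, src) for key, src in dyn_sources if key != wanted_id]
        if not matching:
            return remaining, "Error: no matching sources!"
        for src in matching:
            shutdown(src)   # plays the role of source.shutdown in Liquidsoap
        return remaining, "Done!"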
@@ -19,7 +19,7 @@ queue = amplify(1., override="replay_gain", queue)
 #live stream setup
 set("harbor.bind_addr", "0.0.0.0")
 
-current_dyn_id = ref ''
+current_dyn_id = ref '-1'
 
 pypo_data = ref '0'
 stream_metadata_type = ref 0
@@ -43,10 +43,11 @@ web_stream = input.harbor("test-harbor", port=8999, password=stream_harbor_pass)
 web_stream = on_metadata(notify_stream, web_stream)
 output.dummy(fallible=true, web_stream)
 
+
+# the crossfade function controls fade in/out
+queue = crossfade_airtime(queue)
 queue = on_metadata(notify, queue)
 queue = map_metadata(update=false, append_title, queue)
-# the crossfade function controls fade in/out
-queue = crossfade(queue)
 output.dummy(fallible=true, queue)
 
 
@@ -95,21 +96,28 @@ server.register(namespace="dynamic_source",
                 usage="id <id>",
                 "id",
                 fun (s) -> begin log("dynamic_source.id") set_dynamic_source_id(s) end)
 
+server.register(namespace="dynamic_source",
+                description="Get the cc_schedule row id",
+                usage="get_id",
+                "get_id",
+                fun (s) -> begin log("dynamic_source.get_id") get_dynamic_source_id() end)
+
 server.register(namespace="dynamic_source",
                 description="Start a new dynamic source.",
                 usage="start <uri>",
                 "read_start",
-                fun (s) -> begin log("dynamic_source.read_start") create_dynamic_source(s) end)
+                fun (uri) -> begin log("dynamic_source.read_start") create_dynamic_source(uri) end)
 server.register(namespace="dynamic_source",
                 description="Stop a dynamic source.",
-                usage="stop <uri>",
+                usage="stop <id>",
                 "read_stop",
                 fun (s) -> begin log("dynamic_source.read_stop") destroy_dynamic_source(s) end)
 server.register(namespace="dynamic_source",
                 description="Stop a dynamic source.",
-                usage="stop <uri>",
+                usage="stop <id>",
                 "read_stop_all",
-                fun (s) -> begin log("dynamic_source.read_stop") destroy_dynamic_source_all(s) end)
+                fun (s) -> begin log("dynamic_source.read_stop") destroy_dynamic_source_all() end)
 
 default = amplify(id="silence_src", 0.00001, noise())
 default = rewrite_metadata([("artist","Airtime"), ("title", "offline")], default)
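With these registrations the dynamic_source namespace exposes get_id alongside id, read_start, read_stop and read_stop_all on Liquidsoap's telnet server. From the Python side such commands are driven roughly as below; host and port are placeholders, since pypo reads the real values from its config.

    # Sketch of driving the dynamic_source telnet commands from Python.
    import telnetlib

    LS_HOST, LS_PORT = "localhost", 1234   # placeholders, not the real config values

    def telnet_command(msg):
        tn = telnetlib.Telnet(LS_HOST, LS_PORT)
        tn.write(msg)                             # send one command
        reply = tn.read_until("\r\n").strip(" \r\n")
        tn.write("exit\n")
        tn.read_all()
        return reply

    # telnet_command("dynamic_source.id 5\n")     # tag the next source with row id 5
    # telnet_command("dynamic_source.get_id\n")   # -> "5"
    # telnet_command("dynamic_source.read_stop 5\n")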
@@ -1,4 +1,5 @@
 # -*- coding: utf-8 -*-
+import traceback
 
 """
 Python part of radio playout (pypo)
@@ -102,6 +103,24 @@ class Notify:
         logger.debug('# Calling server to update webstream data #')
         logger.debug('#################################################')
         response = self.api_client.notify_webstream_data(data, media_id)
+        logger.debug("Response: " + json.dumps(response))
+
+    def run_with_options(self, options):
+        if options.error and options.stream_id:
+            self.notify_liquidsoap_status(options.error, options.stream_id, options.time)
+        elif options.connect and options.stream_id:
+            self.notify_liquidsoap_status("OK", options.stream_id, options.time)
+        elif options.source_name and options.source_status:
+            self.notify_source_status(options.source_name, options.source_status)
+        elif options.webstream:
+            self.notify_webstream_data(options.webstream, options.media_id)
+        elif options.media_id:
+            self.notify_media_start_playing(options.media_id)
+        elif options.liquidsoap_started:
+            self.notify_liquidsoap_started()
+        else:
+            logger.debug("Unrecognized option in options(%s). Doing nothing" \
+                    % str(options))
+
 
 if __name__ == '__main__':
@@ -112,41 +131,9 @@ if __name__ == '__main__':
     print '#########################################'
 
     # initialize
-    if options.error and options.stream_id:
-        try:
-            n = Notify()
-            n.notify_liquidsoap_status(options.error, options.stream_id, options.time)
-        except Exception, e:
-            print e
-    elif options.connect and options.stream_id:
-        try:
-            n = Notify()
-            n.notify_liquidsoap_status("OK", options.stream_id, options.time)
-        except Exception, e:
-            print e
-    elif options.source_name and options.source_status:
-        try:
-            n = Notify()
-            n.notify_source_status(options.source_name, options.source_status)
-        except Exception, e:
-            print e
-    elif options.webstream:
-        try:
-            n = Notify()
-            n.notify_webstream_data(options.webstream, options.media_id)
-        except Exception, e:
-            print e
-    elif options.media_id:
-
-        try:
-            n = Notify()
-            n.notify_media_start_playing(options.media_id)
-        except Exception, e:
-            print e
-    elif options.liquidsoap_started:
-        try:
-            n = Notify()
-            n.notify_liquidsoap_started()
-        except Exception, e:
-            print e
+    try:
+        n = Notify()
+        n.run_with_options(options)
+    except Exception as e:
+        print( traceback.format_exc() )
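The rewrite collapses six copy-pasted try/except blocks into a single call to Notify.run_with_options, and a failure now prints the full traceback instead of just the exception message. The option parsing itself is not part of this hunk; an illustrative optparse setup matching the attributes run_with_options inspects might look like this (option names are inferred from the dispatch, not confirmed by the diff):

    # Hypothetical optparse setup; the real parser lives earlier in notify.py.
    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option("--error", dest="error")
    parser.add_option("--connect", dest="connect", action="store_true")
    parser.add_option("--stream-id", dest="stream_id")
    parser.add_option("--time", dest="time")
    parser.add_option("--source-name", dest="source_name")
    parser.add_option("--source-status", dest="source_status")
    parser.add_option("--webstream", dest="webstream")
    parser.add_option("--media-id", dest="media_id")
    parser.add_option("--liquidsoap-started", dest="liquidsoap_started",
                      action="store_true")
    options, args = parser.parse_args()
    # options is then handed to Notify().run_with_options(options)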
@@ -55,7 +55,6 @@ class PypoPush(Thread):
 
         self.pushed_objects = {}
         self.logger = logging.getLogger('push')
-        self.current_stream_info = None
         self.current_prebuffering_stream_id = None
 
     def main(self):
@@ -78,6 +77,7 @@ class PypoPush(Thread):
 
                 #We get to the following lines only if a schedule was received.
                 liquidsoap_queue_approx = self.get_queue_items_from_liquidsoap()
+                liquidsoap_stream_id = self.get_current_stream_id_from_liquidsoap()
 
                 tnow = datetime.utcnow()
                 current_event_chain, original_chain = self.get_current_chain(chains, tnow)
@@ -92,7 +92,7 @@ class PypoPush(Thread):
                 #is scheduled. We need to verify whether the schedule we just received matches
                 #what Liquidsoap is playing, and if not, correct it.
 
-                self.handle_new_schedule(media_schedule, liquidsoap_queue_approx, current_event_chain)
+                self.handle_new_schedule(media_schedule, liquidsoap_queue_approx, liquidsoap_stream_id, current_event_chain)
 
 
                 #At this point everything in the present has been taken care of and Liquidsoap
@@ -134,6 +134,25 @@ class PypoPush(Thread):
                 loops = 0
             loops += 1
 
+    def get_current_stream_id_from_liquidsoap(self):
+        response = "-1"
+        try:
+            self.telnet_lock.acquire()
+            tn = telnetlib.Telnet(LS_HOST, LS_PORT)
+
+            msg = 'dynamic_source.get_id\n'
+            tn.write(msg)
+            response = tn.read_until("\r\n").strip(" \r\n")
+            tn.write('exit\n')
+            tn.read_all()
+        except Exception, e:
+            self.logger.error("Error connecting to Liquidsoap: %s", e)
+            response = []
+        finally:
+            self.telnet_lock.release()
+
+        return response
+
     def get_queue_items_from_liquidsoap(self):
         """
         This function connects to Liquidsoap to find what media items are in its queue.
@@ -175,10 +194,10 @@ class PypoPush(Thread):
 
         return liquidsoap_queue_approx
 
-    def is_correct_current_item(self, media_item, liquidsoap_queue_approx):
+    def is_correct_current_item(self, media_item, liquidsoap_queue_approx, liquidsoap_stream_id):
        correct = False
         if media_item is None:
-            correct = (len(liquidsoap_queue_approx) == 0 and self.current_stream_info is None)
+            correct = (len(liquidsoap_queue_approx) == 0 and liquidsoap_stream_id == "-1")
         else:
             if is_file(media_item):
                 if len(liquidsoap_queue_approx) == 0:
@@ -188,10 +207,7 @@ class PypoPush(Thread):
                     liquidsoap_queue_approx[0]['row_id'] == media_item['row_id'] and \
                     liquidsoap_queue_approx[0]['end'] == media_item['end']
             elif is_stream(media_item):
-                if self.current_stream_info is None:
-                    correct = False
-                else:
-                    correct = self.current_stream_info['row_id'] == media_item['row_id']
+                correct = liquidsoap_stream_id == str(media_item['row_id'])
 
         self.logger.debug("Is current item correct?: %s", str(correct))
         return correct
@@ -202,7 +218,7 @@ class PypoPush(Thread):
         self.remove_from_liquidsoap_queue(0, None)
         self.stop_web_stream_all()
 
-    def handle_new_schedule(self, media_schedule, liquidsoap_queue_approx, current_event_chain):
+    def handle_new_schedule(self, media_schedule, liquidsoap_queue_approx, liquidsoap_stream_id, current_event_chain):
         """
         This function's purpose is to gracefully handle situations where
         Liquidsoap already has a track in its queue, but the schedule
@@ -213,14 +229,13 @@ class PypoPush(Thread):
         file_chain = filter(lambda item: (item["type"] == "file"), current_event_chain)
         stream_chain = filter(lambda item: (item["type"] == "stream_output_start"), current_event_chain)
 
-        self.logger.debug(self.current_stream_info)
         self.logger.debug(current_event_chain)
 
         #Take care of the case where the current playing may be incorrect
         if len(current_event_chain) > 0:
 
             current_item = current_event_chain[0]
-            if not self.is_correct_current_item(current_item, liquidsoap_queue_approx):
+            if not self.is_correct_current_item(current_item, liquidsoap_queue_approx, liquidsoap_stream_id):
                 self.clear_all_liquidsoap_items()
                 if is_stream(current_item):
                     if current_item['row_id'] != self.current_prebuffering_stream_id:
@@ -234,7 +249,7 @@ class PypoPush(Thread):
                     #we've changed the queue, so let's refetch it
                     liquidsoap_queue_approx = self.get_queue_items_from_liquidsoap()
 
-        elif not self.is_correct_current_item(None, liquidsoap_queue_approx):
+        elif not self.is_correct_current_item(None, liquidsoap_queue_approx, liquidsoap_stream_id):
             #Liquidsoap is playing something even though it shouldn't be
             self.clear_all_liquidsoap_items()
@@ -455,6 +470,7 @@ class PypoPush(Thread):
             tn = telnetlib.Telnet(LS_HOST, LS_PORT)
 
             msg = 'dynamic_source.id %s\n' % media_item['row_id']
+            self.logger.debug(msg)
             tn.write(msg)
 
             #example: dynamic_source.read_start http://87.230.101.24:80/top100station.mp3
@@ -489,7 +505,6 @@ class PypoPush(Thread):
             self.logger.debug(tn.read_all())
 
             self.current_prebuffering_stream_id = None
-            self.current_stream_info = media_item
         except Exception, e:
             self.logger.error(str(e))
         finally:
@@ -508,10 +523,13 @@ class PypoPush(Thread):
             self.logger.debug(msg)
             tn.write(msg)
 
+            msg = 'dynamic_source.id -1\n'
+            self.logger.debug(msg)
+            tn.write(msg)
+
             tn.write("exit\n")
             self.logger.debug(tn.read_all())
 
-            self.current_stream_info = None
         except Exception, e:
             self.logger.error(str(e))
         finally:
@@ -523,14 +541,17 @@ class PypoPush(Thread):
             tn = telnetlib.Telnet(LS_HOST, LS_PORT)
             #dynamic_source.stop http://87.230.101.24:80/top100station.mp3
 
-            msg = 'dynamic_source.read_stop %s\n' % media_item['uri'].encode('latin-1')
+            msg = 'dynamic_source.read_stop %s\n' % media_item['row_id']
+            self.logger.debug(msg)
+            tn.write(msg)
+
+            msg = 'dynamic_source.id -1\n'
             self.logger.debug(msg)
             tn.write(msg)
 
             tn.write("exit\n")
             self.logger.debug(tn.read_all())
 
-            self.current_stream_info = None
         except Exception, e:
             self.logger.error(str(e))
         finally:
@@ -549,7 +570,6 @@ class PypoPush(Thread):
             tn.write("exit\n")
             self.logger.debug(tn.read_all())
 
-            self.current_stream_info = None
         except Exception, e:
             self.logger.error(str(e))
         finally:
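Throughout these hunks pypo stops trusting its own bookkeeping (self.current_stream_info is removed everywhere) and instead asks Liquidsoap which schedule row id it is playing via dynamic_source.get_id, with "-1" as the idle sentinel. Reduced to a standalone function for clarity, the stream branch of the correctness check is simply the comparison below; this is a sketch of the logic, not repository code.

    # Sketch of the stream branch of is_correct_current_item after this change.
    def stream_item_is_correct(media_item, liquidsoap_stream_id):
        if media_item is None:
            # Nothing should be playing: Liquidsoap must report the "-1" sentinel.
            return liquidsoap_stream_id == "-1"
        # Otherwise the reported row id must match the scheduled item's row id.
        return liquidsoap_stream_id == str(media_item['row_id'])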
@@ -2,6 +2,8 @@
 
 AirtimeCheck::ExitIfNotRoot();
 
+date_default_timezone_set("UTC");
+
 $sapi_type = php_sapi_name();
 
 $showColor = !in_array("--no-color", $argv);
@@ -2,6 +2,8 @@
 
 exitIfNotRoot();
 
+date_default_timezone_set("UTC");
+
 $airtimeIni = getAirtimeConf();
 $airtime_base_dir = $airtimeIni['general']['airtime_dir'];
@@ -2,6 +2,8 @@
 
 exitIfNotRoot();
 
+date_default_timezone_set("UTC");
+
 $values = parse_ini_file('/etc/airtime/airtime.conf', true);
 
 // Name of the web server user
@@ -1,4 +1,6 @@
 <?php
+date_default_timezone_set("UTC");
+
 $values = parse_ini_file('/etc/airtime/airtime.conf', true);
 
 // Name of the web server user