Merge branch '2.2.x' into 2.2.x-saas

Conflicts:
	airtime_mvc/application/forms/LiveStreamingPreferences.php
Martin Konecny 2012-11-28 16:10:33 -05:00
commit 4983721565
27 changed files with 385 additions and 246 deletions

View File

@ -1,2 +1,2 @@
PRODUCT_ID=Airtime
PRODUCT_RELEASE=2.2.0
PRODUCT_RELEASE=2.2.1

View File

@ -48,7 +48,7 @@ class Application_Common_DateHelper
/**
* Get the week start date of this week in the format
* YYYY-MM-DD
*
* @return String - week start date
*/
function getWeekStartDate()
@ -231,7 +231,7 @@ class Application_Common_DateHelper
if (2 !== substr_count($p_time, ":")){
return FALSE;
}
if (1 === substr_count($p_time, ".")){
list($hhmmss, $ms) = explode(".", $p_time);
} else {
@ -275,17 +275,17 @@ class Application_Common_DateHelper
return $dateTime;
}
/* Convenience method to return a date formatted into a String rather than a
* DateTime object. Note that if an empty string is provided for $p_dateString
* then the current time is provided.
*
* @param $p_dateString
* Date string in UTC timezone.
* @param $p_format
* Format which the string should be returned in.
*
* @return string
* Date String in localtime
* */
public static function ConvertToLocalDateTimeString($p_dateString, $p_format="Y-m-d H:i:s"){
if (is_null($p_dateString) || strlen($p_dateString) == 0)
@ -302,7 +302,7 @@ class Application_Common_DateHelper
/*
* Example input: "00:02:32.746562". Output is a DateInterval object
* representing that 2 minute, 32.746562 second interval.
*
*/
public static function getDateIntervalFromString($p_interval){
list($hour_min_sec, $subsec) = explode(".", $p_interval);
@ -335,7 +335,7 @@ class Application_Common_DateHelper
$retVal['errMsg'] = "The year '$year' must be within the range of 1753 - 9999";
} else if (!checkdate($month, $day, $year)) {
$retVal['success'] = false;
$retVal['errMsg'] = "'$year-$month-$day' is not a valid date";
} else {
// check time
if (isset($timeInfo)) {

View File

@ -952,31 +952,33 @@ class ApiController extends Zend_Controller_Action
$data_arr = json_decode($data);
if (!is_null($media_id) && isset($data_arr->title) && strlen($data_arr->title) < 1024) {
if (!is_null($media_id)) {
if (isset($data_arr->title) &&
strlen($data_arr->title) < 1024) {
$previous_metadata = CcWebstreamMetadataQuery::create()
->orderByDbStartTime('desc')
->filterByDbInstanceId($media_id)
->findOne();
$previous_metadata = CcWebstreamMetadataQuery::create()
->orderByDbStartTime('desc')
->filterByDbInstanceId($media_id)
->findOne();
$do_insert = true;
if ($previous_metadata) {
if ($previous_metadata->getDbLiquidsoapData() == $data_arr->title) {
Logging::debug("Duplicate found: ".$data_arr->title);
$do_insert = false;
$do_insert = true;
if ($previous_metadata) {
if ($previous_metadata->getDbLiquidsoapData() == $data_arr->title) {
Logging::debug("Duplicate found: ".$data_arr->title);
$do_insert = false;
}
}
if ($do_insert) {
$webstream_metadata = new CcWebstreamMetadata();
$webstream_metadata->setDbInstanceId($media_id);
$webstream_metadata->setDbStartTime(new DateTime("now", new DateTimeZone("UTC")));
$webstream_metadata->setDbLiquidsoapData($data_arr->title);
$webstream_metadata->save();
}
}
if ($do_insert) {
$webstream_metadata = new CcWebstreamMetadata();
$webstream_metadata->setDbInstanceId($media_id);
$webstream_metadata->setDbStartTime(new DateTime("now", new DateTimeZone("UTC")));
$webstream_metadata->setDbLiquidsoapData($data_arr->title);
$webstream_metadata->save();
}
} else {
throw new Error("Unexpected error. media_id $media_id has a null stream value in cc_schedule!");
throw new Exception("Null value of media_id");
}
$this->view->response = $data;

View File

@ -107,7 +107,6 @@ class Application_Form_LiveStreamingPreferences extends Zend_Form_SubForm
public function isValid($data)
{
$isValid = parent::isValid($data);
return $isValid;
}

View File

@ -360,8 +360,10 @@ SQL;
{
$sql = <<<SQL
SELECT SUM(cliplength) AS LENGTH
FROM cc_blockcontents
FROM cc_blockcontents as bc
JOIN cc_files as f ON bc.file_id = f.id
WHERE block_id = :block_id
AND f.file_exists = true
SQL;
$result = Application_Common_Database::prepareAndExecute($sql, array(':block_id'=>$this->id), 'all', PDO::FETCH_NUM);
return $result[0][0];
@ -471,9 +473,9 @@ SQL;
Logging::info("Adding to block");
Logging::info("at position {$pos}");
}
foreach ($p_items as $ac) {
Logging::info("Adding audio file {$ac}");
Logging::info("Adding audio file {$ac[0]}");
try {
if (is_array($ac) && $ac[1] == 'audioclip') {
$res = $this->insertBlockElement($this->buildEntry($ac[0], $pos));

View File

@ -204,9 +204,9 @@ class Application_Model_Preference
$fade = $out;
}
$fade = number_format($fade, 2);
$fade = number_format($fade, 1);
//fades need 2 leading zeros for DateTime conversion
$fade = rtrim(str_pad($fade, 5, "0", STR_PAD_LEFT), "0");
$fade = str_pad($fade, 4, "0", STR_PAD_LEFT);
return $fade;
}
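For reference, a minimal sketch of what the new one-decimal formatting and fixed-width padding produce (the sample fade values are hypothetical; only the number_format()/str_pad() calls mirror the change above):

<?php
foreach (array(0.5, 1.2, 12.0) as $fade) {
    $fade = number_format($fade, 1);              // "0.5"  "1.2"  "12.0"
    $fade = str_pad($fade, 4, "0", STR_PAD_LEFT); // "00.5" "01.2" "12.0"
    echo $fade . PHP_EOL;
}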
@ -1109,7 +1109,6 @@ class Application_Model_Preference
} else {
/*For now we just have this hack for debugging. We should not
rely on this crappy behaviour in case of failure*/
Logging::info("Pref: $pref_param");
Logging::warn("Index $x does not exist preferences");
Logging::warn("Defaulting to identity and printing preferences");
Logging::warn($ds);

View File

@ -321,7 +321,7 @@ SQL;
ws.description AS file_album_title,
ws.length AS file_length,
't'::BOOL AS file_exists,
NULL as file_mime
ws.mime as file_mime
SQL;
$streamJoin = <<<SQL
cc_schedule AS sched
@ -674,6 +674,12 @@ SQL;
$start = self::AirtimeTimeToPypoTime($item["start"]);
$end = self::AirtimeTimeToPypoTime($item["end"]);
list(,,,$start_hour,,) = explode("-", $start);
list(,,,$end_hour,,) = explode("-", $end);
$same_hour = $start_hour == $end_hour;
$independent_event = !$same_hour;
$schedule_item = array(
'id' => $media_id,
'type' => 'file',
@ -687,7 +693,7 @@ SQL;
'end' => $end,
'show_name' => $item["show_name"],
'replay_gain' => is_null($item["replay_gain"]) ? "0": $item["replay_gain"],
'independent_event' => false
'independent_event' => $independent_event,
);
self::appendScheduleItem($data, $start, $schedule_item);
}
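A minimal sketch of the hour-boundary check introduced above, using the same Y-m-d-H-i-s pypo key format (the timestamps are made up for illustration):

<?php
$start = "2012-11-28-13-59-30";   // hypothetical item starting at 13:59:30
$end   = "2012-11-28-14-00-15";   // ...and ending at 14:00:15

list(,,,$start_hour,,) = explode("-", $start);  // "13"
list(,,,$end_hour,,)   = explode("-", $end);    // "14"

// The item crosses an hour boundary, so it is flagged as an independent event.
var_dump($start_hour != $end_hour);             // bool(true)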
@ -827,26 +833,60 @@ SQL;
}
}
/* Check if two events are less than or equal to 1 second apart
*/
public static function areEventsLinked($event1, $event2) {
$dt1 = DateTime::createFromFormat("Y-m-d-H-i-s", $event1['start']);
$dt2 = DateTime::createFromFormat("Y-m-d-H-i-s", $event2['start']);
$seconds = $dt2->getTimestamp() - $dt1->getTimestamp();
return $seconds <= 1;
}
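A quick usage sketch of the linkage test above, with two hypothetical events one second apart (same Y-m-d-H-i-s key format):

<?php
$dt1 = DateTime::createFromFormat("Y-m-d-H-i-s", "2012-11-28-15-00-00");
$dt2 = DateTime::createFromFormat("Y-m-d-H-i-s", "2012-11-28-15-00-01");

// Mirrors areEventsLinked(): events are linked when their start times
// are at most one second apart.
var_dump(($dt2->getTimestamp() - $dt1->getTimestamp()) <= 1);   // bool(true)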
/**
* Purpose of this function is to iterate through the entire
* schedule array that was just built and fix the data up a bit. For
* example, if we have two consecutive webstreams, we don't need the
* first webstream to shutdown the output, when the second one will
* just switch it back on. Preventing this behaviour stops hiccups
* in output sound.
* Streams are a 4-stage process.
* 1) start buffering stream 5 seconds ahead of its start time
* 2) at the start time tell liquidsoap to switch to this source
* 3) at the end time, tell liquidsoap to stop reading this stream
* 4) at the end time, tell liquidsoap to switch away from input.http source.
*
* When we have two streams back-to-back, some of these steps are unnecessary
* for the second stream. Instead of sending commands 1,2,3,4,1,2,3,4 we should
* send 1,2,1,2,3,4 - We don't need to tell liquidsoap to stop reading (#3), because #1
* of the next stream implies this when we pass in a new url. We also don't need #4.
*
* There's a special case here as well. When the back-to-back streams are the same, we
* can collapse the instructions 1,2,(3,4,1,2),3,4 to 1,2,3,4. We basically cut out the
* middle part. This function handles this.
*/
private static function filterData(&$data)
private static function foldData(&$data)
{
$previous_key = null;
$previous_val = null;
$previous_previous_key = null;
$previous_previous_val = null;
$previous_previous_previous_key = null;
$previous_previous_previous_val = null;
foreach ($data as $k => $v) {
if ($v["type"] == "stream_buffer_start"
&& !is_null($previous_val)
&& $previous_val["type"] == "stream_output_end") {
if ($v["type"] == "stream_output_start"
&& !is_null($previous_previous_val)
&& $previous_previous_val["type"] == "stream_output_end"
&& self::areEventsLinked($previous_previous_val, $v)) {
unset($data[$previous_previous_previous_key]);
unset($data[$previous_previous_key]);
unset($data[$previous_key]);
if ($previous_previous_val['uri'] == $v['uri']) {
unset($data[$k]);
}
}
$previous_previous_previous_key = $previous_previous_key;
$previous_previous_previous_val = $previous_previous_val;
$previous_previous_key = $previous_key;
$previous_previous_val = $previous_val;
$previous_key = $k;
$previous_val = $v;
}
@ -859,10 +899,12 @@ SQL;
$data = array();
$data["media"] = array();
//Harbor kick times *MUST* be ahead of schedule events, so that pypo
//executes them first.
self::createInputHarborKickTimes($data, $range_start, $range_end);
self::createScheduledEvents($data, $range_start, $range_end);
self::filterData($data["media"]);
self::foldData($data["media"]);
return $data;
}

View File

@ -266,8 +266,12 @@ class Application_Model_ShowBuilder
$row["instance"] = intval($p_item["si_id"]);
$row["starts"] = $schedStartDT->format("H:i:s");
$row["ends"] = $schedEndDT->format("H:i:s");
$formatter = new LengthFormatter($p_item['file_length']);
$cue_out = Application_Common_DateHelper::calculateLengthInSeconds($p_item['cue_out']);
$cue_in = Application_Common_DateHelper::calculateLengthInSeconds($p_item['cue_in']);
$run_time = $cue_out-$cue_in;
$formatter = new LengthFormatter(Application_Common_DateHelper::ConvertMSToHHMMSSmm($run_time*1000));
$row['runtime'] = $formatter->format();
$row["title"] = $p_item["file_track_title"];

View File

@ -34,7 +34,7 @@ class Application_Model_Webstream implements Application_Model_LibraryEditable
public function getCreatorId()
{
return $this->Webstream->getCcSubjs()->getDbId();
return $this->webstream->getDbCreatorId();
}
public function getLastModified($p_type)
@ -51,7 +51,7 @@ class Application_Model_Webstream implements Application_Model_LibraryEditable
$di = new DateInterval("PT{$hours}H{$min}M{$sec}S");
return $di->format("%Hh %Im");
}
}
return "";
}
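For reference, DateInterval::format() with the "%Hh %Im" pattern used above zero-pads both fields; a small sketch with made-up values:

<?php
$hours = 1; $min = 30; $sec = 0;                  // hypothetical stream length
$di = new DateInterval("PT{$hours}H{$min}M{$sec}S");
echo $di->format("%Hh %Im"), PHP_EOL;             // 01h 30m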

View File

@ -153,12 +153,24 @@ function buildplaylist(p_url, p_playIndex) {
continue;
}
} else if (data[index]['type'] == 1) {
media = {title: data[index]['element_title'],
artist: data[index]['element_artist'],
mp3:data[index]['uri']
};
var mime = data[index]['mime'];
if (mime.search(/mp3/i) > 0 || mime.search(/mpeg/i) > 0) {
key = "mp3";
} else if (mime.search(/og(g|a)/i) > 0 || mime.search(/vorbis/i) > 0) {
key = "oga";
} else if (mime.search(/mp4/i) > 0) {
key = "m4a";
} else if (mime.search(/wav/i) > 0) {
key = "wav";
}
if (key) {
media = {title: data[index]['element_title'],
artist: data[index]['element_artist']
};
media[key] = data[index]['uri']
}
}
console.log(data[index]);
if (media && isAudioSupported(data[index]['mime'])) {
// javascript doesn't support associative array with numeric key
// so we need to remove the gap if we skip any of tracks due to

View File

@ -15,5 +15,6 @@ function isAudioSupported(mime){
//is adding a javascript library to do the work for you, which seems like overkill....
return (!!audio.canPlayType && audio.canPlayType(bMime) != "") ||
(mime.indexOf("mp3") != -1 && navigator.mimeTypes ["application/x-shockwave-flash"] != undefined) ||
(mime.indexOf("mp4") != -1 && navigator.mimeTypes ["application/x-shockwave-flash"] != undefined);
(mime.indexOf("mp4") != -1 && navigator.mimeTypes ["application/x-shockwave-flash"] != undefined) ||
(mime.indexOf("mpeg") != -1 && navigator.mimeTypes ["application/x-shockwave-flash"] != undefined);
}

View File

@ -81,6 +81,10 @@ var AIRTIME = (function(AIRTIME) {
return container;
},
cursor : 'pointer',
cursorAt: {
top: 30,
left: 100
},
connectToSortable : '#show_builder_table'
});
};

View File

@ -975,7 +975,6 @@ function addProgressIcon(id) {
}
function checkLibrarySCUploadStatus(){
var url = '/Library/get-upload-to-soundcloud-status',
span,
id;
@ -1028,15 +1027,22 @@ function addQtipToSCIcons(){
});
}
else if($(this).hasClass("soundcloud")){
var sc_id = $(this).parent().parent().data("aData").soundcloud_id;
$(this).qtip({
content: {
//text: "The soundcloud id for this file is: "+sc_id
text: "Retrieving data from the server...",
ajax: {
url: "/Library/get-upload-to-soundcloud-status",
type: "post",
data: ({format: "json", id : id, type: "file"}),
success: function(json, status){
this.set('content.text', "The soundcloud id for this file is: "+json.sc_id);
id = sc_id;
if (id == undefined) {
id = json.sc_id;
}
this.set('content.text', "The soundcloud id for this file is: "+id);
}
}
},

View File

@ -402,8 +402,9 @@ function setupUI() {
$(".repeat_tracks_help_icon").qtip({
content: {
text: "If your criteria is too strict, Airtime may not be able to fill up the desired smart block length." +
" Hence, if you check this option, tracks will be used more than once."
text: "The desired block length will not be reached if Airtime cannot find " +
"enough unique tracks to match your criteria. Enable this option if you wish to allow " +
"tracks to be added multiple times to the smart block."
},
hide: {
delay: 500,

View File

@ -25,7 +25,8 @@ echo "----------------------------------------------------"
dist=`lsb_release -is`
code=`lsb_release -cs`
if [ "$dist" = "Debian" ]; then
#enable squeeze backports to get lame packages
if [ "$dist" = "Debian" -a "$code" = "squeeze" ]; then
set +e
grep -E "deb http://backports.debian.org/debian-backports squeeze-backports main" /etc/apt/sources.list
returncode=$?

View File

@ -28,7 +28,8 @@ echo "----------------------------------------------------"
dist=`lsb_release -is`
code=`lsb_release -cs`
if [ "$dist" -eq "Debian" ]; then
#enable squeeze backports to get lame packages
if [ "$dist" = "Debian" -a "$code" = "squeeze" ]; then
grep "deb http://backports.debian.org/debian-backports squeeze-backports main" /etc/apt/sources.list
if [ "$?" -ne "0" ]; then
echo "deb http://backports.debian.org/debian-backports squeeze-backports main" >> /etc/apt/sources.list

View File

@ -1,3 +1,3 @@
<?php
define('AIRTIME_VERSION', '2.2.0');
define('AIRTIME_VERSION', '2.2.1');

View File

@ -100,4 +100,8 @@ if (strcmp($version, "2.2.0") < 0) {
passthru("php --php-ini $SCRIPTPATH/../airtime-php.ini $SCRIPTPATH/../upgrades/airtime-2.2.0/airtime-upgrade.php");
pause();
}
if (strcmp($version, "2.2.1") < 0) {
passthru("php --php-ini $SCRIPTPATH/../airtime-php.ini $SCRIPTPATH/../upgrades/airtime-2.2.1/airtime-upgrade.php");
pause();
}
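The new 2.2.1 block is gated by the same strcmp() convention as the earlier upgrade steps; since the version components here are single digits, a byte-wise comparison behaves like a version comparison:

<?php
var_dump(strcmp("2.2.0", "2.2.1") < 0);   // bool(true)  -> 2.2.1 upgrade runs
var_dump(strcmp("2.2.1", "2.2.1") < 0);   // bool(false) -> upgrade is skipped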
echo "******************************* Upgrade Complete *******************************".PHP_EOL;

View File

@ -0,0 +1,24 @@
<?php
/* All functions other than start() should be marked as
* private.
*/
class AirtimeDatabaseUpgrade{
public static function start($p_dbValues){
echo "* Updating Database".PHP_EOL;
self::task0($p_dbValues);
echo " * Complete".PHP_EOL;
}
private static function task0($p_dbValues){
$username = $p_dbValues['database']['dbuser'];
$password = $p_dbValues['database']['dbpass'];
$host = $p_dbValues['database']['host'];
$database = $p_dbValues['database']['dbname'];
$dir = __DIR__;
passthru("export PGPASSWORD=$password && psql -h $host -U $username -q -f $dir/data/upgrade.sql $database 2>&1 | grep -v \"will create implicit index\"");
}
}

View File

@ -0,0 +1,8 @@
<?php
require_once 'DbUpgrade.php';
$filename = "/etc/airtime/airtime.conf";
$values = parse_ini_file($filename, true);
AirtimeDatabaseUpgrade::start($values);

View File

@ -0,0 +1,9 @@
DELETE FROM cc_pref WHERE keystr = 'system_version';
INSERT INTO cc_pref (keystr, valstr) VALUES ('system_version', '2.2.1');
ALTER TABLE cc_block
DROP CONSTRAINT cc_block_createdby_fkey;
ALTER TABLE cc_block
ADD CONSTRAINT cc_block_createdby_fkey FOREIGN KEY (creator_id) REFERENCES cc_subjs(id) ON DELETE CASCADE;

View File

@ -18,7 +18,7 @@ from configobj import ConfigObj
import string
import traceback
AIRTIME_VERSION = "2.2.0"
AIRTIME_VERSION = "2.2.1"
# TODO : Place these functions in some common module. Right now, media

View File

@ -254,29 +254,15 @@ def output_to(output_type, type, bitrate, host, port, pass, mount_point, url, de
if user == "" then
user_ref := "source"
end
description_ref = ref description
if description == "" then
description_ref := "N/A"
end
genre_ref = ref genre
if genre == "" then
genre_ref := "N/A"
end
url_ref = ref url
if url == "" then
url_ref := "N/A"
end
output.shoutcast_mono = output.shoutcast(id = "shoutcast_stream_#{stream}",
host = host,
port = port,
password = pass,
fallible = true,
url = !url_ref,
genre = !genre_ref,
name = !description_ref,
url = url,
genre = genre,
name = description,
user = !user_ref,
on_error = on_error,
on_connect = on_connect)
@ -286,9 +272,9 @@ def output_to(output_type, type, bitrate, host, port, pass, mount_point, url, de
port = port,
password = pass,
fallible = true,
url = !url_ref,
genre = !genre_ref,
name = !description_ref,
url = url,
genre = genre,
name = description,
user = !user_ref,
on_error = on_error,
on_connect = on_connect)
@ -390,13 +376,6 @@ def add_skip_command(s)
"skip",fun(s) -> begin log("source.skip") skip(s) end)
end
dyn_out = output.icecast(%wav,
host="localhost",
port=8999,
password=stream_harbor_pass,
mount="test-harbor",
fallible=true)
def set_dynamic_source_id(id) =
current_dyn_id := id
string_of(!current_dyn_id)
@ -406,123 +385,159 @@ def get_dynamic_source_id() =
string_of(!current_dyn_id)
end
#cc-4633
# Function to create a playlist source and output it.
def create_dynamic_source(uri) =
# The playlist source
s = audio_to_stereo(input.http(buffer=2., max=12., uri))
# The output
active_dyn_out = dyn_out(s)
# NOTE
# A few values are hardcoded and may be dependent:
# - the delay in gracetime is linked with the buffer duration of input.http
# (delay should be a bit less than buffer)
# - crossing duration should be less than buffer length
# (at best, a higher duration will be ineffective)
# We register both source and output
# in the list of sources
dyn_sources :=
list.append([(!current_dyn_id, s),(!current_dyn_id, active_dyn_out)], !dyn_sources)
# HTTP input with "restart" command that waits for "stop" to be effected
# before "start" command is issued. Optionally it takes a new URL to play,
# which makes it a convenient replacement for "url".
# In the future, this may become a core feature of the HTTP input.
# TODO If we stop and restart quickly several times in a row,
# the data bursts accumulate and create buffer overflow.
# Flushing the buffer on restart could be a good idea, but
# it would also create an interruption while the buffer is
# refilling... on the other hand, this would avoid having to
# fade using both cross() and switch().
def input.http_restart(~id,~initial_url="http://dummy/url")
source = input.http(buffer=5.,max=15.,id=id,autostart=false,initial_url)
def stopped()
"stopped" == list.hd(server.execute("#{id}.status"))
end
server.register(namespace=id,
"restart",
usage="restart [url]",
fun (url) -> begin
if url != "" then
log(string_of(server.execute("#{id}.url #{url}")))
end
log(string_of(server.execute("#{id}.stop")))
add_timeout(0.5,
{ if stopped() then
log(string_of(server.execute("#{id}.start"))) ;
(-1.)
else 0.5 end})
"OK"
end)
# Dummy output should be useless if HTTP stream is meant
# to be listened to immediately. Otherwise, apply it.
#
# output.dummy(fallible=true,source)
source
notify([("schedule_table_id", !current_dyn_id)])
"Done!"
end
# Transitions between URL changes in HTTP streams.
def cross_http(~debug=true,~http_input_id,source)
# A function to destroy a dynamic source
def destroy_dynamic_source(id) =
# We need to find the source in the list,
# remove it and destroy it. Currently, the language
# lacks some nice operators for that so we do it
# the functional way
id = http_input_id
last_url = ref ""
change = ref false
# This function is executed on every item in the list
# of dynamic sources
def parse_list(ret, current_element) =
# ret is of the form: (matching_sources, remaining_sources)
# We extract those two:
matching_sources = fst(ret)
remaining_sources = snd(ret)
# current_element is of the form: ("uri", source) so
# we check the first element
current_id = fst(current_element)
if current_id == id then
# In this case, we add the source to the list of
# matched sources
(list.append( [snd(current_element)],
matching_sources),
remaining_sources)
else
# In this case, we put the element in the list of remaining
# sources
(matching_sources,
list.append([current_element],
remaining_sources))
def on_m(m)
notify_stream(m)
changed = m["source_url"] != !last_url
log("URL now #{m['source_url']} (change: #{changed})")
if changed then
if !last_url != "" then change := true end
last_url := m["source_url"]
end
end
# Now we execute the function:
result = list.fold(parse_list, ([], []), !dyn_sources)
matching_sources = fst(result)
remaining_sources = snd(result)
# We store the remaining sources in dyn_sources
dyn_sources := remaining_sources
# We use both metadata and status to know about the current URL.
# Using only metadata may be more precise in crazy corner cases,
# but it's also asking too much: the metadata may not pass through
# before the crosser is instantiated.
# Using only status in crosser misses some info, eg. on first URL.
source = on_metadata(on_m,source)
# If no source matched, we return an error
if list.length(matching_sources) == 0 then
"Error: no matching sources!"
else
# We stop all sources
list.iter(source.shutdown, matching_sources)
# And return
"Done!"
cross_d = 3.
def crosser(a,b)
url = list.hd(server.execute('#{id}.url'))
status = list.hd(server.execute('#{id}.status'))
on_m([("source_url",url)])
if debug then
log("New track inside HTTP stream")
log(" status: #{status}")
log(" need to cross: #{!change}")
log(" remaining #{source.remaining(a)} sec before, \
#{source.remaining(b)} sec after")
end
if !change then
change := false
# In principle one should avoid crossing on a live stream
# it'd be okay to do it here (eg. use add instead of sequence)
# because it's only once per URL, but be cautious.
sequence([fade.out(duration=cross_d,a),fade.in(b)])
else
# This is done on tracks inside a single stream.
# Do NOT cross here or you'll gradually empty the buffer!
sequence([a,b])
end
end
# Setting conservative=true would mess with the delayed switch below
cross(duration=cross_d,conservative=false,crosser,source)
end
# Custom fallback between http and default source with fading of
# beginning and end of HTTP stream.
# It does not take potential URL changes into account, as long as
# they do not interrupt streaming (thanks to the HTTP buffer).
def http_fallback(~http_input_id,~http,~default)
id = http_input_id
# We use a custom switching predicate to trigger switching (and thus,
# transitions) before the end of a track (rather, end of HTTP stream).
# It is complexified because we don't want to trigger switching when
# HTTP disconnects for just an instant, when changing URL: for that
# we use gracetime below.
# A function to destroy a dynamic source
def destroy_dynamic_source_all() =
# We need to find the source in the list,
# remove it and destroy it. Currently, the language
# lacks some nice operators for that so we do it
# the functional way
# This function is executed on every item in the list
# of dynamic sources
def parse_list(ret, current_element) =
# ret is of the form: (matching_sources, remaining_sources)
# We extract those two:
matching_sources = fst(ret)
remaining_sources = snd(ret)
# current_element is of the form: ("uri", source) so
# we check the first element
current_uri = fst(current_element)
# in this case, we add the source to the list of
# matched sources
(list.append( [snd(current_element)],
matching_sources),
remaining_sources)
def gracetime(~delay=3.,f)
last_true = ref 0.
{ if f() then
last_true := gettimeofday()
true
else
gettimeofday() < !last_true+delay
end }
end
# now we execute the function:
result = list.fold(parse_list, ([], []), !dyn_sources)
matching_sources = fst(result)
remaining_sources = snd(result)
# we store the remaining sources in dyn_sources
dyn_sources := remaining_sources
# if no source matched, we return an error
if list.length(matching_sources) == 0 then
"error: no matching sources!"
else
# we stop all sources
list.iter(source.shutdown, matching_sources)
# And return
"Done!"
def connected()
status = list.hd(server.execute("#{id}.status"))
not(list.mem(status,["polling","stopped"]))
end
connected = gracetime(connected)
def to_live(a,b) =
log("TRANSITION to live")
add(normalize=false,
[fade.initial(b),fade.final(a)])
end
def to_static(a,b) =
log("TRANSITION to static")
sequence([fade.out(a),fade.initial(b)])
end
switch(
track_sensitive=false,
transitions=[to_live,to_static],
[(# make sure it is connected, and not buffering
{connected() and source.is_ready(http) and !webstream_enabled}, http),
({true},default)])
end

View File

@ -7,15 +7,11 @@ set("server.telnet", true)
set("server.telnet.port", 1234)
#Dynamic source list
dyn_sources = ref []
#dyn_sources = ref []
webstream_enabled = ref false
time = ref string_of(gettimeofday())
queue = audio_to_stereo(id="queue_src", request.equeue(id="queue", length=0.5))
queue = cue_cut(queue)
queue = amplify(1., override="replay_gain", queue)
#fallback between queue and input.harbor (for restreaming other web-streams)
#live stream setup
set("harbor.bind_addr", "0.0.0.0")
@ -35,14 +31,17 @@ s2_namespace = ref ''
s3_namespace = ref ''
just_switched = ref false
stream_harbor_pass = list.hd(get_process_lines('pwgen -s -N 1 -n 20'))
#stream_harbor_pass = list.hd(get_process_lines('pwgen -s -N 1 -n 20'))
%include "ls_lib.liq"
web_stream = input.harbor("test-harbor", port=8999, password=stream_harbor_pass)
web_stream = on_metadata(notify_stream, web_stream)
output.dummy(fallible=true, web_stream)
#web_stream = input.harbor("test-harbor", port=8999, password=stream_harbor_pass)
#web_stream = on_metadata(notify_stream, web_stream)
#output.dummy(fallible=true, web_stream)
queue = audio_to_stereo(id="queue_src", request.equeue(id="queue", length=0.5))
queue = cue_cut(queue)
queue = amplify(1., override="replay_gain", queue)
# the crossfade function controls fade in/out
queue = crossfade_airtime(queue)
@ -50,11 +49,9 @@ queue = on_metadata(notify, queue)
queue = map_metadata(update=false, append_title, queue)
output.dummy(fallible=true, queue)
stream_queue = switch(id="stream_queue_switch", track_sensitive=false,
transitions=[transition, transition],
[({!webstream_enabled},web_stream),
({true}, queue)])
http = input.http_restart(id="http")
http = cross_http(http_input_id="http",http)
stream_queue = http_fallback(http_input_id="http",http=http,default=queue)
ignore(output.dummy(stream_queue, fallible=true))
@ -84,7 +81,9 @@ server.register(namespace="dynamic_source",
description="Enable webstream output",
usage='start',
"output_start",
fun (s) -> begin log("dynamic_source.output_start") webstream_enabled := true "enabled" end)
fun (s) -> begin log("dynamic_source.output_start")
notify([("schedule_table_id", !current_dyn_id)])
webstream_enabled := true "enabled" end)
server.register(namespace="dynamic_source",
description="Enable webstream output",
usage='stop',
@ -92,32 +91,33 @@ server.register(namespace="dynamic_source",
fun (s) -> begin log("dynamic_source.output_stop") webstream_enabled := false "disabled" end)
server.register(namespace="dynamic_source",
description="Set the cc_schedule row id",
description="Set the streams cc_schedule row id",
usage="id <id>",
"id",
fun (s) -> begin log("dynamic_source.id") set_dynamic_source_id(s) end)
server.register(namespace="dynamic_source",
description="Get the cc_schedule row id",
description="Get the streams cc_schedule row id",
usage="get_id",
"get_id",
fun (s) -> begin log("dynamic_source.get_id") get_dynamic_source_id() end)
server.register(namespace="dynamic_source",
description="Start a new dynamic source.",
usage="start <uri>",
"read_start",
fun (uri) -> begin log("dynamic_source.read_start") create_dynamic_source(uri) end)
server.register(namespace="dynamic_source",
description="Stop a dynamic source.",
usage="stop <id>",
"read_stop",
fun (s) -> begin log("dynamic_source.read_stop") destroy_dynamic_source(s) end)
server.register(namespace="dynamic_source",
description="Stop a dynamic source.",
usage="stop <id>",
"read_stop_all",
fun (s) -> begin log("dynamic_source.read_stop") destroy_dynamic_source_all() end)
#server.register(namespace="dynamic_source",
# description="Start a new dynamic source.",
# usage="start <uri>",
# "read_start",
# fun (uri) -> begin log("dynamic_source.read_start") begin_stream_read(uri) end)
#server.register(namespace="dynamic_source",
# description="Stop a dynamic source.",
# usage="stop <id>",
# "read_stop",
# fun (s) -> begin log("dynamic_source.read_stop") stop_stream_read(s) end)
#server.register(namespace="dynamic_source",
# description="Stop a dynamic source.",
# usage="stop <id>",
# "read_stop_all",
# fun (s) -> begin log("dynamic_source.read_stop") destroy_dynamic_source_all() end)
default = amplify(id="silence_src", 0.00001, noise())
default = rewrite_metadata([("artist","Airtime"), ("title", "offline")], default)
@ -224,6 +224,7 @@ end
s = switch(id="default_switch", track_sensitive=false,
transitions=[transition_default, transition],
[({!scheduled_play_enabled}, stream_queue),({true},default)])
s = append_dj_inputs(master_live_stream_port, master_live_stream_mp,
dj_live_stream_port, dj_live_stream_mp, s)

View File

@ -478,8 +478,8 @@ class PypoPush(Thread):
self.logger.debug(msg)
tn.write(msg)
#example: dynamic_source.read_start http://87.230.101.24:80/top100station.mp3
msg = 'dynamic_source.read_start %s\n' % media_item['uri'].encode('latin-1')
#msg = 'dynamic_source.read_start %s\n' % media_item['uri'].encode('latin-1')
msg = 'http.restart %s\n' % media_item['uri'].encode('latin-1')
self.logger.debug(msg)
tn.write(msg)
@ -520,7 +520,8 @@ class PypoPush(Thread):
self.telnet_lock.acquire()
tn = telnetlib.Telnet(LS_HOST, LS_PORT)
msg = 'dynamic_source.read_stop_all xxx\n'
#msg = 'dynamic_source.read_stop_all xxx\n'
msg = 'http.stop\n'
self.logger.debug(msg)
tn.write(msg)
@ -546,7 +547,8 @@ class PypoPush(Thread):
tn = telnetlib.Telnet(LS_HOST, LS_PORT)
#dynamic_source.stop http://87.230.101.24:80/top100station.mp3
msg = 'dynamic_source.read_stop %s\n' % media_item['row_id']
#msg = 'dynamic_source.read_stop %s\n' % media_item['row_id']
msg = 'http.stop\n'
self.logger.debug(msg)
tn.write(msg)

View File

@ -8,6 +8,9 @@ import json
import shutil
import commands
sys.path.append('/usr/lib/airtime/media-monitor/mm2/')
from media.monitor.pure import is_file_supported
# create logger
logger = logging.getLogger()
@ -53,8 +56,7 @@ def copy_or_move_files_to(paths, dest, flag):
copy_or_move_files_to(sub_path, dest, flag)
elif(os.path.isfile(path)):
#copy file to dest
ext = os.path.splitext(path)[1]
if( 'mp3' in ext or 'ogg' in ext ):
if(is_file_supported(path)):
destfile = dest+os.path.basename(path)
if(flag == 'copy'):
print "Copying %(src)s to %(dest)s..." % {'src':path, 'dest':destfile}
@ -159,7 +161,7 @@ def WatchAddAction(option, opt, value, parser):
path = currentDir+path
path = apc.encode_to(path, 'utf-8')
if(os.path.isdir(path)):
os.chmod(path, 0765)
#os.chmod(path, 0765)
res = api_client.add_watched_dir(path)
if(res is None):
exit("Unable to connect to the server.")

View File

@ -27,14 +27,14 @@ def printUsage():
print " -m mount (default: test) "
print " -h show help menu"
def find_liquidsoap_binary():
"""
Starting with Airtime 2.0, we don't know the exact location of the Liquidsoap
binary because it may have been installed through a debian package. Let's find
the location of this binary.
"""
rv = subprocess.call("which airtime-liquidsoap > /dev/null", shell=True)
if rv == 0:
return "airtime-liquidsoap"
@ -78,7 +78,7 @@ for o, a in optlist:
mount = a
try:
print "Protocol: %s " % stream_type
print "Host: %s" % host
print "Port: %s" % port
@ -86,35 +86,35 @@ try:
print "Password: %s" % password
if stream_type == "icecast":
print "Mount: %s\n" % mount
url = "http://%s:%s/%s" % (host, port, mount)
print "Outputting to %s streaming server. You should be able to hear a monotonous tone on '%s'. Press ctrl-c to quit." % (stream_type, url)
liquidsoap_exe = find_liquidsoap_binary()
if liquidsoap_exe is None:
raise Exception("Liquidsoap not found!")
if stream_type == "icecast":
command = "%s 'output.icecast(%%vorbis, host = \"%s\", port = %s, user= \"%s\", password = \"%s\", mount=\"%s\", sine())'" % (liquidsoap_exe, host, port, user, password, mount)
else:
command = "%s /usr/lib/airtime/pypo/bin/liquidsoap_scripts/library/pervasives.liq 'output.shoutcast(%%mp3, host=\"%s\", port = %s, user= \"%s\", password = \"%s\", sine())'" \
% (liquidsoap_exe, host, port, user, password)
if not verbose:
command += " 2>/dev/null | grep \"failed\""
else:
print command
#print command
rv = subprocess.call(command, shell=True)
#if we reach this point, it means that our subprocess exited without the user
#doing a keyboard interrupt. This means there was a problem outputting to the
#stream server. Print appropriate message.
print "There was an error with your stream configuration. Please review your configuration " + \
"and run this program again. Use the -h option for help"
except KeyboardInterrupt, ki:
print "\nExiting"
except Exception, e: