import os
import sys
import time
import logging
import logging.config
import shutil
import json
import telnetlib
import copy
from threading import Thread

from api_clients import api_client
from configobj import ConfigObj

# configure logging
logging.config.fileConfig("logging.cfg")

# loading config file
try:
    config = ConfigObj('/etc/airtime/pypo.cfg')
    LS_HOST = config['ls_host']
    LS_PORT = config['ls_port']
    POLL_INTERVAL = int(config['poll_interval'])
except Exception, e:
    logger = logging.getLogger()
    logger.error('Error loading config file: %s', e)
    sys.exit()


class PypoFetch(Thread):
    def __init__(self, pypoFetch_q, pypoPush_q, media_q, telnet_lock):
        Thread.__init__(self)
        self.api_client = api_client.api_client_factory(config)
        self.fetch_queue = pypoFetch_q
        self.push_queue = pypoPush_q
        self.media_prepare_queue = media_q
        self.telnet_lock = telnet_lock
        self.logger = logging.getLogger()
        self.cache_dir = os.path.join(config["cache_dir"], "scheduler")
        self.logger.debug("Cache dir %s", self.cache_dir)

        try:
            if not os.path.isdir(self.cache_dir):
                """
                We get here if the path does not exist, or the path exists
                but is a file. We are not handling the second case, but we
                don't think we actually care about handling it.
                """
                self.logger.debug("Cache dir does not exist. Creating...")
                os.makedirs(self.cache_dir)
        except Exception, e:
            pass

        self.schedule_data = []
        self.logger.info("PypoFetch: init complete")
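
    # Queue wiring (as set up above): the message handler feeds fetch_queue,
    # prepared schedules go out on push_queue to pypo-push, and media to be
    # processed is handed off via media_prepare_queue.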
"""
|
|
|
|
Handle a message from RabbitMQ, put it into our yucky global var.
|
|
|
|
Hopefully there is a better way to do this.
|
|
|
|
"""
|
2012-02-12 05:53:43 +01:00
|
|
|
def handle_message(self, message):
|
|
|
|
try:
|
2012-02-27 19:52:35 +01:00
|
|
|
self.logger.info("Received event from Pypo Message Handler: %s" % message)
|
2011-09-13 20:56:24 +02:00
|
|
|
|
2012-02-12 05:53:43 +01:00
|
|
|
m = json.loads(message)
|
2011-09-13 20:56:24 +02:00
|
|
|
command = m['event_type']
|
2012-02-27 19:52:35 +01:00
|
|
|
self.logger.info("Handling command: " + command)
|
2011-08-15 22:10:46 +02:00
|
|
|
|
2011-09-13 20:56:24 +02:00
|
|
|
if command == 'update_schedule':
|
|
|
|
self.schedule_data = m['schedule']
|
2012-03-17 18:55:56 +01:00
|
|
|
self.process_schedule(self.schedule_data)
|
2011-09-13 20:56:24 +02:00
|
|
|
elif command == 'update_stream_setting':
|
2012-02-27 19:52:35 +01:00
|
|
|
self.logger.info("Updating stream setting...")
|
2011-09-13 20:56:24 +02:00
|
|
|
self.regenerateLiquidsoapConf(m['setting'])
|
2012-02-11 00:43:40 +01:00
|
|
|
elif command == 'update_stream_format':
|
2012-02-27 19:52:35 +01:00
|
|
|
self.logger.info("Updating stream format...")
|
2012-02-11 00:43:40 +01:00
|
|
|
self.update_liquidsoap_stream_format(m['stream_format'])
|
|
|
|
elif command == 'update_station_name':
|
2012-02-27 19:52:35 +01:00
|
|
|
self.logger.info("Updating station name...")
|
2012-02-11 00:43:40 +01:00
|
|
|
self.update_liquidsoap_station_name(m['station_name'])
|
2012-03-08 23:42:38 +01:00
|
|
|
elif command == 'switch_source':
|
|
|
|
self.logger.info("switch_on_source show command received...")
|
|
|
|
self.switch_source(m['sourcename'], m['status'])
|
2012-03-14 16:09:59 +01:00
|
|
|
elif command == 'disconnect_source':
|
|
|
|
self.logger.info("disconnect_on_source show command received...")
|
|
|
|
self.disconnect_source(m['sourcename'])
|
2011-09-13 20:56:24 +02:00
|
|
|
except Exception, e:
|
2012-02-28 20:44:39 +01:00
|
|
|
import traceback
|
|
|
|
top = traceback.format_exc()
|
2012-02-27 19:52:35 +01:00
|
|
|
self.logger.error('Exception: %s', e)
|
|
|
|
self.logger.error("traceback: %s", top)
|
|
|
|
self.logger.error("Exception in handling Message Handler message: %s", e)
|
2012-03-14 16:09:59 +01:00
|
|
|
|
|
|
|
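    # For reference, a schedule-update message is JSON shaped roughly like
    # the following (inferred from the fields accessed above; the payload
    # details are illustrative, not a verbatim broker message):
    #
    #   {"event_type": "update_schedule", "schedule": {"media": {...}}}
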
    def disconnect_source(self, sourcename):
        self.logger.debug('Disconnecting source: %s', sourcename)
        command = ""
        if sourcename == "master_dj":
            command += "master_harbor.kick\n"
        elif sourcename == "live_dj":
            command += "live_dj_harbor.kick\n"

        self.telnet_lock.acquire()
        try:
            tn = telnetlib.Telnet(LS_HOST, LS_PORT)
            tn.write(command)
            tn.write('exit\n')
            tn.read_all()
        except Exception, e:
            self.logger.error(str(e))
        finally:
            self.telnet_lock.release()

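    # Liquidsoap's telnet server executes one command per line and closes
    # the session on "exit", so tn.read_all() above drains the full
    # response once "exit" has been sent.
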
    def switch_source(self, sourcename, status):
        self.logger.debug('Switching source: %s to "%s" status', sourcename, status)
        command = "streams."
        if sourcename == "master_dj":
            command += "master_dj_"
        elif sourcename == "live_dj":
            command += "live_dj_"
        elif sourcename == "scheduled_play":
            command += "scheduled_play_"

        if status == "on":
            command += "start\n"
        else:
            command += "stop\n"

        self.telnet_lock.acquire()
        try:
            tn = telnetlib.Telnet(LS_HOST, LS_PORT)
            tn.write(command)
            tn.write('exit\n')
            tn.read_all()
        except Exception, e:
            self.logger.error(str(e))
        finally:
            self.telnet_lock.release()

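    # For example, switch_source("master_dj", "on") composes and sends the
    # Liquidsoap server command "streams.master_dj_start"; an "off" status
    # sends "streams.master_dj_stop" instead.
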
"""
|
2012-03-20 21:41:15 +01:00
|
|
|
grabs some information that are needed to be set on bootstrap time
|
|
|
|
and configures them
|
2012-03-14 15:22:41 +01:00
|
|
|
"""
|
2012-03-20 21:41:15 +01:00
|
|
|
def set_bootstrap_variables(self):
|
|
|
|
self.logger.debug('Getting information needed on bootstrap from Airtime')
|
|
|
|
info = self.api_client.get_bootstrap_info()
|
|
|
|
self.logger.debug('info:%s',info)
|
|
|
|
for k, v in info['switch_status'].iteritems():
|
2012-03-14 15:22:41 +01:00
|
|
|
self.switch_source(k, v)
|
2012-03-20 21:41:15 +01:00
|
|
|
self.update_liquidsoap_stream_format(info['stream_label'])
|
|
|
|
self.update_liquidsoap_station_name(info['station_name'])
|
2012-03-18 04:51:58 +01:00
|
|
|
|
2012-03-02 22:55:11 +01:00
|
|
|
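    # The bootstrap payload is expected to look roughly like this (keys
    # taken from the accesses above; values purely illustrative):
    #
    #   {"switch_status": {"master_dj": "off", "live_dj": "off",
    #                      "scheduled_play": "on"},
    #    "stream_label": "...",
    #    "station_name": "..."}
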
    def regenerateLiquidsoapConf(self, setting_p):
        existing = {}

        fh = open('/etc/airtime/liquidsoap.cfg', 'r')
        self.logger.info("Reading existing config...")
        # read the existing conf file and build a dict
        while True:
            line = fh.readline()
            if not line:
                break

            line = line.strip()
            # skip comments
            if line.find('#') == 0:
                continue
            # skip empty lines
            if not line:
                continue

            key, value = line.split(' = ')
            key = key.strip()
            value = value.strip()
            value = value.replace('"', '')
            if value == "" or value == "0":
                value = ''
            existing[key] = value
        fh.close()

        # per-stream flag for any change in config
        change = {}
        # This flag is to detect a disabled -> disabled change.
        # In that case, we don't want to restart even if there are changes.
        state_change_restart = {}
        # restart flag
        restart = False

        self.logger.info("Looking for changes...")
        setting = sorted(setting_p.items())
        # look for changes
        for k, s in setting:
            if "output_sound_device" in s[u'keyname'] or "icecast_vorbis_metadata" in s[u'keyname']:
                dump, stream = s[u'keyname'].split('_', 1)
                state_change_restart[stream] = False
                # This is the case where a restart is required no matter what
                if existing[s[u'keyname']] != s[u'value']:
                    self.logger.info("'Need-to-restart' state detected for %s...", s[u'keyname'])
                    restart = True
            elif "master_live_stream_port" in s[u'keyname'] or "master_live_stream_mp" in s[u'keyname'] or "dj_live_stream_port" in s[u'keyname'] or "dj_live_stream_mp" in s[u'keyname']:
                if existing[s[u'keyname']] != s[u'value']:
                    self.logger.info("'Need-to-restart' state detected for %s...", s[u'keyname'])
                    restart = True
            else:
                stream, dump = s[u'keyname'].split('_', 1)
                if "_output" in s[u'keyname']:
                    if existing[s[u'keyname']] != s[u'value']:
                        self.logger.info("'Need-to-restart' state detected for %s...", s[u'keyname'])
                        restart = True
                        state_change_restart[stream] = True
                    elif s[u'value'] != 'disabled':
                        state_change_restart[stream] = True
                    else:
                        state_change_restart[stream] = False
                else:
                    # setting initial value
                    if stream not in change:
                        change[stream] = False
                    if not (s[u'value'] == existing[s[u'keyname']]):
                        self.logger.info("Keyname: %s, Current value: %s, New Value: %s", s[u'keyname'], existing[s[u'keyname']], s[u'value'])
                        change[stream] = True

        # the change flag for sound_device is always True
        self.logger.info("Change: %s, State_Change: %s...", change, state_change_restart)

        for k, v in state_change_restart.items():
            if k == "sound_device" and v:
                restart = True
            elif v and change[k]:
                self.logger.info("'Need-to-restart' state detected for %s...", k)
                restart = True

        # rewrite
        if restart:
            fh = open('/etc/airtime/liquidsoap.cfg', 'w')
            self.logger.info("Rewriting liquidsoap.cfg...")
            fh.write("################################################\n")
            fh.write("# THIS FILE IS AUTO GENERATED. DO NOT CHANGE!! #\n")
            fh.write("################################################\n")
            for k, d in setting:
                buffer_str = d[u'keyname'] + " = "
                if d[u'type'] == 'string':
                    temp = d[u'value']
                    buffer_str += "\"" + temp + "\""
                else:
                    temp = d[u'value']
                    if temp == "":
                        temp = "0"
                    buffer_str += temp
                buffer_str += "\n"
                fh.write(api_client.encode_to(buffer_str))
            fh.write("log_file = \"/var/log/airtime/pypo-liquidsoap/<script>.log\"\n")
            fh.close()
            # Restart pypo. We could restart just liquidsoap,
            # but that somehow takes more time.
            self.logger.info("Restarting pypo...")
            sys.exit(0)
        else:
            self.logger.info("No change detected in setting...")
            self.update_liquidsoap_connection_status()

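    # Each line written back to liquidsoap.cfg has the form
    # `keyname = value`, with string values quoted, e.g. (hypothetical
    # key and value):
    #
    #   icecast_host = "127.0.0.1"
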
    def update_liquidsoap_connection_status(self):
        """
        Updates the status of the Liquidsoap connection to the streaming
        server. This function updates the bootup time variable in the
        Liquidsoap script.
        """
        self.telnet_lock.acquire()
        try:
            tn = telnetlib.Telnet(LS_HOST, LS_PORT)
            # Update the boot-up time of Liquidsoap. Since Liquidsoap is not
            # restarting, we manually adjust the bootup time variable so the
            # status message will get updated.
            current_time = time.time()
            boot_up_time_command = "vars.bootup_time " + str(current_time) + "\n"
            tn.write(boot_up_time_command)
            tn.write("streams.connection_status\n")
            tn.write('exit\n')

            output = tn.read_all()
        except Exception, e:
            self.logger.error(str(e))
            # without output from Liquidsoap there is nothing to parse
            return
        finally:
            self.telnet_lock.release()

        output_list = output.split("\r\n")
        stream_info = output_list[2]

        # stream info is in the form of:
        # e.g. s1:true,2:true,3:false
        streams = stream_info.split(",")
        self.logger.info(streams)

        fake_time = current_time + 1
        for s in streams:
            info = s.split(':')
            stream_id = info[0]
            status = info[1]
            if status == "true":
                self.api_client.notify_liquidsoap_status("OK", stream_id, str(fake_time))

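    # Note: the status notification uses fake_time = current_time + 1 so
    # that it sorts after the bootup_time just written to Liquidsoap
    # (rationale inferred from the code above).
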
    def update_liquidsoap_stream_format(self, stream_format):
        # Push stream metadata to liquidsoap
        # TODO: THIS LIQUIDSOAP STUFF NEEDS TO BE MOVED TO PYPO-PUSH!!!
        try:
            self.telnet_lock.acquire()
            tn = telnetlib.Telnet(LS_HOST, LS_PORT)
            command = ('vars.stream_metadata_type %s\n' % stream_format).encode('utf-8')
            self.logger.info(command)
            tn.write(command)
            tn.write('exit\n')
            tn.read_all()
        except Exception, e:
            self.logger.error("Exception %s", e)
        finally:
            self.telnet_lock.release()

    def update_liquidsoap_station_name(self, station_name):
        # Push stream metadata to liquidsoap
        # TODO: THIS LIQUIDSOAP STUFF NEEDS TO BE MOVED TO PYPO-PUSH!!!
        try:
            self.logger.info(LS_HOST)
            self.logger.info(LS_PORT)

            self.telnet_lock.acquire()
            try:
                tn = telnetlib.Telnet(LS_HOST, LS_PORT)
                command = ('vars.station_name %s\n' % station_name).encode('utf-8')
                self.logger.info(command)
                tn.write(command)
                tn.write('exit\n')
                tn.read_all()
            except Exception, e:
                self.logger.error(str(e))
            finally:
                self.telnet_lock.release()
        except Exception, e:
            self.logger.error("Exception %s", e)

"""
|
|
|
|
Process the schedule
|
|
|
|
- Reads the scheduled entries of a given range (actual time +/- "prepare_ahead" / "cache_for")
|
|
|
|
- Saves a serialized file of the schedule
|
|
|
|
- playlists are prepared. (brought to liquidsoap format) and, if not mounted via nsf, files are copied
|
|
|
|
to the cache dir (Folder-structure: cache/YYYY-MM-DD-hh-mm-ss)
|
|
|
|
- runs the cleanup routine, to get rid of unused cached files
|
|
|
|
"""
|
2012-03-17 18:55:56 +01:00
|
|
|
def process_schedule(self, schedule_data):
|
2012-02-28 21:32:18 +01:00
|
|
|
self.logger.debug(schedule_data)
|
2012-02-27 19:52:35 +01:00
|
|
|
media = schedule_data["media"]
|
2011-03-03 06:22:28 +01:00
|
|
|
|
2011-06-15 21:49:42 +02:00
|
|
|
# Download all the media and put playlists in liquidsoap "annotate" format
|
2011-03-21 00:34:43 +01:00
|
|
|
try:
|
2012-03-01 23:58:44 +01:00
|
|
|
|
|
|
|
"""
|
|
|
|
Make sure cache_dir exists
|
|
|
|
"""
|
|
|
|
download_dir = self.cache_dir
|
|
|
|
try:
|
|
|
|
os.makedirs(download_dir)
|
|
|
|
except Exception, e:
|
|
|
|
pass
|
|
|
|
|
|
|
|
for key in media:
|
|
|
|
media_item = media[key]
|
|
|
|
|
|
|
|
fileExt = os.path.splitext(media_item['uri'])[1]
|
|
|
|
dst = os.path.join(download_dir, media_item['id']+fileExt)
|
|
|
|
media_item['dst'] = dst
|
|
|
|
|
|
|
|
self.media_prepare_queue.put(copy.copy(media))
|
2012-03-17 18:55:56 +01:00
|
|
|
self.prepare_media(media)
|
2012-02-27 19:52:35 +01:00
|
|
|
except Exception, e: self.logger.error("%s", e)
|
2011-03-21 00:34:43 +01:00
|
|
|
|
2011-03-23 06:09:27 +01:00
|
|
|
# Send the data to pypo-push
|
2012-03-01 23:58:44 +01:00
|
|
|
self.logger.debug("Pushing to pypo-push")
|
2012-02-27 19:52:35 +01:00
|
|
|
self.push_queue.put(media)
|
|
|
|
|
2012-03-06 01:02:46 +01:00
|
|
|
|
2011-03-03 06:22:28 +01:00
|
|
|
# cleanup
|
2012-03-17 18:55:56 +01:00
|
|
|
try: self.cache_cleanup(media)
|
2012-02-27 19:52:35 +01:00
|
|
|
except Exception, e: self.logger.error("%s", e)
|
2011-03-03 06:22:28 +01:00
|
|
|
|
2012-02-27 19:52:35 +01:00
|
|
|
|
2011-03-03 06:22:28 +01:00
|
|
|
|
2012-03-17 18:55:56 +01:00
|
|
|
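    # After process_schedule, each media item carries at least the keys
    # used above, e.g. (values hypothetical):
    #
    #   {"id": "42", "uri": "/path/to/library/42.mp3",
    #    "dst": "<cache_dir>/42.mp3"}
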
    def prepare_media(self, media):
        """
        Iterate through the list of media items in "media" and append
        some attributes, such as show_name.
        """
        try:
            mediaKeys = sorted(media.iterkeys())
            for mkey in mediaKeys:
                media_item = media[mkey]
                media_item['show_name'] = "TODO"
        except Exception, e:
            self.logger.error("%s", e)

        return media

    def handle_media_file(self, media_item, dst):
        """
        Download and cache the media item.
        """
        self.logger.debug("Processing track %s", media_item['uri'])

        try:
            # blocking function to download the media item
            #self.download_file(media_item, dst)
            self.copy_file(media_item, dst)

            if os.access(dst, os.R_OK):
                # check filesize (avoid zero-byte files)
                try:
                    fsize = os.path.getsize(dst)
                    if fsize > 0:
                        return True
                except Exception, e:
                    self.logger.error("%s", e)
                    fsize = 0
            else:
                self.logger.warning("Cannot read file %s.", dst)
        except Exception, e:
            self.logger.info("%s", e)

        return False

    def copy_file(self, media_item, dst):
        """
        Copy the file from the local library directory. Note that we are not
        using os.path.exists, since it can lie to us! It seems the best way
        to find out whether a file exists is to actually do an operation on
        the file (such as querying its size). Getting the file size of a
        non-existent file will throw an exception, so we can look for this
        exception instead of using os.path.exists.
        """
        src = media_item['uri']

        try:
            src_size = os.path.getsize(src)
        except Exception, e:
            self.logger.error("Could not get size of source file: %s", src)
            return

        dst_exists = True
        try:
            dst_size = os.path.getsize(dst)
        except Exception, e:
            dst_exists = False

        do_copy = False
        if dst_exists:
            # re-copy if the cached copy differs in size from the source
            if src_size != dst_size:
                do_copy = True
        else:
            do_copy = True

        if do_copy:
            self.logger.debug("copying from %s to local cache %s" % (src, dst))
            try:
                # copy will overwrite dst if it already exists
                shutil.copy(src, dst)
            except:
                self.logger.error("Could not copy from %s to %s" % (src, dst))

    """
    def download_file(self, media_item, dst):
        # Download a file from a remote server and store it in the cache.
        if os.path.isfile(dst):
            pass
            #self.logger.debug("file already in cache: %s", dst)
        else:
            self.logger.debug("try to download %s", media_item['uri'])
            self.api_client.get_media(media_item['uri'], dst)
    """

    def cache_cleanup(self, media):
        """
        Get a list of all files in the cache dir and remove them if they
        aren't being used anymore. The input dict media lists all files that
        are scheduled or currently playing; not being in this dict means the
        file is safe to remove.
        """
        cached_file_set = set(os.listdir(self.cache_dir))
        scheduled_file_set = set()

        for mkey in media:
            media_item = media[mkey]
            fileExt = os.path.splitext(media_item['uri'])[1]
            scheduled_file_set.add(media_item["id"] + fileExt)

        unneeded_files = cached_file_set - scheduled_file_set
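        # For example, with cached_file_set = {"1.mp3", "2.mp3"} and
        # scheduled_file_set = {"1.mp3"}, the difference leaves {"2.mp3"}
        # to be removed (filenames hypothetical).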
self.logger.debug("Files to remove " + str(unneeded_files))
|
|
|
|
for file in unneeded_files:
|
|
|
|
self.logger.debug("Removing %s" % os.path.join(self.cache_dir, file))
|
|
|
|
os.remove(os.path.join(self.cache_dir, file))
|
2011-03-23 06:09:27 +01:00
|
|
|
|
2011-09-08 18:17:42 +02:00
|
|
|
    def main(self):
        # Bootstrap: since we are just starting up, we need to grab the
        # most recent schedule. After that we can just wait for updates.
        success, self.schedule_data = self.api_client.get_schedule()
        if success:
            self.logger.info("Bootstrap schedule received: %s", self.schedule_data)
            self.process_schedule(self.schedule_data)
            self.set_bootstrap_variables()

        loops = 1
        while True:
            self.logger.info("Loop #%s", loops)
            try:
                """
                Our simple_queue.get() requires a timeout, in which case we
                fetch the Airtime schedule manually. It is important to fetch
                the schedule periodically because if we didn't, we would only
                get schedule updates via RabbitMQ if the user was constantly
                using the Airtime interface.

                If the user is not using the interface, RabbitMQ messages are
                not sent, and we will have very stale (or non-existent!) data
                about the schedule.

                Currently we are checking every 3600 seconds (1 hour).
                """
                message = self.fetch_queue.get(block=True, timeout=3600)
                self.handle_message(message)
            except Exception, e:
                self.logger.error("Exception, %s", e)

                success, self.schedule_data = self.api_client.get_schedule()
                if success:
                    self.process_schedule(self.schedule_data)

            loops += 1

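    # Note: fetch_queue.get(block=True, timeout=3600) raises Queue.Empty on
    # timeout; that lands in the except branch above and triggers the manual
    # schedule fetch described in the docstring.
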
    def run(self):
        """
        Entry point of the thread
        """
        self.main()