#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Python part of radio playout (pypo)

The main functions are "fetch" (./pypo_cli.py -f) and "push" (./pypo_cli.py -p)

There are two layers: scheduler & daypart (fallback).
The daypart is a fallback layer generated by the playlists' daypart settings
(e.g. a playlist creator can say that the list is good for Monday and Tuesday,
between 14:00 and 16:00). So if there is nothing in the schedule, pypo will
still play something (instead of silence..). This layer is optional.
It is there so that you don't have a fallback player which plays the same 100
tracks over and over again.

Attention & ToDos
- liquidsoap does not like mono files! So we have to make sure that only files
  with 2 channels are fed to LiquidSoap
  (solved: current = audio_to_stereo(current) - maybe not with ultimate performance)

Made for python version 2.5!!
Should work with 2.6 as well with a bit of adaption; for sure the json parsing
has to be changed (2.6 has its own parser, pypo brings its own -> util/json.py).
"""
# python defaults (debian default)
import time
import calendar
import os
import traceback
from optparse import *
import sys
import datetime
import logging
import logging.config
import shutil
import urllib
import urllib2
import pickle
import telnetlib
import random
import string
import operator
import inspect

# additional modules (should be checked)
from configobj import ConfigObj

# custom imports
from util import *
from api_clients import *
2010-11-08 22:54:54 +01:00
PYPO_VERSION = ' 0.2 '
2010-11-19 00:00:13 +01:00
PYPO_MEDIA_SKIP = 1
PYPO_MEDIA_LIVE_SESSION = 2
PYPO_MEDIA_STREAM = 3
PYPO_MEDIA_FILE_URL = 4
PYPO_MEDIA_FILE_LOCAL = 5
2010-11-05 15:54:15 +01:00
2010-11-08 22:54:54 +01:00
# Set up command-line options
2010-11-05 15:54:15 +01:00
parser = OptionParser ( )
# help screeen / info
usage = " % prog [options] " + " - python playout system "
parser = OptionParser ( usage = usage )
2010-11-08 22:54:54 +01:00
# Options
parser . add_option ( " -v " , " --compat " , help = " Check compatibility with server API version " , default = False , action = " store_true " , dest = " check_compat " )
2010-11-19 00:00:13 +01:00
parser . add_option ( " -t " , " --test " , help = " Do a test to make sure everything is working properly. " , default = False , action = " store_true " , dest = " test " )
2010-11-30 00:34:22 +01:00
parser . add_option ( " -f " , " --fetch-scheduler " , help = " Fetch the schedule from server. This is a polling process that runs forever. " , default = False , action = " store_true " , dest = " fetch_scheduler " )
parser . add_option ( " -p " , " --push-scheduler " , help = " Push the schedule to Liquidsoap. This is a polling process that runs forever. " , default = False , action = " store_true " , dest = " push_scheduler " )
2010-11-05 15:54:15 +01:00
parser . add_option ( " -F " , " --fetch-daypart " , help = " Fetch from daypart - scheduler (loop, interval in config file) " , default = False , action = " store_true " , dest = " fetch_daypart " )
parser . add_option ( " -P " , " --push-daypart " , help = " Push daypart to Liquidsoap (loop, interval in config file) " , default = False , action = " store_true " , dest = " push_daypart " )
2010-11-08 22:54:54 +01:00
parser . add_option ( " -b " , " --cleanup " , help = " Cleanup " , default = False , action = " store_true " , dest = " cleanup " )
2010-11-30 00:34:22 +01:00
#parser.add_option("-j", "--jingles", help="Get new jingles from server, comma separated list if jingle-id's as argument", metavar="LIST")
2010-11-05 15:54:15 +01:00
parser . add_option ( " -c " , " --check " , help = " Check the cached schedule and exit " , default = False , action = " store_true " , dest = " check " )
# parse options
( options , args ) = parser . parse_args ( )
# configure logging
logging . config . fileConfig ( " logging.cfg " )
# loading config file
try :
config = ConfigObj ( ' config.cfg ' )
POLL_INTERVAL = float ( config [ ' poll_interval ' ] )
2011-01-10 23:10:18 +01:00
PUSH_INTERVAL = 0.5
#PUSH_INTERVAL = float(config['push_interval'])
2010-11-05 15:54:15 +01:00
LS_HOST = config [ ' ls_host ' ]
LS_PORT = config [ ' ls_port ' ]
except Exception , e :
2010-11-08 22:54:54 +01:00
print ' Error loading config file: ' , e
2010-11-05 15:54:15 +01:00
sys . exit ( )
class Global:
    """Application-level startup checks."""

    def __init__(self):
        # Nothing to initialize; just emit a blank line on startup.
        print

    def selfcheck(self):
        """Exit the process if the server API version is incompatible."""
        self.api_client = api_client.api_client_factory(config)
        if not self.api_client.is_server_compatible():
            sys.exit()
2010-11-05 15:54:15 +01:00
class Playout:
    """Fetches the schedule from the server and pushes it to liquidsoap."""

    def __init__(self):
        # Client used for all communication with the server API.
        self.api_client = api_client.api_client_factory(config)
        self.cue_file = CueFile()
        # Fallback silence track, scheduled when nothing else is available.
        self.silence_file = config["file_dir"] + 'basic/silence.mp3'
        # Seconds ahead of the scheduled start at which a playlist is pushed.
        self.push_ahead = 15
        self.range_updated = False

    def test_api(self):
        """Ping the server API (used by the --test option)."""
        self.api_client.test()

    def set_export_source(self, export_source):
        """Point cache and schedule paths at the given export source name."""
        self.export_source = export_source
        self.cache_dir = config["cache_dir"] + self.export_source + '/'
        self.schedule_file = self.cache_dir + 'schedule.pickle'
        self.schedule_tracker_file = self.cache_dir + "schedule_tracker.pickle"
2010-11-05 15:54:15 +01:00
"""
Fetching part of pypo
- Reads the scheduled entries of a given range ( actual time + / - " prepare_ahead " / " cache_for " )
- Saves a serialized file of the schedule
2010-11-08 22:54:54 +01:00
- playlists are prepared . ( brought to liquidsoap format ) and , if not mounted via nsf , files are copied
2010-11-05 15:54:15 +01:00
to the cache dir ( Folder - structure : cache / YYYY - MM - DD - hh - mm - ss )
- runs the cleanup routine , to get rid of unused cashed files
"""
def fetch ( self , export_source ) :
"""
2010-11-08 22:54:54 +01:00
wrapper script for fetching the whole schedule ( in json )
2010-11-05 15:54:15 +01:00
"""
2010-11-19 21:53:57 +01:00
logger = logging . getLogger ( )
2010-11-05 15:54:15 +01:00
2010-11-30 00:34:22 +01:00
self . set_export_source ( export_source )
2010-11-05 15:54:15 +01:00
try : os . mkdir ( self . cache_dir )
except Exception , e : pass
"""
Trigger daypart range - generation . ( Only if daypart - instance )
"""
if self . export_source == ' daypart ' :
print ' ****************************** '
print ' *** TRIGGER DAYPART UPDATE *** '
print ' ****************************** '
try :
self . generate_range_dp ( )
except Exception , e :
2010-11-19 00:00:13 +01:00
logger . error ( " %s " , e )
2010-11-05 15:54:15 +01:00
2010-11-08 22:54:54 +01:00
# get schedule
2010-11-05 15:54:15 +01:00
try :
2010-11-08 22:54:54 +01:00
while self . get_schedule ( ) != 1 :
2010-11-05 15:54:15 +01:00
logger . warning ( " failed to read from export url " )
time . sleep ( 1 )
2010-11-19 00:00:13 +01:00
except Exception , e : logger . error ( " %s " , e )
2010-11-05 15:54:15 +01:00
# prepare the playlists
2010-11-30 00:34:22 +01:00
if config [ " cue_style " ] == ' pre ' :
try : self . prepare_playlists_cue ( )
2010-11-19 00:00:13 +01:00
except Exception , e : logger . error ( " %s " , e )
2010-11-30 00:34:22 +01:00
elif config [ " cue_style " ] == ' otf ' :
2010-11-05 15:54:15 +01:00
try : self . prepare_playlists ( self . export_source )
2010-11-19 00:00:13 +01:00
except Exception , e : logger . error ( " %s " , e )
2010-11-05 15:54:15 +01:00
# cleanup
try : self . cleanup ( self . export_source )
2010-11-19 00:00:13 +01:00
except Exception , e : logger . error ( " %s " , e )
2010-11-05 15:54:15 +01:00
2011-01-02 03:45:10 +01:00
#logger.info("fetch loop completed")
2010-11-05 15:54:15 +01:00
"""
This is actually a bit ugly ( again feel free to improve ! ! )
The generate_range_dp function should be called once a day ,
we do this at 18 h . The hour before the state is set back to ' False '
"""
def generate_range_dp ( self ) :
2010-11-19 21:53:57 +01:00
logger = logging . getLogger ( )
2010-11-05 15:54:15 +01:00
logger . debug ( " trying to trigger daypart update " )
tnow = time . localtime ( time . time ( ) )
2010-11-30 00:34:22 +01:00
if ( tnow [ 3 ] == 16 ) :
2010-11-05 15:54:15 +01:00
self . range_updated = False
2010-11-30 00:34:22 +01:00
if ( tnow [ 3 ] == 17 and self . range_updated == False ) :
2010-11-05 15:54:15 +01:00
try :
print self . api_client . generate_range_dp ( )
logger . info ( " daypart updated " )
self . range_updated = True
except Exception , e :
print e
2010-11-08 22:54:54 +01:00
def get_schedule ( self ) :
2010-11-19 21:53:57 +01:00
logger = logging . getLogger ( )
2010-12-15 22:38:38 +01:00
status , response = self . api_client . get_schedule ( )
2010-11-08 22:54:54 +01:00
2010-11-05 15:54:15 +01:00
if status == 1 :
2010-11-08 22:54:54 +01:00
logger . info ( " dump serialized schedule to %s " , self . schedule_file )
schedule = response [ ' playlists ' ]
2010-11-05 15:54:15 +01:00
try :
schedule_file = open ( self . schedule_file , " w " )
pickle . dump ( schedule , schedule_file )
schedule_file . close ( )
except Exception , e :
2010-11-19 00:00:13 +01:00
logger . critical ( " Exception %s " , e )
2010-11-05 15:54:15 +01:00
status = 0
return status
"""
Alternative version of playout preparation . Every playlist entry is
pre - cued if neccessary ( cue_in / cue_out != 0 ) and stored in the
playlist folder .
file is eg 2010 - 06 - 23 - 15 - 00 - 00 / 17 _cue_10 .132 - 123.321 . mp3
"""
2010-11-30 00:34:22 +01:00
def prepare_playlists_cue ( self ) :
2010-11-19 21:53:57 +01:00
logger = logging . getLogger ( )
2010-11-05 15:54:15 +01:00
2010-11-30 00:34:22 +01:00
# Load schedule from disk
schedule = self . load_schedule ( )
2010-11-19 00:00:13 +01:00
# Dont do anything if schedule is empty
if ( not schedule ) :
2010-11-24 23:57:55 +01:00
logger . debug ( " Schedule is empty. " )
2010-11-19 00:00:13 +01:00
return
2010-11-24 23:57:55 +01:00
scheduleKeys = sorted ( schedule . iterkeys ( ) )
2010-11-05 15:54:15 +01:00
try :
2010-11-24 23:57:55 +01:00
for pkey in scheduleKeys :
2010-11-05 15:54:15 +01:00
logger . info ( " found playlist at %s " , pkey )
playlist = schedule [ pkey ]
# create playlist directory
2010-11-24 23:57:55 +01:00
try :
os . mkdir ( self . cache_dir + str ( pkey ) )
except Exception , e :
pass
2010-11-05 15:54:15 +01:00
ls_playlist = ' ' ;
2010-11-30 00:34:22 +01:00
logger . debug ( ' ***************************************** ' )
logger . debug ( ' pkey: ' + str ( pkey ) )
logger . debug ( ' cached at : ' + self . cache_dir + str ( pkey ) )
logger . debug ( ' subtype: ' + str ( playlist [ ' subtype ' ] ) )
logger . debug ( ' played: ' + str ( playlist [ ' played ' ] ) )
logger . debug ( ' schedule id: ' + str ( playlist [ ' schedule_id ' ] ) )
logger . debug ( ' duration: ' + str ( playlist [ ' duration ' ] ) )
logger . debug ( ' source id: ' + str ( playlist [ ' x_ident ' ] ) )
logger . debug ( ' ***************************************** ' )
# Creating an API call like the next two lines would make this more flexible
# mediaType = api_client.get_media_type(playlist)
# if (mediaType == PYPO_MEDIA_SKIP):
2010-11-05 15:54:15 +01:00
2010-11-19 21:53:57 +01:00
if int ( playlist [ ' played ' ] ) == 1 :
2010-11-05 15:54:15 +01:00
logger . info ( " playlist %s already played / sent to liquidsoap, so will ignore it " , pkey )
elif int ( playlist [ ' subtype ' ] ) == 5 :
2010-11-19 21:53:57 +01:00
ls_playlist = self . handle_live_session ( playlist , pkey , ls_playlist )
2010-11-05 15:54:15 +01:00
elif int ( playlist [ ' subtype ' ] ) == 6 :
2010-11-19 21:53:57 +01:00
ls_playlist = self . handle_live_cast ( playlist , pkey , ls_playlist )
2010-11-05 15:54:15 +01:00
elif int ( playlist [ ' subtype ' ] ) > 0 and int ( playlist [ ' subtype ' ] ) < 5 :
2010-11-19 21:53:57 +01:00
ls_playlist = self . handle_media_file ( playlist , pkey , ls_playlist )
2010-11-05 15:54:15 +01:00
"""
This is kind of hackish . We add a bunch of " silence " tracks to the end of each playlist .
So we can make sure the list does not get repeated just before a new one is called .
( or in case nothing is in the scheduler afterwards )
20 x silence = 10 hours
"""
for i in range ( 0 , 1 ) :
2010-11-19 21:53:57 +01:00
ls_playlist + = self . silence_file + " \n "
2010-11-05 15:54:15 +01:00
print ' ' ,
# write playlist file
plfile = open ( self . cache_dir + str ( pkey ) + ' /list.lsp ' , " w " )
plfile . write ( ls_playlist )
plfile . close ( )
logger . info ( ' ls playlist file written to %s ' , self . cache_dir + str ( pkey ) + ' /list.lsp ' )
except Exception , e :
logger . info ( " %s " , e )
2010-11-19 21:53:57 +01:00
def handle_live_session(self, playlist, pkey, ls_playlist):
    """
    This is a live session, so silence is scheduled.
    Maybe not the most elegant solution :)
    It adds 20 times 30 min silence to the playlist.
    Silence file has to be in <file_dir>/basic/silence.mp3

    Returns the updated ls_playlist string; exits the process when the
    silence file is missing (nothing sensible can be played without it).
    """
    logger = logging.getLogger()
    # BUG FIX: the duration and the schedule key were passed in swapped
    # order relative to the message text.
    logger.debug("found %s seconds of live/studio session at %s", playlist['duration'], pkey)
    if os.path.isfile(self.silence_file):
        # BUG FIX: the path was '+'-concatenated onto a '%s' format string,
        # which logged a literal '%s'; pass it as a format argument.
        logger.debug('file stored at: %s', self.silence_file)
        # BUG FIX: range(0, 19) produced only 19 entries, while the
        # documented margin is 20 x 30 min.
        for i in range(0, 20):
            ls_playlist += self.silence_file + "\n"
    else:
        # (the stdout prints duplicated this message; logger is enough)
        logger.critical('File is expected to be at: %s', self.silence_file)
        sys.exit()
    return ls_playlist
2010-11-19 21:53:57 +01:00
def handle_live_cast(self, playlist, pkey, ls_playlist):
    """
    This is a live-cast session.
    Create a silence list (could eg also be a fallback list..).

    Returns the updated ls_playlist string; exits the process when the
    silence file is missing.
    """
    logger = logging.getLogger()
    # BUG FIX: duration/pkey were passed swapped relative to the message.
    logger.debug("found %s seconds of live-cast session at %s", playlist['duration'], pkey)
    if os.path.isfile(self.silence_file):
        # BUG FIX: pass the path as a format argument instead of
        # concatenating it onto the '%s' format string.
        logger.debug('file stored at: %s', self.silence_file)
        # 20 silence entries - same margin as handle_live_session.
        for i in range(0, 20):
            ls_playlist += self.silence_file + "\n"
    else:
        logger.critical('File is expected to be at: %s', self.silence_file)
        sys.exit()
    return ls_playlist
def handle_media_file ( self , playlist , pkey , ls_playlist ) :
"""
This handles both remote and local files .
2010-11-24 23:57:55 +01:00
Returns an updated ls_playlist string .
2010-11-19 21:53:57 +01:00
"""
logger = logging . getLogger ( )
for media in playlist [ ' medias ' ] :
2010-11-24 01:21:05 +01:00
logger . debug ( " Processing track %s " , media [ ' uri ' ] )
2010-11-19 21:53:57 +01:00
2011-01-02 03:45:10 +01:00
fileExt = os . path . splitext ( media [ ' uri ' ] ) [ 1 ]
2010-11-19 21:53:57 +01:00
try :
if str ( media [ ' cue_in ' ] ) == ' 0 ' and str ( media [ ' cue_out ' ] ) == ' 0 ' :
2010-11-24 01:21:05 +01:00
logger . debug ( ' No cue in/out detected for this file ' )
2011-01-02 03:45:10 +01:00
dst = " %s %s / %s %s " % ( self . cache_dir , str ( pkey ) , str ( media [ ' id ' ] ) , str ( fileExt ) )
2010-11-19 21:53:57 +01:00
do_cue = False
else :
2010-11-24 01:21:05 +01:00
logger . debug ( ' Cue in/out detected ' )
2011-01-02 03:45:10 +01:00
dst = " %s %s / %s _cue_ %s - %s %s " % \
( self . cache_dir , str ( pkey ) , str ( media [ ' id ' ] ) , str ( float ( media [ ' cue_in ' ] ) / 1000 ) , str ( float ( media [ ' cue_out ' ] ) / 1000 ) , str ( fileExt ) )
2010-11-19 21:53:57 +01:00
do_cue = True
# check if it is a remote file, if yes download
2010-11-24 01:21:05 +01:00
if media [ ' uri ' ] [ 0 : 4 ] == ' http ' :
2010-11-19 21:53:57 +01:00
self . handle_remote_file ( media , dst , do_cue )
else :
# Assume local file
self . handle_local_file ( media , dst , do_cue )
if True == os . access ( dst , os . R_OK ) :
# check filesize (avoid zero-byte files)
try : fsize = os . path . getsize ( dst )
except Exception , e :
logger . error ( " %s " , e )
fsize = 0
if fsize > 0 :
pl_entry = ' annotate:export_source= " %s " ,media_id= " %s " ,liq_start_next= " %s " ,liq_fade_in= " %s " ,liq_fade_out= " %s " : %s ' % \
( str ( media [ ' export_source ' ] ) , media [ ' id ' ] , 0 , str ( float ( media [ ' fade_in ' ] ) / 1000 ) , str ( float ( media [ ' fade_out ' ] ) / 1000 ) , dst )
2010-11-24 01:21:05 +01:00
logger . debug ( pl_entry )
2010-11-19 21:53:57 +01:00
"""
Tracks are only added to the playlist if they are accessible
on the file system and larger than 0 bytes .
So this can lead to playlists shorter than expectet .
( there is a hardware silence detector for this cases . . . )
"""
ls_playlist + = pl_entry + " \n "
logger . debug ( " everything ok, adding %s to playlist " , pl_entry )
else :
2010-11-24 01:21:05 +01:00
print ' zero-file: ' + dst + ' from ' + media [ ' uri ' ]
2010-11-19 21:53:57 +01:00
logger . warning ( " zero-size file - skiping %s . will not add it to playlist " , dst )
else :
logger . warning ( " something went wrong. file %s not available. will not add it to playlist " , dst )
except Exception , e : logger . info ( " %s " , e )
return ls_playlist
def handle_remote_file ( self , media , dst , do_cue ) :
logger = logging . getLogger ( )
if do_cue == False :
if os . path . isfile ( dst ) :
logger . debug ( " file already in cache: %s " , dst )
else :
2010-11-24 01:21:05 +01:00
logger . debug ( " try to download %s " , media [ ' uri ' ] )
self . api_client . get_media ( media [ ' uri ' ] , dst )
2010-11-19 21:53:57 +01:00
else :
if os . path . isfile ( dst ) :
logger . debug ( " file already in cache: %s " , dst )
else :
2010-11-24 01:21:05 +01:00
logger . debug ( " try to download and cue %s " , media [ ' uri ' ] )
2010-11-19 21:53:57 +01:00
2011-01-02 03:45:10 +01:00
fileExt = os . path . splitext ( media [ ' uri ' ] ) [ 1 ]
dst_tmp = config [ " tmp_dir " ] + " " . join ( [ random . choice ( string . letters ) for i in xrange ( 10 ) ] ) + fileExt
2010-11-24 01:21:05 +01:00
self . api_client . get_media ( media [ ' uri ' ] , dst_tmp )
2010-11-19 21:53:57 +01:00
# cue
2010-11-24 23:57:55 +01:00
logger . debug ( " STARTING CUE " )
debugDst = self . cue_file . cue ( dst_tmp , dst , float ( media [ ' cue_in ' ] ) / 1000 , float ( media [ ' cue_out ' ] ) / 1000 )
logger . debug ( debugDst )
logger . debug ( " END CUE " )
2010-11-19 21:53:57 +01:00
if True == os . access ( dst , os . R_OK ) :
try : fsize = os . path . getsize ( dst )
except Exception , e :
logger . error ( " %s " , e )
fsize = 0
if fsize > 0 :
logger . debug ( ' try to remove temporary file: %s ' + dst_tmp )
try : os . remove ( dst_tmp )
except Exception , e :
logger . error ( " %s " , e )
else :
logger . warning ( ' something went wrong cueing: %s - using uncued file ' + dst )
try : os . rename ( dst_tmp , dst )
except Exception , e :
logger . error ( " %s " , e )
def handle_local_file ( self , media , dst , do_cue ) :
"""
Handle files on NAS . Pre - cueing not implemented at the moment .
( not needed by openbroadcast , feel free to add this )
2010-12-31 18:20:17 +01:00
Here ' s an implementation for locally stored files.
2010-11-19 21:53:57 +01:00
Works the same as with remote files , just replaced API - download with
file copy .
"""
logger = logging . getLogger ( )
if do_cue == False :
if os . path . isfile ( dst ) :
logger . debug ( " file already in cache: %s " , dst )
else :
2010-11-24 01:21:05 +01:00
logger . debug ( " try to copy file to cache %s " , media [ ' uri ' ] )
2010-11-19 21:53:57 +01:00
try :
2010-11-24 01:21:05 +01:00
shutil . copy ( media [ ' uri ' ] , dst )
logger . info ( " copied %s to %s " , media [ ' uri ' ] , dst )
2010-11-19 21:53:57 +01:00
except Exception , e :
logger . error ( " %s " , e )
else :
if os . path . isfile ( dst ) :
logger . debug ( " file already in cache: %s " , dst )
else :
2010-11-24 01:21:05 +01:00
logger . debug ( " try to copy and cue %s " , media [ ' uri ' ] )
2010-11-19 21:53:57 +01:00
print ' *** '
2010-11-30 00:34:22 +01:00
dst_tmp = config [ " tmp_dir " ] + " " . join ( [ random . choice ( string . letters ) for i in xrange ( 10 ) ] )
2010-11-19 21:53:57 +01:00
print dst_tmp
print ' *** '
try :
2010-11-24 01:21:05 +01:00
shutil . copy ( media [ ' uri ' ] , dst_tmp )
logger . info ( " copied %s to %s " , media [ ' uri ' ] , dst_tmp )
2010-11-19 21:53:57 +01:00
except Exception , e :
logger . error ( " %s " , e )
# cue
2010-12-15 22:38:38 +01:00
print " STARTING CUE "
2010-11-19 21:53:57 +01:00
print self . cue_file . cue ( dst_tmp , dst , float ( media [ ' cue_in ' ] ) / 1000 , float ( media [ ' cue_out ' ] ) / 1000 )
print " END CUE "
if True == os . access ( dst , os . R_OK ) :
try : fsize = os . path . getsize ( dst )
except Exception , e :
logger . error ( " %s " , e )
fsize = 0
if fsize > 0 :
logger . debug ( ' try to remove temporary file: %s ' + dst_tmp )
try : os . remove ( dst_tmp )
except Exception , e :
logger . error ( " %s " , e )
else :
logger . warning ( ' something went wrong cueing: %s - using uncued file ' + dst )
try : os . rename ( dst_tmp , dst )
except Exception , e :
logger . error ( " %s " , e )
2010-11-05 15:54:15 +01:00
def cleanup ( self , export_source ) :
2010-11-24 01:21:05 +01:00
"""
Cleans up folders in cache_dir . Look for modification date older than " now - CACHE_FOR "
2010-11-24 23:57:55 +01:00
and deletes them .
2010-11-24 01:21:05 +01:00
"""
2010-11-19 21:53:57 +01:00
logger = logging . getLogger ( )
2010-11-24 23:57:55 +01:00
2010-11-30 00:34:22 +01:00
self . set_export_source ( export_source )
offset = 3600 * int ( config [ " cache_for " ] )
2010-11-05 15:54:15 +01:00
now = time . time ( )
2010-11-24 23:57:55 +01:00
for r , d , f in os . walk ( self . cache_dir ) :
2010-11-05 15:54:15 +01:00
for dir in d :
2010-11-24 23:57:55 +01:00
try :
timestamp = time . mktime ( time . strptime ( dir , " % Y- % m- %d - % H- % M- % S " ) )
#logger.debug('dir : %s', (dir))
#logger.debug('age : %s', (round((now - timestamp),1)))
#logger.debug('delete in : %ss', (round((offset - (now - timestamp)),1)))
#logger.debug('Folder "Age": %s - %s', round((((now - offset) - timestamp) / 60), 2), os.path.join(r, dir))
if ( now - timestamp ) > offset :
try :
logger . debug ( ' trying to remove %s - timestamp: %s ' , os . path . join ( r , dir ) , timestamp )
shutil . rmtree ( os . path . join ( r , dir ) )
except Exception , e :
logger . error ( " %s " , e )
pass
else :
logger . info ( ' sucessfully removed %s ' , os . path . join ( r , dir ) )
except Exception , e :
print e
logger . error ( " %s " , e )
2010-11-05 15:54:15 +01:00
"""
2010-11-24 23:57:55 +01:00
The Push Loop - the push loop periodically ( minimal 1 / 2 of the playlist - grid )
2010-11-08 22:54:54 +01:00
checks if there is a playlist that should be scheduled at the current time .
2010-11-05 15:54:15 +01:00
If yes , the temporary liquidsoap playlist gets replaced with the corresponding one ,
2010-11-08 22:54:54 +01:00
then liquidsoap is asked ( via telnet ) to reload and immediately play it .
2010-11-05 15:54:15 +01:00
"""
def push ( self , export_source ) :
2010-11-19 21:53:57 +01:00
logger = logging . getLogger ( )
2010-11-05 15:54:15 +01:00
2010-11-30 00:34:22 +01:00
self . set_export_source ( export_source )
2010-11-05 15:54:15 +01:00
2010-11-30 00:34:22 +01:00
#try:
# dummy = self.schedule
# logger.debug('schedule already loaded')
#except Exception, e:
# self.schedule = self.push_init(self.export_source)
2010-11-05 15:54:15 +01:00
2010-11-30 00:34:22 +01:00
self . schedule = self . load_schedule ( )
playedItems = self . load_schedule_tracker ( )
2011-01-10 23:10:18 +01:00
tcoming = time . localtime ( time . time ( ) + self . push_ahead )
2010-11-05 15:54:15 +01:00
tnow = time . localtime ( time . time ( ) )
2011-01-10 23:10:18 +01:00
str_tcoming_s = " %04d - %02d - %02d - %02d - %02d - %02d " % ( tcoming [ 0 ] , tcoming [ 1 ] , tcoming [ 2 ] , tcoming [ 3 ] , tcoming [ 4 ] , tcoming [ 5 ] )
2010-11-05 15:54:15 +01:00
if self . schedule == None :
2010-11-30 00:34:22 +01:00
logger . warn ( ' Unable to loop schedule - maybe write in progress? ' )
logger . warn ( ' Will try again in next loop. ' )
2010-11-05 15:54:15 +01:00
else :
2011-01-10 23:10:18 +01:00
for pkey in self . schedule :
playedFlag = ( pkey in playedItems ) and playedItems [ pkey ] . get ( " played " , 0 )
if pkey [ 0 : 19 ] < = str_tcoming_s and not playedFlag :
2010-11-24 23:57:55 +01:00
logger . debug ( ' Preparing to push playlist scheduled at: %s ' , pkey )
2010-11-05 15:54:15 +01:00
playlist = self . schedule [ pkey ]
2011-01-10 23:10:18 +01:00
ptype = playlist [ ' subtype ' ]
# We have a match, replace the current playlist and
# force liquidsoap to refresh.
if ( self . push_liquidsoap ( pkey , self . schedule , ptype ) == 1 ) :
logger . debug ( " Pushed to liquidsoap, updating ' played ' status. " )
# Marked the current playlist as 'played' in the schedule tracker
# so it is not called again in the next push loop.
# Write changes back to tracker file.
playedItems [ pkey ] = playlist
playedItems [ pkey ] [ ' played ' ] = 1
schedule_tracker = open ( self . schedule_tracker_file , " w " )
pickle . dump ( playedItems , schedule_tracker )
schedule_tracker . close ( )
logger . debug ( " Wrote schedule to disk: " + str ( playedItems ) )
# Call API to update schedule states
logger . debug ( " Doing callback to server to update ' played ' status. " )
self . api_client . notify_scheduled_item_start_playing ( pkey , self . schedule )
2010-11-30 00:34:22 +01:00
def load_schedule ( self ) :
2010-11-19 21:53:57 +01:00
logger = logging . getLogger ( )
2010-11-30 00:34:22 +01:00
schedule = None
# create the file if it doesnt exist
if ( not os . path . exists ( self . schedule_file ) ) :
logger . debug ( ' creating file ' + self . schedule_file )
open ( self . schedule_file , ' w ' ) . close ( )
else :
# load the schedule from cache
2011-01-02 03:45:10 +01:00
#logger.debug('loading schedule file '+self.schedule_file)
2010-11-30 00:34:22 +01:00
try :
schedule_file = open ( self . schedule_file , " r " )
schedule = pickle . load ( schedule_file )
schedule_file . close ( )
except Exception , e :
logger . error ( ' %s ' , e )
2010-11-05 15:54:15 +01:00
return schedule
2010-11-30 00:34:22 +01:00
def load_schedule_tracker ( self ) :
2010-11-19 21:53:57 +01:00
logger = logging . getLogger ( )
2010-11-30 00:34:22 +01:00
playedItems = dict ( )
# create the file if it doesnt exist
if ( not os . path . exists ( self . schedule_tracker_file ) ) :
logger . debug ( ' creating file ' + self . schedule_tracker_file )
schedule_tracker = open ( self . schedule_tracker_file , ' w ' )
pickle . dump ( playedItems , schedule_tracker )
schedule_tracker . close ( )
else :
try :
2011-01-02 03:45:10 +01:00
#logger.debug('loading schedule tracker file '+ self.schedule_tracker_file)
2010-11-30 00:34:22 +01:00
schedule_tracker = open ( self . schedule_tracker_file , " r " )
playedItems = pickle . load ( schedule_tracker )
schedule_tracker . close ( )
except Exception , e :
logger . error ( ' Unable to load schedule tracker file: %s ' , e )
return playedItems
2010-11-05 15:54:15 +01:00
2010-11-30 00:34:22 +01:00
def push_liquidsoap ( self , pkey , schedule , ptype ) :
logger = logging . getLogger ( )
2010-11-05 15:54:15 +01:00
src = self . cache_dir + str ( pkey ) + ' /list.lsp '
try :
if True == os . access ( src , os . R_OK ) :
2010-11-24 23:57:55 +01:00
logger . debug ( ' OK - Can read playlist file ' )
2010-11-05 15:54:15 +01:00
pl_file = open ( src , " r " )
"""
i know this could be wrapped , maybe later . .
"""
tn = telnetlib . Telnet ( LS_HOST , 1234 )
2011-01-17 16:30:45 +01:00
for line in pl_file . readlines ( ) :
line = line . strip ( )
logger . debug ( line )
tn . write ( self . export_source + ' .push %s ' % ( line ) )
tn . write ( " \n " )
2010-11-05 15:54:15 +01:00
tn . write ( " exit \n " )
2010-11-24 23:57:55 +01:00
logger . debug ( tn . read_all ( ) )
2011-01-17 20:36:52 +01:00
pattern = ' % Y- % m- %d - % H- % M- % S '
#strptime returns struct_time in local time
#mktime takes a time_struct and returns a floating point
#gmtime Convert a time expressed in seconds since the epoch to a struct_time in UTC
#mktime: expresses the time in local time, not UTC. It returns a floating point number, for compatibility with time().
epoch_start = calendar . timegm ( time . gmtime ( time . mktime ( time . strptime ( pkey , pattern ) ) ) )
#Return the time as a floating point number expressed in seconds since the epoch, in UTC.
epoch_now = time . time ( )
2011-01-20 00:32:20 +01:00
logger . debug ( " Epoch start: " + str ( epoch_start ) )
logger . debug ( " Epoch now: " + str ( epoch_now ) )
2011-01-17 20:36:52 +01:00
sleep_time = epoch_start - epoch_now ;
if sleep_time < 0 :
sleep_time = 0
logger . debug ( ' sleeping for %s s ' % ( sleep_time ) )
time . sleep ( sleep_time )
2010-11-05 15:54:15 +01:00
2010-11-24 23:57:55 +01:00
logger . debug ( ' sending " flip " ' )
2010-11-05 15:54:15 +01:00
tn = telnetlib . Telnet ( LS_HOST , 1234 )
2010-11-30 00:34:22 +01:00
# Get any extra information for liquidsoap (which will be sent back to us)
liquidsoap_data = self . api_client . get_liquidsoap_data ( pkey , schedule )
logger . debug ( " Sending additional data to liquidsoap: " + liquidsoap_data )
2011-01-13 19:12:35 +01:00
#Sending JSON string. Example: {"schedule_id":"13"}
2010-11-30 00:34:22 +01:00
tn . write ( " vars.pypo_data " + liquidsoap_data + " \n " )
2010-11-05 15:54:15 +01:00
tn . write ( self . export_source + ' .flip ' )
tn . write ( " \n " )
tn . write ( " exit \n " )
2010-11-30 00:34:22 +01:00
tn . read_all ( )
2010-11-05 15:54:15 +01:00
status = 1
except Exception , e :
logger . error ( ' %s ' , e )
status = 0
return status
"""
2010-11-08 22:54:54 +01:00
Updates the jingles . Give comma separated list of jingle tracks .
2010-11-30 00:34:22 +01:00
NOTE : commented out because it needs to be converted to use the API client . - Paul
2010-11-05 15:54:15 +01:00
"""
2010-11-30 00:34:22 +01:00
#def update_jingles(self, options):
# print 'jingles'
#
# jingle_list = string.split(options, ',')
# print jingle_list
# for media_id in jingle_list:
# # api path maybe should not be hard-coded
# src = API_BASE + 'api/pypo/get_media/' + str(media_id)
# print src
# # include the hourly jungles for the moment
# dst = "%s%s/%s.mp3" % (config["file_dir"], 'jingles/hourly', str(media_id))
# print dst
#
# try:
# print '** urllib auth with: ',
# print self.api_auth
# opener = urllib.URLopener()
# opener.retrieve (src, dst, False, self.api_auth)
# logger.info("downloaded %s to %s", src, dst)
# except Exception, e:
# print e
# logger.error("%s", e)
2010-11-05 15:54:15 +01:00
def check_schedule ( self , export_source ) :
2010-11-19 21:53:57 +01:00
logger = logging . getLogger ( )
2010-11-05 15:54:15 +01:00
2010-11-30 00:34:22 +01:00
self . set_export_source ( export_source )
2010-11-05 15:54:15 +01:00
try :
schedule_file = open ( self . schedule_file , " r " )
schedule = pickle . load ( schedule_file )
schedule_file . close ( )
except Exception , e :
logger . error ( " %s " , e )
schedule = None
for pkey in sorted ( schedule . iterkeys ( ) ) :
playlist = schedule [ pkey ]
print ' ***************************************** '
print ' \033 [0;32m %s %s \033 [m ' % ( ' scheduled at: ' , str ( pkey ) )
print ' cached at : ' + self . cache_dir + str ( pkey )
print ' subtype: ' + str ( playlist [ ' subtype ' ] )
print ' played: ' + str ( playlist [ ' played ' ] )
print ' schedule id: ' + str ( playlist [ ' schedule_id ' ] )
print ' duration: ' + str ( playlist [ ' duration ' ] )
print ' source id: ' + str ( playlist [ ' x_ident ' ] )
print ' ----------------------------------------- '
for media in playlist [ ' medias ' ] :
print media
print
if __name__ == ' __main__ ' :
print
2010-11-08 22:54:54 +01:00
print ' ########################################### '
print ' # *** pypo *** # '
print ' # Liquidsoap + External Scheduler # '
print ' # Playout System # '
print ' ########################################### '
2010-11-05 15:54:15 +01:00
print
# initialize
g = Global ( )
g . selfcheck ( )
po = Playout ( )
run = True
while run == True :
2010-11-19 21:53:57 +01:00
logger = logging . getLogger ( )
2010-11-05 15:54:15 +01:00
loops = 0
2010-11-19 00:00:13 +01:00
if options . test :
po . test_api ( )
sys . exit ( )
2010-11-05 15:54:15 +01:00
while options . fetch_scheduler :
try : po . fetch ( ' scheduler ' )
except Exception , e :
print e
sys . exit ( )
2011-01-02 03:45:10 +01:00
#print 'ZZzZzZzzzzZZZz.... sleeping for ' + str(POLL_INTERVAL) + ' seconds'
#logger.info('fetch loop %s - ZZzZzZzzzzZZZz.... sleeping for %s seconds', loops, POLL_INTERVAL)
if ( loops % 2 == 0 ) :
logger . info ( " heartbeat \n \n \n \n " )
2010-11-05 15:54:15 +01:00
loops + = 1
time . sleep ( POLL_INTERVAL )
while options . fetch_daypart :
try : po . fetch ( ' daypart ' )
except Exception , e :
print e
sys . exit ( )
2011-01-02 03:45:10 +01:00
#print 'ZZzZzZzzzzZZZz.... sleeping for ' + str(POLL_INTERVAL) + ' seconds'
#logger.info('fetch loop %s - ZZzZzZzzzzZZZz.... sleeping for %s seconds', loops, POLL_INTERVAL)
2010-11-05 15:54:15 +01:00
loops + = 1
time . sleep ( POLL_INTERVAL )
while options . push_scheduler :
po . push ( ' scheduler ' )
try : po . push ( ' scheduler ' )
except Exception , e :
print ' PUSH ERROR!! WILL EXIT NOW:( '
print e
sys . exit ( )
2011-01-02 03:45:10 +01:00
if ( loops % 20 == 0 ) :
logger . info ( " heartbeat " )
#logger.info('push loop %s - ZZzZzZzzzzZZZz.... sleeping for %s seconds', loops, PUSH_INTERVAL)
2010-11-05 15:54:15 +01:00
loops + = 1
time . sleep ( PUSH_INTERVAL )
while options . push_daypart :
po . push ( ' daypart ' )
try : po . push ( ' daypart ' )
except Exception , e :
print ' PUSH ERROR!! WILL EXIT NOW:( '
print e
sys . exit ( )
2011-01-02 03:45:10 +01:00
#logger.info('push loop %s - ZZzZzZzzzzZZZz.... sleeping for %s seconds', loops, PUSH_INTERVAL)
2010-11-05 15:54:15 +01:00
loops + = 1
time . sleep ( PUSH_INTERVAL )
2010-11-30 00:34:22 +01:00
#while options.jingles:
# try: po.update_jingles(options.jingles)
# except Exception, e:
# print e
# sys.exit()
2010-11-05 15:54:15 +01:00
while options . check :
try : po . check_schedule ( )
except Exception , e :
print e
sys . exit ( )
while options . cleanup :
2010-11-30 00:34:22 +01:00
try : po . cleanup ( ' scheduler ' )
2010-11-05 15:54:15 +01:00
except Exception , e :
print e
sys . exit ( )
2010-11-08 22:54:54 +01:00
2010-11-05 15:54:15 +01:00
sys . exit ( )