remove media-monitor

airtime-media-monitor conflicts with airtime-analyzer, which is now
used to monitor the LibreTime application

https://github.com/LibreTime/libretime-debian-packaging/issues/2#issuecomment-359987457
Kyle Robertze 2018-01-24 09:20:51 +02:00
parent 4508d0d8c0
commit c16d7c2a1d
81 changed files with 13 additions and 6164 deletions

View File

@ -54,7 +54,7 @@ class Application_Model_RabbitMq
{
$md["event_type"] = $event_type;
-$exchange = 'airtime-media-monitor';
+$exchange = 'airtime-analyzer';
$data = json_encode($md);
self::sendMessage($exchange, 'direct', true, $data);
}

View File

@ -184,7 +184,7 @@ class Application_Model_Systemstatus
$ip = $component->getDbIp();
$docRoot = self::GetMonitStatus($ip);
-$data = self::ExtractServiceInformation($docRoot, "airtime-media-monitor");
+$data = self::ExtractServiceInformation($docRoot, "airtime-analyzer");
return $data;
}
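As an aside, the monit status that ExtractServiceInformation() parses can also be checked by hand. This is only a sketch: it assumes monit is configured with a service named airtime-analyzer, which is what the updated call above implies but this hunk does not show.

```bash
# Query monit directly for the renamed service (hypothetical service name,
# inferred from the ExtractServiceInformation() argument above).
sudo monit status airtime-analyzer
```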

View File

@ -82,7 +82,7 @@
Make sure RabbitMQ is installed correctly, and that your settings in /etc/airtime/airtime.conf
are correct. Try using <code>sudo rabbitmqctl list_users</code> and <code>sudo rabbitmqctl list_vhosts</code>
to see if the airtime user (or your custom RabbitMQ user) exists, then checking that
-<code>sudo rabbitmqctl list_exchanges</code> contains entries for airtime-media-monitor, airtime-pypo,
+<code>sudo rabbitmqctl list_exchanges</code> contains entries for airtime-analyzer, airtime-pypo,
and airtime-uploads.
<?php
}
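Run together, the checks this help text describes look roughly like the following, assuming the default airtime RabbitMQ user and vhost:

```bash
# Confirm the RabbitMQ user and vhost exist.
sudo rabbitmqctl list_users
sudo rabbitmqctl list_vhosts

# After this change the exchange list should include airtime-analyzer
# (rather than airtime-media-monitor), plus airtime-pypo and airtime-uploads.
sudo rabbitmqctl list_exchanges | grep -E 'airtime-analyzer|airtime-pypo|airtime-uploads'
```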

View File

@ -107,9 +107,9 @@ function checkRMQConnection() {
}
/**
- * Check if airtime-media-monitor is currently running
+ * Check if airtime-analyzer is currently running
*
- * @return boolean true if airtime-media-monitor is running
+ * @return boolean true if airtime-analyzer is running
*/
function checkAnalyzerService() {
exec("pgrep -f -u www-data airtime_analyzer", $out, $status);

View File

@ -8,7 +8,7 @@ rabbitmqctl start_app
RABBITMQ_VHOST="/airtime_tests"
RABBITMQ_USER="airtime_tests"
RABBITMQ_PASSWORD="airtime_tests"
EXCHANGES="airtime-pypo|pypo-fetch|airtime-media-monitor|media-monitor"
EXCHANGES="airtime-pypo|pypo-fetch|airtime-analyzer|media-monitor"
rabbitmqctl list_vhosts | grep $RABBITMQ_VHOST
RESULT="$?"
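Presumably the EXCHANGES pattern is consumed the same way as the vhost check on the last two lines; a hedged sketch of such a check (not shown in this hunk):

```bash
# Hypothetical exchange check mirroring the vhost test above; grep -E
# exits 0 if at least one of the expected exchange names is present.
rabbitmqctl list_exchanges -p "$RABBITMQ_VHOST" | grep -E "$EXCHANGES"
RESULT="$?"
```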

View File

@ -2,7 +2,7 @@ If your Airtime server is not working as expected, individual components of the
sudo invoke-rc.d airtime-liquidsoap start|stop|restart|status
sudo invoke-rc.d airtime-playout start|stop|restart|status
-sudo invoke-rc.d airtime-media-monitor start|stop|restart|status
+sudo invoke-rc.d airtime-analyzer start|stop|restart|status
sudo invoke-rc.d apache2 start|stop|restart|status
sudo invoke-rc.d rabbitmq-server start|stop|restart|status
@ -14,7 +14,7 @@ The server should respond:
Restarting Airtime Playout: Done.
-The **status** option for **airtime-playout** and **airtime-media-monitor** runs the **airtime-check-system** script to confirm that all of Airtime's dependencies are installed and running correctly.
+The **status** option for **airtime-playout** and **airtime-analyzer** runs the **airtime-check-system** script to confirm that all of Airtime's dependencies are installed and running correctly.
Log files
---------
@ -79,6 +79,6 @@ where the hostname is *airtime.example.com*. If the hostname has changed, it may
rabbitmqctl add_vhost /airtime
rabbitmqctl add_user airtime XXXXXXXXXXXXXXXXXXXX
rabbitmqctl set_permissions -p /airtime airtime
"airtime-pypo|pypo-fetch|airtime-media-monitor|media-monitor"
  "airtime-pypo|pypo-fetch|airtime-media-monitor|media-monitor"
 "airtime-pypo|pypo-fetch|airtime-media-monitor|media-monitor"
"airtime-pypo|pypo-fetch|airtime-analyzer|media-monitor"
  "airtime-pypo|pypo-fetch|airtime-analyzer|media-monitor"
 "airtime-pypo|pypo-fetch|airtime-analyzer|media-monitor"

View File

@ -1017,7 +1017,7 @@ loud "-----------------------------------------------------"
RABBITMQ_VHOST=/airtime
RABBITMQ_USER=airtime
RABBITMQ_PASSWORD=airtime
EXCHANGES="airtime-pypo|pypo-fetch|airtime-media-monitor|media-monitor"
EXCHANGES="airtime-pypo|pypo-fetch|airtime-analyzer|media-monitor"
# Ignore errors in this check to avoid dying when vhost isn't found
set +e
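The `set +e` guard exists because the script inspects exit codes itself instead of letting the shell abort on the first failure; a minimal sketch of the pattern, using the variable names from this hunk:

```bash
set +e  # tolerate non-zero exit codes while probing
rabbitmqctl list_vhosts | grep "$RABBITMQ_VHOST"
RESULT="$?"
set -e  # restore abort-on-error behaviour

if [ "$RESULT" -ne 0 ]; then
    echo "vhost $RABBITMQ_VHOST not found"
fi
```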

View File

@ -1,17 +0,0 @@
#!/usr/bin/python
import logging
import locale
import time
import sys
import os
import mm2.mm2 as mm2
from std_err_override import LogWriter
locale.setlocale(locale.LC_ALL, '')
def run():
global_cfg = '/etc/airtime/airtime.conf'
logging_cfg = '/etc/airtime/media_monitor_logging.cfg'
mm2.main( global_cfg, logging_cfg )
run()

View File

@ -1,32 +0,0 @@
[loggers]
keys= root,notifier,metadata
[handlers]
keys=fileOutHandler
[formatters]
keys=simpleFormatter
[logger_root]
level=DEBUG
handlers=fileOutHandler
[logger_notifier]
level=DEBUG
handlers=fileOutHandler
qualname=notifier
[logger_metadata]
level=DEBUG
handlers=fileOutHandler
qualname=metadata
[handler_fileOutHandler]
class=logging.handlers.RotatingFileHandler
level=DEBUG
formatter=simpleFormatter
args=("/var/log/airtime/media-monitor/media-monitor.log", 'a', 10000000, 5,)
[formatter_simpleFormatter]
format=%(asctime)s %(levelname)s - [%(threadName)s] [%(filename)s : %(funcName)s()] : LINE %(lineno)d - %(message)s
datefmt=

View File

@ -1,78 +0,0 @@
#!/bin/bash
### BEGIN INIT INFO
# Provides: airtime-media-monitor
# Required-Start: $local_fs $remote_fs $network $syslog $all
# Required-Stop: $local_fs $remote_fs $network $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Manage airtime-media-monitor daemon
### END INIT INFO
USERID=www-data
GROUPID=www-data
NAME=airtime-media-monitor
DAEMON=/usr/bin/$NAME
PIDFILE=/var/run/$NAME.pid
# Exit if the package is not installed
[ -x "$DAEMON" ] || exit 0
# Read configuration variable file if it is present
[ -r /etc/default/$NAME ] && . /etc/default/$NAME
# Load the VERBOSE setting and other rcS variables
. /lib/init/vars.sh
# Define LSB log_* functions.
# Depend on lsb-base (>= 3.2-14) to ensure that this file is present
# and status_of_proc is working.
. /lib/lsb/init-functions
start () {
start-stop-daemon --start --background --quiet --chuid $USERID:$GROUPID \
--make-pidfile --pidfile $PIDFILE --startas $DAEMON
}
stop () {
# Send TERM after 5 seconds, wait at most 30 seconds.
start-stop-daemon --stop --oknodo --retry TERM/5/0/30 --quiet --pidfile $PIDFILE
rm -f $PIDFILE
}
case "${1:-''}" in
'start')
# start commands here
echo -n "Starting $NAME: "
start
echo "Done."
;;
'stop')
# stop commands here
echo -n "Stopping $NAME: "
stop
echo "Done."
;;
'restart')
# restart commands here
echo -n "Restarting $NAME: "
stop
start
echo "Done."
;;
'force-reload')
# reload commands here
echo -n "Reloading $NAME: "
stop
start
echo "Done."
;;
'status')
status_of_proc "$DAEMON" "$NAME" && exit 0 || exit $?
;;
*) # no parameter specified
echo "Usage: $SELF start|stop|restart|status"
exit 1
;;
esac

View File

@ -1,15 +0,0 @@
description "Airtime Media Monitor"
author "help@sourcefabric.org"
start on runlevel [2345]
stop on runlevel [!2345]
respawn
setuid WEB_USER
setgid WEB_USER
env LANG='en_US.UTF-8'
env LC_ALL='en_US.UTF-8'
exec airtime-media-monitor

View File

@ -1,16 +0,0 @@
import logging
import locale
import time
import sys
import os
import mm2.mm2 as mm2
from std_err_override import LogWriter
locale.setlocale(locale.LC_ALL, '')
def run():
global_cfg = '/etc/airtime/airtime.conf'
logging_cfg = os.path.join(os.path.dirname(__file__), 'logging.cfg')
mm2.main( global_cfg, logging_cfg )
run()

View File

@ -1,168 +0,0 @@
# -*- coding: utf-8 -*-
import os
import time
import pyinotify
import shutil
class AirtimeMediaMonitorBootstrap():
"""AirtimeMediaMonitorBootstrap constructor
Keyword Arguments:
logger -- reference to the media-monitor logging facility
pe -- reference to an instance of ProcessEvent
api_clients -- reference of api_clients to communicate with airtime-server
"""
def __init__(self, logger, pe, api_client, mmc, wm, config):
self.logger = logger
self.pe = pe
self.api_client = api_client
self.mmc = mmc
self.wm = wm
self.config = config
# add /etc on watch list so we can detect mount
self.mount_file = "/etc"
self.curr_mtab_file = "/var/tmp/airtime/media-monitor/currMtab"
self.logger.info("Adding %s on watch list...", self.mount_file)
self.wm.add_watch(self.mount_file, pyinotify.ALL_EVENTS, rec=False, auto_add=False)
tmp_dir = os.path.dirname(self.curr_mtab_file)
if not os.path.exists(tmp_dir):
os.makedirs(tmp_dir)
# create currMtab file if it's the first time
if not os.path.exists(self.curr_mtab_file):
shutil.copy('/etc/mtab', self.curr_mtab_file)
"""On bootup we want to scan all directories and look for files that
weren't there or files that changed before media-monitor process
went offline.
"""
def scan(self):
directories = self.get_list_of_watched_dirs()
self.logger.info("watched directories found: %s", directories)
for id, dir in directories.iteritems():
self.logger.debug("%s, %s", id, dir)
self.sync_database_to_filesystem(id, dir)
"""Gets a list of files that the Airtime database knows for a specific directory.
You need to provide the directory's row ID, which is obtained when calling
get_list_of_watched_dirs function.
dir_id -- row id of the directory in the cc_watched_dirs database table
"""
def list_db_files(self, dir_id):
return self.api_client.list_all_db_files(dir_id)
"""
returns the path and its corresponding database row idfor all watched directories. Also
returns the Stor directory, which can be identified by its row id (always has value of "1")
Return type is a dictionary similar to:
{"1":"/srv/airtime/stor/"}
"""
def get_list_of_watched_dirs(self):
json = self.api_client.list_all_watched_dirs()
try:
return json["dirs"]
except KeyError as e:
self.logger.error("Could not find index 'dirs' in dictionary: %s", str(json))
self.logger.error(str(e))
return {}
"""
This function takes in a path name provided by the database (and its corresponding row id)
and reads the list of files in the local file system. Its purpose is to discover which files
exist on the file system but not in the database and vice versa, as well as which files have
been modified since the database was last updated. In each case, this method will call an
appropiate method to ensure that the database actually represents the filesystem.
dir_id -- row id of the directory in the cc_watched_dirs database table
dir -- pathname of the directory
"""
def sync_database_to_filesystem(self, dir_id, dir):
# TODO: is this line even necessary?
dir = os.path.normpath(dir)+"/"
"""
set to hold new and/or modified files. We use a set to make it ok if files are added
twice. This is because some of the tests for new files return result sets that are not
mutually exclusive from each other.
"""
removed_files = set()
db_known_files_set = set()
files = self.list_db_files(dir_id)
for f in files:
db_known_files_set.add(f)
all_files = self.mmc.clean_dirty_file_paths( self.mmc.scan_dir_for_new_files(dir) )
all_files_set = set()
for file_path in all_files:
if self.config.problem_directory not in file_path:
all_files_set.add(file_path[len(dir):])
# if dir doesn't exists, update db
if not os.path.exists(dir):
self.pe.handle_stdout_files(dir)
if os.path.exists(self.mmc.timestamp_file):
"""find files that have been modified since the last time media-monitor process started."""
time_diff_sec = time.time() - os.path.getmtime(self.mmc.timestamp_file)
command = self.mmc.find_command(directory=dir, extra_arguments=("-type f -readable -mmin -%d" % (time_diff_sec/60+1)))
else:
command = self.mmc.find_command(directory=dir, extra_arguments="-type f -readable")
self.logger.debug(command)
stdout = self.mmc.exec_command(command)
if stdout is None:
new_files = []
else:
new_files = stdout.splitlines()
new_and_modified_files = set()
for file_path in new_files:
if self.config.problem_directory not in file_path:
new_and_modified_files.add(file_path[len(dir):])
"""
new_and_modified_files gives us a set of files that were either copied or modified
since the last time media-monitor was running. These files were collected based on
their modified timestamp. But this is not all that has changed in the directory. Files
could have been removed, or files could have been moved into this directory (moving does
not affect last modified timestamp). Lets get a list of files that are on the file-system
that the db has no record of, and vice-versa.
"""
deleted_files_set = db_known_files_set - all_files_set
new_files_set = all_files_set - db_known_files_set
modified_files_set = new_and_modified_files - new_files_set
self.logger.info(u"Deleted files: \n%s\n\n", deleted_files_set)
self.logger.info(u"New files: \n%s\n\n", new_files_set)
self.logger.info(u"Modified files: \n%s\n\n", modified_files_set)
#"touch" file timestamp
try:
self.mmc.touch_index_file()
except Exception, e:
self.logger.warn(e)
for file_path in deleted_files_set:
self.logger.debug("deleted file")
full_file_path = os.path.join(dir, file_path)
self.logger.debug(full_file_path)
self.pe.handle_removed_file(False, full_file_path)
for file_set, debug_message, handle_attribute in [(new_files_set, "new file", "handle_created_file"),
(modified_files_set, "modified file", "handle_modified_file")]:
for file_path in file_set:
self.logger.debug(debug_message)
full_file_path = os.path.join(dir, file_path)
self.logger.debug(full_file_path)
if os.path.exists(full_file_path):
getattr(self.pe,handle_attribute)(False,full_file_path, os.path.basename(full_file_path))

View File

@ -1,268 +0,0 @@
# -*- coding: utf-8 -*-
import replaygain
import os
import hashlib
import mutagen
import logging
import math
import traceback
"""
list of supported easy tags in mutagen version 1.20
['albumartistsort', 'musicbrainz_albumstatus', 'lyricist', 'releasecountry',
'date', 'performer', 'musicbrainz_albumartistid', 'composer', 'encodedby',
'tracknumber', 'musicbrainz_albumid', 'album', 'asin', 'musicbrainz_artistid',
'mood', 'copyright', 'author', 'media', 'length', 'version', 'artistsort',
'titlesort', 'discsubtitle', 'website', 'musicip_fingerprint', 'conductor',
'compilation', 'barcode', 'performer:*', 'composersort', 'musicbrainz_discid',
'musicbrainz_albumtype', 'genre', 'isrc', 'discnumber', 'musicbrainz_trmid',
'replaygain_*_gain', 'musicip_puid', 'artist', 'title', 'bpm', 'musicbrainz_trackid',
'arranger', 'albumsort', 'replaygain_*_peak', 'organization']
"""
class AirtimeMetadata:
def __init__(self):
self.airtime2mutagen = {\
"MDATA_KEY_TITLE": "title", \
"MDATA_KEY_CREATOR": "artist", \
"MDATA_KEY_SOURCE": "album", \
"MDATA_KEY_GENRE": "genre", \
"MDATA_KEY_MOOD": "mood", \
"MDATA_KEY_TRACKNUMBER": "tracknumber", \
"MDATA_KEY_BPM": "bpm", \
"MDATA_KEY_LABEL": "organization", \
"MDATA_KEY_COMPOSER": "composer", \
"MDATA_KEY_ENCODER": "encodedby", \
"MDATA_KEY_CONDUCTOR": "conductor", \
"MDATA_KEY_YEAR": "date", \
"MDATA_KEY_URL": "website", \
"MDATA_KEY_ISRC": "isrc", \
"MDATA_KEY_COPYRIGHT": "copyright", \
}
self.mutagen2airtime = {\
"title": "MDATA_KEY_TITLE", \
"artist": "MDATA_KEY_CREATOR", \
"album": "MDATA_KEY_SOURCE", \
"genre": "MDATA_KEY_GENRE", \
"mood": "MDATA_KEY_MOOD", \
"tracknumber": "MDATA_KEY_TRACKNUMBER", \
"bpm": "MDATA_KEY_BPM", \
"organization": "MDATA_KEY_LABEL", \
"composer": "MDATA_KEY_COMPOSER", \
"encodedby": "MDATA_KEY_ENCODER", \
"conductor": "MDATA_KEY_CONDUCTOR", \
"date": "MDATA_KEY_YEAR", \
"website": "MDATA_KEY_URL", \
"isrc": "MDATA_KEY_ISRC", \
"copyright": "MDATA_KEY_COPYRIGHT", \
}
self.logger = logging.getLogger()
def get_md5(self, filepath):
"""
Returns an md5 of the file located at filepath. Returns an empty string
if there was an error reading the file.
"""
try:
f = open(filepath, 'rb')
m = hashlib.md5()
m.update(f.read())
md5 = m.hexdigest()
except Exception, e:
return ""
return md5
## mutagen_length is in seconds with the format (d+).dd
## return format hh:mm:ss.uuu
def format_length(self, mutagen_length):
t = float(mutagen_length)
h = int(math.floor(t / 3600))
t = t % 3600
m = int(math.floor(t / 60))
s = t % 60
# will be ss.uuu
s = str(s)
seconds = s.split(".")
s = seconds[0]
# have a maximum of 6 subseconds.
if len(seconds[1]) >= 6:
ss = seconds[1][0:6]
else:
ss = seconds[1][0:]
length = "%s:%s:%s.%s" % (h, m, s, ss)
return length
def save_md_to_file(self, m):
try:
airtime_file = mutagen.File(m['MDATA_KEY_FILEPATH'], easy=True)
for key in m:
if key in self.airtime2mutagen:
value = m[key]
if value is not None:
value = unicode(value)
else:
value = unicode('');
#if len(value) > 0:
self.logger.debug("Saving key '%s' with value '%s' to file", key, value)
airtime_file[self.airtime2mutagen[key]] = value
airtime_file.save()
except Exception, e:
self.logger.error('Trying to save md')
self.logger.error('Exception: %s', e)
self.logger.error('Filepath %s', m['MDATA_KEY_FILEPATH'])
def truncate_to_length(self, item, length):
if isinstance(item, int):
item = str(item)
if isinstance(item, basestring):
if len(item) > length:
return item[0:length]
else:
return item
def get_md_from_file(self, filepath):
"""
Returns None if error retrieving metadata. Otherwise returns a dictionary
representing the file's metadata
"""
self.logger.info("getting info from filepath %s", filepath)
md = {}
replay_gain_val = replaygain.calculate_replay_gain(filepath)
self.logger.info('ReplayGain calculated as %s for %s' % (replay_gain_val, filepath))
md['MDATA_KEY_REPLAYGAIN'] = replay_gain_val
try:
md5 = self.get_md5(filepath)
md['MDATA_KEY_MD5'] = md5
file_info = mutagen.File(filepath, easy=True)
except Exception, e:
self.logger.error("failed getting metadata from %s", filepath)
self.logger.error("Exception %s", e)
return None
#check if file has any metadata
if file_info is None:
return None
for key in file_info.keys() :
if key in self.mutagen2airtime:
val = file_info[key]
try:
if val is not None and len(val) > 0 and val[0] is not None and len(val[0]) > 0:
md[self.mutagen2airtime[key]] = val[0]
except Exception, e:
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", traceback.format_exc())
if 'MDATA_KEY_TITLE' not in md:
#get rid of file extension from original name, name might have more than 1 '.' in it.
original_name = os.path.basename(filepath)
original_name = original_name.split(".")[0:-1]
original_name = ''.join(original_name)
md['MDATA_KEY_TITLE'] = original_name
#incase track number is in format u'4/11'
#need to also check that the tracknumber is even a tracknumber (cc-2582)
if 'MDATA_KEY_TRACKNUMBER' in md:
try:
md['MDATA_KEY_TRACKNUMBER'] = int(md['MDATA_KEY_TRACKNUMBER'])
except Exception, e:
pass
if isinstance(md['MDATA_KEY_TRACKNUMBER'], basestring):
try:
md['MDATA_KEY_TRACKNUMBER'] = int(md['MDATA_KEY_TRACKNUMBER'].split("/")[0], 10)
except Exception, e:
del md['MDATA_KEY_TRACKNUMBER']
#make sure bpm is valid, need to check more types of formats for this tag to assure correct parsing.
if 'MDATA_KEY_BPM' in md:
if isinstance(md['MDATA_KEY_BPM'], basestring):
try:
md['MDATA_KEY_BPM'] = int(md['MDATA_KEY_BPM'])
except Exception, e:
del md['MDATA_KEY_BPM']
#following metadata is truncated if needed to fit db requirements.
if 'MDATA_KEY_GENRE' in md:
md['MDATA_KEY_GENRE'] = self.truncate_to_length(md['MDATA_KEY_GENRE'], 64)
if 'MDATA_KEY_TITLE' in md:
md['MDATA_KEY_TITLE'] = self.truncate_to_length(md['MDATA_KEY_TITLE'], 512)
if 'MDATA_KEY_CREATOR' in md:
md['MDATA_KEY_CREATOR'] = self.truncate_to_length(md['MDATA_KEY_CREATOR'], 512)
if 'MDATA_KEY_SOURCE' in md:
md['MDATA_KEY_SOURCE'] = self.truncate_to_length(md['MDATA_KEY_SOURCE'], 512)
if 'MDATA_KEY_MOOD' in md:
md['MDATA_KEY_MOOD'] = self.truncate_to_length(md['MDATA_KEY_MOOD'], 64)
if 'MDATA_KEY_LABEL' in md:
md['MDATA_KEY_LABEL'] = self.truncate_to_length(md['MDATA_KEY_LABEL'], 512)
if 'MDATA_KEY_COMPOSER' in md:
md['MDATA_KEY_COMPOSER'] = self.truncate_to_length(md['MDATA_KEY_COMPOSER'], 512)
if 'MDATA_KEY_ENCODER' in md:
md['MDATA_KEY_ENCODER'] = self.truncate_to_length(md['MDATA_KEY_ENCODER'], 255)
if 'MDATA_KEY_CONDUCTOR' in md:
md['MDATA_KEY_CONDUCTOR'] = self.truncate_to_length(md['MDATA_KEY_CONDUCTOR'], 512)
if 'MDATA_KEY_YEAR' in md:
md['MDATA_KEY_YEAR'] = self.truncate_to_length(md['MDATA_KEY_YEAR'], 16)
if 'MDATA_KEY_URL' in md:
md['MDATA_KEY_URL'] = self.truncate_to_length(md['MDATA_KEY_URL'], 512)
if 'MDATA_KEY_ISRC' in md:
md['MDATA_KEY_ISRC'] = self.truncate_to_length(md['MDATA_KEY_ISRC'], 512)
if 'MDATA_KEY_COPYRIGHT' in md:
md['MDATA_KEY_COPYRIGHT'] = self.truncate_to_length(md['MDATA_KEY_COPYRIGHT'], 512)
#end of db truncation checks.
try:
md['MDATA_KEY_BITRATE'] = getattr(file_info.info, "bitrate", 0)
md['MDATA_KEY_SAMPLERATE'] = getattr(file_info.info, "sample_rate", 0)
md['MDATA_KEY_DURATION'] = self.format_length(getattr(file_info.info, "length", 0.0))
md['MDATA_KEY_MIME'] = ""
if len(file_info.mime) > 0:
md['MDATA_KEY_MIME'] = file_info.mime[0]
except Exception as e:
self.logger.warn(e)
if "mp3" in md['MDATA_KEY_MIME']:
md['MDATA_KEY_FTYPE'] = "audioclip"
elif "vorbis" in md['MDATA_KEY_MIME']:
md['MDATA_KEY_FTYPE'] = "audioclip"
else:
self.logger.error("File %s of mime type %s does not appear to be a valid vorbis or mp3 file." % (filepath, md['MDATA_KEY_MIME']))
return None
return md

View File

@ -1,213 +0,0 @@
# -*- coding: utf-8 -*-
import json
import time
import os
import logging
import traceback
# For RabbitMQ
from kombu.connection import BrokerConnection
from kombu.messaging import Exchange, Queue, Consumer
import pyinotify
from pyinotify import Notifier
from airtimemetadata import AirtimeMetadata
class AirtimeNotifier(Notifier):
def __init__(self, watch_manager, default_proc_fun=None, read_freq=0, threshold=0, timeout=None, airtime_config=None, api_client=None, bootstrap=None, mmc=None):
Notifier.__init__(self, watch_manager, default_proc_fun, read_freq, threshold, timeout)
self.logger = logging.getLogger()
self.config = airtime_config
self.api_client = api_client
self.bootstrap = bootstrap
self.md_manager = AirtimeMetadata()
self.import_processes = {}
self.watched_folders = []
self.mmc = mmc
self.wm = watch_manager
self.mask = pyinotify.ALL_EVENTS
while not self.init_rabbit_mq():
self.logger.error("Error connecting to RabbitMQ Server. Trying again in few seconds")
time.sleep(5)
def init_rabbit_mq(self):
"""
This function will attempt to connect to RabbitMQ Server and if successful
return 'True'. Returns 'False' otherwise.
"""
self.logger.info("Initializing RabbitMQ stuff")
try:
schedule_exchange = Exchange("airtime-media-monitor", "direct", durable=True, auto_delete=True)
schedule_queue = Queue("media-monitor", exchange=schedule_exchange, key="filesystem")
self.connection = BrokerConnection(self.config.cfg["rabbitmq"]["rabbitmq_host"], self.config.cfg["rabbitmq"]["rabbitmq_user"], self.config.cfg["rabbitmq"]["rabbitmq_password"], self.config.cfg["rabbitmq"]["rabbitmq_vhost"])
channel = self.connection.channel()
consumer = Consumer(channel, schedule_queue)
consumer.register_callback(self.handle_message)
consumer.consume()
except Exception, e:
self.logger.error(e)
return False
return True
def handle_message(self, body, message):
"""
Messages received from RabbitMQ are handled here. These messages
instruct media-monitor of events such as a new directory being watched,
file metadata has been changed, or any other changes to the config of
media-monitor via the web UI.
"""
# ACK the message to take it off the queue
message.ack()
self.logger.info("Received md from RabbitMQ: " + body)
m = json.loads(message.body)
if m['event_type'] == "md_update":
self.logger.info("AIRTIME NOTIFIER md update event")
self.md_manager.save_md_to_file(m)
elif m['event_type'] == "new_watch":
self.logger.info("AIRTIME NOTIFIER add watched folder event " + m['directory'])
self.walk_newly_watched_directory(m['directory'])
self.watch_directory(m['directory'])
elif m['event_type'] == "remove_watch":
watched_directory = m['directory']
mm = self.proc_fun()
wd = mm.wm.get_wd(watched_directory)
self.logger.info("Removing watch on: %s wd %s", watched_directory, wd)
mm.wm.rm_watch(wd, rec=True)
elif m['event_type'] == "rescan_watch":
self.bootstrap.sync_database_to_filesystem(str(m['id']), m['directory'])
elif m['event_type'] == "change_stor":
storage_directory = self.config.storage_directory
new_storage_directory = m['directory']
new_storage_directory_id = str(m['dir_id'])
mm = self.proc_fun()
wd = mm.wm.get_wd(storage_directory)
self.logger.info("Removing watch on: %s wd %s", storage_directory, wd)
mm.wm.rm_watch(wd, rec=True)
self.bootstrap.sync_database_to_filesystem(new_storage_directory_id, new_storage_directory)
self.config.storage_directory = os.path.normpath(new_storage_directory)
self.config.imported_directory = os.path.normpath(os.path.join(new_storage_directory, '/imported'))
self.config.organize_directory = os.path.normpath(os.path.join(new_storage_directory, '/organize'))
for directory in [self.config.storage_directory, self.config.imported_directory, self.config.organize_directory]:
self.mmc.ensure_is_dir(directory)
self.mmc.is_readable(directory, True)
self.watch_directory(new_storage_directory)
elif m['event_type'] == "file_delete":
filepath = m['filepath']
mm = self.proc_fun()
self.logger.info("Adding file to ignore: %s ", filepath)
mm.add_filepath_to_ignore(filepath)
if m['delete']:
self.logger.info("Deleting file: %s ", filepath)
try:
os.unlink(filepath)
except Exception, e:
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", traceback.format_exc())
def update_airtime(self, event):
"""
Update airtime with information about files discovered in our
watched directories.
event: a dict() object with the following attributes:
-filepath
-mode
-data
-is_recorded_show
"""
try:
self.logger.info("updating filepath: %s ", event['filepath'])
filepath = event['filepath']
mode = event['mode']
md = {}
md['MDATA_KEY_FILEPATH'] = os.path.normpath(filepath)
if 'data' in event:
file_md = event['data']
md.update(file_md)
else:
file_md = None
if (os.path.exists(filepath) and (mode == self.config.MODE_CREATE)):
if file_md is None:
mutagen = self.md_manager.get_md_from_file(filepath)
if mutagen is None:
return
md.update(mutagen)
if 'is_recorded_show' in event and event['is_recorded_show']:
self.api_client.update_media_metadata(md, mode, True)
else:
self.api_client.update_media_metadata(md, mode)
elif (os.path.exists(filepath) and (mode == self.config.MODE_MODIFY)):
mutagen = self.md_manager.get_md_from_file(filepath)
if mutagen is None:
return
md.update(mutagen)
if 'is_recorded_show' in event and event['is_recorded_show']:
self.api_client.update_media_metadata(md, mode, True)
else:
self.api_client.update_media_metadata(md, mode)
elif (mode == self.config.MODE_MOVED):
md['MDATA_KEY_MD5'] = self.md_manager.get_md5(filepath)
if 'is_recorded_show' in event and event['is_recorded_show']:
self.api_client.update_media_metadata(md, mode, True)
else:
self.api_client.update_media_metadata(md, mode)
elif (mode == self.config.MODE_DELETE):
self.api_client.update_media_metadata(md, mode)
elif (mode == self.config.MODE_DELETE_DIR):
self.api_client.update_media_metadata(md, mode)
except Exception, e:
self.logger.error("failed updating filepath: %s ", event['filepath'])
self.logger.error('Exception: %s', e)
self.logger.error('Traceback: %s', traceback.format_exc())
#define which directories the pyinotify WatchManager should watch.
def watch_directory(self, directory):
return self.wm.add_watch(directory, self.mask, rec=True, auto_add=True)
def walk_newly_watched_directory(self, directory):
mm = self.proc_fun()
self.mmc.is_readable(directory, True)
for (path, dirs, files) in os.walk(directory):
for filename in files:
full_filepath = os.path.join(path, filename)
if self.mmc.is_audio_file(full_filepath):
if self.mmc.is_readable(full_filepath, False):
self.logger.info("importing %s", full_filepath)
event = {'filepath': full_filepath, 'mode': self.config.MODE_CREATE, 'is_recorded_show': False}
mm.multi_queue.put(event)
else:
self.logger.warn("file '%s' has does not have sufficient read permissions. Ignoring.", full_filepath)

View File

@ -1,431 +0,0 @@
# -*- coding: utf-8 -*-
import socket
import logging
import time
import os
import shutil
import difflib
import traceback
from subprocess import Popen, PIPE
from pyinotify import ProcessEvent
from airtimemetadata import AirtimeMetadata
from airtimefilemonitor.mediaconfig import AirtimeMediaConfig
from api_clients import api_client
class AirtimeProcessEvent(ProcessEvent):
#TODO
def my_init(self, queue, airtime_config=None, wm=None, mmc=None, api_client=api_client):
"""
Method automatically called from ProcessEvent.__init__(). Additional
keyworded arguments passed to ProcessEvent.__init__() are then
delegated to my_init().
"""
self.logger = logging.getLogger()
self.config = airtime_config
#put the file path into this dict if we want to ignore certain
#events. For example, when deleting a file from the web ui, we
#are going to delete it from the db on the server side, so media-monitor
#doesn't need to contact the server and tell it to delete again.
self.ignore_event = set()
self.temp_files = {}
self.cookies_IN_MOVED_FROM = {}
self.file_events = []
self.multi_queue = queue
self.wm = wm
self.md_manager = AirtimeMetadata()
self.mmc = mmc
self.api_client = api_client
self.create_dict = {}
self.mount_file_dir = "/etc";
self.mount_file = "/etc/mtab";
self.curr_mtab_file = "/var/tmp/airtime/media-monitor/currMtab"
self.prev_mtab_file = "/var/tmp/airtime/media-monitor/prevMtab"
def add_filepath_to_ignore(self, filepath):
self.ignore_event.add(filepath)
def process_IN_MOVE_SELF(self, event):
self.logger.info("event: %s", event)
path = event.path
if event.dir:
if "-unknown-path" in path:
unknown_path = path
pos = path.find("-unknown-path")
path = path[0:pos] + "/"
list = self.api_client.list_all_watched_dirs()
# case where the dir that is being watched is moved to somewhere
if path in list[u'dirs'].values():
self.logger.info("Requesting the airtime server to remove '%s'", path)
res = self.api_client.remove_watched_dir(path)
if(res is None):
self.logger.info("Unable to connect to the Airtime server.")
# sucess
if(res['msg']['code'] == 0):
self.logger.info("%s removed from watch folder list successfully.", path)
else:
self.logger.info("Removing the watch folder failed: %s", res['msg']['error'])
else:
# subdir being moved
# in this case, it has to remove watch manualy and also have to manually delete all records
# on cc_files table
wd = self.wm.get_wd(unknown_path)
self.logger.info("Removing watch on: %s wd %s", unknown_path, wd)
self.wm.rm_watch(wd, rec=True)
self.file_events.append({'mode': self.config.MODE_DELETE_DIR, 'filepath': path})
def process_IN_DELETE_SELF(self, event):
#we only care about files that have been moved away from imported/ or organize/ dir
if event.path in self.config.problem_directory or event.path in self.config.organize_directory:
return
self.logger.info("event: %s", event)
path = event.path + '/'
if event.dir:
list = self.api_client.list_all_watched_dirs()
if path in list[u'dirs'].values():
self.logger.info("Requesting the airtime server to remove '%s'", path)
res = self.api_client.remove_watched_dir(path)
if(res is None):
self.logger.info("Unable to connect to the Airtime server.")
# sucess
if(res['msg']['code'] == 0):
self.logger.info("%s removed from watch folder list successfully.", path)
else:
self.logger.info("Removing the watch folder failed: %s", res['msg']['error'])
def process_IN_CREATE(self, event):
if event.path in self.mount_file_dir:
return
self.logger.info("event: %s", event)
if not event.dir:
# record the timestamp of the time on IN_CREATE event
self.create_dict[event.pathname] = time.time()
#event.dir: True if the event was raised against a directory.
#event.name: filename
#event.pathname: pathname (str): Concatenation of 'path' and 'name'.
# we used to use IN_CREATE event, but the IN_CREATE event gets fired before the
# copy was done. Hence, IN_CLOSE_WRITE is the correct one to handle.
def process_IN_CLOSE_WRITE(self, event):
if event.path in self.mount_file_dir:
return
self.logger.info("event: %s", event)
self.logger.info("create_dict: %s", self.create_dict)
try:
del self.create_dict[event.pathname]
self.handle_created_file(event.dir, event.pathname, event.name)
except KeyError, e:
pass
#self.logger.warn("%s does not exist in create_dict", event.pathname)
#Uncomment the above warning when we fix CC-3830 for 2.1.1
def handle_created_file(self, dir, pathname, name):
if not dir:
self.logger.debug("PROCESS_IN_CLOSE_WRITE: %s, name: %s, pathname: %s ", dir, name, pathname)
if self.mmc.is_temp_file(name) :
#file created is a tmp file which will be modified and then moved back to the original filename.
#Easy Tag creates this when changing metadata of ogg files.
self.temp_files[pathname] = None
#file is being overwritten/replaced in GUI.
elif "goutputstream" in pathname:
self.temp_files[pathname] = None
elif self.mmc.is_audio_file(name):
if self.mmc.is_parent_directory(pathname, self.config.organize_directory):
#file was created in /srv/airtime/stor/organize. Need to process and move
#to /srv/airtime/stor/imported
file_md = self.md_manager.get_md_from_file(pathname)
playable = self.mmc.test_file_playability(pathname)
if file_md and playable:
self.mmc.organize_new_file(pathname, file_md)
else:
#move to problem_files
self.mmc.move_to_problem_dir(pathname)
else:
# only append to self.file_events if the file isn't going to be altered by organize_new_file(). If file is going
# to be altered by organize_new_file(), then process_IN_MOVED_TO event will handle appending it to self.file_events
is_recorded = self.mmc.is_parent_directory(pathname, self.config.recorded_directory)
self.file_events.append({'mode': self.config.MODE_CREATE, 'filepath': pathname, 'is_recorded_show': is_recorded})
def process_IN_MODIFY(self, event):
# if IN_MODIFY is followed by IN_CREATE, it's not true modify event
if not event.pathname in self.create_dict:
self.logger.info("process_IN_MODIFY: %s", event)
self.handle_modified_file(event.dir, event.pathname, event.name)
def handle_modified_file(self, dir, pathname, name):
# if /etc/mtab is modified
if pathname in self.mount_file:
self.handle_mount_change()
# update timestamp on create_dict for the entry with pathname as the key
if pathname in self.create_dict:
self.create_dict[pathname] = time.time()
if not dir and not self.mmc.is_parent_directory(pathname, self.config.organize_directory):
self.logger.info("Modified: %s", pathname)
if self.mmc.is_audio_file(name):
is_recorded = self.mmc.is_parent_directory(pathname, self.config.recorded_directory)
self.file_events.append({'filepath': pathname, 'mode': self.config.MODE_MODIFY, 'is_recorded_show': is_recorded})
# if change is detected on /etc/mtab, we check what mount(file system) was added/removed
# and act accordingly
def handle_mount_change(self):
self.logger.info("Mount change detected, handling changes...");
# take snapshot of mtab file and update currMtab and prevMtab
# move currMtab to prevMtab and create new currMtab
shutil.move(self.curr_mtab_file, self.prev_mtab_file)
# create the file
shutil.copy(self.mount_file, self.curr_mtab_file)
d = difflib.Differ()
curr_fh = open(self.curr_mtab_file, 'r')
prev_fh = open(self.prev_mtab_file, 'r')
diff = list(d.compare(prev_fh.readlines(), curr_fh.readlines()))
added_mount_points = []
removed_mount_points = []
for dir in diff:
info = dir.split(' ')
if info[0] == '+':
added_mount_points.append(info[2])
elif info[0] == '-':
removed_mount_points.append(info[2])
self.logger.info("added: %s", added_mount_points)
self.logger.info("removed: %s", removed_mount_points)
# send current mount information to Airtime
self.api_client.update_file_system_mount(added_mount_points, removed_mount_points);
def handle_watched_dir_missing(self, dir):
self.api_client.handle_watched_dir_missing(dir);
#if a file is moved somewhere, this callback is run. With details about
#where the file is being moved from. The corresponding process_IN_MOVED_TO
#callback is only called if the destination of the file is also in a watched
#directory.
def process_IN_MOVED_FROM(self, event):
#we don't care about files that have been moved from problem_directory
if event.path in self.config.problem_directory:
return
self.logger.info("process_IN_MOVED_FROM: %s", event)
if not event.dir:
if event.pathname in self.temp_files:
self.temp_files[event.cookie] = event.pathname
elif not self.mmc.is_parent_directory(event.pathname, self.config.organize_directory):
#we don't care about moved_from events from the organize dir.
if self.mmc.is_audio_file(event.name):
self.cookies_IN_MOVED_FROM[event.cookie] = (event, time.time())
else:
self.cookies_IN_MOVED_FROM[event.cookie] = (event, time.time())
def process_IN_MOVED_TO(self, event):
self.logger.info("process_IN_MOVED_TO: %s", event)
# if /etc/mtab is modified
filename = self.mount_file_dir + "/mtab"
if event.pathname in filename:
self.handle_mount_change()
if event.path in self.config.problem_directory:
return
if not event.dir:
if self.mmc.is_audio_file(event.name):
if event.cookie in self.temp_files:
self.file_events.append({'filepath': event.pathname, 'mode': self.config.MODE_MODIFY})
del self.temp_files[event.cookie]
elif event.cookie in self.cookies_IN_MOVED_FROM:
#file's original location was also in a watched directory
del self.cookies_IN_MOVED_FROM[event.cookie]
if self.mmc.is_parent_directory(event.pathname, self.config.organize_directory):
pathname = event.pathname
#file was created in /srv/airtime/stor/organize. Need to process and move
#to /srv/airtime/stor/imported
file_md = self.md_manager.get_md_from_file(pathname)
playable = self.mmc.test_file_playability(pathname)
if file_md and playable:
filepath = self.mmc.organize_new_file(pathname, file_md)
else:
#move to problem_files
self.mmc.move_to_problem_dir(pathname)
else:
filepath = event.pathname
if (filepath is not None):
self.file_events.append({'filepath': filepath, 'mode': self.config.MODE_MOVED})
else:
#file's original location is from outside an inotify watched dir.
pathname = event.pathname
if self.mmc.is_parent_directory(pathname, self.config.organize_directory):
#file was created in /srv/airtime/stor/organize. Need to process and move
#to /srv/airtime/stor/imported
file_md = self.md_manager.get_md_from_file(pathname)
playable = self.mmc.test_file_playability(pathname)
if file_md and playable:
self.mmc.organize_new_file(pathname, file_md)
else:
#move to problem_files
self.mmc.move_to_problem_dir(pathname)
else:
#show moved from unwatched folder into a watched folder. Do not "organize".
is_recorded = self.mmc.is_parent_directory(event.pathname, self.config.recorded_directory)
self.file_events.append({'mode': self.config.MODE_CREATE, 'filepath': event.pathname, 'is_recorded_show': is_recorded})
else:
#When we move a directory into a watched_dir, we only get a notification that the dir was created,
#and no additional information about files that came along with that directory.
#need to scan the entire directory for files.
if event.cookie in self.cookies_IN_MOVED_FROM:
del self.cookies_IN_MOVED_FROM[event.cookie]
mode = self.config.MODE_MOVED
else:
mode = self.config.MODE_CREATE
files = self.mmc.scan_dir_for_new_files(event.pathname)
if self.mmc.is_parent_directory(event.pathname, self.config.organize_directory):
for pathname in files:
#file was created in /srv/airtime/stor/organize. Need to process and move
#to /srv/airtime/stor/imported
file_md = self.md_manager.get_md_from_file(pathname)
playable = self.mmc.test_file_playability(pathname)
if file_md and playable:
self.mmc.organize_new_file(pathname, file_md)
#self.file_events.append({'mode': mode, 'filepath': filepath, 'is_recorded_show': False})
else:
#move to problem_files
self.mmc.move_to_problem_dir(pathname)
else:
for file in files:
self.file_events.append({'mode': mode, 'filepath': file, 'is_recorded_show': False})
def process_IN_DELETE(self, event):
if event.path in self.mount_file_dir:
return
self.logger.info("process_IN_DELETE: %s", event)
self.handle_removed_file(event.dir, event.pathname)
def handle_removed_file(self, dir, pathname):
self.logger.info("Deleting %s", pathname)
if not dir:
if self.mmc.is_audio_file(pathname):
if pathname in self.ignore_event:
self.logger.info("pathname in ignore event")
self.ignore_event.remove(pathname)
elif not self.mmc.is_parent_directory(pathname, self.config.organize_directory):
self.logger.info("deleting a file not in organize")
#we don't care if a file was deleted from the organize directory.
self.file_events.append({'filepath': pathname, 'mode': self.config.MODE_DELETE})
def process_default(self, event):
pass
def notifier_loop_callback(self, notifier):
if len(self.file_events) > 0:
for event in self.file_events:
self.multi_queue.put(event)
self.mmc.touch_index_file()
self.file_events = []
#yield to worker thread
time.sleep(0)
#use items() because we are going to be modifying this
#dictionary while iterating over it.
for k, pair in self.cookies_IN_MOVED_FROM.items():
event = pair[0]
timestamp = pair[1]
timestamp_now = time.time()
if timestamp_now - timestamp > 5:
#in_moved_from event didn't have a corresponding
#in_moved_to event in the last 5 seconds.
#This means the file was moved to outside of the
#watched directories. Let's handle this by deleting
#it from the Airtime directory.
del self.cookies_IN_MOVED_FROM[k]
self.handle_removed_file(False, event.pathname)
# we don't want create_dict grow infinitely
# this part is like a garbage collector
for k, t in self.create_dict.items():
now = time.time()
if now - t > 5:
# check if file exist
# When whole directory is copied to the organized dir,
# inotify doesn't fire IN_CLOSE_WRITE, hench we need special way of
# handling those cases. We are manully calling handle_created_file
# function.
if os.path.exists(k):
# check if file is open
try:
command = "lsof " + k
#f = os.popen(command)
f = Popen(command, shell=True, stdout=PIPE).stdout
except Exception, e:
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", traceback.format_exc())
continue
if not f.readlines():
self.logger.info("Handling file: %s", k)
self.handle_created_file(False, k, os.path.basename(k))
del self.create_dict[k]
else:
del self.create_dict[k]
#check for any events received from Airtime.
try:
notifier.connection.drain_events(timeout=0.1)
#avoid logging a bunch of timeout messages.
except socket.timeout:
pass
except Exception, e:
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", traceback.format_exc())
time.sleep(3)

View File

@ -1,27 +0,0 @@
# -*- coding: utf-8 -*-
import sys
from configobj import ConfigObj
class AirtimeMediaConfig:
MODE_CREATE = "create"
MODE_MODIFY = "modify"
MODE_MOVED = "moved"
MODE_DELETE = "delete"
MODE_DELETE_DIR = "delete_dir"
def __init__(self, logger):
# loading config file
try:
config = ConfigObj('/etc/airtime/airtime.conf')
self.cfg = config
except Exception, e:
logger.info('Error loading config: ', e)
sys.exit(1)
self.storage_directory = None

View File

@ -1,341 +0,0 @@
# -*- coding: utf-8 -*-
import os
import grp
import pwd
import logging
import stat
import subprocess
import traceback
from subprocess import Popen, PIPE
from airtimemetadata import AirtimeMetadata
import pyinotify
class MediaMonitorCommon:
timestamp_file = "/var/tmp/airtime/media-monitor/last_index"
supported_file_formats = ['mp3', 'ogg']
def __init__(self, airtime_config, wm=None):
self.logger = logging.getLogger()
self.config = airtime_config
self.md_manager = AirtimeMetadata()
self.wm = wm
def clean_dirty_file_paths(self, dirty_files):
""" clean dirty file paths by removing blanks and removing trailing/leading whitespace"""
return filter(lambda e: len(e) > 0, [ f.strip(" \n") for f in dirty_files ])
def find_command(self, directory, extra_arguments=""):
""" Builds a find command that respects supported_file_formats list
Note: Use single quotes to quote arguments """
ext_globs = [ "-iname '*.%s'" % ext for ext in self.supported_file_formats ]
find_glob = ' -o '.join(ext_globs)
return "find '%s' %s %s" % (directory, find_glob, extra_arguments)
def is_parent_directory(self, filepath, directory):
filepath = os.path.normpath(filepath)
directory = os.path.normpath(directory)
return (directory == filepath[0:len(directory)])
def is_temp_file(self, filename):
info = filename.split(".")
# if file doesn't have any extension, info[-2] throws exception
# Hence, checking length of info before we do anything
if(len(info) >= 2):
return info[-2].lower() in self.supported_file_formats
else:
return False
def is_audio_file(self, filename):
info = filename.split(".")
if len(info) < 2: return False # handle cases like filename="mp3"
return info[-1].lower() in self.supported_file_formats
#check if file is readable by "nobody"
def is_user_readable(self, filepath, euid='nobody', egid='nogroup'):
f = None
try:
uid = pwd.getpwnam(euid)[2]
gid = grp.getgrnam(egid)[2]
#drop root permissions and become "nobody"
os.setegid(gid)
os.seteuid(uid)
f = open(filepath)
readable = True
except IOError:
self.logger.warn("File does not have correct permissions: '%s'", filepath)
readable = False
except Exception, e:
self.logger.error("Unexpected exception thrown: %s", e)
readable = False
self.logger.error("traceback: %s", traceback.format_exc())
finally:
#reset effective user to root
if f: f.close()
os.seteuid(0)
os.setegid(0)
return readable
# the function only changes the permission if its not readable by www-data
def is_readable(self, item, is_dir):
try:
return self.is_user_readable(item, 'www-data', 'www-data')
except Exception:
self.logger.warn(u"Failed to check owner/group/permissions for %s", item)
return False
def make_file_readable(self, pathname, is_dir):
if is_dir:
#set to 755
os.chmod(pathname, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
else:
#set to 644
os.chmod(pathname, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
def make_readable(self, pathname):
"""
Should only call this function if is_readable() returns False. This function
will attempt to make the file world readable by modifying the file's permission's
as well as the file's parent directory permissions. We should only call this function
on files in Airtime's stor directory, not watched directories!
Returns True if we were able to make the file world readable. False otherwise.
"""
original_file = pathname
is_dir = False
try:
while not self.is_readable(original_file, is_dir):
#Not readable. Make appropriate permission changes.
self.make_file_readable(pathname, is_dir)
dirname = os.path.dirname(pathname)
if dirname == pathname:
#most likey reason for this is that we've hit '/'. Avoid infinite loop by terminating loop
raise Exception()
else:
pathname = dirname
is_dir = True
except Exception:
#something went wrong while we were trying to make world readable.
return False
return True
#checks if path is a directory, and if it doesnt exist, then creates it.
#Otherwise prints error to log file.
def ensure_is_dir(self, directory):
try:
omask = os.umask(0)
if not os.path.exists(directory):
os.makedirs(directory, 02777)
self.wm.add_watch(directory, pyinotify.ALL_EVENTS, rec=True, auto_add=True)
elif not os.path.isdir(directory):
#path exists but it is a file not a directory!
self.logger.error(u"path %s exists, but it is not a directory!!!", directory)
finally:
os.umask(omask)
#moves file from source to dest but also recursively removes the
#the source file's parent directories if they are now empty.
def move_file(self, source, dest):
try:
omask = os.umask(0)
os.rename(source, dest)
except Exception, e:
self.logger.error("failed to move file. %s", e)
self.logger.error("traceback: %s", traceback.format_exc())
finally:
os.umask(omask)
dir = os.path.dirname(source)
self.cleanup_empty_dirs(dir)
#keep moving up the file hierarchy and deleting parent
#directories until we hit a non-empty directory, or we
#hit the organize dir.
def cleanup_empty_dirs(self, dir):
if os.path.normpath(dir) != self.config.organize_directory:
if len(os.listdir(dir)) == 0:
try:
os.rmdir(dir)
self.cleanup_empty_dirs(os.path.dirname(dir))
except Exception:
#non-critical exception because we probably tried to delete a non-empty dir.
#Don't need to log this, let's just "return"
pass
#checks if path exists already in stor. If the path exists and the md5s are the
#same just overwrite.
def create_unique_filename(self, filepath, old_filepath):
try:
if(os.path.exists(filepath)):
self.logger.info("Path %s exists", filepath)
self.logger.info("Checking if md5s are the same.")
md5_fp = self.md_manager.get_md5(filepath)
md5_ofp = self.md_manager.get_md5(old_filepath)
if(md5_fp == md5_ofp):
self.logger.info("Md5s are the same, moving to same filepath.")
return filepath
self.logger.info("Md5s aren't the same, appending to filepath.")
file_dir = os.path.dirname(filepath)
filename = os.path.basename(filepath).split(".")[0]
#will be in the format .ext
file_ext = os.path.splitext(filepath)[1]
i = 1;
while(True):
new_filepath = '%s/%s(%s)%s' % (file_dir, filename, i, file_ext)
self.logger.error("Trying %s", new_filepath)
if(os.path.exists(new_filepath)):
i = i + 1;
else:
filepath = new_filepath
break
except Exception, e:
self.logger.error("Exception %s", e)
return filepath
#create path in /srv/airtime/stor/imported/[song-metadata]
def create_file_path(self, original_path, orig_md):
storage_directory = self.config.storage_directory
try:
#will be in the format .ext
file_ext = os.path.splitext(original_path)[1].lower()
path_md = ['MDATA_KEY_TITLE', 'MDATA_KEY_CREATOR', 'MDATA_KEY_SOURCE', 'MDATA_KEY_TRACKNUMBER', 'MDATA_KEY_BITRATE']
md = {}
for m in path_md:
if m not in orig_md:
md[m] = u'unknown'
else:
#get rid of any "/" which will interfere with the filepath.
if isinstance(orig_md[m], basestring):
md[m] = orig_md[m].replace("/", "-")
else:
md[m] = orig_md[m]
if 'MDATA_KEY_TRACKNUMBER' in orig_md:
#make sure all track numbers are at least 2 digits long in the filepath.
md['MDATA_KEY_TRACKNUMBER'] = "%02d" % (int(md['MDATA_KEY_TRACKNUMBER']))
#format bitrate as 128kbps
md['MDATA_KEY_BITRATE'] = str(md['MDATA_KEY_BITRATE'] / 1000) + "kbps"
filepath = None
#file is recorded by Airtime
#/srv/airtime/stor/recorded/year/month/year-month-day-time-showname-bitrate.ext
if(md['MDATA_KEY_CREATOR'] == u"Airtime Show Recorder"):
#yyyy-mm-dd-hh-MM-ss
y = orig_md['MDATA_KEY_YEAR'].split("-")
filepath = u'%s/%s/%s/%s/%s-%s-%s%s' % (storage_directory, "recorded", y[0], y[1], orig_md['MDATA_KEY_YEAR'], md['MDATA_KEY_TITLE'], md['MDATA_KEY_BITRATE'], file_ext)
#"Show-Title-2011-03-28-17:15:00"
title = md['MDATA_KEY_TITLE'].split("-")
show_hour = title[0]
show_min = title[1]
show_sec = title[2]
show_name = '-'.join(title[3:])
new_md = {}
new_md['MDATA_KEY_FILEPATH'] = os.path.normpath(original_path)
new_md['MDATA_KEY_TITLE'] = '%s-%s-%s:%s:%s' % (show_name, orig_md['MDATA_KEY_YEAR'], show_hour, show_min, show_sec)
self.md_manager.save_md_to_file(new_md)
elif(md['MDATA_KEY_TRACKNUMBER'] == u'unknown'):
filepath = u'%s/%s/%s/%s/%s-%s%s' % (storage_directory, "imported", md['MDATA_KEY_CREATOR'], md['MDATA_KEY_SOURCE'], md['MDATA_KEY_TITLE'], md['MDATA_KEY_BITRATE'], file_ext)
else:
filepath = u'%s/%s/%s/%s/%s-%s-%s%s' % (storage_directory, "imported", md['MDATA_KEY_CREATOR'], md['MDATA_KEY_SOURCE'], md['MDATA_KEY_TRACKNUMBER'], md['MDATA_KEY_TITLE'], md['MDATA_KEY_BITRATE'], file_ext)
filepath = self.create_unique_filename(filepath, original_path)
self.logger.info('Unique filepath: %s', filepath)
self.ensure_is_dir(os.path.dirname(filepath))
except Exception, e:
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", traceback.format_exc())
return filepath
def exec_command(self, command):
p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
self.logger.warn("command \n%s\n return with a non-zero return value", command)
self.logger.error(stderr)
try:
"""
File name charset encoding is UTF-8.
"""
stdout = stdout.decode("UTF-8")
except Exception:
stdout = None
self.logger.error("Could not decode %s using UTF-8" % stdout)
return stdout
def scan_dir_for_new_files(self, dir):
command = self.find_command(directory=dir, extra_arguments="-type f -readable")
self.logger.debug(command)
stdout = self.exec_command(command)
if stdout is None:
return []
else:
return stdout.splitlines()
def touch_index_file(self):
dirname = os.path.dirname(self.timestamp_file)
try:
if not os.path.exists(dirname):
os.makedirs(dirname)
open(self.timestamp_file, "w")
except Exception, e:
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", traceback.format_exc())
def organize_new_file(self, pathname, file_md):
self.logger.info("Organizing new file: %s", pathname)
filepath = self.create_file_path(pathname, file_md)
self.logger.debug(u"Moving from %s to %s", pathname, filepath)
self.move_file(pathname, filepath)
self.make_readable(filepath)
return filepath
def test_file_playability(self, pathname):
#when there is an single apostrophe inside of a string quoted by apostrophes, we can only escape it by replace that apostrophe
#with '\''. This breaks the string into two, and inserts an escaped single quote in between them.
#We run the command as pypo because otherwise the target file is opened with write permissions, and this causes an inotify ON_CLOSE_WRITE event
#to be fired :/
command = "sudo -u pypo airtime-liquidsoap -c 'output.dummy(audio_to_stereo(single(\"%s\")))' > /dev/null 2>&1" % pathname.replace("'", "'\\''")
return_code = subprocess.call(command, shell=True)
if return_code != 0:
#print pathname for py-interpreter.log
print pathname
return (return_code == 0)
def move_to_problem_dir(self, source):
dest = os.path.join(self.config.problem_directory, os.path.basename(source))
try:
omask = os.umask(0)
os.rename(source, dest)
except Exception, e:
self.logger.error("failed to move file. %s", e)
self.logger.error("traceback: %s", traceback.format_exc())
finally:
os.umask(omask)

View File

@ -1,142 +0,0 @@
from subprocess import Popen, PIPE
import re
import os
import sys
import shutil
import tempfile
import logging
logger = logging.getLogger()
def get_process_output(command):
"""
Run subprocess and return stdout
"""
#logger.debug(command)
p = Popen(command, shell=True, stdout=PIPE)
return p.communicate()[0].strip()
def run_process(command):
"""
Run subprocess and return "return code"
"""
p = Popen(command, shell=True)
return os.waitpid(p.pid, 0)[1]
def get_mime_type(file_path):
"""
Attempts to get the mime type but will return prematurely if the process
takes longer than 5 seconds. Note that this function should only be called
for files which do not have a mp3/ogg/flac extension.
"""
return get_process_output("timeout 5 file -b --mime-type %s" % file_path)
def duplicate_file(file_path):
"""
Makes a duplicate of the file and returns the path of this duplicate file.
"""
fsrc = open(file_path, 'r')
fdst = tempfile.NamedTemporaryFile(delete=False)
#logger.info("Copying %s to %s" % (file_path, fdst.name))
shutil.copyfileobj(fsrc, fdst)
fsrc.close()
fdst.close()
return fdst.name
def get_file_type(file_path):
file_type = None
if re.search(r'mp3$', file_path, re.IGNORECASE):
file_type = 'mp3'
elif re.search(r'og(g|a)$', file_path, re.IGNORECASE):
file_type = 'vorbis'
elif re.search(r'flac$', file_path, re.IGNORECASE):
file_type = 'flac'
elif re.search(r'(mp4|m4a)$', file_path, re.IGNORECASE):
file_type = 'mp4'
else:
mime_type = get_mime_type(file_path) == "audio/mpeg"
if 'mpeg' in mime_type:
file_type = 'mp3'
elif 'ogg' in mime_type or "oga" in mime_type:
file_type = 'vorbis'
elif 'flac' in mime_type:
file_type = 'flac'
elif 'mp4' in mime_type or "m4a" in mime_type:
file_type = 'mp4'
return file_type
def calculate_replay_gain(file_path):
"""
This function accepts files of type mp3/ogg/flac and returns a calculated
ReplayGain value in dB. If the value cannot be calculated for some reason,
then we default to 0 (Unity Gain).
http://wiki.hydrogenaudio.org/index.php?title=ReplayGain_1.0_specification
"""
try:
"""
Making a duplicate is required because the ReplayGain extraction
utilities we use make unwanted modifications to the file.
"""
search = None
temp_file_path = duplicate_file(file_path)
file_type = get_file_type(file_path)
if file_type:
if file_type == 'mp3':
if run_process("which mp3gain > /dev/null") == 0:
out = get_process_output('mp3gain -q "%s" 2> /dev/null' % temp_file_path)
search = re.search(r'Recommended "Track" dB change: (.*)', out)
else:
logger.warn("mp3gain not found")
elif file_type == 'vorbis':
if run_process("which vorbisgain > /dev/null && which ogginfo > /dev/null") == 0:
run_process('vorbisgain -q -f "%s" 2>/dev/null >/dev/null' % temp_file_path)
out = get_process_output('ogginfo "%s"' % temp_file_path)
search = re.search(r'REPLAYGAIN_TRACK_GAIN=(.*) dB', out)
else:
logger.warn("vorbisgain/ogginfo not found")
elif file_type == 'flac':
if run_process("which metaflac > /dev/null") == 0:
out = get_process_output('metaflac --show-tag=REPLAYGAIN_TRACK_GAIN "%s"' % temp_file_path)
search = re.search(r'REPLAYGAIN_TRACK_GAIN=(.*) dB', out)
else:
logger.warn("metaflac not found")
elif file_type == 'mp4':
if run_process("which aacgain > /dev/null") == 0:
out = get_process_output('aacgain -q "%s" 2> /dev/null' % temp_file_path)
search = re.search(r'Recommended "Track" dB change: (.*)', out)
else:
logger.warn("aacgain not found")
else:
pass
#no longer need the temp file, simply remove it.
os.remove(temp_file_path)
except Exception, e:
logger.error(str(e))
replay_gain = 0
if search:
matches = search.groups()
if len(matches) == 1:
replay_gain = matches[0]
return replay_gain
# Example of running from command line:
# python replay_gain.py /path/to/filename.mp3
if __name__ == "__main__":
print calculate_replay_gain(sys.argv[1])

View File

@ -1,22 +0,0 @@
# -*- coding: utf-8 -*-
import traceback
import os
class MediaMonitorWorkerProcess:
def __init__(self, config, mmc):
self.config = config
self.mmc = mmc
#this function is run in its own process, and continuously
#checks the queue for any new file events.
def process_file_events(self, queue, notifier):
while True:
try:
event = queue.get()
notifier.logger.info("received event %s", event)
notifier.update_airtime(event)
except Exception, e:
notifier.logger.error(e)
notifier.logger.error("traceback: %s", traceback.format_exc())

View File

@ -1,142 +0,0 @@
# -*- coding: utf-8 -*-
import time
import logging.config
import sys
import os
import traceback
import locale
from configobj import ConfigObj
from api_clients import api_client as apc
from std_err_override import LogWriter
from multiprocessing import Queue as mpQueue
from threading import Thread
from pyinotify import WatchManager
from airtimefilemonitor.airtimenotifier import AirtimeNotifier
from airtimefilemonitor.mediamonitorcommon import MediaMonitorCommon
from airtimefilemonitor.airtimeprocessevent import AirtimeProcessEvent
from airtimefilemonitor.mediaconfig import AirtimeMediaConfig
from airtimefilemonitor.workerprocess import MediaMonitorWorkerProcess
from airtimefilemonitor.airtimemediamonitorbootstrap import AirtimeMediaMonitorBootstrap
def configure_locale():
logger.debug("Before %s", locale.nl_langinfo(locale.CODESET))
current_locale = locale.getlocale()
if current_locale[1] is None:
logger.debug("No locale currently set. Attempting to get default locale.")
default_locale = locale.getdefaultlocale()
if default_locale[1] is None:
logger.debug("No default locale exists. Let's try loading from /etc/default/locale")
if os.path.exists("/etc/default/locale"):
config = ConfigObj('/etc/default/locale')
lang = config.get('LANG')
new_locale = lang
else:
logger.error("/etc/default/locale could not be found! Please run 'sudo update-locale' from command-line.")
sys.exit(1)
else:
new_locale = default_locale
logger.info("New locale set to: %s", locale.setlocale(locale.LC_ALL, new_locale))
reload(sys)
sys.setdefaultencoding("UTF-8")
current_locale_encoding = locale.getlocale()[1].lower()
logger.debug("sys default encoding %s", sys.getdefaultencoding())
logger.debug("After %s", locale.nl_langinfo(locale.CODESET))
if current_locale_encoding not in ['utf-8', 'utf8']:
logger.error("Need a UTF-8 locale. Currently '%s'. Exiting..." % current_locale_encoding)
sys.exit(1)
# configure logging
try:
logging.config.fileConfig("%s/logging.cfg" % os.path.dirname(os.path.realpath(__file__)))
#need to wait for Python 2.7 for this..
#logging.captureWarnings(True)
logger = logging.getLogger()
LogWriter.override_std_err(logger)
except Exception, e:
print 'Error configuring logging: ', e
sys.exit(1)
logger.info("\n\n*** Media Monitor bootup ***\n\n")
try:
configure_locale()
config = AirtimeMediaConfig(logger)
api_client = apc.AirtimeApiClient()
api_client.register_component("media-monitor")
logger.info("Setting up monitor")
response = None
while response is None:
response = api_client.setup_media_monitor()
time.sleep(5)
storage_directory = response["stor"]
watched_dirs = response["watched_dirs"]
logger.info("Storage Directory is: %s", storage_directory)
config.storage_directory = os.path.normpath(storage_directory)
config.imported_directory = os.path.normpath(os.path.join(storage_directory, 'imported'))
config.organize_directory = os.path.normpath(os.path.join(storage_directory, 'organize'))
config.recorded_directory = os.path.normpath(os.path.join(storage_directory, 'recorded'))
config.problem_directory = os.path.normpath(os.path.join(storage_directory, 'problem_files'))
dirs = [config.imported_directory, config.organize_directory, config.recorded_directory, config.problem_directory]
for d in dirs:
if not os.path.exists(d):
os.makedirs(d, 02775)
multi_queue = mpQueue()
logger.info("Initializing event processor")
wm = WatchManager()
mmc = MediaMonitorCommon(config, wm=wm)
pe = AirtimeProcessEvent(queue=multi_queue, airtime_config=config, wm=wm, mmc=mmc, api_client=api_client)
bootstrap = AirtimeMediaMonitorBootstrap(logger, pe, api_client, mmc, wm, config)
bootstrap.scan()
notifier = AirtimeNotifier(wm, pe, read_freq=0, timeout=0, airtime_config=config, api_client=api_client, bootstrap=bootstrap, mmc=mmc)
notifier.coalesce_events()
#create 5 worker threads
wp = MediaMonitorWorkerProcess(config, mmc)
for i in range(5):
threadName = "Thread #%d" % i
t = Thread(target=wp.process_file_events, name=threadName, args=(multi_queue, notifier))
t.start()
wdd = notifier.watch_directory(storage_directory)
logger.info("Added watch to %s", storage_directory)
logger.info("wdd result %s", wdd[storage_directory])
for dir in watched_dirs:
wdd = notifier.watch_directory(dir)
logger.info("Added watch to %s", dir)
logger.info("wdd result %s", wdd[dir])
notifier.loop(callback=pe.notifier_loop_callback)
except KeyboardInterrupt:
notifier.stop()
logger.info("Keyboard Interrupt")
except Exception, e:
logger.error('Exception: %s', e)
logger.error("traceback: %s", traceback.format_exc())

View File

@ -1 +0,0 @@

View File

@ -1,48 +0,0 @@
# -*- coding: utf-8 -*-
import re
from media.saas.launcher import setup_logger, setup_global, MM2
from media.saas.airtimeinstance import AirtimeInstance
from os.path import isdir, join, abspath, exists, dirname
from os import listdir
def list_dirs(d): return (x for x in listdir(d) if isdir(join(d,x)))
def filter_instance(d): return bool(re.match('.+\d+$',d))
def get_name(p): return re.match('.+/(\d+)$',p).group(1)
def filter_instances(l): return (x for x in l if filter_instance(x))
def autoscan_instances(main_cfg):
root = main_cfg['instance_root']
instances = []
for instance_machine in list_dirs(root):
instance_machine = join(root, instance_machine)
for instance_root in filter_instances(list_dirs(instance_machine)):
full_path = abspath(join(instance_machine,instance_root))
ai = AirtimeInstance.root_make(get_name(full_path), full_path)
instances.append(ai)
return instances
def verify_exists(p):
if not exists(p): raise Exception("%s must exist" % p)
def main(main_cfg):
log_config, log_path = main_cfg['log_config'], main_cfg['log_path']
verify_exists(log_config)
log = setup_logger(log_config, log_path)
setup_global(log)
for instance in autoscan_instances(main_cfg):
print("Launching instance: %s" % str(instance))
#MM2(instance).start()
print("Launched all instances")
if __name__ == '__main__':
pwd = dirname(__file__)
default = {
'log_path' : join(pwd, 'test.log'), # config for log
'log_config' : join(pwd, 'configs/logging.cfg'), # where to log
# root dir of all instances
'instance_root' : '/mnt/airtimepro/instances'
}
main(default)

View File

@ -1,32 +0,0 @@
[loggers]
keys= root,notifier,metadata
[handlers]
keys=fileOutHandler
[formatters]
keys=simpleFormatter
[logger_root]
level=DEBUG
handlers=fileOutHandler
[logger_notifier]
level=DEBUG
handlers=fileOutHandler
qualname=notifier
[logger_metadata]
level=DEBUG
handlers=fileOutHandler
qualname=metadata
[handler_fileOutHandler]
class=logging.handlers.RotatingFileHandler
level=DEBUG
formatter=simpleFormatter
args=("/var/log/airtime/media-monitor/media-monitor.log", 'a', 10000000, 5,)
[formatter_simpleFormatter]
format=%(asctime)s %(levelname)s - [%(threadName)s] [%(filename)s : %(funcName)s()] : LINE %(lineno)d - %(message)s
datefmt=
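# Illustrative note (not part of the original file): this config is what
# mm.py loads via logging.config.fileConfig("logging.cfg"), so the handler
# above rotates media-monitor.log at roughly 10 MB, keeping 5 backups.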

View File

@ -1,158 +0,0 @@
# -*- coding: utf-8 -*-
import process as md
import re
from os.path import normpath
from ..monitor.pure import format_length, file_md5, is_airtime_recorded, \
no_extension_basename
defs_loaded = False
MAX_SIGNED_INT = 2**31-1
def is_defs_loaded():
global defs_loaded
return defs_loaded
def load_definitions():
with md.metadata('MDATA_KEY_DURATION') as t:
t.default(u'0.0')
t.depends('length')
t.translate(lambda k: format_length(k['length']))
with md.metadata('MDATA_KEY_CUE_IN') as t:
t.default(u'0.0')
t.depends('cuein')
t.translate(lambda k: format_length(k['cuein']))
with md.metadata('MDATA_KEY_CUE_OUT') as t:
t.default(u'0.0')
t.depends('cueout')
t.translate(lambda k: format_length(k['cueout']))
with md.metadata('MDATA_KEY_MIME') as t:
t.default(u'')
t.depends('mime')
# Is this necessary?
t.translate(lambda k: k['mime'].replace('audio/vorbis','audio/ogg'))
with md.metadata('MDATA_KEY_BITRATE') as t:
t.default(u'')
t.depends('bitrate')
t.translate(lambda k: k['bitrate'])
t.max_value(MAX_SIGNED_INT)
with md.metadata('MDATA_KEY_SAMPLERATE') as t:
t.default(u'0')
t.depends('sample_rate')
t.translate(lambda k: k['sample_rate'])
t.max_value(MAX_SIGNED_INT)
with md.metadata('MDATA_KEY_FTYPE') as t:
t.depends('ftype') # i don't think this field even exists
t.default(u'audioclip')
t.translate(lambda k: k['ftype']) # but just in case
with md.metadata("MDATA_KEY_CREATOR") as t:
t.depends("artist")
# A little kludge to make sure that we have some value for when we parse
# MDATA_KEY_TITLE
t.default(u"")
t.max_length(512)
with md.metadata("MDATA_KEY_SOURCE") as t:
t.depends("album")
t.max_length(512)
with md.metadata("MDATA_KEY_GENRE") as t:
t.depends("genre")
t.max_length(64)
with md.metadata("MDATA_KEY_MOOD") as t:
t.depends("mood")
t.max_length(64)
with md.metadata("MDATA_KEY_TRACKNUMBER") as t:
t.depends("tracknumber")
t.max_value(MAX_SIGNED_INT)
with md.metadata("MDATA_KEY_BPM") as t:
t.depends("bpm")
t.max_value(MAX_SIGNED_INT)
with md.metadata("MDATA_KEY_LABEL") as t:
t.depends("organization")
t.max_length(512)
with md.metadata("MDATA_KEY_COMPOSER") as t:
t.depends("composer")
t.max_length(512)
with md.metadata("MDATA_KEY_ENCODER") as t:
t.depends("encodedby")
t.max_length(512)
with md.metadata("MDATA_KEY_CONDUCTOR") as t:
t.depends("conductor")
t.max_length(512)
with md.metadata("MDATA_KEY_YEAR") as t:
t.depends("date")
t.max_length(16)
with md.metadata("MDATA_KEY_URL") as t:
t.depends("website")
with md.metadata("MDATA_KEY_ISRC") as t:
t.depends("isrc")
t.max_length(512)
with md.metadata("MDATA_KEY_COPYRIGHT") as t:
t.depends("copyright")
t.max_length(512)
with md.metadata("MDATA_KEY_ORIGINAL_PATH") as t:
t.depends('path')
t.translate(lambda k: unicode(normpath(k['path'])))
with md.metadata("MDATA_KEY_MD5") as t:
t.depends('path')
t.optional(False)
t.translate(lambda k: file_md5(k['path'], max_length=100))
# owner is handled differently (by events.py)
# MDATA_KEY_TITLE is the annoying special case b/c we sometimes read it
# from file name
# must handle 3 cases:
# 1. regular case (not recorded + title is present)
# 2. title is absent (read from file)
# 3. recorded file
def tr_title(k):
#unicode_unknown = u"unknown"
new_title = u""
if is_airtime_recorded(k) or k['title'] != u"":
new_title = k['title']
else:
default_title = no_extension_basename(k['path'])
default_title = re.sub(r'__\d+\.',u'.', default_title)
# format is: track_number-title-123kbps.mp3
m = re.match(".+?-(?P<title>.+)-(\d+kbps|unknown)$", default_title)
if m: new_title = m.group('title')
else: new_title = re.sub(r'-\d+kbps$', u'', default_title)
return new_title
with md.metadata('MDATA_KEY_TITLE') as t:
# Need to know MDATA_KEY_CREATOR to know if show was recorded. Value is
# defaulted to "" from definitions above
t.depends('title','MDATA_KEY_CREATOR','path')
t.optional(False)
t.translate(tr_title)
t.max_length(512)
with md.metadata('MDATA_KEY_LABEL') as t:
t.depends('label')
t.max_length(512)
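# Illustrative sketch (not part of the original module): every `with
# md.metadata(...)` block above registers one MetadataElement with the
# global reader. A hypothetical extra field would follow the same pattern:
#   with md.metadata('MDATA_KEY_EXAMPLE') as t:  # hypothetical key
#       t.depends('example')                     # hypothetical mutagen tag
#       t.max_length(64)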

View File

@ -1,237 +0,0 @@
# -*- coding: utf-8 -*-
from contextlib import contextmanager
from ..monitor.pure import truncate_to_value, truncate_to_length, toposort
from os.path import normpath
from ..monitor.exceptions import BadSongFile
from ..monitor.log import Loggable
from ..monitor import pure as mmp
from collections import namedtuple
import mutagen
import subprocess
import json
import logging
class FakeMutagen(dict):
"""
Need this fake mutagen object so that airtime_special functions
return a proper default value instead of throwing an exception for
files that mutagen doesn't recognize
"""
FakeInfo = namedtuple('FakeInfo','length bitrate')
def __init__(self,path):
self.path = path
self.mime = ['audio/wav']
self.info = FakeMutagen.FakeInfo(0.0, '')
dict.__init__(self)
def set_length(self,l):
old_bitrate = self.info.bitrate
self.info = FakeMutagen.FakeInfo(l, old_bitrate)
class MetadataAbsent(Exception):
def __init__(self, name): self.name = name
def __str__(self): return "Could not obtain element '%s'" % self.name
class MetadataElement(Loggable):
def __init__(self,name):
self.name = name
# "Sane" defaults
self.__deps = set()
self.__normalizer = lambda x: x
self.__optional = True
self.__default = None
self.__is_normalized = lambda _ : True
self.__max_length = -1
self.__max_value = -1
self.__translator = None
def max_length(self,l):
self.__max_length = l
def max_value(self,v):
self.__max_value = v
def optional(self, setting):
self.__optional = setting
def is_optional(self):
return self.__optional
def depends(self, *deps):
self.__deps = set(deps)
def dependencies(self):
return self.__deps
def translate(self, f):
self.__translator = f
def is_normalized(self, f):
self.__is_normalized = f
def normalize(self, f):
self.__normalizer = f
def default(self,v):
self.__default = v
def get_default(self):
if hasattr(self.__default, '__call__'): return self.__default()
else: return self.__default
def has_default(self):
return self.__default is not None
def path(self):
return self.__path
def __slice_deps(self, d):
"""
returns a dictionary of all the key value pairs in d that are also
present in self.__deps
"""
return dict( (k,v) for k,v in d.iteritems() if k in self.__deps)
def __str__(self):
return "%s(%s)" % (self.name, ' '.join(list(self.__deps)))
def read_value(self, path, original, running={}):
# If the value is already present we only check whether it's
# normalized, and normalize it if it isn't
if self.name in original:
v = original[self.name]
if self.__is_normalized(v): return v
else: return self.__normalizer(v)
# We slice out only the dependencies that are required for the metadata
# element.
dep_slice_orig = self.__slice_deps(original)
dep_slice_running = self.__slice_deps(running)
# TODO : remove this later
dep_slice_special = self.__slice_deps({'path' : path})
# We combine all required dependencies into a single dictionary
# that we will pass to the translator
full_deps = dict( dep_slice_orig.items()
+ dep_slice_running.items()
+ dep_slice_special.items())
# check if any dependencies are absent
# note: there is no point checking the case that len(full_deps) >
# len(self.__deps) because we make sure to "slice out" any superfluous
# dependencies above.
if len(full_deps) != len(self.dependencies()) or \
len(self.dependencies()) == 0:
# If we have a default value then use that. Otherwise throw an
# exception
if self.has_default(): return self.get_default()
else: raise MetadataAbsent(self.name)
# We have all dependencies. Now for the actual parsing
def def_translate(dep):
def wrap(k):
e = [ x for x in dep ][0]
return k[e]
return wrap
# Only case where we can select a default translator
if self.__translator is None:
self.translate(def_translate(self.dependencies()))
if len(self.dependencies()) > 2: # dependencies include themselves
self.logger.info("Ignoring some dependencies in translate %s"
% self.name)
self.logger.info(self.dependencies())
r = self.__normalizer( self.__translator(full_deps) )
if self.__max_length != -1:
r = truncate_to_length(r, self.__max_length)
if self.__max_value != -1:
try: r = truncate_to_value(r, self.__max_value)
except ValueError, e: r = ''
return r
def normalize_mutagen(path):
"""
Consumes a path and reads the metadata using mutagen. normalizes some of
the metadata that isn't read through the mutagen hash
"""
if not mmp.file_playable(path): raise BadSongFile(path)
try : m = mutagen.File(path, easy=True)
except Exception : raise BadSongFile(path)
if m is None: m = FakeMutagen(path)
try:
if mmp.extension(path) == 'wav':
m.set_length(mmp.read_wave_duration(path))
except Exception: raise BadSongFile(path)
md = {}
for k,v in m.iteritems():
if type(v) is list:
if len(v) > 0: md[k] = v[0]
else: md[k] = v
# populate special metadata values
md['length'] = getattr(m.info, 'length', 0.0)
md['bitrate'] = getattr(m.info, 'bitrate', u'')
md['sample_rate'] = getattr(m.info, 'sample_rate', 0)
md['mime'] = m.mime[0] if len(m.mime) > 0 else u''
md['path'] = normpath(path)
# silence detect(set default cue in and out)
#try:
#command = ['silan', '-b', '-f', 'JSON', md['path']]
#proc = subprocess.Popen(command, stdout=subprocess.PIPE)
#out = proc.communicate()[0].strip('\r\n')
#info = json.loads(out)
#md['cuein'] = info['sound'][0][0]
#md['cueout'] = info['sound'][0][1]
#except Exception:
#self.logger.debug('silan is missing')
if 'title' not in md: md['title'] = u''
return md
class OverwriteMetadataElement(Exception):
def __init__(self, m): self.m = m
def __str__(self): return "Trying to overwrite: %s" % self.m
class MetadataReader(object):
def __init__(self):
self.clear()
def register_metadata(self,m):
if m in self.__mdata_name_map:
raise OverwriteMetadataElement(m)
self.__mdata_name_map[m.name] = m
d = dict( (name,m.dependencies()) for name,m in
self.__mdata_name_map.iteritems() )
new_list = list( toposort(d) )
self.__metadata = [ self.__mdata_name_map[name] for name in new_list
if name in self.__mdata_name_map]
def clear(self):
self.__mdata_name_map = {}
self.__metadata = []
def read(self, path, muta_hash):
normalized_metadata = {}
for mdata in self.__metadata:
try:
normalized_metadata[mdata.name] = mdata.read_value(
path, muta_hash, normalized_metadata)
except MetadataAbsent:
if not mdata.is_optional(): raise
return normalized_metadata
def read_mutagen(self, path):
return self.read(path, normalize_mutagen(path))
global_reader = MetadataReader()
@contextmanager
def metadata(name):
t = MetadataElement(name)
yield t
global_reader.register_metadata(t)
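# Illustrative usage sketch (not part of the original module; the path is
# hypothetical). read_mutagen funnels a file's tags through every
# registered MetadataElement in dependency (topological) order:
#   md = global_reader.read_mutagen(u'/srv/music/test.mp3')
#   md['MDATA_KEY_TITLE']  # normalized and truncated to its max_length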

View File

@ -1,215 +0,0 @@
# -*- coding: utf-8 -*-
from kombu.messaging import Exchange, Queue, Consumer
from kombu.connection import BrokerConnection
from kombu.simple import SimpleQueue
from os.path import normpath
import json
import os
import copy
import time
from exceptions import BadSongFile, InvalidMetadataElement, DirectoryIsNotListed
from metadata import Metadata
from log import Loggable
from syncdb import AirtimeDB
from bootstrap import Bootstrapper
from ..saas.thread import apc, user
class AirtimeNotifier(Loggable):
"""
AirtimeNotifier is responsible for intercepting RabbitMQ messages and
feeding them to the event_handler object it was initialized with. The only
thing it does to the messages is parse them from json
"""
def __init__(self, cfg, message_receiver):
self.cfg = cfg
self.handler = message_receiver
while not self.init_rabbit_mq():
self.logger.error("Error connecting to RabbitMQ Server. Trying again in few seconds")
time.sleep(5)
def init_rabbit_mq(self):
try:
self.logger.info("Initializing RabbitMQ message consumer...")
schedule_exchange = Exchange("airtime-media-monitor", "direct",
durable=True, auto_delete=True)
schedule_queue = Queue("media-monitor", exchange=schedule_exchange,
key="filesystem")
self.connection = BrokerConnection(self.cfg["rabbitmq"]["host"],
self.cfg["rabbitmq"]["user"], self.cfg["rabbitmq"]["password"],
self.cfg["rabbitmq"]["vhost"])
channel = self.connection.channel()
self.simple_queue = SimpleQueue(channel, schedule_queue)
self.logger.info("Initialized RabbitMQ consumer.")
except Exception as e:
self.logger.info("Failed to initialize RabbitMQ consumer")
self.logger.error(e)
return False
return True
def handle_message(self, message):
"""
Messages received from RabbitMQ are handled here. These messages
instruct media-monitor of events such as a new directory being watched,
file metadata being changed, or any other change to the config of
media-monitor made via the web UI.
"""
self.logger.info("Received md from RabbitMQ: %s" % str(message))
m = json.loads(message)
# TODO : normalize any other keys that could be used to pass
# directories
if 'directory' in m: m['directory'] = normpath(m['directory'])
self.handler.message(m)
class AirtimeMessageReceiver(Loggable):
def __init__(self, cfg, manager):
self.dispatch_table = {
'md_update' : self.md_update,
'new_watch' : self.new_watch,
'remove_watch' : self.remove_watch,
'rescan_watch' : self.rescan_watch,
'change_stor' : self.change_storage,
'file_delete' : self.file_delete,
}
self.cfg = cfg
self.manager = manager
def message(self, msg):
"""
This method is called by an AirtimeNotifier instance that
consumes the Rabbit MQ events that trigger this. The method
returns True when the event was executed and False when it wasn't.
"""
msg = copy.deepcopy(msg)
if msg['event_type'] in self.dispatch_table:
evt = msg['event_type']
del msg['event_type']
self.logger.info("Handling RabbitMQ message: '%s'" % evt)
self._execute_message(evt,msg)
return True
else:
self.logger.info("Received invalid message with 'event_type': '%s'"
% msg['event_type'])
self.logger.info("Message details: %s" % str(msg))
return False
def _execute_message(self,evt,message):
self.dispatch_table[evt](message)
def __request_now_bootstrap(self, directory_id=None, directory=None,
all_files=True):
if (not directory_id) and (not directory):
raise ValueError("You must provide either directory_id or \
directory")
sdb = AirtimeDB(apc())
if directory : directory = os.path.normpath(directory)
if directory_id is None : directory_id = sdb.to_id(directory)
if directory is None : directory = sdb.to_directory(directory_id)
try:
bs = Bootstrapper( sdb, self.manager.watch_signal() )
bs.flush_watch( directory=directory, last_ran=self.cfg.last_ran() )
except Exception as e:
self.fatal_exception("Exception bootstrapping: (dir,id)=(%s,%s)" %
(directory, directory_id), e)
raise DirectoryIsNotListed(directory, cause=e)
def md_update(self, msg):
self.logger.info("Updating metadata for: '%s'" %
msg['MDATA_KEY_FILEPATH'])
md_path = msg['MDATA_KEY_FILEPATH']
try: Metadata.write_unsafe(path=md_path, md=msg)
except BadSongFile as e:
self.logger.info("Cannot find metadata file: '%s'" % e.path)
except InvalidMetadataElement as e:
self.logger.info("Metadata instance not supported for this file '%s'" \
% e.path)
self.logger.info(str(e))
except Exception as e:
# TODO : add md_path to problem path or something?
self.fatal_exception("Unknown error when writing metadata to: '%s'"
% md_path, e)
def new_watch(self, msg, restart=False):
msg['directory'] = normpath(msg['directory'])
self.logger.info("Creating watch for directory: '%s'" %
msg['directory'])
if not os.path.exists(msg['directory']):
try: os.makedirs(msg['directory'])
except Exception as e:
self.fatal_exception("Failed to create watched dir '%s'" %
msg['directory'],e)
else:
self.logger.info("Created new watch directory: '%s'" %
msg['directory'])
self.new_watch(msg)
else:
self.__request_now_bootstrap( directory=msg['directory'],
all_files=restart)
self.manager.add_watch_directory(msg['directory'])
def remove_watch(self, msg):
msg['directory'] = normpath(msg['directory'])
self.logger.info("Removing watch from directory: '%s'" %
msg['directory'])
self.manager.remove_watch_directory(msg['directory'])
def rescan_watch(self, msg):
self.logger.info("Trying to rescan watched directory: '%s'" %
msg['directory'])
try:
# id is always an integer but in the dictionary the key is always a
# string
self.__request_now_bootstrap( unicode(msg['id']) )
except DirectoryIsNotListed as e:
self.fatal_exception("Bad rescan request", e)
except Exception as e:
self.fatal_exception("Bad rescan request. Unknown error.", e)
else:
self.logger.info("Successfully re-scanned: '%s'" % msg['directory'])
def change_storage(self, msg):
new_storage_directory = msg['directory']
self.manager.change_storage_root(new_storage_directory)
for to_bootstrap in [ self.manager.get_recorded_path(),
self.manager.get_imported_path() ]:
self.__request_now_bootstrap( directory=to_bootstrap )
def file_delete(self, msg):
# Deletes should be requested only from imported folder but we
# don't verify that. Security risk perhaps?
# we only delete if we are passed the special delete flag that is
# necessary with every "delete_file" request
if not msg['delete']:
self.logger.info("No clippy confirmation, ignoring event. \
Out of curiosity we will print some details.")
self.logger.info(msg)
return
# TODO : Add validation that we are deleting a file that's under our
# surveillance. We don't want to delete some random system file.
if os.path.exists(msg['filepath']):
try:
self.logger.info("Attempting to delete '%s'" %
msg['filepath'])
# We use FileMediator to ignore any paths with
# msg['filepath'] so that we do not send a duplicate delete
# request that we'd normally get from pyinotify. But right
# now event contractor would take care of this sort of
# thing anyway so this might not be necessary after all
#user().file_mediator.ignore(msg['filepath'])
os.unlink(msg['filepath'])
# Verify deletion:
if not os.path.exists(msg['filepath']):
self.logger.info("Successfully deleted: '%s'" %
msg['filepath'])
except Exception as e:
self.fatal_exception("Failed to delete '%s'" % msg['filepath'],
e)
else: # validation for filepath existence failed
self.logger.info("Attempting to delete file '%s' that does not \
exist. Full request:" % msg['filepath'])
self.logger.info(msg)
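# Illustrative sketch (not part of the original module; values are
# hypothetical): a minimal RabbitMQ payload that message() would dispatch
# to md_update after stripping 'event_type':
#   {"event_type": "md_update",
#    "MDATA_KEY_FILEPATH": "/srv/airtime/stor/imported/1/song.mp3",
#    "MDATA_KEY_TITLE": "Example"}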

View File

@ -1,63 +0,0 @@
import os
from pydispatch import dispatcher
from events import NewFile, DeleteFile, ModifyFile
from log import Loggable
from ..saas.thread import getsig
import pure as mmp
class Bootstrapper(Loggable):
"""
Bootstrapper reads all the info in the filesystem flushes organize events
and watch events
"""
def __init__(self,db,watch_signal):
"""
db - AirtimeDB object; small layer over api client
last_ran - last time the program was ran.
watch_signal - the signals should send events for every file on.
"""
self.db = db
self.watch_signal = getsig(watch_signal)
def flush_all(self, last_ran):
"""
bootstrap every single watched directory. only useful at startup. note
that because of the way list_directories works we also flush the import
directory as well.
"""
for d in self.db.list_storable_paths(): self.flush_watch(d, last_ran)
def flush_watch(self, directory, last_ran, all_files=False):
"""
flush a single watch/imported directory. useful when wanting to
rescan, or add a watched/imported directory
"""
songs = set([])
added = modded = deleted = 0
for f in mmp.walk_supported(directory, clean_empties=False):
songs.add(f)
# We decide whether to update a file's metadata by checking its
# system modification date. If it's above the value self.last_ran
# which is passed to us that means media monitor wasn't aware when
# these changes occurred in the filesystem, hence it will send the
# correct events to sync the database with the filesystem
if os.path.getmtime(f) > last_ran:
modded += 1
dispatcher.send(signal=self.watch_signal, sender=self,
event=ModifyFile(f))
db_songs = set(( song for song in self.db.directory_get_files(directory,
all_files)
if mmp.sub_path(directory,song) ))
# Get all the files that are in the database but not in the file
# system. These are the files marked for deletion
for to_delete in db_songs.difference(songs):
dispatcher.send(signal=self.watch_signal, sender=self,
event=DeleteFile(to_delete))
deleted += 1
for to_add in songs.difference(db_songs):
dispatcher.send(signal=self.watch_signal, sender=self,
event=NewFile(to_add))
added += 1
self.logger.info( "Flushed watch directory (%s). \
(added, modified, deleted) = (%d, %d, %d)"
% (directory, added, modded, deleted) )
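# Illustrative usage sketch (not part of the original module; arguments
# are hypothetical). A bootstrapper is built from the database layer and
# the manager's watch signal, then flushed one directory at a time:
#   bs = Bootstrapper(db=sdb, watch_signal=manager.watch_signal())
#   bs.flush_watch('/srv/airtime/stor/imported', last_ran=cfg.last_ran())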

View File

@ -1,32 +0,0 @@
# -*- coding: utf-8 -*-
import os
import copy
from configobj import ConfigObj
from exceptions import NoConfigFile, ConfigAccessViolation
import pure as mmp
class MMConfig(object):
def __init__(self, path):
if not os.path.exists(path): raise NoConfigFile(path)
self.cfg = ConfigObj(path)
def __getitem__(self, key):
""" We always return a copy of the config item to prevent
callers from doing any modifications through the returned
objects methods """
return copy.deepcopy(self.cfg[key])
def __setitem__(self, key, value):
""" We use this method not to allow anybody to mess around with
config file any settings made should be done through MMConfig's
instance methods """
raise ConfigAccessViolation(key)
def save(self): self.cfg.write()
def last_ran(self):
""" Returns the last time media monitor was ran by looking at
the time when the file at 'index_path' was modified """
return mmp.last_modified(self.cfg['media-monitor']['index_path'])
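# Illustrative usage sketch (not part of the original module). Reads hand
# back deep copies and writes raise, so the on-disk config stays canonical:
#   cfg = MMConfig('/etc/airtime/airtime.conf')
#   rabbit = cfg['rabbitmq']  # safe copy of the section
#   cfg['rabbitmq'] = {}      # raises ConfigAccessViolation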

View File

@ -1,40 +0,0 @@
from log import Loggable
from events import DeleteFile
class EventContractor(Loggable):
def __init__(self):
self.store = {}
def event_registered(self, evt):
"""
returns true if the event is registered which means that there is
another "unpacked" event somewhere out there with the same path
"""
return evt.path in self.store
def get_old_event(self, evt):
"""
get the previously registered event with the same path as 'evt'
"""
return self.store[ evt.path ]
def register(self, evt):
if self.event_registered(evt):
ev_proxy = self.get_old_event(evt)
if ev_proxy.same_event(evt):
ev_proxy.merge_proxy(evt)
return False
# delete overrides any other event
elif evt.is_event(DeleteFile):
ev_proxy.merge_proxy(evt)
return False
else:
ev_proxy.run_hook()
ev_proxy.reset_hook()
self.store[ evt.path ] = evt
evt.set_pack_hook( lambda : self.__unregister(evt) )
return True
def __unregister(self, evt):
del self.store[evt.path]
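# Illustrative sketch (not part of the original module; the proxies are
# hypothetical). register() returns False when an event is merged into an
# in-flight proxy for the same path, so only the first one gets queued:
#   contractor = EventContractor()
#   contractor.register(modify_proxy)  # True  -> caller queues it
#   contractor.register(delete_proxy)  # False -> merged; delete wins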

View File

@ -1,27 +0,0 @@
import socket
import time
from log import Loggable
from toucher import RepeatTimer
from amqplib.client_0_8.exceptions import AMQPConnectionException
class EventDrainer(Loggable):
"""
Flushes events from RabbitMQ that are sent from airtime at a
regular interval
"""
def __init__(self, airtime_notifier, interval=1):
def cb():
try:
message = airtime_notifier.simple_queue.get(block=True)
airtime_notifier.handle_message(message.payload)
message.ack()
except (IOError, AttributeError, AMQPConnectionException), e:
self.logger.error('Exception: %s', e)
while not airtime_notifier.init_rabbit_mq():
self.logger.error("Error connecting to RabbitMQ Server. \
Trying again in few seconds")
time.sleep(5)
t = RepeatTimer(interval, cb)
t.daemon = True
t.start()
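# Illustrative usage sketch (not part of the original module): a drainer is
# started once at boot against the AirtimeNotifier instance and then polls
# on its own daemon timer:
#   EventDrainer(notifier, interval=1)  # flush RabbitMQ every second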

View File

@ -1,261 +0,0 @@
# -*- coding: utf-8 -*-
import os
import abc
import re
import pure as mmp
from pure import LazyProperty
from metadata import Metadata
from log import Loggable
from exceptions import BadSongFile
from ..saas.thread import getsig, user
class PathChannel(object):
""" Simple struct to hold a 'signal' string and a related 'path'.
Basically used as a named tuple """
def __init__(self, signal, path):
self.signal = getsig(signal)
self.path = path
# TODO : Move this to its own file. Also possibly unsingleton it and use it
# as a simple module just like m.m.owners
class EventRegistry(object):
""" This class's main use is to keep track all events with a cookie
attribute. This is done mainly because some events must be 'morphed'
into other events because we later detect that they are move events
instead of delete events. """
def __init__(self):
self.registry = {}
def register(self,evt): self.registry[evt.cookie] = evt
def unregister(self,evt): del self.registry[evt.cookie]
def registered(self,evt): return evt.cookie in self.registry
def matching(self,evt):
event = self.registry[evt.cookie]
# Want to disallow accessing the same event twice
self.unregister(event)
return event
class EventProxy(Loggable):
""" A container object for instances of BaseEvent (or it's
subclasses) used for event contractor """
def __init__(self, orig_evt):
self.orig_evt = orig_evt
self.evt = orig_evt
self.reset_hook()
if hasattr(orig_evt, 'path'): self.path = orig_evt.path
def set_pack_hook(self, l):
self._pack_hook = l
def reset_hook(self):
self._pack_hook = lambda : None
def run_hook(self):
self._pack_hook()
def safe_pack(self):
self.run_hook()
# make sure that cleanup hook is never called twice for the same event
self.reset_hook()
return self.evt.safe_pack()
def merge_proxy(self, proxy):
self.evt = proxy.evt
def is_event(self, real_event):
return isinstance(self.evt, real_event)
def same_event(self, proxy):
return self.evt.__class__ == proxy.evt.__class__
class HasMetaData(object):
""" Any class that inherits from this class gains the metadata
attribute that loads metadata from the class's 'path' attribute.
This is done lazily so there is no performance penalty to inheriting
from this and subsequent calls to metadata are cached """
__metaclass__ = abc.ABCMeta
@LazyProperty
def metadata(self): return Metadata(self.path)
class BaseEvent(Loggable):
__metaclass__ = abc.ABCMeta
def __init__(self, raw_event):
# TODO : clean up this idiotic hack
# we should use keyword constructors instead of this behaviour checking
# bs to initialize BaseEvent
if hasattr(raw_event,"pathname"):
self._raw_event = raw_event
self.path = os.path.normpath(raw_event.pathname)
else: self.path = raw_event
self.owner = user().owner.get_owner(self.path)
owner_re = re.search('stor/imported/(?P<owner>\d+)/', self.path)
if owner_re:
self.logger.info("matched path: %s" % self.path)
self.owner = owner_re.group('owner')
else:
self.logger.info("did not match path: %s" % self.path)
self._pack_hook = lambda: None # no op
# TODO : delete this method later
def reset_hook(self):
""" Resets the hook that is called after an event is packed.
Before resetting the hook we execute it to make sure that
whatever cleanup operations were queued are executed. """
self._pack_hook()
self._pack_hook = lambda: None
def exists(self): return os.path.exists(self.path)
@LazyProperty
def cookie(self): return getattr( self._raw_event, 'cookie', None )
def __str__(self):
return "Event(%s). Path(%s)" % ( self.path, self.__class__.__name__)
# TODO : delete this method later
def add_safe_pack_hook(self,k):
""" adds a callable object (function) that will be called after
the event has been "safe_packed" """
self._pack_hook = k
def proxify(self):
return EventProxy(self)
# As opposed to unsafe_pack...
def safe_pack(self):
""" returns exceptions instead of throwing them to be consistent
with events that must catch their own BadSongFile exceptions
since they generate a set of exceptions instead of a single one """
try:
self._pack_hook()
ret = self.pack()
# Remove owner of this file only after packing. Otherwise packing
# will not serialize the owner correctly into the airtime request
user().owner.remove_file_owner(self.path)
return ret
except BadSongFile as e: return [e]
except Exception as e:
self.unexpected_exception(e)
return [e]
# nothing to see here, please move along
def morph_into(self, evt):
self.logger.info("Morphing %s into %s" % ( str(self), str(evt) ) )
self._raw_event = evt._raw_event
self.path = evt.path
self.__class__ = evt.__class__
# Clean up old hook and transfer the new events hook
self.reset_hook()
self.add_safe_pack_hook( evt._pack_hook )
return self
def assign_owner(self,req):
""" Packs self.owner to req if the owner is valid. I.e. it's not
-1. This method is used by various events that would like to
pass owner as a parameter. NewFile for example. """
if self.owner != -1: req['MDATA_KEY_OWNER_ID'] = self.owner
class FakePyinotify(object):
""" sometimes we must create our own pyinotify like objects to
instantiate objects from the classes below whenever we want to turn
a single event into multiple events """
def __init__(self, path): self.pathname = path
class OrganizeFile(BaseEvent, HasMetaData):
""" The only kind of event that does support the pack protocol. It's
used internally with mediamonitor to move files in the organize
directory. """
def __init__(self, *args, **kwargs):
super(OrganizeFile, self).__init__(*args, **kwargs)
def pack(self):
raise AttributeError("You can't send organize events to airtime!!!")
class NewFile(BaseEvent, HasMetaData):
""" NewFile events are the only events that contain
MDATA_KEY_OWNER_ID metadata in them. """
def __init__(self, *args, **kwargs):
super(NewFile, self).__init__(*args, **kwargs)
def pack(self):
""" packs turns an event into a media monitor request """
req_dict = self.metadata.extract()
req_dict['mode'] = u'create'
req_dict['is_record'] = self.metadata.is_recorded()
self.assign_owner(req_dict)
req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
return [req_dict]
class DeleteFile(BaseEvent):
""" DeleteFile event only contains the path to be deleted. No other
metadata can be or is included. (This is because this event is fired
after the deletion occurs). """
def __init__(self, *args, **kwargs):
super(DeleteFile, self).__init__(*args, **kwargs)
def pack(self):
req_dict = {}
req_dict['mode'] = u'delete'
req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
return [req_dict]
class MoveFile(BaseEvent, HasMetaData):
""" Path argument should be the new path of the file that was moved """
def __init__(self, *args, **kwargs):
super(MoveFile, self).__init__(*args, **kwargs)
def old_path(self):
return self._raw_event.src_pathname
def pack(self):
req_dict = {}
req_dict['mode'] = u'moved'
req_dict['MDATA_KEY_ORIGINAL_PATH'] = self.old_path()
req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
req_dict['MDATA_KEY_MD5'] = self.metadata.extract()['MDATA_KEY_MD5']
return [req_dict]
class ModifyFile(BaseEvent, HasMetaData):
def __init__(self, *args, **kwargs):
super(ModifyFile, self).__init__(*args, **kwargs)
def pack(self):
req_dict = self.metadata.extract()
req_dict['mode'] = u'modify'
# path to directory that is to be removed
req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
return [req_dict]
def map_events(directory, constructor):
""" Walks 'directory' and creates an event using 'constructor'.
Returns a list of the constructed events. """
# -unknown-path should not appear in the path here but more testing
# might be necessary
for f in mmp.walk_supported(directory, clean_empties=False):
try:
for e in constructor( FakePyinotify(f) ).pack(): yield e
except BadSongFile as e: yield e
class DeleteDir(BaseEvent):
""" A DeleteDir event unfolds itself into a list of DeleteFile
events for every file in the directory. """
def __init__(self, *args, **kwargs):
super(DeleteDir, self).__init__(*args, **kwargs)
def pack(self):
return map_events( self.path, DeleteFile )
class MoveDir(BaseEvent):
""" A MoveDir event unfolds itself into a list of MoveFile events
for every file in the directory. """
def __init__(self, *args, **kwargs):
super(MoveDir, self).__init__(*args, **kwargs)
def pack(self):
return map_events( self.path, MoveFile )
class DeleteDirWatch(BaseEvent):
""" Deleting a watched directory is different from deleting any
other directory. Hence we must have a separate event to handle this
case """
def __init__(self, *args, **kwargs):
super(DeleteDirWatch, self).__init__(*args, **kwargs)
def pack(self):
req_dict = {}
req_dict['mode'] = u'delete_dir'
req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path + "/" )
return [req_dict]
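# Illustrative sketch (not part of the original module; the path is
# hypothetical). Inside a media-monitor thread (where user() is set up),
# packing a delete event yields the request dict sent to Airtime:
#   evt = DeleteFile('/srv/airtime/stor/imported/1/old.mp3')
#   evt.safe_pack()  # [{'mode': u'delete', 'MDATA_KEY_FILEPATH': u'...'}]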

View File

@ -1,60 +0,0 @@
# -*- coding: utf-8 -*-
class BadSongFile(Exception):
def __init__(self, path): self.path = path
def __str__(self): return "Can't read %s" % self.path
class NoConfigFile(Exception):
def __init__(self, path): self.path = path
def __str__(self):
return "Path '%s' for config file does not exit" % self.path
class ConfigAccessViolation(Exception):
def __init__(self,key): self.key = key
def __str__(self): return "You must not access key '%s' directly" % self.key
class FailedToSetLocale(Exception):
def __str__(self): return "Failed to set locale"
class FailedToObtainLocale(Exception):
def __init__(self, path, cause):
self.path = path
self.cause = cause
def __str__(self): return "Failed to obtain locale from '%s'" % self.path
class CouldNotCreateIndexFile(Exception):
"""exception whenever index file cannot be created"""
def __init__(self, path, cause=None):
self.path = path
self.cause = cause
def __str__(self): return "Failed to create touch file '%s'" % self.path
class DirectoryIsNotListed(Exception):
def __init__(self,dir_id,cause=None):
self.dir_id = dir_id
self.cause = cause
def __str__(self):
return "%d was not listed as a directory in the database" % self.dir_id
class FailedToCreateDir(Exception):
def __init__(self,path, parent):
self.path = path
self.parent = parent
def __str__(self): return "Failed to create path '%s'" % self.path
class NoDirectoryInAirtime(Exception):
def __init__(self,path, does_exist):
self.path = path
self.does_exist = does_exist
def __str__(self):
return "Directory '%s' does not exist in Airtime.\n \
However: %s do exist." % (self.path, self.does_exist)
class InvalidMetadataElement(Exception):
def __init__(self, parent, key, path):
self.parent = parent
self.key = key
self.path = path
def __str__(self):
return "InvalidMetadataElement: (key,path) = (%s,%s)" \
% (self.key, self.path)

View File

@ -1,60 +0,0 @@
# -*- coding: utf-8 -*-
from pydispatch import dispatcher
import abc
from log import Loggable
from ..saas.thread import getsig
import pure as mmp
# Defines the handle interface
class Handles(object):
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def handle(self, sender, event, *args, **kwargs): pass
# TODO : Investigate whether weak reffing in dispatcher.connect could possibly
# cause a memory leak
class ReportHandler(Handles):
"""
A handler that can also report problem files when things go wrong
through the report_problem_file routine
"""
__metaclass__ = abc.ABCMeta
def __init__(self, signal, weak=False):
self.signal = getsig(signal)
self.report_signal = getsig("badfile")
def dummy(sender, event): self.handle(sender,event)
dispatcher.connect(dummy, signal=self.signal, sender=dispatcher.Any,
weak=weak)
def report_problem_file(self, event, exception=None):
dispatcher.send(signal=self.report_signal, sender=self, event=event,
exception=exception)
class ProblemFileHandler(Handles, Loggable):
"""
Responsible for answering to events passed through the 'badfile'
signal. Moves the problem file passed to the designated directory.
"""
def __init__(self, channel, **kwargs):
self.channel = channel
self.signal = getsig(self.channel.signal)
self.problem_dir = self.channel.path
def dummy(sender, event, exception):
self.handle(sender, event, exception)
dispatcher.connect(dummy, signal=self.signal, sender=dispatcher.Any,
weak=False)
mmp.create_dir( self.problem_dir )
self.logger.info("Initialized problem file handler. Problem dir: '%s'" %
self.problem_dir)
def handle(self, sender, event, exception=None):
# TODO : use the exception parameter for something
self.logger.info("Received problem file: '%s'. Supposed to move it to \
problem dir", event.path)
try: mmp.move_to_dir(dir_path=self.problem_dir, file_path=event.path)
except Exception as e:
self.logger.info("Could not move file: '%s' to problem dir: '%s'" %
(event.path, self.problem_dir))
self.logger.info("Exception: %s" % str(e))

View File

@ -1,138 +0,0 @@
# -*- coding: utf-8 -*-
import pyinotify
from pydispatch import dispatcher
from functools import wraps
import pure as mmp
from pure import IncludeOnly
from events import OrganizeFile, NewFile, MoveFile, DeleteFile, \
DeleteDir, MoveDir,\
DeleteDirWatch
from log import Loggable
from ..saas.thread import getsig, user
# Note: Because of the way classes that inherit from pyinotify.ProcessEvent
# interact with constructors, you should only instantiate objects from them
# using keyword arguments. For example:
# OrganizeListener('watch_signal') <= wrong
# OrganizeListener(signal='watch_signal') <= right
class FileMediator(Loggable):
# TODO : this class is not actually used. remove all references to it
# everywhere (including tests).
""" FileMediator is used an intermediate mechanism that filters out
certain events. """
def __init__(self) : self.ignored_set = set([]) # for paths only
def is_ignored(self,path) : return path in self.ignored_set
def ignore(self, path) : self.ignored_set.add(path)
def unignore(self, path) : self.ignored_set.remove(path)
def mediate_ignored(fn):
@wraps(fn)
def wrapped(self, event, *args,**kwargs):
event.pathname = unicode(event.pathname, "utf-8")
if user().file_mediator.is_ignored(event.pathname):
user().file_mediator.logger.info("Ignoring: '%s' (once)" % event.pathname)
user().file_mediator.unignore(event.pathname)
else: return fn(self, event, *args, **kwargs)
return wrapped
class BaseListener(object):
def __str__(self):
return "Listener(%s), Signal(%s)" % \
(self.__class__.__name__, self.signal)
def my_init(self, signal): self.signal = getsig(signal)
class OrganizeListener(BaseListener, pyinotify.ProcessEvent, Loggable):
def process_IN_CLOSE_WRITE(self, event):
#self.logger.info("===> handling: '%s'" % str(event))
self.process_to_organize(event)
def process_IN_MOVED_TO(self, event):
#self.logger.info("===> handling: '%s'" % str(event))
self.process_to_organize(event)
def flush_events(self, path):
"""
organize the whole directory at path. (pretty much by doing what
handle does to every file)
"""
flushed = 0
for f in mmp.walk_supported(path, clean_empties=True):
self.logger.info("Bootstrapping: File in 'organize' directory: \
'%s'" % f)
if not mmp.file_locked(f):
dispatcher.send(signal=getsig(self.signal), sender=self,
event=OrganizeFile(f))
flushed += 1
#self.logger.info("Flushed organized directory with %d files" % flushed)
@IncludeOnly(mmp.supported_extensions)
def process_to_organize(self, event):
dispatcher.send(signal=getsig(self.signal), sender=self,
event=OrganizeFile(event))
class StoreWatchListener(BaseListener, Loggable, pyinotify.ProcessEvent):
def process_IN_CLOSE_WRITE(self, event):
self.process_create(event)
def process_IN_MOVED_TO(self, event):
if user().event_registry.registered(event):
# We need this trick because we don't know how to "expand" dir events
# into file events until we know for sure if we deleted or moved
morph = MoveDir(event) if event.dir else MoveFile(event)
user().event_registry.matching(event).morph_into(morph)
else: self.process_create(event)
def process_IN_MOVED_FROM(self, event):
# Is either delete dir or delete file
evt = self.process_delete(event)
# evt can be None whenever the event points at a file that would be
# ignored by @IncludeOnly
if hasattr(event,'cookie') and (evt != None):
user().event_registry.register(evt)
def process_IN_DELETE(self,event): self.process_delete(event)
def process_IN_MOVE_SELF(self, event):
if '-unknown-path' in event.pathname:
event.pathname = event.pathname.replace('-unknown-path','')
self.delete_watch_dir(event)
def delete_watch_dir(self, event):
e = DeleteDirWatch(event)
dispatcher.send(signal=getsig('watch_move'), sender=self, event=e)
dispatcher.send(signal=getsig(self.signal), sender=self, event=e)
@mediate_ignored
@IncludeOnly(mmp.supported_extensions)
def process_create(self, event):
evt = NewFile(event)
dispatcher.send(signal=getsig(self.signal), sender=self, event=evt)
return evt
@mediate_ignored
@IncludeOnly(mmp.supported_extensions)
def process_delete(self, event):
evt = None
if event.dir : evt = DeleteDir(event)
else : evt = DeleteFile(event)
dispatcher.send(signal=getsig(self.signal), sender=self, event=evt)
return evt
@mediate_ignored
def process_delete_dir(self, event):
evt = DeleteDir(event)
dispatcher.send(signal=getsig(self.signal), sender=self, event=evt)
return evt
def flush_events(self, path):
"""
walk over path and send a NewFile event for every file in this
directory. Not to be confused with bootstrapping which is a more
careful process that involved figuring out what's in the database
first.
"""
# Songs is a dictionary where every key is the watched directory
# and the value is a set with all the files in that directory.
added = 0
for f in mmp.walk_supported(path, clean_empties=False):
added += 1
dispatcher.send( signal=getsig(self.signal), sender=self, event=NewFile(f) )
self.logger.info( "Flushed watch directory. added = %d" % added )

View File

@ -1,36 +0,0 @@
import logging
import abc
import traceback
from pure import LazyProperty
appname = 'root'
def setup_logging(log_path):
""" Setup logging by writing log to 'log_path' """
#logger = logging.getLogger(appname)
logging.basicConfig(filename=log_path, level=logging.DEBUG)
def get_logger():
""" in case we want to use the common logger from a procedural
interface """
return logging.getLogger()
class Loggable(object):
""" Any class that wants to log can inherit from this class and
automatically get a logger attribute that can be used like:
self.logger.info(...) etc. """
__metaclass__ = abc.ABCMeta
@LazyProperty
def logger(self): return get_logger()
def unexpected_exception(self,e):
""" Default message for 'unexpected' exceptions """
self.fatal_exception("'Unexpected' exception has occured:", e)
def fatal_exception(self, message, e):
""" Prints an exception 'e' with 'message'. Also outputs the
traceback. """
self.logger.error( message )
self.logger.error( str(e) )
self.logger.error( traceback.format_exc() )
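# Illustrative sketch (not part of the original module): any class gains a
# lazily constructed logger simply by inheriting from Loggable:
#   class Mover(Loggable):
#       def move(self, src, dst):
#           self.logger.info("moving %s to %s", src, dst)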

View File

@ -1,236 +0,0 @@
import pyinotify
import time
import os
from pydispatch import dispatcher
from os.path import normpath
from events import PathChannel
from log import Loggable
from listeners import StoreWatchListener, OrganizeListener
from handler import ProblemFileHandler
from organizer import Organizer
from ..saas.thread import InstanceInheritingThread, getsig
import pure as mmp
class ManagerTimeout(InstanceInheritingThread,Loggable):
""" The purpose of this class is to flush the organize directory
every 3 seconds. This used to be just a workaround for cc-4235
but recently became a permanent solution because it's "cheap" and
reliable """
def __init__(self, manager, interval=1.5):
# TODO : interval should be read from config and passed here instead
# of just using the hard coded value
super(ManagerTimeout, self).__init__()
self.manager = manager
self.interval = interval
def run(self):
while True:
time.sleep(self.interval)
self.manager.flush_organize()
class Manager(Loggable):
# NOTE : this massive class is a source of many problems of mm and
# is in dire need of breaking up and refactoring.
""" An abstraction over media monitors core pyinotify functions.
These include adding watched,store, organize directories, etc.
Basically composes over WatchManager from pyinotify """
def __init__(self):
self.wm = pyinotify.WatchManager()
# These two instance variables are assumed to be constant
self.watch_channel = getsig('watch')
self.organize_channel = getsig('organize')
self.watch_listener = StoreWatchListener(signal = self.watch_channel)
self.__timeout_thread = ManagerTimeout(self)
self.__timeout_thread.daemon = True
self.__timeout_thread.start()
self.organize = {
'organize_path' : None,
'imported_path' : None,
'recorded_path' : None,
'problem_files_path' : None,
'organizer' : None,
'problem_handler' : None,
'organize_listener' : OrganizeListener(signal=
self.organize_channel),
}
def dummy(sender, event): self.watch_move( event.path, sender=sender )
dispatcher.connect(dummy, signal=getsig('watch_move'),
sender=dispatcher.Any, weak=False)
def subwatch_add(sender, directory):
self.__add_watch(directory, self.watch_listener)
dispatcher.connect(subwatch_add, signal=getsig('add_subwatch'),
sender=dispatcher.Any, weak=False)
# A private mapping path => watch_descriptor
# we use the same dictionary for organize, watch, store wd events.
# this is a little hacky because we are unable to have multiple wd's
# on the same path.
self.__wd_path = {}
# The following set isn't really necessary anymore. Should be
# removed...
self.watched_directories = set([])
# This is the only event that we are unable to process "normally". I.e.
# through dedicated handler objects. Because we must have access to a
# manager instance. Hence we must slightly break encapsulation.
def watch_move(self, watch_dir, sender=None):
""" handle 'watch move' events directly sent from listener """
self.logger.info("Watch dir '%s' has been renamed (hence removed)" %
watch_dir)
self.remove_watch_directory(normpath(watch_dir))
def watch_signal(self):
""" Return the signal string our watch_listener is reading
events from """
return getsig(self.watch_listener.signal)
def __remove_watch(self,path):
""" Remove path from being watched (first will check if 'path'
is watched) """
# only delete if dir is actually being watched
if path in self.__wd_path:
wd = self.__wd_path[path]
self.wm.rm_watch(wd, rec=True)
del(self.__wd_path[path])
def __add_watch(self,path,listener):
""" Start watching 'path' using 'listener'. First will check if
directory is being watched before adding another watch """
self.logger.info("Attempting to add listener to path '%s'" % path)
self.logger.info( 'Listener: %s' % str(listener) )
if not self.has_watch(path):
wd = self.wm.add_watch(path, pyinotify.ALL_EVENTS, rec=True,
auto_add=True, proc_fun=listener)
if wd: self.__wd_path[path] = wd.values()[0]
def __create_organizer(self, target_path, recorded_path):
""" creates an organizer at new destination path or modifies the
old one """
# TODO : find a proper fix for the following hack
# We avoid creating new instances of organize because of the way
# it interacts with pydispatch. We must be careful to never have
# more than one instance of OrganizeListener but this is not so
# easy. (The singleton hack in Organizer) doesn't work. This is
# the only thing that seems to work.
if self.organize['organizer']:
o = self.organize['organizer']
o.channel = self.organize_channel
o.target_path = target_path
o.recorded_path = recorded_path
else:
self.organize['organizer'] = Organizer(channel=
self.organize_channel, target_path=target_path,
recorded_path=recorded_path)
def get_problem_files_path(self):
""" returns the path where problem files should go """
return self.organize['problem_files_path']
def set_problem_files_path(self, new_path):
""" Set the path where problem files should go """
self.organize['problem_files_path'] = new_path
self.organize['problem_handler'] = \
ProblemFileHandler( PathChannel(signal=getsig('badfile'),
path=new_path) )
def get_recorded_path(self):
""" returns the path of the recorded directory """
return self.organize['recorded_path']
def set_recorded_path(self, new_path):
self.__remove_watch(self.organize['recorded_path'])
self.organize['recorded_path'] = new_path
self.__create_organizer( self.organize['imported_path'], new_path)
self.__add_watch(new_path, self.watch_listener)
def get_organize_path(self):
""" returns the current path that is being watched for
organization """
return self.organize['organize_path']
def set_organize_path(self, new_path):
""" sets the organize path to be new_path. Under the current
scheme there is only one organize path but there is no reason
why more cannot be supported """
# if we are already organizing a particular directory we remove the
# watch from it first before organizing another directory
self.__remove_watch(self.organize['organize_path'])
self.organize['organize_path'] = new_path
# the OrganizeListener instance will walk path and dispatch an organize
# event for every file in that directory
self.organize['organize_listener'].flush_events(new_path)
#self.__add_watch(new_path, self.organize['organize_listener'])
def flush_organize(self):
path = self.organize['organize_path']
self.organize['organize_listener'].flush_events(path)
def get_imported_path(self):
return self.organize['imported_path']
def set_imported_path(self,new_path):
""" set the directory where organized files go to. """
self.__remove_watch(self.organize['imported_path'])
self.organize['imported_path'] = new_path
self.__create_organizer( new_path, self.organize['recorded_path'])
self.__add_watch(new_path, self.watch_listener)
def change_storage_root(self, store):
""" hooks up all the directories for you. Problem, recorded,
imported, organize. """
store_paths = mmp.expand_storage(store)
# First attempt to make sure that all paths exist before adding any
# watches
for path_type, path in store_paths.iteritems():
try: mmp.create_dir(path)
except mmp.FailedToCreateDir as e: self.unexpected_exception(e)
os.chmod(store_paths['organize'], 0775)
self.set_problem_files_path(store_paths['problem_files'])
self.set_imported_path(store_paths['imported'])
self.set_recorded_path(store_paths['recorded'])
self.set_organize_path(store_paths['organize'])
def has_watch(self, path):
""" returns true if the path is being watched or not. Any kind
of watch: organize, store, watched. """
return path in self.__wd_path
def add_watch_directory(self, new_dir):
""" adds a directory to be "watched". "watched" directories are
those that are being monitored by media monitor for airtime in
this context and not directories pyinotify calls watched """
if self.has_watch(new_dir):
self.logger.info("Cannot add '%s' to watched directories. It's \
already being watched" % new_dir)
else:
self.logger.info("Adding watched directory: '%s'" % new_dir)
self.__add_watch(new_dir, self.watch_listener)
def remove_watch_directory(self, watch_dir):
""" removes a directory from being "watched". Undoes
add_watch_directory """
if self.has_watch(watch_dir):
self.logger.info("Removing watched directory: '%s'", watch_dir)
self.__remove_watch(watch_dir)
else:
self.logger.info("'%s' is not being watched, hence cannot be \
removed" % watch_dir)
self.logger.info("The directories we are watching now are:")
self.logger.info( self.__wd_path )
def loop(self):
""" block until we receive pyinotify events """
notifier = pyinotify.Notifier(self.wm)
notifier.coalesce_events()
notifier.loop()
#notifier = pyinotify.ThreadedNotifier(self.wm, read_freq=1)
#notifier.coalesce_events()
#notifier.start()
#return notifier
#import asyncore
#notifier = pyinotify.AsyncNotifier(self.wm)
#asyncore.loop()
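# A minimal sketch of the threaded alternative hinted at by the commented
# lines above, reusing the same self.wm WatchManager. Illustrative only;
# the blocking Notifier.loop() above is what was actually used.
def loop_threaded(self):
    """ Non-blocking variant: process pyinotify events on a background
    thread and return the notifier so the caller can stop() it later """
    notifier = pyinotify.ThreadedNotifier(self.wm, read_freq=1)
    notifier.coalesce_events()
    notifier.start()
    return notifier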

View File

@ -1,155 +0,0 @@
# -*- coding: utf-8 -*-
import mutagen
import os
import copy
from mutagen.easymp4 import EasyMP4KeyError
from mutagen.easyid3 import EasyID3KeyError
from exceptions import BadSongFile, InvalidMetadataElement
from log import Loggable
from pure import format_length
import pure as mmp
# emf related stuff
from ..metadata.process import global_reader
from ..metadata import definitions as defs
defs.load_definitions()
"""
list of supported easy tags in mutagen version 1.20
['albumartistsort', 'musicbrainz_albumstatus', 'lyricist', 'releasecountry',
'date', 'performer', 'musicbrainz_albumartistid', 'composer', 'encodedby',
'tracknumber', 'musicbrainz_albumid', 'album', 'asin', 'musicbrainz_artistid',
'mood', 'copyright', 'author', 'media', 'length', 'version', 'artistsort',
'titlesort', 'discsubtitle', 'website', 'musicip_fingerprint', 'conductor',
'compilation', 'barcode', 'performer:*', 'composersort', 'musicbrainz_discid',
'musicbrainz_albumtype', 'genre', 'isrc', 'discnumber', 'musicbrainz_trmid',
'replaygain_*_gain', 'musicip_puid', 'artist', 'title', 'bpm',
'musicbrainz_trackid', 'arranger', 'albumsort', 'replaygain_*_peak',
'organization']
"""
airtime2mutagen = {
"MDATA_KEY_TITLE" : "title",
"MDATA_KEY_CREATOR" : "artist",
"MDATA_KEY_SOURCE" : "album",
"MDATA_KEY_GENRE" : "genre",
"MDATA_KEY_MOOD" : "mood",
"MDATA_KEY_TRACKNUMBER" : "tracknumber",
"MDATA_KEY_BPM" : "bpm",
"MDATA_KEY_LABEL" : "label",
"MDATA_KEY_COMPOSER" : "composer",
"MDATA_KEY_ENCODER" : "encodedby",
"MDATA_KEY_CONDUCTOR" : "conductor",
"MDATA_KEY_YEAR" : "date",
"MDATA_KEY_URL" : "website",
"MDATA_KEY_ISRC" : "isrc",
"MDATA_KEY_COPYRIGHT" : "copyright",
"MDATA_KEY_CUE_IN" : "cuein",
"MDATA_KEY_CUE_OUT" : "cueout",
}
#doesn't make sense for us to write these values to a track's metadata
mutagen_do_not_write = ["MDATA_KEY_CUE_IN", "MDATA_KEY_CUE_OUT"]
# Some airtime attributes are special because they must use the mutagen object
# itself to calculate the value that they need. The lambda associated with each
# key should attempt to extract the corresponding value from the mutagen object
# itself, passed as 'm'. When nothing can be extracted, the lambda should
# return some default value to be assigned anyway, or None so that the
# airtime metadata object will skip the attribute outright.
airtime_special = {
"MDATA_KEY_DURATION" :
lambda m: format_length(getattr(m.info, u'length', 0.0)),
"MDATA_KEY_BITRATE" :
lambda m: getattr(m.info, "bitrate", ''),
"MDATA_KEY_SAMPLERATE" :
lambda m: getattr(m.info, u'sample_rate', 0),
"MDATA_KEY_MIME" :
lambda m: m.mime[0] if len(m.mime) > 0 else u'',
}
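# Illustrative sketch only (the real reader lives in ..metadata.process):
# how the airtime_special table is meant to be consumed. Keys whose lambda
# returns None are skipped outright. Assumes mutagen can parse the file.
def _read_special_example(path):
    m = mutagen.File(path, easy=True)
    md = {}
    for key, extract in airtime_special.iteritems():
        value = extract(m)
        if value is not None: md[key] = value
    return md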
mutagen2airtime = dict( (v,k) for k,v in airtime2mutagen.iteritems()
if isinstance(v, str) )
truncate_table = {
'MDATA_KEY_GENRE' : 64,
'MDATA_KEY_TITLE' : 512,
'MDATA_KEY_CREATOR' : 512,
'MDATA_KEY_SOURCE' : 512,
'MDATA_KEY_MOOD' : 64,
'MDATA_KEY_LABEL' : 512,
'MDATA_KEY_COMPOSER' : 512,
'MDATA_KEY_ENCODER' : 255,
'MDATA_KEY_CONDUCTOR' : 512,
'MDATA_KEY_YEAR' : 16,
'MDATA_KEY_URL' : 512,
'MDATA_KEY_ISRC' : 512,
'MDATA_KEY_COPYRIGHT' : 512,
}
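# Hypothetical illustration of how truncate_table pairs with the helpers in
# pure.py: build a rules dict that clips each field to its column width,
# suitable for passing to mmp.apply_rules_dict.
truncate_rules = dict(
    (key, lambda v, n=n: mmp.truncate_to_length(v, n))
    for key, n in truncate_table.iteritems() )
# safe_md = mmp.apply_rules_dict(md, truncate_rules)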
class Metadata(Loggable):
# TODO : refactor the way metadata is being handled. Right now things are a
# little bit messy. Some of the handling is in m.m.pure while the rest is
# here. Also the interface is not very consistent.
@staticmethod
def fix_title(path):
# If we have no title in path we will format it
# TODO : this is very hacky so make sure to fix it
m = mutagen.File(path, easy=True)
if u'title' not in m:
new_title = unicode( mmp.no_extension_basename(path) )
m[u'title'] = new_title
m.save()
@staticmethod
def write_unsafe(path,md):
"""
Writes 'md' metadata into 'path' through mutagen. Converts all
dictionary values to strings because mutagen will not write anything
else
"""
if not os.path.exists(path): raise BadSongFile(path)
song_file = mutagen.File(path, easy=True)
exceptions = [] # for bad keys
for airtime_k, airtime_v in md.iteritems():
if airtime_k in airtime2mutagen and \
airtime_k not in mutagen_do_not_write:
# The unicode cast here is mostly for integers that need to be
# strings
if airtime_v is None: continue
try:
song_file[ airtime2mutagen[airtime_k] ] = unicode(airtime_v)
except (EasyMP4KeyError, EasyID3KeyError) as e:
exceptions.append(InvalidMetadataElement(e, airtime_k,
path))
song_file.save()
# bubble them up so that the user knows that something is wrong
for e in exceptions: raise e
def __init__(self, fpath):
# Forcing the unicode through
try : fpath = fpath.decode("utf-8")
except : pass
self.__metadata = global_reader.read_mutagen(fpath)
def is_recorded(self):
"""
returns true if the file has been created by airtime through recording
"""
return mmp.is_airtime_recorded( self.__metadata )
def extract(self):
"""
returns a copy of the metadata that was loaded when object was
constructed
"""
return copy.deepcopy(self.__metadata)
def utf8(self):
"""
Returns a unicode aware representation of the data that is compatible
with what is sent to airtime
"""
return mmp.convert_dict_value_to_utf8(self.extract())
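# Usage sketch (the path is hypothetical, and this module uses relative
# imports so it must be exercised from within the package):
#
#   md = Metadata(u'/srv/stor/imported/1/song.mp3')
#   if not md.is_recorded():
#       payload = md.utf8()   # the UTF-8 dict that gets shipped to airtime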

View File

@ -1,87 +0,0 @@
# -*- coding: utf-8 -*-
import pure as mmp
from handler import ReportHandler
from log import Loggable
from exceptions import BadSongFile
from events import OrganizeFile
from pydispatch import dispatcher
from os.path import dirname
from ..saas.thread import getsig, user
import os.path
class Organizer(ReportHandler,Loggable):
""" Organizer is responsible to to listening to OrganizeListener
events and committing the appropriate changes to the filesystem.
It does not in any interact with WatchSyncer's even when the the
WatchSyncer is a "storage directory". The "storage" directory picks
up all of its events through pyinotify. (These events are fed to it
through StoreWatchListener) """
# Commented out making this class a singleton because it's just a band aid
# for the real issue. The real issue being making multiple Organizer
# instances with pydispatch
#_instance = None
#def __new__(cls, channel, target_path, recorded_path):
#if cls._instance:
#cls._instance.channel = channel
#cls._instance.target_path = target_path
#cls._instance.recorded_path = recorded_path
#else:
#cls._instance = super(Organizer, cls).__new__( cls, channel,
#target_path, recorded_path)
#return cls._instance
def __init__(self, channel, target_path, recorded_path):
self.channel = channel
self.target_path = target_path
self.recorded_path = recorded_path
super(Organizer, self).__init__(signal=getsig(self.channel), weak=False)
def handle(self, sender, event):
""" Intercept events where a new file has been added to the
organize directory and place it in the correct path (starting
with self.target_path) """
# Only handle this event type
assert isinstance(event, OrganizeFile), \
"Organizer can only handle OrganizeFile events.Given '%s'" % event
try:
# We must select the target_path based on whether file was recorded
# by airtime or not.
# Do we need to "massage" the path using mmp.organized_path?
target_path = self.recorded_path if event.metadata.is_recorded() \
else self.target_path
# nasty hack do this properly
owner_id = mmp.owner_id(event.path)
if owner_id != -1:
target_path = os.path.join(target_path, unicode(owner_id))
mdata = event.metadata.extract()
new_path = mmp.organized_path(event.path, target_path, mdata)
# See hack in mmp.magic_move
def new_dir_watch(d):
# TODO : rewrite as return lambda : dispatcher.send(...
def cb():
dispatcher.send(signal=getsig("add_subwatch"), sender=self,
directory=d)
return cb
mmp.magic_move(event.path, new_path,
after_dir_make=new_dir_watch(dirname(new_path)))
# The reason we need to go around saving the owner in this
# backwards way is because we are unable to encode the owner id
# into the file itself so that the StoreWatchListener listener can
# detect it from the file
user().owner.add_file_owner(new_path, owner_id )
self.logger.info('Organized: "%s" into "%s"' %
(event.path, new_path))
except BadSongFile as e:
self.report_problem_file(event=event, exception=e)
# probably general error in mmp.magic.move...
except Exception as e:
self.unexpected_exception( e )
self.report_problem_file(event=event, exception=e)

View File

@ -1,40 +0,0 @@
# -*- coding: utf-8 -*-
from log import Loggable
class Owner(Loggable):
def __init__(self):
# hash: 'filepath' => owner_id
self.owners = {}
def get_owner(self,f):
""" Get the owner id of the file 'f' """
o = self.owners[f] if f in self.owners else -1
self.logger.info("Received owner for %s. Owner: %s" % (f, o))
return o
def add_file_owner(self,f,owner):
""" Associate file f with owner. If owner is -1 then do we will not record
it because -1 means there is no owner. Returns True if f is being stored
after the function. False otherwise. """
if owner == -1: return False
if f in self.owners:
if owner != self.owners[f]: # check for fishiness
self.logger.info("Warning ownership of file '%s' changed from '%d' to '%d'"
% (f, self.owners[f], owner))
else: return True
self.owners[f] = owner
return True
def has_owner(self,f):
""" True if f is owned by somebody. False otherwise. """
return f in self.owners
def remove_file_owner(self,f):
""" Try and delete any association made with file f. Returns true if
the the association was actually deleted. False otherwise. """
if f in self.owners:
del self.owners[f]
return True
else: return False
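# Minimal usage sketch with a hypothetical path and owner id; note that
# get_owner() logs through Loggable, so logging should be configured first.
#
#   o = Owner()
#   o.add_file_owner(u'/stor/imported/1/a.mp3', 42)
#   assert o.get_owner(u'/stor/imported/1/a.mp3') == 42
#   o.remove_file_owner(u'/stor/imported/1/a.mp3')
#   assert o.get_owner(u'/stor/imported/1/a.mp3') == -1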

View File

@ -1,508 +0,0 @@
# -*- coding: utf-8 -*-
import copy
from subprocess import Popen, PIPE
import subprocess
import os
import math
import wave
import contextlib
import shutil, pipes
import re
import sys
import stat
import hashlib
import locale
import operator as op
from os.path import normpath
from itertools import takewhile
# you need to import reduce in python 3
try: from functools import reduce
except: pass
from configobj import ConfigObj
from exceptions import FailedToSetLocale, FailedToCreateDir
supported_extensions = [u"mp3", u"ogg", u"oga", u"flac", u"wav",
u'm4a', u'mp4', 'opus']
unicode_unknown = u'unknown'
path_md = ['MDATA_KEY_TITLE', 'MDATA_KEY_CREATOR', 'MDATA_KEY_SOURCE',
'MDATA_KEY_TRACKNUMBER', 'MDATA_KEY_BITRATE']
class LazyProperty(object):
"""
meant to be used for lazy evaluation of an object attribute.
property should represent non-mutable data, as it replaces itself.
"""
def __init__(self,fget):
self.fget = fget
self.func_name = fget.__name__
def __get__(self,obj,cls):
if obj is None: return None
value = self.fget(obj)
setattr(obj,self.func_name,value)
return value
class IncludeOnly(object):
"""
A little decorator to help listeners only be called on extensions
they support
NOTE: this decorator only works on methods and not functions. Maybe
fix this?
"""
def __init__(self, *deco_args):
self.exts = set([])
for arg in deco_args:
if isinstance(arg,str): self.exts.add(arg)
elif hasattr(arg, '__iter__'):
for x in arg: self.exts.add(x)
def __call__(self, func):
def _wrap(moi, event, *args, **kwargs):
ext = extension(event.pathname)
# Checking for emptiness b/c we don't want to skip directories
if (ext.lower() in self.exts) or event.dir:
return func(moi, event, *args, **kwargs)
return _wrap
def partition(f, alist):
"""
Partition is very similar to filter, except that it also returns the
elements for which f returns false, as the second element of a tuple.
>>> partition(lambda x : x > 3, [1,2,3,4,5,6])
([4, 5, 6], [1, 2, 3])
"""
return (filter(f, alist), filter(lambda x: not f(x), alist))
def is_file_supported(path):
"""
Checks if a file's path (filename) extension matches the kinds that we
support. Note that this is case insensitive.
>>> is_file_supported("test.mp3")
True
>>> is_file_supported("/bs/path/test.mP3")
True
>>> is_file_supported("test.txt")
False
"""
return extension(path).lower() in supported_extensions
# TODO : In the future we would like a better way to find out whether a show
# has been recorded
def is_airtime_recorded(md):
""" Takes a metadata dictionary and returns True if it belongs to a
file that was recorded by Airtime. """
if not 'MDATA_KEY_CREATOR' in md: return False
return md['MDATA_KEY_CREATOR'] == u'Airtime Show Recorder'
def read_wave_duration(path):
""" Read the length of .wav file (mutagen does not handle this) """
with contextlib.closing(wave.open(path,'r')) as f:
frames = f.getnframes()
rate = f.getframerate()
duration = frames/float(rate)
return duration
def clean_empty_dirs(path):
""" walks path and deletes every empty directory it finds """
# TODO : test this function
if path.endswith('/'): clean_empty_dirs(path[0:-1])
else:
for root, dirs, _ in os.walk(path, topdown=False):
full_paths = ( os.path.join(root, d) for d in dirs )
for d in full_paths:
if os.path.exists(d):
#Try block avoids a race condition where a file is added AFTER listdir
#is run but before removedirs. (Dir is not empty and removedirs throws
#an exception in that case then.)
try:
if not os.listdir(d): os.rmdir(d)
except OSError:
pass
def extension(path):
"""
return extension of path, empty string otherwise. Prefer to return empty
string instead of None because of bad handling of "maybe" types in python.
I.e. interpreter won't enforce None checks on the programmer
>>> extension("testing.php")
'php'
>>> extension("a.b.c.d.php")
'php'
>>> extension('/no/extension')
''
>>> extension('/path/extension.ml')
'ml'
"""
ext = path.split(".")
if len(ext) < 2: return ""
else: return ext[-1]
def no_extension_basename(path):
"""
returns the extensionless basename of a filepath
>>> no_extension_basename("/home/test.mp3")
u'test'
>>> no_extension_basename("/home/test")
u'test'
>>> no_extension_basename('blah.ml')
u'blah'
>>> no_extension_basename('a.b.c.d.mp3')
u'a.b.c.d'
"""
base = unicode(os.path.basename(path))
if extension(base) == "": return base
else: return '.'.join(base.split(".")[0:-1])
def walk_supported(directory, clean_empties=False):
""" A small generator wrapper around os.walk to only give us files
that support the extensions we are considering. When clean_empties
is True we recursively delete empty directories left over in
directory after the walk. """
if directory is None:
return
for root, dirs, files in os.walk(directory):
full_paths = ( os.path.join(root, name) for name in files
if is_file_supported(name) )
for fp in full_paths: yield fp
if clean_empties: clean_empty_dirs(directory)
def file_locked(path):
#Capture stderr to avoid polluting py-interpreter.log
proc = Popen(["lsof", path], stdout=PIPE, stderr=PIPE)
out = proc.communicate()[0].strip('\r\n')
return bool(out)
def magic_move(old, new, after_dir_make=lambda : None):
""" Moves path old to new and constructs the necessary to
directories for new along the way """
new_dir = os.path.dirname(new)
if not os.path.exists(new_dir): os.makedirs(new_dir)
# We need this crusty hack because anytime a directory is created we must
# re-add it with add_watch otherwise putting files in it will not trigger
# pyinotify events
after_dir_make()
shutil.move(old,new)
def move_to_dir(dir_path,file_path):
""" moves a file at file_path into dir_path/basename(filename) """
bs = os.path.basename(file_path)
magic_move(file_path, os.path.join(dir_path, bs))
def apply_rules_dict(d, rules):
""" Consumes a dictionary of rules that maps some keys to lambdas
which it applies to every matching element in d and returns a new
dictionary with the rules applied. If a rule returns none then it's
not applied """
new_d = copy.deepcopy(d)
for k, rule in rules.iteritems():
if k in d:
new_val = rule(d[k])
if new_val is not None: new_d[k] = new_val
return new_d
def default_to_f(dictionary, keys, default, condition):
new_d = copy.deepcopy(dictionary)
for k in keys:
if condition(dictionary=new_d, key=k): new_d[k] = default
return new_d
def default_to(dictionary, keys, default):
""" Checks if the list of keys 'keys' exists in 'dictionary'. If
not then it returns a new dictionary with all those missing keys
defaults to 'default' """
cnd = lambda dictionary, key: key not in dictionary
return default_to_f(dictionary, keys, default, cnd)
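# Quick illustration of the two helpers above (values chosen arbitrarily):
#
#   >>> apply_rules_dict({'a': 1, 'b': 2}, {'a': lambda x: x + 10})
#   {'a': 11, 'b': 2}
#   >>> default_to({'a': 1}, ['a', 'b'], u'unknown')
#   {'a': 1, 'b': u'unknown'}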
def remove_whitespace(dictionary):
""" Remove values that empty whitespace in the dictionary """
nd = copy.deepcopy(dictionary)
bad_keys = []
for k,v in nd.iteritems():
if hasattr(v,'strip'):
stripped = v.strip()
# ghetto and maybe unnecessary
if stripped == '' or stripped == u'': bad_keys.append(k)
for bad_key in bad_keys: del nd[bad_key]
return nd
def parse_int(s):
# TODO : this function isn't used anywhere yet but it may be useful for emf
"""
Tries very hard to extract a leading run of digits from s as a string.
Returns None when it fails.
>>> parse_int("123")
'123'
>>> parse_int("123saf")
'123'
>>> parse_int("asdf") is None
True
"""
if s.isdigit(): return s
else:
try : return str(reduce(op.add, takewhile(lambda x: x.isdigit(), s)))
except: return None
def organized_path(old_path, root_path, orig_md):
"""
old_path - path where file is stored at the moment <= maybe not necessary?
root_path - the parent directory where all organized files go
orig_md - original meta data of the file as given by mutagen AFTER being
normalized
return value: new file path
"""
filepath = None
ext = extension(old_path)
def default_f(dictionary, key):
if key in dictionary: return len(str(dictionary[key])) == 0
else: return True
# We set some metadata elements to a default "unknown" value because we use
# these fields to create a path, hence they cannot be empty. Here "normal"
# means normalized only for the organized path.
# MDATA_KEY_BITRATE is in bits/second, e.g. 256000, which we want to turn
# into '256kbps'.
# Some metadata elements cannot be empty, hence we default them to some
# value just so that we can create a correct path.
normal_md = default_to_f(orig_md, path_md, unicode_unknown, default_f)
try:
formatted = str(int(normal_md['MDATA_KEY_BITRATE']) / 1000)
normal_md['MDATA_KEY_BITRATE'] = formatted + 'kbps'
except:
normal_md['MDATA_KEY_BITRATE'] = unicode_unknown
if is_airtime_recorded(normal_md):
# normal_md['MDATA_KEY_TITLE'] = 'show_name-yyyy-mm-dd-hh:mm:ss'
r = "(?P<show>.+)-(?P<date>\d+-\d+-\d+)-(?P<time>\d+:\d+:\d+)$"
title_re = re.match(r, normal_md['MDATA_KEY_TITLE'])
show_name = title_re.group('show')
#date = title_re.group('date')
yyyy, mm, dd = normal_md['MDATA_KEY_YEAR'].split('-',2)
fname_base = '%s-%s-%s.%s' % \
(title_re.group('time'), show_name,
normal_md['MDATA_KEY_BITRATE'], ext)
filepath = os.path.join(root_path, yyyy, mm, dd, fname_base)
elif len(normal_md['MDATA_KEY_TRACKNUMBER']) == 0:
fname = u'%s-%s.%s' % (normal_md['MDATA_KEY_TITLE'],
normal_md['MDATA_KEY_BITRATE'], ext)
path = os.path.join(root_path, normal_md['MDATA_KEY_CREATOR'],
normal_md['MDATA_KEY_SOURCE'] )
filepath = os.path.join(path, fname)
else: # The "normal" case
fname = u'%s-%s-%s.%s' % (normal_md['MDATA_KEY_TRACKNUMBER'],
normal_md['MDATA_KEY_TITLE'],
normal_md['MDATA_KEY_BITRATE'], ext)
path = os.path.join(root_path, normal_md['MDATA_KEY_CREATOR'],
normal_md['MDATA_KEY_SOURCE'])
filepath = os.path.join(path, fname)
return filepath
# TODO : Get rid of this function and every one of its uses. We no longer use
# the md5 signature of a song for anything
def file_md5(path,max_length=100):
""" Get md5 of file path (if it exists). Use only max_length
characters to save time and memory. Pass max_length=-1 to read the
whole file (like in mm1) """
if os.path.exists(path):
with open(path, 'rb') as f:
m = hashlib.md5()
# If a file is shorter than "max_length" python will just return
# whatever it was able to read which is acceptable behaviour
m.update(f.read(max_length))
return m.hexdigest()
else: raise ValueError("'%s' must exist to find its md5" % path)
def encode_to(obj, encoding='utf-8'):
# TODO : add documentation + unit tests for this function
if isinstance(obj, unicode): obj = obj.encode(encoding)
return obj
def convert_dict_value_to_utf8(md):
""" formats a dictionary to send as a request to api client """
return dict([(item[0], encode_to(item[1], "utf-8")) for item in md.items()])
def get_system_locale(locale_path='/etc/default/locale'):
""" Returns the configuration object for the system's default
locale. Normally requires root access. """
if os.path.exists(locale_path):
try:
config = ConfigObj(locale_path)
return config
except Exception as e: raise FailedToSetLocale(locale_path,cause=e)
else: raise ValueError("locale path '%s' does not exist. \
permissions issue?" % locale_path)
def configure_locale(config):
""" sets the locale according to the system's locale. """
current_locale = locale.getlocale()
if current_locale[1] is None:
default_locale = locale.getdefaultlocale()
if default_locale[1] is None:
lang = config.get('LANG')
new_locale = lang
else: new_locale = default_locale
locale.setlocale(locale.LC_ALL, new_locale)
reload(sys)
sys.setdefaultencoding("UTF-8")
current_locale_encoding = locale.getlocale()[1].lower()
if current_locale_encoding not in ['utf-8', 'utf8']:
raise FailedToSetLocale()
def fondle(path,times=None):
# TODO : write unit tests for this
""" touch a file to change the last modified date. Beware of calling
this function on the same file from multiple threads. """
with file(path, 'a'): os.utime(path, times)
def last_modified(path):
""" return the time of the last time mm2 was ran. path refers to the
index file whose date modified attribute contains this information.
In the case when the file does not exist we set this time 0 so that
any files on the filesystem were modified after it """
if os.path.exists(path): return os.path.getmtime(path)
else: return 0
def expand_storage(store):
""" A storage directory usually consists of 4 different
subdirectories. This function returns their paths """
store = os.path.normpath(store)
return {
'organize' : os.path.join(store, 'organize'),
'recorded' : os.path.join(store, 'recorded'),
'problem_files' : os.path.join(store, 'problem_files'),
'imported' : os.path.join(store, 'imported'),
}
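# Illustration with a hypothetical store path (key order is arbitrary):
#
#   expand_storage('/srv/stor') == {
#       'organize'      : '/srv/stor/organize',
#       'recorded'      : '/srv/stor/recorded',
#       'problem_files' : '/srv/stor/problem_files',
#       'imported'      : '/srv/stor/imported',
#   }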
def create_dir(path):
""" will try and make sure that path exists at all costs. raises an
exception if it fails at this task. """
if not os.path.exists(path):
try : os.makedirs(path)
except Exception as e : raise FailedToCreateDir(path, e)
else: # if no error occurs we still need to check that dir exists
if not os.path.exists(path): raise FailedToCreateDir(path)
def sub_path(directory,f):
"""
returns true if 'f' is in the tree of files under directory.
NOTE: does not look at any symlinks or anything like that, just looks at
the paths.
"""
normalized = normpath(directory)
common = os.path.commonprefix([ normalized, normpath(f) ])
return common == normalized
def owner_id(original_path):
""" Given 'original_path' return the file name of the of
'identifier' file. return the id that is contained in it. If no file
is found or nothing is read then -1 is returned. File is deleted
after the number has been read """
fname = "%s.identifier" % original_path
owner_id = -1
try:
f = open(fname)
for line in f:
owner_id = int(line)
break
f.close()
except Exception: pass
else:
try: os.unlink(fname)
except Exception: raise
return owner_id
def file_playable(pathname):
""" Returns True if 'pathname' is playable by liquidsoap. False
otherwise. """
#currently disabled because this confuses inotify....
return True
#remove all write permissions. This is due to stupid taglib library bug
#where all files are opened in write mode. The only way around this is to
#modify the file permissions
os.chmod(pathname, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
# when there is a single apostrophe inside of a string quoted by
# apostrophes, we can only escape it by replacing that apostrophe with
# '\''. This breaks the string into two, and inserts an escaped
# single quote in between them.
command = ("airtime-liquidsoap -c 'output.dummy" + \
"(audio_to_stereo(single(\"%s\")))' > /dev/null 2>&1") % \
pathname.replace("'", "'\\''")
return_code = subprocess.call(command, shell=True)
#change/restore permissions to acceptable
os.chmod(pathname, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | \
stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
return (return_code == 0)
def toposort(data):
"""
Topological sort on 'data' where 'data' is of the form:
data = {
'one' : set(['two','three']),
'two' : set(['three']),
'three' : set()
}
"""
for k, v in data.items():
v.discard(k) # Ignore self dependencies
extra_items_in_deps = reduce(set.union, data.values()) - set(data.keys())
data.update(dict((item,set()) for item in extra_items_in_deps))
while True:
ordered = set(item for item,dep in data.items() if not dep)
if not ordered: break
for e in sorted(ordered): yield e
data = dict((item,(dep - ordered)) for item,dep in data.items()
if item not in ordered)
assert not data, "A cyclic dependency exists amongst %r" % data
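# Worked example: 'three' has no dependencies so it is yielded first.
#
#   >>> list(toposort({'one': set(['two', 'three']),
#   ...                'two': set(['three']),
#   ...                'three': set()}))
#   ['three', 'two', 'one']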
def truncate_to_length(item, length):
""" Truncates 'item' to 'length' """
if isinstance(item, int): item = str(item)
if isinstance(item, basestring):
if len(item) > length: return item[0:length]
else: return item
def truncate_to_value(item, value):
""" Truncates 'item' to 'value' """
if isinstance(item, basestring): item = int(item)
if isinstance(item, int):
item = abs(item)
if item > value: item = value
return str(item)
def format_length(mutagen_length):
""" Convert mutagen length to airtime length """
if convert_format(mutagen_length):
t = float(mutagen_length)
h = int(math.floor(t / 3600))
t = t % 3600
m = int(math.floor(t / 60))
s = t % 60
# will be ss.uuu
s = str('{0:f}'.format(s))
seconds = s.split(".")
s = seconds[0]
# have a maximum of 6 subseconds.
if len(seconds[1]) >= 6: ss = seconds[1][0:6]
else: ss = seconds[1][0:]
return "%s:%s:%s.%s" % (h, m, s, ss)
def convert_format(value):
# True when 'value' is not already in hh:mm:ss(.uuu) form and still
# needs to be converted
regCompiled = re.compile("^[0-9][0-9]:[0-9][0-9]:[0-9][0-9](\.\d+)?$")
return regCompiled.search(str(value)) is None
if __name__ == '__main__':
import doctest
doctest.testmod()

View File

@ -1,56 +0,0 @@
# -*- coding: utf-8 -*-
from exceptions import BadSongFile
from log import Loggable
from ..saas.thread import apc, InstanceInheritingThread
class ThreadedRequestSync(InstanceInheritingThread, Loggable):
def __init__(self, rs):
super(ThreadedRequestSync, self).__init__()
self.rs = rs
self.daemon = True
self.start()
def run(self):
self.rs.run_request()
class RequestSync(Loggable):
""" This class is responsible for making the api call to send a
request to airtime. In the process it packs the requests and retries
for some number of times """
@classmethod
def create_with_api_client(cls, watcher, requests):
apiclient = apc()
self = cls(watcher, requests, apiclient)
return self
def __init__(self, watcher, requests, apiclient):
self.watcher = watcher
self.requests = requests
self.apiclient = apiclient
def run_request(self):
self.logger.info("Attempting request with %d items." %
len(self.requests))
packed_requests = []
for request_event in self.requests:
try:
for request in request_event.safe_pack():
if isinstance(request, BadSongFile):
self.logger.info("Bad song file: '%s'" % request.path)
else: packed_requests.append(request)
except Exception as e:
self.unexpected_exception( e )
if hasattr(request_event, 'path'):
self.logger.info("Possibly related to path: '%s'" %
request_event.path)
try: self.apiclient.send_media_monitor_requests( packed_requests )
# most likely we did not get json response as we expected
except ValueError:
self.logger.info("ApiController.php probably crashed, we \
diagnose this from the fact that it did not return \
valid json")
except Exception as e: self.unexpected_exception(e)
else: self.logger.info("Request was successful")
self.watcher.flag_done() # poor man's condition variable
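# Usage sketch ('watcher' and 'events' are assumed to exist, and the
# calling thread must carry an AirtimeInstance so that apc() resolves):
#
#   rs = RequestSync.create_with_api_client(watcher=watcher, requests=events)
#   ThreadedRequestSync(rs)   # packs and sends on a daemon thread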

View File

@ -1,108 +0,0 @@
# -*- coding: utf-8 -*-
import os
from log import Loggable
from exceptions import NoDirectoryInAirtime
from ..saas.thread import user
from os.path import normpath, join
import pure as mmp
class AirtimeDB(Loggable):
def __init__(self, apc, reload_now=True):
self.apc = apc
if reload_now: self.reload_directories()
def reload_directories(self):
""" this is the 'real' constructor, should be called if you ever
want the class reinitialized. there's not much point to doing
it yourself however, you should just create a new AirtimeDB
instance. """
saas = user().root_path
try:
# dirs_setup is a dict with keys:
# u'watched_dirs' and u'stor' which point to lists of corresponding
# dirs
dirs_setup = self.apc.setup_media_monitor()
dirs_setup[u'stor'] = normpath( join(saas, dirs_setup[u'stor'] ) )
dirs_setup[u'watched_dirs'] = map(lambda p: normpath(join(saas,p)),
dirs_setup[u'watched_dirs'])
dirs_with_id = dict([ (k,normpath(v)) for k,v in
self.apc.list_all_watched_dirs()['dirs'].iteritems() ])
self.id_to_dir = dirs_with_id
self.dir_to_id = dict([ (v,k) for k,v in dirs_with_id.iteritems() ])
self.base_storage = dirs_setup[u'stor']
self.storage_paths = mmp.expand_storage( self.base_storage )
self.base_id = self.dir_to_id[self.base_storage]
# hack to get around annoying schema of airtime db
self.dir_to_id[ self.recorded_path() ] = self.base_id
self.dir_to_id[ self.import_path() ] = self.base_id
# We don't know from the x_to_y dict which directory is watched or
# store...
self.watched_directories = set([ os.path.normpath(p) for p in
dirs_setup[u'watched_dirs'] ])
except Exception, e:
self.logger.info(str(e))
def to_id(self, directory):
""" directory path -> id """
return self.dir_to_id[ directory ]
def to_directory(self, dir_id):
""" id -> directory path """
return self.id_to_dir[ dir_id ]
def storage_path(self) : return self.base_storage
def organize_path(self) : return self.storage_paths['organize']
def problem_path(self) : return self.storage_paths['problem_files']
def import_path(self) : return self.storage_paths['imported']
def recorded_path(self) : return self.storage_paths['recorded']
def list_watched(self):
""" returns all watched directories as a list """
return list(self.watched_directories)
def list_storable_paths(self):
""" returns a list of all the watched directories in the
database. (Includes the imported directory and the recorded
directory) """
l = self.list_watched()
l.append(self.import_path())
l.append(self.recorded_path())
return l
def dir_id_get_files(self, dir_id, all_files=True):
""" Get all files in a directory with id dir_id """
base_dir = self.id_to_dir[ dir_id ]
return set(( join(base_dir,p) for p in
self.apc.list_all_db_files( dir_id, all_files ) ))
def directory_get_files(self, directory, all_files=True):
""" returns all the files(recursively) in a directory. a
directory is an "actual" directory path instead of its id. This
is super hacky because you create one request for the recorded
directory and one for the imported directory even though they're
the same dir in the database so you get files for both dirs in 1
request... """
normal_dir = os.path.normpath(unicode(directory))
if normal_dir not in self.dir_to_id:
raise NoDirectoryInAirtime( normal_dir, self.dir_to_id )
all_files = self.dir_id_get_files( self.dir_to_id[normal_dir],
all_files )
if normal_dir == self.recorded_path():
all_files = [ p for p in all_files if
mmp.sub_path( self.recorded_path(), p ) ]
elif normal_dir == self.import_path():
all_files = [ p for p in all_files if
mmp.sub_path( self.import_path(), p ) ]
elif normal_dir == self.storage_path():
self.logger.info("Warning, you're getting all files in '%s' which \
includes imported + record" % normal_dir)
return set(all_files)

View File

@ -1,51 +0,0 @@
# -*- coding: utf-8 -*-
import pure as mmp
import os
from log import Loggable
from exceptions import CouldNotCreateIndexFile
from ..saas.thread import InstanceInheritingThread
class Toucher(Loggable):
"""
Class responsible for touching a file at a certain path when called
"""
def __init__(self,path):
self.path = path
def __call__(self):
try: mmp.fondle(self.path)
except Exception as e:
self.logger.info("Failed to touch file: '%s'. Logging exception." %
self.path)
self.logger.info(str(e))
import time
class RepeatTimer(InstanceInheritingThread):
def __init__(self, interval, callable, *args, **kwargs):
super(RepeatTimer, self).__init__()
self.interval = interval
self.callable = callable
self.args = args
self.kwargs = kwargs
def run(self):
while True:
time.sleep(self.interval)
self.callable(*self.args, **self.kwargs)
class ToucherThread(Loggable):
""" Creates a thread that touches a file 'path' every 'interval'
seconds """
def __init__(self, path, interval=5):
if not os.path.exists(path):
try:
# TODO : rewrite using with?
f = open(path,'w')
f.write('')
f.close()
except Exception as e:
raise CouldNotCreateIndexFile(path,e)
cb = Toucher(path)
t = RepeatTimer(interval, cb)
t.daemon = True # thread terminates once process is done
t.start()
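# Hypothetical usage: keep an index file's mtime fresh so that
# last_modified() in pure.py reflects when mm2 last ran. Must be called
# from a thread that carries an AirtimeInstance, since RepeatTimer is an
# InstanceInheritingThread.
#
#   ToucherThread('/var/tmp/airtime/mm2.index', interval=5)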

View File

@ -1,166 +0,0 @@
# -*- coding: utf-8 -*-
import time
import copy
from handler import ReportHandler
from log import Loggable
from exceptions import BadSongFile
from eventcontractor import EventContractor
from events import EventProxy
from request import ThreadedRequestSync, RequestSync
from ..saas.thread import InstanceInheritingThread, getsig
class TimeoutWatcher(InstanceInheritingThread,Loggable):
"""
The job of this thread is to keep an eye on WatchSyncer and force a
request whenever the pending requests go over the timeout
"""
def __init__(self, watcher, timeout=5):
self.logger.info("Created timeout thread...")
super(TimeoutWatcher, self).__init__()
self.watcher = watcher
self.timeout = timeout
def run(self):
# We try to launch a new thread every self.timeout seconds
# so that the people do not have to wait for the queue to fill up
while True:
time.sleep(self.timeout)
# If there are any requests left we launch them. Note that this
# isn't strictly necessary since RequestSync threads already
# chain themselves
if self.watcher.requests_in_queue():
self.logger.info("We have %d requests waiting to be launched" %
self.watcher.requests_left_count())
self.watcher.request_do()
# Same for events, this behaviour is mandatory however.
if self.watcher.events_in_queue():
self.logger.info("We have %d events that are unflushed" %
self.watcher.events_left_count())
self.watcher.flush_events()
class WatchSyncer(ReportHandler,Loggable):
def __init__(self, signal, chunking_number = 100, timeout=15):
self.timeout = float(timeout)
self.chunking_number = int(chunking_number)
self.request_running = False
self.__current_thread = None
self.__requests = []
self.contractor = EventContractor()
self.__reset_queue()
tc = TimeoutWatcher(self, self.timeout)
tc.daemon = True
tc.start()
super(WatchSyncer, self).__init__(signal=getsig(signal))
def handle(self, sender, event):
"""
We implement this abstract method from ReportHandler
"""
if hasattr(event, 'pack'):
# We push this event into queue
self.logger.info("Received event '%s'. Path: '%s'" % \
( event.__class__.__name__,
getattr(event,'path','No path exists') ))
try:
# If there is a strange bug anywhere in the code the next line
# should be a suspect
ev = EventProxy(event)
if self.contractor.register(ev): self.push_queue(ev)
#self.push_queue( event )
except BadSongFile as e:
self.fatal_exception("Received bas song file '%s'" % e.path, e)
except Exception as e: self.unexpected_exception(e)
else:
self.logger.info("Received event that does not implement packing.\
Printing its representation:")
self.logger.info( repr(event) )
def requests_left_count(self):
"""
returns the number of requests left in the queue. requests are
functions that create RequestSync threads
"""
return len(self.__requests)
def events_left_count(self):
"""
Returns the number of events left in the queue to create a request
"""
return len(self.__queue)
def push_queue(self, elem):
"""
Add 'elem' to the event queue and launch a request if we are
over the chunking number
"""
self.logger.info("Added event into queue")
if self.events_left_count() >= self.chunking_number:
self.push_request()
self.request_do() # Launch the request if nothing is running
self.__queue.append(elem)
def flush_events(self):
"""
Force flush the current events held in the queue
"""
self.logger.info("Force flushing events...")
self.push_request()
self.request_do()
def events_in_queue(self):
"""
returns true if there are events in the queue that haven't been
processed yet
"""
return len(self.__queue) > 0
def requests_in_queue(self):
"""
Returns true if there are any requests in the queue. False otherwise.
"""
return len(self.__requests) > 0
def flag_done(self):
"""
called by request thread when it finishes operating
"""
self.request_running = False
self.__current_thread = None
# This call might not be necessary but we would like to get the
# ball running with the requests as soon as possible
if self.requests_in_queue() > 0: self.request_do()
def request_do(self):
"""
launches a request thread only if one is not running right now
"""
if not self.request_running:
self.request_running = True
self.__requests.pop()()
def push_request(self):
"""
Create a request from the current events in the queue and schedule it
"""
self.logger.info("WatchSyncer : Unleashing request")
# want to do request asyncly and empty the queue
requests = copy.copy(self.__queue)
def launch_request():
# Need shallow copy here
t = ThreadedRequestSync( RequestSync.create_with_api_client(
watcher=self, requests=requests) )
self.__current_thread = t
self.__requests.append(launch_request)
self.__reset_queue()
def __reset_queue(self): self.__queue = []
def __del__(self):
#this destructor is completely untested and it's unclear whether
#it's even doing anything useful. consider removing it
if self.events_in_queue():
self.logger.warn("Terminating with events still in the queue...")
if self.requests_in_queue():
self.logger.warn("Terminating with http requests still pending...")

View File

@ -1,73 +0,0 @@
import os
from os.path import join, basename, dirname
from ..monitor.exceptions import NoConfigFile
from ..monitor.pure import LazyProperty
from ..monitor.config import MMConfig
from ..monitor.owners import Owner
from ..monitor.events import EventRegistry
from ..monitor.listeners import FileMediator
from api_clients.api_client import AirtimeApiClient
# poor man's phantom types...
class SignalString(str): pass
class AirtimeInstance(object):
""" AirtimeInstance is a class that abstracts away every airtime
instance by providing all the necessary objects required to interact
with the instance. ApiClient, configs, root_directory """
@classmethod
def root_make(cls, name, root):
cfg = {
'api_client' : join(root, 'etc/airtime/api_client.cfg'),
'media_monitor' : join(root, 'etc/airtime/airtime.conf'),
}
return cls(name, root, cfg)
def __init__(self,name, root_path, config_paths):
""" name is an internal name only """
for cfg in ['api_client','media_monitor']:
if cfg not in config_paths: raise NoConfigFile(config_paths)
elif not os.path.exists(config_paths[cfg]):
raise NoConfigFile(config_paths[cfg])
self.name = name
self.config_paths = config_paths
self.root_path = root_path
def signal(self, sig):
if isinstance(sig, SignalString): return sig
else: return SignalString("%s_%s" % (self.name, sig))
def touch_file_path(self):
""" Get the path of the touch file for every instance """
touch_base_path = self.mm_config['media-monitor']['index_path']
touch_base_name = basename(touch_base_path)
new_base_name = self.name + touch_base_name
return join(dirname(touch_base_path), new_base_name)
def __str__(self):
return "%s,%s(%s)" % (self.name, self.root_path, self.config_paths)
@LazyProperty
def api_client(self):
return AirtimeApiClient(config_path=self.config_paths['api_client'])
@LazyProperty
def mm_config(self):
return MMConfig(self.config_paths['media_monitor'])
# I'm well aware that I'm using the service locator pattern
# instead of normal constructor injection as I should be.
# It's recommended to rewrite this using proper constructor injection
@LazyProperty
def owner(self): return Owner()
@LazyProperty
def event_registry(self): return EventRegistry()
@LazyProperty
def file_mediator(self): return FileMediator()
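# Construction sketch: root_make() fills in the stock config locations
# relative to a root, so a default install could be wired up as below
# (the instance name is illustrative).
#
#   ai = AirtimeInstance.root_make('hosted_install', '/')
#   client = ai.api_client   # constructed lazily on first access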

View File

@ -1,133 +0,0 @@
import os, sys
import logging
import logging.config
from ..monitor import pure as mmp
from ..monitor.exceptions import FailedToObtainLocale, FailedToSetLocale
from ..monitor.log import get_logger, setup_logging
from std_err_override import LogWriter
from ..saas.thread import InstanceThread, user, apc, getsig
from ..monitor.log import Loggable
from ..monitor.exceptions import CouldNotCreateIndexFile
from ..monitor.toucher import ToucherThread
from ..monitor.airtime import AirtimeNotifier, AirtimeMessageReceiver
from ..monitor.watchersyncer import WatchSyncer
from ..monitor.eventdrainer import EventDrainer
from ..monitor.manager import Manager
from ..monitor.syncdb import AirtimeDB
from airtimeinstance import AirtimeInstance
class MM2(InstanceThread, Loggable):
def index_create(self, index_create_attempt=False):
config = user().mm_config
if not index_create_attempt:
if not os.path.exists(config['media-monitor']['index_path']):
self.logger.info("Attempting to create index file:...")
try:
with open(config['media-monitor']['index_path'], 'w') as f: f.write(" ")
except Exception as e:
self.logger.info("Failed to create index file with exception: %s" \
% str(e))
else:
self.logger.info("Created index file, reloading configuration:")
self.index_create(index_create_attempt=True)
else:
self.logger.info("Already tried to create index. Will not try again ")
if not os.path.exists(config['media-monitor']['index_path']):
raise CouldNotCreateIndexFile(config['media-monitor']['index_path'])
def run(self):
self.index_create()
manager = Manager()
apiclient = apc()
config = user().mm_config
WatchSyncer(signal=getsig('watch'),
chunking_number=config['media-monitor']['chunking_number'],
timeout=config['media-monitor']['request_max_wait'])
airtime_receiver = AirtimeMessageReceiver(config,manager)
airtime_notifier = AirtimeNotifier(config, airtime_receiver)
adb = AirtimeDB(apiclient)
store = {
u'stor' : adb.storage_path(),
u'watched_dirs' : adb.list_watched(),
}
self.logger.info("initializing mm with directories: %s" % str(store))
self.logger.info(
"Initing with the following airtime response:%s" % str(store))
airtime_receiver.change_storage({ 'directory':store[u'stor'] })
for watch_dir in store[u'watched_dirs']:
if not os.path.exists(watch_dir):
# Create the watch_directory here
try: os.makedirs(watch_dir)
except Exception:
self.logger.error("Could not create watch directory: '%s' \
(given from the database)." % watch_dir)
if os.path.exists(watch_dir):
airtime_receiver.new_watch({ 'directory':watch_dir }, restart=True)
else: self.logger.info("Failed to add watch on %s" % str(watch_dir))
EventDrainer(airtime_notifier,
interval=float(config['media-monitor']['rmq_event_wait']))
# Launch the toucher that updates the last time when the script was
# ran every n seconds.
# TODO : verify that this does not interfere with bootstrapping because the
# toucher thread might update the last_ran variable too fast
ToucherThread(path=user().touch_file_path(),
interval=int(config['media-monitor']['touch_interval']))
success = False
while not success:
try:
apiclient.register_component('media-monitor')
success = True
except Exception, e:
self.logger.error(str(e))
import time
time.sleep(10)
manager.loop()
def launch_instance(name, root, global_cfg):
cfg = {
'api_client' : global_cfg,
'media_monitor' : global_cfg,
}
ai = AirtimeInstance(name, root, cfg)
MM2(ai).start()
def setup_global(log):
""" setup unicode and other stuff """
log.info("Attempting to set the locale...")
try: mmp.configure_locale(mmp.get_system_locale())
except FailedToSetLocale as e:
log.info("Failed to set the locale...")
sys.exit(1)
except FailedToObtainLocale as e:
log.info("Failed to obtain the locale form the default path: \
'/etc/default/locale'")
sys.exit(1)
except Exception as e:
log.info("Failed to set the locale for unknown reason. \
Logging exception.")
log.info(str(e))
def setup_logger(log_config, logpath):
logging.config.fileConfig(log_config)
#need to wait for Python 2.7 for this..
#logging.captureWarnings(True)
logger = logging.getLogger()
LogWriter.override_std_err(logger)
logfile = unicode(logpath)
setup_logging(logfile)
log = get_logger()
return log

View File

@ -1,28 +0,0 @@
import threading
class UserlessThread(Exception):
def __str__(self):
return "Current thread: %s is not an instance of InstanceThread \
of InstanceInheritingThread" % str(threading.current_thread())
class HasUser(object):
def user(self): return self._user
def assign_user(self): self._user = threading.current_thread().user()
class InstanceThread(threading.Thread, HasUser):
def __init__(self,user, *args, **kwargs):
super(InstanceThread, self).__init__(*args, **kwargs)
self._user = user
class InstanceInheritingThread(threading.Thread, HasUser):
def __init__(self, *args, **kwargs):
self.assign_user()
super(InstanceInheritingThread, self).__init__(*args, **kwargs)
def user():
try: return threading.current_thread().user()
except AttributeError: raise UserlessThread()
def apc(): return user().api_client
def getsig(s): return user().signal(s)
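# Sketch of the intended pattern: an InstanceThread is constructed with an
# AirtimeInstance as its user, and any InstanceInheritingThread spawned
# from inside it picks up the same user via assign_user().
#
#   class Worker(InstanceInheritingThread):
#       def run(self):
#           print(user().name)   # same AirtimeInstance as the parent thread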

View File

@ -1,34 +0,0 @@
# -*- coding: utf-8 -*-
import sys
import os
from media.saas.launcher import setup_global, launch_instance, setup_logger
from media.monitor.config import MMConfig
def main(global_config, log_config):
""" function to run hosted install """
mm_config = MMConfig(global_config)
log = setup_logger( log_config, mm_config['media-monitor']['logpath'] )
setup_global(log)
launch_instance('hosted_install', '/', global_config)
__doc__ = """
Usage:
mm2.py --config=<path> --apiclient=<path> --log=<path>
Options:
-h --help Show this screen
--config=<path> path to mm2 config
--apiclient=<path> path to apiclient config
--log=<path> log config at <path>
"""
if __name__ == '__main__':
from docopt import docopt
args = docopt(__doc__,version="mm1.99")
for k in ['--apiclient','--config','--log']:
if not os.path.exists(args[k]):
print("'%s' must exist" % args[k])
sys.exit(0)
print("Running mm1.99")
main(args['--config'],args['--apiclient'],args['--log'])

View File

@ -1,30 +0,0 @@
#!/usr/bin/python
import sys
import os
import getopt
import pyinotify
import pprint
# a little script to test out pyinotify events
class AT(pyinotify.ProcessEvent):
def process_default(self, event):
pprint.pprint(event)
def main():
optlist, arguments = getopt.getopt(sys.argv[1:], '', ["dir="])
ldir = ""
for k,v in optlist:
if k == '--dir':
ldir = v
break
if not os.path.exists(ldir):
print("can't pyinotify dir: '%s'. it don't exist" % ldir)
sys.exit(0)
wm = pyinotify.WatchManager()
notifier = pyinotify.Notifier(wm)
print("Watching: '%s'" % ldir)
wm.add_watch(ldir, pyinotify.ALL_EVENTS, auto_add=True, rec=True, proc_fun=AT())
notifier.loop()
if __name__ == '__main__': main()

View File

@ -1,115 +0,0 @@
bin_dir = "/usr/lib/airtime/api_clients"
#############################
## Common
#############################
# Value needed to access the API
api_key = '3MP2IUR45E6KYQ01CUYK'
# Path to the base of the API
api_base = 'api'
# URL to get the version number of the server API
version_url = 'version/api_key/%%api_key%%'
#URL to register a components IP Address with the central web server
register_component = 'register-component/format/json/api_key/%%api_key%%/component/%%component%%'
# Hostname
base_url = 'localhost'
base_port = 80
#############################
## Config for Media Monitor
#############################
# URL to setup the media monitor
media_setup_url = 'media-monitor-setup/format/json/api_key/%%api_key%%'
# Tell Airtime the file id associated with a show instance.
upload_recorded = 'upload-recorded/format/json/api_key/%%api_key%%/fileid/%%fileid%%/showinstanceid/%%showinstanceid%%'
# URL to tell Airtime to update file's meta data
update_media_url = 'reload-metadata/format/json/api_key/%%api_key%%/mode/%%mode%%'
# URL to tell Airtime we want a listing of all files it knows about
list_all_db_files = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%'
# URL to tell Airtime we want a listing of all dirs it is watching (including the stor dir)
list_all_watched_dirs = 'list-all-watched-dirs/format/json/api_key/%%api_key%%'
# URL to tell Airtime we want to add watched directory
add_watched_dir = 'add-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'
# URL to tell Airtime we want to remove a watched directory
remove_watched_dir = 'remove-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'
# URL to tell Airtime we want to set the storage directory
set_storage_dir = 'set-storage-dir/format/json/api_key/%%api_key%%/path/%%path%%'
# URL to tell Airtime about file system mount change
update_fs_mount = 'update-file-system-mount/format/json/api_key/%%api_key%%'
# URL to tell Airtime that a watched directory is missing
handle_watched_dir_missing = 'handle-watched-dir-missing/format/json/api_key/%%api_key%%/dir/%%dir%%'
#############################
## Config for Recorder
#############################
# URL to get the schedule of shows set to record
show_schedule_url = 'recorded-shows/format/json/api_key/%%api_key%%'
# URL to upload the recorded show's file to Airtime
upload_file_url = 'upload-file/format/json/api_key/%%api_key%%'
# URL to commit multiple updates from media monitor at the same time
reload_metadata_group = 'reload-metadata-group/format/json/api_key/%%api_key%%'
#number of retries to upload file if connection problem
upload_retries = 3
#time to wait between attempts to upload file if connection problem (in seconds)
upload_wait = 60
################################################################################
# Uncomment *one of the sets* of values from the API clients below, and comment
# out all the others.
################################################################################
#############################
## Config for Pypo
#############################
# Schedule export path.
# %%from%% - starting date/time in the form YYYY-MM-DD-hh-mm
# %%to%% - ending date/time in the form YYYY-MM-DD-hh-mm
export_url = 'schedule/api_key/%%api_key%%'
get_media_url = 'get-media/file/%%file%%/api_key/%%api_key%%'
# Update whether a schedule group has begun playing.
update_item_url = 'notify-schedule-group-play/api_key/%%api_key%%/schedule_id/%%schedule_id%%'
# Update whether an audio clip is currently playing.
update_start_playing_url = 'notify-media-item-start-play/api_key/%%api_key%%/media_id/%%media_id%%/schedule_id/%%schedule_id%%'
# URL to tell Airtime we want to get stream setting
get_stream_setting = 'get-stream-setting/format/json/api_key/%%api_key%%/'
#URL to update liquidsoap status
update_liquidsoap_status = 'update-liquidsoap-status/format/json/api_key/%%api_key%%/msg/%%msg%%/stream_id/%%stream_id%%/boot_time/%%boot_time%%'
#URL to check live stream auth
check_live_stream_auth = 'check-live-stream-auth/format/json/api_key/%%api_key%%/username/%%username%%/password/%%password%%/djtype/%%djtype%%'
#URL to update source status
update_source_status = 'update-source-status/format/json/api_key/%%api_key%%/sourcename/%%sourcename%%/status/%%status%%'
get_bootstrap_info = 'get-bootstrap-info/format/json/api_key/%%api_key%%'
get_files_without_replay_gain = 'get-files-without-replay-gain/api_key/%%api_key%%/dir_id/%%dir_id%%'
update_replay_gain_value = 'update-replay-gain-value/api_key/%%api_key%%'

View File

@ -1,138 +0,0 @@
bin_dir = "/usr/lib/airtime/api_clients"
############################################
# RabbitMQ settings #
############################################
rabbitmq_host = 'localhost'
rabbitmq_user = 'guest'
rabbitmq_password = 'guest'
rabbitmq_vhost = '/'
############################################
# Media-Monitor preferences #
############################################
check_filesystem_events = 5 #how long to queue up events performed on the files themselves.
check_airtime_events = 30 #how long to queue metadata input from airtime.
touch_interval = 5
chunking_number = 450
request_max_wait = 3.0
rmq_event_wait = 0.5
logpath = '/home/rudi/throwaway/mm2.log'
#############################
## Common
#############################
index_path = '/home/rudi/Airtime/python_apps/media-monitor2/sample_post.txt'
# Value needed to access the API
api_key = '5LF5D953RNS3KJSHN6FF'
# Path to the base of the API
api_base = 'api'
# URL to get the version number of the server API
version_url = 'version/api_key/%%api_key%%'
#URL to register a components IP Address with the central web server
register_component = 'register-component/format/json/api_key/%%api_key%%/component/%%component%%'
# Hostname
base_url = 'localhost'
base_port = 80
#############################
## Config for Media Monitor
#############################
# URL to setup the media monitor
media_setup_url = 'media-monitor-setup/format/json/api_key/%%api_key%%'
# Tell Airtime the file id associated with a show instance.
upload_recorded = 'upload-recorded/format/json/api_key/%%api_key%%/fileid/%%fileid%%/showinstanceid/%%showinstanceid%%'
# URL to tell Airtime to update file's meta data
update_media_url = 'reload-metadata/format/json/api_key/%%api_key%%/mode/%%mode%%'
# URL to tell Airtime we want a listing of all files it knows about
list_all_db_files = 'list-all-files/format/json/api_key/%%api_key%%/dir_id/%%dir_id%%'
# URL to tell Airtime we want a listing of all dirs it is watching (including the stor dir)
list_all_watched_dirs = 'list-all-watched-dirs/format/json/api_key/%%api_key%%'
# URL to tell Airtime we want to add watched directory
add_watched_dir = 'add-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'
# URL to tell Airtime we want to remove a watched directory
remove_watched_dir = 'remove-watched-dir/format/json/api_key/%%api_key%%/path/%%path%%'
# URL to tell Airtime we want to set the storage directory
set_storage_dir = 'set-storage-dir/format/json/api_key/%%api_key%%/path/%%path%%'
# URL to tell Airtime about file system mount change
update_fs_mount = 'update-file-system-mount/format/json/api_key/%%api_key%%'
# URL to tell Airtime that a watched directory is missing
handle_watched_dir_missing = 'handle-watched-dir-missing/format/json/api_key/%%api_key%%/dir/%%dir%%'
#############################
## Config for Recorder
#############################
# URL to get the schedule of shows set to record
show_schedule_url = 'recorded-shows/format/json/api_key/%%api_key%%'
# URL to upload the recorded show's file to Airtime
upload_file_url = 'upload-file/format/json/api_key/%%api_key%%'
# URL to commit multiple updates from media monitor at the same time
reload_metadata_group = 'reload-metadata-group/format/json/api_key/%%api_key%%'
#number of retries to upload file if connection problem
upload_retries = 3
#time to wait between attempts to upload file if connection problem (in seconds)
upload_wait = 60
################################################################################
# Uncomment *one of the sets* of values from the API clients below, and comment
# out all the others.
################################################################################
#############################
## Config for Pypo
#############################
# Schedule export path.
# %%from%% - starting date/time in the form YYYY-MM-DD-hh-mm
# %%to%% - ending date/time in the form YYYY-MM-DD-hh-mm
export_url = 'schedule/api_key/%%api_key%%'
get_media_url = 'get-media/file/%%file%%/api_key/%%api_key%%'
# Update whether a schedule group has begun playing.
update_item_url = 'notify-schedule-group-play/api_key/%%api_key%%/schedule_id/%%schedule_id%%'
# Update whether an audio clip is currently playing.
update_start_playing_url = 'notify-media-item-start-play/api_key/%%api_key%%/media_id/%%media_id%%/schedule_id/%%schedule_id%%'
# URL to tell Airtime we want to get stream setting
get_stream_setting = 'get-stream-setting/format/json/api_key/%%api_key%%/'
#URL to update liquidsoap status
update_liquidsoap_status = 'update-liquidsoap-status/format/json/api_key/%%api_key%%/msg/%%msg%%/stream_id/%%stream_id%%/boot_time/%%boot_time%%'
#URL to check live stream auth
check_live_stream_auth = 'check-live-stream-auth/format/json/api_key/%%api_key%%/username/%%username%%/password/%%password%%/djtype/%%djtype%%'
#URL to update source status
update_source_status = 'update-source-status/format/json/api_key/%%api_key%%/sourcename/%%sourcename%%/status/%%status%%'
get_bootstrap_info = 'get-bootstrap-info/format/json/api_key/%%api_key%%'
get_files_without_replay_gain = 'get-files-without-replay-gain/api_key/%%api_key%%/dir_id/%%dir_id%%'
update_replay_gain_value = 'update-replay-gain-value/api_key/%%api_key%%'

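Each endpoint template above uses %%name%% placeholders that the API client substitutes before issuing the request. A minimal sketch of that expansion (the helper name and base URL are illustrative assumptions; the real client in the api_clients package also honours upload_retries and upload_wait from this file):

    import re

    def expand_url(template, base_url='http://localhost/api/', **params):
        """Replace each %%key%% placeholder with its value from params."""
        # Hypothetical helper: the real API client does this internally.
        return base_url + re.sub(r'%%(\w+)%%',
                                 lambda m: str(params[m.group(1)]),
                                 template)

    # Building the metadata-update call from update_media_url above:
    url = expand_url('reload-metadata/format/json/api_key/%%api_key%%/mode/%%mode%%',
                     api_key='SECRET', mode='modify')
    # -> http://localhost/api/reload-metadata/format/json/api_key/SECRET/mode/modify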
View File

@ -1,14 +0,0 @@
# The tests rely on a lot of absolute paths so this file
# configures all of that
music_folder = u'/home/rudi/music'
o_path = u'/home/rudi/throwaway/ACDC_-_Back_In_Black-sample-64kbps.ogg'
watch_path = u'/home/rudi/throwaway/watch/'
real_path1 = u'/home/rudi/throwaway/watch/unknown/unknown/ACDC_-_Back_In_Black-sample-64kbps-64kbps.ogg'
opath = u"/home/rudi/Airtime/python_apps/media-monitor2/tests/"
ppath = u"/home/rudi/Airtime/python_apps/media-monitor2/media/"
api_client_path = '/etc/airtime/airtime.conf'
# holdover from the time we had a special config for testing
sample_config = api_client_path
real_config = api_client_path

View File

@ -1,7 +0,0 @@
#!/usr/bin/perl
use strict;
use warnings;
foreach my $file (glob "*.py") {
system("python $file") unless $file =~ /prepare_tests.py/;
}

View File

@ -1,38 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import os
import sys
from api_clients import api_client as apc
import prepare_tests
class TestApiClient(unittest.TestCase):
def setUp(self):
test_path = prepare_tests.api_client_path
print("Running from api_config: %s" % test_path)
if not os.path.exists(test_path):
print("path for config does not exist: '%s' % test_path")
# TODO : is there a cleaner way to exit the unit testing?
sys.exit(1)
self.apc = apc.AirtimeApiClient(config_path=test_path)
self.apc.register_component("api-client-tester")
# All of the following requests should error out in some way
self.bad_requests = [
{ 'mode' : 'foo', 'is_record' : 0 },
{ 'mode' : 'bar', 'is_record' : 1 },
{ 'no_mode' : 'at_all' }, ]
def test_bad_requests(self):
responses = self.apc.send_media_monitor_requests(self.bad_requests, dry=True)
for response in responses:
self.assertTrue( 'key' in response )
self.assertTrue( 'error' in response )
print( "Response: '%s'" % response )
# We don't actually test any well formed requests because it is more
# involved
if __name__ == '__main__': unittest.main()

View File

@ -1,24 +0,0 @@
api_client = 'airtime'
# where the binary files live
bin_dir = '/usr/lib/airtime/media-monitor'
# where the logging files live
log_dir = '/var/log/airtime/media-monitor'
############################################
# RabbitMQ settings #
############################################
rabbitmq_host = 'localhost'
rabbitmq_user = 'guest'
rabbitmq_password = 'guest'
rabbitmq_vhost = '/'
############################################
# Media-Monitor preferences #
############################################
check_filesystem_events = '5'
check_airtime_events = '30'
list_value_testing = 'val1', 'val2', 'val3'

View File

@ -1,28 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import pprint
from media.monitor.config import MMConfig
from media.monitor.exceptions import NoConfigFile, ConfigAccessViolation
pp = pprint.PrettyPrinter(indent=4)
class TestMMConfig(unittest.TestCase):
def setUp(self):
self.real_config = MMConfig("./test_config.cfg")
#pp.pprint(self.real_config.cfg.dict)
def test_bad_config(self):
self.assertRaises( NoConfigFile, lambda : MMConfig("/fake/stuff/here") )
def test_no_set(self):
def myf(): self.real_config['bad'] = 'change'
self.assertRaises( ConfigAccessViolation, myf )
def test_copying(self):
k = 'list_value_testing'
mycopy = self.real_config[k]
mycopy.append("another element")
self.assertEqual( len(mycopy), len(self.real_config[k]) + 1 )
if __name__ == '__main__': unittest.main()

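These tests pin down two behaviours of MMConfig: assignment raises ConfigAccessViolation, and reads hand back copies that the caller may mutate without affecting the config. A sketch consistent with that contract (illustrative only; the real class lives in media.monitor.config):

    import copy
    import os
    from configobj import ConfigObj

    class NoConfigFile(Exception): pass
    class ConfigAccessViolation(Exception): pass

    class MMConfig(object):
        def __init__(self, path):
            if not os.path.exists(path):
                raise NoConfigFile(path)
            self.cfg = ConfigObj(path)
        def __getitem__(self, key):
            # Hand out a deep copy so callers cannot mutate the live config.
            return copy.deepcopy(self.cfg[key])
        def __setitem__(self, key, value):
            # The config is read-only by design.
            raise ConfigAccessViolation(key)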
View File

@ -1,31 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
#from pprint import pprint as pp
from media.metadata.process import global_reader
from media.monitor.metadata import Metadata
import media.metadata.definitions as defs
defs.load_definitions()
class TestMMP(unittest.TestCase):
def setUp(self):
self.maxDiff = None
def metadatas(self,f):
return global_reader.read_mutagen(f), Metadata(f).extract()
def test_old_metadata(self):
path = "/home/rudi/music/Nightingale.mp3"
m = global_reader.read_mutagen(path)
self.assertTrue( len(m) > 0 )
n = Metadata(path)
self.assertEqual(n.extract(), m)
def test_recorded(self):
recorded_file = "./15:15:00-Untitled Show-256kbps.ogg"
emf, old = self.metadatas(recorded_file)
self.assertEqual(emf, old)
if __name__ == '__main__': unittest.main()

View File

@ -1,58 +0,0 @@
import unittest
from media.monitor.eventcontractor import EventContractor
#from media.monitor.exceptions import BadSongFile
from media.monitor.events import FakePyinotify, NewFile, MoveFile, \
DeleteFile
class TestMMP(unittest.TestCase):
def test_event_registered(self):
ev = EventContractor()
e1 = NewFile( FakePyinotify('bull.mp3') ).proxify()
e2 = MoveFile( FakePyinotify('bull.mp3') ).proxify()
ev.register(e1)
self.assertTrue( ev.event_registered(e2) )
def test_get_old_event(self):
ev = EventContractor()
e1 = NewFile( FakePyinotify('bull.mp3') ).proxify()
e2 = MoveFile( FakePyinotify('bull.mp3') ).proxify()
ev.register(e1)
self.assertEqual( ev.get_old_event(e2), e1 )
def test_register(self):
ev = EventContractor()
e1 = NewFile( FakePyinotify('bull.mp3') ).proxify()
e2 = DeleteFile( FakePyinotify('bull.mp3') ).proxify()
self.assertTrue( ev.register(e1) )
self.assertFalse( ev.register(e2) )
self.assertEqual( len(ev.store.keys()), 1 )
delete_ev = e1.safe_pack()[0]
self.assertEqual( delete_ev['mode'], u'delete')
self.assertEqual( len(ev.store.keys()), 0 )
e3 = DeleteFile( FakePyinotify('horse.mp3') ).proxify()
self.assertTrue( ev.register(e3) )
self.assertTrue( ev.register(e2) )
def test_register2(self):
ev = EventContractor()
p = 'bull.mp3'
events = [
NewFile( FakePyinotify(p) ),
NewFile( FakePyinotify(p) ),
DeleteFile( FakePyinotify(p) ),
NewFile( FakePyinotify(p) ),
NewFile( FakePyinotify(p) ), ]
events = map(lambda x: x.proxify(), events)
actual_events = []
for e in events:
if ev.register(e):
actual_events.append(e)
self.assertEqual( len(ev.store.keys()), 1 )
#packed = [ x.safe_pack() for x in actual_events ]
if __name__ == '__main__': unittest.main()

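These tests describe the contractor's job: keep at most one pending event per path and fold newcomers into the event already stored. A deliberately simplified sketch of that idea (morph_into is a hypothetical merge hook; the real contractor also lets a stored event repack and unregister itself, which test_register exercises via safe_pack):

    class EventContractor(object):
        """Coalesce filesystem events so each path has one pending event."""
        def __init__(self):
            self.store = {}  # path -> pending event
        def event_registered(self, evt):
            return evt.path in self.store
        def get_old_event(self, evt):
            return self.store[evt.path]
        def register(self, evt):
            if self.event_registered(evt):
                # Merge into the existing event instead of queueing another.
                self.get_old_event(evt).morph_into(evt)  # hypothetical hook
                return False
            self.store[evt.path] = evt
            return True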
View File

@ -1,21 +0,0 @@
import unittest
from copy import deepcopy
from media.saas.airtimeinstance import AirtimeInstance, NoConfigFile
class TestAirtimeInstance(unittest.TestCase):
def setUp(self):
self.cfg = {
'api_client' : 'tests/test_instance.py',
'media_monitor' : 'tests/test_instance.py',
'logging' : 'tests/test_instance.py',
}
def test_init_good(self):
AirtimeInstance("/root", self.cfg)
self.assertTrue(True)
def test_init_bad(self):
cfg = deepcopy(self.cfg)
cfg['api_client'] = 'bs'
with self.assertRaises(NoConfigFile):
AirtimeInstance("/root", cfg)

View File

@ -1,77 +0,0 @@
import os, shutil
import time
import pyinotify
import unittest
from pydispatch import dispatcher
from media.monitor.listeners import OrganizeListener
from media.monitor.events import OrganizeFile
from os.path import join, normpath, abspath
def create_file(p):
with open(p, 'w') as f: f.write(" ")
class TestOrganizeListener(unittest.TestCase):
def setUp(self):
self.organize_path = 'test_o'
self.sig = 'org'
def my_abs_path(x):
return normpath(join(os.getcwd(), x))
self.sample_files = [ my_abs_path(join(self.organize_path, f))
for f in [ "gogi.mp3",
"gio.mp3",
"mimino.ogg" ] ]
os.mkdir(self.organize_path)
def test_flush_events(self):
org = self.create_org()
self.create_sample_files()
received = [0]
def pass_event(sender, event):
if isinstance(event, OrganizeFile):
received[0] += 1
self.assertTrue( abspath(event.path) in self.sample_files )
dispatcher.connect(pass_event, signal=self.sig, sender=dispatcher.Any,
weak=True)
org.flush_events( self.organize_path )
self.assertEqual( received[0], len(self.sample_files) )
self.delete_sample_files()
def test_process(self):
org = self.create_org()
received = [0]
def pass_event(sender, event):
if isinstance(event, OrganizeFile):
self.assertTrue( event.path in self.sample_files )
received[0] += 1
dispatcher.connect(pass_event, signal=self.sig, sender=dispatcher.Any,
weak=True)
wm = pyinotify.WatchManager()
def stopper(notifier):
return received[0] == len(self.sample_files)
tn = pyinotify.ThreadedNotifier(wm, default_proc_fun=org)
tn.daemon = True
tn.start()
wm.add_watch(self.organize_path, pyinotify.ALL_EVENTS, rec=True,
auto_add=True)
time.sleep(0.5)
self.create_sample_files()
time.sleep(1)
self.assertEqual( len(self.sample_files), received[0] )
self.delete_sample_files()
def tearDown(self):
shutil.rmtree(self.organize_path)
def create_sample_files(self):
for f in self.sample_files: create_file(f)
def delete_sample_files(self):
for f in self.sample_files: os.remove(f)
def create_org(self):
return OrganizeListener( signal=self.sig )
if __name__ == '__main__': unittest.main()

View File

@ -1,41 +0,0 @@
import unittest
from media.monitor.manager import Manager
def add_paths(m,paths):
for path in paths:
m.add_watch_directory(path)
class TestManager(unittest.TestCase):
def setUp(self):
self.opath = "/home/rudi/Airtime/python_apps/media-monitor2/tests/"
self.ppath = "/home/rudi/Airtime/python_apps/media-monitor2/media/"
self.paths = [self.opath, self.ppath]
def test_init(self):
man = Manager()
self.assertTrue( len(man.watched_directories) == 0 )
self.assertTrue( man.watch_channel is not None )
self.assertTrue( man.organize_channel is not None )
def test_organize_path(self):
man = Manager()
man.set_organize_path( self.opath )
self.assertEqual( man.get_organize_path(), self.opath )
man.set_organize_path( self.ppath )
self.assertEqual( man.get_organize_path(), self.ppath )
def test_add_watch_directory(self):
man = Manager()
add_paths(man, self.paths)
for path in self.paths:
self.assertTrue( man.has_watch(path) )
def test_remove_watch_directory(self):
man = Manager()
add_paths(man, self.paths)
for path in self.paths:
self.assertTrue( man.has_watch(path) )
man.remove_watch_directory( path )
self.assertTrue( not man.has_watch(path) )
if __name__ == '__main__': unittest.main()

View File

@ -1,44 +0,0 @@
# -*- coding: utf-8 -*-
import os
import unittest
import sys
import media.monitor.metadata as mmm
class TestMetadata(unittest.TestCase):
def setUp(self):
self.music_folder = u'/home/rudi/music'
def test_got_music_folder(self):
t = os.path.exists(self.music_folder)
self.assertTrue(t)
if not t:
print("'%s' must exist for this test to run." % self.music_folder )
sys.exit(1)
def test_metadata(self):
full_paths = (os.path.join(self.music_folder,filename) for filename in os.listdir(self.music_folder))
i = 0
for full_path in full_paths:
if os.path.isfile(full_path):
md_full = mmm.Metadata(full_path)
md = md_full.extract()
if i < 3:
i += 1
print("Sample metadata: '%s'" % md)
self.assertTrue( len( md.keys() ) > 0 )
utf8 = md_full.utf8()
for k,v in md.iteritems():
if hasattr(utf8[k], 'decode'):
self.assertEqual( utf8[k].decode('utf-8'), md[k] )
else: print("Skipping '%s' because it's a directory" % full_path)
def test_airtime_mutagen_dict(self):
for muta,airtime in mmm.mutagen2airtime.iteritems():
self.assertEqual( mmm.airtime2mutagen[airtime], muta )
def test_format_length(self):
# TODO : add some real tests for this function
x1 = 123456
print("Formatting '%s' to '%s'" % (x1, mmm.format_length(x1)))
if __name__ == '__main__': unittest.main()

View File

@ -1,44 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import media.metadata.process as md
class TestMetadataDef(unittest.TestCase):
def test_simple(self):
with md.metadata('MDATA_TESTING') as t:
t.optional(True)
t.depends('ONE','TWO')
t.default('unknown')
t.translate(lambda kw: kw['ONE'] + kw['TWO'])
h = { 'ONE' : "testing", 'TWO' : "123" }
result = md.global_reader.read('test_path',h)
self.assertTrue( 'MDATA_TESTING' in result )
self.assertEqual( result['MDATA_TESTING'], 'testing123' )
h1 = { 'ONE' : 'big testing', 'two' : 'nothing' }
result1 = md.global_reader.read('bs path', h1)
self.assertEqual( result1['MDATA_TESTING'], 'unknown' )
def test_topo(self):
with md.metadata('MDATA_TESTING') as t:
t.depends('shen','sheni')
t.default('megitzda')
t.translate(lambda kw: kw['shen'] + kw['sheni'])
with md.metadata('shen') as t:
t.default('vaxo')
with md.metadata('sheni') as t:
t.default('gio')
with md.metadata('vaxo') as t:
t.depends('shevetsi')
v = md.global_reader.read('bs mang', {})
self.assertEqual(v['MDATA_TESTING'], 'vaxogio')
self.assertTrue( 'vaxo' not in v )
md.global_reader.clear()
if __name__ == '__main__': unittest.main()

View File

@ -1,64 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import json
from media.monitor.airtime import AirtimeNotifier, AirtimeMessageReceiver
from mock import patch, Mock
from media.monitor.config import MMConfig
from media.monitor.manager import Manager
def filter_ev(d): return { i : j for i,j in d.iteritems() if i != 'event_type' }
class TestReceiver(unittest.TestCase):
def setUp(self):
# TODO : properly mock this later
cfg = {}
self.amr = AirtimeMessageReceiver(cfg, Manager())
def test_supported(self):
# Every supported message should fire something
for event_type in self.amr.dispatch_table.keys():
msg = { 'event_type' : event_type, 'extra_param' : 123 }
filtered = filter_ev(msg)
# There should be a better way to test the following without
# patching private methods
with patch.object(self.amr, '_execute_message') as mock_method:
mock_method.side_effect = None
ret = self.amr.message(msg)
self.assertTrue(ret)
mock_method.assert_called_with(event_type, filtered)
def test_no_mod_message(self):
ev = { 'event_type' : 'new_watch', 'directory' : 'something here' }
filtered = filter_ev(ev)
with patch.object(self.amr, '_execute_message') as mock_method:
mock_method.return_value = "tested"
ret = self.amr.message(ev)
self.assertTrue( ret ) # message passing worked
mock_method.assert_called_with(ev['event_type'], filtered)
# test that our copy of the message does not get modified
self.assertTrue( 'event_type' in ev )
class TestAirtimeNotifier(unittest.TestCase):
def test_handle_message(self):
#from configobj import ConfigObj
test_cfg = MMConfig('./test_config.cfg')
ran = [False]
class MockReceiver(object):
def message(me,m):
self.assertTrue( 'event_type' in m )
self.assertEqual( m['path'], '/bs/path' )
ran[0] = True
airtime = AirtimeNotifier(cfg=test_cfg, message_receiver=MockReceiver())
m1 = Mock()
m1.ack = "ack'd message"
m2 = Mock()
m2.body = json.dumps({ 'event_type' : 'file_delete', 'path' : '/bs/path' })
airtime.handle_message(body=m1,message=m2)
self.assertTrue( ran[0] )
if __name__ == '__main__': unittest.main()

View File

@ -1,36 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
from media.monitor import owners
class TestMMP(unittest.TestCase):
def setUp(self):
self.f = "test.mp3"
def test_has_owner(self):
owners.reset_owners()
o = 12345
self.assertTrue( owners.add_file_owner(self.f,o) )
self.assertTrue( owners.has_owner(self.f) )
def test_add_file_owner(self):
owners.reset_owners()
self.assertFalse( owners.add_file_owner('testing', -1) )
self.assertTrue( owners.add_file_owner(self.f, 123) )
self.assertTrue( owners.add_file_owner(self.f, 123) )
self.assertTrue( owners.add_file_owner(self.f, 456) )
def test_remove_file_owner(self):
owners.reset_owners()
self.assertTrue( owners.add_file_owner(self.f, 123) )
self.assertTrue( owners.remove_file_owner(self.f) )
self.assertFalse( owners.remove_file_owner(self.f) )
def test_get_owner(self):
owners.reset_owners()
self.assertTrue( owners.add_file_owner(self.f, 123) )
self.assertEqual( owners.get_owner(self.f), 123, "file is owned" )
self.assertEqual( owners.get_owner("random_stuff.txt"), -1,
"file is not owned" )
if __name__ == '__main__': unittest.main()

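Together these tests fix the owners module's contract: negative ids are rejected, re-owning a file succeeds, and unowned files report -1. A sketch under those assumptions (the module-level dict is an assumption; media.monitor.owners may store ownership differently):

    _owners = {}  # path -> owner id

    def reset_owners():
        _owners.clear()

    def add_file_owner(path, owner):
        if owner < 0:
            return False  # reject invalid owner ids
        _owners[path] = owner
        return True

    def has_owner(path):
        return path in _owners

    def remove_file_owner(path):
        return _owners.pop(path, None) is not None

    def get_owner(path):
        return _owners.get(path, -1)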
View File

@ -1,75 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import os
import media.monitor.pure as mmp
class TestMMP(unittest.TestCase):
def setUp(self):
self.md1 = {'MDATA_KEY_MD5': '71185323c2ab0179460546a9d0690107',
'MDATA_KEY_FTYPE': 'audioclip',
'MDATA_KEY_MIME': 'audio/vorbis',
'MDATA_KEY_DURATION': '0:0:25.000687',
'MDATA_KEY_SAMPLERATE': 48000,
'MDATA_KEY_BITRATE': 64000,
'MDATA_KEY_REPLAYGAIN': 0,
'MDATA_KEY_TITLE': u'ACDC_-_Back_In_Black-sample-64kbps'}
def test_apply_rules(self):
sample_dict = {
'key' : 'val',
'test' : 'IT',
}
rules = {
'key' : lambda x : x.upper(),
'test' : lambda y : y.lower()
}
sample_dict = mmp.apply_rules_dict(sample_dict, rules)
self.assertEqual(sample_dict['key'], 'VAL')
self.assertEqual(sample_dict['test'], 'it')
def test_default_to(self):
sd = { }
def_keys = ['one','two','three']
sd = mmp.default_to(dictionary=sd, keys=def_keys, default='DEF')
for k in def_keys: self.assertEqual( sd[k], 'DEF' )
def test_file_md5(self):
p = os.path.realpath(__file__)
m1 = mmp.file_md5(p)
m2 = mmp.file_md5(p,10)
self.assertTrue( m1 != m2 )
self.assertRaises( ValueError, lambda : mmp.file_md5('/file/path') )
self.assertTrue( m1 == mmp.file_md5(p) )
def test_sub_path(self):
f1 = "/home/testing/123.mp3"
d1 = "/home/testing"
d2 = "/home/testing/"
self.assertTrue( mmp.sub_path(d1, f1) )
self.assertTrue( mmp.sub_path(d2, f1) )
def test_parse_int(self):
self.assertEqual( mmp.parse_int("123"), "123" )
self.assertEqual( mmp.parse_int("123asf"), "123" )
self.assertEqual( mmp.parse_int("asdf"), None )
def test_truncate_to_length(self):
s1 = "testing with non string literal"
s2 = u"testing with unicode literal"
self.assertEqual( len(mmp.truncate_to_length(s1, 5)), 5)
self.assertEqual( len(mmp.truncate_to_length(s2, 8)), 8)
def test_owner_id(self):
start_path = "testing.mp3"
id_path = "testing.mp3.identifier"
o_id = 123
f = open(id_path, 'w')
f.write("123")
f.close()
possible_id = mmp.owner_id(start_path)
self.assertFalse( os.path.exists(id_path) )
self.assertEqual( possible_id, o_id )
self.assertEqual( -1, mmp.owner_id("something.random") )
if __name__ == '__main__': unittest.main()

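test_owner_id fixes the contract for mmp.owner_id: read an integer id from a '<path>.identifier' sidecar file, delete the sidecar, and return -1 when none exists. A sketch consistent with that behaviour (the real implementation in media.monitor.pure may differ in its details):

    import os

    def owner_id(original_path):
        """Consume '<path>.identifier' and return the owner id, or -1."""
        id_path = original_path + '.identifier'
        found = -1
        if os.path.exists(id_path):
            try:
                with open(id_path) as f:
                    found = int(f.read().strip())
            except ValueError:
                pass  # malformed sidecar; treat the file as unowned
            os.remove(id_path)  # the tests expect the sidecar to be consumed
        return found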
View File

@ -1,48 +0,0 @@
import unittest
from mock import MagicMock
from media.monitor.request import RequestSync
class TestRequestSync(unittest.TestCase):
def apc_mock(self):
fake_apc = MagicMock()
fake_apc.send_media_monitor_requests = MagicMock()
return fake_apc
def watcher_mock(self):
fake_watcher = MagicMock()
fake_watcher.flag_done = MagicMock()
return fake_watcher
def request_mock(self):
fake_request = MagicMock()
fake_request.safe_pack = MagicMock(return_value=[])
return fake_request
def test_send_media_monitor(self):
fake_apc = self.apc_mock()
fake_requests = [ self.request_mock() for x in range(1,5) ]
fake_watcher = self.watcher_mock()
rs = RequestSync(fake_watcher, fake_requests, fake_apc)
rs.run_request()
self.assertEquals(fake_apc.send_media_monitor_requests.call_count, 1)
def test_flag_done(self):
fake_apc = self.apc_mock()
fake_requests = [ self.request_mock() for x in range(1,5) ]
fake_watcher = self.watcher_mock()
rs = RequestSync(fake_watcher, fake_requests, fake_apc)
rs.run_request()
self.assertEquals(fake_watcher.flag_done.call_count, 1)
def test_safe_pack(self):
fake_apc = self.apc_mock()
fake_requests = [ self.request_mock() for x in range(1,5) ]
fake_watcher = self.watcher_mock()
rs = RequestSync(fake_watcher, fake_requests, fake_apc)
rs.run_request()
for req in fake_requests:
self.assertEquals(req.safe_pack.call_count, 1)
if __name__ == '__main__': unittest.main()

View File

@ -1,35 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import os
from media.monitor.syncdb import AirtimeDB
from media.monitor.log import get_logger
from media.monitor.pure import partition
import api_clients.api_client as ac
import prepare_tests
class TestAirtimeDB(unittest.TestCase):
def setUp(self):
self.ac = ac.AirtimeApiClient(logger=get_logger(),
config_path=prepare_tests.real_config)
def test_syncdb_init(self):
sdb = AirtimeDB(self.ac)
self.assertTrue( len(sdb.list_storable_paths()) > 0 )
def test_list(self):
self.sdb = AirtimeDB(self.ac)
for watch_dir in self.sdb.list_storable_paths():
self.assertTrue( os.path.exists(watch_dir) )
def test_directory_get_files(self):
sdb = AirtimeDB(self.ac)
print(sdb.list_storable_paths())
for wdir in sdb.list_storable_paths():
files = sdb.directory_get_files(wdir)
print( "total files: %d" % len(files) )
self.assertTrue( len(files) >= 0 )
self.assertTrue( isinstance(files, set) )
exist, deleted = partition(os.path.exists, files)
print("(exist, deleted) = (%d, %d)" % ( len(exist), len(deleted) ) )
if __name__ == '__main__': unittest.main()

View File

@ -1,64 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import time
from media.saas.thread import InstanceThread, InstanceInheritingThread
# ugly but necessary for 2.7
signal = False
signal2 = False
class TestInstanceThread(unittest.TestCase):
def test_user_inject(self):
global signal
signal = False
u = "rudi"
class T(InstanceThread):
def run(me):
global signal
super(T, me).run()
signal = True
self.assertEquals(u, me.user())
t = T(u, name="test_user_inject")
t.daemon = True
t.start()
time.sleep(0.2)
self.assertTrue(signal)
def test_inheriting_thread(utest):
global signal2
u = "testing..."
class TT(InstanceInheritingThread):
def run(self):
global signal2
utest.assertEquals(self.user(), u)
signal2 = True
class T(InstanceThread):
def run(self):
super(T, self).run()
child_thread = TT(name="child thread")
child_thread.daemon = True
child_thread.start()
parent_thread = T(u, name="Parent instance thread")
parent_thread.daemon = True
parent_thread.start()
time.sleep(0.2)
utest.assertTrue(signal2)
def test_different_user(utest):
u1, u2 = "ru", "di"
class T(InstanceThread):
def run(self):
super(T, self).run()
for u in [u1, u2]:
t = T(u)
t.daemon = True
t.start()
utest.assertEquals(t.user(), u)
if __name__ == '__main__': unittest.main()

View File

@ -1,54 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import time
import media.monitor.pure as mmp
from media.monitor.toucher import Toucher, ToucherThread
class BaseTest(unittest.TestCase):
def setUp(self):
self.p = "api_client.cfg"
class TestToucher(BaseTest):
def test_toucher(self):
t1 = mmp.last_modified(self.p)
t = Toucher(self.p)
t()
t2 = mmp.last_modified(self.p)
print("(t1,t2) = (%d, %d) diff => %d" % (t1, t2, t2 - t1))
self.assertTrue( t2 > t1 )
class TestToucherThread(BaseTest):
def test_thread(self):
t1 = mmp.last_modified(self.p)
ToucherThread(self.p, interval=1)
time.sleep(2)
t2 = mmp.last_modified(self.p)
print("(t1,t2) = (%d, %d) diff => %d" % (t1, t2, t2 - t1))
self.assertTrue( t2 > t1 )
if __name__ == '__main__': unittest.main()

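Both tests only require that calling a Toucher bumps the file's mtime and that ToucherThread does so periodically from a background thread. A minimal sketch under those assumptions (names mirror media.monitor.toucher; the internals are illustrative):

    import os
    import threading
    import time

    class Toucher(object):
        """Callable that bumps a file's mtime, like the touch utility."""
        def __init__(self, path):
            self.path = path
        def __call__(self):
            os.utime(self.path, None)  # None sets atime/mtime to now

    def ToucherThread(path, interval=5):
        def loop():
            toucher = Toucher(path)
            while True:
                toucher()
                time.sleep(interval)
        t = threading.Thread(target=loop)
        t.daemon = True  # do not keep the process alive on exit
        t.start()
        return t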
View File

@ -1,66 +0,0 @@
from setuptools import setup
from subprocess import call
import sys
import os
script_path = os.path.dirname(os.path.realpath(__file__))
print script_path
os.chdir(script_path)
# Allows us to avoid installing the upstart init script when deploying on Airtime Pro:
if '--no-init-script' in sys.argv:
data_files = []
sys.argv.remove('--no-init-script') # super hax
else:
media_monitor_files = []
mm2_files = []
for root, dirnames, filenames in os.walk('media-monitor'):
for filename in filenames:
media_monitor_files.append(os.path.join(root, filename))
for root, dirnames, filenames in os.walk('media-monitor2'):
for filename in filenames:
mm2_files.append(os.path.join(root, filename))
data_files = [
('/etc/init', ['install/upstart/airtime-media-monitor.conf.template']),
('/etc/init.d', ['install/sysvinit/airtime-media-monitor']),
('/etc/airtime', ['install/media_monitor_logging.cfg']),
('/var/log/airtime/media-monitor', []),
('/var/tmp/airtime/media-monitor', []),
]
print data_files
setup(name='airtime-media-monitor',
version='1.0',
description='Airtime Media Monitor',
url='http://github.com/sourcefabric/Airtime',
author='sourcefabric',
license='AGPLv3',
packages=['media_monitor', 'mm2', 'mm2.configs',
'mm2.media', 'mm2.media.monitor',
'mm2.media.metadata', 'mm2.media.saas'
],
package_data={'': ['*.cfg']},
scripts=['bin/airtime-media-monitor'],
install_requires=[
'amqplib',
'anyjson',
'argparse',
'configobj',
'docopt',
'kombu',
'mutagen',
'poster',
'PyDispatcher',
'pyinotify',
'pytz',
'wsgiref'
],
zip_safe=False,
data_files=data_files)
# Reload the initctl config so that the media-monitor service works
if data_files:
print "Reloading initctl configuration"
#call(['initctl', 'reload-configuration'])
print "Run \"sudo service airtime-media-monitor start\""

View File

@ -66,8 +66,6 @@ for i in ${FILES[*]}; do
echo $i
done
echo "pip airtime-playout"
# We're no longer using media-monitor
# echo "pip airtime-media-monitor"
echo -e "\nIf your web root is not listed, you will need to manually remove it."
@ -105,6 +103,6 @@ if [[ "$IN" = "y" || "$IN" = "Y" ]]; then
dropAirtimeDatabase
fi
pip uninstall -y airtime-playout airtime-media-monitor
pip uninstall -y airtime-playout airtime-media-monitor airtime-analyzer
service apache2 restart
echo "...Done"