# -*- coding: utf-8 -*-

from threading import Thread
from Queue import Empty

import logging
import logging.config  # needed for logging.config.fileConfig below
import shutil
import os
import sys
import stat
import requests
import ConfigParser

from std_err_override import LogWriter

CONFIG_PATH = '/etc/airtime/airtime.conf'

# configure logging
logging.config.fileConfig("logging.cfg")
logger = logging.getLogger()
LogWriter.override_std_err(logger)

# Need to wait for Python 2.7 for this...
#logging.captureWarnings(True)

class PypoFile(Thread):

    def __init__(self, schedule_queue, config):
        Thread.__init__(self)
        self.logger = logging.getLogger()
        self.media_queue = schedule_queue
        self.media = None
        self.cache_dir = os.path.join(config["cache_dir"], "scheduler")
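
    # Illustrative sketch (assumed values): `config` only needs to provide a
    # "cache_dir" entry; a hypothetical caller might pass
    #
    #   config = {"cache_dir": "/var/tmp/airtime/pypo/cache/"}
    #
    # which would make this thread store downloaded files under
    # /var/tmp/airtime/pypo/cache/scheduler.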

    def copy_file(self, media_item):
        """
        Copy media_item from the local library directory to the local cache
        directory. The file is actually fetched over the REST API rather than
        copied from disk.
        """
        src = media_item['uri']
        dst = media_item['dst']

        src_size = media_item['filesize']

        dst_exists = True
        try:
            dst_size = os.path.getsize(dst)
        except Exception:
            dst_exists = False

        do_copy = False
        if dst_exists:
            if src_size != dst_size:
                do_copy = True
            else:
                self.logger.debug("file %s already exists in local cache as %s, skipping copy..." % (src, dst))
        else:
            do_copy = True

        media_item['file_ready'] = not do_copy

        if do_copy:
            self.logger.debug("copying from %s to local cache %s" % (src, dst))
            try:
                config = self.read_config_file(CONFIG_PATH)
                CONFIG_SECTION = "general"
                # The API key doubles as the HTTP basic auth username; the
                # password is left empty.
                username = config.get(CONFIG_SECTION, 'api_key')
                host = config.get(CONFIG_SECTION, 'base_url')
                url = "http://%s/rest/media/%s/download" % (host, media_item["id"])

                # Stream the download in 1 KB chunks so the whole file is
                # never held in memory.
                with open(dst, "wb") as handle:
                    response = requests.get(url, auth=requests.auth.HTTPBasicAuth(username, ''), stream=True, verify=False)

                    if not response.ok:
                        self.logger.error(response)
                        raise Exception("%s - Error occurred downloading file" % response.status_code)

                    for chunk in response.iter_content(1024):
                        if not chunk:
                            break
                        handle.write(chunk)

                # Make the file world readable (read-only for owner, group
                # and others).
                os.chmod(dst, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)

                media_item['file_ready'] = True
            except Exception as e:
                self.logger.error("Could not copy from %s to %s" % (src, dst))
                self.logger.error(e)
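
    # For illustration, copy_file() expects a media_item dict carrying at
    # least the keys used above; the concrete values here are hypothetical:
    #
    #   media_item = {
    #       'id': 42,                           # media id used in the REST URL
    #       'uri': '/srv/music/song.mp3',       # source path in the library
    #       'dst': '/cache/scheduler/42.mp3',   # destination in the local cache
    #       'filesize': 3145728,                # expected size in bytes
    #   }
    #
    # 'file_ready' is set on the same dict once the file is usable.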

    def get_highest_priority_media_item(self, schedule):
        """
        Get the highest priority media_item in the queue. Currently the
        highest priority is decided by how close the start time is to "now".
        """
        if schedule is None or len(schedule) == 0:
            return None

        sorted_keys = sorted(schedule.keys())

        if len(sorted_keys) == 0:
            return None

        highest_priority = sorted_keys[0]
        media_item = schedule[highest_priority]

        self.logger.debug("Highest priority item: %s" % highest_priority)

        # Remove this media_item from the dictionary. On the next iteration
        # (from the main function) we won't consider it for prioritization
        # anymore. If we have received a new schedule by then, it is quite
        # possible that we will have to deal with the same media_items again.
        # In that case, the worst that can happen is that we try to copy the
        # file again and realize we already have it (thus aborting the copy).
        del schedule[highest_priority]

        return media_item
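
    # Illustrative sketch (the key format is an assumption): schedule is a
    # dict keyed by start time, so lexicographic order matches chronological
    # order and the earliest item wins:
    #
    #   schedule = {
    #       '2014-12-03-00-00-00': {...},   # earliest start, highest priority
    #       '2014-12-03-00-05-00': {...},
    #   }
    #
    # get_highest_priority_media_item(schedule) pops and returns the
    # '2014-12-03-00-00-00' item, leaving the rest for later iterations.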

    def read_config_file(self, config_path):
        """Parse the application's config file located at config_path."""
        config = ConfigParser.SafeConfigParser()
        try:
            config.readfp(open(config_path))
        except IOError as e:
            logging.debug("Failed to open config file at %s: %s" % (config_path, e.strerror))
            sys.exit()
        except Exception as e:
            logging.debug(e)
            sys.exit()

        return config
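
    # Example usage, mirroring copy_file() above:
    #
    #   config = self.read_config_file(CONFIG_PATH)
    #   api_key = config.get('general', 'api_key')
    #   base_url = config.get('general', 'base_url')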

    def main(self):
        while True:
            try:
                if self.media is None or len(self.media) == 0:
                    # We have no schedule, so there is nothing else to do.
                    # Do a blocking wait on the queue.
                    self.media = self.media_queue.get(block=True)
                else:
                    # We have a schedule to process, but we also want to
                    # check whether a newer schedule is available. Do a
                    # non-blocking queue get and, whether we receive
                    # something or not, get back to work preparing the files.
                    try:
                        self.media = self.media_queue.get_nowait()
                    except Empty:
                        pass

                media_item = self.get_highest_priority_media_item(self.media)
                if media_item is not None:
                    self.copy_file(media_item)
            except Exception as e:
                import traceback
                top = traceback.format_exc()
                self.logger.error(str(e))
                self.logger.error(top)
                raise

    def run(self):
        """
        Entry point of the thread.
        """
        self.main()
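
# Illustrative wiring (assumed names): the pypo daemon would start this
# thread with a shared queue that new schedules are pushed onto, roughly:
#
#   from Queue import Queue
#   media_q = Queue()
#   pf = PypoFile(media_q, config)
#   pf.daemon = True
#   pf.start()
#   media_q.put(schedule)   # hand over a new schedule dict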