Merge branch 'saas' into saas-media-refactor
Conflicts:
	airtime_mvc/application/cloud_storage/ProxyStorageBackend.php
	airtime_mvc/application/controllers/ApiController.php
commit 3a1141d4c8
42 changed files with 9042 additions and 145 deletions
@@ -5,6 +5,12 @@ import socket
from boto.s3.connection import S3Connection
from boto.s3.key import Key

# Fix for getaddrinfo deadlock. See these issues for details:
# https://github.com/gevent/gevent/issues/349
# https://github.com/docker/docker-registry/issues/400
u'fix getaddrinfo deadlock'.encode('idna')

CLOUD_CONFIG_PATH = '/etc/airtime-saas/cloud_storage.conf'
STORAGE_BACKEND_FILE = "file"
SOCKET_TIMEOUT = 240
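The `.encode('idna')` one-liner forces Python's idna codec to load at import time; per the gevent and docker-registry issues referenced in the comment, deferring that import until the first hostname lookup can deadlock `getaddrinfo` in monkey-patched workers. A minimal standalone sketch of the same trick:

# Illustrative sketch, not part of the commit: pre-load the idna codec so the
# first concurrent DNS lookup never triggers a lazy module import inside getaddrinfo.
import encodings.idna  # explicit import; the one-liner below has the same effect

u'fix getaddrinfo deadlock'.encode('idna')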
@@ -64,8 +70,7 @@ class CloudStorageUploader:
            metadata: ID3 tags and other metadata extracted from the audio file.

        Returns:
            The metadata dictionary it received with three new keys:
                filesize: The file's filesize in bytes.
            The metadata dictionary it received with two new keys:
                filename: The file's filename.
                resource_id: The unique object name used to identify the objects
                    on Amazon S3
@@ -101,8 +106,6 @@ class CloudStorageUploader:
            key.key = resource_id
            key.set_metadata('filename', file_base_name)
            key.set_contents_from_filename(audio_file_path)

            metadata["filesize"] = os.path.getsize(audio_file_path)

            # Remove file from organize directory
            try:
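The hunk only shows the key handling; for context, a self-contained sketch of the surrounding boto 2.x upload flow (bucket name, credentials, and connection setup are illustrative, not taken from this commit) might look like:

# Illustrative sketch of the boto 2.x upload pattern, not part of the commit.
import os
from boto.s3.connection import S3Connection
from boto.s3.key import Key

def upload_file(audio_file_path, resource_id, bucket_name, aws_key, aws_secret):
    # Open a connection and look up the target bucket (assumed to exist already).
    conn = S3Connection(aws_key, aws_secret)
    bucket = conn.get_bucket(bucket_name, validate=False)

    # Store the object under its unique resource id, attach the original
    # filename as S3 metadata, then upload the file contents.
    key = Key(bucket)
    key.key = resource_id
    key.set_metadata('filename', os.path.basename(audio_file_path))
    key.set_contents_from_filename(audio_file_path)

    return os.path.getsize(audio_file_path)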
@@ -4,6 +4,8 @@ import mutagen
import magic
import wave
import logging
import os
import hashlib
from analyzer import Analyzer

class MetadataAnalyzer(Analyzer):
@@ -96,6 +98,20 @@ class MetadataAnalyzer(Analyzer):
            #If we couldn't figure out the track_number or track_total, just ignore it...
            pass

        # Get file size and md5 hash of the file
        metadata["filesize"] = os.path.getsize(filename)

        with open(filename, 'rb') as fh:
            m = hashlib.md5()
            while True:
                data = fh.read(8192)
                if not data:
                    break
                m.update(data)
            metadata["md5"] = m.hexdigest()

        #We normalize the mutagen tags slightly here, so in case mutagen changes,
        #we find the
        mutagen_to_airtime_mapping = {
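Reading the file in 8192-byte chunks keeps memory usage flat even for long recordings. A compact equivalent of the hashing loop above (a standalone helper, not code from the commit) is:

# Illustrative helper, not part of the commit.
import hashlib

def file_md5(path, chunk_size=8192):
    # Hash the file incrementally so it is never loaded into memory all at once.
    m = hashlib.md5()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            m.update(chunk)
    return m.hexdigest()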
@@ -1,4 +1,5 @@
from nose.tools import *
from ConfigParser import SafeConfigParser
from airtime_analyzer.cloud_storage_uploader import CloudStorageUploader
from airtime_analyzer.airtime_analyzer import AirtimeAnalyzerServer
from airtime_analyzer import config_file
@@ -10,7 +11,8 @@ def teardown():
    pass

def test_analyze():
    cloud_storage_config_path = '/etc/airtime-saas/production/cloud_storage.conf'
    cloud_storage_config = config_file.read_config_file(cloud_storage_config_path)

    cloud_storage_config = SafeConfigParser()
    cloud_storage_config.add_section("current_backend")
    cloud_storage_config.set("current_backend", "storage_backend", "file")
    cl = CloudStorageUploader(cloud_storage_config)
    cl._storage_backend = "file"
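The test now builds its configuration in memory instead of reading /etc/airtime-saas, so it can run on machines without a SaaS config installed. A stripped-down sketch of that setup (helper name is illustrative, not from the commit):

# Illustrative sketch of the in-memory test configuration, not part of the commit.
from ConfigParser import SafeConfigParser
from airtime_analyzer.cloud_storage_uploader import CloudStorageUploader

def make_file_backend_uploader():
    # Build only the config section the test exercises, entirely in memory.
    config = SafeConfigParser()
    config.add_section("current_backend")
    config.set("current_backend", "storage_backend", "file")
    return CloudStorageUploader(config)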
@@ -37,7 +37,7 @@ signal.signal(signal.SIGINT, keyboardInterruptHandler)
#need to wait for Python 2.7 for this..
#logging.captureWarnings(True)

POLL_INTERVAL = 1800
POLL_INTERVAL = 480

class PypoFetch(Thread):
@@ -10,6 +10,9 @@ import sys
import stat
import requests
import ConfigParser
import json
import hashlib
from requests.exceptions import ConnectionError, HTTPError, Timeout

from std_err_override import LogWriter
@@ -68,7 +71,6 @@ class PypoFile(Thread):

            host = config.get(CONFIG_SECTION, 'base_url')
            url = "http://%s/rest/media/%s/download" % (host, media_item["id"])

            with open(dst, "wb") as handle:
                response = requests.get(url, auth=requests.auth.HTTPBasicAuth(username, ''), stream=True, verify=False)
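Passing stream=True makes requests hand the body back incrementally, so the worker can write large media files straight to disk. A minimal standalone version of that download loop (URL, credentials, and timeout are illustrative) is:

# Illustrative sketch of the streaming download, not part of the commit.
import requests
from requests.auth import HTTPBasicAuth

def download_media(url, dst, api_key, chunk_size=8192):
    # Stream the response to disk chunk by chunk instead of buffering it in memory.
    response = requests.get(url, auth=HTTPBasicAuth(api_key, ''), stream=True, timeout=60)
    response.raise_for_status()
    with open(dst, "wb") as handle:
        for chunk in response.iter_content(chunk_size=chunk_size):
            handle.write(chunk)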
@@ -85,11 +87,48 @@ class PypoFile(Thread):
            #make file world readable
            os.chmod(dst, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)

            if media_item['filesize'] == 0:
                file_size = self.report_file_size_and_md5_to_airtime(dst, media_item["id"], host, username)
                media_item["filesize"] = file_size

            media_item['file_ready'] = True
        except Exception, e:
            self.logger.error("Could not copy from %s to %s" % (src, dst))
            self.logger.error(e)

    def report_file_size_and_md5_to_airtime(self, file_path, file_id, host_name, api_key):
        try:
            file_size = os.path.getsize(file_path)

            with open(file_path, 'rb') as fh:
                m = hashlib.md5()
                while True:
                    data = fh.read(8192)
                    if not data:
                        break
                    m.update(data)
                md5_hash = m.hexdigest()
        except (OSError, IOError) as e:
            file_size = 0
            self.logger.error("Error getting file size and md5 hash for file id %s" % file_id)
            self.logger.error(e)

        # Make PUT request to Airtime to update the file size and hash
        error_msg = "Could not update media file %s with file size and md5 hash" % file_id
        try:
            put_url = "http://%s/rest/media/%s" % (host_name, file_id)
            payload = json.dumps({'filesize': file_size, 'md5': md5_hash})
            response = requests.put(put_url, data=payload, auth=requests.auth.HTTPBasicAuth(api_key, ''))
            if not response.ok:
                self.logger.error(error_msg)
        except (ConnectionError, Timeout):
            self.logger.error(error_msg)
        except Exception as e:
            self.logger.error(error_msg)
            self.logger.error(e)

        return file_size

    def get_highest_priority_media_item(self, schedule):
        """
        Get highest priority media_item in the queue. Currently the highest
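The new helper closes the loop by PUTting the computed size and hash back to Airtime's media REST endpoint. Reduced to just the reporting step (endpoint path and basic-auth scheme taken from the hunk above, everything else illustrative), it looks like:

# Illustrative sketch of the size/md5 reporting call, not part of the commit.
import json
import requests
from requests.auth import HTTPBasicAuth
from requests.exceptions import ConnectionError, Timeout

def report_file_attrs(host, file_id, api_key, filesize, md5_hash, logger):
    # PUT the computed attributes onto the media resource; failures are logged, not raised.
    url = "http://%s/rest/media/%s" % (host, file_id)
    payload = json.dumps({'filesize': filesize, 'md5': md5_hash})
    try:
        response = requests.put(url, data=payload, auth=HTTPBasicAuth(api_key, ''), timeout=30)
        if not response.ok:
            logger.error("Updating media file %s failed with HTTP %s" % (file_id, response.status_code))
    except (ConnectionError, Timeout) as e:
        logger.error("Updating media file %s failed: %s" % (file_id, e))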