Merge branch 'saas' into saas-media-refactor

Conflicts:
	airtime_mvc/application/cloud_storage/ProxyStorageBackend.php
	airtime_mvc/application/controllers/ApiController.php

commit 3a1141d4c8

42 changed files with 9042 additions and 145 deletions
@@ -5,6 +5,12 @@ import socket
from boto.s3.connection import S3Connection
from boto.s3.key import Key

# Fix for getaddrinfo deadlock. See these issues for details:
# https://github.com/gevent/gevent/issues/349
# https://github.com/docker/docker-registry/issues/400
u'fix getaddrinfo deadlock'.encode('idna')

CLOUD_CONFIG_PATH = '/etc/airtime-saas/cloud_storage.conf'
STORAGE_BACKEND_FILE = "file"
SOCKET_TIMEOUT = 240
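For context, the one-off .encode('idna') call is a known workaround: it forces the encodings.idna codec to be imported eagerly at module load time, so a later DNS lookup inside a gevent-patched greenlet never has to lazily import the codec under the interpreter's import lock. A minimal sketch of the effect (illustration only, not part of the commit):

import sys

# Encoding any unicode string as 'idna' imports the encodings.idna
# module once, up front, instead of lazily inside a greenlet later.
u'fix getaddrinfo deadlock'.encode('idna')

assert 'encodings.idna' in sys.modules  # the codec module is now cached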
@@ -64,8 +70,7 @@ class CloudStorageUploader:
            metadata: ID3 tags and other metadata extracted from the audio file.

        Returns:
            The metadata dictionary it received with three new keys:
                filesize: The file's filesize in bytes.
            The metadata dictionary it received with two new keys:
                filename: The file's filename.
                resource_id: The unique object name used to identify the objects
                             on Amazon S3
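Read together with the later hunk that drops the filesize assignment from the upload path, the docstring now appears to promise only two uploader-added keys. A hypothetical return value might look like this (example values only, not taken from the commit):

# Hypothetical shape of the returned metadata after this change; only
# the two keys named in the docstring are added by the uploader itself.
metadata = {
    # ...ID3 tags and other fields extracted earlier by the analyzer...
    "filename": "show_intro.mp3",                     # hypothetical
    "resource_id": "uploads/show_intro-1a2b3c.mp3",   # hypothetical S3 object name
}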
@@ -101,8 +106,6 @@ class CloudStorageUploader:
        key.key = resource_id
        key.set_metadata('filename', file_base_name)
        key.set_contents_from_filename(audio_file_path)

        metadata["filesize"] = os.path.getsize(audio_file_path)

        # Remove file from organize directory
        try:
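For readers unfamiliar with boto's S3 API, the upload path above follows the usual boto 2 pattern. A minimal, self-contained sketch of that pattern (credentials, bucket name, and paths are placeholders, not values from this commit):

from boto.s3.connection import S3Connection
from boto.s3.key import Key

# Placeholder credentials and names, for illustration only.
conn = S3Connection('MY_ACCESS_KEY', 'MY_SECRET_KEY')
bucket = conn.get_bucket('my-airtime-bucket')

key = Key(bucket)
key.key = 'uploads/example-object-name.mp3'          # the resource_id
key.set_metadata('filename', 'example.mp3')          # stored as S3 user metadata
key.set_contents_from_filename('/tmp/example.mp3')   # streams the file to S3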
@@ -4,6 +4,8 @@ import mutagen
import magic
import wave
import logging
import os
import hashlib
from analyzer import Analyzer

class MetadataAnalyzer(Analyzer):
@@ -96,6 +98,20 @@ class MetadataAnalyzer(Analyzer):
            #If we couldn't figure out the track_number or track_total, just ignore it...
            pass

        # Get file size and md5 hash of the file
        metadata["filesize"] = os.path.getsize(filename)

        with open(filename, 'rb') as fh:
            m = hashlib.md5()
            while True:
                data = fh.read(8192)
                if not data:
                    break
                m.update(data)
            metadata["md5"] = m.hexdigest()


        #We normalize the mutagen tags slightly here, so in case mutagen changes,
        #we find the
        mutagen_to_airtime_mapping = {
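The chunked read keeps memory usage flat for large audio files while producing the same digest as hashing the whole file at once. A standalone sketch of the same idea (the helper name and path are illustrative, not part of the commit):

import hashlib

def md5_of_file(path, chunk_size=8192):
    """Hash a file in fixed-size chunks so large files never load fully into memory."""
    m = hashlib.md5()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            m.update(chunk)
    return m.hexdigest()

# Example usage: prints the same digest a one-shot hashlib.md5(data) would give.
# print(md5_of_file('/tmp/example.mp3'))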
@@ -1,4 +1,5 @@
from nose.tools import *
from ConfigParser import SafeConfigParser
from airtime_analyzer.cloud_storage_uploader import CloudStorageUploader
from airtime_analyzer.airtime_analyzer import AirtimeAnalyzerServer
from airtime_analyzer import config_file
@@ -10,7 +11,8 @@ def teardown():
    pass

def test_analyze():
    cloud_storage_config_path = '/etc/airtime-saas/production/cloud_storage.conf'
    cloud_storage_config = config_file.read_config_file(cloud_storage_config_path)

    cloud_storage_config = SafeConfigParser()
    cloud_storage_config.add_section("current_backend")
    cloud_storage_config.set("current_backend", "storage_backend", "file")
    cl = CloudStorageUploader(cloud_storage_config)
    cl._storage_backend = "file"