Merge branch 'saas' into saas-cloud-storage-config-per-dev-env

Conflicts:
	python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py
	python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py
	python_apps/airtime_analyzer/airtime_analyzer/message_listener.py
drigato 2015-02-04 13:36:35 -05:00
commit c6fc184559
14 changed files with 322 additions and 39 deletions

@@ -51,6 +51,10 @@ class AnalyzerPipeline:
             raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__ + " instead.")
         if not isinstance(original_filename, unicode):
             raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__ + " instead.")
+        if not isinstance(file_prefix, unicode):
+            raise TypeError("file_prefix must be unicode. Was of type " + type(file_prefix).__name__ + " instead.")
+        if not isinstance(cloud_storage_enabled, bool):
+            raise TypeError("cloud_storage_enabled must be a boolean. Was of type " + type(cloud_storage_enabled).__name__ + " instead.")

         # Analyze the audio file we were told to analyze:
@@ -63,7 +67,6 @@ class AnalyzerPipeline:
         metadata = ReplayGainAnalyzer.analyze(audio_file_path, metadata)
         metadata = PlayabilityAnalyzer.analyze(audio_file_path, metadata)

         csu = CloudStorageUploader(cloud_storage_config)
         if csu.enabled():
             metadata = csu.upload_obj(audio_file_path, metadata)
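run_analysis is the target of the child process spawned in message_listener.py (see the spawn_analyzer_process hunk further down), so the isinstance guards above turn a wrongly typed argument into an immediate, readable TypeError instead of an obscure failure deep inside one of the analyzers. A minimal standalone sketch of the same fail-fast pattern (Python 2; the helper name and sample values are illustrative, not the real pipeline API):

    # Sketch only: same guard style as the checks added above.
    def check_args(import_directory, file_prefix, cloud_storage_enabled):
        if not isinstance(import_directory, unicode):
            raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__ + " instead.")
        if not isinstance(file_prefix, unicode):
            raise TypeError("file_prefix must be unicode. Was of type " + type(file_prefix).__name__ + " instead.")
        if not isinstance(cloud_storage_enabled, bool):
            raise TypeError("cloud_storage_enabled must be a boolean. Was of type " + type(cloud_storage_enabled).__name__ + " instead.")

    check_args(u'/srv/airtime/stor', u'', False)   # passes
    check_args('/srv/airtime/stor', u'', False)    # raises: a plain str is not unicode under Python 2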

@@ -22,18 +22,25 @@ class CloudStorageUploader:

     def __init__(self, config):
-        CLOUD_STORAGE_CONFIG_SECTION = config.get("current_backend", "storage_backend")
-        self._storage_backend = CLOUD_STORAGE_CONFIG_SECTION
+        try:
+            cloud_storage_config_section = config.get("current_backend", "storage_backend")
+            self._storage_backend = cloud_storage_config_section
+        except Exception as e:
+            print e
+            print "Defaulting to file storage"
+            self._storage_backend = STORAGE_BACKEND_FILE

         if self._storage_backend == STORAGE_BACKEND_FILE:
             self._host = ""
             self._bucket = ""
             self._api_key = ""
             self._api_key_secret = ""
         else:
-            self._host = config.get(CLOUD_STORAGE_CONFIG_SECTION, 'host')
-            self._bucket = config.get(CLOUD_STORAGE_CONFIG_SECTION, 'bucket')
-            self._api_key = config.get(CLOUD_STORAGE_CONFIG_SECTION, 'api_key')
-            self._api_key_secret = config.get(CLOUD_STORAGE_CONFIG_SECTION, 'api_key_secret')
+            self._host = config.get(cloud_storage_config_section, 'host')
+            self._bucket = config.get(cloud_storage_config_section, 'bucket')
+            self._api_key = config.get(cloud_storage_config_section, 'api_key')
+            self._api_key_secret = config.get(cloud_storage_config_section, 'api_key_secret')

     def enabled(self):
         if self._storage_backend == "file":
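The lookup above is a small indirection: [current_backend] storage_backend holds the name of another section, and that section holds the actual host, bucket, and credentials, so switching an environment between backends means changing one value. A runnable sketch of the scheme (Python 2; the option names come from the diff above, but the section name cloud_backend_1 and every value are made up for illustration):

    # Sketch only: section-indirection lookup against an in-memory config.
    import ConfigParser
    import StringIO
    import textwrap

    sample = textwrap.dedent("""\
        [current_backend]
        storage_backend = cloud_backend_1

        [cloud_backend_1]
        host = storage.example.com
        bucket = airtime-uploads
        api_key = KEY
        api_key_secret = SECRET
        """)

    config = ConfigParser.SafeConfigParser()
    config.readfp(StringIO.StringIO(sample))

    section = config.get("current_backend", "storage_backend")   # "cloud_backend_1"
    print config.get(section, "host")                             # storage.example.com
    print config.get(section, "bucket")                           # airtime-uploads

If the [current_backend] section or the storage_backend option is missing, config.get raises (NoSectionError or NoOptionError), which is exactly what the broad except in the constructor converts into the file-storage default.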

@@ -1,7 +1,7 @@
 import os
 import logging
 import uuid
-import config_file
+import ConfigParser
 from libcloud.storage.providers import get_driver
 from libcloud.storage.types import Provider, ContainerDoesNotExistError, ObjectDoesNotExistError

@@ -27,20 +27,30 @@ class CloudStorageUploader:

     def __init__(self):
-        config = config_file.read_config_file(CLOUD_CONFIG_PATH)
+        config = ConfigParser.SafeConfigParser()
+        try:
+            config.readfp(open(CLOUD_CONFIG_PATH))
+            cloud_storage_config_section = config.get("current_backend", "storage_backend")
+            self._storage_backend = cloud_storage_config_section
+        except IOError as e:
+            print "Failed to open config file at " + CLOUD_CONFIG_PATH + ": " + e.strerror
+            print "Defaulting to file storage"
+            self._storage_backend = STORAGE_BACKEND_FILE
+        except Exception as e:
+            print e
+            print "Defaulting to file storage"
+            self._storage_backend = STORAGE_BACKEND_FILE
-        CLOUD_STORAGE_CONFIG_SECTION = config.get("current_backend", "storage_backend")
-        self._storage_backend = CLOUD_STORAGE_CONFIG_SECTION

         if self._storage_backend == STORAGE_BACKEND_FILE:
             self._provider = ""
             self._bucket = ""
             self._api_key = ""
             self._api_key_secret = ""
         else:
-            self._provider = config.get(CLOUD_STORAGE_CONFIG_SECTION, 'provider')
-            self._bucket = config.get(CLOUD_STORAGE_CONFIG_SECTION, 'bucket')
-            self._api_key = config.get(CLOUD_STORAGE_CONFIG_SECTION, 'api_key')
-            self._api_key_secret = config.get(CLOUD_STORAGE_CONFIG_SECTION, 'api_key_secret')
+            self._provider = config.get(cloud_storage_config_section, 'provider')
+            self._bucket = config.get(cloud_storage_config_section, 'bucket')
+            self._api_key = config.get(cloud_storage_config_section, 'api_key')
+            self._api_key_secret = config.get(cloud_storage_config_section, 'api_key_secret')

     def enabled(self):
         if self._storage_backend == "file":
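Unlike the previous class, this uploader owns its config parsing: it builds a SafeConfigParser, opens CLOUD_CONFIG_PATH itself, and needs two fallbacks: IOError for a missing or unreadable file, and a broad except for a file that opens but lacks the expected section or option. Both paths degrade to plain file storage rather than aborting the service. A condensed sketch of that pattern (Python 2; the config path and the "file" constant are illustrative stand-ins for CLOUD_CONFIG_PATH and STORAGE_BACKEND_FILE):

    # Sketch only: read a config file and fall back to "file" storage on any problem.
    import ConfigParser

    CONFIG_PATH = "/etc/airtime-saas/cloud_storage.conf"   # stand-in path
    FILE_BACKEND = "file"

    def detect_backend():
        config = ConfigParser.SafeConfigParser()
        try:
            config.readfp(open(CONFIG_PATH))
            return config.get("current_backend", "storage_backend")
        except IOError as e:
            # File missing or unreadable.
            print "Failed to open config file at " + CONFIG_PATH + ": " + e.strerror
            return FILE_BACKEND
        except Exception as e:
            # File parsed but the section/option is absent, or the syntax is bad.
            print e
            return FILE_BACKEND

    print detect_backend()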

@@ -213,7 +213,7 @@ class MessageListener:
     def spawn_analyzer_process(audio_file_path, import_directory, original_filename, file_prefix, cloud_storage_config):
         ''' Spawn a child process to analyze and import a new audio file. '''
         q = multiprocessing.Queue()
-        p = multiprocessing.Process(target=AnalyzerPipeline.run_analysis,
+        p = multiprocessing.Process(target=AnalyzerPipeline.run_analysis,
                 args=(q, audio_file_path, import_directory, original_filename, file_prefix, cloud_storage_config))
         p.start()
         p.join()
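Each file import runs in its own short-lived child process, so a crash or memory leak inside an analyzer takes down only that child while the listener keeps consuming messages; results come back to the parent over a multiprocessing.Queue. A self-contained sketch of the pattern (Python 2; analyze() and the sample path are placeholders for AnalyzerPipeline.run_analysis and a real import):

    # Sketch only: spawn-per-file worker returning its results over a queue.
    import multiprocessing

    def analyze(queue, audio_file_path):
        # ... run the metadata/replaygain/playability analyzers here ...
        queue.put({"path": audio_file_path, "track_title": "Test Title"})

    if __name__ == "__main__":
        q = multiprocessing.Queue()
        p = multiprocessing.Process(target=analyze, args=(q, "/tmp/song.mp3"))
        p.start()
        results = q.get()   # blocks until the child posts its metadata
        p.join()
        print results

Reading from the queue before join() sidesteps the deadlock the multiprocessing docs warn about, where a child still blocked writing to a full queue never exits while the parent waits in join().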

@@ -19,7 +19,7 @@ class PlayabilityAnalyzer(Analyzer):
         :param metadata: A metadata dictionary where the results will be put
         :return: The metadata dictionary
         '''
-        command = [PlayabilityAnalyzer.LIQUIDSOAP_EXECUTABLE, '-v', '-c', "output.dummy(audio_to_stereo(single('%s')))" % filename]
+        command = [PlayabilityAnalyzer.LIQUIDSOAP_EXECUTABLE, '-v', '-c', "output.dummy(audio_to_stereo(single(argv(1))))", '--', filename]

         try:
             subprocess.check_output(command, stderr=subprocess.STDOUT, close_fds=True)
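The old command spliced the filename into the Liquidsoap script text with %, so a name containing a single quote broke the generated script, and script syntax hidden in a filename could even be interpreted by Liquidsoap. The new command keeps the script constant and hands the filename over after '--', where the script reads it back with argv(1). A sketch of the resulting invocation (Python 2; assumes a liquidsoap binary on the PATH, and only the argument layout is taken from the change above):

    # Sketch only: constant script text, filename passed as a positional argument.
    import subprocess

    filename = u"/srv/music/It's Alright.mp3"   # the apostrophe breaks the old interpolated form

    command = ["liquidsoap", "-v", "-c",
               "output.dummy(audio_to_stereo(single(argv(1))))",   # script never changes
               "--", filename]                                     # read inside the script as argv(1)
    try:
        subprocess.check_output(command, stderr=subprocess.STDOUT, close_fds=True)
    except subprocess.CalledProcessError as e:
        print e.output   # liquidsoap's complaint, e.g. an unreadable or corrupt file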

@@ -20,8 +20,10 @@ def teardown():
 def test_basic():
     filename = os.path.basename(DEFAULT_AUDIO_FILE)
     q = multiprocessing.Queue()
+    cloud_storage_enabled = False
+    file_prefix = u''
     #This actually imports the file into the "./Test Artist" directory.
-    AnalyzerPipeline.run_analysis(q, DEFAULT_AUDIO_FILE, u'.', filename)
+    AnalyzerPipeline.run_analysis(q, DEFAULT_AUDIO_FILE, u'.', filename, file_prefix, cloud_storage_enabled)
     metadata = q.get()
     assert metadata['track_title'] == u'Test Title'
     assert metadata['artist_name'] == u'Test Artist'