Make the unit tests pass again

Albert Santoni 2015-02-02 13:14:14 -05:00
parent 536094ea50
commit 9baed1b328
3 changed files with 13 additions and 7 deletions

View File

@@ -21,7 +21,7 @@ class AnalyzerPipeline:
     """
     @staticmethod
-    def run_analysis(queue, audio_file_path, import_directory, original_filename):
+    def run_analysis(queue, audio_file_path, import_directory, original_filename, cloud_storage_enabled):
         """Analyze and import an audio file, and put all extracted metadata into queue.
         Keyword arguments:
@@ -34,7 +34,7 @@ class AnalyzerPipeline:
                 preserve. The file at audio_file_path typically has a
                 temporary randomly generated name, which is why we want
                 to know what the original name was.
-            station_domain: The Airtime Pro account's domain name. i.e. bananas
+            cloud_storage_enabled: Whether to store the file in the cloud or on the local disk.
         """
         # It is super critical to initialize a separate log file here so that we
         # don't inherit logging/locks from the parent process. Supposedly
@@ -50,6 +50,8 @@
             raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__ + " instead.")
         if not isinstance(original_filename, unicode):
             raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__ + " instead.")
+        if not isinstance(cloud_storage_enabled, bool):
+            raise TypeError("cloud_storage_enabled must be a boolean. Was of type " + type(cloud_storage_enabled).__name__ + " instead.")
         # Analyze the audio file we were told to analyze:
@@ -60,9 +62,8 @@
         metadata = ReplayGainAnalyzer.analyze(audio_file_path, metadata)
         metadata = PlayabilityAnalyzer.analyze(audio_file_path, metadata)
-        csu = CloudStorageUploader()
-        if csu.enabled():
+        if cloud_storage_enabled:
+            csu = CloudStorageUploader()
             metadata = csu.upload_obj(audio_file_path, metadata)
         else:
             metadata = FileMoverAnalyzer.move(audio_file_path, import_directory, original_filename, metadata)
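In short, the pipeline now branches on a plain boolean handed in by the caller instead of constructing CloudStorageUploader just to ask whether cloud storage is enabled. A minimal, self-contained sketch of that control flow (the function name and the returned strings are stand-ins, not the project's real code):

# Sketch only: the two branches stand in for csu.upload_obj(...) and
# FileMoverAnalyzer.move(...) in the real pipeline.
def run_analysis_sketch(audio_file_path, cloud_storage_enabled):
    if not isinstance(cloud_storage_enabled, bool):
        raise TypeError("cloud_storage_enabled must be a boolean. Was of type "
                        + type(cloud_storage_enabled).__name__ + " instead.")
    if cloud_storage_enabled:
        return "uploaded to cloud: " + audio_file_path
    else:
        return "moved locally: " + audio_file_path

print(run_analysis_sketch("song.mp3", False))   # -> moved locally: song.mp3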

View File

@@ -207,9 +207,13 @@ class MessageListener:
     @staticmethod
     def spawn_analyzer_process(audio_file_path, import_directory, original_filename):
         ''' Spawn a child process to analyze and import a new audio file. '''
+        csu = CloudStorageUploader()
+        cloud_storage_enabled = csu.enabled()
         q = multiprocessing.Queue()
         p = multiprocessing.Process(target=AnalyzerPipeline.run_analysis,
-                args=(q, audio_file_path, import_directory, original_filename))
+                args=(q, audio_file_path, import_directory, original_filename, cloud_storage_enabled))
         p.start()
         p.join()
         if p.exitcode == 0:
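The listener side is the other half of the change: the parent asks CloudStorageUploader once, then passes only a picklable boolean across the process boundary, so the child no longer has to instantiate the uploader merely to check whether cloud storage is on. A self-contained sketch of that hand-off, with a stand-in worker in place of AnalyzerPipeline.run_analysis:

# Stand-in for the spawn pattern: the flag is resolved in the parent and crosses
# the process boundary as a plain bool in args.
import multiprocessing

def worker(queue, cloud_storage_enabled):
    queue.put("cloud" if cloud_storage_enabled else "local")

if __name__ == "__main__":
    cloud_storage_enabled = False   # the real code uses CloudStorageUploader().enabled()
    q = multiprocessing.Queue()
    p = multiprocessing.Process(target=worker, args=(q, cloud_storage_enabled))
    p.start()
    p.join()
    print(q.get())                  # -> local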

View File

@@ -20,8 +20,9 @@ def teardown():
 def test_basic():
     filename = os.path.basename(DEFAULT_AUDIO_FILE)
     q = multiprocessing.Queue()
+    cloud_storage_enabled = False
     #This actually imports the file into the "./Test Artist" directory.
-    AnalyzerPipeline.run_analysis(q, DEFAULT_AUDIO_FILE, u'.', filename)
+    AnalyzerPipeline.run_analysis(q, DEFAULT_AUDIO_FILE, u'.', filename, cloud_storage_enabled)
     metadata = q.get()
     assert metadata['track_title'] == u'Test Title'
     assert metadata['artist_name'] == u'Test Artist'
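With the new signature, every caller has to supply the boolean explicitly, which is why the test now builds cloud_storage_enabled = False and passes it through. A stand-alone illustration of that signature requirement (a plain list stands in for the multiprocessing queue; none of this is the repo's actual code):

# Hypothetical stand-in: a call without the flag fails, the updated call passes.
def run_analysis(queue, audio_file_path, import_directory, original_filename,
                 cloud_storage_enabled):
    queue.append({"track_title": u"Test Title"})

q = []
try:
    run_analysis(q, "song.mp3", u".", "song.mp3")         # old 4-argument call
except TypeError as e:
    print("call without the flag rejected: %s" % e)
run_analysis(q, "song.mp3", u".", "song.mp3", False)      # updated call
assert q[0]["track_title"] == u"Test Title"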