diff --git a/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py b/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py
index 514c1db38..f3cf04180 100644
--- a/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py
+++ b/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py
@@ -21,7 +21,7 @@ class AnalyzerPipeline:
     """
 
     @staticmethod
-    def run_analysis(queue, audio_file_path, import_directory, original_filename):
+    def run_analysis(queue, audio_file_path, import_directory, original_filename, cloud_storage_enabled):
         """Analyze and import an audio file, and put all extracted metadata into queue.
 
         Keyword arguments:
@@ -34,7 +34,7 @@ class AnalyzerPipeline:
                                preserve. The file at audio_file_path typically has a
                                temporary randomly generated name, which is why we want
                                to know what the original name was.
-            station_domain: The Airtime Pro account's domain name. i.e. bananas
+            cloud_storage_enabled: Whether to store the file in the cloud or on the local disk.
         """
         # It is super critical to initialize a separate log file here so that we
         # don't inherit logging/locks from the parent process. Supposedly
@@ -50,6 +50,8 @@ class AnalyzerPipeline:
             raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__ + " instead.")
         if not isinstance(original_filename, unicode):
             raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__ + " instead.")
+        if not isinstance(cloud_storage_enabled, bool):
+            raise TypeError("cloud_storage_enabled must be a boolean. Was of type " + type(cloud_storage_enabled).__name__ + " instead.")
 
 
         # Analyze the audio file we were told to analyze:
@@ -60,9 +62,8 @@ class AnalyzerPipeline:
         metadata = ReplayGainAnalyzer.analyze(audio_file_path, metadata)
         metadata = PlayabilityAnalyzer.analyze(audio_file_path, metadata)
 
-
-        csu = CloudStorageUploader()
-        if csu.enabled():
+        if cloud_storage_enabled:
+            csu = CloudStorageUploader()
             metadata = csu.upload_obj(audio_file_path, metadata)
         else:
             metadata = FileMoverAnalyzer.move(audio_file_path, import_directory, original_filename, metadata)
diff --git a/python_apps/airtime_analyzer/airtime_analyzer/message_listener.py b/python_apps/airtime_analyzer/airtime_analyzer/message_listener.py
index f88fa6bc7..8ed5fa782 100644
--- a/python_apps/airtime_analyzer/airtime_analyzer/message_listener.py
+++ b/python_apps/airtime_analyzer/airtime_analyzer/message_listener.py
@@ -207,9 +207,13 @@ class MessageListener:
     @staticmethod
     def spawn_analyzer_process(audio_file_path, import_directory, original_filename):
         ''' Spawn a child process to analyze and import a new audio file. '''
+
+        csu = CloudStorageUploader()
+        cloud_storage_enabled = csu.enabled()
+
         q = multiprocessing.Queue()
         p = multiprocessing.Process(target=AnalyzerPipeline.run_analysis,
-                        args=(q, audio_file_path, import_directory, original_filename))
+                        args=(q, audio_file_path, import_directory, original_filename, cloud_storage_enabled))
         p.start()
         p.join()
         if p.exitcode == 0:
diff --git a/python_apps/airtime_analyzer/tests/analyzer_pipeline_tests.py b/python_apps/airtime_analyzer/tests/analyzer_pipeline_tests.py
index 54458e33f..23d7d046b 100644
--- a/python_apps/airtime_analyzer/tests/analyzer_pipeline_tests.py
+++ b/python_apps/airtime_analyzer/tests/analyzer_pipeline_tests.py
@@ -20,8 +20,9 @@ def teardown():
 def test_basic():
     filename = os.path.basename(DEFAULT_AUDIO_FILE)
     q = multiprocessing.Queue()
+    cloud_storage_enabled = False
     #This actually imports the file into the "./Test Artist" directory.
-    AnalyzerPipeline.run_analysis(q, DEFAULT_AUDIO_FILE, u'.', filename)
+    AnalyzerPipeline.run_analysis(q, DEFAULT_AUDIO_FILE, u'.', filename, cloud_storage_enabled)
     metadata = q.get()
     assert metadata['track_title'] == u'Test Title'
     assert metadata['artist_name'] == u'Test Artist'
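
Note on the pattern this diff adopts: the parent process (MessageListener.spawn_analyzer_process) now decides whether cloud storage is enabled and hands the resulting boolean to the analysis child process, instead of having the child construct a CloudStorageUploader just to ask it. Below is a minimal, self-contained sketch of that parent-to-child hand-off, not code from the Airtime tree; is_cloud_enabled() and the queue payload are hypothetical stand-ins used only for illustration.

import multiprocessing

def is_cloud_enabled():
    # Hypothetical stand-in for CloudStorageUploader.enabled(), which in the
    # real code reads the cloud storage settings from the analyzer config.
    return False

def run_analysis(queue, audio_file_path, cloud_storage_enabled):
    # The child receives a plain, picklable bool; it never has to read config
    # or build an uploader object just to learn whether cloud storage is on.
    if not isinstance(cloud_storage_enabled, bool):
        raise TypeError("cloud_storage_enabled must be a boolean. Was of type "
                        + type(cloud_storage_enabled).__name__ + " instead.")
    destination = "cloud" if cloud_storage_enabled else "local disk"
    queue.put({"file": audio_file_path, "stored_on": destination})

if __name__ == "__main__":
    # Parent process: decide once, then pass the decision down as an argument.
    cloud_storage_enabled = is_cloud_enabled()
    q = multiprocessing.Queue()
    p = multiprocessing.Process(target=run_analysis,
                                args=(q, "/tmp/example.mp3", cloud_storage_enabled))
    p.start()
    result = q.get()   # read the result before join() so the child can exit cleanly
    p.join()
    print(result)

Passing a plain bool keeps the child's arguments picklable and trivial to validate, which is what the new isinstance guard in run_analysis relies on.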