SAAS-527: Allow files to be uploaded to either cloud storage or local file storage
Work in progress — not quite done yet.
This commit is contained in:
parent
ff0a685243
commit
1de326283e
5 changed files with 29 additions and 12 deletions
|
@ -21,7 +21,7 @@ class AnalyzerPipeline:
|
|||
"""
|
||||
|
||||
@staticmethod
|
||||
def run_analysis(queue, audio_file_path, import_directory, original_filename, station_domain):
|
||||
def run_analysis(queue, audio_file_path, import_directory, original_filename, station_domain, current_storage_backend):
|
||||
"""Analyze and import an audio file, and put all extracted metadata into queue.
|
||||
|
||||
Keyword arguments:
|
||||
|
@ -55,15 +55,19 @@ class AnalyzerPipeline:
|
|||
# Analyze the audio file we were told to analyze:
|
||||
# First, we extract the ID3 tags and other metadata:
|
||||
metadata = dict()
|
||||
metadata = MetadataAnalyzer.analyze(audio_file_path, metadata)
|
||||
metadata["station_domain"] = station_domain
|
||||
|
||||
metadata = MetadataAnalyzer.analyze(audio_file_path, metadata)
|
||||
metadata = CuePointAnalyzer.analyze(audio_file_path, metadata)
|
||||
metadata = ReplayGainAnalyzer.analyze(audio_file_path, metadata)
|
||||
metadata = PlayabilityAnalyzer.analyze(audio_file_path, metadata)
|
||||
|
||||
csu = CloudStorageUploader()
|
||||
metadata = csu.upload_obj(audio_file_path, metadata)
|
||||
if current_storage_backend == "file":
|
||||
metadata = FileMoverAnalyzer.move(audio_file_path, import_directory, original_filename, metadata)
|
||||
else:
|
||||
csu = CloudStorageUploader()
|
||||
metadata = csu.upload_obj(audio_file_path, metadata)
|
||||
|
||||
metadata["import_status"] = 0 # Successfully imported
|
||||
|
||||
# Note that the queue we're putting the results into is our interprocess communication
|
||||
|
|
|
@ -150,6 +150,8 @@ class MessageListener:
|
|||
original_filename = ""
|
||||
callback_url = ""
|
||||
api_key = ""
|
||||
station_domain = ""
|
||||
current_storage_backend = ""
|
||||
|
||||
''' Spin up a worker process. We use the multiprocessing module and multiprocessing.Queue
|
||||
to pass objects between the processes so that if the analyzer process crashes, it does not
|
||||
|
@ -166,8 +168,9 @@ class MessageListener:
|
|||
audio_file_path = msg_dict["tmp_file_path"]
|
||||
import_directory = msg_dict["import_directory"]
|
||||
original_filename = msg_dict["original_filename"]
|
||||
current_storage_backend = msg_dict["current_storage_backend"]
|
||||
|
||||
audio_metadata = MessageListener.spawn_analyzer_process(audio_file_path, import_directory, original_filename, station_domain)
|
||||
audio_metadata = MessageListener.spawn_analyzer_process(audio_file_path, import_directory, original_filename, station_domain, current_storage_backend)
|
||||
StatusReporter.report_success_to_callback_url(callback_url, api_key, audio_metadata)
|
||||
|
||||
except KeyError as e:
|
||||
|
@ -206,11 +209,11 @@ class MessageListener:
|
|||
channel.basic_ack(delivery_tag=method_frame.delivery_tag)
|
||||
|
||||
@staticmethod
|
||||
def spawn_analyzer_process(audio_file_path, import_directory, original_filename, station_domain):
|
||||
def spawn_analyzer_process(audio_file_path, import_directory, original_filename, station_domain, current_storage_backend):
|
||||
''' Spawn a child process to analyze and import a new audio file. '''
|
||||
q = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(target=AnalyzerPipeline.run_analysis,
|
||||
args=(q, audio_file_path, import_directory, original_filename, station_domain))
|
||||
args=(q, audio_file_path, import_directory, original_filename, station_domain, current_storage_backend))
|
||||
p.start()
|
||||
p.join()
|
||||
if p.exitcode == 0:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue