Another small bugfix for error handling in the analyzer

Albert Santoni 2015-04-06 17:33:08 -04:00
parent 492a7f329a
commit d5012c25cb
2 changed files with 9 additions and 7 deletions


@@ -21,7 +21,9 @@ class AnalyzerPipeline:
     so that if it crashes, it does not kill the entire airtime_analyzer daemon and
     the failure to import can be reported back to the web application.
     """
+    IMPORT_STATUS_FAILED = 2
+
     @staticmethod
     def run_analysis(queue, audio_file_path, import_directory, original_filename, storage_backend, file_prefix, cloud_storage_config):
         """Analyze and import an audio file, and put all extracted metadata into queue.
@@ -86,12 +88,12 @@ class AnalyzerPipeline:
             queue.put(metadata)
         except UnplayableFileError as e:
             logging.exception(e)
-            metadata["import_status"] = 2
+            metadata["import_status"] = IMPORT_STATUS_FAILED
             metadata["reason"] = "The file could not be played."
             raise e
         except Exception as e:
             # Ensures the traceback for this child process gets written to our log files:
-            logging.exception(e)
+            logging.exception(e)
             raise e
     @staticmethod
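
The change in this file promotes the magic number 2 to a named class constant, IMPORT_STATUS_FAILED, so other modules can reference it instead of hard-coding the literal. A minimal sketch of the pattern, with a two-argument run_analysis and a stubbed-out analysis step standing in for the real seven-parameter pipeline (the simplified body is illustrative, not the actual pipeline code):

    import logging

    class AnalyzerPipeline:
        # Status code the web application understands as "import failed".
        # Naming it lets callers write AnalyzerPipeline.IMPORT_STATUS_FAILED
        # instead of the bare literal 2.
        IMPORT_STATUS_FAILED = 2

        @staticmethod
        def run_analysis(queue, audio_file_path):
            metadata = {}
            try:
                # ... the real pipeline would extract metadata here ...
                queue.put(metadata)
            except Exception as e:
                logging.exception(e)
                # Inside a @staticmethod the constant needs the class-name
                # qualifier, since no self/cls is in scope.
                metadata["import_status"] = AnalyzerPipeline.IMPORT_STATUS_FAILED
                raise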


@@ -226,19 +226,19 @@ class MessageListener:
         else:
             raise Exception("Analyzer process terminated unexpectedly.")
         '''
-        results = {}
+        metadata = {}

         q = Queue.Queue()
         try:
             AnalyzerPipeline.run_analysis(q, audio_file_path, import_directory, original_filename, storage_backend, file_prefix, cloud_storage_config)
-            results = q.get()
+            metadata = q.get()
         except Exception as e:
             logging.error("Analyzer pipeline exception: %s" % str(e))
-            pass
+            metadata["import_status"] = AnalyzerPipeline.IMPORT_STATUS_FAILED

         # Ensure our queue doesn't fill up and block due to unexpected behaviour. Defensive code.
         while not q.empty():
             q.get()

-        return results
+        return metadata
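
Taken together, the two changes mean a pipeline crash can no longer be mistaken for a successful-but-empty import: instead of swallowing the exception with a bare pass, the listener tags the metadata dict with the failure status before returning it, while still draining the queue defensively. A short usage sketch against the AnalyzerPipeline stub above (the file path is illustrative):

    import Queue  # Python 2 stdlib, matching the code above

    q = Queue.Queue()
    metadata = {}
    try:
        AnalyzerPipeline.run_analysis(q, "/tmp/example.mp3")
        metadata = q.get()
    except Exception:
        # The old code did a bare `pass` here, so a crash produced an empty
        # dict indistinguishable from a successful-but-empty result.
        metadata["import_status"] = AnalyzerPipeline.IMPORT_STATUS_FAILED

    # Defensive, as in the hunk above: drain anything left on the queue so
    # it can't fill up and block due to unexpected behaviour.
    while not q.empty():
        q.get()

    if metadata.get("import_status") == AnalyzerPipeline.IMPORT_STATUS_FAILED:
        print("Import failed; report status back to the web application")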