More analyzer log statements

This commit is contained in:
drigato 2015-02-03 13:54:25 -05:00
parent 3127507124
commit 7808fd4708
2 changed files with 9 additions and 3 deletions

View file

@@ -67,12 +67,13 @@ class AnalyzerPipeline:
csu = CloudStorageUploader() csu = CloudStorageUploader()
if csu.enabled(): if csu.enabled():
logging.info("333")
metadata = csu.upload_obj(audio_file_path, metadata) metadata = csu.upload_obj(audio_file_path, metadata)
else: else:
metadata = FileMoverAnalyzer.move(audio_file_path, import_directory, original_filename, metadata) metadata = FileMoverAnalyzer.move(audio_file_path, import_directory, original_filename, metadata)
metadata["import_status"] = 0 # Successfully imported metadata["import_status"] = 0 # Successfully imported
logging.info("333") logging.info("444")
# Note that the queue we're putting the results into is our interprocess communication # Note that the queue we're putting the results into is our interprocess communication
# back to the main process. # back to the main process.
@@ -80,7 +81,7 @@ class AnalyzerPipeline:
# Pass all the file metadata back to the main analyzer process, which then passes # Pass all the file metadata back to the main analyzer process, which then passes
# it back to the Airtime web application. # it back to the Airtime web application.
queue.put(metadata) queue.put(metadata)
logging.info("444") logging.info("555")
except UnplayableFileError as e: except UnplayableFileError as e:
logging.exception(e) logging.exception(e)
metadata["import_status"] = 2 metadata["import_status"] = 2

View file

@@ -31,6 +31,7 @@ class CloudStorageUploader:
CLOUD_CONFIG_PATH = "/etc/airtime-saas/%s/cloud_storage_%s.conf" % (dev_env, dev_env) CLOUD_CONFIG_PATH = "/etc/airtime-saas/%s/cloud_storage_%s.conf" % (dev_env, dev_env)
logging.info(CLOUD_CONFIG_PATH)
config = config_file.read_config_file(CLOUD_CONFIG_PATH) config = config_file.read_config_file(CLOUD_CONFIG_PATH)
CLOUD_STORAGE_CONFIG_SECTION = config.get("current_backend", "storage_backend") CLOUD_STORAGE_CONFIG_SECTION = config.get("current_backend", "storage_backend")
@@ -72,7 +73,7 @@ class CloudStorageUploader:
resource_id: The unique object name used to identify the objects resource_id: The unique object name used to identify the objects
on Amazon S3 on Amazon S3
""" """
logging.info("aaa")
file_base_name = os.path.basename(audio_file_path) file_base_name = os.path.basename(audio_file_path)
file_name, extension = os.path.splitext(file_base_name) file_name, extension = os.path.splitext(file_base_name)
@@ -82,6 +83,7 @@ class CloudStorageUploader:
file_name = file_name.replace(" ", "-") file_name = file_name.replace(" ", "-")
unique_id = str(uuid.uuid4()) unique_id = str(uuid.uuid4())
logging.info("bbb")
# We add another prefix to the resource name with the last two characters # We add another prefix to the resource name with the last two characters
# of the unique id so files are not all placed under the root folder. We # of the unique id so files are not all placed under the root folder. We
@@ -89,8 +91,10 @@ class CloudStorageUploader:
# is done via the S3 Browser client. The client will hang if there are too # is done via the S3 Browser client. The client will hang if there are too
# many files under the same folder. # many files under the same folder.
unique_id_prefix = unique_id[-2:] unique_id_prefix = unique_id[-2:]
logging.info("ccc")
resource_id = "%s/%s/%s_%s%s" % (metadata['file_prefix'], unique_id_prefix, file_name, unique_id, extension) resource_id = "%s/%s/%s_%s%s" % (metadata['file_prefix'], unique_id_prefix, file_name, unique_id, extension)
logging.info("ddd")
conn = S3Connection(self._api_key, self._api_key_secret, host=self._host) conn = S3Connection(self._api_key, self._api_key_secret, host=self._host)
bucket = conn.get_bucket(self._bucket) bucket = conn.get_bucket(self._bucket)
@@ -101,6 +105,7 @@ class CloudStorageUploader:
key.set_contents_from_filename(audio_file_path) key.set_contents_from_filename(audio_file_path)
metadata["filesize"] = os.path.getsize(audio_file_path) metadata["filesize"] = os.path.getsize(audio_file_path)
logging.info("eee")
# Remove file from organize directory # Remove file from organize directory
try: try: