From 5040eb498df4215845070ff93099d52b7a49cfc3 Mon Sep 17 00:00:00 2001 From: drigato Date: Mon, 2 Feb 2015 12:54:56 -0500 Subject: [PATCH 1/7] SAAS-560: Deploy separate cloud storage config files for each development environment --- airtime_mvc/application/configs/conf.php | 26 +++++++++---------- airtime_mvc/application/models/RabbitMq.php | 2 +- .../cloud_storage_uploader.py | 10 +++++-- 3 files changed, 22 insertions(+), 16 deletions(-) diff --git a/airtime_mvc/application/configs/conf.php b/airtime_mvc/application/configs/conf.php index 57226328d..dacdcc698 100644 --- a/airtime_mvc/application/configs/conf.php +++ b/airtime_mvc/application/configs/conf.php @@ -25,19 +25,6 @@ class Config { $filename = isset($_SERVER['AIRTIME_CONF']) ? $_SERVER['AIRTIME_CONF'] : "/etc/airtime/airtime.conf"; } - // Parse separate conf file for cloud storage values - $cloudStorageConfig = isset($_SERVER['CLOUD_STORAGE_CONF']) ? $_SERVER['CLOUD_STORAGE_CONF'] : "/etc/airtime-saas/cloud_storage.conf"; - $cloudStorageValues = parse_ini_file($cloudStorageConfig, true); - - $CC_CONFIG["supportedStorageBackends"] = array('amazon_S3'); - foreach ($CC_CONFIG["supportedStorageBackends"] as $backend) { - $CC_CONFIG[$backend] = $cloudStorageValues[$backend]; - } - - // Tells us where file uploads will be uploaded to. - // It will either be set to a cloud storage backend or local file storage. - $CC_CONFIG["current_backend"] = $cloudStorageValues["current_backend"]["storage_backend"]; - $values = parse_ini_file($filename, true); // Name of the web server user @@ -54,6 +41,19 @@ class Config { $CC_CONFIG['dev_env'] = 'production'; } + // Parse separate conf file for cloud storage values + $cloudStorageConfig = "/etc/airtime-saas/".$CC_CONFIG['dev_env']."/cloud_storage_".$CC_CONFIG['dev_env'].".conf"; + $cloudStorageValues = parse_ini_file($cloudStorageConfig, true); + + $CC_CONFIG["supportedStorageBackends"] = array('amazon_S3'); + foreach ($CC_CONFIG["supportedStorageBackends"] as $backend) { + $CC_CONFIG[$backend] = $cloudStorageValues[$backend]; + } + + // Tells us where file uploads will be uploaded to. + // It will either be set to a cloud storage backend or local file storage. + $CC_CONFIG["current_backend"] = $cloudStorageValues["current_backend"]["storage_backend"]; + $CC_CONFIG['cache_ahead_hours'] = $values['general']['cache_ahead_hours']; $CC_CONFIG['monit_user'] = $values['monit']['monit_user']; diff --git a/airtime_mvc/application/models/RabbitMq.php b/airtime_mvc/application/models/RabbitMq.php index 49c0f40b7..30481b216 100644 --- a/airtime_mvc/application/models/RabbitMq.php +++ b/airtime_mvc/application/models/RabbitMq.php @@ -89,7 +89,7 @@ class Application_Model_RabbitMq if (array_key_exists("dev_env", $CC_CONFIG)) { $devEnv = $CC_CONFIG["dev_env"]; } - $config = parse_ini_file("/etc/airtime-saas/rabbitmq-analyzer-" . $devEnv . ".ini", true); + $config = parse_ini_file("/etc/airtime-saas/".$devEnv."/rabbitmq-analyzer-" . $devEnv . 
".ini", true); $conn = new AMQPConnection($config["rabbitmq"]["host"], $config["rabbitmq"]["port"], $config["rabbitmq"]["user"], diff --git a/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py b/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py index 635764154..52e20fe76 100644 --- a/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py +++ b/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py @@ -5,8 +5,7 @@ import config_file from boto.s3.connection import S3Connection from boto.s3.key import Key - -CLOUD_CONFIG_PATH = '/etc/airtime-saas/cloud_storage.conf' +AIRTIME_CONFIG_PATH = '/etc/airtime/airtime.conf' STORAGE_BACKEND_FILE = "file" class CloudStorageUploader: @@ -25,6 +24,13 @@ class CloudStorageUploader: def __init__(self): + airtime_config = config_file.read_config_file(AIRTIME_CONFIG_PATH) + dev_env = "production" # Default + if airtime_config.has_option("general", "dev_env"): + dev_env = airtime_config.get("general", "dev_env") + + + CLOUD_CONFIG_PATH = "/etc/airtime-saas/%s/cloud_storage_%s.conf" % (dev_env, dev_env) config = config_file.read_config_file(CLOUD_CONFIG_PATH) CLOUD_STORAGE_CONFIG_SECTION = config.get("current_backend", "storage_backend") From 312750712414dba231359fa4403baec75c594519 Mon Sep 17 00:00:00 2001 From: drigato Date: Tue, 3 Feb 2015 13:21:07 -0500 Subject: [PATCH 2/7] Analyzer log statements for debugging --- .../airtime_analyzer/airtime_analyzer/analyzer_pipeline.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py b/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py index 682afb2d3..94bc00038 100644 --- a/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py +++ b/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py @@ -42,6 +42,7 @@ class AnalyzerPipeline: AnalyzerPipeline.python_logger_deadlock_workaround() try: + logging.info("111") if not isinstance(queue, multiprocessing.queues.Queue): raise TypeError("queue must be a multiprocessing.Queue()") if not isinstance(audio_file_path, unicode): @@ -61,6 +62,7 @@ class AnalyzerPipeline: metadata = CuePointAnalyzer.analyze(audio_file_path, metadata) metadata = ReplayGainAnalyzer.analyze(audio_file_path, metadata) metadata = PlayabilityAnalyzer.analyze(audio_file_path, metadata) + logging.info("222") csu = CloudStorageUploader() @@ -70,6 +72,7 @@ class AnalyzerPipeline: metadata = FileMoverAnalyzer.move(audio_file_path, import_directory, original_filename, metadata) metadata["import_status"] = 0 # Successfully imported + logging.info("333") # Note that the queue we're putting the results into is our interprocess communication # back to the main process. @@ -77,6 +80,7 @@ class AnalyzerPipeline: # Pass all the file metadata back to the main analyzer process, which then passes # it back to the Airtime web application. 
queue.put(metadata) + logging.info("444") except UnplayableFileError as e: logging.exception(e) metadata["import_status"] = 2 From 7808fd470826da4218f548ca4a647e1d11d6771a Mon Sep 17 00:00:00 2001 From: drigato Date: Tue, 3 Feb 2015 13:54:25 -0500 Subject: [PATCH 3/7] More analyzer log statements --- .../airtime_analyzer/airtime_analyzer/analyzer_pipeline.py | 5 +++-- .../airtime_analyzer/cloud_storage_uploader.py | 7 ++++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py b/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py index 94bc00038..0779d5146 100644 --- a/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py +++ b/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py @@ -67,12 +67,13 @@ class AnalyzerPipeline: csu = CloudStorageUploader() if csu.enabled(): + logging.info("333") metadata = csu.upload_obj(audio_file_path, metadata) else: metadata = FileMoverAnalyzer.move(audio_file_path, import_directory, original_filename, metadata) metadata["import_status"] = 0 # Successfully imported - logging.info("333") + logging.info("444") # Note that the queue we're putting the results into is our interprocess communication # back to the main process. @@ -80,7 +81,7 @@ class AnalyzerPipeline: # Pass all the file metadata back to the main analyzer process, which then passes # it back to the Airtime web application. queue.put(metadata) - logging.info("444") + logging.info("555") except UnplayableFileError as e: logging.exception(e) metadata["import_status"] = 2 diff --git a/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py b/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py index 52e20fe76..e47a6b004 100644 --- a/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py +++ b/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py @@ -31,6 +31,7 @@ class CloudStorageUploader: CLOUD_CONFIG_PATH = "/etc/airtime-saas/%s/cloud_storage_%s.conf" % (dev_env, dev_env) + logging.info(CLOUD_CONFIG_PATH) config = config_file.read_config_file(CLOUD_CONFIG_PATH) CLOUD_STORAGE_CONFIG_SECTION = config.get("current_backend", "storage_backend") @@ -72,7 +73,7 @@ class CloudStorageUploader: resource_id: The unique object name used to identify the objects on Amazon S3 """ - + logging.info("aaa") file_base_name = os.path.basename(audio_file_path) file_name, extension = os.path.splitext(file_base_name) @@ -82,6 +83,7 @@ class CloudStorageUploader: file_name = file_name.replace(" ", "-") unique_id = str(uuid.uuid4()) + logging.info("bbb") # We add another prefix to the resource name with the last two characters # of the unique id so files are not all placed under the root folder. We @@ -89,8 +91,10 @@ class CloudStorageUploader: # is done via the S3 Browser client. The client will hang if there are too # many files under the same folder. 
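# For example, a unique_id ending in "7f" yields a resource_id of the form
# <file_prefix>/7f/<file_name>_<unique_id><extension>.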
unique_id_prefix = unique_id[-2:] + logging.info("ccc") resource_id = "%s/%s/%s_%s%s" % (metadata['file_prefix'], unique_id_prefix, file_name, unique_id, extension) + logging.info("ddd") conn = S3Connection(self._api_key, self._api_key_secret, host=self._host) bucket = conn.get_bucket(self._bucket) @@ -101,6 +105,7 @@ class CloudStorageUploader: key.set_contents_from_filename(audio_file_path) metadata["filesize"] = os.path.getsize(audio_file_path) + logging.info("eee") # Remove file from organize directory try: From 7b3f9af04c319901ec8a3114693f2deb8016585c Mon Sep 17 00:00:00 2001 From: drigato Date: Tue, 3 Feb 2015 15:55:47 -0500 Subject: [PATCH 4/7] SAAS-560: Deploy separate cloud storage config files for each development environment Changed analyzer upstart to take the cloud storage config file as a command line option Dropped the dev env portion from the rabbitmq-analyzer.ini filename --- airtime_mvc/application/configs/conf.php | 2 +- airtime_mvc/application/models/RabbitMq.php | 2 +- .../airtime_analyzer/airtime_analyzer.py | 9 ++++++--- .../airtime_analyzer/analyzer_pipeline.py | 12 ++++------- .../cloud_storage_uploader.py | 20 ++----------------- .../airtime_analyzer/message_listener.py | 11 ++++++---- .../airtime_analyzer/bin/airtime_analyzer | 9 ++++++++- 7 files changed, 29 insertions(+), 36 deletions(-) diff --git a/airtime_mvc/application/configs/conf.php b/airtime_mvc/application/configs/conf.php index dacdcc698..75a69d751 100644 --- a/airtime_mvc/application/configs/conf.php +++ b/airtime_mvc/application/configs/conf.php @@ -42,7 +42,7 @@ class Config { } // Parse separate conf file for cloud storage values - $cloudStorageConfig = "/etc/airtime-saas/".$CC_CONFIG['dev_env']."/cloud_storage_".$CC_CONFIG['dev_env'].".conf"; + $cloudStorageConfig = "/etc/airtime-saas/".$CC_CONFIG['dev_env']."/cloud_storage.conf"; $cloudStorageValues = parse_ini_file($cloudStorageConfig, true); $CC_CONFIG["supportedStorageBackends"] = array('amazon_S3'); diff --git a/airtime_mvc/application/models/RabbitMq.php b/airtime_mvc/application/models/RabbitMq.php index 30481b216..9ab7e6c22 100644 --- a/airtime_mvc/application/models/RabbitMq.php +++ b/airtime_mvc/application/models/RabbitMq.php @@ -89,7 +89,7 @@ class Application_Model_RabbitMq if (array_key_exists("dev_env", $CC_CONFIG)) { $devEnv = $CC_CONFIG["dev_env"]; } - $config = parse_ini_file("/etc/airtime-saas/".$devEnv."/rabbitmq-analyzer-" . $devEnv . 
".ini", true); + $config = parse_ini_file("/etc/airtime-saas/".$devEnv."/rabbitmq-analyzer.ini", true); $conn = new AMQPConnection($config["rabbitmq"]["host"], $config["rabbitmq"]["port"], $config["rabbitmq"]["user"], diff --git a/python_apps/airtime_analyzer/airtime_analyzer/airtime_analyzer.py b/python_apps/airtime_analyzer/airtime_analyzer/airtime_analyzer.py index c643f21c9..2dc78d5bf 100644 --- a/python_apps/airtime_analyzer/airtime_analyzer/airtime_analyzer.py +++ b/python_apps/airtime_analyzer/airtime_analyzer/airtime_analyzer.py @@ -23,7 +23,7 @@ class AirtimeAnalyzerServer: # Variables _log_level = logging.INFO - def __init__(self, config_path, http_retry_queue_path, debug=False): + def __init__(self, config_path, cloud_storage_config_path, http_retry_queue_path, debug=False): # Dump a stacktrace with 'kill -SIGUSR2 ' signal.signal(signal.SIGUSR2, lambda sig, frame: AirtimeAnalyzerServer.dump_stacktrace()) @@ -31,15 +31,18 @@ class AirtimeAnalyzerServer: # Configure logging self.setup_logging(debug) - # Read our config file + # Read our rmq config file config = config_file.read_config_file(config_path) + + # Read the cloud storage config file + cloud_storage_config = config_file.read_config_file(cloud_storage_config_path) # Start up the StatusReporter process StatusReporter.start_thread(http_retry_queue_path) # Start listening for RabbitMQ messages telling us about newly # uploaded files. This blocks until we recieve a shutdown signal. - self._msg_listener = MessageListener(config) + self._msg_listener = MessageListener(config, cloud_storage_config) StatusReporter.stop_thread() diff --git a/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py b/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py index 0779d5146..7d56f0de3 100644 --- a/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py +++ b/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py @@ -21,7 +21,7 @@ class AnalyzerPipeline: """ @staticmethod - def run_analysis(queue, audio_file_path, import_directory, original_filename, file_prefix): + def run_analysis(queue, audio_file_path, import_directory, original_filename, file_prefix, cloud_storage_config): """Analyze and import an audio file, and put all extracted metadata into queue. Keyword arguments: @@ -34,7 +34,8 @@ class AnalyzerPipeline: preserve. The file at audio_file_path typically has a temporary randomly generated name, which is why we want to know what the original name was. - station_domain: The Airtime Pro account's domain name. i.e. bananas + file_prefix: + cloud_storage_config: ConfigParser object containing the cloud storage configuration settings """ # It is super critical to initialize a separate log file here so that we # don't inherit logging/locks from the parent process. 
Supposedly @@ -42,7 +43,6 @@ class AnalyzerPipeline: AnalyzerPipeline.python_logger_deadlock_workaround() try: - logging.info("111") if not isinstance(queue, multiprocessing.queues.Queue): raise TypeError("queue must be a multiprocessing.Queue()") if not isinstance(audio_file_path, unicode): @@ -62,18 +62,15 @@ class AnalyzerPipeline: metadata = CuePointAnalyzer.analyze(audio_file_path, metadata) metadata = ReplayGainAnalyzer.analyze(audio_file_path, metadata) metadata = PlayabilityAnalyzer.analyze(audio_file_path, metadata) - logging.info("222") - csu = CloudStorageUploader() + csu = CloudStorageUploader(cloud_storage_config) if csu.enabled(): - logging.info("333") metadata = csu.upload_obj(audio_file_path, metadata) else: metadata = FileMoverAnalyzer.move(audio_file_path, import_directory, original_filename, metadata) metadata["import_status"] = 0 # Successfully imported - logging.info("444") # Note that the queue we're putting the results into is our interprocess communication # back to the main process. @@ -81,7 +78,6 @@ class AnalyzerPipeline: # Pass all the file metadata back to the main analyzer process, which then passes # it back to the Airtime web application. queue.put(metadata) - logging.info("555") except UnplayableFileError as e: logging.exception(e) metadata["import_status"] = 2 diff --git a/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py b/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py index e47a6b004..eda3aabee 100644 --- a/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py +++ b/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py @@ -1,11 +1,9 @@ import os import logging import uuid -import config_file from boto.s3.connection import S3Connection from boto.s3.key import Key -AIRTIME_CONFIG_PATH = '/etc/airtime/airtime.conf' STORAGE_BACKEND_FILE = "file" class CloudStorageUploader: @@ -22,17 +20,7 @@ class CloudStorageUploader: _api_key_secret: Secret access key to objects on Amazon S3. """ - def __init__(self): - - airtime_config = config_file.read_config_file(AIRTIME_CONFIG_PATH) - dev_env = "production" # Default - if airtime_config.has_option("general", "dev_env"): - dev_env = airtime_config.get("general", "dev_env") - - - CLOUD_CONFIG_PATH = "/etc/airtime-saas/%s/cloud_storage_%s.conf" % (dev_env, dev_env) - logging.info(CLOUD_CONFIG_PATH) - config = config_file.read_config_file(CLOUD_CONFIG_PATH) + def __init__(self, config): CLOUD_STORAGE_CONFIG_SECTION = config.get("current_backend", "storage_backend") self._storage_backend = CLOUD_STORAGE_CONFIG_SECTION @@ -73,7 +61,7 @@ class CloudStorageUploader: resource_id: The unique object name used to identify the objects on Amazon S3 """ - logging.info("aaa") + file_base_name = os.path.basename(audio_file_path) file_name, extension = os.path.splitext(file_base_name) @@ -83,7 +71,6 @@ class CloudStorageUploader: file_name = file_name.replace(" ", "-") unique_id = str(uuid.uuid4()) - logging.info("bbb") # We add another prefix to the resource name with the last two characters # of the unique id so files are not all placed under the root folder. We @@ -91,10 +78,8 @@ class CloudStorageUploader: # is done via the S3 Browser client. The client will hang if there are too # many files under the same folder. 
unique_id_prefix = unique_id[-2:] - logging.info("ccc") resource_id = "%s/%s/%s_%s%s" % (metadata['file_prefix'], unique_id_prefix, file_name, unique_id, extension) - logging.info("ddd") conn = S3Connection(self._api_key, self._api_key_secret, host=self._host) bucket = conn.get_bucket(self._bucket) @@ -105,7 +90,6 @@ class CloudStorageUploader: key.set_contents_from_filename(audio_file_path) metadata["filesize"] = os.path.getsize(audio_file_path) - logging.info("eee") # Remove file from organize directory try: diff --git a/python_apps/airtime_analyzer/airtime_analyzer/message_listener.py b/python_apps/airtime_analyzer/airtime_analyzer/message_listener.py index 4f1e31084..76713d47f 100644 --- a/python_apps/airtime_analyzer/airtime_analyzer/message_listener.py +++ b/python_apps/airtime_analyzer/airtime_analyzer/message_listener.py @@ -55,12 +55,13 @@ QUEUE = "airtime-uploads" """ class MessageListener: - def __init__(self, config): + def __init__(self, config, cloud_storage_config): ''' Start listening for file upload notification messages from RabbitMQ Keyword arguments: config: A ConfigParser object containing the [rabbitmq] configuration. + cloud_storage_config: A ConfigParser object containing the cloud storage configuration. ''' self._shutdown = False @@ -73,6 +74,8 @@ class MessageListener: self._username = config.get(RMQ_CONFIG_SECTION, 'user') self._password = config.get(RMQ_CONFIG_SECTION, 'password') self._vhost = config.get(RMQ_CONFIG_SECTION, 'vhost') + + self.cloud_storage_config = cloud_storage_config # Set up a signal handler so we can shutdown gracefully # For some reason, this signal handler must be set up here. I'd rather @@ -167,7 +170,7 @@ class MessageListener: original_filename = msg_dict["original_filename"] file_prefix = msg_dict["file_prefix"] - audio_metadata = MessageListener.spawn_analyzer_process(audio_file_path, import_directory, original_filename, file_prefix) + audio_metadata = MessageListener.spawn_analyzer_process(audio_file_path, import_directory, original_filename, file_prefix, self.cloud_storage_config) StatusReporter.report_success_to_callback_url(callback_url, api_key, audio_metadata) @@ -207,11 +210,11 @@ class MessageListener: channel.basic_ack(delivery_tag=method_frame.delivery_tag) @staticmethod - def spawn_analyzer_process(audio_file_path, import_directory, original_filename, file_prefix): + def spawn_analyzer_process(audio_file_path, import_directory, original_filename, file_prefix, cloud_storage_config): ''' Spawn a child process to analyze and import a new audio file. 
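Takes the same arguments as AnalyzerPipeline.run_analysis, minus the queue:
the audio file path, the import directory, the original filename, the cloud
storage file prefix, and the cloud storage ConfigParser object.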
''' q = multiprocessing.Queue() p = multiprocessing.Process(target=AnalyzerPipeline.run_analysis, - args=(q, audio_file_path, import_directory, original_filename, file_prefix)) + args=(q, audio_file_path, import_directory, original_filename, file_prefix, cloud_storage_config)) p.start() p.join() if p.exitcode == 0: diff --git a/python_apps/airtime_analyzer/bin/airtime_analyzer b/python_apps/airtime_analyzer/bin/airtime_analyzer index 35debbd41..154274a93 100755 --- a/python_apps/airtime_analyzer/bin/airtime_analyzer +++ b/python_apps/airtime_analyzer/bin/airtime_analyzer @@ -9,6 +9,7 @@ import airtime_analyzer.airtime_analyzer as aa VERSION = "1.0" DEFAULT_CONFIG_PATH = '/etc/airtime/airtime.conf' +DEFAULT_CLOUD_STORAGE_CONFIG_PATH = '/etc/airtime-saas/production/cloud_storage.conf' DEFAULT_HTTP_RETRY_PATH = '/tmp/airtime_analyzer_http_retries' def run(): @@ -18,6 +19,7 @@ def run(): parser.add_argument("-d", "--daemon", help="run as a daemon", action="store_true") parser.add_argument("--debug", help="log full debugging output", action="store_true") parser.add_argument("--rmq-config-file", help="specify a configuration file with RabbitMQ settings (default is %s)" % DEFAULT_CONFIG_PATH) + parser.add_argument("--cloud-storage-config-file", help="specify a configuration file with cloud storage settings (default is %s)" % DEFAULT_CLOUD_STORAGE_CONFIG_PATH) parser.add_argument("--http-retry-queue-file", help="specify where incompleted HTTP requests will be serialized (default is %s)" % DEFAULT_HTTP_RETRY_PATH) args = parser.parse_args() @@ -25,20 +27,25 @@ def run(): #Default config file path config_path = DEFAULT_CONFIG_PATH + cloud_storage_config_path = DEFAULT_CLOUD_STORAGE_CONFIG_PATH http_retry_queue_path = DEFAULT_HTTP_RETRY_PATH if args.rmq_config_file: config_path = args.rmq_config_file + if args.cloud_storage_config_file: + cloud_storage_config_path = args.cloud_storage_config_file if args.http_retry_queue_file: http_retry_queue_path = args.http_retry_queue_file if args.daemon: with daemon.DaemonContext(): - aa.AirtimeAnalyzerServer(config_path=config_path, + aa.AirtimeAnalyzerServer(config_path=config_path, + cloud_storage_config_path = cloud_storage_config_path, http_retry_queue_path=http_retry_queue_path, debug=args.debug) else: # Run without daemonizing aa.AirtimeAnalyzerServer(config_path=config_path, + cloud_storage_config_path = cloud_storage_config_path, http_retry_queue_path=http_retry_queue_path, debug=args.debug) From e6171680ad5255e8c7d85ea4d71de7a10e8fc08f Mon Sep 17 00:00:00 2001 From: drigato Date: Wed, 4 Feb 2015 14:34:04 -0500 Subject: [PATCH 5/7] SAAS-560: Deploy separate cloud storage config files for each development environment Small fix --- .../airtime_analyzer/airtime_analyzer/analyzer_pipeline.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py b/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py index df2a294c4..077fd88da 100644 --- a/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py +++ b/python_apps/airtime_analyzer/airtime_analyzer/analyzer_pipeline.py @@ -2,7 +2,8 @@ """ import logging import threading -import multiprocessing +import multiprocessing +import ConfigParser from metadata_analyzer import MetadataAnalyzer from filemover_analyzer import FileMoverAnalyzer from cloud_storage_uploader import CloudStorageUploader @@ -53,8 +54,8 @@ class AnalyzerPipeline: raise TypeError("original_filename must be unicode. 
Was of type " + type(original_filename).__name__ + " instead.") if not isinstance(file_prefix, unicode): raise TypeError("file_prefix must be unicode. Was of type " + type(file_prefix).__name__ + " instead.") - if not isinstance(cloud_storage_enabled, bool): - raise TypeError("cloud_storage_enabled must be a boolean. Was of type " + type(cloud_storage_enabled).__name__ + " instead.") + if not isinstance(cloud_storage_config, ConfigParser.SafeConfigParser): + raise TypeError("cloud_storage_config must be a SafeConfigParser. Was of type " + type(cloud_storage_config).__name__ + " instead.") # Analyze the audio file we were told to analyze: From 6cfac054cad5eceb58262067b998c54748184890 Mon Sep 17 00:00:00 2001 From: drigato Date: Thu, 5 Feb 2015 10:14:35 -0500 Subject: [PATCH 6/7] SAAS-560: Deploy separate cloud storage config files for each development environment Renamed the analyzer config variables to be named specially for rabbitmq config --- .../airtime_analyzer/airtime_analyzer.py | 6 +++--- .../airtime_analyzer/message_listener.py | 16 ++++++++-------- .../airtime_analyzer/bin/airtime_analyzer | 12 ++++++------ 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/python_apps/airtime_analyzer/airtime_analyzer/airtime_analyzer.py b/python_apps/airtime_analyzer/airtime_analyzer/airtime_analyzer.py index 2dc78d5bf..b90d58cab 100644 --- a/python_apps/airtime_analyzer/airtime_analyzer/airtime_analyzer.py +++ b/python_apps/airtime_analyzer/airtime_analyzer/airtime_analyzer.py @@ -23,7 +23,7 @@ class AirtimeAnalyzerServer: # Variables _log_level = logging.INFO - def __init__(self, config_path, cloud_storage_config_path, http_retry_queue_path, debug=False): + def __init__(self, rmq_config_path, cloud_storage_config_path, http_retry_queue_path, debug=False): # Dump a stacktrace with 'kill -SIGUSR2 ' signal.signal(signal.SIGUSR2, lambda sig, frame: AirtimeAnalyzerServer.dump_stacktrace()) @@ -32,7 +32,7 @@ class AirtimeAnalyzerServer: self.setup_logging(debug) # Read our rmq config file - config = config_file.read_config_file(config_path) + rmq_config = config_file.read_config_file(rmq_config_path) # Read the cloud storage config file cloud_storage_config = config_file.read_config_file(cloud_storage_config_path) @@ -42,7 +42,7 @@ class AirtimeAnalyzerServer: # Start listening for RabbitMQ messages telling us about newly # uploaded files. This blocks until we recieve a shutdown signal. - self._msg_listener = MessageListener(config, cloud_storage_config) + self._msg_listener = MessageListener(rmq_config, cloud_storage_config) StatusReporter.stop_thread() diff --git a/python_apps/airtime_analyzer/airtime_analyzer/message_listener.py b/python_apps/airtime_analyzer/airtime_analyzer/message_listener.py index f1ad52306..43e6e1d11 100644 --- a/python_apps/airtime_analyzer/airtime_analyzer/message_listener.py +++ b/python_apps/airtime_analyzer/airtime_analyzer/message_listener.py @@ -55,25 +55,25 @@ QUEUE = "airtime-uploads" """ class MessageListener: - def __init__(self, config, cloud_storage_config): + def __init__(self, rmq_config, cloud_storage_config): ''' Start listening for file upload notification messages from RabbitMQ Keyword arguments: - config: A ConfigParser object containing the [rabbitmq] configuration. + rmq_config: A ConfigParser object containing the [rabbitmq] configuration. cloud_storage_config: A ConfigParser object containing the cloud storage configuration. 
''' self._shutdown = False - # Read the RabbitMQ connection settings from the config file + # Read the RabbitMQ connection settings from the rmq_config file # The exceptions throw here by default give good error messages. RMQ_CONFIG_SECTION = "rabbitmq" - self._host = config.get(RMQ_CONFIG_SECTION, 'host') - self._port = config.getint(RMQ_CONFIG_SECTION, 'port') - self._username = config.get(RMQ_CONFIG_SECTION, 'user') - self._password = config.get(RMQ_CONFIG_SECTION, 'password') - self._vhost = config.get(RMQ_CONFIG_SECTION, 'vhost') + self._host = rmq_config.get(RMQ_CONFIG_SECTION, 'host') + self._port = rmq_config.getint(RMQ_CONFIG_SECTION, 'port') + self._username = rmq_config.get(RMQ_CONFIG_SECTION, 'user') + self._password = rmq_config.get(RMQ_CONFIG_SECTION, 'password') + self._vhost = rmq_config.get(RMQ_CONFIG_SECTION, 'vhost') self.cloud_storage_config = cloud_storage_config diff --git a/python_apps/airtime_analyzer/bin/airtime_analyzer b/python_apps/airtime_analyzer/bin/airtime_analyzer index 154274a93..98ac8a5b6 100755 --- a/python_apps/airtime_analyzer/bin/airtime_analyzer +++ b/python_apps/airtime_analyzer/bin/airtime_analyzer @@ -8,7 +8,7 @@ import os import airtime_analyzer.airtime_analyzer as aa VERSION = "1.0" -DEFAULT_CONFIG_PATH = '/etc/airtime/airtime.conf' +DEFAULT_RMQ_CONFIG_PATH = '/etc/airtime/airtime.conf' DEFAULT_CLOUD_STORAGE_CONFIG_PATH = '/etc/airtime-saas/production/cloud_storage.conf' DEFAULT_HTTP_RETRY_PATH = '/tmp/airtime_analyzer_http_retries' @@ -18,7 +18,7 @@ def run(): parser = argparse.ArgumentParser() parser.add_argument("-d", "--daemon", help="run as a daemon", action="store_true") parser.add_argument("--debug", help="log full debugging output", action="store_true") - parser.add_argument("--rmq-config-file", help="specify a configuration file with RabbitMQ settings (default is %s)" % DEFAULT_CONFIG_PATH) + parser.add_argument("--rmq-config-file", help="specify a configuration file with RabbitMQ settings (default is %s)" % DEFAULT_RMQ_CONFIG_PATH) parser.add_argument("--cloud-storage-config-file", help="specify a configuration file with cloud storage settings (default is %s)" % DEFAULT_CLOUD_STORAGE_CONFIG_PATH) parser.add_argument("--http-retry-queue-file", help="specify where incompleted HTTP requests will be serialized (default is %s)" % DEFAULT_HTTP_RETRY_PATH) args = parser.parse_args() @@ -26,11 +26,11 @@ def run(): check_if_media_monitor_is_running() #Default config file path - config_path = DEFAULT_CONFIG_PATH + rmq_config_path = DEFAULT_RMQ_CONFIG_PATH cloud_storage_config_path = DEFAULT_CLOUD_STORAGE_CONFIG_PATH http_retry_queue_path = DEFAULT_HTTP_RETRY_PATH if args.rmq_config_file: - config_path = args.rmq_config_file + rmq_config_path = args.rmq_config_file if args.cloud_storage_config_file: cloud_storage_config_path = args.cloud_storage_config_file if args.http_retry_queue_file: @@ -38,13 +38,13 @@ def run(): if args.daemon: with daemon.DaemonContext(): - aa.AirtimeAnalyzerServer(config_path=config_path, + aa.AirtimeAnalyzerServer(rmq_config_path=rmq_config_path, cloud_storage_config_path = cloud_storage_config_path, http_retry_queue_path=http_retry_queue_path, debug=args.debug) else: # Run without daemonizing - aa.AirtimeAnalyzerServer(config_path=config_path, + aa.AirtimeAnalyzerServer(rmq_config_path=rmq_config_path, cloud_storage_config_path = cloud_storage_config_path, http_retry_queue_path=http_retry_queue_path, debug=args.debug) From cee0ff48819394cb01cde56c7d891a10b30b2c1a Mon Sep 17 00:00:00 2001 From: drigato Date: Thu, 5 Feb 
2015 14:31:20 -0500 Subject: [PATCH 7/7] SAAS-560: Deploy separate cloud storage config files for each development environment Default to production config files if dev env specific files are not found. Fix analyzer unit tests. --- airtime_mvc/application/configs/conf.php | 5 +++++ airtime_mvc/application/models/RabbitMq.php | 8 +++++++- .../airtime_analyzer/tests/analyzer_pipeline_tests.py | 6 ++++-- .../tests/cloud_storage_uploader_tests.py | 5 ++++- 4 files changed, 20 insertions(+), 4 deletions(-) diff --git a/airtime_mvc/application/configs/conf.php b/airtime_mvc/application/configs/conf.php index 75a69d751..6af950882 100644 --- a/airtime_mvc/application/configs/conf.php +++ b/airtime_mvc/application/configs/conf.php @@ -43,6 +43,11 @@ class Config { // Parse separate conf file for cloud storage values $cloudStorageConfig = "/etc/airtime-saas/".$CC_CONFIG['dev_env']."/cloud_storage.conf"; + if (!file_exists($cloudStorageConfig)) { + // If the dev env specific cloud_storage.conf doesn't exist default + // to the production cloud_storage.conf + $cloudStorageConfig = "/etc/airtime-saas/production/cloud_storage.conf"; + } $cloudStorageValues = parse_ini_file($cloudStorageConfig, true); $CC_CONFIG["supportedStorageBackends"] = array('amazon_S3'); diff --git a/airtime_mvc/application/models/RabbitMq.php b/airtime_mvc/application/models/RabbitMq.php index 9ab7e6c22..435036a6e 100644 --- a/airtime_mvc/application/models/RabbitMq.php +++ b/airtime_mvc/application/models/RabbitMq.php @@ -89,7 +89,13 @@ class Application_Model_RabbitMq if (array_key_exists("dev_env", $CC_CONFIG)) { $devEnv = $CC_CONFIG["dev_env"]; } - $config = parse_ini_file("/etc/airtime-saas/".$devEnv."/rabbitmq-analyzer.ini", true); + $rmq_config_path = "/etc/airtime-saas/".$devEnv."/rabbitmq-analyzer.ini"; + if (!file_exists($rmq_config_path)) { + // If the dev env specific rabbitmq-analyzer.ini doesn't exist default + // to the production rabbitmq-analyzer.ini + $rmq_config_path = "/etc/airtime-saas/production/rabbitmq-analyzer.ini"; + } + $config = parse_ini_file($rmq_config_path, true); $conn = new AMQPConnection($config["rabbitmq"]["host"], $config["rabbitmq"]["port"], $config["rabbitmq"]["user"], diff --git a/python_apps/airtime_analyzer/tests/analyzer_pipeline_tests.py b/python_apps/airtime_analyzer/tests/analyzer_pipeline_tests.py index fecfb3182..e5230448f 100644 --- a/python_apps/airtime_analyzer/tests/analyzer_pipeline_tests.py +++ b/python_apps/airtime_analyzer/tests/analyzer_pipeline_tests.py @@ -5,6 +5,7 @@ import multiprocessing import Queue import datetime from airtime_analyzer.analyzer_pipeline import AnalyzerPipeline +from airtime_analyzer import config_file DEFAULT_AUDIO_FILE = u'tests/test_data/44100Hz-16bit-mono.mp3' DEFAULT_IMPORT_DEST = u'Test Artist/Test Album/44100Hz-16bit-mono.mp3' @@ -20,10 +21,11 @@ def teardown(): def test_basic(): filename = os.path.basename(DEFAULT_AUDIO_FILE) q = multiprocessing.Queue() - cloud_storage_enabled = False + cloud_storage_config_path = '/etc/airtime-saas/production/cloud_storage.conf' + cloud_storage_config = config_file.read_config_file(cloud_storage_config_path) file_prefix = u'' #This actually imports the file into the "./Test Artist" directory. 
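#Note that this test reads the real production cloud_storage.conf above, so
#/etc/airtime-saas/production/cloud_storage.conf must exist on the test machine.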
-    AnalyzerPipeline.run_analysis(q, DEFAULT_AUDIO_FILE, u'.', filename, file_prefix, cloud_storage_enabled)
+    AnalyzerPipeline.run_analysis(q, DEFAULT_AUDIO_FILE, u'.', filename, file_prefix, cloud_storage_config)
     metadata = q.get()
     assert metadata['track_title'] == u'Test Title'
     assert metadata['artist_name'] == u'Test Artist'
diff --git a/python_apps/airtime_analyzer/tests/cloud_storage_uploader_tests.py b/python_apps/airtime_analyzer/tests/cloud_storage_uploader_tests.py
index d54e4573a..44fa8e414 100644
--- a/python_apps/airtime_analyzer/tests/cloud_storage_uploader_tests.py
+++ b/python_apps/airtime_analyzer/tests/cloud_storage_uploader_tests.py
@@ -1,6 +1,7 @@
 from nose.tools import *
 from airtime_analyzer.cloud_storage_uploader import CloudStorageUploader
 from airtime_analyzer.airtime_analyzer import AirtimeAnalyzerServer
+from airtime_analyzer import config_file
 
 def setup():
     pass
@@ -9,5 +10,7 @@ def teardown():
     pass
 
 def test_analyze():
-    cl = CloudStorageUploader()
+    cloud_storage_config_path = '/etc/airtime-saas/production/cloud_storage.conf'
+    cloud_storage_config = config_file.read_config_file(cloud_storage_config_path)
+    cl = CloudStorageUploader(cloud_storage_config)
     cl._storage_backend = "file"
\ No newline at end of file
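
For reference, the configuration-resolution rule that this patch series converges on can be sketched in a few lines of Python. This is an illustrative summary rather than part of the patches: resolve_saas_config and SAAS_CONF_ROOT are hypothetical names, and the production fallback mirrors the file_exists() checks added to conf.php and Application_Model_RabbitMq in the final commit.

import os

SAAS_CONF_ROOT = "/etc/airtime-saas"  # base directory used throughout this series

def resolve_saas_config(filename, dev_env="production"):
    """Return the path to an environment-specific config file.

    Try /etc/airtime-saas/<dev_env>/<filename> first, and fall back to the
    production copy when the environment-specific file does not exist.
    """
    candidate = os.path.join(SAAS_CONF_ROOT, dev_env, filename)
    if os.path.exists(candidate):
        return candidate
    return os.path.join(SAAS_CONF_ROOT, "production", filename)

# Example usage:
#   resolve_saas_config("cloud_storage.conf", "staging")
#     -> "/etc/airtime-saas/staging/cloud_storage.conf" if it exists,
#        otherwise "/etc/airtime-saas/production/cloud_storage.conf"
#   resolve_saas_config("rabbitmq-analyzer.ini", dev_env)  # same rule for the RabbitMQ settings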