Merge branch 'cc-5709-airtime-analyzer' into cc-5709-airtime-analyzer-cloud-storage

Conflicts:
	python_apps/airtime_analyzer/airtime_analyzer/filemover_analyzer.py
This commit is contained in:
drigato 2014-10-22 17:16:46 -04:00
commit 70ff67374b
7 changed files with 66 additions and 44 deletions

View file

@@ -120,8 +120,11 @@ class Application_Model_Auth
     */
    public static function pinSessionToClient($auth)
    {
        $CC_CONFIG = Config::getConfig();
        $serverName = isset($_SERVER['SERVER_NAME']) ? $_SERVER['SERVER_NAME'] : "";
        $remoteAddr = isset($_SERVER['REMOTE_ADDR']) ? $_SERVER['REMOTE_ADDR'] : "";
        $sessionIdentifier = 'Airtime' . '-' . $serverName . '-' . $remoteAddr . '-' . Application_Model_Preference::GetClientId() . '-' . $CC_CONFIG["baseDir"];
        $auth->setStorage(new Zend_Auth_Storage_Session($sessionIdentifier));
    }
}

View file

@@ -29,8 +29,10 @@ echo "----------------------------------------------------"
dist=`lsb_release -is`
code=`lsb_release -cs`

set +e
apache2 -v | grep "2\.4" > /dev/null
apacheversion=$?
set -e

#enable squeeze backports to get lame packages
if [ "$dist" = "Debian" -a "$code" = "squeeze" ]; then

View file

@@ -42,15 +42,16 @@ class FileMoverAnalyzer(Analyzer):
        # TODO: Also, handle the case where the move fails and write some code
        # to possibly move the file to problem_files.

        max_dir_len = 48
        max_file_len = 48
        final_file_path = import_directory
        orig_file_basename, orig_file_extension = os.path.splitext(original_filename)
        if metadata.has_key("artist_name"):
            final_file_path += "/" + metadata["artist_name"][0:max_dir_len] # truncating with array slicing
        if metadata.has_key("album_title"):
            final_file_path += "/" + metadata["album_title"][0:max_dir_len]
        # Note that orig_file_extension includes the "." already
        final_file_path += "/" + orig_file_basename[0:max_file_len] + orig_file_extension

        #Ensure any redundant slashes are stripped
        final_file_path = os.path.normpath(final_file_path)

View file

@@ -128,7 +128,12 @@ class MessageListener:

    def disconnect_from_messaging_server(self):
        '''Stop consuming RabbitMQ messages and disconnect'''
        # If you try to close a connection that's already closed, you're going to have a bad time.
        # We're breaking EAFP because this can be called multiple times depending on exception
        # handling flow here.
        if not self._channel.is_closed and not self._channel.is_closing:
            self._channel.stop_consuming()
        if not self._connection.is_closed and not self._connection.is_closing:
            self._connection.close()

    def graceful_shutdown(self, signum, frame):

View file

@@ -57,7 +57,8 @@ def process_http_requests(ipc_queue, http_retry_queue_path):
            logging.error("Failed to unpickle %s. Continuing..." % http_retry_queue_path)
            pass

    while True:
        try:
            while not shutdown:
                try:
                    request = ipc_queue.get(block=True, timeout=5)
@@ -84,6 +85,14 @@ def process_http_requests(ipc_queue, http_retry_queue_path):
                # while the web server is down or unreachable.
                with open(http_retry_queue_path, 'wb') as pickle_file:
                    pickle.dump(retry_queue, pickle_file)
        except Exception as e: # Terrible top-level exception handler to prevent the thread from dying, just in case.
            if shutdown:
                return
            logging.exception("Unhandled exception in StatusReporter")
            logging.exception(e)
            logging.info("Restarting StatusReporter thread")
            time.sleep(2) # Throttle it

def send_http_request(picklable_request, retry_queue):
    if not isinstance(picklable_request, PicklableHttpRequest):
@@ -134,11 +143,11 @@ def is_web_server_broken(url):
        test_req = requests.get(url)
        test_req.raise_for_status()
    except Exception as e:
        return True
    else:
        # The request worked fine, so the web server and Airtime are still up.
        return False

    return False
def alert_hung_request(): def alert_hung_request():

View file

@@ -9,14 +9,16 @@ respawn
setuid www-data
setgid www-data

#expect fork

env LANG='en_US.UTF-8'
env LC_ALL='en_US.UTF-8'

#script
#    airtime_analyzer
#end script

exec airtime_analyzer