Merging 2.5.x into saas

This commit is contained in:
Duncan Sommerville 2014-10-27 16:27:45 -04:00
commit 1a90184a69
8 changed files with 87 additions and 46 deletions

View File

@ -61,6 +61,7 @@ class LoginController extends Zend_Controller_Action
$result = $auth->authenticate($authAdapter);
if ($result->isValid()) {
Zend_Session::regenerateId();
//all info about this user from the login table omit only the password
$userInfo = $authAdapter->getResultRowObject(null, 'password');
@ -81,6 +82,7 @@ class LoginController extends Zend_Controller_Action
$auth = Zend_Auth::getInstance();
$result = $auth->authenticate($authAdapter);
if ($result->isValid()) {
Zend_Session::regenerateId();
//set the user locale in case user changed it in when logging in
Application_Model_Preference::SetUserLocale($locale);

View File

@ -129,6 +129,15 @@ class Rest_MediaController extends Zend_Rest_Controller
public function postAction()
{
/* If the user presents a valid API key, we don't check CSRF tokens.
CSRF tokens are only used for session based authentication.
*/
if(!$this->verifyAPIKey()){
if(!$this->verifyCSRFToken($this->_getParam('csrf_token'))){
return;
}
}
if (!$this->verifyAuth(true, true))
{
return;
@ -294,6 +303,21 @@ class Rest_MediaController extends Zend_Rest_Controller
}
return $id;
}
private function verifyCSRFToken($token){
    // Compare the client-supplied CSRF token against the one stored in the
    // session namespace. Returns true on a match; otherwise emits a 401
    // response body and returns false so the calling action can bail out.
    $current_namespace = new Zend_Session_Namespace('csrf_namespace');
    $observed_csrf_token = $token;
    $expected_csrf_token = $current_namespace->authtoken;
    // Use strict (===) comparison: PHP's loose == is unsafe for security
    // tokens — e.g. null == "" is true (an unset session token would match
    // an empty submitted token) and numeric-looking "0e..." strings compare
    // equal under type juggling.
    if($observed_csrf_token === $expected_csrf_token){
        return true;
    }else{
        $resp = $this->getResponse();
        $resp->setHttpResponseCode(401);
        $resp->appendBody("ERROR: Token Mismatch.");
        return false;
    }
}
private function verifyAuth($checkApiKey, $checkSession)
{

View File

@ -166,10 +166,7 @@
</ul>
<?php endif; ?>
</dd>
<?php echo $this->element->getElement('csrf') ?>
<button type="submit" id="cu_save_user" class="btn btn-small right-floated"><?php echo _("Save")?></button>
</dl>
<button type="submit" id="cu_save_user" class="btn btn-small right-floated"><?php echo _("Save")?></button>
</form>

View File

@ -11,8 +11,8 @@
}
?>
<form id="plupload_form" <?php if ($this->quotaLimitReached) { ?> class="hidden" <?php } ?>>
<?php echo $this->form->getElement('csrf') ?>
<div id="plupload_files"></div>
<?php echo $this->form->getElement('csrf') ?>
<div id="plupload_files"></div>
</form>
<div id="plupload_error">
<table></table>

View File

@ -42,14 +42,16 @@ class FileMoverAnalyzer(Analyzer):
# TODO: Also, handle the case where the move fails and write some code
# to possibly move the file to problem_files.
max_dir_len = 32
max_file_len = 32
max_dir_len = 48
max_file_len = 48
final_file_path = import_directory
orig_file_basename, orig_file_extension = os.path.splitext(original_filename)
if metadata.has_key("artist_name"):
final_file_path += "/" + metadata["artist_name"][0:max_dir_len] # truncating with array slicing
if metadata.has_key("album_title"):
final_file_path += "/" + metadata["album_title"][0:max_dir_len]
final_file_path += "/" + original_filename[0:max_file_len]
final_file_path += "/" + metadata["album_title"][0:max_dir_len]
# Note that orig_file_extension includes the "." already
final_file_path += "/" + orig_file_basename[0:max_file_len] + orig_file_extension
#Ensure any redundant slashes are stripped
final_file_path = os.path.normpath(final_file_path)

View File

@ -120,8 +120,13 @@ class MessageListener:
def disconnect_from_messaging_server(self):
'''Stop consuming RabbitMQ messages and disconnect'''
self._channel.stop_consuming()
self._connection.close()
# If you try to close a connection that's already closed, you're going to have a bad time.
# We're breaking EAFP because this can be called multiple times depending on exception
# handling flow here.
if not self._channel.is_closed and not self._channel.is_closing:
self._channel.stop_consuming()
if not self._connection.is_closed and not self._connection.is_closing:
self._connection.close()
def graceful_shutdown(self, signum, frame):
'''Disconnect and break out of the message listening loop'''

View File

@ -38,9 +38,9 @@ def process_http_requests(ipc_queue, http_retry_queue_path):
# retried later:
retry_queue = collections.deque()
shutdown = False
# Unpickle retry_queue from disk so that we won't have lost any uploads
# if airtime_analyzer is shut down while the web server is down or unreachable,
# Unpickle retry_queue from disk so that we won't have lost any uploads
# if airtime_analyzer is shut down while the web server is down or unreachable,
# and there were failed HTTP requests pending, waiting to be retried.
try:
with open(http_retry_queue_path, 'rb') as pickle_file:
@ -57,33 +57,42 @@ def process_http_requests(ipc_queue, http_retry_queue_path):
logging.error("Failed to unpickle %s. Continuing..." % http_retry_queue_path)
pass
while not shutdown:
while True:
try:
request = ipc_queue.get(block=True, timeout=5)
if isinstance(request, str) and request == "shutdown": # Bit of a cheat
shutdown = True
break
if not isinstance(request, PicklableHttpRequest):
raise TypeError("request must be a PicklableHttpRequest. Was of type " + type(request).__name__)
except Queue.Empty:
request = None
# If there's no new HTTP request we need to execute, let's check our "retry
# queue" and see if there's any failed HTTP requests we can retry:
if request:
send_http_request(request, retry_queue)
else:
# Using a for loop instead of while so we only iterate over all the requests once!
for i in range(len(retry_queue)):
request = retry_queue.popleft()
send_http_request(request, retry_queue)
while not shutdown:
try:
request = ipc_queue.get(block=True, timeout=5)
if isinstance(request, str) and request == "shutdown": # Bit of a cheat
shutdown = True
break
if not isinstance(request, PicklableHttpRequest):
raise TypeError("request must be a PicklableHttpRequest. Was of type " + type(request).__name__)
except Queue.Empty:
request = None
# If there's no new HTTP request we need to execute, let's check our "retry
# queue" and see if there's any failed HTTP requests we can retry:
if request:
send_http_request(request, retry_queue)
else:
# Using a for loop instead of while so we only iterate over all the requests once!
for i in range(len(retry_queue)):
request = retry_queue.popleft()
send_http_request(request, retry_queue)
logging.info("Shutting down status_reporter")
# Pickle retry_queue to disk so that we don't lose uploads if we're shut down
# while the web server is down or unreachable.
with open(http_retry_queue_path, 'wb') as pickle_file:
pickle.dump(retry_queue, pickle_file)
except Exception as e: # Terrible top-level exception handler to prevent the thread from dying, just in case.
if shutdown:
return
logging.exception("Unhandled exception in StatusReporter")
logging.exception(e)
logging.info("Restarting StatusReporter thread")
time.sleep(2) # Throttle it
logging.info("Shutting down status_reporter")
# Pickle retry_queue to disk so that we don't lose uploads if we're shut down
# while the web server is down or unreachable.
with open(http_retry_queue_path, 'wb') as pickle_file:
pickle.dump(retry_queue, pickle_file)
def send_http_request(picklable_request, retry_queue):
if not isinstance(picklable_request, PicklableHttpRequest):
@ -134,11 +143,11 @@ def is_web_server_broken(url):
test_req = requests.get(url)
test_req.raise_for_status()
except Exception as e:
return true
return True
else:
# The request worked fine, so the web server and Airtime are still up.
return false
return false
return False
return False
def alert_hung_request():

View File

@ -9,14 +9,16 @@ respawn
setuid www-data
setgid www-data
expect fork
#expect fork
env LANG='en_US.UTF-8'
env LC_ALL='en_US.UTF-8'
script
airtime_analyzer
end script
#script
# airtime_analyzer
#end script
exec airtime_analyzer