Merge branch 'cc-5709-airtime-analyzer' into cc-5709-airtime-analyzer-saas

Conflicts:
	airtime_mvc/public/index.php

* Reverted some SaaS-only thing Martin did a year ago. Looks benign but only one way to find out...

commit 1c5e2d6205
@@ -1,6 +1,6 @@
<?php
// This file generated by Propel 1.5.2 convert-conf target
// from XML runtime conf file /home/denise/airtime/airtime_mvc/build/runtime-conf.xml
// from XML runtime conf file /home/ubuntu/airtime/airtime_mvc/build/runtime-conf.xml
$conf = array (
'datasources' =>
array (
@@ -12,6 +12,14 @@ $conf = array (
'dsn' => 'pgsql:host=localhost;port=5432;dbname=airtime;user=airtime;password=airtime',
),
),
'airtime_test' =>
array (
'adapter' => 'pgsql',
'connection' =>
array (
'dsn' => 'pgsql:host=localhost;port=5432;dbname=airtime_test;user=airtime;password=airtime',
),
),
'default' => 'airtime',
),
'generator_version' => '1.5.2',
@@ -6,6 +6,10 @@ bootstrap.class = "Bootstrap"
appnamespace = "Application"
resources.frontController.controllerDirectory = APPLICATION_PATH "/controllers"
resources.frontController.params.displayExceptions = 0
resources.frontController.moduleDirectory = APPLICATION_PATH "/modules"
resources.frontController.plugins.putHandler = "Zend_Controller_Plugin_PutHandler"
;load everything in the modules directory including models
resources.modules[] = ""
resources.layout.layoutPath = APPLICATION_PATH "/layouts/scripts/"
resources.view[] =
resources.db.adapter = "Pdo_Pgsql"
@@ -65,7 +65,7 @@ class LoginController extends Zend_Controller_Action

Application_Model_LoginAttempts::resetAttempts($_SERVER['REMOTE_ADDR']);
Application_Model_Subjects::resetLoginAttempts($username);

$tempSess = new Zend_Session_Namespace("referrer");
$tempSess->referrer = 'login';
@@ -2,12 +2,11 @@

class PluploadController extends Zend_Controller_Action
{

public function init()
{
$ajaxContext = $this->_helper->getHelper('AjaxContext');
$ajaxContext->addActionContext('upload', 'json')
->addActionContext('copyfile', 'json')
$ajaxContext->addActionContext('upload', 'json')
->addActionContext('recent-uploads', 'json')
->initContext();
}

@@ -18,12 +17,14 @@ class PluploadController extends Zend_Controller_Action
$baseUrl = Application_Common_OsPath::getBaseDir();
$locale = Application_Model_Preference::GetLocale();

$this->view->headScript()->appendFile($baseUrl.'js/datatables/js/jquery.dataTables.js?'.$CC_CONFIG['airtime_version'], 'text/javascript');
$this->view->headScript()->appendFile($baseUrl.'js/plupload/plupload.full.min.js?'.$CC_CONFIG['airtime_version'],'text/javascript');
$this->view->headScript()->appendFile($baseUrl.'js/plupload/jquery.plupload.queue.min.js?'.$CC_CONFIG['airtime_version'],'text/javascript');
$this->view->headScript()->appendFile($baseUrl.'js/airtime/library/plupload.js?'.$CC_CONFIG['airtime_version'],'text/javascript');
$this->view->headScript()->appendFile($baseUrl.'js/plupload/i18n/'.$locale.'.js?'.$CC_CONFIG['airtime_version'],'text/javascript');

$this->view->headLink()->appendStylesheet($baseUrl.'css/plupload.queue.css?'.$CC_CONFIG['airtime_version']);
$this->view->headLink()->appendStylesheet($baseUrl.'css/addmedia.css?'.$CC_CONFIG['airtime_version']);
}

public function uploadAction()

@@ -34,17 +35,53 @@ class PluploadController extends Zend_Controller_Action

$this->_helper->json->sendJson(array("jsonrpc" => "2.0", "tempfilepath" => $tempFileName));
}

public function copyfileAction()
public function recentUploadsAction()
{
$upload_dir = ini_get("upload_tmp_dir") . DIRECTORY_SEPARATOR . "plupload";
$filename = $this->_getParam('name');
$tempname = $this->_getParam('tempname');
$result = Application_Model_StoredFile::copyFileToStor($upload_dir,
$filename, $tempname);
if (!is_null($result))
$this->_helper->json->sendJson(array("jsonrpc" => "2.0", "error" => $result));
if (isset($_GET['uploadFilter'])) {
$filter = $_GET['uploadFilter'];
} else {
$filter = "all";
}

$limit = isset($_GET['iDisplayLength']) ? $_GET['iDisplayLength'] : 10;
$rowStart = isset($_GET['iDisplayStart']) ? $_GET['iDisplayStart'] : 0;

$recentUploadsQuery = CcFilesQuery::create()->filterByDbUtime(array('min' => time() - 30 * 24 * 60 * 60))
->orderByDbUtime(Criteria::DESC);

$numTotalRecentUploads = $recentUploadsQuery->find()->count();

if ($filter == "pending") {
$recentUploadsQuery->filterByDbImportStatus("1");
} else if ($filter == "failed") {
$recentUploadsQuery->filterByDbImportStatus(array('min' => 100));
}

$recentUploads = $recentUploadsQuery->offset($rowStart)->limit($limit)->find();

$numRecentUploads = $limit;
//CcFilesQuery::create()->filterByDbUtime(array('min' => time() - 30 * 24 * 60 * 60))

//$this->_helper->json->sendJson(array("jsonrpc" => "2.0", "tempfilepath" => $tempFileName));

$uploadsArray = array();

foreach ($recentUploads as $upload)
{
$upload->toArray(BasePeer::TYPE_FIELDNAME);
//array_push($uploadsArray, $upload); //TODO: $this->sanitizeResponse($upload));

//$this->_helper->json->sendJson($upload->asJson());
//TODO: Invoke sanitization here
array_push($uploadsArray, $upload->toArray(BasePeer::TYPE_FIELDNAME));
}

$this->_helper->json->sendJson(array("jsonrpc" => "2.0"));
$this->view->sEcho = intval($this->getRequest()->getParam('sEcho'));
$this->view->iTotalDisplayRecords = $numTotalRecentUploads;
//$this->view->iTotalDisplayRecords = $numRecentUploads; //$r["iTotalDisplayRecords"];
$this->view->iTotalRecords = $numTotalRecentUploads; //$r["iTotalRecords"];
$this->view->files = $uploadsArray; //$r["aaData"];
}
}
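The unprefixed request parameters read by recentUploadsAction() above (sEcho, iDisplayStart, iDisplayLength) and the view fields it fills in come from DataTables' legacy server-side processing protocol, which the dataTables script loaded earlier in this controller expects; roughly:

// DataTables (legacy 1.9 API) server-side paging parameters, as used by recentUploadsAction():
//   sEcho          - draw counter, echoed back unchanged so the client can match responses
//   iDisplayStart  - offset of the first row to return ($rowStart above)
//   iDisplayLength - page size ($limit above)
//   iTotalRecords / iTotalDisplayRecords - row counts used to render the pager
//   aaData         - the rows themselves (exposed here as $this->view->files)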
@@ -110,6 +110,13 @@ class Zend_Controller_Plugin_Acl extends Zend_Controller_Plugin_Abstract
{
$controller = strtolower($request->getControllerName());

//Ignore authentication for all access to the rest API. We do auth via API keys for this
//and/or by OAuth.
if (strtolower($request->getModuleName()) == "rest")
{
return;
}

if (in_array($controller, array("api", "auth", "locale"))) {

$this->setRoleName("G");
@@ -301,7 +301,9 @@ SQL;
public static function getDirByPK($pk)
{
$dir = CcMusicDirsQuery::create()->findPK($pk);

if (!$dir) {
return null;
}
$mus_dir = new Application_Model_MusicDir($dir);

return $mus_dir;
@@ -1409,4 +1409,24 @@ class Application_Model_Preference
public static function GetHistoryFileTemplate() {
return self::getValue("history_file_template");
}

public static function getDiskUsage()
{
return self::getValue("disk_usage");
}

public static function setDiskUsage($value)
{
self::setValue("disk_usage", $value);
}

public static function updateDiskUsage($filesize)
{
$currentDiskUsage = self::getDiskUsage();
if (empty($currentDiskUsage)) {
$currentDiskUsage = 0;
}

self::setDiskUsage($currentDiskUsage + $filesize);
}
}
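As a rough illustration (not part of this commit's diff), the new counter is meant to be adjusted in both directions: charged when an upload lands in storage and credited back on deletion, as StoredFile::delete() further below does. The import-side call site here is an assumption.

// Hypothetical illustration of the intended bookkeeping; only the delete-side call
// appears in this diff, the import-side call is assumed.
Application_Model_Preference::updateDiskUsage(filesize($stagedFilePath));      // after a successful import (assumption)
Application_Model_Preference::updateDiskUsage(-1 * abs(filesize($filepath)));  // when a file is deleted (see StoredFile::delete() below)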
@@ -13,7 +13,7 @@ class Application_Model_RabbitMq
self::$doPush = true;
}

private static function sendMessage($exchange, $data)
private static function sendMessage($exchange, $exchangeType, $autoDeleteExchange, $data, $queue="")
{
$CC_CONFIG = Config::getConfig();

@@ -31,7 +31,9 @@ class Application_Model_RabbitMq
$channel->access_request($CC_CONFIG["rabbitmq"]["vhost"], false, false,
true, true);

$channel->exchange_declare($exchange, 'direct', false, true);
//I'm pretty sure we DON'T want to autodelete ANY exchanges but I'm keeping the code
//the way it is just so I don't accidentally break anything when I add the Analyzer code in. -- Albert, March 13, 2014
$channel->exchange_declare($exchange, $exchangeType, false, true, $autoDeleteExchange);

$msg = new AMQPMessage($data, array('content_type' => 'text/plain'));

@@ -46,7 +48,7 @@ class Application_Model_RabbitMq

$exchange = 'airtime-pypo';
$data = json_encode($md, JSON_FORCE_OBJECT);
self::sendMessage($exchange, $data);
self::sendMessage($exchange, 'direct', true, $data);
}

public static function SendMessageToMediaMonitor($event_type, $md)

@@ -55,7 +57,7 @@ class Application_Model_RabbitMq

$exchange = 'airtime-media-monitor';
$data = json_encode($md);
self::sendMessage($exchange, $data);
self::sendMessage($exchange, 'direct', true, $data);
}

public static function SendMessageToShowRecorder($event_type)

@@ -74,7 +76,21 @@ class Application_Model_RabbitMq
}
$data = json_encode($temp);

self::sendMessage($exchange, $data);
self::sendMessage($exchange, 'direct', true, $data);
}

public static function SendMessageToAnalyzer($tmpFilePath, $importedStorageDirectory, $originalFilename,
$callbackUrl, $apiKey)
{
$exchange = 'airtime-uploads';
$data['tmp_file_path'] = $tmpFilePath;
$data['import_directory'] = $importedStorageDirectory;
$data['original_filename'] = $originalFilename;
$data['callback_url'] = $callbackUrl;
$data['api_key'] = $apiKey;

$jsonData = json_encode($data);
self::sendMessage($exchange, 'topic', false, $jsonData, 'airtime-uploads');
}

public static function SendMessageToHaproxyConfigDaemon($md){
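A minimal sketch of the queue handling implied by the new sendMessage() signature, assuming php-amqplib's AMQPChannel; the part of sendMessage() beyond what this hunk shows is not in the diff, so treat this as illustrative only:

// Illustrative only -- presumed continuation of sendMessage() using php-amqplib.
$channel->exchange_declare($exchange, $exchangeType, false, true, $autoDeleteExchange);
if ($queue != "") {
    // Durable, non-exclusive queue so analyzer messages survive a broker restart (assumption).
    $channel->queue_declare($queue, false, true, false, false);
    $channel->queue_bind($queue, $exchange);
}
$msg = new AMQPMessage($data, array('content_type' => 'text/plain'));
$channel->basic_publish($msg, $exchange);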
@@ -346,6 +346,21 @@ SQL;
return array();
}
}

/**
* Check if the file (on disk) corresponding to this class exists or not.
* @return boolean true if the file exists, false otherwise.
*/
public function existsOnDisk()
{
$exists = false;
try {
$exists = file_exists($this->getFilePath());
} catch (Exception $e) {
return false;
}
return $exists;
}

/**
* Delete stored virtual file
@@ -355,8 +370,11 @@ SQL;
*/
public function delete()
{

$filepath = $this->getFilePath();

//Update the user's disk usage
Application_Model_Preference::updateDiskUsage(-1 * abs(filesize($filepath)));

// Check if the file is scheduled to be played in the future
if (Application_Model_Schedule::IsFileScheduledInTheFuture($this->getId())) {
throw new DeleteScheduledFileException();

@@ -370,8 +388,10 @@ SQL;
}

$music_dir = Application_Model_MusicDir::getDirByPK($this->_file->getDbDirectory());
assert($music_dir);
$type = $music_dir->getType();

if (file_exists($filepath) && $type == "stor") {
$data = array("filepath" => $filepath, "delete" => 1);
try {
@@ -473,8 +493,13 @@ SQL;
*/
public function getFilePath()
{
assert($this->_file);

$music_dir = Application_Model_MusicDir::getDirByPK($this->
_file->getDbDirectory());
if (!$music_dir) {
throw new Exception("Invalid music_dir for file in database.");
}
$directory = $music_dir->getDirectory();
$filepath = $this->_file->getDbFilepath();
@@ -988,48 +1013,69 @@ SQL;
return $freeSpace >= $fileSize;
}

public static function copyFileToStor($p_targetDir, $fileName, $tempname)
/**
* Copy a newly uploaded audio file from its temporary upload directory
* on the local disk (like /tmp) over to Airtime's "stor" directory,
* which is where all ingested music/media live.
*
* This is done in PHP here on the web server rather than in airtime_analyzer because
* the airtime_analyzer might be running on a different physical computer than the web server,
* and it probably won't have access to the web server's /tmp folder. The stor/organize directory
* is, however, both accessible to the machines running airtime_analyzer and the web server
* on Airtime Pro.
*
* The file is actually copied to "stor/organize", which is a staging directory where files go
* before they're processed by airtime_analyzer, which then moves them to "stor/imported" in the final
* step.
*
* TODO: Implement better error handling here...
*
* @param string $tempFilePath
* @param string $originalFilename
* @throws Exception
* @return Ambigous <unknown, string>
*/
public static function copyFileToStor($tempFilePath, $originalFilename)
{
$audio_file = $p_targetDir . DIRECTORY_SEPARATOR . $tempname;
$audio_file = $tempFilePath;
Logging::info('copyFileToStor: moving file '.$audio_file);

$storDir = Application_Model_MusicDir::getStorDir();
$stor = $storDir->getDirectory();
// check if "organize" dir exists and if not create one
if (!file_exists($stor."/organize")) {
if (!mkdir($stor."/organize", 0777)) {
return array(
"code" => 109,
"message" => _("Failed to create 'organize' directory."));
throw new Exception("Failed to create organize directory.");
}
}

if (chmod($audio_file, 0644) === false) {
Logging::info("Warning: couldn't change permissions of $audio_file to 0644");
}

// Check if we have enough space before copying
if (!self::isEnoughDiskSpaceToCopy($stor, $audio_file)) {
$freeSpace = disk_free_space($stor);
$fileSize = filesize($audio_file);

return array("code" => 107,
"message" => sprintf(_("The file was not uploaded, there is "
."%s MB of disk space left and the file you are "
."uploading has a size of %s MB."), $freeSpace, $fileSize));

throw new Exception(sprintf(_("The file was not uploaded, there is "
."%s MB of disk space left and the file you are "
."uploading has a size of %s MB."), $freeSpace, $fileSize));
}

// Check if liquidsoap can play this file
// TODO: Move this to airtime_analyzer
if (!self::liquidsoapFilePlayabilityTest($audio_file)) {
return array(
"code" => 110,
"message" => _("This file appears to be corrupted and will not "
."be added to media library."));
"code" => 110,
"message" => _("This file appears to be corrupted and will not "
."be added to media library."));
}

// Did all the checks for real, now trying to copy
$audio_stor = Application_Common_OsPath::join($stor, "organize",
$fileName);
$originalFilename);
$user = Application_Model_User::getCurrentUser();
if (is_null($user)) {
$uid = Application_Model_User::getFirstAdminId();

@@ -1044,7 +1090,7 @@ SQL;
written)");
} else {
Logging::info("Successfully written identification file for
uploaded '$audio_stor'");
uploaded '$audio_stor'");
}
//if the uploaded file is not UTF-8 encoded, let's encode it. Assuming source
//encoding is ISO-8859-1

@@ -1059,18 +1105,14 @@ SQL;
//is enough disk space .
unlink($audio_file); //remove the file after failed rename
unlink($id_file); // Also remove the identifier file

return array(
"code" => 108,
"message" => _("The file was not uploaded, this error can occur if the computer "
."hard drive does not have enough disk space or the stor "
."directory does not have correct write permissions."));

throw new Exception("The file was not uploaded, this error can occur if the computer "
."hard drive does not have enough disk space or the stor "
."directory does not have correct write permissions.");
}
// Now that we successfully added this file, we will add another tag
// file that will identify the user that owns it
return null;
return $audio_stor;
}

/*
* Pass the file through Liquidsoap and test if it is readable. Return True if readable, and False otherwise.
*/
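Putting the new pieces together, the upload path presumably looks roughly like this; the controller glue is not shown in this hunk and the variable values are examples only:

// Hypothetical glue code: stage the uploaded file, then hand it off to airtime_analyzer.
$tempFilePath     = '/tmp/plupload/p1abc123';   // example value from the Plupload temp dir
$originalFilename = 'show-intro.mp3';           // example value
$stagedPath = Application_Model_StoredFile::copyFileToStor($tempFilePath, $originalFilename);
Application_Model_RabbitMq::SendMessageToAnalyzer(
    $stagedPath,                 // tmp_file_path: now sitting in stor/organize
    $importedStorageDirectory,   // where the analyzer should move it (stor/imported)
    $originalFilename,
    $callbackUrl,                // REST endpoint the analyzer reports back to (assumption)
    $apiKey);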
@@ -44,7 +44,7 @@ class CcFilesTableMap extends TableMap {
$this->addColumn('FTYPE', 'DbFtype', 'VARCHAR', true, 128, '');
$this->addForeignKey('DIRECTORY', 'DbDirectory', 'INTEGER', 'cc_music_dirs', 'ID', false, null, null);
$this->addColumn('FILEPATH', 'DbFilepath', 'LONGVARCHAR', false, null, '');
$this->addColumn('STATE', 'DbState', 'VARCHAR', true, 128, 'empty');
$this->addColumn('IMPORT_STATUS', 'DbImportStatus', 'INTEGER', true, null, 0);
$this->addColumn('CURRENTLYACCESSING', 'DbCurrentlyaccessing', 'INTEGER', true, null, 0);
$this->addForeignKey('EDITEDBY', 'DbEditedby', 'INTEGER', 'cc_subjs', 'ID', false, null, null);
$this->addColumn('MTIME', 'DbMtime', 'TIMESTAMP', false, 6, null);
@@ -65,11 +65,11 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
protected $filepath;

/**
* The value for the state field.
* Note: this column has a database default value of: 'empty'
* @var string
* The value for the import_status field.
* Note: this column has a database default value of: 0
* @var int
*/
protected $state;
protected $import_status;

/**
* The value for the currentlyaccessing field.

@@ -524,7 +524,7 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
$this->mime = '';
$this->ftype = '';
$this->filepath = '';
$this->state = 'empty';
$this->import_status = 0;
$this->currentlyaccessing = 0;
$this->length = '00:00:00';
$this->file_exists = true;

@@ -607,13 +607,13 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
}

/**
* Get the [state] column value.
* Get the [import_status] column value.
*
* @return string
* @return int
*/
public function getDbState()
public function getDbImportStatus()
{
return $this->state;
return $this->import_status;
}

/**

@@ -1463,24 +1463,24 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
} // setDbFilepath()

/**
* Set the value of [state] column.
* Set the value of [import_status] column.
*
* @param string $v new value
* @param int $v new value
* @return CcFiles The current object (for fluent API support)
*/
public function setDbState($v)
public function setDbImportStatus($v)
{
if ($v !== null) {
$v = (string) $v;
$v = (int) $v;
}

if ($this->state !== $v || $this->isNew()) {
$this->state = $v;
$this->modifiedColumns[] = CcFilesPeer::STATE;
if ($this->import_status !== $v || $this->isNew()) {
$this->import_status = $v;
$this->modifiedColumns[] = CcFilesPeer::IMPORT_STATUS;
}

return $this;
} // setDbState()
} // setDbImportStatus()

/**
* Set the value of [currentlyaccessing] column.

@@ -2892,7 +2892,7 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
return false;
}

if ($this->state !== 'empty') {
if ($this->import_status !== 0) {
return false;
}

@@ -2960,7 +2960,7 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
$this->ftype = ($row[$startcol + 3] !== null) ? (string) $row[$startcol + 3] : null;
$this->directory = ($row[$startcol + 4] !== null) ? (int) $row[$startcol + 4] : null;
$this->filepath = ($row[$startcol + 5] !== null) ? (string) $row[$startcol + 5] : null;
$this->state = ($row[$startcol + 6] !== null) ? (string) $row[$startcol + 6] : null;
$this->import_status = ($row[$startcol + 6] !== null) ? (int) $row[$startcol + 6] : null;
$this->currentlyaccessing = ($row[$startcol + 7] !== null) ? (int) $row[$startcol + 7] : null;
$this->editedby = ($row[$startcol + 8] !== null) ? (int) $row[$startcol + 8] : null;
$this->mtime = ($row[$startcol + 9] !== null) ? (string) $row[$startcol + 9] : null;

@@ -3502,7 +3502,7 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
return $this->getDbFilepath();
break;
case 6:
return $this->getDbState();
return $this->getDbImportStatus();
break;
case 7:
return $this->getDbCurrentlyaccessing();

@@ -3723,7 +3723,7 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
$keys[3] => $this->getDbFtype(),
$keys[4] => $this->getDbDirectory(),
$keys[5] => $this->getDbFilepath(),
$keys[6] => $this->getDbState(),
$keys[6] => $this->getDbImportStatus(),
$keys[7] => $this->getDbCurrentlyaccessing(),
$keys[8] => $this->getDbEditedby(),
$keys[9] => $this->getDbMtime(),

@@ -3848,7 +3848,7 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
$this->setDbFilepath($value);
break;
case 6:
$this->setDbState($value);
$this->setDbImportStatus($value);
break;
case 7:
$this->setDbCurrentlyaccessing($value);

@@ -4069,7 +4069,7 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
if (array_key_exists($keys[3], $arr)) $this->setDbFtype($arr[$keys[3]]);
if (array_key_exists($keys[4], $arr)) $this->setDbDirectory($arr[$keys[4]]);
if (array_key_exists($keys[5], $arr)) $this->setDbFilepath($arr[$keys[5]]);
if (array_key_exists($keys[6], $arr)) $this->setDbState($arr[$keys[6]]);
if (array_key_exists($keys[6], $arr)) $this->setDbImportStatus($arr[$keys[6]]);
if (array_key_exists($keys[7], $arr)) $this->setDbCurrentlyaccessing($arr[$keys[7]]);
if (array_key_exists($keys[8], $arr)) $this->setDbEditedby($arr[$keys[8]]);
if (array_key_exists($keys[9], $arr)) $this->setDbMtime($arr[$keys[9]]);

@@ -4150,7 +4150,7 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
if ($this->isColumnModified(CcFilesPeer::FTYPE)) $criteria->add(CcFilesPeer::FTYPE, $this->ftype);
if ($this->isColumnModified(CcFilesPeer::DIRECTORY)) $criteria->add(CcFilesPeer::DIRECTORY, $this->directory);
if ($this->isColumnModified(CcFilesPeer::FILEPATH)) $criteria->add(CcFilesPeer::FILEPATH, $this->filepath);
if ($this->isColumnModified(CcFilesPeer::STATE)) $criteria->add(CcFilesPeer::STATE, $this->state);
if ($this->isColumnModified(CcFilesPeer::IMPORT_STATUS)) $criteria->add(CcFilesPeer::IMPORT_STATUS, $this->import_status);
if ($this->isColumnModified(CcFilesPeer::CURRENTLYACCESSING)) $criteria->add(CcFilesPeer::CURRENTLYACCESSING, $this->currentlyaccessing);
if ($this->isColumnModified(CcFilesPeer::EDITEDBY)) $criteria->add(CcFilesPeer::EDITEDBY, $this->editedby);
if ($this->isColumnModified(CcFilesPeer::MTIME)) $criteria->add(CcFilesPeer::MTIME, $this->mtime);

@@ -4280,7 +4280,7 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
$copyObj->setDbFtype($this->ftype);
$copyObj->setDbDirectory($this->directory);
$copyObj->setDbFilepath($this->filepath);
$copyObj->setDbState($this->state);
$copyObj->setDbImportStatus($this->import_status);
$copyObj->setDbCurrentlyaccessing($this->currentlyaccessing);
$copyObj->setDbEditedby($this->editedby);
$copyObj->setDbMtime($this->mtime);

@@ -5328,7 +5328,7 @@ abstract class BaseCcFiles extends BaseObject implements Persistent
$this->ftype = null;
$this->directory = null;
$this->filepath = null;
$this->state = null;
$this->import_status = null;
$this->currentlyaccessing = null;
$this->editedby = null;
$this->mtime = null;
@@ -49,8 +49,8 @@ abstract class BaseCcFilesPeer {
/** the column name for the FILEPATH field */
const FILEPATH = 'cc_files.FILEPATH';

/** the column name for the STATE field */
const STATE = 'cc_files.STATE';
/** the column name for the IMPORT_STATUS field */
const IMPORT_STATUS = 'cc_files.IMPORT_STATUS';

/** the column name for the CURRENTLYACCESSING field */
const CURRENTLYACCESSING = 'cc_files.CURRENTLYACCESSING';
@@ -257,11 +257,11 @@ abstract class BaseCcFilesPeer {
|
|||
* e.g. self::$fieldNames[self::TYPE_PHPNAME][0] = 'Id'
|
||||
*/
|
||||
private static $fieldNames = array (
|
||||
BasePeer::TYPE_PHPNAME => array ('DbId', 'DbName', 'DbMime', 'DbFtype', 'DbDirectory', 'DbFilepath', 'DbState', 'DbCurrentlyaccessing', 'DbEditedby', 'DbMtime', 'DbUtime', 'DbLPtime', 'DbMd5', 'DbTrackTitle', 'DbArtistName', 'DbBitRate', 'DbSampleRate', 'DbFormat', 'DbLength', 'DbAlbumTitle', 'DbGenre', 'DbComments', 'DbYear', 'DbTrackNumber', 'DbChannels', 'DbUrl', 'DbBpm', 'DbRating', 'DbEncodedBy', 'DbDiscNumber', 'DbMood', 'DbLabel', 'DbComposer', 'DbEncoder', 'DbChecksum', 'DbLyrics', 'DbOrchestra', 'DbConductor', 'DbLyricist', 'DbOriginalLyricist', 'DbRadioStationName', 'DbInfoUrl', 'DbArtistUrl', 'DbAudioSourceUrl', 'DbRadioStationUrl', 'DbBuyThisUrl', 'DbIsrcNumber', 'DbCatalogNumber', 'DbOriginalArtist', 'DbCopyright', 'DbReportDatetime', 'DbReportLocation', 'DbReportOrganization', 'DbSubject', 'DbContributor', 'DbLanguage', 'DbFileExists', 'DbSoundcloudId', 'DbSoundcloudErrorCode', 'DbSoundcloudErrorMsg', 'DbSoundcloudLinkToFile', 'DbSoundCloundUploadTime', 'DbReplayGain', 'DbOwnerId', 'DbCuein', 'DbCueout', 'DbSilanCheck', 'DbHidden', 'DbIsScheduled', 'DbIsPlaylist', ),
|
||||
BasePeer::TYPE_STUDLYPHPNAME => array ('dbId', 'dbName', 'dbMime', 'dbFtype', 'dbDirectory', 'dbFilepath', 'dbState', 'dbCurrentlyaccessing', 'dbEditedby', 'dbMtime', 'dbUtime', 'dbLPtime', 'dbMd5', 'dbTrackTitle', 'dbArtistName', 'dbBitRate', 'dbSampleRate', 'dbFormat', 'dbLength', 'dbAlbumTitle', 'dbGenre', 'dbComments', 'dbYear', 'dbTrackNumber', 'dbChannels', 'dbUrl', 'dbBpm', 'dbRating', 'dbEncodedBy', 'dbDiscNumber', 'dbMood', 'dbLabel', 'dbComposer', 'dbEncoder', 'dbChecksum', 'dbLyrics', 'dbOrchestra', 'dbConductor', 'dbLyricist', 'dbOriginalLyricist', 'dbRadioStationName', 'dbInfoUrl', 'dbArtistUrl', 'dbAudioSourceUrl', 'dbRadioStationUrl', 'dbBuyThisUrl', 'dbIsrcNumber', 'dbCatalogNumber', 'dbOriginalArtist', 'dbCopyright', 'dbReportDatetime', 'dbReportLocation', 'dbReportOrganization', 'dbSubject', 'dbContributor', 'dbLanguage', 'dbFileExists', 'dbSoundcloudId', 'dbSoundcloudErrorCode', 'dbSoundcloudErrorMsg', 'dbSoundcloudLinkToFile', 'dbSoundCloundUploadTime', 'dbReplayGain', 'dbOwnerId', 'dbCuein', 'dbCueout', 'dbSilanCheck', 'dbHidden', 'dbIsScheduled', 'dbIsPlaylist', ),
|
||||
BasePeer::TYPE_COLNAME => array (self::ID, self::NAME, self::MIME, self::FTYPE, self::DIRECTORY, self::FILEPATH, self::STATE, self::CURRENTLYACCESSING, self::EDITEDBY, self::MTIME, self::UTIME, self::LPTIME, self::MD5, self::TRACK_TITLE, self::ARTIST_NAME, self::BIT_RATE, self::SAMPLE_RATE, self::FORMAT, self::LENGTH, self::ALBUM_TITLE, self::GENRE, self::COMMENTS, self::YEAR, self::TRACK_NUMBER, self::CHANNELS, self::URL, self::BPM, self::RATING, self::ENCODED_BY, self::DISC_NUMBER, self::MOOD, self::LABEL, self::COMPOSER, self::ENCODER, self::CHECKSUM, self::LYRICS, self::ORCHESTRA, self::CONDUCTOR, self::LYRICIST, self::ORIGINAL_LYRICIST, self::RADIO_STATION_NAME, self::INFO_URL, self::ARTIST_URL, self::AUDIO_SOURCE_URL, self::RADIO_STATION_URL, self::BUY_THIS_URL, self::ISRC_NUMBER, self::CATALOG_NUMBER, self::ORIGINAL_ARTIST, self::COPYRIGHT, self::REPORT_DATETIME, self::REPORT_LOCATION, self::REPORT_ORGANIZATION, self::SUBJECT, self::CONTRIBUTOR, self::LANGUAGE, self::FILE_EXISTS, self::SOUNDCLOUD_ID, self::SOUNDCLOUD_ERROR_CODE, self::SOUNDCLOUD_ERROR_MSG, self::SOUNDCLOUD_LINK_TO_FILE, self::SOUNDCLOUD_UPLOAD_TIME, self::REPLAY_GAIN, self::OWNER_ID, self::CUEIN, self::CUEOUT, self::SILAN_CHECK, self::HIDDEN, self::IS_SCHEDULED, self::IS_PLAYLIST, ),
|
||||
BasePeer::TYPE_RAW_COLNAME => array ('ID', 'NAME', 'MIME', 'FTYPE', 'DIRECTORY', 'FILEPATH', 'STATE', 'CURRENTLYACCESSING', 'EDITEDBY', 'MTIME', 'UTIME', 'LPTIME', 'MD5', 'TRACK_TITLE', 'ARTIST_NAME', 'BIT_RATE', 'SAMPLE_RATE', 'FORMAT', 'LENGTH', 'ALBUM_TITLE', 'GENRE', 'COMMENTS', 'YEAR', 'TRACK_NUMBER', 'CHANNELS', 'URL', 'BPM', 'RATING', 'ENCODED_BY', 'DISC_NUMBER', 'MOOD', 'LABEL', 'COMPOSER', 'ENCODER', 'CHECKSUM', 'LYRICS', 'ORCHESTRA', 'CONDUCTOR', 'LYRICIST', 'ORIGINAL_LYRICIST', 'RADIO_STATION_NAME', 'INFO_URL', 'ARTIST_URL', 'AUDIO_SOURCE_URL', 'RADIO_STATION_URL', 'BUY_THIS_URL', 'ISRC_NUMBER', 'CATALOG_NUMBER', 'ORIGINAL_ARTIST', 'COPYRIGHT', 'REPORT_DATETIME', 'REPORT_LOCATION', 'REPORT_ORGANIZATION', 'SUBJECT', 'CONTRIBUTOR', 'LANGUAGE', 'FILE_EXISTS', 'SOUNDCLOUD_ID', 'SOUNDCLOUD_ERROR_CODE', 'SOUNDCLOUD_ERROR_MSG', 'SOUNDCLOUD_LINK_TO_FILE', 'SOUNDCLOUD_UPLOAD_TIME', 'REPLAY_GAIN', 'OWNER_ID', 'CUEIN', 'CUEOUT', 'SILAN_CHECK', 'HIDDEN', 'IS_SCHEDULED', 'IS_PLAYLIST', ),
|
||||
BasePeer::TYPE_FIELDNAME => array ('id', 'name', 'mime', 'ftype', 'directory', 'filepath', 'state', 'currentlyaccessing', 'editedby', 'mtime', 'utime', 'lptime', 'md5', 'track_title', 'artist_name', 'bit_rate', 'sample_rate', 'format', 'length', 'album_title', 'genre', 'comments', 'year', 'track_number', 'channels', 'url', 'bpm', 'rating', 'encoded_by', 'disc_number', 'mood', 'label', 'composer', 'encoder', 'checksum', 'lyrics', 'orchestra', 'conductor', 'lyricist', 'original_lyricist', 'radio_station_name', 'info_url', 'artist_url', 'audio_source_url', 'radio_station_url', 'buy_this_url', 'isrc_number', 'catalog_number', 'original_artist', 'copyright', 'report_datetime', 'report_location', 'report_organization', 'subject', 'contributor', 'language', 'file_exists', 'soundcloud_id', 'soundcloud_error_code', 'soundcloud_error_msg', 'soundcloud_link_to_file', 'soundcloud_upload_time', 'replay_gain', 'owner_id', 'cuein', 'cueout', 'silan_check', 'hidden', 'is_scheduled', 'is_playlist', ),
|
||||
BasePeer::TYPE_PHPNAME => array ('DbId', 'DbName', 'DbMime', 'DbFtype', 'DbDirectory', 'DbFilepath', 'DbImportStatus', 'DbCurrentlyaccessing', 'DbEditedby', 'DbMtime', 'DbUtime', 'DbLPtime', 'DbMd5', 'DbTrackTitle', 'DbArtistName', 'DbBitRate', 'DbSampleRate', 'DbFormat', 'DbLength', 'DbAlbumTitle', 'DbGenre', 'DbComments', 'DbYear', 'DbTrackNumber', 'DbChannels', 'DbUrl', 'DbBpm', 'DbRating', 'DbEncodedBy', 'DbDiscNumber', 'DbMood', 'DbLabel', 'DbComposer', 'DbEncoder', 'DbChecksum', 'DbLyrics', 'DbOrchestra', 'DbConductor', 'DbLyricist', 'DbOriginalLyricist', 'DbRadioStationName', 'DbInfoUrl', 'DbArtistUrl', 'DbAudioSourceUrl', 'DbRadioStationUrl', 'DbBuyThisUrl', 'DbIsrcNumber', 'DbCatalogNumber', 'DbOriginalArtist', 'DbCopyright', 'DbReportDatetime', 'DbReportLocation', 'DbReportOrganization', 'DbSubject', 'DbContributor', 'DbLanguage', 'DbFileExists', 'DbSoundcloudId', 'DbSoundcloudErrorCode', 'DbSoundcloudErrorMsg', 'DbSoundcloudLinkToFile', 'DbSoundCloundUploadTime', 'DbReplayGain', 'DbOwnerId', 'DbCuein', 'DbCueout', 'DbSilanCheck', 'DbHidden', 'DbIsScheduled', 'DbIsPlaylist', ),
|
||||
BasePeer::TYPE_STUDLYPHPNAME => array ('dbId', 'dbName', 'dbMime', 'dbFtype', 'dbDirectory', 'dbFilepath', 'dbImportStatus', 'dbCurrentlyaccessing', 'dbEditedby', 'dbMtime', 'dbUtime', 'dbLPtime', 'dbMd5', 'dbTrackTitle', 'dbArtistName', 'dbBitRate', 'dbSampleRate', 'dbFormat', 'dbLength', 'dbAlbumTitle', 'dbGenre', 'dbComments', 'dbYear', 'dbTrackNumber', 'dbChannels', 'dbUrl', 'dbBpm', 'dbRating', 'dbEncodedBy', 'dbDiscNumber', 'dbMood', 'dbLabel', 'dbComposer', 'dbEncoder', 'dbChecksum', 'dbLyrics', 'dbOrchestra', 'dbConductor', 'dbLyricist', 'dbOriginalLyricist', 'dbRadioStationName', 'dbInfoUrl', 'dbArtistUrl', 'dbAudioSourceUrl', 'dbRadioStationUrl', 'dbBuyThisUrl', 'dbIsrcNumber', 'dbCatalogNumber', 'dbOriginalArtist', 'dbCopyright', 'dbReportDatetime', 'dbReportLocation', 'dbReportOrganization', 'dbSubject', 'dbContributor', 'dbLanguage', 'dbFileExists', 'dbSoundcloudId', 'dbSoundcloudErrorCode', 'dbSoundcloudErrorMsg', 'dbSoundcloudLinkToFile', 'dbSoundCloundUploadTime', 'dbReplayGain', 'dbOwnerId', 'dbCuein', 'dbCueout', 'dbSilanCheck', 'dbHidden', 'dbIsScheduled', 'dbIsPlaylist', ),
|
||||
BasePeer::TYPE_COLNAME => array (self::ID, self::NAME, self::MIME, self::FTYPE, self::DIRECTORY, self::FILEPATH, self::IMPORT_STATUS, self::CURRENTLYACCESSING, self::EDITEDBY, self::MTIME, self::UTIME, self::LPTIME, self::MD5, self::TRACK_TITLE, self::ARTIST_NAME, self::BIT_RATE, self::SAMPLE_RATE, self::FORMAT, self::LENGTH, self::ALBUM_TITLE, self::GENRE, self::COMMENTS, self::YEAR, self::TRACK_NUMBER, self::CHANNELS, self::URL, self::BPM, self::RATING, self::ENCODED_BY, self::DISC_NUMBER, self::MOOD, self::LABEL, self::COMPOSER, self::ENCODER, self::CHECKSUM, self::LYRICS, self::ORCHESTRA, self::CONDUCTOR, self::LYRICIST, self::ORIGINAL_LYRICIST, self::RADIO_STATION_NAME, self::INFO_URL, self::ARTIST_URL, self::AUDIO_SOURCE_URL, self::RADIO_STATION_URL, self::BUY_THIS_URL, self::ISRC_NUMBER, self::CATALOG_NUMBER, self::ORIGINAL_ARTIST, self::COPYRIGHT, self::REPORT_DATETIME, self::REPORT_LOCATION, self::REPORT_ORGANIZATION, self::SUBJECT, self::CONTRIBUTOR, self::LANGUAGE, self::FILE_EXISTS, self::SOUNDCLOUD_ID, self::SOUNDCLOUD_ERROR_CODE, self::SOUNDCLOUD_ERROR_MSG, self::SOUNDCLOUD_LINK_TO_FILE, self::SOUNDCLOUD_UPLOAD_TIME, self::REPLAY_GAIN, self::OWNER_ID, self::CUEIN, self::CUEOUT, self::SILAN_CHECK, self::HIDDEN, self::IS_SCHEDULED, self::IS_PLAYLIST, ),
|
||||
BasePeer::TYPE_RAW_COLNAME => array ('ID', 'NAME', 'MIME', 'FTYPE', 'DIRECTORY', 'FILEPATH', 'IMPORT_STATUS', 'CURRENTLYACCESSING', 'EDITEDBY', 'MTIME', 'UTIME', 'LPTIME', 'MD5', 'TRACK_TITLE', 'ARTIST_NAME', 'BIT_RATE', 'SAMPLE_RATE', 'FORMAT', 'LENGTH', 'ALBUM_TITLE', 'GENRE', 'COMMENTS', 'YEAR', 'TRACK_NUMBER', 'CHANNELS', 'URL', 'BPM', 'RATING', 'ENCODED_BY', 'DISC_NUMBER', 'MOOD', 'LABEL', 'COMPOSER', 'ENCODER', 'CHECKSUM', 'LYRICS', 'ORCHESTRA', 'CONDUCTOR', 'LYRICIST', 'ORIGINAL_LYRICIST', 'RADIO_STATION_NAME', 'INFO_URL', 'ARTIST_URL', 'AUDIO_SOURCE_URL', 'RADIO_STATION_URL', 'BUY_THIS_URL', 'ISRC_NUMBER', 'CATALOG_NUMBER', 'ORIGINAL_ARTIST', 'COPYRIGHT', 'REPORT_DATETIME', 'REPORT_LOCATION', 'REPORT_ORGANIZATION', 'SUBJECT', 'CONTRIBUTOR', 'LANGUAGE', 'FILE_EXISTS', 'SOUNDCLOUD_ID', 'SOUNDCLOUD_ERROR_CODE', 'SOUNDCLOUD_ERROR_MSG', 'SOUNDCLOUD_LINK_TO_FILE', 'SOUNDCLOUD_UPLOAD_TIME', 'REPLAY_GAIN', 'OWNER_ID', 'CUEIN', 'CUEOUT', 'SILAN_CHECK', 'HIDDEN', 'IS_SCHEDULED', 'IS_PLAYLIST', ),
|
||||
BasePeer::TYPE_FIELDNAME => array ('id', 'name', 'mime', 'ftype', 'directory', 'filepath', 'import_status', 'currentlyaccessing', 'editedby', 'mtime', 'utime', 'lptime', 'md5', 'track_title', 'artist_name', 'bit_rate', 'sample_rate', 'format', 'length', 'album_title', 'genre', 'comments', 'year', 'track_number', 'channels', 'url', 'bpm', 'rating', 'encoded_by', 'disc_number', 'mood', 'label', 'composer', 'encoder', 'checksum', 'lyrics', 'orchestra', 'conductor', 'lyricist', 'original_lyricist', 'radio_station_name', 'info_url', 'artist_url', 'audio_source_url', 'radio_station_url', 'buy_this_url', 'isrc_number', 'catalog_number', 'original_artist', 'copyright', 'report_datetime', 'report_location', 'report_organization', 'subject', 'contributor', 'language', 'file_exists', 'soundcloud_id', 'soundcloud_error_code', 'soundcloud_error_msg', 'soundcloud_link_to_file', 'soundcloud_upload_time', 'replay_gain', 'owner_id', 'cuein', 'cueout', 'silan_check', 'hidden', 'is_scheduled', 'is_playlist', ),
|
||||
BasePeer::TYPE_NUM => array (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, )
|
||||
);
|
||||
|
||||
|
@@ -272,11 +272,11 @@ abstract class BaseCcFilesPeer {
|
|||
* e.g. self::$fieldNames[BasePeer::TYPE_PHPNAME]['Id'] = 0
|
||||
*/
|
||||
private static $fieldKeys = array (
|
||||
BasePeer::TYPE_PHPNAME => array ('DbId' => 0, 'DbName' => 1, 'DbMime' => 2, 'DbFtype' => 3, 'DbDirectory' => 4, 'DbFilepath' => 5, 'DbState' => 6, 'DbCurrentlyaccessing' => 7, 'DbEditedby' => 8, 'DbMtime' => 9, 'DbUtime' => 10, 'DbLPtime' => 11, 'DbMd5' => 12, 'DbTrackTitle' => 13, 'DbArtistName' => 14, 'DbBitRate' => 15, 'DbSampleRate' => 16, 'DbFormat' => 17, 'DbLength' => 18, 'DbAlbumTitle' => 19, 'DbGenre' => 20, 'DbComments' => 21, 'DbYear' => 22, 'DbTrackNumber' => 23, 'DbChannels' => 24, 'DbUrl' => 25, 'DbBpm' => 26, 'DbRating' => 27, 'DbEncodedBy' => 28, 'DbDiscNumber' => 29, 'DbMood' => 30, 'DbLabel' => 31, 'DbComposer' => 32, 'DbEncoder' => 33, 'DbChecksum' => 34, 'DbLyrics' => 35, 'DbOrchestra' => 36, 'DbConductor' => 37, 'DbLyricist' => 38, 'DbOriginalLyricist' => 39, 'DbRadioStationName' => 40, 'DbInfoUrl' => 41, 'DbArtistUrl' => 42, 'DbAudioSourceUrl' => 43, 'DbRadioStationUrl' => 44, 'DbBuyThisUrl' => 45, 'DbIsrcNumber' => 46, 'DbCatalogNumber' => 47, 'DbOriginalArtist' => 48, 'DbCopyright' => 49, 'DbReportDatetime' => 50, 'DbReportLocation' => 51, 'DbReportOrganization' => 52, 'DbSubject' => 53, 'DbContributor' => 54, 'DbLanguage' => 55, 'DbFileExists' => 56, 'DbSoundcloudId' => 57, 'DbSoundcloudErrorCode' => 58, 'DbSoundcloudErrorMsg' => 59, 'DbSoundcloudLinkToFile' => 60, 'DbSoundCloundUploadTime' => 61, 'DbReplayGain' => 62, 'DbOwnerId' => 63, 'DbCuein' => 64, 'DbCueout' => 65, 'DbSilanCheck' => 66, 'DbHidden' => 67, 'DbIsScheduled' => 68, 'DbIsPlaylist' => 69, ),
|
||||
BasePeer::TYPE_STUDLYPHPNAME => array ('dbId' => 0, 'dbName' => 1, 'dbMime' => 2, 'dbFtype' => 3, 'dbDirectory' => 4, 'dbFilepath' => 5, 'dbState' => 6, 'dbCurrentlyaccessing' => 7, 'dbEditedby' => 8, 'dbMtime' => 9, 'dbUtime' => 10, 'dbLPtime' => 11, 'dbMd5' => 12, 'dbTrackTitle' => 13, 'dbArtistName' => 14, 'dbBitRate' => 15, 'dbSampleRate' => 16, 'dbFormat' => 17, 'dbLength' => 18, 'dbAlbumTitle' => 19, 'dbGenre' => 20, 'dbComments' => 21, 'dbYear' => 22, 'dbTrackNumber' => 23, 'dbChannels' => 24, 'dbUrl' => 25, 'dbBpm' => 26, 'dbRating' => 27, 'dbEncodedBy' => 28, 'dbDiscNumber' => 29, 'dbMood' => 30, 'dbLabel' => 31, 'dbComposer' => 32, 'dbEncoder' => 33, 'dbChecksum' => 34, 'dbLyrics' => 35, 'dbOrchestra' => 36, 'dbConductor' => 37, 'dbLyricist' => 38, 'dbOriginalLyricist' => 39, 'dbRadioStationName' => 40, 'dbInfoUrl' => 41, 'dbArtistUrl' => 42, 'dbAudioSourceUrl' => 43, 'dbRadioStationUrl' => 44, 'dbBuyThisUrl' => 45, 'dbIsrcNumber' => 46, 'dbCatalogNumber' => 47, 'dbOriginalArtist' => 48, 'dbCopyright' => 49, 'dbReportDatetime' => 50, 'dbReportLocation' => 51, 'dbReportOrganization' => 52, 'dbSubject' => 53, 'dbContributor' => 54, 'dbLanguage' => 55, 'dbFileExists' => 56, 'dbSoundcloudId' => 57, 'dbSoundcloudErrorCode' => 58, 'dbSoundcloudErrorMsg' => 59, 'dbSoundcloudLinkToFile' => 60, 'dbSoundCloundUploadTime' => 61, 'dbReplayGain' => 62, 'dbOwnerId' => 63, 'dbCuein' => 64, 'dbCueout' => 65, 'dbSilanCheck' => 66, 'dbHidden' => 67, 'dbIsScheduled' => 68, 'dbIsPlaylist' => 69, ),
|
||||
BasePeer::TYPE_COLNAME => array (self::ID => 0, self::NAME => 1, self::MIME => 2, self::FTYPE => 3, self::DIRECTORY => 4, self::FILEPATH => 5, self::STATE => 6, self::CURRENTLYACCESSING => 7, self::EDITEDBY => 8, self::MTIME => 9, self::UTIME => 10, self::LPTIME => 11, self::MD5 => 12, self::TRACK_TITLE => 13, self::ARTIST_NAME => 14, self::BIT_RATE => 15, self::SAMPLE_RATE => 16, self::FORMAT => 17, self::LENGTH => 18, self::ALBUM_TITLE => 19, self::GENRE => 20, self::COMMENTS => 21, self::YEAR => 22, self::TRACK_NUMBER => 23, self::CHANNELS => 24, self::URL => 25, self::BPM => 26, self::RATING => 27, self::ENCODED_BY => 28, self::DISC_NUMBER => 29, self::MOOD => 30, self::LABEL => 31, self::COMPOSER => 32, self::ENCODER => 33, self::CHECKSUM => 34, self::LYRICS => 35, self::ORCHESTRA => 36, self::CONDUCTOR => 37, self::LYRICIST => 38, self::ORIGINAL_LYRICIST => 39, self::RADIO_STATION_NAME => 40, self::INFO_URL => 41, self::ARTIST_URL => 42, self::AUDIO_SOURCE_URL => 43, self::RADIO_STATION_URL => 44, self::BUY_THIS_URL => 45, self::ISRC_NUMBER => 46, self::CATALOG_NUMBER => 47, self::ORIGINAL_ARTIST => 48, self::COPYRIGHT => 49, self::REPORT_DATETIME => 50, self::REPORT_LOCATION => 51, self::REPORT_ORGANIZATION => 52, self::SUBJECT => 53, self::CONTRIBUTOR => 54, self::LANGUAGE => 55, self::FILE_EXISTS => 56, self::SOUNDCLOUD_ID => 57, self::SOUNDCLOUD_ERROR_CODE => 58, self::SOUNDCLOUD_ERROR_MSG => 59, self::SOUNDCLOUD_LINK_TO_FILE => 60, self::SOUNDCLOUD_UPLOAD_TIME => 61, self::REPLAY_GAIN => 62, self::OWNER_ID => 63, self::CUEIN => 64, self::CUEOUT => 65, self::SILAN_CHECK => 66, self::HIDDEN => 67, self::IS_SCHEDULED => 68, self::IS_PLAYLIST => 69, ),
|
||||
BasePeer::TYPE_RAW_COLNAME => array ('ID' => 0, 'NAME' => 1, 'MIME' => 2, 'FTYPE' => 3, 'DIRECTORY' => 4, 'FILEPATH' => 5, 'STATE' => 6, 'CURRENTLYACCESSING' => 7, 'EDITEDBY' => 8, 'MTIME' => 9, 'UTIME' => 10, 'LPTIME' => 11, 'MD5' => 12, 'TRACK_TITLE' => 13, 'ARTIST_NAME' => 14, 'BIT_RATE' => 15, 'SAMPLE_RATE' => 16, 'FORMAT' => 17, 'LENGTH' => 18, 'ALBUM_TITLE' => 19, 'GENRE' => 20, 'COMMENTS' => 21, 'YEAR' => 22, 'TRACK_NUMBER' => 23, 'CHANNELS' => 24, 'URL' => 25, 'BPM' => 26, 'RATING' => 27, 'ENCODED_BY' => 28, 'DISC_NUMBER' => 29, 'MOOD' => 30, 'LABEL' => 31, 'COMPOSER' => 32, 'ENCODER' => 33, 'CHECKSUM' => 34, 'LYRICS' => 35, 'ORCHESTRA' => 36, 'CONDUCTOR' => 37, 'LYRICIST' => 38, 'ORIGINAL_LYRICIST' => 39, 'RADIO_STATION_NAME' => 40, 'INFO_URL' => 41, 'ARTIST_URL' => 42, 'AUDIO_SOURCE_URL' => 43, 'RADIO_STATION_URL' => 44, 'BUY_THIS_URL' => 45, 'ISRC_NUMBER' => 46, 'CATALOG_NUMBER' => 47, 'ORIGINAL_ARTIST' => 48, 'COPYRIGHT' => 49, 'REPORT_DATETIME' => 50, 'REPORT_LOCATION' => 51, 'REPORT_ORGANIZATION' => 52, 'SUBJECT' => 53, 'CONTRIBUTOR' => 54, 'LANGUAGE' => 55, 'FILE_EXISTS' => 56, 'SOUNDCLOUD_ID' => 57, 'SOUNDCLOUD_ERROR_CODE' => 58, 'SOUNDCLOUD_ERROR_MSG' => 59, 'SOUNDCLOUD_LINK_TO_FILE' => 60, 'SOUNDCLOUD_UPLOAD_TIME' => 61, 'REPLAY_GAIN' => 62, 'OWNER_ID' => 63, 'CUEIN' => 64, 'CUEOUT' => 65, 'SILAN_CHECK' => 66, 'HIDDEN' => 67, 'IS_SCHEDULED' => 68, 'IS_PLAYLIST' => 69, ),
|
||||
BasePeer::TYPE_FIELDNAME => array ('id' => 0, 'name' => 1, 'mime' => 2, 'ftype' => 3, 'directory' => 4, 'filepath' => 5, 'state' => 6, 'currentlyaccessing' => 7, 'editedby' => 8, 'mtime' => 9, 'utime' => 10, 'lptime' => 11, 'md5' => 12, 'track_title' => 13, 'artist_name' => 14, 'bit_rate' => 15, 'sample_rate' => 16, 'format' => 17, 'length' => 18, 'album_title' => 19, 'genre' => 20, 'comments' => 21, 'year' => 22, 'track_number' => 23, 'channels' => 24, 'url' => 25, 'bpm' => 26, 'rating' => 27, 'encoded_by' => 28, 'disc_number' => 29, 'mood' => 30, 'label' => 31, 'composer' => 32, 'encoder' => 33, 'checksum' => 34, 'lyrics' => 35, 'orchestra' => 36, 'conductor' => 37, 'lyricist' => 38, 'original_lyricist' => 39, 'radio_station_name' => 40, 'info_url' => 41, 'artist_url' => 42, 'audio_source_url' => 43, 'radio_station_url' => 44, 'buy_this_url' => 45, 'isrc_number' => 46, 'catalog_number' => 47, 'original_artist' => 48, 'copyright' => 49, 'report_datetime' => 50, 'report_location' => 51, 'report_organization' => 52, 'subject' => 53, 'contributor' => 54, 'language' => 55, 'file_exists' => 56, 'soundcloud_id' => 57, 'soundcloud_error_code' => 58, 'soundcloud_error_msg' => 59, 'soundcloud_link_to_file' => 60, 'soundcloud_upload_time' => 61, 'replay_gain' => 62, 'owner_id' => 63, 'cuein' => 64, 'cueout' => 65, 'silan_check' => 66, 'hidden' => 67, 'is_scheduled' => 68, 'is_playlist' => 69, ),
|
||||
BasePeer::TYPE_PHPNAME => array ('DbId' => 0, 'DbName' => 1, 'DbMime' => 2, 'DbFtype' => 3, 'DbDirectory' => 4, 'DbFilepath' => 5, 'DbImportStatus' => 6, 'DbCurrentlyaccessing' => 7, 'DbEditedby' => 8, 'DbMtime' => 9, 'DbUtime' => 10, 'DbLPtime' => 11, 'DbMd5' => 12, 'DbTrackTitle' => 13, 'DbArtistName' => 14, 'DbBitRate' => 15, 'DbSampleRate' => 16, 'DbFormat' => 17, 'DbLength' => 18, 'DbAlbumTitle' => 19, 'DbGenre' => 20, 'DbComments' => 21, 'DbYear' => 22, 'DbTrackNumber' => 23, 'DbChannels' => 24, 'DbUrl' => 25, 'DbBpm' => 26, 'DbRating' => 27, 'DbEncodedBy' => 28, 'DbDiscNumber' => 29, 'DbMood' => 30, 'DbLabel' => 31, 'DbComposer' => 32, 'DbEncoder' => 33, 'DbChecksum' => 34, 'DbLyrics' => 35, 'DbOrchestra' => 36, 'DbConductor' => 37, 'DbLyricist' => 38, 'DbOriginalLyricist' => 39, 'DbRadioStationName' => 40, 'DbInfoUrl' => 41, 'DbArtistUrl' => 42, 'DbAudioSourceUrl' => 43, 'DbRadioStationUrl' => 44, 'DbBuyThisUrl' => 45, 'DbIsrcNumber' => 46, 'DbCatalogNumber' => 47, 'DbOriginalArtist' => 48, 'DbCopyright' => 49, 'DbReportDatetime' => 50, 'DbReportLocation' => 51, 'DbReportOrganization' => 52, 'DbSubject' => 53, 'DbContributor' => 54, 'DbLanguage' => 55, 'DbFileExists' => 56, 'DbSoundcloudId' => 57, 'DbSoundcloudErrorCode' => 58, 'DbSoundcloudErrorMsg' => 59, 'DbSoundcloudLinkToFile' => 60, 'DbSoundCloundUploadTime' => 61, 'DbReplayGain' => 62, 'DbOwnerId' => 63, 'DbCuein' => 64, 'DbCueout' => 65, 'DbSilanCheck' => 66, 'DbHidden' => 67, 'DbIsScheduled' => 68, 'DbIsPlaylist' => 69, ),
|
||||
BasePeer::TYPE_STUDLYPHPNAME => array ('dbId' => 0, 'dbName' => 1, 'dbMime' => 2, 'dbFtype' => 3, 'dbDirectory' => 4, 'dbFilepath' => 5, 'dbImportStatus' => 6, 'dbCurrentlyaccessing' => 7, 'dbEditedby' => 8, 'dbMtime' => 9, 'dbUtime' => 10, 'dbLPtime' => 11, 'dbMd5' => 12, 'dbTrackTitle' => 13, 'dbArtistName' => 14, 'dbBitRate' => 15, 'dbSampleRate' => 16, 'dbFormat' => 17, 'dbLength' => 18, 'dbAlbumTitle' => 19, 'dbGenre' => 20, 'dbComments' => 21, 'dbYear' => 22, 'dbTrackNumber' => 23, 'dbChannels' => 24, 'dbUrl' => 25, 'dbBpm' => 26, 'dbRating' => 27, 'dbEncodedBy' => 28, 'dbDiscNumber' => 29, 'dbMood' => 30, 'dbLabel' => 31, 'dbComposer' => 32, 'dbEncoder' => 33, 'dbChecksum' => 34, 'dbLyrics' => 35, 'dbOrchestra' => 36, 'dbConductor' => 37, 'dbLyricist' => 38, 'dbOriginalLyricist' => 39, 'dbRadioStationName' => 40, 'dbInfoUrl' => 41, 'dbArtistUrl' => 42, 'dbAudioSourceUrl' => 43, 'dbRadioStationUrl' => 44, 'dbBuyThisUrl' => 45, 'dbIsrcNumber' => 46, 'dbCatalogNumber' => 47, 'dbOriginalArtist' => 48, 'dbCopyright' => 49, 'dbReportDatetime' => 50, 'dbReportLocation' => 51, 'dbReportOrganization' => 52, 'dbSubject' => 53, 'dbContributor' => 54, 'dbLanguage' => 55, 'dbFileExists' => 56, 'dbSoundcloudId' => 57, 'dbSoundcloudErrorCode' => 58, 'dbSoundcloudErrorMsg' => 59, 'dbSoundcloudLinkToFile' => 60, 'dbSoundCloundUploadTime' => 61, 'dbReplayGain' => 62, 'dbOwnerId' => 63, 'dbCuein' => 64, 'dbCueout' => 65, 'dbSilanCheck' => 66, 'dbHidden' => 67, 'dbIsScheduled' => 68, 'dbIsPlaylist' => 69, ),
|
||||
BasePeer::TYPE_COLNAME => array (self::ID => 0, self::NAME => 1, self::MIME => 2, self::FTYPE => 3, self::DIRECTORY => 4, self::FILEPATH => 5, self::IMPORT_STATUS => 6, self::CURRENTLYACCESSING => 7, self::EDITEDBY => 8, self::MTIME => 9, self::UTIME => 10, self::LPTIME => 11, self::MD5 => 12, self::TRACK_TITLE => 13, self::ARTIST_NAME => 14, self::BIT_RATE => 15, self::SAMPLE_RATE => 16, self::FORMAT => 17, self::LENGTH => 18, self::ALBUM_TITLE => 19, self::GENRE => 20, self::COMMENTS => 21, self::YEAR => 22, self::TRACK_NUMBER => 23, self::CHANNELS => 24, self::URL => 25, self::BPM => 26, self::RATING => 27, self::ENCODED_BY => 28, self::DISC_NUMBER => 29, self::MOOD => 30, self::LABEL => 31, self::COMPOSER => 32, self::ENCODER => 33, self::CHECKSUM => 34, self::LYRICS => 35, self::ORCHESTRA => 36, self::CONDUCTOR => 37, self::LYRICIST => 38, self::ORIGINAL_LYRICIST => 39, self::RADIO_STATION_NAME => 40, self::INFO_URL => 41, self::ARTIST_URL => 42, self::AUDIO_SOURCE_URL => 43, self::RADIO_STATION_URL => 44, self::BUY_THIS_URL => 45, self::ISRC_NUMBER => 46, self::CATALOG_NUMBER => 47, self::ORIGINAL_ARTIST => 48, self::COPYRIGHT => 49, self::REPORT_DATETIME => 50, self::REPORT_LOCATION => 51, self::REPORT_ORGANIZATION => 52, self::SUBJECT => 53, self::CONTRIBUTOR => 54, self::LANGUAGE => 55, self::FILE_EXISTS => 56, self::SOUNDCLOUD_ID => 57, self::SOUNDCLOUD_ERROR_CODE => 58, self::SOUNDCLOUD_ERROR_MSG => 59, self::SOUNDCLOUD_LINK_TO_FILE => 60, self::SOUNDCLOUD_UPLOAD_TIME => 61, self::REPLAY_GAIN => 62, self::OWNER_ID => 63, self::CUEIN => 64, self::CUEOUT => 65, self::SILAN_CHECK => 66, self::HIDDEN => 67, self::IS_SCHEDULED => 68, self::IS_PLAYLIST => 69, ),
|
||||
BasePeer::TYPE_RAW_COLNAME => array ('ID' => 0, 'NAME' => 1, 'MIME' => 2, 'FTYPE' => 3, 'DIRECTORY' => 4, 'FILEPATH' => 5, 'IMPORT_STATUS' => 6, 'CURRENTLYACCESSING' => 7, 'EDITEDBY' => 8, 'MTIME' => 9, 'UTIME' => 10, 'LPTIME' => 11, 'MD5' => 12, 'TRACK_TITLE' => 13, 'ARTIST_NAME' => 14, 'BIT_RATE' => 15, 'SAMPLE_RATE' => 16, 'FORMAT' => 17, 'LENGTH' => 18, 'ALBUM_TITLE' => 19, 'GENRE' => 20, 'COMMENTS' => 21, 'YEAR' => 22, 'TRACK_NUMBER' => 23, 'CHANNELS' => 24, 'URL' => 25, 'BPM' => 26, 'RATING' => 27, 'ENCODED_BY' => 28, 'DISC_NUMBER' => 29, 'MOOD' => 30, 'LABEL' => 31, 'COMPOSER' => 32, 'ENCODER' => 33, 'CHECKSUM' => 34, 'LYRICS' => 35, 'ORCHESTRA' => 36, 'CONDUCTOR' => 37, 'LYRICIST' => 38, 'ORIGINAL_LYRICIST' => 39, 'RADIO_STATION_NAME' => 40, 'INFO_URL' => 41, 'ARTIST_URL' => 42, 'AUDIO_SOURCE_URL' => 43, 'RADIO_STATION_URL' => 44, 'BUY_THIS_URL' => 45, 'ISRC_NUMBER' => 46, 'CATALOG_NUMBER' => 47, 'ORIGINAL_ARTIST' => 48, 'COPYRIGHT' => 49, 'REPORT_DATETIME' => 50, 'REPORT_LOCATION' => 51, 'REPORT_ORGANIZATION' => 52, 'SUBJECT' => 53, 'CONTRIBUTOR' => 54, 'LANGUAGE' => 55, 'FILE_EXISTS' => 56, 'SOUNDCLOUD_ID' => 57, 'SOUNDCLOUD_ERROR_CODE' => 58, 'SOUNDCLOUD_ERROR_MSG' => 59, 'SOUNDCLOUD_LINK_TO_FILE' => 60, 'SOUNDCLOUD_UPLOAD_TIME' => 61, 'REPLAY_GAIN' => 62, 'OWNER_ID' => 63, 'CUEIN' => 64, 'CUEOUT' => 65, 'SILAN_CHECK' => 66, 'HIDDEN' => 67, 'IS_SCHEDULED' => 68, 'IS_PLAYLIST' => 69, ),
|
||||
BasePeer::TYPE_FIELDNAME => array ('id' => 0, 'name' => 1, 'mime' => 2, 'ftype' => 3, 'directory' => 4, 'filepath' => 5, 'import_status' => 6, 'currentlyaccessing' => 7, 'editedby' => 8, 'mtime' => 9, 'utime' => 10, 'lptime' => 11, 'md5' => 12, 'track_title' => 13, 'artist_name' => 14, 'bit_rate' => 15, 'sample_rate' => 16, 'format' => 17, 'length' => 18, 'album_title' => 19, 'genre' => 20, 'comments' => 21, 'year' => 22, 'track_number' => 23, 'channels' => 24, 'url' => 25, 'bpm' => 26, 'rating' => 27, 'encoded_by' => 28, 'disc_number' => 29, 'mood' => 30, 'label' => 31, 'composer' => 32, 'encoder' => 33, 'checksum' => 34, 'lyrics' => 35, 'orchestra' => 36, 'conductor' => 37, 'lyricist' => 38, 'original_lyricist' => 39, 'radio_station_name' => 40, 'info_url' => 41, 'artist_url' => 42, 'audio_source_url' => 43, 'radio_station_url' => 44, 'buy_this_url' => 45, 'isrc_number' => 46, 'catalog_number' => 47, 'original_artist' => 48, 'copyright' => 49, 'report_datetime' => 50, 'report_location' => 51, 'report_organization' => 52, 'subject' => 53, 'contributor' => 54, 'language' => 55, 'file_exists' => 56, 'soundcloud_id' => 57, 'soundcloud_error_code' => 58, 'soundcloud_error_msg' => 59, 'soundcloud_link_to_file' => 60, 'soundcloud_upload_time' => 61, 'replay_gain' => 62, 'owner_id' => 63, 'cuein' => 64, 'cueout' => 65, 'silan_check' => 66, 'hidden' => 67, 'is_scheduled' => 68, 'is_playlist' => 69, ),
|
||||
BasePeer::TYPE_NUM => array (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, )
|
||||
);
|
||||
|
||||
|
@@ -355,7 +355,7 @@ abstract class BaseCcFilesPeer {
$criteria->addSelectColumn(CcFilesPeer::FTYPE);
$criteria->addSelectColumn(CcFilesPeer::DIRECTORY);
$criteria->addSelectColumn(CcFilesPeer::FILEPATH);
$criteria->addSelectColumn(CcFilesPeer::STATE);
$criteria->addSelectColumn(CcFilesPeer::IMPORT_STATUS);
$criteria->addSelectColumn(CcFilesPeer::CURRENTLYACCESSING);
$criteria->addSelectColumn(CcFilesPeer::EDITEDBY);
$criteria->addSelectColumn(CcFilesPeer::MTIME);

@@ -426,7 +426,7 @@ abstract class BaseCcFilesPeer {
$criteria->addSelectColumn($alias . '.FTYPE');
$criteria->addSelectColumn($alias . '.DIRECTORY');
$criteria->addSelectColumn($alias . '.FILEPATH');
$criteria->addSelectColumn($alias . '.STATE');
$criteria->addSelectColumn($alias . '.IMPORT_STATUS');
$criteria->addSelectColumn($alias . '.CURRENTLYACCESSING');
$criteria->addSelectColumn($alias . '.EDITEDBY');
$criteria->addSelectColumn($alias . '.MTIME');
@@ -12,7 +12,7 @@
* @method CcFilesQuery orderByDbFtype($order = Criteria::ASC) Order by the ftype column
* @method CcFilesQuery orderByDbDirectory($order = Criteria::ASC) Order by the directory column
* @method CcFilesQuery orderByDbFilepath($order = Criteria::ASC) Order by the filepath column
* @method CcFilesQuery orderByDbState($order = Criteria::ASC) Order by the state column
* @method CcFilesQuery orderByDbImportStatus($order = Criteria::ASC) Order by the import_status column
* @method CcFilesQuery orderByDbCurrentlyaccessing($order = Criteria::ASC) Order by the currentlyaccessing column
* @method CcFilesQuery orderByDbEditedby($order = Criteria::ASC) Order by the editedby column
* @method CcFilesQuery orderByDbMtime($order = Criteria::ASC) Order by the mtime column

@@ -83,7 +83,7 @@
* @method CcFilesQuery groupByDbFtype() Group by the ftype column
* @method CcFilesQuery groupByDbDirectory() Group by the directory column
* @method CcFilesQuery groupByDbFilepath() Group by the filepath column
* @method CcFilesQuery groupByDbState() Group by the state column
* @method CcFilesQuery groupByDbImportStatus() Group by the import_status column
* @method CcFilesQuery groupByDbCurrentlyaccessing() Group by the currentlyaccessing column
* @method CcFilesQuery groupByDbEditedby() Group by the editedby column
* @method CcFilesQuery groupByDbMtime() Group by the mtime column

@@ -193,7 +193,7 @@
* @method CcFiles findOneByDbFtype(string $ftype) Return the first CcFiles filtered by the ftype column
* @method CcFiles findOneByDbDirectory(int $directory) Return the first CcFiles filtered by the directory column
* @method CcFiles findOneByDbFilepath(string $filepath) Return the first CcFiles filtered by the filepath column
* @method CcFiles findOneByDbState(string $state) Return the first CcFiles filtered by the state column
* @method CcFiles findOneByDbImportStatus(int $import_status) Return the first CcFiles filtered by the import_status column
* @method CcFiles findOneByDbCurrentlyaccessing(int $currentlyaccessing) Return the first CcFiles filtered by the currentlyaccessing column
* @method CcFiles findOneByDbEditedby(int $editedby) Return the first CcFiles filtered by the editedby column
* @method CcFiles findOneByDbMtime(string $mtime) Return the first CcFiles filtered by the mtime column

@@ -264,7 +264,7 @@
* @method array findByDbFtype(string $ftype) Return CcFiles objects filtered by the ftype column
* @method array findByDbDirectory(int $directory) Return CcFiles objects filtered by the directory column
* @method array findByDbFilepath(string $filepath) Return CcFiles objects filtered by the filepath column
* @method array findByDbState(string $state) Return CcFiles objects filtered by the state column
* @method array findByDbImportStatus(int $import_status) Return CcFiles objects filtered by the import_status column
* @method array findByDbCurrentlyaccessing(int $currentlyaccessing) Return CcFiles objects filtered by the currentlyaccessing column
* @method array findByDbEditedby(int $editedby) Return CcFiles objects filtered by the editedby column
* @method array findByDbMtime(string $mtime) Return CcFiles objects filtered by the mtime column
@ -574,25 +574,34 @@ abstract class BaseCcFilesQuery extends ModelCriteria
|
|||
}
|
||||
|
||||
/**
|
||||
* Filter the query on the state column
|
||||
* Filter the query on the import_status column
|
||||
*
|
||||
* @param string $dbState The value to use as filter.
|
||||
* Accepts wildcards (* and % trigger a LIKE)
|
||||
* @param int|array $dbImportStatus The value to use as filter.
|
||||
* Accepts an associative array('min' => $minValue, 'max' => $maxValue)
|
||||
* @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
|
||||
*
|
||||
* @return CcFilesQuery The current query, for fluid interface
|
||||
*/
|
||||
public function filterByDbState($dbState = null, $comparison = null)
|
||||
public function filterByDbImportStatus($dbImportStatus = null, $comparison = null)
|
||||
{
|
||||
if (null === $comparison) {
|
||||
if (is_array($dbState)) {
|
||||
if (is_array($dbImportStatus)) {
|
||||
$useMinMax = false;
|
||||
if (isset($dbImportStatus['min'])) {
|
||||
$this->addUsingAlias(CcFilesPeer::IMPORT_STATUS, $dbImportStatus['min'], Criteria::GREATER_EQUAL);
|
||||
$useMinMax = true;
|
||||
}
|
||||
if (isset($dbImportStatus['max'])) {
|
||||
$this->addUsingAlias(CcFilesPeer::IMPORT_STATUS, $dbImportStatus['max'], Criteria::LESS_EQUAL);
|
||||
$useMinMax = true;
|
||||
}
|
||||
if ($useMinMax) {
|
||||
return $this;
|
||||
}
|
||||
if (null === $comparison) {
|
||||
$comparison = Criteria::IN;
|
||||
} elseif (preg_match('/[\%\*]/', $dbState)) {
|
||||
$dbState = str_replace('*', '%', $dbState);
|
||||
$comparison = Criteria::LIKE;
|
||||
}
|
||||
}
|
||||
return $this->addUsingAlias(CcFilesPeer::STATE, $dbState, $comparison);
|
||||
return $this->addUsingAlias(CcFilesPeer::IMPORT_STATUS, $dbImportStatus, $comparison);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -0,0 +1,27 @@
|
|||
<?php
|
||||
|
||||
class Rest_Bootstrap extends Zend_Application_Module_Bootstrap
|
||||
{
|
||||
protected function _initRouter()
|
||||
{
|
||||
$front = Zend_Controller_Front::getInstance();
|
||||
$router = $front->getRouter();
|
||||
|
||||
$restRoute = new Zend_Rest_Route($front, array(), array(
|
||||
'rest'=> array('media')));
|
||||
assert($router->addRoute('rest', $restRoute));
|
||||
|
||||
$downloadRoute = new Zend_Controller_Router_Route(
|
||||
'rest/media/:id/download',
|
||||
array(
|
||||
'controller' => 'media',
|
||||
'action' => 'download',
|
||||
'module' => 'rest'
|
||||
),
|
||||
array(
|
||||
'id' => '\d+'
|
||||
)
|
||||
);
|
||||
$router->addRoute('download', $downloadRoute);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,382 @@
|
|||
<?php
|
||||
|
||||
|
||||
class Rest_MediaController extends Zend_Rest_Controller
|
||||
{
|
||||
//fields that are not modifiable via our RESTful API
|
||||
private $blackList = array(
|
||||
'id',
|
||||
'file_exists',
|
||||
'silan_check',
|
||||
'soundcloud_id',
|
||||
'is_scheduled',
|
||||
'is_playlist'
|
||||
);
|
||||
|
||||
//fields we should never expose through our RESTful API
|
||||
private $privateFields = array(
|
||||
'file_exists',
|
||||
'silan_check',
|
||||
'is_scheduled',
|
||||
'is_playlist'
|
||||
);
|
||||
|
||||
public function init()
|
||||
{
|
||||
$this->view->layout()->disableLayout();
|
||||
}
|
||||
|
||||
public function indexAction()
|
||||
{
|
||||
if (!$this->verifyAuth(true, true))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
$files_array = array();
|
||||
foreach (CcFilesQuery::create()->find() as $file)
|
||||
{
|
||||
array_push($files_array, $this->sanitizeResponse($file));
|
||||
}
|
||||
|
||||
$this->getResponse()
|
||||
->setHttpResponseCode(200)
|
||||
->appendBody(json_encode($files_array));
|
||||
|
||||
/** TODO: Use this simpler code instead after we upgrade to Propel 1.7 (Airtime 2.6.x branch):
|
||||
$this->getResponse()
|
||||
->setHttpResponseCode(200)
|
||||
->appendBody(json_encode(CcFilesQuery::create()->find()->toArray(BasePeer::TYPE_FIELDNAME)));
|
||||
*/
|
||||
}
|
||||
|
||||
public function downloadAction()
|
||||
{
|
||||
if (!$this->verifyAuth(true, true))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
$id = $this->getId();
|
||||
if (!$id) {
|
||||
return;
|
||||
}
|
||||
|
||||
$file = CcFilesQuery::create()->findPk($id);
|
||||
if ($file) {
|
||||
$con = Propel::getConnection();
|
||||
$storedFile = new Application_Model_StoredFile($file, $con);
|
||||
$baseUrl = Application_Common_OsPath::getBaseDir();
|
||||
|
||||
$this->getResponse()
|
||||
->setHttpResponseCode(200)
|
||||
->appendBody($this->_redirect($storedFile->getRelativeFileUrl($baseUrl).'/download/true'));
|
||||
} else {
|
||||
$this->fileNotFoundResponse();
|
||||
}
|
||||
}
|
||||
|
||||
public function getAction()
|
||||
{
|
||||
if (!$this->verifyAuth(true, true))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
$id = $this->getId();
|
||||
if (!$id) {
|
||||
return;
|
||||
}
|
||||
|
||||
$file = CcFilesQuery::create()->findPk($id);
|
||||
if ($file) {
|
||||
|
||||
$this->getResponse()
|
||||
->setHttpResponseCode(200)
|
||||
->appendBody(json_encode($this->sanitizeResponse($file)));
|
||||
} else {
|
||||
$this->fileNotFoundResponse();
|
||||
}
|
||||
}
|
||||
|
||||
public function postAction()
|
||||
{
|
||||
if (!$this->verifyAuth(true, true))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
//If we do get an ID on a POST, then that doesn't make any sense
|
||||
//since POST is only for creating.
|
||||
if ($id = $this->_getParam('id', false)) {
|
||||
$resp = $this->getResponse();
|
||||
$resp->setHttpResponseCode(400);
|
||||
$resp->appendBody("ERROR: ID should not be specified when using POST. POST is only used for file creation, and an ID will be chosen by Airtime");
|
||||
return;
|
||||
}
|
||||
|
||||
$file = new CcFiles();
|
||||
$file->fromArray($this->validateRequestData($this->getRequest()->getPost()));
|
||||
$file->setDbOwnerId($this->getOwnerId());
|
||||
$now = new DateTime("now", new DateTimeZone("UTC"));
|
||||
$file->setDbTrackTitle($_FILES["file"]["name"]);
|
||||
$file->setDbUtime($now);
|
||||
$file->setDbMtime($now);
|
||||
$file->setDbHidden(true);
|
||||
$file->save();
|
||||
|
||||
$callbackUrl = $this->getRequest()->getScheme() . '://' . $this->getRequest()->getHttpHost() . $this->getRequest()->getRequestUri() . "/" . $file->getPrimaryKey();
|
||||
|
||||
$this->processUploadedFile($callbackUrl, $_FILES["file"]["name"], $this->getOwnerId());
|
||||
|
||||
$this->getResponse()
|
||||
->setHttpResponseCode(201)
|
||||
->appendBody(json_encode($this->sanitizeResponse($file)));
|
||||
}
|
||||
|
||||
public function putAction()
|
||||
{
|
||||
if (!$this->verifyAuth(true, true))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
$id = $this->getId();
|
||||
if (!$id) {
|
||||
return;
|
||||
}
|
||||
|
||||
$file = CcFilesQuery::create()->findPk($id);
|
||||
if ($file)
|
||||
{
|
||||
$requestData = json_decode($this->getRequest()->getRawBody(), true);
|
||||
$file->fromArray($this->validateRequestData($requestData), BasePeer::TYPE_FIELDNAME);
|
||||
|
||||
//Our RESTful API takes "full_path" as a field, which we then split and translate to match
|
||||
//our internal schema. Internally, file path is stored relative to a directory, with the directory
|
||||
//as a foreign key to cc_music_dirs.
|
||||
if (isset($requestData["full_path"])) {
|
||||
Application_Model_Preference::updateDiskUsage(filesize($requestData["full_path"]));
|
||||
|
||||
$fullPath = $requestData["full_path"];
|
||||
$storDir = Application_Model_MusicDir::getStorDir()->getDirectory();
|
||||
$pos = strpos($fullPath, $storDir);
|
||||
|
||||
if ($pos !== FALSE)
|
||||
{
|
||||
assert($pos == 0); //Path must start with the stor directory path
|
||||
|
||||
$filePathRelativeToStor = substr($fullPath, strlen($storDir));
|
||||
$file->setDbFilepath($filePathRelativeToStor);
|
||||
$file->setDbDirectory(1); //1 corresponds to the default stor/imported directory.
|
||||
}
|
||||
}
|
||||
|
||||
$now = new DateTime("now", new DateTimeZone("UTC"));
|
||||
$file->setDbMtime($now);
|
||||
$file->save();
|
||||
$this->getResponse()
|
||||
->setHttpResponseCode(200)
|
||||
->appendBody(json_encode($this->sanitizeResponse($file)));
|
||||
} else {
|
||||
$this->fileNotFoundResponse();
|
||||
}
|
||||
}
|
||||
|
||||
public function deleteAction()
|
||||
{
|
||||
if (!$this->verifyAuth(true, true))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
$id = $this->getId();
|
||||
if (!$id) {
|
||||
return;
|
||||
}
|
||||
$file = CcFilesQuery::create()->findPk($id);
|
||||
if ($file) {
|
||||
$con = Propel::getConnection();
|
||||
$storedFile = new Application_Model_StoredFile($file, $con);
|
||||
if ($storedFile->existsOnDisk()) {
|
||||
$storedFile->delete(); //TODO: This checks your session permissions... Make it work without a session?
|
||||
}
|
||||
$file->delete();
|
||||
$this->getResponse()
|
||||
->setHttpResponseCode(204);
|
||||
} else {
|
||||
$this->fileNotFoundResponse();
|
||||
}
|
||||
}
|
||||
|
||||
private function getId()
|
||||
{
|
||||
if (!$id = $this->_getParam('id', false)) {
|
||||
$resp = $this->getResponse();
|
||||
$resp->setHttpResponseCode(400);
|
||||
$resp->appendBody("ERROR: No file ID specified.");
|
||||
return false;
|
||||
}
|
||||
return $id;
|
||||
}
|
||||
|
||||
private function verifyAuth($checkApiKey, $checkSession)
|
||||
{
|
||||
//Session takes precedence over API key for now:
|
||||
if ($checkSession && $this->verifySession())
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if ($checkApiKey && $this->verifyAPIKey())
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
$resp = $this->getResponse();
|
||||
$resp->setHttpResponseCode(401);
|
||||
$resp->appendBody("ERROR: Incorrect API key.");
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
private function verifyAPIKey()
|
||||
{
|
||||
//The API key is passed in via HTTP "basic authentication":
|
||||
// http://en.wikipedia.org/wiki/Basic_access_authentication
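        // For example (illustrative sketch only; the hostname is a placeholder), a client
        // authenticates by sending the API key as the basic-auth username with an empty
        // password:
        //   curl -u YOUR_API_KEY: http://airtime.example.com/rest/media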
|
||||
|
||||
$CC_CONFIG = Config::getConfig();
|
||||
|
||||
//Decode the API key that was passed to us in the HTTP request.
|
||||
$authHeader = $this->getRequest()->getHeader("Authorization");
|
||||
$encodedRequestApiKey = substr($authHeader, strlen("Basic "));
|
||||
$encodedStoredApiKey = base64_encode($CC_CONFIG["apiKey"][0] . ":");
|
||||
|
||||
if ($encodedRequestApiKey === $encodedStoredApiKey)
|
||||
{
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
private function verifySession()
|
||||
{
|
||||
$auth = Zend_Auth::getInstance();
|
||||
if ($auth->hasIdentity())
|
||||
{
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
|
||||
//Token checking stub code. We'd need to change LoginController.php to generate a token too, but
|
||||
//luckily all the token code already exists and works.
|
||||
//$auth = new Application_Model_Auth();
|
||||
//$auth->checkToken(Application_Model_Preference::getUserId(), $token);
|
||||
}
|
||||
|
||||
private function fileNotFoundResponse()
|
||||
{
|
||||
$resp = $this->getResponse();
|
||||
$resp->setHttpResponseCode(404);
|
||||
$resp->appendBody("ERROR: Media not found.");
|
||||
}
|
||||
|
||||
private function processUploadedFile($callbackUrl, $originalFilename, $ownerId)
|
||||
{
|
||||
$CC_CONFIG = Config::getConfig();
|
||||
$apiKey = $CC_CONFIG["apiKey"][0];
|
||||
|
||||
$upload_dir = ini_get("upload_tmp_dir") . DIRECTORY_SEPARATOR . "plupload";
|
||||
$tempFilePath = Application_Model_StoredFile::uploadFile($upload_dir);
|
||||
$tempFileName = basename($tempFilePath);
|
||||
|
||||
//TODO: Remove copyFileToStor from StoredFile...
|
||||
|
||||
//TODO: Remove uploadFileAction from ApiController.php **IMPORTANT** - It's used by the recorder daemon?
|
||||
|
||||
$upload_dir = ini_get("upload_tmp_dir") . DIRECTORY_SEPARATOR . "plupload";
|
||||
$tempFilePath = $upload_dir . "/" . $tempFileName;
|
||||
|
||||
$storDir = Application_Model_MusicDir::getStorDir();
|
||||
//$finalFullFilePath = $storDir->getDirectory() . "/imported/" . $ownerId . "/" . $originalFilename;
|
||||
$importedStorageDirectory = $storDir->getDirectory() . "/imported/" . $ownerId;
|
||||
|
||||
|
||||
try {
|
||||
//Copy the temporary file over to the "organize" folder so that it's off our webserver
|
||||
//and accessible by airtime_analyzer which could be running on a different machine.
|
||||
$newTempFilePath = Application_Model_StoredFile::copyFileToStor($tempFilePath, $originalFilename);
|
||||
} catch (Exception $e) {
|
||||
Logging::error($e->getMessage());
|
||||
}
|
||||
|
||||
//Logging::info("New temporary file path: " . $newTempFilePath);
|
||||
//Logging::info("Final file path: " . $finalFullFilePath);
|
||||
|
||||
//Dispatch a message to airtime_analyzer through RabbitMQ,
|
||||
//notifying it that there's a new upload to process!
|
||||
Application_Model_RabbitMq::SendMessageToAnalyzer($newTempFilePath,
|
||||
$importedStorageDirectory, $originalFilename,
|
||||
$callbackUrl, $apiKey);
|
||||
}
|
||||
|
||||
private function getOwnerId()
|
||||
{
|
||||
try {
|
||||
if ($this->verifySession()) {
|
||||
$service_user = new Application_Service_UserService();
|
||||
return $service_user->getCurrentUser()->getDbId();
|
||||
} else {
|
||||
$defaultOwner = CcSubjsQuery::create()
|
||||
->filterByDbType('A')
|
||||
->orderByDbId()
|
||||
->findOne();
|
||||
if (!$defaultOwner) {
|
||||
// what to do if there is no admin user?
|
||||
// should we handle this case?
|
||||
return null;
|
||||
}
|
||||
return $defaultOwner->getDbId();
|
||||
}
|
||||
} catch(Exception $e) {
|
||||
Logging::info($e->getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* Strips out fields from incoming request data that should never be modified
|
||||
* from outside of Airtime
|
||||
* @param array $data
|
||||
*/
|
||||
private function validateRequestData($data)
|
||||
{
|
||||
foreach ($this->blackList as $key) {
|
||||
unset($data[$key]);
|
||||
}
|
||||
|
||||
return $data;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* Strips out the private fields we do not want to send back in API responses
|
||||
*/
|
||||
//TODO: rename this function?
|
||||
public function sanitizeResponse($file)
|
||||
{
|
||||
$response = $file->toArray(BasePeer::TYPE_FIELDNAME);
|
||||
|
||||
foreach ($this->privateFields as $key) {
|
||||
unset($response[$key]);
|
||||
}
|
||||
|
||||
return $response;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -9,3 +9,19 @@
|
|||
<div id="plupload_error">
|
||||
<table></table>
|
||||
</div>
|
||||
|
||||
<div id="recent_uploads_wrapper" class="lib-content ui-widget ui-widget-content block-shadow alpha-block">
|
||||
<div id="recent_uploads" class="padded">
|
||||
|
||||
<div id="recent_uploads_filter">
|
||||
<form>
|
||||
<input type="radio" name="upload_status" id="upload_status_all" checked></input><label for="upload_status_all">All</label>
|
||||
<input type="radio" name="upload_status" id="upload_status_failed"></input><label for="upload_status_failed">Failed</label>
|
||||
<input type="radio" name="upload_status" id="upload_status_pending"></input><label for="upload_status_pending">Pending</label>
|
||||
</form>
|
||||
</div>
|
||||
<H2>Recent Uploads</H2>
|
||||
<table id="recent_uploads_table" class="lib-content ui-widget ui-widget-content block-shadow alpha-block "></table>
|
||||
</div>
|
||||
<div style="clear: both;"></div>
|
||||
</div>
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#Note: project.home is automatically generated by the propel-install script.
|
||||
#Any manual changes to this value will be overwritten.
|
||||
project.home = /home/asantoni/airtime/airtime_mvc
|
||||
project.home = /home/ubuntu/airtime/airtime_mvc
|
||||
project.build = ${project.home}/build
|
||||
|
||||
#Database driver
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
<column name="ftype" phpName="DbFtype" type="VARCHAR" size="128" required="true" defaultValue=""/>
|
||||
<column name="directory" phpName="DbDirectory" type="INTEGER" required="false"/>
|
||||
<column name="filepath" phpName="DbFilepath" type="LONGVARCHAR" required="false" defaultValue=""/>
|
||||
<column name="state" phpName="DbState" type="VARCHAR" size="128" required="true" defaultValue="empty"/>
|
||||
<column name="import_status" phpName="DbImportStatus" type="INTEGER" required="true" defaultValue="0"/>
|
||||
<column name="currentlyaccessing" phpName="DbCurrentlyaccessing" type="INTEGER" required="true" defaultValue="0"/>
|
||||
<column name="editedby" phpName="DbEditedby" type="INTEGER" required="false"/>
|
||||
<column name="mtime" phpName="DbMtime" type="TIMESTAMP" size="6" required="false"/>
|
||||
|
|
|
@ -36,7 +36,7 @@ CREATE TABLE "cc_files"
|
|||
"ftype" VARCHAR(128) default '' NOT NULL,
|
||||
"directory" INTEGER,
|
||||
"filepath" TEXT default '',
|
||||
"state" VARCHAR(128) default 'empty' NOT NULL,
|
||||
"import_status" INTEGER default 0 NOT NULL,
|
||||
"currentlyaccessing" INTEGER default 0 NOT NULL,
|
||||
"editedby" INTEGER,
|
||||
"mtime" TIMESTAMP(6),
|
||||
|
|
|
@ -49,6 +49,29 @@ $application = new Zend_Application(
|
|||
APPLICATION_ENV,
|
||||
$_SERVER["AIRTIME_APPINI"]
|
||||
);
|
||||
$application->bootstrap()
|
||||
->run();
|
||||
|
||||
require_once (APPLICATION_PATH."/logging/Logging.php");
|
||||
Logging::setLogPath('/var/log/airtime/zendphp.log');
|
||||
|
||||
// Create application, bootstrap, and run
|
||||
try {
|
||||
$sapi_type = php_sapi_name();
|
||||
if (substr($sapi_type, 0, 3) == 'cli') {
|
||||
set_include_path(APPLICATION_PATH . PATH_SEPARATOR . get_include_path());
|
||||
require_once("Bootstrap.php");
|
||||
} else {
|
||||
$application->bootstrap()->run();
|
||||
}
|
||||
} catch (Exception $e) {
|
||||
echo $e->getMessage();
|
||||
echo "<pre>";
|
||||
echo $e->getTraceAsString();
|
||||
echo "</pre>";
|
||||
Logging::info($e->getMessage());
|
||||
if (VERBOSE_STACK_TRACE) {
|
||||
Logging::info($e->getTraceAsString());
|
||||
} else {
|
||||
Logging::info($e->getTrace());
|
||||
}
|
||||
throw $e;
|
||||
}
|
||||
|
|
|
@ -1,12 +1,24 @@
|
|||
$(document).ready(function() {
|
||||
|
||||
var uploader;
|
||||
var self = this;
|
||||
self.uploadFilter = "all";
|
||||
|
||||
self.IMPORT_STATUS_CODES = {
|
||||
0 : { message: $.i18n._("Successfully imported")},
|
||||
1 : { message: $.i18n._("Pending import")},
|
||||
2 : { message: $.i18n._("Import failed.")},
|
||||
UNKNOWN : { message: $.i18n._("Unknown")}
|
||||
};
|
||||
if (Object.freeze) {
|
||||
Object.freeze(self.IMPORT_STATUS_CODES);
|
||||
}
|
||||
|
||||
$("#plupload_files").pluploadQueue({
|
||||
// General settings
|
||||
runtimes : 'gears, html5, html4',
|
||||
url : baseUrl+'Plupload/upload/format/json',
|
||||
chunk_size : '5mb',
|
||||
url : baseUrl+'rest/media',
|
||||
//chunk_size : '5mb', //Disabling chunking since we're using the File Upload REST API now
|
||||
unique_names : 'true',
|
||||
multiple_queues : 'true',
|
||||
filters : [
|
||||
|
@ -16,37 +28,17 @@ $(document).ready(function() {
|
|||
|
||||
uploader = $("#plupload_files").pluploadQueue();
|
||||
|
||||
uploader.bind('FileUploaded', function(up, file, json) {
|
||||
var j = jQuery.parseJSON(json.response);
|
||||
|
||||
if(j.error !== undefined) {
|
||||
var row = $("<tr/>")
|
||||
.append('<td>' + file.name +'</td>')
|
||||
.append('<td>' + j.error.message + '</td>');
|
||||
|
||||
$("#plupload_error").find("table").append(row);
|
||||
$("#plupload_error table").css("display", "inline-table");
|
||||
}else{
|
||||
var tempFileName = j.tempfilepath;
|
||||
$.get(baseUrl+'Plupload/copyfile/format/json/name/'+
|
||||
encodeURIComponent(file.name)+'/tempname/' +
|
||||
encodeURIComponent(tempFileName), function(jr){
|
||||
if(jr.error !== undefined) {
|
||||
var row = $("<tr/>")
|
||||
.append('<td>' + file.name +'</td>')
|
||||
.append('<td>' + jr.error.message + '</td>');
|
||||
|
||||
$("#plupload_error").find("table").append(row);
|
||||
$("#plupload_error table").css("display", "inline-table");
|
||||
}
|
||||
});
|
||||
}
|
||||
uploader.bind('FileUploaded', function(up, file, json)
|
||||
{
|
||||
//Refresh the upload table:
|
||||
self.recentUploadsTable.fnDraw(); //Only works because we're using bServerSide
|
||||
//In DataTables 1.10 and greater, we can use ajax.reload()
|
||||
});
|
||||
|
||||
var uploadProgress = false;
|
||||
|
||||
uploader.bind('QueueChanged', function(){
|
||||
uploadProgress = (uploader.files.length > 0)
|
||||
uploadProgress = (uploader.files.length > 0);
|
||||
});
|
||||
|
||||
uploader.bind('UploadComplete', function(){
|
||||
|
@ -59,5 +51,140 @@ $(document).ready(function() {
|
|||
"\n", "\n");
|
||||
}
|
||||
});
|
||||
|
||||
self.renderImportStatus = function ( data, type, full ) {
|
||||
if (typeof data !== "number") {
|
||||
console.log("Invalid data type for the import_status.");
|
||||
return;
|
||||
}
|
||||
var statusStr = self.IMPORT_STATUS_CODES.UNKNOWN.message;
|
||||
var importStatusCode = data;
|
||||
if (self.IMPORT_STATUS_CODES[importStatusCode]) {
|
||||
statusStr = self.IMPORT_STATUS_CODES[importStatusCode].message;
|
||||
};
|
||||
|
||||
return statusStr;
|
||||
};
|
||||
|
||||
self.renderFileActions = function ( data, type, full ) {
|
||||
if (full.import_status == 0) {
|
||||
return '<a class="deleteFileAction">' + $.i18n._('Delete from Library') + '</a>';
|
||||
} else if (full.import_status == 1) {
|
||||
//No actions for pending files
|
||||
return $.i18n._('N/A');
|
||||
} else { //Failed downloads
|
||||
return '<a class="deleteFileAction">' + $.i18n._('Clear') + '</a>';
|
||||
}
|
||||
};
|
||||
|
||||
$("#recent_uploads_table").on("click", "a.deleteFileAction", function () {
|
||||
//Grab the file object for the row that was clicked.
|
||||
// Some tips from the DataTables forums:
|
||||
// fnGetData is used to get the object behind the row - you can also use
|
||||
// fnGetPosition if you need to get the index instead
|
||||
file = $("#recent_uploads_table").dataTable().fnGetData($(this).closest("tr")[0]);
|
||||
|
||||
$.ajax({
|
||||
type: 'DELETE',
|
||||
url: '/rest/media/' + file.id,
|
||||
success: function(resp) {
|
||||
self.recentUploadsTable.fnDraw();
|
||||
},
|
||||
error: function() {
|
||||
alert($.i18n._("Error: The file could not be deleted. Please try again later."));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
self.setupRecentUploadsTable = function() {
|
||||
var recentUploadsTable = $("#recent_uploads_table").dataTable({
|
||||
"bJQueryUI": true,
|
||||
"bProcessing": false,
|
||||
"bServerSide": true,
|
||||
"sAjaxSource": '/Plupload/recent-uploads/format/json',
|
||||
"sAjaxDataProp": 'files',
|
||||
"bSearchable": false,
|
||||
"bInfo": true,
|
||||
//"sScrollY": "200px",
|
||||
"bFilter": false,
|
||||
"bSort": false,
|
||||
"sDom": '<"H"l>frtip',
|
||||
"bPaginate" : true,
|
||||
"sPaginationType": "full_numbers",
|
||||
"aoColumns": [
|
||||
{ "mData" : "artist_name", "sTitle" : $.i18n._("Creator") },
|
||||
{ "mData" : "track_title", "sTitle" : $.i18n._("Title") },
|
||||
{ "mData" : "import_status", "sTitle" : $.i18n._("Import Status"),
|
||||
"mRender": self.renderImportStatus
|
||||
},
|
||||
{ "mData" : "utime", "sTitle" : $.i18n._("Uploaded") },
|
||||
{ "mData" : "id", "sTitle" : $.i18n._("Actions"),
|
||||
"mRender": self.renderFileActions
|
||||
}
|
||||
],
|
||||
"fnServerData": function ( sSource, aoData, fnCallback ) {
|
||||
/* Add some extra data to the sender */
|
||||
aoData.push( { "name": "uploadFilter", "value": self.uploadFilter } );
|
||||
$.getJSON( sSource, aoData, function (json) {
|
||||
fnCallback(json);
|
||||
if (json.files) {
|
||||
var areAnyFileImportsPending = false;
|
||||
for (var i = 0; i < json.files.length; i++) {
|
||||
//console.log(file);
|
||||
var file = json.files[i];
|
||||
if (file.import_status == 1)
|
||||
{
|
||||
areAnyFileImportsPending = true;
|
||||
}
|
||||
}
|
||||
if (areAnyFileImportsPending) {
|
||||
//alert("pending uploads, starting refresh on timer");
|
||||
self.startRefreshingRecentUploads();
|
||||
} else {
|
||||
self.stopRefreshingRecentUploads();
|
||||
}
|
||||
}
|
||||
} );
|
||||
}
|
||||
});
|
||||
|
||||
return recentUploadsTable;
|
||||
};
|
||||
|
||||
self.startRefreshingRecentUploads = function()
|
||||
{
|
||||
if (self.isRecentUploadsRefreshTimerActive()) { //Prevent multiple timers from running
|
||||
return;
|
||||
}
|
||||
self.recentUploadsRefreshTimer = setInterval(function() { self.recentUploadsTable.fnDraw(); }, 3000);
|
||||
};
|
||||
|
||||
self.isRecentUploadsRefreshTimerActive = function()
|
||||
{
|
||||
return (self.recentUploadsRefreshTimer != null);
|
||||
};
|
||||
|
||||
self.stopRefreshingRecentUploads = function()
|
||||
{
|
||||
clearInterval(self.recentUploadsRefreshTimer);
|
||||
self.recentUploadsRefreshTimer = null;
|
||||
};
|
||||
|
||||
$("#upload_status_all").click(function() {
|
||||
self.uploadFilter = "all";
|
||||
self.recentUploadsTable.fnDraw();
|
||||
});
|
||||
$("#upload_status_pending").click(function() {
|
||||
self.uploadFilter = "pending";
|
||||
self.recentUploadsTable.fnDraw();
|
||||
});
|
||||
$("#upload_status_failed").click(function() {
|
||||
self.uploadFilter = "failed";
|
||||
self.recentUploadsTable.fnDraw();
|
||||
});
|
||||
|
||||
//Create the recent uploads table.
|
||||
self.recentUploadsTable = self.setupRecentUploadsTable();
|
||||
|
||||
//$("#recent_uploads_table.div.fg-toolbar").prepend('<b>Custom tool bar! Text/images etc.</b>');
|
||||
});
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
<?php
|
||||
|
||||
/* All functions other than start() should be marked as
|
||||
* private.
|
||||
*/
|
||||
class AirtimeDatabaseUpgrade{
|
||||
|
||||
public static function start($p_dbValues){
|
||||
echo "* Updating Database".PHP_EOL;
|
||||
self::task0($p_dbValues);
|
||||
echo " * Complete".PHP_EOL;
|
||||
}
|
||||
|
||||
private static function task0($p_dbValues){
|
||||
|
||||
$username = $p_dbValues['database']['dbuser'];
|
||||
$password = $p_dbValues['database']['dbpass'];
|
||||
$host = $p_dbValues['database']['host'];
|
||||
$database = $p_dbValues['database']['dbname'];
|
||||
$dir = __DIR__;
|
||||
|
||||
passthru("export PGPASSWORD=$password && psql -h $host -U $username -q -f $dir/data/upgrade.sql $database 2>&1 | grep -v \"will create implicit index\"");
|
||||
}
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
<?php
|
||||
|
||||
require_once 'DbUpgrade.php';
|
||||
|
||||
$filename = "/etc/airtime/airtime.conf";
|
||||
$values = parse_ini_file($filename, true);
|
||||
|
||||
AirtimeDatabaseUpgrade::start($values);
|
|
@ -0,0 +1,6 @@
|
|||
DELETE FROM cc_pref WHERE keystr = 'system_version';
|
||||
INSERT INTO cc_pref (keystr, valstr) VALUES ('system_version', '2.5.3');
|
||||
|
||||
ALTER TABLE cc_files DROP COLUMN state;
|
||||
ALTER TABLE cc_files ADD import_status integer default 1; -- Default is "pending"
|
||||
UPDATE cc_files SET import_status=0; -- Existing files are already "imported"
|
|
@ -0,0 +1 @@
|
|||
include README.rst
|
|
@ -0,0 +1,73 @@
|
|||
|
||||
Ghetto temporary installation instructions
|
||||
=============================================
|
||||
|
||||
$ sudo python setup.py install
|
||||
|
||||
You will need to allow the "airtime" RabbitMQ user to access all exchanges and queues within the /airtime vhost:
|
||||
|
||||
sudo rabbitmqctl set_permissions -p /airtime airtime .\* .\* .\*
|
||||
|
||||
|
||||
Usage
|
||||
==========
|
||||
|
||||
This program must run as a user with permissions to write to your Airtime music library
|
||||
directory. For standard Airtime installations, run it as the www-data user:
|
||||
|
||||
$ sudo -u www-data airtime_analyzer --daemon
|
||||
|
||||
Or during development, add the --debug flag for more verbose output:
|
||||
|
||||
$ sudo -u www-data airtime_analyzer --debug
|
||||
|
||||
To print usage instructions, run:
|
||||
|
||||
$ airtime_analyzer --help
|
||||
|
||||
This application can be run as a daemon by running:
|
||||
|
||||
$ airtime_analyzer -d
|
||||
|
||||
|
||||
|
||||
Developers
|
||||
==========
|
||||
|
||||
For development, you want to install airtime_analyzer system-wide but with everything symlinked back to the source
|
||||
directory for convenience. This is super easy to do, just run:
|
||||
|
||||
$ sudo python setup.py develop
|
||||
|
||||
To send a test message to airtime_analyzer, you can use the message_sender.php script in the tools directory.
|
||||
For example, run:
|
||||
|
||||
$ php tools/message_sender.php '{ "tmp_file_path" : "foo.mp3", "final_directory" : ".", "callback_url" : "http://airtime.localhost/rest/media/1", "api_key" : "YOUR_API_KEY" }'
|
||||
|
||||
$ php tools/message_sender.php '{"tmp_file_path":"foo.mp3", "import_directory":"/srv/airtime/stor/imported/1","original_filename":"foo.mp3","callback_url": "http://airtime.localhost/rest/media/1", "api_key":"YOUR_API_KEY"}'
|
||||
|
||||
Logging
|
||||
=========
|
||||
|
||||
By default, logs are saved to:
|
||||
|
||||
/var/log/airtime/airtime_analyzer.log
|
||||
|
||||
This application takes care of rotating logs for you.
|
||||
|
||||
|
||||
Unit Tests
|
||||
==========
|
||||
|
||||
To run the unit tests, execute:
|
||||
|
||||
$ nosetests
|
||||
|
||||
If you care about seeing console output (stdout), like when you're debugging or developing
|
||||
a test, run:
|
||||
|
||||
$ nosetests -s
|
||||
|
||||
To run the unit tests and generate a code coverage report, run:
|
||||
|
||||
$ nosetests --with-coverage --cover-package=airtime_analyzer
|
|
@ -0,0 +1 @@
|
|||
|
|
@ -0,0 +1,79 @@
|
|||
import ConfigParser
|
||||
import logging
|
||||
import logging.handlers
|
||||
import sys
|
||||
from metadata_analyzer import MetadataAnalyzer
|
||||
from replaygain_analyzer import ReplayGainAnalyzer
|
||||
from message_listener import MessageListener
|
||||
|
||||
|
||||
class AirtimeAnalyzerServer:
|
||||
|
||||
# Constants
|
||||
_CONFIG_PATH = '/etc/airtime/airtime.conf'
|
||||
_LOG_PATH = "/var/log/airtime/airtime_analyzer.log"
|
||||
|
||||
# Variables
|
||||
_log_level = logging.INFO
|
||||
|
||||
def __init__(self, debug=False):
|
||||
|
||||
# Configure logging
|
||||
self.setup_logging(debug)
|
||||
|
||||
# Read our config file
|
||||
rabbitmq_config = self.read_config_file()
|
||||
|
||||
# Start listening for RabbitMQ messages telling us about newly
|
||||
# uploaded files.
|
||||
self._msg_listener = MessageListener(rabbitmq_config)
|
||||
|
||||
|
||||
def setup_logging(self, debug):
|
||||
|
||||
if debug:
|
||||
self._log_level = logging.DEBUG
|
||||
else:
|
||||
#Disable most pika/rabbitmq logging:
|
||||
pika_logger = logging.getLogger('pika')
|
||||
pika_logger.setLevel(logging.CRITICAL)
|
||||
|
||||
#self.log = logging.getLogger(__name__)
|
||||
|
||||
# Set up logging
|
||||
logFormatter = logging.Formatter("%(asctime)s [%(module)s] [%(levelname)-5.5s] %(message)s")
|
||||
rootLogger = logging.getLogger()
|
||||
rootLogger.setLevel(self._log_level)
|
||||
|
||||
fileHandler = logging.handlers.RotatingFileHandler(filename=self._LOG_PATH, maxBytes=1024*1024*30,
|
||||
backupCount=8)
|
||||
fileHandler.setFormatter(logFormatter)
|
||||
rootLogger.addHandler(fileHandler)
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(logFormatter)
|
||||
rootLogger.addHandler(consoleHandler)
|
||||
|
||||
|
||||
def read_config_file(self):
|
||||
config = ConfigParser.SafeConfigParser()
|
||||
config_path = AirtimeAnalyzerServer._CONFIG_PATH
|
||||
try:
|
||||
config.readfp(open(config_path))
|
||||
except IOError as e:
|
||||
print "Failed to open config file at " + config_path + ": " + e.strerror
|
||||
exit(-1)
|
||||
except Exception as e:
|
||||
print "Failed to read config file at " + config_path + ": " + str(e)
|
||||
exit(-1)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
''' When being run from the command line, analyze a file passed
|
||||
as an argument. '''
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
from analyzer_pipeline import AnalyzerPipeline
analyzers = AnalyzerPipeline()
|
||||
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
|
||||
class Analyzer:
|
||||
|
||||
@staticmethod
|
||||
def analyze(filename, metadata):
|
||||
raise NotImplementedError
|
||||
|
||||
'''
|
||||
class AnalyzerError(Exception):
|
||||
def __init__(self):
|
||||
super.__init__(self)
|
||||
'''
|
|
@ -0,0 +1,37 @@
|
|||
import logging
|
||||
import multiprocessing
|
||||
from metadata_analyzer import MetadataAnalyzer
|
||||
from filemover_analyzer import FileMoverAnalyzer
|
||||
|
||||
class AnalyzerPipeline:
|
||||
|
||||
# Take message dictionary and perform the necessary analysis.
|
||||
@staticmethod
|
||||
def run_analysis(queue, audio_file_path, import_directory, original_filename):
|
||||
|
||||
if not isinstance(queue, multiprocessing.queues.Queue):
|
||||
raise TypeError("queue must be a multiprocessing.Queue()")
|
||||
if not isinstance(audio_file_path, unicode):
|
||||
raise TypeError("audio_file_path must be unicode. Was of type " + type(audio_file_path).__name__ + " instead.")
|
||||
if not isinstance(import_directory, unicode):
|
||||
raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__ + " instead.")
|
||||
if not isinstance(original_filename, unicode):
|
||||
raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__ + " instead.")
|
||||
|
||||
#print ReplayGainAnalyzer.analyze("foo.mp3")
|
||||
|
||||
# Analyze the audio file we were told to analyze:
|
||||
# First, we extract the ID3 tags and other metadata:
|
||||
metadata = dict()
|
||||
metadata = MetadataAnalyzer.analyze(audio_file_path, metadata)
|
||||
metadata = FileMoverAnalyzer.move(audio_file_path, import_directory, original_filename, metadata)
|
||||
metadata["import_status"] = 0 # imported
|
||||
|
||||
# Note that the queue we're putting the results into is our interprocess communication
|
||||
# back to the main process.
|
||||
|
||||
#Pass all the file metadata back to the main analyzer process, which then passes
|
||||
#it back to the Airtime web application.
|
||||
queue.put(metadata)
|
||||
|
||||
|
|
@ -0,0 +1,79 @@
|
|||
import logging
|
||||
import os
|
||||
import time
|
||||
import shutil
|
||||
import os, errno
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from analyzer import Analyzer
|
||||
|
||||
class FileMoverAnalyzer(Analyzer):
|
||||
|
||||
@staticmethod
|
||||
def analyze(audio_file_path, metadata):
|
||||
raise Exception("Use FileMoverAnalyzer.move() instead.")
|
||||
|
||||
@staticmethod
|
||||
def move(audio_file_path, import_directory, original_filename, metadata):
|
||||
if not isinstance(audio_file_path, unicode):
|
||||
raise TypeError("audio_file_path must be unicode. Was of type " + type(audio_file_path).__name__)
|
||||
if not isinstance(import_directory, unicode):
|
||||
raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__)
|
||||
if not isinstance(original_filename, unicode):
|
||||
raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__)
|
||||
if not isinstance(metadata, dict):
|
||||
raise TypeError("metadata must be a dict. Was of type " + type(metadata).__name__)
|
||||
|
||||
#Import the file over to its final location.
|
||||
# TODO: Also, handle the case where the move fails and write some code
|
||||
# to possibly move the file to problem_files.
|
||||
|
||||
final_file_path = import_directory
|
||||
if metadata.has_key("artist_name"):
|
||||
final_file_path += "/" + metadata["artist_name"]
|
||||
if metadata.has_key("album_title"):
|
||||
final_file_path += "/" + metadata["album_title"]
|
||||
final_file_path += "/" + original_filename
|
||||
|
||||
#Ensure any redundant slashes are stripped
|
||||
final_file_path = os.path.normpath(final_file_path)
|
||||
|
||||
#If a file with the same name already exists in the "import" directory, then
|
||||
#we add a unique string to the end of this one. We never overwrite a file on import
|
||||
#because if we did that, it would mean Airtime's database would have
|
||||
#the wrong information for the file we just overwrote (eg. the song length would be wrong!)
|
||||
#If the final file path is the same as the file we've been told to import (which
|
||||
#you often do when you're debugging), then don't move the file at all.
|
||||
if os.path.exists(final_file_path):
|
||||
if os.path.samefile(audio_file_path, final_file_path):
|
||||
metadata["full_path"] = final_file_path
|
||||
return metadata
|
||||
base_file_path, file_extension = os.path.splitext(final_file_path)
|
||||
final_file_path = "%s_%s%s" % (base_file_path, time.strftime("%m-%d-%Y-%H-%M-%S", time.localtime()), file_extension)
|
||||
|
||||
#If THAT path exists, append a UUID instead:
|
||||
while os.path.exists(final_file_path):
|
||||
base_file_path, file_extension = os.path.splitext(final_file_path)
|
||||
final_file_path = "%s_%s%s" % (base_file_path, str(uuid.uuid4()), file_extension)
|
||||
|
||||
#Ensure the full path to the file exists
|
||||
mkdir_p(os.path.dirname(final_file_path))
|
||||
|
||||
#Move the file into its final destination directory
|
||||
logging.debug("Moving %s to %s" % (audio_file_path, final_file_path))
|
||||
shutil.move(audio_file_path, final_file_path)
|
||||
|
||||
metadata["full_path"] = final_file_path
|
||||
return metadata
|
||||
|
||||
def mkdir_p(path):
|
||||
if path == "":
|
||||
return
|
||||
try:
|
||||
os.makedirs(path)
|
||||
except OSError as exc: # Python >2.5
|
||||
if exc.errno == errno.EEXIST and os.path.isdir(path):
|
||||
pass
|
||||
else: raise
|
||||
|
|
@ -0,0 +1,144 @@
|
|||
import sys
|
||||
import pika
|
||||
import json
|
||||
import time
|
||||
import logging
|
||||
import multiprocessing
|
||||
from analyzer_pipeline import AnalyzerPipeline
|
||||
from status_reporter import StatusReporter
|
||||
|
||||
EXCHANGE = "airtime-uploads"
|
||||
EXCHANGE_TYPE = "topic"
|
||||
ROUTING_KEY = "" #"airtime.analyzer.tasks"
|
||||
QUEUE = "airtime-uploads"
|
||||
|
||||
|
||||
''' TODO: Document me
|
||||
- round robin messaging
|
||||
- acking
|
||||
- why we use the multiprocess architecture
|
||||
- in general, how it works and why it works this way
|
||||
'''
|
||||
class MessageListener:
|
||||
|
||||
def __init__(self, config):
|
||||
|
||||
# Read the RabbitMQ connection settings from the config file
|
||||
# The exceptions thrown here by default give good error messages.
|
||||
RMQ_CONFIG_SECTION = "rabbitmq"
|
||||
self._host = config.get(RMQ_CONFIG_SECTION, 'host')
|
||||
self._port = config.getint(RMQ_CONFIG_SECTION, 'port')
|
||||
self._username = config.get(RMQ_CONFIG_SECTION, 'user')
|
||||
self._password = config.get(RMQ_CONFIG_SECTION, 'password')
|
||||
self._vhost = config.get(RMQ_CONFIG_SECTION, 'vhost')
|
||||
|
||||
while True:
|
||||
try:
|
||||
self.connect_to_messaging_server()
|
||||
self.wait_for_messages()
|
||||
except KeyboardInterrupt:
|
||||
self.disconnect_from_messaging_server()
|
||||
break
|
||||
except pika.exceptions.AMQPError as e:
|
||||
logging.error("Connection to message queue failed. ")
|
||||
logging.error(e)
|
||||
logging.info("Retrying in 5 seconds...")
|
||||
time.sleep(5)
|
||||
|
||||
self._connection.close()
|
||||
|
||||
|
||||
def connect_to_messaging_server(self):
|
||||
|
||||
self._connection = pika.BlockingConnection(pika.ConnectionParameters(host=self._host,
|
||||
port=self._port, virtual_host=self._vhost,
|
||||
credentials=pika.credentials.PlainCredentials(self._username, self._password)))
|
||||
self._channel = self._connection.channel()
|
||||
self._channel.exchange_declare(exchange=EXCHANGE, type=EXCHANGE_TYPE, durable=True)
|
||||
result = self._channel.queue_declare(queue=QUEUE, durable=True)
|
||||
|
||||
self._channel.queue_bind(exchange=EXCHANGE, queue=QUEUE, routing_key=ROUTING_KEY)
|
||||
|
||||
logging.info(" Listening for messages...")
|
||||
self._channel.basic_consume(MessageListener.msg_received_callback,
|
||||
queue=QUEUE, no_ack=False)
|
||||
|
||||
def wait_for_messages(self):
|
||||
self._channel.start_consuming()
|
||||
|
||||
def disconnect_from_messaging_server(self):
|
||||
self._channel.stop_consuming()
|
||||
|
||||
|
||||
# consume callback function
|
||||
@staticmethod
|
||||
def msg_received_callback(channel, method_frame, header_frame, body):
|
||||
logging.info(" - Received '%s' on routing_key '%s'" % (body, method_frame.routing_key))
|
||||
|
||||
# Spin up a worker process. We use the multiprocessing module and multiprocessing.Queue
|
||||
# to pass objects between the processes so that if the analyzer process crashes, it does not
|
||||
# take down the rest of the daemon and we NACK that message so that it doesn't get
|
||||
# propagated to other airtime_analyzer daemons (eg. running on other servers).
|
||||
# We avoid cascading failure this way.
|
||||
try:
|
||||
msg_dict = json.loads(body)
|
||||
audio_file_path = msg_dict["tmp_file_path"]
|
||||
#final_file_path = msg_dict["final_file_path"]
|
||||
import_directory = msg_dict["import_directory"]
|
||||
original_filename = msg_dict["original_filename"]
|
||||
callback_url = msg_dict["callback_url"]
|
||||
api_key = msg_dict["api_key"]
|
||||
|
||||
audio_metadata = MessageListener.spawn_analyzer_process(audio_file_path, import_directory, original_filename)
|
||||
StatusReporter.report_success_to_callback_url(callback_url, api_key, audio_metadata)
|
||||
|
||||
except KeyError as e:
|
||||
# A field in msg_dict that we needed was missing (eg. audio_file_path)
|
||||
logging.exception("A mandatory airtime_analyzer message field was missing from the message.")
|
||||
# See the huge comment about NACK below.
|
||||
channel.basic_nack(delivery_tag=method_frame.delivery_tag, multiple=False,
|
||||
requeue=False) #Important that it doesn't requeue the message
|
||||
|
||||
except Exception as e:
|
||||
logging.exception(e)
|
||||
#If ANY exception happens while processing a file, we're going to NACK to the
|
||||
#messaging server and tell it to remove the message from the queue.
|
||||
#(NACK is a negative acknowledgement. We could use ACK instead, but this might come
|
||||
# in handy in the future.)
|
||||
#Exceptions in this context are unexpected, unhandled errors. We try to recover
|
||||
#from as many errors as possible in AnalyzerPipeline, but we're safeguarding ourselves
|
||||
#here from any catastrophic or genuinely unexpected errors:
|
||||
channel.basic_nack(delivery_tag=method_frame.delivery_tag, multiple=False,
|
||||
requeue=False) #Important that it doesn't requeue the message
|
||||
|
||||
# TODO: Report this as a failed upload to the File Upload REST API.
|
||||
#
|
||||
# TODO: If the JSON was invalid or the web server is down,
|
||||
# then don't report that failure to the REST API
|
||||
#TODO: Catch exceptions from this HTTP request too:
|
||||
StatusReporter.report_failure_to_callback_url(callback_url, api_key, import_status=2,
|
||||
reason=u'An error occurred while importing this file')
|
||||
|
||||
|
||||
else:
|
||||
# ACK at the very end, after the message has been successfully processed.
|
||||
# If we don't ack, then RabbitMQ will redeliver the message in the future.
|
||||
channel.basic_ack(delivery_tag=method_frame.delivery_tag)
|
||||
|
||||
@staticmethod
|
||||
def spawn_analyzer_process(audio_file_path, import_directory, original_filename):
|
||||
|
||||
q = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(target=AnalyzerPipeline.run_analysis,
|
||||
args=(q, audio_file_path, import_directory, original_filename))
|
||||
p.start()
|
||||
p.join()
|
||||
if p.exitcode == 0:
|
||||
results = q.get()
|
||||
logging.info("Main process received results from child: ")
|
||||
logging.info(results)
|
||||
else:
|
||||
raise Exception("Analyzer process terminated unexpectedly.")
|
||||
|
||||
return results
|
||||
|
|
@ -0,0 +1,142 @@
|
|||
import time
|
||||
import datetime
|
||||
import mutagen
|
||||
import magic # For MIME type detection
|
||||
from analyzer import Analyzer
|
||||
|
||||
class MetadataAnalyzer(Analyzer):
|
||||
|
||||
@staticmethod
|
||||
def analyze(filename, metadata):
|
||||
if not isinstance(filename, unicode):
|
||||
raise TypeError("filename must be unicode. Was of type " + type(filename).__name__)
|
||||
if not isinstance(metadata, dict):
|
||||
raise TypeError("metadata must be a dict. Was of type " + type(metadata).__name__)
|
||||
|
||||
#Extract metadata from an audio file using mutagen
|
||||
audio_file = mutagen.File(filename, easy=True)
|
||||
|
||||
#Grab other file information that isn't encoded in a tag, but instead usually
|
||||
#in the file header. Mutagen breaks that out into a separate "info" object:
|
||||
info = audio_file.info
|
||||
metadata["sample_rate"] = info.sample_rate
|
||||
metadata["length_seconds"] = info.length
|
||||
#Converting the length in seconds (float) to a formatted time string
|
||||
track_length = datetime.timedelta(seconds=info.length)
|
||||
metadata["length"] = str(track_length) #time.strftime("%H:%M:%S.%f", track_length)
|
||||
metadata["bit_rate"] = info.bitrate
|
||||
#metadata["channels"] = info.channels
|
||||
|
||||
#Use the python-magic module to get the MIME type.
|
||||
mime_magic = magic.Magic(mime=True)
|
||||
metadata["mime"] = mime_magic.from_file(filename)
|
||||
|
||||
if isinstance(info, mutagen.mp3.MPEGInfo):
|
||||
print "mode is: " + str(info.mode)
|
||||
|
||||
#Try to get the number of channels if mutagen can...
|
||||
try:
|
||||
#Special handling for getting the # of channels from MP3s. It's in the "mode" field
|
||||
#which is 0=Stereo, 1=Joint Stereo, 2=Dual Channel, 3=Mono. Part of the ID3 spec...
|
||||
if metadata["mime"] == "audio/mpeg":
|
||||
if info.mode == 3:
|
||||
metadata["channels"] = 1
|
||||
else:
|
||||
metadata["channels"] = 2
|
||||
else:
|
||||
metadata["channels"] = info.channels
|
||||
except (AttributeError, KeyError):
|
||||
#If mutagen can't figure out the number of channels, we'll just leave it out...
|
||||
pass
|
||||
|
||||
#Try to extract the number of tracks on the album if we can (the "track total")
|
||||
try:
|
||||
track_number = audio_file["tracknumber"]
|
||||
if isinstance(track_number, list): # Sometimes tracknumber is a list, ugh
|
||||
track_number = track_number[0]
|
||||
track_number_tokens = track_number.split(u'/')
|
||||
track_number = track_number_tokens[0]
|
||||
metadata["track_number"] = track_number
|
||||
track_total = track_number_tokens[1]
|
||||
metadata["track_total"] = track_total
|
||||
except (AttributeError, KeyError, IndexError):
|
||||
#If we couldn't figure out the track_number or track_total, just ignore it...
|
||||
pass
|
||||
|
||||
#We normalize the mutagen tags slightly here, so in case mutagen changes,
|
||||
#we only have to update this mapping.
|
||||
mutagen_to_airtime_mapping = {
|
||||
'title': 'track_title',
|
||||
'artist': 'artist_name',
|
||||
'album': 'album_title',
|
||||
'bpm': 'bpm',
|
||||
'composer': 'composer',
|
||||
'conductor': 'conductor',
|
||||
'copyright': 'copyright',
|
||||
'comment': 'comment',
|
||||
'encoded_by': 'encoder',
|
||||
'genre': 'genre',
|
||||
'isrc': 'isrc',
|
||||
'label': 'label',
|
||||
'length': 'length',
|
||||
'language': 'language',
|
||||
'last_modified':'last_modified',
|
||||
'mood': 'mood',
|
||||
'replay_gain': 'replaygain',
|
||||
#'tracknumber': 'track_number',
|
||||
#'track_total': 'track_total',
|
||||
'website': 'website',
|
||||
'date': 'year',
|
||||
#'mime_type': 'mime',
|
||||
}
|
||||
|
||||
for mutagen_tag, airtime_tag in mutagen_to_airtime_mapping.iteritems():
|
||||
try:
|
||||
metadata[airtime_tag] = audio_file[mutagen_tag]
|
||||
|
||||
# Some tags are returned as lists because there could be multiple values.
|
||||
# This is unusual so we're going to always just take the first item in the list.
|
||||
if isinstance(metadata[airtime_tag], list):
|
||||
metadata[airtime_tag] = metadata[airtime_tag][0]
|
||||
|
||||
except KeyError:
|
||||
continue
|
||||
|
||||
#Airtime <= 2.5.x nonsense:
|
||||
metadata["ftype"] = "audioclip"
|
||||
#Other fields we'll want to set for Airtime:
|
||||
metadata["cueout"] = metadata["length"]
|
||||
metadata["hidden"] = False
|
||||
|
||||
return metadata
|
||||
|
||||
|
||||
|
||||
'''
|
||||
For reference, the Airtime metadata fields are:
|
||||
title
|
||||
artist ("Creator" in Airtime)
|
||||
album
|
||||
bit rate
|
||||
BPM
|
||||
composer
|
||||
conductor
|
||||
copyright
|
||||
cue in
|
||||
cue out
|
||||
encoded by
|
||||
genre
|
||||
ISRC
|
||||
label
|
||||
language
|
||||
last modified
|
||||
length
|
||||
mime
|
||||
mood
|
||||
owner
|
||||
replay gain
|
||||
sample rate
|
||||
track number
|
||||
website
|
||||
year
|
||||
'''
|
|
@ -0,0 +1,12 @@
|
|||
from analyzer import Analyzer
|
||||
|
||||
''' TODO: everything '''
|
||||
class ReplayGainAnalyzer(Analyzer):
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def analyze(filename):
|
||||
pass
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
import requests
|
||||
import json
|
||||
import logging
|
||||
|
||||
class StatusReporter():
|
||||
|
||||
_HTTP_REQUEST_TIMEOUT = 30
|
||||
|
||||
# Report the extracted metadata and status of the successfully imported file
|
||||
# to the callback URL (which should be the Airtime File Upload API)
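    # (e.g. a URL like http://airtime.localhost/rest/media/1, as used in the README examples)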
|
||||
@classmethod
|
||||
def report_success_to_callback_url(self, callback_url, api_key, audio_metadata):
|
||||
|
||||
# encode the audio metadata as json and post it back to the callback_url
|
||||
put_payload = json.dumps(audio_metadata)
|
||||
logging.debug("sending http put with payload: " + put_payload)
|
||||
r = requests.put(callback_url, data=put_payload,
|
||||
auth=requests.auth.HTTPBasicAuth(api_key, ''),
|
||||
timeout=StatusReporter._HTTP_REQUEST_TIMEOUT)
|
||||
logging.debug("HTTP request returned status: " + str(r.status_code))
|
||||
logging.debug(r.text) # log the response body
|
||||
|
||||
#todo: queue up failed requests and try them again later.
|
||||
r.raise_for_status() # raise an exception if there was an http error code returned
|
||||
|
||||
@classmethod
|
||||
def report_failure_to_callback_url(self, callback_url, api_key, import_status, reason):
|
||||
# TODO: Make sure import_status is an int?
|
||||
|
||||
logging.debug("Reporting import failure to Airtime REST API...")
|
||||
audio_metadata = dict()
|
||||
audio_metadata["import_status"] = import_status
|
||||
audio_metadata["comment"] = reason # hack attack
|
||||
put_payload = json.dumps(audio_metadata)
|
||||
logging.debug("sending http put with payload: " + put_payload)
|
||||
r = requests.put(callback_url, data=put_payload,
|
||||
auth=requests.auth.HTTPBasicAuth(api_key, ''),
|
||||
timeout=StatusReporter._HTTP_REQUEST_TIMEOUT)
|
||||
logging.debug("HTTP request returned status: " + str(r.status_code))
|
||||
logging.debug(r.text) # log the response body
|
||||
|
||||
#TODO: queue up failed requests and try them again later.
|
||||
r.raise_for_status() # raise an exception if there was an http error code returned
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import daemon
|
||||
import argparse
|
||||
import os
|
||||
import airtime_analyzer.airtime_analyzer as aa
|
||||
|
||||
VERSION = "1.0"
|
||||
|
||||
print "Airtime Analyzer " + VERSION
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("-d", "--daemon", help="run as a daemon", action="store_true")
|
||||
parser.add_argument("--debug", help="log full debugging output", action="store_true")
|
||||
args = parser.parse_args()
|
||||
|
||||
'''Ensure media_monitor isn't running before we start, because it'll move newly uploaded
|
||||
files into the library on us and screw up the operation of airtime_analyzer.
|
||||
media_monitor is deprecated.
|
||||
'''
|
||||
def check_if_media_monitor_is_running():
|
||||
pids = [pid for pid in os.listdir('/proc') if pid.isdigit()]
|
||||
|
||||
for pid in pids:
|
||||
try:
|
||||
process_name = open(os.path.join('/proc', pid, 'cmdline'), 'rb').read()
|
||||
if 'media_monitor.py' in process_name:
|
||||
print "Error: This process conflicts with media_monitor, and media_monitor is running."
|
||||
print " Please terminate the running media_monitor.py process and try again."
|
||||
exit(1)
|
||||
except IOError: # proc has already terminated
|
||||
continue
|
||||
|
||||
check_if_media_monitor_is_running()
|
||||
|
||||
if args.daemon:
|
||||
with daemon.DaemonContext():
|
||||
analyzer = aa.AirtimeAnalyzerServer(debug=args.debug)
|
||||
else:
|
||||
# Run without daemonizing
|
||||
analyzer = aa.AirtimeAnalyzerServer(debug=args.debug)
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
from setuptools import setup
|
||||
|
||||
setup(name='airtime_analyzer',
|
||||
version='0.1',
|
||||
description='Airtime Analyzer Worker and File Importer',
|
||||
url='http://github.com/sourcefabric/Airtime',
|
||||
author='Albert Santoni',
|
||||
author_email='albert.santoni@sourcefabric.org',
|
||||
license='MIT',
|
||||
packages=['airtime_analyzer'],
|
||||
scripts=['bin/airtime_analyzer'],
|
||||
install_requires=[
|
||||
'mutagen',
|
||||
'python-magic',
|
||||
'pika',
|
||||
'nose',
|
||||
'coverage',
|
||||
'mock',
|
||||
'python-daemon',
|
||||
'requests',
|
||||
],
|
||||
zip_safe=False)
|
|
@ -0,0 +1,12 @@
|
|||
from nose.tools import *
|
||||
import airtime_analyzer
|
||||
|
||||
def setup():
|
||||
pass
|
||||
|
||||
def teardown():
|
||||
pass
|
||||
|
||||
def test_basic():
|
||||
pass
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
from nose.tools import *
|
||||
import os
|
||||
import shutil
|
||||
import multiprocessing
|
||||
import Queue
|
||||
import datetime
|
||||
from airtime_analyzer.analyzer_pipeline import AnalyzerPipeline
|
||||
|
||||
DEFAULT_AUDIO_FILE = u'tests/test_data/44100Hz-16bit-mono.mp3'
|
||||
DEFAULT_IMPORT_DEST = u'Test Artist/Test Album/44100Hz-16bit-mono.mp3'
|
||||
|
||||
def setup():
|
||||
pass
|
||||
|
||||
def teardown():
|
||||
#Move the file back
|
||||
shutil.move(DEFAULT_IMPORT_DEST, DEFAULT_AUDIO_FILE)
|
||||
assert os.path.exists(DEFAULT_AUDIO_FILE)
|
||||
|
||||
def test_basic():
|
||||
filename = os.path.basename(DEFAULT_AUDIO_FILE)
|
||||
q = multiprocessing.Queue()
|
||||
#This actually imports the file into the "./Test Artist" directory.
|
||||
AnalyzerPipeline.run_analysis(q, DEFAULT_AUDIO_FILE, u'.', filename)
|
||||
results = q.get()
|
||||
assert results['track_title'] == u'Test Title'
|
||||
assert results['artist_name'] == u'Test Artist'
|
||||
assert results['album_title'] == u'Test Album'
|
||||
assert results['year'] == u'1999'
|
||||
assert results['genre'] == u'Test Genre'
|
||||
assert results['mime'] == 'audio/mpeg' # Not unicode because MIMEs aren't.
|
||||
assert results['length_seconds'] == 3.90925
|
||||
assert results["length"] == str(datetime.timedelta(seconds=results["length_seconds"]))
|
||||
assert os.path.exists(DEFAULT_IMPORT_DEST)
|
||||
|
||||
@raises(TypeError)
|
||||
def test_wrong_type_queue_param():
|
||||
AnalyzerPipeline.run_analysis(Queue.Queue(), u'', u'', u'')
|
||||
|
||||
@raises(TypeError)
|
||||
def test_wrong_type_string_param2():
|
||||
AnalyzerPipeline.run_analysis(multiprocessing.queues.Queue(), '', u'', u'')
|
||||
|
||||
@raises(TypeError)
|
||||
def test_wrong_type_string_param3():
|
||||
AnalyzerPipeline.run_analysis(multiprocessing.queues.Queue(), u'', '', u'')
|
||||
|
||||
@raises(TypeError)
|
||||
def test_wrong_type_string_param4():
|
||||
AnalyzerPipeline.run_analysis(multiprocessing.queues.Queue(), u'', u'', '')
|
||||
|
|
@ -0,0 +1,149 @@
# -*- coding: utf-8 -*-
import datetime
import mutagen
import mock
from nose.tools import *
from airtime_analyzer.metadata_analyzer import MetadataAnalyzer

def setup():
    pass

def teardown():
    pass

def check_default_metadata(metadata):
    assert metadata['track_title'] == u'Test Title'
    assert metadata['artist_name'] == u'Test Artist'
    assert metadata['album_title'] == u'Test Album'
    assert metadata['year'] == u'1999'
    assert metadata['genre'] == u'Test Genre'
    assert metadata['track_number'] == u'1'
    assert metadata["length"] == str(datetime.timedelta(seconds=metadata["length_seconds"]))

def test_mp3_mono():
    metadata = MetadataAnalyzer.analyze(u'tests/test_data/44100Hz-16bit-mono.mp3', dict())
    check_default_metadata(metadata)
    assert metadata['channels'] == 1
    assert metadata['bit_rate'] == 64000
    assert metadata['length_seconds'] == 3.90925
    assert metadata['mime'] == 'audio/mpeg' # Not unicode because MIMEs aren't.
    assert metadata['track_total'] == u'10' # MP3s can have a track_total
    #Mutagen doesn't extract comments from mp3s it seems

def test_mp3_jointstereo():
    metadata = MetadataAnalyzer.analyze(u'tests/test_data/44100Hz-16bit-jointstereo.mp3', dict())
    check_default_metadata(metadata)
    assert metadata['channels'] == 2
    assert metadata['bit_rate'] == 128000
    assert metadata['length_seconds'] == 3.90075
    assert metadata['mime'] == 'audio/mpeg'
    assert metadata['track_total'] == u'10' # MP3s can have a track_total

def test_mp3_simplestereo():
    metadata = MetadataAnalyzer.analyze(u'tests/test_data/44100Hz-16bit-simplestereo.mp3', dict())
    check_default_metadata(metadata)
    assert metadata['channels'] == 2
    assert metadata['bit_rate'] == 128000
    assert metadata['length_seconds'] == 3.90075
    assert metadata['mime'] == 'audio/mpeg'
    assert metadata['track_total'] == u'10' # MP3s can have a track_total

def test_mp3_dualmono():
    metadata = MetadataAnalyzer.analyze(u'tests/test_data/44100Hz-16bit-dualmono.mp3', dict())
    check_default_metadata(metadata)
    assert metadata['channels'] == 2
    assert metadata['bit_rate'] == 128000
    assert metadata['length_seconds'] == 3.90075
    assert metadata['mime'] == 'audio/mpeg'
    assert metadata['track_total'] == u'10' # MP3s can have a track_total


def test_ogg_mono():
    metadata = MetadataAnalyzer.analyze(u'tests/test_data/44100Hz-16bit-mono.ogg', dict())
    check_default_metadata(metadata)
    assert metadata['channels'] == 1
    assert metadata['bit_rate'] == 80000
    assert metadata['length_seconds'] == 3.8394104308390022
    assert metadata['mime'] == 'application/ogg'
    assert metadata['comment'] == u'Test Comment'

def test_ogg_stereo():
    metadata = MetadataAnalyzer.analyze(u'tests/test_data/44100Hz-16bit-stereo.ogg', dict())
    check_default_metadata(metadata)
    assert metadata['channels'] == 2
    assert metadata['bit_rate'] == 112000
    assert metadata['length_seconds'] == 3.8394104308390022
    assert metadata['mime'] == 'application/ogg'
    assert metadata['comment'] == u'Test Comment'

''' faac and avconv can't seem to create a proper mono AAC file... ugh
def test_aac_mono():
    metadata = MetadataAnalyzer.analyze(u'tests/test_data/44100Hz-16bit-mono.m4a')
    print "Mono AAC metadata:"
    print metadata
    check_default_metadata(metadata)
    assert metadata['channels'] == 1
    assert metadata['bit_rate'] == 80000
    assert metadata['length_seconds'] == 3.8394104308390022
    assert metadata['mime'] == 'video/mp4'
    assert metadata['comment'] == u'Test Comment'
'''

def test_aac_stereo():
    metadata = MetadataAnalyzer.analyze(u'tests/test_data/44100Hz-16bit-stereo.m4a', dict())
    check_default_metadata(metadata)
    assert metadata['channels'] == 2
    assert metadata['bit_rate'] == 102619
    assert metadata['length_seconds'] == 3.8626303854875284
    assert metadata['mime'] == 'video/mp4'
    assert metadata['comment'] == u'Test Comment'

def test_mp3_utf8():
    metadata = MetadataAnalyzer.analyze(u'tests/test_data/44100Hz-16bit-stereo-utf8.mp3', dict())
    # Using a bunch of different UTF-8 codepages here. Test data is from:
    # http://winrus.com/utf8-jap.htm
    assert metadata['track_title'] == u'アイウエオカキクケコサシスセソタチツテ'
    assert metadata['artist_name'] == u'てすと'
    assert metadata['album_title'] == u'Ä ä Ü ü ß'
    assert metadata['year'] == u'1999'
    assert metadata['genre'] == u'Я Б Г Д Ж Й'
    assert metadata['track_number'] == u'1'
    assert metadata['channels'] == 2
    assert metadata['bit_rate'] == 128000
    assert metadata['length_seconds'] == 3.90075
    assert metadata['mime'] == 'audio/mpeg'
    assert metadata['track_total'] == u'10' # MP3s can have a track_total

# Make sure the parameter checking works
@raises(TypeError)
def test_move_wrong_string_param1():
    not_unicode = 'asdfasdf'
    MetadataAnalyzer.analyze(not_unicode, dict())

@raises(TypeError)
def test_move_wrong_metadata_dict():
    not_a_dict = list()
    MetadataAnalyzer.analyze(u'asdfasdf', not_a_dict)

# Test an mp3 file where the number of channels is invalid or missing:
def test_mp3_bad_channels():
    filename = u'tests/test_data/44100Hz-16bit-mono.mp3'
    '''
    It'd be a pain in the ass to construct a real MP3 with an invalid number
    of channels by hand because that value is stored in every MP3 frame in the file
    '''
    print "testing bad channels..."
    audio_file = mutagen.File(filename, easy=True)
    audio_file.info.mode = 1777
    with mock.patch('airtime_analyzer.metadata_analyzer.mutagen') as mock_mutagen:
        mock_mutagen.File.return_value = audio_file
        #mock_mutagen.side_effect = lambda *args, **kw: audio_file #File(*args, **kw)

        metadata = MetadataAnalyzer.analyze(filename, dict())
        check_default_metadata(metadata)
        assert metadata['channels'] == 1
        assert metadata['bit_rate'] == 64000
        assert metadata['length_seconds'] == 3.90925
        assert metadata['mime'] == 'audio/mpeg' # Not unicode because MIMEs aren't.
        assert metadata['track_total'] == u'10' # MP3s can have a track_total
        #Mutagen doesn't extract comments from mp3s it seems
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,5 @@
#! /bin/bash

path=$1
filename="${path##*/}"
curl http://localhost/rest/media -u 3188BDIMPJROQP89Z0OX: -X POST -F "file=@$path" -F "name=$filename"
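For reference, a rough Python equivalent of the curl call above, using the requests library that setup.py already lists as a dependency. The endpoint, API key, and form field names are copied from the shell script; the function name and error handling are illustrative assumptions, not code from this commit:

# Hypothetical sketch: upload a file to the REST endpoint exercised by the
# shell script above. Endpoint, key, and field names come from that script.
import os
import sys

import requests

def upload_file(path, api_key='3188BDIMPJROQP89Z0OX', base_url='http://localhost'):
    filename = os.path.basename(path)
    with open(path, 'rb') as f:
        # curl's "-u KEY:" is HTTP basic auth: the key is the username, the password is empty.
        response = requests.post(base_url + '/rest/media',
                                 auth=(api_key, ''),
                                 files={'file': (filename, f)},
                                 data={'name': filename})
    response.raise_for_status()
    return response

if __name__ == '__main__':
    upload_file(sys.argv[1])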
@ -0,0 +1,47 @@
<?php
require_once('php-amqplib/amqp.inc');

//use PhpAmqpLib\Connection\AMQPConnection;
//use PhpAmqpLib\Message\AMQPMessage;

define('HOST', '127.0.0.1');
define('PORT', '5672');
define('USER', 'airtime');
define('PASS', 'QEFKX5GMKT4YNMOAL9R8');
define('VHOST', '/airtime');//'/airtime');

$exchange = "airtime-uploads";
$exchangeType = "topic";
$queue = "airtime-uploads";
$routingKey = ""; //"airtime.analyzer.tasks";

if ($argc <= 1)
{
    echo("Usage: " . $argv[0] . " message\n");
    exit();
}

$message = $argv[1];

$connection = new AMQPConnection(HOST, PORT, USER, PASS, VHOST);
if (!isset($connection))
{
    echo "Failed to connect to the RabbitMQ server.";
    return;
}

$channel = $connection->channel();

// declare/create the queue
$channel->queue_declare($queue, false, true, false, false);

// declare/create the exchange as a topic exchange.
$channel->exchange_declare($exchange, $exchangeType, false, true, false);

$msg = new AMQPMessage($message, array("content_type" => "text/plain"));

$channel->basic_publish($msg, $exchange, $routingKey);
print "Sent $message ($routingKey)\n";
$channel->close();
$connection->close();
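The same publish can be sketched in Python with pika, which the analyzer already depends on (see install_requires above). Host, credentials, vhost, exchange, and queue names are copied from the PHP test script; everything else is an illustrative assumption rather than code from this commit:

# Hypothetical pika sketch mirroring the PHP publisher above (Python 2 style,
# to match the test code in this commit).
import sys

import pika

credentials = pika.PlainCredentials('airtime', 'QEFKX5GMKT4YNMOAL9R8')
params = pika.ConnectionParameters(host='127.0.0.1', port=5672,
                                   virtual_host='/airtime',
                                   credentials=credentials)
connection = pika.BlockingConnection(params)
channel = connection.channel()

# Same durable queue and topic exchange as the PHP script declares.
channel.queue_declare(queue='airtime-uploads', durable=True)
channel.exchange_declare(exchange='airtime-uploads', exchange_type='topic',
                         durable=True)  # older pika releases call this parameter 'type'

message = sys.argv[1] if len(sys.argv) > 1 else 'test message'
channel.basic_publish(exchange='airtime-uploads', routing_key='',
                      body=message,
                      properties=pika.BasicProperties(content_type='text/plain'))
print "Sent %s" % message
connection.close()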
@ -0,0 +1 @@
../../../airtime_mvc/library/php-amqplib