install successfully using py3
commit 8346e89e99
parent cf2dda4532
41 changed files with 259 additions and 287 deletions
@@ -5,12 +5,12 @@ import logging.handlers
 import sys
 import signal
 import traceback
-import config_file
+from . import config_file
 from functools import partial
-from metadata_analyzer import MetadataAnalyzer
-from replaygain_analyzer import ReplayGainAnalyzer
-from status_reporter import StatusReporter
-from message_listener import MessageListener
+from .metadata_analyzer import MetadataAnalyzer
+from .replaygain_analyzer import ReplayGainAnalyzer
+from .status_reporter import StatusReporter
+from .message_listener import MessageListener


 class AirtimeAnalyzerServer:
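Background to the import changes above (an aside, not part of the commit): Python 3 removed implicit relative imports, so a sibling module inside the airtime_analyzer package must be imported with an explicit leading dot or by its absolute package path. A self-contained sketch using a throwaway package; every name below (demo_pkg, consumer, VALUE) is hypothetical and exists only for illustration:

    import os
    import sys
    import tempfile
    import textwrap

    # Build a tiny package on disk: demo_pkg/{__init__,config_file,consumer}.py
    base_dir = tempfile.mkdtemp()
    pkg_dir = os.path.join(base_dir, "demo_pkg")
    os.makedirs(pkg_dir)
    open(os.path.join(pkg_dir, "__init__.py"), "w").close()
    with open(os.path.join(pkg_dir, "config_file.py"), "w") as f:
        f.write("VALUE = 42\n")
    with open(os.path.join(pkg_dir, "consumer.py"), "w") as f:
        f.write(textwrap.dedent("""\
            # "import config_file" worked under Python 2 (implicit relative import)
            # but raises ModuleNotFoundError under Python 3; the explicit dot form
            # is what this commit switches to.
            from . import config_file

            def show():
                return config_file.VALUE
        """))

    sys.path.insert(0, base_dir)
    from demo_pkg import consumer
    print(consumer.show())   # prints 42
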
@@ -76,7 +76,7 @@ class AirtimeAnalyzerServer:
     def dump_stacktrace(stack):
         ''' Dump a stacktrace for all threads '''
         code = []
-        for threadId, stack in sys._current_frames().items():
+        for threadId, stack in list(sys._current_frames().items()):
             code.append("\n# ThreadID: %s" % threadId)
             for filename, lineno, name, line in traceback.extract_stack(stack):
                 code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
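A side note on the list() wrapper above (context only, not part of the commit): in Python 3, dict.items() returns a live view, and the mapping from sys._current_frames() can change as threads start or exit, so materializing the items into a list takes a stable snapshot before iterating. A reduced, module-level sketch of the same stack-dump loop (the original version also takes a stack argument, which this sketch drops):

    import sys
    import traceback

    def dump_stacktrace():
        ''' Return a stack trace for all running threads (adapted sketch). '''
        code = []
        # list(...) snapshots the thread-id -> frame mapping before iterating.
        for thread_id, stack in list(sys._current_frames().items()):
            code.append("\n# ThreadID: %s" % thread_id)
            for filename, lineno, name, line in traceback.extract_stack(stack):
                code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
        return "\n".join(code)

    print(dump_stacktrace())
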
@@ -3,13 +3,13 @@
 import logging
 import threading
 import multiprocessing
-import Queue
-import ConfigParser
-from metadata_analyzer import MetadataAnalyzer
-from filemover_analyzer import FileMoverAnalyzer
-from cuepoint_analyzer import CuePointAnalyzer
-from replaygain_analyzer import ReplayGainAnalyzer
-from playability_analyzer import *
+import queue
+import configparser
+from .metadata_analyzer import MetadataAnalyzer
+from .filemover_analyzer import FileMoverAnalyzer
+from .cuepoint_analyzer import CuePointAnalyzer
+from .replaygain_analyzer import ReplayGainAnalyzer
+from .playability_analyzer import *

 class AnalyzerPipeline:
     """ Analyzes and imports an audio file into the Airtime library.
@@ -46,15 +46,15 @@ class AnalyzerPipeline:
         AnalyzerPipeline.python_logger_deadlock_workaround()

         try:
-            if not isinstance(queue, Queue.Queue):
+            if not isinstance(queue, queue.Queue):
                 raise TypeError("queue must be a Queue.Queue()")
-            if not isinstance(audio_file_path, unicode):
+            if not isinstance(audio_file_path, str):
                 raise TypeError("audio_file_path must be unicode. Was of type " + type(audio_file_path).__name__ + " instead.")
-            if not isinstance(import_directory, unicode):
+            if not isinstance(import_directory, str):
                 raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__ + " instead.")
-            if not isinstance(original_filename, unicode):
+            if not isinstance(original_filename, str):
                 raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__ + " instead.")
-            if not isinstance(file_prefix, unicode):
+            if not isinstance(file_prefix, str):
                 raise TypeError("file_prefix must be unicode. Was of type " + type(file_prefix).__name__ + " instead.")
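Two Python 3 details show up in this hunk: the Queue module is now spelled queue, and unicode is gone (str is the text type). One caveat worth flagging as an observation, not something the commit changes: the parameter being validated is itself named queue, so after this edit the local name appears to shadow the imported queue module at the isinstance call. A reduced sketch with a renamed parameter; the shortened signature and the renamed argument are illustrative, not the project's actual API:

    import queue  # Python 3 name for the Python 2 "Queue" module

    def run_analysis(result_queue, audio_file_path):
        # Renaming the parameter avoids shadowing the queue module inside the body.
        if not isinstance(result_queue, queue.Queue):
            raise TypeError("result_queue must be a queue.Queue()")
        if not isinstance(audio_file_path, str):   # str replaces the Python 2 unicode check
            raise TypeError("audio_file_path must be str. Was of type "
                            + type(audio_file_path).__name__ + " instead.")
        result_queue.put({"full_path": audio_file_path})

    q = queue.Queue()
    run_analysis(q, "/tmp/example.mp3")
    print(q.get())
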
@@ -91,7 +91,7 @@ class AnalyzerPipeline:
     @staticmethod
     def python_logger_deadlock_workaround():
         # Workaround for: http://bugs.python.org/issue6721#msg140215
-        logger_names = logging.Logger.manager.loggerDict.keys()
+        logger_names = list(logging.Logger.manager.loggerDict.keys())
         logger_names.append(None) # Root logger
         for name in logger_names:
             for handler in logging.getLogger(name).handlers:
@@ -1,9 +1,9 @@
 from __future__ import print_function
-import ConfigParser
+import configparser

 def read_config_file(config_path):
     """Parse the application's config file located at config_path."""
-    config = ConfigParser.SafeConfigParser()
+    config = configparser.SafeConfigParser()
     try:
         config.readfp(open(config_path))
     except IOError as e:
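An aside on the configparser hunk (not part of the commit): configparser.SafeConfigParser and readfp() do import and run under Python 3, but both are deprecated aliases and were removed in Python 3.12; ConfigParser plus read_file() is the long-term spelling. A minimal sketch:

    import configparser

    def read_config_file(config_path):
        """Parse the application's config file located at config_path (sketch)."""
        config = configparser.ConfigParser()    # SafeConfigParser is a deprecated alias
        try:
            with open(config_path) as f:
                config.read_file(f)             # readfp() is the deprecated spelling
        except IOError:
            # The commit's actual error handling lies outside this hunk;
            # re-raise so the sketch stays neutral about it.
            raise
        return config
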
@@ -3,7 +3,7 @@ import logging
 import traceback
 import json
 import datetime
-from analyzer import Analyzer
+from .analyzer import Analyzer


 class CuePointAnalyzer(Analyzer):
@@ -6,7 +6,7 @@ import os, errno
 import time
 import uuid

-from analyzer import Analyzer
+from .analyzer import Analyzer

 class FileMoverAnalyzer(Analyzer):
     """This analyzer copies a file over from a temporary directory (stor/organize)
@@ -29,11 +29,11 @@ class FileMoverAnalyzer(Analyzer):
         original_filename: The filename of the file when it was uploaded to Airtime.
         metadata: A dictionary where the "full_path" of where the file is moved to will be added.
         """
-        if not isinstance(audio_file_path, unicode):
+        if not isinstance(audio_file_path, str):
             raise TypeError("audio_file_path must be unicode. Was of type " + type(audio_file_path).__name__)
-        if not isinstance(import_directory, unicode):
+        if not isinstance(import_directory, str):
             raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__)
-        if not isinstance(original_filename, unicode):
+        if not isinstance(original_filename, str):
             raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__)
         if not isinstance(metadata, dict):
             raise TypeError("metadata must be a dict. Was of type " + type(metadata).__name__)
@@ -46,9 +46,9 @@ class FileMoverAnalyzer(Analyzer):
         max_file_len = 48
         final_file_path = import_directory
         orig_file_basename, orig_file_extension = os.path.splitext(original_filename)
-        if metadata.has_key("artist_name"):
+        if "artist_name" in metadata:
             final_file_path += "/" + metadata["artist_name"][0:max_dir_len] # truncating with array slicing
-        if metadata.has_key("album_title"):
+        if "album_title" in metadata:
             final_file_path += "/" + metadata["album_title"][0:max_dir_len]
         # Note that orig_file_extension includes the "." already
         final_file_path += "/" + orig_file_basename[0:max_file_len] + orig_file_extension
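dict.has_key() does not exist in Python 3; the in operator is the membership test (and dict.get() covers lookup-with-default). A tiny runnable sketch of the same path-building pattern; the metadata values, the base directory, and max_dir_len are made up for illustration (max_file_len = 48 is taken from the hunk above):

    metadata = {"artist_name": "Example Artist", "album_title": "Example Album"}  # illustrative
    max_dir_len = 32            # assumed value; defined elsewhere in the real file
    max_file_len = 48

    final_file_path = "/srv/airtime/stor/imported"   # hypothetical import_directory
    if "artist_name" in metadata:                    # Python 3 replacement for has_key()
        final_file_path += "/" + metadata["artist_name"][0:max_dir_len]
    if "album_title" in metadata:
        final_file_path += "/" + metadata["album_title"][0:max_dir_len]
    orig_file_basename, orig_file_extension = ("example_song", ".mp3")   # stand-ins
    final_file_path += "/" + orig_file_basename[0:max_file_len] + orig_file_extension
    print(final_file_path)
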
@@ -6,9 +6,9 @@ import select
 import signal
 import logging
 import multiprocessing
-import Queue
-from analyzer_pipeline import AnalyzerPipeline
-from status_reporter import StatusReporter
+import queue
+from .analyzer_pipeline import AnalyzerPipeline
+from .status_reporter import StatusReporter

 EXCHANGE = "airtime-uploads"
 EXCHANGE_TYPE = "topic"
@@ -198,7 +198,7 @@ class MessageListener:
             if callback_url: # If we got an invalid message, there might be no callback_url in the JSON
                 # Report this as a failed upload to the File Upload REST API.
                 StatusReporter.report_failure_to_callback_url(callback_url, api_key, import_status=2,
-                                                              reason=u'An error occurred while importing this file')
+                                                              reason='An error occurred while importing this file')


         else:
@@ -224,7 +224,7 @@ class MessageListener:
         '''
         metadata = {}

-        q = Queue.Queue()
+        q = queue.Queue()
         try:
             AnalyzerPipeline.run_analysis(q, audio_file_path, import_directory, original_filename, storage_backend, file_prefix)
             metadata = q.get()
@@ -6,7 +6,7 @@ import wave
 import logging
 import os
 import hashlib
-from analyzer import Analyzer
+from .analyzer import Analyzer

 class MetadataAnalyzer(Analyzer):

@@ -18,7 +18,7 @@ class MetadataAnalyzer(Analyzer):
         filename: The path to the audio file to extract metadata from.
         metadata: A dictionary that the extracted metadata will be added to.
         '''
-        if not isinstance(filename, unicode):
+        if not isinstance(filename, str):
             raise TypeError("filename must be unicode. Was of type " + type(filename).__name__)
         if not isinstance(metadata, dict):
             raise TypeError("metadata must be a dict. Was of type " + type(metadata).__name__)
@@ -104,11 +104,11 @@ class MetadataAnalyzer(Analyzer):
         if isinstance(track_number, list): # Sometimes tracknumber is a list, ugh
             track_number = track_number[0]
         track_number_tokens = track_number
-        if u'/' in track_number:
-            track_number_tokens = track_number.split(u'/')
+        if '/' in track_number:
+            track_number_tokens = track_number.split('/')
             track_number = track_number_tokens[0]
-        elif u'-' in track_number:
-            track_number_tokens = track_number.split(u'-')
+        elif '-' in track_number:
+            track_number_tokens = track_number.split('-')
             track_number = track_number_tokens[0]
         metadata["track_number"] = track_number
         track_total = track_number_tokens[1]
@@ -146,7 +146,7 @@ class MetadataAnalyzer(Analyzer):
             #'mime_type': 'mime',
         }

-        for mutagen_tag, airtime_tag in mutagen_to_airtime_mapping.iteritems():
+        for mutagen_tag, airtime_tag in mutagen_to_airtime_mapping.items():
             try:
                 metadata[airtime_tag] = audio_file[mutagen_tag]

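dict.iteritems() was removed in Python 3; items() returns a lazy view and is the drop-in replacement as long as the dict is not mutated during the loop. A reduced sketch of the tag-mapping loop; the mapping entries and the stand-in audio_file dict are abbreviated illustrations, not the full table from the file:

    mutagen_to_airtime_mapping = {     # abbreviated; the real table maps many more tags
        "title": "track_title",
        "artist": "artist_name",
        "album": "album_title",
    }
    audio_file = {"title": ["Example Song"], "artist": ["Example Artist"]}  # stand-in for a mutagen object
    metadata = {}

    for mutagen_tag, airtime_tag in mutagen_to_airtime_mapping.items():   # .iteritems() in Python 2
        try:
            metadata[airtime_tag] = audio_file[mutagen_tag]
        except KeyError:
            pass   # tag not present in this particular file
    print(metadata)
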
@@ -2,7 +2,7 @@ __author__ = 'asantoni'

 import subprocess
 import logging
-from analyzer import Analyzer
+from .analyzer import Analyzer

 class UnplayableFileError(Exception):
     pass
@@ -1,6 +1,6 @@
 import subprocess
 import logging
-from analyzer import Analyzer
+from .analyzer import Analyzer


 class ReplayGainAnalyzer(Analyzer):
@@ -2,12 +2,12 @@ import requests
 import json
 import logging
 import collections
-import Queue
+import queue
 import time
 import traceback
 import pickle
 import threading
-from urlparse import urlparse
+from urllib.parse import urlparse

 # Disable urllib3 warnings because these can cause a rare deadlock due to Python 2's crappy internal non-reentrant locking
 # around POSIX stuff. See SAAS-714. The hasattr() is for compatibility with older versions of requests.
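The two renamed imports above are straight stdlib moves in Python 3: the urlparse module became urllib.parse and Queue became queue, with call sites otherwise unchanged. A small sketch (the URL is made up):

    from urllib.parse import urlparse   # Python 2: from urlparse import urlparse
    import queue                        # Python 2: import Queue

    parts = urlparse("http://airtime.example.com/rest/media/1?api_key=secret")
    print(parts.scheme, parts.netloc, parts.path)

    q = queue.Queue()
    try:
        q.get(block=False)
    except queue.Empty:                 # the exception class moved along with the module
        print("nothing queued")
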
@@ -68,7 +68,7 @@ def process_http_requests(ipc_queue, http_retry_queue_path):
                 break
             if not isinstance(request, PicklableHttpRequest):
                 raise TypeError("request must be a PicklableHttpRequest. Was of type " + type(request).__name__)
-        except Queue.Empty:
+        except queue.Empty:
             request = None

         # If there's no new HTTP request we need to execute, let's check our "retry
@@ -159,7 +159,7 @@ class StatusReporter():
     ''' We use multiprocessing.Process again here because we need a thread for this stuff
         anyways, and Python gives us process isolation for free (crash safety).
     '''
-    _ipc_queue = Queue.Queue()
+    _ipc_queue = queue.Queue()
     #_http_thread = multiprocessing.Process(target=process_http_requests,
     #                                       args=(_ipc_queue,))
     _http_thread = None
@@ -222,7 +222,7 @@ class StatusReporter():

     @classmethod
     def report_failure_to_callback_url(self, callback_url, api_key, import_status, reason):
-        if not isinstance(import_status, (int, long) ):
+        if not isinstance(import_status, int ):
             raise TypeError("import_status must be an integer. Was of type " + type(import_status).__name__)

         logging.debug("Reporting import failure to Airtime REST API...")
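Python 3 folded long into a single int type, so the Python 2 (int, long) tuple in the isinstance check collapses to plain int. A minimal sketch; check_import_status is a hypothetical helper, not the project's API:

    def check_import_status(import_status):
        if not isinstance(import_status, int):   # Python 2 needed (int, long) here
            raise TypeError("import_status must be an integer. Was of type "
                            + type(import_status).__name__)
        return import_status

    # import_status=2 is the value this commit passes for failed imports elsewhere.
    print(check_import_status(2))
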
@@ -2,7 +2,7 @@
 """Runs the airtime_analyzer application.
 """

-from __future__ import print_function

 import daemon
 import argparse
 import os
@@ -31,12 +31,11 @@ setup(name='airtime_analyzer',
       install_requires=[
          'mutagen>=1.41.1', # got rid of specific version requirement
          'pika',
          'daemon',
          'file-magic',
          'nose',
          'coverage',
          'mock',
-         'python-daemon==1.6',
+         'python-daemon',
          'requests>=2.7.0',
          'rgain3',
          # These next 3 are required for requests to support SSL with SNI. Learned this the hard way...