install successfully using py3

parent cf2dda4532
commit 8346e89e99
@@ -3,8 +3,8 @@
echo "Updating Apt."
apt-get update > /dev/null
echo "Ensuring Pip is installed."
-DEBIAN_FRONTEND=noninteractive apt-get install -y -qq python-pip > /dev/null
+DEBIAN_FRONTEND=noninteractive apt-get install -y -qq python3-pip > /dev/null
echo "Updating Pip."
-pip install pip -q -q --upgrade > /dev/null
+pip3 install pip -q -q --upgrade > /dev/null
echo "Ensuring Mkdocs is installed."
-pip install -q mkdocs > /dev/null
+pip3 install mkdocs
install (26 changed lines)
@@ -923,22 +923,12 @@ loud "\n-----------------------------------------------------"
loud " * Installing Airtime Services * "
loud "-----------------------------------------------------"

-verbose "\n * Installing necessary python services..."
-loudCmd "pip install setuptools --upgrade"
-loudCmd "pip install zipp==1.0.0"
-verbose "...Done"
-
-# Ubuntu Trusty and Debian Wheezy needs a workaround for python version SSL downloads
-# This affects all python installs where python < 2.7.9
-python_version=$(python --version 2>&1 | awk '{ print $2 }')
+python_version=$(python3 --version 2>&1 | awk '{ print $2 }')
verbose "Detected Python version: $python_version"
-# Convert version so each segment is zero padded for easy comparison
-python_version_formatted=$(awk 'BEGIN {FS = "."} {printf "%03d.%03d.%03d\n", $1,$2,$3}' <<< $python_version)
-if [[ "$python_version_formatted" < "002.007.009" ]]; then
-verbose "\n * Installing pyOpenSSL and ca db for SNI support..."
-loudCmd "pip install pyOpenSSL cryptography idna certifi --upgrade"
+
+verbose "\n * Installing necessary python services..."
+loudCmd "pip3 install setuptools --upgrade"
verbose "...Done"
-fi

verbose "\n * Creating /run/airtime..."
mkdir -p /run/airtime
@@ -960,11 +950,11 @@ if [ ! -d /var/log/airtime ]; then
fi

verbose "\n * Installing API client..."
-loudCmd "python ${AIRTIMEROOT}/python_apps/api_clients/setup.py install --install-scripts=/usr/bin"
+loudCmd "python3 ${AIRTIMEROOT}/python_apps/api_clients/setup.py install --install-scripts=/usr/bin"
verbose "...Done"

verbose "\n * Installing pypo and liquidsoap..."
-loudCmd "python ${AIRTIMEROOT}/python_apps/pypo/setup.py install --install-scripts=/usr/bin --no-init-script"
+loudCmd "python3 ${AIRTIMEROOT}/python_apps/pypo/setup.py install --install-scripts=/usr/bin --no-init-script"
loudCmd "mkdir -p /var/log/airtime/{pypo,pypo-liquidsoap} /var/tmp/airtime/pypo/{cache,files,tmp} /var/tmp/airtime/show-recorder/"
loudCmd "chown -R ${web_user}:${web_user} /var/log/airtime/{pypo,pypo-liquidsoap} /var/tmp/airtime/pypo/{cache,files,tmp} /var/tmp/airtime/show-recorder/"
systemInitInstall airtime-liquidsoap $web_user
@@ -972,7 +962,7 @@ systemInitInstall airtime-playout $web_user
verbose "...Done"

verbose "\n * Installing airtime-celery..."
-loudCmd "python ${AIRTIMEROOT}/python_apps/airtime-celery/setup.py install --no-init-script"
+loudCmd "python3 ${AIRTIMEROOT}/python_apps/airtime-celery/setup.py install --no-init-script"
# Create the Celery user
if $is_centos_dist; then
loudCmd "id celery 2>/dev/null || adduser --no-create-home -c 'LibreTime Celery' -r celery || true"
@@ -988,7 +978,7 @@ systemInitInstall airtime-celery
verbose "...Done"

verbose "\n * Installing airtime_analyzer..."
-loudCmd "python ${AIRTIMEROOT}/python_apps/airtime_analyzer/setup.py install --install-scripts=/usr/bin --no-init-script"
+loudCmd "python3 ${AIRTIMEROOT}/python_apps/airtime_analyzer/setup.py install --install-scripts=/usr/bin --no-init-script"
systemInitInstall airtime_analyzer $web_user
verbose "...Done"

@@ -1,71 +1,56 @@
apache2
coreutils
curl
ecasound
flac
git
gstreamer1.0-plugins-ugly
icecast2
lame
libao-ocaml
libapache2-mod-php7.3
php7.3
php7.3-dev
php7.3-bcmath
php7.3-mbstring
php-pear
php7.3-gd
php-amqplib

lsb-release

zip
unzip

rabbitmq-server

postgresql
postgresql-client
php7.3-pgsql

python
python-virtualenv
python-pip

libcairo2-dev
libcamomile-ocaml-data
libfaad2
libmad-ocaml
libopus0
libportaudio2
libpulse0
libsamplerate0
libsoundtouch-ocaml
libtaglib-ocaml
libao-ocaml
libmad-ocaml
ecasound
libportaudio2
libsamplerate0
libvo-aacenc0

python-rgain
python-gst-1.0
gstreamer1.0-plugins-ugly
python-pika

patch

icecast2

curl
php7.3-curl
mpg123

libcamomile-ocaml-data
libpulse0
vorbis-tools
liquidsoap
lsb-release
lsb-release
lsof
vorbisgain
flac
vorbis-tools
pwgen
libfaad2
mpg123
patch
php7.3
php7.3-bcmath
php7.3-curl
php7.3-dev
php7.3-gd
php7.3-mbstring
php7.3-pgsql
php-amqplib
php-apcu

lame
php-pear
pkg-config
postgresql
postgresql-client
pwgen
python3
python3-gst-1.0
python3-pika
python3-pip
python3-virtualenv
rabbitmq-server
silan
coreutils

liquidsoap

libopus0

systemd-sysv

unzip
vorbisgain
vorbis-tools
vorbis-tools
xmlstarlet
zip
@@ -5,12 +5,12 @@ import logging.handlers
import sys
import signal
import traceback
-import config_file
+from . import config_file
from functools import partial
-from metadata_analyzer import MetadataAnalyzer
-from replaygain_analyzer import ReplayGainAnalyzer
-from status_reporter import StatusReporter
-from message_listener import MessageListener
+from .metadata_analyzer import MetadataAnalyzer
+from .replaygain_analyzer import ReplayGainAnalyzer
+from .status_reporter import StatusReporter
+from .message_listener import MessageListener


class AirtimeAnalyzerServer:
@@ -76,7 +76,7 @@ class AirtimeAnalyzerServer:
def dump_stacktrace(stack):
''' Dump a stacktrace for all threads '''
code = []
-for threadId, stack in sys._current_frames().items():
+for threadId, stack in list(sys._current_frames().items()):
code.append("\n# ThreadID: %s" % threadId)
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
@@ -3,13 +3,13 @@
import logging
import threading
import multiprocessing
-import Queue
-import ConfigParser
-from metadata_analyzer import MetadataAnalyzer
-from filemover_analyzer import FileMoverAnalyzer
-from cuepoint_analyzer import CuePointAnalyzer
-from replaygain_analyzer import ReplayGainAnalyzer
-from playability_analyzer import *
+import queue
+import configparser
+from .metadata_analyzer import MetadataAnalyzer
+from .filemover_analyzer import FileMoverAnalyzer
+from .cuepoint_analyzer import CuePointAnalyzer
+from .replaygain_analyzer import ReplayGainAnalyzer
+from .playability_analyzer import *

class AnalyzerPipeline:
""" Analyzes and imports an audio file into the Airtime library.
@@ -46,15 +46,15 @@ class AnalyzerPipeline:
AnalyzerPipeline.python_logger_deadlock_workaround()

try:
-if not isinstance(queue, Queue.Queue):
+if not isinstance(queue, queue.Queue):
raise TypeError("queue must be a Queue.Queue()")
-if not isinstance(audio_file_path, unicode):
+if not isinstance(audio_file_path, str):
raise TypeError("audio_file_path must be unicode. Was of type " + type(audio_file_path).__name__ + " instead.")
-if not isinstance(import_directory, unicode):
+if not isinstance(import_directory, str):
raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__ + " instead.")
-if not isinstance(original_filename, unicode):
+if not isinstance(original_filename, str):
raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__ + " instead.")
-if not isinstance(file_prefix, unicode):
+if not isinstance(file_prefix, str):
raise TypeError("file_prefix must be unicode. Was of type " + type(file_prefix).__name__ + " instead.")


@@ -91,7 +91,7 @@ class AnalyzerPipeline:
@staticmethod
def python_logger_deadlock_workaround():
# Workaround for: http://bugs.python.org/issue6721#msg140215
-logger_names = logging.Logger.manager.loggerDict.keys()
+logger_names = list(logging.Logger.manager.loggerDict.keys())
logger_names.append(None) # Root logger
for name in logger_names:
for handler in logging.getLogger(name).handlers:
@@ -1,9 +1,9 @@
from __future__ import print_function
-import ConfigParser
-
+import configparser
+
def read_config_file(config_path):
"""Parse the application's config file located at config_path."""
-config = ConfigParser.SafeConfigParser()
+config = configparser.SafeConfigParser()
try:
config.readfp(open(config_path))
except IOError as e:
@@ -3,7 +3,7 @@ import logging
import traceback
import json
import datetime
-from analyzer import Analyzer
+from .analyzer import Analyzer


class CuePointAnalyzer(Analyzer):
@@ -6,7 +6,7 @@ import os, errno
import time
import uuid

-from analyzer import Analyzer
+from .analyzer import Analyzer

class FileMoverAnalyzer(Analyzer):
"""This analyzer copies a file over from a temporary directory (stor/organize)
@@ -29,11 +29,11 @@ class FileMoverAnalyzer(Analyzer):
original_filename: The filename of the file when it was uploaded to Airtime.
metadata: A dictionary where the "full_path" of where the file is moved to will be added.
"""
-if not isinstance(audio_file_path, unicode):
+if not isinstance(audio_file_path, str):
raise TypeError("audio_file_path must be unicode. Was of type " + type(audio_file_path).__name__)
-if not isinstance(import_directory, unicode):
+if not isinstance(import_directory, str):
raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__)
-if not isinstance(original_filename, unicode):
+if not isinstance(original_filename, str):
raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__)
if not isinstance(metadata, dict):
raise TypeError("metadata must be a dict. Was of type " + type(metadata).__name__)
@@ -46,9 +46,9 @@ class FileMoverAnalyzer(Analyzer):
max_file_len = 48
final_file_path = import_directory
orig_file_basename, orig_file_extension = os.path.splitext(original_filename)
-if metadata.has_key("artist_name"):
+if "artist_name" in metadata:
final_file_path += "/" + metadata["artist_name"][0:max_dir_len] # truncating with array slicing
-if metadata.has_key("album_title"):
+if "album_title" in metadata:
final_file_path += "/" + metadata["album_title"][0:max_dir_len]
# Note that orig_file_extension includes the "." already
final_file_path += "/" + orig_file_basename[0:max_file_len] + orig_file_extension
@@ -6,9 +6,9 @@ import select
import signal
import logging
import multiprocessing
-import Queue
-from analyzer_pipeline import AnalyzerPipeline
-from status_reporter import StatusReporter
+import queue
+from .analyzer_pipeline import AnalyzerPipeline
+from .status_reporter import StatusReporter

EXCHANGE = "airtime-uploads"
EXCHANGE_TYPE = "topic"
@@ -198,7 +198,7 @@ class MessageListener:
if callback_url: # If we got an invalid message, there might be no callback_url in the JSON
# Report this as a failed upload to the File Upload REST API.
StatusReporter.report_failure_to_callback_url(callback_url, api_key, import_status=2,
-reason=u'An error occurred while importing this file')
+reason='An error occurred while importing this file')


else:
@@ -224,7 +224,7 @@ class MessageListener:
'''
metadata = {}

-q = Queue.Queue()
+q = queue.Queue()
try:
AnalyzerPipeline.run_analysis(q, audio_file_path, import_directory, original_filename, storage_backend, file_prefix)
metadata = q.get()
@@ -6,7 +6,7 @@ import wave
import logging
import os
import hashlib
-from analyzer import Analyzer
+from .analyzer import Analyzer

class MetadataAnalyzer(Analyzer):

@@ -18,7 +18,7 @@ class MetadataAnalyzer(Analyzer):
filename: The path to the audio file to extract metadata from.
metadata: A dictionary that the extracted metadata will be added to.
'''
-if not isinstance(filename, unicode):
+if not isinstance(filename, str):
raise TypeError("filename must be unicode. Was of type " + type(filename).__name__)
if not isinstance(metadata, dict):
raise TypeError("metadata must be a dict. Was of type " + type(metadata).__name__)
@@ -104,11 +104,11 @@ class MetadataAnalyzer(Analyzer):
if isinstance(track_number, list): # Sometimes tracknumber is a list, ugh
track_number = track_number[0]
track_number_tokens = track_number
-if u'/' in track_number:
-track_number_tokens = track_number.split(u'/')
+if '/' in track_number:
+track_number_tokens = track_number.split('/')
track_number = track_number_tokens[0]
-elif u'-' in track_number:
-track_number_tokens = track_number.split(u'-')
+elif '-' in track_number:
+track_number_tokens = track_number.split('-')
track_number = track_number_tokens[0]
metadata["track_number"] = track_number
track_total = track_number_tokens[1]
@@ -146,7 +146,7 @@ class MetadataAnalyzer(Analyzer):
#'mime_type': 'mime',
}

-for mutagen_tag, airtime_tag in mutagen_to_airtime_mapping.iteritems():
+for mutagen_tag, airtime_tag in mutagen_to_airtime_mapping.items():
try:
metadata[airtime_tag] = audio_file[mutagen_tag]

@@ -2,7 +2,7 @@ __author__ = 'asantoni'

import subprocess
import logging
-from analyzer import Analyzer
+from .analyzer import Analyzer

class UnplayableFileError(Exception):
pass
@@ -1,6 +1,6 @@
import subprocess
import logging
-from analyzer import Analyzer
+from .analyzer import Analyzer


class ReplayGainAnalyzer(Analyzer):
@@ -2,12 +2,12 @@ import requests
import json
import logging
import collections
-import Queue
+import queue
import time
import traceback
import pickle
import threading
-from urlparse import urlparse
+from urllib.parse import urlparse

# Disable urllib3 warnings because these can cause a rare deadlock due to Python 2's crappy internal non-reentrant locking
# around POSIX stuff. See SAAS-714. The hasattr() is for compatibility with older versions of requests.
@@ -68,7 +68,7 @@ def process_http_requests(ipc_queue, http_retry_queue_path):
break
if not isinstance(request, PicklableHttpRequest):
raise TypeError("request must be a PicklableHttpRequest. Was of type " + type(request).__name__)
-except Queue.Empty:
+except queue.Empty:
request = None

# If there's no new HTTP request we need to execute, let's check our "retry
@@ -159,7 +159,7 @@ class StatusReporter():
''' We use multiprocessing.Process again here because we need a thread for this stuff
anyways, and Python gives us process isolation for free (crash safety).
'''
-_ipc_queue = Queue.Queue()
+_ipc_queue = queue.Queue()
#_http_thread = multiprocessing.Process(target=process_http_requests,
# args=(_ipc_queue,))
_http_thread = None
@@ -222,7 +222,7 @@ class StatusReporter():

@classmethod
def report_failure_to_callback_url(self, callback_url, api_key, import_status, reason):
-if not isinstance(import_status, (int, long) ):
+if not isinstance(import_status, int ):
raise TypeError("import_status must be an integer. Was of type " + type(import_status).__name__)

logging.debug("Reporting import failure to Airtime REST API...")
@@ -2,7 +2,7 @@
"""Runs the airtime_analyzer application.
"""

from __future__ import print_function

import daemon
import argparse
import os
@@ -31,12 +31,11 @@ setup(name='airtime_analyzer',
install_requires=[
'mutagen>=1.41.1', # got rid of specific version requirement
'pika',
'daemon',
'file-magic',
'nose',
'coverage',
'mock',
-'python-daemon==1.6',
+'python-daemon',
'requests>=2.7.0',
'rgain3',
# These next 3 are required for requests to support SSL with SNI. Learned this the hard way...
@@ -8,8 +8,8 @@
###############################################################################
import sys
import time
-import urllib
-import urllib2
+import urllib.request, urllib.parse, urllib.error
+import urllib.request, urllib.error, urllib.parse
import requests
import socket
import logging
@@ -26,19 +26,19 @@ AIRTIME_API_VERSION = "1.1"
# instead of copy pasting them around

def to_unicode(obj, encoding='utf-8'):
-if isinstance(obj, basestring):
-if not isinstance(obj, unicode):
-obj = unicode(obj, encoding)
+if isinstance(obj, str):
+if not isinstance(obj, str):
+obj = str(obj, encoding)
return obj

def encode_to(obj, encoding='utf-8'):
-if isinstance(obj, unicode):
+if isinstance(obj, str):
obj = obj.encode(encoding)
return obj

def convert_dict_value_to_utf8(md):
#list comprehension to convert all values of md to utf-8
-return dict([(item[0], encode_to(item[1], "utf-8")) for item in md.items()])
+return dict([(item[0], encode_to(item[1], "utf-8")) for item in list(md.items())])


api_config = {}
@@ -114,7 +114,7 @@ class ApcUrl(object):

def params(self, **params):
temp_url = self.base_url
-for k, v in params.iteritems():
+for k, v in params.items():
wrapped_param = "%%" + k + "%%"
if wrapped_param in temp_url:
temp_url = temp_url.replace(wrapped_param, str(v))
@@ -138,11 +138,11 @@ class ApiRequest(object):

def __call__(self,_post_data=None, **kwargs):
final_url = self.url.params(**kwargs).url()
-if _post_data is not None: _post_data = urllib.urlencode(_post_data)
+if _post_data is not None: _post_data = urllib.parse.urlencode(_post_data)
self.logger.debug(final_url)
try:
-req = urllib2.Request(final_url, _post_data)
-f = urllib2.urlopen(req, timeout=ApiRequest.API_HTTP_REQUEST_TIMEOUT)
+req = urllib.request.Request(final_url, _post_data)
+f = urllib.request.urlopen(req, timeout=ApiRequest.API_HTTP_REQUEST_TIMEOUT)
content_type = f.info().getheader('Content-Type')
response = f.read()
#Everything that calls an ApiRequest should be catching URLError explicitly
@@ -151,7 +151,7 @@ class ApiRequest(object):
except socket.timeout:
self.logger.error('HTTP request to %s timed out', final_url)
raise
-except Exception, e:
+except Exception as e:
#self.logger.error('Exception: %s', e)
#self.logger.error("traceback: %s", traceback.format_exc())
raise
@@ -193,13 +193,13 @@ class RequestProvider(object):
self.config["general"]["base_dir"], self.config["api_base"],
'%%action%%'))
# Now we must discover the possible actions
-actions = dict( (k,v) for k,v in cfg.iteritems() if '%%api_key%%' in v)
-for action_name, action_value in actions.iteritems():
+actions = dict( (k,v) for k,v in cfg.items() if '%%api_key%%' in v)
+for action_name, action_value in actions.items():
new_url = self.url.params(action=action_value).params(
api_key=self.config["general"]['api_key'])
self.requests[action_name] = ApiRequest(action_name, new_url)

-def available_requests(self) : return self.requests.keys()
+def available_requests(self) : return list(self.requests.keys())
def __contains__(self, request) : return request in self.requests

def __getattr__(self, attr):
@@ -217,17 +217,17 @@ class AirtimeApiClient(object):
self.config = ConfigObj(config_path)
self.config.update(api_config)
self.services = RequestProvider(self.config)
-except Exception, e:
+except Exception as e:
self.logger.error('Error loading config file: %s', config_path)
self.logger.error("traceback: %s", traceback.format_exc())
sys.exit(1)

def __get_airtime_version(self):
-try: return self.services.version_url()[u'airtime_version']
+try: return self.services.version_url()['airtime_version']
except Exception: return -1

def __get_api_version(self):
-try: return self.services.version_url()[u'api_version']
+try: return self.services.version_url()['api_version']
except Exception: return -1

def is_server_compatible(self, verbose=True):
@@ -259,7 +259,7 @@ class AirtimeApiClient(object):
def notify_liquidsoap_started(self):
try:
self.services.notify_liquidsoap_started()
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))

def notify_media_item_start_playing(self, media_id):
@@ -268,14 +268,14 @@ class AirtimeApiClient(object):
which we handed to liquidsoap in get_liquidsoap_data(). """
try:
return self.services.update_start_playing_url(media_id=media_id)
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
return None

def get_shows_to_record(self):
try:
return self.services.show_schedule_url()
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
return None

@@ -321,13 +321,13 @@ class AirtimeApiClient(object):
"""
break

-except requests.exceptions.HTTPError, e:
+except requests.exceptions.HTTPError as e:
logger.error("Http error code: %s", e.code)
logger.error("traceback: %s", traceback.format_exc())
-except requests.exceptions.ConnectionError, e:
+except requests.exceptions.ConnectionError as e:
logger.error("Server is down: %s", e.args)
logger.error("traceback: %s", traceback.format_exc())
-except Exception, e:
+except Exception as e:
logger.error("Exception: %s", e)
logger.error("traceback: %s", traceback.format_exc())

@@ -340,7 +340,7 @@ class AirtimeApiClient(object):
try:
return self.services.check_live_stream_auth(
username=username, password=password, djtype=dj_type)
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
return {}

@@ -422,10 +422,10 @@ class AirtimeApiClient(object):
def list_all_db_files(self, dir_id, all_files=True):
logger = self.logger
try:
-all_files = u"1" if all_files else u"0"
+all_files = "1" if all_files else "0"
response = self.services.list_all_db_files(dir_id=dir_id,
all=all_files)
-except Exception, e:
+except Exception as e:
response = {}
logger.error("Exception: %s", e)
try:
@@ -483,12 +483,12 @@ class AirtimeApiClient(object):
post_data = {"msg_post": msg}

#encoded_msg is no longer used server_side!!
-encoded_msg = urllib.quote('dummy')
+encoded_msg = urllib.parse.quote('dummy')
self.services.update_liquidsoap_status.req(post_data,
msg=encoded_msg,
stream_id=stream_id,
boot_time=time).retry(5)
-except Exception, e:
+except Exception as e:
#TODO
logger.error("Exception: %s", e)

@@ -497,7 +497,7 @@ class AirtimeApiClient(object):
logger = self.logger
return self.services.update_source_status.req(sourcename=sourcename,
status=status).retry(5)
-except Exception, e:
+except Exception as e:
#TODO
logger.error("Exception: %s", e)

@@ -514,7 +514,7 @@ class AirtimeApiClient(object):
#http://localhost/api/get-files-without-replay-gain/dir_id/1
try:
return self.services.get_files_without_replay_gain(dir_id=dir_id)
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
return []

@@ -526,7 +526,7 @@ class AirtimeApiClient(object):
"""
try:
return self.services.get_files_without_silan_value()
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
return []

@@ -569,7 +569,7 @@ class AirtimeApiClient(object):
try:
response = self.services.update_stream_setting_table(_post_data={'data': json.dumps(data)})
return response
-except Exception, e:
+except Exception as e:
#TODO
self.logger.error(str(e))

@@ -24,11 +24,10 @@ setup(name='api_clients',
# 'docopt',
# 'kombu',
# 'mutagen',
-# 'poster',
+# 'poster3',
# 'PyDispatcher',
# 'pyinotify',
# 'pytz',
# 'wsgiref'
],
zip_safe=False,
data_files=[])
@@ -5,16 +5,16 @@ class TestApcUrl(unittest.TestCase):
def test_init(self):
url = "/testing"
u = ApcUrl(url)
-self.assertEquals( u.base_url, url)
+self.assertEqual( u.base_url, url)

def test_params_1(self):
u = ApcUrl("/testing/%%key%%")
-self.assertEquals(u.params(key='val').url(), '/testing/val')
+self.assertEqual(u.params(key='val').url(), '/testing/val')

def test_params_2(self):
u = ApcUrl('/testing/%%key%%/%%api%%/more_testing')
full_url = u.params(key="AAA",api="BBB").url()
-self.assertEquals(full_url, '/testing/AAA/BBB/more_testing')
+self.assertEqual(full_url, '/testing/AAA/BBB/more_testing')

def test_params_ex(self):
u = ApcUrl("/testing/%%key%%")
@@ -23,7 +23,7 @@ class TestApcUrl(unittest.TestCase):

def test_url(self):
u = "one/two/three"
-self.assertEquals( ApcUrl(u).url(), u )
+self.assertEqual( ApcUrl(u).url(), u )

def test_url_ex(self):
u = ApcUrl('/%%one%%/%%two%%/three').params(two='testing')
@@ -6,16 +6,16 @@ from .. api_client import ApcUrl, ApiRequest
class TestApiRequest(unittest.TestCase):
def test_init(self):
u = ApiRequest('request_name', ApcUrl('/test/ing'))
-self.assertEquals(u.name, "request_name")
+self.assertEqual(u.name, "request_name")

def test_call(self):
-ret = json.dumps( {u'ok':u'ok'} )
+ret = json.dumps( {'ok':'ok'} )
read = MagicMock()
read.read = MagicMock(return_value=ret)
u = '/testing'
with patch('urllib2.urlopen') as mock_method:
mock_method.return_value = read
request = ApiRequest('mm', ApcUrl(u))()
-self.assertEquals(request, json.loads(ret))
+self.assertEqual(request, json.loads(ret))

if __name__ == '__main__': unittest.main()
@@ -19,7 +19,7 @@ class TestRequestProvider(unittest.TestCase):
self.assertTrue( meth in rp )

def test_notify_webstream_data(self):
-ret = json.dumps( {u'testing' : u'123' } )
+ret = json.dumps( {'testing' : '123' } )
rp = RequestProvider(self.cfg)
read = MagicMock()
read.read = MagicMock(return_value=ret)
@@ -27,6 +27,6 @@ class TestRequestProvider(unittest.TestCase):
mock_method.return_value = read
response = rp.notify_webstream_data(media_id=123)
mock_method.called_once_with(media_id=123)
-self.assertEquals(json.loads(ret), response)
+self.assertEqual(json.loads(ret), response)

if __name__ == '__main__': unittest.main()
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-

from __future__ import print_function

import shutil
import os
import sys
@@ -18,6 +18,6 @@ try:
current_script_dir = get_current_script_dir()
shutil.copy(current_script_dir+"/../airtime-icecast-status.xsl", "/usr/share/icecast2/web")

-except Exception, e:
+except Exception as e:
print("exception: {}".format(e))
sys.exit(1)
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function

import traceback

"""
@@ -76,7 +76,7 @@ logger = rootLogger
try:
config = ConfigObj('/etc/airtime/airtime.conf')

-except Exception, e:
+except Exception as e:
logger.error('Error loading config file: %s', e)
sys.exit()

@@ -1,10 +1,10 @@
""" Runs Airtime liquidsoap
"""

from __future__ import print_function

import argparse
import os
-import generate_liquidsoap_cfg
+from . import generate_liquidsoap_cfg
import logging
import subprocess

@@ -1,4 +1,4 @@
-from __future__ import print_function
+
import logging
import os
import sys
@@ -14,7 +14,7 @@ def generate_liquidsoap_config(ss):
fh.write("################################################\n")
fh.write("# The ignore() lines are to squash unused variable warnings\n")

-for key, value in data.iteritems():
+for key, value in data.items():
try:
if not "port" in key and not "bitrate" in key: # Stupid hack
raise ValueError()
@@ -49,7 +49,7 @@ def run():
ss = ac.get_stream_setting()
generate_liquidsoap_config(ss)
successful = True
-except Exception, e:
+except Exception as e:
print("Unable to connect to the Airtime server.")
logging.error(str(e))
logging.error("traceback: %s", traceback.format_exc())
@@ -1,4 +1,4 @@
-from __future__ import print_function
+
from api_clients import *
import sys

@@ -13,7 +13,7 @@ try:
tn.write('exit\n')
tn.read_all()

-except Exception, e:
-print('Error loading config file: %s', e)
+except Exception as e:
+print(('Error loading config file: %s', e))
sys.exit()

@@ -1,7 +1,7 @@
"""
Python part of radio playout (pypo)
"""
-from __future__ import absolute_import
+

import locale
import logging
@@ -16,10 +16,11 @@ from api_clients import api_client
from configobj import ConfigObj
from datetime import datetime
from optparse import OptionParser
+import importlib
try:
from queue import Queue
except ImportError: # Python 2.7.5 (CentOS 7)
-from Queue import Queue
+from queue import Queue
from threading import Lock

from .listenerstat import ListenerStat
@@ -119,7 +120,7 @@ try:
consoleHandler.setFormatter(logFormatter)
rootLogger.addHandler(consoleHandler)
except Exception as e:
-print("Couldn't configure logging", e)
+print(("Couldn't configure logging", e))
sys.exit(1)


@@ -160,7 +161,7 @@ def configure_locale():
"New locale set to: %s", locale.setlocale(locale.LC_ALL, new_locale)
)

-reload(sys)
+importlib.reload(sys)
sys.setdefaultencoding("UTF-8")
current_locale_encoding = locale.getlocale()[1].lower()
logger.debug("sys default encoding %s", sys.getdefaultencoding())
@@ -1,5 +1,5 @@
from threading import Thread
-import urllib2
+import urllib.request, urllib.error, urllib.parse
import defusedxml.minidom
import base64
from datetime import datetime
@@ -44,13 +44,13 @@ class ListenerStat(Thread):
user_agent = "Mozilla/5.0 (Linux; rv:22.0) Gecko/20130405 Firefox/22.0"
header["User-Agent"] = user_agent

-req = urllib2.Request(
+req = urllib.request.Request(
#assuming that the icecast stats path is /admin/stats.xml
#need to fix this
url=url,
headers=header)

-f = urllib2.urlopen(req, timeout=ListenerStat.HTTP_REQUEST_TIMEOUT)
+f = urllib.request.urlopen(req, timeout=ListenerStat.HTTP_REQUEST_TIMEOUT)
document = f.read()

return document
@@ -109,7 +109,7 @@ class ListenerStat(Thread):
#Note that there can be optimizations done, since if all three
#streams are the same server, we will still initiate 3 separate
#connections
-for k, v in stream_parameters.items():
+for k, v in list(stream_parameters.items()):
if v["enable"] == 'true':
try:
if v["output"] == "icecast":
@@ -146,7 +146,7 @@ class ListenerStat(Thread):

if stats:
self.push_stream_stats(stats)
-except Exception, e:
+except Exception as e:
self.logger.error('Exception: %s', e)

time.sleep(120)
@@ -11,14 +11,14 @@ import subprocess
import signal
from datetime import datetime
import traceback
-import pure
+from . import pure
import mimetypes
-from Queue import Empty
+from queue import Empty
from threading import Thread, Timer
from subprocess import Popen, PIPE

from api_clients import api_client
-from timeout import ls_timeout
+from .timeout import ls_timeout


def keyboardInterruptHandler(signum, frame):
@@ -65,7 +65,7 @@ class PypoFetch(Thread):
"""
self.logger.debug("Cache dir does not exist. Creating...")
os.makedirs(dir)
-except Exception, e:
+except Exception as e:
pass

self.schedule_data = []
@@ -120,7 +120,7 @@ class PypoFetch(Thread):
if self.listener_timeout < 0:
self.listener_timeout = 0
self.logger.info("New timeout: %s" % self.listener_timeout)
-except Exception, e:
+except Exception as e:
top = traceback.format_exc()
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", top)
@@ -151,13 +151,13 @@ class PypoFetch(Thread):
self.logger.debug('Getting information needed on bootstrap from Airtime')
try:
info = self.api_client.get_bootstrap_info()
-except Exception, e:
+except Exception as e:
self.logger.error('Unable to get bootstrap info.. Exiting pypo...')
self.logger.error(str(e))

self.logger.debug('info:%s', info)
commands = []
-for k, v in info['switch_status'].iteritems():
+for k, v in info['switch_status'].items():
commands.append(self.switch_source_temp(k, v))

stream_format = info['stream_label']
@@ -194,11 +194,11 @@ class PypoFetch(Thread):
tn.read_all()
self.logger.info("Liquidsoap is up and running")
break
-except Exception, e:
+except Exception as e:
#sleep 0.5 seconds and try again
time.sleep(0.5)

-except Exception, e:
+except Exception as e:
self.logger.error(e)
finally:
if self.telnet_lock.locked():
@@ -237,7 +237,7 @@ class PypoFetch(Thread):
tn.write('exit\n')

output = tn.read_all()
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
@@ -271,7 +271,7 @@ class PypoFetch(Thread):
tn.write(command)
tn.write('exit\n')
tn.read_all()
-except Exception, e:
+except Exception as e:
self.logger.error("Exception %s", e)
finally:
self.telnet_lock.release()
@@ -288,7 +288,7 @@ class PypoFetch(Thread):
tn.write(command)
tn.write('exit\n')
tn.read_all()
-except Exception, e:
+except Exception as e:
self.logger.error("Exception %s", e)
finally:
self.telnet_lock.release()
@@ -306,11 +306,11 @@ class PypoFetch(Thread):
tn.write(command)
tn.write('exit\n')
tn.read_all()
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
-except Exception, e:
+except Exception as e:
self.logger.error("Exception %s", e)

"""
@@ -336,7 +336,7 @@ class PypoFetch(Thread):
download_dir = self.cache_dir
try:
os.makedirs(download_dir)
-except Exception, e:
+except Exception as e:
pass

media_copy = {}
@@ -344,7 +344,7 @@ class PypoFetch(Thread):
media_item = media[key]
if (media_item['type'] == 'file'):
fileExt = self.sanity_check_media_item(media_item)
-dst = os.path.join(download_dir, unicode(media_item['id']) + unicode(fileExt))
+dst = os.path.join(download_dir, str(media_item['id']) + str(fileExt))
media_item['dst'] = dst
media_item['file_ready'] = False
media_filtered[key] = media_item
@@ -357,7 +357,7 @@ class PypoFetch(Thread):


self.media_prepare_queue.put(copy.copy(media_filtered))
-except Exception, e: self.logger.error("%s", e)
+except Exception as e: self.logger.error("%s", e)

# Send the data to pypo-push
self.logger.debug("Pushing to pypo-push")
@@ -366,7 +366,7 @@ class PypoFetch(Thread):

# cleanup
try: self.cache_cleanup(media)
-except Exception, e: self.logger.error("%s", e)
+except Exception as e: self.logger.error("%s", e)

#do basic validation of file parameters. Useful for debugging
#purposes
@@ -408,7 +408,7 @@ class PypoFetch(Thread):
for mkey in media:
media_item = media[mkey]
if media_item['type'] == 'file':
-scheduled_file_set.add(unicode(media_item["id"]) + unicode(media_item["file_ext"]))
+scheduled_file_set.add(str(media_item["id"]) + str(media_item["file_ext"]))

expired_files = cached_file_set - scheduled_file_set

@@ -426,7 +426,7 @@ class PypoFetch(Thread):
self.logger.info("File '%s' removed" % path)
else:
self.logger.info("File '%s' not removed. Still busy!" % path)
-except Exception, e:
+except Exception as e:
self.logger.error("Problem removing file '%s'" % f)
self.logger.error(traceback.format_exc())

@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-

from threading import Thread
-from Queue import Empty
-from ConfigParser import NoOptionError
+from queue import Empty
+from configparser import NoOptionError

import logging
import shutil
@@ -12,7 +12,7 @@ import os
import sys
import stat
import requests
-import ConfigParser
+import configparser
import json
import hashlib
from requests.exceptions import ConnectionError, HTTPError, Timeout
@@ -44,7 +44,7 @@ class PypoFile(Thread):
dst_exists = True
try:
dst_size = os.path.getsize(dst)
-except Exception, e:
+except Exception as e:
dst_exists = False

do_copy = False
@@ -69,11 +69,11 @@ class PypoFile(Thread):
baseurl = self._config.get(CONFIG_SECTION, 'base_url')
try:
port = self._config.get(CONFIG_SECTION, 'base_port')
-except NoOptionError, e:
+except NoOptionError as e:
port = 80
try:
protocol = self._config.get(CONFIG_SECTION, 'protocol')
-except NoOptionError, e:
+except NoOptionError as e:
protocol = str(("http", "https")[int(port) == 443])

try:
@@ -103,7 +103,7 @@ class PypoFile(Thread):
media_item["filesize"] = file_size

media_item['file_ready'] = True
-except Exception, e:
+except Exception as e:
self.logger.error("Could not copy from %s to %s" % (src, dst))
self.logger.error(e)

@@ -172,7 +172,7 @@ class PypoFile(Thread):

def read_config_file(self, config_path):
"""Parse the application's config file located at config_path."""
-config = ConfigParser.SafeConfigParser(allow_no_value=True)
+config = configparser.SafeConfigParser(allow_no_value=True)
try:
config.readfp(open(config_path))
except IOError as e:
@@ -202,14 +202,14 @@ class PypoFile(Thread):
"""
try:
self.media = self.media_queue.get_nowait()
-except Empty, e:
+except Empty as e:
pass


media_item = self.get_highest_priority_media_item(self.media)
if media_item is not None:
self.copy_file(media_item)
-except Exception, e:
+except Exception as e:
import traceback
top = traceback.format_exc()
self.logger.error(str(e))
@@ -221,7 +221,7 @@ class PypoFile(Thread):
Entry point of the thread
"""
try: self.main()
-except Exception, e:
+except Exception as e:
top = traceback.format_exc()
self.logger.error('PypoFile Exception: %s', top)
time.sleep(5)
@@ -7,7 +7,7 @@ import sys
import time


-from Queue import Empty
+from queue import Empty

import signal
def keyboardInterruptHandler(signum, frame):
@@ -38,7 +38,7 @@ class PypoLiqQueue(Thread):
time_until_next_play)
media_schedule = self.queue.get(block=True, \
timeout=time_until_next_play)
-except Empty, e:
+except Empty as e:
#Time to push a scheduled item.
media_item = schedule_deque.popleft()
self.pypo_liquidsoap.play(media_item)
@@ -82,7 +82,7 @@ class PypoLiqQueue(Thread):

def run(self):
try: self.main()
-except Exception, e:
+except Exception as e:
self.logger.error('PypoLiqQueue Exception: %s', traceback.format_exc())

@@ -1,10 +1,10 @@
-from pypofetch import PypoFetch
-from telnetliquidsoap import TelnetLiquidsoap
+from .pypofetch import PypoFetch
+from .telnetliquidsoap import TelnetLiquidsoap

from datetime import datetime
from datetime import timedelta

-import eventtypes
+from . import eventtypes
import time

class PypoLiquidsoap():
@@ -22,7 +22,7 @@ class PypoLiquidsoap():
logger,\
host,\
port,\
-self.liq_queue_tracker.keys())
+list(self.liq_queue_tracker.keys()))

def get_telnet_dispatcher(self):
return self.telnet_liquidsoap
@@ -120,13 +120,12 @@ class PypoLiquidsoap():

try:
scheduled_now_files = \
-filter(lambda x: x["type"] == eventtypes.FILE, scheduled_now)
+[x for x in scheduled_now if x["type"] == eventtypes.FILE]

scheduled_now_webstream = \
-filter(lambda x: x["type"] == eventtypes.STREAM_OUTPUT_START, \
-scheduled_now)
+[x for x in scheduled_now if x["type"] == eventtypes.STREAM_OUTPUT_START]

-schedule_ids = set(map(lambda x: x["row_id"], scheduled_now_files))
+schedule_ids = set([x["row_id"] for x in scheduled_now_files])

row_id_map = {}
liq_queue_ids = set()
@@ -200,7 +199,7 @@ class PypoLiquidsoap():
return media_item["type"] == eventtypes.FILE

def clear_queue_tracker(self):
-for i in self.liq_queue_tracker.keys():
+for i in list(self.liq_queue_tracker.keys()):
self.liq_queue_tracker[i] = None

def modify_cue_point(self, link):
@@ -53,7 +53,7 @@ class PypoMessageHandler(Thread):
heartbeat = 5) as connection:
rabbit = RabbitConsumer(connection, [schedule_queue], self)
rabbit.run()
-except Exception, e:
+except Exception as e:
self.logger.error(e)

"""
@@ -98,13 +98,13 @@ class PypoMessageHandler(Thread):
self.recorder_queue.put(message)
else:
self.logger.info("Unknown command: %s" % command)
-except Exception, e:
+except Exception as e:
self.logger.error("Exception in handling RabbitMQ message: %s", e)

def main(self):
try:
self.init_rabbit_mq()
-except Exception, e:
+except Exception as e:
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", traceback.format_exc())
self.logger.error("Error connecting to RabbitMQ Server. Trying again in few seconds")
@@ -13,15 +13,15 @@ import math
import traceback
import os

-from pypofetch import PypoFetch
-from pypoliqqueue import PypoLiqQueue
+from .pypofetch import PypoFetch
+from .pypoliqqueue import PypoLiqQueue

-from Queue import Empty, Queue
+from queue import Empty, Queue

from threading import Thread

from api_clients import api_client
-from timeout import ls_timeout
+from .timeout import ls_timeout

logging.captureWarnings(True)

@@ -67,7 +67,7 @@ class PypoPush(Thread):
while True:
try:
media_schedule = self.queue.get(block=True)
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
raise
else:
@@ -138,7 +138,7 @@ class PypoPush(Thread):
tn.write("exit\n")
self.logger.debug(tn.read_all())

-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
@@ -146,7 +146,7 @@ class PypoPush(Thread):
def run(self):
while True:
try: self.main()
-except Exception, e:
+except Exception as e:
top = traceback.format_exc()
self.logger.error('Pypo Push Exception: %s', top)
time.sleep(5)
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-

from __future__ import print_function

import logging
import json
import time
@@ -36,8 +36,8 @@ def api_client(logger):
# loading config file
try:
config = ConfigObj('/etc/airtime/airtime.conf')
-except Exception, e:
-print('Error loading config file: %s', e)
+except Exception as e:
+print(('Error loading config file: %s', e))
sys.exit()

# TODO : add docstrings everywhere in this module
@@ -153,10 +153,10 @@ class ShowRecorder(Thread):
recorded_file['title'] = "%s-%s-%s" % (self.show_name,
full_date, full_time)
#You cannot pass ints into the metadata of a file. Even tracknumber needs to be a string
-recorded_file['tracknumber'] = unicode(self.show_instance)
+recorded_file['tracknumber'] = str(self.show_instance)
recorded_file.save()

-except Exception, e:
+except Exception as e:
top = traceback.format_exc()
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", top)
@@ -173,7 +173,7 @@ class ShowRecorder(Thread):

self.upload_file(filepath)
os.remove(filepath)
-except Exception, e:
+except Exception as e:
self.logger.error(e)
else:
self.logger.info("problem recording show")
@@ -196,7 +196,7 @@ class Recorder(Thread):
try:
self.api_client.register_component('show-recorder')
success = True
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
time.sleep(10)

@@ -221,12 +221,12 @@ class Recorder(Thread):
temp_shows_to_record = {}
shows = m['shows']
for show in shows:
-show_starts = getDateTimeObj(show[u'starts'])
-show_end = getDateTimeObj(show[u'ends'])
+show_starts = getDateTimeObj(show['starts'])
+show_end = getDateTimeObj(show['ends'])
time_delta = show_end - show_starts

-temp_shows_to_record[show[u'starts']] = [time_delta,
-show[u'instance_id'], show[u'name'], m['server_timezone']]
+temp_shows_to_record[show['starts']] = [time_delta,
+show['instance_id'], show['name'], m['server_timezone']]
self.shows_to_record = temp_shows_to_record

def get_time_till_next_show(self):
@@ -298,7 +298,7 @@ class Recorder(Thread):
#remove show from shows to record.
del self.shows_to_record[start_time]
#self.time_till_next_show = self.get_time_till_next_show()
-except Exception, e :
+except Exception as e :
top = traceback.format_exc()
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", top)
@@ -318,7 +318,7 @@ class Recorder(Thread):
if temp is not None:
self.process_recorder_schedule(temp)
self.logger.info("Bootstrap recorder schedule received: %s", temp)
-except Exception, e:
+except Exception as e:
self.logger.error( traceback.format_exc() )
self.logger.error(e)

@@ -338,16 +338,16 @@ class Recorder(Thread):
if temp is not None:
self.process_recorder_schedule(temp)
self.logger.info("updated recorder schedule received: %s", temp)
-except Exception, e:
+except Exception as e:
self.logger.error( traceback.format_exc() )
self.logger.error(e)
try: self.handle_message()
-except Exception, e:
+except Exception as e:
self.logger.error( traceback.format_exc() )
self.logger.error('Pypo Recorder Exception: %s', e)
time.sleep(PUSH_INTERVAL)
self.loops += 1
-except Exception, e :
+except Exception as e :
top = traceback.format_exc()
self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", top)
@@ -1,6 +1,6 @@
-from __future__ import print_function
+
import telnetlib
-from timeout import ls_timeout
+from .timeout import ls_timeout

def create_liquidsoap_annotation(media):
# We need liq_start_next value in the annotate. That is the value that controls overlap duration of crossfade.
@@ -140,7 +140,7 @@ class TelnetLiquidsoap:
tn.write("exit\n")
self.logger.debug(tn.read_all())

-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
@@ -159,7 +159,7 @@ class TelnetLiquidsoap:
tn.write("exit\n")
self.logger.debug(tn.read_all())

-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
@@ -182,7 +182,7 @@ class TelnetLiquidsoap:
self.logger.debug(tn.read_all())

self.current_prebuffering_stream_id = None
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
@@ -205,7 +205,7 @@ class TelnetLiquidsoap:
self.logger.debug(tn.read_all())

self.current_prebuffering_stream_id = media_item['row_id']
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
@@ -225,7 +225,7 @@ class TelnetLiquidsoap:
self.logger.debug("stream_id: %s" % stream_id)

return stream_id
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
@@ -246,7 +246,7 @@ class TelnetLiquidsoap:
tn.write(command)
tn.write('exit\n')
tn.read_all()
-except Exception, e:
+except Exception as e:
self.logger.error(traceback.format_exc())
finally:
self.telnet_lock.release()
@@ -263,7 +263,7 @@ class TelnetLiquidsoap:

tn.write('exit\n')
tn.read_all()
-except Exception, e:
+except Exception as e:
self.logger.error(str(e))
finally:
self.telnet_lock.release()
@@ -1,9 +1,9 @@
from __future__ import print_function
-from pypoliqqueue import PypoLiqQueue
-from telnetliquidsoap import DummyTelnetLiquidsoap, TelnetLiquidsoap

+from .pypoliqqueue import PypoLiqQueue
+from .telnetliquidsoap import DummyTelnetLiquidsoap, TelnetLiquidsoap


-from Queue import Queue
+from queue import Queue
from threading import Lock

import sys
|
|||
import threading
|
||||
import pypofetch
|
||||
from . import pypofetch
|
||||
|
||||
def __timeout(func, timeout_duration, default, args, kwargs):
|
||||
|
||||
|
|
|
@@ -55,12 +55,11 @@ setup(name='airtime-playout',
'future',
'kombu',
'mutagen',
-'poster',
+'poster3',
'PyDispatcher',
'pyinotify',
'pytz',
'requests',
'wsgiref',
'defusedxml'
],
zip_safe=False,
@@ -1,6 +1,6 @@
from pypopush import PypoPush
from threading import Lock
-from Queue import Queue
+from queue import Queue

import datetime

@@ -65,7 +65,7 @@ echo -e "The following files, directories, and services will be removed:\n"
for i in ${FILES[*]}; do
echo $i
done
-echo "pip airtime-playout"
+echo "pip3 airtime-playout"

echo -e "\nIf your web root is not listed, you will need to manually remove it."

@@ -103,6 +103,6 @@ if [[ "$IN" = "y" || "$IN" = "Y" ]]; then
dropAirtimeDatabase
fi

-pip uninstall -y airtime-playout airtime-media-monitor airtime-analyzer
+pip3 uninstall -y airtime-playout airtime-media-monitor airtime-analyzer
service apache2 restart
echo "...Done"