install successfully using py3

This commit is contained in:
Kyle Robbertze 2020-01-16 16:32:51 +02:00
parent cf2dda4532
commit 8346e89e99
41 changed files with 259 additions and 287 deletions

@@ -3,8 +3,8 @@
 echo "Updating Apt."
 apt-get update > /dev/null
 echo "Ensuring Pip is installed."
-DEBIAN_FRONTEND=noninteractive apt-get install -y -qq python-pip > /dev/null
+DEBIAN_FRONTEND=noninteractive apt-get install -y -qq python3-pip > /dev/null
 echo "Updating Pip."
-pip install pip -q -q --upgrade > /dev/null
+pip3 install pip -q -q --upgrade > /dev/null
 echo "Ensuring Mkdocs is installed."
-pip install -q mkdocs > /dev/null
+pip3 install mkdocs
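The docs build script now bootstraps the toolchain through python3-pip and pip3. A hypothetical guard, not part of this commit, that a ported Python entry point could use to fail fast if started under Python 2 (the 3.5 floor is an assumption):

import sys

# The codebase now targets Python 3 only; refuse to continue under Python 2.
if sys.version_info < (3, 5):
    sys.exit("Python 3.5+ is required, found %s" % sys.version.split()[0])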

install

@ -465,7 +465,7 @@ while :; do
;; ;;
--no-rabbitmq) --no-rabbitmq)
skip_rabbitmq=1 skip_rabbitmq=1
;; ;;
--) --)
shift shift
break break
@@ -923,22 +923,12 @@ loud "\n-----------------------------------------------------"
 loud " * Installing Airtime Services * "
 loud "-----------------------------------------------------"
-verbose "\n * Installing necessary python services..."
-loudCmd "pip install setuptools --upgrade"
-loudCmd "pip install zipp==1.0.0"
-verbose "...Done"
-# Ubuntu Trusty and Debian Wheezy needs a workaround for python version SSL downloads
-# This affects all python installs where python < 2.7.9
-python_version=$(python --version 2>&1 | awk '{ print $2 }')
+python_version=$(python3 --version 2>&1 | awk '{ print $2 }')
 verbose "Detected Python version: $python_version"
-# Convert version so each segment is zero padded for easy comparison
-python_version_formatted=$(awk 'BEGIN {FS = "."} {printf "%03d.%03d.%03d\n", $1,$2,$3}' <<< $python_version)
-if [[ "$python_version_formatted" < "002.007.009" ]]; then
-verbose "\n * Installing pyOpenSSL and ca db for SNI support..."
-loudCmd "pip install pyOpenSSL cryptography idna certifi --upgrade"
-verbose "...Done"
-fi
+verbose "\n * Installing necessary python services..."
+loudCmd "pip3 install setuptools --upgrade"
+verbose "...Done"
 verbose "\n * Creating /run/airtime..."
 mkdir -p /run/airtime
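The deleted block zero-padded each version segment so a lexicographic bash comparison could detect interpreters older than 2.7.9; in Python itself the same check is a plain tuple comparison. A small illustrative sketch:

import sys

# Tuples compare element-wise, so no zero padding is needed.
needs_sni_workaround = sys.version_info < (2, 7, 9)
print("pyOpenSSL/SNI workaround needed:", needs_sni_workaround)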
@ -960,11 +950,11 @@ if [ ! -d /var/log/airtime ]; then
fi fi
verbose "\n * Installing API client..." verbose "\n * Installing API client..."
loudCmd "python ${AIRTIMEROOT}/python_apps/api_clients/setup.py install --install-scripts=/usr/bin" loudCmd "python3 ${AIRTIMEROOT}/python_apps/api_clients/setup.py install --install-scripts=/usr/bin"
verbose "...Done" verbose "...Done"
verbose "\n * Installing pypo and liquidsoap..." verbose "\n * Installing pypo and liquidsoap..."
loudCmd "python ${AIRTIMEROOT}/python_apps/pypo/setup.py install --install-scripts=/usr/bin --no-init-script" loudCmd "python3 ${AIRTIMEROOT}/python_apps/pypo/setup.py install --install-scripts=/usr/bin --no-init-script"
loudCmd "mkdir -p /var/log/airtime/{pypo,pypo-liquidsoap} /var/tmp/airtime/pypo/{cache,files,tmp} /var/tmp/airtime/show-recorder/" loudCmd "mkdir -p /var/log/airtime/{pypo,pypo-liquidsoap} /var/tmp/airtime/pypo/{cache,files,tmp} /var/tmp/airtime/show-recorder/"
loudCmd "chown -R ${web_user}:${web_user} /var/log/airtime/{pypo,pypo-liquidsoap} /var/tmp/airtime/pypo/{cache,files,tmp} /var/tmp/airtime/show-recorder/" loudCmd "chown -R ${web_user}:${web_user} /var/log/airtime/{pypo,pypo-liquidsoap} /var/tmp/airtime/pypo/{cache,files,tmp} /var/tmp/airtime/show-recorder/"
systemInitInstall airtime-liquidsoap $web_user systemInitInstall airtime-liquidsoap $web_user
@ -972,7 +962,7 @@ systemInitInstall airtime-playout $web_user
verbose "...Done" verbose "...Done"
verbose "\n * Installing airtime-celery..." verbose "\n * Installing airtime-celery..."
loudCmd "python ${AIRTIMEROOT}/python_apps/airtime-celery/setup.py install --no-init-script" loudCmd "python3 ${AIRTIMEROOT}/python_apps/airtime-celery/setup.py install --no-init-script"
# Create the Celery user # Create the Celery user
if $is_centos_dist; then if $is_centos_dist; then
loudCmd "id celery 2>/dev/null || adduser --no-create-home -c 'LibreTime Celery' -r celery || true" loudCmd "id celery 2>/dev/null || adduser --no-create-home -c 'LibreTime Celery' -r celery || true"
@ -988,7 +978,7 @@ systemInitInstall airtime-celery
verbose "...Done" verbose "...Done"
verbose "\n * Installing airtime_analyzer..." verbose "\n * Installing airtime_analyzer..."
loudCmd "python ${AIRTIMEROOT}/python_apps/airtime_analyzer/setup.py install --install-scripts=/usr/bin --no-init-script" loudCmd "python3 ${AIRTIMEROOT}/python_apps/airtime_analyzer/setup.py install --install-scripts=/usr/bin --no-init-script"
systemInitInstall airtime_analyzer $web_user systemInitInstall airtime_analyzer $web_user
verbose "...Done" verbose "...Done"

@ -1,71 +1,56 @@
apache2 apache2
coreutils
curl
ecasound
flac
git git
gstreamer1.0-plugins-ugly
icecast2
lame
libao-ocaml
libapache2-mod-php7.3 libapache2-mod-php7.3
php7.3 libcairo2-dev
php7.3-dev libcamomile-ocaml-data
php7.3-bcmath libfaad2
php7.3-mbstring libmad-ocaml
php-pear libopus0
php7.3-gd libportaudio2
php-amqplib libpulse0
libsamplerate0
lsb-release
zip
unzip
rabbitmq-server
postgresql
postgresql-client
php7.3-pgsql
python
python-virtualenv
python-pip
libsoundtouch-ocaml libsoundtouch-ocaml
libtaglib-ocaml libtaglib-ocaml
libao-ocaml
libmad-ocaml
ecasound
libportaudio2
libsamplerate0
libvo-aacenc0 libvo-aacenc0
liquidsoap
python-rgain lsb-release
python-gst-1.0
gstreamer1.0-plugins-ugly
python-pika
patch
icecast2
curl
php7.3-curl
mpg123
libcamomile-ocaml-data
libpulse0
vorbis-tools
lsb-release lsb-release
lsof lsof
vorbisgain mpg123
flac patch
vorbis-tools php7.3
pwgen php7.3-bcmath
libfaad2 php7.3-curl
php7.3-dev
php7.3-gd
php7.3-mbstring
php7.3-pgsql
php-amqplib
php-apcu php-apcu
php-pear
lame pkg-config
postgresql
postgresql-client
pwgen
python3
python3-gst-1.0
python3-pika
python3-pip
python3-virtualenv
rabbitmq-server
silan silan
coreutils
liquidsoap
libopus0
systemd-sysv systemd-sysv
unzip
vorbisgain
vorbis-tools
vorbis-tools
xmlstarlet xmlstarlet
zip

@ -5,12 +5,12 @@ import logging.handlers
import sys import sys
import signal import signal
import traceback import traceback
import config_file from . import config_file
from functools import partial from functools import partial
from metadata_analyzer import MetadataAnalyzer from .metadata_analyzer import MetadataAnalyzer
from replaygain_analyzer import ReplayGainAnalyzer from .replaygain_analyzer import ReplayGainAnalyzer
from status_reporter import StatusReporter from .status_reporter import StatusReporter
from message_listener import MessageListener from .message_listener import MessageListener
class AirtimeAnalyzerServer: class AirtimeAnalyzerServer:
@ -76,7 +76,7 @@ class AirtimeAnalyzerServer:
def dump_stacktrace(stack): def dump_stacktrace(stack):
''' Dump a stacktrace for all threads ''' ''' Dump a stacktrace for all threads '''
code = [] code = []
for threadId, stack in sys._current_frames().items(): for threadId, stack in list(sys._current_frames().items()):
code.append("\n# ThreadID: %s" % threadId) code.append("\n# ThreadID: %s" % threadId)
for filename, lineno, name, line in traceback.extract_stack(stack): for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename, lineno, name)) code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
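dict views replace the concrete lists of Python 2, so the ported dump_stacktrace wraps sys._current_frames().items() in list() to snapshot the frames before formatting them. A self-contained sketch of the same pattern:

import sys
import traceback

def dump_stacktrace():
    # list() snapshots the {thread_id: frame} view before it can change.
    code = []
    for thread_id, frame in list(sys._current_frames().items()):
        code.append("\n# ThreadID: %s" % thread_id)
        for filename, lineno, name, line in traceback.extract_stack(frame):
            code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
    return "\n".join(code)

print(dump_stacktrace())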

@ -3,13 +3,13 @@
import logging import logging
import threading import threading
import multiprocessing import multiprocessing
import Queue import queue
import ConfigParser import configparser
from metadata_analyzer import MetadataAnalyzer from .metadata_analyzer import MetadataAnalyzer
from filemover_analyzer import FileMoverAnalyzer from .filemover_analyzer import FileMoverAnalyzer
from cuepoint_analyzer import CuePointAnalyzer from .cuepoint_analyzer import CuePointAnalyzer
from replaygain_analyzer import ReplayGainAnalyzer from .replaygain_analyzer import ReplayGainAnalyzer
from playability_analyzer import * from .playability_analyzer import *
class AnalyzerPipeline: class AnalyzerPipeline:
""" Analyzes and imports an audio file into the Airtime library. """ Analyzes and imports an audio file into the Airtime library.
@ -46,15 +46,15 @@ class AnalyzerPipeline:
AnalyzerPipeline.python_logger_deadlock_workaround() AnalyzerPipeline.python_logger_deadlock_workaround()
try: try:
if not isinstance(queue, Queue.Queue): if not isinstance(queue, queue.Queue):
raise TypeError("queue must be a Queue.Queue()") raise TypeError("queue must be a Queue.Queue()")
if not isinstance(audio_file_path, unicode): if not isinstance(audio_file_path, str):
raise TypeError("audio_file_path must be unicode. Was of type " + type(audio_file_path).__name__ + " instead.") raise TypeError("audio_file_path must be unicode. Was of type " + type(audio_file_path).__name__ + " instead.")
if not isinstance(import_directory, unicode): if not isinstance(import_directory, str):
raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__ + " instead.") raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__ + " instead.")
if not isinstance(original_filename, unicode): if not isinstance(original_filename, str):
raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__ + " instead.") raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__ + " instead.")
if not isinstance(file_prefix, unicode): if not isinstance(file_prefix, str):
raise TypeError("file_prefix must be unicode. Was of type " + type(file_prefix).__name__ + " instead.") raise TypeError("file_prefix must be unicode. Was of type " + type(file_prefix).__name__ + " instead.")
@ -91,7 +91,7 @@ class AnalyzerPipeline:
@staticmethod @staticmethod
def python_logger_deadlock_workaround(): def python_logger_deadlock_workaround():
# Workaround for: http://bugs.python.org/issue6721#msg140215 # Workaround for: http://bugs.python.org/issue6721#msg140215
logger_names = logging.Logger.manager.loggerDict.keys() logger_names = list(logging.Logger.manager.loggerDict.keys())
logger_names.append(None) # Root logger logger_names.append(None) # Root logger
for name in logger_names: for name in logger_names:
for handler in logging.getLogger(name).handlers: for handler in logging.getLogger(name).handlers:
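Under Python 3 the standard-library module is the lower-case queue and text values are plain str, which is what the isinstance checks above now test for. A minimal sketch of the same validation; binding the module as queue_module is an assumption made here so a parameter named queue cannot shadow the import, and the path is a placeholder:

import queue as queue_module

def validate_analysis_args(result_queue, audio_file_path):
    # The results channel must be a queue.Queue; text arguments are str in Python 3.
    if not isinstance(result_queue, queue_module.Queue):
        raise TypeError("queue must be a Queue.Queue()")
    if not isinstance(audio_file_path, str):
        raise TypeError("audio_file_path must be unicode. Was of type "
                        + type(audio_file_path).__name__ + " instead.")

validate_analysis_args(queue_module.Queue(), "/srv/airtime/stor/organize/track.mp3")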

@ -1,9 +1,9 @@
from __future__ import print_function
import ConfigParser import configparser
def read_config_file(config_path): def read_config_file(config_path):
"""Parse the application's config file located at config_path.""" """Parse the application's config file located at config_path."""
config = ConfigParser.SafeConfigParser() config = configparser.SafeConfigParser()
try: try:
config.readfp(open(config_path)) config.readfp(open(config_path))
except IOError as e: except IOError as e:
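configparser.SafeConfigParser and readfp() still exist in Python 3 but only as deprecated aliases. A minimal sketch of the same helper using the current spellings (ConfigParser and read_file):

import configparser

def read_config_file(config_path):
    """Parse the application's config file located at config_path."""
    config = configparser.ConfigParser()
    try:
        with open(config_path) as config_file:
            config.read_file(config_file)
    except IOError as e:
        print("Failed to open config file at %s: %s" % (config_path, e.strerror))
        raise
    return config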

@ -3,7 +3,7 @@ import logging
import traceback import traceback
import json import json
import datetime import datetime
from analyzer import Analyzer from .analyzer import Analyzer
class CuePointAnalyzer(Analyzer): class CuePointAnalyzer(Analyzer):

@ -6,7 +6,7 @@ import os, errno
import time import time
import uuid import uuid
from analyzer import Analyzer from .analyzer import Analyzer
class FileMoverAnalyzer(Analyzer): class FileMoverAnalyzer(Analyzer):
"""This analyzer copies a file over from a temporary directory (stor/organize) """This analyzer copies a file over from a temporary directory (stor/organize)
@ -29,11 +29,11 @@ class FileMoverAnalyzer(Analyzer):
original_filename: The filename of the file when it was uploaded to Airtime. original_filename: The filename of the file when it was uploaded to Airtime.
metadata: A dictionary where the "full_path" of where the file is moved to will be added. metadata: A dictionary where the "full_path" of where the file is moved to will be added.
""" """
if not isinstance(audio_file_path, unicode): if not isinstance(audio_file_path, str):
raise TypeError("audio_file_path must be unicode. Was of type " + type(audio_file_path).__name__) raise TypeError("audio_file_path must be unicode. Was of type " + type(audio_file_path).__name__)
if not isinstance(import_directory, unicode): if not isinstance(import_directory, str):
raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__) raise TypeError("import_directory must be unicode. Was of type " + type(import_directory).__name__)
if not isinstance(original_filename, unicode): if not isinstance(original_filename, str):
raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__) raise TypeError("original_filename must be unicode. Was of type " + type(original_filename).__name__)
if not isinstance(metadata, dict): if not isinstance(metadata, dict):
raise TypeError("metadata must be a dict. Was of type " + type(metadata).__name__) raise TypeError("metadata must be a dict. Was of type " + type(metadata).__name__)
@ -46,9 +46,9 @@ class FileMoverAnalyzer(Analyzer):
max_file_len = 48 max_file_len = 48
final_file_path = import_directory final_file_path = import_directory
orig_file_basename, orig_file_extension = os.path.splitext(original_filename) orig_file_basename, orig_file_extension = os.path.splitext(original_filename)
if metadata.has_key("artist_name"): if "artist_name" in metadata:
final_file_path += "/" + metadata["artist_name"][0:max_dir_len] # truncating with array slicing final_file_path += "/" + metadata["artist_name"][0:max_dir_len] # truncating with array slicing
if metadata.has_key("album_title"): if "album_title" in metadata:
final_file_path += "/" + metadata["album_title"][0:max_dir_len] final_file_path += "/" + metadata["album_title"][0:max_dir_len]
# Note that orig_file_extension includes the "." already # Note that orig_file_extension includes the "." already
final_file_path += "/" + orig_file_basename[0:max_file_len] + orig_file_extension final_file_path += "/" + orig_file_basename[0:max_file_len] + orig_file_extension

@ -6,9 +6,9 @@ import select
import signal import signal
import logging import logging
import multiprocessing import multiprocessing
import Queue import queue
from analyzer_pipeline import AnalyzerPipeline from .analyzer_pipeline import AnalyzerPipeline
from status_reporter import StatusReporter from .status_reporter import StatusReporter
EXCHANGE = "airtime-uploads" EXCHANGE = "airtime-uploads"
EXCHANGE_TYPE = "topic" EXCHANGE_TYPE = "topic"
@ -198,7 +198,7 @@ class MessageListener:
if callback_url: # If we got an invalid message, there might be no callback_url in the JSON if callback_url: # If we got an invalid message, there might be no callback_url in the JSON
# Report this as a failed upload to the File Upload REST API. # Report this as a failed upload to the File Upload REST API.
StatusReporter.report_failure_to_callback_url(callback_url, api_key, import_status=2, StatusReporter.report_failure_to_callback_url(callback_url, api_key, import_status=2,
reason=u'An error occurred while importing this file') reason='An error occurred while importing this file')
else: else:
@ -224,7 +224,7 @@ class MessageListener:
''' '''
metadata = {} metadata = {}
q = Queue.Queue() q = queue.Queue()
try: try:
AnalyzerPipeline.run_analysis(q, audio_file_path, import_directory, original_filename, storage_backend, file_prefix) AnalyzerPipeline.run_analysis(q, audio_file_path, import_directory, original_filename, storage_backend, file_prefix)
metadata = q.get() metadata = q.get()

@ -6,7 +6,7 @@ import wave
import logging import logging
import os import os
import hashlib import hashlib
from analyzer import Analyzer from .analyzer import Analyzer
class MetadataAnalyzer(Analyzer): class MetadataAnalyzer(Analyzer):
@ -18,7 +18,7 @@ class MetadataAnalyzer(Analyzer):
filename: The path to the audio file to extract metadata from. filename: The path to the audio file to extract metadata from.
metadata: A dictionary that the extracted metadata will be added to. metadata: A dictionary that the extracted metadata will be added to.
''' '''
if not isinstance(filename, unicode): if not isinstance(filename, str):
raise TypeError("filename must be unicode. Was of type " + type(filename).__name__) raise TypeError("filename must be unicode. Was of type " + type(filename).__name__)
if not isinstance(metadata, dict): if not isinstance(metadata, dict):
raise TypeError("metadata must be a dict. Was of type " + type(metadata).__name__) raise TypeError("metadata must be a dict. Was of type " + type(metadata).__name__)
@ -104,11 +104,11 @@ class MetadataAnalyzer(Analyzer):
if isinstance(track_number, list): # Sometimes tracknumber is a list, ugh if isinstance(track_number, list): # Sometimes tracknumber is a list, ugh
track_number = track_number[0] track_number = track_number[0]
track_number_tokens = track_number track_number_tokens = track_number
if u'/' in track_number: if '/' in track_number:
track_number_tokens = track_number.split(u'/') track_number_tokens = track_number.split('/')
track_number = track_number_tokens[0] track_number = track_number_tokens[0]
elif u'-' in track_number: elif '-' in track_number:
track_number_tokens = track_number.split(u'-') track_number_tokens = track_number.split('-')
track_number = track_number_tokens[0] track_number = track_number_tokens[0]
metadata["track_number"] = track_number metadata["track_number"] = track_number
track_total = track_number_tokens[1] track_total = track_number_tokens[1]
@ -146,7 +146,7 @@ class MetadataAnalyzer(Analyzer):
#'mime_type': 'mime', #'mime_type': 'mime',
} }
for mutagen_tag, airtime_tag in mutagen_to_airtime_mapping.iteritems(): for mutagen_tag, airtime_tag in mutagen_to_airtime_mapping.items():
try: try:
metadata[airtime_tag] = audio_file[mutagen_tag] metadata[airtime_tag] = audio_file[mutagen_tag]
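dict.iteritems() no longer exists in Python 3; .items() already returns a lazy view, so the mapping loop above only needs the rename. A short sketch with an illustrative two-entry mapping (the real table is much longer):

mutagen_to_airtime_mapping = {"title": "track_title", "artist": "artist_name"}
audio_file = {"title": ["Example Song"], "artist": ["Example Artist"]}

metadata = {}
for mutagen_tag, airtime_tag in mutagen_to_airtime_mapping.items():
    try:
        metadata[airtime_tag] = audio_file[mutagen_tag]
    except KeyError:
        pass
print(metadata)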

@ -2,7 +2,7 @@ __author__ = 'asantoni'
import subprocess import subprocess
import logging import logging
from analyzer import Analyzer from .analyzer import Analyzer
class UnplayableFileError(Exception): class UnplayableFileError(Exception):
pass pass

@ -1,6 +1,6 @@
import subprocess import subprocess
import logging import logging
from analyzer import Analyzer from .analyzer import Analyzer
class ReplayGainAnalyzer(Analyzer): class ReplayGainAnalyzer(Analyzer):

@ -2,12 +2,12 @@ import requests
import json import json
import logging import logging
import collections import collections
import Queue import queue
import time import time
import traceback import traceback
import pickle import pickle
import threading import threading
from urlparse import urlparse from urllib.parse import urlparse
# Disable urllib3 warnings because these can cause a rare deadlock due to Python 2's crappy internal non-reentrant locking # Disable urllib3 warnings because these can cause a rare deadlock due to Python 2's crappy internal non-reentrant locking
# around POSIX stuff. See SAAS-714. The hasattr() is for compatibility with older versions of requests. # around POSIX stuff. See SAAS-714. The hasattr() is for compatibility with older versions of requests.
@ -68,7 +68,7 @@ def process_http_requests(ipc_queue, http_retry_queue_path):
break break
if not isinstance(request, PicklableHttpRequest): if not isinstance(request, PicklableHttpRequest):
raise TypeError("request must be a PicklableHttpRequest. Was of type " + type(request).__name__) raise TypeError("request must be a PicklableHttpRequest. Was of type " + type(request).__name__)
except Queue.Empty: except queue.Empty:
request = None request = None
# If there's no new HTTP request we need to execute, let's check our "retry # If there's no new HTTP request we need to execute, let's check our "retry
@ -159,7 +159,7 @@ class StatusReporter():
''' We use multiprocessing.Process again here because we need a thread for this stuff ''' We use multiprocessing.Process again here because we need a thread for this stuff
anyways, and Python gives us process isolation for free (crash safety). anyways, and Python gives us process isolation for free (crash safety).
''' '''
_ipc_queue = Queue.Queue() _ipc_queue = queue.Queue()
#_http_thread = multiprocessing.Process(target=process_http_requests, #_http_thread = multiprocessing.Process(target=process_http_requests,
# args=(_ipc_queue,)) # args=(_ipc_queue,))
_http_thread = None _http_thread = None
@ -222,7 +222,7 @@ class StatusReporter():
@classmethod @classmethod
def report_failure_to_callback_url(self, callback_url, api_key, import_status, reason): def report_failure_to_callback_url(self, callback_url, api_key, import_status, reason):
if not isinstance(import_status, (int, long) ): if not isinstance(import_status, int ):
raise TypeError("import_status must be an integer. Was of type " + type(import_status).__name__) raise TypeError("import_status must be an integer. Was of type " + type(import_status).__name__)
logging.debug("Reporting import failure to Airtime REST API...") logging.debug("Reporting import failure to Airtime REST API...")

@ -2,7 +2,7 @@
"""Runs the airtime_analyzer application. """Runs the airtime_analyzer application.
""" """
from __future__ import print_function
import daemon import daemon
import argparse import argparse
import os import os

@@ -31,12 +31,11 @@ setup(name='airtime_analyzer',
 install_requires=[
 'mutagen>=1.41.1', # got rid of specific version requirement
 'pika',
-'daemon',
 'file-magic',
 'nose',
 'coverage',
 'mock',
-'python-daemon==1.6',
+'python-daemon',
 'requests>=2.7.0',
 'rgain3',
 # These next 3 are required for requests to support SSL with SNI. Learned this the hard way...

@ -8,8 +8,8 @@
############################################################################### ###############################################################################
import sys import sys
import time import time
import urllib import urllib.request, urllib.parse, urllib.error
import urllib2 import urllib.request, urllib.error, urllib.parse
import requests import requests
import socket import socket
import logging import logging
@ -26,19 +26,19 @@ AIRTIME_API_VERSION = "1.1"
# instead of copy pasting them around # instead of copy pasting them around
def to_unicode(obj, encoding='utf-8'): def to_unicode(obj, encoding='utf-8'):
if isinstance(obj, basestring): if isinstance(obj, str):
if not isinstance(obj, unicode): if not isinstance(obj, str):
obj = unicode(obj, encoding) obj = str(obj, encoding)
return obj return obj
def encode_to(obj, encoding='utf-8'): def encode_to(obj, encoding='utf-8'):
if isinstance(obj, unicode): if isinstance(obj, str):
obj = obj.encode(encoding) obj = obj.encode(encoding)
return obj return obj
def convert_dict_value_to_utf8(md): def convert_dict_value_to_utf8(md):
#list comprehension to convert all values of md to utf-8 #list comprehension to convert all values of md to utf-8
return dict([(item[0], encode_to(item[1], "utf-8")) for item in md.items()]) return dict([(item[0], encode_to(item[1], "utf-8")) for item in list(md.items())])
api_config = {} api_config = {}
@ -114,7 +114,7 @@ class ApcUrl(object):
def params(self, **params): def params(self, **params):
temp_url = self.base_url temp_url = self.base_url
for k, v in params.iteritems(): for k, v in params.items():
wrapped_param = "%%" + k + "%%" wrapped_param = "%%" + k + "%%"
if wrapped_param in temp_url: if wrapped_param in temp_url:
temp_url = temp_url.replace(wrapped_param, str(v)) temp_url = temp_url.replace(wrapped_param, str(v))
@ -138,11 +138,11 @@ class ApiRequest(object):
def __call__(self,_post_data=None, **kwargs): def __call__(self,_post_data=None, **kwargs):
final_url = self.url.params(**kwargs).url() final_url = self.url.params(**kwargs).url()
if _post_data is not None: _post_data = urllib.urlencode(_post_data) if _post_data is not None: _post_data = urllib.parse.urlencode(_post_data)
self.logger.debug(final_url) self.logger.debug(final_url)
try: try:
req = urllib2.Request(final_url, _post_data) req = urllib.request.Request(final_url, _post_data)
f = urllib2.urlopen(req, timeout=ApiRequest.API_HTTP_REQUEST_TIMEOUT) f = urllib.request.urlopen(req, timeout=ApiRequest.API_HTTP_REQUEST_TIMEOUT)
content_type = f.info().getheader('Content-Type') content_type = f.info().getheader('Content-Type')
response = f.read() response = f.read()
#Everything that calls an ApiRequest should be catching URLError explicitly #Everything that calls an ApiRequest should be catching URLError explicitly
@ -151,7 +151,7 @@ class ApiRequest(object):
except socket.timeout: except socket.timeout:
self.logger.error('HTTP request to %s timed out', final_url) self.logger.error('HTTP request to %s timed out', final_url)
raise raise
except Exception, e: except Exception as e:
#self.logger.error('Exception: %s', e) #self.logger.error('Exception: %s', e)
#self.logger.error("traceback: %s", traceback.format_exc()) #self.logger.error("traceback: %s", traceback.format_exc())
raise raise
@ -193,13 +193,13 @@ class RequestProvider(object):
self.config["general"]["base_dir"], self.config["api_base"], self.config["general"]["base_dir"], self.config["api_base"],
'%%action%%')) '%%action%%'))
# Now we must discover the possible actions # Now we must discover the possible actions
actions = dict( (k,v) for k,v in cfg.iteritems() if '%%api_key%%' in v) actions = dict( (k,v) for k,v in cfg.items() if '%%api_key%%' in v)
for action_name, action_value in actions.iteritems(): for action_name, action_value in actions.items():
new_url = self.url.params(action=action_value).params( new_url = self.url.params(action=action_value).params(
api_key=self.config["general"]['api_key']) api_key=self.config["general"]['api_key'])
self.requests[action_name] = ApiRequest(action_name, new_url) self.requests[action_name] = ApiRequest(action_name, new_url)
def available_requests(self) : return self.requests.keys() def available_requests(self) : return list(self.requests.keys())
def __contains__(self, request) : return request in self.requests def __contains__(self, request) : return request in self.requests
def __getattr__(self, attr): def __getattr__(self, attr):
@ -217,17 +217,17 @@ class AirtimeApiClient(object):
self.config = ConfigObj(config_path) self.config = ConfigObj(config_path)
self.config.update(api_config) self.config.update(api_config)
self.services = RequestProvider(self.config) self.services = RequestProvider(self.config)
except Exception, e: except Exception as e:
self.logger.error('Error loading config file: %s', config_path) self.logger.error('Error loading config file: %s', config_path)
self.logger.error("traceback: %s", traceback.format_exc()) self.logger.error("traceback: %s", traceback.format_exc())
sys.exit(1) sys.exit(1)
def __get_airtime_version(self): def __get_airtime_version(self):
try: return self.services.version_url()[u'airtime_version'] try: return self.services.version_url()['airtime_version']
except Exception: return -1 except Exception: return -1
def __get_api_version(self): def __get_api_version(self):
try: return self.services.version_url()[u'api_version'] try: return self.services.version_url()['api_version']
except Exception: return -1 except Exception: return -1
def is_server_compatible(self, verbose=True): def is_server_compatible(self, verbose=True):
@ -259,7 +259,7 @@ class AirtimeApiClient(object):
def notify_liquidsoap_started(self): def notify_liquidsoap_started(self):
try: try:
self.services.notify_liquidsoap_started() self.services.notify_liquidsoap_started()
except Exception, e: except Exception as e:
self.logger.error(str(e)) self.logger.error(str(e))
def notify_media_item_start_playing(self, media_id): def notify_media_item_start_playing(self, media_id):
@ -268,14 +268,14 @@ class AirtimeApiClient(object):
which we handed to liquidsoap in get_liquidsoap_data(). """ which we handed to liquidsoap in get_liquidsoap_data(). """
try: try:
return self.services.update_start_playing_url(media_id=media_id) return self.services.update_start_playing_url(media_id=media_id)
except Exception, e: except Exception as e:
self.logger.error(str(e)) self.logger.error(str(e))
return None return None
def get_shows_to_record(self): def get_shows_to_record(self):
try: try:
return self.services.show_schedule_url() return self.services.show_schedule_url()
except Exception, e: except Exception as e:
self.logger.error(str(e)) self.logger.error(str(e))
return None return None
@ -321,13 +321,13 @@ class AirtimeApiClient(object):
""" """
break break
except requests.exceptions.HTTPError, e: except requests.exceptions.HTTPError as e:
logger.error("Http error code: %s", e.code) logger.error("Http error code: %s", e.code)
logger.error("traceback: %s", traceback.format_exc()) logger.error("traceback: %s", traceback.format_exc())
except requests.exceptions.ConnectionError, e: except requests.exceptions.ConnectionError as e:
logger.error("Server is down: %s", e.args) logger.error("Server is down: %s", e.args)
logger.error("traceback: %s", traceback.format_exc()) logger.error("traceback: %s", traceback.format_exc())
except Exception, e: except Exception as e:
logger.error("Exception: %s", e) logger.error("Exception: %s", e)
logger.error("traceback: %s", traceback.format_exc()) logger.error("traceback: %s", traceback.format_exc())
@ -340,7 +340,7 @@ class AirtimeApiClient(object):
try: try:
return self.services.check_live_stream_auth( return self.services.check_live_stream_auth(
username=username, password=password, djtype=dj_type) username=username, password=password, djtype=dj_type)
except Exception, e: except Exception as e:
self.logger.error(str(e)) self.logger.error(str(e))
return {} return {}
@ -422,10 +422,10 @@ class AirtimeApiClient(object):
def list_all_db_files(self, dir_id, all_files=True): def list_all_db_files(self, dir_id, all_files=True):
logger = self.logger logger = self.logger
try: try:
all_files = u"1" if all_files else u"0" all_files = "1" if all_files else "0"
response = self.services.list_all_db_files(dir_id=dir_id, response = self.services.list_all_db_files(dir_id=dir_id,
all=all_files) all=all_files)
except Exception, e: except Exception as e:
response = {} response = {}
logger.error("Exception: %s", e) logger.error("Exception: %s", e)
try: try:
@ -483,12 +483,12 @@ class AirtimeApiClient(object):
post_data = {"msg_post": msg} post_data = {"msg_post": msg}
#encoded_msg is no longer used server_side!! #encoded_msg is no longer used server_side!!
encoded_msg = urllib.quote('dummy') encoded_msg = urllib.parse.quote('dummy')
self.services.update_liquidsoap_status.req(post_data, self.services.update_liquidsoap_status.req(post_data,
msg=encoded_msg, msg=encoded_msg,
stream_id=stream_id, stream_id=stream_id,
boot_time=time).retry(5) boot_time=time).retry(5)
except Exception, e: except Exception as e:
#TODO #TODO
logger.error("Exception: %s", e) logger.error("Exception: %s", e)
@ -497,7 +497,7 @@ class AirtimeApiClient(object):
logger = self.logger logger = self.logger
return self.services.update_source_status.req(sourcename=sourcename, return self.services.update_source_status.req(sourcename=sourcename,
status=status).retry(5) status=status).retry(5)
except Exception, e: except Exception as e:
#TODO #TODO
logger.error("Exception: %s", e) logger.error("Exception: %s", e)
@ -514,7 +514,7 @@ class AirtimeApiClient(object):
#http://localhost/api/get-files-without-replay-gain/dir_id/1 #http://localhost/api/get-files-without-replay-gain/dir_id/1
try: try:
return self.services.get_files_without_replay_gain(dir_id=dir_id) return self.services.get_files_without_replay_gain(dir_id=dir_id)
except Exception, e: except Exception as e:
self.logger.error(str(e)) self.logger.error(str(e))
return [] return []
@ -526,7 +526,7 @@ class AirtimeApiClient(object):
""" """
try: try:
return self.services.get_files_without_silan_value() return self.services.get_files_without_silan_value()
except Exception, e: except Exception as e:
self.logger.error(str(e)) self.logger.error(str(e))
return [] return []
@ -569,7 +569,7 @@ class AirtimeApiClient(object):
try: try:
response = self.services.update_stream_setting_table(_post_data={'data': json.dumps(data)}) response = self.services.update_stream_setting_table(_post_data={'data': json.dumps(data)})
return response return response
except Exception, e: except Exception as e:
#TODO #TODO
self.logger.error(str(e)) self.logger.error(str(e))
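urllib2 is split across urllib.request, urllib.error and urllib.parse in Python 3, and POST bodies must be bytes rather than str. A minimal sketch of the request pattern above, with a placeholder URL and helper name; note that in Python 3 the Content-Type header is read with getheader() on the response object itself:

import socket
import urllib.error
import urllib.parse
import urllib.request

def post_form(url, fields, timeout=30):
    # urlencode() returns str; Request() wants bytes for the POST body.
    post_data = urllib.parse.urlencode(fields).encode("utf-8")
    req = urllib.request.Request(url, post_data)
    try:
        with urllib.request.urlopen(req, timeout=timeout) as f:
            content_type = f.getheader("Content-Type")
            return content_type, f.read()
    except (socket.timeout, urllib.error.URLError):
        raise

# post_form("http://station.example.com/api/version", {"api_key": "..."})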

@@ -24,11 +24,10 @@ setup(name='api_clients',
 # 'docopt',
 # 'kombu',
 # 'mutagen',
-# 'poster',
+# 'poster3',
 # 'PyDispatcher',
 # 'pyinotify',
 # 'pytz',
-# 'wsgiref'
 ],
 zip_safe=False,
 data_files=[])

@ -5,16 +5,16 @@ class TestApcUrl(unittest.TestCase):
def test_init(self): def test_init(self):
url = "/testing" url = "/testing"
u = ApcUrl(url) u = ApcUrl(url)
self.assertEquals( u.base_url, url) self.assertEqual( u.base_url, url)
def test_params_1(self): def test_params_1(self):
u = ApcUrl("/testing/%%key%%") u = ApcUrl("/testing/%%key%%")
self.assertEquals(u.params(key='val').url(), '/testing/val') self.assertEqual(u.params(key='val').url(), '/testing/val')
def test_params_2(self): def test_params_2(self):
u = ApcUrl('/testing/%%key%%/%%api%%/more_testing') u = ApcUrl('/testing/%%key%%/%%api%%/more_testing')
full_url = u.params(key="AAA",api="BBB").url() full_url = u.params(key="AAA",api="BBB").url()
self.assertEquals(full_url, '/testing/AAA/BBB/more_testing') self.assertEqual(full_url, '/testing/AAA/BBB/more_testing')
def test_params_ex(self): def test_params_ex(self):
u = ApcUrl("/testing/%%key%%") u = ApcUrl("/testing/%%key%%")
@ -23,7 +23,7 @@ class TestApcUrl(unittest.TestCase):
def test_url(self): def test_url(self):
u = "one/two/three" u = "one/two/three"
self.assertEquals( ApcUrl(u).url(), u ) self.assertEqual( ApcUrl(u).url(), u )
def test_url_ex(self): def test_url_ex(self):
u = ApcUrl('/%%one%%/%%two%%/three').params(two='testing') u = ApcUrl('/%%one%%/%%two%%/three').params(two='testing')

@ -6,16 +6,16 @@ from .. api_client import ApcUrl, ApiRequest
class TestApiRequest(unittest.TestCase): class TestApiRequest(unittest.TestCase):
def test_init(self): def test_init(self):
u = ApiRequest('request_name', ApcUrl('/test/ing')) u = ApiRequest('request_name', ApcUrl('/test/ing'))
self.assertEquals(u.name, "request_name") self.assertEqual(u.name, "request_name")
def test_call(self): def test_call(self):
ret = json.dumps( {u'ok':u'ok'} ) ret = json.dumps( {'ok':'ok'} )
read = MagicMock() read = MagicMock()
read.read = MagicMock(return_value=ret) read.read = MagicMock(return_value=ret)
u = '/testing' u = '/testing'
with patch('urllib2.urlopen') as mock_method: with patch('urllib2.urlopen') as mock_method:
mock_method.return_value = read mock_method.return_value = read
request = ApiRequest('mm', ApcUrl(u))() request = ApiRequest('mm', ApcUrl(u))()
self.assertEquals(request, json.loads(ret)) self.assertEqual(request, json.loads(ret))
if __name__ == '__main__': unittest.main() if __name__ == '__main__': unittest.main()
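Besides assertEquals becoming assertEqual (the old name survives only as a deprecated alias), tests that patch urlopen have to target urllib.request under Python 3, since the urllib2 module no longer exists. A self-contained sketch of that mocking pattern; it only round-trips the mocked payload rather than exercising ApiRequest:

import json
import unittest
import urllib.request
from unittest.mock import MagicMock, patch

class TestUrlopenPatching(unittest.TestCase):
    def test_mocked_urlopen(self):
        ret = json.dumps({'ok': 'ok'})
        response = MagicMock()
        response.read = MagicMock(return_value=ret)
        # Patch the Python 3 location of urlopen; the urllib2 module is gone.
        with patch('urllib.request.urlopen') as mock_method:
            mock_method.return_value = response
            body = urllib.request.urlopen('/testing').read()
            self.assertEqual(json.loads(body), {'ok': 'ok'})

if __name__ == '__main__':
    unittest.main()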

@ -19,7 +19,7 @@ class TestRequestProvider(unittest.TestCase):
self.assertTrue( meth in rp ) self.assertTrue( meth in rp )
def test_notify_webstream_data(self): def test_notify_webstream_data(self):
ret = json.dumps( {u'testing' : u'123' } ) ret = json.dumps( {'testing' : '123' } )
rp = RequestProvider(self.cfg) rp = RequestProvider(self.cfg)
read = MagicMock() read = MagicMock()
read.read = MagicMock(return_value=ret) read.read = MagicMock(return_value=ret)
@ -27,6 +27,6 @@ class TestRequestProvider(unittest.TestCase):
mock_method.return_value = read mock_method.return_value = read
response = rp.notify_webstream_data(media_id=123) response = rp.notify_webstream_data(media_id=123)
mock_method.called_once_with(media_id=123) mock_method.called_once_with(media_id=123)
self.assertEquals(json.loads(ret), response) self.assertEqual(json.loads(ret), response)
if __name__ == '__main__': unittest.main() if __name__ == '__main__': unittest.main()

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from __future__ import print_function
import shutil import shutil
import os import os
import sys import sys
@ -18,6 +18,6 @@ try:
current_script_dir = get_current_script_dir() current_script_dir = get_current_script_dir()
shutil.copy(current_script_dir+"/../airtime-icecast-status.xsl", "/usr/share/icecast2/web") shutil.copy(current_script_dir+"/../airtime-icecast-status.xsl", "/usr/share/icecast2/web")
except Exception, e: except Exception as e:
print("exception: {}".format(e)) print("exception: {}".format(e))
sys.exit(1) sys.exit(1)

@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from __future__ import print_function
import traceback import traceback
""" """
@ -76,7 +76,7 @@ logger = rootLogger
try: try:
config = ConfigObj('/etc/airtime/airtime.conf') config = ConfigObj('/etc/airtime/airtime.conf')
except Exception, e: except Exception as e:
logger.error('Error loading config file: %s', e) logger.error('Error loading config file: %s', e)
sys.exit() sys.exit()

@ -1,10 +1,10 @@
""" Runs Airtime liquidsoap """ Runs Airtime liquidsoap
""" """
from __future__ import print_function
import argparse import argparse
import os import os
import generate_liquidsoap_cfg from . import generate_liquidsoap_cfg
import logging import logging
import subprocess import subprocess

@ -1,4 +1,4 @@
from __future__ import print_function
import logging import logging
import os import os
import sys import sys
@ -14,7 +14,7 @@ def generate_liquidsoap_config(ss):
fh.write("################################################\n") fh.write("################################################\n")
fh.write("# The ignore() lines are to squash unused variable warnings\n") fh.write("# The ignore() lines are to squash unused variable warnings\n")
for key, value in data.iteritems(): for key, value in data.items():
try: try:
if not "port" in key and not "bitrate" in key: # Stupid hack if not "port" in key and not "bitrate" in key: # Stupid hack
raise ValueError() raise ValueError()
@ -49,7 +49,7 @@ def run():
ss = ac.get_stream_setting() ss = ac.get_stream_setting()
generate_liquidsoap_config(ss) generate_liquidsoap_config(ss)
successful = True successful = True
except Exception, e: except Exception as e:
print("Unable to connect to the Airtime server.") print("Unable to connect to the Airtime server.")
logging.error(str(e)) logging.error(str(e))
logging.error("traceback: %s", traceback.format_exc()) logging.error("traceback: %s", traceback.format_exc())

@ -1,4 +1,4 @@
from __future__ import print_function
from api_clients import * from api_clients import *
import sys import sys

@ -13,7 +13,7 @@ try:
tn.write('exit\n') tn.write('exit\n')
tn.read_all() tn.read_all()
except Exception, e: except Exception as e:
print('Error loading config file: %s', e) print(('Error loading config file: %s', e))
sys.exit() sys.exit()

@ -1,7 +1,7 @@
""" """
Python part of radio playout (pypo) Python part of radio playout (pypo)
""" """
from __future__ import absolute_import
import locale import locale
import logging import logging
@ -16,10 +16,11 @@ from api_clients import api_client
from configobj import ConfigObj from configobj import ConfigObj
from datetime import datetime from datetime import datetime
from optparse import OptionParser from optparse import OptionParser
import importlib
try: try:
from queue import Queue from queue import Queue
except ImportError: # Python 2.7.5 (CentOS 7) except ImportError: # Python 2.7.5 (CentOS 7)
from Queue import Queue from queue import Queue
from threading import Lock from threading import Lock
from .listenerstat import ListenerStat from .listenerstat import ListenerStat
@ -119,7 +120,7 @@ try:
consoleHandler.setFormatter(logFormatter) consoleHandler.setFormatter(logFormatter)
rootLogger.addHandler(consoleHandler) rootLogger.addHandler(consoleHandler)
except Exception as e: except Exception as e:
print("Couldn't configure logging", e) print(("Couldn't configure logging", e))
sys.exit(1) sys.exit(1)
@ -160,7 +161,7 @@ def configure_locale():
"New locale set to: %s", locale.setlocale(locale.LC_ALL, new_locale) "New locale set to: %s", locale.setlocale(locale.LC_ALL, new_locale)
) )
reload(sys) importlib.reload(sys)
sys.setdefaultencoding("UTF-8") sys.setdefaultencoding("UTF-8")
current_locale_encoding = locale.getlocale()[1].lower() current_locale_encoding = locale.getlocale()[1].lower()
logger.debug("sys default encoding %s", sys.getdefaultencoding()) logger.debug("sys default encoding %s", sys.getdefaultencoding())

@ -1,5 +1,5 @@
from threading import Thread from threading import Thread
import urllib2 import urllib.request, urllib.error, urllib.parse
import defusedxml.minidom import defusedxml.minidom
import base64 import base64
from datetime import datetime from datetime import datetime
@ -44,13 +44,13 @@ class ListenerStat(Thread):
user_agent = "Mozilla/5.0 (Linux; rv:22.0) Gecko/20130405 Firefox/22.0" user_agent = "Mozilla/5.0 (Linux; rv:22.0) Gecko/20130405 Firefox/22.0"
header["User-Agent"] = user_agent header["User-Agent"] = user_agent
req = urllib2.Request( req = urllib.request.Request(
#assuming that the icecast stats path is /admin/stats.xml #assuming that the icecast stats path is /admin/stats.xml
#need to fix this #need to fix this
url=url, url=url,
headers=header) headers=header)
f = urllib2.urlopen(req, timeout=ListenerStat.HTTP_REQUEST_TIMEOUT) f = urllib.request.urlopen(req, timeout=ListenerStat.HTTP_REQUEST_TIMEOUT)
document = f.read() document = f.read()
return document return document
@ -109,7 +109,7 @@ class ListenerStat(Thread):
#Note that there can be optimizations done, since if all three #Note that there can be optimizations done, since if all three
#streams are the same server, we will still initiate 3 separate #streams are the same server, we will still initiate 3 separate
#connections #connections
for k, v in stream_parameters.items(): for k, v in list(stream_parameters.items()):
if v["enable"] == 'true': if v["enable"] == 'true':
try: try:
if v["output"] == "icecast": if v["output"] == "icecast":
@ -146,7 +146,7 @@ class ListenerStat(Thread):
if stats: if stats:
self.push_stream_stats(stats) self.push_stream_stats(stats)
except Exception, e: except Exception as e:
self.logger.error('Exception: %s', e) self.logger.error('Exception: %s', e)
time.sleep(120) time.sleep(120)

@ -11,14 +11,14 @@ import subprocess
import signal import signal
from datetime import datetime from datetime import datetime
import traceback import traceback
import pure from . import pure
import mimetypes import mimetypes
from Queue import Empty from queue import Empty
from threading import Thread, Timer from threading import Thread, Timer
from subprocess import Popen, PIPE from subprocess import Popen, PIPE
from api_clients import api_client from api_clients import api_client
from timeout import ls_timeout from .timeout import ls_timeout
def keyboardInterruptHandler(signum, frame): def keyboardInterruptHandler(signum, frame):
@ -65,7 +65,7 @@ class PypoFetch(Thread):
""" """
self.logger.debug("Cache dir does not exist. Creating...") self.logger.debug("Cache dir does not exist. Creating...")
os.makedirs(dir) os.makedirs(dir)
except Exception, e: except Exception as e:
pass pass
self.schedule_data = [] self.schedule_data = []
@ -120,7 +120,7 @@ class PypoFetch(Thread):
if self.listener_timeout < 0: if self.listener_timeout < 0:
self.listener_timeout = 0 self.listener_timeout = 0
self.logger.info("New timeout: %s" % self.listener_timeout) self.logger.info("New timeout: %s" % self.listener_timeout)
except Exception, e: except Exception as e:
top = traceback.format_exc() top = traceback.format_exc()
self.logger.error('Exception: %s', e) self.logger.error('Exception: %s', e)
self.logger.error("traceback: %s", top) self.logger.error("traceback: %s", top)
@ -151,13 +151,13 @@ class PypoFetch(Thread):
self.logger.debug('Getting information needed on bootstrap from Airtime') self.logger.debug('Getting information needed on bootstrap from Airtime')
try: try:
info = self.api_client.get_bootstrap_info() info = self.api_client.get_bootstrap_info()
except Exception, e: except Exception as e:
self.logger.error('Unable to get bootstrap info.. Exiting pypo...') self.logger.error('Unable to get bootstrap info.. Exiting pypo...')
self.logger.error(str(e)) self.logger.error(str(e))
self.logger.debug('info:%s', info) self.logger.debug('info:%s', info)
commands = [] commands = []
for k, v in info['switch_status'].iteritems(): for k, v in info['switch_status'].items():
commands.append(self.switch_source_temp(k, v)) commands.append(self.switch_source_temp(k, v))
stream_format = info['stream_label'] stream_format = info['stream_label']
@ -194,11 +194,11 @@ class PypoFetch(Thread):
tn.read_all() tn.read_all()
self.logger.info("Liquidsoap is up and running") self.logger.info("Liquidsoap is up and running")
break break
except Exception, e: except Exception as e:
#sleep 0.5 seconds and try again #sleep 0.5 seconds and try again
time.sleep(0.5) time.sleep(0.5)
except Exception, e: except Exception as e:
self.logger.error(e) self.logger.error(e)
finally: finally:
if self.telnet_lock.locked(): if self.telnet_lock.locked():
@ -237,7 +237,7 @@ class PypoFetch(Thread):
tn.write('exit\n') tn.write('exit\n')
output = tn.read_all() output = tn.read_all()
except Exception, e: except Exception as e:
self.logger.error(str(e)) self.logger.error(str(e))
finally: finally:
self.telnet_lock.release() self.telnet_lock.release()
@ -271,7 +271,7 @@ class PypoFetch(Thread):
tn.write(command) tn.write(command)
tn.write('exit\n') tn.write('exit\n')
tn.read_all() tn.read_all()
except Exception, e: except Exception as e:
self.logger.error("Exception %s", e) self.logger.error("Exception %s", e)
finally: finally:
self.telnet_lock.release() self.telnet_lock.release()
@ -288,7 +288,7 @@ class PypoFetch(Thread):
tn.write(command) tn.write(command)
tn.write('exit\n') tn.write('exit\n')
tn.read_all() tn.read_all()
except Exception, e: except Exception as e:
self.logger.error("Exception %s", e) self.logger.error("Exception %s", e)
finally: finally:
self.telnet_lock.release() self.telnet_lock.release()
@ -306,11 +306,11 @@ class PypoFetch(Thread):
tn.write(command) tn.write(command)
tn.write('exit\n') tn.write('exit\n')
tn.read_all() tn.read_all()
except Exception, e: except Exception as e:
self.logger.error(str(e)) self.logger.error(str(e))
finally: finally:
self.telnet_lock.release() self.telnet_lock.release()
except Exception, e: except Exception as e:
self.logger.error("Exception %s", e) self.logger.error("Exception %s", e)
""" """
@ -336,7 +336,7 @@ class PypoFetch(Thread):
download_dir = self.cache_dir download_dir = self.cache_dir
try: try:
os.makedirs(download_dir) os.makedirs(download_dir)
except Exception, e: except Exception as e:
pass pass
media_copy = {} media_copy = {}
@ -344,7 +344,7 @@ class PypoFetch(Thread):
media_item = media[key] media_item = media[key]
if (media_item['type'] == 'file'): if (media_item['type'] == 'file'):
fileExt = self.sanity_check_media_item(media_item) fileExt = self.sanity_check_media_item(media_item)
dst = os.path.join(download_dir, unicode(media_item['id']) + unicode(fileExt)) dst = os.path.join(download_dir, str(media_item['id']) + str(fileExt))
media_item['dst'] = dst media_item['dst'] = dst
media_item['file_ready'] = False media_item['file_ready'] = False
media_filtered[key] = media_item media_filtered[key] = media_item
@ -357,7 +357,7 @@ class PypoFetch(Thread):
self.media_prepare_queue.put(copy.copy(media_filtered)) self.media_prepare_queue.put(copy.copy(media_filtered))
except Exception, e: self.logger.error("%s", e) except Exception as e: self.logger.error("%s", e)
# Send the data to pypo-push # Send the data to pypo-push
self.logger.debug("Pushing to pypo-push") self.logger.debug("Pushing to pypo-push")
@ -366,7 +366,7 @@ class PypoFetch(Thread):
# cleanup # cleanup
try: self.cache_cleanup(media) try: self.cache_cleanup(media)
except Exception, e: self.logger.error("%s", e) except Exception as e: self.logger.error("%s", e)
#do basic validation of file parameters. Useful for debugging #do basic validation of file parameters. Useful for debugging
#purposes #purposes
@ -408,7 +408,7 @@ class PypoFetch(Thread):
for mkey in media: for mkey in media:
media_item = media[mkey] media_item = media[mkey]
if media_item['type'] == 'file': if media_item['type'] == 'file':
scheduled_file_set.add(unicode(media_item["id"]) + unicode(media_item["file_ext"])) scheduled_file_set.add(str(media_item["id"]) + str(media_item["file_ext"]))
expired_files = cached_file_set - scheduled_file_set expired_files = cached_file_set - scheduled_file_set
@ -426,7 +426,7 @@ class PypoFetch(Thread):
self.logger.info("File '%s' removed" % path) self.logger.info("File '%s' removed" % path)
else: else:
self.logger.info("File '%s' not removed. Still busy!" % path) self.logger.info("File '%s' not removed. Still busy!" % path)
except Exception, e: except Exception as e:
self.logger.error("Problem removing file '%s'" % f) self.logger.error("Problem removing file '%s'" % f)
self.logger.error(traceback.format_exc()) self.logger.error(traceback.format_exc())
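Most of this file is the exception-handler syntax change: "except Exception, e" is Python 2 only, while Python 3 requires the "as" form. A minimal illustration:

import traceback

try:
    raise RuntimeError("example failure")
except Exception as e:
    # Python 3 only accepts the "as" form for binding the exception.
    print("Exception: %s" % e)
    print("traceback: %s" % traceback.format_exc())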

@ -1,8 +1,8 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from threading import Thread from threading import Thread
from Queue import Empty from queue import Empty
from ConfigParser import NoOptionError from configparser import NoOptionError
import logging import logging
import shutil import shutil
@ -12,7 +12,7 @@ import os
import sys import sys
import stat import stat
import requests import requests
import ConfigParser import configparser
import json import json
import hashlib import hashlib
from requests.exceptions import ConnectionError, HTTPError, Timeout from requests.exceptions import ConnectionError, HTTPError, Timeout
@ -44,7 +44,7 @@ class PypoFile(Thread):
dst_exists = True dst_exists = True
try: try:
dst_size = os.path.getsize(dst) dst_size = os.path.getsize(dst)
except Exception, e: except Exception as e:
dst_exists = False dst_exists = False
do_copy = False do_copy = False
@ -69,11 +69,11 @@ class PypoFile(Thread):
baseurl = self._config.get(CONFIG_SECTION, 'base_url') baseurl = self._config.get(CONFIG_SECTION, 'base_url')
try: try:
port = self._config.get(CONFIG_SECTION, 'base_port') port = self._config.get(CONFIG_SECTION, 'base_port')
except NoOptionError, e: except NoOptionError as e:
port = 80 port = 80
try: try:
protocol = self._config.get(CONFIG_SECTION, 'protocol') protocol = self._config.get(CONFIG_SECTION, 'protocol')
except NoOptionError, e: except NoOptionError as e:
protocol = str(("http", "https")[int(port) == 443]) protocol = str(("http", "https")[int(port) == 443])
try: try:
@ -103,7 +103,7 @@ class PypoFile(Thread):
media_item["filesize"] = file_size media_item["filesize"] = file_size
media_item['file_ready'] = True media_item['file_ready'] = True
except Exception, e: except Exception as e:
self.logger.error("Could not copy from %s to %s" % (src, dst)) self.logger.error("Could not copy from %s to %s" % (src, dst))
self.logger.error(e) self.logger.error(e)
@ -172,7 +172,7 @@ class PypoFile(Thread):
def read_config_file(self, config_path): def read_config_file(self, config_path):
"""Parse the application's config file located at config_path.""" """Parse the application's config file located at config_path."""
config = ConfigParser.SafeConfigParser(allow_no_value=True) config = configparser.SafeConfigParser(allow_no_value=True)
try: try:
config.readfp(open(config_path)) config.readfp(open(config_path))
except IOError as e: except IOError as e:
@ -202,14 +202,14 @@ class PypoFile(Thread):
""" """
try: try:
self.media = self.media_queue.get_nowait() self.media = self.media_queue.get_nowait()
except Empty, e: except Empty as e:
pass pass
media_item = self.get_highest_priority_media_item(self.media) media_item = self.get_highest_priority_media_item(self.media)
if media_item is not None: if media_item is not None:
self.copy_file(media_item) self.copy_file(media_item)
except Exception, e: except Exception as e:
import traceback import traceback
top = traceback.format_exc() top = traceback.format_exc()
self.logger.error(str(e)) self.logger.error(str(e))
@ -221,7 +221,7 @@ class PypoFile(Thread):
Entry point of the thread Entry point of the thread
""" """
try: self.main() try: self.main()
except Exception, e: except Exception as e:
top = traceback.format_exc() top = traceback.format_exc()
self.logger.error('PypoFile Exception: %s', top) self.logger.error('PypoFile Exception: %s', top)
time.sleep(5) time.sleep(5)

@ -7,7 +7,7 @@ import sys
import time import time
from Queue import Empty from queue import Empty
import signal import signal
def keyboardInterruptHandler(signum, frame): def keyboardInterruptHandler(signum, frame):
@ -38,7 +38,7 @@ class PypoLiqQueue(Thread):
time_until_next_play) time_until_next_play)
media_schedule = self.queue.get(block=True, \ media_schedule = self.queue.get(block=True, \
timeout=time_until_next_play) timeout=time_until_next_play)
except Empty, e: except Empty as e:
#Time to push a scheduled item. #Time to push a scheduled item.
media_item = schedule_deque.popleft() media_item = schedule_deque.popleft()
self.pypo_liquidsoap.play(media_item) self.pypo_liquidsoap.play(media_item)
@ -82,7 +82,7 @@ class PypoLiqQueue(Thread):
def run(self): def run(self):
try: self.main() try: self.main()
except Exception, e: except Exception as e:
self.logger.error('PypoLiqQueue Exception: %s', traceback.format_exc()) self.logger.error('PypoLiqQueue Exception: %s', traceback.format_exc())

@ -1,10 +1,10 @@
from pypofetch import PypoFetch from .pypofetch import PypoFetch
from telnetliquidsoap import TelnetLiquidsoap from .telnetliquidsoap import TelnetLiquidsoap
from datetime import datetime from datetime import datetime
from datetime import timedelta from datetime import timedelta
import eventtypes from . import eventtypes
import time import time
class PypoLiquidsoap(): class PypoLiquidsoap():
@ -22,7 +22,7 @@ class PypoLiquidsoap():
logger,\ logger,\
host,\ host,\
port,\ port,\
self.liq_queue_tracker.keys()) list(self.liq_queue_tracker.keys()))
def get_telnet_dispatcher(self): def get_telnet_dispatcher(self):
return self.telnet_liquidsoap return self.telnet_liquidsoap
@ -120,13 +120,12 @@ class PypoLiquidsoap():
try: try:
scheduled_now_files = \ scheduled_now_files = \
filter(lambda x: x["type"] == eventtypes.FILE, scheduled_now) [x for x in scheduled_now if x["type"] == eventtypes.FILE]
scheduled_now_webstream = \ scheduled_now_webstream = \
filter(lambda x: x["type"] == eventtypes.STREAM_OUTPUT_START, \ [x for x in scheduled_now if x["type"] == eventtypes.STREAM_OUTPUT_START]
scheduled_now)
schedule_ids = set(map(lambda x: x["row_id"], scheduled_now_files)) schedule_ids = set([x["row_id"] for x in scheduled_now_files])
row_id_map = {} row_id_map = {}
liq_queue_ids = set() liq_queue_ids = set()
@ -200,7 +199,7 @@ class PypoLiquidsoap():
return media_item["type"] == eventtypes.FILE return media_item["type"] == eventtypes.FILE
def clear_queue_tracker(self): def clear_queue_tracker(self):
for i in self.liq_queue_tracker.keys(): for i in list(self.liq_queue_tracker.keys()):
self.liq_queue_tracker[i] = None self.liq_queue_tracker[i] = None
def modify_cue_point(self, link): def modify_cue_point(self, link):
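filter() and map() return lazy iterators in Python 3, so results that are reused, indexed, or fed to set() are rewritten here as comprehensions. A small sketch with placeholder event dicts (the real code compares against eventtypes constants):

scheduled_now = [
    {"type": "file", "row_id": 1},
    {"type": "stream_output_start", "row_id": 2},
]

scheduled_now_files = [x for x in scheduled_now if x["type"] == "file"]
schedule_ids = set(x["row_id"] for x in scheduled_now_files)
print(scheduled_now_files, schedule_ids)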

@@ -53,7 +53,7 @@ class PypoMessageHandler(Thread):
                    heartbeat = 5) as connection:
                rabbit = RabbitConsumer(connection, [schedule_queue], self)
                rabbit.run()
-        except Exception, e:
+        except Exception as e:
            self.logger.error(e)
    """
@@ -98,13 +98,13 @@ class PypoMessageHandler(Thread):
                    self.recorder_queue.put(message)
                else:
                    self.logger.info("Unknown command: %s" % command)
-        except Exception, e:
+        except Exception as e:
            self.logger.error("Exception in handling RabbitMQ message: %s", e)
    def main(self):
        try:
            self.init_rabbit_mq()
-        except Exception, e:
+        except Exception as e:
            self.logger.error('Exception: %s', e)
            self.logger.error("traceback: %s", traceback.format_exc())
            self.logger.error("Error connecting to RabbitMQ Server. Trying again in few seconds")

View File

@@ -13,15 +13,15 @@ import math
import traceback
import os
-from pypofetch import PypoFetch
+from .pypofetch import PypoFetch
-from pypoliqqueue import PypoLiqQueue
+from .pypoliqqueue import PypoLiqQueue
-from Queue import Empty, Queue
+from queue import Empty, Queue
from threading import Thread
from api_clients import api_client
-from timeout import ls_timeout
+from .timeout import ls_timeout
logging.captureWarnings(True)
@@ -67,7 +67,7 @@ class PypoPush(Thread):
        while True:
            try:
                media_schedule = self.queue.get(block=True)
-            except Exception, e:
+            except Exception as e:
                self.logger.error(str(e))
                raise
            else:
@@ -138,7 +138,7 @@ class PypoPush(Thread):
            tn.write("exit\n")
            self.logger.debug(tn.read_all())
-        except Exception, e:
+        except Exception as e:
            self.logger.error(str(e))
        finally:
            self.telnet_lock.release()
@@ -146,7 +146,7 @@ class PypoPush(Thread):
    def run(self):
        while True:
            try: self.main()
-            except Exception, e:
+            except Exception as e:
                top = traceback.format_exc()
                self.logger.error('Pypo Push Exception: %s', top)
                time.sleep(5)
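The import hunks above reflect another Python 3 rule: implicit relative imports are gone, so modules inside the pypo package must name their siblings explicitly with either "from .module import Name" or "from . import module", while imports of separately installed packages such as api_clients stay absolute. A layout sketch of that rule; the file listing is illustrative and only runs inside a package arranged this way, not as a standalone script:

# Sketch of explicit relative imports inside a package (Python 3).
# Illustrative layout, not the complete pypo package:
#
#   pypo/
#       __init__.py
#       pypofetch.py      defines PypoFetch
#       timeout.py        defines ls_timeout
#       pypopush.py       contains the imports below
#
# pypo/pypopush.py:
from .pypofetch import PypoFetch     # Python 2 allowed "from pypofetch import PypoFetch"
from . import timeout                # whole-module form, as used by timeout.py for pypofetch
from queue import Empty, Queue       # stdlib rename applies here as well

# Absolute imports of other top-level packages are unchanged:
from api_clients import api_client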

View File

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-from __future__ import print_function
import logging
import json
import time
@@ -36,8 +36,8 @@ def api_client(logger):
    # loading config file
    try:
        config = ConfigObj('/etc/airtime/airtime.conf')
-    except Exception, e:
+    except Exception as e:
-        print('Error loading config file: %s', e)
+        print(('Error loading config file: %s', e))
        sys.exit()
# TODO : add docstrings everywhere in this module
@@ -153,10 +153,10 @@ class ShowRecorder(Thread):
            recorded_file['title'] = "%s-%s-%s" % (self.show_name,
                    full_date, full_time)
            #You cannot pass ints into the metadata of a file. Even tracknumber needs to be a string
-            recorded_file['tracknumber'] = unicode(self.show_instance)
+            recorded_file['tracknumber'] = str(self.show_instance)
            recorded_file.save()
-        except Exception, e:
+        except Exception as e:
            top = traceback.format_exc()
            self.logger.error('Exception: %s', e)
            self.logger.error("traceback: %s", top)
@@ -173,7 +173,7 @@ class ShowRecorder(Thread):
                self.upload_file(filepath)
                os.remove(filepath)
-            except Exception, e:
+            except Exception as e:
                self.logger.error(e)
        else:
            self.logger.info("problem recording show")
@@ -196,7 +196,7 @@ class Recorder(Thread):
            try:
                self.api_client.register_component('show-recorder')
                success = True
-            except Exception, e:
+            except Exception as e:
                self.logger.error(str(e))
                time.sleep(10)
@@ -221,12 +221,12 @@ class Recorder(Thread):
        temp_shows_to_record = {}
        shows = m['shows']
        for show in shows:
-            show_starts = getDateTimeObj(show[u'starts'])
+            show_starts = getDateTimeObj(show['starts'])
-            show_end = getDateTimeObj(show[u'ends'])
+            show_end = getDateTimeObj(show['ends'])
            time_delta = show_end - show_starts
-            temp_shows_to_record[show[u'starts']] = [time_delta,
-                    show[u'instance_id'], show[u'name'], m['server_timezone']]
+            temp_shows_to_record[show['starts']] = [time_delta,
+                    show['instance_id'], show['name'], m['server_timezone']]
        self.shows_to_record = temp_shows_to_record
    def get_time_till_next_show(self):
@@ -298,7 +298,7 @@ class Recorder(Thread):
                #remove show from shows to record.
                del self.shows_to_record[start_time]
                #self.time_till_next_show = self.get_time_till_next_show()
-            except Exception, e :
+            except Exception as e :
                top = traceback.format_exc()
                self.logger.error('Exception: %s', e)
                self.logger.error("traceback: %s", top)
@@ -318,7 +318,7 @@ class Recorder(Thread):
            if temp is not None:
                self.process_recorder_schedule(temp)
            self.logger.info("Bootstrap recorder schedule received: %s", temp)
-        except Exception, e:
+        except Exception as e:
            self.logger.error( traceback.format_exc() )
            self.logger.error(e)
@@ -338,16 +338,16 @@ class Recorder(Thread):
                    if temp is not None:
                        self.process_recorder_schedule(temp)
                    self.logger.info("updated recorder schedule received: %s", temp)
-                except Exception, e:
+                except Exception as e:
                    self.logger.error( traceback.format_exc() )
                    self.logger.error(e)
                try: self.handle_message()
-                except Exception, e:
+                except Exception as e:
                    self.logger.error( traceback.format_exc() )
                    self.logger.error('Pypo Recorder Exception: %s', e)
                time.sleep(PUSH_INTERVAL)
                self.loops += 1
-        except Exception, e :
+        except Exception as e :
            top = traceback.format_exc()
            self.logger.error('Exception: %s', e)
            self.logger.error("traceback: %s", top)
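Two further details of the port show up in the recorder: the unicode builtin no longer exists (str is already Unicode text, so u'...' prefixes are redundant though still legal), and print is an ordinary function, which is why the __future__ import could be dropped. A small sketch, assuming a placeholder show dict and a stand-in getDateTimeObj() parser rather than the recorder's real helpers:

# Sketch of the str/unicode and print changes; the show data is made up.
from datetime import datetime

show = {'starts': '2020-01-16 16:00:00', 'ends': '2020-01-16 17:00:00'}

def getDateTimeObj(s):
    # stand-in for the recorder's real date parsing
    return datetime.strptime(s, '%Y-%m-%d %H:%M:%S')

show_starts = getDateTimeObj(show['starts'])   # u'starts' would also work; the prefix is just noise
show_end = getDateTimeObj(show['ends'])

tracknumber = str(42)   # Python 2 used unicode(...); in Python 3 str is Unicode text
print('show length: %s' % (show_end - show_starts))   # print is a function, no __future__ needed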

View File

@@ -1,6 +1,6 @@
-from __future__ import print_function
import telnetlib
-from timeout import ls_timeout
+from .timeout import ls_timeout
def create_liquidsoap_annotation(media):
    # We need liq_start_next value in the annotate. That is the value that controls overlap duration of crossfade.
@@ -140,7 +140,7 @@ class TelnetLiquidsoap:
            tn.write("exit\n")
            self.logger.debug(tn.read_all())
-        except Exception, e:
+        except Exception as e:
            self.logger.error(str(e))
        finally:
            self.telnet_lock.release()
@@ -159,7 +159,7 @@ class TelnetLiquidsoap:
            tn.write("exit\n")
            self.logger.debug(tn.read_all())
-        except Exception, e:
+        except Exception as e:
            self.logger.error(str(e))
        finally:
            self.telnet_lock.release()
@@ -182,7 +182,7 @@ class TelnetLiquidsoap:
            self.logger.debug(tn.read_all())
            self.current_prebuffering_stream_id = None
-        except Exception, e:
+        except Exception as e:
            self.logger.error(str(e))
        finally:
            self.telnet_lock.release()
@@ -205,7 +205,7 @@ class TelnetLiquidsoap:
            self.logger.debug(tn.read_all())
            self.current_prebuffering_stream_id = media_item['row_id']
-        except Exception, e:
+        except Exception as e:
            self.logger.error(str(e))
        finally:
            self.telnet_lock.release()
@@ -225,7 +225,7 @@ class TelnetLiquidsoap:
            self.logger.debug("stream_id: %s" % stream_id)
            return stream_id
-        except Exception, e:
+        except Exception as e:
            self.logger.error(str(e))
        finally:
            self.telnet_lock.release()
@@ -246,7 +246,7 @@ class TelnetLiquidsoap:
            tn.write(command)
            tn.write('exit\n')
            tn.read_all()
-        except Exception, e:
+        except Exception as e:
            self.logger.error(traceback.format_exc())
        finally:
            self.telnet_lock.release()
@@ -263,7 +263,7 @@ class TelnetLiquidsoap:
            tn.write('exit\n')
            tn.read_all()
-        except Exception, e:
+        except Exception as e:
            self.logger.error(str(e))
        finally:
            self.telnet_lock.release()

View File

@@ -1,9 +1,9 @@
-from __future__ import print_function
-from pypoliqqueue import PypoLiqQueue
+from .pypoliqqueue import PypoLiqQueue
-from telnetliquidsoap import DummyTelnetLiquidsoap, TelnetLiquidsoap
+from .telnetliquidsoap import DummyTelnetLiquidsoap, TelnetLiquidsoap
-from Queue import Queue
+from queue import Queue
from threading import Lock
import sys

View File

@@ -1,5 +1,5 @@
import threading
-import pypofetch
+from . import pypofetch
def __timeout(func, timeout_duration, default, args, kwargs):

View File

@@ -17,7 +17,7 @@ else:
    for root, dirnames, filenames in os.walk('pypo'):
        for filename in filenames:
            pypo_files.append(os.path.join(root, filename))
data_files = [
    ('/etc/init', ['install/upstart/airtime-playout.conf.template']),
    ('/etc/init', ['install/upstart/airtime-liquidsoap.conf.template']),
@@ -55,12 +55,11 @@ setup(name='airtime-playout',
        'future',
        'kombu',
        'mutagen',
-        'poster',
+        'poster3',
        'PyDispatcher',
        'pyinotify',
        'pytz',
        'requests',
-        'wsgiref',
        'defusedxml'
    ],
    zip_safe=False,
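The dependency changes follow from the same port: wsgiref ships with the Python 3 standard library, and poster only supports Python 2, so its Python 3 fork poster3 is listed instead. A trimmed, illustrative setup() call showing that shape of install_requires; only the dependency list mirrors the hunk above, the name and other metadata are placeholders rather than the real airtime-playout setup.py:

# Illustrative setup() sketch; placeholder metadata, Python 3 dependency list.
from setuptools import setup

setup(
    name='example-playout',          # the real package is airtime-playout
    version='0.0.0',
    packages=[],
    install_requires=[
        'future',
        'kombu',
        'mutagen',
        'poster3',       # Python 3 replacement for the Python 2-only 'poster'
        'PyDispatcher',
        'pyinotify',
        'pytz',
        'requests',
        # 'wsgiref' dropped: it is part of the Python 3 standard library
        'defusedxml',
    ],
    zip_safe=False,
)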

View File

@@ -1,6 +1,6 @@
from pypopush import PypoPush
from threading import Lock
-from Queue import Queue
+from queue import Queue
import datetime

View File

@@ -65,7 +65,7 @@ echo -e "The following files, directories, and services will be removed:\n"
for i in ${FILES[*]}; do
    echo $i
done
-echo "pip airtime-playout"
+echo "pip3 airtime-playout"
echo -e "\nIf your web root is not listed, you will need to manually remove it."
@@ -103,6 +103,6 @@ if [[ "$IN" = "y" || "$IN" = "Y" ]]; then
    dropAirtimeDatabase
fi
-pip uninstall -y airtime-playout airtime-media-monitor airtime-analyzer
+pip3 uninstall -y airtime-playout airtime-media-monitor airtime-analyzer
service apache2 restart
echo "...Done"