chore: add pyupgrade pre-commit hook
- add --py3-plus flag to pyupgrade hook
- add --py36-plus flag to pyupgrade hook
parent 21aaf9bca1
commit 32cb67806a
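pyupgrade rewrites older Python idioms into the syntax available on the targeted minimum version; with --py3-plus and --py36-plus that covers, among other things, f-strings, argument-free super(), set/dict comprehensions, bytes literals, dropping the redundant (object) base class, and collapsing the old OSError aliases (IOError, select.error). As a minimal, hypothetical sketch (not code from this repository), these are the kinds of lines the hook flags and what it turns them into:

# Illustration only -- not a file in this repo; shows idioms the pyupgrade hook rewrites.
class Greeter(object):                          # becomes: class Greeter:
    def __init__(self, name):
        super(Greeter, self).__init__()         # becomes: super().__init__()
        self.name = name

    def hello(self):
        return "Hello, {}!".format(self.name)   # becomes: f"Hello, {self.name}!"


def read_bytes(path):
    try:
        with open(path, "rb") as fh:
            return fh.read()
    except IOError:                             # becomes: except OSError:
        return b""


ids = set([n for n in range(3)])                # becomes: {n for n in range(3)}

Once the hook is installed, running pre-commit run pyupgrade --all-files applies these rewrites across the whole repository, which is presumably how the changes below were produced.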
@@ -34,6 +34,12 @@ repos:
       - id: prettier
         files: \.(md|yml|yaml|json)$

+  - repo: https://github.com/asottile/pyupgrade
+    rev: v2.31.0
+    hooks:
+      - id: pyupgrade
+        args: [--py3-plus, --py36-plus]
+
   - repo: https://github.com/psf/black
     rev: 21.12b0
     hooks:

@@ -77,7 +77,7 @@ class MessageListener:
                 self.wait_for_messages()
             except (KeyboardInterrupt, SystemExit):
                 break  # Break out of the while loop and exit the application
-            except select.error:
+            except OSError:
                 pass
             except pika.exceptions.AMQPError as e:
                 if self._shutdown:

@@ -141,9 +141,7 @@ class MessageListener:
         Here we parse the message, spin up an analyzer process, and report the
         metadata back to the Airtime web application (or report an error).
         """
-        logger.info(
-            " - Received '%s' on routing_key '%s'" % (body, method_frame.routing_key)
-        )
+        logger.info(f" - Received '{body}' on routing_key '{method_frame.routing_key}'")

         # Declare all variables here so they exist in the exception handlers below, no matter what.
         audio_file_path = ""

@@ -53,7 +53,7 @@ def process_http_requests(ipc_queue, http_retry_queue_path):
     try:
         with open(http_retry_queue_path, "rb") as pickle_file:
             retry_queue = pickle.load(pickle_file)
-    except IOError as e:
+    except OSError as e:
         if e.errno == 2:
             pass
         else:

@@ -25,7 +25,7 @@ def analyze_metadata(filename: str, metadata: Dict[str, Any]):
             "metadata must be a dict. Was of type " + type(metadata).__name__
         )
     if not os.path.exists(filename):
-        raise FileNotFoundError("audio file not found: {}".format(filename))
+        raise FileNotFoundError(f"audio file not found: {filename}")

     # Airtime <= 2.5.x nonsense:
     metadata["ftype"] = "audioclip"

@@ -186,6 +186,6 @@ def _analyze_wave(filename, metadata):
         metadata["length_seconds"] = length_seconds
         metadata["cueout"] = metadata["length"]
     except wave.Error as ex:
-        logger.error("Invalid WAVE file: {}".format(str(ex)))
+        logger.error(f"Invalid WAVE file: {str(ex)}")
         raise
     return metadata

@@ -46,7 +46,7 @@ def organise_file(audio_file_path, import_directory, original_filename, metadata
             "metadata must be a dict. Was of type " + type(metadata).__name__
         )
     if not os.path.exists(audio_file_path):
-        raise FileNotFoundError("audio file not found: {}".format(audio_file_path))
+        raise FileNotFoundError(f"audio file not found: {audio_file_path}")

     # Import the file over to it's final location.
     # TODO: Also, handle the case where the move fails and write some code

@@ -80,7 +80,7 @@ def organise_file(audio_file_path, import_directory, original_filename, metadata
         metadata["full_path"] = final_file_path
         return metadata
     base_file_path, file_extension = os.path.splitext(final_file_path)
-    final_file_path = "%s_%s%s" % (
+    final_file_path = "{}_{}{}".format(
         base_file_path,
         time.strftime("%m-%d-%Y-%H-%M-%S", time.localtime()),
         file_extension,

@@ -89,7 +89,7 @@ def organise_file(audio_file_path, import_directory, original_filename, metadata
     # If THAT path exists, append a UUID instead:
     while os.path.exists(final_file_path):
         base_file_path, file_extension = os.path.splitext(final_file_path)
-        final_file_path = "%s_%s%s" % (
+        final_file_path = "{}_{}{}".format(
             base_file_path,
             str(uuid.uuid4()),
             file_extension,

@@ -99,7 +99,7 @@ def organise_file(audio_file_path, import_directory, original_filename, metadata
     mkdir_p(os.path.dirname(final_file_path))

     # Move the file into its final destination directory
-    logger.debug("Moving %s to %s" % (audio_file_path, final_file_path))
+    logger.debug(f"Moving {audio_file_path} to {final_file_path}")
     shutil.move(audio_file_path, final_file_path)

     metadata["full_path"] = final_file_path

@@ -60,7 +60,7 @@ class User(AbstractBaseUser):
     objects = UserManager()

     def get_full_name(self):
-        return "{} {}".format(self.first_name, self.last_name)
+        return f"{self.first_name} {self.last_name}"

     def get_short_name(self):
         return self.first_name

@@ -38,7 +38,7 @@ def get_permission_for_view(request, view):
     try:
         permission_type = REQUEST_PERMISSION_TYPE_MAP[request.method]
         if view.__class__.__name__ == "APIRootView":
-            return "{}_apiroot".format(permission_type)
+            return f"{permission_type}_apiroot"
         model = view.model_permission_name
         own_obj = get_own_obj(request, view)
         return "{permission_type}_{own_obj}{model}".format(

@@ -14,10 +14,10 @@ class ManagedModelTestRunner(DiscoverRunner):
         self.unmanaged_models = [m for m in apps.get_models() if not m._meta.managed]
         for m in self.unmanaged_models:
             m._meta.managed = True
-        super(ManagedModelTestRunner, self).setup_test_environment(*args, **kwargs)
+        super().setup_test_environment(*args, **kwargs)

     def teardown_test_environment(self, *args, **kwargs):
-        super(ManagedModelTestRunner, self).teardown_test_environment(*args, **kwargs)
+        super().teardown_test_environment(*args, **kwargs)
         # reset unmanaged models
         for m in self.unmanaged_models:
             m._meta.managed = False

@@ -28,7 +28,7 @@ class TestIsSystemTokenOrUser(APITestCase):
         token = "doesnotexist"
         request = APIRequestFactory().get(self.path)
         request.user = AnonymousUser()
-        request.META["Authorization"] = "Api-Key {token}".format(token=token)
+        request.META["Authorization"] = f"Api-Key {token}"
         allowed = IsSystemTokenOrUser().has_permission(request, None)
         self.assertFalse(allowed)

@@ -36,7 +36,7 @@ class TestIsSystemTokenOrUser(APITestCase):
         token = settings.CONFIG.get("general", "api_key")
         request = APIRequestFactory().get(self.path)
         request.user = AnonymousUser()
-        request.META["Authorization"] = "Api-Key {token}".format(token=token)
+        request.META["Authorization"] = f"Api-Key {token}"
         allowed = IsSystemTokenOrUser().has_permission(request, None)
         self.assertTrue(allowed)

@@ -81,7 +81,7 @@ class TestPermissions(APITestCase):
         for model in self.URLS:
             response = self.logged_in_test_model(model, "guest", GUEST, self.client.get)
             self.assertEqual(
-                response.status_code, 200, msg="Invalid for model {}".format(model)
+                response.status_code, 200, msg=f"Invalid for model {model}"
             )

     def test_guest_permissions_failure(self):

@@ -90,14 +90,14 @@ class TestPermissions(APITestCase):
                 model, "guest", GUEST, self.client.post
             )
             self.assertEqual(
-                response.status_code, 403, msg="Invalid for model {}".format(model)
+                response.status_code, 403, msg=f"Invalid for model {model}"
             )

     def test_dj_get_permissions(self):
         for model in self.URLS:
             response = self.logged_in_test_model(model, "dj", DJ, self.client.get)
             self.assertEqual(
-                response.status_code, 200, msg="Invalid for model {}".format(model)
+                response.status_code, 200, msg=f"Invalid for model {model}"
             )

     def test_dj_post_permissions(self):

@@ -110,7 +110,7 @@ class TestPermissions(APITestCase):
             last_name="user",
         )
         f = baker.make("libretime_api.File", owner=user)
-        model = "files/{}".format(f.id)
+        model = f"files/{f.id}"
         path = self.path.format(model)
         self.client.login(username="test-dj", password="test")
         response = self.client.patch(path, {"name": "newFilename"})

@@ -126,7 +126,7 @@ class TestPermissions(APITestCase):
             last_name="user",
         )
         f = baker.make("libretime_api.File")
-        model = "files/{}".format(f.id)
+        model = f"files/{f.id}"
         path = self.path.format(model)
         self.client.login(username="test-dj", password="test")
         response = self.client.patch(path, {"name": "newFilename"})

@@ -18,13 +18,13 @@ class TestFileViewSet(APITestCase):

     def test_invalid(self):
         path = self.path.format(id="a")
-        self.client.credentials(HTTP_AUTHORIZATION="Api-Key {}".format(self.token))
+        self.client.credentials(HTTP_AUTHORIZATION=f"Api-Key {self.token}")
         response = self.client.get(path)
         self.assertEqual(response.status_code, 400)

     def test_does_not_exist(self):
         path = self.path.format(id="1")
-        self.client.credentials(HTTP_AUTHORIZATION="Api-Key {}".format(self.token))
+        self.client.credentials(HTTP_AUTHORIZATION=f"Api-Key {self.token}")
         response = self.client.get(path)
         self.assertEqual(response.status_code, 404)

@@ -40,7 +40,7 @@ class TestFileViewSet(APITestCase):
             filepath="song.mp3",
         )
         path = self.path.format(id=str(f.pk))
-        self.client.credentials(HTTP_AUTHORIZATION="Api-Key {}".format(self.token))
+        self.client.credentials(HTTP_AUTHORIZATION=f"Api-Key {self.token}")
         response = self.client.get(path)
         self.assertEqual(response.status_code, 200)

@@ -78,7 +78,7 @@ class TestScheduleViewSet(APITestCase):
             instance=show,
             file=f,
         )
-        self.client.credentials(HTTP_AUTHORIZATION="Api-Key {}".format(self.token))
+        self.client.credentials(HTTP_AUTHORIZATION=f"Api-Key {self.token}")
         response = self.client.get(self.path)
         self.assertEqual(response.status_code, 200)
         result = response.json()

@@ -111,7 +111,7 @@ class TestScheduleViewSet(APITestCase):
             instance=show,
             file=f,
         )
-        self.client.credentials(HTTP_AUTHORIZATION="Api-Key {}".format(self.token))
+        self.client.credentials(HTTP_AUTHORIZATION=f"Api-Key {self.token}")
         response = self.client.get(self.path)
         self.assertEqual(response.status_code, 200)
         result = response.json()

@@ -157,7 +157,7 @@ class TestScheduleViewSet(APITestCase):
             instance=show,
             file=f,
         )
-        self.client.credentials(HTTP_AUTHORIZATION="Api-Key {}".format(self.token))
+        self.client.credentials(HTTP_AUTHORIZATION=f"Api-Key {self.token}")
         response = self.client.get(self.path, {"is_valid": True})
         self.assertEqual(response.status_code, 200)
         result = response.json()

@@ -203,13 +203,13 @@ class TestScheduleViewSet(APITestCase):
             instance=show,
             file=f,
         )
-        self.client.credentials(HTTP_AUTHORIZATION="Api-Key {}".format(self.token))
+        self.client.credentials(HTTP_AUTHORIZATION=f"Api-Key {self.token}")
         range_start = (filter_point - timedelta(minutes=1)).isoformat(
             timespec="seconds"
         )
         range_end = (filter_point + timedelta(minutes=1)).isoformat(timespec="seconds")
         response = self.client.get(
-            self.path, {"starts__range": "{},{}".format(range_start, range_end)}
+            self.path, {"starts__range": f"{range_start},{range_end}"}
         )
         self.assertEqual(response.status_code, 200)
         result = response.json()

@@ -10,7 +10,7 @@ def read_config_file(config_filepath):
     try:
         with open(config_filepath, encoding="utf-8") as config_file:
             config.read_file(config_file)
-    except IOError as error:
+    except OSError as error:
         print(
             f"Unable to read config file at {config_filepath}: {error.strerror}",
             file=sys.stderr,

@@ -35,7 +35,7 @@ class IncompleteUrl(UrlException):
         self.url = url

     def __str__(self):
-        return "Incomplete url: '{}'".format(self.url)
+        return f"Incomplete url: '{self.url}'"


 class UrlBadParam(UrlException):

@@ -44,7 +44,7 @@ class UrlBadParam(UrlException):
         self.param = param

     def __str__(self):
-        return "Bad param '{}' passed into url: '{}'".format(self.param, self.url)
+        return f"Bad param '{self.param}' passed into url: '{self.url}'"


 class KeyAuth(AuthBase):

@@ -52,7 +52,7 @@ class KeyAuth(AuthBase):
         self.key = key

     def __call__(self, r):
-        r.headers["Authorization"] = "Api-Key {}".format(self.key)
+        r.headers["Authorization"] = f"Api-Key {self.key}"
         return r


@@ -195,7 +195,7 @@ class RequestProvider:
         if attr in self:
             return self.requests[attr]
         else:
-            return super(RequestProvider, self).__getattribute__(attr)
+            return super().__getattribute__(attr)


 def time_in_seconds(value):

@@ -126,7 +126,7 @@ api_config["bin_dir"] = "/usr/lib/airtime/api_clients/"
 ################################################################################
 # Airtime API Version 1 Client
 ################################################################################
-class AirtimeApiClient(object):
+class AirtimeApiClient:
     def __init__(self, logger=None, config_path="/etc/airtime/airtime.conf"):
         if logger is None:
             self.logger = logging

@@ -282,7 +282,7 @@ class AirtimeApiClient(object):
         if self.config["general"]["base_dir"].startswith("/"):
             self.config["general"]["base_dir"] = self.config["general"]["base_dir"][1:]
         protocol = get_protocol(self.config)
-        url = "%s://%s:%s/%s%s/%s" % (
+        url = "{}://{}:{}/{}{}/{}".format(
             protocol,
             self.config["general"]["base_url"],
             str(self.config["general"]["base_port"]),

@@ -298,7 +298,7 @@ class AirtimeApiClient(object):
         if self.config["general"]["base_dir"].startswith("/"):
             self.config["general"]["base_dir"] = self.config["general"]["base_dir"][1:]
         protocol = get_protocol(self.config)
-        url = "%s://%s:@%s:%s/%s/%s" % (
+        url = "{}://{}:@{}:{}/{}/{}".format(
             protocol,
             self.config["general"]["api_key"],
             self.config["general"]["base_url"],

@@ -348,9 +348,7 @@ class AirtimeApiClient(object):
         # Note that we must prefix every key with: mdX where x is a number
         # Is there a way to format the next line a little better? The
         # parenthesis make the code almost unreadable
-        md_list = dict(
-            (("md%d" % i), json.dumps(md)) for i, md in enumerate(valid_actions)
-        )
+        md_list = {("md%d" % i): json.dumps(md) for i, md in enumerate(valid_actions)}
         # For testing we add the following "dry" parameter to tell the
         # controller not to actually do any changes
         if dry:

@@ -53,7 +53,7 @@ class AirtimeApiClient:
         str_end = end_time.isoformat(timespec="seconds")
         data = self.services.schedule_url(
             params={
-                "ends__range": ("{}Z,{}Z".format(str_current, str_end)),
+                "ends__range": (f"{str_current}Z,{str_end}Z"),
                 "is_valid": True,
                 "playout_status__gt": 0,
             }

@@ -104,7 +104,7 @@ class AirtimeApiClient:
             # Stream events are instantaneous
             current["end"] = current["start"]

-            result["{}_0".format(key)] = {
+            result[f"{key}_0"] = {
                 "id": current["id"],
                 "type": "stream_output_start",
                 "start": current["start"],

@@ -123,7 +123,7 @@ class AirtimeApiClient:
                 "independent_event": current["independent_event"],
             }

-            result["{}_0".format(end.isoformat())] = {
+            result[f"{end.isoformat()}_0"] = {
                 "type": "stream_output_end",
                 "start": current["end"],
                 "end": current["end"],

@@ -20,5 +20,5 @@ try:
     )

 except Exception as e:
-    print("exception: {}".format(e))
+    print(f"exception: {e}")
     sys.exit(1)

@@ -21,15 +21,15 @@ def generate_liquidsoap_config(ss, log_filepath: Optional[Path]):
         try:
             if not "port" in key and not "bitrate" in key:  # Stupid hack
                 raise ValueError()
-            str_buffer = "%s = %s\n" % (key, int(value))
+            str_buffer = f"{key} = {int(value)}\n"
         except ValueError:
             try:  # Is it a boolean?
                 if value == "true" or value == "false":
-                    str_buffer = "%s = %s\n" % (key, value.lower())
+                    str_buffer = f"{key} = {value.lower()}\n"
                 else:
                     raise ValueError()  # Just drop into the except below
             except:  # Everything else is a string
-                str_buffer = '%s = "%s"\n' % (key, value)
+                str_buffer = f'{key} = "{value}"\n'

         fh.write(str_buffer)
     # ignore squashes unused variable errors from Liquidsoap

@@ -15,5 +15,5 @@ try:
     tn.read_all()

 except Exception as e:
-    print("Error loading config file: {}".format(e))
+    print(f"Error loading config file: {e}")
     sys.exit()

@@ -58,7 +58,7 @@ def liquidsoap_get_info(telnet_lock, host, port):
         tn = telnetlib.Telnet(host, port)
         msg = "version\n"
         tn.write(msg.encode("utf-8"))
-        tn.write("exit\n".encode("utf-8"))
+        tn.write(b"exit\n")
         response = tn.read_all().decode("utf-8")
     except Exception as e:
         logger.error(e)

@@ -192,7 +192,7 @@ class PypoFetch(Thread):
                     self.config.playout.liquidsoap_host,
                     self.config.playout.liquidsoap_port,
                 )
-                tn.write("exit\n".encode("utf-8"))
+                tn.write(b"exit\n")
                 tn.read_all()
                 logger.info("Liquidsoap is up and running")
                 break

@@ -237,11 +237,11 @@ class PypoFetch(Thread):
             logger.info(boot_up_time_command)
             tn.write(boot_up_time_command)

-            connection_status = ("streams.connection_status\n").encode("utf-8")
+            connection_status = b"streams.connection_status\n"
             logger.info(connection_status)
             tn.write(connection_status)

-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")

             output = tn.read_all()
         except Exception as e:

@@ -280,7 +280,7 @@ class PypoFetch(Thread):
             command = ("vars.stream_metadata_type %s\n" % stream_format).encode("utf-8")
             logger.info(command)
             tn.write(command)
-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             tn.read_all()
         except Exception as e:
             logger.exception(e)

@@ -300,7 +300,7 @@ class PypoFetch(Thread):
             command = ("vars.default_dj_fade %s\n" % fade).encode("utf-8")
             logger.info(command)
             tn.write(command)
-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             tn.read_all()
         except Exception as e:
             logger.exception(e)

@@ -321,7 +321,7 @@ class PypoFetch(Thread):
             command = ("vars.station_name %s\n" % station_name).encode("utf-8")
             logger.info(command)
             tn.write(command)
-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             tn.read_all()
         except Exception as e:
             logger.exception(e)

@@ -53,7 +53,7 @@ class PypoFile(Thread):
         media_item["file_ready"] = not do_copy

         if do_copy:
-            logger.info("copying from %s to local cache %s" % (src, dst))
+            logger.info(f"copying from {src} to local cache {dst}")
             try:
                 with open(dst, "wb") as handle:
                     logger.info(media_item)

@@ -82,7 +82,7 @@ class PypoFile(Thread):

                 media_item["file_ready"] = True
             except Exception as e:
-                logger.error("Could not copy from %s to %s" % (src, dst))
+                logger.error(f"Could not copy from {src} to {dst}")
                 logger.error(e)

     def report_file_size_and_md5_to_api(self, file_path, file_id):

@@ -97,7 +97,7 @@ class PypoFile(Thread):
                         break
                     m.update(data)
             md5_hash = m.hexdigest()
-        except (OSError, IOError) as e:
+        except OSError as e:
             file_size = 0
             logger.error(
                 "Error getting file size and md5 hash for file id %s" % file_id

@@ -130,7 +130,7 @@ class PypoLiquidsoap:
             x for x in scheduled_now if x["type"] == eventtypes.STREAM_OUTPUT_START
         ]

-        schedule_ids = set([x["row_id"] for x in scheduled_now_files])
+        schedule_ids = {x["row_id"] for x in scheduled_now_files}

         row_id_map = {}
         liq_queue_ids = set()

@@ -68,7 +68,7 @@ class ShowRecorder(Thread):
         filename = filename.replace(" ", "-")

         joined_path = os.path.join(RECORD_DIR, filename)
-        filepath = "%s.%s" % (joined_path, self.config.playout.record_file_format)
+        filepath = f"{joined_path}.{self.config.playout.record_file_format}"

         br = self.config.playout.record_bitrate
         sr = self.config.playout.record_samplerate

@@ -77,7 +77,7 @@ class ShowRecorder(Thread):

         # -f:16,2,44100
         # -b:256
-        command = "ecasound -f:%s,%s,%s -i alsa -o %s,%s000 -t:%s" % (
+        command = "ecasound -f:{},{},{} -i alsa -o {},{}000 -t:{}".format(
             ss,
             c,
             sr,

@@ -145,7 +145,9 @@ class ShowRecorder(Thread):
         recorded_file = mutagen.File(filepath, easy=True)
         recorded_file["artist"] = artist
         recorded_file["date"] = full_date
-        recorded_file["title"] = "%s-%s-%s" % (self.show_name, full_date, full_time)
+        recorded_file["title"] = "{}-{}-{}".format(
+            self.show_name, full_date, full_time
+        )
         # You cannot pass ints into the metadata of a file. Even tracknumber needs to be a string
         recorded_file["tracknumber"] = self.show_instance
         recorded_file.save()

@@ -240,7 +242,7 @@ class Recorder(Thread):
             next_show = getDateTimeObj(start_time)

             delta = next_show - tnow
-            s = "%s.%s" % (delta.seconds, delta.microseconds)
+            s = f"{delta.seconds}.{delta.microseconds}"
             out = float(s)

             if out < 5:

@@ -76,7 +76,7 @@ class TelnetLiquidsoap:
             logger.debug(msg)
             tn.write(msg.encode("utf-8"))

-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             logger.debug(tn.read_all().decode("utf-8"))
         except Exception:
             raise

@@ -93,7 +93,7 @@ class TelnetLiquidsoap:
             logger.debug(msg)
             tn.write(msg.encode("utf-8"))

-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             logger.debug(tn.read_all().decode("utf-8"))
         except Exception:
             raise

@@ -110,7 +110,7 @@ class TelnetLiquidsoap:

             tn = self.__connect()
             annotation = create_liquidsoap_annotation(media_item)
-            msg = "%s.push %s\n" % (queue_id, annotation)
+            msg = f"{queue_id}.push {annotation}\n"
             logger.debug(msg)
             tn.write(msg.encode("utf-8"))

@@ -119,7 +119,7 @@ class TelnetLiquidsoap:
             tn.write(msg.encode("utf-8"))
             logger.debug(msg)

-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             logger.debug(tn.read_all().decode("utf-8"))
         except Exception:
             raise

@@ -141,7 +141,7 @@ class TelnetLiquidsoap:
             logger.debug(msg)
             tn.write(msg.encode("utf-8"))

-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             logger.debug(tn.read_all().decode("utf-8"))

         except Exception as e:

@@ -161,7 +161,7 @@ class TelnetLiquidsoap:
             logger.debug(msg)
             tn.write(msg.encode("utf-8"))

-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             logger.debug(tn.read_all().decode("utf-8"))

         except Exception as e:

@@ -184,7 +184,7 @@ class TelnetLiquidsoap:
             logger.debug(msg)
             tn.write(msg.encode("utf-8"))

-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             logger.debug(tn.read_all().decode("utf-8"))

             self.current_prebuffering_stream_id = None

@@ -208,7 +208,7 @@ class TelnetLiquidsoap:
             logger.debug(msg)
             tn.write(msg.encode("utf-8"))

-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             logger.debug(tn.read_all().decode("utf-8"))

             self.current_prebuffering_stream_id = media_item["row_id"]

@@ -228,7 +228,7 @@ class TelnetLiquidsoap:
             logger.debug(msg)
             tn.write(msg.encode("utf-8"))

-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             stream_id = tn.read_all().decode("utf-8").splitlines()[0]
             logger.debug("stream_id: %s" % stream_id)

@@ -253,7 +253,7 @@ class TelnetLiquidsoap:
             tn = telnetlib.Telnet(self.ls_host, self.ls_port)
             logger.info(command)
             tn.write(command.encode("utf-8"))
-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             tn.read_all().decode("utf-8")
         except Exception as e:
             logger.error(traceback.format_exc())

@@ -272,7 +272,7 @@ class TelnetLiquidsoap:
                 i = i.encode("utf-8")
                 tn.write(i)

-            tn.write("exit\n".encode("utf-8"))
+            tn.write(b"exit\n")
             tn.read_all().decode("utf-8")
         except Exception as e:
             logger.error(str(e))

@@ -311,10 +311,10 @@ class DummyTelnetLiquidsoap:
         try:
             self.telnet_lock.acquire()

-            logger.info("Pushing %s to queue %s" % (media_item, queue_id))
+            logger.info(f"Pushing {media_item} to queue {queue_id}")
             from datetime import datetime

-            print("Time now: {:s}".format(datetime.utcnow()))
+            print(f"Time now: {datetime.utcnow():s}")

             annotation = create_liquidsoap_annotation(media_item)
             self.liquidsoap_mock_queues[queue_id].append(annotation)

@@ -331,7 +331,7 @@ class DummyTelnetLiquidsoap:
             logger.info("Purging queue %s" % queue_id)
             from datetime import datetime

-            print("Time now: {:s}".format(datetime.utcnow()))
+            print(f"Time now: {datetime.utcnow():s}")

         except Exception:
             raise

@@ -46,7 +46,7 @@ plq.daemon = True
 plq.start()


-print("Time now: {:s}".format(datetime.utcnow()))
+print(f"Time now: {datetime.utcnow():s}")

 media_schedule = {}

@@ -56,9 +56,9 @@ try:
         out = proc.communicate()[0].strip("\r\n")
         info = json.loads(out)
         data = {}
-        data["cuein"] = str("{0:f}".format(info["sound"][0][0]))
-        data["cueout"] = str("{0:f}".format(info["sound"][-1][1]))
-        data["length"] = str("{0:f}".format(info["file duration"]))
+        data["cuein"] = str("{:f}".format(info["sound"][0][0]))
+        data["cueout"] = str("{:f}".format(info["sound"][-1][1]))
+        data["length"] = str("{:f}".format(info["file duration"]))
         processed_data.append((f["id"], data))
         total += 1
         if total % 5 == 0:

@@ -66,7 +66,7 @@ def podcast_download(
         metadata_audiofile.save()
         filetypeinfo = metadata_audiofile.pprint()
         logger.info(
-            "filetypeinfo is {0}".format(filetypeinfo.encode("ascii", "ignore"))
+            "filetypeinfo is {}".format(filetypeinfo.encode("ascii", "ignore"))
         )
         re = requests.post(
             callback_url,

@@ -85,7 +85,7 @@ def podcast_download(
         obj["status"] = 1
     except Exception as e:
         obj["error"] = e.message
-        logger.info("Error during file download: {0}".format(e))
+        logger.info(f"Error during file download: {e}")
         logger.debug("Original Traceback: %s" % (traceback.format_exc(e)))
         obj["status"] = 0
     return json.dumps(obj)

@@ -98,7 +98,7 @@ def podcast_override_metadata(m, podcast_name, override, track_title):
     # if the album override option is enabled replace the album id3 tag with the podcast name even if the album tag contains data
     if override is True:
         logger.debug(
-            "overriding album name to {0} in podcast".format(
+            "overriding album name to {} in podcast".format(
                 podcast_name.encode("ascii", "ignore")
             )
         )

@@ -111,7 +111,7 @@ def podcast_override_metadata(m, podcast_name, override, track_title):
             m["album"]
         except KeyError:
             logger.debug(
-                "setting new album name to {0} in podcast".format(
+                "setting new album name to {} in podcast".format(
                     podcast_name.encode("ascii", "ignore")
                 )
             )