2020-01-21 08:13:42 +01:00
|
|
|
""" Analyzes and imports an audio file into the Airtime library.
|
2014-04-03 22:13:26 +02:00
|
|
|
"""
|
2020-01-21 08:13:42 +01:00
|
|
|
from queue import Queue
|
2021-06-03 15:20:39 +02:00
|
|
|
|
2022-01-17 09:26:30 +01:00
|
|
|
from loguru import logger
|
|
|
|
|
2022-01-17 20:31:43 +01:00
|
|
|
from .steps.analyze_cuepoint import analyze_cuepoint
|
|
|
|
from .steps.analyze_metadata import analyze_metadata
|
|
|
|
from .steps.analyze_playability import UnplayableFileError, analyze_playability
|
|
|
|
from .steps.analyze_replaygain import analyze_replaygain
|
|
|
|
from .steps.organise_file import organise_file
|
2014-03-05 18:15:25 +01:00
|
|
|
|
2021-05-27 16:23:02 +02:00
|
|
|
|
2022-01-17 20:31:43 +01:00
|
|
|
class Pipeline:
    """Analyzes and imports an audio file into the Airtime library.

    This currently performs metadata extraction (eg. gets the ID3 tags from an MP3),
    then moves the file to the Airtime music library (stor/imported), and returns
    the results back to the parent process. This class is used in an isolated process
    so that if it crashes, it does not kill the entire airtime_analyzer daemon and
    the failure to import can be reported back to the web application.
    """

    # Import status code reported back to the web application on failure.
    IMPORT_STATUS_FAILED = 2

    @staticmethod
    def _require_str(value, name):
        """Raise TypeError unless *value* is a str.

        Keeps the historical message format ("... must be unicode ...") so that
        callers matching on the message are unaffected.
        """
        if not isinstance(value, str):
            raise TypeError(
                name
                + " must be unicode. Was of type "
                + type(value).__name__
                + " instead."
            )

    @staticmethod
    def run_analysis(
        queue,
        audio_file_path,
        import_directory,
        original_filename,
        storage_backend,
        file_prefix,
    ):
        """Analyze and import an audio file, and put all extracted metadata into queue.

        Keyword arguments:
            queue: A multiprocessing.queues.Queue which will be used to pass the
                   extracted metadata back to the parent process.
            audio_file_path: Path on disk to the audio file to analyze.
            import_directory: Path to the final Airtime "import" directory where
                              we will move the file.
            original_filename: The original filename of the file, which we'll try to
                               preserve. The file at audio_file_path typically has a
                               temporary randomly generated name, which is why we want
                               to know what the original name was.
            storage_backend: String indicating the storage backend (amazon_s3 or file)
            file_prefix:
        """
        # Initialize before the try block so the UnplayableFileError handler
        # below can never hit a NameError, no matter where the exception fires.
        metadata = {}
        try:
            if not isinstance(queue, Queue):
                raise TypeError("queue must be a Queue.Queue()")
            Pipeline._require_str(audio_file_path, "audio_file_path")
            Pipeline._require_str(import_directory, "import_directory")
            Pipeline._require_str(original_filename, "original_filename")
            Pipeline._require_str(file_prefix, "file_prefix")

            # Analyze the audio file we were told to analyze:
            # First, we extract the ID3 tags and other metadata:
            metadata["file_prefix"] = file_prefix

            metadata = analyze_metadata(audio_file_path, metadata)
            metadata = analyze_cuepoint(audio_file_path, metadata)
            metadata = analyze_replaygain(audio_file_path, metadata)
            metadata = analyze_playability(audio_file_path, metadata)

            metadata = organise_file(
                audio_file_path, import_directory, original_filename, metadata
            )

            metadata["import_status"] = 0  # Successfully imported

            # Note that the queue we're putting the results into is our interprocess communication
            # back to the main process.

            # Pass all the file metadata back to the main analyzer process, which then passes
            # it back to the Airtime web application.
            queue.put(metadata)
        except UnplayableFileError as e:
            logger.exception(e)
            metadata["import_status"] = Pipeline.IMPORT_STATUS_FAILED
            metadata["reason"] = "The file could not be played."
            raise e
        except Exception as e:
            # Ensures the traceback for this child process gets written to our log files:
            logger.exception(e)
            raise e