# sintonia/analyzer/libretime_analyzer/pipeline.py
""" Analyzes and imports an audio file into the Airtime library.
"""
from queue import Queue
from loguru import logger
from .steps.analyze_cuepoint import analyze_cuepoint
from .steps.analyze_metadata import analyze_metadata
from .steps.analyze_playability import UnplayableFileError, analyze_playability
from .steps.analyze_replaygain import analyze_replaygain
from .steps.organise_file import organise_file


class Pipeline:
    """Analyzes and imports an audio file into the Airtime library.

    This currently performs metadata extraction (eg. gets the ID3 tags from an MP3),
    then moves the file to the Airtime music library (stor/imported), and returns
    the results back to the parent process. This class is used in an isolated process
    so that if it crashes, it does not kill the entire airtime_analyzer daemon and
    the failure to import can be reported back to the web application.
    """

    # Import status code reported back to the web application when an
    # import fails (0 means success, see run_analysis below).
    IMPORT_STATUS_FAILED = 2

    @staticmethod
    def _check_arg_types(
        queue, audio_file_path, import_directory, original_filename, file_prefix
    ):
        """Validate run_analysis argument types, raising TypeError on the first mismatch."""
        if not isinstance(queue, Queue):
            raise TypeError("queue must be a Queue.Queue()")
        # Every path/name argument must be a (unicode) string.
        for arg_name, value in (
            ("audio_file_path", audio_file_path),
            ("import_directory", import_directory),
            ("original_filename", original_filename),
            ("file_prefix", file_prefix),
        ):
            if not isinstance(value, str):
                raise TypeError(
                    arg_name
                    + " must be unicode. Was of type "
                    + type(value).__name__
                    + " instead."
                )

    @staticmethod
    def run_analysis(
        queue,
        audio_file_path,
        import_directory,
        original_filename,
        storage_backend,
        file_prefix,
    ):
        """Analyze and import an audio file, and put all extracted metadata into queue.

        Keyword arguments:
            queue: A multiprocessing.queues.Queue which will be used to pass the
                   extracted metadata back to the parent process.
            audio_file_path: Path on disk to the audio file to analyze.
            import_directory: Path to the final Airtime "import" directory where
                              we will move the file.
            original_filename: The original filename of the file, which we'll try to
                               preserve. The file at audio_file_path typically has a
                               temporary randomly generated name, which is why we want
                               to know what the original name was.
            storage_backend: String indicating the storage backend (amazon_s3 or file).
                             NOTE(review): currently accepted but not used by any step
                             in this function — presumably consumed elsewhere; confirm.
            file_prefix:

        Raises:
            TypeError: If queue is not a Queue or a string argument has the wrong type.
            UnplayableFileError: If the file fails the playability check.
        """
        # Created before the try block so the UnplayableFileError handler can
        # always annotate it, even if a step failed before metadata was built.
        metadata = {}
        try:
            Pipeline._check_arg_types(
                queue, audio_file_path, import_directory, original_filename, file_prefix
            )

            # Analyze the audio file we were told to analyze:
            # First, we extract the ID3 tags and other metadata:
            metadata["file_prefix"] = file_prefix
            metadata = analyze_metadata(audio_file_path, metadata)
            metadata = analyze_cuepoint(audio_file_path, metadata)
            metadata = analyze_replaygain(audio_file_path, metadata)
            metadata = analyze_playability(audio_file_path, metadata)
            metadata = organise_file(
                audio_file_path, import_directory, original_filename, metadata
            )
            metadata["import_status"] = 0  # Successfully imported

            # Note that the queue we're putting the results into is our interprocess
            # communication back to the main process.
            # Pass all the file metadata back to the main analyzer process, which then
            # passes it back to the Airtime web application.
            queue.put(metadata)
        except UnplayableFileError as e:
            logger.exception(e)
            # NOTE(review): the annotated failure metadata is not queued here; the
            # failure is reported to the parent via the re-raised exception.
            metadata["import_status"] = Pipeline.IMPORT_STATUS_FAILED
            metadata["reason"] = "The file could not be played."
            raise  # Bare raise preserves the original traceback without an extra frame.
        except Exception as e:
            # Ensures the traceback for this child process gets written to our log files:
            logger.exception(e)
            raise