Removed all ReplayGain and Silan stuff from pypo, plus a bugfix
* Ripped out all the ReplayGain and Silan analysis from pypo, since it's now implemented in airtime_analyzer. This fixes a bunch of race conditions.
* Also renamed the replaygain field to replay_gain in airtime_analyzer to match Airtime.
Parent: 4dd2768755
Commit: 3b1c776879
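Anything downstream that reads the analyzer's metadata dict has to follow the rename. A minimal sketch of the consumer side (the dict contents here are invented, not real analyzer output):

```python
# Illustrative only: made-up metadata dict showing the renamed key.
metadata = {"track_title": "Example Song", "replay_gain": -7.25}  # key was "replaygain" before this commit

gain_db = metadata["replay_gain"]
print("Applying ReplayGain of %s dB" % gain_db)
```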
@@ -24,7 +24,7 @@ class ReplayGainAnalyzer(Analyzer):
             rg_pos = results.find(filename_token, results.find("Calculating Replay Gain information")) + len(filename_token)
             db_pos = results.find(" dB", rg_pos)
             replaygain = results[rg_pos:db_pos]
-            metadata['replaygain'] = float(replaygain)
+            metadata['replay_gain'] = float(replaygain)
 
         except OSError as e: # replaygain was not found
             logging.warn("Failed to run: %s - %s. %s" % (command[0], e.strerror, "Do you have python-rgain installed?"))
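The extraction above is plain string slicing over the gain tool's console output. A minimal sketch of the same approach, using an invented output string and an invented filename_token (the real analyzer derives both at runtime):

```python
# Sketch only: sample_output and filename_token are made up for illustration.
sample_output = (
    "Calculating Replay Gain information for song.mp3...\n"
    "song.mp3: -7.25 dB\n"
)
filename_token = "song.mp3: "

# Find the gain value that follows the filename token, up to the " dB" suffix.
rg_pos = sample_output.find(filename_token,
                            sample_output.find("Calculating Replay Gain information")) + len(filename_token)
db_pos = sample_output.find(" dB", rg_pos)

print(float(sample_output[rg_pos:db_pos]))  # -7.25
```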
@@ -13,8 +13,8 @@ def check_default_metadata(metadata):
     '''
     tolerance = 0.30
     expected_replaygain = 5.0
-    print metadata['replaygain']
-    assert abs(metadata['replaygain'] - expected_replaygain) < tolerance
+    print metadata['replay_gain']
+    assert abs(metadata['replay_gain'] - expected_replaygain) < tolerance
 
 def test_missing_replaygain():
     old_rg = ReplayGainAnalyzer.REPLAYGAIN_EXECUTABLE
@@ -1,161 +0,0 @@
from subprocess import Popen, PIPE
import re
import os
import sys
import shutil
import tempfile
import logging


logger = logging.getLogger()

def get_process_output(command):
    """
    Run subprocess and return stdout
    """
    logger.debug(command)
    p = Popen(command, stdout=PIPE, stderr=PIPE)
    return p.communicate()[0].strip()

def run_process(command):
    """
    Run subprocess and return "return code"
    """
    p = Popen(command, stdout=PIPE, stderr=PIPE)
    return os.waitpid(p.pid, 0)[1]

def get_mime_type(file_path):
    """
    Attempts to get the mime type but will return prematurely if the process
    takes longer than 5 seconds. Note that this function should only be called
    for files which do not have a mp3/ogg/flac extension.
    """

    command = ['timeout', '5', 'file', '-b', '--mime-type', file_path]
    return get_process_output(command)

def duplicate_file(file_path):
    """
    Makes a duplicate of the file and returns the path of this duplicate file.
    """
    fsrc = open(file_path, 'r')
    fdst = tempfile.NamedTemporaryFile(delete=False)

    logger.info("Copying %s to %s" % (file_path, fdst.name))

    shutil.copyfileobj(fsrc, fdst)

    fsrc.close()
    fdst.close()

    return fdst.name

def get_file_type(file_path):
    file_type = None
    if re.search(r'mp3$', file_path, re.IGNORECASE):
        file_type = 'mp3'
    elif re.search(r'og(g|a)$', file_path, re.IGNORECASE):
        file_type = 'vorbis'
    elif re.search(r'm4a$', file_path, re.IGNORECASE):
        file_type = 'mp4'
    elif re.search(r'flac$', file_path, re.IGNORECASE):
        file_type = 'flac'
    else:
        mime_type = get_mime_type(file_path)
        if 'mpeg' in mime_type:
            file_type = 'mp3'
        elif 'ogg' in mime_type:
            file_type = 'vorbis'
        elif 'mp4' in mime_type:
            file_type = 'mp4'
        elif 'flac' in mime_type:
            file_type = 'flac'

    return file_type


def calculate_replay_gain(file_path):
    """
    This function accepts files of type mp3/ogg/flac and returns a calculated
    ReplayGain value in dB.
    If the value cannot be calculated for some reason, then we default to 0
    (Unity Gain).

    http://wiki.hydrogenaudio.org/index.php?title=ReplayGain_1.0_specification
    """

    try:
        """
        Making a duplicate is required because the ReplayGain extraction utilities we use
        make unwanted modifications to the file.
        """

        search = None
        temp_file_path = duplicate_file(file_path)

        file_type = get_file_type(file_path)
        nice_level = '19'

        if file_type:
            if file_type == 'mp3':
                if run_process(['which', 'mp3gain']) == 0:
                    command = ['nice', '-n', nice_level, 'mp3gain', '-q', temp_file_path]
                    out = get_process_output(command)
                    search = re.search(r'Recommended "Track" dB change: (.*)', \
                                       out)
                else:
                    logger.warn("mp3gain not found")
            elif file_type == 'vorbis':
                if run_process(['which', 'ogginfo']) == 0 and \
                        run_process(['which', 'vorbisgain']) == 0:
                    command = ['nice', '-n', nice_level, 'vorbisgain', '-q', '-f', temp_file_path]
                    run_process(command)

                    out = get_process_output(['ogginfo', temp_file_path])
                    search = re.search(r'REPLAYGAIN_TRACK_GAIN=(.*) dB', out)
                else:
                    logger.warn("vorbisgain/ogginfo not found")
            elif file_type == 'mp4':
                if run_process(['which', 'aacgain']) == 0:
                    command = ['nice', '-n', nice_level, 'aacgain', '-q', temp_file_path]
                    out = get_process_output(command)
                    search = re.search(r'Recommended "Track" dB change: (.*)', \
                                       out)
                else:
                    logger.warn("aacgain not found")
            elif file_type == 'flac':
                if run_process(['which', 'metaflac']) == 0:

                    command = ['nice', '-n', nice_level, 'metaflac', \
                               '--add-replay-gain', temp_file_path]
                    run_process(command)

                    command = ['nice', '-n', nice_level, 'metaflac', \
                               '--show-tag=REPLAYGAIN_TRACK_GAIN', \
                               temp_file_path]
                    out = get_process_output(command)
                    search = re.search(r'REPLAYGAIN_TRACK_GAIN=(.*) dB', out)
                else: logger.warn("metaflac not found")

    except Exception, e:
        logger.error(str(e))
    finally:
        #no longer need the temp file, simply remove it.
        try: os.remove(temp_file_path)
        except: pass

    replay_gain = 0
    if search:
        matches = search.groups()
        if len(matches) == 1:
            replay_gain = matches[0]
        else:
            logger.warn("Received more than 1 match in: '%s'" % str(matches))

    return replay_gain


# Example of running from command line:
# python replay_gain.py /path/to/filename.mp3
if __name__ == "__main__":
    print calculate_replay_gain(sys.argv[1])
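Several of the tool-specific branches in the removed module end with the same regex over a REPLAYGAIN_TRACK_GAIN tag line. A minimal sketch of that extraction with an invented sample line (real metaflac/ogginfo output may be formatted differently):

```python
import re

# Invented sample of a track-gain tag line as the removed code matched it.
sample_tag = "REPLAYGAIN_TRACK_GAIN=-6.54 dB"
match = re.search(r'REPLAYGAIN_TRACK_GAIN=(.*) dB', sample_tag)
if match:
    print(match.group(1))  # "-6.54"
```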
@@ -1,86 +0,0 @@
from threading import Thread

import traceback
import os
import time
import logging

from media.update import replaygain

class ReplayGainUpdater(Thread):
    """
    The purpose of the class is to query the server for a list of files which
    do not have a ReplayGain value calculated. This class will iterate over the
    list, calculate the values, update the server and repeat the process until
    the server reports there are no files left.

    This class will see heavy activity right after a 2.1->2.2 upgrade since 2.2
    introduces ReplayGain normalization. A fresh install of Airtime 2.2 will
    see this class not used at all since a file imported in 2.2 will
    automatically have its ReplayGain value calculated.
    """

    @staticmethod
    def start_reply_gain(apc):
        me = ReplayGainUpdater(apc)
        me.daemon = True
        me.start()

    def __init__(self, apc):
        Thread.__init__(self)
        self.api_client = apc
        self.logger = logging.getLogger()

    def main(self):
        raw_response = self.api_client.list_all_watched_dirs()
        if 'dirs' not in raw_response:
            self.logger.error("Could not get a list of watched directories \
                with a dirs attribute. Printing full request:")
            self.logger.error(raw_response)
            return

        directories = raw_response['dirs']

        for dir_id, dir_path in directories.iteritems():
            try:
                # keep getting few rows at a time for current music_dir (stor
                # or watched folder).
                total = 0
                while True:
                    # return a list of pairs where the first value is the
                    # file's database row id and the second value is the
                    # filepath
                    files = self.api_client.get_files_without_replay_gain_value(dir_id)
                    processed_data = []
                    for f in files:
                        full_path = os.path.join(dir_path, f['fp'])
                        processed_data.append((f['id'], replaygain.calculate_replay_gain(full_path)))
                        total += 1

                    try:
                        if len(processed_data):
                            self.api_client.update_replay_gain_values(processed_data)
                    except Exception as e:
                        self.logger.error(e)
                        self.logger.debug(traceback.format_exc())

                    if len(files) == 0: break
                self.logger.info("Processed: %d songs" % total)

            except Exception, e:
                self.logger.error(e)
                self.logger.debug(traceback.format_exc())

    def run(self):
        while True:
            try:
                self.logger.info("Running replaygain updater")
                self.main()
                # Sleep for 5 minutes in case new files have been added
            except Exception, e:
                self.logger.error('ReplayGainUpdater Exception: %s', traceback.format_exc())
                self.logger.error(e)
            time.sleep(60 * 5)

if __name__ == "__main__":
    rgu = ReplayGainUpdater()
    rgu.main()
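For reference, the removed updater batched its results as (id, gain) pairs before posting them. A sketch of that payload shape with invented values (update_replay_gain_values is the API-client method called in the code above):

```python
# Invented sample data; each entry pairs a file's database id with the gain
# computed by replaygain.calculate_replay_gain() (0 is the unity-gain fallback).
processed_data = [
    (1001, "-3.20"),
    (1002, 0),
]
# api_client.update_replay_gain_values(processed_data)
```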
@@ -1,94 +0,0 @@
from threading import Thread

import traceback
import time
import subprocess
import json


class SilanAnalyzer(Thread):
    """
    The purpose of the class is to query the server for a list of files which
    do not have a Silan value calculated. This class will iterate over the
    list, calculate the values, update the server and repeat the process until
    the server reports there are no files left.
    """

    @staticmethod
    def start_silan(apc, logger):
        me = SilanAnalyzer(apc, logger)
        me.start()

    def __init__(self, apc, logger):
        Thread.__init__(self)
        self.api_client = apc
        self.logger = logger

    def main(self):
        while True:
            # keep getting few rows at a time for current music_dir (stor
            # or watched folder).
            total = 0

            # return a list of pairs where the first value is the
            # file's database row id and the second value is the
            # filepath
            files = self.api_client.get_files_without_silan_value()
            total_files = len(files)
            if total_files == 0: return
            processed_data = []
            for f in files:
                full_path = f['fp']
                # silence detect (set default cue in and out)
                try:
                    data = {}
                    command = ['nice', '-n', '19', 'silan', '-b', '-f', 'JSON', full_path]
                    try:
                        proc = subprocess.Popen(command, stdout=subprocess.PIPE)
                        comm = proc.communicate()
                        if len(comm):
                            out = comm[0].strip('\r\n')
                            info = json.loads(out)
                            try: data['length'] = str('{0:f}'.format(info['file duration']))
                            except: pass
                            try: data['cuein'] = str('{0:f}'.format(info['sound'][0][0]))
                            except: pass
                            try: data['cueout'] = str('{0:f}'.format(info['sound'][-1][1]))
                            except: pass
                    except Exception, e:
                        self.logger.warn(str(command))
                        self.logger.warn(e)
                    processed_data.append((f['id'], data))
                    total += 1
                    if total % 5 == 0:
                        self.logger.info("Total %s / %s files have been processed.." % (total, total_files))
                except Exception, e:
                    self.logger.error(e)
                    self.logger.error(traceback.format_exc())

            try:
                self.api_client.update_cue_values_by_silan(processed_data)
            except Exception, e:
                self.logger.error(e)
                self.logger.error(traceback.format_exc())

            self.logger.info("Processed: %d songs" % total)

    def run(self):
        while True:
            try:
                self.logger.info("Running Silan analyzer")
                self.main()
            except Exception, e:
                self.logger.error('Silan Analyzer Exception: %s', traceback.format_exc())
                self.logger.error(e)
            self.logger.info("Sleeping for 5...")
            time.sleep(60 * 5)

if __name__ == "__main__":
    from api_clients import api_client
    import logging
    logging.basicConfig(level=logging.DEBUG)
    api_client = api_client.AirtimeApiClient()
    SilanAnalyzer.start_silan(api_client, logging)
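The analyzer above relies on silan's JSON output for cue points. A minimal sketch of that parsing with an invented, simplified payload (real `silan -b -f JSON` output contains more fields):

```python
import json

# Invented stand-in for `silan -b -f JSON <file>` output.
sample = '{"file duration": 215.379, "sound": [[0.52, 110.0], [112.3, 214.9]]}'
info = json.loads(sample)

data = {
    'length': '{0:f}'.format(info['file duration']),  # total duration in seconds
    'cuein':  '{0:f}'.format(info['sound'][0][0]),    # start of the first non-silent region
    'cueout': '{0:f}'.format(info['sound'][-1][1]),   # end of the last non-silent region
}
print(data)
```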
@@ -27,9 +27,6 @@ from pypomessagehandler import PypoMessageHandler
 from pypoliquidsoap import PypoLiquidsoap
 from timeout import ls_timeout
 
-from media.update.replaygainupdater import ReplayGainUpdater
-from media.update.silananalyzer import SilanAnalyzer
-
 from configobj import ConfigObj
 
 # custom imports
@@ -250,10 +247,6 @@ if __name__ == '__main__':
         g.test_api()
         sys.exit(0)
 
-
-    ReplayGainUpdater.start_reply_gain(api_client)
-    SilanAnalyzer.start_silan(api_client, logger)
-
     pypoFetch_q = Queue()
     recorder_q = Queue()
     pypoPush_q = Queue()