Convert beets plugins to lazy logging

Bruno Cauet 2015-01-04 11:41:17 +01:00
parent e75f9a703d
commit 8cac47af2a
30 changed files with 272 additions and 380 deletions
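
The pattern repeated throughout the diff is the same: in each hunk, the call that builds the message eagerly with str.format() is the old line, and the call that passes the '{0}'-style template plus its arguments separately is its replacement. The point is that the string is only rendered if the record is actually emitted. Plain stdlib logging interpolates with %-style placeholders, so this style assumes a logging wrapper that applies str.format() lazily (beets ships its own logging shim for that purpose). The sketch below is a minimal, self-contained illustration of the idea, not the project's implementation; the names LazyFormatMessage and StrFormatLogger and the sample messages are hypothetical.

import logging


class LazyFormatMessage(object):
    """Holds a '{}'-style template and its arguments; formats on str()."""

    def __init__(self, fmt, args):
        self.fmt = fmt
        self.args = args

    def __str__(self):
        # Only called when a handler actually renders the record.
        return self.fmt.format(*self.args)


class StrFormatLogger(logging.Logger):
    """Logger whose calls accept '{0}'-style templates and defer formatting."""

    def _log(self, level, msg, args, **kwargs):
        # Bundle the template with its arguments and pass no positional args,
        # so the stdlib machinery never attempts %-interpolation.
        return super(StrFormatLogger, self)._log(
            level, LazyFormatMessage(msg, args), (), **kwargs)


logging.setLoggerClass(StrFormatLogger)
logging.basicConfig(level=logging.INFO)
log = logging.getLogger('example')

# Old style: the message is formatted up front, even though DEBUG is disabled.
log.debug(u'Beatport API Error: {0} (query: {1})'.format('timeout', 'artist:foo'))

# New style: the template and arguments are handed over as-is; with the level
# at INFO the call returns without ever running str.format().
log.debug(u'Beatport API Error: {0} (query: {1})', 'timeout', 'artist:foo')
log.info(u'Found bpm {0}', 128)

With the level set to INFO, the lazy debug call above never formats anything, while the eager variant still pays the str.format() cost before the logger can discard the record; that deferred work is what the conversion avoids.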


@ -194,7 +194,7 @@ class BeatportPlugin(BeetsPlugin):
try:
return self._get_releases(query)
except BeatportAPIError as e:
log.debug(u'Beatport API Error: {0} (query: {1})'.format(e, query))
log.debug(u'Beatport API Error: {0} (query: {1})', e, query)
return []
def item_candidates(self, item, artist, title):
@ -205,14 +205,14 @@ class BeatportPlugin(BeetsPlugin):
try:
return self._get_tracks(query)
except BeatportAPIError as e:
log.debug(u'Beatport API Error: {0} (query: {1})'.format(e, query))
log.debug(u'Beatport API Error: {0} (query: {1})', e, query)
return []
def album_for_id(self, release_id):
"""Fetches a release by its Beatport ID and returns an AlbumInfo object
or None if the release is not found.
"""
log.debug(u'Searching Beatport for release {0}'.format(release_id))
log.debug(u'Searching Beatport for release {0}', release_id)
match = re.search(r'(^|beatport\.com/release/.+/)(\d+)$', release_id)
if not match:
return None
@ -224,7 +224,7 @@ class BeatportPlugin(BeetsPlugin):
"""Fetches a track by its Beatport ID and returns a TrackInfo object
or None if the track is not found.
"""
log.debug(u'Searching Beatport for track {0}'.format(str(track_id)))
log.debug(u'Searching Beatport for track {0}', track_id)
match = re.search(r'(^|beatport\.com/track/.+/)(\d+)$', track_id)
if not match:
return None


@ -73,15 +73,15 @@ class BPMPlugin(BeetsPlugin):
item = items[0]
if item['bpm']:
log.info(u'Found bpm {0}'.format(item['bpm']))
log.info(u'Found bpm {0}', item['bpm'])
if not overwrite:
return
log.info(u'Press Enter {0} times to the rhythm or Ctrl-D '
u'to exit'.format(self.config['max_strokes'].get(int)))
u'to exit', self.config['max_strokes'].get(int))
new_bpm = bpm(self.config['max_strokes'].get(int))
item['bpm'] = int(new_bpm)
if write:
item.try_write()
item.store()
log.info(u'Added new bpm {0}'.format(item['bpm']))
log.info(u'Added new bpm {0}', item['bpm'])


@ -64,19 +64,19 @@ def acoustid_match(path):
try:
duration, fp = acoustid.fingerprint_file(util.syspath(path))
except acoustid.FingerprintGenerationError as exc:
log.error(u'fingerprinting of {0} failed: {1}'
.format(util.displayable_path(repr(path)), str(exc)))
log.error(u'fingerprinting of {0} failed: {1}',
util.displayable_path(repr(path)), str(exc))
return None
_fingerprints[path] = fp
try:
res = acoustid.lookup(API_KEY, fp, duration,
meta='recordings releases')
except acoustid.AcoustidError as exc:
log.debug(u'fingerprint matching {0} failed: {1}'
.format(util.displayable_path(repr(path)), str(exc)))
log.debug(u'fingerprint matching {0} failed: {1}',
util.displayable_path(repr(path)), exc)
return None
log.debug(u'chroma: fingerprinted {0}'
.format(util.displayable_path(repr(path))))
log.debug(u'chroma: fingerprinted {0}',
util.displayable_path(repr(path)))
# Ensure the response is usable and parse it.
if res['status'] != 'ok' or not res.get('results'):
@ -99,9 +99,8 @@ def acoustid_match(path):
if 'releases' in recording:
release_ids += [rel['id'] for rel in recording['releases']]
log.debug(u'chroma: matched recordings {0} on releases {1}'.format(
recording_ids, release_ids,
))
log.debug(u'chroma: matched recordings {0} on releases {1}',
recording_ids, release_ids)
_matches[path] = recording_ids, release_ids
@ -155,7 +154,7 @@ class AcoustidPlugin(plugins.BeetsPlugin):
if album:
albums.append(album)
log.debug(u'acoustid album candidates: {0}'.format(len(albums)))
log.debug(u'acoustid album candidates: {0}', len(albums))
return albums
def item_candidates(self, item, artist, title):
@ -168,7 +167,7 @@ class AcoustidPlugin(plugins.BeetsPlugin):
track = hooks.track_for_mbid(recording_id)
if track:
tracks.append(track)
log.debug(u'acoustid item candidates: {0}'.format(len(tracks)))
log.debug(u'acoustid item candidates: {0}', len(tracks))
return tracks
def commands(self):
@ -230,11 +229,11 @@ def submit_items(userkey, items, chunksize=64):
def submit_chunk():
"""Submit the current accumulated fingerprint data."""
log.info(u'submitting {0} fingerprints'.format(len(data)))
log.info(u'submitting {0} fingerprints', len(data))
try:
acoustid.submit(API_KEY, userkey, data)
except acoustid.AcoustidError as exc:
log.warn(u'acoustid submission error: {0}'.format(exc))
log.warn(u'acoustid submission error: {0}', exc)
del data[:]
for item in items:
@ -279,34 +278,28 @@ def fingerprint_item(item, write=False):
"""
# Get a fingerprint and length for this track.
if not item.length:
log.info(u'{0}: no duration available'.format(
util.displayable_path(item.path)
))
log.info(u'{0}: no duration available',
util.displayable_path(item.path))
elif item.acoustid_fingerprint:
if write:
log.info(u'{0}: fingerprint exists, skipping'.format(
util.displayable_path(item.path)
))
log.info(u'{0}: fingerprint exists, skipping',
util.displayable_path(item.path))
else:
log.info(u'{0}: using existing fingerprint'.format(
util.displayable_path(item.path)
))
log.info(u'{0}: using existing fingerprint',
util.displayable_path(item.path))
return item.acoustid_fingerprint
else:
log.info(u'{0}: fingerprinting'.format(
util.displayable_path(item.path)
))
log.info(u'{0}: fingerprinting',
util.displayable_path(item.path))
try:
_, fp = acoustid.fingerprint_file(item.path)
item.acoustid_fingerprint = fp
if write:
log.info(u'{0}: writing fingerprint'.format(
util.displayable_path(item.path)
))
log.info(u'{0}: writing fingerprint',
util.displayable_path(item.path))
item.try_write()
if item._db:
item.store()
return item.acoustid_fingerprint
except acoustid.FingerprintGenerationError as exc:
log.info(u'fingerprint generation failed: {0}'
.format(exc))
log.info(u'fingerprint generation failed: {0}', exc)


@ -92,7 +92,7 @@ def encode(command, source, dest, pretend=False):
quiet = config['convert']['quiet'].get()
if not quiet and not pretend:
log.info(u'Encoding {0}'.format(util.displayable_path(source)))
log.info(u'Encoding {0}', util.displayable_path(source))
# Substitute $source and $dest in the argument list.
args = shlex.split(command)
@ -110,12 +110,11 @@ def encode(command, source, dest, pretend=False):
util.command_output(args)
except subprocess.CalledProcessError as exc:
# Something went wrong (probably Ctrl+C), remove temporary files
log.info(u'Encoding {0} failed. Cleaning up...'
.format(util.displayable_path(source)))
log.debug(u'Command {0} exited with status {1}'.format(
exc.cmd.decode('utf8', 'ignore'),
exc.returncode,
))
log.info(u'Encoding {0} failed. Cleaning up...',
util.displayable_path(source))
log.debug(u'Command {0} exited with status {1}',
exc.cmd.decode('utf8', 'ignore'),
exc.returncode)
util.remove(dest)
util.prune_dirs(os.path.dirname(dest))
raise
@ -127,9 +126,8 @@ def encode(command, source, dest, pretend=False):
)
if not quiet and not pretend:
log.info(u'Finished encoding {0}'.format(
util.displayable_path(source))
)
log.info(u'Finished encoding {0}',
util.displayable_path(source))
def should_transcode(item, format):
@ -173,21 +171,17 @@ def convert_item(dest_dir, keep_new, path_formats, format, pretend=False):
util.mkdirall(dest)
if os.path.exists(util.syspath(dest)):
log.info(u'Skipping {0} (target file exists)'.format(
util.displayable_path(item.path)
))
log.info(u'Skipping {0} (target file exists)',
util.displayable_path(item.path))
continue
if keep_new:
if pretend:
log.info(u'mv {0} {1}'.format(
util.displayable_path(item.path),
util.displayable_path(original),
))
log.info(u'mv {0} {1}',
util.displayable_path(item.path),
util.displayable_path(original))
else:
log.info(u'Moving to {0}'.format(
util.displayable_path(original))
)
log.info(u'Moving to {0}', util.displayable_path(original))
util.move(item.path, original)
if should_transcode(item, format):
@ -197,15 +191,12 @@ def convert_item(dest_dir, keep_new, path_formats, format, pretend=False):
continue
else:
if pretend:
log.info(u'cp {0} {1}'.format(
util.displayable_path(original),
util.displayable_path(converted),
))
log.info(u'cp {0} {1}',
util.displayable_path(original),
util.displayable_path(converted))
else:
# No transcoding necessary.
log.info(u'Copying {0}'.format(
util.displayable_path(item.path))
)
log.info(u'Copying {0}', util.displayable_path(item.path))
util.copy(original, converted)
if pretend:
@ -281,19 +272,17 @@ def copy_album_art(album, dest_dir, path_formats, pretend=False):
util.mkdirall(dest)
if os.path.exists(util.syspath(dest)):
log.info(u'Skipping {0} (target file exists)'.format(
util.displayable_path(album.artpath)
))
log.info(u'Skipping {0} (target file exists)',
util.displayable_path(album.artpath))
return
if pretend:
log.info(u'cp {0} {1}'.format(
util.displayable_path(album.artpath),
util.displayable_path(dest),
))
log.info(u'cp {0} {1}',
util.displayable_path(album.artpath),
util.displayable_path(dest))
else:
log.info(u'Copying cover art to {0}'.format(
util.displayable_path(dest)))
log.info(u'Copying cover art to {0}',
util.displayable_path(dest))
util.copy(album.artpath, dest)


@ -89,7 +89,7 @@ class DiscogsPlugin(BeetsPlugin):
raise beets.ui.UserError('Discogs authorization failed')
# Save the token for later use.
log.debug('Discogs token {0}, secret {1}'.format(token, secret))
log.debug('Discogs token {0}, secret {1}', token, secret)
with open(self._tokenfile(), 'w') as f:
json.dump({'token': token, 'secret': secret}, f)
@ -117,10 +117,10 @@ class DiscogsPlugin(BeetsPlugin):
try:
return self.get_albums(query)
except DiscogsAPIError as e:
log.debug(u'Discogs API Error: {0} (query: {1})'.format(e, query))
log.debug(u'Discogs API Error: {0} (query: {1})', e, query)
return []
except ConnectionError as e:
log.debug(u'HTTP Connection Error: {0}'.format(e))
log.debug(u'HTTP Connection Error: {0}', e)
return []
def album_for_id(self, album_id):
@ -130,7 +130,7 @@ class DiscogsPlugin(BeetsPlugin):
if not self.discogs_client:
return
log.debug(u'Searching Discogs for release {0}'.format(str(album_id)))
log.debug(u'Searching Discogs for release {0}', album_id)
# Discogs-IDs are simple integers. We only look for those at the end
# of an input string as to avoid confusion with other metadata plugins.
# An optional bracket can follow the integer, as this is how discogs
@ -145,11 +145,11 @@ class DiscogsPlugin(BeetsPlugin):
getattr(result, 'title')
except DiscogsAPIError as e:
if e.message != '404 Not Found':
log.debug(u'Discogs API Error: {0} (query: {1})'
.format(e, result._uri))
log.debug(u'Discogs API Error: {0} (query: {1})',
e, result._uri)
return None
except ConnectionError as e:
log.debug(u'HTTP Connection Error: {0}'.format(e))
log.debug(u'HTTP Connection Error: {0}', e)
return None
return self.get_album_info(result)
@ -294,7 +294,7 @@ class DiscogsPlugin(BeetsPlugin):
if match:
medium, index = match.groups()
else:
log.debug(u'Invalid Discogs position: {0}'.format(position))
log.debug(u'Invalid Discogs position: {0}', position)
medium = index = None
return medium or None, index or None


@ -56,20 +56,20 @@ def _checksum(item, prog):
key = args[0]
checksum = getattr(item, key, False)
if not checksum:
log.debug(u'{0}: key {1} on item {2} not cached: computing checksum'
.format(PLUGIN, key, displayable_path(item.path)))
log.debug(u'{0}: key {1} on item {2} not cached: computing checksum',
PLUGIN, key, displayable_path(item.path))
try:
checksum = command_output(args)
setattr(item, key, checksum)
item.store()
log.debug(u'{0}: computed checksum for {1} using {2}'
.format(PLUGIN, item.title, key))
log.debug(u'{0}: computed checksum for {1} using {2}',
PLUGIN, item.title, key)
except subprocess.CalledProcessError as e:
log.debug(u'{0}: failed to checksum {1}: {2}'
.format(PLUGIN, displayable_path(item.path), e))
log.debug(u'{0}: failed to checksum {1}: {2}',
PLUGIN, displayable_path(item.path), e)
else:
log.debug(u'{0}: key {1} on item {2} cached: not computing checksum'
.format(PLUGIN, key, displayable_path(item.path)))
log.debug(u'{0}: key {1} on item {2} cached: not computing checksum',
PLUGIN, key, displayable_path(item.path))
return key, checksum
@ -86,8 +86,8 @@ def _group_by(objs, keys):
key = '\001'.join(values)
counts[key].append(obj)
else:
log.debug(u'{0}: all keys {1} on item {2} are null: skipping'
.format(PLUGIN, str(keys), displayable_path(obj.path)))
log.debug(u'{0}: all keys {1} on item {2} are null: skipping',
PLUGIN, str(keys), displayable_path(obj.path))
return counts


@ -154,23 +154,23 @@ class EchonestMetadataPlugin(plugins.BeetsPlugin):
if e.code == 3:
# reached access limit per minute
log.debug(u'echonest: rate-limited on try {0}; '
u'waiting {1} seconds'
.format(i + 1, RETRY_INTERVAL))
u'waiting {1} seconds',
i + 1, RETRY_INTERVAL)
time.sleep(RETRY_INTERVAL)
elif e.code == 5:
# specified identifier does not exist
# no use in trying again.
log.debug(u'echonest: {0}'.format(e))
log.debug(u'echonest: {0}', e)
return None
else:
log.error(u'echonest: {0}'.format(e.args[0][0]))
log.error(u'echonest: {0}', e.args[0][0])
return None
except (pyechonest.util.EchoNestIOError, socket.error) as e:
log.warn(u'echonest: IO error: {0}'.format(e))
log.warn(u'echonest: IO error: {0}', e)
time.sleep(RETRY_INTERVAL)
except Exception as e:
# there was an error analyzing the track, status: error
log.debug(u'echonest: {0}'.format(e))
log.debug(u'echonest: {0}', e)
return None
else:
break
@ -292,10 +292,9 @@ class EchonestMetadataPlugin(plugins.BeetsPlugin):
fd, dest = tempfile.mkstemp(u'.ogg')
os.close(fd)
log.info(u'echonest: encoding {0} to {1}'.format(
util.displayable_path(source),
util.displayable_path(dest),
))
log.info(u'echonest: encoding {0} to {1}',
util.displayable_path(source),
util.displayable_path(dest))
opts = []
for arg in CONVERT_COMMAND.split():
@ -306,13 +305,12 @@ class EchonestMetadataPlugin(plugins.BeetsPlugin):
try:
util.command_output(opts)
except (OSError, subprocess.CalledProcessError) as exc:
log.debug(u'echonest: encode failed: {0}'.format(exc))
log.debug(u'echonest: encode failed: {0}', exc)
util.remove(dest)
return
log.info(u'echonest: finished encoding {0}'.format(
util.displayable_path(source))
)
log.info(u'echonest: finished encoding {0}',
util.displayable_path(source))
return dest
def truncate(self, source):
@ -320,10 +318,9 @@ class EchonestMetadataPlugin(plugins.BeetsPlugin):
fd, dest = tempfile.mkstemp(u'.ogg')
os.close(fd)
log.info(u'echonest: truncating {0} to {1}'.format(
util.displayable_path(source),
util.displayable_path(dest),
))
log.info(u'echonest: truncating {0} to {1}',
util.displayable_path(source),
util.displayable_path(dest))
opts = []
for arg in TRUNCATE_COMMAND.split():
@ -334,13 +331,12 @@ class EchonestMetadataPlugin(plugins.BeetsPlugin):
try:
util.command_output(opts)
except (OSError, subprocess.CalledProcessError) as exc:
log.debug(u'echonest: truncate failed: {0}'.format(exc))
log.debug(u'echonest: truncate failed: {0}', exc)
util.remove(dest)
return
log.info(u'echonest: truncate encoding {0}'.format(
util.displayable_path(source))
)
log.info(u'echonest: truncate encoding {0}',
util.displayable_path(source))
return dest
def analyze(self, item):
@ -411,13 +407,11 @@ class EchonestMetadataPlugin(plugins.BeetsPlugin):
for method in methods:
song = method(item)
if song:
log.debug(
u'echonest: got song through {0}: {1} - {2} [{3}]'.format(
method.__name__,
item.artist,
item.title,
song.get('duration'),
)
log.debug(u'echonest: got song through {0}: {1} - {2} [{3}]',
method.__name__,
item.artist,
item.title,
song.get('duration'),
)
return song
@ -429,7 +423,7 @@ class EchonestMetadataPlugin(plugins.BeetsPlugin):
for k, v in values.iteritems():
if k in ATTRIBUTES:
field = ATTRIBUTES[k]
log.debug(u'echonest: metadata: {0} = {1}'.format(field, v))
log.debug(u'echonest: metadata: {0} = {1}', field, v)
if field == 'bpm':
item[field] = int(v)
else:
@ -441,7 +435,7 @@ class EchonestMetadataPlugin(plugins.BeetsPlugin):
item['initial_key'] = key
if 'id' in values:
enid = values['id']
log.debug(u'echonest: metadata: {0} = {1}'.format(ID_KEY, enid))
log.debug(u'echonest: metadata: {0} = {1}', ID_KEY, enid)
item[ID_KEY] = enid
# Write and save.
@ -483,8 +477,7 @@ class EchonestMetadataPlugin(plugins.BeetsPlugin):
self.config.set_args(opts)
write = config['import']['write'].get(bool)
for item in lib.items(ui.decargs(args)):
log.info(u'echonest: {0} - {1}'.format(item.artist,
item.title))
log.info(u'echonest: {0} - {1}', item.artist, item.title)
if self.config['force'] or self.requires_update(item):
song = self.fetch_song(item)
if song:


@ -122,20 +122,17 @@ def embed_item(item, imagepath, maxwidth=None, itempath=None,
if not art:
pass
else:
log.debug(u'embedart: media file contained art already {0}'.format(
displayable_path(imagepath)
))
log.debug(u'embedart: media file contained art already {0}',
displayable_path(imagepath))
return
if maxwidth and not as_album:
imagepath = resize_image(imagepath, maxwidth)
try:
log.debug(u'embedart: embedding {0}'.format(
displayable_path(imagepath)
))
log.debug(u'embedart: embedding {0}', displayable_path(imagepath))
item['images'] = [_mediafile_image(imagepath, maxwidth)]
except IOError as exc:
log.error(u'embedart: could not read image file: {0}'.format(exc))
log.error(u'embedart: could not read image file: {0}', exc)
else:
# We don't want to store the image in the database.
item.try_write(itempath)
@ -147,19 +144,18 @@ def embed_album(album, maxwidth=None, quiet=False):
"""
imagepath = album.artpath
if not imagepath:
log.info(u'No album art present: {0} - {1}'.
format(album.albumartist, album.album))
log.info(u'No album art present: {0} - {1}',
album.albumartist, album.album)
return
if not os.path.isfile(syspath(imagepath)):
log.error(u'Album art not found at {0}'
.format(displayable_path(imagepath)))
log.error(u'Album art not found at {0}', displayable_path(imagepath))
return
if maxwidth:
imagepath = resize_image(imagepath, maxwidth)
log.log(
logging.DEBUG if quiet else logging.INFO,
u'Embedding album art into {0.albumartist} - {0.album}.'.format(album),
u'Embedding album art into {0.albumartist} - {0.album}.', album
)
for item in album.items():
@ -171,8 +167,7 @@ def embed_album(album, maxwidth=None, quiet=False):
def resize_image(imagepath, maxwidth):
"""Returns path to an image resized to maxwidth.
"""
log.info(u'Resizing album art to {0} pixels wide'
.format(maxwidth))
log.info(u'Resizing album art to {0} pixels wide', maxwidth)
imagepath = ArtResizer.shared.resize(maxwidth, syspath(imagepath))
return imagepath
@ -197,15 +192,14 @@ def check_art_similarity(item, imagepath, compare_threshold):
stdout, stderr = proc.communicate()
if proc.returncode:
if proc.returncode != 1:
log.warn(u'embedart: IM phashes compare failed for {0}, \
{1}'.format(displayable_path(imagepath),
displayable_path(art)))
log.warn(u'embedart: IM phashes compare failed for {0}, {1}',
displayable_path(imagepath), displayable_path(art))
return
phashDiff = float(stderr)
else:
phashDiff = float(stdout)
log.info(u'embedart: compare PHASH score is {0}'.format(phashDiff))
log.info(u'embedart: compare PHASH score is {0}', phashDiff)
if phashDiff > compare_threshold:
return False
@ -226,9 +220,8 @@ def get_art(item):
try:
mf = mediafile.MediaFile(syspath(item.path))
except mediafile.UnreadableFileError as exc:
log.error(u'Could not extract art from {0}: {1}'.format(
displayable_path(item.path), exc
))
log.error(u'Could not extract art from {0}: {1}',
displayable_path(item.path), exc)
return
return mf.art
@ -244,8 +237,8 @@ def extract(outpath, item):
art = get_art(item)
if not art:
log.error(u'No album art present in {0} - {1}.'
.format(item.artist, item.title))
log.error(u'No album art present in {0} - {1}.',
item.artist, item.title)
return
# Add an extension to the filename.
@ -255,8 +248,8 @@ def extract(outpath, item):
return
outpath += '.' + ext
log.info(u'Extracting album art from: {0.artist} - {0.title} '
u'to: {1}'.format(item, displayable_path(outpath)))
log.info(u'Extracting album art from: {0.artist} - {0.title} to: {1}',
item, displayable_path(outpath))
with open(syspath(outpath), 'wb') as f:
f.write(art)
return outpath
@ -267,14 +260,13 @@ def extract(outpath, item):
def clear(lib, query):
log.info(u'Clearing album art from items:')
for item in lib.items(query):
log.info(u'{0} - {1}'.format(item.artist, item.title))
log.info(u'{0} - {1}', item.artist, item.title)
try:
mf = mediafile.MediaFile(syspath(item.path),
config['id3v23'].get(bool))
except mediafile.UnreadableFileError as exc:
log.error(u'Could not clear art from {0}: {1}'.format(
displayable_path(item.path), exc
))
log.error(u'Could not clear art from {0}: {1}',
displayable_path(item.path), exc)
continue
del mf.art
mf.save()


@ -50,7 +50,7 @@ def _fetch_image(url):
actually be an image. If so, returns a path to the downloaded image.
Otherwise, returns None.
"""
log.debug(u'fetchart: downloading art: {0}'.format(url))
log.debug(u'fetchart: downloading art: {0}', url)
try:
with closing(requests_session.get(url, stream=True)) as resp:
if 'Content-Type' not in resp.headers \
@ -63,9 +63,8 @@ def _fetch_image(url):
as fh:
for chunk in resp.iter_content():
fh.write(chunk)
log.debug(u'fetchart: downloaded art to: {0}'.format(
util.displayable_path(fh.name)
))
log.debug(u'fetchart: downloaded art to: {0}',
util.displayable_path(fh.name))
return fh.name
except (IOError, requests.RequestException):
log.debug(u'fetchart: error fetching art')
@ -117,7 +116,7 @@ def aao_art(album):
# Get the page from albumart.org.
try:
resp = requests_session.get(AAO_URL, params={'asin': album.asin})
log.debug(u'fetchart: scraped art URL: {0}'.format(resp.url))
log.debug(u'fetchart: scraped art URL: {0}', resp.url)
except requests.RequestException:
log.debug(u'fetchart: error scraping art page')
return
@ -172,7 +171,7 @@ def itunes_art(album):
try:
itunes_album = itunes.search_album(search_string)[0]
except Exception as exc:
log.debug('fetchart: iTunes search failed: {0}'.format(exc))
log.debug('fetchart: iTunes search failed: {0}', exc)
return
if itunes_album.get_artwork()['100']:
@ -216,16 +215,14 @@ def art_in_path(path, cover_names, cautious):
cover_pat = r"(\b|_)({0})(\b|_)".format('|'.join(cover_names))
for fn in images:
if re.search(cover_pat, os.path.splitext(fn)[0], re.I):
log.debug(u'fetchart: using well-named art file {0}'.format(
util.displayable_path(fn)
))
log.debug(u'fetchart: using well-named art file {0}',
util.displayable_path(fn))
return os.path.join(path, fn)
# Fall back to any image in the folder.
if images and not cautious:
log.debug(u'fetchart: using fallback art file {0}'.format(
util.displayable_path(images[0])
))
log.debug(u'fetchart: using fallback art file {0}',
util.displayable_path(images[0]))
return os.path.join(path, images[0])
@ -315,8 +312,7 @@ def batch_fetch_art(lib, albums, force, maxwidth=None):
else:
message = ui.colorize('red', 'no art found')
log.info(u'{0} - {1}: {2}'.format(album.albumartist, album.album,
message))
log.info(u'{0} - {1}: {2}', album.albumartist, album.album, message)
class FetchArtPlugin(plugins.BeetsPlugin):


@ -52,8 +52,7 @@ def update_metadata(item, feat_part, drop_feat, loglevel=logging.DEBUG):
remove it from the artist field.
"""
# In all cases, update the artist fields.
log.log(loglevel, u'artist: {0} -> {1}'.format(
item.artist, item.albumartist))
log.log(loglevel, u'artist: {0} -> {1}', item.artist, item.albumartist)
item.artist = item.albumartist
if item.artist_sort:
# Just strip the featured artist from the sort name.
@ -63,7 +62,7 @@ def update_metadata(item, feat_part, drop_feat, loglevel=logging.DEBUG):
# artist and if we do not drop featuring information.
if not drop_feat and not contains_feat(item.title):
new_title = u"{0} feat. {1}".format(item.title, feat_part)
log.log(loglevel, u'title: {0} -> {1}'.format(item.title, new_title))
log.log(loglevel, u'title: {0} -> {1}', item.title, new_title)
item.title = new_title


@ -72,12 +72,11 @@ class IHatePlugin(BeetsPlugin):
self._log.debug(u'[ihate] processing your hate')
if self.do_i_hate_this(task, skip_queries):
task.choice_flag = action.SKIP
self._log.info(u'[ihate] skipped: {0}'
.format(summary(task)))
self._log.info(u'[ihate] skipped: {0}', summary(task))
return
if self.do_i_hate_this(task, warn_queries):
self._log.info(u'[ihate] you maybe hate this: {0}'
.format(summary(task)))
self._log.info(u'[ihate] you maybe hate this: {0}',
summary(task))
else:
self._log.debug(u'[ihate] nothing to do')
else:


@ -75,8 +75,8 @@ def write_item_mtime(item, mtime):
item's file.
"""
if mtime is None:
log.warn(u"No mtime to be preserved for item '{0}'"
.format(util.displayable_path(item.path)))
log.warn(u"No mtime to be preserved for item '{0}'",
util.displayable_path(item.path))
return
# The file's mtime on disk must be in sync with the item's mtime
@ -97,17 +97,17 @@ def record_import_mtime(item, source, destination):
"""
mtime = os.stat(util.syspath(source)).st_mtime
item_mtime[destination] = mtime
log.debug(u"Recorded mtime {0} for item '{1}' imported from '{2}'".format(
mtime, util.displayable_path(destination),
util.displayable_path(source)))
log.debug(u"Recorded mtime {0} for item '{1}' imported from '{2}'",
mtime, util.displayable_path(destination),
util.displayable_path(source))
@ImportAddedPlugin.listen('album_imported')
def update_album_times(lib, album):
if reimported_album(album):
log.debug(u"Album '{0}' is reimported, skipping import of added dates"
u" for the album and its items."
.format(util.displayable_path(album.path)))
u" for the album and its items.",
util.displayable_path(album.path))
return
album_mtimes = []
@ -120,7 +120,7 @@ def update_album_times(lib, album):
item.store()
album.added = min(album_mtimes)
log.debug(u"Import of album '{0}', selected album.added={1} from item"
u" file mtimes.".format(album.album, album.added))
u" file mtimes.", album.album, album.added)
album.store()
@ -128,13 +128,13 @@ def update_album_times(lib, album):
def update_item_times(lib, item):
if reimported_item(item):
log.debug(u"Item '{0}' is reimported, skipping import of added "
u"date.".format(util.displayable_path(item.path)))
u"date.", util.displayable_path(item.path))
return
mtime = item_mtime.pop(item.path, None)
if mtime:
item.added = mtime
if config['importadded']['preserve_mtimes'].get(bool):
write_item_mtime(item, mtime)
log.debug(u"Import of item '{0}', selected item.added={1}"
.format(util.displayable_path(item.path), item.added))
log.debug(u"Import of item '{0}', selected item.added={1}",
util.displayable_path(item.path), item.added)
item.store()


@ -132,7 +132,7 @@ def _record_items(lib, basename, items):
if 'echo' in formats:
log.info("Location of imported music:")
for path in paths:
log.info(" " + path)
log.info(" {0}", path)
@ImportFeedsPlugin.listen('library_opened')


@ -52,7 +52,7 @@ def run(lib, opts, args):
try:
data = data_emitter()
except mediafile.UnreadableFileError as ex:
log.error(u'cannot read file: {0}'.format(ex.message))
log.error(u'cannot read file: {0}', ex.message)
continue
if opts.summarize:


@ -64,9 +64,8 @@ def compile_inline(python_code, album):
try:
func = _compile_func(python_code)
except SyntaxError:
log.error(u'syntax error in inline field definition:\n{0}'.format(
traceback.format_exc()
))
log.error(u'syntax error in inline field definition:\n{0}',
traceback.format_exc())
return
else:
is_expr = False
@ -113,14 +112,14 @@ class InlinePlugin(BeetsPlugin):
# Item fields.
for key, view in itertools.chain(config['item_fields'].items(),
config['pathfields'].items()):
log.debug(u'inline: adding item field {0}'.format(key))
log.debug(u'inline: adding item field {0}', key)
func = compile_inline(view.get(unicode), False)
if func is not None:
self.template_fields[key] = func
# Album fields.
for key, view in config['album_fields'].items():
log.debug(u'inline: adding album field {0}'.format(key))
log.debug(u'inline: adding album field {0}', key)
func = compile_inline(view.get(unicode), True)
if func is not None:
self.album_template_fields[key] = func


@ -62,11 +62,11 @@ class KeyFinderPlugin(BeetsPlugin):
try:
key = util.command_output([bin, '-f', item.path])
except (subprocess.CalledProcessError, OSError) as exc:
log.error(u'KeyFinder execution failed: {0}'.format(exc))
log.error(u'KeyFinder execution failed: {0}', exc)
continue
item['initial_key'] = key
log.debug(u'added computed initial key {0} for {1}'
.format(key, util.displayable_path(item.path)))
log.debug(u'added computed initial key {0} for {1}',
key, util.displayable_path(item.path))
item.try_write()
item.store()


@ -71,7 +71,7 @@ def _tags_for(obj, min_weight=None):
else:
res = obj.get_top_tags()
except PYLAST_EXCEPTIONS as exc:
log.debug(u'last.fm error: {0}'.format(exc))
log.debug(u'last.fm error: {0}', exc)
return []
# Filter by weight (optionally).
@ -371,9 +371,8 @@ class LastGenrePlugin(plugins.BeetsPlugin):
for album in lib.albums(ui.decargs(args)):
album.genre, src = self._get_genre(album)
log.info(u'genre for album {0} - {1} ({2}): {3}'.format(
album.albumartist, album.album, src, album.genre
))
log.info(u'genre for album {0} - {1} ({2}): {3}',
album.albumartist, album.album, src, album.genre)
album.store()
for item in album.items():
@ -382,9 +381,8 @@ class LastGenrePlugin(plugins.BeetsPlugin):
if 'track' in self.sources:
item.genre, src = self._get_genre(item)
item.store()
log.info(u'genre for track {0} - {1} ({2}): {3}'
.format(item.artist, item.title, src,
item.genre))
log.info(u'genre for track {0} - {1} ({2}): {3}',
item.artist, item.title, src, item.genre)
if write:
item.try_write()
@ -397,20 +395,20 @@ class LastGenrePlugin(plugins.BeetsPlugin):
if task.is_album:
album = task.album
album.genre, src = self._get_genre(album)
log.debug(u'added last.fm album genre ({0}): {1}'.format(
src, album.genre))
log.debug(u'added last.fm album genre ({0}): {1}',
src, album.genre)
album.store()
if 'track' in self.sources:
for item in album.items():
item.genre, src = self._get_genre(item)
log.debug(u'added last.fm item genre ({0}): {1}'.format(
src, item.genre))
log.debug(u'added last.fm item genre ({0}): {1}',
src, item.genre)
item.store()
else:
item = task.item
item.genre, src = self._get_genre(item)
log.debug(u'added last.fm item genre ({0}): {1}'.format(
src, item.genre))
log.debug(u'added last.fm item genre ({0}): {1}',
src, item.genre)
item.store()


@ -56,7 +56,7 @@ def import_lastfm(lib):
if not user:
raise ui.UserError('You must specify a user name for lastimport')
log.info('Fetching last.fm library for @{0}'.format(user))
log.info('Fetching last.fm library for @{0}', user)
page_total = 1
page_current = 0
@ -65,10 +65,10 @@ def import_lastfm(lib):
retry_limit = config['lastimport']['retry_limit'].get(int)
# Iterate through a yet to be known page total count
while page_current < page_total:
log.info('lastimport: Querying page #{0}{1}...'.format(
page_current + 1,
'/' + str(page_total) if page_total > 1 else ''
))
log.info('lastimport: Querying page #{0}{1}...',
page_current + 1,
'/{}'.format(page_total) if page_total > 1 else ''
)
for retry in range(0, retry_limit):
page = fetch_tracks(user, page_current + 1, per_page)
@ -84,27 +84,22 @@ def import_lastfm(lib):
unknown_total += unknown
break
else:
log.error('lastimport: ERROR: unable to read page #{0}'.format(
page_current + 1
))
log.error('lastimport: ERROR: unable to read page #{0}',
page_current + 1)
if retry < retry_limit:
log.info(
'lastimport: Retrying page #{0}... ({1}/{2} retry)'
.format(page_current + 1, retry + 1, retry_limit)
'lastimport: Retrying page #{0}... ({1}/{2} retry)',
page_current + 1, retry + 1, retry_limit
)
else:
log.error(
'lastimport: FAIL: unable to fetch page #{0}, '
'tried {1} times'.format(page_current, retry + 1)
)
log.error('lastimport: FAIL: unable to fetch page #{0}, '
'tried {1} times', page_current, retry + 1)
page_current += 1
log.info('lastimport: ... done!')
log.info('lastimport: finished processing {0} song pages'.format(
page_total
))
log.info('lastimport: {0} unknown play-counts'.format(unknown_total))
log.info('lastimport: {0} play-counts imported'.format(found_total))
log.info('lastimport: finished processing {0} song pages', page_total)
log.info('lastimport: {0} unknown play-counts', unknown_total)
log.info('lastimport: {0} play-counts imported', found_total)
def fetch_tracks(user, page, limit):
@ -122,10 +117,8 @@ def process_tracks(lib, tracks):
total = len(tracks)
total_found = 0
total_fails = 0
log.info(
'lastimport: Received {0} tracks in this page, processing...'
.format(total)
)
log.info('lastimport: Received {0} tracks in this page, processing...',
total)
for num in xrange(0, total):
song = ''
@ -136,8 +129,7 @@ def process_tracks(lib, tracks):
if 'album' in tracks[num]:
album = tracks[num]['album'].get('name', '').strip()
log.debug(u'lastimport: query: {0} - {1} ({2})'
.format(artist, title, album))
log.debug(u'lastimport: query: {0} - {1} ({2})', artist, title, album)
# First try to query by musicbrainz's trackid
if trackid:
@ -148,7 +140,7 @@ def process_tracks(lib, tracks):
# Otherwise try artist/title/album
if not song:
log.debug(u'lastimport: no match for mb_trackid {0}, trying by '
u'artist/title/album'.format(trackid))
u'artist/title/album', trackid)
query = dbcore.AndQuery([
dbcore.query.SubstringQuery('artist', artist),
dbcore.query.SubstringQuery('title', title),
@ -178,26 +170,20 @@ def process_tracks(lib, tracks):
if song:
count = int(song.get('play_count', 0))
new_count = int(tracks[num]['playcount'])
log.debug(
u'lastimport: match: {0} - {1} ({2}) '
u'updating: play_count {3} => {4}'.format(
song.artist, song.title, song.album, count, new_count
)
log.debug(u'lastimport: match: {0} - {1} ({2}) '
u'updating: play_count {3} => {4}',
song.artist, song.title, song.album, count, new_count
)
song['play_count'] = new_count
song.store()
total_found += 1
else:
total_fails += 1
log.info(
u'lastimport: - No match: {0} - {1} ({2})'
.format(artist, title, album)
)
log.info(u'lastimport: - No match: {0} - {1} ({2})',
artist, title, album)
if total_fails > 0:
log.info(
'lastimport: Acquired {0}/{1} play-counts ({2} unknown)'
.format(total_found, total, total_fails)
)
log.info('lastimport: Acquired {0}/{1} play-counts ({2} unknown)',
total_found, total, total_fails)
return total_found, total_fails


@ -63,12 +63,12 @@ def fetch_url(url):
try:
r = requests.get(url, verify=False)
except requests.RequestException as exc:
log.debug(u'lyrics request failed: {0}'.format(exc))
log.debug(u'lyrics request failed: {0}', exc)
return
if r.status_code == requests.codes.ok:
return r.text
else:
log.debug(u'failed to fetch: {0} ({1})'.format(url, r.status_code))
log.debug(u'failed to fetch: {0} ({1})', url, r.status_code)
def unescape(text):
@ -272,7 +272,7 @@ def slugify(text):
text = unicodedata.normalize('NFKD', text).encode('ascii', 'ignore')
text = unicode(re.sub('[-\s]+', ' ', text))
except UnicodeDecodeError:
log.exception(u"Failing to normalize '{0}'".format(text))
log.exception(u"Failing to normalize '{0}'", text)
return text
@ -323,7 +323,7 @@ def is_lyrics(text, artist=None):
badTriggersOcc = []
nbLines = text.count('\n')
if nbLines <= 1:
log.debug(u"Ignoring too short lyrics '{0}'".format(text))
log.debug(u"Ignoring too short lyrics '{0}'", text)
return False
elif nbLines < 5:
badTriggersOcc.append('too_short')
@ -341,7 +341,7 @@ def is_lyrics(text, artist=None):
text, re.I))
if badTriggersOcc:
log.debug(u'Bad triggers detected: {0}'.format(badTriggersOcc))
log.debug(u'Bad triggers detected: {0}', badTriggersOcc)
return len(badTriggersOcc) < 2
@ -409,7 +409,7 @@ def fetch_google(artist, title):
data = json.load(data)
if 'error' in data:
reason = data['error']['errors'][0]['reason']
log.debug(u'google lyrics backend error: {0}'.format(reason))
log.debug(u'google lyrics backend error: {0}', reason)
return
if 'items' in data.keys():
@ -424,7 +424,7 @@ def fetch_google(artist, title):
continue
if is_lyrics(lyrics, artist):
log.debug(u'got lyrics from {0}'.format(item['displayLink']))
log.debug(u'got lyrics from {0}', item['displayLink'])
return lyrics
@ -502,8 +502,8 @@ class LyricsPlugin(plugins.BeetsPlugin):
"""
# Skip if the item already has lyrics.
if not force and item.lyrics:
log.log(loglevel, u'lyrics already present: {0} - {1}'
.format(item.artist, item.title))
log.log(loglevel, u'lyrics already present: {0} - {1}',
item.artist, item.title)
return
lyrics = None
@ -515,11 +515,11 @@ class LyricsPlugin(plugins.BeetsPlugin):
lyrics = u"\n\n---\n\n".join([l for l in lyrics if l])
if lyrics:
log.log(loglevel, u'fetched lyrics: {0} - {1}'
.format(item.artist, item.title))
log.log(loglevel, u'fetched lyrics: {0} - {1}',
item.artist, item.title)
else:
log.log(loglevel, u'lyrics not found: {0} - {1}'
.format(item.artist, item.title))
log.log(loglevel, u'lyrics not found: {0} - {1}',
item.artist, item.title)
fallback = self.config['fallback'].get()
if fallback:
lyrics = fallback
@ -539,6 +539,5 @@ class LyricsPlugin(plugins.BeetsPlugin):
for backend in self.backends:
lyrics = backend(artist, title)
if lyrics:
log.debug(u'got lyrics from backend: {0}'
.format(backend.__name__))
log.debug(u'got lyrics from backend: {0}', backend.__name__)
return _scrape_strip_cruft(lyrics, True)


@ -79,7 +79,7 @@ def update_album_list(album_list):
if re.match(UUID_REGEX, aid):
album_ids.append(aid)
else:
log.info(u'skipping invalid MBID: {0}'.format(aid))
log.info(u'skipping invalid MBID: {0}', aid)
# Submit to MusicBrainz.
print('Updating MusicBrainz collection {0}...'.format(collection_id))


@ -31,14 +31,13 @@ def mbsync_singletons(lib, query, move, pretend, write):
"""
for item in lib.items(query + ['singleton:true']):
if not item.mb_trackid:
log.info(u'Skipping singleton {0}: has no mb_trackid'
.format(item.title))
log.info(u'Skipping singleton {0}: has no mb_trackid', item.title)
continue
# Get the MusicBrainz recording info.
track_info = hooks.track_for_mbid(item.mb_trackid)
if not track_info:
log.info(u'Recording ID not found: {0}'.format(item.mb_trackid))
log.info(u'Recording ID not found: {0}', item.mb_trackid)
continue
# Apply.
@ -54,7 +53,7 @@ def mbsync_albums(lib, query, move, pretend, write):
# Process matching albums.
for a in lib.albums(query):
if not a.mb_albumid:
log.info(u'Skipping album {0}: has no mb_albumid'.format(a.id))
log.info(u'Skipping album {0}: has no mb_albumid', a.id)
continue
items = list(a.items())
@ -62,7 +61,7 @@ def mbsync_albums(lib, query, move, pretend, write):
# Get the MusicBrainz album information.
album_info = hooks.album_for_mbid(a.mb_albumid)
if not album_info:
log.info(u'Release ID not found: {0}'.format(a.mb_albumid))
log.info(u'Release ID not found: {0}', a.mb_albumid)
continue
# Map recording MBIDs to their information. Recordings can appear
@ -109,7 +108,7 @@ def mbsync_albums(lib, query, move, pretend, write):
# Move album art (and any inconsistent items).
if move and lib.directory in util.ancestry(items[0].path):
log.debug(u'moving album {0}'.format(a.id))
log.debug(u'moving album {0}', a.id)
a.move()


@ -43,10 +43,8 @@ def _missing(album):
for track_info in getattr(album_info, 'tracks', []):
if track_info.track_id not in item_mbids:
item = _item(track_info, album_info, album.id)
log.debug(u'{0}: track {1} in album {2}'
.format(PLUGIN,
track_info.track_id,
album_info.album_id))
log.debug(u'{0}: track {1} in album {2}',
PLUGIN, track_info.track_id, album_info.album_id)
yield item


@ -71,7 +71,7 @@ class MPDClientWrapper(object):
if host[0] in ['/', '~']:
host = os.path.expanduser(host)
log.info(u'mpdstats: connecting to {0}:{1}'.format(host, port))
log.info(u'mpdstats: connecting to {0}:{1}', host, port)
try:
self.client.connect(host, port)
except socket.error as e:
@ -99,7 +99,7 @@ class MPDClientWrapper(object):
try:
return getattr(self.client, command)()
except (select.error, mpd.ConnectionError) as err:
log.error(u'mpdstats: {0}'.format(err))
log.error(u'mpdstats: {0}', err)
if retries <= 0:
# if we exited without breaking, we couldn't reconnect in time :(
@ -171,9 +171,7 @@ class MPDStats(object):
if item:
return item
else:
log.info(u'mpdstats: item not found: {0}'.format(
displayable_path(path)
))
log.info(u'mpdstats: item not found: {0}', displayable_path(path))
@staticmethod
def update_item(item, attribute, value=None, increment=None):
@ -192,11 +190,11 @@ class MPDStats(object):
item[attribute] = value
item.store()
log.debug(u'mpdstats: updated: {0} = {1} [{2}]'.format(
attribute,
item[attribute],
displayable_path(item.path),
))
log.debug(u'mpdstats: updated: {0} = {1} [{2}]',
attribute,
item[attribute],
displayable_path(item.path),
)
def update_rating(self, item, skipped):
"""Update the rating for a beets item.
@ -232,17 +230,13 @@ class MPDStats(object):
"""Updates the play count of a song.
"""
self.update_item(song['beets_item'], 'play_count', increment=1)
log.info(u'mpdstats: played {0}'.format(
displayable_path(song['path'])
))
log.info(u'mpdstats: played {0}', displayable_path(song['path']))
def handle_skipped(self, song):
"""Updates the skip count of a song.
"""
self.update_item(song['beets_item'], 'skip_count', increment=1)
log.info(u'mpdstats: skipped {0}'.format(
displayable_path(song['path'])
))
log.info(u'mpdstats: skipped {0}', displayable_path(song['path']))
def on_stop(self, status):
log.info(u'mpdstats: stop')
@ -264,9 +258,7 @@ class MPDStats(object):
return
if is_url(path):
log.info(u'mpdstats: playing stream {0}'.format(
displayable_path(path)
))
log.info(u'mpdstats: playing stream {0}', displayable_path(path))
return
played, duration = map(int, status['time'].split(':', 1))
@ -275,9 +267,7 @@ class MPDStats(object):
if self.now_playing and self.now_playing['path'] != path:
self.handle_song_change(self.now_playing)
log.info(u'mpdstats: playing {0}'.format(
displayable_path(path)
))
log.info(u'mpdstats: playing {0}', displayable_path(path))
self.now_playing = {
'started': time.time(),
@ -302,8 +292,7 @@ class MPDStats(object):
if handler:
handler(status)
else:
log.debug(u'mpdstats: unhandled status "{0}"'.
format(status))
log.debug(u'mpdstats: unhandled status "{0}"', status)
events = self.mpd.events()


@ -101,10 +101,10 @@ def play_music(lib, opts, args):
# Invoke the command and log the output.
output = util.command_output(command)
if output:
log.debug(u'Output of {0}: {1}'.format(
util.displayable_path(command[0]),
output.decode('utf8', 'ignore'),
))
log.debug(u'Output of {0}: {1}',
util.displayable_path(command[0]),
output.decode('utf8', 'ignore'),
)
else:
log.debug(u'play: no output')


@ -180,9 +180,9 @@ class CommandBackend(Backend):
cmd = cmd + ['-d', str(self.gain_offset)]
cmd = cmd + [syspath(i.path) for i in items]
log.debug(u'replaygain: analyzing {0} files'.format(len(items)))
log.debug(u"replaygain: executing {0}"
.format(" ".join(map(displayable_path, cmd))))
log.debug(u'replaygain: analyzing {0} files', len(items))
log.debug(u"replaygain: executing {0}",
" ".join(map(displayable_path, cmd)))
output = call(cmd)
log.debug(u'replaygain: analysis finished')
results = self.parse_tool_output(output,
@ -199,7 +199,7 @@ class CommandBackend(Backend):
for line in text.split('\n')[1:num_lines + 1]:
parts = line.split('\t')
if len(parts) != 6 or parts[0] == 'File':
log.debug(u'replaygain: bad tool output: {0}'.format(text))
log.debug(u'replaygain: bad tool output: {0}', text)
raise ReplayGainError('mp3gain failed')
d = {
'file': parts[0],
@ -548,14 +548,8 @@ class AudioToolsBackend(Backend):
# be obtained from an audiofile instance.
rg_track_gain, rg_track_peak = rg.title_gain(audiofile.to_pcm())
log.debug(
u'ReplayGain for track {0} - {1}: {2:.2f}, {3:.2f}'.format(
item.artist,
item.title,
rg_track_gain,
rg_track_peak
)
)
log.debug(u'ReplayGain for track {0} - {1}: {2:.2f}, {3:.2f}',
item.artist, item.title, rg_track_gain, rg_track_peak)
return Gain(gain=rg_track_gain, peak=rg_track_peak)
def compute_album_gain(self, album):
@ -563,12 +557,7 @@ class AudioToolsBackend(Backend):
:rtype: :class:`AlbumGain`
"""
log.debug(
u'Analysing album {0} - {1}'.format(
album.albumartist,
album.album
)
)
log.debug(u'Analysing album {0} - {1}', album.albumartist, album.album)
# The first item is taken and opened to get the sample rate to
# initialize the replaygain object. The object is used for all the
@ -584,26 +573,14 @@ class AudioToolsBackend(Backend):
track_gains.append(
Gain(gain=rg_track_gain, peak=rg_track_peak)
)
log.debug(
u'ReplayGain for track {0} - {1}: {2:.2f}, {3:.2f}'.format(
item.artist,
item.title,
rg_track_gain,
rg_track_peak
)
)
log.debug(u'ReplayGain for track {0} - {1}: {2:.2f}, {3:.2f}',
item.artist, item.title, rg_track_gain, rg_track_peak)
# After getting the values for all tracks, it's possible to get the
# album values.
rg_album_gain, rg_album_peak = rg.album_gain()
log.debug(
u'ReplayGain for Album {0} - {1}: {2:.2f}, {3:.2f}'.format(
album.albumartist,
album.album,
rg_album_gain,
rg_album_peak
)
)
log.debug(u'ReplayGain for Album {0} - {1}: {2:.2f}, {3:.2f}',
album.albumartist, album.album, rg_album_gain, rg_album_peak)
return AlbumGain(
Gain(gain=rg_album_gain, peak=rg_album_peak),
@ -674,19 +651,16 @@ class ReplayGainPlugin(BeetsPlugin):
item.rg_track_peak = track_gain.peak
item.store()
log.debug(u'replaygain: applied track gain {0}, peak {1}'.format(
item.rg_track_gain,
item.rg_track_peak
))
log.debug(u'replaygain: applied track gain {0}, peak {1}',
item.rg_track_gain, item.rg_track_peak)
def store_album_gain(self, album, album_gain):
album.rg_album_gain = album_gain.gain
album.rg_album_peak = album_gain.peak
album.store()
log.debug(u'replaygain: applied album gain {0}, peak {1}'.format(
album.rg_album_gain,
album.rg_album_peak))
log.debug(u'replaygain: applied album gain {0}, peak {1}',
album.rg_album_gain, album.rg_album_peak)
def handle_album(self, album, write):
"""Compute album and track replay gain store it in all of the
@ -697,12 +671,11 @@ class ReplayGainPlugin(BeetsPlugin):
items, nothing is done.
"""
if not self.album_requires_gain(album):
log.info(u'Skipping album {0} - {1}'.format(album.albumartist,
album.album))
log.info(u'Skipping album {0} - {1}',
album.albumartist, album.album)
return
log.info(u'analyzing {0} - {1}'.format(album.albumartist,
album.album))
log.info(u'analyzing {0} - {1}', album.albumartist, album.album)
try:
album_gain = self.backend_instance.compute_album_gain(album)
@ -721,7 +694,7 @@ class ReplayGainPlugin(BeetsPlugin):
if write:
item.try_write()
except ReplayGainError as e:
log.info(u"ReplayGain error: {0}".format(e))
log.info(u"ReplayGain error: {0}", e)
except FatalReplayGainError as e:
raise ui.UserError(
u"Fatal replay gain error: {0}".format(e)
@ -735,12 +708,10 @@ class ReplayGainPlugin(BeetsPlugin):
in the item, nothing is done.
"""
if not self.track_requires_gain(item):
log.info(u'Skipping track {0} - {1}'
.format(item.artist, item.title))
log.info(u'Skipping track {0} - {1}', item.artist, item.title)
return
log.info(u'analyzing {0} - {1}'
.format(item.artist, item.title))
log.info(u'analyzing {0} - {1}', item.artist, item.title)
try:
track_gains = self.backend_instance.compute_track_gain([item])
@ -755,7 +726,7 @@ class ReplayGainPlugin(BeetsPlugin):
if write:
item.try_write()
except ReplayGainError as e:
log.info(u"ReplayGain error: {0}".format(e))
log.info(u"ReplayGain error: {0}", e)
except FatalReplayGainError as e:
raise ui.UserError(
u"Fatal replay gain error: {0}".format(e)


@ -59,7 +59,7 @@ class RewritePlugin(BeetsPlugin):
if fieldname not in library.Item._fields:
raise ui.UserError("invalid field name (%s) in rewriter" %
fieldname)
log.debug(u'adding template field {0}'.format(key))
log.debug(u'adding template field {0}', key)
pattern = re.compile(pattern.lower())
rules[fieldname].append((pattern, value))
if fieldname == 'artist':


@ -64,8 +64,7 @@ class ScrubPlugin(BeetsPlugin):
# Walk through matching files and remove tags.
for item in lib.items(ui.decargs(args)):
log.info(u'scrubbing: {0}'.format(
util.displayable_path(item.path)))
log.info(u'scrubbing: {0}', util.displayable_path(item.path))
# Get album art if we need to restore it.
if opts.write:
@ -132,14 +131,13 @@ def _scrub(path):
del f[tag]
f.save()
except IOError as exc:
log.error(u'could not scrub {0}: {1}'.format(
util.displayable_path(path), exc,
))
log.error(u'could not scrub {0}: {1}',
util.displayable_path(path), exc)
# Automatically embed art into imported albums.
@ScrubPlugin.listen('write')
def write_item(path):
if not scrubbing and config['scrub']['auto']:
log.debug(u'auto-scrubbing {0}'.format(util.displayable_path(path)))
log.debug(u'auto-scrubbing {0}', util.displayable_path(path))
_scrub(path)


@ -63,8 +63,7 @@ class SpotifyPlugin(BeetsPlugin):
self.config['show_failures'].set(True)
if self.config['mode'].get() not in ['list', 'open']:
log.warn(u'{0} is not a valid mode'
.format(self.config['mode'].get()))
log.warn(u'{0} is not a valid mode', self.config['mode'].get())
return False
self.opts = opts
@ -81,7 +80,7 @@ class SpotifyPlugin(BeetsPlugin):
log.debug(u'Your beets query returned no items, skipping spotify')
return
log.info(u'Processing {0} tracks...'.format(len(items)))
log.info(u'Processing {0} tracks...', len(items))
for item in items:
@ -113,8 +112,7 @@ class SpotifyPlugin(BeetsPlugin):
try:
r.raise_for_status()
except HTTPError as e:
log.debug(u'URL returned a {0} error'
.format(e.response.status_code))
log.debug(u'URL returned a {0} error', e.response.status_code)
failures.append(search_url)
continue
@ -130,33 +128,29 @@ class SpotifyPlugin(BeetsPlugin):
# Simplest, take the first result
chosen_result = None
if len(r_data) == 1 or self.config['tiebreak'].get() == "first":
log.debug(u'Spotify track(s) found, count: {0}'
.format(len(r_data)))
log.debug(u'Spotify track(s) found, count: {0}', len(r_data))
chosen_result = r_data[0]
elif len(r_data) > 1:
# Use the popularity filter
log.debug(u'Most popular track chosen, count: {0}'
.format(len(r_data)))
log.debug(u'Most popular track chosen, count: {0}', len(r_data))
chosen_result = max(r_data, key=lambda x: x['popularity'])
if chosen_result:
results.append(chosen_result)
else:
log.debug(u'No spotify track found: {0}'.format(search_url))
log.debug(u'No spotify track found: {0}', search_url)
failures.append(search_url)
failure_count = len(failures)
if failure_count > 0:
if self.config['show_failures'].get():
log.info(u'{0} track(s) did not match a Spotify ID:'
.format(failure_count))
log.info(u'{0} track(s) did not match a Spotify ID:', failure_count)
for track in failures:
log.info(u'track: {0}'.format(track))
log.info(u'track: {0}', track)
log.info(u'')
else:
log.warn(u'{0} track(s) did not match a Spotify ID;\n'
u'use --show-failures to display'
.format(failure_count))
u'use --show-failures to display', failure_count)
return results


@ -56,11 +56,11 @@ class ThePlugin(BeetsPlugin):
try:
re.compile(p)
except re.error:
self._log.error(u'[the] invalid pattern: {0}'.format(p))
self._log.error(u'[the] invalid pattern: {0}', p)
else:
if not (p.startswith('^') or p.endswith('$')):
self._log.warn(u'[the] warning: \"{0}\" will not '
'match string start/end'.format(p))
'match string start/end', p)
if self.config['a']:
self.patterns = [PATTERN_A] + self.patterns
if self.config['the']:
@ -99,7 +99,7 @@ class ThePlugin(BeetsPlugin):
r = self.unthe(text, p)
if r != text:
break
self._log.debug(u'[the] \"{0}\" -> \"{1}\"'.format(text, r))
self._log.debug(u'[the] \"{0}\" -> \"{1}\"', text, r)
return r
else:
return u''


@ -49,10 +49,10 @@ class ZeroPlugin(BeetsPlugin):
for field in self.config['fields'].as_str_seq():
if field in ('id', 'path', 'album_id'):
log.warn(u'[zero] field \'{0}\' ignored, zeroing '
u'it would be dangerous'.format(field))
u'it would be dangerous', field)
continue
if field not in MediaFile.fields():
log.error(u'[zero] invalid field: {0}'.format(field))
log.error(u'[zero] invalid field: {0}', field)
continue
try:
@ -97,5 +97,5 @@ class ZeroPlugin(BeetsPlugin):
match = patterns is True
if match:
log.debug(u'[zero] {0}: {1} -> None'.format(field, value))
log.debug(u'[zero] {0}: {1} -> None', field, value)
tags[field] = None