Convert beets core to lazy logging

Bruno Cauet 2015-01-03 17:46:23 +01:00
parent b40fb507c3
commit e75f9a703d
9 changed files with 103 additions and 123 deletions
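Before this change, each call site built its message eagerly with str.format(), so the formatting work happened even when the logger was going to discard the record. After the change, the call sites hand the format arguments to the logging call itself, and the string is only interpolated when a record is actually emitted. Note that the arguments (such as displayable_path(...)) are still evaluated at the call site; only the str.format() work is deferred. Below is a minimal sketch of how {}-style lazy formatting can be layered on the standard logging module; the class and helper names are illustrative assumptions, not necessarily the exact machinery beets uses.

    import logging

    class StrFormatLogger(logging.Logger):
        """Logger whose messages are str.format() templates, formatted lazily."""

        class _Message(object):
            """Holds the template and positional arguments; formats only when rendered."""
            def __init__(self, msg, *args):
                self.msg = msg
                self.args = args

            def __str__(self):
                return self.msg.format(*self.args)

        def _log(self, level, msg, args, **kwargs):
            # Wrap the template and its positional arguments, and pass an empty
            # args tuple to the base logger so the stdlib's %-interpolation is
            # skipped; formatting happens only when a handler renders the record.
            message = self._Message(msg, *args)
            return super(StrFormatLogger, self)._log(level, message, (), **kwargs)

    logging.setLoggerClass(StrFormatLogger)
    log = logging.getLogger('beets.autotag')

    # Old, eager style: formats the string even if DEBUG output is disabled.
    #   log.debug(u'Searching for album ID: {0}'.format(albumid))
    # New, lazy style as used throughout this commit: formatting is deferred.
    #   log.debug(u'Searching for album ID: {0}', albumid)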

View file

@ -267,7 +267,7 @@ def match_by_id(items):
# If all album IDs are equal, look up the album.
if bool(reduce(lambda x, y: x if x == y else (), albumids)):
albumid = albumids[0]
log.debug(u'Searching for discovered album ID: {0}'.format(albumid))
log.debug(u'Searching for discovered album ID: {0}', albumid)
return hooks.album_for_mbid(albumid)
else:
log.debug(u'No album ID consensus.')
@ -330,7 +330,7 @@ def _add_candidate(items, results, info):
checking the track count, ordering the items, checking for
duplicates, and calculating the distance.
"""
log.debug(u'Candidate: {0} - {1}'.format(info.artist, info.album))
log.debug(u'Candidate: {0} - {1}', info.artist, info.album)
# Discard albums with zero tracks.
if not info.tracks:
@ -345,7 +345,7 @@ def _add_candidate(items, results, info):
# Discard matches without required tags.
for req_tag in config['match']['required'].as_str_seq():
if getattr(info, req_tag) is None:
log.debug(u'Ignored. Missing required tag: {0}'.format(req_tag))
log.debug(u'Ignored. Missing required tag: {0}', req_tag)
return
# Find mapping between the items and the track info.
@ -358,10 +358,10 @@ def _add_candidate(items, results, info):
penalties = [key for _, key in dist]
for penalty in config['match']['ignored'].as_str_seq():
if penalty in penalties:
log.debug(u'Ignored. Penalty: {0}'.format(penalty))
log.debug(u'Ignored. Penalty: {0}', penalty)
return
log.debug(u'Success. Distance: {0}'.format(dist))
log.debug(u'Success. Distance: {0}', dist)
results[info.album_id] = hooks.AlbumMatch(dist, info, mapping,
extra_items, extra_tracks)
@ -387,7 +387,7 @@ def tag_album(items, search_artist=None, search_album=None,
likelies, consensus = current_metadata(items)
cur_artist = likelies['artist']
cur_album = likelies['album']
log.debug(u'Tagging {0} - {1}'.format(cur_artist, cur_album))
log.debug(u'Tagging {0} - {1}', cur_artist, cur_album)
# The output result (distance, AlbumInfo) tuples (keyed by MB album
# ID).
@ -395,7 +395,7 @@ def tag_album(items, search_artist=None, search_album=None,
# Search by explicit ID.
if search_id is not None:
log.debug(u'Searching for album ID: {0}'.format(search_id))
log.debug(u'Searching for album ID: {0}', search_id)
search_cands = hooks.albums_for_id(search_id)
# Use existing metadata or text search.
@ -405,7 +405,7 @@ def tag_album(items, search_artist=None, search_album=None,
if id_info:
_add_candidate(items, candidates, id_info)
rec = _recommendation(candidates.values())
log.debug(u'Album ID match recommendation is {0}'.format(str(rec)))
log.debug(u'Album ID match recommendation is {0}', str(rec))
if candidates and not config['import']['timid']:
# If we have a very good MBID match, return immediately.
# Otherwise, this match will compete against metadata-based
@ -418,20 +418,19 @@ def tag_album(items, search_artist=None, search_album=None,
if not (search_artist and search_album):
# No explicit search terms -- use current metadata.
search_artist, search_album = cur_artist, cur_album
log.debug(u'Search terms: {0} - {1}'.format(search_artist,
search_album))
log.debug(u'Search terms: {0} - {1}', search_artist, search_album)
# Is this album likely to be a "various artist" release?
va_likely = ((not consensus['artist']) or
(search_artist.lower() in VA_ARTISTS) or
any(item.comp for item in items))
log.debug(u'Album might be VA: {0}'.format(str(va_likely)))
log.debug(u'Album might be VA: {0}', str(va_likely))
# Get the results from the data sources.
search_cands = hooks.album_candidates(items, search_artist,
search_album, va_likely)
log.debug(u'Evaluating {0} candidates.'.format(len(search_cands)))
log.debug(u'Evaluating {0} candidates.', len(search_cands))
for info in search_cands:
_add_candidate(items, candidates, info)
@ -456,7 +455,7 @@ def tag_item(item, search_artist=None, search_title=None,
# First, try matching by MusicBrainz ID.
trackid = search_id or item.mb_trackid
if trackid:
log.debug(u'Searching for track ID: {0}'.format(trackid))
log.debug(u'Searching for track ID: {0}', trackid)
for track_info in hooks.tracks_for_id(trackid):
dist = track_distance(item, track_info, incl_artist=True)
candidates[track_info.track_id] = \
@ -477,8 +476,7 @@ def tag_item(item, search_artist=None, search_title=None,
# Search terms.
if not (search_artist and search_title):
search_artist, search_title = item.artist, item.title
log.debug(u'Item search terms: {0} - {1}'.format(search_artist,
search_title))
log.debug(u'Item search terms: {0} - {1}', search_artist, search_title)
# Get and evaluate candidate metadata.
for track_info in hooks.item_candidates(item, search_artist, search_title):
@ -486,7 +484,7 @@ def tag_item(item, search_artist=None, search_title=None,
candidates[track_info.track_id] = hooks.TrackMatch(dist, track_info)
# Sort by distance and return with recommendation.
log.debug(u'Found {0} candidates.'.format(len(candidates)))
log.debug(u'Found {0} candidates.', len(candidates))
candidates = sorted(candidates.itervalues())
rec = _recommendation(candidates)
return candidates, rec

View file

@ -374,7 +374,7 @@ def album_for_id(releaseid):
"""
albumid = _parse_id(releaseid)
if not albumid:
log.debug(u'Invalid MBID ({0}).'.format(releaseid))
log.debug(u'Invalid MBID ({0}).', releaseid)
return
try:
res = musicbrainzngs.get_release_by_id(albumid,
@ -394,7 +394,7 @@ def track_for_id(releaseid):
"""
trackid = _parse_id(releaseid)
if not trackid:
log.debug(u'Invalid MBID ({0}).'.format(releaseid))
log.debug(u'Invalid MBID ({0}).', releaseid)
return
try:
res = musicbrainzngs.get_recording_by_id(trackid, TRACK_INCLUDES)

View file

@ -71,7 +71,7 @@ def _open_state():
# unpickling, including ImportError. We use a catch-all
# exception to avoid enumerating them all (the docs don't even have a
# full list!).
log.debug(u'state file could not be read: {0}'.format(exc))
log.debug(u'state file could not be read: {0}', exc)
return {}
@ -81,7 +81,7 @@ def _save_state(state):
with open(config['statefile'].as_filename(), 'w') as f:
pickle.dump(state, f)
except IOError as exc:
log.error(u'state file could not be written: {0}'.format(exc))
log.error(u'state file could not be written: {0}', exc)
# Utilities for reading and writing the beets progress file, which
@ -347,8 +347,8 @@ class ImportSession(object):
# Either accept immediately or prompt for input to decide.
if self.want_resume is True or \
self.should_resume(toppath):
log.warn(u'Resuming interrupted import of {0}'.format(
util.displayable_path(toppath)))
log.warn(u'Resuming interrupted import of {0}',
util.displayable_path(toppath))
self._is_resuming[toppath] = True
else:
# Clear progress; we're starting from the top.
@ -481,13 +481,12 @@ class ImportTask(object):
def remove_duplicates(self, lib):
duplicate_items = self.duplicate_items(lib)
log.debug(u'removing {0} old duplicated items'
.format(len(duplicate_items)))
log.debug(u'removing {0} old duplicated items', len(duplicate_items))
for item in duplicate_items:
item.remove()
if lib.directory in util.ancestry(item.path):
log.debug(u'deleting duplicate {0}'
.format(util.displayable_path(item.path)))
log.debug(u'deleting duplicate {0}',
util.displayable_path(item.path))
util.remove(item.path)
util.prune_dirs(os.path.dirname(item.path),
lib.directory)
@ -686,12 +685,11 @@ class ImportTask(object):
self.album.store()
log.debug(
u'Reimported album: added {0}, flexible '
u'attributes {1} from album {2} for {3}'.format(
self.album.added,
replaced_album._values_flex.keys(),
replaced_album.id,
displayable_path(self.album.path),
)
u'attributes {1} from album {2} for {3}',
self.album.added,
replaced_album._values_flex.keys(),
replaced_album.id,
displayable_path(self.album.path)
)
for item in self.imported_items():
@ -701,20 +699,18 @@ class ImportTask(object):
item.added = dup_item.added
log.debug(
u'Reimported item added {0} '
u'from item {1} for {2}'.format(
item.added,
dup_item.id,
displayable_path(item.path),
)
u'from item {1} for {2}',
item.added,
dup_item.id,
displayable_path(item.path)
)
item.update(dup_item._values_flex)
log.debug(
u'Reimported item flexible attributes {0} '
u'from item {1} for {2}'.format(
dup_item._values_flex.keys(),
dup_item.id,
displayable_path(item.path),
)
u'from item {1} for {2}',
dup_item._values_flex.keys(),
dup_item.id,
displayable_path(item.path)
)
item.store()
@ -724,13 +720,12 @@ class ImportTask(object):
"""
for item in self.imported_items():
for dup_item in self.replaced_items[item]:
log.debug(u'Replacing item {0}: {1}'
.format(dup_item.id,
displayable_path(item.path)))
log.debug(u'Replacing item {0}: {1}',
dup_item.id, displayable_path(item.path))
dup_item.remove()
log.debug(u'{0} of {1} items replaced'
.format(sum(bool(l) for l in self.replaced_items.values()),
len(self.imported_items())))
log.debug(u'{0} of {1} items replaced',
sum(bool(l) for l in self.replaced_items.values()),
len(self.imported_items()))
def choose_match(self, session):
"""Ask the session which match should apply and apply it.
@ -1002,8 +997,8 @@ class ImportTaskFactory(object):
def singleton(self, path):
if self.session.already_imported(self.toppath, [path]):
log.debug(u'Skipping previously-imported path: {0}'
.format(displayable_path(path)))
log.debug(u'Skipping previously-imported path: {0}',
displayable_path(path))
self.skipped += 1
return None
@ -1026,8 +1021,8 @@ class ImportTaskFactory(object):
dirs = list(set(os.path.dirname(p) for p in paths))
if self.session.already_imported(self.toppath, dirs):
log.debug(u'Skipping previously-imported path: {0}'
.format(displayable_path(dirs)))
log.debug(u'Skipping previously-imported path: {0}',
displayable_path(dirs))
self.skipped += 1
return None
@ -1055,14 +1050,10 @@ class ImportTaskFactory(object):
# Silently ignore non-music files.
pass
elif isinstance(exc.reason, mediafile.UnreadableFileError):
log.warn(u'unreadable file: {0}'.format(
displayable_path(path))
)
log.warn(u'unreadable file: {0}', displayable_path(path))
else:
log.error(u'error reading {0}: {1}'.format(
displayable_path(path),
exc,
))
log.error(u'error reading {0}: {1}',
displayable_path(path), exc)
# Full-album pipeline stages.
@ -1086,13 +1077,13 @@ def read_tasks(session):
"'copy' or 'move' to be enabled.")
continue
log.debug(u'extracting archive {0}'
.format(displayable_path(toppath)))
log.debug(u'extracting archive {0}',
displayable_path(toppath))
archive_task = ArchiveImportTask(toppath)
try:
archive_task.extract()
except Exception as exc:
log.error(u'extraction failed: {0}'.format(exc))
log.error(u'extraction failed: {0}', exc)
continue
# Continue reading albums from the extracted directory.
@ -1112,12 +1103,12 @@ def read_tasks(session):
yield archive_task
if not imported:
log.warn(u'No files imported from {0}'
.format(displayable_path(user_toppath)))
log.warn(u'No files imported from {0}',
displayable_path(user_toppath))
# Show skipped directories.
if skipped:
log.info(u'Skipped {0} directories.'.format(skipped))
log.info(u'Skipped {0} directories.', skipped)
def query_tasks(session):
@ -1133,8 +1124,8 @@ def query_tasks(session):
else:
# Search for albums.
for album in session.lib.albums(session.query):
log.debug(u'yielding album {0}: {1} - {2}'
.format(album.id, album.albumartist, album.album))
log.debug(u'yielding album {0}: {1} - {2}',
album.id, album.albumartist, album.album)
items = list(album.items())
# Clear IDs from re-tagged items so they appear "fresh" when
@ -1159,7 +1150,7 @@ def lookup_candidates(session, task):
return
plugins.send('import_task_start', session=session, task=task)
log.debug(u'Looking up: {0}'.format(displayable_path(task.paths)))
log.debug(u'Looking up: {0}', displayable_path(task.paths))
task.lookup_candidates()
@ -1300,12 +1291,11 @@ def log_files(session, task):
"""A coroutine (pipeline stage) to log each file which will be imported
"""
if isinstance(task, SingletonImportTask):
log.info(
'Singleton: {0}'.format(displayable_path(task.item['path'])))
log.info('Singleton: {0}', displayable_path(task.item['path']))
elif task.items:
log.info('Album {0}'.format(displayable_path(task.paths[0])))
log.info('Album {0}', displayable_path(task.paths[0]))
for item in task.items:
log.info(' {0}'.format(displayable_path(item['path'])))
log.info(' {0}', displayable_path(item['path']))
def group_albums(session):

View file

@ -837,9 +837,9 @@ class Album(LibModel):
return
new_art = util.unique_path(new_art)
log.debug(u'moving album art {0} to {1}'
.format(util.displayable_path(old_art),
util.displayable_path(new_art)))
log.debug(u'moving album art {0} to {1}',
util.displayable_path(old_art),
util.displayable_path(new_art))
if copy:
util.copy(old_art, new_art)
elif link:

View file

@ -1313,7 +1313,7 @@ class MediaFile(object):
try:
self.mgfile = mutagen.File(path)
except unreadable_exc as exc:
log.debug(u'header parsing failed: {0}'.format(unicode(exc)))
log.debug(u'header parsing failed: {0}', unicode(exc))
raise UnreadableFileError(path)
except IOError as exc:
if type(exc) == IOError:
@ -1326,7 +1326,7 @@ class MediaFile(object):
except Exception as exc:
# Isolate bugs in Mutagen.
log.debug(traceback.format_exc())
log.error(u'uncaught Mutagen exception in open: {0}'.format(exc))
log.error(u'uncaught Mutagen exception in open: {0}', exc)
raise MutagenError(path, exc)
if self.mgfile is None:
@ -1399,7 +1399,7 @@ class MediaFile(object):
raise
except Exception as exc:
log.debug(traceback.format_exc())
log.error(u'uncaught Mutagen exception in save: {0}'.format(exc))
log.error(u'uncaught Mutagen exception in save: {0}', exc)
raise MutagenError(self.path, exc)
def delete(self):

View file

@ -204,7 +204,7 @@ def load_plugins(names=()):
except ImportError as exc:
# Again, this is hacky:
if exc.args[0].endswith(' ' + name):
log.warn(u'** plugin {0} not found'.format(name))
log.warn(u'** plugin {0} not found', name)
else:
raise
else:
@ -214,7 +214,7 @@ def load_plugins(names=()):
_classes.add(obj)
except:
log.warn(u'** error loading plugin {0}'.format(name))
log.warn(u'** error loading plugin {0}', name)
log.warn(traceback.format_exc())
@ -398,7 +398,7 @@ def send(event, **arguments):
Returns a list of return values from the handlers.
"""
log.debug(u'Sending event: {0}'.format(event))
log.debug(u'Sending event: {0}', event)
for handler in event_handlers()[event]:
# Don't break legacy plugins if we want to pass more arguments
argspec = inspect.getargspec(handler).args

View file

@ -866,14 +866,14 @@ def _configure(options):
config_path = config.user_config_path()
if os.path.isfile(config_path):
log.debug(u'user configuration: {0}'.format(
util.displayable_path(config_path)))
log.debug(u'user configuration: {0}',
util.displayable_path(config_path))
else:
log.debug(u'no user configuration found at {0}'.format(
util.displayable_path(config_path)))
log.debug(u'no user configuration found at {0}',
util.displayable_path(config_path))
log.debug(u'data directory: {0}'
.format(util.displayable_path(config.config_dir())))
log.debug(u'data directory: {0}',
util.displayable_path(config.config_dir()))
return config
@ -895,9 +895,9 @@ def _open_library(config):
util.displayable_path(dbpath)
))
log.debug(u'library database: {0}\n'
u'library directory: {1}'
.format(util.displayable_path(lib.path),
util.displayable_path(lib.directory)))
u'library directory: {1}',
util.displayable_path(lib.path),
util.displayable_path(lib.directory))
return lib
@ -945,7 +945,7 @@ def main(args=None):
_raw_main(args)
except UserError as exc:
message = exc.args[0] if exc.args else None
log.error(u'error: {0}'.format(message))
log.error(u'error: {0}', message)
sys.exit(1)
except util.HumanReadableException as exc:
exc.log(log)
@ -957,7 +957,7 @@ def main(args=None):
log.error(exc)
sys.exit(1)
except confit.ConfigError as exc:
log.error(u'configuration error: {0}'.format(exc))
log.error(u'configuration error: {0}', exc)
sys.exit(1)
except IOError as exc:
if exc.errno == errno.EPIPE:

View file

@ -764,8 +764,8 @@ class TerminalImportSession(importer.ImportSession):
"""Decide what to do when a new album or item seems similar to one
that's already in the library.
"""
log.warn(u"This {0} is already in the library!"
.format("album" if task.is_album else "item"))
log.warn(u"This {0} is already in the library!",
("album" if task.is_album else "item"))
if config['import']['quiet']:
# In quiet mode, don't prompt -- just skip.
@ -1014,16 +1014,16 @@ def update_items(lib, query, album, move, pretend):
# Did the item change since last checked?
if item.current_mtime() <= item.mtime:
log.debug(u'skipping {0} because mtime is up to date ({1})'
.format(displayable_path(item.path), item.mtime))
log.debug(u'skipping {0} because mtime is up to date ({1})',
displayable_path(item.path), item.mtime)
continue
# Read new data.
try:
item.read()
except library.ReadError as exc:
log.error(u'error reading {0}: {1}'.format(
displayable_path(item.path), exc))
log.error(u'error reading {0}: {1}',
displayable_path(item.path), exc)
continue
# Special-case album artist when it matches track artist. (Hacky
@ -1065,7 +1065,7 @@ def update_items(lib, query, album, move, pretend):
continue
album = lib.get_album(album_id)
if not album: # Empty albums have already been removed.
log.debug(u'emptied album {0}'.format(album_id))
log.debug(u'emptied album {0}', album_id)
continue
first_item = album.items().get()
@ -1076,7 +1076,7 @@ def update_items(lib, query, album, move, pretend):
# Move album art (and any inconsistent items).
if move and lib.directory in ancestry(first_item.path):
log.debug(u'moving album {0}'.format(album_id))
log.debug(u'moving album {0}', album_id)
album.move()
@ -1298,8 +1298,7 @@ def modify_items(lib, mods, dels, query, write, move, album, confirm):
if move:
cur_path = obj.path
if lib.directory in ancestry(cur_path): # In library?
log.debug(u'moving object {0}'
.format(displayable_path(cur_path)))
log.debug(u'moving object {0}', displayable_path(cur_path))
obj.move()
obj.try_sync(write)
@ -1377,9 +1376,9 @@ def move_items(lib, dest, query, copy, album):
action = 'Copying' if copy else 'Moving'
entity = 'album' if album else 'item'
log.info(u'{0} {1} {2}s.'.format(action, len(objs), entity))
log.info(u'{0} {1} {2}s.', action, len(objs), entity)
for obj in objs:
log.debug(u'moving: {0}'.format(util.displayable_path(obj.path)))
log.debug(u'moving: {0}', util.displayable_path(obj.path))
obj.move(copy, basedir=dest)
obj.store()
@ -1425,18 +1424,15 @@ def write_items(lib, query, pretend, force):
for item in items:
# Item deleted?
if not os.path.exists(syspath(item.path)):
log.info(u'missing file: {0}'.format(
util.displayable_path(item.path)
))
log.info(u'missing file: {0}', util.displayable_path(item.path))
continue
# Get an Item object reflecting the "clean" (on-disk) state.
try:
clean_item = library.Item.from_path(item.path)
except library.ReadError as exc:
log.error(u'error reading {0}: {1}'.format(
displayable_path(item.path), exc
))
log.error(u'error reading {0}: {1}',
displayable_path(item.path), exc)
continue
# Check for and display changes.

View file

@ -58,9 +58,8 @@ def pil_resize(maxwidth, path_in, path_out=None):
"""
path_out = path_out or temp_file_for(path_in)
from PIL import Image
log.debug(u'artresizer: PIL resizing {0} to {1}'.format(
util.displayable_path(path_in), util.displayable_path(path_out)
))
log.debug(u'artresizer: PIL resizing {0} to {1}',
util.displayable_path(path_in), util.displayable_path(path_out))
try:
im = Image.open(util.syspath(path_in))
@ -69,9 +68,8 @@ def pil_resize(maxwidth, path_in, path_out=None):
im.save(path_out)
return path_out
except IOError:
log.error(u"PIL cannot create thumbnail for '{0}'".format(
util.displayable_path(path_in)
))
log.error(u"PIL cannot create thumbnail for '{0}'",
util.displayable_path(path_in))
return path_in
@ -80,9 +78,8 @@ def im_resize(maxwidth, path_in, path_out=None):
Return the output path of resized image.
"""
path_out = path_out or temp_file_for(path_in)
log.debug(u'artresizer: ImageMagick resizing {0} to {1}'.format(
util.displayable_path(path_in), util.displayable_path(path_out)
))
log.debug(u'artresizer: ImageMagick resizing {0} to {1}',
util.displayable_path(path_in), util.displayable_path(path_out))
# "-resize widthxheight>" shrinks images with dimension(s) larger
# than the corresponding width and/or height dimension(s). The >
@ -94,9 +91,8 @@ def im_resize(maxwidth, path_in, path_out=None):
'-resize', '{0}x^>'.format(maxwidth), path_out
])
except subprocess.CalledProcessError:
log.warn(u'artresizer: IM convert failed for {0}'.format(
util.displayable_path(path_in)
))
log.warn(u'artresizer: IM convert failed for {0}',
util.displayable_path(path_in))
return path_in
return path_out
@ -134,7 +130,7 @@ class ArtResizer(object):
specified, with an inferred method.
"""
self.method = self._check_method(method)
log.debug(u"artresizer: method is {0}".format(self.method))
log.debug(u"artresizer: method is {0}", self.method)
self.can_compare = self._can_compare()
def resize(self, maxwidth, path_in, path_out=None):