Clean up of logging messages as described here

All logging now prefers the ' (single quote) over the " (double quote)

https://github.com/sampsyo/beets/wiki/Hacking
This commit is contained in:
e5e4eaeacd39c5cfba4d7c852c48277ae50331e6 2014-08-23 09:26:34 +10:00
parent 4ed568ba91
commit 66aee8094f
28 changed files with 174 additions and 173 deletions

1
.gitignore vendored
View file

@ -6,6 +6,7 @@
.svn
.tox
.coverage
.idea
# file patterns

View file

@ -4,3 +4,4 @@
^MANIFEST$
^docs/_build/
^\.tox/
^\.idea/

View file

@ -261,16 +261,16 @@ def match_by_id(items):
# Is there a consensus on the MB album ID?
albumids = [item.mb_albumid for item in items if item.mb_albumid]
if not albumids:
log.debug('No album IDs found.')
log.debug(u'No album IDs found.')
return None
# If all album IDs are equal, look up the album.
if bool(reduce(lambda x, y: x if x == y else (), albumids)):
albumid = albumids[0]
log.debug('Searching for discovered album ID: ' + albumid)
log.debug(u'Searching for discovered album ID: {0}'.format(albumid))
return hooks.album_for_mbid(albumid)
else:
log.debug('No album ID consensus.')
log.debug(u'No album ID consensus.')
def _recommendation(results):
@ -330,17 +330,17 @@ def _add_candidate(items, results, info):
checking the track count, ordering the items, checking for
duplicates, and calculating the distance.
"""
log.debug('Candidate: %s - %s' % (info.artist, info.album))
log.debug(u'Candidate: {0} - {1}'.format(info.artist, info.album))
# Don't duplicate.
if info.album_id in results:
log.debug('Duplicate.')
log.debug(u'Duplicate.')
return
# Discard matches without required tags.
for req_tag in config['match']['required'].as_str_seq():
if getattr(info, req_tag) is None:
log.debug('Ignored. Missing required tag: %s' % req_tag)
log.debug(u'Ignored. Missing required tag: {0}'.format(req_tag))
return
# Find mapping between the items and the track info.
@ -353,10 +353,10 @@ def _add_candidate(items, results, info):
penalties = [key for _, key in dist]
for penalty in config['match']['ignored'].as_str_seq():
if penalty in penalties:
log.debug('Ignored. Penalty: %s' % penalty)
log.debug(u'Ignored. Penalty: {0}'.format(penalty))
return
log.debug('Success. Distance: %f' % dist)
log.debug(u'Success. Distance: {0:.2f}'.format(dist))
results[info.album_id] = hooks.AlbumMatch(dist, info, mapping,
extra_items, extra_tracks)
@ -377,7 +377,7 @@ def tag_album(items, search_artist=None, search_album=None,
likelies, consensus = current_metadata(items)
cur_artist = likelies['artist']
cur_album = likelies['album']
log.debug('Tagging %s - %s' % (cur_artist, cur_album))
log.debug(u'Tagging {0} - {1}'.format(cur_artist, cur_album))
# The output result (distance, AlbumInfo) tuples (keyed by MB album
# ID).
@ -385,7 +385,7 @@ def tag_album(items, search_artist=None, search_album=None,
# Search by explicit ID.
if search_id is not None:
log.debug('Searching for album ID: ' + search_id)
log.debug(u'Searching for album ID: {0}'.format(search_id))
search_cands = hooks.albums_for_id(search_id)
# Use existing metadata or text search.
@ -395,32 +395,32 @@ def tag_album(items, search_artist=None, search_album=None,
if id_info:
_add_candidate(items, candidates, id_info)
rec = _recommendation(candidates.values())
log.debug('Album ID match recommendation is ' + str(rec))
log.debug(u'Album ID match recommendation is {0}'.format(str(rec)))
if candidates and not config['import']['timid']:
# If we have a very good MBID match, return immediately.
# Otherwise, this match will compete against metadata-based
# matches.
if rec == Recommendation.strong:
log.debug('ID match.')
log.debug(u'ID match.')
return cur_artist, cur_album, candidates.values(), rec
# Search terms.
if not (search_artist and search_album):
# No explicit search terms -- use current metadata.
search_artist, search_album = cur_artist, cur_album
log.debug(u'Search terms: %s - %s' % (search_artist, search_album))
log.debug(u'Search terms: {0} - {1}'.format(search_artist, search_album))
# Is this album likely to be a "various artist" release?
va_likely = ((not consensus['artist']) or
(search_artist.lower() in VA_ARTISTS) or
any(item.comp for item in items))
log.debug(u'Album might be VA: %s' % str(va_likely))
log.debug(u'Album might be VA: {0}'.format(str(va_likely)))
# Get the results from the data sources.
search_cands = hooks.album_candidates(items, search_artist,
search_album, va_likely)
log.debug(u'Evaluating %i candidates.' % len(search_cands))
log.debug(u'Evaluating {0} candidates.'.format(len(search_cands)))
for info in search_cands:
_add_candidate(items, candidates, info)
@ -445,7 +445,7 @@ def tag_item(item, search_artist=None, search_title=None,
# First, try matching by MusicBrainz ID.
trackid = search_id or item.mb_trackid
if trackid:
log.debug('Searching for track ID: ' + trackid)
log.debug(u'Searching for track ID: {0}'.format(trackid))
for track_info in hooks.tracks_for_id(trackid):
dist = track_distance(item, track_info, incl_artist=True)
candidates[track_info.track_id] = \
@ -453,7 +453,7 @@ def tag_item(item, search_artist=None, search_title=None,
# If this is a good match, then don't keep searching.
rec = _recommendation(candidates.values())
if rec == Recommendation.strong and not config['import']['timid']:
log.debug('Track ID match.')
log.debug(u'Track ID match.')
return candidates.values(), rec
# If we're searching by ID, don't proceed.
@ -466,7 +466,7 @@ def tag_item(item, search_artist=None, search_title=None,
# Search terms.
if not (search_artist and search_title):
search_artist, search_title = item.artist, item.title
log.debug(u'Item search terms: %s - %s' % (search_artist, search_title))
log.debug(u'Item search terms: {0} - {1}'.format(search_artist, search_title))
# Get and evaluate candidate metadata.
for track_info in hooks.item_candidates(item, search_artist, search_title):
@ -474,7 +474,7 @@ def tag_item(item, search_artist=None, search_title=None,
candidates[track_info.track_id] = hooks.TrackMatch(dist, track_info)
# Sort by distance and return with recommendation.
log.debug('Found %i candidates.' % len(candidates))
log.debug(u'Found {0} candidates.'.format(len(candidates)))
candidates = sorted(candidates.itervalues())
rec = _recommendation(candidates)
return candidates, rec

View file

@ -372,13 +372,13 @@ def album_for_id(releaseid):
"""
albumid = _parse_id(releaseid)
if not albumid:
log.debug('Invalid MBID (%s).' % (releaseid))
log.debug(u'Invalid MBID ({0}).'.format(releaseid))
return
try:
res = musicbrainzngs.get_release_by_id(albumid,
RELEASE_INCLUDES)
except musicbrainzngs.ResponseError:
log.debug('Album ID match failed.')
log.debug(u'Album ID match failed.')
return None
except musicbrainzngs.MusicBrainzError as exc:
raise MusicBrainzAPIError(exc, 'get release by ID', albumid,
@ -392,12 +392,12 @@ def track_for_id(releaseid):
"""
trackid = _parse_id(releaseid)
if not trackid:
log.debug('Invalid MBID (%s).' % (releaseid))
log.debug(u'Invalid MBID ({0}).'.format(releaseid))
return
try:
res = musicbrainzngs.get_recording_by_id(trackid, TRACK_INCLUDES)
except musicbrainzngs.ResponseError:
log.debug('Track ID match failed.')
log.debug(u'Track ID match failed.')
return None
except musicbrainzngs.MusicBrainzError as exc:
raise MusicBrainzAPIError(exc, 'get recording by ID', trackid,

View file

@ -81,7 +81,7 @@ def _save_state(state):
with open(config['statefile'].as_filename(), 'w') as f:
pickle.dump(state, f)
except IOError as exc:
log.error(u'state file could not be written: %s' % unicode(exc))
log.error(u'state file could not be written: {0}'.format(exc))
# Utilities for reading and writing the beets progress file, which
@ -339,7 +339,8 @@ class ImportSession(object):
# Either accept immediately or prompt for input to decide.
if self.want_resume is True or \
self.should_resume(toppath):
log.warn('Resuming interrupted import of %s' % toppath)
log.warn(u'Resuming interrupted import of {0}'.format(
util.displayable_path(toppath)))
self._is_resuming[toppath] = True
else:
# Clear progress; we're starting from the top.
@ -446,13 +447,13 @@ class ImportTask(object):
def remove_duplicates(self, lib):
duplicate_items = self.duplicate_items(lib)
log.debug('removing %i old duplicated items' %
len(duplicate_items))
log.debug(u'removing {0} old duplicated items'
.format(len(duplicate_items)))
for item in duplicate_items:
item.remove()
if lib.directory in util.ancestry(item.path):
log.debug(u'deleting duplicate %s' %
util.displayable_path(item.path))
log.debug(u'deleting duplicate {0}'
.format(util.displayable_path(item.path)))
util.remove(item.path)
util.prune_dirs(os.path.dirname(item.path),
lib.directory)
@ -634,11 +635,13 @@ class ImportTask(object):
))
self.replaced_items[item] = dup_items
for dup_item in dup_items:
log.debug('replacing item %i: %s' %
(dup_item.id, displayable_path(item.path)))
log.debug(u'replacing item {0}: {1}'
.format(dup_item.id,
displayable_path(item.path)))
dup_item.remove()
log.debug('%i of %i items replaced' % (len(self.replaced_items),
len(self.imported_items())))
log.debug(u'{0} of {1} items replaced'
.format(len(self.replaced_items),
len(self.imported_items())))
def choose_match(self, session):
"""Ask the session which match should apply and apply it.
@ -958,17 +961,17 @@ def read_tasks(session):
archive_task = None
if ArchiveImportTask.is_archive(syspath(toppath)):
if not (session.config['move'] or session.config['copy']):
log.warn("Archive importing requires either "
log.warn(u"Archive importing requires either "
"'copy' or 'move' to be enabled.")
continue
log.debug('extracting archive {0}'
log.debug(u'extracting archive {0}'
.format(displayable_path(toppath)))
archive_task = ArchiveImportTask(toppath)
try:
archive_task.extract()
except Exception as exc:
log.error('extraction failed: {0}'.format(exc))
log.error(u'extraction failed: {0}'.format(exc))
continue
# Continue reading albums from the extracted directory.
@ -1036,8 +1039,8 @@ def query_tasks(session):
else:
# Search for albums.
for album in session.lib.albums(session.query):
log.debug('yielding album %i: %s - %s' %
(album.id, album.albumartist, album.album))
log.debug(u'yielding album {0}: {1} - {2}'
.format(album.id, album.albumartist, album.album))
items = list(album.items())
# Clear IDs from re-tagged items so they appear "fresh" when
@ -1062,7 +1065,7 @@ def lookup_candidates(session, task):
return
plugins.send('import_task_start', session=session, task=task)
log.debug('Looking up: %s' % displayable_path(task.paths))
log.debug(u'Looking up: {0}'.format(displayable_path(task.paths)))
task.lookup_candidates()

View file

@ -778,7 +778,7 @@ class Album(LibModel):
return
new_art = util.unique_path(new_art)
log.debug('moving album art %s to %s' % (old_art, new_art))
log.debug(u'moving album art {0} to {1}'.format(old_art, new_art))
if copy:
util.copy(old_art, new_art)
else:

View file

@ -194,7 +194,7 @@ def load_plugins(names=()):
except ImportError as exc:
# Again, this is hacky:
if exc.args[0].endswith(' ' + name):
log.warn('** plugin %s not found' % name)
log.warn(u'** plugin {0} not found'.format(name))
else:
raise
else:
@ -204,7 +204,7 @@ def load_plugins(names=()):
_classes.add(obj)
except:
log.warn('** error loading plugin %s' % name)
log.warn(u'** error loading plugin {0}'.format(name))
log.warn(traceback.format_exc())
@ -371,7 +371,7 @@ def send(event, **arguments):
Returns a list of return values from the handlers.
"""
log.debug('Sending event: %s' % event)
log.debug(u'Sending event: {0}'.format(event))
for handler in event_handlers()[event]:
# Don't break legacy plugins if we want to pass more arguments
argspec = inspect.getargspec(handler).args

View file

@ -909,10 +909,10 @@ def _configure(options):
config_path = config.user_config_path()
if os.path.isfile(config_path):
log.debug('user configuration: {0}'.format(
log.debug(u'user configuration: {0}'.format(
util.displayable_path(config_path)))
else:
log.debug('no user configuration found at {0}'.format(
log.debug(u'no user configuration found at {0}'.format(
util.displayable_path(config_path)))
log.debug(u'data directory: {0}'

View file

@ -777,12 +777,12 @@ class TerminalImportSession(importer.ImportSession):
"""Decide what to do when a new album or item seems similar to one
that's already in the library.
"""
log.warn("This %s is already in the library!" %
("album" if task.is_album else "item"))
log.warn(u"This {0} is already in the library!"
.format("album" if task.is_album else "item"))
if config['import']['quiet']:
# In quiet mode, don't prompt -- just skip.
log.info('Skipping.')
log.info(u'Skipping.')
sel = 's'
else:
# Print some detail about the existing and new items so the
@ -1030,8 +1030,8 @@ def update_items(lib, query, album, move, pretend):
# Did the item change since last checked?
if item.current_mtime() <= item.mtime:
log.debug(u'skipping %s because mtime is up to date (%i)' %
(displayable_path(item.path), item.mtime))
log.debug(u'skipping {0} because mtime is up to date ({1})'
.format(displayable_path(item.path), item.mtime))
continue
# Read new data.
@ -1081,7 +1081,7 @@ def update_items(lib, query, album, move, pretend):
continue
album = lib.get_album(album_id)
if not album: # Empty albums have already been removed.
log.debug('emptied album %i' % album_id)
log.debug(u'emptied album {0}'.format(album_id))
continue
first_item = album.items().get()
@ -1092,7 +1092,7 @@ def update_items(lib, query, album, move, pretend):
# Move album art (and any inconsistent items).
if move and lib.directory in ancestry(first_item.path):
log.debug('moving album %i' % album_id)
log.debug(u'moving album {0}'.format(album_id))
album.move()
@ -1304,7 +1304,7 @@ def modify_items(lib, mods, dels, query, write, move, album, confirm):
if move:
cur_path = obj.path
if lib.directory in ancestry(cur_path): # In library?
log.debug('moving object %s' % cur_path)
log.debug(u'moving object {0}'.format(displayable_path(cur_path)))
obj.move()
obj.store()
@ -1391,9 +1391,9 @@ def move_items(lib, dest, query, copy, album):
action = 'Copying' if copy else 'Moving'
entity = 'album' if album else 'item'
log.info('%s %i %ss.' % (action, len(objs), entity))
log.info('{0} {1} {2}s.'.format(action, len(objs), entity))
for obj in objs:
log.debug('moving: %s' % obj.path)
log.debug(u'moving: {0}'.format(displayable_path(obj.path)))
obj.move(copy, basedir=dest)
obj.store()

View file

@ -194,7 +194,7 @@ class BeatportPlugin(BeetsPlugin):
try:
return self._get_releases(query)
except BeatportAPIError as e:
log.debug('Beatport API Error: %s (query: %s)' % (e, query))
log.debug(u'Beatport API Error: {0} (query: {1})'.format(e, query))
return []
def item_candidates(self, item, artist, title):
@ -205,14 +205,14 @@ class BeatportPlugin(BeetsPlugin):
try:
return self._get_tracks(query)
except BeatportAPIError as e:
log.debug('Beatport API Error: %s (query: %s)' % (e, query))
log.debug(u'Beatport API Error: {0} (query: {1})'.format(e, query))
return []
def album_for_id(self, release_id):
"""Fetches a release by its Beatport ID and returns an AlbumInfo object
or None if the release is not found.
"""
log.debug('Searching Beatport for release %s' % str(release_id))
log.debug(u'Searching Beatport for release {0}'.format(release_id))
match = re.search(r'(^|beatport\.com/release/.+/)(\d+)$', release_id)
if not match:
return None
@ -224,7 +224,7 @@ class BeatportPlugin(BeetsPlugin):
"""Fetches a track by its Beatport ID and returns a TrackInfo object
or None if the track is not found.
"""
log.debug('Searching Beatport for track %s' % str(track_id))
log.debug(u'Searching Beatport for track {0}'.format(str(track_id)))
match = re.search(r'(^|beatport\.com/track/.+/)(\d+)$', track_id)
if not match:
return None

View file

@ -1160,9 +1160,9 @@ class BPDPlugin(BeetsPlugin):
try:
Server(lib, host, port, password).run()
except NoGstreamerError:
global_log.error('Gstreamer Python bindings not found.')
global_log.error('Install "python-gst0.10", "py27-gst-python", '
'or similar package to use BPD.')
global_log.error(u'Gstreamer Python bindings not found.')
global_log.error(u'Install "python-gst0.10", "py27-gst-python", '
u'or similar package to use BPD.')
def commands(self):
cmd = beets.ui.Subcommand(

View file

@ -73,15 +73,15 @@ class BPMPlugin(BeetsPlugin):
item = items[0]
if item['bpm']:
log.info('Found bpm {0}'.format(item['bpm']))
log.info(u'Found bpm {0}'.format(item['bpm']))
if not overwrite:
return
log.info('Press Enter {0} times to the rhythm or Ctrl-D \
to exit'.format(self.config['max_strokes'].get(int)))
log.info(u'Press Enter {0} times to the rhythm or Ctrl-D '
u'to exit'.format(self.config['max_strokes'].get(int)))
new_bpm = bpm(self.config['max_strokes'].get(int))
item['bpm'] = int(new_bpm)
if write:
item.try_write()
item.store()
log.info('Added new bpm {0}'.format(item['bpm']))
log.info(u'Added new bpm {0}'.format(item['bpm']))

View file

@ -53,32 +53,32 @@ def acoustid_match(path):
try:
duration, fp = acoustid.fingerprint_file(util.syspath(path))
except acoustid.FingerprintGenerationError as exc:
log.error('fingerprinting of %s failed: %s' %
(repr(path), str(exc)))
log.error(u'fingerprinting of {0} failed: {1}'
.format(displayable_path(repr(path)), str(exc)))
return None
_fingerprints[path] = fp
try:
res = acoustid.lookup(API_KEY, fp, duration,
meta='recordings releases')
except acoustid.AcoustidError as exc:
log.debug('fingerprint matching %s failed: %s' %
(repr(path), str(exc)))
log.debug(u'fingerprint matching {0} failed: {1}'
.format(displayable_path(repr(path)), str(exc)))
return None
log.debug('chroma: fingerprinted %s' % repr(path))
log.debug(u'chroma: fingerprinted {0}'.format(displayable_path(repr(path))))
# Ensure the response is usable and parse it.
if res['status'] != 'ok' or not res.get('results'):
log.debug('chroma: no match found')
log.debug(u'chroma: no match found')
return None
result = res['results'][0] # Best match.
if result['score'] < SCORE_THRESH:
log.debug('chroma: no results above threshold')
log.debug(u'chroma: no results above threshold')
return None
_acoustids[path] = result['id']
# Get recording and releases from the result.
if not result.get('recordings'):
log.debug('chroma: no recordings found')
log.debug(u'chroma: no recordings found')
return None
recording_ids = []
release_ids = []
@ -87,7 +87,7 @@ def acoustid_match(path):
if 'releases' in recording:
release_ids += [rel['id'] for rel in recording['releases']]
log.debug('chroma: matched recordings {0}'.format(recording_ids))
log.debug(u'chroma: matched recordings {0}'.format(recording_ids))
_matches[path] = recording_ids, release_ids
@ -141,7 +141,7 @@ class AcoustidPlugin(plugins.BeetsPlugin):
if album:
albums.append(album)
log.debug('acoustid album candidates: %i' % len(albums))
log.debug(u'acoustid album candidates: {0}'.format(len(albums)))
return albums
def item_candidates(self, item, artist, title):
@ -154,7 +154,7 @@ class AcoustidPlugin(plugins.BeetsPlugin):
track = hooks.track_for_mbid(recording_id)
if track:
tracks.append(track)
log.debug('acoustid item candidates: {0}'.format(len(tracks)))
log.debug(u'acoustid item candidates: {0}'.format(len(tracks)))
return tracks
def commands(self):
@ -216,7 +216,7 @@ def submit_items(userkey, items, chunksize=64):
def submit_chunk():
"""Submit the current accumulated fingerprint data."""
log.info('submitting {0} fingerprints'.format(len(data)))
log.info(u'submitting {0} fingerprints'.format(len(data)))
try:
acoustid.submit(API_KEY, userkey, data)
except acoustid.AcoustidError as exc:
@ -233,7 +233,7 @@ def submit_items(userkey, items, chunksize=64):
}
if item.mb_trackid:
item_data['mbid'] = item.mb_trackid
log.debug('submitting MBID')
log.debug(u'submitting MBID')
else:
item_data.update({
'track': item.title,
@ -244,7 +244,7 @@ def submit_items(userkey, items, chunksize=64):
'trackno': item.track,
'discno': item.disc,
})
log.debug('submitting textual metadata')
log.debug(u'submitting textual metadata')
data.append(item_data)
# If we have enough data, submit a chunk.
@ -294,6 +294,5 @@ def fingerprint_item(item, write=False):
item.store()
return item.acoustid_fingerprint
except acoustid.FingerprintGenerationError as exc:
log.info(
'fingerprint generation failed: {0}'.format(exc)
)
log.info(u'fingerprint generation failed: {0}'
.format(exc))

View file

@ -60,14 +60,14 @@ class DiscogsPlugin(BeetsPlugin):
try:
return self.get_albums(query)
except DiscogsAPIError as e:
log.debug('Discogs API Error: %s (query: %s' % (e, query))
log.debug(u'Discogs API Error: {0} (query: {1})'.format(e, query))
return []
def album_for_id(self, album_id):
"""Fetches an album by its Discogs ID and returns an AlbumInfo object
or None if the album is not found.
"""
log.debug('Searching discogs for release %s' % str(album_id))
log.debug(u'Searching Discogs for release {0}'.format(str(album_id)))
# Discogs-IDs are simple integers. We only look for those at the end
# of an input string as to avoid confusion with other metadata plugins.
# An optional bracket can follow the integer, as this is how discogs
@ -82,8 +82,8 @@ class DiscogsPlugin(BeetsPlugin):
getattr(result, 'title')
except DiscogsAPIError as e:
if e.message != '404 Not Found':
log.debug('Discogs API Error: %s (query: %s)'
% (e, result._uri))
log.debug(u'Discogs API Error: {0} (query: {1})'
.format(e, result._uri))
return None
return self.get_album_info(result)
@ -225,7 +225,7 @@ class DiscogsPlugin(BeetsPlugin):
if match:
medium, index = match.groups()
else:
log.debug('Invalid discogs position: %s' % position)
log.debug(u'Invalid Discogs position: {0}'.format(position))
medium = index = None
return medium or None, index or None

View file

@ -56,20 +56,20 @@ def _checksum(item, prog):
key = args[0]
checksum = getattr(item, key, False)
if not checksum:
log.debug('%s: key %s on item %s not cached: computing checksum',
PLUGIN, key, displayable_path(item.path))
log.debug(u'{0}: key {1} on item {2} not cached: computing checksum'
.format(PLUGIN, key, displayable_path(item.path)))
try:
checksum = command_output(args)
setattr(item, key, checksum)
item.store()
log.debug('%s: computed checksum for %s using %s',
PLUGIN, item.title, key)
log.debug(u'{0}: computed checksum for {1} using {2}'
.format(PLUGIN, item.title, key))
except subprocess.CalledProcessError as e:
log.debug('%s: failed to checksum %s: %s',
PLUGIN, displayable_path(item.path), e)
log.debug(u'{0}: failed to checksum {1}: {2}'
.format(PLUGIN, displayable_path(item.path), e))
else:
log.debug('%s: key %s on item %s cached: not computing checksum',
PLUGIN, key, displayable_path(item.path))
log.debug(u'{0}: key {1} on item {2} cached: not computing checksum'
.format(PLUGIN, key, displayable_path(item.path)))
return key, checksum
@ -86,8 +86,8 @@ def _group_by(objs, keys):
key = '\001'.join(values)
counts[key].append(obj)
else:
log.debug('%s: all keys %s on item %s are null: skipping',
PLUGIN, str(keys), displayable_path(obj.path))
log.debug(u'{0}: all keys {1} on item {2} are null: skipping'
.format(PLUGIN, str(keys), displayable_path(obj.path)))
return counts

View file

@ -40,19 +40,19 @@ def fetch_item_tempo(lib, loglevel, item, write):
"""
# Skip if the item already has the tempo field.
if item.bpm:
log.log(loglevel, u'bpm already present: %s - %s' %
(item.artist, item.title))
log.log(loglevel, u'bpm already present: {0} - {1}'
.format(item.artist, item.title))
return
# Fetch tempo.
tempo = get_tempo(item.artist, item.title, item.length)
if not tempo:
log.log(loglevel, u'tempo not found: %s - %s' %
(item.artist, item.title))
log.log(loglevel, u'tempo not found: {0} - {1}'
.format(item.artist, item.title))
return
log.log(loglevel, u'fetched tempo: %s - %s' %
(item.artist, item.title))
log.log(loglevel, u'fetched tempo: {0} - {1}'
.format(item.artist, item.title))
item.bpm = int(tempo)
if write:
item.try_write()

View file

@ -40,8 +40,8 @@ class EmbedCoverArtPlugin(BeetsPlugin):
if self.config['maxwidth'].get(int) and \
not ArtResizer.shared.local:
self.config['maxwidth'] = 0
log.warn("embedart: ImageMagick or PIL not found; "
"'maxwidth' option ignored")
log.warn(u"embedart: ImageMagick or PIL not found; "
u"'maxwidth' option ignored")
def commands(self):
# Embed command.
@ -145,7 +145,7 @@ def _mediafile_image(image_path, maxwidth=None):
def extract(lib, outpath, query):
item = lib.items(query).get()
if not item:
log.error('No item matches query.')
log.error(u'No item matches query.')
return
# Extract the art.
@ -159,14 +159,14 @@ def extract(lib, outpath, query):
art = mf.art
if not art:
log.error('No album art present in %s - %s.' %
(item.artist, item.title))
log.error(u'No album art present in {0} - {1}.'
.format(item.artist, item.title))
return
# Add an extension to the filename.
ext = imghdr.what(None, h=art)
if not ext:
log.error('Unknown image type.')
log.error(u'Unknown image type.')
return
outpath += '.' + ext
@ -179,9 +179,9 @@ def extract(lib, outpath, query):
# 'clearart' command.
def clear(lib, query):
log.info('Clearing album art from items:')
log.info(u'Clearing album art from items:')
for item in lib.items(query):
log.info(u'%s - %s' % (item.artist, item.title))
log.info(u'{0} - {1}'.format(item.artist, item.title))
try:
mf = mediafile.MediaFile(syspath(item.path),
config['id3v23'].get(bool))

View file

@ -70,7 +70,7 @@ class IHatePlugin(BeetsPlugin):
if task.choice_flag == action.APPLY:
if skip_queries or warn_queries:
self._log.debug('[ihate] processing your hate')
self._log.debug(u'[ihate] processing your hate')
if self.do_i_hate_this(task, skip_queries):
task.choice_flag = action.SKIP
self._log.info(u'[ihate] skipped: {0}'
@ -80,6 +80,6 @@ class IHatePlugin(BeetsPlugin):
self._log.info(u'[ihate] you maybe hate this: {0}'
.format(summary(task)))
else:
self._log.debug('[ihate] nothing to do')
self._log.debug(u'[ihate] nothing to do')
else:
self._log.debug('[ihate] user made a decision, nothing to do')
self._log.debug(u'[ihate] user made a decision, nothing to do')

View file

@ -44,8 +44,8 @@ def write_item_mtime(item, mtime):
item's file.
"""
if mtime is None:
log.warn("No mtime to be preserved for item "
+ util.displayable_path(item.path))
log.warn(u"No mtime to be preserved for item {0}"
.format(util.displayable_path(item.path)))
return
# The file's mtime on disk must be in sync with the item's mtime
@ -64,10 +64,9 @@ def record_import_mtime(item, source, destination):
mtime = os.stat(util.syspath(source)).st_mtime
item_mtime[destination] = mtime
log.debug('Recorded mtime %s for item "%s" imported from "%s"',
mtime,
util.displayable_path(destination),
util.displayable_path(source))
log.debug(u"Recorded mtime {0} for item '{1}' imported from '{2}'"
.format(mtime, util.displayable_path(destination),
util.displayable_path(source)))
@ImportAddedPlugin.listen('album_imported')

View file

@ -64,7 +64,7 @@ def compile_inline(python_code, album):
try:
func = _compile_func(python_code)
except SyntaxError:
log.error(u'syntax error in inline field definition:\n%s' %
log.error(u'syntax error in inline field definition:\n{0}',
traceback.format_exc())
return
else:
@ -112,14 +112,14 @@ class InlinePlugin(BeetsPlugin):
# Item fields.
for key, view in itertools.chain(config['item_fields'].items(),
config['pathfields'].items()):
log.debug(u'inline: adding item field %s' % key)
log.debug(u'inline: adding item field {0}'.format(key))
func = compile_inline(view.get(unicode), False)
if func is not None:
self.template_fields[key] = func
# Album fields.
for key, view in config['album_fields'].items():
log.debug(u'inline: adding album field %s' % key)
log.debug(u'inline: adding album field {0}'.format(key))
func = compile_inline(view.get(unicode), True)
if func is not None:
self.album_template_fields[key] = func

View file

@ -66,7 +66,7 @@ def _tags_for(obj, min_weight=None):
else:
res = obj.get_top_tags()
except PYLAST_EXCEPTIONS as exc:
log.debug(u'last.fm error: %s' % unicode(exc))
log.debug(u'last.fm error: {0}'.format(exc))
return []
# Filter by weight (optionally).
@ -368,10 +368,9 @@ class LastGenrePlugin(plugins.BeetsPlugin):
if 'track' in self.sources:
item.genre, src = self._get_genre(item)
item.store()
log.info(
u'genre for track {0} - {1} ({2}): {3}'. format(
item.artist, item.title, src, item.genre
)
log.info(u'genre for track {0} - {1} ({2}): {3}'
.format(item.artist, item.title, src,
item.genre)
)
if write:

View file

@ -264,7 +264,7 @@ def slugify(text):
text = unicodedata.normalize('NFKD', text).encode('ascii', 'ignore')
text = unicode(re.sub('[-\s]+', ' ', text))
except UnicodeDecodeError:
log.exception("Failing to normalize '%s'" % (text))
log.exception(u"Failing to normalize '{0}'".format(text))
return text
@ -347,7 +347,7 @@ def is_lyrics(text, artist=None):
badTriggersOcc = []
nbLines = text.count('\n')
if nbLines <= 1:
log.debug("Ignoring too short lyrics '%s'" % text)
log.debug(u"Ignoring too short lyrics '{0}'".format(text))
return 0
elif nbLines < 5:
badTriggersOcc.append('too_short')
@ -365,7 +365,7 @@ def is_lyrics(text, artist=None):
text, re.I))
if badTriggersOcc:
log.debug('Bad triggers detected: %s' % badTriggersOcc)
log.debug(u'Bad triggers detected: {0}'.format(badTriggersOcc))
return len(badTriggersOcc) < 2
@ -394,8 +394,8 @@ def scrape_lyrics_from_url(url):
tag.name = 'p' # keep tag contents
except Exception, e:
log.debug('Error %s when replacing containing marker by p marker' % e,
exc_info=True)
log.debug(u'Error {0} when replacing containing marker by p marker'
.format(e), exc_info=True)
# Make better soup from current soup! The previous unclosed <p> sections
# are now closed. Use str() rather than prettify() as it's more
@ -437,7 +437,7 @@ def fetch_google(artist, title):
data = json.load(data)
if 'error' in data:
reason = data['error']['errors'][0]['reason']
log.debug(u'google lyrics backend error: %s' % reason)
log.debug(u'google lyrics backend error: {0}'.format(reason))
return
if 'items' in data.keys():
@ -453,7 +453,7 @@ def fetch_google(artist, title):
lyrics = sanitize_lyrics(lyrics)
if is_lyrics(lyrics, artist):
log.debug(u'got lyrics from %s' % item['displayLink'])
log.debug(u'got lyrics from {0}'.format(item['displayLink']))
return lyrics
@ -514,8 +514,8 @@ class LyricsPlugin(BeetsPlugin):
"""
# Skip if the item already has lyrics.
if not force and item.lyrics:
log.log(loglevel, u'lyrics already present: %s - %s' %
(item.artist, item.title))
log.log(loglevel, u'lyrics already present: {0} - {1}'
.format(item.artist, item.title))
return
lyrics = None
@ -530,7 +530,7 @@ class LyricsPlugin(BeetsPlugin):
log.log(loglevel, u'fetched lyrics: %s - %s' %
(item.artist, item.title))
else:
log.log(loglevel, u'lyrics not found: %s - %s' %
log.log(loglevel, u'lyrics not found: {0} - {1}'
.format(item.artist, item.title))
fallback = self.config['fallback'].get()
if fallback:
@ -553,7 +553,7 @@ class LyricsPlugin(BeetsPlugin):
if lyrics:
if isinstance(lyrics, str):
lyrics = lyrics.decode('utf8', 'ignore')
log.debug(u'got lyrics from backend: {0}'.format(
backend.__name__
))
log.debug(u'got lyrics from backend: {0}'
.format(backend.__name__)
)
return lyrics.strip()

View file

@ -43,7 +43,7 @@ def _missing(album):
for track_info in getattr(album_info, 'tracks', []):
if track_info.track_id not in item_mbids:
item = _item(track_info, album_info, album.id)
log.debug('{0}: track {1} in album {2}'
log.debug(u'{0}: track {1} in album {2}'
.format(PLUGIN,
track_info.track_id,
album_info.album_id))

View file

@ -135,7 +135,7 @@ class CommandBackend(Backend):
supported_items = filter(self.format_supported, album.items())
if len(supported_items) != len(album.items()):
log.debug('replaygain: tracks are of unsupported format')
log.debug(u'replaygain: tracks are of unsupported format')
return AlbumGain(None, [])
output = self.compute_gain(supported_items, True)
@ -577,12 +577,12 @@ class ReplayGainPlugin(BeetsPlugin):
in the item, nothing is done.
"""
if not self.track_requires_gain(item):
log.info(u'Skipping track {0} - {1}'.format(item.artist,
item.title))
log.info(u'Skipping track {0} - {1}'
.format(item.artist,item.title))
return
log.info(u'analyzing {0} - {1}'.format(item.artist,
item.title))
log.info(u'analyzing {0} - {1}'
.format(item.artist,item.title))
try:
track_gains = self.backend_instance.compute_track_gain([item])

View file

@ -59,7 +59,7 @@ class RewritePlugin(BeetsPlugin):
if fieldname not in library.Item._fields:
raise ui.UserError("invalid field name (%s) in rewriter" %
fieldname)
log.debug(u'adding template field %s' % key)
log.debug(u'adding template field {0}'.format(key))
pattern = re.compile(pattern.lower())
rules[fieldname].append((pattern, value))
if fieldname == 'artist':

View file

@ -64,7 +64,7 @@ class ScrubPlugin(BeetsPlugin):
# Walk through matching files and remove tags.
for item in lib.items(ui.decargs(args)):
log.info(u'scrubbing: %s' % util.displayable_path(item.path))
log.info(u'scrubbing: {0}'.format(util.displayable_path(item.path)))
# Get album art if we need to restore it.
if opts.write:
@ -80,7 +80,7 @@ class ScrubPlugin(BeetsPlugin):
log.debug(u'writing new tags after scrub')
item.try_write()
if art:
log.info('restoring art')
log.info(u'restoring art')
mf = mediafile.MediaFile(item.path)
mf.art = art
mf.save()
@ -132,8 +132,7 @@ def _scrub(path):
f.save()
except IOError as exc:
log.error(u'could not scrub {0}: {1}'.format(
util.displayable_path(path),
exc,
util.displayable_path(path), exc,
))
@ -141,5 +140,5 @@ def _scrub(path):
@ScrubPlugin.listen('write')
def write_item(path):
if not scrubbing and config['scrub']['auto']:
log.debug(u'auto-scrubbing %s' % util.displayable_path(path))
log.debug(u'auto-scrubbing {0}'.format(util.displayable_path(path)))
_scrub(path)

View file

@ -63,7 +63,7 @@ class SpotifyPlugin(BeetsPlugin):
self.config['show_failures'].set(True)
if self.config['mode'].get() not in ['list', 'open']:
log.warn(self.config['mode'].get() + " is not a valid mode")
log.warn(u'{0} is not a valid mode'.format(self.config['mode'].get()))
return False
self.opts = opts
@ -77,10 +77,10 @@ class SpotifyPlugin(BeetsPlugin):
items = lib.items(query)
if not items:
log.debug("Your beets query returned no items, skipping spotify")
log.debug(u'Your beets query returned no items, skipping spotify')
return
log.info("Processing " + str(len(items)) + " tracks...")
log.info(u'Processing {0} tracks...'.format(len(items)))
for item in items:
@ -112,7 +112,7 @@ class SpotifyPlugin(BeetsPlugin):
try:
r.raise_for_status()
except HTTPError as e:
log.debug("URL returned a " + e.response.status_code + "error")
log.debug(u'URL returned a {0} error'.format(e.response.status_code))
failures.append(search_url)
continue
@ -128,34 +128,34 @@ class SpotifyPlugin(BeetsPlugin):
# Simplest, take the first result
chosen_result = None
if len(r_data) == 1 or self.config['tiebreak'].get() == "first":
log.debug("Spotify track(s) found, count: " + str(len(r_data)))
log.debug(u'Spotify track(s) found, count: {0}'.format(len(r_data)))
chosen_result = r_data[0]
elif len(r_data) > 1:
# Use the popularity filter
log.debug(
"Most popular track chosen, count: " + str(len(r_data))
log.debug(u'Most popular track chosen, count: {0}'
.format(len(r_data))
)
chosen_result = max(r_data, key=lambda x: x['popularity'])
if chosen_result:
results.append(chosen_result)
else:
log.debug("No spotify track found: " + search_url)
log.debug(u'No spotify track found: {0}'.format(search_url))
failures.append(search_url)
failure_count = len(failures)
if failure_count > 0:
if self.config['show_failures'].get():
log.info("{0} track(s) did not match a Spotify ID:".format(
failure_count
))
log.info(u'{0} track(s) did not match a Spotify ID:'
.format(failure_count)
)
for track in failures:
log.info("track:" + track)
log.info("")
log.info(u'track: {0}'.format(track))
log.info(u'') # Is this necesssary
else:
log.warn(
str(failure_count) + " track(s) did not match "
"a Spotify ID; use --show-failures to display\n"
log.warn(u'{0} track(s) did not match a Spotify ID;\n'
u'use --show-failures to display'
.format(failure_count)
)
return results
@ -164,7 +164,7 @@ class SpotifyPlugin(BeetsPlugin):
if results:
ids = map(lambda x: x['id'], results)
if self.config['mode'].get() == "open":
log.info("Attempting to open Spotify with playlist")
log.info(u'Attempting to open Spotify with playlist')
spotify_url = self.playlist_partial + ",".join(ids)
webbrowser.open(spotify_url)
@ -172,4 +172,4 @@ class SpotifyPlugin(BeetsPlugin):
for item in ids:
print(unicode.encode(self.open_url + item))
else:
log.warn("No Spotify tracks found from beets query")
log.warn(u'No Spotify tracks found from beets query')

View file

@ -164,7 +164,7 @@ class LyricsScrapingPluginTest(unittest.TestCase):
# a random improvement in the scraping algo: we want to
# be noticed if it's the case.
if is_lyrics_content_ok(s['title'], res):
log.debug('Source %s actually return valid lyrics!' % s['url'])
log.debug(u'Source {0} actually return valid lyrics!'.format(s['url']))
def test_is_page_candidate(self):
for s in self.sourcesOk: