Delegate attribute access to logging

Šarūnas Nejus 2025-08-30 18:33:25 +01:00
parent e334e81d40
commit b3d434f58f
GPG key ID: DD28F6704DBE3435
35 changed files with 179 additions and 201 deletions
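
The whole change leans on one feature of str.format: a replacement field may carry attribute or index access, so a call site can pass a single object and let the template pull what it needs. A standalone sketch (FakeItem and its values are made up for illustration, not beets objects):

    # "{.filepath}" is shorthand for "{0.filepath}": automatic field 0 plus attribute access.
    class FakeItem:
        def __init__(self, filepath, albumartist, id):
            self.filepath = filepath
            self.albumartist = albumartist
            self.id = id

    item = FakeItem("/music/a.flac", "Some Artist", 42)
    print("missing file: {.filepath}".format(item))

    # Explicit indices let one argument feed several fields, so the call site can
    # pass the whole object instead of unpacking attributes by hand.
    print("yielding album {0.id}: {0.albumartist}".format(item))

Beets' logger uses str.format-style templates and, as far as I can tell, only renders them when a record is actually emitted, so moving the attribute access into the template keeps it just as lazy as passing the attribute values separately.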

@@ -38,7 +38,7 @@ def get_art(log, item):
     try:
         mf = mediafile.MediaFile(syspath(item.path))
     except mediafile.UnreadableFileError as exc:
-        log.warning("Could not extract art from {}: {}", item.filepath, exc)
+        log.warning("Could not extract art from {.filepath}: {}", item, exc)
         return
     return mf.art
@@ -88,7 +88,7 @@ def embed_item(
     # Make sure the image kind is safe (some formats only support PNG
     # and JPEG).
     if image.mime_type not in ("image/jpeg", "image/png"):
-        log.info("not embedding image of unsupported type: {}", image.mime_type)
+        log.info("not embedding image of unsupported type: {.mime_type}", image)
         return
     item.try_write(path=itempath, tags={"images": [image]}, id3v23=id3v23)
@@ -185,7 +185,7 @@ def extract(log, outpath, item):
     # Add an extension to the filename.
     ext = mediafile.image_extension(art)
     if not ext:
-        log.warning("Unknown image type in {}.", item.filepath)
+        log.warning("Unknown image type in {.filepath}.", item)
         return
     outpath += bytestring_path(f".{ext}")
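
For comparison, the standard-library way to get brace-style, lazily rendered log messages is the BraceMessage pattern from the logging cookbook. Beets has its own str.format-aware logger, so this is only the generic shape of the idea, not the project's implementation:

    import logging

    class BraceMessage:
        """Wrap a format string; str.format runs only when the record is rendered."""

        def __init__(self, fmt, *args):
            self.fmt, self.args = fmt, args

        def __str__(self):
            return self.fmt.format(*self.args)

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger("demo")
    item = type("Item", (), {"filepath": "/music/a.flac"})()
    # The attribute lookup happens inside __str__, i.e. only when the message is handled.
    log.debug(BraceMessage("deleting duplicate {.filepath}", item))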

@@ -197,7 +197,7 @@ def _add_candidate(
     checking the track count, ordering the items, checking for
     duplicates, and calculating the distance.
     """
-    log.debug("Candidate: {} - {} ({})", info.artist, info.album, info.album_id)
+    log.debug("Candidate: {0.artist} - {0.album} ({0.album_id})", info)

     # Discard albums with zero tracks.
     if not info.tracks:

@@ -82,10 +82,7 @@ def query_tasks(session: ImportSession):
     # Search for albums.
     for album in session.lib.albums(session.query):
         log.debug(
-            "yielding album {}: {} - {}",
-            album.id,
-            album.albumartist,
-            album.album,
+            "yielding album {0.id}: {0.albumartist} - {0.album}", album
         )
         items = list(album.items())
         _freshen_items(items)

@@ -271,7 +271,7 @@ class ImportTask(BaseImportTask):
         for item in duplicate_items:
             item.remove()
             if lib.directory in util.ancestry(item.path):
-                log.debug("deleting duplicate {}", item.filepath)
+                log.debug("deleting duplicate {.filepath}", item)
                 util.remove(item.path)
                 util.prune_dirs(os.path.dirname(item.path), lib.directory)
@@ -552,12 +552,11 @@ class ImportTask(BaseImportTask):
             ]
             if overwritten_fields:
                 log.debug(
-                    "Reimported {} {}. Not preserving flexible attributes {}. "
-                    "Path: {}",
+                    "Reimported {0} {1.id}. Not preserving flexible attributes {2}. "
+                    "Path: {1.filepath}",
                     noun,
-                    new_obj.id,
+                    new_obj,
                     overwritten_fields,
-                    new_obj.filepath,
                 )
             for key in overwritten_fields:
                 del existing_fields[key]
@@ -576,17 +575,15 @@ class ImportTask(BaseImportTask):
             self.album.artpath = replaced_album.artpath
             self.album.store()
             log.debug(
-                "Reimported album {}. Preserving attribute ['added']. "
-                "Path: {}",
-                self.album.id,
-                self.album.filepath,
+                "Reimported album {0.album.id}. Preserving attribute ['added']. "
+                "Path: {0.album.filepath}",
+                self,
             )
             log.debug(
-                "Reimported album {}. Preserving flexible attributes {}. "
-                "Path: {}",
-                self.album.id,
+                "Reimported album {0.album.id}. Preserving flexible"
+                " attributes {1}. Path: {0.album.filepath}",
+                self,
                 list(album_fields.keys()),
-                self.album.filepath,
             )

         for item in self.imported_items():
@@ -595,21 +592,19 @@ class ImportTask(BaseImportTask):
                 if dup_item.added and dup_item.added != item.added:
                     item.added = dup_item.added
                     log.debug(
-                        "Reimported item {}. Preserving attribute ['added']. "
-                        "Path: {}",
-                        item.id,
-                        item.filepath,
+                        "Reimported item {0.id}. Preserving attribute ['added']. "
+                        "Path: {0.filepath}",
+                        item,
                     )
                 item_fields = _reduce_and_log(
                     item, dup_item._values_flex, REIMPORT_FRESH_FIELDS_ITEM
                 )
                 item.update(item_fields)
                 log.debug(
-                    "Reimported item {}. Preserving flexible attributes {}. "
-                    "Path: {}",
-                    item.id,
+                    "Reimported item {0.id}. Preserving flexible attributes {1}. "
+                    "Path: {0.filepath}",
+                    item,
                     list(item_fields.keys()),
-                    item.filepath,
                 )
                 item.store()
@@ -619,7 +614,7 @@ class ImportTask(BaseImportTask):
         """
         for item in self.imported_items():
             for dup_item in self.replaced_items[item]:
-                log.debug("Replacing item {}: {}", dup_item.id, item.filepath)
+                log.debug("Replacing item {.id}: {.filepath}", dup_item, item)
                 dup_item.remove()
         log.debug(
             "{} of {} items replaced",
@@ -1067,7 +1062,7 @@ class ImportTaskFactory:
         # Now read albums from the extracted directory.
         self.toppath = archive_task.toppath
-        log.debug("Archive extracted to: {}", self.toppath)
+        log.debug("Archive extracted to: {.toppath}", self)
         return archive_task

     def read_item(self, path: util.PathBytes):

@@ -1012,7 +1012,7 @@ class Item(LibModel):
         if move:
             # Check whether this file is inside the library directory.
             if self._db and self._db.directory in util.ancestry(self.path):
-                log.debug("moving {} to synchronize path", self.filepath)
+                log.debug("moving {.filepath} to synchronize path", self)
                 self.move(with_album=with_album)
         self.store()

@@ -1280,11 +1280,10 @@ class TerminalImportSession(importer.ImportSession):
         dup_choices = [c for c in all_choices if c.short == short]
         for c in dup_choices[1:]:
             log.warning(
-                "Prompt choice '{}' removed due to conflict "
-                "with '{}' (short letter: '{}')",
-                c.long,
-                dup_choices[0].long,
-                c.short,
+                "Prompt choice '{0.long}' removed due to conflict "
+                "with '{1[0].long}' (short letter: '{0.short}')",
+                c,
+                dup_choices,
             )
             extra_choices.remove(c)
@@ -1639,9 +1638,8 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None):
         # Did the item change since last checked?
         if item.current_mtime() <= item.mtime:
             log.debug(
-                "skipping {} because mtime is up to date ({})",
-                item.filepath,
-                item.mtime,
+                "skipping {0.filepath} because mtime is up to date ({0.mtime})",
+                item,
             )
             continue
@@ -1649,7 +1647,7 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None):
         try:
             item.read()
         except library.ReadError as exc:
-            log.error("error reading {}: {}", item.filepath, exc)
+            log.error("error reading {.filepath}: {}", item, exc)
             continue

         # Special-case album artist when it matches track artist. (Hacky
@@ -1882,7 +1880,7 @@ def show_stats(lib, query, exact):
         try:
             total_size += os.path.getsize(syspath(item.path))
         except OSError as exc:
-            log.info("could not get size of {}: {}", item.path, exc)
+            log.info("could not get size of {.path}: {}", item, exc)
         else:
             total_size += int(item.length * item.bitrate / 8)
         total_time += item.length
@@ -2173,7 +2171,7 @@ def move_items(
     )
     for obj in objs:
-        log.debug("moving: {}", obj.filepath)
+        log.debug("moving: {.filepath}", obj)

         if export:
             # Copy without affecting the database.
@@ -2256,14 +2254,14 @@ def write_items(lib, query, pretend, force):
     for item in items:
         # Item deleted?
         if not os.path.exists(syspath(item.path)):
-            log.info("missing file: {}", item.filepath)
+            log.info("missing file: {.filepath}", item)
             continue

         # Get an Item object reflecting the "clean" (on-disk) state.
         try:
             clean_item = library.Item.from_path(item.path)
         except library.ReadError as exc:
-            log.error("error reading {}: {}", item.filepath, exc)
+            log.error("error reading {.filepath}: {}", item, exc)
             continue

         # Check for and display changes.
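
Two call sites above also use element access inside a replacement field ('{1[0].long}' here, and '{.config[auth]}' in the SubsonicUpdate hunk further down). A self-contained illustration; Choice and the dict are made up:

    class Choice:
        def __init__(self, long):
            self.long = long

    config = {"auth": "token"}
    choices = [Choice("Apply"), Choice("Abort")]

    # "{0[auth]}" calls __getitem__("auth"); the key is written without quotes.
    print("auth type is {0[auth]}".format(config))
    # Integer indexing and attribute access can be chained in one field.
    print("conflicts with '{0[0].long}'".format(choices))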

@@ -126,7 +126,7 @@ class HumanReadableError(Exception):
        """
        if self.tb:
            logger.debug(self.tb)
-        logger.error("{}: {}", self.error_kind, self.args[0])
+        logger.error("{0.error_kind}: {0.args[0]}", self)


class FilesystemError(HumanReadableError):

@@ -306,9 +306,9 @@ class IMBackend(LocalBackend):
        except subprocess.CalledProcessError as exc:
            log.warning("ImageMagick size query failed")
            log.debug(
-                "`convert` exited with (status {}) when "
+                "`convert` exited with (status {.returncode}) when "
                "getting size with command {}:\n{}",
-                exc.returncode,
+                exc,
                cmd,
                exc.output.strip(),
            )
@@ -441,8 +441,8 @@ class IMBackend(LocalBackend):
        convert_proc.wait()
        if convert_proc.returncode:
            log.debug(
-                "ImageMagick convert failed with status {}: {!r}",
-                convert_proc.returncode,
+                "ImageMagick convert failed with status {.returncode}: {!r}",
+                convert_proc,
                convert_stderr,
            )
            return None
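
Both hunks above format process attributes (returncode, output) straight from the exception or Popen object. The same move in a standalone example with subprocess.CalledProcessError; the command is arbitrary:

    import subprocess
    import sys

    try:
        subprocess.run(
            [sys.executable, "-c", "import sys; sys.exit(2)"],
            check=True,
            capture_output=True,
        )
    except subprocess.CalledProcessError as exc:
        # returncode and output are plain attributes of the exception, so one
        # argument can feed several fields of the template.
        print("command exited with status {0.returncode}: {0.output!r}".format(exc))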

@@ -153,7 +153,7 @@ class AcousticPlugin(plugins.BeetsPlugin):
        try:
            data.update(res.json())
        except ValueError:
-            self._log.debug("Invalid Response: {}", res.text)
+            self._log.debug("Invalid Response: {.text}", res)
            return {}

        return data

@@ -127,12 +127,11 @@ class BadFiles(BeetsPlugin):
        except CheckerCommandError as e:
            if e.errno == errno.ENOENT:
                self._log.error(
-                    "command not found: {} when validating file: {}",
-                    e.checker,
-                    e.path,
+                    "command not found: {0.checker} when validating file: {0.path}",
+                    e,
                )
            else:
-                self._log.error("error invoking {}: {}", e.checker, e.msg)
+                self._log.error("error invoking {0.checker}: {0.msg}", e)
            return []

        error_lines = []

@@ -763,7 +763,7 @@ class Connection:
    def debug(self, message, kind=" "):
        """Log a debug message about this connection."""
-        self.server._log.debug("{}[{}]: {}", kind, self.address, message)
+        self.server._log.debug("{}[{.address}]: {}", kind, self, message)

    def run(self):
        pass
@@ -911,7 +911,7 @@ class ControlConnection(Connection):
        super().__init__(server, sock)

    def debug(self, message, kind=" "):
-        self.server._log.debug("CTRL {}[{}]: {}", kind, self.address, message)
+        self.server._log.debug("CTRL {}[{.address}]: {}", kind, self, message)

    def run(self):
        """Listen for control commands and delegate to `ctrl_*` methods."""

@@ -82,8 +82,8 @@ class BPSyncPlugin(BeetsPlugin):
            if not self.is_beatport_track(item):
                self._log.info(
-                    "Skipping non-{} singleton: {}",
-                    self.beatport_plugin.data_source,
+                    "Skipping non-{.beatport_plugin.data_source} singleton: {}",
+                    self,
                    item,
                )
                continue
@@ -107,8 +107,8 @@ class BPSyncPlugin(BeetsPlugin):
            return False
        if not album.mb_albumid.isnumeric():
            self._log.info(
-                "Skipping album with invalid {} ID: {}",
-                self.beatport_plugin.data_source,
+                "Skipping album with invalid {.beatport_plugin.data_source} ID: {}",
+                self,
                album,
            )
            return False
@@ -117,8 +117,8 @@ class BPSyncPlugin(BeetsPlugin):
            return items
        if not all(self.is_beatport_track(item) for item in items):
            self._log.info(
-                "Skipping non-{} release: {}",
-                self.beatport_plugin.data_source,
+                "Skipping non-{.beatport_plugin.data_source} release: {}",
+                self,
                album,
            )
            return False
@@ -139,9 +139,7 @@ class BPSyncPlugin(BeetsPlugin):
            albuminfo = self.beatport_plugin.album_for_id(album.mb_albumid)
            if not albuminfo:
                self._log.info(
-                    "Release ID {} not found for album {}",
-                    album.mb_albumid,
-                    album,
+                    "Release ID {0.mb_albumid} not found for album {0}", album
                )
                continue

@@ -343,20 +343,20 @@ def fingerprint_item(log, item, write=False):
    """
    # Get a fingerprint and length for this track.
    if not item.length:
-        log.info("{}: no duration available", item.filepath)
+        log.info("{.filepath}: no duration available", item)
    elif item.acoustid_fingerprint:
        if write:
-            log.info("{}: fingerprint exists, skipping", item.filepath)
+            log.info("{.filepath}: fingerprint exists, skipping", item)
        else:
-            log.info("{}: using existing fingerprint", item.filepath)
+            log.info("{.filepath}: using existing fingerprint", item)
        return item.acoustid_fingerprint
    else:
-        log.info("{}: fingerprinting", item.filepath)
+        log.info("{.filepath}: fingerprinting", item)
        try:
            _, fp = acoustid.fingerprint_file(util.syspath(item.path))
            item.acoustid_fingerprint = fp.decode()
            if write:
-                log.info("{}: writing fingerprint", item.filepath)
+                log.info("{.filepath}: writing fingerprint", item)
                item.try_write()
            if item._db:
                item.store()

@@ -319,10 +319,9 @@ class ConvertPlugin(BeetsPlugin):
                util.displayable_path(source),
            )
            self._log.debug(
-                "Command {} exited with status {}: {}",
+                "Command {0} exited with status {1.returncode}: {1.output}",
                args,
-                exc.returncode,
-                exc.output,
+                exc,
            )
            util.remove(dest)
            util.prune_dirs(os.path.dirname(dest))
@@ -388,15 +387,15 @@ class ConvertPlugin(BeetsPlugin):
            if os.path.exists(util.syspath(dest)):
                self._log.info(
-                    "Skipping {} (target file exists)", item.filepath
+                    "Skipping {.filepath} (target file exists)", item
                )
                continue

            if keep_new:
                if pretend:
                    self._log.info(
-                        "mv {} {}",
-                        item.filepath,
+                        "mv {.filepath} {}",
+                        item,
                        util.displayable_path(original),
                    )
                else:
@@ -430,7 +429,7 @@ class ConvertPlugin(BeetsPlugin):
                else ("Linking" if link else "Copying")
            )
-            self._log.info("{} {}", msg, item.filepath)
+            self._log.info("{} {.filepath}", msg, item)

            if hardlink:
                util.hardlink(original, converted)
@@ -461,7 +460,7 @@ class ConvertPlugin(BeetsPlugin):
            if album and album.artpath:
                maxwidth = self._get_art_resize(album.artpath)
                self._log.debug(
-                    "embedding album art from {}", album.art_filepath
+                    "embedding album art from {.art_filepath}", album
                )
                art.embed_item(
                    self._log,
@@ -519,7 +518,7 @@ class ConvertPlugin(BeetsPlugin):
        if os.path.exists(util.syspath(dest)):
            self._log.info(
-                "Skipping {} (target file exists)", album.art_filepath
+                "Skipping {.art_filepath} (target file exists)", album
            )
            return
@@ -529,8 +528,8 @@ class ConvertPlugin(BeetsPlugin):
        # Either copy or resize (while copying) the image.
        if maxwidth is not None:
            self._log.info(
-                "Resizing cover art from {} to {}",
-                album.art_filepath,
+                "Resizing cover art from {.art_filepath} to {}",
+                album,
                util.displayable_path(dest),
            )
            if not pretend:
@@ -540,9 +539,9 @@ class ConvertPlugin(BeetsPlugin):
                msg = "ln" if hardlink else ("ln -s" if link else "cp")
                self._log.info(
-                    "{} {} {}",
+                    "{} {.art_filepath} {}",
                    msg,
-                    album.art_filepath,
+                    album,
                    util.displayable_path(dest),
                )
            else:
@@ -553,9 +552,9 @@ class ConvertPlugin(BeetsPlugin):
                )
                self._log.info(
-                    "{} cover art from {} to {}",
+                    "{} cover art from {.art_filepath} to {}",
                    msg,
-                    album.art_filepath,
+                    album,
                    util.displayable_path(dest),
                )
                if hardlink:

@@ -251,16 +251,16 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
            response.raise_for_status()
        except requests.exceptions.RequestException as e:
            self._log.error(
-                "Error fetching data from {} API\n Error: {}",
-                self.data_source,
+                "Error fetching data from {.data_source} API\n Error: {}",
+                self,
                e,
            )
            return ()

        response_data: Sequence[IDResponse] = response.json().get("data", [])
        self._log.debug(
-            "Found {} result(s) from {} for '{}'",
+            "Found {} result(s) from {.data_source} for '{}'",
            len(response_data),
-            self.data_source,
+            self,
            query,
        )
        return response_data

@@ -254,24 +254,24 @@ class DuplicatesPlugin(BeetsPlugin):
        checksum = getattr(item, key, False)
        if not checksum:
            self._log.debug(
-                "key {} on item {} not cached:computing checksum",
+                "key {} on item {.filepath} not cached:computing checksum",
                key,
-                item.filepath,
+                item,
            )
            try:
                checksum = command_output(args).stdout
                setattr(item, key, checksum)
                item.store()
                self._log.debug(
-                    "computed checksum for {} using {}", item.title, key
+                    "computed checksum for {.title} using {}", item, key
                )
            except subprocess.CalledProcessError as e:
-                self._log.debug("failed to checksum {}: {}", item.filepath, e)
+                self._log.debug("failed to checksum {.filepath}: {}", item, e)
        else:
            self._log.debug(
-                "key {} on item {} cached:not computing checksum",
+                "key {} on item {.filepath} cached:not computing checksum",
                key,
-                item.filepath,
+                item,
            )
        return key, checksum
@@ -289,15 +289,15 @@ class DuplicatesPlugin(BeetsPlugin):
        values = [v for v in values if v not in (None, "")]
        if strict and len(values) < len(keys):
            self._log.debug(
-                "some keys {} on item {} are null or empty: skipping",
+                "some keys {} on item {.filepath} are null or empty: skipping",
                keys,
-                obj.filepath,
+                obj,
            )
        elif not strict and not len(values):
            self._log.debug(
-                "all keys {} on item {} are null or empty: skipping",
+                "all keys {} on item {.filepath} are null or empty: skipping",
                keys,
-                obj.filepath,
+                obj,
            )
        else:
            key = tuple(values)
@@ -356,10 +356,10 @@ class DuplicatesPlugin(BeetsPlugin):
                if value:
                    self._log.debug(
                        "key {} on item {} is null "
-                        "or empty: setting from item {}",
+                        "or empty: setting from item {.filepath}",
                        f,
                        displayable_path(objs[0].path),
-                        o.filepath,
+                        o,
                    )
                    setattr(objs[0], f, value)
                    objs[0].store()
@@ -380,10 +380,10 @@ class DuplicatesPlugin(BeetsPlugin):
                missing.add(i._db)
            self._log.debug(
                "item {} missing from album {}:"
-                " merging from {} into {}",
+                " merging from {.filepath} into {}",
                missing,
                objs[0],
-                o.filepath,
+                o,
                displayable_path(missing.destination()),
            )
            missing.move(operation=MoveOperation.COPY)

@@ -133,7 +133,7 @@ class Candidate:
        # get_size returns None if no local imaging backend is available
        if not self.size:
            self.size = ArtResizer.shared.get_size(self.path)
-            self._log.debug("image size: {}", self.size)
+            self._log.debug("image size: {.size}", self)

        if not self.size:
            self._log.warning(
@@ -151,7 +151,7 @@ class Candidate:
        # Check minimum dimension.
        if plugin.minwidth and self.size[0] < plugin.minwidth:
            self._log.debug(
-                "image too small ({} < {})", self.size[0], plugin.minwidth
+                "image too small ({} < {.minwidth})", self.size[0], plugin
            )
            return ImageAction.BAD
@@ -162,10 +162,10 @@ class Candidate:
            if edge_diff > plugin.margin_px:
                self._log.debug(
                    "image is not close enough to being "
-                    "square, ({} - {} > {})",
+                    "square, ({} - {} > {.margin_px})",
                    long_edge,
                    short_edge,
-                    plugin.margin_px,
+                    plugin,
                )
                return ImageAction.BAD
            elif plugin.margin_percent:
@@ -190,7 +190,7 @@ class Candidate:
        downscale = False
        if plugin.maxwidth and self.size[0] > plugin.maxwidth:
            self._log.debug(
-                "image needs rescaling ({} > {})", self.size[0], plugin.maxwidth
+                "image needs rescaling ({} > {.maxwidth})", self.size[0], plugin
            )
            downscale = True
@@ -200,9 +200,9 @@ class Candidate:
        filesize = os.stat(syspath(self.path)).st_size
        if filesize > plugin.max_filesize:
            self._log.debug(
-                "image needs resizing ({}B > {}B)",
+                "image needs resizing ({}B > {.max_filesize}B)",
                filesize,
-                plugin.max_filesize,
+                plugin,
            )
            downsize = True
@@ -213,9 +213,9 @@ class Candidate:
        reformat = fmt != plugin.cover_format
        if reformat:
            self._log.debug(
-                "image needs reformatting: {} -> {}",
+                "image needs reformatting: {} -> {.cover_format}",
                fmt,
-                plugin.cover_format,
+                plugin,
            )

        skip_check_for = skip_check_for or []
@@ -329,7 +329,7 @@ def _logged_get(log: Logger, *args, **kwargs) -> requests.Response:
        prepped.url, {}, None, None, None
    )
    send_kwargs.update(settings)
-    log.debug("{}: {}", message, prepped.url)
+    log.debug("{}: {.url}", message, prepped)
    return s.send(prepped, **send_kwargs)
@@ -542,14 +542,14 @@ class CoverArtArchive(RemoteArtSource):
        try:
            response = self.request(url)
        except requests.RequestException:
-            self._log.debug("{}: error receiving response", self.NAME)
+            self._log.debug("{.NAME}: error receiving response", self)
            return

        try:
            data = response.json()
        except ValueError:
            self._log.debug(
-                "{}: error loading response: {}", self.NAME, response.text
+                "{.NAME}: error loading response: {.text}", self, response
            )
            return
@@ -629,7 +629,7 @@ class AlbumArtOrg(RemoteArtSource):
        # Get the page from albumart.org.
        try:
            resp = self.request(self.URL, params={"asin": album.asin})
-            self._log.debug("scraped art URL: {}", resp.url)
+            self._log.debug("scraped art URL: {.url}", resp)
        except requests.RequestException:
            self._log.debug("error scraping art page")
            return
@@ -702,7 +702,7 @@ class GoogleImages(RemoteArtSource):
        try:
            data = response.json()
        except ValueError:
-            self._log.debug("google: error loading response: {}", response.text)
+            self._log.debug("google: error loading response: {.text}", response)
            return

        if "error" in data:
@@ -764,7 +764,7 @@ class FanartTV(RemoteArtSource):
            data = response.json()
        except ValueError:
            self._log.debug(
-                "fanart.tv: error loading response: {}", response.text
+                "fanart.tv: error loading response: {.text}", response
            )
            return
@@ -953,8 +953,8 @@ class Wikipedia(RemoteArtSource):
            self._log.debug("wikipedia: album not found on dbpedia")
        except (ValueError, KeyError, IndexError):
            self._log.debug(
-                "wikipedia: error scraping dbpedia response: {}",
-                dbpedia_response.text,
+                "wikipedia: error scraping dbpedia response: {.text}",
+                dbpedia_response,
            )

        # Ensure we have a filename before attempting to query wikipedia
@@ -1179,7 +1179,7 @@ class LastFM(RemoteArtSource):
        if "error" in data:
            if data["error"] == 6:
                self._log.debug(
-                    "lastfm: no results for {}", album.mb_albumid
+                    "lastfm: no results for {.mb_albumid}", album
                )
            else:
                self._log.error(
@@ -1200,7 +1200,7 @@ class LastFM(RemoteArtSource):
                    url=images[size], size=self.SIZES[size]
                )
        except ValueError:
-            self._log.debug("lastfm: error loading response: {}", response.text)
+            self._log.debug("lastfm: error loading response: {.text}", response)
            return
@@ -1244,7 +1244,7 @@ class Spotify(RemoteArtSource):
            soup = BeautifulSoup(html, "html.parser")
        except ValueError:
            self._log.debug(
-                "Spotify: error loading response: {}", response.text
+                "Spotify: error loading response: {.text}", response
            )
            return
@@ -1541,7 +1541,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
                    out = candidate
                    assert out.path is not None  # help mypy
                    self._log.debug(
-                        "using {.LOC} image {}", source, out.path
+                        "using {.LOC} image {.path}", source, out
                    )
                    break

        # Remove temporary files for invalid candidates.

@@ -150,10 +150,10 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
        # In case the artist is kept, do not update the artist fields.
        if keep_in_artist_field:
            self._log.info(
-                "artist: {} (Not changing due to keep_in_artist)", item.artist
+                "artist: {.artist} (Not changing due to keep_in_artist)", item
            )
        else:
-            self._log.info("artist: {} -> {}", item.artist, item.albumartist)
+            self._log.info("artist: {0.artist} -> {0.albumartist}", item)
            item.artist = item.albumartist

        if item.artist_sort:
@@ -166,7 +166,7 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
        feat_format = self.config["format"].as_str()
        new_format = feat_format.format(feat_part)
        new_title = f"{item.title} {new_format}"
-        self._log.info("title: {} -> {}", item.title, new_title)
+        self._log.info("title: {.title} -> {}", item, new_title)
        item.title = new_title

    def ft_in_title(
@@ -194,7 +194,7 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
        if not featured:
            return False

-        self._log.info("{}", item.filepath)
+        self._log.info("{.filepath}", item)

        # Attempt to find the featured artist.
        feat_part = find_feat_part(artist, albumartist)

@@ -83,7 +83,7 @@ class HookPlugin(BeetsPlugin):
            subprocess.check_call(command_pieces)
        except subprocess.CalledProcessError as exc:
            self._log.error(
-                "hook for {} exited with status {}", event, exc.returncode
+                "hook for {} exited with status {.returncode}", event, exc
            )
        except OSError as exc:
            self._log.error("hook for {} failed: {}", event, exc)

@@ -103,9 +103,9 @@ class ImportAddedPlugin(BeetsPlugin):
    def update_album_times(self, lib, album):
        if self.reimported_album(album):
            self._log.debug(
-                "Album '{}' is reimported, skipping import of "
+                "Album '{.filepath}' is reimported, skipping import of "
                "added dates for the album and its items.",
-                album.filepath,
+                album,
            )
            return
@@ -119,18 +119,17 @@ class ImportAddedPlugin(BeetsPlugin):
            item.store()
        album.added = min(album_mtimes)
        self._log.debug(
-            "Import of album '{}', selected album.added={} "
+            "Import of album '{0.album}', selected album.added={0.added} "
            "from item file mtimes.",
-            album.album,
-            album.added,
+            album,
        )
        album.store()

    def update_item_times(self, lib, item):
        if self.reimported_item(item):
            self._log.debug(
-                "Item '{}' is reimported, skipping import of added date.",
-                item.filepath,
+                "Item '{.filepath}' is reimported, skipping import of added date.",
+                item,
            )
            return

        mtime = self.item_mtime.pop(item.path, None)
@@ -139,9 +138,8 @@ class ImportAddedPlugin(BeetsPlugin):
            if self.config["preserve_mtimes"].get(bool):
                self.write_item_mtime(item, mtime)
            self._log.debug(
-                "Import of item '{}', selected item.added={}",
-                item.filepath,
-                item.added,
+                "Import of item '{0.filepath}', selected item.added={0.added}",
+                item,
            )
            item.store()
@@ -153,7 +151,6 @@ class ImportAddedPlugin(BeetsPlugin):
        if self.config["preserve_write_mtimes"].get(bool):
            self.write_item_mtime(item, item.added)
        self._log.debug(
-            "Write of item '{}', selected item.added={}",
-            item.filepath,
-            item.added,
+            "Write of item '{0.filepath}', selected item.added={0.added}",
+            item,
        )

@@ -180,7 +180,7 @@ class IPFSPlugin(BeetsPlugin):
            util.command_output(cmd)
        except (OSError, subprocess.CalledProcessError) as err:
            self._log.error(
-                "Failed to get {} from ipfs.\n{}", _hash, err.output
+                "Failed to get {} from ipfs.\n{.output}", _hash, err
            )
            return False

@@ -73,7 +73,7 @@ class KeyFinderPlugin(BeetsPlugin):
            except IndexError:
                # Sometimes keyfinder-cli returns 0 but with no key, usually
                # when the file is silent or corrupt, so we log and skip.
-                self._log.error("no key returned for path: {}", item.path)
+                self._log.error("no key returned for path: {.path}", item)
                continue

            try:
@@ -84,7 +84,7 @@ class KeyFinderPlugin(BeetsPlugin):
            item["initial_key"] = key
            self._log.info(
-                "added computed initial key {} for {}", key, item.filepath
+                "added computed initial key {} for {.filepath}", key, item
            )

            if write:

@@ -268,10 +268,9 @@ def process_tracks(lib, tracks, log):
        count = int(song.get("play_count", 0))
        new_count = int(tracks[num].get("playcount", 1))
        log.debug(
-            "match: {} - {} ({}) updating: play_count {} => {}",
-            song.artist,
-            song.title,
-            song.album,
+            "match: {0.artist} - {0.title} ({0.album}) updating:"
+            " play_count {1} => {2}",
+            song,
            count,
            new_count,
        )

@@ -508,9 +508,9 @@ class SearchBackend(SoupMixin, Backend):
            # log out the candidate that did not make it but was close.
            # This may show a matching candidate with some noise in the name
            self.debug(
-                "({}, {}) does not match ({}, {}) but dist was close: {:.2f}",
-                result.artist,
-                result.title,
+                "({0.artist}, {0.title}) does not match ({1}, {2}) but dist"
+                " was close: {3:.2f}",
+                result,
                target_artist,
                target_title,
                max_dist,
@@ -838,15 +838,16 @@ class Translator(RequestHandler):
        lyrics_language = langdetect.detect(new_lyrics).upper()
        if lyrics_language == self.to_language:
            self.info(
-                "🔵 Lyrics are already in the target language {}",
-                self.to_language,
+                "🔵 Lyrics are already in the target language {.to_language}",
+                self,
            )
            return new_lyrics
        if self.from_languages and lyrics_language not in self.from_languages:
            self.info(
-                "🔵 Configuration {} does not permit translating from {}",
-                self.from_languages,
+                "🔵 Configuration {.from_languages} does not permit translating"
+                " from {}",
+                self,
                lyrics_language,
            )
            return new_lyrics
@@ -854,7 +855,7 @@ class Translator(RequestHandler):
        lyrics, *url = new_lyrics.split("\n\nSource: ")
        with self.handle_request():
            translated_lines = self.append_translations(lyrics.splitlines())
-            self.info("🟢 Translated lyrics to {}", self.to_language)
+            self.info("🟢 Translated lyrics to {.to_language}", self)
            return "\n\nSource: ".join(["\n".join(translated_lines), *url])

@@ -226,8 +226,8 @@ class MissingPlugin(BeetsPlugin):
            for track_info in album_info.tracks:
                if track_info.track_id not in item_mbids:
                    self._log.debug(
-                        "track {} in album {}",
-                        track_info.track_id,
-                        album_info.album_id,
+                        "track {.track_id} in album {.album_id}",
+                        track_info,
+                        album_info,
                    )
                    yield _item(track_info, album_info, album.id)

@@ -51,8 +51,8 @@ class MPDClientWrapper:
        if not self.strip_path.endswith("/"):
            self.strip_path += "/"

-        self._log.debug("music_directory: {}", self.music_directory)
-        self._log.debug("strip_path: {}", self.strip_path)
+        self._log.debug("music_directory: {.music_directory}", self)
+        self._log.debug("strip_path: {.strip_path}", self)

        self.client = mpd.MPDClient()
@@ -188,10 +188,10 @@ class MPDStats:
        item.store()
        self._log.debug(
-            "updated: {} = {} [{}]",
+            "updated: {} = {} [{.filepath}]",
            attribute,
            item[attribute],
-            item.filepath,
+            item,
        )

    def update_rating(self, item, skipped):

@@ -101,8 +101,8 @@ class MPDUpdatePlugin(BeetsPlugin):
        try:
            s = BufferedSocket(host, port)
-        except OSError as e:
-            self._log.warning("MPD connection failed: {}", str(e.strerror))
+        except OSError:
+            self._log.warning("MPD connection failed", exc_info=True)
            return

        resp = s.readline()
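
This hunk changes behaviour slightly rather than only the call shape: instead of interpolating e.strerror it hands the active exception to the logging machinery. That part is plain standard-library logging:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger("demo")

    try:
        raise OSError("connection refused")
    except OSError:
        # exc_info=True attaches the current exception and traceback to the
        # record, so the message no longer needs to spell out e.strerror.
        log.warning("MPD connection failed", exc_info=True)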

@@ -179,9 +179,8 @@ class ParentWorkPlugin(BeetsPlugin):
        if not item.mb_workid:
            self._log.info(
-                "No work for {}, add one at https://musicbrainz.org/recording/{}",
+                "No work for {0}, add one at https://musicbrainz.org/recording/{0.mb_trackid}",
                item,
-                item.mb_trackid,
            )
            return

@@ -142,7 +142,7 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin):
            dir_contents = os.listdir(playlist_dir)
        except OSError:
            self._log.warning(
-                "Unable to open playlist directory {}", self.playlist_dir
+                "Unable to open playlist directory {.playlist_dir}", self
            )
            return

@@ -141,9 +141,8 @@ class RgTask:
        item.rg_track_peak = track_gain.peak
        item.store()
        self._log.debug(
-            "applied track gain {} LU, peak {} of FS",
-            item.rg_track_gain,
-            item.rg_track_peak,
+            "applied track gain {0.rg_track_gain} LU, peak {0.rg_track_peak} of FS",
+            item,
        )

    def _store_album_gain(self, item: Item, album_gain: Gain):
@@ -155,9 +154,8 @@ class RgTask:
        item.rg_album_peak = album_gain.peak
        item.store()
        self._log.debug(
-            "applied album gain {} LU, peak {} of FS",
-            item.rg_album_gain,
-            item.rg_album_peak,
+            "applied album gain {0.rg_album_gain} LU, peak {0.rg_album_peak} of FS",
+            item,
        )

    def _store_track(self, write: bool):
@@ -230,7 +228,7 @@ class R128Task(RgTask):
    def _store_track_gain(self, item: Item, track_gain: Gain):
        item.r128_track_gain = track_gain.gain
        item.store()
-        self._log.debug("applied r128 track gain {} LU", item.r128_track_gain)
+        self._log.debug("applied r128 track gain {.r128_track_gain} LU", item)

    def _store_album_gain(self, item: Item, album_gain: Gain):
        """
@@ -239,7 +237,7 @@ class R128Task(RgTask):
        """
        item.r128_album_gain = album_gain.gain
        item.store()
-        self._log.debug("applied r128 album gain {} LU", item.r128_album_gain)
+        self._log.debug("applied r128 album gain {.r128_album_gain} LU", item)


AnyRgTask = TypeVar("AnyRgTask", bound=RgTask)
@@ -380,7 +378,7 @@ class FfmpegBackend(Backend):
        album_gain = target_level_lufs - album_gain
        self._log.debug(
-            "{}: gain {} LU, peak {}", task.album, album_gain, album_peak
+            "{.album}: gain {} LU, peak {}", task, album_gain, album_peak
        )
        task.album_gain = Gain(album_gain, album_peak)
@@ -1093,9 +1091,8 @@ class AudioToolsBackend(Backend):
        )
        self._log.debug(
-            "ReplayGain for track {} - {}: {2:.2f}, {3:.2f}",
-            item.artist,
-            item.title,
+            "ReplayGain for track {0.artist} - {0.title}: {1:.2f}, {2:.2f}",
+            item,
            rg_track_gain,
            rg_track_peak,
        )
@@ -1133,8 +1130,8 @@ class AudioToolsBackend(Backend):
            rg_album_gain, task.target_level
        )
        self._log.debug(
-            "ReplayGain for album {}: {.2f}, {.2f}",
-            task.items[0].album,
+            "ReplayGain for album {.items[0].album}: {.2f}, {.2f}",
+            task,
            rg_album_gain,
            rg_album_peak,
        )

@@ -59,7 +59,7 @@ class ScrubPlugin(BeetsPlugin):
        def scrub_func(lib, opts, args):
            # Walk through matching files and remove tags.
            for item in lib.items(args):
-                self._log.info("scrubbing: {}", item.filepath)
+                self._log.info("scrubbing: {.filepath}", item)
                self._scrub_item(item, opts.write)

        scrub_cmd = ui.Subcommand("scrub", help="clean audio tags")
@@ -147,5 +147,5 @@ class ScrubPlugin(BeetsPlugin):
    def import_task_files(self, session, task):
        """Automatically scrub imported files."""
        for item in task.imported_items():
-            self._log.debug("auto-scrubbing {}", item.filepath)
+            self._log.debug("auto-scrubbing {.filepath}", item)
            self._scrub_item(item, ui.should_write())

@@ -188,9 +188,7 @@ class SpotifyPlugin(
        self.access_token = response.json()["access_token"]

        # Save the token for later use.
-        self._log.debug(
-            "{} access token: {}", self.data_source, self.access_token
-        )
+        self._log.debug("{0.data_source} access token: {0.access_token}", self)
        with open(self._tokenfile(), "w") as f:
            json.dump({"access_token": self.access_token}, f)
@@ -451,9 +449,9 @@ class SpotifyPlugin(
            return ()
        response_data = response.get(f"{query_type}s", {}).get("items", [])
        self._log.debug(
-            "Found {} result(s) from {} for '{}'",
+            "Found {} result(s) from {.data_source} for '{}'",
            len(response_data),
-            self.data_source,
+            self,
            query,
        )
        return response_data
@@ -539,8 +537,8 @@ class SpotifyPlugin(
        if not items:
            self._log.debug(
-                "Your beets query returned no items, skipping {}.",
-                self.data_source,
+                "Your beets query returned no items, skipping {.data_source}.",
+                self,
            )
            return
@@ -595,8 +593,8 @@ class SpotifyPlugin(
                or self.config["tiebreak"].get() == "first"
            ):
                self._log.debug(
-                    "{} track(s) found, count: {}",
-                    self.data_source,
+                    "{.data_source} track(s) found, count: {}",
+                    self,
                    len(response_data_tracks),
                )
                chosen_result = response_data_tracks[0]
@@ -619,19 +617,19 @@ class SpotifyPlugin(
        if failure_count > 0:
            if self.config["show_failures"].get():
                self._log.info(
-                    "{} track(s) did not match a {} ID:",
+                    "{} track(s) did not match a {.data_source} ID:",
                    failure_count,
-                    self.data_source,
+                    self,
                )
                for track in failures:
                    self._log.info("track: {}", track)
                self._log.info("")
            else:
                self._log.warning(
-                    "{} track(s) did not match a {} ID:\n"
+                    "{} track(s) did not match a {.data_source} ID:\n"
                    "use --show-failures to display",
                    failure_count,
-                    self.data_source,
+                    self,
                )

        return results

@@ -108,7 +108,7 @@ class SubsonicUpdate(BeetsPlugin):
        auth = self.config["auth"].as_str()
        url = self.__format_url("startScan")
        self._log.debug("URL is {}", url)
-        self._log.debug("auth type is {}", self.config["auth"])
+        self._log.debug("auth type is {.config[auth]}", self)

        if auth == "token":
            salt, token = self.__create_token()

@@ -127,7 +127,7 @@ class ThumbnailsPlugin(BeetsPlugin):
        size = ArtResizer.shared.get_size(album.artpath)
        if not size:
            self._log.warning(
-                "problem getting the picture size for {}", album.artpath
+                "problem getting the picture size for {.artpath}", album
            )
            return

@@ -275,6 +275,7 @@ select = [
     "E",   # pycodestyle
     "F",   # pyflakes
     # "B", # flake8-bugbear
+    "G",   # flake8-logging-format
     "I",   # isort
     "ISC", # flake8-implicit-str-concat
     "N",   # pep8-naming