Replace logging f-strings with arguments

Šarūnas Nejus 2025-07-22 05:02:58 +01:00
parent d93ddf8dd4
commit d6b6ac3387
GPG key ID: DD28F6704DBE3435
22 changed files with 58 additions and 62 deletions
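Every hunk below follows the same pattern: an eager f-string such as log.debug(f"found duplicates: {ids}") becomes a deferred call, log.debug("found duplicates: {}", ids). Beets' loggers accept str.format-style placeholders and only build the message when a record is actually emitted, so disabled debug logging no longer pays for repr() calls and string building. As a rough illustration of the idea (not beets' actual logger implementation; the BraceAdapter and Message names are made up here), the stdlib logging cookbook's brace-style adapter recipe performs the same deferral:

import logging


class Message:
    """Hold a '{}'-style format string; rendering is deferred to __str__()."""

    def __init__(self, fmt, args):
        self.fmt = fmt
        self.args = args

    def __str__(self):
        return self.fmt.format(*self.args)


class BraceAdapter(logging.LoggerAdapter):
    """Accept log.debug("msg {}", arg) calls and format only when emitted."""

    def log(self, level, msg, *args, **kwargs):
        if self.isEnabledFor(level):
            msg, kwargs = self.process(msg, kwargs)
            self.logger.log(level, Message(msg, args), **kwargs)


logging.basicConfig(level=logging.DEBUG)
log = BraceAdapter(logging.getLogger("demo"), {})
# Logs "found duplicates: [1, 2, 3]"; with level=INFO, format() is never called.
log.debug("found duplicates: {}", [1, 2, 3])

The arguments themselves are still evaluated at the call site, so the saving is in skipped formatting, not skipped computation.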

@ -341,7 +341,7 @@ def _resolve_duplicates(session: ImportSession, task: ImportTask):
if task.choice_flag in (Action.ASIS, Action.APPLY, Action.RETAG):
found_duplicates = task.find_duplicates(session.lib)
if found_duplicates:
log.debug(f"found duplicates: {[o.id for o in found_duplicates]}")
log.debug("found duplicates: {}", [o.id for o in found_duplicates])
# Get the default action to follow from config.
duplicate_action = config["import"]["duplicate_action"].as_choice(

@ -1365,7 +1365,7 @@ def import_func(lib, opts, args: list[str]):
for path in paths_from_logfiles:
if not os.path.exists(syspath(normpath(path))):
log.warning(
f"No such file or directory: {displayable_path(path)}"
"No such file or directory: {}", displayable_path(path)
)
continue

@ -221,11 +221,12 @@ def sorted_walk(
# Get all the directories and files at this level.
try:
contents = os.listdir(syspath(bytes_path))
except OSError as exc:
except OSError:
if logger:
logger.warning(
f"could not list directory {displayable_path(bytes_path)}:"
f" {exc.strerror}"
"could not list directory {}",
displayable_path(bytes_path),
exc_info=True,
)
return
dirs = []
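The sorted_walk hunk above also drops the manual exc.strerror interpolation in favour of exc_info=True, which asks the logging machinery to capture the exception currently being handled and append its traceback to the record. A stdlib-only sketch of that keyword (the path is illustrative; plain logging uses %s placeholders rather than beets' {} style):

import logging
import os

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("demo")

path = "/no/such/dir"  # illustrative
try:
    os.listdir(path)
except OSError:
    # exc_info=True attaches the active exception, so the traceback (which
    # carries strerror) is emitted without interpolating it by hand.
    logger.warning("could not list directory %s", path, exc_info=True)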

@ -696,7 +696,7 @@ class ArtResizer:
for backend_cls in BACKEND_CLASSES:
try:
self.local_method = backend_cls()
log.debug(f"artresizer: method is {self.local_method.NAME}")
log.debug("artresizer: method is {.local_method.NAME}", self)
break
except LocalBackendNotAvailableError:
continue
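Several rewritten calls, starting with the artresizer one above, move attribute access into the placeholder itself ("{.local_method.NAME}", "{.data_source}") so the object is passed once and the lookup only runs if the message is rendered. This is plain str.format behaviour; a standalone check (the Backend class and its NAME value are made up for the example):

class Backend:
    NAME = "PIL"  # illustrative value


backend = Backend()
# "{.NAME}" reads the NAME attribute of the first positional argument;
# dotted chains such as "{.local_method.NAME}" resolve the same way.
assert "artresizer: method is {.NAME}".format(backend) == "artresizer: method is PIL"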

@ -58,7 +58,8 @@ def extract_release_id(source: str, id_: str) -> str | None:
source_pattern = PATTERN_BY_SOURCE[source.lower()]
except KeyError:
log.debug(
f"Unknown source '{source}' for ID extraction. Returning id/url as-is."
"Unknown source '{}' for ID extraction. Returning id/url as-is.",
source,
)
return id_

@ -1108,8 +1108,8 @@ class Server(BaseServer):
self.lib = library
self.player = gstplayer.GstPlayer(self.play_finished)
self.cmd_update(None)
log.info(f"Server ready and listening on {host}:{port}")
log.debug(f"Listening for control signals on {host}:{ctrl_port}")
log.info("Server ready and listening on {}:{}", host, port)
log.debug("Listening for control signals on {}:{}", host, ctrl_port)
def run(self):
self.player.run()

@ -241,7 +241,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
query = self._construct_search_query(
query_string=query_string, filters=filters
)
self._log.debug(f"Searching {self.data_source} for '{query}'")
self._log.debug("Searching {.data_source} for '{}'", self, query)
try:
response = requests.get(
f"{self.search_url}{query_type}",

@ -136,7 +136,7 @@ class EmbedCoverArtPlugin(BeetsPlugin):
response = requests.get(opts.url, timeout=5)
response.raise_for_status()
except requests.exceptions.RequestException as e:
self._log.error(f"{e}")
self._log.error("{}", e)
return
extension = guess_extension(response.headers["Content-Type"])
if extension is None:
@ -148,7 +148,7 @@ class EmbedCoverArtPlugin(BeetsPlugin):
with open(tempimg, "wb") as f:
f.write(response.content)
except Exception as e:
self._log.error(f"Unable to save image: {e}")
self._log.error("Unable to save image: {}", e)
return
items = lib.items(args)
# Confirm with user.

@ -184,7 +184,7 @@ class EmbyUpdate(BeetsPlugin):
# Get user information from the Emby API.
user = get_user(host, port, username)
if not user:
self._log.warning(f"User {username} could not be found.")
self._log.warning("User {} could not be found.", username)
return
userid = user[0]["Id"]

@ -112,7 +112,7 @@ def apply_matches(d, log):
for item in d:
if not item.artist:
item.artist = artist
log.info(f"Artist replaced with: {item.artist}")
log.info("Artist replaced with: {.artist}", item)
# No artist field: remaining field is the title.
else:
@ -122,11 +122,11 @@ def apply_matches(d, log):
for item in d:
if bad_title(item.title):
item.title = str(d[item][title_field])
log.info(f"Title replaced with: {item.title}")
log.info("Title replaced with: {.title}", item)
if "track" in d[item] and item.track == 0:
item.track = int(d[item]["track"])
log.info(f"Track replaced with: {item.track}")
log.info("Track replaced with: {.track}", item)
# Plugin structure and hook into import process.

@ -232,7 +232,7 @@ class IPFSPlugin(BeetsPlugin):
try:
util.command_output(cmd)
except (OSError, subprocess.CalledProcessError):
self._log.error(f"Could not import {_hash}")
self._log.error("Could not import {}", _hash)
return False
# add all albums from remotes into a combined library

@ -277,7 +277,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):
genre = self._genre_cache[key]
if self.config["extended_debug"]:
self._log.debug(f"last.fm (unfiltered) {entity} tags: {genre}")
self._log.debug("last.fm (unfiltered) {} tags: {}", entity, genre)
return genre
def fetch_album_genre(self, obj):
@ -327,8 +327,8 @@ class LastGenrePlugin(plugins.BeetsPlugin):
self, old: list[str], new: list[str]
) -> list[str]:
"""Combine old and new genres and process via _resolve_genres."""
self._log.debug(f"raw last.fm tags: {new}")
self._log.debug(f"existing genres taken into account: {old}")
self._log.debug("raw last.fm tags: {}", new)
self._log.debug("existing genres taken into account: {}", old)
combined = old + new
return self._resolve_genres(combined)
@ -583,9 +583,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):
item = task.item
item.genre, src = self._get_genre(item)
self._log.debug(
'genre for track "{0.title}" ({1}): {0.genre}',
item,
src,
'genre for track "{0.title}" ({1}): {0.genre}', item, src
)
item.store()

@ -42,7 +42,7 @@ class ListenBrainzPlugin(BeetsPlugin):
unknown_total = 0
ls = self.get_listens()
tracks = self.get_tracks_from_listens(ls)
log.info(f"Found {len(ls)} listens")
log.info("Found {} listens", len(ls))
if tracks:
found, unknown = process_tracks(lib, tracks, log)
found_total += found
@ -63,7 +63,7 @@ class ListenBrainzPlugin(BeetsPlugin):
response.raise_for_status()
return response.json()
except requests.exceptions.RequestException as e:
self._log.debug(f"Invalid Search Error: {e}")
self._log.debug("Invalid Search Error: {}", e)
return None
def get_listens(self, min_ts=None, max_ts=None, count=None):
@ -156,7 +156,7 @@ class ListenBrainzPlugin(BeetsPlugin):
playlist_info = playlist.get("playlist")
if playlist_info.get("creator") == "listenbrainz":
title = playlist_info.get("title")
self._log.debug(f"Playlist title: {title}")
self._log.debug("Playlist title: {}", title)
playlist_type = (
"Exploration" if "Exploration" in title else "Jams"
)
@ -179,9 +179,7 @@ class ListenBrainzPlugin(BeetsPlugin):
listenbrainz_playlists, key=lambda x: x["date"], reverse=True
)
for playlist in listenbrainz_playlists:
self._log.debug(
f"Playlist: {playlist['type']} - {playlist['date']}"
)
self._log.debug("Playlist: {0[type]} - {0[date]}", playlist)
return listenbrainz_playlists
def get_playlist(self, identifier):

@ -73,7 +73,7 @@ class MBSubmitPlugin(BeetsPlugin):
subprocess.Popen([picard_path] + paths)
self._log.info("launched picard from\n{}", picard_path)
except OSError as exc:
self._log.error(f"Could not open picard, got error:\n{exc}")
self._log.error("Could not open picard, got error:\n{}", exc)
def print_tracks(self, session, task):
for i in sorted(task.items, key=lambda i: i.track):

@ -117,13 +117,13 @@ class MetaSyncPlugin(BeetsPlugin):
try:
cls = META_SOURCES[player]
except KeyError:
self._log.error(f"Unknown metadata source '{player}'")
self._log.error("Unknown metadata source '{}'", player)
try:
meta_source_instances[player] = cls(self.config, self._log)
except (ImportError, ConfigValueError) as e:
self._log.error(
f"Failed to instantiate metadata source {player!r}: {e}"
"Failed to instantiate metadata source {!r}: {}", player, e
)
# Avoid needlessly iterating over items

@ -76,7 +76,7 @@ class Itunes(MetaSource):
library_path = config["itunes"]["library"].as_filename()
try:
self._log.debug(f"loading iTunes library from {library_path}")
self._log.debug("loading iTunes library from {}", library_path)
with create_temporary_copy(library_path) as library_copy:
with open(library_copy, "rb") as library_copy_f:
raw_library = plistlib.load(library_copy_f)
@ -104,7 +104,7 @@ class Itunes(MetaSource):
result = self.collection.get(util.bytestring_path(item.path).lower())
if not result:
self._log.warning(f"no iTunes match found for {item}")
self._log.warning("no iTunes match found for {}", item)
return
item.itunes_rating = result.get("Rating")

@ -123,7 +123,7 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin):
def cli_exit(self, lib):
for playlist in self.find_playlists():
self._log.info(f"Updating playlist: {playlist}")
self._log.info("Updating playlist: {}", playlist)
base_dir = beets.util.bytestring_path(
self.relative_to
if self.relative_to

@ -380,10 +380,7 @@ class FfmpegBackend(Backend):
album_gain = target_level_lufs - album_gain
self._log.debug(
"{}: gain {} LU, peak {}",
task.album,
album_gain,
album_peak,
"{}: gain {} LU, peak {}", task.album, album_gain, album_peak
)
task.album_gain = Gain(album_gain, album_peak)
@ -426,7 +423,7 @@ class FfmpegBackend(Backend):
target_level_lufs = db_to_lufs(target_level)
# call ffmpeg
self._log.debug(f"analyzing {item}")
self._log.debug("analyzing {}", item)
cmd = self._construct_cmd(item, peak_method)
self._log.debug("executing {}", " ".join(map(displayable_path, cmd)))
output = call(cmd, self._log).stderr.splitlines()
@ -496,10 +493,10 @@ class FfmpegBackend(Backend):
if self._parse_float(b"M: " + line[1]) >= gating_threshold:
n_blocks += 1
self._log.debug(
f"{item}: {n_blocks} blocks over {gating_threshold} LUFS"
"{}: {} blocks over {} LUFS", item, n_blocks, gating_threshold
)
self._log.debug(f"{item}: gain {gain} LU, peak {peak}")
self._log.debug("{}: gain {} LU, peak {}", item, gain, peak)
return Gain(gain, peak), n_blocks

@ -228,16 +228,16 @@ class SpotifyPlugin(
self._log.error("ReadTimeout.")
raise APIError("Request timed out.")
except requests.exceptions.ConnectionError as e:
self._log.error(f"Network error: {e}")
self._log.error("Network error: {}", e)
raise APIError("Network error.")
except requests.exceptions.RequestException as e:
if e.response is None:
self._log.error(f"Request failed: {e}")
self._log.error("Request failed: {}", e)
raise APIError("Request failed.")
if e.response.status_code == 401:
self._log.debug(
f"{self.data_source} access token has expired. "
"Reauthenticating."
"{.data_source} access token has expired. Reauthenticating.",
self,
)
self._authenticate()
return self._handle_response(
@ -256,7 +256,7 @@ class SpotifyPlugin(
"Retry-After", DEFAULT_WAITING_TIME
)
self._log.debug(
f"Too many API requests. Retrying after {seconds} seconds."
"Too many API requests. Retrying after {} seconds.", seconds
)
time.sleep(int(seconds) + 1)
return self._handle_response(
@ -277,7 +277,7 @@ class SpotifyPlugin(
f"URL:\n{url}\nparams:\n{params}"
)
else:
self._log.error(f"Request failed. Error: {e}")
self._log.error("Request failed. Error: {}", e)
raise APIError("Request failed.")
def album_for_id(self, album_id: str) -> AlbumInfo | None:
@ -439,7 +439,7 @@ class SpotifyPlugin(
filters=filters, query_string=query_string
)
self._log.debug(f"Searching {self.data_source} for '{query}'")
self._log.debug("Searching {.data_source} for '{}'", self, query)
try:
response = self._handle_response(
"get",
@ -648,7 +648,7 @@ class SpotifyPlugin(
spotify_ids = [track_data["id"] for track_data in results]
if self.config["mode"].get() == "open":
self._log.info(
f"Attempting to open {self.data_source} with playlist"
"Attempting to open {.data_source} with playlist", self
)
spotify_url = (
f"spotify:trackset:Playlist:{','.join(spotify_ids)}"
@ -659,7 +659,7 @@ class SpotifyPlugin(
print(f"{self.open_track_url}{spotify_id}")
else:
self._log.warning(
f"No {self.data_source} tracks found from beets query"
"No {.data_source} tracks found from beets query", self
)
def _fetch_info(self, items, write, force):

@ -145,14 +145,15 @@ class SubsonicUpdate(BeetsPlugin):
and json["subsonic-response"]["status"] == "ok"
):
count = json["subsonic-response"]["scanStatus"]["count"]
self._log.info(f"Updating Subsonic; scanning {count} tracks")
self._log.info("Updating Subsonic; scanning {} tracks", count)
elif (
response.status_code == 200
and json["subsonic-response"]["status"] == "failed"
):
error_message = json["subsonic-response"]["error"]["message"]
self._log.error(f"Error: {error_message}")
self._log.error(
"Error: {[subsonic-response][error][message]}", json
)
else:
self._log.error("Error: {}", json)
except Exception as error:
self._log.error(f"Error: {error}")
self._log.error("Error: {}", error)
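The Subsonic hunk relies on item access inside the placeholder: "{[subsonic-response][error][message]}" walks the nested response dict when the message is rendered. The bracketed text is taken as a literal key (no quoting), so hyphenated keys work. A standalone check with a made-up payload:

payload = {"subsonic-response": {"error": {"message": "not authorised"}}}  # illustrative
# Each [key] indexes into the previous value; non-numeric keys are treated as
# literal dict keys, so "subsonic-response" needs no quotes or escaping.
assert (
    "Error: {[subsonic-response][error][message]}".format(payload)
    == "Error: not authorised"
)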

@ -104,7 +104,7 @@ class ThumbnailsPlugin(BeetsPlugin):
f"Thumbnails: ArtResizer backend {ArtResizer.shared.method}"
f" unexpectedly cannot write image metadata."
)
self._log.debug(f"using {ArtResizer.shared.method} to write metadata")
self._log.debug("using {.shared.method} to write metadata", ArtResizer)
uri_getter = GioURI()
if not uri_getter.available:

@ -58,9 +58,9 @@ class LoggingLevelTest(AsIsImporterMixin, PluginMixin, ImportTestCase):
self.register_listener("dummy_event", self.listener)
def log_all(self, name):
self._log.debug(f"debug {name}")
self._log.info(f"info {name}")
self._log.warning(f"warning {name}")
self._log.debug("debug {}", name)
self._log.info("info {}", name)
self._log.warning("warning {}", name)
def commands(self):
cmd = ui.Subcommand("dummy")
@ -172,9 +172,9 @@ class ConcurrentEventsTest(AsIsImporterMixin, ImportTestCase):
self.t1_step = self.t2_step = 0
def log_all(self, name):
self._log.debug(f"debug {name}")
self._log.info(f"info {name}")
self._log.warning(f"warning {name}")
self._log.debug("debug {}", name)
self._log.info("info {}", name)
self._log.warning("warning {}", name)
def listener1(self):
try: