Mirror of https://github.com/beetbox/beets.git
Replace percent, string concat, format calls with f-strings (#5890)
This PR modernizes the codebase by replacing all `str.format()` calls, uses of the `%` operator, and most string concatenation with f-string literals. Fixes #5293. Supersedes #5337. Once this is reviewed, I will squash all commits into one and add the hash to the `.git-blame-ignore-revs` file.
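For reference, the three styles being replaced and their f-string equivalent look like this (a minimal illustration, not taken from the diff):

    title, count = "Alright, Still", 13

    # The three styles this PR removes:
    via_percent = "%s has %d tracks" % (title, count)
    via_format = "{0} has {1} tracks".format(title, count)
    via_concat = title + " has " + str(count) + " tracks"

    # The single f-string replacement:
    via_fstring = f"{title} has {count} tracks"

    assert via_percent == via_format == via_concat == via_fstring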
Commit: b1c93552a3

120 changed files with 1252 additions and 1584 deletions
.git-blame-ignore-revs

@@ -57,3 +57,13 @@ c490ac5810b70f3cf5fd8649669838e8fdb19f4d
 769dcdc88a1263638ae25944ba6b2be3e8933666
 # Reformat all docs using docstrfmt
 ab5acaabb3cd24c482adb7fa4800c89fd6a2f08d
+# Replace format calls with f-strings
+4a361bd501e85de12c91c2474c423559ca672852
+# Replace percent formatting
+9352a79e4108bd67f7e40b1e944c01e0a7353272
+# Replace string concatenation (' + ')
+1c16b2b3087e9c3635d68d41c9541c4319d0bdbe
+# Do not use backslashes to deal with long strings
+2fccf64efe82851861e195b521b14680b480a42a
+# Do not use explicit indices for logging args when not needed
+d93ddf8dd43e4f9ed072a03829e287c78d2570a2
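(With these hashes recorded, reviewers can hide the mechanical commits from annotation output by pointing git at the file, e.g. `git config blame.ignoreRevsFile .git-blame-ignore-revs`, assuming git 2.23 or later.)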
@@ -238,25 +238,22 @@ There are a few coding conventions we use in beets:

 .. code-block:: python

     with g.lib.transaction() as tx:
-        rows = tx.query(
-            "SELECT DISTINCT '{0}' FROM '{1}' ORDER BY '{2}'".format(
-                field, model._table, sort_field
-            )
-        )
+        rows = tx.query(f"SELECT DISTINCT {field} FROM {model._table} ORDER BY {sort_field}")

 To fetch Item objects from the database, use lib.items(…) and supply a query
 as an argument. Resist the urge to write raw SQL for your query. If you must
-use lower-level queries into the database, do this:
+use lower-level queries into the database, do this, for example:

 .. code-block:: python

     with lib.transaction() as tx:
-        rows = tx.query("SELECT …")
+        rows = tx.query("SELECT path FROM items WHERE album_id = ?", (album_id,))

 Transaction objects help control concurrent access to the database and assist
 in debugging conflicting accesses.

-- ``str.format()`` should be used instead of the ``%`` operator
+- f-strings should be used instead of the ``%`` operator and ``str.format()``
+  calls.
 - Never ``print`` informational messages; use the `logging
   <http://docs.python.org/library/logging.html>`__ module instead. In
   particular, we have our own logging shim, so you’ll see ``from beets import
@@ -264,7 +261,7 @@ There are a few coding conventions we use in beets:

 - The loggers use `str.format
   <http://docs.python.org/library/stdtypes.html#str.format>`__-style logging
-  instead of ``%``-style, so you can type ``log.debug("{0}", obj)`` to do your
+  instead of ``%``-style, so you can type ``log.debug("{}", obj)`` to do your
   formatting.

 - Exception handlers must use ``except A as B:`` instead of ``except A, B:``.
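A minimal sketch of the logging convention described above, assuming the usual beets imports (the shim internals are not shown in this diff):

    from beets import logging

    log = logging.getLogger("beets")

    # The shim keeps the template and its arguments separate and only
    # interpolates when a handler actually emits the record; `{}` fields
    # auto-number, which is why explicit `{0}` indices are unnecessary.
    log.debug("Tagging {} - {}", "Some Artist", "Some Album")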
@@ -35,7 +35,7 @@ class IncludeLazyConfig(confuse.LazyConfig):
         except confuse.NotFoundError:
             pass
         except confuse.ConfigReadError as err:
-            stderr.write("configuration `import` failed: {}".format(err.reason))
+            stderr.write(f"configuration `import` failed: {err.reason}")


 config = IncludeLazyConfig("beets", __name__)
beets/art.py
@@ -38,11 +38,7 @@ def get_art(log, item):
     try:
         mf = mediafile.MediaFile(syspath(item.path))
     except mediafile.UnreadableFileError as exc:
-        log.warning(
-            "Could not extract art from {0}: {1}",
-            displayable_path(item.path),
-            exc,
-        )
+        log.warning("Could not extract art from {.filepath}: {}", item, exc)
         return

     return mf.art
@@ -83,16 +79,16 @@ def embed_item(

     # Get the `Image` object from the file.
     try:
-        log.debug("embedding {0}", displayable_path(imagepath))
+        log.debug("embedding {}", displayable_path(imagepath))
         image = mediafile_image(imagepath, maxwidth)
     except OSError as exc:
-        log.warning("could not read image file: {0}", exc)
+        log.warning("could not read image file: {}", exc)
         return

     # Make sure the image kind is safe (some formats only support PNG
     # and JPEG).
     if image.mime_type not in ("image/jpeg", "image/png"):
-        log.info("not embedding image of unsupported type: {}", image.mime_type)
+        log.info("not embedding image of unsupported type: {.mime_type}", image)
         return

     item.try_write(path=itempath, tags={"images": [image]}, id3v23=id3v23)
@@ -110,11 +106,11 @@ def embed_album(
     """Embed album art into all of the album's items."""
     imagepath = album.artpath
     if not imagepath:
-        log.info("No album art present for {0}", album)
+        log.info("No album art present for {}", album)
         return
     if not os.path.isfile(syspath(imagepath)):
         log.info(
-            "Album art not found at {0} for {1}",
+            "Album art not found at {} for {}",
             displayable_path(imagepath),
             album,
         )
@@ -122,7 +118,7 @@ def embed_album(
     if maxwidth:
         imagepath = resize_image(log, imagepath, maxwidth, quality)

-    log.info("Embedding album art into {0}", album)
+    log.info("Embedding album art into {}", album)

     for item in album.items():
         embed_item(
@@ -143,8 +139,7 @@ def resize_image(log, imagepath, maxwidth, quality):
     specified quality level.
     """
     log.debug(
-        "Resizing album art to {0} pixels wide and encoding at quality \
-level {1}",
+        "Resizing album art to {} pixels wide and encoding at quality level {}",
         maxwidth,
         quality,
     )
@@ -184,18 +179,18 @@ def extract(log, outpath, item):
     art = get_art(log, item)
     outpath = bytestring_path(outpath)
     if not art:
-        log.info("No album art present in {0}, skipping.", item)
+        log.info("No album art present in {}, skipping.", item)
         return

     # Add an extension to the filename.
     ext = mediafile.image_extension(art)
     if not ext:
-        log.warning("Unknown image type in {0}.", displayable_path(item.path))
+        log.warning("Unknown image type in {.filepath}.", item)
         return
-    outpath += bytestring_path("." + ext)
+    outpath += bytestring_path(f".{ext}")

     log.info(
-        "Extracting album art from: {0} to: {1}",
+        "Extracting album art from: {} to: {}",
         item,
         displayable_path(outpath),
     )
@@ -213,7 +208,7 @@ def extract_first(log, outpath, items):

 def clear(log, lib, query):
     items = lib.items(query)
-    log.info("Clearing album art from {0} items", len(items))
+    log.info("Clearing album art from {} items", len(items))
     for item in items:
-        log.debug("Clearing art for {0}", item)
+        log.debug("Clearing art for {}", item)
         item.try_write(tags={"images": None})
@@ -261,7 +261,7 @@ def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]):
             continue

         for suffix in "year", "month", "day":
-            key = prefix + suffix
+            key = f"{prefix}{suffix}"
             value = getattr(album_info, key) or 0

             # If we don't even have a year, apply nothing.
@@ -78,10 +78,10 @@ def string_dist(str1: str | None, str2: str | None) -> float:
     # example, "the something" should be considered equal to
     # "something, the".
     for word in SD_END_WORDS:
-        if str1.endswith(", %s" % word):
-            str1 = "{} {}".format(word, str1[: -len(word) - 2])
-        if str2.endswith(", %s" % word):
-            str2 = "{} {}".format(word, str2[: -len(word) - 2])
+        if str1.endswith(f", {word}"):
+            str1 = f"{word} {str1[: -len(word) - 2]}"
+        if str2.endswith(f", {word}"):
+            str2 = f"{word} {str2[: -len(word) - 2]}"

     # Perform a couple of basic normalizing substitutions.
     for pat, repl in SD_REPLACE:
@@ -230,7 +230,7 @@ class Distance:
         """Adds all the distance penalties from `dist`."""
         if not isinstance(dist, Distance):
             raise ValueError(
-                "`dist` must be a Distance object, not {}".format(type(dist))
+                f"`dist` must be a Distance object, not {type(dist)}"
             )
         for key, penalties in dist._penalties.items():
             self._penalties.setdefault(key, []).extend(penalties)
@@ -444,7 +444,7 @@ def distance(
     # Preferred media options.
     media_patterns: Sequence[str] = preferred_config["media"].as_str_seq()
     options = [
-        re.compile(r"(\d+x)?(%s)" % pat, re.I) for pat in media_patterns
+        re.compile(rf"(\d+x)?({pat})", re.I) for pat in media_patterns
     ]
     if options:
         dist.add_priority("media", album_info.media, options)
@@ -118,7 +118,7 @@ def match_by_id(items: Iterable[Item]) -> AlbumInfo | None:
         log.debug("No album ID consensus.")
         return None
     # If all album IDs are equal, look up the album.
-    log.debug("Searching for discovered album ID: {0}", first)
+    log.debug("Searching for discovered album ID: {}", first)
     return metadata_plugins.album_for_id(first)

@@ -197,9 +197,7 @@ def _add_candidate(
     checking the track count, ordering the items, checking for
     duplicates, and calculating the distance.
     """
-    log.debug(
-        "Candidate: {0} - {1} ({2})", info.artist, info.album, info.album_id
-    )
+    log.debug("Candidate: {0.artist} - {0.album} ({0.album_id})", info)

     # Discard albums with zero tracks.
     if not info.tracks:
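Several rewrites in this diff lean on str.format's attribute lookup inside a replacement field, which lets a single positional argument feed several placeholders. A standalone illustration (names hypothetical):

    class Info:
        artist, album, album_id = "Artist", "Album", "mbid-123"

    "Candidate: {0.artist} - {0.album} ({0.album_id})".format(Info())
    # -> 'Candidate: Artist - Album (mbid-123)'

    # With empty field names, each {.attr} consumes the next argument in turn:
    "{.real} + {.imag}j".format(2 + 3j, 2 + 3j)  # -> '2.0 + 3.0j'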
@@ -215,7 +213,7 @@ def _add_candidate(
     required_tags: Sequence[str] = config["match"]["required"].as_str_seq()
     for req_tag in required_tags:
         if getattr(info, req_tag) is None:
-            log.debug("Ignored. Missing required tag: {0}", req_tag)
+            log.debug("Ignored. Missing required tag: {}", req_tag)
             return

     # Find mapping between the items and the track info.
@@ -229,10 +227,10 @@ def _add_candidate(
     ignored_tags: Sequence[str] = config["match"]["ignored"].as_str_seq()
     for penalty in ignored_tags:
         if penalty in penalties:
-            log.debug("Ignored. Penalty: {0}", penalty)
+            log.debug("Ignored. Penalty: {}", penalty)
             return

-    log.debug("Success. Distance: {0}", dist)
+    log.debug("Success. Distance: {}", dist)
     results[info.album_id] = hooks.AlbumMatch(
         dist, info, mapping, extra_items, extra_tracks
     )
@@ -265,7 +263,7 @@ def tag_album(
     likelies, consensus = get_most_common_tags(items)
     cur_artist: str = likelies["artist"]
     cur_album: str = likelies["album"]
-    log.debug("Tagging {0} - {1}", cur_artist, cur_album)
+    log.debug("Tagging {} - {}", cur_artist, cur_album)

     # The output result, keys are the MB album ID.
     candidates: dict[Any, AlbumMatch] = {}
@@ -273,7 +271,7 @@ def tag_album(
     # Search by explicit ID.
     if search_ids:
         for search_id in search_ids:
-            log.debug("Searching for album ID: {0}", search_id)
+            log.debug("Searching for album ID: {}", search_id)
             if info := metadata_plugins.album_for_id(search_id):
                 _add_candidate(items, candidates, info)

@@ -283,7 +281,7 @@ def tag_album(
     if info := match_by_id(items):
         _add_candidate(items, candidates, info)
         rec = _recommendation(list(candidates.values()))
-        log.debug("Album ID match recommendation is {0}", rec)
+        log.debug("Album ID match recommendation is {}", rec)
         if candidates and not config["import"]["timid"]:
             # If we have a very good MBID match, return immediately.
             # Otherwise, this match will compete against metadata-based
@@ -300,7 +298,7 @@ def tag_album(
     if not (search_artist and search_album):
         # No explicit search terms -- use current metadata.
         search_artist, search_album = cur_artist, cur_album
-    log.debug("Search terms: {0} - {1}", search_artist, search_album)
+    log.debug("Search terms: {} - {}", search_artist, search_album)

     # Is this album likely to be a "various artist" release?
     va_likely = (
@@ -308,7 +306,7 @@ def tag_album(
         or (search_artist.lower() in VA_ARTISTS)
         or any(item.comp for item in items)
     )
-    log.debug("Album might be VA: {0}", va_likely)
+    log.debug("Album might be VA: {}", va_likely)

     # Get the results from the data sources.
     for matched_candidate in metadata_plugins.candidates(
@@ -316,7 +314,7 @@ def tag_album(
     ):
         _add_candidate(items, candidates, matched_candidate)

-    log.debug("Evaluating {0} candidates.", len(candidates))
+    log.debug("Evaluating {} candidates.", len(candidates))
     # Sort and get the recommendation.
     candidates_sorted = _sort_candidates(candidates.values())
     rec = _recommendation(candidates_sorted)
@@ -345,7 +343,7 @@ def tag_item(
     trackids = search_ids or [t for t in [item.mb_trackid] if t]
     if trackids:
         for trackid in trackids:
-            log.debug("Searching for track ID: {0}", trackid)
+            log.debug("Searching for track ID: {}", trackid)
             if info := metadata_plugins.track_for_id(trackid):
                 dist = track_distance(item, info, incl_artist=True)
                 candidates[info.track_id] = hooks.TrackMatch(dist, info)
@@ -369,7 +367,7 @@ def tag_item(
     # Search terms.
     search_artist = search_artist or item.artist
     search_title = search_title or item.title
-    log.debug("Item search terms: {0} - {1}", search_artist, search_title)
+    log.debug("Item search terms: {} - {}", search_artist, search_title)

     # Get and evaluate candidate metadata.
     for track_info in metadata_plugins.item_candidates(
@@ -379,7 +377,7 @@ def tag_item(
         candidates[track_info.track_id] = hooks.TrackMatch(dist, track_info)

     # Sort by distance and return with recommendation.
-    log.debug("Found {0} candidates.", len(candidates))
+    log.debug("Found {} candidates.", len(candidates))
     candidates_sorted = _sort_candidates(candidates.values())
     rec = _recommendation(candidates_sorted)
     return Proposal(candidates_sorted, rec)
@@ -390,9 +390,9 @@ class Model(ABC, Generic[D]):
         return obj

     def __repr__(self) -> str:
-        return "{}({})".format(
-            type(self).__name__,
-            ", ".join(f"{k}={v!r}" for k, v in dict(self).items()),
+        return (
+            f"{type(self).__name__}"
+            f"({', '.join(f'{k}={v!r}' for k, v in dict(self).items())})"
         )

     def clear_dirty(self):
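The __repr__ rewrite above nests one f-string inside another; the inner and outer literals only need different quote characters. A standalone illustration:

    d = {"title": "Foo", "track": 1}
    f"Item({', '.join(f'{k}={v!r}' for k, v in d.items())})"
    # -> "Item(title='Foo', track=1)"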
@@ -409,9 +409,9 @@ class Model(ABC, Generic[D]):
         exception is raised otherwise.
         """
         if not self._db:
-            raise ValueError("{} has no database".format(type(self).__name__))
+            raise ValueError(f"{type(self).__name__} has no database")
         if need_id and not self.id:
-            raise ValueError("{} has no id".format(type(self).__name__))
+            raise ValueError(f"{type(self).__name__} has no id")

         return self._db
@@ -588,16 +588,14 @@ class Model(ABC, Generic[D]):
         for key in fields:
             if key != "id" and key in self._dirty:
                 self._dirty.remove(key)
-                assignments.append(key + "=?")
+                assignments.append(f"{key}=?")
                 value = self._type(key).to_sql(self[key])
                 subvars.append(value)

         with db.transaction() as tx:
             # Main table update.
             if assignments:
-                query = "UPDATE {} SET {} WHERE id=?".format(
-                    self._table, ",".join(assignments)
-                )
+                query = f"UPDATE {self._table} SET {','.join(assignments)} WHERE id=?"
                 subvars.append(self.id)
                 tx.mutate(query, subvars)
@@ -607,9 +605,9 @@ class Model(ABC, Generic[D]):
                     self._dirty.remove(key)
                     value = self._type(key).to_sql(value)
                     tx.mutate(
-                        "INSERT INTO {} "
+                        f"INSERT INTO {self._flex_table} "
                         "(entity_id, key, value) "
-                        "VALUES (?, ?, ?);".format(self._flex_table),
+                        "VALUES (?, ?, ?);",
                         (self.id, key, value),
                     )
@@ -1160,7 +1158,7 @@ class Database:
         """
         # Get current schema.
         with self.transaction() as tx:
-            rows = tx.query("PRAGMA table_info(%s)" % table)
+            rows = tx.query(f"PRAGMA table_info({table})")
             current_fields = {row[1] for row in rows}

         field_names = set(fields.keys())
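Note that these SQL rewrites only interpolate identifiers (table and column names) into the statement text; values still travel as bound `?` parameters, so the f-string conversion does not change quoting or injection behavior. A sketch in the same style (names assumed):

    table, field = "items", "albumartist"  # identifiers: interpolated
    query = f"SELECT id FROM {table} WHERE {field} = ?"
    rows = tx.query(query, ("Some Artist",))  # value: bound separately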
@@ -1173,9 +1171,7 @@ class Database:
             columns = []
             for name, typ in fields.items():
                 columns.append(f"{name} {typ.sql}")
-            setup_sql = "CREATE TABLE {} ({});\n".format(
-                table, ", ".join(columns)
-            )
+            setup_sql = f"CREATE TABLE {table} ({', '.join(columns)});\n"

         else:
             # Table exists does not match the field set.
@@ -1183,8 +1179,8 @@ class Database:
             for name, typ in fields.items():
                 if name in current_fields:
                     continue
-                setup_sql += "ALTER TABLE {} ADD COLUMN {} {};\n".format(
-                    table, name, typ.sql
+                setup_sql += (
+                    f"ALTER TABLE {table} ADD COLUMN {name} {typ.sql};\n"
                 )

         with self.transaction() as tx:
@@ -1195,18 +1191,16 @@ class Database:
         for the given entity (if they don't exist).
         """
         with self.transaction() as tx:
-            tx.script(
-                """
-                CREATE TABLE IF NOT EXISTS {0} (
+            tx.script(f"""
+                CREATE TABLE IF NOT EXISTS {flex_table} (
                     id INTEGER PRIMARY KEY,
                     entity_id INTEGER,
                     key TEXT,
                     value TEXT,
                     UNIQUE(entity_id, key) ON CONFLICT REPLACE);
-                CREATE INDEX IF NOT EXISTS {0}_by_entity
-                ON {0} (entity_id);
-                """.format(flex_table)
-            )
+                CREATE INDEX IF NOT EXISTS {flex_table}_by_entity
+                ON {flex_table} (entity_id);
+                """)

     # Querying.
@@ -190,7 +190,7 @@ class MatchQuery(FieldQuery[AnySQLiteType]):
     """A query that looks for exact matches in an Model field."""

     def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
-        return self.field + " = ?", [self.pattern]
+        return f"{self.field} = ?", [self.pattern]

     @classmethod
     def value_match(cls, pattern: AnySQLiteType, value: Any) -> bool:
@@ -204,7 +204,7 @@ class NoneQuery(FieldQuery[None]):
         super().__init__(field, None, fast)

     def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
-        return self.field + " IS NULL", ()
+        return f"{self.field} IS NULL", ()

     def match(self, obj: Model) -> bool:
         return obj.get(self.field_name) is None
@@ -246,7 +246,7 @@ class StringQuery(StringFieldQuery[str]):
             .replace("%", "\\%")
             .replace("_", "\\_")
         )
-        clause = self.field + " like ? escape '\\'"
+        clause = f"{self.field} like ? escape '\\'"
         subvals = [search]
         return clause, subvals
@@ -264,8 +264,8 @@ class SubstringQuery(StringFieldQuery[str]):
             .replace("%", "\\%")
             .replace("_", "\\_")
         )
-        search = "%" + pattern + "%"
-        clause = self.field + " like ? escape '\\'"
+        search = f"%{pattern}%"
+        clause = f"{self.field} like ? escape '\\'"
         subvals = [search]
         return clause, subvals
@@ -471,11 +471,11 @@ class NumericQuery(FieldQuery[str]):

     def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
         if self.point is not None:
-            return self.field + "=?", (self.point,)
+            return f"{self.field}=?", (self.point,)
         else:
             if self.rangemin is not None and self.rangemax is not None:
                 return (
-                    "{0} >= ? AND {0} <= ?".format(self.field),
+                    f"{self.field} >= ? AND {self.field} <= ?",
                     (self.rangemin, self.rangemax),
                 )
             elif self.rangemin is not None:
@@ -549,9 +549,9 @@ class CollectionQuery(Query):
             if not subq_clause:
                 # Fall back to slow query.
                 return None, ()
-            clause_parts.append("(" + subq_clause + ")")
+            clause_parts.append(f"({subq_clause})")
             subvals += subq_subvals
-        clause = (" " + joiner + " ").join(clause_parts)
+        clause = f" {joiner} ".join(clause_parts)
         return clause, subvals

     def __repr__(self) -> str:
@@ -690,9 +690,7 @@ class Period:
         ("%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"),  # second
     )
     relative_units = {"y": 365, "m": 30, "w": 7, "d": 1}
-    relative_re = (
-        "(?P<sign>[+|-]?)(?P<quantity>[0-9]+)" + "(?P<timespan>[y|m|w|d])"
-    )
+    relative_re = "(?P<sign>[+|-]?)(?P<quantity>[0-9]+)(?P<timespan>[y|m|w|d])"

     def __init__(self, date: datetime, precision: str):
         """Create a period with the given date (a `datetime` object) and
@@ -800,9 +798,7 @@ class DateInterval:

     def __init__(self, start: datetime | None, end: datetime | None):
         if start is not None and end is not None and not start < end:
-            raise ValueError(
-                "start date {} is not before end date {}".format(start, end)
-            )
+            raise ValueError(f"start date {start} is not before end date {end}")
         self.start = start
         self.end = end
@@ -850,8 +846,6 @@ class DateQuery(FieldQuery[str]):
         date = datetime.fromtimestamp(timestamp)
         return self.interval.contains(date)

-    _clause_tmpl = "{0} {1} ?"
-
     def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
         clause_parts = []
         subvals = []
@@ -859,11 +853,11 @@ class DateQuery(FieldQuery[str]):
         # Convert the `datetime` objects to an integer number of seconds since
         # the (local) Unix epoch using `datetime.timestamp()`.
         if self.interval.start:
-            clause_parts.append(self._clause_tmpl.format(self.field, ">="))
+            clause_parts.append(f"{self.field} >= ?")
             subvals.append(int(self.interval.start.timestamp()))

         if self.interval.end:
-            clause_parts.append(self._clause_tmpl.format(self.field, "<"))
+            clause_parts.append(f"{self.field} < ?")
             subvals.append(int(self.interval.end.timestamp()))

         if clause_parts:
@@ -1074,9 +1068,9 @@ class FixedFieldSort(FieldSort):
         if self.case_insensitive:
             field = (
                 "(CASE "
-                "WHEN TYPEOF({0})='text' THEN LOWER({0}) "
-                "WHEN TYPEOF({0})='blob' THEN LOWER({0}) "
-                "ELSE {0} END)".format(self.field)
+                f"WHEN TYPEOF({self.field})='text' THEN LOWER({self.field}) "
+                f"WHEN TYPEOF({self.field})='blob' THEN LOWER({self.field}) "
+                f"ELSE {self.field} END)"
             )
         else:
             field = self.field
@@ -194,7 +194,7 @@ class BasePaddedInt(BaseInteger[N]):
         self.digits = digits

     def format(self, value: int | N) -> str:
-        return "{0:0{1}d}".format(value or 0, self.digits)
+        return f"{value or 0:0{self.digits}d}"


 class PaddedInt(BasePaddedInt[int]):
@@ -219,7 +219,7 @@ class ScaledInt(Integer):
         self.suffix = suffix

     def format(self, value: int) -> str:
-        return "{}{}".format((value or 0) // self.unit, self.suffix)
+        return f"{(value or 0) // self.unit}{self.suffix}"


 class Id(NullInteger):
@@ -249,7 +249,7 @@ class BaseFloat(Type[float, N]):
         self.digits = digits

     def format(self, value: float | N) -> str:
-        return "{0:.{1}f}".format(value or 0, self.digits)
+        return f"{value or 0:.{self.digits}f}"


 class Float(BaseFloat[float]):
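The three format() rewrites above rely on format specs that themselves contain replacement fields, which f-strings support directly:

    digits, value = 5, 42
    f"{value:0{digits}d}"  # -> '00042' (zero-padded to a dynamic width)
    f"{value:.{digits}f}"  # -> '42.00000' (dynamic precision)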
@@ -150,7 +150,7 @@ class ImportSession:
         """Log a message about a given album to the importer log. The status
         should reflect the reason the album couldn't be tagged.
         """
-        self.logger.info("{0} {1}", status, displayable_path(paths))
+        self.logger.info("{} {}", status, displayable_path(paths))

     def log_choice(self, task: ImportTask, duplicate=False):
         """Logs the task's current choice if it should be logged. If
@@ -187,7 +187,7 @@ class ImportSession:

     def run(self):
         """Run the import task."""
-        self.logger.info("import started {0}", time.asctime())
+        self.logger.info("import started {}", time.asctime())
         self.set_config(config["import"])

         # Set up the pipeline.
@@ -297,7 +297,7 @@ class ImportSession:
         # Either accept immediately or prompt for input to decide.
         if self.want_resume is True or self.should_resume(toppath):
             log.warning(
-                "Resuming interrupted import of {0}",
+                "Resuming interrupted import of {}",
                 util.displayable_path(toppath),
             )
             self._is_resuming[toppath] = True
@@ -58,11 +58,11 @@ def read_tasks(session: ImportSession):
         skipped += task_factory.skipped

         if not task_factory.imported:
-            log.warning("No files imported from {0}", displayable_path(toppath))
+            log.warning("No files imported from {}", displayable_path(toppath))

     # Show skipped directories (due to incremental/resume).
     if skipped:
-        log.info("Skipped {0} paths.", skipped)
+        log.info("Skipped {} paths.", skipped)


 def query_tasks(session: ImportSession):
@@ -82,10 +82,7 @@ def query_tasks(session: ImportSession):
     # Search for albums.
     for album in session.lib.albums(session.query):
         log.debug(
-            "yielding album {0}: {1} - {2}",
-            album.id,
-            album.albumartist,
-            album.album,
+            "yielding album {0.id}: {0.albumartist} - {0.album}", album
         )
         items = list(album.items())
         _freshen_items(items)
@@ -140,7 +137,7 @@ def lookup_candidates(session: ImportSession, task: ImportTask):
         return

     plugins.send("import_task_start", session=session, task=task)
-    log.debug("Looking up: {0}", displayable_path(task.paths))
+    log.debug("Looking up: {}", displayable_path(task.paths))

     # Restrict the initial lookup to IDs specified by the user via the -m
     # option. Currently all the IDs are passed onto the tasks directly.
@@ -259,11 +256,11 @@ def plugin_stage(
 def log_files(session: ImportSession, task: ImportTask):
     """A coroutine (pipeline stage) to log each file to be imported."""
     if isinstance(task, SingletonImportTask):
-        log.info("Singleton: {0}", displayable_path(task.item["path"]))
+        log.info("Singleton: {}", displayable_path(task.item["path"]))
     elif task.items:
-        log.info("Album: {0}", displayable_path(task.paths[0]))
+        log.info("Album: {}", displayable_path(task.paths[0]))
         for item in task.items:
-            log.info(" {0}", displayable_path(item["path"]))
+            log.info(" {}", displayable_path(item["path"]))


 # --------------------------------- Consumer --------------------------------- #
@@ -341,9 +338,7 @@ def _resolve_duplicates(session: ImportSession, task: ImportTask):
     if task.choice_flag in (Action.ASIS, Action.APPLY, Action.RETAG):
         found_duplicates = task.find_duplicates(session.lib)
         if found_duplicates:
-            log.debug(
-                "found duplicates: {}".format([o.id for o in found_duplicates])
-            )
+            log.debug("found duplicates: {}", [o.id for o in found_duplicates])

             # Get the default action to follow from config.
             duplicate_action = config["import"]["duplicate_action"].as_choice(
@@ -355,7 +350,7 @@ def _resolve_duplicates(session: ImportSession, task: ImportTask):
                     "ask": "a",
                 }
             )
-            log.debug("default action for duplicates: {0}", duplicate_action)
+            log.debug("default action for duplicates: {}", duplicate_action)

             if duplicate_action == "s":
                 # Skip new.
@@ -87,7 +87,7 @@ class ImportState:
             # unpickling, including ImportError. We use a catch-all
             # exception to avoid enumerating them all (the docs don't even have a
             # full list!).
-            log.debug("state file could not be read: {0}", exc)
+            log.debug("state file could not be read: {}", exc)

     def _save(self):
         try:
@@ -100,7 +100,7 @@ class ImportState:
                 f,
             )
         except OSError as exc:
-            log.error("state file could not be written: {0}", exc)
+            log.error("state file could not be written: {}", exc)

 # -------------------------------- Tagprogress ------------------------------- #
@@ -267,13 +267,11 @@ class ImportTask(BaseImportTask):

     def remove_duplicates(self, lib: library.Library):
         duplicate_items = self.duplicate_items(lib)
-        log.debug("removing {0} old duplicated items", len(duplicate_items))
+        log.debug("removing {} old duplicated items", len(duplicate_items))
         for item in duplicate_items:
             item.remove()
             if lib.directory in util.ancestry(item.path):
-                log.debug(
-                    "deleting duplicate {0}", util.displayable_path(item.path)
-                )
+                log.debug("deleting duplicate {.filepath}", item)
                 util.remove(item.path)
                 util.prune_dirs(os.path.dirname(item.path), lib.directory)
@@ -285,10 +283,10 @@ class ImportTask(BaseImportTask):
         for field, view in config["import"]["set_fields"].items():
             value = str(view.get())
             log.debug(
-                "Set field {1}={2} for {0}",
-                util.displayable_path(self.paths),
+                "Set field {}={} for {}",
                 field,
                 value,
+                util.displayable_path(self.paths),
             )
             self.album.set_parse(field, format(self.album, value))
         for item in items:
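Because auto-numbered `{}` fields fill in argument order, dropping the explicit `{1}={2} for {0}` indices also forces the argument list to be reordered, which is why `util.displayable_path(self.paths)` moves from first to last here:

    "{}={} for {}".format("genre", "jazz", "/music")  # arguments in display order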
@@ -554,12 +552,11 @@ class ImportTask(BaseImportTask):
         ]
         if overwritten_fields:
             log.debug(
-                "Reimported {} {}. Not preserving flexible attributes {}. "
-                "Path: {}",
+                "Reimported {0} {1.id}. Not preserving flexible attributes {2}. "
+                "Path: {1.filepath}",
                 noun,
-                new_obj.id,
+                new_obj,
                 overwritten_fields,
-                util.displayable_path(new_obj.path),
             )
             for key in overwritten_fields:
                 del existing_fields[key]
@@ -578,17 +575,15 @@ class ImportTask(BaseImportTask):
             self.album.artpath = replaced_album.artpath
             self.album.store()
             log.debug(
-                "Reimported album {}. Preserving attribute ['added']. "
-                "Path: {}",
-                self.album.id,
-                util.displayable_path(self.album.path),
+                "Reimported album {0.album.id}. Preserving attribute ['added']. "
+                "Path: {0.album.filepath}",
+                self,
             )
         log.debug(
-            "Reimported album {}. Preserving flexible attributes {}. "
-            "Path: {}",
-            self.album.id,
+            "Reimported album {0.album.id}. Preserving flexible"
+            " attributes {1}. Path: {0.album.filepath}",
+            self,
             list(album_fields.keys()),
-            util.displayable_path(self.album.path),
         )

         for item in self.imported_items():
@@ -597,21 +592,19 @@ class ImportTask(BaseImportTask):
             if dup_item.added and dup_item.added != item.added:
                 item.added = dup_item.added
                 log.debug(
-                    "Reimported item {}. Preserving attribute ['added']. "
-                    "Path: {}",
-                    item.id,
-                    util.displayable_path(item.path),
+                    "Reimported item {0.id}. Preserving attribute ['added']. "
+                    "Path: {0.filepath}",
+                    item,
                 )
             item_fields = _reduce_and_log(
                 item, dup_item._values_flex, REIMPORT_FRESH_FIELDS_ITEM
             )
             item.update(item_fields)
             log.debug(
-                "Reimported item {}. Preserving flexible attributes {}. "
-                "Path: {}",
-                item.id,
+                "Reimported item {0.id}. Preserving flexible attributes {1}. "
+                "Path: {0.filepath}",
+                item,
                 list(item_fields.keys()),
-                util.displayable_path(item.path),
             )
             item.store()
@@ -621,14 +614,10 @@ class ImportTask(BaseImportTask):
         """
         for item in self.imported_items():
             for dup_item in self.replaced_items[item]:
-                log.debug(
-                    "Replacing item {0}: {1}",
-                    dup_item.id,
-                    util.displayable_path(item.path),
-                )
+                log.debug("Replacing item {.id}: {.filepath}", dup_item, item)
                 dup_item.remove()
         log.debug(
-            "{0} of {1} items replaced",
+            "{} of {} items replaced",
             sum(bool(v) for v in self.replaced_items.values()),
             len(self.imported_items()),
         )
@@ -747,10 +736,10 @@ class SingletonImportTask(ImportTask):
         for field, view in config["import"]["set_fields"].items():
             value = str(view.get())
             log.debug(
-                "Set field {1}={2} for {0}",
-                util.displayable_path(self.paths),
+                "Set field {}={} for {}",
                 field,
                 value,
+                util.displayable_path(self.paths),
             )
             self.item.set_parse(field, format(self.item, value))
             self.item.store()
@@ -870,7 +859,7 @@ class ArchiveImportTask(SentinelImportTask):
         """Removes the temporary directory the archive was extracted to."""
         if self.extracted and self.toppath:
             log.debug(
-                "Removing extracted directory: {0}",
+                "Removing extracted directory: {}",
                 util.displayable_path(self.toppath),
             )
             shutil.rmtree(util.syspath(self.toppath))
@@ -1002,7 +991,7 @@ class ImportTaskFactory:
         """Return a `SingletonImportTask` for the music file."""
         if self.session.already_imported(self.toppath, [path]):
             log.debug(
-                "Skipping previously-imported path: {0}",
+                "Skipping previously-imported path: {}",
                 util.displayable_path(path),
             )
             self.skipped += 1
@@ -1026,7 +1015,7 @@ class ImportTaskFactory:

         if self.session.already_imported(self.toppath, dirs):
             log.debug(
-                "Skipping previously-imported path: {0}",
+                "Skipping previously-imported path: {}",
                 util.displayable_path(dirs),
            )
             self.skipped += 1
@@ -1063,19 +1052,17 @@ class ImportTaskFactory:
             )
             return

-        log.debug(
-            "Extracting archive: {0}", util.displayable_path(self.toppath)
-        )
+        log.debug("Extracting archive: {}", util.displayable_path(self.toppath))
         archive_task = ArchiveImportTask(self.toppath)
         try:
             archive_task.extract()
         except Exception as exc:
-            log.error("extraction failed: {0}", exc)
+            log.error("extraction failed: {}", exc)
             return

         # Now read albums from the extracted directory.
         self.toppath = archive_task.toppath
-        log.debug("Archive extracted to: {0}", self.toppath)
+        log.debug("Archive extracted to: {.toppath}", self)
         return archive_task

     def read_item(self, path: util.PathBytes):
@@ -1091,10 +1078,10 @@ class ImportTaskFactory:
             # Silently ignore non-music files.
             pass
         elif isinstance(exc.reason, mediafile.UnreadableFileError):
-            log.warning("unreadable file: {0}", util.displayable_path(path))
+            log.warning("unreadable file: {}", util.displayable_path(path))
         else:
             log.error(
-                "error reading {0}: {1}", util.displayable_path(path), exc
+                "error reading {}: {}", util.displayable_path(path), exc
             )
@@ -28,11 +28,11 @@ class ReadError(FileOperationError):
     """An error while reading a file (i.e. in `Item.read`)."""

     def __str__(self):
-        return "error reading " + str(super())
+        return f"error reading {super()}"


 class WriteError(FileOperationError):
     """An error while writing a file (i.e. in `Item.write`)."""

     def __str__(self):
-        return "error writing " + str(super())
+        return f"error writing {super()}"
@@ -425,7 +425,7 @@ class Album(LibModel):

         new_art = util.unique_path(new_art)
         log.debug(
-            "moving album art {0} to {1}",
+            "moving album art {} to {}",
             util.displayable_path(old_art),
             util.displayable_path(new_art),
         )
@@ -482,7 +482,7 @@ class Album(LibModel):
         """
         item = self.items().get()
         if not item:
-            raise ValueError("empty album for album id %d" % self.id)
+            raise ValueError(f"empty album for album id {self.id}")
         return os.path.dirname(item.path)

     def _albumtotal(self):
@@ -844,12 +844,9 @@ class Item(LibModel):
         # This must not use `with_album=True`, because that might access
         # the database. When debugging, that is not guaranteed to succeed, and
         # can even deadlock due to the database lock.
-        return "{}({})".format(
-            type(self).__name__,
-            ", ".join(
-                "{}={!r}".format(k, self[k])
-                for k in self.keys(with_album=False)
-            ),
+        return (
+            f"{type(self).__name__}"
+            f"({', '.join(f'{k}={self[k]!r}' for k in self.keys(with_album=False))})"
         )

     def keys(self, computed=False, with_album=True):
@@ -995,7 +992,7 @@ class Item(LibModel):
             self.write(*args, **kwargs)
             return True
         except FileOperationError as exc:
-            log.error("{0}", exc)
+            log.error("{}", exc)
             return False

     def try_sync(self, write, move, with_album=True):
@@ -1015,10 +1012,7 @@ class Item(LibModel):
         if move:
             # Check whether this file is inside the library directory.
             if self._db and self._db.directory in util.ancestry(self.path):
-                log.debug(
-                    "moving {0} to synchronize path",
-                    util.displayable_path(self.path),
-                )
+                log.debug("moving {.filepath} to synchronize path", self)
                 self.move(with_album=with_album)
         self.store()
@@ -1090,7 +1084,7 @@ class Item(LibModel):
         try:
             return os.path.getsize(syspath(self.path))
         except (OSError, Exception) as exc:
-            log.warning("could not get filesize: {0}", exc)
+            log.warning("could not get filesize: {}", exc)
             return 0

     # Model methods.
@@ -130,9 +130,9 @@ class PluginLogFilter(logging.Filter):
     def filter(self, record):
         if hasattr(record.msg, "msg") and isinstance(record.msg.msg, str):
             # A _LogMessage from our hacked-up Logging replacement.
-            record.msg.msg = self.prefix + record.msg.msg
+            record.msg.msg = f"{self.prefix}{record.msg.msg}"
         elif isinstance(record.msg, str):
-            record.msg = self.prefix + record.msg
+            record.msg = f"{self.prefix}{record.msg}"
         return True
@@ -424,9 +424,9 @@ def types(model_cls: type[AnyModel]) -> dict[str, Type]:
     for field in plugin_types:
         if field in types and plugin_types[field] != types[field]:
             raise PluginConflictError(
-                "Plugin {} defines flexible field {} "
+                f"Plugin {plugin.name} defines flexible field {field} "
                 "which has already been defined with "
-                "another type.".format(plugin.name, field)
+                "another type."
             )
     types.update(plugin_types)
     return types
@@ -543,7 +543,7 @@ def send(event: EventType, **arguments: Any) -> list[Any]:

     Return a list of non-None values returned from the handlers.
     """
-    log.debug("Sending event: {0}", event)
+    log.debug("Sending event: {}", event)
     return [
         r
         for handler in BeetsPlugin.listeners[event]
@@ -560,8 +560,8 @@ def feat_tokens(for_artist: bool = True) -> str:
     feat_words = ["ft", "featuring", "feat", "feat.", "ft."]
     if for_artist:
         feat_words += ["with", "vs", "and", "con", "&"]
-    return r"(?<=[\s(\[])(?:{})(?=\s)".format(
-        "|".join(re.escape(x) for x in feat_words)
+    return (
+        rf"(?<=[\s(\[])(?:{'|'.join(re.escape(x) for x in feat_words)})(?=\s)"
     )
@@ -153,7 +153,7 @@ class DummyIn:
         self.out = out

     def add(self, s):
-        self.buf.append(s + "\n")
+        self.buf.append(f"{s}\n")

     def close(self):
         pass
@@ -267,7 +267,7 @@ class TestHelper(ConfigMixin):
         The item is attached to the database from `self.lib`.
         """
         values_ = {
-            "title": "t\u00eftle {0}",
+            "title": "t\u00eftle {}",
             "artist": "the \u00e4rtist",
             "album": "the \u00e4lbum",
             "track": 1,
@@ -278,7 +278,7 @@ class TestHelper(ConfigMixin):
         values_["db"] = self.lib
         item = Item(**values_)
         if "path" not in values:
-            item["path"] = "audio." + item["format"].lower()
+            item["path"] = f"audio.{item['format'].lower()}"
         # mtime needs to be set last since other assignments reset it.
         item.mtime = 12345
         return item
@@ -310,7 +310,7 @@ class TestHelper(ConfigMixin):
         item = self.create_item(**values)
         extension = item["format"].lower()
         item["path"] = os.path.join(
-            _common.RSRC, util.bytestring_path("min." + extension)
+            _common.RSRC, util.bytestring_path(f"min.{extension}")
         )
         item.add(self.lib)
         item.move(operation=MoveOperation.COPY)
@@ -325,7 +325,7 @@ class TestHelper(ConfigMixin):
         """Add a number of items with files to the database."""
         # TODO base this on `add_item()`
         items = []
-        path = os.path.join(_common.RSRC, util.bytestring_path("full." + ext))
+        path = os.path.join(_common.RSRC, util.bytestring_path(f"full.{ext}"))
         for i in range(count):
             item = Item.from_path(path)
             item.album = f"\u00e4lbum {i}"  # Check unicode paths
@@ -372,7 +372,7 @@ class TestHelper(ConfigMixin):
         specified extension a cover art image is added to the media
         file.
         """
-        src = os.path.join(_common.RSRC, util.bytestring_path("full." + ext))
+        src = os.path.join(_common.RSRC, util.bytestring_path(f"full.{ext}"))
         handle, path = mkstemp(dir=self.temp_dir)
         path = bytestring_path(path)
         os.close(handle)
@@ -570,7 +570,7 @@ class ImportHelper(TestHelper):
         medium = MediaFile(track_path)
         medium.update(
             {
-                "album": "Tag Album" + (f" {album_id}" if album_id else ""),
+                "album": f"Tag Album{f' {album_id}' if album_id else ''}",
                 "albumartist": None,
                 "mb_albumid": None,
                 "comp": None,
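The "album" rewrite above nests a whole f-string inside a conditional expression, a compact way to make a fragment optional:

    album_id = 3
    f"Tag Album{f' {album_id}' if album_id else ''}"  # -> 'Tag Album 3'
    album_id = None
    f"Tag Album{f' {album_id}' if album_id else ''}"  # -> 'Tag Album'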
@@ -831,23 +831,21 @@ class AutotagStub:

     def _make_track_match(self, artist, album, number):
         return TrackInfo(
-            title="Applied Track %d" % number,
-            track_id="match %d" % number,
+            title=f"Applied Track {number}",
+            track_id=f"match {number}",
             artist=artist,
             length=1,
             index=0,
         )

     def _make_album_match(self, artist, album, tracks, distance=0, missing=0):
-        if distance:
-            id = " " + "M" * distance
-        else:
-            id = ""
+        id = f" {'M' * distance}" if distance else ""

         if artist is None:
             artist = "Various Artists"
         else:
-            artist = artist.replace("Tag", "Applied") + id
-        album = album.replace("Tag", "Applied") + id
+            artist = f"{artist.replace('Tag', 'Applied')}{id}"
+        album = f"{album.replace('Tag', 'Applied')}{id}"

         track_infos = []
         for i in range(tracks - missing):
@@ -858,8 +856,8 @@ class AutotagStub:
             album=album,
             tracks=track_infos,
             va=False,
-            album_id="albumid" + id,
-            artist_id="artistid" + id,
+            album_id=f"albumid{id}",
+            artist_id=f"artistid{id}",
             albumtype="soundtrack",
             data_source="match_source",
             bandcamp_album_id="bc_url",
@@ -885,7 +883,7 @@ class FetchImageHelper:
         super().run(*args, **kwargs)

     IMAGEHEADER: dict[str, bytes] = {
-        "image/jpeg": b"\xff\xd8\xff" + b"\x00" * 3 + b"JFIF",
+        "image/jpeg": b"\xff\xd8\xff\x00\x00\x00JFIF",
         "image/png": b"\211PNG\r\n\032\n",
         "image/gif": b"GIF89a",
         # dummy type that is definitely not a valid image content type
@@ -125,7 +125,7 @@ def print_(*strings: str, end: str = "\n") -> None:
     The `end` keyword argument behaves similarly to the built-in `print`
     (it defaults to a newline).
     """
-    txt = " ".join(strings or ("",)) + end
+    txt = f"{' '.join(strings or ('',))}{end}"

     # Encode the string and write it to stdout.
     # On Python 3, sys.stdout expects text strings and uses the
@@ -269,7 +269,7 @@ def input_options(
             )
         ):
             # The first option is the default; mark it.
-            show_letter = "[%s]" % found_letter.upper()
+            show_letter = f"[{found_letter.upper()}]"
             is_default = True
         else:
             show_letter = found_letter.upper()
@@ -308,9 +308,9 @@ def input_options(
         if isinstance(default, int):
             default_name = str(default)
             default_name = colorize("action_default", default_name)
-            tmpl = "# selection (default %s)"
-            prompt_parts.append(tmpl % default_name)
-            prompt_part_lengths.append(len(tmpl % str(default)))
+            tmpl = "# selection (default {})"
+            prompt_parts.append(tmpl.format(default_name))
+            prompt_part_lengths.append(len(tmpl) - 2 + len(str(default)))
         else:
             prompt_parts.append("# selection")
             prompt_part_lengths.append(len(prompt_parts[-1]))
@@ -338,7 +338,7 @@ def input_options(

         if line_length != 0:
             # Not the beginning of the line; need a space.
-            part = " " + part
+            part = f" {part}"
             length += 1

         prompt += part
@@ -349,8 +349,8 @@ def input_options(
     if not fallback_prompt:
         fallback_prompt = "Enter one of "
         if numrange:
-            fallback_prompt += "%i-%i, " % numrange
-        fallback_prompt += ", ".join(display_letters) + ":"
+            fallback_prompt += "{}-{}, ".format(*numrange)
+        fallback_prompt += f"{', '.join(display_letters)}:"

     resp = input_(prompt)
     while True:
@@ -406,7 +406,7 @@ def input_select_objects(prompt, objs, rep, prompt_all=None):
     objects individually.
     """
     choice = input_options(
-        ("y", "n", "s"), False, "%s? (Yes/no/select)" % (prompt_all or prompt)
+        ("y", "n", "s"), False, f"{prompt_all or prompt}? (Yes/no/select)"
     )
     print()  # Blank line.
@@ -420,7 +420,7 @@ def input_select_objects(prompt, objs, rep, prompt_all=None):
             answer = input_options(
                 ("y", "n", "q"),
                 True,
-                "%s? (yes/no/quit)" % prompt,
+                f"{prompt}? (yes/no/quit)",
                 "Enter Y or N:",
             )
             if answer == "y":
@@ -494,7 +494,7 @@ ANSI_CODES = {
     "bg_cyan": 46,
     "bg_white": 47,
 }
-RESET_COLOR = COLOR_ESCAPE + "39;49;00m"
+RESET_COLOR = f"{COLOR_ESCAPE}39;49;00m"

 # These abstract COLOR_NAMES are lazily mapped on to the actual color in COLORS
 # as they are defined in the configuration files, see function: colorize
@@ -534,8 +534,8 @@ def _colorize(color, text):
     # over all "ANSI codes" in `color`.
     escape = ""
     for code in color:
-        escape = escape + COLOR_ESCAPE + "%im" % ANSI_CODES[code]
-    return escape + text + RESET_COLOR
+        escape = f"{escape}{COLOR_ESCAPE}{ANSI_CODES[code]}m"
+    return f"{escape}{text}{RESET_COLOR}"


 def colorize(color_name, text):
@@ -572,7 +572,7 @@ def colorize(color_name, text):
         # instead of the abstract color name ('text_error')
         color = COLORS.get(color_name)
         if not color:
-            log.debug("Invalid color_name: {0}", color_name)
+            log.debug("Invalid color_name: {}", color_name)
             color = color_name
         return _colorize(color, text)
     else:
@@ -621,8 +621,8 @@ def color_split(colored_text, index):
             split_index = index - (length - color_len(part))
             found_split = True
             if found_color_code:
-                pre_split += part[:split_index] + RESET_COLOR
-                post_split += found_color_code + part[split_index:]
+                pre_split += f"{part[:split_index]}{RESET_COLOR}"
+                post_split += f"{found_color_code}{part[split_index:]}"
             else:
                 pre_split += part[:split_index]
                 post_split += part[split_index:]
@@ -726,7 +726,7 @@ def get_replacements():
             replacements.append((re.compile(pattern), repl))
         except re.error:
             raise UserError(
-                "malformed regular expression in replace: {}".format(pattern)
+                f"malformed regular expression in replace: {pattern}"
             )
     return replacements
@@ -806,17 +806,17 @@ def split_into_lines(string, width_tuple):
             # Colorize each word with pre/post escapes
             # Reconstruct colored words
             words += [
-                m.group("esc") + raw_word + RESET_COLOR
+                f"{m['esc']}{raw_word}{RESET_COLOR}"
                 for raw_word in raw_words
             ]
         elif raw_words:
             # Pretext stops mid-word
             if m.group("esc") != RESET_COLOR:
                 # Add the rest of the current word, with a reset after it
-                words[-1] += m.group("esc") + raw_words[0] + RESET_COLOR
+                words[-1] += f"{m['esc']}{raw_words[0]}{RESET_COLOR}"
                 # Add the subsequent colored words:
                 words += [
-                    m.group("esc") + raw_word + RESET_COLOR
+                    f"{m['esc']}{raw_word}{RESET_COLOR}"
                     for raw_word in raw_words[1:]
                 ]
         else:
@@ -907,18 +907,12 @@ def print_column_layout(
     With subsequent lines (i.e. {lhs1}, {rhs1} onwards) being the
     rest of contents, wrapped if the width would be otherwise exceeded.
     """
-    if right["prefix"] + right["contents"] + right["suffix"] == "":
+    if f"{right['prefix']}{right['contents']}{right['suffix']}" == "":
         # No right hand information, so we don't need a separator.
         separator = ""
     first_line_no_wrap = (
-        indent_str
-        + left["prefix"]
-        + left["contents"]
-        + left["suffix"]
-        + separator
-        + right["prefix"]
-        + right["contents"]
-        + right["suffix"]
+        f"{indent_str}{left['prefix']}{left['contents']}{left['suffix']}"
+        f"{separator}{right['prefix']}{right['contents']}{right['suffix']}"
     )
     if color_len(first_line_no_wrap) < max_width:
         # Everything fits, print out line.
@@ -1044,18 +1038,12 @@ def print_newline_layout(
     If {lhs0} would go over the maximum width, the subsequent lines are
     indented a second time for ease of reading.
     """
-    if right["prefix"] + right["contents"] + right["suffix"] == "":
+    if f"{right['prefix']}{right['contents']}{right['suffix']}" == "":
         # No right hand information, so we don't need a separator.
         separator = ""
     first_line_no_wrap = (
-        indent_str
-        + left["prefix"]
-        + left["contents"]
-        + left["suffix"]
-        + separator
-        + right["prefix"]
-        + right["contents"]
-        + right["suffix"]
+        f"{indent_str}{left['prefix']}{left['contents']}{left['suffix']}"
+        f"{separator}{right['prefix']}{right['contents']}{right['suffix']}"
     )
     if color_len(first_line_no_wrap) < max_width:
         # Everything fits, print out line.
@@ -1069,7 +1057,7 @@ def print_newline_layout(
         empty_space - len(indent_str),
         empty_space - len(indent_str),
     )
-    left_str = left["prefix"] + left["contents"] + left["suffix"]
+    left_str = f"{left['prefix']}{left['contents']}{left['suffix']}"
     left_split = split_into_lines(left_str, left_width_tuple)
     # Repeat calculations for rhs, including separator on first line
     right_width_tuple = (
@@ -1077,19 +1065,19 @@ def print_newline_layout(
         empty_space - len(indent_str),
         empty_space - len(indent_str),
     )
-    right_str = right["prefix"] + right["contents"] + right["suffix"]
+    right_str = f"{right['prefix']}{right['contents']}{right['suffix']}"
     right_split = split_into_lines(right_str, right_width_tuple)
     for i, line in enumerate(left_split):
         if i == 0:
-            print_(indent_str + line)
+            print_(f"{indent_str}{line}")
         elif line != "":
             # Ignore empty lines
-            print_(indent_str * 2 + line)
+            print_(f"{indent_str * 2}{line}")
     for i, line in enumerate(right_split):
         if i == 0:
-            print_(indent_str + separator + line)
+            print_(f"{indent_str}{separator}{line}")
         elif line != "":
-            print_(indent_str * 2 + line)
+            print_(f"{indent_str * 2}{line}")


 FLOAT_EPSILON = 0.01
@@ -1163,7 +1151,7 @@ def show_model_changes(new, old=None, fields=None, always=False):
             continue

         changes.append(
-            " {}: {}".format(field, colorize("text_highlight", new_fmt[field]))
+            f" {field}: {colorize('text_highlight', new_fmt[field])}"
         )

     # Print changes.
@@ -1204,22 +1192,16 @@ def show_path_changes(path_changes):
         # Print every change over two lines
         for source, dest in zip(sources, destinations):
             color_source, color_dest = colordiff(source, dest)
-            print_("{0} \n -> {1}".format(color_source, color_dest))
+            print_(f"{color_source} \n -> {color_dest}")
     else:
         # Print every change on a single line, and add a header
         title_pad = max_width - len("Source ") + len(" -> ")

-        print_("Source {0} Destination".format(" " * title_pad))
+        print_(f"Source {' ' * title_pad} Destination")
         for source, dest in zip(sources, destinations):
             pad = max_width - len(source)
             color_source, color_dest = colordiff(source, dest)
-            print_(
-                "{0} {1} -> {2}".format(
-                    color_source,
-                    " " * pad,
-                    color_dest,
-                )
-            )
+            print_(f"{color_source} {' ' * pad} -> {color_dest}")


 # Helper functions for option parsing.
@@ -1245,9 +1227,7 @@ def _store_dict(option, opt_str, value, parser):
             raise ValueError
     except ValueError:
         raise UserError(
-            "supplied argument `{}' is not of the form `key=value'".format(
-                value
-            )
+            f"supplied argument `{value}' is not of the form `key=value'"
         )

     option_values[key] = value
@@ -1426,8 +1406,8 @@ class Subcommand:
     @root_parser.setter
     def root_parser(self, root_parser):
         self._root_parser = root_parser
-        self.parser.prog = "{} {}".format(
-            as_string(root_parser.get_prog_name()), self.name
+        self.parser.prog = (
+            f"{as_string(root_parser.get_prog_name())} {self.name}"
         )
@@ -1483,7 +1463,7 @@ class SubcommandsOptionParser(CommonOptionsParser):
         for subcommand in subcommands:
             name = subcommand.name
             if subcommand.aliases:
-                name += " (%s)" % ", ".join(subcommand.aliases)
+                name += f" ({', '.join(subcommand.aliases)})"
             disp_names.append(name)

         # Set the help position based on the max width.
@@ -1496,32 +1476,24 @@ class SubcommandsOptionParser(CommonOptionsParser):
             # Lifted directly from optparse.py.
             name_width = help_position - formatter.current_indent - 2
             if len(name) > name_width:
-                name = "%*s%s\n" % (formatter.current_indent, "", name)
+                name = f"{' ' * formatter.current_indent}{name}\n"
                 indent_first = help_position
             else:
-                name = "%*s%-*s " % (
-                    formatter.current_indent,
-                    "",
-                    name_width,
-                    name,
-                )
+                name = f"{' ' * formatter.current_indent}{name:<{name_width}}\n"
                 indent_first = 0
             result.append(name)
             help_width = formatter.width - help_position
             help_lines = textwrap.wrap(subcommand.help, help_width)
             help_line = help_lines[0] if help_lines else ""
-            result.append("%*s%s\n" % (indent_first, "", help_line))
+            result.append(f"{' ' * indent_first}{help_line}\n")
             result.extend(
-                [
-                    "%*s%s\n" % (help_position, "", line)
-                    for line in help_lines[1:]
-                ]
+                [f"{' ' * help_position}{line}\n" for line in help_lines[1:]]
             )
             formatter.dedent()

         # Concatenate the original help message with the subcommand
         # list.
-        return out + "".join(result)
+        return f"{out}{''.join(result)}"

     def _subcommand_for_name(self, name):
         """Return the subcommand in self.subcommands matching the
@ -1615,19 +1587,19 @@ def _configure(options):
|
|||
|
||||
if overlay_path:
|
||||
log.debug(
|
||||
"overlaying configuration: {0}", util.displayable_path(overlay_path)
|
||||
"overlaying configuration: {}", util.displayable_path(overlay_path)
|
||||
)
|
||||
|
||||
config_path = config.user_config_path()
|
||||
if os.path.isfile(config_path):
|
||||
log.debug("user configuration: {0}", util.displayable_path(config_path))
|
||||
log.debug("user configuration: {}", util.displayable_path(config_path))
|
||||
else:
|
||||
log.debug(
|
||||
"no user configuration found at {0}",
|
||||
"no user configuration found at {}",
|
||||
util.displayable_path(config_path),
|
||||
)
|
||||
|
||||
log.debug("data directory: {0}", util.displayable_path(config.config_dir()))
|
||||
log.debug("data directory: {}", util.displayable_path(config.config_dir()))
|
||||
return config
|
||||
|
||||
|
||||
|
|
@ -1637,10 +1609,8 @@ def _ensure_db_directory_exists(path):
|
|||
newpath = os.path.dirname(path)
|
||||
if not os.path.isdir(newpath):
|
||||
if input_yn(
|
||||
"The database directory {} does not \
|
||||
exist. Create it (Y/n)?".format(
|
||||
util.displayable_path(newpath)
|
||||
)
|
||||
f"The database directory {util.displayable_path(newpath)} does not"
|
||||
" exist. Create it (Y/n)?"
|
||||
):
|
||||
os.makedirs(newpath)
|
||||
|
||||
|
|
@ -1660,12 +1630,11 @@ def _open_library(config: confuse.LazyConfig) -> library.Library:
|
|||
except (sqlite3.OperationalError, sqlite3.DatabaseError) as db_error:
|
||||
log.debug("{}", traceback.format_exc())
|
||||
raise UserError(
|
||||
"database file {} cannot not be opened: {}".format(
|
||||
util.displayable_path(dbpath), db_error
|
||||
)
|
||||
f"database file {util.displayable_path(dbpath)} cannot not be"
|
||||
f" opened: {db_error}"
|
||||
)
|
||||
log.debug(
|
||||
"library database: {0}\nlibrary directory: {1}",
|
||||
"library database: {}\nlibrary directory: {}",
|
||||
util.displayable_path(lib.path),
|
||||
util.displayable_path(lib.directory),
|
||||
)
|
||||
|
|
@ -1782,7 +1751,7 @@ def main(args=None):
|
|||
_raw_main(args)
|
||||
except UserError as exc:
|
||||
message = exc.args[0] if exc.args else None
|
||||
log.error("error: {0}", message)
|
||||
log.error("error: {}", message)
|
||||
sys.exit(1)
|
||||
except util.HumanReadableError as exc:
|
||||
exc.log(log)
|
||||
|
|
@ -1794,10 +1763,10 @@ def main(args=None):
|
|||
log.error("{}", exc)
|
||||
sys.exit(1)
|
||||
except confuse.ConfigError as exc:
|
||||
log.error("configuration error: {0}", exc)
|
||||
log.error("configuration error: {}", exc)
|
||||
sys.exit(1)
|
||||
except db_query.InvalidQueryError as exc:
|
||||
log.error("invalid query: {0}", exc)
|
||||
log.error("invalid query: {}", exc)
|
||||
sys.exit(1)
|
||||
except OSError as exc:
|
||||
if exc.errno == errno.EPIPE:
|
||||
|
|
@ -1810,7 +1779,7 @@ def main(args=None):
|
|||
log.debug("{}", traceback.format_exc())
|
||||
except db.DBAccessError as exc:
|
||||
log.error(
|
||||
"database access error: {0}\n"
|
||||
"database access error: {}\n"
|
||||
"the library file might have a permissions problem",
|
||||
exc,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ interface.
|
|||
|
||||
import os
|
||||
import re
|
||||
import textwrap
|
||||
from collections import Counter
|
||||
from collections.abc import Sequence
|
||||
from itertools import chain
|
||||
|
|
@ -112,15 +113,11 @@ def _parse_logfiles(logfiles):
|
|||
yield from _paths_from_logfile(syspath(normpath(logfile)))
|
||||
except ValueError as err:
|
||||
raise ui.UserError(
|
||||
"malformed logfile {}: {}".format(
|
||||
util.displayable_path(logfile), str(err)
|
||||
)
|
||||
f"malformed logfile {util.displayable_path(logfile)}: {err}"
|
||||
) from err
|
||||
except OSError as err:
|
||||
raise ui.UserError(
|
||||
"unreadable logfile {}: {}".format(
|
||||
util.displayable_path(logfile), str(err)
|
||||
)
|
||||
f"unreadable logfile {util.displayable_path(logfile)}: {err}"
|
||||
) from err
|
||||
|
||||
|
||||
|
|
@ -132,13 +129,13 @@ def _print_keys(query):
|
|||
returned row, with indentation of 2 spaces.
|
||||
"""
|
||||
for row in query:
|
||||
print_(" " * 2 + row["key"])
|
||||
print_(f" {row['key']}")
|
||||
|
||||
|
||||
def fields_func(lib, opts, args):
|
||||
def _print_rows(names):
|
||||
names.sort()
|
||||
print_(" " + "\n ".join(names))
|
||||
print_(textwrap.indent("\n".join(names), " "))
|
||||
|
||||
print_("Item fields:")
|
||||
_print_rows(library.Item.all_keys())
|
||||
|
|
@ -148,13 +145,13 @@ def fields_func(lib, opts, args):
|
|||
|
||||
with lib.transaction() as tx:
|
||||
# The SQL uses the DISTINCT to get unique values from the query
|
||||
unique_fields = "SELECT DISTINCT key FROM (%s)"
|
||||
unique_fields = "SELECT DISTINCT key FROM ({})"
|
||||
|
||||
print_("Item flexible attributes:")
|
||||
_print_keys(tx.query(unique_fields % library.Item._flex_table))
|
||||
_print_keys(tx.query(unique_fields.format(library.Item._flex_table)))
|
||||
|
||||
print_("Album flexible attributes:")
|
||||
_print_keys(tx.query(unique_fields % library.Album._flex_table))
|
||||
_print_keys(tx.query(unique_fields.format(library.Album._flex_table)))
|
||||
|
||||
|
||||
fields_cmd = ui.Subcommand(
|
||||
|
|
@ -213,10 +210,10 @@ def get_singleton_disambig_fields(info: hooks.TrackInfo) -> Sequence[str]:
|
|||
out = []
|
||||
chosen_fields = config["match"]["singleton_disambig_fields"].as_str_seq()
|
||||
calculated_values = {
|
||||
"index": "Index {}".format(str(info.index)),
|
||||
"track_alt": "Track {}".format(info.track_alt),
|
||||
"index": f"Index {info.index}",
|
||||
"track_alt": f"Track {info.track_alt}",
|
||||
"album": (
|
||||
"[{}]".format(info.album)
|
||||
f"[{info.album}]"
|
||||
if (
|
||||
config["import"]["singleton_album_disambig"].get()
|
||||
and info.get("album")
|
||||
|
|
@ -242,7 +239,7 @@ def get_album_disambig_fields(info: hooks.AlbumInfo) -> Sequence[str]:
|
|||
chosen_fields = config["match"]["album_disambig_fields"].as_str_seq()
|
||||
calculated_values = {
|
||||
"media": (
|
||||
"{}x{}".format(info.mediums, info.media)
|
||||
f"{info.mediums}x{info.media}"
|
||||
if (info.mediums and info.mediums > 1)
|
||||
else info.media
|
||||
),
|
||||
|
|
@ -277,7 +274,7 @@ def dist_string(dist):
|
|||
"""Formats a distance (a float) as a colorized similarity percentage
|
||||
string.
|
||||
"""
|
||||
string = "{:.1f}%".format(((1 - dist) * 100))
|
||||
string = f"{(1 - dist) * 100:.1f}%"
|
||||
return dist_colorize(string, dist)
|
||||
|
||||
|
||||
|
|
@ -295,7 +292,7 @@ def penalty_string(distance, limit=None):
|
|||
if limit and len(penalties) > limit:
|
||||
penalties = penalties[:limit] + ["..."]
|
||||
# Prefix penalty string with U+2260: Not Equal To
|
||||
penalty_string = "\u2260 {}".format(", ".join(penalties))
|
||||
penalty_string = f"\u2260 {', '.join(penalties)}"
|
||||
return ui.colorize("changed", penalty_string)
|
||||
|
||||
|
||||
|
|
@ -360,18 +357,18 @@ class ChangeRepresentation:
|
|||
|
||||
# 'Match' line and similarity.
|
||||
print_(
|
||||
self.indent_header + f"Match ({dist_string(self.match.distance)}):"
|
||||
f"{self.indent_header}Match ({dist_string(self.match.distance)}):"
|
||||
)
|
||||
|
||||
if isinstance(self.match.info, autotag.hooks.AlbumInfo):
|
||||
# Matching an album - print that
|
||||
artist_album_str = (
|
||||
f"{self.match.info.artist}" + f" - {self.match.info.album}"
|
||||
f"{self.match.info.artist} - {self.match.info.album}"
|
||||
)
|
||||
else:
|
||||
# Matching a single track
|
||||
artist_album_str = (
|
||||
f"{self.match.info.artist}" + f" - {self.match.info.title}"
|
||||
f"{self.match.info.artist} - {self.match.info.title}"
|
||||
)
|
||||
print_(
|
||||
self.indent_header
|
||||
|
|
@ -381,22 +378,23 @@ class ChangeRepresentation:
|
|||
# Penalties.
|
||||
penalties = penalty_string(self.match.distance)
|
||||
if penalties:
|
||||
print_(self.indent_header + penalties)
|
||||
print_(f"{self.indent_header}{penalties}")
|
||||
|
||||
# Disambiguation.
|
||||
disambig = disambig_string(self.match.info)
|
||||
if disambig:
|
||||
print_(self.indent_header + disambig)
|
||||
print_(f"{self.indent_header}{disambig}")
|
||||
|
||||
# Data URL.
|
||||
if self.match.info.data_url:
|
||||
url = ui.colorize("text_faint", f"{self.match.info.data_url}")
|
||||
print_(self.indent_header + url)
|
||||
print_(f"{self.indent_header}{url}")
|
||||
|
||||
def show_match_details(self):
|
||||
"""Print out the details of the match, including changes in album name
|
||||
and artist name.
|
||||
"""
|
||||
changed_prefix = ui.colorize("changed", "\u2260")
|
||||
# Artist.
|
||||
artist_l, artist_r = self.cur_artist or "", self.match.info.artist
|
||||
if artist_r == VARIOUS_ARTISTS:
|
||||
|
|
@ -406,7 +404,7 @@ class ChangeRepresentation:
|
|||
artist_l, artist_r = ui.colordiff(artist_l, artist_r)
|
||||
# Prefix with U+2260: Not Equal To
|
||||
left = {
|
||||
"prefix": ui.colorize("changed", "\u2260") + " Artist: ",
|
||||
"prefix": f"{changed_prefix} Artist: ",
|
||||
"contents": artist_l,
|
||||
"suffix": "",
|
||||
}
|
||||
|
|
@ -414,7 +412,7 @@ class ChangeRepresentation:
|
|||
self.print_layout(self.indent_detail, left, right)
|
||||
|
||||
else:
|
||||
print_(self.indent_detail + "*", "Artist:", artist_r)
|
||||
print_(f"{self.indent_detail}*", "Artist:", artist_r)
|
||||
|
||||
if self.cur_album:
|
||||
# Album
|
||||
|
|
@ -426,14 +424,14 @@ class ChangeRepresentation:
|
|||
album_l, album_r = ui.colordiff(album_l, album_r)
|
||||
# Prefix with U+2260: Not Equal To
|
||||
left = {
|
||||
"prefix": ui.colorize("changed", "\u2260") + " Album: ",
|
||||
"prefix": f"{changed_prefix} Album: ",
|
||||
"contents": album_l,
|
||||
"suffix": "",
|
||||
}
|
||||
right = {"prefix": "", "contents": album_r, "suffix": ""}
|
||||
self.print_layout(self.indent_detail, left, right)
|
||||
else:
|
||||
print_(self.indent_detail + "*", "Album:", album_r)
|
||||
print_(f"{self.indent_detail}*", "Album:", album_r)
|
||||
elif self.cur_title:
|
||||
# Title - for singletons
|
||||
title_l, title_r = self.cur_title or "", self.match.info.title
|
||||
|
|
@ -441,14 +439,14 @@ class ChangeRepresentation:
|
|||
title_l, title_r = ui.colordiff(title_l, title_r)
|
||||
# Prefix with U+2260: Not Equal To
|
||||
left = {
|
||||
"prefix": ui.colorize("changed", "\u2260") + " Title: ",
|
||||
"prefix": f"{changed_prefix} Title: ",
|
||||
"contents": title_l,
|
||||
"suffix": "",
|
||||
}
|
||||
right = {"prefix": "", "contents": title_r, "suffix": ""}
|
||||
self.print_layout(self.indent_detail, left, right)
|
||||
else:
|
||||
print_(self.indent_detail + "*", "Title:", title_r)
|
||||
print_(f"{self.indent_detail}*", "Title:", title_r)
|
||||
|
||||
def make_medium_info_line(self, track_info):
|
||||
"""Construct a line with the current medium's info."""
|
||||
|
|
@ -490,7 +488,6 @@ class ChangeRepresentation:
|
|||
"""Format colored track indices."""
|
||||
cur_track = self.format_index(item)
|
||||
new_track = self.format_index(track_info)
|
||||
templ = "(#{})"
|
||||
changed = False
|
||||
# Choose color based on change.
|
||||
if cur_track != new_track:
|
||||
|
|
@ -502,10 +499,8 @@ class ChangeRepresentation:
|
|||
else:
|
||||
highlight_color = "text_faint"
|
||||
|
||||
cur_track = templ.format(cur_track)
|
||||
new_track = templ.format(new_track)
|
||||
lhs_track = ui.colorize(highlight_color, cur_track)
|
||||
rhs_track = ui.colorize(highlight_color, new_track)
|
||||
lhs_track = ui.colorize(highlight_color, f"(#{cur_track})")
|
||||
rhs_track = ui.colorize(highlight_color, f"(#{new_track})")
|
||||
return lhs_track, rhs_track, changed
|
||||
|
||||
@staticmethod
|
||||
|
|
@ -575,9 +570,9 @@ class ChangeRepresentation:
|
|||
|
||||
prefix = ui.colorize("changed", "\u2260 ") if changed else "* "
|
||||
lhs = {
|
||||
"prefix": prefix + lhs_track + " ",
|
||||
"prefix": f"{prefix}{lhs_track} ",
|
||||
"contents": lhs_title,
|
||||
"suffix": " " + lhs_length,
|
||||
"suffix": f" {lhs_length}",
|
||||
}
|
||||
rhs = {"prefix": "", "contents": "", "suffix": ""}
|
||||
if not changed:
|
||||
|
|
@ -586,9 +581,9 @@ class ChangeRepresentation:
|
|||
else:
|
||||
# Construct a dictionary for the "changed to" side
|
||||
rhs = {
|
||||
"prefix": rhs_track + " ",
|
||||
"prefix": f"{rhs_track} ",
|
||||
"contents": rhs_title,
|
||||
"suffix": " " + rhs_length,
|
||||
"suffix": f" {rhs_length}",
|
||||
}
|
||||
return (lhs, rhs)
|
||||
|
||||
|
|
@ -681,7 +676,7 @@ class AlbumChange(ChangeRepresentation):
|
|||
# Print tracks from previous medium
|
||||
self.print_tracklist(lines)
|
||||
lines = []
|
||||
print_(self.indent_detail + header)
|
||||
print_(f"{self.indent_detail}{header}")
|
||||
# Save new medium details for future comparison.
|
||||
medium, disctitle = track_info.medium, track_info.disctitle
|
||||
|
||||
|
|
@ -697,11 +692,9 @@ class AlbumChange(ChangeRepresentation):
|
|||
# Missing and unmatched tracks.
|
||||
if self.match.extra_tracks:
|
||||
print_(
|
||||
"Missing tracks ({0}/{1} - {2:.1%}):".format(
|
||||
len(self.match.extra_tracks),
|
||||
len(self.match.info.tracks),
|
||||
len(self.match.extra_tracks) / len(self.match.info.tracks),
|
||||
)
|
||||
"Missing tracks"
|
||||
f" ({len(self.match.extra_tracks)}/{len(self.match.info.tracks)} -"
|
||||
f" {len(self.match.extra_tracks) / len(self.match.info.tracks):.1%}):"
|
||||
)
|
||||
for track_info in self.match.extra_tracks:
|
||||
line = f" ! {track_info.title} (#{self.format_index(track_info)})"
|
||||
|
|
@ -711,9 +704,9 @@ class AlbumChange(ChangeRepresentation):
|
|||
if self.match.extra_items:
|
||||
print_(f"Unmatched tracks ({len(self.match.extra_items)}):")
|
||||
for item in self.match.extra_items:
|
||||
line = " ! {} (#{})".format(item.title, self.format_index(item))
|
||||
line = f" ! {item.title} (#{self.format_index(item)})"
|
||||
if item.length:
|
||||
line += " ({})".format(human_seconds_short(item.length))
|
||||
line += f" ({human_seconds_short(item.length)})"
|
||||
print_(ui.colorize("text_warning", line))
|
||||
|
||||
|
||||
|
|
@ -769,7 +762,7 @@ def summarize_items(items, singleton):
|
|||
"""
|
||||
summary_parts = []
|
||||
if not singleton:
|
||||
summary_parts.append("{} items".format(len(items)))
|
||||
summary_parts.append(f"{len(items)} items")
|
||||
|
||||
format_counts = {}
|
||||
for item in items:
|
||||
|
|
@ -789,10 +782,11 @@ def summarize_items(items, singleton):
|
|||
average_bitrate = sum([item.bitrate for item in items]) / len(items)
|
||||
total_duration = sum([item.length for item in items])
|
||||
total_filesize = sum([item.filesize for item in items])
|
||||
summary_parts.append("{}kbps".format(int(average_bitrate / 1000)))
|
||||
summary_parts.append(f"{int(average_bitrate / 1000)}kbps")
|
||||
if items[0].format == "FLAC":
|
||||
sample_bits = "{}kHz/{} bit".format(
|
||||
round(int(items[0].samplerate) / 1000, 1), items[0].bitdepth
|
||||
sample_bits = (
|
||||
f"{round(int(items[0].samplerate) / 1000, 1)}kHz"
|
||||
f"/{items[0].bitdepth} bit"
|
||||
)
|
||||
summary_parts.append(sample_bits)
|
||||
summary_parts.append(human_seconds_short(total_duration))
|
||||
|
|
@ -885,7 +879,7 @@ def choose_candidate(
|
|||
if singleton:
|
||||
print_("No matching recordings found.")
|
||||
else:
|
||||
print_("No matching release found for {} tracks.".format(itemcount))
|
||||
print_(f"No matching release found for {itemcount} tracks.")
|
||||
print_(
|
||||
"For help, see: "
|
||||
"https://beets.readthedocs.org/en/latest/faq.html#nomatch"
|
||||
|
|
@ -910,40 +904,38 @@ def choose_candidate(
|
|||
# Display list of candidates.
|
||||
print_("")
|
||||
print_(
|
||||
'Finding tags for {} "{} - {}".'.format(
|
||||
"track" if singleton else "album",
|
||||
item.artist if singleton else cur_artist,
|
||||
item.title if singleton else cur_album,
|
||||
)
|
||||
f"Finding tags for {'track' if singleton else 'album'}"
|
||||
f'"{item.artist if singleton else cur_artist} -'
|
||||
f' {item.title if singleton else cur_album}".'
|
||||
)
|
||||
|
||||
print_(ui.indent(2) + "Candidates:")
|
||||
print_(" Candidates:")
|
||||
for i, match in enumerate(candidates):
|
||||
# Index, metadata, and distance.
|
||||
index0 = "{0}.".format(i + 1)
|
||||
index0 = f"{i + 1}."
|
||||
index = dist_colorize(index0, match.distance)
|
||||
dist = "({:.1f}%)".format((1 - match.distance) * 100)
|
||||
dist = f"({(1 - match.distance) * 100:.1f}%)"
|
||||
distance = dist_colorize(dist, match.distance)
|
||||
metadata = "{0} - {1}".format(
|
||||
match.info.artist,
|
||||
match.info.title if singleton else match.info.album,
|
||||
metadata = (
|
||||
f"{match.info.artist} -"
|
||||
f" {match.info.title if singleton else match.info.album}"
|
||||
)
|
||||
if i == 0:
|
||||
metadata = dist_colorize(metadata, match.distance)
|
||||
else:
|
||||
metadata = ui.colorize("text_highlight_minor", metadata)
|
||||
line1 = [index, distance, metadata]
|
||||
print_(ui.indent(2) + " ".join(line1))
|
||||
print_(f" {' '.join(line1)}")
|
||||
|
||||
# Penalties.
|
||||
penalties = penalty_string(match.distance, 3)
|
||||
if penalties:
|
||||
print_(ui.indent(13) + penalties)
|
||||
print_(f"{' ' * 13}{penalties}")
|
||||
|
||||
# Disambiguation
|
||||
disambig = disambig_string(match.info)
|
||||
if disambig:
|
||||
print_(ui.indent(13) + disambig)
|
||||
print_(f"{' ' * 13}{disambig}")
|
||||
|
||||
# Ask the user for a choice.
|
||||
sel = ui.input_options(choice_opts, numrange=(1, len(candidates)))
|
||||
|
|
@ -1015,7 +1007,7 @@ def manual_id(session, task):
|
|||
|
||||
Input an ID, either for an album ("release") or a track ("recording").
|
||||
"""
|
||||
prompt = "Enter {} ID:".format("release" if task.is_album else "recording")
|
||||
prompt = f"Enter {'release' if task.is_album else 'recording'} ID:"
|
||||
search_id = input_(prompt).strip()
|
||||
|
||||
if task.is_album:
|
||||
|
|
@ -1043,7 +1035,7 @@ class TerminalImportSession(importer.ImportSession):
|
|||
|
||||
path_str0 = displayable_path(task.paths, "\n")
|
||||
path_str = ui.colorize("import_path", path_str0)
|
||||
items_str0 = "({} items)".format(len(task.items))
|
||||
items_str0 = f"({len(task.items)} items)"
|
||||
items_str = ui.colorize("import_path_items", items_str0)
|
||||
print_(" ".join([path_str, items_str]))
|
||||
|
||||
|
|
@ -1156,7 +1148,7 @@ class TerminalImportSession(importer.ImportSession):
|
|||
that's already in the library.
|
||||
"""
|
||||
log.warning(
|
||||
"This {0} is already in the library!",
|
||||
"This {} is already in the library!",
|
||||
("album" if task.is_album else "item"),
|
||||
)
|
||||
|
||||
|
|
@ -1217,8 +1209,8 @@ class TerminalImportSession(importer.ImportSession):
|
|||
|
||||
def should_resume(self, path):
|
||||
return ui.input_yn(
|
||||
"Import of the directory:\n{}\n"
|
||||
"was interrupted. Resume (Y/n)?".format(displayable_path(path))
|
||||
f"Import of the directory:\n{displayable_path(path)}\n"
|
||||
"was interrupted. Resume (Y/n)?"
|
||||
)
|
||||
|
||||
def _get_choices(self, task):
|
||||
|
|
@ -1288,11 +1280,10 @@ class TerminalImportSession(importer.ImportSession):
|
|||
dup_choices = [c for c in all_choices if c.short == short]
|
||||
for c in dup_choices[1:]:
|
||||
log.warning(
|
||||
"Prompt choice '{0}' removed due to conflict "
|
||||
"with '{1}' (short letter: '{2}')",
|
||||
c.long,
|
||||
dup_choices[0].long,
|
||||
c.short,
|
||||
"Prompt choice '{0.long}' removed due to conflict "
|
||||
"with '{1[0].long}' (short letter: '{0.short}')",
|
||||
c,
|
||||
dup_choices,
|
||||
)
|
||||
extra_choices.remove(c)
|
||||
|
||||
|
|
@ -1317,7 +1308,8 @@ def import_files(lib, paths: list[bytes], query):
|
|||
loghandler = logging.FileHandler(logpath, encoding="utf-8")
|
||||
except OSError:
|
||||
raise ui.UserError(
|
||||
f"Could not open log file for writing: {displayable_path(logpath)}"
|
||||
"Could not open log file for writing:"
|
||||
f" {displayable_path(logpath)}"
|
||||
)
|
||||
else:
|
||||
loghandler = None
|
||||
|
|
@ -1362,9 +1354,7 @@ def import_func(lib, opts, args: list[str]):
|
|||
for path in byte_paths:
|
||||
if not os.path.exists(syspath(normpath(path))):
|
||||
raise ui.UserError(
|
||||
"no such file or directory: {}".format(
|
||||
displayable_path(path)
|
||||
)
|
||||
f"no such file or directory: {displayable_path(path)}"
|
||||
)
|
||||
|
||||
# Check the directories from the logfiles, but don't throw an error in
|
||||
|
|
@ -1374,9 +1364,7 @@ def import_func(lib, opts, args: list[str]):
|
|||
for path in paths_from_logfiles:
|
||||
if not os.path.exists(syspath(normpath(path))):
|
||||
log.warning(
|
||||
"No such file or directory: {}".format(
|
||||
displayable_path(path)
|
||||
)
|
||||
"No such file or directory: {}", displayable_path(path)
|
||||
)
|
||||
continue
|
||||
|
||||
|
|
@ -1650,9 +1638,8 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None):
|
|||
# Did the item change since last checked?
|
||||
if item.current_mtime() <= item.mtime:
|
||||
log.debug(
|
||||
"skipping {0} because mtime is up to date ({1})",
|
||||
displayable_path(item.path),
|
||||
item.mtime,
|
||||
"skipping {0.filepath} because mtime is up to date ({0.mtime})",
|
||||
item,
|
||||
)
|
||||
continue
|
||||
|
||||
|
|
@ -1660,9 +1647,7 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None):
|
|||
try:
|
||||
item.read()
|
||||
except library.ReadError as exc:
|
||||
log.error(
|
||||
"error reading {0}: {1}", displayable_path(item.path), exc
|
||||
)
|
||||
log.error("error reading {.filepath}: {}", item, exc)
|
||||
continue
|
||||
|
||||
# Special-case album artist when it matches track artist. (Hacky
|
||||
|
|
@ -1703,7 +1688,7 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None):
|
|||
continue
|
||||
album = lib.get_album(album_id)
|
||||
if not album: # Empty albums have already been removed.
|
||||
log.debug("emptied album {0}", album_id)
|
||||
log.debug("emptied album {}", album_id)
|
||||
continue
|
||||
first_item = album.items().get()
|
||||
|
||||
|
|
@ -1714,7 +1699,7 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None):
|
|||
|
||||
# Move album art (and any inconsistent items).
|
||||
if move and lib.directory in ancestry(first_item.path):
|
||||
log.debug("moving album {0}", album_id)
|
||||
log.debug("moving album {}", album_id)
|
||||
|
||||
# Manually moving and storing the album.
|
||||
items = list(album.items())
|
||||
|
|
@ -1808,7 +1793,7 @@ def remove_items(lib, query, album, delete, force):
|
|||
if not force:
|
||||
# Prepare confirmation with user.
|
||||
album_str = (
|
||||
" in {} album{}".format(len(albums), "s" if len(albums) > 1 else "")
|
||||
f" in {len(albums)} album{'s' if len(albums) > 1 else ''}"
|
||||
if album
|
||||
else ""
|
||||
)
|
||||
|
|
@ -1816,14 +1801,17 @@ def remove_items(lib, query, album, delete, force):
|
|||
if delete:
|
||||
fmt = "$path - $title"
|
||||
prompt = "Really DELETE"
|
||||
prompt_all = "Really DELETE {} file{}{}".format(
|
||||
len(items), "s" if len(items) > 1 else "", album_str
|
||||
prompt_all = (
|
||||
"Really DELETE"
|
||||
f" {len(items)} file{'s' if len(items) > 1 else ''}{album_str}"
|
||||
)
|
||||
else:
|
||||
fmt = ""
|
||||
prompt = "Really remove from the library?"
|
||||
prompt_all = "Really remove {} item{}{} from the library?".format(
|
||||
len(items), "s" if len(items) > 1 else "", album_str
|
||||
prompt_all = (
|
||||
"Really remove"
|
||||
f" {len(items)} item{'s' if len(items) > 1 else ''}{album_str}"
|
||||
" from the library?"
|
||||
)
|
||||
|
||||
# Helpers for printing affected items
|
||||
|
|
@ -1892,7 +1880,7 @@ def show_stats(lib, query, exact):
|
|||
try:
|
||||
total_size += os.path.getsize(syspath(item.path))
|
||||
except OSError as exc:
|
||||
log.info("could not get size of {}: {}", item.path, exc)
|
||||
log.info("could not get size of {.path}: {}", item, exc)
|
||||
else:
|
||||
total_size += int(item.length * item.bitrate / 8)
|
||||
total_time += item.length
|
||||
|
|
@ -1902,27 +1890,17 @@ def show_stats(lib, query, exact):
|
|||
if item.album_id:
|
||||
albums.add(item.album_id)
|
||||
|
||||
size_str = "" + human_bytes(total_size)
|
||||
size_str = human_bytes(total_size)
|
||||
if exact:
|
||||
size_str += f" ({total_size} bytes)"
|
||||
|
||||
print_(
|
||||
"""Tracks: {}
|
||||
Total time: {}{}
|
||||
{}: {}
|
||||
Artists: {}
|
||||
Albums: {}
|
||||
Album artists: {}""".format(
|
||||
total_items,
|
||||
human_seconds(total_time),
|
||||
f" ({total_time:.2f} seconds)" if exact else "",
|
||||
"Total size" if exact else "Approximate total size",
|
||||
size_str,
|
||||
len(artists),
|
||||
len(albums),
|
||||
len(album_artists),
|
||||
),
|
||||
)
|
||||
print_(f"""Tracks: {total_items}
|
||||
Total time: {human_seconds(total_time)}{f" ({total_time:.2f} seconds)" if exact else ""}
|
||||
{"Total size" if exact else "Approximate total size"}: {size_str}
|
||||
Artists: {len(artists)}
|
||||
Albums: {len(albums)}
|
||||
Album artists: {len(album_artists)}""")
|
||||
|
||||
|
||||
def stats_func(lib, opts, args):
|
||||
|
|
@ -1943,7 +1921,7 @@ default_commands.append(stats_cmd)
|
|||
|
||||
|
||||
def show_version(lib, opts, args):
|
||||
print_("beets version %s" % beets.__version__)
|
||||
print_(f"beets version {beets.__version__}")
|
||||
print_(f"Python version {python_version()}")
|
||||
# Show plugins.
|
||||
names = sorted(p.name for p in plugins.find_plugins())
|
||||
|
|
@ -1977,7 +1955,7 @@ def modify_items(lib, mods, dels, query, write, move, album, confirm, inherit):
|
|||
|
||||
# Apply changes *temporarily*, preview them, and collect modified
|
||||
# objects.
|
||||
print_("Modifying {} {}s.".format(len(objs), "album" if album else "item"))
|
||||
print_(f"Modifying {len(objs)} {'album' if album else 'item'}s.")
|
||||
changed = []
|
||||
templates = {
|
||||
key: functemplate.template(value) for key, value in mods.items()
|
||||
|
|
@ -2007,7 +1985,7 @@ def modify_items(lib, mods, dels, query, write, move, album, confirm, inherit):
|
|||
extra = ""
|
||||
|
||||
changed = ui.input_select_objects(
|
||||
"Really modify%s" % extra,
|
||||
f"Really modify{extra}",
|
||||
changed,
|
||||
lambda o: print_and_modify(o, mods, dels),
|
||||
)
|
||||
|
|
@ -2159,7 +2137,7 @@ def move_items(
|
|||
act = "copy" if copy else "move"
|
||||
entity = "album" if album else "item"
|
||||
log.info(
|
||||
"{0} {1} {2}{3}{4}.",
|
||||
"{} {} {}{}{}.",
|
||||
action,
|
||||
len(objs),
|
||||
entity,
|
||||
|
|
@ -2185,7 +2163,7 @@ def move_items(
|
|||
else:
|
||||
if confirm:
|
||||
objs = ui.input_select_objects(
|
||||
"Really %s" % act,
|
||||
f"Really {act}",
|
||||
objs,
|
||||
lambda o: show_path_changes(
|
||||
[(o.path, o.destination(basedir=dest))]
|
||||
|
|
@ -2193,7 +2171,7 @@ def move_items(
|
|||
)
|
||||
|
||||
for obj in objs:
|
||||
log.debug("moving: {0}", util.displayable_path(obj.path))
|
||||
log.debug("moving: {.filepath}", obj)
|
||||
|
||||
if export:
|
||||
# Copy without affecting the database.
|
||||
|
|
@ -2213,9 +2191,7 @@ def move_func(lib, opts, args):
|
|||
if dest is not None:
|
||||
dest = normpath(dest)
|
||||
if not os.path.isdir(syspath(dest)):
|
||||
raise ui.UserError(
|
||||
"no such directory: {}".format(displayable_path(dest))
|
||||
)
|
||||
raise ui.UserError(f"no such directory: {displayable_path(dest)}")
|
||||
|
||||
move_items(
|
||||
lib,
|
||||
|
|
@ -2278,16 +2254,14 @@ def write_items(lib, query, pretend, force):
|
|||
for item in items:
|
||||
# Item deleted?
|
||||
if not os.path.exists(syspath(item.path)):
|
||||
log.info("missing file: {0}", util.displayable_path(item.path))
|
||||
log.info("missing file: {.filepath}", item)
|
||||
continue
|
||||
|
||||
# Get an Item object reflecting the "clean" (on-disk) state.
|
||||
try:
|
||||
clean_item = library.Item.from_path(item.path)
|
||||
except library.ReadError as exc:
|
||||
log.error(
|
||||
"error reading {0}: {1}", displayable_path(item.path), exc
|
||||
)
|
||||
log.error("error reading {.filepath}: {}", item, exc)
|
||||
continue
|
||||
|
||||
# Check for and display changes.
|
||||
|
|
@ -2480,30 +2454,27 @@ def completion_script(commands):
|
|||
yield "_beet() {\n"
|
||||
|
||||
# Command names
|
||||
yield " local commands='%s'\n" % " ".join(command_names)
|
||||
yield f" local commands={' '.join(command_names)!r}\n"
|
||||
yield "\n"
|
||||
|
||||
# Command aliases
|
||||
yield " local aliases='%s'\n" % " ".join(aliases.keys())
|
||||
yield f" local aliases={' '.join(aliases.keys())!r}\n"
|
||||
for alias, cmd in aliases.items():
|
||||
yield " local alias__{}={}\n".format(alias.replace("-", "_"), cmd)
|
||||
yield f" local alias__{alias.replace('-', '_')}={cmd}\n"
|
||||
yield "\n"
|
||||
|
||||
# Fields
|
||||
yield " fields='%s'\n" % " ".join(
|
||||
set(
|
||||
list(library.Item._fields.keys())
|
||||
+ list(library.Album._fields.keys())
|
||||
)
|
||||
)
|
||||
fields = library.Item._fields.keys() | library.Album._fields.keys()
|
||||
yield f" fields={' '.join(fields)!r}\n"
|
||||
|
||||
# Command options
|
||||
for cmd, opts in options.items():
|
||||
for option_type, option_list in opts.items():
|
||||
if option_list:
|
||||
option_list = " ".join(option_list)
|
||||
yield " local {}__{}='{}'\n".format(
|
||||
option_type, cmd.replace("-", "_"), option_list
|
||||
yield (
|
||||
" local"
|
||||
f" {option_type}__{cmd.replace('-', '_')}='{option_list}'\n"
|
||||
)
|
||||
|
||||
yield " _beet_dispatch\n"
|
||||
|
|
|
|||
|
|
@ -112,7 +112,7 @@ class HumanReadableError(Exception):
|
|||
elif hasattr(self.reason, "strerror"): # i.e., EnvironmentError
|
||||
return self.reason.strerror
|
||||
else:
|
||||
return '"{}"'.format(str(self.reason))
|
||||
return f'"{self.reason}"'
|
||||
|
||||
def get_message(self):
|
||||
"""Create the human-readable description of the error, sans
|
||||
|
|
@ -126,7 +126,7 @@ class HumanReadableError(Exception):
|
|||
"""
|
||||
if self.tb:
|
||||
logger.debug(self.tb)
|
||||
logger.error("{0}: {1}", self.error_kind, self.args[0])
|
||||
logger.error("{0.error_kind}: {0.args[0]}", self)
|
||||
|
||||
|
||||
class FilesystemError(HumanReadableError):
|
||||
|
|
@ -142,18 +142,16 @@ class FilesystemError(HumanReadableError):
|
|||
def get_message(self):
|
||||
# Use a nicer English phrasing for some specific verbs.
|
||||
if self.verb in ("move", "copy", "rename"):
|
||||
clause = "while {} {} to {}".format(
|
||||
self._gerund(),
|
||||
displayable_path(self.paths[0]),
|
||||
displayable_path(self.paths[1]),
|
||||
clause = (
|
||||
f"while {self._gerund()} {displayable_path(self.paths[0])} to"
|
||||
f" {displayable_path(self.paths[1])}"
|
||||
)
|
||||
elif self.verb in ("delete", "write", "create", "read"):
|
||||
clause = "while {} {}".format(
|
||||
self._gerund(), displayable_path(self.paths[0])
|
||||
)
|
||||
clause = f"while {self._gerund()} {displayable_path(self.paths[0])}"
|
||||
else:
|
||||
clause = "during {} of paths {}".format(
|
||||
self.verb, ", ".join(displayable_path(p) for p in self.paths)
|
||||
clause = (
|
||||
f"during {self.verb} of paths"
|
||||
f" {', '.join(displayable_path(p) for p in self.paths)}"
|
||||
)
|
||||
|
||||
return f"{self._reasonstr()} {clause}"
|
||||
|
|
@ -223,12 +221,12 @@ def sorted_walk(
|
|||
# Get all the directories and files at this level.
|
||||
try:
|
||||
contents = os.listdir(syspath(bytes_path))
|
||||
except OSError as exc:
|
||||
except OSError:
|
||||
if logger:
|
||||
logger.warning(
|
||||
"could not list directory {}: {}".format(
|
||||
displayable_path(bytes_path), exc.strerror
|
||||
)
|
||||
"could not list directory {}",
|
||||
displayable_path(bytes_path),
|
||||
exc_info=True,
|
||||
)
|
||||
return
|
||||
dirs = []
|
||||
|
|
@ -436,8 +434,8 @@ def syspath(path: PathLike, prefix: bool = True) -> str:
|
|||
if prefix and not str_path.startswith(WINDOWS_MAGIC_PREFIX):
|
||||
if str_path.startswith("\\\\"):
|
||||
# UNC path. Final path should look like \\?\UNC\...
|
||||
str_path = "UNC" + str_path[1:]
|
||||
str_path = WINDOWS_MAGIC_PREFIX + str_path
|
||||
str_path = f"UNC{str_path[1:]}"
|
||||
str_path = f"{WINDOWS_MAGIC_PREFIX}{str_path}"
|
||||
|
||||
return str_path
|
||||
|
||||
|
|
@ -509,8 +507,8 @@ def move(path: bytes, dest: bytes, replace: bool = False):
|
|||
basename = os.path.basename(bytestring_path(dest))
|
||||
dirname = os.path.dirname(bytestring_path(dest))
|
||||
tmp = tempfile.NamedTemporaryFile(
|
||||
suffix=syspath(b".beets", prefix=False),
|
||||
prefix=syspath(b"." + basename + b".", prefix=False),
|
||||
suffix=".beets",
|
||||
prefix=f".{os.fsdecode(basename)}.",
|
||||
dir=syspath(dirname),
|
||||
delete=False,
|
||||
)
|
||||
|
|
@ -719,7 +717,7 @@ def truncate_path(str_path: str) -> str:
|
|||
path = Path(str_path)
|
||||
parent_parts = [truncate_str(p, max_length) for p in path.parts[:-1]]
|
||||
stem = truncate_str(path.stem, max_length - len(path.suffix))
|
||||
return str(Path(*parent_parts, stem)) + path.suffix
|
||||
return f"{Path(*parent_parts, stem)}{path.suffix}"
|
||||
|
||||
|
||||
def _legalize_stage(
|
||||
|
|
|
|||
|
|
@ -54,7 +54,7 @@ def resize_url(url: str, maxwidth: int, quality: int = 0) -> str:
|
|||
if quality > 0:
|
||||
params["q"] = quality
|
||||
|
||||
return "{}?{}".format(PROXY_URL, urlencode(params))
|
||||
return f"{PROXY_URL}?{urlencode(params)}"
|
||||
|
||||
|
||||
class LocalBackendNotAvailableError(Exception):
|
||||
|
|
@ -255,7 +255,7 @@ class IMBackend(LocalBackend):
|
|||
path_out = get_temp_filename(__name__, "resize_IM_", path_in)
|
||||
|
||||
log.debug(
|
||||
"artresizer: ImageMagick resizing {0} to {1}",
|
||||
"artresizer: ImageMagick resizing {} to {}",
|
||||
displayable_path(path_in),
|
||||
displayable_path(path_out),
|
||||
)
|
||||
|
|
@ -287,7 +287,7 @@ class IMBackend(LocalBackend):
|
|||
util.command_output(cmd)
|
||||
except subprocess.CalledProcessError:
|
||||
log.warning(
|
||||
"artresizer: IM convert failed for {0}",
|
||||
"artresizer: IM convert failed for {}",
|
||||
displayable_path(path_in),
|
||||
)
|
||||
return path_in
|
||||
|
|
@ -306,9 +306,9 @@ class IMBackend(LocalBackend):
|
|||
except subprocess.CalledProcessError as exc:
|
||||
log.warning("ImageMagick size query failed")
|
||||
log.debug(
|
||||
"`convert` exited with (status {}) when "
|
||||
"`convert` exited with (status {.returncode}) when "
|
||||
"getting size with command {}:\n{}",
|
||||
exc.returncode,
|
||||
exc,
|
||||
cmd,
|
||||
exc.output.strip(),
|
||||
)
|
||||
|
|
@ -441,8 +441,8 @@ class IMBackend(LocalBackend):
|
|||
convert_proc.wait()
|
||||
if convert_proc.returncode:
|
||||
log.debug(
|
||||
"ImageMagick convert failed with status {}: {!r}",
|
||||
convert_proc.returncode,
|
||||
"ImageMagick convert failed with status {.returncode}: {!r}",
|
||||
convert_proc,
|
||||
convert_stderr,
|
||||
)
|
||||
return None
|
||||
|
|
@ -452,7 +452,7 @@ class IMBackend(LocalBackend):
|
|||
if compare_proc.returncode:
|
||||
if compare_proc.returncode != 1:
|
||||
log.debug(
|
||||
"ImageMagick compare failed: {0}, {1}",
|
||||
"ImageMagick compare failed: {}, {}",
|
||||
displayable_path(im2),
|
||||
displayable_path(im1),
|
||||
)
|
||||
|
|
@ -472,7 +472,7 @@ class IMBackend(LocalBackend):
|
|||
log.debug("IM output is not a number: {0!r}", out_str)
|
||||
return None
|
||||
|
||||
log.debug("ImageMagick compare score: {0}", phash_diff)
|
||||
log.debug("ImageMagick compare score: {}", phash_diff)
|
||||
return phash_diff <= compare_threshold
|
||||
|
||||
@property
|
||||
|
|
@ -523,7 +523,7 @@ class PILBackend(LocalBackend):
|
|||
from PIL import Image
|
||||
|
||||
log.debug(
|
||||
"artresizer: PIL resizing {0} to {1}",
|
||||
"artresizer: PIL resizing {} to {}",
|
||||
displayable_path(path_in),
|
||||
displayable_path(path_out),
|
||||
)
|
||||
|
|
@ -552,7 +552,7 @@ class PILBackend(LocalBackend):
|
|||
for i in range(5):
|
||||
# 5 attempts is an arbitrary choice
|
||||
filesize = os.stat(syspath(path_out)).st_size
|
||||
log.debug("PIL Pass {0} : Output size: {1}B", i, filesize)
|
||||
log.debug("PIL Pass {} : Output size: {}B", i, filesize)
|
||||
if filesize <= max_filesize:
|
||||
return path_out
|
||||
# The relationship between filesize & quality will be
|
||||
|
|
@ -569,7 +569,7 @@ class PILBackend(LocalBackend):
|
|||
progressive=False,
|
||||
)
|
||||
log.warning(
|
||||
"PIL Failed to resize file to below {0}B", max_filesize
|
||||
"PIL Failed to resize file to below {}B", max_filesize
|
||||
)
|
||||
return path_out
|
||||
|
||||
|
|
@ -577,7 +577,7 @@ class PILBackend(LocalBackend):
|
|||
return path_out
|
||||
except OSError:
|
||||
log.error(
|
||||
"PIL cannot create thumbnail for '{0}'",
|
||||
"PIL cannot create thumbnail for '{}'",
|
||||
displayable_path(path_in),
|
||||
)
|
||||
return path_in
|
||||
|
|
@ -696,7 +696,7 @@ class ArtResizer:
|
|||
for backend_cls in BACKEND_CLASSES:
|
||||
try:
|
||||
self.local_method = backend_cls()
|
||||
log.debug(f"artresizer: method is {self.local_method.NAME}")
|
||||
log.debug("artresizer: method is {.local_method.NAME}", self)
|
||||
break
|
||||
except LocalBackendNotAvailableError:
|
||||
continue
|
||||
|
|
|
|||
|
|
@ -559,7 +559,7 @@ def spawn(coro):
|
|||
and child coroutines run concurrently.
|
||||
"""
|
||||
if not isinstance(coro, types.GeneratorType):
|
||||
raise ValueError("%s is not a coroutine" % coro)
|
||||
raise ValueError(f"{coro} is not a coroutine")
|
||||
return SpawnEvent(coro)
|
||||
|
||||
|
||||
|
|
@ -569,7 +569,7 @@ def call(coro):
|
|||
returns a value using end(), then this event returns that value.
|
||||
"""
|
||||
if not isinstance(coro, types.GeneratorType):
|
||||
raise ValueError("%s is not a coroutine" % coro)
|
||||
raise ValueError(f"{coro} is not a coroutine")
|
||||
return DelegationEvent(coro)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -136,7 +136,7 @@ class Symbol:
|
|||
self.original = original
|
||||
|
||||
def __repr__(self):
|
||||
return "Symbol(%s)" % repr(self.ident)
|
||||
return f"Symbol({self.ident!r})"
|
||||
|
||||
def evaluate(self, env):
|
||||
"""Evaluate the symbol in the environment, returning a Unicode
|
||||
|
|
@ -152,7 +152,7 @@ class Symbol:
|
|||
def translate(self):
|
||||
"""Compile the variable lookup."""
|
||||
ident = self.ident
|
||||
expr = ex_rvalue(VARIABLE_PREFIX + ident)
|
||||
expr = ex_rvalue(f"{VARIABLE_PREFIX}{ident}")
|
||||
return [expr], {ident}, set()
|
||||
|
||||
|
||||
|
|
@ -165,9 +165,7 @@ class Call:
|
|||
self.original = original
|
||||
|
||||
def __repr__(self):
|
||||
return "Call({}, {}, {})".format(
|
||||
repr(self.ident), repr(self.args), repr(self.original)
|
||||
)
|
||||
return f"Call({self.ident!r}, {self.args!r}, {self.original!r})"
|
||||
|
||||
def evaluate(self, env):
|
||||
"""Evaluate the function call in the environment, returning a
|
||||
|
|
@ -180,7 +178,7 @@ class Call:
|
|||
except Exception as exc:
|
||||
# Function raised exception! Maybe inlining the name of
|
||||
# the exception will help debug.
|
||||
return "<%s>" % str(exc)
|
||||
return f"<{exc}>"
|
||||
return str(out)
|
||||
else:
|
||||
return self.original
|
||||
|
|
@ -213,7 +211,7 @@ class Call:
|
|||
)
|
||||
)
|
||||
|
||||
subexpr_call = ex_call(FUNCTION_PREFIX + self.ident, arg_exprs)
|
||||
subexpr_call = ex_call(f"{FUNCTION_PREFIX}{self.ident}", arg_exprs)
|
||||
return [subexpr_call], varnames, funcnames
|
||||
|
||||
|
||||
|
|
@ -226,7 +224,7 @@ class Expression:
|
|||
self.parts = parts
|
||||
|
||||
def __repr__(self):
|
||||
return "Expression(%s)" % (repr(self.parts))
|
||||
return f"Expression({self.parts!r})"
|
||||
|
||||
def evaluate(self, env):
|
||||
"""Evaluate the entire expression in the environment, returning
|
||||
|
|
@ -298,9 +296,6 @@ class Parser:
|
|||
GROUP_CLOSE,
|
||||
ESCAPE_CHAR,
|
||||
)
|
||||
special_char_re = re.compile(
|
||||
r"[%s]|\Z" % "".join(re.escape(c) for c in special_chars)
|
||||
)
|
||||
escapable_chars = (SYMBOL_DELIM, FUNC_DELIM, GROUP_CLOSE, ARG_SEP)
|
||||
terminator_chars = (GROUP_CLOSE,)
|
||||
|
||||
|
|
@ -312,24 +307,18 @@ class Parser:
|
|||
"""
|
||||
# Append comma (ARG_SEP) to the list of special characters only when
|
||||
# parsing function arguments.
|
||||
extra_special_chars = ()
|
||||
special_char_re = self.special_char_re
|
||||
if self.in_argument:
|
||||
extra_special_chars = (ARG_SEP,)
|
||||
special_char_re = re.compile(
|
||||
r"[%s]|\Z"
|
||||
% "".join(
|
||||
re.escape(c)
|
||||
for c in self.special_chars + extra_special_chars
|
||||
)
|
||||
)
|
||||
extra_special_chars = (ARG_SEP,) if self.in_argument else ()
|
||||
special_chars = (*self.special_chars, *extra_special_chars)
|
||||
special_char_re = re.compile(
|
||||
rf"[{''.join(map(re.escape, special_chars))}]|\Z"
|
||||
)
|
||||
|
||||
text_parts = []
|
||||
|
||||
while self.pos < len(self.string):
|
||||
char = self.string[self.pos]
|
||||
|
||||
if char not in self.special_chars + extra_special_chars:
|
||||
if char not in special_chars:
|
||||
# A non-special character. Skip to the next special
|
||||
# character, treating the interstice as literal text.
|
||||
next_pos = (
|
||||
|
|
@ -566,9 +555,9 @@ class Template:
|
|||
|
||||
argnames = []
|
||||
for varname in varnames:
|
||||
argnames.append(VARIABLE_PREFIX + varname)
|
||||
argnames.append(f"{VARIABLE_PREFIX}{varname}")
|
||||
for funcname in funcnames:
|
||||
argnames.append(FUNCTION_PREFIX + funcname)
|
||||
argnames.append(f"{FUNCTION_PREFIX}{funcname}")
|
||||
|
||||
func = compile_func(
|
||||
argnames,
|
||||
|
|
@ -578,9 +567,9 @@ class Template:
|
|||
def wrapper_func(values={}, functions={}):
|
||||
args = {}
|
||||
for varname in varnames:
|
||||
args[VARIABLE_PREFIX + varname] = values[varname]
|
||||
args[f"{VARIABLE_PREFIX}{varname}"] = values[varname]
|
||||
for funcname in funcnames:
|
||||
args[FUNCTION_PREFIX + funcname] = functions[funcname]
|
||||
args[f"{FUNCTION_PREFIX}{funcname}"] = functions[funcname]
|
||||
parts = func(**args)
|
||||
return "".join(parts)
|
||||
|
||||
|
|
|
|||
|
|
@ -58,7 +58,8 @@ def extract_release_id(source: str, id_: str) -> str | None:
|
|||
source_pattern = PATTERN_BY_SOURCE[source.lower()]
|
||||
except KeyError:
|
||||
log.debug(
|
||||
f"Unknown source '{source}' for ID extraction. Returning id/url as-is."
|
||||
"Unknown source '{}' for ID extraction. Returning id/url as-is.",
|
||||
source,
|
||||
)
|
||||
return id_
|
||||
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ def human_seconds_short(interval):
|
|||
string.
|
||||
"""
|
||||
interval = int(interval)
|
||||
return "%i:%02i" % (interval // 60, interval % 60)
|
||||
return f"{interval // 60}:{interval % 60:02d}"
|
||||
|
||||
|
||||
def human_bytes(size):
|
||||
|
|
|
|||
|
|
@ -42,9 +42,7 @@ def call(args):
|
|||
try:
|
||||
return util.command_output(args).stdout
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise ABSubmitError(
|
||||
"{} exited with status {}".format(args[0], e.returncode)
|
||||
)
|
||||
raise ABSubmitError(f"{args[0]} exited with status {e.returncode}")
|
||||
|
||||
|
||||
class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
||||
|
|
@ -63,9 +61,7 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
|||
# Explicit path to extractor
|
||||
if not os.path.isfile(self.extractor):
|
||||
raise ui.UserError(
|
||||
"Extractor command does not exist: {0}.".format(
|
||||
self.extractor
|
||||
)
|
||||
f"Extractor command does not exist: {self.extractor}."
|
||||
)
|
||||
else:
|
||||
# Implicit path to extractor, search for it in path
|
||||
|
|
@ -101,8 +97,8 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
|||
"with an HTTP scheme"
|
||||
)
|
||||
elif base_url[-1] != "/":
|
||||
base_url = base_url + "/"
|
||||
self.url = base_url + "{mbid}/low-level"
|
||||
base_url = f"{base_url}/"
|
||||
self.url = f"{base_url}{{mbid}}/low-level"
|
||||
|
||||
def commands(self):
|
||||
cmd = ui.Subcommand(
|
||||
|
|
@ -122,8 +118,10 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
|||
dest="pretend_fetch",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="pretend to perform action, but show \
|
||||
only files which would be processed",
|
||||
help=(
|
||||
"pretend to perform action, but show only files which would be"
|
||||
" processed"
|
||||
),
|
||||
)
|
||||
cmd.func = self.command
|
||||
return [cmd]
|
||||
|
|
|
|||
|
|
@ -97,7 +97,7 @@ class AcousticPlugin(plugins.BeetsPlugin):
|
|||
"with an HTTP scheme"
|
||||
)
|
||||
elif self.base_url[-1] != "/":
|
||||
self.base_url = self.base_url + "/"
|
||||
self.base_url = f"{self.base_url}/"
|
||||
|
||||
if self.config["auto"]:
|
||||
self.register_listener("import_task_files", self.import_task_files)
|
||||
|
|
@ -153,7 +153,7 @@ class AcousticPlugin(plugins.BeetsPlugin):
|
|||
try:
|
||||
data.update(res.json())
|
||||
except ValueError:
|
||||
self._log.debug("Invalid Response: {}", res.text)
|
||||
self._log.debug("Invalid Response: {.text}", res)
|
||||
return {}
|
||||
|
||||
return data
|
||||
|
|
@ -300,4 +300,4 @@ class AcousticPlugin(plugins.BeetsPlugin):
|
|||
def _generate_urls(base_url, mbid):
|
||||
"""Generates AcousticBrainz end point urls for given `mbid`."""
|
||||
for level in LEVELS:
|
||||
yield base_url + mbid + level
|
||||
yield f"{base_url}{mbid}{level}"
|
||||
|
|
|
|||
|
|
@ -236,14 +236,14 @@ class AURADocument:
|
|||
# Not the last page so work out links.next url
|
||||
if not self.args:
|
||||
# No existing arguments, so current page is 0
|
||||
next_url = request.url + "?page=1"
|
||||
next_url = f"{request.url}?page=1"
|
||||
elif not self.args.get("page", None):
|
||||
# No existing page argument, so add one to the end
|
||||
next_url = request.url + "&page=1"
|
||||
next_url = f"{request.url}&page=1"
|
||||
else:
|
||||
# Increment page token by 1
|
||||
next_url = request.url.replace(
|
||||
f"page={page}", "page={}".format(page + 1)
|
||||
f"page={page}", f"page={page + 1}"
|
||||
)
|
||||
# Get only the items in the page range
|
||||
data = [
|
||||
|
|
@ -427,9 +427,7 @@ class TrackDocument(AURADocument):
|
|||
return self.error(
|
||||
"404 Not Found",
|
||||
"No track with the requested id.",
|
||||
"There is no track with an id of {} in the library.".format(
|
||||
track_id
|
||||
),
|
||||
f"There is no track with an id of {track_id} in the library.",
|
||||
)
|
||||
return self.single_resource_document(
|
||||
self.get_resource_object(self.lib, track)
|
||||
|
|
@ -513,9 +511,7 @@ class AlbumDocument(AURADocument):
|
|||
return self.error(
|
||||
"404 Not Found",
|
||||
"No album with the requested id.",
|
||||
"There is no album with an id of {} in the library.".format(
|
||||
album_id
|
||||
),
|
||||
f"There is no album with an id of {album_id} in the library.",
|
||||
)
|
||||
return self.single_resource_document(
|
||||
self.get_resource_object(self.lib, album)
|
||||
|
|
@ -600,9 +596,7 @@ class ArtistDocument(AURADocument):
|
|||
return self.error(
|
||||
"404 Not Found",
|
||||
"No artist with the requested id.",
|
||||
"There is no artist with an id of {} in the library.".format(
|
||||
artist_id
|
||||
),
|
||||
f"There is no artist with an id of {artist_id} in the library.",
|
||||
)
|
||||
return self.single_resource_document(artist_resource)
|
||||
|
||||
|
|
@ -703,7 +697,7 @@ class ImageDocument(AURADocument):
|
|||
relationships = {}
|
||||
# Split id into [parent_type, parent_id, filename]
|
||||
id_split = image_id.split("-")
|
||||
relationships[id_split[0] + "s"] = {
|
||||
relationships[f"{id_split[0]}s"] = {
|
||||
"data": [{"type": id_split[0], "id": id_split[1]}]
|
||||
}
|
||||
|
||||
|
|
@ -727,9 +721,7 @@ class ImageDocument(AURADocument):
|
|||
return self.error(
|
||||
"404 Not Found",
|
||||
"No image with the requested id.",
|
||||
"There is no image with an id of {} in the library.".format(
|
||||
image_id
|
||||
),
|
||||
f"There is no image with an id of {image_id} in the library.",
|
||||
)
|
||||
return self.single_resource_document(image_resource)
|
||||
|
||||
|
|
@ -775,9 +767,7 @@ def audio_file(track_id):
|
|||
return AURADocument.error(
|
||||
"404 Not Found",
|
||||
"No track with the requested id.",
|
||||
"There is no track with an id of {} in the library.".format(
|
||||
track_id
|
||||
),
|
||||
f"There is no track with an id of {track_id} in the library.",
|
||||
)
|
||||
|
||||
path = os.fsdecode(track.path)
|
||||
|
|
@ -785,9 +775,8 @@ def audio_file(track_id):
|
|||
return AURADocument.error(
|
||||
"404 Not Found",
|
||||
"No audio file for the requested track.",
|
||||
(
|
||||
"There is no audio file for track {} at the expected location"
|
||||
).format(track_id),
|
||||
f"There is no audio file for track {track_id} at the expected"
|
||||
" location",
|
||||
)
|
||||
|
||||
file_mimetype = guess_type(path)[0]
|
||||
|
|
@ -795,10 +784,8 @@ def audio_file(track_id):
|
|||
return AURADocument.error(
|
||||
"500 Internal Server Error",
|
||||
"Requested audio file has an unknown mimetype.",
|
||||
(
|
||||
"The audio file for track {} has an unknown mimetype. "
|
||||
"Its file extension is {}."
|
||||
).format(track_id, path.split(".")[-1]),
|
||||
f"The audio file for track {track_id} has an unknown mimetype. "
|
||||
f"Its file extension is {path.split('.')[-1]}.",
|
||||
)
|
||||
|
||||
# Check that the Accept header contains the file's mimetype
|
||||
|
|
@ -810,10 +797,8 @@ def audio_file(track_id):
|
|||
return AURADocument.error(
|
||||
"406 Not Acceptable",
|
||||
"Unsupported MIME type or bitrate parameter in Accept header.",
|
||||
(
|
||||
"The audio file for track {} is only available as {} and "
|
||||
"bitrate parameters are not supported."
|
||||
).format(track_id, file_mimetype),
|
||||
f"The audio file for track {track_id} is only available as"
|
||||
f" {file_mimetype} and bitrate parameters are not supported.",
|
||||
)
|
||||
|
||||
return send_file(
|
||||
|
|
@ -896,9 +881,7 @@ def image_file(image_id):
|
|||
return AURADocument.error(
|
||||
"404 Not Found",
|
||||
"No image with the requested id.",
|
||||
"There is no image with an id of {} in the library".format(
|
||||
image_id
|
||||
),
|
||||
f"There is no image with an id of {image_id} in the library",
|
||||
)
|
||||
return send_file(img_path)
|
||||
|
||||
|
|
|
|||
|
|
@ -110,9 +110,7 @@ class BadFiles(BeetsPlugin):
|
|||
self._log.debug("checking path: {}", dpath)
|
||||
if not os.path.exists(item.path):
|
||||
ui.print_(
|
||||
"{}: file does not exist".format(
|
||||
ui.colorize("text_error", dpath)
|
||||
)
|
||||
f"{ui.colorize('text_error', dpath)}: file does not exist"
|
||||
)
|
||||
|
||||
# Run the checker against the file if one is found
|
||||
|
|
@ -129,37 +127,32 @@ class BadFiles(BeetsPlugin):
|
|||
except CheckerCommandError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
self._log.error(
|
||||
"command not found: {} when validating file: {}",
|
||||
e.checker,
|
||||
e.path,
|
||||
"command not found: {0.checker} when validating file: {0.path}",
|
||||
e,
|
||||
)
|
||||
else:
|
||||
self._log.error("error invoking {}: {}", e.checker, e.msg)
|
||||
self._log.error("error invoking {0.checker}: {0.msg}", e)
|
||||
return []
|
||||
|
||||
error_lines = []
|
||||
|
||||
if status > 0:
|
||||
error_lines.append(
|
||||
"{}: checker exited with status {}".format(
|
||||
ui.colorize("text_error", dpath), status
|
||||
)
|
||||
f"{ui.colorize('text_error', dpath)}: checker exited with"
|
||||
f" status {status}"
|
||||
)
|
||||
for line in output:
|
||||
error_lines.append(f" {line}")
|
||||
|
||||
elif errors > 0:
|
||||
error_lines.append(
|
||||
"{}: checker found {} errors or warnings".format(
|
||||
ui.colorize("text_warning", dpath), errors
|
||||
)
|
||||
f"{ui.colorize('text_warning', dpath)}: checker found"
|
||||
f" {status} errors or warnings"
|
||||
)
|
||||
for line in output:
|
||||
error_lines.append(f" {line}")
|
||||
elif self.verbose:
|
||||
error_lines.append(
|
||||
"{}: ok".format(ui.colorize("text_success", dpath))
|
||||
)
|
||||
error_lines.append(f"{ui.colorize('text_success', dpath)}: ok")
|
||||
|
||||
return error_lines
|
||||
|
||||
|
|
@ -180,9 +173,8 @@ class BadFiles(BeetsPlugin):
|
|||
def on_import_task_before_choice(self, task, session):
|
||||
if hasattr(task, "_badfiles_checks_failed"):
|
||||
ui.print_(
|
||||
"{} one or more files failed checks:".format(
|
||||
ui.colorize("text_warning", "BAD")
|
||||
)
|
||||
f"{ui.colorize('text_warning', 'BAD')} one or more files failed"
|
||||
" checks:"
|
||||
)
|
||||
for error in task._badfiles_checks_failed:
|
||||
for error_line in error:
|
||||
|
|
|
|||
|
|
@ -110,7 +110,7 @@ class BeatportClient:
|
|||
:returns: OAuth resource owner key and secret as unicode
|
||||
"""
|
||||
self.api.parse_authorization_response(
|
||||
"https://beets.io/auth?" + auth_data
|
||||
f"https://beets.io/auth?{auth_data}"
|
||||
)
|
||||
access_data = self.api.fetch_access_token(
|
||||
self._make_url("/identity/1/oauth/access-token")
|
||||
|
|
@ -200,8 +200,8 @@ class BeatportClient:
|
|||
def _make_url(self, endpoint: str) -> str:
|
||||
"""Get complete URL for a given API endpoint."""
|
||||
if not endpoint.startswith("/"):
|
||||
endpoint = "/" + endpoint
|
||||
return self._api_base + endpoint
|
||||
endpoint = f"/{endpoint}"
|
||||
return f"{self._api_base}{endpoint}"
|
||||
|
||||
def _get(self, endpoint: str, **kwargs) -> list[JSONDict]:
|
||||
"""Perform a GET request on a given API endpoint.
|
||||
|
|
@ -212,14 +212,10 @@ class BeatportClient:
|
|||
try:
|
||||
response = self.api.get(self._make_url(endpoint), params=kwargs)
|
||||
except Exception as e:
|
||||
raise BeatportAPIError(
|
||||
"Error connecting to Beatport API: {}".format(e)
|
||||
)
|
||||
raise BeatportAPIError(f"Error connecting to Beatport API: {e}")
|
||||
if not response:
|
||||
raise BeatportAPIError(
|
||||
"Error {0.status_code} for '{0.request.path_url}".format(
|
||||
response
|
||||
)
|
||||
f"Error {response.status_code} for '{response.request.path_url}"
|
||||
)
|
||||
return response.json()["results"]
|
||||
|
||||
|
|
@ -275,15 +271,14 @@ class BeatportRelease(BeatportObject):
|
|||
self.genre = data.get("genre")
|
||||
|
||||
if "slug" in data:
|
||||
self.url = "https://beatport.com/release/{}/{}".format(
|
||||
data["slug"], data["id"]
|
||||
self.url = (
|
||||
f"https://beatport.com/release/{data['slug']}/{data['id']}"
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return "<BeatportRelease: {} - {} ({})>".format(
|
||||
self.artists_str(),
|
||||
self.name,
|
||||
self.catalog_number,
|
||||
return (
|
||||
"<BeatportRelease: "
|
||||
f"{self.artists_str()} - {self.name} ({self.catalog_number})>"
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -311,9 +306,7 @@ class BeatportTrack(BeatportObject):
|
|||
except ValueError:
|
||||
pass
|
||||
if "slug" in data:
|
||||
self.url = "https://beatport.com/track/{}/{}".format(
|
||||
data["slug"], data["id"]
|
||||
)
|
||||
self.url = f"https://beatport.com/track/{data['slug']}/{data['id']}"
|
||||
self.track_number = data.get("trackNumber")
|
||||
self.bpm = data.get("bpm")
|
||||
self.initial_key = str((data.get("key") or {}).get("shortName"))
|
||||
|
|
@ -373,7 +366,7 @@ class BeatportPlugin(MetadataSourcePlugin):
|
|||
try:
|
||||
url = auth_client.get_authorize_url()
|
||||
except AUTH_ERRORS as e:
|
||||
self._log.debug("authentication error: {0}", e)
|
||||
self._log.debug("authentication error: {}", e)
|
||||
raise beets.ui.UserError("communication with Beatport failed")
|
||||
|
||||
beets.ui.print_("To authenticate with Beatport, visit:")
|
||||
|
|
@ -384,11 +377,11 @@ class BeatportPlugin(MetadataSourcePlugin):
|
|||
try:
|
||||
token, secret = auth_client.get_access_token(data)
|
||||
except AUTH_ERRORS as e:
|
||||
self._log.debug("authentication error: {0}", e)
|
||||
self._log.debug("authentication error: {}", e)
|
||||
raise beets.ui.UserError("Beatport token request failed")
|
||||
|
||||
# Save the token for later use.
|
||||
self._log.debug("Beatport token {0}, secret {1}", token, secret)
|
||||
self._log.debug("Beatport token {}, secret {}", token, secret)
|
||||
with open(self._tokenfile(), "w") as f:
|
||||
json.dump({"token": token, "secret": secret}, f)
|
||||
|
||||
|
|
@ -412,7 +405,7 @@ class BeatportPlugin(MetadataSourcePlugin):
|
|||
try:
|
||||
yield from self._get_releases(query)
|
||||
except BeatportAPIError as e:
|
||||
self._log.debug("API Error: {0} (query: {1})", e, query)
|
||||
self._log.debug("API Error: {} (query: {})", e, query)
|
||||
return
|
||||
|
||||
def item_candidates(
|
||||
|
|
@ -422,14 +415,14 @@ class BeatportPlugin(MetadataSourcePlugin):
|
|||
try:
|
||||
return self._get_tracks(query)
|
||||
except BeatportAPIError as e:
|
||||
self._log.debug("API Error: {0} (query: {1})", e, query)
|
||||
self._log.debug("API Error: {} (query: {})", e, query)
|
||||
return []
|
||||
|
||||
def album_for_id(self, album_id: str):
|
||||
"""Fetches a release by its Beatport ID and returns an AlbumInfo object
|
||||
or None if the query is not a valid ID or release is not found.
|
||||
"""
|
||||
self._log.debug("Searching for release {0}", album_id)
|
||||
self._log.debug("Searching for release {}", album_id)
|
||||
|
||||
if not (release_id := self._extract_id(album_id)):
|
||||
self._log.debug("Not a valid Beatport release ID.")
|
||||
|
|
@ -444,7 +437,7 @@ class BeatportPlugin(MetadataSourcePlugin):
|
|||
"""Fetches a track by its Beatport ID and returns a TrackInfo object
|
||||
or None if the track is not a valid Beatport ID or track is not found.
|
||||
"""
|
||||
self._log.debug("Searching for track {0}", track_id)
|
||||
self._log.debug("Searching for track {}", track_id)
|
||||
# TODO: move to extractor
|
||||
match = re.search(r"(^|beatport\.com/track/.+/)(\d+)$", track_id)
|
||||
if not match:
|
||||
|
|
|
|||
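Both of these files rely on beets' deferred str.format-style logging, which is why the log calls above pass values as extra arguments instead of pre-formatting them. A minimal sketch of that idea (the class names here are illustrative, not beets' actual shim):

.. code-block:: python

    import logging

    class _Message:
        """Defer str.format interpolation until a record is emitted."""

        def __init__(self, fmt, args):
            self.fmt, self.args = fmt, args

        def __str__(self):
            return self.fmt.format(*self.args)

    class StrFormatLogger(logging.Logger):
        def _log(self, level, msg, args, **kwargs):
            # Wrap the message so interpolation happens lazily.
            super()._log(level, _Message(msg, args), (), **kwargs)

    logging.setLoggerClass(StrFormatLogger)
    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger("demo")
    log.debug("authentication error: {}", "timeout")
    # -> DEBUG:demo:authentication error: timeout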
|
|
@ -52,7 +52,7 @@ except ImportError as e:
|
|||
PROTOCOL_VERSION = "0.16.0"
|
||||
BUFSIZE = 1024
|
||||
|
||||
HELLO = "OK MPD %s" % PROTOCOL_VERSION
|
||||
HELLO = f"OK MPD {PROTOCOL_VERSION}"
|
||||
CLIST_BEGIN = "command_list_begin"
|
||||
CLIST_VERBOSE_BEGIN = "command_list_ok_begin"
|
||||
CLIST_END = "command_list_end"
|
||||
|
|
@ -282,7 +282,7 @@ class BaseServer:
|
|||
if not self.ctrl_sock:
|
||||
self.ctrl_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self.ctrl_sock.connect((self.ctrl_host, self.ctrl_port))
|
||||
self.ctrl_sock.sendall((message + "\n").encode("utf-8"))
|
||||
self.ctrl_sock.sendall((f"{message}\n").encode("utf-8"))
|
||||
|
||||
def _send_event(self, event):
|
||||
"""Notify subscribed connections of an event."""
|
||||
|
|
@ -376,13 +376,13 @@ class BaseServer:
|
|||
if self.password and not conn.authenticated:
|
||||
# Not authenticated. Show limited list of commands.
|
||||
for cmd in SAFE_COMMANDS:
|
||||
yield "command: " + cmd
|
||||
yield f"command: {cmd}"
|
||||
|
||||
else:
|
||||
# Authenticated. Show all commands.
|
||||
for func in dir(self):
|
||||
if func.startswith("cmd_"):
|
||||
yield "command: " + func[4:]
|
||||
yield f"command: {func[4:]}"
|
||||
|
||||
def cmd_notcommands(self, conn):
|
||||
"""Lists all unavailable commands."""
|
||||
|
|
@ -392,7 +392,7 @@ class BaseServer:
|
|||
if func.startswith("cmd_"):
|
||||
cmd = func[4:]
|
||||
if cmd not in SAFE_COMMANDS:
|
||||
yield "command: " + cmd
|
||||
yield f"command: {cmd}"
|
||||
|
||||
else:
|
||||
# Authenticated. No commands are unavailable.
|
||||
|
|
@ -406,22 +406,22 @@ class BaseServer:
|
|||
playlist, playlistlength, and xfade.
|
||||
"""
|
||||
yield (
|
||||
"repeat: " + str(int(self.repeat)),
|
||||
"random: " + str(int(self.random)),
|
||||
"consume: " + str(int(self.consume)),
|
||||
"single: " + str(int(self.single)),
|
||||
"playlist: " + str(self.playlist_version),
|
||||
"playlistlength: " + str(len(self.playlist)),
|
||||
"mixrampdb: " + str(self.mixrampdb),
|
||||
f"repeat: {int(self.repeat)}",
|
||||
f"random: {int(self.random)}",
|
||||
f"consume: {int(self.consume)}",
|
||||
f"single: {int(self.single)}",
|
||||
f"playlist: {self.playlist_version}",
|
||||
f"playlistlength: {len(self.playlist)}",
|
||||
f"mixrampdb: {self.mixrampdb}",
|
||||
)
|
||||
|
||||
if self.volume > 0:
|
||||
yield "volume: " + str(self.volume)
|
||||
yield f"volume: {self.volume}"
|
||||
|
||||
if not math.isnan(self.mixrampdelay):
|
||||
yield "mixrampdelay: " + str(self.mixrampdelay)
|
||||
yield f"mixrampdelay: {self.mixrampdelay}"
|
||||
if self.crossfade > 0:
|
||||
yield "xfade: " + str(self.crossfade)
|
||||
yield f"xfade: {self.crossfade}"
|
||||
|
||||
if self.current_index == -1:
|
||||
state = "stop"
|
||||
|
|
@ -429,20 +429,20 @@ class BaseServer:
|
|||
state = "pause"
|
||||
else:
|
||||
state = "play"
|
||||
yield "state: " + state
|
||||
yield f"state: {state}"
|
||||
|
||||
if self.current_index != -1: # i.e., paused or playing
|
||||
current_id = self._item_id(self.playlist[self.current_index])
|
||||
yield "song: " + str(self.current_index)
|
||||
yield "songid: " + str(current_id)
|
||||
yield f"song: {self.current_index}"
|
||||
yield f"songid: {current_id}"
|
||||
if len(self.playlist) > self.current_index + 1:
|
||||
# If there's a next song, report its index too.
|
||||
next_id = self._item_id(self.playlist[self.current_index + 1])
|
||||
yield "nextsong: " + str(self.current_index + 1)
|
||||
yield "nextsongid: " + str(next_id)
|
||||
yield f"nextsong: {self.current_index + 1}"
|
||||
yield f"nextsongid: {next_id}"
|
||||
|
||||
if self.error:
|
||||
yield "error: " + self.error
|
||||
yield f"error: {self.error}"
|
||||
|
||||
def cmd_clearerror(self, conn):
|
||||
"""Removes the persistent error state of the server. This
|
||||
|
|
@ -522,7 +522,7 @@ class BaseServer:
|
|||
|
||||
def cmd_replay_gain_status(self, conn):
|
||||
"""Get the replaygain mode."""
|
||||
yield "replay_gain_mode: " + str(self.replay_gain_mode)
|
||||
yield f"replay_gain_mode: {self.replay_gain_mode}"
|
||||
|
||||
def cmd_clear(self, conn):
|
||||
"""Clear the playlist."""
|
||||
|
|
@ -643,8 +643,8 @@ class BaseServer:
|
|||
Also a dummy implementation.
|
||||
"""
|
||||
for idx, track in enumerate(self.playlist):
|
||||
yield "cpos: " + str(idx)
|
||||
yield "Id: " + str(track.id)
|
||||
yield f"cpos: {idx}"
|
||||
yield f"Id: {track.id}"
|
||||
|
||||
def cmd_currentsong(self, conn):
|
||||
"""Sends information about the currently-playing song."""
|
||||
|
|
@ -759,11 +759,11 @@ class Connection:
|
|||
"""Create a new connection for the accepted socket `client`."""
|
||||
self.server = server
|
||||
self.sock = sock
|
||||
self.address = "{}:{}".format(*sock.sock.getpeername())
|
||||
self.address = ":".join(map(str, sock.sock.getpeername()))
|
||||
|
||||
def debug(self, message, kind=" "):
|
||||
"""Log a debug message about this connection."""
|
||||
self.server._log.debug("{}[{}]: {}", kind, self.address, message)
|
||||
self.server._log.debug("{}[{.address}]: {}", kind, self, message)
|
||||
|
||||
def run(self):
|
||||
pass
|
||||
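The `{.address}` fields above lean on str.format attribute access, so the connection object itself can be handed to the logger. A sketch with a stand-in connection:

.. code-block:: python

    class _Conn:
        address = "127.0.0.1:6600"  # illustrative peer address

    print("{}[{.address}]: {}".format("z", _Conn(), "awaiting"))
    # -> z[127.0.0.1:6600]: awaiting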
|
|
@ -899,9 +899,7 @@ class MPDConnection(Connection):
|
|||
return
|
||||
except BPDIdleError as e:
|
||||
self.idle_subscriptions = e.subsystems
|
||||
self.debug(
|
||||
"awaiting: {}".format(" ".join(e.subsystems)), kind="z"
|
||||
)
|
||||
self.debug(f"awaiting: {' '.join(e.subsystems)}", kind="z")
|
||||
yield bluelet.call(self.server.dispatch_events())
|
||||
|
||||
|
||||
|
|
@ -913,7 +911,7 @@ class ControlConnection(Connection):
|
|||
super().__init__(server, sock)
|
||||
|
||||
def debug(self, message, kind=" "):
|
||||
self.server._log.debug("CTRL {}[{}]: {}", kind, self.address, message)
|
||||
self.server._log.debug("CTRL {}[{.address}]: {}", kind, self, message)
|
||||
|
||||
def run(self):
|
||||
"""Listen for control commands and delegate to `ctrl_*` methods."""
|
||||
|
|
@ -933,7 +931,7 @@ class ControlConnection(Connection):
|
|||
func = command.delegate("ctrl_", self)
|
||||
yield bluelet.call(func(*command.args))
|
||||
except (AttributeError, TypeError) as e:
|
||||
yield self.send("ERROR: {}".format(e.args[0]))
|
||||
yield self.send(f"ERROR: {e.args[0]}")
|
||||
except Exception:
|
||||
yield self.send(
|
||||
["ERROR: server error", traceback.format_exc().rstrip()]
|
||||
|
|
@ -992,7 +990,7 @@ class Command:
|
|||
of arguments.
|
||||
"""
|
||||
# Attempt to get correct command function.
|
||||
func_name = prefix + self.name
|
||||
func_name = f"{prefix}{self.name}"
|
||||
if not hasattr(target, func_name):
|
||||
raise AttributeError(f'unknown command "{self.name}"')
|
||||
func = getattr(target, func_name)
|
||||
|
|
@ -1011,7 +1009,7 @@ class Command:
|
|||
# If the command accepts a variable number of arguments skip the check.
|
||||
if wrong_num and not argspec.varargs:
|
||||
raise TypeError(
|
||||
'wrong number of arguments for "{}"'.format(self.name),
|
||||
f'wrong number of arguments for "{self.name}"',
|
||||
self.name,
|
||||
)
|
||||
|
||||
|
|
@ -1110,10 +1108,8 @@ class Server(BaseServer):
|
|||
self.lib = library
|
||||
self.player = gstplayer.GstPlayer(self.play_finished)
|
||||
self.cmd_update(None)
|
||||
log.info("Server ready and listening on {}:{}".format(host, port))
|
||||
log.debug(
|
||||
"Listening for control signals on {}:{}".format(host, ctrl_port)
|
||||
)
|
||||
log.info("Server ready and listening on {}:{}", host, port)
|
||||
log.debug("Listening for control signals on {}:{}", host, ctrl_port)
|
||||
|
||||
def run(self):
|
||||
self.player.run()
|
||||
|
|
@ -1128,23 +1124,21 @@ class Server(BaseServer):
|
|||
|
||||
def _item_info(self, item):
|
||||
info_lines = [
|
||||
"file: " + as_string(item.destination(relative_to_libdir=True)),
|
||||
"Time: " + str(int(item.length)),
|
||||
"duration: " + f"{item.length:.3f}",
|
||||
"Id: " + str(item.id),
|
||||
f"file: {as_string(item.destination(relative_to_libdir=True))}",
|
||||
f"Time: {int(item.length)}",
|
||||
"duration: {item.length:.3f}",
|
||||
f"Id: {item.id}",
|
||||
]
|
||||
|
||||
try:
|
||||
pos = self._id_to_index(item.id)
|
||||
info_lines.append("Pos: " + str(pos))
|
||||
info_lines.append(f"Pos: {pos}")
|
||||
except ArgumentNotFoundError:
|
||||
# Don't include position if not in playlist.
|
||||
pass
|
||||
|
||||
for tagtype, field in self.tagtype_map.items():
|
||||
info_lines.append(
|
||||
"{}: {}".format(tagtype, str(getattr(item, field)))
|
||||
)
|
||||
info_lines.append(f"{tagtype}: {getattr(item, field)}")
|
||||
|
||||
return info_lines
|
||||
|
||||
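Format specs such as `:.3f` carry over unchanged into f-strings. A quick illustration with a made-up track length:

.. code-block:: python

    length = 241.27439  # seconds, illustrative
    print(f"Time: {int(length)}")     # -> Time: 241
    print(f"duration: {length:.3f}")  # -> duration: 241.274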
|
|
@ -1207,7 +1201,7 @@ class Server(BaseServer):
|
|||
|
||||
def _path_join(self, p1, p2):
|
||||
"""Smashes together two BPD paths."""
|
||||
out = p1 + "/" + p2
|
||||
out = f"{p1}/{p2}"
|
||||
return out.replace("//", "/").replace("//", "/")
|
||||
|
||||
def cmd_lsinfo(self, conn, path="/"):
|
||||
|
|
@ -1225,7 +1219,7 @@ class Server(BaseServer):
|
|||
if dirpath.startswith("/"):
|
||||
# Strip leading slash (libmpc rejects this).
|
||||
dirpath = dirpath[1:]
|
||||
yield "directory: %s" % dirpath
|
||||
yield f"directory: {dirpath}"
|
||||
|
||||
def _listall(self, basepath, node, info=False):
|
||||
"""Helper function for recursive listing. If info, show
|
||||
|
|
@ -1237,7 +1231,7 @@ class Server(BaseServer):
|
|||
item = self.lib.get_item(node)
|
||||
yield self._item_info(item)
|
||||
else:
|
||||
yield "file: " + basepath
|
||||
yield f"file: {basepath}"
|
||||
else:
|
||||
# List a directory. Recurse into both directories and files.
|
||||
for name, itemid in sorted(node.files.items()):
|
||||
|
|
@ -1246,7 +1240,7 @@ class Server(BaseServer):
|
|||
yield from self._listall(newpath, itemid, info)
|
||||
for name, subdir in sorted(node.dirs.items()):
|
||||
newpath = self._path_join(basepath, name)
|
||||
yield "directory: " + newpath
|
||||
yield f"directory: {newpath}"
|
||||
yield from self._listall(newpath, subdir, info)
|
||||
|
||||
def cmd_listall(self, conn, path="/"):
|
||||
|
|
@ -1280,7 +1274,7 @@ class Server(BaseServer):
|
|||
for item in self._all_items(self._resolve_path(path)):
|
||||
self.playlist.append(item)
|
||||
if send_id:
|
||||
yield "Id: " + str(item.id)
|
||||
yield f"Id: {item.id}"
|
||||
self.playlist_version += 1
|
||||
self._send_event("playlist")
|
||||
|
||||
|
|
@ -1302,20 +1296,13 @@ class Server(BaseServer):
|
|||
item = self.playlist[self.current_index]
|
||||
|
||||
yield (
|
||||
"bitrate: " + str(item.bitrate / 1000),
|
||||
"audio: {}:{}:{}".format(
|
||||
str(item.samplerate),
|
||||
str(item.bitdepth),
|
||||
str(item.channels),
|
||||
),
|
||||
f"bitrate: {item.bitrate / 1000}",
|
||||
f"audio: {item.samplerate}:{item.bitdepth}:{item.channels}",
|
||||
)
|
||||
|
||||
(pos, total) = self.player.time()
|
||||
yield (
|
||||
"time: {}:{}".format(
|
||||
str(int(pos)),
|
||||
str(int(total)),
|
||||
),
|
||||
f"time: {int(pos)}:{int(total)}",
|
||||
"elapsed: " + f"{pos:.3f}",
|
||||
"duration: " + f"{total:.3f}",
|
||||
)
|
||||
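Replacement fields may hold arbitrary expressions, which removes the `str()` calls the old code needed. Sample values for illustration:

.. code-block:: python

    bitrate, samplerate, bitdepth, channels = 320000, 44100, 16, 2
    print(f"bitrate: {bitrate / 1000}")                  # -> bitrate: 320.0
    print(f"audio: {samplerate}:{bitdepth}:{channels}")  # -> audio: 44100:16:2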
|
|
@ -1335,13 +1322,13 @@ class Server(BaseServer):
|
|||
artists, albums, songs, totaltime = tx.query(statement)[0]
|
||||
|
||||
yield (
|
||||
"artists: " + str(artists),
|
||||
"albums: " + str(albums),
|
||||
"songs: " + str(songs),
|
||||
"uptime: " + str(int(time.time() - self.startup_time)),
|
||||
"playtime: " + "0", # Missing.
|
||||
"db_playtime: " + str(int(totaltime)),
|
||||
"db_update: " + str(int(self.updated_time)),
|
||||
f"artists: {artists}",
|
||||
f"albums: {albums}",
|
||||
f"songs: {songs}",
|
||||
f"uptime: {int(time.time() - self.startup_time)}",
|
||||
"playtime: 0", # Missing.
|
||||
f"db_playtime: {int(totaltime)}",
|
||||
f"db_update: {int(self.updated_time)}",
|
||||
)
|
||||
|
||||
def cmd_decoders(self, conn):
|
||||
|
|
@ -1383,7 +1370,7 @@ class Server(BaseServer):
|
|||
searching.
|
||||
"""
|
||||
for tag in self.tagtype_map:
|
||||
yield "tagtype: " + tag
|
||||
yield f"tagtype: {tag}"
|
||||
|
||||
def _tagtype_lookup(self, tag):
|
||||
"""Uses `tagtype_map` to look up the beets column name for an
|
||||
|
|
@ -1458,12 +1445,9 @@ class Server(BaseServer):
|
|||
|
||||
clause, subvals = query.clause()
|
||||
statement = (
|
||||
"SELECT DISTINCT "
|
||||
+ show_key
|
||||
+ " FROM items WHERE "
|
||||
+ clause
|
||||
+ " ORDER BY "
|
||||
+ show_key
|
||||
f"SELECT DISTINCT {show_key}"
|
||||
f" FROM items WHERE {clause}"
|
||||
f" ORDER BY {show_key}"
|
||||
)
|
||||
self._log.debug(statement)
|
||||
with self.lib.transaction() as tx:
|
||||
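Adjacent f-string literals concatenate at compile time, which keeps the statement readable without `+`. A sketch with illustrative column and clause values:

.. code-block:: python

    show_key, clause = "album", "albumartist = ?"
    statement = (
        f"SELECT DISTINCT {show_key}"
        f" FROM items WHERE {clause}"
        f" ORDER BY {show_key}"
    )
    print(statement)
    # -> SELECT DISTINCT album FROM items WHERE albumartist = ? ORDER BY album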
|
|
@ -1473,7 +1457,7 @@ class Server(BaseServer):
|
|||
if not row[0]:
|
||||
# Skip any empty values of the field.
|
||||
continue
|
||||
yield show_tag_canon + ": " + str(row[0])
|
||||
yield f"{show_tag_canon}: {row[0]}"
|
||||
|
||||
def cmd_count(self, conn, tag, value):
|
||||
"""Returns the number and total time of songs matching the
|
||||
|
|
@ -1487,8 +1471,8 @@ class Server(BaseServer):
|
|||
):
|
||||
songs += 1
|
||||
playtime += item.length
|
||||
yield "songs: " + str(songs)
|
||||
yield "playtime: " + str(int(playtime))
|
||||
yield f"songs: {songs}"
|
||||
yield f"playtime: {int(playtime)}"
|
||||
|
||||
# Persistent playlist manipulation. In MPD this is an optional feature so
|
||||
# these dummy implementations match MPD's behaviour with the feature off.
|
||||
|
|
|
|||
|
|
@ -129,7 +129,7 @@ class GstPlayer:
|
|||
self.player.set_state(Gst.State.NULL)
|
||||
if isinstance(path, str):
|
||||
path = path.encode("utf-8")
|
||||
uri = "file://" + urllib.parse.quote(path)
|
||||
uri = f"file://{urllib.parse.quote(path)}"
|
||||
self.player.set_property("uri", uri)
|
||||
self.player.set_state(Gst.State.PLAYING)
|
||||
self.playing = True
|
||||
|
|
|
|||
|
|
@ -73,12 +73,12 @@ class BPMPlugin(BeetsPlugin):
|
|||
|
||||
item = items[0]
|
||||
if item["bpm"]:
|
||||
self._log.info("Found bpm {0}", item["bpm"])
|
||||
self._log.info("Found bpm {}", item["bpm"])
|
||||
if not overwrite:
|
||||
return
|
||||
|
||||
self._log.info(
|
||||
"Press Enter {0} times to the rhythm or Ctrl-D to exit",
|
||||
"Press Enter {} times to the rhythm or Ctrl-D to exit",
|
||||
self.config["max_strokes"].get(int),
|
||||
)
|
||||
new_bpm = bpm(self.config["max_strokes"].get(int))
|
||||
|
|
@ -86,4 +86,4 @@ class BPMPlugin(BeetsPlugin):
|
|||
if write:
|
||||
item.try_write()
|
||||
item.store()
|
||||
self._log.info("Added new bpm {0}", item["bpm"])
|
||||
self._log.info("Added new bpm {}", item["bpm"])
|
||||
|
|
|
|||
|
|
@ -82,8 +82,8 @@ class BPSyncPlugin(BeetsPlugin):
|
|||
|
||||
if not self.is_beatport_track(item):
|
||||
self._log.info(
|
||||
"Skipping non-{} singleton: {}",
|
||||
self.beatport_plugin.data_source,
|
||||
"Skipping non-{.beatport_plugin.data_source} singleton: {}",
|
||||
self,
|
||||
item,
|
||||
)
|
||||
continue
|
||||
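Chained attribute access also works inside format fields, as in `{.beatport_plugin.data_source}` above. A sketch with stand-in objects:

.. code-block:: python

    class _Source:
        data_source = "Beatport"

    class _Sync:
        beatport_plugin = _Source()

    msg = "Skipping non-{.beatport_plugin.data_source} singleton: {}"
    print(msg.format(_Sync(), "track.mp3"))
    # -> Skipping non-Beatport singleton: track.mp3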
|
|
@ -107,8 +107,8 @@ class BPSyncPlugin(BeetsPlugin):
|
|||
return False
|
||||
if not album.mb_albumid.isnumeric():
|
||||
self._log.info(
|
||||
"Skipping album with invalid {} ID: {}",
|
||||
self.beatport_plugin.data_source,
|
||||
"Skipping album with invalid {.beatport_plugin.data_source} ID: {}",
|
||||
self,
|
||||
album,
|
||||
)
|
||||
return False
|
||||
|
|
@ -117,8 +117,8 @@ class BPSyncPlugin(BeetsPlugin):
|
|||
return items
|
||||
if not all(self.is_beatport_track(item) for item in items):
|
||||
self._log.info(
|
||||
"Skipping non-{} release: {}",
|
||||
self.beatport_plugin.data_source,
|
||||
"Skipping non-{.beatport_plugin.data_source} release: {}",
|
||||
self,
|
||||
album,
|
||||
)
|
||||
return False
|
||||
|
|
@ -139,9 +139,7 @@ class BPSyncPlugin(BeetsPlugin):
|
|||
albuminfo = self.beatport_plugin.album_for_id(album.mb_albumid)
|
||||
if not albuminfo:
|
||||
self._log.info(
|
||||
"Release ID {} not found for album {}",
|
||||
album.mb_albumid,
|
||||
album,
|
||||
"Release ID {0.mb_albumid} not found for album {0}", album
|
||||
)
|
||||
continue
|
||||
|
||||
|
|
|
|||
|
|
@ -41,7 +41,7 @@ def span_from_str(span_str):
|
|||
def normalize_year(d, yearfrom):
|
||||
"""Convert string to a 4 digits year"""
|
||||
if yearfrom < 100:
|
||||
raise BucketError("%d must be expressed on 4 digits" % yearfrom)
|
||||
raise BucketError(f"{yearfrom} must be expressed on 4 digits")
|
||||
|
||||
# if two digits only, pick closest year that ends by these two
|
||||
# digits starting from yearfrom
|
||||
|
|
@ -55,14 +55,13 @@ def span_from_str(span_str):
|
|||
years = [int(x) for x in re.findall(r"\d+", span_str)]
|
||||
if not years:
|
||||
raise ui.UserError(
|
||||
"invalid range defined for year bucket '%s': no "
|
||||
"year found" % span_str
|
||||
f"invalid range defined for year bucket {span_str!r}: no year found"
|
||||
)
|
||||
try:
|
||||
years = [normalize_year(x, years[0]) for x in years]
|
||||
except BucketError as exc:
|
||||
raise ui.UserError(
|
||||
"invalid range defined for year bucket '%s': %s" % (span_str, exc)
|
||||
f"invalid range defined for year bucket {span_str!r}: {exc}"
|
||||
)
|
||||
|
||||
res = {"from": years[0], "str": span_str}
|
||||
|
|
@ -125,22 +124,19 @@ def str2fmt(s):
|
|||
"fromnchars": len(m.group("fromyear")),
|
||||
"tonchars": len(m.group("toyear")),
|
||||
}
|
||||
res["fmt"] = "{}%s{}{}{}".format(
|
||||
m.group("bef"),
|
||||
m.group("sep"),
|
||||
"%s" if res["tonchars"] else "",
|
||||
m.group("after"),
|
||||
res["fmt"] = (
|
||||
f"{m['bef']}{{}}{m['sep']}{'{}' if res['tonchars'] else ''}{m['after']}"
|
||||
)
|
||||
return res
|
||||
|
||||
|
||||
def format_span(fmt, yearfrom, yearto, fromnchars, tonchars):
|
||||
"""Return a span string representation."""
|
||||
args = str(yearfrom)[-fromnchars:]
|
||||
args = [str(yearfrom)[-fromnchars:]]
|
||||
if tonchars:
|
||||
args = (str(yearfrom)[-fromnchars:], str(yearto)[-tonchars:])
|
||||
args.append(str(yearto)[-tonchars:])
|
||||
|
||||
return fmt % args
|
||||
return fmt.format(*args)
|
||||
|
||||
|
||||
def extract_modes(spans):
|
||||
|
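The rewritten `format_span` builds a variable-length argument list and unpacks it, so one code path serves both single-year and ranged buckets. Re-stated here with illustrative spans:

.. code-block:: python

    def format_span(fmt, yearfrom, yearto, fromnchars, tonchars):
        args = [str(yearfrom)[-fromnchars:]]
        if tonchars:
            args.append(str(yearto)[-tonchars:])
        return fmt.format(*args)

    print(format_span("{}-{}", 1990, 1999, 4, 2))  # -> 1990-99
    print(format_span("{}s", 1990, None, 4, 0))    # -> 1990s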
|
@ -169,14 +165,12 @@ def build_alpha_spans(alpha_spans_str, alpha_regexs):
|
|||
else:
|
||||
raise ui.UserError(
|
||||
"invalid range defined for alpha bucket "
|
||||
"'%s': no alphanumeric character found" % elem
|
||||
f"'{elem}': no alphanumeric character found"
|
||||
)
|
||||
spans.append(
|
||||
re.compile(
|
||||
"^["
|
||||
+ ASCII_DIGITS[begin_index : end_index + 1]
|
||||
+ ASCII_DIGITS[begin_index : end_index + 1].upper()
|
||||
+ "]"
|
||||
rf"^[{ASCII_DIGITS[begin_index : end_index + 1]}]",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
)
|
||||
return spans
|
||||
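Passing `re.IGNORECASE` replaces the old upper-cased duplicate of the character range. A sketch with an illustrative range:

.. code-block:: python

    import re

    letters = "abc"  # stands in for the ASCII_DIGITS slice above
    span = re.compile(rf"^[{letters}]", re.IGNORECASE)
    assert span.match("Bartok")
    assert not span.match("Dvorak")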
|
|
|
|||
|
|
@ -90,7 +90,7 @@ def acoustid_match(log, path):
|
|||
duration, fp = acoustid.fingerprint_file(util.syspath(path))
|
||||
except acoustid.FingerprintGenerationError as exc:
|
||||
log.error(
|
||||
"fingerprinting of {0} failed: {1}",
|
||||
"fingerprinting of {} failed: {}",
|
||||
util.displayable_path(repr(path)),
|
||||
exc,
|
||||
)
|
||||
|
|
@ -103,12 +103,12 @@ def acoustid_match(log, path):
|
|||
)
|
||||
except acoustid.AcoustidError as exc:
|
||||
log.debug(
|
||||
"fingerprint matching {0} failed: {1}",
|
||||
"fingerprint matching {} failed: {}",
|
||||
util.displayable_path(repr(path)),
|
||||
exc,
|
||||
)
|
||||
return None
|
||||
log.debug("chroma: fingerprinted {0}", util.displayable_path(repr(path)))
|
||||
log.debug("chroma: fingerprinted {}", util.displayable_path(repr(path)))
|
||||
|
||||
# Ensure the response is usable and parse it.
|
||||
if res["status"] != "ok" or not res.get("results"):
|
||||
|
|
@ -146,7 +146,7 @@ def acoustid_match(log, path):
|
|||
release_ids = [rel["id"] for rel in releases]
|
||||
|
||||
log.debug(
|
||||
"matched recordings {0} on releases {1}", recording_ids, release_ids
|
||||
"matched recordings {} on releases {}", recording_ids, release_ids
|
||||
)
|
||||
_matches[path] = recording_ids, release_ids
|
||||
|
||||
|
|
@ -211,7 +211,7 @@ class AcoustidPlugin(MetadataSourcePlugin):
|
|||
if album:
|
||||
albums.append(album)
|
||||
|
||||
self._log.debug("acoustid album candidates: {0}", len(albums))
|
||||
self._log.debug("acoustid album candidates: {}", len(albums))
|
||||
return albums
|
||||
|
||||
def item_candidates(self, item, artist, title) -> Iterable[TrackInfo]:
|
||||
|
|
@ -224,7 +224,7 @@ class AcoustidPlugin(MetadataSourcePlugin):
|
|||
track = self.mb.track_for_id(recording_id)
|
||||
if track:
|
||||
tracks.append(track)
|
||||
self._log.debug("acoustid item candidates: {0}", len(tracks))
|
||||
self._log.debug("acoustid item candidates: {}", len(tracks))
|
||||
return tracks
|
||||
|
||||
def album_for_id(self, *args, **kwargs):
|
||||
|
|
@ -292,11 +292,11 @@ def submit_items(log, userkey, items, chunksize=64):
|
|||
|
||||
def submit_chunk():
|
||||
"""Submit the current accumulated fingerprint data."""
|
||||
log.info("submitting {0} fingerprints", len(data))
|
||||
log.info("submitting {} fingerprints", len(data))
|
||||
try:
|
||||
acoustid.submit(API_KEY, userkey, data, timeout=10)
|
||||
except acoustid.AcoustidError as exc:
|
||||
log.warning("acoustid submission error: {0}", exc)
|
||||
log.warning("acoustid submission error: {}", exc)
|
||||
del data[:]
|
||||
|
||||
for item in items:
|
||||
|
|
@ -343,31 +343,23 @@ def fingerprint_item(log, item, write=False):
|
|||
"""
|
||||
# Get a fingerprint and length for this track.
|
||||
if not item.length:
|
||||
log.info("{0}: no duration available", util.displayable_path(item.path))
|
||||
log.info("{.filepath}: no duration available", item)
|
||||
elif item.acoustid_fingerprint:
|
||||
if write:
|
||||
log.info(
|
||||
"{0}: fingerprint exists, skipping",
|
||||
util.displayable_path(item.path),
|
||||
)
|
||||
log.info("{.filepath}: fingerprint exists, skipping", item)
|
||||
else:
|
||||
log.info(
|
||||
"{0}: using existing fingerprint",
|
||||
util.displayable_path(item.path),
|
||||
)
|
||||
log.info("{.filepath}: using existing fingerprint", item)
|
||||
return item.acoustid_fingerprint
|
||||
else:
|
||||
log.info("{0}: fingerprinting", util.displayable_path(item.path))
|
||||
log.info("{.filepath}: fingerprinting", item)
|
||||
try:
|
||||
_, fp = acoustid.fingerprint_file(util.syspath(item.path))
|
||||
item.acoustid_fingerprint = fp.decode()
|
||||
if write:
|
||||
log.info(
|
||||
"{0}: writing fingerprint", util.displayable_path(item.path)
|
||||
)
|
||||
log.info("{.filepath}: writing fingerprint", item)
|
||||
item.try_write()
|
||||
if item._db:
|
||||
item.store()
|
||||
return item.acoustid_fingerprint
|
||||
except acoustid.FingerprintGenerationError as exc:
|
||||
log.info("fingerprint generation failed: {0}", exc)
|
||||
log.info("fingerprint generation failed: {}", exc)
|
||||
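The `{.filepath}` fields keep these call sites down to a single argument. A sketch with a stand-in item:

.. code-block:: python

    class _Item:
        filepath = "/music/ab/track.flac"  # illustrative path

    print("{.filepath}: fingerprinting".format(_Item()))
    # -> /music/ab/track.flac: fingerprinting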
|
|
|
|||
|
|
@ -64,9 +64,7 @@ def get_format(fmt=None):
|
|||
command = format_info["command"]
|
||||
extension = format_info.get("extension", fmt)
|
||||
except KeyError:
|
||||
raise ui.UserError(
|
||||
'convert: format {} needs the "command" field'.format(fmt)
|
||||
)
|
||||
raise ui.UserError(f'convert: format {fmt} needs the "command" field')
|
||||
except ConfigTypeError:
|
||||
command = config["convert"]["formats"][fmt].get(str)
|
||||
extension = fmt
|
||||
|
|
@ -77,8 +75,8 @@ def get_format(fmt=None):
|
|||
command = config["convert"]["command"].as_str()
|
||||
elif "opts" in keys:
|
||||
# Undocumented option for backwards compatibility with < 1.3.1.
|
||||
command = "ffmpeg -i $source -y {} $dest".format(
|
||||
config["convert"]["opts"].as_str()
|
||||
command = (
|
||||
f"ffmpeg -i $source -y {config['convert']['opts'].as_str()} $dest"
|
||||
)
|
||||
if "extension" in keys:
|
||||
extension = config["convert"]["extension"].as_str()
|
||||
|
|
@ -125,18 +123,25 @@ class ConvertPlugin(BeetsPlugin):
|
|||
"id3v23": "inherit",
|
||||
"formats": {
|
||||
"aac": {
|
||||
"command": "ffmpeg -i $source -y -vn -acodec aac "
|
||||
"-aq 1 $dest",
|
||||
"command": (
|
||||
"ffmpeg -i $source -y -vn -acodec aac -aq 1 $dest"
|
||||
),
|
||||
"extension": "m4a",
|
||||
},
|
||||
"alac": {
|
||||
"command": "ffmpeg -i $source -y -vn -acodec alac $dest",
|
||||
"command": (
|
||||
"ffmpeg -i $source -y -vn -acodec alac $dest"
|
||||
),
|
||||
"extension": "m4a",
|
||||
},
|
||||
"flac": "ffmpeg -i $source -y -vn -acodec flac $dest",
|
||||
"mp3": "ffmpeg -i $source -y -vn -aq 2 $dest",
|
||||
"opus": "ffmpeg -i $source -y -vn -acodec libopus -ab 96k $dest",
|
||||
"ogg": "ffmpeg -i $source -y -vn -acodec libvorbis -aq 3 $dest",
|
||||
"opus": (
|
||||
"ffmpeg -i $source -y -vn -acodec libopus -ab 96k $dest"
|
||||
),
|
||||
"ogg": (
|
||||
"ffmpeg -i $source -y -vn -acodec libvorbis -aq 3 $dest"
|
||||
),
|
||||
"wma": "ffmpeg -i $source -y -vn -acodec wmav2 -vn $dest",
|
||||
},
|
||||
"max_bitrate": None,
|
||||
|
|
@ -171,16 +176,17 @@ class ConvertPlugin(BeetsPlugin):
|
|||
"--threads",
|
||||
action="store",
|
||||
type="int",
|
||||
help="change the number of threads, \
|
||||
defaults to maximum available processors",
|
||||
help=(
|
||||
"change the number of threads, defaults to maximum available"
|
||||
" processors"
|
||||
),
|
||||
)
|
||||
cmd.parser.add_option(
|
||||
"-k",
|
||||
"--keep-new",
|
||||
action="store_true",
|
||||
dest="keep_new",
|
||||
help="keep only the converted \
|
||||
and move the old files",
|
||||
help="keep only the converted and move the old files",
|
||||
)
|
||||
cmd.parser.add_option(
|
||||
"-d", "--dest", action="store", help="set the destination directory"
|
||||
|
|
@ -204,16 +210,16 @@ class ConvertPlugin(BeetsPlugin):
|
|||
"--link",
|
||||
action="store_true",
|
||||
dest="link",
|
||||
help="symlink files that do not \
|
||||
need transcoding.",
|
||||
help="symlink files that do not need transcoding.",
|
||||
)
|
||||
cmd.parser.add_option(
|
||||
"-H",
|
||||
"--hardlink",
|
||||
action="store_true",
|
||||
dest="hardlink",
|
||||
help="hardlink files that do not \
|
||||
need transcoding. Overrides --link.",
|
||||
help=(
|
||||
"hardlink files that do not need transcoding. Overrides --link."
|
||||
),
|
||||
)
|
||||
cmd.parser.add_option(
|
||||
"-m",
|
||||
|
|
@ -282,7 +288,7 @@ class ConvertPlugin(BeetsPlugin):
|
|||
quiet = self.config["quiet"].get(bool)
|
||||
|
||||
if not quiet and not pretend:
|
||||
self._log.info("Encoding {0}", util.displayable_path(source))
|
||||
self._log.info("Encoding {}", util.displayable_path(source))
|
||||
|
||||
command = os.fsdecode(command)
|
||||
source = os.fsdecode(source)
|
||||
|
|
@ -301,7 +307,7 @@ class ConvertPlugin(BeetsPlugin):
|
|||
encode_cmd.append(os.fsdecode(args[i]))
|
||||
|
||||
if pretend:
|
||||
self._log.info("{0}", " ".join(args))
|
||||
self._log.info("{}", " ".join(args))
|
||||
return
|
||||
|
||||
try:
|
||||
|
|
@ -309,26 +315,25 @@ class ConvertPlugin(BeetsPlugin):
|
|||
except subprocess.CalledProcessError as exc:
|
||||
# Something went wrong (probably Ctrl+C), remove temporary files
|
||||
self._log.info(
|
||||
"Encoding {0} failed. Cleaning up...",
|
||||
"Encoding {} failed. Cleaning up...",
|
||||
util.displayable_path(source),
|
||||
)
|
||||
self._log.debug(
|
||||
"Command {0} exited with status {1}: {2}",
|
||||
"Command {0} exited with status {1.returncode}: {1.output}",
|
||||
args,
|
||||
exc.returncode,
|
||||
exc.output,
|
||||
exc,
|
||||
)
|
||||
util.remove(dest)
|
||||
util.prune_dirs(os.path.dirname(dest))
|
||||
raise
|
||||
except OSError as exc:
|
||||
raise ui.UserError(
|
||||
"convert: couldn't invoke '{}': {}".format(" ".join(args), exc)
|
||||
f"convert: couldn't invoke {' '.join(args)!r}: {exc}"
|
||||
)
|
||||
|
||||
if not quiet and not pretend:
|
||||
self._log.info(
|
||||
"Finished encoding {0}", util.displayable_path(source)
|
||||
"Finished encoding {}", util.displayable_path(source)
|
||||
)
|
||||
|
||||
def convert_item(
|
||||
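Explicitly numbered fields with attribute access, as in `{1.returncode}: {1.output}` above, let a single argument feed several fields. A minimal sketch using Unix's `false` command:

.. code-block:: python

    import subprocess

    try:
        subprocess.run(["false"], check=True, capture_output=True)
    except subprocess.CalledProcessError as exc:
        print(
            "Command {0} exited with status {1.returncode}: {1.output}".format(
                ["false"], exc
            )
        )
    # -> Command ['false'] exited with status 1: b''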
|
|
@ -356,7 +361,7 @@ class ConvertPlugin(BeetsPlugin):
|
|||
try:
|
||||
mediafile.MediaFile(util.syspath(item.path))
|
||||
except mediafile.UnreadableFileError as exc:
|
||||
self._log.error("Could not open file to convert: {0}", exc)
|
||||
self._log.error("Could not open file to convert: {}", exc)
|
||||
continue
|
||||
|
||||
# When keeping the new file in the library, we first move the
|
||||
|
|
@ -382,21 +387,20 @@ class ConvertPlugin(BeetsPlugin):
|
|||
|
||||
if os.path.exists(util.syspath(dest)):
|
||||
self._log.info(
|
||||
"Skipping {0} (target file exists)",
|
||||
util.displayable_path(item.path),
|
||||
"Skipping {.filepath} (target file exists)", item
|
||||
)
|
||||
continue
|
||||
|
||||
if keep_new:
|
||||
if pretend:
|
||||
self._log.info(
|
||||
"mv {0} {1}",
|
||||
util.displayable_path(item.path),
|
||||
"mv {.filepath} {}",
|
||||
item,
|
||||
util.displayable_path(original),
|
||||
)
|
||||
else:
|
||||
self._log.info(
|
||||
"Moving to {0}", util.displayable_path(original)
|
||||
"Moving to {}", util.displayable_path(original)
|
||||
)
|
||||
util.move(item.path, original)
|
||||
|
||||
|
|
@ -412,10 +416,10 @@ class ConvertPlugin(BeetsPlugin):
|
|||
msg = "ln" if hardlink else ("ln -s" if link else "cp")
|
||||
|
||||
self._log.info(
|
||||
"{2} {0} {1}",
|
||||
"{} {} {}",
|
||||
msg,
|
||||
util.displayable_path(original),
|
||||
util.displayable_path(converted),
|
||||
msg,
|
||||
)
|
||||
else:
|
||||
# No transcoding necessary.
|
||||
|
|
@ -425,9 +429,7 @@ class ConvertPlugin(BeetsPlugin):
|
|||
else ("Linking" if link else "Copying")
|
||||
)
|
||||
|
||||
self._log.info(
|
||||
"{1} {0}", util.displayable_path(item.path), msg
|
||||
)
|
||||
self._log.info("{} {.filepath}", msg, item)
|
||||
|
||||
if hardlink:
|
||||
util.hardlink(original, converted)
|
||||
|
|
@ -458,8 +460,7 @@ class ConvertPlugin(BeetsPlugin):
|
|||
if album and album.artpath:
|
||||
maxwidth = self._get_art_resize(album.artpath)
|
||||
self._log.debug(
|
||||
"embedding album art from {}",
|
||||
util.displayable_path(album.artpath),
|
||||
"embedding album art from {.art_filepath}", album
|
||||
)
|
||||
art.embed_item(
|
||||
self._log,
|
||||
|
|
@ -517,8 +518,7 @@ class ConvertPlugin(BeetsPlugin):
|
|||
|
||||
if os.path.exists(util.syspath(dest)):
|
||||
self._log.info(
|
||||
"Skipping {0} (target file exists)",
|
||||
util.displayable_path(album.artpath),
|
||||
"Skipping {.art_filepath} (target file exists)", album
|
||||
)
|
||||
return
|
||||
|
||||
|
|
@ -528,8 +528,8 @@ class ConvertPlugin(BeetsPlugin):
|
|||
# Either copy or resize (while copying) the image.
|
||||
if maxwidth is not None:
|
||||
self._log.info(
|
||||
"Resizing cover art from {0} to {1}",
|
||||
util.displayable_path(album.artpath),
|
||||
"Resizing cover art from {.art_filepath} to {}",
|
||||
album,
|
||||
util.displayable_path(dest),
|
||||
)
|
||||
if not pretend:
|
||||
|
|
@ -539,10 +539,10 @@ class ConvertPlugin(BeetsPlugin):
|
|||
msg = "ln" if hardlink else ("ln -s" if link else "cp")
|
||||
|
||||
self._log.info(
|
||||
"{2} {0} {1}",
|
||||
util.displayable_path(album.artpath),
|
||||
util.displayable_path(dest),
|
||||
"{} {.art_filepath} {}",
|
||||
msg,
|
||||
album,
|
||||
util.displayable_path(dest),
|
||||
)
|
||||
else:
|
||||
msg = (
|
||||
|
|
@ -552,10 +552,10 @@ class ConvertPlugin(BeetsPlugin):
|
|||
)
|
||||
|
||||
self._log.info(
|
||||
"{2} cover art from {0} to {1}",
|
||||
util.displayable_path(album.artpath),
|
||||
util.displayable_path(dest),
|
||||
"{} cover art from {.art_filepath} to {}",
|
||||
msg,
|
||||
album,
|
||||
util.displayable_path(dest),
|
||||
)
|
||||
if hardlink:
|
||||
util.hardlink(album.artpath, dest)
|
||||
|
|
@ -616,7 +616,7 @@ class ConvertPlugin(BeetsPlugin):
|
|||
# Playlist paths are understood as relative to the dest directory.
|
||||
pl_normpath = util.normpath(playlist)
|
||||
pl_dir = os.path.dirname(pl_normpath)
|
||||
self._log.info("Creating playlist file {0}", pl_normpath)
|
||||
self._log.info("Creating playlist file {}", pl_normpath)
|
||||
# Generates a list of paths to media files, ensures the paths are
|
||||
# relative to the playlist's location and translates the unicode
|
||||
# strings we get from item.destination to bytes.
|
||||
|
|
@ -644,7 +644,7 @@ class ConvertPlugin(BeetsPlugin):
|
|||
tmpdir = self.config["tmpdir"].get()
|
||||
if tmpdir:
|
||||
tmpdir = os.fsdecode(util.bytestring_path(tmpdir))
|
||||
fd, dest = tempfile.mkstemp(os.fsdecode(b"." + ext), dir=tmpdir)
|
||||
fd, dest = tempfile.mkstemp(f".{os.fsdecode(ext)}", dir=tmpdir)
|
||||
os.close(fd)
|
||||
dest = util.bytestring_path(dest)
|
||||
_temp_files.append(dest) # Delete the transcode later.
|
||||
|
|
@ -666,7 +666,7 @@ class ConvertPlugin(BeetsPlugin):
|
|||
if self.config["delete_originals"]:
|
||||
self._log.log(
|
||||
logging.DEBUG if self.config["quiet"] else logging.INFO,
|
||||
"Removing original file {0}",
|
||||
"Removing original file {}",
|
||||
source_path,
|
||||
)
|
||||
util.remove(source_path, False)
|
||||
|
|
|
|||
|
|
@ -96,7 +96,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
|
|||
f"Invalid `release_date` returned by {self.data_source} API: "
|
||||
f"{release_date!r}"
|
||||
)
|
||||
tracks_obj = self.fetch_data(self.album_url + deezer_id + "/tracks")
|
||||
tracks_obj = self.fetch_data(f"{self.album_url}{deezer_id}/tracks")
|
||||
if tracks_obj is None:
|
||||
return None
|
||||
try:
|
||||
|
|
@ -169,7 +169,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
|
|||
# the track's disc).
|
||||
if not (
|
||||
album_tracks_obj := self.fetch_data(
|
||||
self.album_url + str(track_data["album"]["id"]) + "/tracks"
|
||||
f"{self.album_url}{track_data['album']['id']}/tracks"
|
||||
)
|
||||
):
|
||||
return None
|
||||
|
|
@ -241,26 +241,26 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
|
|||
query = self._construct_search_query(
|
||||
query_string=query_string, filters=filters
|
||||
)
|
||||
self._log.debug(f"Searching {self.data_source} for '{query}'")
|
||||
self._log.debug("Searching {.data_source} for '{}'", self, query)
|
||||
try:
|
||||
response = requests.get(
|
||||
self.search_url + query_type,
|
||||
f"{self.search_url}{query_type}",
|
||||
params={"q": query},
|
||||
timeout=10,
|
||||
)
|
||||
response.raise_for_status()
|
||||
except requests.exceptions.RequestException as e:
|
||||
self._log.error(
|
||||
"Error fetching data from {} API\n Error: {}",
|
||||
self.data_source,
|
||||
"Error fetching data from {.data_source} API\n Error: {}",
|
||||
self,
|
||||
e,
|
||||
)
|
||||
return ()
|
||||
response_data: Sequence[IDResponse] = response.json().get("data", [])
|
||||
self._log.debug(
|
||||
"Found {} result(s) from {} for '{}'",
|
||||
"Found {} result(s) from {.data_source} for '{}'",
|
||||
len(response_data),
|
||||
self.data_source,
|
||||
self,
|
||||
query,
|
||||
)
|
||||
return response_data
|
||||
|
|
|
|||
|
|
@ -145,7 +145,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
|
|||
try:
|
||||
_, _, url = auth_client.get_authorize_url()
|
||||
except CONNECTION_ERRORS as e:
|
||||
self._log.debug("connection error: {0}", e)
|
||||
self._log.debug("connection error: {}", e)
|
||||
raise beets.ui.UserError("communication with Discogs failed")
|
||||
|
||||
beets.ui.print_("To authenticate with Discogs, visit:")
|
||||
|
|
@ -158,11 +158,11 @@ class DiscogsPlugin(MetadataSourcePlugin):
|
|||
except DiscogsAPIError:
|
||||
raise beets.ui.UserError("Discogs authorization failed")
|
||||
except CONNECTION_ERRORS as e:
|
||||
self._log.debug("connection error: {0}", e)
|
||||
self._log.debug("connection error: {}", e)
|
||||
raise beets.ui.UserError("Discogs token request failed")
|
||||
|
||||
# Save the token for later use.
|
||||
self._log.debug("Discogs token {0}, secret {1}", token, secret)
|
||||
self._log.debug("Discogs token {}, secret {}", token, secret)
|
||||
with open(self._tokenfile(), "w") as f:
|
||||
json.dump({"token": token, "secret": secret}, f)
|
||||
|
||||
|
|
@ -202,7 +202,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
|
|||
"""Fetches an album by its Discogs ID and returns an AlbumInfo object
|
||||
or None if the album is not found.
|
||||
"""
|
||||
self._log.debug("Searching for release {0}", album_id)
|
||||
self._log.debug("Searching for release {}", album_id)
|
||||
|
||||
discogs_id = self._extract_id(album_id)
|
||||
|
||||
|
|
@ -216,7 +216,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
|
|||
except DiscogsAPIError as e:
|
||||
if e.status_code != 404:
|
||||
self._log.debug(
|
||||
"API Error: {0} (query: {1})",
|
||||
"API Error: {} (query: {})",
|
||||
e,
|
||||
result.data["resource_url"],
|
||||
)
|
||||
|
|
@ -266,7 +266,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
|
|||
"""Fetches a master release given its Discogs ID and returns its year
|
||||
or None if the master release is not found.
|
||||
"""
|
||||
self._log.debug("Getting master release {0}", master_id)
|
||||
self._log.debug("Getting master release {}", master_id)
|
||||
result = Master(self.discogs_client, {"id": master_id})
|
||||
|
||||
try:
|
||||
|
|
@ -274,7 +274,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
|
|||
except DiscogsAPIError as e:
|
||||
if e.status_code != 404:
|
||||
self._log.debug(
|
||||
"API Error: {0} (query: {1})",
|
||||
"API Error: {} (query: {})",
|
||||
e,
|
||||
result.data["resource_url"],
|
||||
)
|
||||
|
|
@ -385,7 +385,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
|
|||
track.artist_id = artist_id
|
||||
# Discogs does not have track IDs. Invent our own IDs as proposed
|
||||
# in #2336.
|
||||
track.track_id = str(album_id) + "-" + track.track_alt
|
||||
track.track_id = f"{album_id}-{track.track_alt}"
|
||||
track.data_url = data_url
|
||||
track.data_source = "Discogs"
|
||||
|
||||
|
|
@ -552,7 +552,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
|
|||
idx, medium_idx, sub_idx = self.get_track_index(
|
||||
subtracks[0]["position"]
|
||||
)
|
||||
position = "{}{}".format(idx or "", medium_idx or "")
|
||||
position = f"{idx or ''}{medium_idx or ''}"
|
||||
|
||||
if tracklist and not tracklist[-1]["position"]:
|
||||
# Assume the previous index track contains the track title.
|
||||
|
|
@ -574,8 +574,8 @@ class DiscogsPlugin(MetadataSourcePlugin):
|
|||
# option is set
|
||||
if self.config["index_tracks"]:
|
||||
for subtrack in subtracks:
|
||||
subtrack["title"] = "{}: {}".format(
|
||||
index_track["title"], subtrack["title"]
|
||||
subtrack["title"] = (
|
||||
f"{index_track['title']}: {subtrack['title']}"
|
||||
)
|
||||
tracklist.extend(subtracks)
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -150,7 +150,7 @@ class DuplicatesPlugin(BeetsPlugin):
|
|||
count = self.config["count"].get(bool)
|
||||
delete = self.config["delete"].get(bool)
|
||||
remove = self.config["remove"].get(bool)
|
||||
fmt = self.config["format"].get(str)
|
||||
fmt_tmpl = self.config["format"].get(str)
|
||||
full = self.config["full"].get(bool)
|
||||
keys = self.config["keys"].as_str_seq()
|
||||
merge = self.config["merge"].get(bool)
|
||||
|
|
@ -175,15 +175,14 @@ class DuplicatesPlugin(BeetsPlugin):
|
|||
return
|
||||
|
||||
if path:
|
||||
fmt = "$path"
|
||||
fmt_tmpl = "$path"
|
||||
|
||||
# Default format string for count mode.
|
||||
if count and not fmt:
|
||||
if count and not fmt_tmpl:
|
||||
if album:
|
||||
fmt = "$albumartist - $album"
|
||||
fmt_tmpl = "$albumartist - $album"
|
||||
else:
|
||||
fmt = "$albumartist - $album - $title"
|
||||
fmt += ": {0}"
|
||||
fmt_tmpl = "$albumartist - $album - $title"
|
||||
|
||||
if checksum:
|
||||
for i in items:
|
||||
|
|
@ -207,7 +206,7 @@ class DuplicatesPlugin(BeetsPlugin):
|
|||
delete=delete,
|
||||
remove=remove,
|
||||
tag=tag,
|
||||
fmt=fmt.format(obj_count),
|
||||
fmt=f"{fmt_tmpl}: {obj_count}",
|
||||
)
|
||||
|
||||
self._command.func = _dup
|
||||
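The `fmt_tmpl` rename keeps the template distinct from the final string: the `$`-style fields are expanded later by beets' path formatter, and only the duplicate count is interpolated here. Illustrative values:

.. code-block:: python

    fmt_tmpl = "$albumartist - $album"  # expanded later by beets, not here
    obj_count = 3
    print(f"{fmt_tmpl}: {obj_count}")
    # -> $albumartist - $album: 3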
|
|
@ -255,28 +254,24 @@ class DuplicatesPlugin(BeetsPlugin):
|
|||
checksum = getattr(item, key, False)
|
||||
if not checksum:
|
||||
self._log.debug(
|
||||
"key {0} on item {1} not cached:computing checksum",
|
||||
"key {} on item {.filepath} not cached:computing checksum",
|
||||
key,
|
||||
displayable_path(item.path),
|
||||
item,
|
||||
)
|
||||
try:
|
||||
checksum = command_output(args).stdout
|
||||
setattr(item, key, checksum)
|
||||
item.store()
|
||||
self._log.debug(
|
||||
"computed checksum for {0} using {1}", item.title, key
|
||||
"computed checksum for {.title} using {}", item, key
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
self._log.debug(
|
||||
"failed to checksum {0}: {1}",
|
||||
displayable_path(item.path),
|
||||
e,
|
||||
)
|
||||
self._log.debug("failed to checksum {.filepath}: {}", item, e)
|
||||
else:
|
||||
self._log.debug(
|
||||
"key {0} on item {1} cached:not computing checksum",
|
||||
"key {} on item {.filepath} cached:not computing checksum",
|
||||
key,
|
||||
displayable_path(item.path),
|
||||
item,
|
||||
)
|
||||
return key, checksum
|
||||
|
||||
|
|
@ -294,15 +289,15 @@ class DuplicatesPlugin(BeetsPlugin):
|
|||
values = [v for v in values if v not in (None, "")]
|
||||
if strict and len(values) < len(keys):
|
||||
self._log.debug(
|
||||
"some keys {0} on item {1} are null or empty: skipping",
|
||||
"some keys {} on item {.filepath} are null or empty: skipping",
|
||||
keys,
|
||||
displayable_path(obj.path),
|
||||
obj,
|
||||
)
|
||||
elif not strict and not len(values):
|
||||
self._log.debug(
|
||||
"all keys {0} on item {1} are null or empty: skipping",
|
||||
"all keys {} on item {.filepath} are null or empty: skipping",
|
||||
keys,
|
||||
displayable_path(obj.path),
|
||||
obj,
|
||||
)
|
||||
else:
|
||||
key = tuple(values)
|
||||
|
|
@ -360,11 +355,11 @@ class DuplicatesPlugin(BeetsPlugin):
|
|||
value = getattr(o, f, None)
|
||||
if value:
|
||||
self._log.debug(
|
||||
"key {0} on item {1} is null "
|
||||
"or empty: setting from item {2}",
|
||||
"key {} on item {} is null "
|
||||
"or empty: setting from item {.filepath}",
|
||||
f,
|
||||
displayable_path(objs[0].path),
|
||||
displayable_path(o.path),
|
||||
o,
|
||||
)
|
||||
setattr(objs[0], f, value)
|
||||
objs[0].store()
|
||||
|
|
@ -384,11 +379,11 @@ class DuplicatesPlugin(BeetsPlugin):
|
|||
missing.album_id = objs[0].id
|
||||
missing.add(i._db)
|
||||
self._log.debug(
|
||||
"item {0} missing from album {1}:"
|
||||
" merging from {2} into {3}",
|
||||
"item {} missing from album {}:"
|
||||
" merging from {.filepath} into {}",
|
||||
missing,
|
||||
objs[0],
|
||||
displayable_path(o.path),
|
||||
o,
|
||||
displayable_path(missing.destination()),
|
||||
)
|
||||
missing.move(operation=MoveOperation.COPY)
|
||||
|
|
|
|||
|
|
@ -46,9 +46,7 @@ def edit(filename, log):
|
|||
try:
|
||||
subprocess.call(cmd)
|
||||
except OSError as exc:
|
||||
raise ui.UserError(
|
||||
"could not run editor command {!r}: {}".format(cmd[0], exc)
|
||||
)
|
||||
raise ui.UserError(f"could not run editor command {cmd[0]!r}: {exc}")
|
||||
|
||||
|
||||
def dump(arg):
|
||||
|
|
@ -71,9 +69,7 @@ def load(s):
|
|||
for d in yaml.safe_load_all(s):
|
||||
if not isinstance(d, dict):
|
||||
raise ParseError(
|
||||
"each entry must be a dictionary; found {}".format(
|
||||
type(d).__name__
|
||||
)
|
||||
f"each entry must be a dictionary; found {type(d).__name__}"
|
||||
)
|
||||
|
||||
# Convert all keys to strings. They started out as strings,
|
||||
|
|
|
|||
|
|
@ -35,8 +35,9 @@ def _confirm(objs, album):
|
|||
to items).
|
||||
"""
|
||||
noun = "album" if album else "file"
|
||||
prompt = "Modify artwork for {} {}{} (Y/n)?".format(
|
||||
len(objs), noun, "s" if len(objs) > 1 else ""
|
||||
prompt = (
|
||||
"Modify artwork for"
|
||||
f" {len(objs)} {noun}{'s' if len(objs) > 1 else ''} (Y/n)?"
|
||||
)
|
||||
|
||||
# Show all the items or albums.
|
||||
|
|
@ -110,9 +111,7 @@ class EmbedCoverArtPlugin(BeetsPlugin):
|
|||
imagepath = normpath(opts.file)
|
||||
if not os.path.isfile(syspath(imagepath)):
|
||||
raise ui.UserError(
|
||||
"image file {} not found".format(
|
||||
displayable_path(imagepath)
|
||||
)
|
||||
f"image file {displayable_path(imagepath)} not found"
|
||||
)
|
||||
|
||||
items = lib.items(args)
|
||||
|
|
@ -137,7 +136,7 @@ class EmbedCoverArtPlugin(BeetsPlugin):
|
|||
response = requests.get(opts.url, timeout=5)
|
||||
response.raise_for_status()
|
||||
except requests.exceptions.RequestException as e:
|
||||
self._log.error("{}".format(e))
|
||||
self._log.error("{}", e)
|
||||
return
|
||||
extension = guess_extension(response.headers["Content-Type"])
|
||||
if extension is None:
|
||||
|
|
@ -149,7 +148,7 @@ class EmbedCoverArtPlugin(BeetsPlugin):
|
|||
with open(tempimg, "wb") as f:
|
||||
f.write(response.content)
|
||||
except Exception as e:
|
||||
self._log.error("Unable to save image: {}".format(e))
|
||||
self._log.error("Unable to save image: {}", e)
|
||||
return
|
||||
items = lib.items(args)
|
||||
# Confirm with user.
|
||||
|
|
@ -274,7 +273,7 @@ class EmbedCoverArtPlugin(BeetsPlugin):
|
|||
"""
|
||||
if self.config["remove_art_file"] and album.artpath:
|
||||
if os.path.isfile(syspath(album.artpath)):
|
||||
self._log.debug("Removing album art file for {0}", album)
|
||||
self._log.debug("Removing album art file for {}", album)
|
||||
os.remove(syspath(album.artpath))
|
||||
album.artpath = None
|
||||
album.store()
|
||||
|
|
|
|||
|
|
@ -38,9 +38,7 @@ def api_url(host, port, endpoint):
|
|||
hostname_list.insert(0, "http://")
|
||||
hostname = "".join(hostname_list)
|
||||
|
||||
joined = urljoin(
|
||||
"{hostname}:{port}".format(hostname=hostname, port=port), endpoint
|
||||
)
|
||||
joined = urljoin(f"{hostname}:{port}", endpoint)
|
||||
|
||||
scheme, netloc, path, query_string, fragment = urlsplit(joined)
|
||||
query_params = parse_qs(query_string)
|
||||
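`urljoin` resolves the endpoint against the f-string base just as before. A quick check with illustrative host values:

.. code-block:: python

    from urllib.parse import urljoin

    hostname, port, endpoint = "http://localhost", 8096, "/Users/Public"
    print(urljoin(f"{hostname}:{port}", endpoint))
    # -> http://localhost:8096/Users/Public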
|
|
@ -81,12 +79,12 @@ def create_headers(user_id, token=None):
|
|||
headers = {}
|
||||
|
||||
authorization = (
|
||||
'MediaBrowser UserId="{user_id}", '
|
||||
f'MediaBrowser UserId="{user_id}", '
|
||||
'Client="other", '
|
||||
'Device="beets", '
|
||||
'DeviceId="beets", '
|
||||
'Version="0.0.0"'
|
||||
).format(user_id=user_id)
|
||||
)
|
||||
|
||||
headers["x-emby-authorization"] = authorization
|
||||
|
||||
|
|
@ -186,7 +184,7 @@ class EmbyUpdate(BeetsPlugin):
|
|||
# Get user information from the Emby API.
|
||||
user = get_user(host, port, username)
|
||||
if not user:
|
||||
self._log.warning(f"User {username} could not be found.")
|
||||
self._log.warning("User {} could not be found.", username)
|
||||
return
|
||||
userid = user[0]["Id"]
|
||||
|
||||
|
|
@ -198,7 +196,7 @@ class EmbyUpdate(BeetsPlugin):
|
|||
# Get authentication token.
|
||||
token = get_token(host, port, headers, auth_data)
|
||||
if not token:
|
||||
self._log.warning("Could not get token for user {0}", username)
|
||||
self._log.warning("Could not get token for user {}", username)
|
||||
return
|
||||
|
||||
# Recreate headers with a token.
|
||||
|
|
|
|||
|
|
@ -150,7 +150,7 @@ class ExportPlugin(BeetsPlugin):
|
|||
try:
|
||||
data, item = data_emitter(included_keys or "*")
|
||||
except (mediafile.UnreadableFileError, OSError) as ex:
|
||||
self._log.error("cannot read file: {0}", ex)
|
||||
self._log.error("cannot read file: {}", ex)
|
||||
continue
|
||||
|
||||
for key, value in data.items():
|
||||
|
|
|
|||
|
|
@ -133,7 +133,7 @@ class Candidate:
|
|||
# get_size returns None if no local imaging backend is available
|
||||
if not self.size:
|
||||
self.size = ArtResizer.shared.get_size(self.path)
|
||||
self._log.debug("image size: {}", self.size)
|
||||
self._log.debug("image size: {.size}", self)
|
||||
|
||||
if not self.size:
|
||||
self._log.warning(
|
||||
|
|
@ -151,7 +151,7 @@ class Candidate:
|
|||
# Check minimum dimension.
|
||||
if plugin.minwidth and self.size[0] < plugin.minwidth:
|
||||
self._log.debug(
|
||||
"image too small ({} < {})", self.size[0], plugin.minwidth
|
||||
"image too small ({} < {.minwidth})", self.size[0], plugin
|
||||
)
|
||||
return ImageAction.BAD
|
||||
|
||||
|
|
@ -162,10 +162,10 @@ class Candidate:
|
|||
if edge_diff > plugin.margin_px:
|
||||
self._log.debug(
|
||||
"image is not close enough to being "
|
||||
"square, ({} - {} > {})",
|
||||
"square, ({} - {} > {.margin_px})",
|
||||
long_edge,
|
||||
short_edge,
|
||||
plugin.margin_px,
|
||||
plugin,
|
||||
)
|
||||
return ImageAction.BAD
|
||||
elif plugin.margin_percent:
|
||||
|
|
@ -190,7 +190,7 @@ class Candidate:
|
|||
downscale = False
|
||||
if plugin.maxwidth and self.size[0] > plugin.maxwidth:
|
||||
self._log.debug(
|
||||
"image needs rescaling ({} > {})", self.size[0], plugin.maxwidth
|
||||
"image needs rescaling ({} > {.maxwidth})", self.size[0], plugin
|
||||
)
|
||||
downscale = True
|
||||
|
||||
|
|
@ -200,9 +200,9 @@ class Candidate:
|
|||
filesize = os.stat(syspath(self.path)).st_size
|
||||
if filesize > plugin.max_filesize:
|
||||
self._log.debug(
|
||||
"image needs resizing ({}B > {}B)",
|
||||
"image needs resizing ({}B > {.max_filesize}B)",
|
||||
filesize,
|
||||
plugin.max_filesize,
|
||||
plugin,
|
||||
)
|
||||
downsize = True
|
||||
|
||||
|
|
@ -213,9 +213,9 @@ class Candidate:
|
|||
reformat = fmt != plugin.cover_format
|
||||
if reformat:
|
||||
self._log.debug(
|
||||
"image needs reformatting: {} -> {}",
|
||||
"image needs reformatting: {} -> {.cover_format}",
|
||||
fmt,
|
||||
plugin.cover_format,
|
||||
plugin,
|
||||
)
|
||||
|
||||
skip_check_for = skip_check_for or []
|
||||
|
|
@ -329,7 +329,7 @@ def _logged_get(log: Logger, *args, **kwargs) -> requests.Response:
|
|||
prepped.url, {}, None, None, None
|
||||
)
|
||||
send_kwargs.update(settings)
|
||||
log.debug("{}: {}", message, prepped.url)
|
||||
log.debug("{}: {.url}", message, prepped)
|
||||
return s.send(prepped, **send_kwargs)
|
||||
|
||||
|
||||
|
|
@ -542,14 +542,14 @@ class CoverArtArchive(RemoteArtSource):
|
|||
try:
|
||||
response = self.request(url)
|
||||
except requests.RequestException:
|
||||
self._log.debug("{}: error receiving response", self.NAME)
|
||||
self._log.debug("{.NAME}: error receiving response", self)
|
||||
return
|
||||
|
||||
try:
|
||||
data = response.json()
|
||||
except ValueError:
|
||||
self._log.debug(
|
||||
"{}: error loading response: {}", self.NAME, response.text
|
||||
"{.NAME}: error loading response: {.text}", self, response
|
||||
)
|
||||
return
|
||||
|
||||
|
|
@ -593,7 +593,7 @@ class CoverArtArchive(RemoteArtSource):
|
|||
class Amazon(RemoteArtSource):
|
||||
NAME = "Amazon"
|
||||
ID = "amazon"
|
||||
URL = "https://images.amazon.com/images/P/%s.%02i.LZZZZZZZ.jpg"
|
||||
URL = "https://images.amazon.com/images/P/{}.{:02d}.LZZZZZZZ.jpg"
|
||||
INDICES = (1, 2)
|
||||
|
||||
def get(
|
||||
|
|
@ -606,7 +606,7 @@ class Amazon(RemoteArtSource):
|
|||
if album.asin:
|
||||
for index in self.INDICES:
|
||||
yield self._candidate(
|
||||
url=self.URL % (album.asin, index),
|
||||
url=self.URL.format(album.asin, index),
|
||||
match=MetadataMatch.EXACT,
|
||||
)
|
||||
|
||||
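The Amazon URL stays a str.format template rather than an f-string because it is defined at class level, before any ASIN exists. The ASIN below is illustrative:

.. code-block:: python

    URL = "https://images.amazon.com/images/P/{}.{:02d}.LZZZZZZZ.jpg"
    print(URL.format("B000002L5R", 1))
    # -> https://images.amazon.com/images/P/B000002L5R.01.LZZZZZZZ.jpg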
|
|
@ -629,7 +629,7 @@ class AlbumArtOrg(RemoteArtSource):
|
|||
# Get the page from albumart.org.
|
||||
try:
|
||||
resp = self.request(self.URL, params={"asin": album.asin})
|
||||
self._log.debug("scraped art URL: {}", resp.url)
|
||||
self._log.debug("scraped art URL: {.url}", resp)
|
||||
except requests.RequestException:
|
||||
self._log.debug("error scraping art page")
|
||||
return
|
||||
|
|
@ -682,7 +682,7 @@ class GoogleImages(RemoteArtSource):
|
|||
"""
|
||||
if not (album.albumartist and album.album):
|
||||
return
|
||||
search_string = (album.albumartist + "," + album.album).encode("utf-8")
|
||||
search_string = f"{album.albumartist},{album.album}".encode("utf-8")
|
||||
|
||||
try:
|
||||
response = self.request(
|
||||
|
|
@ -702,7 +702,7 @@ class GoogleImages(RemoteArtSource):
|
|||
try:
|
||||
data = response.json()
|
||||
except ValueError:
|
||||
self._log.debug("google: error loading response: {}", response.text)
|
||||
self._log.debug("google: error loading response: {.text}", response)
|
||||
return
|
||||
|
||||
if "error" in data:
|
||||
|
|
@ -723,7 +723,7 @@ class FanartTV(RemoteArtSource):
|
|||
NAME = "fanart.tv"
|
||||
ID = "fanarttv"
|
||||
API_URL = "https://webservice.fanart.tv/v3/"
|
||||
API_ALBUMS = API_URL + "music/albums/"
|
||||
API_ALBUMS = f"{API_URL}music/albums/"
|
||||
PROJECT_KEY = "61a7d0ab4e67162b7a0c7c35915cd48e"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
|
@ -750,7 +750,7 @@ class FanartTV(RemoteArtSource):
|
|||
|
||||
try:
|
||||
response = self.request(
|
||||
self.API_ALBUMS + album.mb_releasegroupid,
|
||||
f"{self.API_ALBUMS}{album.mb_releasegroupid}",
|
||||
headers={
|
||||
"api-key": self.PROJECT_KEY,
|
||||
"client-key": self.client_key,
|
||||
|
|
@ -764,7 +764,7 @@ class FanartTV(RemoteArtSource):
|
|||
data = response.json()
|
||||
except ValueError:
|
||||
self._log.debug(
|
||||
"fanart.tv: error loading response: {}", response.text
|
||||
"fanart.tv: error loading response: {.text}", response
|
||||
)
|
||||
return
|
||||
|
||||
|
|
@ -820,7 +820,7 @@ class ITunesStore(RemoteArtSource):
|
|||
return
|
||||
|
||||
payload = {
|
||||
"term": album.albumartist + " " + album.album,
|
||||
"term": f"{album.albumartist} {album.album}",
|
||||
"entity": "album",
|
||||
"media": "music",
|
||||
"limit": 200,
|
||||
|
|
@ -947,14 +947,14 @@ class Wikipedia(RemoteArtSource):
|
|||
data = dbpedia_response.json()
|
||||
results = data["results"]["bindings"]
|
||||
if results:
|
||||
cover_filename = "File:" + results[0]["coverFilename"]["value"]
|
||||
cover_filename = f"File:{results[0]['coverFilename']['value']}"
|
||||
page_id = results[0]["pageId"]["value"]
|
||||
else:
|
||||
self._log.debug("wikipedia: album not found on dbpedia")
|
||||
except (ValueError, KeyError, IndexError):
|
||||
self._log.debug(
|
||||
"wikipedia: error scraping dbpedia response: {}",
|
||||
dbpedia_response.text,
|
||||
"wikipedia: error scraping dbpedia response: {.text}",
|
||||
dbpedia_response,
|
||||
)
|
||||
|
||||
# Ensure we have a filename before attempting to query wikipedia
|
||||
|
|
@ -996,7 +996,7 @@ class Wikipedia(RemoteArtSource):
|
|||
results = data["query"]["pages"][page_id]["images"]
|
||||
for result in results:
|
||||
if re.match(
|
||||
re.escape(lpart) + r".*?\." + re.escape(rpart),
|
||||
rf"{re.escape(lpart)}.*?\.{re.escape(rpart)}",
|
||||
result["title"],
|
||||
):
|
||||
cover_filename = result["title"]
|
||||
|
|
@ -1179,7 +1179,7 @@ class LastFM(RemoteArtSource):
|
|||
if "error" in data:
|
||||
if data["error"] == 6:
|
||||
self._log.debug(
|
||||
"lastfm: no results for {}", album.mb_albumid
|
||||
"lastfm: no results for {.mb_albumid}", album
|
||||
)
|
||||
else:
|
||||
self._log.error(
|
||||
|
|
@ -1200,7 +1200,7 @@ class LastFM(RemoteArtSource):
|
|||
url=images[size], size=self.SIZES[size]
|
||||
)
|
||||
except ValueError:
|
||||
self._log.debug("lastfm: error loading response: {}", response.text)
|
||||
self._log.debug("lastfm: error loading response: {.text}", response)
|
||||
return
|
||||
|
||||
|
||||
|
|
@ -1227,7 +1227,7 @@ class Spotify(RemoteArtSource):
|
|||
paths: None | Sequence[bytes],
|
||||
) -> Iterator[Candidate]:
|
||||
try:
|
||||
url = self.SPOTIFY_ALBUM_URL + album.items().get().spotify_album_id
|
||||
url = f"{self.SPOTIFY_ALBUM_URL}{album.items().get().spotify_album_id}"
|
||||
except AttributeError:
|
||||
self._log.debug("Fetchart: no Spotify album ID found")
|
||||
return
|
||||
|
|
@ -1244,7 +1244,7 @@ class Spotify(RemoteArtSource):
|
|||
soup = BeautifulSoup(html, "html.parser")
|
||||
except ValueError:
|
||||
self._log.debug(
|
||||
"Spotify: error loading response: {}", response.text
|
||||
"Spotify: error loading response: {.text}", response
|
||||
)
|
||||
return
|
||||
|
||||
|
|
@ -1541,9 +1541,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
|
|||
out = candidate
|
||||
assert out.path is not None # help mypy
|
||||
self._log.debug(
|
||||
"using {0.LOC} image {1}",
|
||||
source,
|
||||
util.displayable_path(out.path),
|
||||
"using {.LOC} image {.path}", source, out
|
||||
)
|
||||
break
|
||||
# Remove temporary files for invalid candidates.
|
||||
|
|
@ -1576,7 +1574,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
|
|||
message = ui.colorize(
|
||||
"text_highlight_minor", "has album art"
|
||||
)
|
||||
self._log.info("{0}: {1}", album, message)
|
||||
self._log.info("{}: {}", album, message)
|
||||
else:
|
||||
# In ordinary invocations, look for images on the
|
||||
# filesystem. When forcing, however, always go to the Web
|
||||
|
|
@ -1589,4 +1587,4 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
|
|||
message = ui.colorize("text_success", "found album art")
|
||||
else:
|
||||
message = ui.colorize("text_error", "no art found")
|
||||
self._log.info("{0}: {1}", album, message)
|
||||
self._log.info("{}: {}", album, message)
|
||||
|
|
|
|||
|
|
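The `{.text}` and `{.LOC}` placeholders introduced above rely on a `str.format` feature that plain f-strings never need: an auto-numbered field followed by attribute access, which beets' logging shim resolves lazily, only when the record is actually emitted. A minimal sketch of the idiom (`Resp` is a stand-in object, not the plugin's real `requests` response):

.. code-block:: python

    class Resp:
        text = '{"name": "fanart.tv"}'

    # "{.text}" is an auto-numbered positional field plus attribute
    # lookup: it reads .text off the first argument.
    assert "{.text}".format(Resp()) == Resp.text

    # Explicit indices let one argument be referenced repeatedly, as in
    # the "using {0.LOC} image {1}" style messages above.
    assert "{0.text}+{0.text}".format(Resp()) == f"{Resp.text}+{Resp.text}"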
@@ -89,8 +89,9 @@ class FishPlugin(BeetsPlugin):
             "-o",
             "--output",
             default="~/.config/fish/completions/beet.fish",
-            help="where to save the script. default: "
-            "~/.config/fish/completions",
+            help=(
+                "where to save the script. default: ~/.config/fish/completions"
+            ),
         )
         return [cmd]

@@ -122,23 +123,13 @@ class FishPlugin(BeetsPlugin):
             for name in names:
                 cmd_names_help.append((name, cmd.help))
         # Concatenate the string
-        totstring = HEAD + "\n"
+        totstring = f"{HEAD}\n"
         totstring += get_cmds_list([name[0] for name in cmd_names_help])
         totstring += "" if nobasicfields else get_standard_fields(fields)
         totstring += get_extravalues(lib, extravalues) if extravalues else ""
-        totstring += (
-            "\n"
-            + "# ====== {} =====".format("setup basic beet completion")
-            + "\n" * 2
-        )
+        totstring += "\n# ====== setup basic beet completion =====\n\n"
         totstring += get_basic_beet_options()
-        totstring += (
-            "\n"
-            + "# ====== {} =====".format(
-                "setup field completion for subcommands"
-            )
-            + "\n"
-        )
+        totstring += "\n# ====== setup field completion for subcommands =====\n"
         totstring += get_subcommands(cmd_names_help, nobasicfields, extravalues)
         # Set up completion for all the command options
         totstring += get_all_commands(beetcmds)
@@ -150,23 +141,19 @@ class FishPlugin(BeetsPlugin):
 def _escape(name):
     # Escape ? in fish
     if name == "?":
-        name = "\\" + name
+        name = f"\\{name}"
     return name


 def get_cmds_list(cmds_names):
     # Make a list of all Beets core & plugin commands
-    substr = ""
-    substr += "set CMDS " + " ".join(cmds_names) + ("\n" * 2)
-    return substr
+    return f"set CMDS {' '.join(cmds_names)}\n\n"


 def get_standard_fields(fields):
     # Make a list of album/track fields and append with ':'
-    fields = (field + ":" for field in fields)
-    substr = ""
-    substr += "set FIELDS " + " ".join(fields) + ("\n" * 2)
-    return substr
+    fields = (f"{field}:" for field in fields)
+    return f"set FIELDS {' '.join(fields)}\n\n"


 def get_extravalues(lib, extravalues):
@@ -175,14 +162,8 @@ def get_extravalues(lib, extravalues):
     word = ""
     values_set = get_set_of_values_for_field(lib, extravalues)
     for fld in extravalues:
-        extraname = fld.upper() + "S"
-        word += (
-            "set "
-            + extraname
-            + " "
-            + " ".join(sorted(values_set[fld]))
-            + ("\n" * 2)
-        )
+        extraname = f"{fld.upper()}S"
+        word += f"set {extraname} {' '.join(sorted(values_set[fld]))}\n\n"
     return word


@@ -226,35 +207,29 @@ def get_subcommands(cmd_name_and_help, nobasicfields, extravalues):
     for cmdname, cmdhelp in cmd_name_and_help:
         cmdname = _escape(cmdname)

-        word += (
-            "\n"
-            + "# ------ {} -------".format("fieldsetups for " + cmdname)
-            + "\n"
-        )
+        word += f"\n# ------ fieldsetups for {cmdname} -------\n"
         word += BL_NEED2.format(
-            ("-a " + cmdname), ("-f " + "-d " + wrap(clean_whitespace(cmdhelp)))
+            f"-a {cmdname}", f"-f -d {wrap(clean_whitespace(cmdhelp))}"
         )

         if nobasicfields is False:
             word += BL_USE3.format(
                 cmdname,
-                ("-a " + wrap("$FIELDS")),
-                ("-f " + "-d " + wrap("fieldname")),
+                f"-a {wrap('$FIELDS')}",
+                f"-f -d {wrap('fieldname')}",
             )

         if extravalues:
             for f in extravalues:
-                setvar = wrap("$" + f.upper() + "S")
-                word += (
-                    " ".join(
-                        BL_EXTRA3.format(
-                            (cmdname + " " + f + ":"),
-                            ("-f " + "-A " + "-a " + setvar),
-                            ("-d " + wrap(f)),
-                        ).split()
-                    )
-                    + "\n"
-                )
+                setvar = wrap(f"${f.upper()}S")
+                word += " ".join(
+                    BL_EXTRA3.format(
+                        f"{cmdname} {f}:",
+                        f"-f -A -a {setvar}",
+                        f"-d {wrap(f)}",
+                    ).split()
+                )
+                word += "\n"
     return word


@@ -267,59 +242,44 @@ def get_all_commands(beetcmds):
         for name in names:
             name = _escape(name)

-            word += "\n"
-            word += (
-                ("\n" * 2)
-                + "# ====== {} =====".format("completions for " + name)
-                + "\n"
-            )
+            word += f"\n\n\n# ====== completions for {name} =====\n"

             for option in cmd.parser._get_all_options()[1:]:
                 cmd_l = (
-                    (" -l " + option._long_opts[0].replace("--", ""))
+                    f" -l {option._long_opts[0].replace('--', '')}"
                     if option._long_opts
                     else ""
                 )
                 cmd_s = (
-                    (" -s " + option._short_opts[0].replace("-", ""))
+                    f" -s {option._short_opts[0].replace('-', '')}"
                     if option._short_opts
                     else ""
                 )
                 cmd_need_arg = " -r " if option.nargs in [1] else ""
                 cmd_helpstr = (
-                    (" -d " + wrap(" ".join(option.help.split())))
+                    f" -d {wrap(' '.join(option.help.split()))}"
                     if option.help
                     else ""
                 )
                 cmd_arglist = (
-                    (" -a " + wrap(" ".join(option.choices)))
+                    f" -a {wrap(' '.join(option.choices))}"
                     if option.choices
                     else ""
                 )

-                word += (
-                    " ".join(
-                        BL_USE3.format(
-                            name,
-                            (
-                                cmd_need_arg
-                                + cmd_s
-                                + cmd_l
-                                + " -f "
-                                + cmd_arglist
-                            ),
-                            cmd_helpstr,
-                        ).split()
-                    )
-                    + "\n"
-                )
+                word += " ".join(
+                    BL_USE3.format(
+                        name,
+                        f"{cmd_need_arg}{cmd_s}{cmd_l} -f {cmd_arglist}",
+                        cmd_helpstr,
+                    ).split()
+                )
+                word += "\n"

-        word = word + " ".join(
-            BL_USE3.format(
-                name,
-                ("-s " + "h " + "-l " + "help" + " -f "),
-                ("-d " + wrap("print help") + "\n"),
-            ).split()
+        word = word + BL_USE3.format(
+            name,
+            "-s h -l help -f",
+            f"-d {wrap('print help')}",
         )
     return word


@@ -332,9 +292,9 @@ def clean_whitespace(word):
 def wrap(word):
     # Need " or ' around strings but watch out if they're in the string
     sptoken = '"'
-    if ('"') in word and ("'") in word:
+    if '"' in word and ("'") in word:
         word.replace('"', sptoken)
-        return '"' + word + '"'
+        return f'"{word}"'

     tok = '"' if "'" in word else "'"
-    return tok + word + tok
+    return f"{tok}{word}{tok}"
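In the completion-builder above, chains of string concatenation collapse into single f-strings. A before/after sketch with hypothetical data (`cmds` stands in for the command list the plugin gathers from beets):

.. code-block:: python

    cmds = ["import", "list", "update"]

    # Old: concatenation plus a repeated-newline trick.
    old = "set CMDS " + " ".join(cmds) + ("\n" * 2)

    # New: one f-string; the nested quotes are legal because the inner
    # and outer quote characters differ.
    new = f"set CMDS {' '.join(cmds)}\n\n"
    assert old == new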
@@ -112,7 +112,7 @@ def apply_matches(d, log):
         for item in d:
             if not item.artist:
                 item.artist = artist
-                log.info("Artist replaced with: {}".format(item.artist))
+                log.info("Artist replaced with: {.artist}", item)

     # No artist field: remaining field is the title.
     else:
@@ -122,11 +122,11 @@ def apply_matches(d, log):
     for item in d:
         if bad_title(item.title):
             item.title = str(d[item][title_field])
-            log.info("Title replaced with: {}".format(item.title))
+            log.info("Title replaced with: {.title}", item)

         if "track" in d[item] and item.track == 0:
             item.track = int(d[item]["track"])
-            log.info("Track replaced with: {}".format(item.track))
+            log.info("Track replaced with: {.track}", item)


 # Plugin structure and hook into import process.
@@ -20,7 +20,6 @@ import re
 from typing import TYPE_CHECKING

 from beets import plugins, ui
-from beets.util import displayable_path

 if TYPE_CHECKING:
     from beets.importer import ImportSession, ImportTask
@@ -90,7 +89,7 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
             {
                 "auto": True,
                 "drop": False,
-                "format": "feat. {0}",
+                "format": "feat. {}",
                 "keep_in_artist": False,
             }
         )
@@ -151,10 +150,10 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
         # In case the artist is kept, do not update the artist fields.
         if keep_in_artist_field:
             self._log.info(
-                "artist: {0} (Not changing due to keep_in_artist)", item.artist
+                "artist: {.artist} (Not changing due to keep_in_artist)", item
             )
         else:
-            self._log.info("artist: {0} -> {1}", item.artist, item.albumartist)
+            self._log.info("artist: {0.artist} -> {0.albumartist}", item)
             item.artist = item.albumartist

         if item.artist_sort:
@@ -167,7 +166,7 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
         feat_format = self.config["format"].as_str()
         new_format = feat_format.format(feat_part)
         new_title = f"{item.title} {new_format}"
-        self._log.info("title: {0} -> {1}", item.title, new_title)
+        self._log.info("title: {.title} -> {}", item, new_title)
         item.title = new_title

     def ft_in_title(
@@ -195,7 +194,7 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
         if not featured:
             return False

-        self._log.info("{}", displayable_path(item.path))
+        self._log.info("{.filepath}", item)

         # Attempt to find the featured artist.
         feat_part = find_feat_part(artist, albumartist)
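A note on the repeated-index placeholders above: with beets' str.format-style loggers, one positional argument can feed several fields, and formatting only happens if the message is actually emitted. A sketch with a hypothetical stand-in item:

.. code-block:: python

    from types import SimpleNamespace

    # Stand-in for a beets Item; the real object carries many more fields.
    item = SimpleNamespace(artist="Alice feat. Bob", albumartist="Alice")

    # One argument, referenced twice by index with attribute access.
    msg = "artist: {0.artist} -> {0.albumartist}".format(item)
    assert msg == "artist: Alice feat. Bob -> Alice"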
@@ -62,7 +62,7 @@ class HookPlugin(BeetsPlugin):
     def create_and_register_hook(self, event, command):
         def hook_function(**kwargs):
             if command is None or len(command) == 0:
-                self._log.error('invalid command "{0}"', command)
+                self._log.error('invalid command "{}"', command)
                 return

             # For backwards compatibility, use a string formatter that decodes
@@ -74,7 +74,7 @@ class HookPlugin(BeetsPlugin):
             ]

             self._log.debug(
-                'running command "{0}" for event {1}',
+                'running command "{}" for event {}',
                 " ".join(command_pieces),
                 event,
             )
@@ -83,9 +83,9 @@ class HookPlugin(BeetsPlugin):
                 subprocess.check_call(command_pieces)
             except subprocess.CalledProcessError as exc:
                 self._log.error(
-                    "hook for {0} exited with status {1}", event, exc.returncode
+                    "hook for {} exited with status {.returncode}", event, exc
                 )
             except OSError as exc:
-                self._log.error("hook for {0} failed: {1}", event, exc)
+                self._log.error("hook for {} failed: {}", event, exc)

         self.register_listener(event, hook_function)

@@ -70,10 +70,10 @@ class IHatePlugin(BeetsPlugin):
             self._log.debug("processing your hate")
             if self.do_i_hate_this(task, skip_queries):
                 task.choice_flag = Action.SKIP
-                self._log.info("skipped: {0}", summary(task))
+                self._log.info("skipped: {}", summary(task))
                 return
             if self.do_i_hate_this(task, warn_queries):
-                self._log.info("you may hate this: {0}", summary(task))
+                self._log.info("you may hate this: {}", summary(task))
             else:
                 self._log.debug("nothing to do")
         else:
@@ -94,7 +94,7 @@ class ImportAddedPlugin(BeetsPlugin):
         mtime = os.stat(util.syspath(source)).st_mtime
         self.item_mtime[destination] = mtime
         self._log.debug(
-            "Recorded mtime {0} for item '{1}' imported from '{2}'",
+            "Recorded mtime {} for item '{}' imported from '{}'",
             mtime,
             util.displayable_path(destination),
             util.displayable_path(source),
@@ -103,9 +103,9 @@ class ImportAddedPlugin(BeetsPlugin):
     def update_album_times(self, lib, album):
         if self.reimported_album(album):
             self._log.debug(
-                "Album '{0}' is reimported, skipping import of "
+                "Album '{.filepath}' is reimported, skipping import of "
                 "added dates for the album and its items.",
-                util.displayable_path(album.path),
+                album,
             )
             return

@@ -119,18 +119,17 @@ class ImportAddedPlugin(BeetsPlugin):
             item.store()
         album.added = min(album_mtimes)
         self._log.debug(
-            "Import of album '{0}', selected album.added={1} "
+            "Import of album '{0.album}', selected album.added={0.added} "
            "from item file mtimes.",
-            album.album,
-            album.added,
+            album,
         )
         album.store()

     def update_item_times(self, lib, item):
         if self.reimported_item(item):
             self._log.debug(
-                "Item '{0}' is reimported, skipping import of added date.",
-                util.displayable_path(item.path),
+                "Item '{.filepath}' is reimported, skipping import of added date.",
+                item,
             )
             return
         mtime = self.item_mtime.pop(item.path, None)
@@ -139,9 +138,8 @@ class ImportAddedPlugin(BeetsPlugin):
             if self.config["preserve_mtimes"].get(bool):
                 self.write_item_mtime(item, mtime)
             self._log.debug(
-                "Import of item '{0}', selected item.added={1}",
-                util.displayable_path(item.path),
-                item.added,
+                "Import of item '{0.filepath}', selected item.added={0.added}",
+                item,
             )
             item.store()

@@ -153,7 +151,6 @@ class ImportAddedPlugin(BeetsPlugin):
         if self.config["preserve_write_mtimes"].get(bool):
             self.write_item_mtime(item, item.added)
         self._log.debug(
-            "Write of item '{0}', selected item.added={1}",
-            util.displayable_path(item.path),
-            item.added,
+            "Write of item '{0.filepath}', selected item.added={0.added}",
+            item,
         )

@@ -50,7 +50,7 @@ def _build_m3u_filename(basename):
     path = normpath(
         os.path.join(
             config["importfeeds"]["dir"].as_filename(),
-            date + "_" + basename + ".m3u",
+            f"{date}_{basename}.m3u",
         )
     )
     return path
@@ -136,7 +136,7 @@ class ImportFeedsPlugin(BeetsPlugin):
         if "echo" in formats:
             self._log.info("Location of imported music:")
             for path in paths:
-                self._log.info(" {0}", path)
+                self._log.info(" {}", path)

     def album_imported(self, lib, album):
         self._record_items(lib, album.album, album.items())
@@ -117,7 +117,6 @@ def print_data(data, item=None, fmt=None):
         return

     maxwidth = max(len(key) for key in formatted)
-    lineformat = f"{{0:>{maxwidth}}}: {{1}}"

     if path:
         ui.print_(displayable_path(path))
@@ -126,7 +125,7 @@ def print_data(data, item=None, fmt=None):
         value = formatted[field]
         if isinstance(value, list):
             value = "; ".join(value)
-        ui.print_(lineformat.format(field, value))
+        ui.print_(f"{field:>{maxwidth}}: {value}")


 def print_data_keys(data, item=None):
@@ -139,12 +138,11 @@ def print_data_keys(data, item=None):
     if len(formatted) == 0:
         return

-    line_format = "{0}{{0}}".format(" " * 4)
     if path:
         ui.print_(displayable_path(path))

     for field in sorted(formatted):
-        ui.print_(line_format.format(field))
+        ui.print_(f"    {field}")


 class InfoPlugin(BeetsPlugin):
@@ -221,7 +219,7 @@ class InfoPlugin(BeetsPlugin):
             try:
                 data, item = data_emitter(included_keys or "*")
             except (mediafile.UnreadableFileError, OSError) as ex:
-                self._log.error("cannot read file: {0}", ex)
+                self._log.error("cannot read file: {}", ex)
                 continue

             if opts.summarize:
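The replacement line relies on a nested format spec: the alignment width can itself be interpolated inside an f-string, removing the need to build `lineformat` in a separate step. A small sketch with hypothetical data:

.. code-block:: python

    formatted = {"artist": "Beck", "title": "Loser"}
    maxwidth = max(len(key) for key in formatted)

    for field in sorted(formatted):
        # The inner {maxwidth} is evaluated first, then used as the
        # right-align width for the outer field.
        print(f"{field:>{maxwidth}}: {formatted[field]}")
    # artist: Beck
    #  title: Loser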
@@ -28,8 +28,7 @@ class InlineError(Exception):

     def __init__(self, code, exc):
         super().__init__(
-            ("error in inline path field code:\n%s\n%s: %s")
-            % (code, type(exc).__name__, str(exc))
+            f"error in inline path field code:\n{code}\n{type(exc).__name__}: {exc}"
         )


@@ -37,7 +36,8 @@ def _compile_func(body):
     """Given Python code for a function body, return a compiled
     callable that invokes that code.
     """
-    body = "def {}():\n    {}".format(FUNC_NAME, body.replace("\n", "\n    "))
+    body = body.replace("\n", "\n    ")
+    body = f"def {FUNC_NAME}():\n    {body}"
     code = compile(body, "inline", "exec")
     env = {}
     eval(code, env)
@@ -60,14 +60,14 @@ class InlinePlugin(BeetsPlugin):
         for key, view in itertools.chain(
             config["item_fields"].items(), config["pathfields"].items()
         ):
-            self._log.debug("adding item field {0}", key)
+            self._log.debug("adding item field {}", key)
             func = self.compile_inline(view.as_str(), False)
             if func is not None:
                 self.template_fields[key] = func

         # Album fields.
         for key, view in config["album_fields"].items():
-            self._log.debug("adding album field {0}", key)
+            self._log.debug("adding album field {}", key)
             func = self.compile_inline(view.as_str(), True)
             if func is not None:
                 self.album_template_fields[key] = func
@@ -87,7 +87,7 @@ class InlinePlugin(BeetsPlugin):
             func = _compile_func(python_code)
         except SyntaxError:
             self._log.error(
-                "syntax error in inline field definition:\n{0}",
+                "syntax error in inline field definition:\n{}",
                 traceback.format_exc(),
             )
             return
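For context, here is a rough standalone version of the `_compile_func` transformation as the new hunk spells it, assuming a simple side-effect-free body (the real plugin wires the result into beets template fields):

.. code-block:: python

    FUNC_NAME = "__INLINE_FUNC__"

    def compile_func(body):
        # Indent the user-supplied code one level, then wrap it in a
        # function definition -- the same two steps as above.
        body = body.replace("\n", "\n    ")
        body = f"def {FUNC_NAME}():\n    {body}"
        env = {}
        exec(compile(body, "inline", "exec"), env)
        return env[FUNC_NAME]

    assert compile_func("return 6 * 7")() == 42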
@@ -77,7 +77,7 @@ class IPFSPlugin(BeetsPlugin):
             for album in lib.albums(args):
                 if len(album.items()) == 0:
                     self._log.info(
-                        "{0} does not contain items, aborting", album
+                        "{} does not contain items, aborting", album
                     )

                 self.ipfs_add(album)
@@ -122,13 +122,13 @@ class IPFSPlugin(BeetsPlugin):
             return False
         try:
             if album.ipfs:
-                self._log.debug("{0} already added", album_dir)
+                self._log.debug("{} already added", album_dir)
                 # Already added to ipfs
                 return False
         except AttributeError:
             pass

-        self._log.info("Adding {0} to ipfs", album_dir)
+        self._log.info("Adding {} to ipfs", album_dir)

         if self.config["nocopy"]:
             cmd = "ipfs add --nocopy -q -r".split()
@@ -138,7 +138,7 @@ class IPFSPlugin(BeetsPlugin):
         try:
             output = util.command_output(cmd).stdout.split()
         except (OSError, subprocess.CalledProcessError) as exc:
-            self._log.error("Failed to add {0}, error: {1}", album_dir, exc)
+            self._log.error("Failed to add {}, error: {}", album_dir, exc)
             return False
         length = len(output)

@@ -146,12 +146,12 @@ class IPFSPlugin(BeetsPlugin):
             line = line.strip()
             if linenr == length - 1:
                 # last printed line is the album hash
-                self._log.info("album: {0}", line)
+                self._log.info("album: {}", line)
                 album.ipfs = line
             else:
                 try:
                     item = album.items()[linenr]
-                    self._log.info("item: {0}", line)
+                    self._log.info("item: {}", line)
                     item.ipfs = line
                     item.store()
                 except IndexError:
@@ -180,11 +180,11 @@ class IPFSPlugin(BeetsPlugin):
             util.command_output(cmd)
         except (OSError, subprocess.CalledProcessError) as err:
             self._log.error(
-                "Failed to get {0} from ipfs.\n{1}", _hash, err.output
+                "Failed to get {} from ipfs.\n{.output}", _hash, err
             )
             return False

-        self._log.info("Getting {0} from ipfs", _hash)
+        self._log.info("Getting {} from ipfs", _hash)
         imp = ui.commands.TerminalImportSession(
             lib, loghandler=None, query=None, paths=[_hash]
         )
@@ -208,7 +208,7 @@ class IPFSPlugin(BeetsPlugin):
             msg = f"Failed to publish library. Error: {err}"
             self._log.error(msg)
             return False
-        self._log.info("hash of library: {0}", output)
+        self._log.info("hash of library: {}", output)

     def ipfs_import(self, lib, args):
         _hash = args[0]
@@ -232,7 +232,7 @@ class IPFSPlugin(BeetsPlugin):
         try:
             util.command_output(cmd)
         except (OSError, subprocess.CalledProcessError):
-            self._log.error(f"Could not import {_hash}")
+            self._log.error("Could not import {}", _hash)
             return False

         # add all albums from remotes into a combined library
@@ -306,7 +306,7 @@ class IPFSPlugin(BeetsPlugin):
                 items.append(item)
         if len(items) < 1:
             return False
-        self._log.info("Adding '{0}' to temporary library", album)
+        self._log.info("Adding '{}' to temporary library", album)
         new_album = tmplib.add_album(items)
         new_album.ipfs = album.ipfs
         new_album.store(inherit=False)

@@ -65,7 +65,7 @@ class KeyFinderPlugin(BeetsPlugin):
                     command + [util.syspath(item.path)]
                 ).stdout
             except (subprocess.CalledProcessError, OSError) as exc:
-                self._log.error("execution failed: {0}", exc)
+                self._log.error("execution failed: {}", exc)
                 continue

             try:
@@ -73,7 +73,7 @@ class KeyFinderPlugin(BeetsPlugin):
             except IndexError:
                 # Sometimes keyfinder-cli returns 0 but with no key, usually
                 # when the file is silent or corrupt, so we log and skip.
-                self._log.error("no key returned for path: {0}", item.path)
+                self._log.error("no key returned for path: {.path}", item)
                 continue

             try:
@@ -84,9 +84,7 @@ class KeyFinderPlugin(BeetsPlugin):

             item["initial_key"] = key
             self._log.info(
-                "added computed initial key {0} for {1}",
-                key,
-                util.displayable_path(item.path),
+                "added computed initial key {} for {.filepath}", key, item
             )

             if write:
@@ -96,10 +96,10 @@ class KodiUpdate(BeetsPlugin):
                     continue

                 self._log.info(
-                    "Kodi update triggered for {0}:{1}",
+                    "Kodi update triggered for {}:{}",
                     instance["host"],
                     instance["port"],
                 )
             except requests.exceptions.RequestException as e:
-                self._log.warning("Kodi update failed: {0}", str(e))
+                self._log.warning("Kodi update failed: {}", str(e))
                 continue

@@ -139,7 +139,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):

         # Read the tree
         if c14n_filename:
-            self._log.debug("Loading canonicalization tree {0}", c14n_filename)
+            self._log.debug("Loading canonicalization tree {}", c14n_filename)
             c14n_filename = normpath(c14n_filename)
             with codecs.open(c14n_filename, "r", encoding="utf-8") as f:
                 genres_tree = yaml.safe_load(f)
@@ -277,7 +277,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):

         genre = self._genre_cache[key]
         if self.config["extended_debug"]:
-            self._log.debug(f"last.fm (unfiltered) {entity} tags: {genre}")
+            self._log.debug("last.fm (unfiltered) {} tags: {}", entity, genre)
         return genre

     def fetch_album_genre(self, obj):
@@ -327,8 +327,8 @@ class LastGenrePlugin(plugins.BeetsPlugin):
         self, old: list[str], new: list[str]
     ) -> list[str]:
         """Combine old and new genres and process via _resolve_genres."""
-        self._log.debug(f"raw last.fm tags: {new}")
-        self._log.debug(f"existing genres taken into account: {old}")
+        self._log.debug("raw last.fm tags: {}", new)
+        self._log.debug("existing genres taken into account: {}", old)
         combined = old + new
         return self._resolve_genres(combined)

@@ -361,7 +361,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):
         )
         if resolved_genres:
             suffix = "whitelist" if self.whitelist else "any"
-            label = stage_label + f", {suffix}"
+            label = f"{stage_label}, {suffix}"
             if keep_genres:
                 label = f"keep + {label}"
             return self._format_and_stringify(resolved_genres), label
@@ -583,9 +583,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):
         item = task.item
         item.genre, src = self._get_genre(item)
         self._log.debug(
-            'genre for track "{0.title}" ({1}): {0.genre}',
-            item,
-            src,
+            'genre for track "{0.title}" ({1}): {0.genre}', item, src
         )
         item.store()

@@ -607,12 +605,12 @@ class LastGenrePlugin(plugins.BeetsPlugin):
         try:
             res = obj.get_top_tags()
         except PYLAST_EXCEPTIONS as exc:
-            self._log.debug("last.fm error: {0}", exc)
+            self._log.debug("last.fm error: {}", exc)
             return []
         except Exception as exc:
             # Isolate bugs in pylast.
             self._log.debug("{}", traceback.format_exc())
-            self._log.error("error in pylast library: {0}", exc)
+            self._log.error("error in pylast library: {}", exc)
             return []

         # Filter by weight (optionally).
@@ -70,10 +70,10 @@ class CustomUser(pylast.User):
         tuple with the total number of pages of results. Includes an MBID, if
         found.
         """
-        doc = self._request(self.ws_prefix + "." + method, cacheable, params)
+        doc = self._request(f"{self.ws_prefix}.{method}", cacheable, params)

         toptracks_node = doc.getElementsByTagName("toptracks")[0]
         total_pages = int(toptracks_node.getAttribute("totalPages"))
@@ -120,7 +120,7 @@ def import_lastfm(lib, log):
     if not user:
         raise ui.UserError("You must specify a user name for lastimport")

-    log.info("Fetching last.fm library for @{0}", user)
+    log.info("Fetching last.fm library for @{}", user)

     page_total = 1
     page_current = 0
@@ -130,7 +130,7 @@ def import_lastfm(lib, log):
     # Iterate through a yet to be known page total count
     while page_current < page_total:
         log.info(
-            "Querying page #{0}{1}...",
+            "Querying page #{}{}...",
             page_current + 1,
             f"/{page_total}" if page_total > 1 else "",
         )
@@ -147,27 +147,27 @@ def import_lastfm(lib, log):
                     unknown_total += unknown
                 break
             else:
-                log.error("ERROR: unable to read page #{0}", page_current + 1)
+                log.error("ERROR: unable to read page #{}", page_current + 1)
                 if retry < retry_limit:
                     log.info(
-                        "Retrying page #{0}... ({1}/{2} retry)",
+                        "Retrying page #{}... ({}/{} retry)",
                         page_current + 1,
                         retry + 1,
                         retry_limit,
                     )
                 else:
                     log.error(
-                        "FAIL: unable to fetch page #{0}, ",
-                        "tried {1} times",
+                        "FAIL: unable to fetch page #{}, ",
+                        "tried {} times",
                         page_current,
                         retry + 1,
                     )
         page_current += 1

     log.info("... done!")
-    log.info("finished processing {0} song pages", page_total)
-    log.info("{0} unknown play-counts", unknown_total)
-    log.info("{0} play-counts imported", found_total)
+    log.info("finished processing {} song pages", page_total)
+    log.info("{} unknown play-counts", unknown_total)
+    log.info("{} play-counts imported", found_total)


 def fetch_tracks(user, page, limit):
@@ -201,7 +201,7 @@ def process_tracks(lib, tracks, log):
     total = len(tracks)
     total_found = 0
     total_fails = 0
-    log.info("Received {0} tracks in this page, processing...", total)
+    log.info("Received {} tracks in this page, processing...", total)

     for num in range(0, total):
         song = None
@@ -220,7 +220,7 @@ def process_tracks(lib, tracks, log):
             else None
         )

-        log.debug("query: {0} - {1} ({2})", artist, title, album)
+        log.debug("query: {} - {} ({})", artist, title, album)

         # First try to query by musicbrainz's trackid
         if trackid:
@@ -231,7 +231,7 @@ def process_tracks(lib, tracks, log):
         # If not, try just album/title
         if song is None:
             log.debug(
-                "no album match, trying by album/title: {0} - {1}", album, title
+                "no album match, trying by album/title: {} - {}", album, title
             )
             query = dbcore.AndQuery(
                 [
@@ -268,10 +268,9 @@ def process_tracks(lib, tracks, log):
             count = int(song.get("play_count", 0))
             new_count = int(tracks[num].get("playcount", 1))
             log.debug(
-                "match: {0} - {1} ({2}) updating: play_count {3} => {4}",
-                song.artist,
-                song.title,
-                song.album,
+                "match: {0.artist} - {0.title} ({0.album}) updating:"
+                " play_count {1} => {2}",
+                song,
                 count,
                 new_count,
             )
@@ -280,11 +279,11 @@ def process_tracks(lib, tracks, log):
             total_found += 1
         else:
             total_fails += 1
-            log.info(" - No match: {0} - {1} ({2})", artist, title, album)
+            log.info(" - No match: {} - {} ({})", artist, title, album)

     if total_fails > 0:
         log.info(
-            "Acquired {0}/{1} play-counts ({2} unknown)",
+            "Acquired {}/{} play-counts ({} unknown)",
             total_found,
             total,
             total_fails,
@@ -42,14 +42,14 @@ class ListenBrainzPlugin(BeetsPlugin):
         unknown_total = 0
         ls = self.get_listens()
         tracks = self.get_tracks_from_listens(ls)
-        log.info(f"Found {len(ls)} listens")
+        log.info("Found {} listens", len(ls))
         if tracks:
             found, unknown = process_tracks(lib, tracks, log)
             found_total += found
             unknown_total += unknown
         log.info("... done!")
-        log.info("{0} unknown play-counts", unknown_total)
-        log.info("{0} play-counts imported", found_total)
+        log.info("{} unknown play-counts", unknown_total)
+        log.info("{} play-counts imported", found_total)

     def _make_request(self, url, params=None):
         """Makes a request to the ListenBrainz API."""
@@ -63,7 +63,7 @@ class ListenBrainzPlugin(BeetsPlugin):
             response.raise_for_status()
             return response.json()
         except requests.exceptions.RequestException as e:
-            self._log.debug(f"Invalid Search Error: {e}")
+            self._log.debug("Invalid Search Error: {}", e)
             return None

     def get_listens(self, min_ts=None, max_ts=None, count=None):
@@ -156,7 +156,7 @@ class ListenBrainzPlugin(BeetsPlugin):
             playlist_info = playlist.get("playlist")
             if playlist_info.get("creator") == "listenbrainz":
                 title = playlist_info.get("title")
-                self._log.debug(f"Playlist title: {title}")
+                self._log.debug("Playlist title: {}", title)
                 playlist_type = (
                     "Exploration" if "Exploration" in title else "Jams"
                 )
@@ -179,9 +179,7 @@ class ListenBrainzPlugin(BeetsPlugin):
             listenbrainz_playlists, key=lambda x: x["date"], reverse=True
         )
         for playlist in listenbrainz_playlists:
-            self._log.debug(
-                f"Playlist: {playlist['type']} - {playlist['date']}"
-            )
+            self._log.debug("Playlist: {0[type]} - {0[date]}", playlist)
         return listenbrainz_playlists

     def get_playlist(self, identifier):
@@ -154,7 +154,7 @@ def search_pairs(item):
         # examples include (live), (remix), and (acoustic).
         r"(.+?)\s+[(].*[)]$",
         # Remove any featuring artists from the title
-        r"(.*?) {}".format(plugins.feat_tokens(for_artist=False)),
+        rf"(.*?) {plugins.feat_tokens(for_artist=False)}",
         # Remove part of title after colon ':' for songs with subtitles
         r"(.+?)\s*:.*",
     ]
@@ -508,9 +508,9 @@ class SearchBackend(SoupMixin, Backend):
             # log out the candidate that did not make it but was close.
             # This may show a matching candidate with some noise in the name
             self.debug(
-                "({}, {}) does not match ({}, {}) but dist was close: {:.2f}",
-                result.artist,
-                result.title,
+                "({0.artist}, {0.title}) does not match ({1}, {2}) but dist"
+                " was close: {3:.2f}",
+                result,
                 target_artist,
                 target_title,
                 max_dist,
@@ -582,7 +582,7 @@ class Tekstowo(SearchBackend):
     """Fetch lyrics from Tekstowo.pl."""

     BASE_URL = "https://www.tekstowo.pl"
-    SEARCH_URL = BASE_URL + "/szukaj,{}.html"
+    SEARCH_URL = f"{BASE_URL}/szukaj,{{}}.html"

     def build_url(self, artist, title):
         artistitle = f"{artist.title()} {title.title()}"
@@ -644,7 +644,7 @@ class Google(SearchBackend):
         re.IGNORECASE | re.VERBOSE,
     )
     #: Split cleaned up URL title into artist and title parts.
-    URL_TITLE_PARTS_RE = re.compile(r" +(?:[ :|-]+|par|by) +")
+    URL_TITLE_PARTS_RE = re.compile(r" +(?:[ :|-]+|par|by) +|, ")

     SOURCE_DIST_FACTOR = {"www.azlyrics.com": 0.5, "www.songlyrics.com": 0.6}


@@ -702,8 +702,8 @@ class Google(SearchBackend):
             result_artist, result_title = "", parts[0]
         else:
             # sort parts by their similarity to the artist
-            parts.sort(key=lambda p: cls.get_part_dist(artist, title, p))
-            result_artist, result_title = parts[0], " ".join(parts[1:])
+            result_artist = min(parts, key=lambda p: string_dist(artist, p))
+            result_title = min(parts, key=lambda p: string_dist(title, p))

         return SearchResult(result_artist, result_title, item["link"])

@@ -838,15 +838,16 @@ class Translator(RequestHandler):
         lyrics_language = langdetect.detect(new_lyrics).upper()
         if lyrics_language == self.to_language:
             self.info(
-                "🔵 Lyrics are already in the target language {}",
-                self.to_language,
+                "🔵 Lyrics are already in the target language {.to_language}",
+                self,
             )
             return new_lyrics

         if self.from_languages and lyrics_language not in self.from_languages:
             self.info(
-                "🔵 Configuration {} does not permit translating from {}",
-                self.from_languages,
+                "🔵 Configuration {.from_languages} does not permit translating"
+                " from {}",
+                self,
                 lyrics_language,
             )
             return new_lyrics
@@ -854,7 +855,7 @@ class Translator(RequestHandler):
         lyrics, *url = new_lyrics.split("\n\nSource: ")
         with self.handle_request():
             translated_lines = self.append_translations(lyrics.splitlines())
-            self.info("🟢 Translated lyrics to {}", self.to_language)
+            self.info("🟢 Translated lyrics to {.to_language}", self)
             return "\n\nSource: ".join(["\n".join(translated_lines), *url])


@@ -1090,7 +1091,7 @@ class LyricsPlugin(RequestHandler, plugins.BeetsPlugin):
             return

         if lyrics := self.find_lyrics(item):
-            self.info("🟢 Found lyrics: {0}", item)
+            self.info("🟢 Found lyrics: {}", item)
             if translator := self.translator:
                 lyrics = translator.translate(lyrics, item.lyrics)
         else:
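The Google backend change above replaces a single sort over a combined distance with two independent `min` lookups. A sketch using a simplified distance function (the real `string_dist` lives in beets and is more elaborate):

.. code-block:: python

    from difflib import SequenceMatcher

    def string_dist(a: str, b: str) -> float:
        # Rough stand-in: 0.0 means identical, 1.0 means disjoint.
        return 1 - SequenceMatcher(None, a.lower(), b.lower()).ratio()

    parts = ["Hello", "Adele"]
    artist, title = "Adele", "Hello (Live)"

    # Pick the part closest to each target independently.
    result_artist = min(parts, key=lambda p: string_dist(artist, p))
    result_title = min(parts, key=lambda p: string_dist(title, p))
    assert (result_artist, result_title) == ("Adele", "Hello")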
@@ -83,9 +83,7 @@ class MusicBrainzCollectionPlugin(BeetsPlugin):
         collection = self.config["collection"].as_str()
         if collection:
             if collection not in collection_ids:
-                raise ui.UserError(
-                    "invalid collection ID: {}".format(collection)
-                )
+                raise ui.UserError(f"invalid collection ID: {collection}")
             return collection

         # No specified collection. Just return the first collection ID
@@ -156,10 +154,10 @@ class MusicBrainzCollectionPlugin(BeetsPlugin):
             if re.match(UUID_REGEX, aid):
                 album_ids.append(aid)
             else:
-                self._log.info("skipping invalid MBID: {0}", aid)
+                self._log.info("skipping invalid MBID: {}", aid)

         # Submit to MusicBrainz.
-        self._log.info("Updating MusicBrainz collection {0}...", collection_id)
+        self._log.info("Updating MusicBrainz collection {}...", collection_id)
         submit_albums(collection_id, album_ids)
         if remove_missing:
             self.remove_missing(collection_id, lib.albums())

@@ -73,7 +73,7 @@ class MBSubmitPlugin(BeetsPlugin):
             subprocess.Popen([picard_path] + paths)
             self._log.info("launched picard from\n{}", picard_path)
         except OSError as exc:
-            self._log.error(f"Could not open picard, got error:\n{exc}")
+            self._log.error("Could not open picard, got error:\n{}", exc)

     def print_tracks(self, session, task):
         for i in sorted(task.items, key=lambda i: i.track):
@@ -49,7 +49,7 @@ def load_meta_sources():
     meta_sources = {}

     for module_path, class_name in SOURCES.items():
-        module = import_module(METASYNC_MODULE + "." + module_path)
+        module = import_module(f"{METASYNC_MODULE}.{module_path}")
         meta_sources[class_name.lower()] = getattr(module, class_name)

     return meta_sources
@@ -117,13 +117,13 @@ class MetaSyncPlugin(BeetsPlugin):
             try:
                 cls = META_SOURCES[player]
             except KeyError:
-                self._log.error("Unknown metadata source '{}'".format(player))
+                self._log.error("Unknown metadata source '{}'", player)

             try:
                 meta_source_instances[player] = cls(self.config, self._log)
             except (ImportError, ConfigValueError) as e:
                 self._log.error(
-                    f"Failed to instantiate metadata source {player!r}: {e}"
+                    "Failed to instantiate metadata source {!r}: {}", player, e
                 )

         # Avoid needlessly iterating over items

@@ -44,11 +44,12 @@ class Amarok(MetaSource):
         "amarok_lastplayed": types.DATE,
     }

-    query_xml = '<query version="1.0"> \
-                    <filters> \
-                        <and><include field="filename" value=%s /></and> \
-                    </filters> \
-                </query>'
+    query_xml = """
+    <query version="1.0">
+        <filters>
+            <and><include field="filename" value={} /></and>
+        </filters>
+    </query>"""

     def __init__(self, config, log):
         super().__init__(config, log)
@@ -68,7 +69,7 @@ class Amarok(MetaSource):
         # of the result set. So query for the filename and then try to match
         # the correct item from the results we get back
         results = self.collection.Query(
-            self.query_xml % quoteattr(basename(path))
+            self.query_xml.format(quoteattr(basename(path)))
         )
         for result in results:
             if result["xesam:url"] != path:
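The Amarok template leaves the `value={}` slot unquoted because `xml.sax.saxutils.quoteattr` supplies the surrounding quotes itself (and escapes any embedded ones). A quick illustration with a made-up filename:

.. code-block:: python

    from xml.sax.saxutils import quoteattr

    query_xml = """
    <query version="1.0">
        <filters>
            <and><include field="filename" value={} /></and>
        </filters>
    </query>"""

    # quoteattr picks a quote style that cannot clash with the input.
    print(query_xml.format(quoteattr('track "7".mp3')))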
@@ -76,12 +76,12 @@ class Itunes(MetaSource):
         library_path = config["itunes"]["library"].as_filename()

         try:
-            self._log.debug(f"loading iTunes library from {library_path}")
+            self._log.debug("loading iTunes library from {}", library_path)
             with create_temporary_copy(library_path) as library_copy:
                 with open(library_copy, "rb") as library_copy_f:
                     raw_library = plistlib.load(library_copy_f)
         except OSError as e:
-            raise ConfigValueError("invalid iTunes library: " + e.strerror)
+            raise ConfigValueError(f"invalid iTunes library: {e.strerror}")
         except Exception:
             # It's likely the user configured their '.itl' library (<> xml)
             if os.path.splitext(library_path)[1].lower() != ".xml":
@@ -91,7 +91,7 @@ class Itunes(MetaSource):
                 )
             else:
                 hint = ""
-            raise ConfigValueError("invalid iTunes library" + hint)
+            raise ConfigValueError(f"invalid iTunes library{hint}")

         # Make the iTunes library queryable using the path
         self.collection = {
@@ -104,7 +104,7 @@ class Itunes(MetaSource):
         result = self.collection.get(util.bytestring_path(item.path).lower())

         if not result:
-            self._log.warning(f"no iTunes match found for {item}")
+            self._log.warning("no iTunes match found for {}", item)
             return

         item.itunes_rating = result.get("Rating")

@@ -226,8 +226,8 @@ class MissingPlugin(BeetsPlugin):
             for track_info in album_info.tracks:
                 if track_info.track_id not in item_mbids:
                     self._log.debug(
-                        "track {0} in album {1}",
-                        track_info.track_id,
-                        album_info.album_id,
+                        "track {.track_id} in album {.album_id}",
+                        track_info,
+                        album_info,
                     )
                     yield _item(track_info, album_info, album.id)
@@ -51,8 +51,8 @@ class MPDClientWrapper:
         if not self.strip_path.endswith("/"):
             self.strip_path += "/"

-        self._log.debug("music_directory: {0}", self.music_directory)
-        self._log.debug("strip_path: {0}", self.strip_path)
+        self._log.debug("music_directory: {.music_directory}", self)
+        self._log.debug("strip_path: {.strip_path}", self)

         self.client = mpd.MPDClient()

@@ -64,7 +64,7 @@ class MPDClientWrapper:
         if host[0] in ["/", "~"]:
             host = os.path.expanduser(host)

-        self._log.info("connecting to {0}:{1}", host, port)
+        self._log.info("connecting to {}:{}", host, port)
         try:
             self.client.connect(host, port)
         except OSError as e:
@@ -89,7 +89,7 @@ class MPDClientWrapper:
             try:
                 return getattr(self.client, command)()
             except (OSError, mpd.ConnectionError) as err:
-                self._log.error("{0}", err)
+                self._log.error("{}", err)

             if retries <= 0:
                 # if we exited without breaking, we couldn't reconnect in time :(
@@ -123,7 +123,7 @@ class MPDClientWrapper:
             result = os.path.join(self.music_directory, file)
         else:
             result = entry["file"]
-        self._log.debug("returning: {0}", result)
+        self._log.debug("returning: {}", result)
         return result, entry.get("id")

     def status(self):
@@ -169,7 +169,7 @@ class MPDStats:
         if item:
             return item
         else:
-            self._log.info("item not found: {0}", displayable_path(path))
+            self._log.info("item not found: {}", displayable_path(path))

     def update_item(self, item, attribute, value=None, increment=None):
         """Update the beets item. Set attribute to value or increment the value
@@ -188,10 +188,10 @@ class MPDStats:
             item.store()

             self._log.debug(
-                "updated: {0} = {1} [{2}]",
+                "updated: {} = {} [{.filepath}]",
                 attribute,
                 item[attribute],
-                displayable_path(item.path),
+                item,
             )

     def update_rating(self, item, skipped):
@@ -234,12 +234,12 @@ class MPDStats:
     def handle_played(self, song):
         """Updates the play count of a song."""
         self.update_item(song["beets_item"], "play_count", increment=1)
-        self._log.info("played {0}", displayable_path(song["path"]))
+        self._log.info("played {}", displayable_path(song["path"]))

     def handle_skipped(self, song):
         """Updates the skip count of a song."""
         self.update_item(song["beets_item"], "skip_count", increment=1)
-        self._log.info("skipped {0}", displayable_path(song["path"]))
+        self._log.info("skipped {}", displayable_path(song["path"]))

     def on_stop(self, status):
         self._log.info("stop")
@@ -278,11 +278,11 @@ class MPDStats:
             self.handle_song_change(self.now_playing)

         if is_url(path):
-            self._log.info("playing stream {0}", displayable_path(path))
+            self._log.info("playing stream {}", displayable_path(path))
             self.now_playing = None
             return

-        self._log.info("playing {0}", displayable_path(path))
+        self._log.info("playing {}", displayable_path(path))

         self.now_playing = {
             "started": time.time(),
@@ -307,12 +307,12 @@ class MPDStats:
             if "player" in events:
                 status = self.mpd.status()

-                handler = getattr(self, "on_" + status["state"], None)
+                handler = getattr(self, f"on_{status['state']}", None)

                 if handler:
                     handler(status)
                 else:
-                    self._log.debug('unhandled status "{0}"', status)
+                    self._log.debug('unhandled status "{}"', status)

             events = self.mpd.events()
@@ -101,8 +101,8 @@ class MPDUpdatePlugin(BeetsPlugin):

         try:
             s = BufferedSocket(host, port)
-        except OSError as e:
-            self._log.warning("MPD connection failed: {0}", str(e.strerror))
+        except OSError:
+            self._log.warning("MPD connection failed", exc_info=True)
             return

         resp = s.readline()
@@ -111,7 +111,7 @@ class MPDUpdatePlugin(BeetsPlugin):
             return

         if password:
-            s.send(b'password "%s"\n' % password.encode("utf8"))
+            s.send(f'password "{password}"\n'.encode())
             resp = s.readline()
             if b"OK" not in resp:
                 self._log.warning("Authentication failed: {0!r}", resp)
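Both spellings of the password line produce identical bytes; the new one formats as `str` first and encodes once. A minimal check with a hypothetical password value:

.. code-block:: python

    password = "s3cret"

    # Old: %-interpolation on a bytes literal (valid since Python 3.5).
    old = b'password "%s"\n' % password.encode("utf8")

    # New: format, then encode; str.encode() defaults to UTF-8.
    new = f'password "{password}"\n'.encode()
    assert old == new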
@@ -68,9 +68,7 @@ class MusicBrainzAPIError(util.HumanReadableError):
         super().__init__(reason, verb, tb)

     def get_message(self):
-        return "{} in {} with query {}".format(
-            self._reasonstr(), self.verb, repr(self.query)
-        )
+        return f"{self._reasonstr()} in {self.verb} with query {self.query!r}"


 RELEASE_INCLUDES = list(
@@ -203,7 +201,7 @@ def _multi_artist_credit(


 def track_url(trackid: str) -> str:
-    return urljoin(BASE_URL, "recording/" + trackid)
+    return urljoin(BASE_URL, f"recording/{trackid}")


 def _flatten_artist_credit(credit: list[JSONDict]) -> tuple[str, str, str]:
@@ -248,7 +246,7 @@ def _get_related_artist_names(relations, relation_type):


 def album_url(albumid: str) -> str:
-    return urljoin(BASE_URL, "release/" + albumid)
+    return urljoin(BASE_URL, f"release/{albumid}")


 def _preferred_release_event(
@@ -293,7 +291,7 @@ def _set_date_str(
             continue

         if original:
-            key = "original_" + key
+            key = f"original_{key}"
         setattr(info, key, date_num)


@@ -838,7 +836,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
         """
         self._log.debug("Requesting MusicBrainz release {}", album_id)
         if not (albumid := self._extract_id(album_id)):
-            self._log.debug("Invalid MBID ({0}).", album_id)
+            self._log.debug("Invalid MBID ({}).", album_id)
             return None

         try:
@@ -875,7 +873,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
         or None if no track is found. May raise a MusicBrainzAPIError.
         """
         if not (trackid := self._extract_id(track_id)):
-            self._log.debug("Invalid MBID ({0}).", track_id)
+            self._log.debug("Invalid MBID ({}).", track_id)
             return None

         try:

@@ -179,10 +179,8 @@ class ParentWorkPlugin(BeetsPlugin):

         if not item.mb_workid:
             self._log.info(
-                "No work for {}, \
-    add one at https://musicbrainz.org/recording/{}",
+                "No work for {0}, add one at https://musicbrainz.org/recording/{0.mb_trackid}",
                 item,
-                item.mb_trackid,
             )
             return

@@ -43,7 +43,7 @@ def play(
     """
     # Print number of tracks or albums to be played, log command to be run.
    item_type += "s" if len(selection) > 1 else ""
-    ui.print_("Playing {} {}.".format(len(selection), item_type))
+    ui.print_(f"Playing {len(selection)} {item_type}.")
    log.debug("executing command: {} {!r}", command_str, open_args)

    try:
@@ -154,7 +154,7 @@ class PlayPlugin(BeetsPlugin):
             return f"{command_str} {args}"
         else:
             # Don't include the marker in the command.
-            return command_str.replace(" " + ARGS_MARKER, "")
+            return command_str.replace(f" {ARGS_MARKER}", "")

     def _playlist_or_paths(self, paths):
         """Return either the raw paths of items or a playlist of the items."""
@@ -179,9 +179,7 @@ class PlayPlugin(BeetsPlugin):
         ui.print_(
             ui.colorize(
                 "text_warning",
-                "You are about to queue {} {}.".format(
-                    len(selection), item_type
-                ),
+                f"You are about to queue {len(selection)} {item_type}.",
             )
         )

@@ -123,7 +123,7 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin):

     def cli_exit(self, lib):
         for playlist in self.find_playlists():
-            self._log.info(f"Updating playlist: {playlist}")
+            self._log.info("Updating playlist: {}", playlist)
             base_dir = beets.util.bytestring_path(
                 self.relative_to
                 if self.relative_to
@@ -133,21 +133,16 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin):
             try:
                 self.update_playlist(playlist, base_dir)
             except beets.util.FilesystemError:
-                self._log.error(
-                    "Failed to update playlist: {}".format(
-                        beets.util.displayable_path(playlist)
-                    )
-                )
+                self._log.error("Failed to update playlist: {}", playlist)

     def find_playlists(self):
         """Find M3U playlists in the playlist directory."""
+        playlist_dir = beets.util.syspath(self.playlist_dir)
         try:
-            dir_contents = os.listdir(beets.util.syspath(self.playlist_dir))
+            dir_contents = os.listdir(playlist_dir)
         except OSError:
             self._log.warning(
-                "Unable to open playlist directory {}".format(
-                    beets.util.displayable_path(self.playlist_dir)
-                )
+                "Unable to open playlist directory {.playlist_dir}", self
             )
             return

@@ -195,9 +190,10 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin):

         if changes or deletions:
             self._log.info(
-                "Updated playlist {} ({} changes, {} deletions)".format(
-                    filename, changes, deletions
-                )
+                "Updated playlist {} ({} changes, {} deletions)",
+                filename,
+                changes,
+                deletions,
             )
             beets.util.copy(new_playlist, filename, replace=True)
             beets.util.remove(new_playlist)
@@ -22,9 +22,7 @@ def get_music_section(
 ):
     """Getting the section key for the music library in Plex."""
     api_endpoint = append_token("library/sections", token)
-    url = urljoin(
-        "{}://{}:{}".format(get_protocol(secure), host, port), api_endpoint
-    )
+    url = urljoin(f"{get_protocol(secure)}://{host}:{port}", api_endpoint)

     # Sends request.
     r = requests.get(
@@ -54,9 +52,7 @@ def update_plex(host, port, token, library_name, secure, ignore_cert_errors):
     )
     api_endpoint = f"library/sections/{section_key}/refresh"
     api_endpoint = append_token(api_endpoint, token)
-    url = urljoin(
-        "{}://{}:{}".format(get_protocol(secure), host, port), api_endpoint
-    )
+    url = urljoin(f"{get_protocol(secure)}://{host}:{port}", api_endpoint)

     # Sends request and returns requests object.
     r = requests.get(
@@ -70,7 +66,7 @@ def update_plex(host, port, token, library_name, secure, ignore_cert_errors):
 def append_token(url, token):
     """Appends the Plex Home token to the api call if required."""
     if token:
-        url += "?" + urlencode({"X-Plex-Token": token})
+        url += f"?{urlencode({'X-Plex-Token': token})}"
     return url

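`urljoin` only needs the scheme/host part as its base here, and the f-string assembles it in place. A sketch with hypothetical connection values:

.. code-block:: python

    from urllib.parse import urljoin, urlencode

    host, port, secure, token = "localhost", 32400, False, "TOKEN"
    protocol = "https" if secure else "http"

    url = urljoin(f"{protocol}://{host}:{port}", "library/sections")
    url += f"?{urlencode({'X-Plex-Token': token})}"
    assert url == "http://localhost:32400/library/sections?X-Plex-Token=TOKEN"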
@ -70,9 +70,7 @@ def call(args: list[str], log: Logger, **kwargs: Any):
|
|||
return command_output(args, **kwargs)
|
||||
except subprocess.CalledProcessError as e:
|
||||
log.debug(e.output.decode("utf8", "ignore"))
|
||||
raise ReplayGainError(
|
||||
"{} exited with status {}".format(args[0], e.returncode)
|
||||
)
|
||||
raise ReplayGainError(f"{args[0]} exited with status {e.returncode}")
|
||||
|
||||
|
||||
def db_to_lufs(db: float) -> float:
|
||||
|
|
@ -143,9 +141,8 @@ class RgTask:
|
|||
item.rg_track_peak = track_gain.peak
|
||||
item.store()
|
||||
self._log.debug(
|
||||
"applied track gain {0} LU, peak {1} of FS",
|
||||
item.rg_track_gain,
|
||||
item.rg_track_peak,
|
||||
"applied track gain {0.rg_track_gain} LU, peak {0.rg_track_peak} of FS",
|
||||
item,
|
||||
)
    def _store_album_gain(self, item: Item, album_gain: Gain):

@ -157,9 +154,8 @@ class RgTask:
        item.rg_album_peak = album_gain.peak
        item.store()
        self._log.debug(
            "applied album gain {0} LU, peak {1} of FS",
            item.rg_album_gain,
            item.rg_album_peak,
            "applied album gain {0.rg_album_gain} LU, peak {0.rg_album_peak} of FS",
            item,
        )

    def _store_track(self, write: bool):

@ -170,15 +166,14 @@ class RgTask:
            # `track_gains` without throwing FatalReplayGainError
            # => raise non-fatal exception & continue
            raise ReplayGainError(
                "ReplayGain backend `{}` failed for track {}".format(
                    self.backend_name, item
                )
                f"ReplayGain backend `{self.backend_name}` failed for track"
                f" {item}"
            )

        self._store_track_gain(item, self.track_gains[0])
        if write:
            item.try_write()
        self._log.debug("done analyzing {0}", item)
        self._log.debug("done analyzing {}", item)

    def _store_album(self, write: bool):
        """Store track/album gains for all tracks of the task in the database."""

@ -191,17 +186,15 @@ class RgTask:
            # `album_gain` without throwing FatalReplayGainError
            # => raise non-fatal exception & continue
            raise ReplayGainError(
                "ReplayGain backend `{}` failed "
                "for some tracks in album {}".format(
                    self.backend_name, self.album
                )
                f"ReplayGain backend `{self.backend_name}` failed "
                f"for some tracks in album {self.album}"
            )
        for item, track_gain in zip(self.items, self.track_gains):
            self._store_track_gain(item, track_gain)
            self._store_album_gain(item, self.album_gain)
            if write:
                item.try_write()
            self._log.debug("done analyzing {0}", item)
            self._log.debug("done analyzing {}", item)

    def store(self, write: bool):
        """Store computed gains for the items of this task in the database."""

@ -235,7 +228,7 @@ class R128Task(RgTask):
    def _store_track_gain(self, item: Item, track_gain: Gain):
        item.r128_track_gain = track_gain.gain
        item.store()
        self._log.debug("applied r128 track gain {0} LU", item.r128_track_gain)
        self._log.debug("applied r128 track gain {.r128_track_gain} LU", item)

    def _store_album_gain(self, item: Item, album_gain: Gain):
        """

@ -244,7 +237,7 @@ class R128Task(RgTask):
        """
        item.r128_album_gain = album_gain.gain
        item.store()
        self._log.debug("applied r128 album gain {0} LU", item.r128_album_gain)
        self._log.debug("applied r128 album gain {.r128_album_gain} LU", item)


AnyRgTask = TypeVar("AnyRgTask", bound=RgTask)

@ -385,10 +378,7 @@ class FfmpegBackend(Backend):
        album_gain = target_level_lufs - album_gain

        self._log.debug(
            "{}: gain {} LU, peak {}",
            task.album,
            album_gain,
            album_peak,
            "{.album}: gain {} LU, peak {}", task, album_gain, album_peak
        )

        task.album_gain = Gain(album_gain, album_peak)

@ -431,9 +421,9 @@ class FfmpegBackend(Backend):
        target_level_lufs = db_to_lufs(target_level)

        # call ffmpeg
        self._log.debug(f"analyzing {item}")
        self._log.debug("analyzing {}", item)
        cmd = self._construct_cmd(item, peak_method)
        self._log.debug("executing {0}", " ".join(map(displayable_path, cmd)))
        self._log.debug("executing {}", " ".join(map(displayable_path, cmd)))
        output = call(cmd, self._log).stderr.splitlines()

        # parse output

@ -501,12 +491,10 @@ class FfmpegBackend(Backend):
            if self._parse_float(b"M: " + line[1]) >= gating_threshold:
                n_blocks += 1
        self._log.debug(
            "{}: {} blocks over {} LUFS".format(
                item, n_blocks, gating_threshold
            )
            "{}: {} blocks over {} LUFS", item, n_blocks, gating_threshold
        )

        self._log.debug("{}: gain {} LU, peak {}".format(item, gain, peak))
        self._log.debug("{}: gain {} LU, peak {}", item, gain, peak)

        return Gain(gain, peak), n_blocks


@ -526,9 +514,7 @@ class FfmpegBackend(Backend):
            if output[i].startswith(search):
                return i
        raise ReplayGainError(
            "ffmpeg output: missing {} after line {}".format(
                repr(search), start_line
            )
            f"ffmpeg output: missing {search!r} after line {start_line}"
        )

    def _parse_float(self, line: bytes) -> float:
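A small detail in the hunk above: the `!r` conversion inside the f-string takes over from the explicit `repr(search)` call. A self-contained check, with stand-in values for `search` and `start_line`:

.. code-block:: python

    # f"{x!r}" applies repr(), matching the old "{}".format(repr(x)) spelling.
    search = b"[Parsed_ebur128"  # stand-in for the marker sought in ffmpeg output
    start_line = 42
    msg = f"ffmpeg output: missing {search!r} after line {start_line}"
    assert msg == "ffmpeg output: missing b'[Parsed_ebur128' after line 42"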
@ -575,7 +561,7 @@ class CommandBackend(Backend):
            # Explicit executable path.
            if not os.path.isfile(self.command):
                raise FatalReplayGainError(
                    "replaygain command does not exist: {}".format(self.command)
                    f"replaygain command does not exist: {self.command}"
                )
        else:
            # Check whether the program is in $PATH.

@ -663,8 +649,8 @@ class CommandBackend(Backend):
            cmd = cmd + ["-d", str(int(target_level - 89))]
        cmd = cmd + [syspath(i.path) for i in items]

        self._log.debug("analyzing {0} files", len(items))
        self._log.debug("executing {0}", " ".join(map(displayable_path, cmd)))
        self._log.debug("analyzing {} files", len(items))
        self._log.debug("executing {}", " ".join(map(displayable_path, cmd)))
        output = call(cmd, self._log).stdout
        self._log.debug("analysis finished")
        return self.parse_tool_output(

@ -680,7 +666,7 @@ class CommandBackend(Backend):
        for line in text.split(b"\n")[1 : num_lines + 1]:
            parts = line.split(b"\t")
            if len(parts) != 6 or parts[0] == b"File":
                self._log.debug("bad tool output: {0}", text)
                self._log.debug("bad tool output: {}", text)
                raise ReplayGainError("mp3gain failed")

            # _file = parts[0]

@ -1105,9 +1091,8 @@ class AudioToolsBackend(Backend):
        )

        self._log.debug(
            "ReplayGain for track {0} - {1}: {2:.2f}, {3:.2f}",
            item.artist,
            item.title,
            "ReplayGain for track {0.artist} - {0.title}: {1:.2f}, {2:.2f}",
            item,
            rg_track_gain,
            rg_track_peak,
        )

@ -1132,7 +1117,7 @@ class AudioToolsBackend(Backend):
            )
            track_gains.append(Gain(gain=rg_track_gain, peak=rg_track_peak))
            self._log.debug(
                "ReplayGain for track {0}: {1:.2f}, {2:.2f}",
                "ReplayGain for track {}: {:.2f}, {:.2f}",
                item,
                rg_track_gain,
                rg_track_peak,

@ -1145,8 +1130,8 @@ class AudioToolsBackend(Backend):
            rg_album_gain, task.target_level
        )
        self._log.debug(
            "ReplayGain for album {0}: {1:.2f}, {2:.2f}",
            task.items[0].album,
            "ReplayGain for album {.items[0].album}: {:.2f}, {:.2f}",
            task,
            rg_album_gain,
            rg_album_peak,
        )

@ -1229,10 +1214,8 @@ class ReplayGainPlugin(BeetsPlugin):

        if self.backend_name not in BACKENDS:
            raise ui.UserError(
                "Selected ReplayGain backend {} is not supported. "
                "Please select one of: {}".format(
                    self.backend_name, ", ".join(BACKENDS.keys())
                )
                f"Selected ReplayGain backend {self.backend_name} is not"
                f" supported. Please select one of: {', '.join(BACKENDS)}"
            )

        # FIXME: Consider renaming the configuration option to 'peak_method'

@ -1240,10 +1223,9 @@ class ReplayGainPlugin(BeetsPlugin):
        peak_method = self.config["peak"].as_str()
        if peak_method not in PeakMethod.__members__:
            raise ui.UserError(
                "Selected ReplayGain peak method {} is not supported. "
                "Please select one of: {}".format(
                    peak_method, ", ".join(PeakMethod.__members__)
                )
                f"Selected ReplayGain peak method {peak_method} is not"
                " supported. Please select one of:"
                f" {', '.join(PeakMethod.__members__)}"
            )
        # This only applies to plain old rg tags, r128 doesn't store peak
        # values.

@ -1348,19 +1330,19 @@ class ReplayGainPlugin(BeetsPlugin):
        items, nothing is done.
        """
        if not force and not self.album_requires_gain(album):
            self._log.info("Skipping album {0}", album)
            self._log.info("Skipping album {}", album)
            return

        items_iter = iter(album.items())
        use_r128 = self.should_use_r128(next(items_iter))
        if any(use_r128 != self.should_use_r128(i) for i in items_iter):
            self._log.error(
                "Cannot calculate gain for album {0} (incompatible formats)",
                "Cannot calculate gain for album {} (incompatible formats)",
                album,
            )
            return

        self._log.info("analyzing {0}", album)
        self._log.info("analyzing {}", album)

        discs: dict[int, list[Item]] = {}
        if self.config["per_disc"].get(bool):

@ -1384,7 +1366,7 @@ class ReplayGainPlugin(BeetsPlugin):
                callback=store_cb,
            )
        except ReplayGainError as e:
            self._log.info("ReplayGain error: {0}", e)
            self._log.info("ReplayGain error: {}", e)
        except FatalReplayGainError as e:
            raise ui.UserError(f"Fatal replay gain error: {e}")

@ -1396,7 +1378,7 @@ class ReplayGainPlugin(BeetsPlugin):
        in the item, nothing is done.
        """
        if not force and not self.track_requires_gain(item):
            self._log.info("Skipping track {0}", item)
            self._log.info("Skipping track {}", item)
            return

        use_r128 = self.should_use_r128(item)

@ -1413,7 +1395,7 @@ class ReplayGainPlugin(BeetsPlugin):
                callback=store_cb,
            )
        except ReplayGainError as e:
            self._log.info("ReplayGain error: {0}", e)
            self._log.info("ReplayGain error: {}", e)
        except FatalReplayGainError as e:
            raise ui.UserError(f"Fatal replay gain error: {e}")

@ -1526,18 +1508,16 @@ class ReplayGainPlugin(BeetsPlugin):
        if opts.album:
            albums = lib.albums(args)
            self._log.info(
                "Analyzing {} albums ~ {} backend...".format(
                    len(albums), self.backend_name
                )
                f"Analyzing {len(albums)} albums ~"
                f" {self.backend_name} backend..."
            )
            for album in albums:
                self.handle_album(album, write, force)
        else:
            items = lib.items(args)
            self._log.info(
                "Analyzing {} tracks ~ {} backend...".format(
                    len(items), self.backend_name
                )
                f"Analyzing {len(items)} tracks ~"
                f" {self.backend_name} backend..."
            )
            for item in items:
                self.handle_track(item, write, force)

@ -1556,8 +1536,10 @@ class ReplayGainPlugin(BeetsPlugin):
            "--threads",
            dest="threads",
            type=int,
            help="change the number of threads, \
                defaults to maximum available processors",
            help=(
                "change the number of threads, defaults to maximum available"
                " processors"
            ),
        )
        cmd.parser.add_option(
            "-f",

@ -1565,8 +1547,10 @@ class ReplayGainPlugin(BeetsPlugin):
            dest="force",
            action="store_true",
            default=False,
            help="analyze all files, including those that "
            "already have ReplayGain metadata",
            help=(
                "analyze all files, including those that already have"
                " ReplayGain metadata"
            ),
        )
        cmd.parser.add_option(
            "-w",
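Several rewritten log calls above use `str.format`'s attribute-access field syntax, which the beets logging shim evaluates lazily: `{0.attr}` reads an attribute off the first argument, and `{.attr}` does the same with the index left implicit. A minimal sketch with a made-up stand-in object:

.. code-block:: python

    # FakeItem is a stand-in for a beets Item; only the format syntax matters.
    class FakeItem:
        rg_album_gain = -6.5
        r128_track_gain = -3.2

    item = FakeItem()
    print("applied album gain {0.rg_album_gain} LU".format(item))
    print("applied r128 track gain {.r128_track_gain} LU".format(item))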
@ -57,9 +57,9 @@ class RewritePlugin(BeetsPlugin):
                raise ui.UserError("invalid rewrite specification")
            if fieldname not in library.Item._fields:
                raise ui.UserError(
                    "invalid field name (%s) in rewriter" % fieldname
                    f"invalid field name ({fieldname}) in rewriter"
                )
            self._log.debug("adding template field {0}", key)
            self._log.debug("adding template field {}", key)
            pattern = re.compile(pattern.lower())
            rules[fieldname].append((pattern, value))
            if fieldname == "artist":
@ -59,9 +59,7 @@ class ScrubPlugin(BeetsPlugin):
        def scrub_func(lib, opts, args):
            # Walk through matching files and remove tags.
            for item in lib.items(args):
                self._log.info(
                    "scrubbing: {0}", util.displayable_path(item.path)
                )
                self._log.info("scrubbing: {.filepath}", item)
                self._scrub_item(item, opts.write)

        scrub_cmd = ui.Subcommand("scrub", help="clean audio tags")

@ -110,7 +108,7 @@ class ScrubPlugin(BeetsPlugin):
                f.save()
            except (OSError, mutagen.MutagenError) as exc:
                self._log.error(
                    "could not scrub {0}: {1}", util.displayable_path(path), exc
                    "could not scrub {}: {}", util.displayable_path(path), exc
                )

    def _scrub_item(self, item, restore):

@ -124,7 +122,7 @@ class ScrubPlugin(BeetsPlugin):
                util.syspath(item.path), config["id3v23"].get(bool)
            )
        except mediafile.UnreadableFileError as exc:
            self._log.error("could not open file to scrub: {0}", exc)
            self._log.error("could not open file to scrub: {}", exc)
            return
        images = mf.images

@ -144,12 +142,10 @@ class ScrubPlugin(BeetsPlugin):
                mf.images = images
                mf.save()
            except mediafile.UnreadableFileError as exc:
                self._log.error("could not write tags: {0}", exc)
                self._log.error("could not write tags: {}", exc)

    def import_task_files(self, session, task):
        """Automatically scrub imported files."""
        for item in task.imported_items():
            self._log.debug(
                "auto-scrubbing {0}", util.displayable_path(item.path)
            )
            self._log.debug("auto-scrubbing {.filepath}", item)
            self._scrub_item(item, ui.should_write())
@ -138,10 +138,9 @@ class SmartPlaylistPlugin(BeetsPlugin):
                if name in args
            }
            if not playlists:
                unmatched = [name for name, _, _ in self._unmatched_playlists]
                raise ui.UserError(
                    "No playlist matching any of {} found".format(
                        [name for name, _, _ in self._unmatched_playlists]
                    )
                    f"No playlist matching any of {unmatched} found"
                )

            self._matched_playlists = playlists

@ -235,7 +234,7 @@ class SmartPlaylistPlugin(BeetsPlugin):
        for playlist in self._unmatched_playlists:
            n, (q, _), (a_q, _) = playlist
            if self.matches(model, q, a_q):
                self._log.debug("{0} will be updated because of {1}", n, model)
                self._log.debug("{} will be updated because of {}", n, model)
                self._matched_playlists.add(playlist)
                self.register_listener("cli_exit", self.update_playlists)

@ -244,12 +243,12 @@ class SmartPlaylistPlugin(BeetsPlugin):
    def update_playlists(self, lib, pretend=False):
        if pretend:
            self._log.info(
                "Showing query results for {0} smart playlists...",
                "Showing query results for {} smart playlists...",
                len(self._matched_playlists),
            )
        else:
            self._log.info(
                "Updating {0} smart playlists...", len(self._matched_playlists)
                "Updating {} smart playlists...", len(self._matched_playlists)
            )

        playlist_dir = self.config["playlist_dir"].as_filename()

@ -268,7 +267,7 @@ class SmartPlaylistPlugin(BeetsPlugin):
            if pretend:
                self._log.info("Results for playlist {}:", name)
            else:
                self._log.info("Creating playlist {0}", name)
                self._log.info("Creating playlist {}", name)
            items = []

            if query:

@ -331,8 +330,9 @@ class SmartPlaylistPlugin(BeetsPlugin):
                            for key, value in attr
                        ]
                        attrs = "".join(al)
                        comment = "#EXTINF:{}{},{} - {}\n".format(
                            int(item.length), attrs, item.artist, item.title
                        comment = (
                            f"#EXTINF:{int(item.length)}{attrs},"
                            f"{item.artist} - {item.title}\n"
                        )
                    f.write(comment.encode("utf-8") + entry.uri + b"\n")
        # Send an event when playlists were updated.

@ -340,13 +340,11 @@ class SmartPlaylistPlugin(BeetsPlugin):

        if pretend:
            self._log.info(
                "Displayed results for {0} playlists",
                "Displayed results for {} playlists",
                len(self._matched_playlists),
            )
        else:
            self._log.info(
                "{0} playlists updated", len(self._matched_playlists)
            )
            self._log.info("{} playlists updated", len(self._matched_playlists))


class PlaylistItem:
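For context on the `#EXTINF` hunk above: the comment follows the extended M3U convention of length in whole seconds, optional attributes, then `artist - title`. A sketch with stand-in values:

.. code-block:: python

    # Stand-in values; the plugin fills these from the Item being written.
    length, attrs = 203.7, ' genre="Rock"'
    artist, title = "The Beatles", "Lady Madonna"
    comment = f"#EXTINF:{int(length)}{attrs},{artist} - {title}\n"
    assert comment == '#EXTINF:203 genre="Rock",The Beatles - Lady Madonna\n'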
@ -168,8 +168,9 @@ class SpotifyPlugin(
        c_secret: str = self.config["client_secret"].as_str()

        headers = {
            "Authorization": "Basic {}".format(
                base64.b64encode(f"{c_id}:{c_secret}".encode()).decode()
            )
            "Authorization": (
                "Basic"
                f" {base64.b64encode(f'{c_id}:{c_secret}'.encode()).decode()}"
            )
        }
        response = requests.post(
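The nested f-string above packs the usual HTTP Basic scheme, base64 over `client_id:client_secret`, into a single expression. Unrolled with placeholder credentials:

.. code-block:: python

    import base64

    c_id, c_secret = "my-client-id", "my-client-secret"  # placeholders
    auth = base64.b64encode(f"{c_id}:{c_secret}".encode()).decode()
    headers = {"Authorization": f"Basic {auth}"}
    print(headers["Authorization"])  # Basic bXktY2xpZW50LWlkOm15LWNsaWVudC1zZWNyZXQ=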
@ -182,14 +183,12 @@ class SpotifyPlugin(
            response.raise_for_status()
        except requests.exceptions.HTTPError as e:
            raise ui.UserError(
                "Spotify authorization failed: {}\n{}".format(e, response.text)
                f"Spotify authorization failed: {e}\n{response.text}"
            )
        self.access_token = response.json()["access_token"]

        # Save the token for later use.
        self._log.debug(
            "{} access token: {}", self.data_source, self.access_token
        )
        self._log.debug("{0.data_source} access token: {0.access_token}", self)
        with open(self._tokenfile(), "w") as f:
            json.dump({"access_token": self.access_token}, f)

@ -227,16 +226,16 @@ class SpotifyPlugin(
            self._log.error("ReadTimeout.")
            raise APIError("Request timed out.")
        except requests.exceptions.ConnectionError as e:
            self._log.error(f"Network error: {e}")
            self._log.error("Network error: {}", e)
            raise APIError("Network error.")
        except requests.exceptions.RequestException as e:
            if e.response is None:
                self._log.error(f"Request failed: {e}")
                self._log.error("Request failed: {}", e)
                raise APIError("Request failed.")
            if e.response.status_code == 401:
                self._log.debug(
                    f"{self.data_source} access token has expired. "
                    f"Reauthenticating."
                    "{.data_source} access token has expired. Reauthenticating.",
                    self,
                )
                self._authenticate()
                return self._handle_response(

@ -255,7 +254,7 @@ class SpotifyPlugin(
                    "Retry-After", DEFAULT_WAITING_TIME
                )
                self._log.debug(
                    f"Too many API requests. Retrying after {seconds} seconds."
                    "Too many API requests. Retrying after {} seconds.", seconds
                )
                time.sleep(int(seconds) + 1)
                return self._handle_response(

@ -276,7 +275,7 @@ class SpotifyPlugin(
                    f"URL:\n{url}\nparams:\n{params}"
                )
            else:
                self._log.error(f"Request failed. Error: {e}")
                self._log.error("Request failed. Error: {}", e)
            raise APIError("Request failed.")

    def album_for_id(self, album_id: str) -> AlbumInfo | None:

@ -291,7 +290,9 @@ class SpotifyPlugin(
        if not (spotify_id := self._extract_id(album_id)):
            return None

        album_data = self._handle_response("get", self.album_url + spotify_id)
        album_data = self._handle_response(
            "get", f"{self.album_url}{spotify_id}"
        )
        if album_data["name"] == "":
            self._log.debug("Album removed from Spotify: {}", album_id)
            return None

@ -314,9 +315,7 @@ class SpotifyPlugin(
        else:
            raise ui.UserError(
                "Invalid `release_date_precision` returned "
                "by {} API: '{}'".format(
                    self.data_source, release_date_precision
                )
                f"by {self.data_source} API: '{release_date_precision}'"
            )

        tracks_data = album_data["tracks"]

@ -409,7 +408,7 @@ class SpotifyPlugin(
        # release) and `track.medium_total` (total number of tracks on
        # the track's disc).
        album_data = self._handle_response(
            "get", self.album_url + track_data["album"]["id"]
            "get", f"{self.album_url}{track_data['album']['id']}"
        )
        medium_total = 0
        for i, track_data in enumerate(album_data["tracks"]["items"], start=1):

@ -438,7 +437,7 @@ class SpotifyPlugin(
            filters=filters, query_string=query_string
        )

        self._log.debug(f"Searching {self.data_source} for '{query}'")
        self._log.debug("Searching {.data_source} for '{}'", self, query)
        try:
            response = self._handle_response(
                "get",

@ -448,11 +447,11 @@ class SpotifyPlugin(
        except APIError as e:
            self._log.debug("Spotify API error: {}", e)
            return ()
        response_data = response.get(query_type + "s", {}).get("items", [])
        response_data = response.get(f"{query_type}s", {}).get("items", [])
        self._log.debug(
            "Found {} result(s) from {} for '{}'",
            "Found {} result(s) from {.data_source} for '{}'",
            len(response_data),
            self.data_source,
            self,
            query,
        )
        return response_data

@ -472,17 +471,17 @@ class SpotifyPlugin(
            "-m",
            "--mode",
            action="store",
            help='"open" to open {} with playlist, '
            '"list" to print (default)'.format(self.data_source),
            help=(
                f'"open" to open {self.data_source} with playlist, '
                '"list" to print (default)'
            ),
        )
        spotify_cmd.parser.add_option(
            "-f",
            "--show-failures",
            action="store_true",
            dest="show_failures",
            help="list tracks that did not match a {} ID".format(
                self.data_source
            ),
            help=f"list tracks that did not match a {self.data_source} ID",
        )
        spotify_cmd.func = queries

@ -515,7 +514,7 @@ class SpotifyPlugin(

        if self.config["mode"].get() not in ["list", "open"]:
            self._log.warning(
                "{0} is not a valid mode", self.config["mode"].get()
                "{} is not a valid mode", self.config["mode"].get()
            )
            return False

@ -538,8 +537,8 @@ class SpotifyPlugin(

        if not items:
            self._log.debug(
                "Your beets query returned no items, skipping {}.",
                self.data_source,
                "Your beets query returned no items, skipping {.data_source}.",
                self,
            )
            return

@ -594,8 +593,8 @@ class SpotifyPlugin(
            or self.config["tiebreak"].get() == "first"
        ):
            self._log.debug(
                "{} track(s) found, count: {}",
                self.data_source,
                "{.data_source} track(s) found, count: {}",
                self,
                len(response_data_tracks),
            )
            chosen_result = response_data_tracks[0]

@ -618,19 +617,19 @@ class SpotifyPlugin(
        if failure_count > 0:
            if self.config["show_failures"].get():
                self._log.info(
                    "{} track(s) did not match a {} ID:",
                    "{} track(s) did not match a {.data_source} ID:",
                    failure_count,
                    self.data_source,
                    self,
                )
                for track in failures:
                    self._log.info("track: {}", track)
                self._log.info("")
            else:
                self._log.warning(
                    "{} track(s) did not match a {} ID:\n"
                    "{} track(s) did not match a {.data_source} ID:\n"
                    "use --show-failures to display",
                    failure_count,
                    self.data_source,
                    self,
                )

        return results

@ -647,20 +646,18 @@ class SpotifyPlugin(
            spotify_ids = [track_data["id"] for track_data in results]
            if self.config["mode"].get() == "open":
                self._log.info(
                    "Attempting to open {} with playlist".format(
                        self.data_source
                    )
                    "Attempting to open {.data_source} with playlist", self
                )
                spotify_url = "spotify:trackset:Playlist:" + ",".join(
                    spotify_ids
                )
                spotify_url = (
                    f"spotify:trackset:Playlist:{','.join(spotify_ids)}"
                )
                webbrowser.open(spotify_url)
            else:
                for spotify_id in spotify_ids:
                    print(self.open_track_url + spotify_id)
                    print(f"{self.open_track_url}{spotify_id}")
        else:
            self._log.warning(
                f"No {self.data_source} tracks found from beets query"
                "No {.data_source} tracks found from beets query", self
            )

    def _fetch_info(self, items, write, force):

@ -705,7 +702,7 @@ class SpotifyPlugin(

    def track_info(self, track_id: str):
        """Fetch a track's popularity and external IDs using its Spotify ID."""
        track_data = self._handle_response("get", self.track_url + track_id)
        track_data = self._handle_response("get", f"{self.track_url}{track_id}")
        external_ids = track_data.get("external_ids", {})
        popularity = track_data.get("popularity")
        self._log.debug(

@ -724,7 +721,7 @@ class SpotifyPlugin(
        """Fetch track audio features by its Spotify ID."""
        try:
            return self._handle_response(
                "get", self.audio_features_url + track_id
                "get", f"{self.audio_features_url}{track_id}"
            )
        except APIError as e:
            self._log.debug("Spotify API error: {}", e)
@ -168,9 +168,7 @@ class SubsonicPlaylistPlugin(BeetsPlugin):
        params["v"] = "1.12.0"
        params["c"] = "beets"
        resp = requests.get(
            "{}/rest/{}?{}".format(
                self.config["base_url"].get(), endpoint, urlencode(params)
            ),
            f"{self.config['base_url'].get()}/rest/{endpoint}?{urlencode(params)}",
            timeout=10,
        )
        return resp

@ -182,5 +180,5 @@ class SubsonicPlaylistPlugin(BeetsPlugin):
        for track in tracks:
            if track not in output:
                output[track] = ";"
            output[track] += name + ";"
            output[track] += f"{name};"
        return output
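The one-line f-string above replaces a three-argument `format` call; `urlencode` still handles the query-string escaping. With placeholder values:

.. code-block:: python

    from urllib.parse import urlencode

    base_url, endpoint = "http://localhost:4040", "getPlaylists"  # placeholders
    params = {"u": "admin", "v": "1.12.0", "c": "beets"}
    url = f"{base_url}/rest/{endpoint}?{urlencode(params)}"
    assert url == "http://localhost:4040/rest/getPlaylists?u=admin&v=1.12.0&c=beets"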
@ -74,7 +74,7 @@ class SubsonicUpdate(BeetsPlugin):
        # Pick the random sequence and salt the password
        r = string.ascii_letters + string.digits
        salt = "".join([random.choice(r) for _ in range(6)])
        salted_password = password + salt
        salted_password = f"{password}{salt}"
        token = hashlib.md5(salted_password.encode("utf-8")).hexdigest()

        # Put together the payload of the request to the server and the URL

@ -101,14 +101,14 @@ class SubsonicUpdate(BeetsPlugin):
            context_path = ""
        url = f"http://{host}:{port}{context_path}"

        return url + f"/rest/{endpoint}"
        return f"{url}/rest/{endpoint}"

    def start_scan(self):
        user = self.config["user"].as_str()
        auth = self.config["auth"].as_str()
        url = self.__format_url("startScan")
        self._log.debug("URL is {0}", url)
        self._log.debug("auth type is {0}", self.config["auth"])
        self._log.debug("URL is {}", url)
        self._log.debug("auth type is {.config[auth]}", self)

        if auth == "token":
            salt, token = self.__create_token()

@ -145,14 +145,15 @@ class SubsonicUpdate(BeetsPlugin):
                and json["subsonic-response"]["status"] == "ok"
            ):
                count = json["subsonic-response"]["scanStatus"]["count"]
                self._log.info(f"Updating Subsonic; scanning {count} tracks")
                self._log.info("Updating Subsonic; scanning {} tracks", count)
            elif (
                response.status_code == 200
                and json["subsonic-response"]["status"] == "failed"
            ):
                error_message = json["subsonic-response"]["error"]["message"]
                self._log.error(f"Error: {error_message}")
                self._log.error(
                    "Error: {[subsonic-response][error][message]}", json
                )
            else:
                self._log.error("Error: {0}", json)
                self._log.error("Error: {}", json)
        except Exception as error:
            self._log.error(f"Error: {error}")
            self._log.error("Error: {}", error)
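The salted token in the first hunk implements the Subsonic API's token authentication: `md5(password + salt)` with a fresh random salt. A compact sketch; the password is a placeholder, and the `s`/`t` query-parameter names are my reading of the Subsonic scheme rather than something shown in this diff:

.. code-block:: python

    import hashlib
    import random
    import string

    password = "demo-password"  # placeholder, normally read from config
    alphabet = string.ascii_letters + string.digits
    salt = "".join(random.choice(alphabet) for _ in range(6))
    token = hashlib.md5(f"{password}{salt}".encode("utf-8")).hexdigest()
    print(f"s={salt}&t={token}")  # salt and token as sent to the server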
@ -23,7 +23,7 @@ __version__ = "1.1"

PATTERN_THE = "^the\\s"
PATTERN_A = "^[a][n]?\\s"
FORMAT = "{0}, {1}"
FORMAT = "{}, {}"


class ThePlugin(BeetsPlugin):

@ -38,7 +38,7 @@ class ThePlugin(BeetsPlugin):
            {
                "the": True,
                "a": True,
                "format": "{0}, {1}",
                "format": "{}, {}",
                "strip": False,
                "patterns": [],
            }

@ -50,11 +50,11 @@ class ThePlugin(BeetsPlugin):
                try:
                    re.compile(p)
                except re.error:
                    self._log.error("invalid pattern: {0}", p)
                    self._log.error("invalid pattern: {}", p)
                else:
                    if not (p.startswith("^") or p.endswith("$")):
                        self._log.warning(
                            'warning: "{0}" will not match string start/end',
                            'warning: "{}" will not match string start/end',
                            p,
                        )
        if self.config["a"]:

@ -94,7 +94,7 @@ class ThePlugin(BeetsPlugin):
            for p in self.patterns:
                r = self.unthe(text, p)
                if r != text:
                    self._log.debug('"{0}" -> "{1}"', text, r)
                    self._log.debug('"{}" -> "{}"', text, r)
                    break
            return r
        else:
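Note that `FORMAT = "{}, {}"` deliberately stays a `str.format` template rather than becoming an f-string: the plugin lets users override the format in configuration, so the string is only filled in at runtime, which an f-string literal cannot do. Roughly:

.. code-block:: python

    FORMAT = "{}, {}"  # default; may be replaced by a user-supplied template
    print(FORMAT.format("Beatles", "The"))  # Beatles, The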
@ -104,21 +104,21 @@ class ThumbnailsPlugin(BeetsPlugin):
                f"Thumbnails: ArtResizer backend {ArtResizer.shared.method}"
                f" unexpectedly cannot write image metadata."
            )
        self._log.debug(f"using {ArtResizer.shared.method} to write metadata")
        self._log.debug("using {.shared.method} to write metadata", ArtResizer)

        uri_getter = GioURI()
        if not uri_getter.available:
            uri_getter = PathlibURI()
        self._log.debug("using {0.name} to compute URIs", uri_getter)
        self._log.debug("using {.name} to compute URIs", uri_getter)
        self.get_uri = uri_getter.uri

        return True

    def process_album(self, album):
        """Produce thumbnails for the album folder."""
        self._log.debug("generating thumbnail for {0}", album)
        self._log.debug("generating thumbnail for {}", album)
        if not album.artpath:
            self._log.info("album {0} has no art", album)
            self._log.info("album {} has no art", album)
            return

        if self.config["dolphin"]:

@ -127,7 +127,7 @@ class ThumbnailsPlugin(BeetsPlugin):
        size = ArtResizer.shared.get_size(album.artpath)
        if not size:
            self._log.warning(
                "problem getting the picture size for {0}", album.artpath
                "problem getting the picture size for {.artpath}", album
            )
            return

@ -137,9 +137,9 @@ class ThumbnailsPlugin(BeetsPlugin):
        wrote &= self.make_cover_thumbnail(album, 128, NORMAL_DIR)

        if wrote:
            self._log.info("wrote thumbnail for {0}", album)
            self._log.info("wrote thumbnail for {}", album)
        else:
            self._log.info("nothing to do for {0}", album)
            self._log.info("nothing to do for {}", album)

    def make_cover_thumbnail(self, album, size, target_dir):
        """Make a thumbnail of given size for `album` and put it in

@ -154,16 +154,16 @@ class ThumbnailsPlugin(BeetsPlugin):
        ):
            if self.config["force"]:
                self._log.debug(
                    "found a suitable {1}x{1} thumbnail for {0}, "
                    "found a suitable {0}x{0} thumbnail for {1}, "
                    "forcing regeneration",
                    album,
                    size,
                    album,
                )
            else:
                self._log.debug(
                    "{1}x{1} thumbnail for {0} exists and is recent enough",
                    album,
                    "{0}x{0} thumbnail for {1} exists and is recent enough",
                    size,
                    album,
                )
                return False
        resized = ArtResizer.shared.resize(size, album.artpath, target)

@ -192,7 +192,7 @@ class ThumbnailsPlugin(BeetsPlugin):
            ArtResizer.shared.write_metadata(image_path, metadata)
        except Exception:
            self._log.exception(
                "could not write metadata to {0}", displayable_path(image_path)
                "could not write metadata to {}", displayable_path(image_path)
            )

    def make_dolphin_cover_thumbnail(self, album):

@ -202,9 +202,9 @@ class ThumbnailsPlugin(BeetsPlugin):
        artfile = os.path.split(album.artpath)[1]
        with open(syspath(outfilename), "w") as f:
            f.write("[Desktop Entry]\n")
            f.write("Icon=./{}".format(artfile.decode("utf-8")))
            f.write(f"Icon=./{artfile.decode('utf-8')}")
            f.close()
        self._log.debug("Wrote file {0}", displayable_path(outfilename))
        self._log.debug("Wrote file {}", displayable_path(outfilename))


class URIGetter:

@ -230,8 +230,7 @@ def copy_c_string(c_string):
    # This is a pretty dumb way to get a string copy, but it seems to
    # work. A more surefire way would be to allocate a ctypes buffer and copy
    # the data with `memcpy` or somesuch.
    s = ctypes.cast(c_string, ctypes.c_char_p).value
    return b"" + s
    return ctypes.cast(c_string, ctypes.c_char_p).value


class GioURI(URIGetter):

@ -266,9 +265,7 @@ class GioURI(URIGetter):
        g_file_ptr = self.libgio.g_file_new_for_path(path)
        if not g_file_ptr:
            raise RuntimeError(
                "No gfile pointer received for {}".format(
                    displayable_path(path)
                )
                f"No gfile pointer received for {displayable_path(path)}"
            )

        try:
@ -44,6 +44,6 @@ class TypesPlugin(BeetsPlugin):
                mytypes[key] = types.DATE
            else:
                raise ConfigValueError(
                    "unknown type '{}' for the '{}' field".format(value, key)
                    f"unknown type '{value}' for the '{key}' field"
                )
        return mytypes
@ -34,7 +34,7 @@ class Unimported(BeetsPlugin):
    def commands(self):
        def print_unimported(lib, opts, args):
            ignore_exts = [
                ("." + x).encode()
                f".{x}".encode()
                for x in self.config["ignore_extensions"].as_str_seq()
            ]
            ignore_dirs = [
@ -77,7 +77,7 @@ def json_generator(items, root, expand=False):
        representation
    :returns: generator that yields strings
    """
    yield '{"%s":[' % root
    yield f'{{"{root}":['
    first = True
    for item in items:
        if first:
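The `json_generator` change above shows the one wrinkle when a template emits literal braces: inside an f-string, `{{` and `}}` produce single `{` and `}` characters. For example:

.. code-block:: python

    root = "items"  # stand-in for the collection name
    assert f'{{"{root}":[' == '{"items":['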
@ -232,9 +232,7 @@ def _get_unique_table_field_values(model, field, sort_field):
        raise KeyError
    with g.lib.transaction() as tx:
        rows = tx.query(
            "SELECT DISTINCT '{}' FROM '{}' ORDER BY '{}'".format(
                field, model._table, sort_field
            )
            f"SELECT DISTINCT '{field}' FROM '{model._table}' ORDER BY '{sort_field}'"
        )
    return [row[0] for row in rows]


@ -476,7 +474,7 @@ class WebPlugin(BeetsPlugin):
        # Enable CORS if required.
        if self.config["cors"]:
            self._log.info(
                "Enabling CORS with origin: {0}", self.config["cors"]
                "Enabling CORS with origin: {}", self.config["cors"]
            )
            from flask_cors import CORS
@ -90,10 +90,10 @@ class ZeroPlugin(BeetsPlugin):
        Do some sanity checks then compile the regexes.
        """
        if field not in MediaFile.fields():
            self._log.error("invalid field: {0}", field)
            self._log.error("invalid field: {}", field)
        elif field in ("id", "path", "album_id"):
            self._log.warning(
                "field '{0}' ignored, zeroing it would be dangerous", field
                "field '{}' ignored, zeroing it would be dangerous", field
            )
        else:
            try:

@ -137,7 +137,7 @@ class ZeroPlugin(BeetsPlugin):

        if match:
            fields_set = True
            self._log.debug("{0}: {1} -> None", field, value)
            self._log.debug("{}: {} -> None", field, value)
            tags[field] = None
            if self.config["update_database"]:
                item[field] = None
@ -384,9 +384,9 @@ Here's an example that adds a ``$disc_and_track`` field:
            number.
            """
            if item.disctotal > 1:
                return u'%02i.%02i' % (item.disc, item.track)
                return f"{item.disc:02d}.{item.track:02d}"
            else:
                return u'%02i' % (item.track)
                return f"{item.track:02d}"

With this plugin enabled, templates can reference ``$disc_and_track`` as they
can any standard metadata field.
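For anyone updating their own snippets, the two spellings are equivalent: the old `%02i` conversion corresponds to the `:02d` format spec.

.. code-block:: python

    disc, track = 1, 3
    assert "%02i.%02i" % (disc, track) == f"{disc:02d}.{track:02d}" == "01.03"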
@ -20,8 +20,7 @@ Here are a couple of examples of expressions:

    item_fields:
        initial: albumartist[0].upper() + u'.'
        disc_and_track: u'%02i.%02i' % (disc, track) if
            disctotal > 1 else u'%02i' % (track)
        disc_and_track: f"{disc:02d}.{track:02d}" if disctotal > 1 else f"{track:02d}"

Note that YAML syntax allows newlines in values if the subsequent lines are
indented.
@ -275,11 +275,15 @@ select = [
    "E",  # pycodestyle
    "F",  # pyflakes
    # "B",  # flake8-bugbear
    "G",  # flake8-logging-format
    "I",  # isort
    "ISC",  # flake8-implicit-str-concat
    "N",  # pep8-naming
    "PT",  # flake8-pytest-style
    # "RUF",  # ruff
    # "UP",  # pyupgrade
    "UP031",  # do not use percent formatting
    "UP032",  # use f-string instead of format call
    "TCH",  # flake8-type-checking
    "W",  # pycodestyle
]
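The two pyupgrade rules enabled here flag exactly the patterns this PR removes; roughly, `UP031` reports `%` formatting and `UP032` reports `str.format` calls that could be f-strings:

.. code-block:: python

    name, count = "beets", 5
    a = "%s has %d letters" % (name, count)      # flagged by UP031
    b = "{} has {} letters".format(name, count)  # flagged by UP032
    c = f"{name} has {count} letters"            # preferred spelling
    assert a == b == c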
@ -128,6 +128,7 @@ lyrics_pages = [
        artist="Atlanta",
        track_title="Mergaitės Nori Mylėt",
        url_title="Mergaitės nori mylėt – Atlanta | Dainų Žodžiai",
        marks=[xfail_on_ci("Expired SSL certificate")],
    ),
    LyricsPage.make(
        "https://genius.com/The-beatles-lady-madonna-lyrics",

@ -328,34 +329,40 @@ lyrics_pages = [
        url_title="The Beatles - Lady Madonna Lyrics",
    ),
    LyricsPage.make(
        "https://www.lyricsmode.com/lyrics/b/beatles/lady_madonna.html",
        "https://www.lyricsmode.com/lyrics/b/beatles/mother_natures_son.html",
        """
        Lady Madonna, children at your feet.
        Wonder how you manage to make ends meet.
        Who finds the money? When you pay the rent?
        Did you think that money was heaven sent?
        Born a poor young country boy, Mother Nature's son
        All day long I'm sitting singing songs for everyone

        Friday night arrives without a suitcase.
        Sunday morning creep in like a nun.
        Mondays child has learned to tie his bootlace.
        See how they run.
        Sit beside a mountain stream, see her waters rise
        Listen to the pretty sound of music as she flies

        Lady Madonna, baby at your breast.
        Wonder how you manage to feed the rest.
        Doo doo doo doo doo doo doo doo doo doo doo
        Doo doo doo doo doo doo doo doo doo
        Doo doo doo

        See how they run.
        Lady Madonna, lying on the bed,
        Listen to the music playing in your head.
        Find me in my field of grass, Mother Nature's son
        Swaying daises sing a lazy song beneath the sun

        Tuesday afternoon is never ending.
        Wednesday morning papers didn't come.
        Thursday night you stockings needed mending.
        See how they run.
        Doo doo doo doo doo doo doo doo doo doo doo
        Doo doo doo doo doo doo doo doo doo
        Doo doo doo doo doo doo
        Yeah yeah yeah

        Lady Madonna, children at your feet.
        Wonder how you manage to make ends meet.
        Mm mm mm mm mm mm mm
        Mm mm mm, ooh ooh ooh
        Mm mm mm mm mm mm mm
        Mm mm mm mm, wah wah wah

        Wah, Mother Nature's son
        """,
        url_title="Lady Madonna lyrics by The Beatles - original song full text. Official Lady Madonna lyrics, 2024 version | LyricsMode.com",  # noqa: E501
        artist="The Beatles",
        track_title="Mother Nature's Son",
        url_title=(
            "Mother Nature's Son lyrics by The Beatles - original song full"
            " text. Official Mother Nature's Son lyrics, 2025 version"
            " | LyricsMode.com"
        ),
    ),
    LyricsPage.make(
        "https://www.lyricsontop.com/amy-winehouse-songs/jazz-n-blues-lyrics.html",

@ -528,6 +535,7 @@ lyrics_pages = [
        Wonder how you manage to make ends meet.
        """,
        url_title="The Beatles - Lady Madonna",
        marks=[xfail_on_ci("Sweetslyrics also fails with 403 FORBIDDEN in CI")],
    ),
    LyricsPage.make(
        "https://www.tekstowo.pl/piosenka,the_beatles,lady_madonna.html",
@ -89,11 +89,11 @@ class CAAHelper:
    MBID_RELASE = "rid"
    MBID_GROUP = "rgid"

    RELEASE_URL = "coverartarchive.org/release/{}".format(MBID_RELASE)
    GROUP_URL = "coverartarchive.org/release-group/{}".format(MBID_GROUP)
    RELEASE_URL = f"coverartarchive.org/release/{MBID_RELASE}"
    GROUP_URL = f"coverartarchive.org/release-group/{MBID_GROUP}"

    RELEASE_URL = "https://" + RELEASE_URL
    GROUP_URL = "https://" + GROUP_URL
    RELEASE_URL = f"https://{RELEASE_URL}"
    GROUP_URL = f"https://{GROUP_URL}"

    RESPONSE_RELEASE = """{
        "images": [

@ -305,10 +305,8 @@ class FSArtTest(UseThePlugin):
class CombinedTest(FetchImageTestCase, CAAHelper):
    ASIN = "xxxx"
    MBID = "releaseid"
    AMAZON_URL = "https://images.amazon.com/images/P/{}.01.LZZZZZZZ.jpg".format(
        ASIN
    )
    AAO_URL = "https://www.albumart.org/index_detail.php?asin={}".format(ASIN)
    AMAZON_URL = f"https://images.amazon.com/images/P/{ASIN}.01.LZZZZZZZ.jpg"
    AAO_URL = f"https://www.albumart.org/index_detail.php?asin={ASIN}"

    def setUp(self):
        super().setUp()

@ -708,7 +706,7 @@ class FanartTVTest(UseThePlugin):
    def test_fanarttv_finds_image(self):
        album = _common.Bag(mb_releasegroupid="thereleasegroupid")
        self.mock_response(
            fetchart.FanartTV.API_ALBUMS + "thereleasegroupid",
            f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid",
            self.RESPONSE_MULTIPLE,
        )
        candidate = next(self.source.get(album, self.settings, []))

@ -717,7 +715,7 @@ class FanartTVTest(UseThePlugin):
    def test_fanarttv_returns_no_result_when_error_received(self):
        album = _common.Bag(mb_releasegroupid="thereleasegroupid")
        self.mock_response(
            fetchart.FanartTV.API_ALBUMS + "thereleasegroupid",
            f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid",
            self.RESPONSE_ERROR,
        )
        with pytest.raises(StopIteration):

@ -726,7 +724,7 @@ class FanartTVTest(UseThePlugin):
    def test_fanarttv_returns_no_result_with_malformed_response(self):
        album = _common.Bag(mb_releasegroupid="thereleasegroupid")
        self.mock_response(
            fetchart.FanartTV.API_ALBUMS + "thereleasegroupid",
            f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid",
            self.RESPONSE_MALFORMED,
        )
        with pytest.raises(StopIteration):

@ -736,7 +734,7 @@ class FanartTVTest(UseThePlugin):
        # The source used to fail when there were images present, but no cover
        album = _common.Bag(mb_releasegroupid="thereleasegroupid")
        self.mock_response(
            fetchart.FanartTV.API_ALBUMS + "thereleasegroupid",
            f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid",
            self.RESPONSE_NO_ART,
        )
        with pytest.raises(StopIteration):
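One subtlety in the `CAAHelper` hunk above: f-strings interpolate eagerly, so rebinding a class attribute in terms of its previous value behaves exactly like the old concatenation. The same pattern outside the test class:

.. code-block:: python

    MBID_RELASE = "rid"  # same constant (spelling included) as in the test
    RELEASE_URL = f"coverartarchive.org/release/{MBID_RELASE}"
    RELEASE_URL = f"https://{RELEASE_URL}"
    assert RELEASE_URL == "https://coverartarchive.org/release/rid"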
@ -49,14 +49,12 @@ class ConvertMixin:
        """
        if re.search("[^a-zA-Z0-9]", tag):
            raise ValueError(
                "tag '{}' must only contain letters and digits".format(tag)
                f"tag '{tag}' must only contain letters and digits"
            )

        # A Python script that copies the file and appends a tag.
        stub = os.path.join(_common.RSRC, b"convert_stub.py").decode("utf-8")
        return "{} {} $source $dest {}".format(
            shell_quote(sys.executable), shell_quote(stub), tag
        )
        return f"{shell_quote(sys.executable)} {shell_quote(stub)} $source $dest {tag}"

    def file_endswith(self, path: Path, tag: str):
        """Check the path is a file and if its content ends with `tag`."""
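The collapsed return statement above builds the convert command line; `$source` and `$dest` are substituted later by the convert plugin, so only the interpreter and stub paths need quoting. A rough standalone equivalent, with `shlex.quote` standing in for beets' `shell_quote` and a made-up stub path:

.. code-block:: python

    import sys
    from shlex import quote

    stub, tag = "/tmp/convert_stub.py", "convert"  # stand-in values
    cmd = f"{quote(sys.executable)} {quote(stub)} $source $dest {tag}"
    print(cmd)  # e.g. /usr/bin/python3 /tmp/convert_stub.py $source $dest convert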
@ -82,7 +82,7 @@ class DGAlbumInfoTest(BeetsTestCase):
        """Return a Bag that mimics a discogs_client.Release with a
        tracklist where tracks have the specified `positions`."""
        tracks = [
            self._make_track("TITLE%s" % i, position)
            self._make_track(f"TITLE{i}", position)
            for (i, position) in enumerate(positions, start=1)
        ]
        return self._make_release(tracks)
@ -144,9 +144,7 @@ class EmbedartCliTest(IOMixin, PluginMixin, FetchImageHelper, BeetsTestCase):
        if os.path.isfile(syspath(tmp_path)):
            os.remove(syspath(tmp_path))
            self.fail(
                "Artwork file {} was not deleted".format(
                    displayable_path(tmp_path)
                )
                f"Artwork file {displayable_path(tmp_path)} was not deleted"
            )

    def test_art_file_missing(self):
@ -56,21 +56,21 @@ class FtInTitlePluginFunctional(PluginTestCase):
        assert item["title"] == "Song 1"

    def test_functional_custom_format(self):
        self._ft_set_config("feat. {0}")
        self._ft_set_config("feat. {}")
        item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "Alice")
        self.run_command("ftintitle")
        item.load()
        assert item["artist"] == "Alice"
        assert item["title"] == "Song 1 feat. Bob"

        self._ft_set_config("featuring {0}")
        self._ft_set_config("featuring {}")
        item = self._ft_add_item("/", "Alice feat. Bob", "Song 1", "Alice")
        self.run_command("ftintitle")
        item.load()
        assert item["artist"] == "Alice"
        assert item["title"] == "Song 1 featuring Bob"

        self._ft_set_config("with {0}")
        self._ft_set_config("with {}")
        item = self._ft_add_item("/", "Alice feat Bob", "Song 1", "Alice")
        self.run_command("ftintitle")
        item.load()

@ -78,7 +78,7 @@ class FtInTitlePluginFunctional(PluginTestCase):
        assert item["title"] == "Song 1 with Bob"

    def test_functional_keep_in_artist(self):
        self._ft_set_config("feat. {0}", keep_in_artist=True)
        self._ft_set_config("feat. {}", keep_in_artist=True)
        item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "Alice")
        self.run_command("ftintitle")
        item.load()
@ -65,7 +65,7 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase):
            if m.title.replace("Tag", "Applied") == item.title:
                return m
        raise AssertionError(
            "No MediaFile found for Item " + displayable_path(item.path)
            f"No MediaFile found for Item {displayable_path(item.path)}"
        )

    def test_import_album_with_added_dates(self):

@ -117,7 +117,7 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase):
        for item_path, added_after in items_added_after.items():
            assert items_added_before[item_path] == pytest.approx(
                added_after, rel=1e-4
            ), "reimport modified Item.added for " + displayable_path(item_path)
            ), f"reimport modified Item.added for {displayable_path(item_path)}"

    def test_import_singletons_with_added_dates(self):
        self.config["import"]["singletons"] = True

@ -157,4 +157,4 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase):
        for item_path, added_after in items_added_after.items():
            assert items_added_before[item_path] == pytest.approx(
                added_after, rel=1e-4
            ), "reimport modified Item.added for " + displayable_path(item_path)
            ), f"reimport modified Item.added for {displayable_path(item_path)}"
@ -37,7 +37,7 @@ class IPFSPluginTest(PluginTestCase):
        try:
            if check_item.get("ipfs", with_album=False):
                ipfs_item = os.fsdecode(os.path.basename(want_item.path))
                want_path = "/ipfs/{}/{}".format(test_album.ipfs, ipfs_item)
                want_path = f"/ipfs/{test_album.ipfs}/{ipfs_item}"
                want_path = bytestring_path(want_path)
                assert check_item.path == want_path
                assert (
Some files were not shown because too many files have changed in this diff.