Replace string concatenation (' + ')

- Join hardcoded strings
- Replace concatenated variables with f-strings
Author: Šarūnas Nejus
Date: 2025-08-30 23:10:15 +01:00
Commit: 1c16b2b308 (parent a7c83d91e9)
56 changed files with 328 additions and 377 deletions
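
The change applied across these files is the same mechanical rewrite: string concatenation with '+' is replaced by f-string interpolation of the same values. A minimal runnable sketch of the before/after pattern (the variable names here are illustrative, not taken from any particular file in the diff):

# Before: building strings with '+'
fmt, field = "FLAC", "album"
path = "audio." + fmt.lower()
clause = field + " = ?"

# After: f-strings interpolate the same values in place
path = f"audio.{fmt.lower()}"
clause = f"{field} = ?"
assert path == "audio.flac" and clause == "album = ?"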


@@ -192,7 +192,7 @@ def extract(log, outpath, item):
     if not ext:
         log.warning("Unknown image type in {0}.", displayable_path(item.path))
         return
-    outpath += bytestring_path("." + ext)
+    outpath += bytestring_path(f".{ext}")
     log.info(
         "Extracting album art from: {0} to: {1}",


@@ -261,7 +261,7 @@ def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]):
             continue
         for suffix in "year", "month", "day":
-            key = prefix + suffix
+            key = f"{prefix}{suffix}"
             value = getattr(album_info, key) or 0
             # If we don't even have a year, apply nothing.

@@ -588,7 +588,7 @@ class Model(ABC, Generic[D]):
         for key in fields:
             if key != "id" and key in self._dirty:
                 self._dirty.remove(key)
-                assignments.append(key + "=?")
+                assignments.append(f"{key}=?")
                 value = self._type(key).to_sql(self[key])
                 subvars.append(value)


@@ -190,7 +190,7 @@ class MatchQuery(FieldQuery[AnySQLiteType]):
     """A query that looks for exact matches in an Model field."""

     def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
-        return self.field + " = ?", [self.pattern]
+        return f"{self.field} = ?", [self.pattern]

     @classmethod
     def value_match(cls, pattern: AnySQLiteType, value: Any) -> bool:
@@ -204,7 +204,7 @@ class NoneQuery(FieldQuery[None]):
         super().__init__(field, None, fast)

     def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
-        return self.field + " IS NULL", ()
+        return f"{self.field} IS NULL", ()

     def match(self, obj: Model) -> bool:
         return obj.get(self.field_name) is None
@@ -246,7 +246,7 @@ class StringQuery(StringFieldQuery[str]):
             .replace("%", "\\%")
             .replace("_", "\\_")
         )
-        clause = self.field + " like ? escape '\\'"
+        clause = f"{self.field} like ? escape '\\'"
         subvals = [search]
         return clause, subvals
@@ -264,8 +264,8 @@ class SubstringQuery(StringFieldQuery[str]):
             .replace("%", "\\%")
             .replace("_", "\\_")
         )
-        search = "%" + pattern + "%"
-        clause = self.field + " like ? escape '\\'"
+        search = f"%{pattern}%"
+        clause = f"{self.field} like ? escape '\\'"
         subvals = [search]
         return clause, subvals
@@ -471,7 +471,7 @@ class NumericQuery(FieldQuery[str]):
     def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
         if self.point is not None:
-            return self.field + "=?", (self.point,)
+            return f"{self.field}=?", (self.point,)
         else:
             if self.rangemin is not None and self.rangemax is not None:
                 return (
@@ -549,9 +549,9 @@ class CollectionQuery(Query):
             if not subq_clause:
                 # Fall back to slow query.
                 return None, ()
-            clause_parts.append("(" + subq_clause + ")")
+            clause_parts.append(f"({subq_clause})")
             subvals += subq_subvals
-        clause = (" " + joiner + " ").join(clause_parts)
+        clause = f" {joiner} ".join(clause_parts)
         return clause, subvals

     def __repr__(self) -> str:
@@ -690,9 +690,7 @@ class Period:
         ("%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"),  # second
     )
     relative_units = {"y": 365, "m": 30, "w": 7, "d": 1}
-    relative_re = (
-        "(?P<sign>[+|-]?)(?P<quantity>[0-9]+)" + "(?P<timespan>[y|m|w|d])"
-    )
+    relative_re = "(?P<sign>[+|-]?)(?P<quantity>[0-9]+)(?P<timespan>[y|m|w|d])"

     def __init__(self, date: datetime, precision: str):
         """Create a period with the given date (a `datetime` object) and


@@ -28,11 +28,11 @@ class ReadError(FileOperationError):
     """An error while reading a file (i.e. in `Item.read`)."""

     def __str__(self):
-        return "error reading " + str(super())
+        return f"error reading {super()}"


 class WriteError(FileOperationError):
     """An error while writing a file (i.e. in `Item.write`)."""

     def __str__(self):
-        return "error writing " + str(super())
+        return f"error writing {super()}"


@@ -130,9 +130,9 @@ class PluginLogFilter(logging.Filter):
     def filter(self, record):
         if hasattr(record.msg, "msg") and isinstance(record.msg.msg, str):
             # A _LogMessage from our hacked-up Logging replacement.
-            record.msg.msg = self.prefix + record.msg.msg
+            record.msg.msg = f"{self.prefix}{record.msg.msg}"
         elif isinstance(record.msg, str):
-            record.msg = self.prefix + record.msg
+            record.msg = f"{self.prefix}{record.msg}"

         return True


@@ -153,7 +153,7 @@ class DummyIn:
         self.out = out

     def add(self, s):
-        self.buf.append(s + "\n")
+        self.buf.append(f"{s}\n")

     def close(self):
         pass


@@ -278,7 +278,7 @@ class TestHelper(ConfigMixin):
         values_["db"] = self.lib
         item = Item(**values_)
         if "path" not in values:
-            item["path"] = "audio." + item["format"].lower()
+            item["path"] = f"audio.{item['format'].lower()}"
         # mtime needs to be set last since other assignments reset it.
         item.mtime = 12345
         return item
@@ -310,7 +310,7 @@ class TestHelper(ConfigMixin):
         item = self.create_item(**values)
         extension = item["format"].lower()
         item["path"] = os.path.join(
-            _common.RSRC, util.bytestring_path("min." + extension)
+            _common.RSRC, util.bytestring_path(f"min.{extension}")
         )
         item.add(self.lib)
         item.move(operation=MoveOperation.COPY)
@@ -325,7 +325,7 @@ class TestHelper(ConfigMixin):
         """Add a number of items with files to the database."""
         # TODO base this on `add_item()`
         items = []
-        path = os.path.join(_common.RSRC, util.bytestring_path("full." + ext))
+        path = os.path.join(_common.RSRC, util.bytestring_path(f"full.{ext}"))
         for i in range(count):
             item = Item.from_path(path)
             item.album = f"\u00e4lbum {i}"  # Check unicode paths
@@ -372,7 +372,7 @@ class TestHelper(ConfigMixin):
         specified extension a cover art image is added to the media
         file.
         """
-        src = os.path.join(_common.RSRC, util.bytestring_path("full." + ext))
+        src = os.path.join(_common.RSRC, util.bytestring_path(f"full.{ext}"))
         handle, path = mkstemp(dir=self.temp_dir)
         path = bytestring_path(path)
         os.close(handle)
@@ -570,7 +570,7 @@ class ImportHelper(TestHelper):
             medium = MediaFile(track_path)
             medium.update(
                 {
-                    "album": "Tag Album" + (f" {album_id}" if album_id else ""),
+                    "album": f"Tag Album{f' {album_id}' if album_id else ''}",
                     "albumartist": None,
                     "mb_albumid": None,
                     "comp": None,
@@ -839,15 +839,13 @@ class AutotagStub:
         )

     def _make_album_match(self, artist, album, tracks, distance=0, missing=0):
-        if distance:
-            id = " " + "M" * distance
-        else:
-            id = ""
+        id = f" {'M' * distance}" if distance else ""
+
         if artist is None:
             artist = "Various Artists"
         else:
-            artist = artist.replace("Tag", "Applied") + id
-        album = album.replace("Tag", "Applied") + id
+            artist = f"{artist.replace('Tag', 'Applied')}{id}"
+        album = f"{album.replace('Tag', 'Applied')}{id}"

         track_infos = []
         for i in range(tracks - missing):
@@ -858,8 +856,8 @@
             album=album,
             tracks=track_infos,
             va=False,
-            album_id="albumid" + id,
-            artist_id="artistid" + id,
+            album_id=f"albumid{id}",
+            artist_id=f"artistid{id}",
             albumtype="soundtrack",
             data_source="match_source",
             bandcamp_album_id="bc_url",
@@ -885,7 +883,7 @@ class FetchImageHelper:
         super().run(*args, **kwargs)

     IMAGEHEADER: dict[str, bytes] = {
-        "image/jpeg": b"\xff\xd8\xff" + b"\x00" * 3 + b"JFIF",
+        "image/jpeg": b"\xff\xd8\xff\x00\x00\x00JFIF",
         "image/png": b"\211PNG\r\n\032\n",
         "image/gif": b"GIF89a",
         # dummy type that is definitely not a valid image content type


@@ -125,7 +125,7 @@ def print_(*strings: str, end: str = "\n") -> None:
     The `end` keyword argument behaves similarly to the built-in `print`
     (it defaults to a newline).
     """
-    txt = " ".join(strings or ("",)) + end
+    txt = f"{' '.join(strings or ('',))}{end}"

     # Encode the string and write it to stdout.
     # On Python 3, sys.stdout expects text strings and uses the
@@ -338,7 +338,7 @@ def input_options(
             if line_length != 0:
                 # Not the beginning of the line; need a space.
-                part = " " + part
+                part = f" {part}"
                 length += 1
             prompt += part
@@ -350,7 +350,7 @@
         fallback_prompt = "Enter one of "
         if numrange:
             fallback_prompt += "{}-{}, ".format(*numrange)
-        fallback_prompt += ", ".join(display_letters) + ":"
+        fallback_prompt += f"{', '.join(display_letters)}:"

     resp = input_(prompt)
     while True:
@@ -494,7 +494,7 @@ ANSI_CODES = {
     "bg_cyan": 46,
     "bg_white": 47,
 }

-RESET_COLOR = COLOR_ESCAPE + "39;49;00m"
+RESET_COLOR = f"{COLOR_ESCAPE}39;49;00m"

 # These abstract COLOR_NAMES are lazily mapped on to the actual color in COLORS
 # as they are defined in the configuration files, see function: colorize
@@ -534,8 +534,8 @@ def _colorize(color, text):
     # over all "ANSI codes" in `color`.
     escape = ""
     for code in color:
-        escape = escape + COLOR_ESCAPE + f"{ANSI_CODES[code]}m"
-    return escape + text + RESET_COLOR
+        escape = f"{escape}{COLOR_ESCAPE}{ANSI_CODES[code]}m"
+    return f"{escape}{text}{RESET_COLOR}"


 def colorize(color_name, text):
@@ -621,8 +621,8 @@ def color_split(colored_text, index):
             split_index = index - (length - color_len(part))
             found_split = True
             if found_color_code:
-                pre_split += part[:split_index] + RESET_COLOR
-                post_split += found_color_code + part[split_index:]
+                pre_split += f"{part[:split_index]}{RESET_COLOR}"
+                post_split += f"{found_color_code}{part[split_index:]}"
             else:
                 pre_split += part[:split_index]
                 post_split += part[split_index:]
@@ -806,17 +806,17 @@ def split_into_lines(string, width_tuple):
             # Colorize each word with pre/post escapes
             # Reconstruct colored words
             words += [
-                m.group("esc") + raw_word + RESET_COLOR
+                f"{m['esc']}{raw_word}{RESET_COLOR}"
                 for raw_word in raw_words
             ]
         elif raw_words:
             # Pretext stops mid-word
             if m.group("esc") != RESET_COLOR:
                 # Add the rest of the current word, with a reset after it
-                words[-1] += m.group("esc") + raw_words[0] + RESET_COLOR
+                words[-1] += f"{m['esc']}{raw_words[0]}{RESET_COLOR}"
                 # Add the subsequent colored words:
                 words += [
-                    m.group("esc") + raw_word + RESET_COLOR
+                    f"{m['esc']}{raw_word}{RESET_COLOR}"
                     for raw_word in raw_words[1:]
                 ]
         else:
@@ -907,18 +907,12 @@
     With subsequent lines (i.e. {lhs1}, {rhs1} onwards) being the
     rest of contents, wrapped if the width would be otherwise exceeded.
     """
-    if right["prefix"] + right["contents"] + right["suffix"] == "":
+    if f"{right['prefix']}{right['contents']}{right['suffix']}" == "":
         # No right hand information, so we don't need a separator.
         separator = ""
     first_line_no_wrap = (
-        indent_str
-        + left["prefix"]
-        + left["contents"]
-        + left["suffix"]
-        + separator
-        + right["prefix"]
-        + right["contents"]
-        + right["suffix"]
+        f"{indent_str}{left['prefix']}{left['contents']}{left['suffix']}"
+        f"{separator}{right['prefix']}{right['contents']}{right['suffix']}"
     )
     if color_len(first_line_no_wrap) < max_width:
         # Everything fits, print out line.
@@ -1044,18 +1038,12 @@ def print_newline_layout(
     If {lhs0} would go over the maximum width, the subsequent lines are
     indented a second time for ease of reading.
     """
-    if right["prefix"] + right["contents"] + right["suffix"] == "":
+    if f"{right['prefix']}{right['contents']}{right['suffix']}" == "":
         # No right hand information, so we don't need a separator.
         separator = ""
     first_line_no_wrap = (
-        indent_str
-        + left["prefix"]
-        + left["contents"]
-        + left["suffix"]
-        + separator
-        + right["prefix"]
-        + right["contents"]
-        + right["suffix"]
+        f"{indent_str}{left['prefix']}{left['contents']}{left['suffix']}"
+        f"{separator}{right['prefix']}{right['contents']}{right['suffix']}"
     )
     if color_len(first_line_no_wrap) < max_width:
         # Everything fits, print out line.
@@ -1069,7 +1057,7 @@
         empty_space - len(indent_str),
         empty_space - len(indent_str),
     )
-    left_str = left["prefix"] + left["contents"] + left["suffix"]
+    left_str = f"{left['prefix']}{left['contents']}{left['suffix']}"
     left_split = split_into_lines(left_str, left_width_tuple)
     # Repeat calculations for rhs, including separator on first line
     right_width_tuple = (
@@ -1077,19 +1065,19 @@
         empty_space - len(indent_str),
         empty_space - len(indent_str),
     )
-    right_str = right["prefix"] + right["contents"] + right["suffix"]
+    right_str = f"{right['prefix']}{right['contents']}{right['suffix']}"
     right_split = split_into_lines(right_str, right_width_tuple)
     for i, line in enumerate(left_split):
         if i == 0:
-            print_(indent_str + line)
+            print_(f"{indent_str}{line}")
         elif line != "":
             # Ignore empty lines
-            print_(indent_str * 2 + line)
+            print_(f"{indent_str * 2}{line}")
     for i, line in enumerate(right_split):
         if i == 0:
-            print_(indent_str + separator + line)
+            print_(f"{indent_str}{separator}{line}")
         elif line != "":
-            print_(indent_str * 2 + line)
+            print_(f"{indent_str * 2}{line}")


 FLOAT_EPSILON = 0.01
@@ -1505,7 +1493,7 @@ class SubcommandsOptionParser(CommonOptionsParser):
         # Concatenate the original help message with the subcommand
         # list.
-        return out + "".join(result)
+        return f"{out}{''.join(result)}"

     def _subcommand_for_name(self, name):
         """Return the subcommand in self.subcommands matching the


@@ -18,6 +18,7 @@ interface.
 import os
 import re
+import textwrap

 from collections import Counter
 from collections.abc import Sequence
 from itertools import chain
@@ -128,13 +129,13 @@ def _print_keys(query):
     returned row, with indentation of 2 spaces.
     """
     for row in query:
-        print_(" " * 2 + row["key"])
+        print_(f"  {row['key']}")


 def fields_func(lib, opts, args):
     def _print_rows(names):
         names.sort()
-        print_(" " + "\n ".join(names))
+        print_(textwrap.indent("\n".join(names), " "))

     print_("Item fields:")
     _print_rows(library.Item.all_keys())
@@ -356,18 +357,18 @@ class ChangeRepresentation:
         # 'Match' line and similarity.
         print_(
-            self.indent_header + f"Match ({dist_string(self.match.distance)}):"
+            f"{self.indent_header}Match ({dist_string(self.match.distance)}):"
         )

         if isinstance(self.match.info, autotag.hooks.AlbumInfo):
             # Matching an album - print that
             artist_album_str = (
-                f"{self.match.info.artist}" + f" - {self.match.info.album}"
+                f"{self.match.info.artist} - {self.match.info.album}"
             )
         else:
             # Matching a single track
             artist_album_str = (
-                f"{self.match.info.artist}" + f" - {self.match.info.title}"
+                f"{self.match.info.artist} - {self.match.info.title}"
             )
         print_(
             self.indent_header
@@ -377,22 +378,23 @@
         # Penalties.
         penalties = penalty_string(self.match.distance)
         if penalties:
-            print_(self.indent_header + penalties)
+            print_(f"{self.indent_header}{penalties}")

         # Disambiguation.
         disambig = disambig_string(self.match.info)
         if disambig:
-            print_(self.indent_header + disambig)
+            print_(f"{self.indent_header}{disambig}")

         # Data URL.
         if self.match.info.data_url:
             url = ui.colorize("text_faint", f"{self.match.info.data_url}")
-            print_(self.indent_header + url)
+            print_(f"{self.indent_header}{url}")

     def show_match_details(self):
         """Print out the details of the match, including changes in album name
         and artist name.
         """
+        changed_prefix = ui.colorize("changed", "\u2260")
         # Artist.
         artist_l, artist_r = self.cur_artist or "", self.match.info.artist
         if artist_r == VARIOUS_ARTISTS:
@@ -402,7 +404,7 @@
             artist_l, artist_r = ui.colordiff(artist_l, artist_r)
             # Prefix with U+2260: Not Equal To
             left = {
-                "prefix": ui.colorize("changed", "\u2260") + " Artist: ",
+                "prefix": f"{changed_prefix} Artist: ",
                 "contents": artist_l,
                 "suffix": "",
             }
@@ -410,7 +412,7 @@
             self.print_layout(self.indent_detail, left, right)
         else:
-            print_(self.indent_detail + "*", "Artist:", artist_r)
+            print_(f"{self.indent_detail}*", "Artist:", artist_r)

         if self.cur_album:
             # Album
@@ -422,14 +424,14 @@
                 album_l, album_r = ui.colordiff(album_l, album_r)
                 # Prefix with U+2260: Not Equal To
                 left = {
-                    "prefix": ui.colorize("changed", "\u2260") + " Album: ",
+                    "prefix": f"{changed_prefix} Album: ",
                     "contents": album_l,
                     "suffix": "",
                 }
                 right = {"prefix": "", "contents": album_r, "suffix": ""}
                 self.print_layout(self.indent_detail, left, right)
             else:
-                print_(self.indent_detail + "*", "Album:", album_r)
+                print_(f"{self.indent_detail}*", "Album:", album_r)
         elif self.cur_title:
             # Title - for singletons
             title_l, title_r = self.cur_title or "", self.match.info.title
@@ -437,14 +439,14 @@
                 title_l, title_r = ui.colordiff(title_l, title_r)
                 # Prefix with U+2260: Not Equal To
                 left = {
-                    "prefix": ui.colorize("changed", "\u2260") + " Title: ",
+                    "prefix": f"{changed_prefix} Title: ",
                     "contents": title_l,
                     "suffix": "",
                 }
                 right = {"prefix": "", "contents": title_r, "suffix": ""}
                 self.print_layout(self.indent_detail, left, right)
             else:
-                print_(self.indent_detail + "*", "Title:", title_r)
+                print_(f"{self.indent_detail}*", "Title:", title_r)

     def make_medium_info_line(self, track_info):
         """Construct a line with the current medium's info."""
@@ -568,9 +570,9 @@
         prefix = ui.colorize("changed", "\u2260 ") if changed else "* "
         lhs = {
-            "prefix": prefix + lhs_track + " ",
+            "prefix": f"{prefix}{lhs_track} ",
             "contents": lhs_title,
-            "suffix": " " + lhs_length,
+            "suffix": f" {lhs_length}",
         }
         rhs = {"prefix": "", "contents": "", "suffix": ""}

         if not changed:
@@ -579,9 +581,9 @@
         else:
             # Construct a dictionary for the "changed to" side
             rhs = {
-                "prefix": rhs_track + " ",
+                "prefix": f"{rhs_track} ",
                 "contents": rhs_title,
-                "suffix": " " + rhs_length,
+                "suffix": f" {rhs_length}",
             }

         return (lhs, rhs)
@@ -674,7 +676,7 @@ class AlbumChange(ChangeRepresentation):
                 # Print tracks from previous medium
                 self.print_tracklist(lines)
                 lines = []
-                print_(self.indent_detail + header)
+                print_(f"{self.indent_detail}{header}")

                 # Save new medium details for future comparison.
                 medium, disctitle = track_info.medium, track_info.disctitle
@@ -907,7 +909,7 @@ def choose_candidate(
             f' {item.title if singleton else cur_album}".'
         )

-        print_(ui.indent(2) + "Candidates:")
+        print_("  Candidates:")
         for i, match in enumerate(candidates):
             # Index, metadata, and distance.
             index0 = f"{i + 1}."
@@ -923,17 +925,17 @@
             else:
                 metadata = ui.colorize("text_highlight_minor", metadata)
             line1 = [index, distance, metadata]
-            print_(ui.indent(2) + " ".join(line1))
+            print_(f"  {' '.join(line1)}")

             # Penalties.
             penalties = penalty_string(match.distance, 3)
             if penalties:
-                print_(ui.indent(13) + penalties)
+                print_(f"{' ' * 13}{penalties}")

             # Disambiguation
             disambig = disambig_string(match.info)
             if disambig:
-                print_(ui.indent(13) + disambig)
+                print_(f"{' ' * 13}{disambig}")

         # Ask the user for a choice.
         sel = ui.input_options(choice_opts, numrange=(1, len(candidates)))
@@ -1892,7 +1894,7 @@ def show_stats(lib, query, exact):
         if item.album_id:
             albums.add(item.album_id)

-    size_str = "" + human_bytes(total_size)
+    size_str = human_bytes(total_size)
     if exact:
         size_str += f" ({total_size} bytes)"


@@ -433,8 +433,8 @@ def syspath(path: PathLike, prefix: bool = True) -> str:
     if prefix and not str_path.startswith(WINDOWS_MAGIC_PREFIX):
         if str_path.startswith("\\\\"):
             # UNC path. Final path should look like \\?\UNC\...
-            str_path = "UNC" + str_path[1:]
-        str_path = WINDOWS_MAGIC_PREFIX + str_path
+            str_path = f"UNC{str_path[1:]}"
+        str_path = f"{WINDOWS_MAGIC_PREFIX}{str_path}"

     return str_path
@@ -506,8 +506,8 @@ def move(path: bytes, dest: bytes, replace: bool = False):
     basename = os.path.basename(bytestring_path(dest))
     dirname = os.path.dirname(bytestring_path(dest))
     tmp = tempfile.NamedTemporaryFile(
-        suffix=syspath(b".beets", prefix=False),
-        prefix=syspath(b"." + basename + b".", prefix=False),
+        suffix=".beets",
+        prefix=f".{os.fsdecode(basename)}.",
         dir=syspath(dirname),
         delete=False,
     )
@@ -716,7 +716,7 @@ def truncate_path(str_path: str) -> str:
     path = Path(str_path)
     parent_parts = [truncate_str(p, max_length) for p in path.parts[:-1]]
     stem = truncate_str(path.stem, max_length - len(path.suffix))
-    return str(Path(*parent_parts, stem)) + path.suffix
+    return f"{Path(*parent_parts, stem)}{path.suffix}"


 def _legalize_stage(


@@ -152,7 +152,7 @@ class Symbol:
     def translate(self):
         """Compile the variable lookup."""
         ident = self.ident
-        expr = ex_rvalue(VARIABLE_PREFIX + ident)
+        expr = ex_rvalue(f"{VARIABLE_PREFIX}{ident}")
         return [expr], {ident}, set()
@@ -211,7 +211,7 @@ class Call:
                 )
             )

-        subexpr_call = ex_call(FUNCTION_PREFIX + self.ident, arg_exprs)
+        subexpr_call = ex_call(f"{FUNCTION_PREFIX}{self.ident}", arg_exprs)
         return [subexpr_call], varnames, funcnames
@@ -555,9 +555,9 @@ class Template:
         argnames = []
         for varname in varnames:
-            argnames.append(VARIABLE_PREFIX + varname)
+            argnames.append(f"{VARIABLE_PREFIX}{varname}")
         for funcname in funcnames:
-            argnames.append(FUNCTION_PREFIX + funcname)
+            argnames.append(f"{FUNCTION_PREFIX}{funcname}")

         func = compile_func(
             argnames,
@@ -567,9 +567,9 @@
         def wrapper_func(values={}, functions={}):
             args = {}
             for varname in varnames:
-                args[VARIABLE_PREFIX + varname] = values[varname]
+                args[f"{VARIABLE_PREFIX}{varname}"] = values[varname]
             for funcname in funcnames:
-                args[FUNCTION_PREFIX + funcname] = functions[funcname]
+                args[f"{FUNCTION_PREFIX}{funcname}"] = functions[funcname]
             parts = func(**args)
             return "".join(parts)


@@ -97,8 +97,8 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
                 "with an HTTP scheme"
             )
         elif base_url[-1] != "/":
-            base_url = base_url + "/"
-        self.url = base_url + "{mbid}/low-level"
+            base_url = f"{base_url}/"
+        self.url = f"{base_url}{{mbid}}/low-level"

     def commands(self):
         cmd = ui.Subcommand(


@@ -97,7 +97,7 @@ class AcousticPlugin(plugins.BeetsPlugin):
                 "with an HTTP scheme"
             )
         elif self.base_url[-1] != "/":
-            self.base_url = self.base_url + "/"
+            self.base_url = f"{self.base_url}/"

         if self.config["auto"]:
             self.register_listener("import_task_files", self.import_task_files)
@@ -300,4 +300,4 @@ class AcousticPlugin(plugins.BeetsPlugin):
 def _generate_urls(base_url, mbid):
     """Generates AcousticBrainz end point urls for given `mbid`."""
     for level in LEVELS:
-        yield base_url + mbid + level
+        yield f"{base_url}{mbid}{level}"


@@ -236,10 +236,10 @@ class AURADocument:
             # Not the last page so work out links.next url
             if not self.args:
                 # No existing arguments, so current page is 0
-                next_url = request.url + "?page=1"
+                next_url = f"{request.url}?page=1"
             elif not self.args.get("page", None):
                 # No existing page argument, so add one to the end
-                next_url = request.url + "&page=1"
+                next_url = f"{request.url}&page=1"
             else:
                 # Increment page token by 1
                 next_url = request.url.replace(
@@ -697,7 +697,7 @@ class ImageDocument(AURADocument):
         relationships = {}
         # Split id into [parent_type, parent_id, filename]
         id_split = image_id.split("-")
-        relationships[id_split[0] + "s"] = {
+        relationships[f"{id_split[0]}s"] = {
             "data": [{"type": id_split[0], "id": id_split[1]}]
         }


@@ -110,7 +110,7 @@ class BeatportClient:
         :returns: OAuth resource owner key and secret as unicode
         """
         self.api.parse_authorization_response(
-            "https://beets.io/auth?" + auth_data
+            f"https://beets.io/auth?{auth_data}"
         )
         access_data = self.api.fetch_access_token(
             self._make_url("/identity/1/oauth/access-token")
@@ -200,8 +200,8 @@
     def _make_url(self, endpoint: str) -> str:
         """Get complete URL for a given API endpoint."""
         if not endpoint.startswith("/"):
-            endpoint = "/" + endpoint
-        return self._api_base + endpoint
+            endpoint = f"/{endpoint}"
+        return f"{self._api_base}{endpoint}"

     def _get(self, endpoint: str, **kwargs) -> list[JSONDict]:
         """Perform a GET request on a given API endpoint.


@@ -282,7 +282,7 @@ class BaseServer:
         if not self.ctrl_sock:
             self.ctrl_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
             self.ctrl_sock.connect((self.ctrl_host, self.ctrl_port))
-        self.ctrl_sock.sendall((message + "\n").encode("utf-8"))
+        self.ctrl_sock.sendall((f"{message}\n").encode("utf-8"))

     def _send_event(self, event):
         """Notify subscribed connections of an event."""
@@ -376,13 +376,13 @@
         if self.password and not conn.authenticated:
             # Not authenticated. Show limited list of commands.
             for cmd in SAFE_COMMANDS:
-                yield "command: " + cmd
+                yield f"command: {cmd}"
         else:
             # Authenticated. Show all commands.
             for func in dir(self):
                 if func.startswith("cmd_"):
-                    yield "command: " + func[4:]
+                    yield f"command: {func[4:]}"

     def cmd_notcommands(self, conn):
         """Lists all unavailable commands."""
@@ -392,7 +392,7 @@
                 if func.startswith("cmd_"):
                     cmd = func[4:]
                     if cmd not in SAFE_COMMANDS:
-                        yield "command: " + cmd
+                        yield f"command: {cmd}"
         else:
             # Authenticated. No commands are unavailable.
@@ -406,22 +406,22 @@
         playlist, playlistlength, and xfade.
         """
         yield (
-            "repeat: " + str(int(self.repeat)),
-            "random: " + str(int(self.random)),
-            "consume: " + str(int(self.consume)),
-            "single: " + str(int(self.single)),
-            "playlist: " + str(self.playlist_version),
-            "playlistlength: " + str(len(self.playlist)),
-            "mixrampdb: " + str(self.mixrampdb),
+            f"repeat: {int(self.repeat)}",
+            f"random: {int(self.random)}",
+            f"consume: {int(self.consume)}",
+            f"single: {int(self.single)}",
+            f"playlist: {self.playlist_version}",
+            f"playlistlength: {len(self.playlist)}",
+            f"mixrampdb: {self.mixrampdb}",
         )

         if self.volume > 0:
-            yield "volume: " + str(self.volume)
+            yield f"volume: {self.volume}"

         if not math.isnan(self.mixrampdelay):
-            yield "mixrampdelay: " + str(self.mixrampdelay)
+            yield f"mixrampdelay: {self.mixrampdelay}"

         if self.crossfade > 0:
-            yield "xfade: " + str(self.crossfade)
+            yield f"xfade: {self.crossfade}"

         if self.current_index == -1:
             state = "stop"
@@ -429,20 +429,20 @@
             state = "pause"
         else:
             state = "play"
-        yield "state: " + state
+        yield f"state: {state}"

         if self.current_index != -1:  # i.e., paused or playing
             current_id = self._item_id(self.playlist[self.current_index])
-            yield "song: " + str(self.current_index)
-            yield "songid: " + str(current_id)
+            yield f"song: {self.current_index}"
+            yield f"songid: {current_id}"

             if len(self.playlist) > self.current_index + 1:
                 # If there's a next song, report its index too.
                 next_id = self._item_id(self.playlist[self.current_index + 1])
-                yield "nextsong: " + str(self.current_index + 1)
-                yield "nextsongid: " + str(next_id)
+                yield f"nextsong: {self.current_index + 1}"
+                yield f"nextsongid: {next_id}"

         if self.error:
-            yield "error: " + self.error
+            yield f"error: {self.error}"

     def cmd_clearerror(self, conn):
         """Removes the persistent error state of the server. This
@@ -522,7 +522,7 @@
     def cmd_replay_gain_status(self, conn):
         """Get the replaygain mode."""
-        yield "replay_gain_mode: " + str(self.replay_gain_mode)
+        yield f"replay_gain_mode: {self.replay_gain_mode}"

     def cmd_clear(self, conn):
         """Clear the playlist."""
@@ -643,8 +643,8 @@
         Also a dummy implementation.
         """
         for idx, track in enumerate(self.playlist):
-            yield "cpos: " + str(idx)
-            yield "Id: " + str(track.id)
+            yield f"cpos: {idx}"
+            yield f"Id: {track.id}"

     def cmd_currentsong(self, conn):
         """Sends information about the currently-playing song."""
@@ -990,7 +990,7 @@ class Command:
         of arguments.
         """
         # Attempt to get correct command function.
-        func_name = prefix + self.name
+        func_name = f"{prefix}{self.name}"
         if not hasattr(target, func_name):
             raise AttributeError(f'unknown command "{self.name}"')
         func = getattr(target, func_name)
@@ -1124,15 +1124,15 @@ class Server(BaseServer):
     def _item_info(self, item):
         info_lines = [
-            "file: " + as_string(item.destination(relative_to_libdir=True)),
-            "Time: " + str(int(item.length)),
-            "duration: " + f"{item.length:.3f}",
-            "Id: " + str(item.id),
+            f"file: {as_string(item.destination(relative_to_libdir=True))}",
+            f"Time: {int(item.length)}",
+            f"duration: {item.length:.3f}",
+            f"Id: {item.id}",
         ]

         try:
             pos = self._id_to_index(item.id)
-            info_lines.append("Pos: " + str(pos))
+            info_lines.append(f"Pos: {pos}")
         except ArgumentNotFoundError:
             # Don't include position if not in playlist.
             pass
@@ -1201,7 +1201,7 @@
     def _path_join(self, p1, p2):
         """Smashes together two BPD paths."""
-        out = p1 + "/" + p2
+        out = f"{p1}/{p2}"
         return out.replace("//", "/").replace("//", "/")

     def cmd_lsinfo(self, conn, path="/"):
@@ -1231,7 +1231,7 @@
                 item = self.lib.get_item(node)
                 yield self._item_info(item)
             else:
-                yield "file: " + basepath
+                yield f"file: {basepath}"
         else:
             # List a directory. Recurse into both directories and files.
             for name, itemid in sorted(node.files.items()):
@@ -1240,7 +1240,7 @@
                 yield from self._listall(newpath, itemid, info)
             for name, subdir in sorted(node.dirs.items()):
                 newpath = self._path_join(basepath, name)
-                yield "directory: " + newpath
+                yield f"directory: {newpath}"
                 yield from self._listall(newpath, subdir, info)

     def cmd_listall(self, conn, path="/"):
@@ -1274,7 +1274,7 @@
         for item in self._all_items(self._resolve_path(path)):
             self.playlist.append(item)
             if send_id:
-                yield "Id: " + str(item.id)
+                yield f"Id: {item.id}"
         self.playlist_version += 1
         self._send_event("playlist")
@@ -1296,7 +1296,7 @@
         item = self.playlist[self.current_index]

         yield (
-            "bitrate: " + str(item.bitrate / 1000),
+            f"bitrate: {item.bitrate / 1000}",
             f"audio: {item.samplerate}:{item.bitdepth}:{item.channels}",
         )
@@ -1322,13 +1322,13 @@
             artists, albums, songs, totaltime = tx.query(statement)[0]

         yield (
-            "artists: " + str(artists),
-            "albums: " + str(albums),
-            "songs: " + str(songs),
-            "uptime: " + str(int(time.time() - self.startup_time)),
-            "playtime: " + "0",  # Missing.
-            "db_playtime: " + str(int(totaltime)),
-            "db_update: " + str(int(self.updated_time)),
+            f"artists: {artists}",
+            f"albums: {albums}",
+            f"songs: {songs}",
+            f"uptime: {int(time.time() - self.startup_time)}",
+            "playtime: 0",  # Missing.
+            f"db_playtime: {int(totaltime)}",
+            f"db_update: {int(self.updated_time)}",
         )

     def cmd_decoders(self, conn):
@@ -1370,7 +1370,7 @@
         searching.
         """
         for tag in self.tagtype_map:
-            yield "tagtype: " + tag
+            yield f"tagtype: {tag}"

     def _tagtype_lookup(self, tag):
         """Uses `tagtype_map` to look up the beets column name for an
@@ -1445,12 +1445,9 @@
         clause, subvals = query.clause()
         statement = (
-            "SELECT DISTINCT "
-            + show_key
-            + " FROM items WHERE "
-            + clause
-            + " ORDER BY "
-            + show_key
+            f"SELECT DISTINCT {show_key}"
+            f" FROM items WHERE {clause}"
+            f" ORDER BY {show_key}"
         )
         self._log.debug(statement)
         with self.lib.transaction() as tx:
@@ -1460,7 +1457,7 @@
             if not row[0]:
                 # Skip any empty values of the field.
                 continue
-            yield show_tag_canon + ": " + str(row[0])
+            yield f"{show_tag_canon}: {row[0]}"

     def cmd_count(self, conn, tag, value):
         """Returns the number and total time of songs matching the
@@ -1474,8 +1471,8 @@
         ):
             songs += 1
             playtime += item.length
-        yield "songs: " + str(songs)
-        yield "playtime: " + str(int(playtime))
+        yield f"songs: {songs}"
+        yield f"playtime: {int(playtime)}"

     # Persistent playlist manipulation. In MPD this is an optional feature so
     # these dummy implementations match MPD's behaviour with the feature off.


@@ -129,7 +129,7 @@ class GstPlayer:
         self.player.set_state(Gst.State.NULL)
         if isinstance(path, str):
             path = path.encode("utf-8")
-        uri = "file://" + urllib.parse.quote(path)
+        uri = f"file://{urllib.parse.quote(path)}"
         self.player.set_property("uri", uri)
         self.player.set_state(Gst.State.PLAYING)
         self.playing = True


@@ -169,10 +169,8 @@ def build_alpha_spans(alpha_spans_str, alpha_regexs):
         )
         spans.append(
             re.compile(
-                "^["
-                + ASCII_DIGITS[begin_index : end_index + 1]
-                + ASCII_DIGITS[begin_index : end_index + 1].upper()
-                + "]"
+                rf"^[{ASCII_DIGITS[begin_index : end_index + 1]}]",
+                re.IGNORECASE,
             )
         )
     return spans


@@ -649,7 +649,7 @@ class ConvertPlugin(BeetsPlugin):
         tmpdir = self.config["tmpdir"].get()
         if tmpdir:
             tmpdir = os.fsdecode(util.bytestring_path(tmpdir))
-        fd, dest = tempfile.mkstemp(os.fsdecode(b"." + ext), dir=tmpdir)
+        fd, dest = tempfile.mkstemp(f".{os.fsdecode(ext)}", dir=tmpdir)
         os.close(fd)
         dest = util.bytestring_path(dest)
         _temp_files.append(dest)  # Delete the transcode later.


@@ -96,7 +96,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
                 f"Invalid `release_date` returned by {self.data_source} API: "
                 f"{release_date!r}"
             )
-        tracks_obj = self.fetch_data(self.album_url + deezer_id + "/tracks")
+        tracks_obj = self.fetch_data(f"{self.album_url}{deezer_id}/tracks")
         if tracks_obj is None:
             return None
         try:
@@ -169,7 +169,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
         # the track's disc).
         if not (
             album_tracks_obj := self.fetch_data(
-                self.album_url + str(track_data["album"]["id"]) + "/tracks"
+                f"{self.album_url}{track_data['album']['id']}/tracks"
             )
         ):
             return None
@@ -244,7 +244,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
         self._log.debug(f"Searching {self.data_source} for '{query}'")
         try:
             response = requests.get(
-                self.search_url + query_type,
+                f"{self.search_url}{query_type}",
                 params={"q": query},
                 timeout=10,
             )


@@ -385,7 +385,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
             track.artist_id = artist_id
             # Discogs does not have track IDs. Invent our own IDs as proposed
             # in #2336.
-            track.track_id = str(album_id) + "-" + track.track_alt
+            track.track_id = f"{album_id}-{track.track_alt}"
             track.data_url = data_url
             track.data_source = "Discogs"


@@ -682,7 +682,7 @@ class GoogleImages(RemoteArtSource):
         """
         if not (album.albumartist and album.album):
             return
-        search_string = (album.albumartist + "," + album.album).encode("utf-8")
+        search_string = f"{album.albumartist},{album.album}".encode("utf-8")

         try:
             response = self.request(
@@ -723,7 +723,7 @@
     NAME = "fanart.tv"
     ID = "fanarttv"
     API_URL = "https://webservice.fanart.tv/v3/"
-    API_ALBUMS = API_URL + "music/albums/"
+    API_ALBUMS = f"{API_URL}music/albums/"
     PROJECT_KEY = "61a7d0ab4e67162b7a0c7c35915cd48e"

     def __init__(self, *args, **kwargs):
@@ -750,7 +750,7 @@
         try:
             response = self.request(
-                self.API_ALBUMS + album.mb_releasegroupid,
+                f"{self.API_ALBUMS}{album.mb_releasegroupid}",
                 headers={
                     "api-key": self.PROJECT_KEY,
                     "client-key": self.client_key,
@@ -820,7 +820,7 @@ class ITunesStore(RemoteArtSource):
             return

         payload = {
-            "term": album.albumartist + " " + album.album,
+            "term": f"{album.albumartist} {album.album}",
             "entity": "album",
             "media": "music",
             "limit": 200,
@@ -947,7 +947,7 @@ class Wikipedia(RemoteArtSource):
         data = dbpedia_response.json()
         results = data["results"]["bindings"]
         if results:
-            cover_filename = "File:" + results[0]["coverFilename"]["value"]
+            cover_filename = f"File:{results[0]['coverFilename']['value']}"
             page_id = results[0]["pageId"]["value"]
         else:
             self._log.debug("wikipedia: album not found on dbpedia")
@@ -996,7 +996,7 @@
         results = data["query"]["pages"][page_id]["images"]
         for result in results:
             if re.match(
-                re.escape(lpart) + r".*?\." + re.escape(rpart),
+                rf"{re.escape(lpart)}.*?\.{re.escape(rpart)}",
                 result["title"],
             ):
                 cover_filename = result["title"]
@@ -1227,7 +1227,7 @@ class Spotify(RemoteArtSource):
         paths: None | Sequence[bytes],
     ) -> Iterator[Candidate]:
         try:
-            url = self.SPOTIFY_ALBUM_URL + album.items().get().spotify_album_id
+            url = f"{self.SPOTIFY_ALBUM_URL}{album.items().get().spotify_album_id}"
         except AttributeError:
             self._log.debug("Fetchart: no Spotify album ID found")
             return


@@ -123,19 +123,13 @@ class FishPlugin(BeetsPlugin):
             for name in names:
                 cmd_names_help.append((name, cmd.help))
         # Concatenate the string
-        totstring = HEAD + "\n"
+        totstring = f"{HEAD}\n"
         totstring += get_cmds_list([name[0] for name in cmd_names_help])
         totstring += "" if nobasicfields else get_standard_fields(fields)
         totstring += get_extravalues(lib, extravalues) if extravalues else ""
-        totstring += (
-            "\n" + "# ====== setup basic beet completion =====" + "\n" * 2
-        )
+        totstring += "\n# ====== setup basic beet completion =====\n\n"
         totstring += get_basic_beet_options()
-        totstring += (
-            "\n"
-            + "# ====== setup field completion for subcommands ====="
-            + "\n"
-        )
+        totstring += "\n# ====== setup field completion for subcommands =====\n"
         totstring += get_subcommands(cmd_names_help, nobasicfields, extravalues)
         # Set up completion for all the command options
         totstring += get_all_commands(beetcmds)
@@ -147,23 +141,19 @@
 def _escape(name):
     # Escape ? in fish
     if name == "?":
-        name = "\\" + name
+        name = f"\\{name}"
     return name


 def get_cmds_list(cmds_names):
     # Make a list of all Beets core & plugin commands
-    substr = ""
-    substr += "set CMDS " + " ".join(cmds_names) + ("\n" * 2)
-    return substr
+    return f"set CMDS {' '.join(cmds_names)}\n\n"


 def get_standard_fields(fields):
     # Make a list of album/track fields and append with ':'
-    fields = (field + ":" for field in fields)
-    substr = ""
-    substr += "set FIELDS " + " ".join(fields) + ("\n" * 2)
-    return substr
+    fields = (f"{field}:" for field in fields)
+    return f"set FIELDS {' '.join(fields)}\n\n"
@@ -172,14 +162,8 @@ def get_extravalues(lib, extravalues):
     word = ""
     values_set = get_set_of_values_for_field(lib, extravalues)
     for fld in extravalues:
-        extraname = fld.upper() + "S"
-        word += (
-            "set "
-            + extraname
-            + " "
-            + " ".join(sorted(values_set[fld]))
-            + ("\n" * 2)
-        )
+        extraname = f"{fld.upper()}S"
+        word += f"set {extraname} {' '.join(sorted(values_set[fld]))}\n\n"

     return word
@@ -223,31 +207,29 @@ def get_subcommands(cmd_name_and_help, nobasicfields, extravalues):
     for cmdname, cmdhelp in cmd_name_and_help:
         cmdname = _escape(cmdname)
-        word += "\n" + f"# ------ fieldsetups for {cmdname} -------" + "\n"
+        word += f"\n# ------ fieldsetups for {cmdname} -------\n"
         word += BL_NEED2.format(
-            ("-a " + cmdname), ("-f " + "-d " + wrap(clean_whitespace(cmdhelp)))
+            f"-a {cmdname}", f"-f -d {wrap(clean_whitespace(cmdhelp))}"
         )
         if nobasicfields is False:
             word += BL_USE3.format(
                 cmdname,
-                ("-a " + wrap("$FIELDS")),
-                ("-f " + "-d " + wrap("fieldname")),
+                f"-a {wrap('$FIELDS')}",
+                f"-f -d {wrap('fieldname')}",
             )
         if extravalues:
             for f in extravalues:
-                setvar = wrap("$" + f.upper() + "S")
-                word += (
-                    " ".join(
-                        BL_EXTRA3.format(
-                            (cmdname + " " + f + ":"),
-                            ("-f " + "-A " + "-a " + setvar),
-                            ("-d " + wrap(f)),
-                        ).split()
-                    )
-                    + "\n"
-                )
+                setvar = wrap(f"${f.upper()}S")
+                word += " ".join(
+                    BL_EXTRA3.format(
+                        f"{cmdname} {f}:",
+                        f"-f -A -a {setvar}",
+                        f"-d {wrap(f)}",
+                    ).split()
+                )
+                word += "\n"

     return word
@@ -260,55 +242,44 @@ def get_all_commands(beetcmds):
         for name in names:
             name = _escape(name)
-            word += "\n"
-            word += ("\n" * 2) + f"# ====== completions for {name} =====" + "\n"
+            word += f"\n\n\n# ====== completions for {name} =====\n"

             for option in cmd.parser._get_all_options()[1:]:
                 cmd_l = (
-                    (" -l " + option._long_opts[0].replace("--", ""))
+                    f" -l {option._long_opts[0].replace('--', '')}"
                     if option._long_opts
                     else ""
                 )
                 cmd_s = (
-                    (" -s " + option._short_opts[0].replace("-", ""))
+                    f" -s {option._short_opts[0].replace('-', '')}"
                     if option._short_opts
                     else ""
                 )
                 cmd_need_arg = " -r " if option.nargs in [1] else ""
                 cmd_helpstr = (
-                    (" -d " + wrap(" ".join(option.help.split())))
+                    f" -d {wrap(' '.join(option.help.split()))}"
                     if option.help
                     else ""
                )
                 cmd_arglist = (
-                    (" -a " + wrap(" ".join(option.choices)))
+                    f" -a {wrap(' '.join(option.choices))}"
                     if option.choices
                     else ""
                 )
-                word += (
-                    " ".join(
-                        BL_USE3.format(
-                            name,
-                            (
-                                cmd_need_arg
-                                + cmd_s
-                                + cmd_l
-                                + " -f "
-                                + cmd_arglist
-                            ),
-                            cmd_helpstr,
-                        ).split()
-                    )
-                    + "\n"
-                )
+                word += " ".join(
+                    BL_USE3.format(
+                        name,
+                        f"{cmd_need_arg}{cmd_s}{cmd_l} -f {cmd_arglist}",
+                        cmd_helpstr,
+                    ).split()
+                )
+                word += "\n"

-            word = word + " ".join(
-                BL_USE3.format(
-                    name,
-                    ("-s " + "h " + "-l " + "help" + " -f "),
-                    ("-d " + wrap("print help") + "\n"),
-                ).split()
-            )
+            word = word + BL_USE3.format(
+                name,
+                "-s h -l help -f",
+                f"-d {wrap('print help')}",
+            )
     return word
@@ -323,7 +294,7 @@ def wrap(word):
     sptoken = '"'
     if '"' in word and ("'") in word:
         word.replace('"', sptoken)
-        return '"' + word + '"'
+        return f'"{word}"'

     tok = '"' if "'" in word else "'"
-    return tok + word + tok
+    return f"{tok}{word}{tok}"


@@ -50,7 +50,7 @@ def _build_m3u_filename(basename):
     path = normpath(
         os.path.join(
             config["importfeeds"]["dir"].as_filename(),
-            date + "_" + basename + ".m3u",
+            f"{date}_{basename}.m3u",
         )
     )
     return path
View file
@ -361,7 +361,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):
) )
if resolved_genres: if resolved_genres:
suffix = "whitelist" if self.whitelist else "any" suffix = "whitelist" if self.whitelist else "any"
label = stage_label + f", {suffix}" label = f"{stage_label}, {suffix}"
if keep_genres: if keep_genres:
label = f"keep + {label}" label = f"keep + {label}"
return self._format_and_stringify(resolved_genres), label return self._format_and_stringify(resolved_genres), label
View file
@ -70,7 +70,7 @@ class CustomUser(pylast.User):
tuple with the total number of pages of results. Includes an MBID, if tuple with the total number of pages of results. Includes an MBID, if
found. found.
""" """
doc = self._request(self.ws_prefix + "." + method, cacheable, params) doc = self._request(f"{self.ws_prefix}.{method}", cacheable, params)
toptracks_node = doc.getElementsByTagName("toptracks")[0] toptracks_node = doc.getElementsByTagName("toptracks")[0]
total_pages = int(toptracks_node.getAttribute("totalPages")) total_pages = int(toptracks_node.getAttribute("totalPages"))
View file
@ -582,7 +582,7 @@ class Tekstowo(SearchBackend):
"""Fetch lyrics from Tekstowo.pl.""" """Fetch lyrics from Tekstowo.pl."""
BASE_URL = "https://www.tekstowo.pl" BASE_URL = "https://www.tekstowo.pl"
SEARCH_URL = BASE_URL + "/szukaj,{}.html" SEARCH_URL = f"{BASE_URL}/szukaj,{{}}.html"
def build_url(self, artist, title): def build_url(self, artist, title):
artistitle = f"{artist.title()} {title.title()}" artistitle = f"{artist.title()} {title.title()}"
View file
@ -49,7 +49,7 @@ def load_meta_sources():
meta_sources = {} meta_sources = {}
for module_path, class_name in SOURCES.items(): for module_path, class_name in SOURCES.items():
module = import_module(METASYNC_MODULE + "." + module_path) module = import_module(f"{METASYNC_MODULE}.{module_path}")
meta_sources[class_name.lower()] = getattr(module, class_name) meta_sources[class_name.lower()] = getattr(module, class_name)
return meta_sources return meta_sources
View file
@ -81,7 +81,7 @@ class Itunes(MetaSource):
with open(library_copy, "rb") as library_copy_f: with open(library_copy, "rb") as library_copy_f:
raw_library = plistlib.load(library_copy_f) raw_library = plistlib.load(library_copy_f)
except OSError as e: except OSError as e:
raise ConfigValueError("invalid iTunes library: " + e.strerror) raise ConfigValueError(f"invalid iTunes library: {e.strerror}")
except Exception: except Exception:
# It's likely the user configured their '.itl' library (<> xml) # It's likely the user configured their '.itl' library (<> xml)
if os.path.splitext(library_path)[1].lower() != ".xml": if os.path.splitext(library_path)[1].lower() != ".xml":
@ -91,7 +91,7 @@ class Itunes(MetaSource):
) )
else: else:
hint = "" hint = ""
raise ConfigValueError("invalid iTunes library" + hint) raise ConfigValueError(f"invalid iTunes library{hint}")
# Make the iTunes library queryable using the path # Make the iTunes library queryable using the path
self.collection = { self.collection = {
View file
@ -307,7 +307,7 @@ class MPDStats:
if "player" in events: if "player" in events:
status = self.mpd.status() status = self.mpd.status()
handler = getattr(self, "on_" + status["state"], None) handler = getattr(self, f"on_{status['state']}", None)
if handler: if handler:
handler(status) handler(status)
View file
@ -201,7 +201,7 @@ def _multi_artist_credit(
def track_url(trackid: str) -> str: def track_url(trackid: str) -> str:
return urljoin(BASE_URL, "recording/" + trackid) return urljoin(BASE_URL, f"recording/{trackid}")
def _flatten_artist_credit(credit: list[JSONDict]) -> tuple[str, str, str]: def _flatten_artist_credit(credit: list[JSONDict]) -> tuple[str, str, str]:
@ -246,7 +246,7 @@ def _get_related_artist_names(relations, relation_type):
def album_url(albumid: str) -> str: def album_url(albumid: str) -> str:
return urljoin(BASE_URL, "release/" + albumid) return urljoin(BASE_URL, f"release/{albumid}")
def _preferred_release_event( def _preferred_release_event(
@ -291,7 +291,7 @@ def _set_date_str(
continue continue
if original: if original:
key = "original_" + key key = f"original_{key}"
setattr(info, key, date_num) setattr(info, key, date_num)
View file
@ -154,7 +154,7 @@ class PlayPlugin(BeetsPlugin):
return f"{command_str} {args}" return f"{command_str} {args}"
else: else:
# Don't include the marker in the command. # Don't include the marker in the command.
return command_str.replace(" " + ARGS_MARKER, "") return command_str.replace(f" {ARGS_MARKER}", "")
def _playlist_or_paths(self, paths): def _playlist_or_paths(self, paths):
"""Return either the raw paths of items or a playlist of the items.""" """Return either the raw paths of items or a playlist of the items."""
View file
@ -66,7 +66,7 @@ def update_plex(host, port, token, library_name, secure, ignore_cert_errors):
def append_token(url, token): def append_token(url, token):
"""Appends the Plex Home token to the api call if required.""" """Appends the Plex Home token to the api call if required."""
if token: if token:
url += "?" + urlencode({"X-Plex-Token": token}) url += f"?{urlencode({'X-Plex-Token': token})}"
return url return url
View file
@ -292,7 +292,9 @@ class SpotifyPlugin(
if not (spotify_id := self._extract_id(album_id)): if not (spotify_id := self._extract_id(album_id)):
return None return None
album_data = self._handle_response("get", self.album_url + spotify_id) album_data = self._handle_response(
"get", f"{self.album_url}{spotify_id}"
)
if album_data["name"] == "": if album_data["name"] == "":
self._log.debug("Album removed from Spotify: {}", album_id) self._log.debug("Album removed from Spotify: {}", album_id)
return None return None
@ -408,7 +410,7 @@ class SpotifyPlugin(
# release) and `track.medium_total` (total number of tracks on # release) and `track.medium_total` (total number of tracks on
# the track's disc). # the track's disc).
album_data = self._handle_response( album_data = self._handle_response(
"get", self.album_url + track_data["album"]["id"] "get", f"{self.album_url}{track_data['album']['id']}"
) )
medium_total = 0 medium_total = 0
for i, track_data in enumerate(album_data["tracks"]["items"], start=1): for i, track_data in enumerate(album_data["tracks"]["items"], start=1):
@ -447,7 +449,7 @@ class SpotifyPlugin(
except APIError as e: except APIError as e:
self._log.debug("Spotify API error: {}", e) self._log.debug("Spotify API error: {}", e)
return () return ()
response_data = response.get(query_type + "s", {}).get("items", []) response_data = response.get(f"{query_type}s", {}).get("items", [])
self._log.debug( self._log.debug(
"Found {} result(s) from {} for '{}'", "Found {} result(s) from {} for '{}'",
len(response_data), len(response_data),
@ -648,13 +650,13 @@ class SpotifyPlugin(
self._log.info( self._log.info(
f"Attempting to open {self.data_source} with playlist" f"Attempting to open {self.data_source} with playlist"
) )
spotify_url = "spotify:trackset:Playlist:" + ",".join( spotify_url = (
spotify_ids f"spotify:trackset:Playlist:{','.join(spotify_ids)}"
) )
webbrowser.open(spotify_url) webbrowser.open(spotify_url)
else: else:
for spotify_id in spotify_ids: for spotify_id in spotify_ids:
print(self.open_track_url + spotify_id) print(f"{self.open_track_url}{spotify_id}")
else: else:
self._log.warning( self._log.warning(
f"No {self.data_source} tracks found from beets query" f"No {self.data_source} tracks found from beets query"
@ -702,7 +704,7 @@ class SpotifyPlugin(
def track_info(self, track_id: str): def track_info(self, track_id: str):
"""Fetch a track's popularity and external IDs using its Spotify ID.""" """Fetch a track's popularity and external IDs using its Spotify ID."""
track_data = self._handle_response("get", self.track_url + track_id) track_data = self._handle_response("get", f"{self.track_url}{track_id}")
external_ids = track_data.get("external_ids", {}) external_ids = track_data.get("external_ids", {})
popularity = track_data.get("popularity") popularity = track_data.get("popularity")
self._log.debug( self._log.debug(
@ -721,7 +723,7 @@ class SpotifyPlugin(
"""Fetch track audio features by its Spotify ID.""" """Fetch track audio features by its Spotify ID."""
try: try:
return self._handle_response( return self._handle_response(
"get", self.audio_features_url + track_id "get", f"{self.audio_features_url}{track_id}"
) )
except APIError as e: except APIError as e:
self._log.debug("Spotify API error: {}", e) self._log.debug("Spotify API error: {}", e)
View file
@ -180,5 +180,5 @@ class SubsonicPlaylistPlugin(BeetsPlugin):
for track in tracks: for track in tracks:
if track not in output: if track not in output:
output[track] = ";" output[track] = ";"
output[track] += name + ";" output[track] += f"{name};"
return output return output
View file
@ -74,7 +74,7 @@ class SubsonicUpdate(BeetsPlugin):
# Pick the random sequence and salt the password # Pick the random sequence and salt the password
r = string.ascii_letters + string.digits r = string.ascii_letters + string.digits
salt = "".join([random.choice(r) for _ in range(6)]) salt = "".join([random.choice(r) for _ in range(6)])
salted_password = password + salt salted_password = f"{password}{salt}"
token = hashlib.md5(salted_password.encode("utf-8")).hexdigest() token = hashlib.md5(salted_password.encode("utf-8")).hexdigest()
# Put together the payload of the request to the server and the URL # Put together the payload of the request to the server and the URL
@ -101,7 +101,7 @@ class SubsonicUpdate(BeetsPlugin):
context_path = "" context_path = ""
url = f"http://{host}:{port}{context_path}" url = f"http://{host}:{port}{context_path}"
return url + f"/rest/{endpoint}" return f"{url}/rest/{endpoint}"
def start_scan(self): def start_scan(self):
user = self.config["user"].as_str() user = self.config["user"].as_str()
View file
@ -230,8 +230,7 @@ def copy_c_string(c_string):
# This is a pretty dumb way to get a string copy, but it seems to # This is a pretty dumb way to get a string copy, but it seems to
# work. A more surefire way would be to allocate a ctypes buffer and copy # work. A more surefire way would be to allocate a ctypes buffer and copy
# the data with `memcpy` or somesuch. # the data with `memcpy` or somesuch.
s = ctypes.cast(c_string, ctypes.c_char_p).value return ctypes.cast(c_string, ctypes.c_char_p).value
return b"" + s
class GioURI(URIGetter): class GioURI(URIGetter):
View file
@ -34,7 +34,7 @@ class Unimported(BeetsPlugin):
def commands(self): def commands(self):
def print_unimported(lib, opts, args): def print_unimported(lib, opts, args):
ignore_exts = [ ignore_exts = [
("." + x).encode() f".{x}".encode()
for x in self.config["ignore_extensions"].as_str_seq() for x in self.config["ignore_extensions"].as_str_seq()
] ]
ignore_dirs = [ ignore_dirs = [
View file
@ -276,6 +276,7 @@ select = [
"F", # pyflakes "F", # pyflakes
# "B", # flake8-bugbear # "B", # flake8-bugbear
"I", # isort "I", # isort
"ISC", # flake8-implicit-str-concat
"N", # pep8-naming "N", # pep8-naming
"PT", # flake8-pytest-style "PT", # flake8-pytest-style
# "RUF", # ruff # "RUF", # ruff
View file
@ -92,8 +92,8 @@ class CAAHelper:
RELEASE_URL = f"coverartarchive.org/release/{MBID_RELASE}" RELEASE_URL = f"coverartarchive.org/release/{MBID_RELASE}"
GROUP_URL = f"coverartarchive.org/release-group/{MBID_GROUP}" GROUP_URL = f"coverartarchive.org/release-group/{MBID_GROUP}"
RELEASE_URL = "https://" + RELEASE_URL RELEASE_URL = f"https://{RELEASE_URL}"
GROUP_URL = "https://" + GROUP_URL GROUP_URL = f"https://{GROUP_URL}"
RESPONSE_RELEASE = """{ RESPONSE_RELEASE = """{
"images": [ "images": [
@ -706,7 +706,7 @@ class FanartTVTest(UseThePlugin):
def test_fanarttv_finds_image(self): def test_fanarttv_finds_image(self):
album = _common.Bag(mb_releasegroupid="thereleasegroupid") album = _common.Bag(mb_releasegroupid="thereleasegroupid")
self.mock_response( self.mock_response(
fetchart.FanartTV.API_ALBUMS + "thereleasegroupid", f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid",
self.RESPONSE_MULTIPLE, self.RESPONSE_MULTIPLE,
) )
candidate = next(self.source.get(album, self.settings, [])) candidate = next(self.source.get(album, self.settings, []))
@ -715,7 +715,7 @@ class FanartTVTest(UseThePlugin):
def test_fanarttv_returns_no_result_when_error_received(self): def test_fanarttv_returns_no_result_when_error_received(self):
album = _common.Bag(mb_releasegroupid="thereleasegroupid") album = _common.Bag(mb_releasegroupid="thereleasegroupid")
self.mock_response( self.mock_response(
fetchart.FanartTV.API_ALBUMS + "thereleasegroupid", f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid",
self.RESPONSE_ERROR, self.RESPONSE_ERROR,
) )
with pytest.raises(StopIteration): with pytest.raises(StopIteration):
@ -724,7 +724,7 @@ class FanartTVTest(UseThePlugin):
def test_fanarttv_returns_no_result_with_malformed_response(self): def test_fanarttv_returns_no_result_with_malformed_response(self):
album = _common.Bag(mb_releasegroupid="thereleasegroupid") album = _common.Bag(mb_releasegroupid="thereleasegroupid")
self.mock_response( self.mock_response(
fetchart.FanartTV.API_ALBUMS + "thereleasegroupid", f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid",
self.RESPONSE_MALFORMED, self.RESPONSE_MALFORMED,
) )
with pytest.raises(StopIteration): with pytest.raises(StopIteration):
@ -734,7 +734,7 @@ class FanartTVTest(UseThePlugin):
# The source used to fail when there were images present, but no cover # The source used to fail when there were images present, but no cover
album = _common.Bag(mb_releasegroupid="thereleasegroupid") album = _common.Bag(mb_releasegroupid="thereleasegroupid")
self.mock_response( self.mock_response(
fetchart.FanartTV.API_ALBUMS + "thereleasegroupid", f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid",
self.RESPONSE_NO_ART, self.RESPONSE_NO_ART,
) )
with pytest.raises(StopIteration): with pytest.raises(StopIteration):
View file
@ -65,7 +65,7 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase):
if m.title.replace("Tag", "Applied") == item.title: if m.title.replace("Tag", "Applied") == item.title:
return m return m
raise AssertionError( raise AssertionError(
"No MediaFile found for Item " + displayable_path(item.path) f"No MediaFile found for Item {displayable_path(item.path)}"
) )
def test_import_album_with_added_dates(self): def test_import_album_with_added_dates(self):
@ -117,7 +117,7 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase):
for item_path, added_after in items_added_after.items(): for item_path, added_after in items_added_after.items():
assert items_added_before[item_path] == pytest.approx( assert items_added_before[item_path] == pytest.approx(
added_after, rel=1e-4 added_after, rel=1e-4
), "reimport modified Item.added for " + displayable_path(item_path) ), f"reimport modified Item.added for {displayable_path(item_path)}"
def test_import_singletons_with_added_dates(self): def test_import_singletons_with_added_dates(self):
self.config["import"]["singletons"] = True self.config["import"]["singletons"] = True
@ -157,4 +157,4 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase):
for item_path, added_after in items_added_after.items(): for item_path, added_after in items_added_after.items():
assert items_added_before[item_path] == pytest.approx( assert items_added_before[item_path] == pytest.approx(
added_after, rel=1e-4 added_after, rel=1e-4
), "reimport modified Item.added for " + displayable_path(item_path) ), f"reimport modified Item.added for {displayable_path(item_path)}"
View file
@ -42,8 +42,8 @@ class LimitPluginTest(PluginTestCase):
# a subset of tests has only `num_limit` results, identified by a # a subset of tests has only `num_limit` results, identified by a
# range filter on the track number # range filter on the track number
self.track_head_range = "track:.." + str(self.num_limit) self.track_head_range = f"track:..{self.num_limit}"
self.track_tail_range = "track:" + str(self.num_limit + 1) + ".." self.track_tail_range = f"track:{self.num_limit + 1}.."
def test_no_limit(self): def test_no_limit(self):
"""Returns all when there is no limit or filter.""" """Returns all when there is no limit or filter."""
@ -82,13 +82,13 @@ class LimitPluginTest(PluginTestCase):
def test_prefix_when_correctly_ordered(self): def test_prefix_when_correctly_ordered(self):
"""Returns the expected number with the query prefix and filter when """Returns the expected number with the query prefix and filter when
the prefix portion (correctly) appears last.""" the prefix portion (correctly) appears last."""
correct_order = self.track_tail_range + " " + self.num_limit_prefix correct_order = f"{self.track_tail_range} {self.num_limit_prefix}"
result = self.lib.items(correct_order) result = self.lib.items(correct_order)
assert len(result) == self.num_limit assert len(result) == self.num_limit
def test_prefix_when_incorrectly_ordred(self): def test_prefix_when_incorrectly_ordred(self):
"""Returns no results with the query prefix and filter when the prefix """Returns no results with the query prefix and filter when the prefix
portion (incorrectly) appears first.""" portion (incorrectly) appears first."""
incorrect_order = self.num_limit_prefix + " " + self.track_tail_range incorrect_order = f"{self.num_limit_prefix} {self.track_tail_range}"
result = self.lib.items(incorrect_order) result = self.lib.items(incorrect_order)
assert len(result) == 0 assert len(result) == 0
View file
@ -670,17 +670,17 @@ class ArtistFlatteningTest(unittest.TestCase):
def _credit_dict(self, suffix=""): def _credit_dict(self, suffix=""):
return { return {
"artist": { "artist": {
"name": "NAME" + suffix, "name": f"NAME{suffix}",
"sort-name": "SORT" + suffix, "sort-name": f"SORT{suffix}",
}, },
"name": "CREDIT" + suffix, "name": f"CREDIT{suffix}",
} }
def _add_alias(self, credit_dict, suffix="", locale="", primary=False): def _add_alias(self, credit_dict, suffix="", locale="", primary=False):
alias = { alias = {
"alias": "ALIAS" + suffix, "alias": f"ALIAS{suffix}",
"locale": locale, "locale": locale,
"sort-name": "ALIASSORT" + suffix, "sort-name": f"ALIASSORT{suffix}",
} }
if primary: if primary:
alias["primary"] = "primary" alias["primary"] = "primary"
View file
@ -49,7 +49,7 @@ class PlayPluginTest(CleanupModulesMixin, PluginTestCase):
open_mock.assert_called_once_with(ANY, expected_cmd) open_mock.assert_called_once_with(ANY, expected_cmd)
expected_playlist = expected_playlist or self.item.path.decode("utf-8") expected_playlist = expected_playlist or self.item.path.decode("utf-8")
exp_playlist = expected_playlist + "\n" exp_playlist = f"{expected_playlist}\n"
with open(open_mock.call_args[0][0][0], "rb") as playlist: with open(open_mock.call_args[0][0][0], "rb") as playlist:
assert exp_playlist == playlist.read().decode("utf-8") assert exp_playlist == playlist.read().decode("utf-8")
View file
@ -132,7 +132,7 @@ class PlaylistTestRelativeToLib(PlaylistQueryTest, PlaylistTestCase):
[ [
os.path.join("a", "b", "c.mp3") + "\n", os.path.join("a", "b", "c.mp3") + "\n",
os.path.join("d", "e", "f.mp3") + "\n", os.path.join("d", "e", "f.mp3") + "\n",
"nonexisting.mp3" + "\n", "nonexisting.mp3\n",
] ]
) )
@ -155,7 +155,7 @@ class PlaylistTestRelativeToDir(PlaylistQueryTest, PlaylistTestCase):
[ [
os.path.join("a", "b", "c.mp3") + "\n", os.path.join("a", "b", "c.mp3") + "\n",
os.path.join("d", "e", "f.mp3") + "\n", os.path.join("d", "e", "f.mp3") + "\n",
"nonexisting.mp3" + "\n", "nonexisting.mp3\n",
] ]
) )
@ -214,7 +214,7 @@ class PlaylistUpdateTest:
[ [
os.path.join("a", "b", "c.mp3") + "\n", os.path.join("a", "b", "c.mp3") + "\n",
os.path.join("d", "e", "f.mp3") + "\n", os.path.join("d", "e", "f.mp3") + "\n",
"nonexisting.mp3" + "\n", "nonexisting.mp3\n",
] ]
) )
View file
@ -29,7 +29,7 @@ class PlexUpdateTest(PluginTestCase):
"</Directory>" "</Directory>"
'<Directory allowSync="0" art="/:/resources/artist-fanart.jpg" ' '<Directory allowSync="0" art="/:/resources/artist-fanart.jpg" '
'filters="1" refreshing="0" thumb="/:/resources/artist.png" ' 'filters="1" refreshing="0" thumb="/:/resources/artist.png" '
'key="2" type="artist" title="' + escaped_section_name + '" ' f'key="2" type="artist" title="{escaped_section_name}" '
'composite="/library/sections/2/composite/1416929243" ' 'composite="/library/sections/2/composite/1416929243" '
'agent="com.plexapp.agents.lastfm" scanner="Plex Music Scanner" ' 'agent="com.plexapp.agents.lastfm" scanner="Plex Music Scanner" '
'language="en" uuid="90897c95-b3bd-4778-a9c8-1f43cb78f047" ' 'language="en" uuid="90897c95-b3bd-4778-a9c8-1f43cb78f047" '
View file
@ -43,7 +43,7 @@ list_field_extension = mediafile.ListMediaField(
class ExtendedFieldTestMixin(BeetsTestCase): class ExtendedFieldTestMixin(BeetsTestCase):
def _mediafile_fixture(self, name, extension="mp3"): def _mediafile_fixture(self, name, extension="mp3"):
name = bytestring_path(name + "." + extension) name = bytestring_path(f"{name}.{extension}")
src = os.path.join(_common.RSRC, name) src = os.path.join(_common.RSRC, name)
target = os.path.join(self.temp_dir, name) target = os.path.join(self.temp_dir, name)
shutil.copy(syspath(src), syspath(target)) shutil.copy(syspath(src), syspath(target))
View file
@ -227,11 +227,10 @@ class SmartPlaylistTest(BeetsTestCase):
content = m3u_filepath.read_bytes() content = m3u_filepath.read_bytes()
rmtree(syspath(dir)) rmtree(syspath(dir))
assert ( assert content == (
content b"#EXTM3U\n"
== b"#EXTM3U\n" b"#EXTINF:300,fake artist - fake title\n"
+ b"#EXTINF:300,fake artist - fake title\n" b"http://beets:8337/files/tagada.mp3\n"
+ b"http://beets:8337/files/tagada.mp3\n"
) )
def test_playlist_update_output_extm3u_fields(self): def test_playlist_update_output_extm3u_fields(self):
@ -278,11 +277,10 @@ class SmartPlaylistTest(BeetsTestCase):
content = m3u_filepath.read_bytes() content = m3u_filepath.read_bytes()
rmtree(syspath(dir)) rmtree(syspath(dir))
assert ( assert content == (
content b"#EXTM3U\n"
== b"#EXTM3U\n" b'#EXTINF:300 id="456" genre="Fake%20Genre",Fake Artist - fake Title\n'
+ b'#EXTINF:300 id="456" genre="Fake%20Genre",Fake Artist - fake Title\n' b"/tagada.mp3\n"
+ b"/tagada.mp3\n"
) )
def test_playlist_update_uri_format(self): def test_playlist_update_uri_format(self):
View file
@ -132,7 +132,7 @@ class SpotifyPluginTest(PluginTestCase):
responses.add( responses.add(
responses.GET, responses.GET,
spotify.SpotifyPlugin.track_url + "6NPVjNh8Jhru9xOmyQigds", f"{spotify.SpotifyPlugin.track_url}6NPVjNh8Jhru9xOmyQigds",
body=response_body, body=response_body,
status=200, status=200,
content_type="application/json", content_type="application/json",
@ -145,7 +145,7 @@ class SpotifyPluginTest(PluginTestCase):
responses.add( responses.add(
responses.GET, responses.GET,
spotify.SpotifyPlugin.album_url + "5l3zEmMrOhOzG8d8s83GOL", f"{spotify.SpotifyPlugin.album_url}5l3zEmMrOhOzG8d8s83GOL",
body=response_body, body=response_body,
status=200, status=200,
content_type="application/json", content_type="application/json",
View file
@ -55,8 +55,10 @@ class SubstitutePluginTest(PluginTestCase):
[ [
("King Creosote & Jon Hopkins", "King Creosote"), ("King Creosote & Jon Hopkins", "King Creosote"),
( (
"Michael Hurley, The Holy Modal Rounders, Jeffrey Frederick & " (
+ "The Clamtones", "Michael Hurley, The Holy Modal Rounders, Jeffrey"
" Frederick & The Clamtones"
),
"Michael Hurley", "Michael Hurley",
), ),
("James Yorkston and the Athletes", "James Yorkston"), ("James Yorkston and the Athletes", "James Yorkston"),
View file
@ -142,7 +142,7 @@ class WebPluginTest(ItemInDBTestCase):
def test_get_single_item_by_path(self): def test_get_single_item_by_path(self):
data_path = os.path.join(_common.RSRC, b"full.mp3") data_path = os.path.join(_common.RSRC, b"full.mp3")
self.lib.add(Item.from_path(data_path)) self.lib.add(Item.from_path(data_path))
response = self.client.get("/item/path/" + data_path.decode("utf-8")) response = self.client.get(f"/item/path/{data_path.decode('utf-8')}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
@ -152,12 +152,11 @@ class WebPluginTest(ItemInDBTestCase):
data_path = os.path.join(_common.RSRC, b"full.mp3") data_path = os.path.join(_common.RSRC, b"full.mp3")
# data_path points to a valid file, but we have not added the file # data_path points to a valid file, but we have not added the file
# to the library. # to the library.
response = self.client.get("/item/path/" + data_path.decode("utf-8")) response = self.client.get(f"/item/path/{data_path.decode('utf-8')}")
assert response.status_code == 404 assert response.status_code == 404
def test_get_item_empty_query(self): def test_get_item_empty_query(self):
"""testing item query: <empty>"""
response = self.client.get("/item/query/") response = self.client.get("/item/query/")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
@ -165,7 +164,6 @@ class WebPluginTest(ItemInDBTestCase):
assert len(res_json["items"]) == 3 assert len(res_json["items"]) == 3
def test_get_simple_item_query(self): def test_get_simple_item_query(self):
"""testing item query: another"""
response = self.client.get("/item/query/another") response = self.client.get("/item/query/another")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
@ -174,8 +172,7 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["results"][0]["title"] == "another title" assert res_json["results"][0]["title"] == "another title"
def test_query_item_string(self): def test_query_item_string(self):
"""testing item query: testattr:ABC""" response = self.client.get("/item/query/testattr%3aABC") # testattr:ABC
response = self.client.get("/item/query/testattr%3aABC")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
@ -183,8 +180,9 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["results"][0]["title"] == "and a third" assert res_json["results"][0]["title"] == "and a third"
def test_query_item_regex(self): def test_query_item_regex(self):
"""testing item query: testattr::[A-C]+""" response = self.client.get(
response = self.client.get("/item/query/testattr%3a%3a[A-C]%2b") "/item/query/testattr%3a%3a[A-C]%2b"
) # testattr::[A-C]+
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
@ -192,8 +190,9 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["results"][0]["title"] == "and a third" assert res_json["results"][0]["title"] == "and a third"
def test_query_item_regex_backslash(self): def test_query_item_regex_backslash(self):
# """ testing item query: testattr::\w+ """ response = self.client.get(
response = self.client.get("/item/query/testattr%3a%3a%5cw%2b") "/item/query/testattr%3a%3a%5cw%2b"
) # testattr::\w+
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
@ -201,7 +200,6 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["results"][0]["title"] == "and a third" assert res_json["results"][0]["title"] == "and a third"
def test_query_item_path(self): def test_query_item_path(self):
# """ testing item query: path:\somewhere\a """
"""Note: path queries are special: the query item must match the path """Note: path queries are special: the query item must match the path
from the root all the way to a directory, so this matches 1 item""" from the root all the way to a directory, so this matches 1 item"""
""" Note: filesystem separators in the query must be '\' """ """ Note: filesystem separators in the query must be '\' """
@ -267,8 +265,9 @@ class WebPluginTest(ItemInDBTestCase):
assert response_track_titles == {"title", "and a third"} assert response_track_titles == {"title", "and a third"}
def test_query_album_string(self): def test_query_album_string(self):
"""testing query: albumtest:xy""" response = self.client.get(
response = self.client.get("/album/query/albumtest%3axy") "/album/query/albumtest%3axy"
) # albumtest:xy
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
@ -276,8 +275,9 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["results"][0]["album"] == "album" assert res_json["results"][0]["album"] == "album"
def test_query_album_artpath_regex(self): def test_query_album_artpath_regex(self):
"""testing query: artpath::art_""" response = self.client.get(
response = self.client.get("/album/query/artpath%3a%3aart_") "/album/query/artpath%3a%3aart_"
) # artpath::art_
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
@ -285,8 +285,9 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["results"][0]["album"] == "other album" assert res_json["results"][0]["album"] == "other album"
def test_query_album_regex_backslash(self): def test_query_album_regex_backslash(self):
# """ testing query: albumtest::\w+ """ response = self.client.get(
response = self.client.get("/album/query/albumtest%3a%3a%5cw%2b") "/album/query/albumtest%3a%3a%5cw%2b"
) # albumtest::\w+
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
@ -310,18 +311,18 @@ class WebPluginTest(ItemInDBTestCase):
) )
# Check we can find the temporary item we just created # Check we can find the temporary item we just created
response = self.client.get("/item/" + str(item_id)) response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
assert res_json["id"] == item_id assert res_json["id"] == item_id
# Delete item by id # Delete item by id
response = self.client.delete("/item/" + str(item_id)) response = self.client.delete(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
# Check the item has gone # Check the item has gone
response = self.client.get("/item/" + str(item_id)) response = self.client.get(f"/item/{item_id}")
assert response.status_code == 404 assert response.status_code == 404
# Note: if this fails, the item may still be around # Note: if this fails, the item may still be around
# and may cause other tests to fail # and may cause other tests to fail
@ -336,18 +337,18 @@ class WebPluginTest(ItemInDBTestCase):
item_id = self.lib.add(Item.from_path(ipath)) item_id = self.lib.add(Item.from_path(ipath))
# Check we can find the temporary item we just created # Check we can find the temporary item we just created
response = self.client.get("/item/" + str(item_id)) response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
assert res_json["id"] == item_id assert res_json["id"] == item_id
# Delete item by id, without deleting file # Delete item by id, without deleting file
response = self.client.delete("/item/" + str(item_id)) response = self.client.delete(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
# Check the item has gone # Check the item has gone
response = self.client.get("/item/" + str(item_id)) response = self.client.get(f"/item/{item_id}")
assert response.status_code == 404 assert response.status_code == 404
# Check the file has not gone # Check the file has not gone
@ -364,18 +365,18 @@ class WebPluginTest(ItemInDBTestCase):
item_id = self.lib.add(Item.from_path(ipath)) item_id = self.lib.add(Item.from_path(ipath))
# Check we can find the temporary item we just created # Check we can find the temporary item we just created
response = self.client.get("/item/" + str(item_id)) response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
assert res_json["id"] == item_id assert res_json["id"] == item_id
# Delete item by id, with file # Delete item by id, with file
response = self.client.delete("/item/" + str(item_id) + "?delete") response = self.client.delete(f"/item/{item_id}?delete")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
# Check the item has gone # Check the item has gone
response = self.client.get("/item/" + str(item_id)) response = self.client.get(f"/item/{item_id}")
assert response.status_code == 404 assert response.status_code == 404
# Check the file has gone # Check the file has gone
@ -427,17 +428,17 @@ class WebPluginTest(ItemInDBTestCase):
) )
# Check we can find the temporary item we just created # Check we can find the temporary item we just created
response = self.client.get("/item/" + str(item_id)) response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
assert res_json["id"] == item_id assert res_json["id"] == item_id
# Try to delete item by id # Try to delete item by id
response = self.client.delete("/item/" + str(item_id)) response = self.client.delete(f"/item/{item_id}")
assert response.status_code == 405 assert response.status_code == 405
# Check the item has not gone # Check the item has not gone
response = self.client.get("/item/" + str(item_id)) response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
assert res_json["id"] == item_id assert res_json["id"] == item_id
@ -481,18 +482,18 @@ class WebPluginTest(ItemInDBTestCase):
) )
# Check we can find the temporary album we just created # Check we can find the temporary album we just created
response = self.client.get("/album/" + str(album_id)) response = self.client.get(f"/album/{album_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
assert res_json["id"] == album_id assert res_json["id"] == album_id
# Delete album by id # Delete album by id
response = self.client.delete("/album/" + str(album_id)) response = self.client.delete(f"/album/{album_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
# Check the album has gone # Check the album has gone
response = self.client.get("/album/" + str(album_id)) response = self.client.get(f"/album/{album_id}")
assert response.status_code == 404 assert response.status_code == 404
# Note: if this fails, the album may still be around # Note: if this fails, the album may still be around
# and may cause other tests to fail # and may cause other tests to fail
@ -543,17 +544,17 @@ class WebPluginTest(ItemInDBTestCase):
) )
# Check we can find the temporary album we just created # Check we can find the temporary album we just created
response = self.client.get("/album/" + str(album_id)) response = self.client.get(f"/album/{album_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
assert res_json["id"] == album_id assert res_json["id"] == album_id
# Try to delete album by id # Try to delete album by id
response = self.client.delete("/album/" + str(album_id)) response = self.client.delete(f"/album/{album_id}")
assert response.status_code == 405 assert response.status_code == 405
# Check the item has not gone # Check the item has not gone
response = self.client.get("/album/" + str(album_id)) response = self.client.get(f"/album/{album_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
assert res_json["id"] == album_id assert res_json["id"] == album_id
@ -603,7 +604,7 @@ class WebPluginTest(ItemInDBTestCase):
) )
# Check we can find the temporary item we just created # Check we can find the temporary item we just created
response = self.client.get("/item/" + str(item_id)) response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
assert res_json["id"] == item_id assert res_json["id"] == item_id
@ -613,7 +614,7 @@ class WebPluginTest(ItemInDBTestCase):
# Patch item by id # Patch item by id
# patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"}]}) # patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"}]})
response = self.client.patch( response = self.client.patch(
"/item/" + str(item_id), json={"test_patch_f2": "New"} f"/item/{item_id}", json={"test_patch_f2": "New"}
) )
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
@ -622,7 +623,7 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["test_patch_f2"] == "New" assert res_json["test_patch_f2"] == "New"
# Check the update has really worked # Check the update has really worked
response = self.client.get("/item/" + str(item_id)) response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
assert res_json["id"] == item_id assert res_json["id"] == item_id
@ -647,7 +648,7 @@ class WebPluginTest(ItemInDBTestCase):
) )
# Check we can find the temporary item we just created # Check we can find the temporary item we just created
response = self.client.get("/item/" + str(item_id)) response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200 assert response.status_code == 200
assert res_json["id"] == item_id assert res_json["id"] == item_id
@ -657,7 +658,7 @@ class WebPluginTest(ItemInDBTestCase):
# Patch item by id # Patch item by id
# patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"}) # patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"})
response = self.client.patch( response = self.client.patch(
"/item/" + str(item_id), json={"test_patch_f2": "New"} f"/item/{item_id}", json={"test_patch_f2": "New"}
) )
assert response.status_code == 405 assert response.status_code == 405
@ -670,6 +671,6 @@ class WebPluginTest(ItemInDBTestCase):
assert os.path.exists(ipath) assert os.path.exists(ipath)
item_id = self.lib.add(Item.from_path(ipath)) item_id = self.lib.add(Item.from_path(ipath))
response = self.client.get("/item/" + str(item_id) + "/file") response = self.client.get(f"/item/{item_id}/file")
assert response.status_code == 200 assert response.status_code == 200
View file
@ -150,9 +150,5 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, BeetsTestCase):
metadata = {"a": "A", "b": "B"} metadata = {"a": "A", "b": "B"}
im = DummyIMBackend() im = DummyIMBackend()
im.write_metadata("foo", metadata) im.write_metadata("foo", metadata)
try: command = [*im.convert_cmd, *"foo -set a A -set b B foo".split()]
command = im.convert_cmd + "foo -set a A -set b B foo".split() mock_util.command_output.assert_called_once_with(command)
mock_util.command_output.assert_called_once_with(command)
except AssertionError:
command = im.convert_cmd + "foo -set b B -set a A foo".split()
mock_util.command_output.assert_called_once_with(command)
View file
@ -186,37 +186,37 @@ class DateQueryTestRelativeMore(ItemInDBTestCase):
def test_relative(self): def test_relative(self):
for timespan in ["d", "w", "m", "y"]: for timespan in ["d", "w", "m", "y"]:
query = DateQuery("added", "-4" + timespan + "..+4" + timespan) query = DateQuery("added", f"-4{timespan}..+4{timespan}")
matched = self.lib.items(query) matched = self.lib.items(query)
assert len(matched) == 1 assert len(matched) == 1
def test_relative_fail(self): def test_relative_fail(self):
for timespan in ["d", "w", "m", "y"]: for timespan in ["d", "w", "m", "y"]:
query = DateQuery("added", "-2" + timespan + "..-1" + timespan) query = DateQuery("added", f"-2{timespan}..-1{timespan}")
matched = self.lib.items(query) matched = self.lib.items(query)
assert len(matched) == 0 assert len(matched) == 0
def test_start_relative(self): def test_start_relative(self):
for timespan in ["d", "w", "m", "y"]: for timespan in ["d", "w", "m", "y"]:
query = DateQuery("added", "-4" + timespan + "..") query = DateQuery("added", f"-4{timespan}..")
matched = self.lib.items(query) matched = self.lib.items(query)
assert len(matched) == 1 assert len(matched) == 1
def test_start_relative_fail(self): def test_start_relative_fail(self):
for timespan in ["d", "w", "m", "y"]: for timespan in ["d", "w", "m", "y"]:
query = DateQuery("added", "4" + timespan + "..") query = DateQuery("added", f"4{timespan}..")
matched = self.lib.items(query) matched = self.lib.items(query)
assert len(matched) == 0 assert len(matched) == 0
def test_end_relative(self): def test_end_relative(self):
for timespan in ["d", "w", "m", "y"]: for timespan in ["d", "w", "m", "y"]:
query = DateQuery("added", "..+4" + timespan) query = DateQuery("added", f"..+4{timespan}")
matched = self.lib.items(query) matched = self.lib.items(query)
assert len(matched) == 1 assert len(matched) == 1
def test_end_relative_fail(self): def test_end_relative_fail(self):
for timespan in ["d", "w", "m", "y"]: for timespan in ["d", "w", "m", "y"]:
query = DateQuery("added", "..-4" + timespan) query = DateQuery("added", f"..-4{timespan}")
matched = self.lib.items(query) matched = self.lib.items(query)
assert len(matched) == 0 assert len(matched) == 0
View file
@ -58,9 +58,9 @@ class LoggingLevelTest(AsIsImporterMixin, PluginMixin, ImportTestCase):
self.register_listener("dummy_event", self.listener) self.register_listener("dummy_event", self.listener)
def log_all(self, name): def log_all(self, name):
self._log.debug("debug " + name) self._log.debug(f"debug {name}")
self._log.info("info " + name) self._log.info(f"info {name}")
self._log.warning("warning " + name) self._log.warning(f"warning {name}")
def commands(self): def commands(self):
cmd = ui.Subcommand("dummy") cmd = ui.Subcommand("dummy")
@ -172,9 +172,9 @@ class ConcurrentEventsTest(AsIsImporterMixin, ImportTestCase):
self.t1_step = self.t2_step = 0 self.t1_step = self.t2_step = 0
def log_all(self, name): def log_all(self, name):
self._log.debug("debug " + name) self._log.debug(f"debug {name}")
self._log.info("info " + name) self._log.info(f"info {name}")
self._log.warning("warning " + name) self._log.warning(f"warning {name}")
def listener1(self): def listener1(self):
try: try:
View file
@ -1257,7 +1257,7 @@ class ShowChangeTest(IOMixin, unittest.TestCase):
with patch("beets.ui.commands.ui.term_width", return_value=30): with patch("beets.ui.commands.ui.term_width", return_value=30):
# Test newline layout # Test newline layout
config["ui"]["import"]["layout"] = "newline" config["ui"]["import"]["layout"] = "newline"
long_name = "another artist with a" + (" very" * 10) + " long name" long_name = f"another artist with a{' very' * 10} long name"
msg = self._show_change( msg = self._show_change(
cur_artist=long_name, cur_album="another album" cur_artist=long_name, cur_album="another album"
) )
@ -1270,7 +1270,7 @@ class ShowChangeTest(IOMixin, unittest.TestCase):
with patch("beets.ui.commands.ui.term_width", return_value=54): with patch("beets.ui.commands.ui.term_width", return_value=54):
# Test Column layout # Test Column layout
config["ui"]["import"]["layout"] = "column" config["ui"]["import"]["layout"] = "column"
long_title = "a track with a" + (" very" * 10) + " long name" long_title = f"a track with a{' very' * 10} long name"
self.items[0].title = long_title self.items[0].title = long_title
msg = self._show_change() msg = self._show_change()
assert "(#1) a track (1:00) -> (#1) the title (0:00)" in msg assert "(#1) a track (1:00) -> (#1) the title (0:00)" in msg
@ -1279,7 +1279,7 @@ class ShowChangeTest(IOMixin, unittest.TestCase):
# Patch ui.term_width to force wrapping # Patch ui.term_width to force wrapping
with patch("beets.ui.commands.ui.term_width", return_value=30): with patch("beets.ui.commands.ui.term_width", return_value=30):
config["ui"]["import"]["layout"] = "newline" config["ui"]["import"]["layout"] = "newline"
long_title = "a track with a" + (" very" * 10) + " long name" long_title = f"a track with a{' very' * 10} long name"
self.items[0].title = long_title self.items[0].title = long_title
msg = self._show_change() msg = self._show_change()
assert "(#1) a track with" in msg assert "(#1) a track with" in msg