Replace string concatenation (' + ')

- Join hardcoded strings
- Replace concatenated variables with f-strings
Šarūnas Nejus 2025-08-30 23:10:15 +01:00
parent a7c83d91e9
commit 1c16b2b308
GPG key ID: DD28F6704DBE3435
56 changed files with 328 additions and 377 deletions
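
The change applies two patterns throughout: adjacent hardcoded literals are merged into a single literal, and concatenation involving variables becomes an f-string. A minimal runnable sketch of the before/after shapes (hypothetical values, adapted from the hunks below):

```python
# Hypothetical values, for illustration only.
prefix, suffix, ext = "original_", "year", "mp3"

# Before: explicit "+" concatenation.
key = prefix + suffix                      # variable + variable
path = "full." + ext                       # literal + variable
url = "https://" + "coverartarchive.org"   # literal + literal

# After: f-strings for variables, one literal where both sides were hardcoded.
key = f"{prefix}{suffix}"
path = f"full.{ext}"
url = "https://coverartarchive.org"

assert key == "original_year" and path == "full.mp3"
```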


@ -192,7 +192,7 @@ def extract(log, outpath, item):
if not ext:
log.warning("Unknown image type in {0}.", displayable_path(item.path))
return
outpath += bytestring_path("." + ext)
outpath += bytestring_path(f".{ext}")
log.info(
"Extracting album art from: {0} to: {1}",


@ -261,7 +261,7 @@ def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]):
continue
for suffix in "year", "month", "day":
key = prefix + suffix
key = f"{prefix}{suffix}"
value = getattr(album_info, key) or 0
# If we don't even have a year, apply nothing.


@ -588,7 +588,7 @@ class Model(ABC, Generic[D]):
for key in fields:
if key != "id" and key in self._dirty:
self._dirty.remove(key)
assignments.append(key + "=?")
assignments.append(f"{key}=?")
value = self._type(key).to_sql(self[key])
subvars.append(value)


@ -190,7 +190,7 @@ class MatchQuery(FieldQuery[AnySQLiteType]):
"""A query that looks for exact matches in an Model field."""
def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
return self.field + " = ?", [self.pattern]
return f"{self.field} = ?", [self.pattern]
@classmethod
def value_match(cls, pattern: AnySQLiteType, value: Any) -> bool:
@ -204,7 +204,7 @@ class NoneQuery(FieldQuery[None]):
super().__init__(field, None, fast)
def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
return self.field + " IS NULL", ()
return f"{self.field} IS NULL", ()
def match(self, obj: Model) -> bool:
return obj.get(self.field_name) is None
@ -246,7 +246,7 @@ class StringQuery(StringFieldQuery[str]):
.replace("%", "\\%")
.replace("_", "\\_")
)
clause = self.field + " like ? escape '\\'"
clause = f"{self.field} like ? escape '\\'"
subvals = [search]
return clause, subvals
@ -264,8 +264,8 @@ class SubstringQuery(StringFieldQuery[str]):
.replace("%", "\\%")
.replace("_", "\\_")
)
search = "%" + pattern + "%"
clause = self.field + " like ? escape '\\'"
search = f"%{pattern}%"
clause = f"{self.field} like ? escape '\\'"
subvals = [search]
return clause, subvals
@ -471,7 +471,7 @@ class NumericQuery(FieldQuery[str]):
def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
if self.point is not None:
return self.field + "=?", (self.point,)
return f"{self.field}=?", (self.point,)
else:
if self.rangemin is not None and self.rangemax is not None:
return (
@ -549,9 +549,9 @@ class CollectionQuery(Query):
if not subq_clause:
# Fall back to slow query.
return None, ()
clause_parts.append("(" + subq_clause + ")")
clause_parts.append(f"({subq_clause})")
subvals += subq_subvals
clause = (" " + joiner + " ").join(clause_parts)
clause = f" {joiner} ".join(clause_parts)
return clause, subvals
def __repr__(self) -> str:
@ -690,9 +690,7 @@ class Period:
("%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"), # second
)
relative_units = {"y": 365, "m": 30, "w": 7, "d": 1}
relative_re = (
"(?P<sign>[+|-]?)(?P<quantity>[0-9]+)" + "(?P<timespan>[y|m|w|d])"
)
relative_re = "(?P<sign>[+|-]?)(?P<quantity>[0-9]+)(?P<timespan>[y|m|w|d])"
def __init__(self, date: datetime, precision: str):
"""Create a period with the given date (a `datetime` object) and


@ -28,11 +28,11 @@ class ReadError(FileOperationError):
"""An error while reading a file (i.e. in `Item.read`)."""
def __str__(self):
return "error reading " + str(super())
return f"error reading {super()}"
class WriteError(FileOperationError):
"""An error while writing a file (i.e. in `Item.write`)."""
def __str__(self):
return "error writing " + str(super())
return f"error writing {super()}"


@ -130,9 +130,9 @@ class PluginLogFilter(logging.Filter):
def filter(self, record):
if hasattr(record.msg, "msg") and isinstance(record.msg.msg, str):
# A _LogMessage from our hacked-up Logging replacement.
record.msg.msg = self.prefix + record.msg.msg
record.msg.msg = f"{self.prefix}{record.msg.msg}"
elif isinstance(record.msg, str):
record.msg = self.prefix + record.msg
record.msg = f"{self.prefix}{record.msg}"
return True


@ -153,7 +153,7 @@ class DummyIn:
self.out = out
def add(self, s):
self.buf.append(s + "\n")
self.buf.append(f"{s}\n")
def close(self):
pass


@ -278,7 +278,7 @@ class TestHelper(ConfigMixin):
values_["db"] = self.lib
item = Item(**values_)
if "path" not in values:
item["path"] = "audio." + item["format"].lower()
item["path"] = f"audio.{item['format'].lower()}"
# mtime needs to be set last since other assignments reset it.
item.mtime = 12345
return item
@ -310,7 +310,7 @@ class TestHelper(ConfigMixin):
item = self.create_item(**values)
extension = item["format"].lower()
item["path"] = os.path.join(
_common.RSRC, util.bytestring_path("min." + extension)
_common.RSRC, util.bytestring_path(f"min.{extension}")
)
item.add(self.lib)
item.move(operation=MoveOperation.COPY)
@ -325,7 +325,7 @@ class TestHelper(ConfigMixin):
"""Add a number of items with files to the database."""
# TODO base this on `add_item()`
items = []
path = os.path.join(_common.RSRC, util.bytestring_path("full." + ext))
path = os.path.join(_common.RSRC, util.bytestring_path(f"full.{ext}"))
for i in range(count):
item = Item.from_path(path)
item.album = f"\u00e4lbum {i}" # Check unicode paths
@ -372,7 +372,7 @@ class TestHelper(ConfigMixin):
specified extension a cover art image is added to the media
file.
"""
src = os.path.join(_common.RSRC, util.bytestring_path("full." + ext))
src = os.path.join(_common.RSRC, util.bytestring_path(f"full.{ext}"))
handle, path = mkstemp(dir=self.temp_dir)
path = bytestring_path(path)
os.close(handle)
@ -570,7 +570,7 @@ class ImportHelper(TestHelper):
medium = MediaFile(track_path)
medium.update(
{
"album": "Tag Album" + (f" {album_id}" if album_id else ""),
"album": f"Tag Album{f' {album_id}' if album_id else ''}",
"albumartist": None,
"mb_albumid": None,
"comp": None,
@ -839,15 +839,13 @@ class AutotagStub:
)
def _make_album_match(self, artist, album, tracks, distance=0, missing=0):
if distance:
id = " " + "M" * distance
else:
id = ""
id = f" {'M' * distance}" if distance else ""
if artist is None:
artist = "Various Artists"
else:
artist = artist.replace("Tag", "Applied") + id
album = album.replace("Tag", "Applied") + id
artist = f"{artist.replace('Tag', 'Applied')}{id}"
album = f"{album.replace('Tag', 'Applied')}{id}"
track_infos = []
for i in range(tracks - missing):
@ -858,8 +856,8 @@ class AutotagStub:
album=album,
tracks=track_infos,
va=False,
album_id="albumid" + id,
artist_id="artistid" + id,
album_id=f"albumid{id}",
artist_id=f"artistid{id}",
albumtype="soundtrack",
data_source="match_source",
bandcamp_album_id="bc_url",
@ -885,7 +883,7 @@ class FetchImageHelper:
super().run(*args, **kwargs)
IMAGEHEADER: dict[str, bytes] = {
"image/jpeg": b"\xff\xd8\xff" + b"\x00" * 3 + b"JFIF",
"image/jpeg": b"\xff\xd8\xff\x00\x00\x00JFIF",
"image/png": b"\211PNG\r\n\032\n",
"image/gif": b"GIF89a",
# dummy type that is definitely not a valid image content type


@ -125,7 +125,7 @@ def print_(*strings: str, end: str = "\n") -> None:
The `end` keyword argument behaves similarly to the built-in `print`
(it defaults to a newline).
"""
txt = " ".join(strings or ("",)) + end
txt = f"{' '.join(strings or ('',))}{end}"
# Encode the string and write it to stdout.
# On Python 3, sys.stdout expects text strings and uses the
@ -338,7 +338,7 @@ def input_options(
if line_length != 0:
# Not the beginning of the line; need a space.
part = " " + part
part = f" {part}"
length += 1
prompt += part
@ -350,7 +350,7 @@ def input_options(
fallback_prompt = "Enter one of "
if numrange:
fallback_prompt += "{}-{}, ".format(*numrange)
fallback_prompt += ", ".join(display_letters) + ":"
fallback_prompt += f"{', '.join(display_letters)}:"
resp = input_(prompt)
while True:
@ -494,7 +494,7 @@ ANSI_CODES = {
"bg_cyan": 46,
"bg_white": 47,
}
RESET_COLOR = COLOR_ESCAPE + "39;49;00m"
RESET_COLOR = f"{COLOR_ESCAPE}39;49;00m"
# These abstract COLOR_NAMES are lazily mapped on to the actual color in COLORS
# as they are defined in the configuration files, see function: colorize
@ -534,8 +534,8 @@ def _colorize(color, text):
# over all "ANSI codes" in `color`.
escape = ""
for code in color:
escape = escape + COLOR_ESCAPE + f"{ANSI_CODES[code]}m"
return escape + text + RESET_COLOR
escape = f"{escape}{COLOR_ESCAPE}{ANSI_CODES[code]}m"
return f"{escape}{text}{RESET_COLOR}"
def colorize(color_name, text):
@ -621,8 +621,8 @@ def color_split(colored_text, index):
split_index = index - (length - color_len(part))
found_split = True
if found_color_code:
pre_split += part[:split_index] + RESET_COLOR
post_split += found_color_code + part[split_index:]
pre_split += f"{part[:split_index]}{RESET_COLOR}"
post_split += f"{found_color_code}{part[split_index:]}"
else:
pre_split += part[:split_index]
post_split += part[split_index:]
@ -806,17 +806,17 @@ def split_into_lines(string, width_tuple):
# Colorize each word with pre/post escapes
# Reconstruct colored words
words += [
m.group("esc") + raw_word + RESET_COLOR
f"{m['esc']}{raw_word}{RESET_COLOR}"
for raw_word in raw_words
]
elif raw_words:
# Pretext stops mid-word
if m.group("esc") != RESET_COLOR:
# Add the rest of the current word, with a reset after it
words[-1] += m.group("esc") + raw_words[0] + RESET_COLOR
words[-1] += f"{m['esc']}{raw_words[0]}{RESET_COLOR}"
# Add the subsequent colored words:
words += [
m.group("esc") + raw_word + RESET_COLOR
f"{m['esc']}{raw_word}{RESET_COLOR}"
for raw_word in raw_words[1:]
]
else:
@ -907,18 +907,12 @@ def print_column_layout(
With subsequent lines (i.e. {lhs1}, {rhs1} onwards) being the
rest of contents, wrapped if the width would be otherwise exceeded.
"""
if right["prefix"] + right["contents"] + right["suffix"] == "":
if f"{right['prefix']}{right['contents']}{right['suffix']}" == "":
# No right hand information, so we don't need a separator.
separator = ""
first_line_no_wrap = (
indent_str
+ left["prefix"]
+ left["contents"]
+ left["suffix"]
+ separator
+ right["prefix"]
+ right["contents"]
+ right["suffix"]
f"{indent_str}{left['prefix']}{left['contents']}{left['suffix']}"
f"{separator}{right['prefix']}{right['contents']}{right['suffix']}"
)
if color_len(first_line_no_wrap) < max_width:
# Everything fits, print out line.
@ -1044,18 +1038,12 @@ def print_newline_layout(
If {lhs0} would go over the maximum width, the subsequent lines are
indented a second time for ease of reading.
"""
if right["prefix"] + right["contents"] + right["suffix"] == "":
if f"{right['prefix']}{right['contents']}{right['suffix']}" == "":
# No right hand information, so we don't need a separator.
separator = ""
first_line_no_wrap = (
indent_str
+ left["prefix"]
+ left["contents"]
+ left["suffix"]
+ separator
+ right["prefix"]
+ right["contents"]
+ right["suffix"]
f"{indent_str}{left['prefix']}{left['contents']}{left['suffix']}"
f"{separator}{right['prefix']}{right['contents']}{right['suffix']}"
)
if color_len(first_line_no_wrap) < max_width:
# Everything fits, print out line.
@ -1069,7 +1057,7 @@ def print_newline_layout(
empty_space - len(indent_str),
empty_space - len(indent_str),
)
left_str = left["prefix"] + left["contents"] + left["suffix"]
left_str = f"{left['prefix']}{left['contents']}{left['suffix']}"
left_split = split_into_lines(left_str, left_width_tuple)
# Repeat calculations for rhs, including separator on first line
right_width_tuple = (
@ -1077,19 +1065,19 @@ def print_newline_layout(
empty_space - len(indent_str),
empty_space - len(indent_str),
)
right_str = right["prefix"] + right["contents"] + right["suffix"]
right_str = f"{right['prefix']}{right['contents']}{right['suffix']}"
right_split = split_into_lines(right_str, right_width_tuple)
for i, line in enumerate(left_split):
if i == 0:
print_(indent_str + line)
print_(f"{indent_str}{line}")
elif line != "":
# Ignore empty lines
print_(indent_str * 2 + line)
print_(f"{indent_str * 2}{line}")
for i, line in enumerate(right_split):
if i == 0:
print_(indent_str + separator + line)
print_(f"{indent_str}{separator}{line}")
elif line != "":
print_(indent_str * 2 + line)
print_(f"{indent_str * 2}{line}")
FLOAT_EPSILON = 0.01
@ -1505,7 +1493,7 @@ class SubcommandsOptionParser(CommonOptionsParser):
# Concatenate the original help message with the subcommand
# list.
return out + "".join(result)
return f"{out}{''.join(result)}"
def _subcommand_for_name(self, name):
"""Return the subcommand in self.subcommands matching the


@ -18,6 +18,7 @@ interface.
import os
import re
import textwrap
from collections import Counter
from collections.abc import Sequence
from itertools import chain
@ -128,13 +129,13 @@ def _print_keys(query):
returned row, with indentation of 2 spaces.
"""
for row in query:
print_(" " * 2 + row["key"])
print_(f"  {row['key']}")
def fields_func(lib, opts, args):
def _print_rows(names):
names.sort()
print_(" " + "\n ".join(names))
print_(textwrap.indent("\n".join(names), " "))
print_("Item fields:")
_print_rows(library.Item.all_keys())
@ -356,18 +357,18 @@ class ChangeRepresentation:
# 'Match' line and similarity.
print_(
self.indent_header + f"Match ({dist_string(self.match.distance)}):"
f"{self.indent_header}Match ({dist_string(self.match.distance)}):"
)
if isinstance(self.match.info, autotag.hooks.AlbumInfo):
# Matching an album - print that
artist_album_str = (
f"{self.match.info.artist}" + f" - {self.match.info.album}"
f"{self.match.info.artist} - {self.match.info.album}"
)
else:
# Matching a single track
artist_album_str = (
f"{self.match.info.artist}" + f" - {self.match.info.title}"
f"{self.match.info.artist} - {self.match.info.title}"
)
print_(
self.indent_header
@ -377,22 +378,23 @@ class ChangeRepresentation:
# Penalties.
penalties = penalty_string(self.match.distance)
if penalties:
print_(self.indent_header + penalties)
print_(f"{self.indent_header}{penalties}")
# Disambiguation.
disambig = disambig_string(self.match.info)
if disambig:
print_(self.indent_header + disambig)
print_(f"{self.indent_header}{disambig}")
# Data URL.
if self.match.info.data_url:
url = ui.colorize("text_faint", f"{self.match.info.data_url}")
print_(self.indent_header + url)
print_(f"{self.indent_header}{url}")
def show_match_details(self):
"""Print out the details of the match, including changes in album name
and artist name.
"""
changed_prefix = ui.colorize("changed", "\u2260")
# Artist.
artist_l, artist_r = self.cur_artist or "", self.match.info.artist
if artist_r == VARIOUS_ARTISTS:
@ -402,7 +404,7 @@ class ChangeRepresentation:
artist_l, artist_r = ui.colordiff(artist_l, artist_r)
# Prefix with U+2260: Not Equal To
left = {
"prefix": ui.colorize("changed", "\u2260") + " Artist: ",
"prefix": f"{changed_prefix} Artist: ",
"contents": artist_l,
"suffix": "",
}
@ -410,7 +412,7 @@ class ChangeRepresentation:
self.print_layout(self.indent_detail, left, right)
else:
print_(self.indent_detail + "*", "Artist:", artist_r)
print_(f"{self.indent_detail}*", "Artist:", artist_r)
if self.cur_album:
# Album
@ -422,14 +424,14 @@ class ChangeRepresentation:
album_l, album_r = ui.colordiff(album_l, album_r)
# Prefix with U+2260: Not Equal To
left = {
"prefix": ui.colorize("changed", "\u2260") + " Album: ",
"prefix": f"{changed_prefix} Album: ",
"contents": album_l,
"suffix": "",
}
right = {"prefix": "", "contents": album_r, "suffix": ""}
self.print_layout(self.indent_detail, left, right)
else:
print_(self.indent_detail + "*", "Album:", album_r)
print_(f"{self.indent_detail}*", "Album:", album_r)
elif self.cur_title:
# Title - for singletons
title_l, title_r = self.cur_title or "", self.match.info.title
@ -437,14 +439,14 @@ class ChangeRepresentation:
title_l, title_r = ui.colordiff(title_l, title_r)
# Prefix with U+2260: Not Equal To
left = {
"prefix": ui.colorize("changed", "\u2260") + " Title: ",
"prefix": f"{changed_prefix} Title: ",
"contents": title_l,
"suffix": "",
}
right = {"prefix": "", "contents": title_r, "suffix": ""}
self.print_layout(self.indent_detail, left, right)
else:
print_(self.indent_detail + "*", "Title:", title_r)
print_(f"{self.indent_detail}*", "Title:", title_r)
def make_medium_info_line(self, track_info):
"""Construct a line with the current medium's info."""
@ -568,9 +570,9 @@ class ChangeRepresentation:
prefix = ui.colorize("changed", "\u2260 ") if changed else "* "
lhs = {
"prefix": prefix + lhs_track + " ",
"prefix": f"{prefix}{lhs_track} ",
"contents": lhs_title,
"suffix": " " + lhs_length,
"suffix": f" {lhs_length}",
}
rhs = {"prefix": "", "contents": "", "suffix": ""}
if not changed:
@ -579,9 +581,9 @@ class ChangeRepresentation:
else:
# Construct a dictionary for the "changed to" side
rhs = {
"prefix": rhs_track + " ",
"prefix": f"{rhs_track} ",
"contents": rhs_title,
"suffix": " " + rhs_length,
"suffix": f" {rhs_length}",
}
return (lhs, rhs)
@ -674,7 +676,7 @@ class AlbumChange(ChangeRepresentation):
# Print tracks from previous medium
self.print_tracklist(lines)
lines = []
print_(self.indent_detail + header)
print_(f"{self.indent_detail}{header}")
# Save new medium details for future comparison.
medium, disctitle = track_info.medium, track_info.disctitle
@ -907,7 +909,7 @@ def choose_candidate(
f' {item.title if singleton else cur_album}".'
)
print_(ui.indent(2) + "Candidates:")
print_("  Candidates:")
for i, match in enumerate(candidates):
# Index, metadata, and distance.
index0 = f"{i + 1}."
@ -923,17 +925,17 @@ def choose_candidate(
else:
metadata = ui.colorize("text_highlight_minor", metadata)
line1 = [index, distance, metadata]
print_(ui.indent(2) + " ".join(line1))
print_(f"  {' '.join(line1)}")
# Penalties.
penalties = penalty_string(match.distance, 3)
if penalties:
print_(ui.indent(13) + penalties)
print_(f"{' ' * 13}{penalties}")
# Disambiguation
disambig = disambig_string(match.info)
if disambig:
print_(ui.indent(13) + disambig)
print_(f"{' ' * 13}{disambig}")
# Ask the user for a choice.
sel = ui.input_options(choice_opts, numrange=(1, len(candidates)))
@ -1892,7 +1894,7 @@ def show_stats(lib, query, exact):
if item.album_id:
albums.add(item.album_id)
size_str = "" + human_bytes(total_size)
size_str = human_bytes(total_size)
if exact:
size_str += f" ({total_size} bytes)"


@ -433,8 +433,8 @@ def syspath(path: PathLike, prefix: bool = True) -> str:
if prefix and not str_path.startswith(WINDOWS_MAGIC_PREFIX):
if str_path.startswith("\\\\"):
# UNC path. Final path should look like \\?\UNC\...
str_path = "UNC" + str_path[1:]
str_path = WINDOWS_MAGIC_PREFIX + str_path
str_path = f"UNC{str_path[1:]}"
str_path = f"{WINDOWS_MAGIC_PREFIX}{str_path}"
return str_path
@ -506,8 +506,8 @@ def move(path: bytes, dest: bytes, replace: bool = False):
basename = os.path.basename(bytestring_path(dest))
dirname = os.path.dirname(bytestring_path(dest))
tmp = tempfile.NamedTemporaryFile(
suffix=syspath(b".beets", prefix=False),
prefix=syspath(b"." + basename + b".", prefix=False),
suffix=".beets",
prefix=f".{os.fsdecode(basename)}.",
dir=syspath(dirname),
delete=False,
)
@ -716,7 +716,7 @@ def truncate_path(str_path: str) -> str:
path = Path(str_path)
parent_parts = [truncate_str(p, max_length) for p in path.parts[:-1]]
stem = truncate_str(path.stem, max_length - len(path.suffix))
return str(Path(*parent_parts, stem)) + path.suffix
return f"{Path(*parent_parts, stem)}{path.suffix}"
def _legalize_stage(


@ -152,7 +152,7 @@ class Symbol:
def translate(self):
"""Compile the variable lookup."""
ident = self.ident
expr = ex_rvalue(VARIABLE_PREFIX + ident)
expr = ex_rvalue(f"{VARIABLE_PREFIX}{ident}")
return [expr], {ident}, set()
@ -211,7 +211,7 @@ class Call:
)
)
subexpr_call = ex_call(FUNCTION_PREFIX + self.ident, arg_exprs)
subexpr_call = ex_call(f"{FUNCTION_PREFIX}{self.ident}", arg_exprs)
return [subexpr_call], varnames, funcnames
@ -555,9 +555,9 @@ class Template:
argnames = []
for varname in varnames:
argnames.append(VARIABLE_PREFIX + varname)
argnames.append(f"{VARIABLE_PREFIX}{varname}")
for funcname in funcnames:
argnames.append(FUNCTION_PREFIX + funcname)
argnames.append(f"{FUNCTION_PREFIX}{funcname}")
func = compile_func(
argnames,
@ -567,9 +567,9 @@ class Template:
def wrapper_func(values={}, functions={}):
args = {}
for varname in varnames:
args[VARIABLE_PREFIX + varname] = values[varname]
args[f"{VARIABLE_PREFIX}{varname}"] = values[varname]
for funcname in funcnames:
args[FUNCTION_PREFIX + funcname] = functions[funcname]
args[f"{FUNCTION_PREFIX}{funcname}"] = functions[funcname]
parts = func(**args)
return "".join(parts)


@ -97,8 +97,8 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
"with an HTTP scheme"
)
elif base_url[-1] != "/":
base_url = base_url + "/"
self.url = base_url + "{mbid}/low-level"
base_url = f"{base_url}/"
self.url = f"{base_url}{{mbid}}/low-level"
def commands(self):
cmd = ui.Subcommand(


@ -97,7 +97,7 @@ class AcousticPlugin(plugins.BeetsPlugin):
"with an HTTP scheme"
)
elif self.base_url[-1] != "/":
self.base_url = self.base_url + "/"
self.base_url = f"{self.base_url}/"
if self.config["auto"]:
self.register_listener("import_task_files", self.import_task_files)
@ -300,4 +300,4 @@ class AcousticPlugin(plugins.BeetsPlugin):
def _generate_urls(base_url, mbid):
"""Generates AcousticBrainz end point urls for given `mbid`."""
for level in LEVELS:
yield base_url + mbid + level
yield f"{base_url}{mbid}{level}"


@ -236,10 +236,10 @@ class AURADocument:
# Not the last page so work out links.next url
if not self.args:
# No existing arguments, so current page is 0
next_url = request.url + "?page=1"
next_url = f"{request.url}?page=1"
elif not self.args.get("page", None):
# No existing page argument, so add one to the end
next_url = request.url + "&page=1"
next_url = f"{request.url}&page=1"
else:
# Increment page token by 1
next_url = request.url.replace(
@ -697,7 +697,7 @@ class ImageDocument(AURADocument):
relationships = {}
# Split id into [parent_type, parent_id, filename]
id_split = image_id.split("-")
relationships[id_split[0] + "s"] = {
relationships[f"{id_split[0]}s"] = {
"data": [{"type": id_split[0], "id": id_split[1]}]
}


@ -110,7 +110,7 @@ class BeatportClient:
:returns: OAuth resource owner key and secret as unicode
"""
self.api.parse_authorization_response(
"https://beets.io/auth?" + auth_data
f"https://beets.io/auth?{auth_data}"
)
access_data = self.api.fetch_access_token(
self._make_url("/identity/1/oauth/access-token")
@ -200,8 +200,8 @@ class BeatportClient:
def _make_url(self, endpoint: str) -> str:
"""Get complete URL for a given API endpoint."""
if not endpoint.startswith("/"):
endpoint = "/" + endpoint
return self._api_base + endpoint
endpoint = f"/{endpoint}"
return f"{self._api_base}{endpoint}"
def _get(self, endpoint: str, **kwargs) -> list[JSONDict]:
"""Perform a GET request on a given API endpoint.


@ -282,7 +282,7 @@ class BaseServer:
if not self.ctrl_sock:
self.ctrl_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.ctrl_sock.connect((self.ctrl_host, self.ctrl_port))
self.ctrl_sock.sendall((message + "\n").encode("utf-8"))
self.ctrl_sock.sendall((f"{message}\n").encode("utf-8"))
def _send_event(self, event):
"""Notify subscribed connections of an event."""
@ -376,13 +376,13 @@ class BaseServer:
if self.password and not conn.authenticated:
# Not authenticated. Show limited list of commands.
for cmd in SAFE_COMMANDS:
yield "command: " + cmd
yield f"command: {cmd}"
else:
# Authenticated. Show all commands.
for func in dir(self):
if func.startswith("cmd_"):
yield "command: " + func[4:]
yield f"command: {func[4:]}"
def cmd_notcommands(self, conn):
"""Lists all unavailable commands."""
@ -392,7 +392,7 @@ class BaseServer:
if func.startswith("cmd_"):
cmd = func[4:]
if cmd not in SAFE_COMMANDS:
yield "command: " + cmd
yield f"command: {cmd}"
else:
# Authenticated. No commands are unavailable.
@ -406,22 +406,22 @@ class BaseServer:
playlist, playlistlength, and xfade.
"""
yield (
"repeat: " + str(int(self.repeat)),
"random: " + str(int(self.random)),
"consume: " + str(int(self.consume)),
"single: " + str(int(self.single)),
"playlist: " + str(self.playlist_version),
"playlistlength: " + str(len(self.playlist)),
"mixrampdb: " + str(self.mixrampdb),
f"repeat: {int(self.repeat)}",
f"random: {int(self.random)}",
f"consume: {int(self.consume)}",
f"single: {int(self.single)}",
f"playlist: {self.playlist_version}",
f"playlistlength: {len(self.playlist)}",
f"mixrampdb: {self.mixrampdb}",
)
if self.volume > 0:
yield "volume: " + str(self.volume)
yield f"volume: {self.volume}"
if not math.isnan(self.mixrampdelay):
yield "mixrampdelay: " + str(self.mixrampdelay)
yield f"mixrampdelay: {self.mixrampdelay}"
if self.crossfade > 0:
yield "xfade: " + str(self.crossfade)
yield f"xfade: {self.crossfade}"
if self.current_index == -1:
state = "stop"
@ -429,20 +429,20 @@ class BaseServer:
state = "pause"
else:
state = "play"
yield "state: " + state
yield f"state: {state}"
if self.current_index != -1: # i.e., paused or playing
current_id = self._item_id(self.playlist[self.current_index])
yield "song: " + str(self.current_index)
yield "songid: " + str(current_id)
yield f"song: {self.current_index}"
yield f"songid: {current_id}"
if len(self.playlist) > self.current_index + 1:
# If there's a next song, report its index too.
next_id = self._item_id(self.playlist[self.current_index + 1])
yield "nextsong: " + str(self.current_index + 1)
yield "nextsongid: " + str(next_id)
yield f"nextsong: {self.current_index + 1}"
yield f"nextsongid: {next_id}"
if self.error:
yield "error: " + self.error
yield f"error: {self.error}"
def cmd_clearerror(self, conn):
"""Removes the persistent error state of the server. This
@ -522,7 +522,7 @@ class BaseServer:
def cmd_replay_gain_status(self, conn):
"""Get the replaygain mode."""
yield "replay_gain_mode: " + str(self.replay_gain_mode)
yield f"replay_gain_mode: {self.replay_gain_mode}"
def cmd_clear(self, conn):
"""Clear the playlist."""
@ -643,8 +643,8 @@ class BaseServer:
Also a dummy implementation.
"""
for idx, track in enumerate(self.playlist):
yield "cpos: " + str(idx)
yield "Id: " + str(track.id)
yield f"cpos: {idx}"
yield f"Id: {track.id}"
def cmd_currentsong(self, conn):
"""Sends information about the currently-playing song."""
@ -990,7 +990,7 @@ class Command:
of arguments.
"""
# Attempt to get correct command function.
func_name = prefix + self.name
func_name = f"{prefix}{self.name}"
if not hasattr(target, func_name):
raise AttributeError(f'unknown command "{self.name}"')
func = getattr(target, func_name)
@ -1124,15 +1124,15 @@ class Server(BaseServer):
def _item_info(self, item):
info_lines = [
"file: " + as_string(item.destination(relative_to_libdir=True)),
"Time: " + str(int(item.length)),
"duration: " + f"{item.length:.3f}",
"Id: " + str(item.id),
f"file: {as_string(item.destination(relative_to_libdir=True))}",
f"Time: {int(item.length)}",
f"duration: {item.length:.3f}",
f"Id: {item.id}",
]
try:
pos = self._id_to_index(item.id)
info_lines.append("Pos: " + str(pos))
info_lines.append(f"Pos: {pos}")
except ArgumentNotFoundError:
# Don't include position if not in playlist.
pass
@ -1201,7 +1201,7 @@ class Server(BaseServer):
def _path_join(self, p1, p2):
"""Smashes together two BPD paths."""
out = p1 + "/" + p2
out = f"{p1}/{p2}"
return out.replace("//", "/").replace("//", "/")
def cmd_lsinfo(self, conn, path="/"):
@ -1231,7 +1231,7 @@ class Server(BaseServer):
item = self.lib.get_item(node)
yield self._item_info(item)
else:
yield "file: " + basepath
yield f"file: {basepath}"
else:
# List a directory. Recurse into both directories and files.
for name, itemid in sorted(node.files.items()):
@ -1240,7 +1240,7 @@ class Server(BaseServer):
yield from self._listall(newpath, itemid, info)
for name, subdir in sorted(node.dirs.items()):
newpath = self._path_join(basepath, name)
yield "directory: " + newpath
yield f"directory: {newpath}"
yield from self._listall(newpath, subdir, info)
def cmd_listall(self, conn, path="/"):
@ -1274,7 +1274,7 @@ class Server(BaseServer):
for item in self._all_items(self._resolve_path(path)):
self.playlist.append(item)
if send_id:
yield "Id: " + str(item.id)
yield f"Id: {item.id}"
self.playlist_version += 1
self._send_event("playlist")
@ -1296,7 +1296,7 @@ class Server(BaseServer):
item = self.playlist[self.current_index]
yield (
"bitrate: " + str(item.bitrate / 1000),
f"bitrate: {item.bitrate / 1000}",
f"audio: {item.samplerate}:{item.bitdepth}:{item.channels}",
)
@ -1322,13 +1322,13 @@ class Server(BaseServer):
artists, albums, songs, totaltime = tx.query(statement)[0]
yield (
"artists: " + str(artists),
"albums: " + str(albums),
"songs: " + str(songs),
"uptime: " + str(int(time.time() - self.startup_time)),
"playtime: " + "0", # Missing.
"db_playtime: " + str(int(totaltime)),
"db_update: " + str(int(self.updated_time)),
f"artists: {artists}",
f"albums: {albums}",
f"songs: {songs}",
f"uptime: {int(time.time() - self.startup_time)}",
"playtime: 0", # Missing.
f"db_playtime: {int(totaltime)}",
f"db_update: {int(self.updated_time)}",
)
def cmd_decoders(self, conn):
@ -1370,7 +1370,7 @@ class Server(BaseServer):
searching.
"""
for tag in self.tagtype_map:
yield "tagtype: " + tag
yield f"tagtype: {tag}"
def _tagtype_lookup(self, tag):
"""Uses `tagtype_map` to look up the beets column name for an
@ -1445,12 +1445,9 @@ class Server(BaseServer):
clause, subvals = query.clause()
statement = (
"SELECT DISTINCT "
+ show_key
+ " FROM items WHERE "
+ clause
+ " ORDER BY "
+ show_key
f"SELECT DISTINCT {show_key}"
f" FROM items WHERE {clause}"
f" ORDER BY {show_key}"
)
self._log.debug(statement)
with self.lib.transaction() as tx:
@ -1460,7 +1457,7 @@ class Server(BaseServer):
if not row[0]:
# Skip any empty values of the field.
continue
yield show_tag_canon + ": " + str(row[0])
yield f"{show_tag_canon}: {row[0]}"
def cmd_count(self, conn, tag, value):
"""Returns the number and total time of songs matching the
@ -1474,8 +1471,8 @@ class Server(BaseServer):
):
songs += 1
playtime += item.length
yield "songs: " + str(songs)
yield "playtime: " + str(int(playtime))
yield f"songs: {songs}"
yield f"playtime: {int(playtime)}"
# Persistent playlist manipulation. In MPD this is an optional feature so
# these dummy implementations match MPD's behaviour with the feature off.


@ -129,7 +129,7 @@ class GstPlayer:
self.player.set_state(Gst.State.NULL)
if isinstance(path, str):
path = path.encode("utf-8")
uri = "file://" + urllib.parse.quote(path)
uri = f"file://{urllib.parse.quote(path)}"
self.player.set_property("uri", uri)
self.player.set_state(Gst.State.PLAYING)
self.playing = True


@ -169,10 +169,8 @@ def build_alpha_spans(alpha_spans_str, alpha_regexs):
)
spans.append(
re.compile(
"^["
+ ASCII_DIGITS[begin_index : end_index + 1]
+ ASCII_DIGITS[begin_index : end_index + 1].upper()
+ "]"
rf"^[{ASCII_DIGITS[begin_index : end_index + 1]}]",
re.IGNORECASE,
)
)
return spans


@ -649,7 +649,7 @@ class ConvertPlugin(BeetsPlugin):
tmpdir = self.config["tmpdir"].get()
if tmpdir:
tmpdir = os.fsdecode(util.bytestring_path(tmpdir))
fd, dest = tempfile.mkstemp(os.fsdecode(b"." + ext), dir=tmpdir)
fd, dest = tempfile.mkstemp(f".{os.fsdecode(ext)}", dir=tmpdir)
os.close(fd)
dest = util.bytestring_path(dest)
_temp_files.append(dest) # Delete the transcode later.


@ -96,7 +96,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
f"Invalid `release_date` returned by {self.data_source} API: "
f"{release_date!r}"
)
tracks_obj = self.fetch_data(self.album_url + deezer_id + "/tracks")
tracks_obj = self.fetch_data(f"{self.album_url}{deezer_id}/tracks")
if tracks_obj is None:
return None
try:
@ -169,7 +169,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
# the track's disc).
if not (
album_tracks_obj := self.fetch_data(
self.album_url + str(track_data["album"]["id"]) + "/tracks"
f"{self.album_url}{track_data['album']['id']}/tracks"
)
):
return None
@ -244,7 +244,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
self._log.debug(f"Searching {self.data_source} for '{query}'")
try:
response = requests.get(
self.search_url + query_type,
f"{self.search_url}{query_type}",
params={"q": query},
timeout=10,
)


@ -385,7 +385,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
track.artist_id = artist_id
# Discogs does not have track IDs. Invent our own IDs as proposed
# in #2336.
track.track_id = str(album_id) + "-" + track.track_alt
track.track_id = f"{album_id}-{track.track_alt}"
track.data_url = data_url
track.data_source = "Discogs"


@ -682,7 +682,7 @@ class GoogleImages(RemoteArtSource):
"""
if not (album.albumartist and album.album):
return
search_string = (album.albumartist + "," + album.album).encode("utf-8")
search_string = f"{album.albumartist},{album.album}".encode("utf-8")
try:
response = self.request(
@ -723,7 +723,7 @@ class FanartTV(RemoteArtSource):
NAME = "fanart.tv"
ID = "fanarttv"
API_URL = "https://webservice.fanart.tv/v3/"
API_ALBUMS = API_URL + "music/albums/"
API_ALBUMS = f"{API_URL}music/albums/"
PROJECT_KEY = "61a7d0ab4e67162b7a0c7c35915cd48e"
def __init__(self, *args, **kwargs):
@ -750,7 +750,7 @@ class FanartTV(RemoteArtSource):
try:
response = self.request(
self.API_ALBUMS + album.mb_releasegroupid,
f"{self.API_ALBUMS}{album.mb_releasegroupid}",
headers={
"api-key": self.PROJECT_KEY,
"client-key": self.client_key,
@ -820,7 +820,7 @@ class ITunesStore(RemoteArtSource):
return
payload = {
"term": album.albumartist + " " + album.album,
"term": f"{album.albumartist} {album.album}",
"entity": "album",
"media": "music",
"limit": 200,
@ -947,7 +947,7 @@ class Wikipedia(RemoteArtSource):
data = dbpedia_response.json()
results = data["results"]["bindings"]
if results:
cover_filename = "File:" + results[0]["coverFilename"]["value"]
cover_filename = f"File:{results[0]['coverFilename']['value']}"
page_id = results[0]["pageId"]["value"]
else:
self._log.debug("wikipedia: album not found on dbpedia")
@ -996,7 +996,7 @@ class Wikipedia(RemoteArtSource):
results = data["query"]["pages"][page_id]["images"]
for result in results:
if re.match(
re.escape(lpart) + r".*?\." + re.escape(rpart),
rf"{re.escape(lpart)}.*?\.{re.escape(rpart)}",
result["title"],
):
cover_filename = result["title"]
@ -1227,7 +1227,7 @@ class Spotify(RemoteArtSource):
paths: None | Sequence[bytes],
) -> Iterator[Candidate]:
try:
url = self.SPOTIFY_ALBUM_URL + album.items().get().spotify_album_id
url = f"{self.SPOTIFY_ALBUM_URL}{album.items().get().spotify_album_id}"
except AttributeError:
self._log.debug("Fetchart: no Spotify album ID found")
return


@ -123,19 +123,13 @@ class FishPlugin(BeetsPlugin):
for name in names:
cmd_names_help.append((name, cmd.help))
# Concatenate the string
totstring = HEAD + "\n"
totstring = f"{HEAD}\n"
totstring += get_cmds_list([name[0] for name in cmd_names_help])
totstring += "" if nobasicfields else get_standard_fields(fields)
totstring += get_extravalues(lib, extravalues) if extravalues else ""
totstring += (
"\n" + "# ====== setup basic beet completion =====" + "\n" * 2
)
totstring += "\n# ====== setup basic beet completion =====\n\n"
totstring += get_basic_beet_options()
totstring += (
"\n"
+ "# ====== setup field completion for subcommands ====="
+ "\n"
)
totstring += "\n# ====== setup field completion for subcommands =====\n"
totstring += get_subcommands(cmd_names_help, nobasicfields, extravalues)
# Set up completion for all the command options
totstring += get_all_commands(beetcmds)
@ -147,23 +141,19 @@ class FishPlugin(BeetsPlugin):
def _escape(name):
# Escape ? in fish
if name == "?":
name = "\\" + name
name = f"\\{name}"
return name
def get_cmds_list(cmds_names):
# Make a list of all Beets core & plugin commands
substr = ""
substr += "set CMDS " + " ".join(cmds_names) + ("\n" * 2)
return substr
return f"set CMDS {' '.join(cmds_names)}\n\n"
def get_standard_fields(fields):
# Make a list of album/track fields and append with ':'
fields = (field + ":" for field in fields)
substr = ""
substr += "set FIELDS " + " ".join(fields) + ("\n" * 2)
return substr
fields = (f"{field}:" for field in fields)
return f"set FIELDS {' '.join(fields)}\n\n"
def get_extravalues(lib, extravalues):
@ -172,14 +162,8 @@ def get_extravalues(lib, extravalues):
word = ""
values_set = get_set_of_values_for_field(lib, extravalues)
for fld in extravalues:
extraname = fld.upper() + "S"
word += (
"set "
+ extraname
+ " "
+ " ".join(sorted(values_set[fld]))
+ ("\n" * 2)
)
extraname = f"{fld.upper()}S"
word += f"set {extraname} {' '.join(sorted(values_set[fld]))}\n\n"
return word
@ -223,31 +207,29 @@ def get_subcommands(cmd_name_and_help, nobasicfields, extravalues):
for cmdname, cmdhelp in cmd_name_and_help:
cmdname = _escape(cmdname)
word += "\n" + f"# ------ fieldsetups for {cmdname} -------" + "\n"
word += f"\n# ------ fieldsetups for {cmdname} -------\n"
word += BL_NEED2.format(
("-a " + cmdname), ("-f " + "-d " + wrap(clean_whitespace(cmdhelp)))
f"-a {cmdname}", f"-f -d {wrap(clean_whitespace(cmdhelp))}"
)
if nobasicfields is False:
word += BL_USE3.format(
cmdname,
("-a " + wrap("$FIELDS")),
("-f " + "-d " + wrap("fieldname")),
f"-a {wrap('$FIELDS')}",
f"-f -d {wrap('fieldname')}",
)
if extravalues:
for f in extravalues:
setvar = wrap("$" + f.upper() + "S")
word += (
" ".join(
BL_EXTRA3.format(
(cmdname + " " + f + ":"),
("-f " + "-A " + "-a " + setvar),
("-d " + wrap(f)),
).split()
)
+ "\n"
setvar = wrap(f"${f.upper()}S")
word += " ".join(
BL_EXTRA3.format(
f"{cmdname} {f}:",
f"-f -A -a {setvar}",
f"-d {wrap(f)}",
).split()
)
word += "\n"
return word
@ -260,55 +242,44 @@ def get_all_commands(beetcmds):
for name in names:
name = _escape(name)
word += "\n"
word += ("\n" * 2) + f"# ====== completions for {name} =====" + "\n"
word += f"\n\n\n# ====== completions for {name} =====\n"
for option in cmd.parser._get_all_options()[1:]:
cmd_l = (
(" -l " + option._long_opts[0].replace("--", ""))
f" -l {option._long_opts[0].replace('--', '')}"
if option._long_opts
else ""
)
cmd_s = (
(" -s " + option._short_opts[0].replace("-", ""))
f" -s {option._short_opts[0].replace('-', '')}"
if option._short_opts
else ""
)
cmd_need_arg = " -r " if option.nargs in [1] else ""
cmd_helpstr = (
(" -d " + wrap(" ".join(option.help.split())))
f" -d {wrap(' '.join(option.help.split()))}"
if option.help
else ""
)
cmd_arglist = (
(" -a " + wrap(" ".join(option.choices)))
f" -a {wrap(' '.join(option.choices))}"
if option.choices
else ""
)
word += (
" ".join(
BL_USE3.format(
name,
(
cmd_need_arg
+ cmd_s
+ cmd_l
+ " -f "
+ cmd_arglist
),
cmd_helpstr,
).split()
)
+ "\n"
word += " ".join(
BL_USE3.format(
name,
f"{cmd_need_arg}{cmd_s}{cmd_l} -f {cmd_arglist}",
cmd_helpstr,
).split()
)
word += "\n"
word = word + " ".join(
BL_USE3.format(
name,
("-s " + "h " + "-l " + "help" + " -f "),
("-d " + wrap("print help") + "\n"),
).split()
word = word + BL_USE3.format(
name,
"-s h -l help -f",
f"-d {wrap('print help')}",
)
return word
@ -323,7 +294,7 @@ def wrap(word):
sptoken = '"'
if '"' in word and ("'") in word:
word.replace('"', sptoken)
return '"' + word + '"'
return f'"{word}"'
tok = '"' if "'" in word else "'"
return tok + word + tok
return f"{tok}{word}{tok}"


@ -50,7 +50,7 @@ def _build_m3u_filename(basename):
path = normpath(
os.path.join(
config["importfeeds"]["dir"].as_filename(),
date + "_" + basename + ".m3u",
f"{date}_{basename}.m3u",
)
)
return path


@ -361,7 +361,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):
)
if resolved_genres:
suffix = "whitelist" if self.whitelist else "any"
label = stage_label + f", {suffix}"
label = f"{stage_label}, {suffix}"
if keep_genres:
label = f"keep + {label}"
return self._format_and_stringify(resolved_genres), label


@ -70,7 +70,7 @@ class CustomUser(pylast.User):
tuple with the total number of pages of results. Includes an MBID, if
found.
"""
doc = self._request(self.ws_prefix + "." + method, cacheable, params)
doc = self._request(f"{self.ws_prefix}.{method}", cacheable, params)
toptracks_node = doc.getElementsByTagName("toptracks")[0]
total_pages = int(toptracks_node.getAttribute("totalPages"))


@ -582,7 +582,7 @@ class Tekstowo(SearchBackend):
"""Fetch lyrics from Tekstowo.pl."""
BASE_URL = "https://www.tekstowo.pl"
SEARCH_URL = BASE_URL + "/szukaj,{}.html"
SEARCH_URL = f"{BASE_URL}/szukaj,{{}}.html"
def build_url(self, artist, title):
artistitle = f"{artist.title()} {title.title()}"


@ -49,7 +49,7 @@ def load_meta_sources():
meta_sources = {}
for module_path, class_name in SOURCES.items():
module = import_module(METASYNC_MODULE + "." + module_path)
module = import_module(f"{METASYNC_MODULE}.{module_path}")
meta_sources[class_name.lower()] = getattr(module, class_name)
return meta_sources


@ -81,7 +81,7 @@ class Itunes(MetaSource):
with open(library_copy, "rb") as library_copy_f:
raw_library = plistlib.load(library_copy_f)
except OSError as e:
raise ConfigValueError("invalid iTunes library: " + e.strerror)
raise ConfigValueError(f"invalid iTunes library: {e.strerror}")
except Exception:
# It's likely the user configured their '.itl' library (<> xml)
if os.path.splitext(library_path)[1].lower() != ".xml":
@ -91,7 +91,7 @@ class Itunes(MetaSource):
)
else:
hint = ""
raise ConfigValueError("invalid iTunes library" + hint)
raise ConfigValueError(f"invalid iTunes library{hint}")
# Make the iTunes library queryable using the path
self.collection = {


@ -307,7 +307,7 @@ class MPDStats:
if "player" in events:
status = self.mpd.status()
handler = getattr(self, "on_" + status["state"], None)
handler = getattr(self, f"on_{status['state']}", None)
if handler:
handler(status)


@ -201,7 +201,7 @@ def _multi_artist_credit(
def track_url(trackid: str) -> str:
return urljoin(BASE_URL, "recording/" + trackid)
return urljoin(BASE_URL, f"recording/{trackid}")
def _flatten_artist_credit(credit: list[JSONDict]) -> tuple[str, str, str]:
@ -246,7 +246,7 @@ def _get_related_artist_names(relations, relation_type):
def album_url(albumid: str) -> str:
return urljoin(BASE_URL, "release/" + albumid)
return urljoin(BASE_URL, f"release/{albumid}")
def _preferred_release_event(
@ -291,7 +291,7 @@ def _set_date_str(
continue
if original:
key = "original_" + key
key = f"original_{key}"
setattr(info, key, date_num)


@ -154,7 +154,7 @@ class PlayPlugin(BeetsPlugin):
return f"{command_str} {args}"
else:
# Don't include the marker in the command.
return command_str.replace(" " + ARGS_MARKER, "")
return command_str.replace(f" {ARGS_MARKER}", "")
def _playlist_or_paths(self, paths):
"""Return either the raw paths of items or a playlist of the items."""


@ -66,7 +66,7 @@ def update_plex(host, port, token, library_name, secure, ignore_cert_errors):
def append_token(url, token):
"""Appends the Plex Home token to the api call if required."""
if token:
url += "?" + urlencode({"X-Plex-Token": token})
url += f"?{urlencode({'X-Plex-Token': token})}"
return url


@ -292,7 +292,9 @@ class SpotifyPlugin(
if not (spotify_id := self._extract_id(album_id)):
return None
album_data = self._handle_response("get", self.album_url + spotify_id)
album_data = self._handle_response(
"get", f"{self.album_url}{spotify_id}"
)
if album_data["name"] == "":
self._log.debug("Album removed from Spotify: {}", album_id)
return None
@ -408,7 +410,7 @@ class SpotifyPlugin(
# release) and `track.medium_total` (total number of tracks on
# the track's disc).
album_data = self._handle_response(
"get", self.album_url + track_data["album"]["id"]
"get", f"{self.album_url}{track_data['album']['id']}"
)
medium_total = 0
for i, track_data in enumerate(album_data["tracks"]["items"], start=1):
@ -447,7 +449,7 @@ class SpotifyPlugin(
except APIError as e:
self._log.debug("Spotify API error: {}", e)
return ()
response_data = response.get(query_type + "s", {}).get("items", [])
response_data = response.get(f"{query_type}s", {}).get("items", [])
self._log.debug(
"Found {} result(s) from {} for '{}'",
len(response_data),
@ -648,13 +650,13 @@ class SpotifyPlugin(
self._log.info(
f"Attempting to open {self.data_source} with playlist"
)
spotify_url = "spotify:trackset:Playlist:" + ",".join(
spotify_ids
spotify_url = (
f"spotify:trackset:Playlist:{','.join(spotify_ids)}"
)
webbrowser.open(spotify_url)
else:
for spotify_id in spotify_ids:
print(self.open_track_url + spotify_id)
print(f"{self.open_track_url}{spotify_id}")
else:
self._log.warning(
f"No {self.data_source} tracks found from beets query"
@ -702,7 +704,7 @@ class SpotifyPlugin(
def track_info(self, track_id: str):
"""Fetch a track's popularity and external IDs using its Spotify ID."""
track_data = self._handle_response("get", self.track_url + track_id)
track_data = self._handle_response("get", f"{self.track_url}{track_id}")
external_ids = track_data.get("external_ids", {})
popularity = track_data.get("popularity")
self._log.debug(
@ -721,7 +723,7 @@ class SpotifyPlugin(
"""Fetch track audio features by its Spotify ID."""
try:
return self._handle_response(
"get", self.audio_features_url + track_id
"get", f"{self.audio_features_url}{track_id}"
)
except APIError as e:
self._log.debug("Spotify API error: {}", e)


@ -180,5 +180,5 @@ class SubsonicPlaylistPlugin(BeetsPlugin):
for track in tracks:
if track not in output:
output[track] = ";"
output[track] += name + ";"
output[track] += f"{name};"
return output


@ -74,7 +74,7 @@ class SubsonicUpdate(BeetsPlugin):
# Pick the random sequence and salt the password
r = string.ascii_letters + string.digits
salt = "".join([random.choice(r) for _ in range(6)])
salted_password = password + salt
salted_password = f"{password}{salt}"
token = hashlib.md5(salted_password.encode("utf-8")).hexdigest()
# Put together the payload of the request to the server and the URL
@ -101,7 +101,7 @@ class SubsonicUpdate(BeetsPlugin):
context_path = ""
url = f"http://{host}:{port}{context_path}"
return url + f"/rest/{endpoint}"
return f"{url}/rest/{endpoint}"
def start_scan(self):
user = self.config["user"].as_str()


@ -230,8 +230,7 @@ def copy_c_string(c_string):
# This is a pretty dumb way to get a string copy, but it seems to
# work. A more surefire way would be to allocate a ctypes buffer and copy
# the data with `memcpy` or somesuch.
s = ctypes.cast(c_string, ctypes.c_char_p).value
return b"" + s
return ctypes.cast(c_string, ctypes.c_char_p).value
class GioURI(URIGetter):


@ -34,7 +34,7 @@ class Unimported(BeetsPlugin):
def commands(self):
def print_unimported(lib, opts, args):
ignore_exts = [
("." + x).encode()
f".{x}".encode()
for x in self.config["ignore_extensions"].as_str_seq()
]
ignore_dirs = [


@ -276,6 +276,7 @@ select = [
"F", # pyflakes
# "B", # flake8-bugbear
"I", # isort
"ISC", # flake8-implicit-str-concat
"N", # pep8-naming
"PT", # flake8-pytest-style
# "RUF", # ruff


@ -92,8 +92,8 @@ class CAAHelper:
RELEASE_URL = f"coverartarchive.org/release/{MBID_RELASE}"
GROUP_URL = f"coverartarchive.org/release-group/{MBID_GROUP}"
RELEASE_URL = "https://" + RELEASE_URL
GROUP_URL = "https://" + GROUP_URL
RELEASE_URL = f"https://{RELEASE_URL}"
GROUP_URL = f"https://{GROUP_URL}"
RESPONSE_RELEASE = """{
"images": [
@ -706,7 +706,7 @@ class FanartTVTest(UseThePlugin):
def test_fanarttv_finds_image(self):
album = _common.Bag(mb_releasegroupid="thereleasegroupid")
self.mock_response(
fetchart.FanartTV.API_ALBUMS + "thereleasegroupid",
f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid",
self.RESPONSE_MULTIPLE,
)
candidate = next(self.source.get(album, self.settings, []))
@ -715,7 +715,7 @@ class FanartTVTest(UseThePlugin):
def test_fanarttv_returns_no_result_when_error_received(self):
album = _common.Bag(mb_releasegroupid="thereleasegroupid")
self.mock_response(
fetchart.FanartTV.API_ALBUMS + "thereleasegroupid",
f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid",
self.RESPONSE_ERROR,
)
with pytest.raises(StopIteration):
@ -724,7 +724,7 @@ class FanartTVTest(UseThePlugin):
def test_fanarttv_returns_no_result_with_malformed_response(self):
album = _common.Bag(mb_releasegroupid="thereleasegroupid")
self.mock_response(
fetchart.FanartTV.API_ALBUMS + "thereleasegroupid",
f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid",
self.RESPONSE_MALFORMED,
)
with pytest.raises(StopIteration):
@ -734,7 +734,7 @@ class FanartTVTest(UseThePlugin):
# The source used to fail when there were images present, but no cover
album = _common.Bag(mb_releasegroupid="thereleasegroupid")
self.mock_response(
fetchart.FanartTV.API_ALBUMS + "thereleasegroupid",
f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid",
self.RESPONSE_NO_ART,
)
with pytest.raises(StopIteration):


@ -65,7 +65,7 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase):
if m.title.replace("Tag", "Applied") == item.title:
return m
raise AssertionError(
"No MediaFile found for Item " + displayable_path(item.path)
f"No MediaFile found for Item {displayable_path(item.path)}"
)
def test_import_album_with_added_dates(self):
@ -117,7 +117,7 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase):
for item_path, added_after in items_added_after.items():
assert items_added_before[item_path] == pytest.approx(
added_after, rel=1e-4
), "reimport modified Item.added for " + displayable_path(item_path)
), f"reimport modified Item.added for {displayable_path(item_path)}"
def test_import_singletons_with_added_dates(self):
self.config["import"]["singletons"] = True
@ -157,4 +157,4 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase):
for item_path, added_after in items_added_after.items():
assert items_added_before[item_path] == pytest.approx(
added_after, rel=1e-4
), "reimport modified Item.added for " + displayable_path(item_path)
), f"reimport modified Item.added for {displayable_path(item_path)}"


@ -42,8 +42,8 @@ class LimitPluginTest(PluginTestCase):
# a subset of tests has only `num_limit` results, identified by a
# range filter on the track number
self.track_head_range = "track:.." + str(self.num_limit)
self.track_tail_range = "track:" + str(self.num_limit + 1) + ".."
self.track_head_range = f"track:..{self.num_limit}"
self.track_tail_range = f"track:{self.num_limit + 1}.."
def test_no_limit(self):
"""Returns all when there is no limit or filter."""
@ -82,13 +82,13 @@ class LimitPluginTest(PluginTestCase):
def test_prefix_when_correctly_ordered(self):
"""Returns the expected number with the query prefix and filter when
the prefix portion (correctly) appears last."""
correct_order = self.track_tail_range + " " + self.num_limit_prefix
correct_order = f"{self.track_tail_range} {self.num_limit_prefix}"
result = self.lib.items(correct_order)
assert len(result) == self.num_limit
def test_prefix_when_incorrectly_ordred(self):
"""Returns no results with the query prefix and filter when the prefix
portion (incorrectly) appears first."""
incorrect_order = self.num_limit_prefix + " " + self.track_tail_range
incorrect_order = f"{self.num_limit_prefix} {self.track_tail_range}"
result = self.lib.items(incorrect_order)
assert len(result) == 0


@ -670,17 +670,17 @@ class ArtistFlatteningTest(unittest.TestCase):
def _credit_dict(self, suffix=""):
return {
"artist": {
"name": "NAME" + suffix,
"sort-name": "SORT" + suffix,
"name": f"NAME{suffix}",
"sort-name": f"SORT{suffix}",
},
"name": "CREDIT" + suffix,
"name": f"CREDIT{suffix}",
}
def _add_alias(self, credit_dict, suffix="", locale="", primary=False):
alias = {
"alias": "ALIAS" + suffix,
"alias": f"ALIAS{suffix}",
"locale": locale,
"sort-name": "ALIASSORT" + suffix,
"sort-name": f"ALIASSORT{suffix}",
}
if primary:
alias["primary"] = "primary"


@ -49,7 +49,7 @@ class PlayPluginTest(CleanupModulesMixin, PluginTestCase):
open_mock.assert_called_once_with(ANY, expected_cmd)
expected_playlist = expected_playlist or self.item.path.decode("utf-8")
exp_playlist = expected_playlist + "\n"
exp_playlist = f"{expected_playlist}\n"
with open(open_mock.call_args[0][0][0], "rb") as playlist:
assert exp_playlist == playlist.read().decode("utf-8")


@ -132,7 +132,7 @@ class PlaylistTestRelativeToLib(PlaylistQueryTest, PlaylistTestCase):
[
os.path.join("a", "b", "c.mp3") + "\n",
os.path.join("d", "e", "f.mp3") + "\n",
"nonexisting.mp3" + "\n",
"nonexisting.mp3\n",
]
)
@ -155,7 +155,7 @@ class PlaylistTestRelativeToDir(PlaylistQueryTest, PlaylistTestCase):
[
os.path.join("a", "b", "c.mp3") + "\n",
os.path.join("d", "e", "f.mp3") + "\n",
"nonexisting.mp3" + "\n",
"nonexisting.mp3\n",
]
)
@ -214,7 +214,7 @@ class PlaylistUpdateTest:
[
os.path.join("a", "b", "c.mp3") + "\n",
os.path.join("d", "e", "f.mp3") + "\n",
"nonexisting.mp3" + "\n",
"nonexisting.mp3\n",
]
)


@ -29,7 +29,7 @@ class PlexUpdateTest(PluginTestCase):
"</Directory>"
'<Directory allowSync="0" art="/:/resources/artist-fanart.jpg" '
'filters="1" refreshing="0" thumb="/:/resources/artist.png" '
'key="2" type="artist" title="' + escaped_section_name + '" '
f'key="2" type="artist" title="{escaped_section_name}" '
'composite="/library/sections/2/composite/1416929243" '
'agent="com.plexapp.agents.lastfm" scanner="Plex Music Scanner" '
'language="en" uuid="90897c95-b3bd-4778-a9c8-1f43cb78f047" '


@ -43,7 +43,7 @@ list_field_extension = mediafile.ListMediaField(
class ExtendedFieldTestMixin(BeetsTestCase):
def _mediafile_fixture(self, name, extension="mp3"):
name = bytestring_path(name + "." + extension)
name = bytestring_path(f"{name}.{extension}")
src = os.path.join(_common.RSRC, name)
target = os.path.join(self.temp_dir, name)
shutil.copy(syspath(src), syspath(target))


@ -227,11 +227,10 @@ class SmartPlaylistTest(BeetsTestCase):
content = m3u_filepath.read_bytes()
rmtree(syspath(dir))
assert (
content
== b"#EXTM3U\n"
+ b"#EXTINF:300,fake artist - fake title\n"
+ b"http://beets:8337/files/tagada.mp3\n"
assert content == (
b"#EXTM3U\n"
b"#EXTINF:300,fake artist - fake title\n"
b"http://beets:8337/files/tagada.mp3\n"
)
def test_playlist_update_output_extm3u_fields(self):
@ -278,11 +277,10 @@ class SmartPlaylistTest(BeetsTestCase):
content = m3u_filepath.read_bytes()
rmtree(syspath(dir))
assert (
content
== b"#EXTM3U\n"
+ b'#EXTINF:300 id="456" genre="Fake%20Genre",Fake Artist - fake Title\n'
+ b"/tagada.mp3\n"
assert content == (
b"#EXTM3U\n"
b'#EXTINF:300 id="456" genre="Fake%20Genre",Fake Artist - fake Title\n'
b"/tagada.mp3\n"
)
def test_playlist_update_uri_format(self):


@ -132,7 +132,7 @@ class SpotifyPluginTest(PluginTestCase):
responses.add(
responses.GET,
spotify.SpotifyPlugin.track_url + "6NPVjNh8Jhru9xOmyQigds",
f"{spotify.SpotifyPlugin.track_url}6NPVjNh8Jhru9xOmyQigds",
body=response_body,
status=200,
content_type="application/json",
@ -145,7 +145,7 @@ class SpotifyPluginTest(PluginTestCase):
responses.add(
responses.GET,
spotify.SpotifyPlugin.album_url + "5l3zEmMrOhOzG8d8s83GOL",
f"{spotify.SpotifyPlugin.album_url}5l3zEmMrOhOzG8d8s83GOL",
body=response_body,
status=200,
content_type="application/json",


@ -55,8 +55,10 @@ class SubstitutePluginTest(PluginTestCase):
[
("King Creosote & Jon Hopkins", "King Creosote"),
(
"Michael Hurley, The Holy Modal Rounders, Jeffrey Frederick & "
+ "The Clamtones",
(
"Michael Hurley, The Holy Modal Rounders, Jeffrey"
" Frederick & The Clamtones"
),
"Michael Hurley",
),
("James Yorkston and the Athletes", "James Yorkston"),


@ -142,7 +142,7 @@ class WebPluginTest(ItemInDBTestCase):
def test_get_single_item_by_path(self):
data_path = os.path.join(_common.RSRC, b"full.mp3")
self.lib.add(Item.from_path(data_path))
response = self.client.get("/item/path/" + data_path.decode("utf-8"))
response = self.client.get(f"/item/path/{data_path.decode('utf-8')}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
@ -152,12 +152,11 @@ class WebPluginTest(ItemInDBTestCase):
data_path = os.path.join(_common.RSRC, b"full.mp3")
# data_path points to a valid file, but we have not added the file
# to the library.
response = self.client.get("/item/path/" + data_path.decode("utf-8"))
response = self.client.get(f"/item/path/{data_path.decode('utf-8')}")
assert response.status_code == 404
def test_get_item_empty_query(self):
"""testing item query: <empty>"""
response = self.client.get("/item/query/")
res_json = json.loads(response.data.decode("utf-8"))
@ -165,7 +164,6 @@ class WebPluginTest(ItemInDBTestCase):
assert len(res_json["items"]) == 3
def test_get_simple_item_query(self):
"""testing item query: another"""
response = self.client.get("/item/query/another")
res_json = json.loads(response.data.decode("utf-8"))
@ -174,8 +172,7 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["results"][0]["title"] == "another title"
def test_query_item_string(self):
"""testing item query: testattr:ABC"""
response = self.client.get("/item/query/testattr%3aABC")
response = self.client.get("/item/query/testattr%3aABC") # testattr:ABC
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
@@ -183,8 +180,9 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["results"][0]["title"] == "and a third"
def test_query_item_regex(self):
"""testing item query: testattr::[A-C]+"""
response = self.client.get("/item/query/testattr%3a%3a[A-C]%2b")
response = self.client.get(
"/item/query/testattr%3a%3a[A-C]%2b"
) # testattr::[A-C]+
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
@@ -192,8 +190,9 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["results"][0]["title"] == "and a third"
def test_query_item_regex_backslash(self):
# """ testing item query: testattr::\w+ """
response = self.client.get("/item/query/testattr%3a%3a%5cw%2b")
response = self.client.get(
"/item/query/testattr%3a%3a%5cw%2b"
) # testattr::\w+
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
@@ -201,7 +200,6 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["results"][0]["title"] == "and a third"
def test_query_item_path(self):
# """ testing item query: path:\somewhere\a """
"""Note: path queries are special: the query item must match the path
from the root all the way to a directory, so this matches 1 item"""
""" Note: filesystem separators in the query must be '\' """
@@ -267,8 +265,9 @@ class WebPluginTest(ItemInDBTestCase):
assert response_track_titles == {"title", "and a third"}
def test_query_album_string(self):
"""testing query: albumtest:xy"""
response = self.client.get("/album/query/albumtest%3axy")
response = self.client.get(
"/album/query/albumtest%3axy"
) # albumtest:xy
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
@@ -276,8 +275,9 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["results"][0]["album"] == "album"
def test_query_album_artpath_regex(self):
"""testing query: artpath::art_"""
response = self.client.get("/album/query/artpath%3a%3aart_")
response = self.client.get(
"/album/query/artpath%3a%3aart_"
) # artpath::art_
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
@@ -285,8 +285,9 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["results"][0]["album"] == "other album"
def test_query_album_regex_backslash(self):
# """ testing query: albumtest::\w+ """
response = self.client.get("/album/query/albumtest%3a%3a%5cw%2b")
response = self.client.get(
"/album/query/albumtest%3a%3a%5cw%2b"
) # albumtest::\w+
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
@@ -310,18 +311,18 @@ class WebPluginTest(ItemInDBTestCase):
)
# Check we can find the temporary item we just created
response = self.client.get("/item/" + str(item_id))
response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
assert res_json["id"] == item_id
# Delete item by id
response = self.client.delete("/item/" + str(item_id))
response = self.client.delete(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
# Check the item has gone
response = self.client.get("/item/" + str(item_id))
response = self.client.get(f"/item/{item_id}")
assert response.status_code == 404
# Note: if this fails, the item may still be around
# and may cause other tests to fail
@@ -336,18 +337,18 @@ class WebPluginTest(ItemInDBTestCase):
item_id = self.lib.add(Item.from_path(ipath))
# Check we can find the temporary item we just created
response = self.client.get("/item/" + str(item_id))
response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
assert res_json["id"] == item_id
# Delete item by id, without deleting file
response = self.client.delete("/item/" + str(item_id))
response = self.client.delete(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
# Check the item has gone
response = self.client.get("/item/" + str(item_id))
response = self.client.get(f"/item/{item_id}")
assert response.status_code == 404
# Check the file has not gone
@@ -364,18 +365,18 @@ class WebPluginTest(ItemInDBTestCase):
item_id = self.lib.add(Item.from_path(ipath))
# Check we can find the temporary item we just created
response = self.client.get("/item/" + str(item_id))
response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
assert res_json["id"] == item_id
# Delete item by id, with file
response = self.client.delete("/item/" + str(item_id) + "?delete")
response = self.client.delete(f"/item/{item_id}?delete")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
# Check the item has gone
response = self.client.get("/item/" + str(item_id))
response = self.client.get(f"/item/{item_id}")
assert response.status_code == 404
# Check the file has gone
@@ -427,17 +428,17 @@ class WebPluginTest(ItemInDBTestCase):
)
# Check we can find the temporary item we just created
response = self.client.get("/item/" + str(item_id))
response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
assert res_json["id"] == item_id
# Try to delete item by id
response = self.client.delete("/item/" + str(item_id))
response = self.client.delete(f"/item/{item_id}")
assert response.status_code == 405
# Check the item has not gone
response = self.client.get("/item/" + str(item_id))
response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
assert res_json["id"] == item_id
@@ -481,18 +482,18 @@ class WebPluginTest(ItemInDBTestCase):
)
# Check we can find the temporary album we just created
response = self.client.get("/album/" + str(album_id))
response = self.client.get(f"/album/{album_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
assert res_json["id"] == album_id
# Delete album by id
response = self.client.delete("/album/" + str(album_id))
response = self.client.delete(f"/album/{album_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
# Check the album has gone
response = self.client.get("/album/" + str(album_id))
response = self.client.get(f"/album/{album_id}")
assert response.status_code == 404
# Note: if this fails, the album may still be around
# and may cause other tests to fail
@@ -543,17 +544,17 @@ class WebPluginTest(ItemInDBTestCase):
)
# Check we can find the temporary album we just created
response = self.client.get("/album/" + str(album_id))
response = self.client.get(f"/album/{album_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
assert res_json["id"] == album_id
# Try to delete album by id
response = self.client.delete("/album/" + str(album_id))
response = self.client.delete(f"/album/{album_id}")
assert response.status_code == 405
# Check the item has not gone
response = self.client.get("/album/" + str(album_id))
response = self.client.get(f"/album/{album_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
assert res_json["id"] == album_id
@@ -603,7 +604,7 @@ class WebPluginTest(ItemInDBTestCase):
)
# Check we can find the temporary item we just created
response = self.client.get("/item/" + str(item_id))
response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
assert res_json["id"] == item_id
@@ -613,7 +614,7 @@ class WebPluginTest(ItemInDBTestCase):
# Patch item by id
# patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"}]})
response = self.client.patch(
"/item/" + str(item_id), json={"test_patch_f2": "New"}
f"/item/{item_id}", json={"test_patch_f2": "New"}
)
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
@@ -622,7 +623,7 @@ class WebPluginTest(ItemInDBTestCase):
assert res_json["test_patch_f2"] == "New"
# Check the update has really worked
response = self.client.get("/item/" + str(item_id))
response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
assert res_json["id"] == item_id
@@ -647,7 +648,7 @@ class WebPluginTest(ItemInDBTestCase):
)
# Check we can find the temporary item we just created
response = self.client.get("/item/" + str(item_id))
response = self.client.get(f"/item/{item_id}")
res_json = json.loads(response.data.decode("utf-8"))
assert response.status_code == 200
assert res_json["id"] == item_id
@@ -657,7 +658,7 @@ class WebPluginTest(ItemInDBTestCase):
# Patch item by id
# patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"})
response = self.client.patch(
"/item/" + str(item_id), json={"test_patch_f2": "New"}
f"/item/{item_id}", json={"test_patch_f2": "New"}
)
assert response.status_code == 405
@@ -670,6 +671,6 @@ class WebPluginTest(ItemInDBTestCase):
assert os.path.exists(ipath)
item_id = self.lib.add(Item.from_path(ipath))
response = self.client.get("/item/" + str(item_id) + "/file")
response = self.client.get(f"/item/{item_id}/file")
assert response.status_code == 200
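The remaining web-plugin hunks interpolate an integer id. An f-string formats the value with the default format, which for an int is the same as str(), so f"/item/{item_id}" matches the old "/item/" + str(item_id) exactly, query strings and suffixes included. A minimal sketch with an illustrative id:

    item_id = 42  # illustrative; the tests use whatever lib.add() returns
    assert f"/item/{item_id}" == "/item/" + str(item_id)
    assert f"/item/{item_id}?delete" == "/item/" + str(item_id) + "?delete"
    assert f"/item/{item_id}/file" == "/item/" + str(item_id) + "/file"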

View file

@@ -150,9 +150,5 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, BeetsTestCase):
metadata = {"a": "A", "b": "B"}
im = DummyIMBackend()
im.write_metadata("foo", metadata)
try:
command = im.convert_cmd + "foo -set a A -set b B foo".split()
mock_util.command_output.assert_called_once_with(command)
except AssertionError:
command = im.convert_cmd + "foo -set b B -set a A foo".split()
mock_util.command_output.assert_called_once_with(command)
command = [*im.convert_cmd, *"foo -set a A -set b B foo".split()]
mock_util.command_output.assert_called_once_with(command)
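Besides the string changes, this hunk drops the try/except that tolerated either "-set" ordering and asserts a single command built with iterable unpacking: [*im.convert_cmd, *args] is the list equivalent of im.convert_cmd + args. A minimal sketch with an illustrative convert_cmd (the real value comes from the backend under test):

    convert_cmd = ["convert"]  # illustrative stand-in
    args = "foo -set a A -set b B foo".split()
    # Unpacking both iterables into a new list matches plain list concatenation.
    assert [*convert_cmd, *args] == convert_cmd + args
    assert [*convert_cmd, *args] == [
        "convert", "foo", "-set", "a", "A", "-set", "b", "B", "foo",
    ]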

View file

@@ -186,37 +186,37 @@ class DateQueryTestRelativeMore(ItemInDBTestCase):
def test_relative(self):
for timespan in ["d", "w", "m", "y"]:
query = DateQuery("added", "-4" + timespan + "..+4" + timespan)
query = DateQuery("added", f"-4{timespan}..+4{timespan}")
matched = self.lib.items(query)
assert len(matched) == 1
def test_relative_fail(self):
for timespan in ["d", "w", "m", "y"]:
query = DateQuery("added", "-2" + timespan + "..-1" + timespan)
query = DateQuery("added", f"-2{timespan}..-1{timespan}")
matched = self.lib.items(query)
assert len(matched) == 0
def test_start_relative(self):
for timespan in ["d", "w", "m", "y"]:
query = DateQuery("added", "-4" + timespan + "..")
query = DateQuery("added", f"-4{timespan}..")
matched = self.lib.items(query)
assert len(matched) == 1
def test_start_relative_fail(self):
for timespan in ["d", "w", "m", "y"]:
query = DateQuery("added", "4" + timespan + "..")
query = DateQuery("added", f"4{timespan}..")
matched = self.lib.items(query)
assert len(matched) == 0
def test_end_relative(self):
for timespan in ["d", "w", "m", "y"]:
query = DateQuery("added", "..+4" + timespan)
query = DateQuery("added", f"..+4{timespan}")
matched = self.lib.items(query)
assert len(matched) == 1
def test_end_relative_fail(self):
for timespan in ["d", "w", "m", "y"]:
query = DateQuery("added", "..-4" + timespan)
query = DateQuery("added", f"..-4{timespan}")
matched = self.lib.items(query)
assert len(matched) == 0
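For reference, the rewritten queries just splice a sign, a count, and a unit letter into the relative-date range syntax, so f"-4{timespan}..+4{timespan}" yields strings like "-4w..+4w". A minimal sketch of the strings being built:

    timespan = "w"  # any of "d", "w", "m", "y"
    assert f"-4{timespan}..+4{timespan}" == "-4w..+4w"
    assert f"-4{timespan}.." == "-4w.."
    assert f"..+4{timespan}" == "..+4w"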

View file

@@ -58,9 +58,9 @@ class LoggingLevelTest(AsIsImporterMixin, PluginMixin, ImportTestCase):
self.register_listener("dummy_event", self.listener)
def log_all(self, name):
self._log.debug("debug " + name)
self._log.info("info " + name)
self._log.warning("warning " + name)
self._log.debug(f"debug {name}")
self._log.info(f"info {name}")
self._log.warning(f"warning {name}")
def commands(self):
cmd = ui.Subcommand("dummy")
@@ -172,9 +172,9 @@ class ConcurrentEventsTest(AsIsImporterMixin, ImportTestCase):
self.t1_step = self.t2_step = 0
def log_all(self, name):
self._log.debug("debug " + name)
self._log.info("info " + name)
self._log.warning("warning " + name)
self._log.debug(f"debug {name}")
self._log.info(f"info {name}")
self._log.warning(f"warning {name}")
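A side effect worth flagging: beets plugin loggers historically accept lazy str.format-style arguments (e.g. self._log.debug("debug {}", name)), while an f-string is rendered eagerly before the call. That is fine for these test helpers, which call the logger unconditionally. A minimal sketch of the eager/lazy distinction using the standard library logger rather than the beets one:

    import logging

    logging.basicConfig(level=logging.WARNING)
    log = logging.getLogger("demo")
    name = "dummy_event"

    log.debug(f"debug {name}")   # the message is built even though DEBUG is filtered out
    log.debug("debug %s", name)  # interpolated only if the record is actually emitted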
def listener1(self):
try:

View file

@@ -1257,7 +1257,7 @@ class ShowChangeTest(IOMixin, unittest.TestCase):
with patch("beets.ui.commands.ui.term_width", return_value=30):
# Test newline layout
config["ui"]["import"]["layout"] = "newline"
long_name = "another artist with a" + (" very" * 10) + " long name"
long_name = f"another artist with a{' very' * 10} long name"
msg = self._show_change(
cur_artist=long_name, cur_album="another album"
)
@@ -1270,7 +1270,7 @@ class ShowChangeTest(IOMixin, unittest.TestCase):
with patch("beets.ui.commands.ui.term_width", return_value=54):
# Test Column layout
config["ui"]["import"]["layout"] = "column"
long_title = "a track with a" + (" very" * 10) + " long name"
long_title = f"a track with a{' very' * 10} long name"
self.items[0].title = long_title
msg = self._show_change()
assert "(#1) a track (1:00) -> (#1) the title (0:00)" in msg
@@ -1279,7 +1279,7 @@ class ShowChangeTest(IOMixin, unittest.TestCase):
# Patch ui.term_width to force wrapping
with patch("beets.ui.commands.ui.term_width", return_value=30):
config["ui"]["import"]["layout"] = "newline"
long_title = "a track with a" + (" very" * 10) + " long name"
long_title = f"a track with a{' very' * 10} long name"
self.items[0].title = long_title
msg = self._show_change()
assert "(#1) a track with" in msg