Enable RUF rules

Šarūnas Nejus 2025-12-28 01:05:02 +00:00
parent 078ffc1c57
commit c52656fb0a
40 changed files with 118 additions and 112 deletions

View file

@ -250,7 +250,7 @@ def parse_sorted_query(
# Split up query into comma-separated subqueries, each representing
# an AndQuery, which need to be joined together in one OrQuery
subquery_parts = []
for part in parts + [","]:
for part in [*parts, ","]:
if part.endswith(","):
# Ensure we can catch "foo, bar" as well as "foo , bar"
last_subquery_part = part[:-1]
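
Most of the changes in this commit follow the shape of the hunk above: Ruff's RUF005 rule replaces concatenation of a short literal onto an existing sequence with iterable unpacking. A minimal sketch of the pattern, using made-up names rather than beets code:

parts = ["foo,", "bar"]

# Concatenation builds a throwaway list on the right-hand side...
legacy = parts + [","]

# ...while unpacking (the form RUF005 prefers) spells out the new list directly.
preferred = [*parts, ","]

assert legacy == preferred == ["foo,", "bar", ","]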

View file

@ -28,11 +28,11 @@ from .tasks import (
# Note: Stages are not exposed to the public API
__all__ = [
"ImportSession",
"ImportAbortError",
"Action",
"ImportTask",
"ArchiveImportTask",
"ImportAbortError",
"ImportSession",
"ImportTask",
"SentinelImportTask",
"SingletonImportTask",
]

View file

@ -388,5 +388,5 @@ def _extend_pipeline(tasks, *stages):
else:
task_iter = tasks
ipl = pipeline.Pipeline([task_iter] + list(stages))
ipl = pipeline.Pipeline([task_iter, *list(stages)])
return pipeline.multiple(ipl.pull())

View file

@ -234,7 +234,7 @@ class ImportTask(BaseImportTask):
or APPLY (in which case the data comes from the choice).
"""
if self.choice_flag in (Action.ASIS, Action.RETAG):
likelies, consensus = util.get_most_common_tags(self.items)
likelies, _ = util.get_most_common_tags(self.items)
return likelies
elif self.choice_flag is Action.APPLY and self.match:
return self.match.info.copy()
@ -892,7 +892,7 @@ class ArchiveImportTask(SentinelImportTask):
# The (0, 0, -1) is added to date_time because the
# function time.mktime expects a 9-element tuple.
# The -1 indicates that the DST flag is unknown.
date_time = time.mktime(f.date_time + (0, 0, -1))
date_time = time.mktime((*f.date_time, 0, 0, -1))
fullpath = os.path.join(extract_to, f.filename)
os.utime(fullpath, (date_time, date_time))
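
The comment above explains the API constraint: zipfile stores date_time as a 6-tuple, while time.mktime() expects the full nine fields of a struct_time. A standalone sketch with an illustrative timestamp (not taken from the diff):

import time

# zipfile.ZipInfo.date_time is (year, month, day, hour, minute, second).
date_time = (2025, 12, 28, 1, 5, 2)

# Pad weekday and yearday with 0 and mark the DST flag as unknown (-1) so the
# tuple has the nine elements time.mktime() requires.
timestamp = time.mktime((*date_time, 0, 0, -1))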

View file

@ -17,13 +17,13 @@ def __getattr__(name: str):
__all__ = [
"Library",
"LibModel",
"Album",
"Item",
"parse_query_parts",
"parse_query_string",
"FileOperationError",
"Item",
"LibModel",
"Library",
"ReadError",
"WriteError",
"parse_query_parts",
"parse_query_string",
]

View file

@ -161,7 +161,7 @@ class BeetsPlugin(metaclass=abc.ABCMeta):
import_stages: list[ImportStageFunc]
def __init_subclass__(cls) -> None:
"""Enable legacy metadatasource plugins to work with the new interface.
"""Enable legacy metadata source plugins to work with the new interface.
When a plugin subclass of BeetsPlugin defines a `data_source` attribute
but does not inherit from MetadataSourcePlugin, this hook:

View file

@ -120,7 +120,7 @@ def capture_stdout():
def has_program(cmd, args=["--version"]):
"""Returns `True` if `cmd` can be executed."""
full_cmd = [cmd] + args
full_cmd = [cmd, *args]
try:
with open(os.devnull, "wb") as devnull:
subprocess.check_call(

View file

@ -339,13 +339,9 @@ class ChangeRepresentation:
max_width_l = max(get_width(line_tuple[0]) for line_tuple in lines)
max_width_r = max(get_width(line_tuple[1]) for line_tuple in lines)
if (
(max_width_l <= col_width)
and (max_width_r <= col_width)
or (
((max_width_l > col_width) or (max_width_r > col_width))
and ((max_width_l + max_width_r) <= col_width * 2)
)
if ((max_width_l <= col_width) and (max_width_r <= col_width)) or (
((max_width_l > col_width) or (max_width_r > col_width))
and ((max_width_l + max_width_r) <= col_width * 2)
):
# All content fits. Either both maximum widths are below column
# widths, or one of the columns is larger than allowed but the
@ -559,7 +555,7 @@ def penalty_string(distance: Distance, limit: int | None = None) -> str:
penalties.append(key)
if penalties:
if limit and len(penalties) > limit:
penalties = penalties[:limit] + ["..."]
penalties = [*penalties[:limit], "..."]
# Prefix penalty string with U+2260: Not Equal To
penalty_string = f"\u2260 {', '.join(penalties)}"
return ui.colorize("changed", penalty_string)
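
For the column-fit check reformatted earlier in this file, a short worked example of the two branches of the condition (the widths are invented):

col_width = 40

# Branch 1: both sides fit inside their own column.
max_width_l, max_width_r = 30, 20
assert max_width_l <= col_width and max_width_r <= col_width

# Branch 2: one side overflows its column, but the pair still fits in the
# combined width, so the wide column can borrow space from the narrow one.
max_width_l, max_width_r = 55, 20
assert (max_width_l > col_width or max_width_r > col_width) and (
    max_width_l + max_width_r <= col_width * 2
)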

View file

@ -256,13 +256,11 @@ class TerminalImportSession(importer.ImportSession):
# Add a "dummy" choice for the other baked-in option, for
# duplicate checking.
all_choices = (
[
PromptChoice("a", "Apply", None),
]
+ choices
+ extra_choices
)
all_choices = [
PromptChoice("a", "Apply", None),
*choices,
*extra_choices,
]
# Check for conflicts.
short_letters = [c.short for c in all_choices]
@ -501,7 +499,7 @@ def choose_candidate(
if config["import"]["bell"]:
ui.print_("\a", end="")
sel = ui.input_options(
("Apply", "More candidates") + choice_opts,
("Apply", "More candidates", *choice_opts),
require=require,
default=default,
)

View file

@ -15,7 +15,7 @@ def write_items(lib, query, pretend, force):
"""Write tag information from the database to the respective files
in the filesystem.
"""
items, albums = do_query(lib, query, False, False)
items, _ = do_query(lib, query, False, False)
for item in items:
# Item deleted?

View file

@ -268,7 +268,8 @@ class IMBackend(LocalBackend):
# with regards to the height.
# ImageMagick already seems to default to no interlace, but we include
# it here for the sake of explicitness.
cmd: list[str] = self.convert_cmd + [
cmd: list[str] = [
*self.convert_cmd,
syspath(path_in, prefix=False),
"-resize",
f"{maxwidth}x>",
@ -298,7 +299,8 @@ class IMBackend(LocalBackend):
return path_out
def get_size(self, path_in: bytes) -> tuple[int, int] | None:
cmd: list[str] = self.identify_cmd + [
cmd: list[str] = [
*self.identify_cmd,
"-format",
"%w %h",
syspath(path_in, prefix=False),
@ -336,7 +338,8 @@ class IMBackend(LocalBackend):
if not path_out:
path_out = get_temp_filename(__name__, "deinterlace_IM_", path_in)
cmd = self.convert_cmd + [
cmd = [
*self.convert_cmd,
syspath(path_in, prefix=False),
"-interlace",
"none",
@ -351,7 +354,7 @@ class IMBackend(LocalBackend):
return path_in
def get_format(self, path_in: bytes) -> str | None:
cmd = self.identify_cmd + ["-format", "%[magick]", syspath(path_in)]
cmd = [*self.identify_cmd, "-format", "%[magick]", syspath(path_in)]
try:
# Image formats should really only be ASCII strings such as "PNG",
@ -368,7 +371,8 @@ class IMBackend(LocalBackend):
target: bytes,
deinterlaced: bool,
) -> bytes:
cmd = self.convert_cmd + [
cmd = [
*self.convert_cmd,
syspath(source),
*(["-interlace", "none"] if deinterlaced else []),
syspath(target),
@ -400,14 +404,16 @@ class IMBackend(LocalBackend):
# to grayscale and then pipe them into the `compare` command.
# On Windows, ImageMagick doesn't support the magic \\?\ prefix
# on paths, so we pass `prefix=False` to `syspath`.
convert_cmd = self.convert_cmd + [
convert_cmd = [
*self.convert_cmd,
syspath(im2, prefix=False),
syspath(im1, prefix=False),
"-colorspace",
"gray",
"MIFF:-",
]
compare_cmd = self.compare_cmd + [
compare_cmd = [
*self.compare_cmd,
"-define",
"phash:colorspaces=sRGB,HCLp",
"-metric",
@ -487,7 +493,7 @@ class IMBackend(LocalBackend):
("-set", k, v) for k, v in metadata.items()
)
str_file = os.fsdecode(file)
command = self.convert_cmd + [str_file, *assignments, str_file]
command = [*self.convert_cmd, str_file, *assignments, str_file]
util.command_output(command)
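
The ImageMagick hunks in this file all replace concatenation onto the stored base command (convert_cmd, identify_cmd, compare_cmd) with a fresh list built via unpacking before it is run. A rough sketch of that pattern, assuming a plain `convert` binary and subprocess in place of beets' own command helpers:

import subprocess

convert_cmd = ["convert"]  # assumed base command; IMBackend resolves the real one

cmd = [
    *convert_cmd,
    "input.jpg",
    "-resize",
    "1000x>",  # shrink only when the image is wider than 1000 pixels
    "output.jpg",
]
subprocess.check_call(cmd)
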
@ -828,7 +834,7 @@ class ArtResizer:
"jpeg": "jpg",
}.get(new_format, new_format)
fname, ext = os.path.splitext(path_in)
fname, _ = os.path.splitext(path_in)
path_new = fname + b"." + new_format.encode("utf8")
# allows the exception to propagate, while still making sure a changed

View file

@ -192,7 +192,7 @@ def stage(
task: R | T | None = None
while True:
task = yield task
task = func(*(args + (task,)))
task = func(*args, task)
return coro
@ -216,7 +216,7 @@ def mutator_stage(func: Callable[[Unpack[A], T], R]):
task = None
while True:
task = yield task
func(*(args + (task,)))
func(*args, task)
return coro
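
Both coroutine helpers above drop the intermediate tuple: rather than concatenating `args + (task,)` and unpacking the result, the task is passed as one extra positional argument after `*args`. The two spellings are equivalent, as a tiny illustrative sketch shows:

def func(a, b, task):
    return (a, b, task)

args = (1, 2)
task = "task"

assert func(*(args + (task,))) == func(*args, task) == (1, 2, "task")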

View file

@ -1037,7 +1037,7 @@ class Command:
raise BPDError(ERROR_PERMISSION, "insufficient privileges")
try:
args = [conn] + self.args
args = [conn, *self.args]
results = func(*args)
if results:
for data in results:

View file

@ -37,7 +37,7 @@ except ValueError as e:
# makes it so the test collector functions as intended.
raise ImportError from e
from gi.repository import GLib, Gst # noqa: E402
from gi.repository import GLib, Gst
Gst.init(None)
@ -115,7 +115,7 @@ class GstPlayer:
elif message.type == Gst.MessageType.ERROR:
# error
self.player.set_state(Gst.State.NULL)
err, debug = message.parse_error()
err, _ = message.parse_error()
print(f"Error: {err}")
self.playing = False
@ -205,7 +205,7 @@ class GstPlayer:
def seek(self, position):
"""Seeks to position (in seconds)."""
cur_pos, cur_len = self.time()
_, cur_len = self.time()
if position > cur_len:
self.stop()
return

View file

@ -73,7 +73,7 @@ class BPSyncPlugin(BeetsPlugin):
"""Retrieve and apply info from the autotagger for items matched by
query.
"""
for item in lib.items(query + ["singleton:true"]):
for item in lib.items([*query, "singleton:true"]):
if not item.mb_trackid:
self._log.info(
"Skipping singleton with no mb_trackid: {}", item

View file

@ -274,7 +274,7 @@ class ConvertPlugin(BeetsPlugin):
pretend,
hardlink,
link,
playlist,
_,
force,
) = self._get_opts_and_config(empty_opts)

View file

@ -572,7 +572,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
processed = self._process_clean_tracklist(
clean_tracklist, album_artist_data
)
tracks, index_tracks, index, divisions, next_divisions = processed
tracks, index_tracks, *_ = processed
# Fix up medium and medium_index for each track. Discogs position is
# unreliable, but tracks are in order.
medium = None
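
In the hunk above, only the first two elements of the processed tuple are still needed, so starred unpacking with `*_` swallows the rest. A tiny sketch with placeholder values standing in for the real tracklist data:

processed = ("tracks", "index_tracks", "index", "divisions", "next_divisions")
tracks, index_tracks, *_ = processed
assert (tracks, index_tracks) == ("tracks", "index_tracks")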

View file

@ -148,7 +148,7 @@ class ExportPlugin(BeetsPlugin):
album=opts.album,
):
try:
data, item = data_emitter(included_keys or "*")
data, _ = data_emitter(included_keys or "*")
except (mediafile.UnreadableFileError, OSError) as ex:
self._log.error("cannot read file: {}", ex)
continue

View file

@ -867,7 +867,7 @@ class ITunesStore(RemoteArtSource):
)
except KeyError as e:
self._log.debug(
"Malformed itunes candidate: {} not found in {}", # NOQA E501
"Malformed itunes candidate: {} not found in {}",
e,
list(c.keys()),
)

View file

@ -88,7 +88,7 @@ def apply_matches(d, log):
"""Given a mapping from items to field dicts, apply the fields to
the objects.
"""
some_map = list(d.values())[0]
some_map = next(iter(d.values()))
keys = some_map.keys()
# Only proceed if the "tag" field is equal across all filenames.
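
Taking `next(iter(d.values()))` avoids materialising a full list just to read one element, which is what RUF015 flags. A self-contained sketch with a made-up mapping:

d = {"track1.mp3": {"tag": "a"}, "track2.mp3": {"tag": "a"}}

# Both expressions yield the first value of the dict...
assert list(d.values())[0] == next(iter(d.values())) == {"tag": "a"}
# ...but next(iter(...)) stops after one item instead of copying them all.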

View file

@ -62,7 +62,7 @@ class KeyFinderPlugin(BeetsPlugin):
try:
output = util.command_output(
command + [util.syspath(item.path)]
[*command, util.syspath(item.path)]
).stdout
except (subprocess.CalledProcessError, OSError) as exc:
self._log.error("execution failed: {}", exc)

View file

@ -68,12 +68,12 @@ def flatten_tree(
if isinstance(elem, dict):
for k, v in elem.items():
flatten_tree(v, path + [k], branches)
flatten_tree(v, [*path, k], branches)
elif isinstance(elem, list):
for sub in elem:
flatten_tree(sub, path, branches)
else:
branches.append(path + [str(elem)])
branches.append([*path, str(elem)])
def find_parents(candidate: str, branches: list[list[str]]) -> list[str]:

View file

@ -69,7 +69,7 @@ class MBSubmitPlugin(BeetsPlugin):
paths.append(displayable_path(p))
try:
picard_path = self.config["picard_path"].as_str()
subprocess.Popen([picard_path] + paths)
subprocess.Popen([picard_path, *paths])
self._log.info("launched picard from\n{}", picard_path)
except OSError as exc:
self._log.error("Could not open picard, got error:\n{}", exc)

View file

@ -71,7 +71,7 @@ class MBSyncPlugin(BeetsPlugin):
"""Retrieve and apply info from the autotagger for items matched by
query.
"""
for item in lib.items(query + ["singleton:true"]):
for item in lib.items([*query, "singleton:true"]):
if not item.mb_trackid:
self._log.info(
"Skipping singleton with no mb_trackid: {}", item

View file

@ -642,11 +642,11 @@ class CommandBackend(Backend):
cmd: list[str] = [self.command, "-o", "-s", "s"]
if self.noclip:
# Adjust to avoid clipping.
cmd = cmd + ["-k"]
cmd = [*cmd, "-k"]
else:
# Disable clipping warning.
cmd = cmd + ["-c"]
cmd = cmd + ["-d", str(int(target_level - 89))]
cmd = [*cmd, "-c"]
cmd = [*cmd, "-d", str(int(target_level - 89))]
cmd = cmd + [syspath(i.path) for i in items]
self._log.debug("analyzing {} files", len(items))
@ -1105,7 +1105,7 @@ class AudioToolsBackend(Backend):
# The first item is taken and opened to get the sample rate to
# initialize the replaygain object. The object is used for all the
# tracks in the album to get the album values.
item = list(task.items)[0]
item = next(iter(task.items))
audiofile = self.open_audio_file(item)
rg = self.init_replaygain(audiofile, item)

View file

@ -58,9 +58,9 @@ class ThePlugin(BeetsPlugin):
p,
)
if self.config["a"]:
self.patterns = [PATTERN_A] + self.patterns
self.patterns = [PATTERN_A, *self.patterns]
if self.config["the"]:
self.patterns = [PATTERN_THE] + self.patterns
self.patterns = [PATTERN_THE, *self.patterns]
if not self.patterns:
self._log.warning("no patterns defined!")

View file

@ -306,7 +306,7 @@ select = [
"ISC", # flake8-implicit-str-concat
"N", # pep8-naming
"PT", # flake8-pytest-style
# "RUF", # ruff
"RUF", # ruff
"UP", # pyupgrade
"TC", # flake8-type-checking
"W", # pycodestyle
@ -320,6 +320,8 @@ ignore = [
"test/plugins/test_ftintitle.py" = ["E501"]
"test/test_util.py" = ["E501"]
"test/ui/test_field_diff.py" = ["E501"]
"test/util/test_id_extractors.py" = ["E501"]
"test/**" = ["RUF001"] # we use Unicode characters in tests
[tool.ruff.lint.isort]
split-on-trailing-comma = false
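
With "RUF" now selected, Ruff reports its own rule family alongside the existing ones. A purely illustrative snippet showing a few of the patterns fixed throughout this commit and the codes that flag them:

xs = [1, 2]

ys = xs + [3]        # RUF005: prefer iterable unpacking, i.e. [*xs, 3]
first = list(ys)[0]  # RUF015: prefer next(iter(ys))

# RUF100 flags "# noqa" directives that no longer suppress anything, which is
# consistent with the stale "# noqa: E501" comments removed from the test
# hunks below, and RUF001 (ignored under test/** above) warns about ambiguous
# Unicode characters in string literals.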

View file

@ -337,15 +337,15 @@ class TestDataSourceDistance:
_p("Original", "Original", 0.5, 1.0, True, MATCH, id="match"),
_p("Original", "Other", 0.5, 1.0, True, MISMATCH, id="mismatch"),
_p("Other", "Original", 0.5, 1.0, True, MISMATCH, id="mismatch"),
_p("Original", "unknown", 0.5, 1.0, True, MISMATCH, id="mismatch-unknown"), # noqa: E501
_p("Original", None, 0.5, 1.0, True, MISMATCH, id="mismatch-no-info"), # noqa: E501
_p("Original", "unknown", 0.5, 1.0, True, MISMATCH, id="mismatch-unknown"),
_p("Original", None, 0.5, 1.0, True, MISMATCH, id="mismatch-no-info"),
_p(None, "Other", 0.5, 1.0, True, MISMATCH, id="mismatch-no-original-multiple-sources"), # noqa: E501
_p(None, "Other", 0.5, 1.0, False, MATCH, id="match-no-original-but-single-source"), # noqa: E501
_p("unknown", "unknown", 0.5, 1.0, True, MATCH, id="match-unknown"),
_p("Original", "Other", 1.0, 1.0, True, 0.25, id="mismatch-max-penalty"), # noqa: E501
_p("Original", "Other", 0.5, 5.0, True, 0.3125, id="mismatch-high-weight"), # noqa: E501
_p("Original", "Other", 0.0, 1.0, True, MATCH, id="match-no-penalty"), # noqa: E501
_p("Original", "Other", 0.5, 0.0, True, MATCH, id="match-no-weight"), # noqa: E501
_p("Original", "Other", 1.0, 1.0, True, 0.25, id="mismatch-max-penalty"),
_p("Original", "Other", 0.5, 5.0, True, 0.3125, id="mismatch-high-weight"),
_p("Original", "Other", 0.0, 1.0, True, MATCH, id="match-no-penalty"),
_p("Original", "Other", 0.5, 0.0, True, MATCH, id="match-no-weight"),
],
) # fmt: skip
def test_distance(self, item, info, expected_distance):

View file

@ -350,8 +350,8 @@ class EditDuringImporterNonSingletonTest(EditDuringImporterTestCase):
self.lib.items(),
self.items_orig,
["title"],
self.IGNORED
+ [
[
*self.IGNORED,
"albumartist",
"mb_albumartistid",
"mb_albumartistids",
@ -378,7 +378,7 @@ class EditDuringImporterNonSingletonTest(EditDuringImporterTestCase):
self.lib.items(),
self.items_orig,
[],
self.IGNORED + ["albumartist", "mb_albumartistid"],
[*self.IGNORED, "albumartist", "mb_albumartistid"],
)
assert all("Tag Track" in i.title for i in self.lib.items())
@ -490,6 +490,6 @@ class EditDuringImporterSingletonTest(EditDuringImporterTestCase):
self.lib.items(),
self.items_orig,
["title"],
self.IGNORED + ["albumartist", "mb_albumartistid"],
[*self.IGNORED, "albumartist", "mb_albumartistid"],
)
assert all("Edited Track" in i.title for i in self.lib.items())

View file

@ -431,7 +431,7 @@ class TestTekstowoLyrics(LyricsBackendTest):
[
("tekstowopl/piosenka24kgoldncityofangels1", True),
(
"tekstowopl/piosenkabeethovenbeethovenpianosonata17tempestthe3rdmovement", # noqa: E501
"tekstowopl/piosenkabeethovenbeethovenpianosonata17tempestthe3rdmovement",
False,
),
],
@ -614,7 +614,7 @@ class TestTranslation:
[00:00:50]
[00:01.00] Some more synced lyrics / Quelques paroles plus synchronisées
Source: https://lrclib.net/api/123""", # noqa: E501
Source: https://lrclib.net/api/123""",
id="synced",
),
pytest.param(

View file

@ -72,8 +72,8 @@ class RandomTest(TestHelper, unittest.TestCase):
print(f"{i:2d} {'*' * positions.count(i)}")
return self._stats(positions)
mean1, stdev1, median1 = experiment("artist")
mean2, stdev2, median2 = experiment("track")
_, stdev1, median1 = experiment("artist")
_, stdev2, median2 = experiment("track")
assert 0 == pytest.approx(median1, abs=1)
assert len(self.items) // 2 == pytest.approx(median2, abs=1)
assert stdev2 > stdev1

View file

@ -136,7 +136,8 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, BeetsTestCase):
"""
im = IMBackend()
path = im.deinterlace(self.IMG_225x225)
cmd = im.identify_cmd + [
cmd = [
*im.identify_cmd,
"-format",
"%[interlace]",
syspath(path, prefix=False),

View file

@ -411,7 +411,7 @@ class ModelTest(unittest.TestCase):
def test_computed_field(self):
model = ModelFixtureWithGetters()
assert model.aComputedField == "thing"
with pytest.raises(KeyError, match="computed field .+ deleted"):
with pytest.raises(KeyError, match=r"computed field .+ deleted"):
del model.aComputedField
def test_items(self):

View file

@ -1056,7 +1056,7 @@ class PathStringTest(BeetsTestCase):
assert isinstance(self.i.path, bytes)
def test_fetched_item_path_is_bytestring(self):
i = list(self.lib.items())[0]
i = next(iter(self.lib.items()))
assert isinstance(i.path, bytes)
def test_unicode_path_becomes_bytestring(self):
@ -1070,14 +1070,14 @@ class PathStringTest(BeetsTestCase):
""",
(self.i.id, "somepath"),
)
i = list(self.lib.items())[0]
i = next(iter(self.lib.items()))
assert isinstance(i.path, bytes)
def test_special_chars_preserved_in_database(self):
path = "b\xe1r".encode()
self.i.path = path
self.i.store()
i = list(self.lib.items())[0]
i = next(iter(self.lib.items()))
assert i.path == path
def test_special_char_path_added_to_database(self):
@ -1086,7 +1086,7 @@ class PathStringTest(BeetsTestCase):
i = item()
i.path = path
self.lib.add(i)
i = list(self.lib.items())[0]
i = next(iter(self.lib.items()))
assert i.path == path
def test_destination_returns_bytestring(self):

View file

@ -308,7 +308,9 @@ class PromptChoicesTest(TerminalImportMixin, PluginImportTestCase):
"Enter search",
"enter Id",
"aBort",
) + ("Foo", "baR")
"Foo",
"baR",
)
self.importer.add_choice(Action.SKIP)
self.importer.run()
@ -342,7 +344,9 @@ class PromptChoicesTest(TerminalImportMixin, PluginImportTestCase):
"Enter search",
"enter Id",
"aBort",
) + ("Foo", "baR")
"Foo",
"baR",
)
config["import"]["singletons"] = True
self.importer.add_choice(Action.SKIP)
@ -381,7 +385,8 @@ class PromptChoicesTest(TerminalImportMixin, PluginImportTestCase):
"Enter search",
"enter Id",
"aBort",
) + ("baZ",)
"baZ",
)
self.importer.add_choice(Action.SKIP)
self.importer.run()
self.mock_input_options.assert_called_once_with(
@ -416,7 +421,8 @@ class PromptChoicesTest(TerminalImportMixin, PluginImportTestCase):
"Enter search",
"enter Id",
"aBort",
) + ("Foo",)
"Foo",
)
# DummyPlugin.foo() should be called once
with patch.object(DummyPlugin, "foo", autospec=True) as mock_foo:
@ -458,7 +464,8 @@ class PromptChoicesTest(TerminalImportMixin, PluginImportTestCase):
"Enter search",
"enter Id",
"aBort",
) + ("Foo",)
"Foo",
)
# DummyPlugin.foo() should be called once
with helper.control_stdin("f\n"):

View file

@ -56,7 +56,7 @@ class CompletionTest(IOMixin, TestPluginTestCase):
test_script_name = os.path.join(_common.RSRC, b"test_completion.sh")
with open(test_script_name, "rb") as test_script_file:
tester.stdin.writelines(test_script_file)
out, err = tester.communicate()
out, _ = tester.communicate()
assert tester.returncode == 0
assert out == b"completion tests passed\n", (
"test/test_completion.sh did not execute properly. "

View file

@ -190,27 +190,23 @@ class ModifyTest(BeetsTestCase):
assert mediafile.initial_key is None
def test_arg_parsing_colon_query(self):
(query, mods, dels) = modify_parse_args(
["title:oldTitle", "title=newTitle"]
)
query, mods, _ = modify_parse_args(["title:oldTitle", "title=newTitle"])
assert query == ["title:oldTitle"]
assert mods == {"title": "newTitle"}
def test_arg_parsing_delete(self):
(query, mods, dels) = modify_parse_args(["title:oldTitle", "title!"])
query, _, dels = modify_parse_args(["title:oldTitle", "title!"])
assert query == ["title:oldTitle"]
assert dels == ["title"]
def test_arg_parsing_query_with_exclaimation(self):
(query, mods, dels) = modify_parse_args(
query, mods, _ = modify_parse_args(
["title:oldTitle!", "title=newTitle!"]
)
assert query == ["title:oldTitle!"]
assert mods == {"title": "newTitle!"}
def test_arg_parsing_equals_in_value(self):
(query, mods, dels) = modify_parse_args(
["title:foo=bar", "title=newTitle"]
)
query, mods, _ = modify_parse_args(["title:foo=bar", "title=newTitle"])
assert query == ["title:foo=bar"]
assert mods == {"title": "newTitle"}

View file

@ -19,7 +19,7 @@ class QueryTest(BeetsTestCase):
)
item = library.Item.from_path(itempath)
self.lib.add(item)
return item, itempath
return item
def add_album(self, items):
album = self.lib.add_album(items)
@ -47,13 +47,13 @@ class QueryTest(BeetsTestCase):
self.check_do_query(2, 0, album=False)
def test_query_album(self):
item, itempath = self.add_item()
item = self.add_item()
self.add_album([item])
self.check_do_query(1, 1, album=True)
self.check_do_query(0, 1, album=True, also_items=False)
item, itempath = self.add_item()
item2, itempath = self.add_item()
item = self.add_item()
item2 = self.add_item()
self.add_album([item, item2])
self.check_do_query(3, 2, album=True)
self.check_do_query(0, 2, album=True, also_items=False)

View file

@ -374,7 +374,7 @@ class ShowModelChangeTest(IOMixin, unittest.TestCase):
def test_both_values_shown(self):
self.a.title = "foo"
self.b.title = "bar"
change, out = self._show()
_, out = self._show()
assert "foo" in out
assert "bar" in out

View file

@ -10,26 +10,26 @@ from beets.util.id_extractors import extract_release_id
[
("spotify", "39WqpoPgZxygo6YQjehLJJ", "39WqpoPgZxygo6YQjehLJJ"),
("spotify", "blah blah", None),
("spotify", "https://open.spotify.com/album/39WqpoPgZxygo6YQjehLJJ", "39WqpoPgZxygo6YQjehLJJ"), # noqa: E501
("spotify", "https://open.spotify.com/album/39WqpoPgZxygo6YQjehLJJ", "39WqpoPgZxygo6YQjehLJJ"),
("deezer", "176356382", "176356382"),
("deezer", "blah blah", None),
("deezer", "https://www.deezer.com/album/176356382", "176356382"),
("beatport", "3089651", "3089651"),
("beatport", "blah blah", None),
("beatport", "https://www.beatport.com/release/album-name/3089651", "3089651"), # noqa: E501
("discogs", "http://www.discogs.com/G%C3%BCnther-Lause-Meru-Ep/release/4354798", "4354798"), # noqa: E501
("discogs", "http://www.discogs.com/release/4354798-G%C3%BCnther-Lause-Meru-Ep", "4354798"), # noqa: E501
("discogs", "http://www.discogs.com/G%C3%BCnther-4354798Lause-Meru-Ep/release/4354798", "4354798"), # noqa: E501
("discogs", "http://www.discogs.com/release/4354798-G%C3%BCnther-4354798Lause-Meru-Ep/", "4354798"), # noqa: E501
("beatport", "https://www.beatport.com/release/album-name/3089651", "3089651"),
("discogs", "http://www.discogs.com/G%C3%BCnther-Lause-Meru-Ep/release/4354798", "4354798"),
("discogs", "http://www.discogs.com/release/4354798-G%C3%BCnther-Lause-Meru-Ep", "4354798"),
("discogs", "http://www.discogs.com/G%C3%BCnther-4354798Lause-Meru-Ep/release/4354798", "4354798"),
("discogs", "http://www.discogs.com/release/4354798-G%C3%BCnther-4354798Lause-Meru-Ep/", "4354798"),
("discogs", "[r4354798]", "4354798"),
("discogs", "r4354798", "4354798"),
("discogs", "4354798", "4354798"),
("discogs", "yet-another-metadata-provider.org/foo/12345", None),
("discogs", "005b84a0-ecd6-39f1-b2f6-6eb48756b268", None),
("musicbrainz", "28e32c71-1450-463e-92bf-e0a46446fc11", "28e32c71-1450-463e-92bf-e0a46446fc11"), # noqa: E501
("musicbrainz", "28e32c71-1450-463e-92bf-e0a46446fc11", "28e32c71-1450-463e-92bf-e0a46446fc11"),
("musicbrainz", "blah blah", None),
("musicbrainz", "https://musicbrainz.org/entity/28e32c71-1450-463e-92bf-e0a46446fc11", "28e32c71-1450-463e-92bf-e0a46446fc11"), # noqa: E501
("bandcamp", "https://nameofartist.bandcamp.com/album/nameofalbum", "https://nameofartist.bandcamp.com/album/nameofalbum"), # noqa: E501
("musicbrainz", "https://musicbrainz.org/entity/28e32c71-1450-463e-92bf-e0a46446fc11", "28e32c71-1450-463e-92bf-e0a46446fc11"),
("bandcamp", "https://nameofartist.bandcamp.com/album/nameofalbum", "https://nameofartist.bandcamp.com/album/nameofalbum"),
],
) # fmt: skip
def test_extract_release_id(source, id_string, expected):