Mirror of https://github.com/beetbox/beets.git (synced 2026-01-24 09:01:24 +01:00)
Enable ruff's future-annotations and RUF* rules (#6245)
## Summary
This PR updates typing and linting across the codebase and enables
stricter `ruff` checks for Python 3.10:
1. Enable `tool.ruff.lint.future-annotations`
A very handy feature released in ruff `0.13.0`: where needed, it _automatically_
adds `from __future__ import annotations` and moves typing-only imports
under `if TYPE_CHECKING`:
```py
# before (runtime import)
from beets.library import Library

# after
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from beets.library import Library
```
2. Set `tool.ruff.target-version = "py310"`
This enforces PEP 604 union syntax throughout the codebase:
```py
# before
SQLiteType = Union[str, bytes, float, int, memoryview, None]
# after
SQLiteType = str | bytes | float | int | memoryview | None
```
3. Enable `RUF*` family of checks
- Remove unused `# noqa`s
- Rename unused unpacked variables to `_`
```py
# before
likelies, consensus = util.get_most_common_tags(self.items)
# after
likelies, _ = util.get_most_common_tags(self.items)
```
- Prefer iterable unpacking over list concatenation
```py
# before
for part in parts + [","]:
# after
for part in [*parts, ","]:
```
- And, most importantly, **RUF012**: use `ClassVar` for mutable class
attributes (see the sketch after this list)
  - This highlighted our messy `BeetsPlugin.template_*` attributes design,
    where I have now defined `BeetsPluginMeta` to make a clear distinction
    between class and instance attributes. @semohr and @asardaes I saw you
    had a discussion regarding these earlier - we will need to revisit this
    at some point to sort it out for good.
  - It also revealed a legitimate issue in `metasync.MetaSource`, where
    `item_types` was initialised as an instance attribute (but luckily
    never used).
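
A minimal sketch of the **RUF012** pattern, mirroring (abridged) the `MPDStatsPlugin` change in this diff; the enabling configuration lives in `pyproject.toml` (`target-version = "py310"`, `future-annotations = true`, and `"RUF"` added to the lint `select` list):

```py
from typing import ClassVar

from beets import plugins
from beets.dbcore import types


class MPDStatsPlugin(plugins.BeetsPlugin):
    # before: item_types = {...} is flagged by RUF012 (mutable class attribute)
    # after: the ClassVar annotation makes the shared, class-level intent explicit
    item_types: ClassVar[dict[str, types.Type]] = {
        "play_count": types.INTEGER,
        "skip_count": types.INTEGER,
        "last_played": types.DATE,
    }
```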
Commit b3c42a3350: 79 changed files with 414 additions and 294 deletions.

The list of blame-ignored revisions gains entries for this PR's commits:

    @@ -81,9 +81,17 @@ d93ddf8dd43e4f9ed072a03829e287c78d2570a2
    59c93e70139f70e9fd1c6f3c1bceb005945bec33
    # Moved ui.commands._utils into ui.commands.utils
    25ae330044abf04045e3f378f72bbaed739fb30d
    # Refactor test_ui_command.py into multiple modules
    a59e41a88365e414db3282658d2aa456e0b3468a
    # pyupgrade Python 3.10
    301637a1609831947cb5dd90270ed46c24b1ab1b
    # Fix changelog formatting
    658b184c59388635787b447983ecd3a575f4fe56
    # Configure future-annotations
    ac7f3d9da95c2d0a32e5c908ea68480518a1582d
    # Configure ruff for py310
    c46069654628040316dea9db85d01b263db3ba9e
    # Enable RUF rules
    4749599913a42e02e66b37db9190de11d6be2cdf
    # Address RUF012
    bc71ec308eb938df1d349f6857634ddf2a82e339
The source hunks apply these patterns across `beets/` and `beetsplug/` (plus the test helpers and the Sphinx `ConfDomain` in the docs):

- Typing-only imports move under `if TYPE_CHECKING` (with `from __future__ import annotations` added where missing): `AlbumInfo`/`TrackInfo`/`Item` in the autotag matching code; `Connection`, `TracebackType` and the `FieldQueryType`/`FieldSort`/`Query`/`Sort`/`SQLiteType` imports in `dbcore.db`; `Iterator`/`MutableSequence` and `AnyModel`/`Model` in `dbcore.query`; `dbcore`/`library` and various `collections.abc` names in the importer; `Mapping`/`RootLogger`/`TracebackType` in `beets.logging`; `Callable` in `beets.util`; `PathLike` in the `move` command; and the same treatment in `AlbumTypesPlugin`, the AURA server, and the chroma, deezer, lastgenre, metasync, missing, playlist, replace, smartplaylist and spotify plugins, among others.
- PEP 604 unions replace `Union[...]`: `SQLiteType`, the `_SysExcInfoType`/`_ExcInfoType`/`_ArgsType` aliases in `beets.logging`, `StrPath = str | Path` and `PathLike = StrPath | bytes` in `beets.util`, and `SearchApiMetadataSourcePlugin[SearchResponseAlbums | SearchResponseTracks]` in the Spotify plugin.
- `ClassVar` annotations are added to mutable class attributes: `Model._fields`/`_sorts`, `Period.relative_units`, `MusicalKey.ENHARMONIC`, the `Album`/`Item` `_fields`, `_sorts` and `item_keys`, `ImportHelper.default_import_config` and `FetchImageHelper.IMAGEHEADER` in the test helpers, `Server.tagtype_map` in BPD, and the `item_types`/`album_types`/`item_queries` and similar mappings in the acousticbrainz, deezer, fetchart, lyrics, metasync (`Amarok`, `Itunes`), missing, mpdstats, playlist, spotify and `the` plugins; the Sphinx `ConfDomain` attributes keep targeted `# noqa: RUF012` comments instead.
- `BeetsPluginMeta` (an `abc.ABCMeta` subclass) now carries the class-level `template_funcs`, `template_fields` and `album_template_fields` `ClassVar`s; `BeetsPlugin` declares them as plain instance annotations and `__init__` always creates per-instance storage for them.
- `metasync.MetaSource` declares `item_types: ClassVar[dict[str, types.Type]]` instead of initialising an unused instance attribute in `__init__`.
- Unused unpacked variables become `_`: the importer tag lookup (`likelies, _ = util.get_most_common_tags(self.items)`), `do_query` in the write command, `parse_error()` and `time()` in the BPD GStreamer player, the convert options tuple, the Discogs tracklist processing (`tracks, index_tracks, *_ = processed`), the export data emitter, and `os.path.splitext` in `ArtResizer`.
- Concatenation is replaced by unpacking or starred calls: `[*parts, ","]` in query parsing, `pipeline.Pipeline([task_iter, *list(stages)])` and `func(*args, task)` in the pipeline stages, `time.mktime((*f.date_time, 0, 0, -1))` in the archive import task, `[cmd, *args]` in `has_program`, the prompt-choice and `input_options` tuples in the ui commands, the ImageMagick `convert`/`identify`/`compare` command lists in `artresizer`, `[conn, *self.args]` in BPD, `[picard_path, *paths]` in mbsubmit, `lib.items([*query, "singleton:true"])` in bpsync and mbsync, the keyfinder command, the replaygain `CommandBackend` option list, `flatten_tree` in lastgenre, and `ThePlugin.patterns`.
- Miscellaneous RUF fixes: `__all__` sorted in `beets.importer` and `beets.library`, `list(x)[0]` replaced by `next(iter(x))`, stale `# noqa` comments removed (e.g. on the `gi.repository` import and the iTunes fetchart log message), and the nested parenthesised width check in `ChangeRepresentation` simplified.
poetry.lock (generated, 2 lines changed): in the `[metadata]` section (after `web = ["flask", "flask-cors"]`), the `content-hash` changes from
`8a1714daca55eab559558f2d4bd63d4857686eb607bf4b24f1ea6dbd412e6641` to
`f8ce55ae74c5e3c5d1d330582f83dae30ef963a0b8dd8c8b79f16c3bcfdb525a`;
`lock-version = "2.0"` and `python-versions = ">=3.10,<4"` appear as unchanged context.
In `pyproject.toml`:

- `[tool.poetry.group.lint.dependencies]`: `ruff = ">=0.6.4"` becomes `ruff = ">=0.13.0"` (the release that introduced `future-annotations`).
- `[tool.poe.tasks.format]`: `cmd = "ruff format"` becomes `cmd = "ruff format --config=pyproject.toml"`; `[tool.poe.tasks.lint]`: `cmd = "ruff check"` becomes `cmd = "ruff check --config=pyproject.toml"`.
- `[tool.ruff]`: `target-version = "py39"` becomes `target-version = "py310"`.
- `[tool.ruff.lint]`: `future-annotations = true` is added; in `select`, `"RUF"` is uncommented and `"TCH"` is renamed to `"TC"`.
- The per-file ignores gain `"test/**" = ["RUF001"]  # we use Unicode characters in tests`.
|
|
@@ -337,15 +337,15 @@ class TestDataSourceDistance:

_p("Original", "Original", 0.5, 1.0, True, MATCH, id="match"),
_p("Original", "Other", 0.5, 1.0, True, MISMATCH, id="mismatch"),
_p("Other", "Original", 0.5, 1.0, True, MISMATCH, id="mismatch"),
_p("Original", "unknown", 0.5, 1.0, True, MISMATCH, id="mismatch-unknown"), # noqa: E501
_p("Original", None, 0.5, 1.0, True, MISMATCH, id="mismatch-no-info"), # noqa: E501
_p("Original", "unknown", 0.5, 1.0, True, MISMATCH, id="mismatch-unknown"),
_p("Original", None, 0.5, 1.0, True, MISMATCH, id="mismatch-no-info"),
_p(None, "Other", 0.5, 1.0, True, MISMATCH, id="mismatch-no-original-multiple-sources"), # noqa: E501
_p(None, "Other", 0.5, 1.0, False, MATCH, id="match-no-original-but-single-source"), # noqa: E501
_p("unknown", "unknown", 0.5, 1.0, True, MATCH, id="match-unknown"),
_p("Original", "Other", 1.0, 1.0, True, 0.25, id="mismatch-max-penalty"), # noqa: E501
_p("Original", "Other", 0.5, 5.0, True, 0.3125, id="mismatch-high-weight"), # noqa: E501
_p("Original", "Other", 0.0, 1.0, True, MATCH, id="match-no-penalty"), # noqa: E501
_p("Original", "Other", 0.5, 0.0, True, MATCH, id="match-no-weight"), # noqa: E501
_p("Original", "Other", 1.0, 1.0, True, 0.25, id="mismatch-max-penalty"),
_p("Original", "Other", 0.5, 5.0, True, 0.3125, id="mismatch-high-weight"),
_p("Original", "Other", 0.0, 1.0, True, MATCH, id="match-no-penalty"),
_p("Original", "Other", 0.5, 0.0, True, MATCH, id="match-no-weight"),
],
) # fmt: skip
def test_distance(self, item, info, expected_distance):
@@ -24,7 +24,7 @@ class LyricsPage(NamedTuple):
artist: str = "The Beatles"
track_title: str = "Lady Madonna"
url_title: str | None = None # only relevant to the Google backend
marks: list[str] = [] # markers for pytest.param
marks: list[str] = [] # markers for pytest.param # noqa: RUF012

def __str__(self) -> str:
"""Return name of this test case."""
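The `# noqa: RUF012` added above suppresses ruff's mutable-class-default check for a `NamedTuple` field: `marks` is a per-instance field default rather than shared class state, so annotating it with `ClassVar` would change its meaning. A small self-contained sketch of the pattern, with hypothetical names:

```py
from typing import NamedTuple


class Case(NamedTuple):
    title: str = "Lady Madonna"
    # A NamedTuple field default, not a class-level constant, so ClassVar does
    # not apply here and the RUF012 warning is silenced instead.
    marks: list[str] = []  # noqa: RUF012
```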
@@ -14,12 +14,17 @@

"""Tests for the 'albumtypes' plugin."""

from collections.abc import Sequence
from __future__ import annotations

from typing import TYPE_CHECKING

from beets.test.helper import PluginTestCase
from beetsplug.albumtypes import AlbumTypesPlugin
from beetsplug.musicbrainz import VARIOUS_ARTISTS_ID

if TYPE_CHECKING:
from collections.abc import Sequence


class AlbumTypesPluginTest(PluginTestCase):
"""Tests for albumtypes plugin."""
@@ -1,13 +1,17 @@
from __future__ import annotations

import os
from http import HTTPStatus
from pathlib import Path
from typing import Any
from typing import TYPE_CHECKING, Any

import pytest
from flask.testing import Client

from beets.test.helper import TestHelper

if TYPE_CHECKING:
from flask.testing import Client


@pytest.fixture(scope="session", autouse=True)
def helper():
@@ -22,6 +22,7 @@ import threading
import time
import unittest
from contextlib import contextmanager
from typing import ClassVar
from unittest.mock import MagicMock, patch

import confuse

@@ -837,7 +838,7 @@ class BPDQueueTest(BPDTestHelper):
fail=True,
)

METADATA = {"Pos", "Time", "Id", "file", "duration"}
METADATA: ClassVar[set[str]] = {"Pos", "Time", "Id", "file", "duration"}

def test_cmd_add(self):
with self.run_bpd() as client:

@@ -1032,7 +1033,7 @@ class BPDConnectionTest(BPDTestHelper):
}
)

ALL_MPD_TAGTYPES = {
ALL_MPD_TAGTYPES: ClassVar[set[str]] = {
"Artist",
"ArtistSort",
"Album",

@@ -1057,7 +1058,7 @@ class BPDConnectionTest(BPDTestHelper):
"MUSICBRAINZ_RELEASETRACKID",
"MUSICBRAINZ_WORKID",
}
UNSUPPORTED_TAGTYPES = {
UNSUPPORTED_TAGTYPES: ClassVar[set[str]] = {
"MUSICBRAINZ_WORKID", # not tracked by beets
"Performer", # not tracked by beets
"AlbumSort", # not tracked by beets
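The `ClassVar[...]` annotations added throughout these test hunks all follow the same shape. A minimal sketch (hypothetical class, not from the diff) of what RUF012 asks for when a mutable container lives on the class rather than on instances:

```py
from typing import ClassVar


class TagTypesFixture:
    # Annotating with ClassVar documents that this set is shared class state,
    # which satisfies RUF012 for mutable class-level defaults.
    SUPPORTED: ClassVar[set[str]] = {"Artist", "Album", "Title"}
```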
@@ -11,14 +11,14 @@
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.

from __future__ import annotations

import fnmatch
import os.path
import re
import sys
import unittest
from pathlib import Path
from typing import TYPE_CHECKING

import pytest
from mediafile import MediaFile

@@ -35,6 +35,9 @@ from beets.test.helper import (
)
from beetsplug import convert

if TYPE_CHECKING:
from pathlib import Path


def shell_quote(text):
import shlex
@@ -13,6 +13,7 @@
# included in all copies or substantial portions of the Software.

import codecs
from typing import ClassVar
from unittest.mock import patch

from beets.dbcore.query import TrueQuery

@@ -319,7 +320,7 @@ class EditDuringImporterTestCase(

matching = AutotagStub.GOOD

IGNORED = ["added", "album_id", "id", "mtime", "path"]
IGNORED: ClassVar[list[str]] = ["added", "album_id", "id", "mtime", "path"]

def setUp(self):
super().setUp()

@@ -350,8 +351,8 @@ class EditDuringImporterNonSingletonTest(EditDuringImporterTestCase):
self.lib.items(),
self.items_orig,
["title"],
self.IGNORED
+ [
[
*self.IGNORED,
"albumartist",
"mb_albumartistid",
"mb_albumartistids",

@@ -378,7 +379,7 @@ class EditDuringImporterNonSingletonTest(EditDuringImporterTestCase):
self.lib.items(),
self.items_orig,
[],
self.IGNORED + ["albumartist", "mb_albumartistid"],
[*self.IGNORED, "albumartist", "mb_albumartistid"],
)
assert all("Tag Track" in i.title for i in self.lib.items())

@@ -490,6 +491,6 @@ class EditDuringImporterSingletonTest(EditDuringImporterTestCase):
self.lib.items(),
self.items_orig,
["title"],
self.IGNORED + ["albumartist", "mb_albumartistid"],
[*self.IGNORED, "albumartist", "mb_albumartistid"],
)
assert all("Edited Track" in i.title for i in self.lib.items())
@@ -14,15 +14,21 @@

"""Tests for the 'ftintitle' plugin."""

from collections.abc import Generator
from typing import TypeAlias
from __future__ import annotations

from typing import TYPE_CHECKING, TypeAlias

import pytest

from beets.library.models import Album, Item
from beets.library.models import Album
from beets.test.helper import PluginTestCase
from beetsplug import ftintitle

if TYPE_CHECKING:
from collections.abc import Generator

from beets.library.models import Item

ConfigValue: TypeAlias = str | bool | list[str]
@@ -19,7 +19,7 @@ import os
import sys
import unittest
from contextlib import contextmanager
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, ClassVar

from beets import plugins
from beets.test.helper import PluginTestCase, capture_log

@@ -70,7 +70,7 @@ class HookLogsTest(HookTestCase):


class HookCommandTest(HookTestCase):
EVENTS: list[plugins.EventType] = ["write", "after_write"]
EVENTS: ClassVar[list[plugins.EventType]] = ["write", "after_write"]

def setUp(self):
super().setUp()
@@ -14,11 +14,13 @@

"""Tests for the 'lyrics' plugin."""

from __future__ import annotations

import re
import textwrap
from functools import partial
from http import HTTPStatus
from pathlib import Path
from typing import TYPE_CHECKING

import pytest

@@ -26,7 +28,12 @@ from beets.library import Item
from beets.test.helper import PluginMixin, TestHelper
from beetsplug import lyrics

from .lyrics_pages import LyricsPage, lyrics_pages
from .lyrics_pages import lyrics_pages

if TYPE_CHECKING:
from pathlib import Path

from .lyrics_pages import LyricsPage

PHRASE_BY_TITLE = {
"Lady Madonna": "friday night arrives without a suitcase",

@@ -424,7 +431,7 @@ class TestTekstowoLyrics(LyricsBackendTest):
[
("tekstowopl/piosenka24kgoldncityofangels1", True),
(
"tekstowopl/piosenkabeethovenbeethovenpianosonata17tempestthe3rdmovement", # noqa: E501
"tekstowopl/piosenkabeethovenbeethovenpianosonata17tempestthe3rdmovement",
False,
),
],

@@ -607,7 +614,7 @@ class TestTranslation:
[00:00:50]
[00:01.00] Some more synced lyrics / Quelques paroles plus synchronisées

Source: https://lrclib.net/api/123""", # noqa: E501
Source: https://lrclib.net/api/123""",
id="synced",
),
pytest.param(
@@ -1,6 +1,8 @@
from __future__ import annotations

import json
import pathlib
from copy import deepcopy
from typing import TYPE_CHECKING

import pytest

@@ -9,13 +11,17 @@ from beets.autotag.distance import Distance
from beets.autotag.hooks import AlbumInfo, TrackInfo
from beets.library import Item
from beets.test.helper import PluginMixin
from beetsplug._typing import JSONDict
from beetsplug.mbpseudo import (
_STATUS_PSEUDO,
MusicBrainzPseudoReleasePlugin,
PseudoAlbumInfo,
)

if TYPE_CHECKING:
import pathlib

from beetsplug._typing import JSONDict


@pytest.fixture(scope="module")
def rsrc_dir(pytestconfig: pytest.Config):
@@ -13,6 +13,7 @@
# included in all copies or substantial portions of the Software.


from typing import Any, ClassVar
from unittest.mock import ANY, Mock, call, patch

from beets import util

@@ -46,9 +47,8 @@ class MPDStatsTest(PluginTestCase):
assert mpdstats.get_item("/some/non-existing/path") is None
assert "item not found:" in log.info.call_args[0][0]

FAKE_UNKNOWN_STATE = "some-unknown-one"
STATUSES = [
{"state": FAKE_UNKNOWN_STATE},
STATUSES: ClassVar[list[dict[str, Any]]] = [
{"state": "some-unknown-one"},
{"state": "pause"},
{"state": "play", "songid": 1, "time": "0:1"},
{"state": "stop"},
@@ -15,6 +15,7 @@
"""Tests for MusicBrainz API wrapper."""

import unittest
from typing import ClassVar
from unittest import mock

import pytest

@@ -1017,7 +1018,11 @@ class TestMusicBrainzPlugin(PluginMixin):
plugin = "musicbrainz"

mbid = "d2a6f856-b553-40a0-ac54-a321e8e2da99"
RECORDING = {"title": "foo", "id": "bar", "length": 42}
RECORDING: ClassVar[dict[str, int | str]] = {
"title": "foo",
"id": "bar",
"length": 42,
}

@pytest.fixture
def plugin_config(self):
@@ -72,8 +72,8 @@ class RandomTest(TestHelper, unittest.TestCase):
print(f"{i:2d} {'*' * positions.count(i)}")
return self._stats(positions)

mean1, stdev1, median1 = experiment("artist")
mean2, stdev2, median2 = experiment("track")
_, stdev1, median1 = experiment("artist")
_, stdev2, median2 = experiment("track")
assert 0 == pytest.approx(median1, abs=1)
assert len(self.items) // 2 == pytest.approx(median2, abs=1)
assert stdev2 > stdev1
@@ -136,7 +136,8 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, BeetsTestCase):
"""
im = IMBackend()
path = im.deinterlace(self.IMG_225x225)
cmd = im.identify_cmd + [
cmd = [
*im.identify_cmd,
"-format",
"%[interlace]",
syspath(path, prefix=False),
@@ -19,6 +19,7 @@ import shutil
import sqlite3
import unittest
from tempfile import mkstemp
from typing import ClassVar

import pytest

@@ -57,13 +58,13 @@ class QueryFixture(dbcore.query.FieldQuery):
class ModelFixture1(LibModel):
_table = "test"
_flex_table = "testflex"
_fields = {
_fields: ClassVar[dict[str, dbcore.types.Type]] = {
"id": dbcore.types.PRIMARY_ID,
"field_one": dbcore.types.INTEGER,
"field_two": dbcore.types.STRING,
}

_sorts = {
_sorts: ClassVar[dict[str, type[dbcore.query.FieldSort]]] = {
"some_sort": SortFixture,
}

@@ -92,7 +93,7 @@ class DatabaseFixture1(dbcore.Database):


class ModelFixture2(ModelFixture1):
_fields = {
_fields: ClassVar[dict[str, dbcore.types.Type]] = {
"id": dbcore.types.PRIMARY_ID,
"field_one": dbcore.types.INTEGER,
"field_two": dbcore.types.INTEGER,

@@ -104,7 +105,7 @@ class DatabaseFixture2(dbcore.Database):


class ModelFixture3(ModelFixture1):
_fields = {
_fields: ClassVar[dict[str, dbcore.types.Type]] = {
"id": dbcore.types.PRIMARY_ID,
"field_one": dbcore.types.INTEGER,
"field_two": dbcore.types.INTEGER,

@@ -117,7 +118,7 @@ class DatabaseFixture3(dbcore.Database):


class ModelFixture4(ModelFixture1):
_fields = {
_fields: ClassVar[dict[str, dbcore.types.Type]] = {
"id": dbcore.types.PRIMARY_ID,
"field_one": dbcore.types.INTEGER,
"field_two": dbcore.types.INTEGER,

@@ -133,14 +134,14 @@ class DatabaseFixture4(dbcore.Database):
class AnotherModelFixture(ModelFixture1):
_table = "another"
_flex_table = "anotherflex"
_fields = {
_fields: ClassVar[dict[str, dbcore.types.Type]] = {
"id": dbcore.types.PRIMARY_ID,
"foo": dbcore.types.INTEGER,
}


class ModelFixture5(ModelFixture1):
_fields = {
_fields: ClassVar[dict[str, dbcore.types.Type]] = {
"some_string_field": dbcore.types.STRING,
"some_float_field": dbcore.types.FLOAT,
"some_boolean_field": dbcore.types.BOOLEAN,

@@ -411,7 +412,7 @@ class ModelTest(unittest.TestCase):
def test_computed_field(self):
model = ModelFixtureWithGetters()
assert model.aComputedField == "thing"
with pytest.raises(KeyError, match="computed field .+ deleted"):
with pytest.raises(KeyError, match=r"computed field .+ deleted"):
del model.aComputedField

def test_items(self):
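The `match=` change in the last hunk above swaps a plain string for a raw string: `pytest.raises(match=...)` applies the pattern as a regular expression, and one of the newly enabled RUF checks appears to prefer raw strings once the pattern contains metacharacters such as `.+`. A self-contained sketch of the behaviour (hypothetical test, not from the diff):

```py
import pytest


def test_raw_string_match():
    # match= is applied with re.search, so ".+" is a regex wildcard here;
    # the raw-string prefix simply makes the regex intent explicit.
    with pytest.raises(KeyError, match=r"computed field .+ deleted"):
        raise KeyError("computed field 'foo' deleted")
```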
@@ -1056,7 +1056,7 @@ class PathStringTest(BeetsTestCase):
assert isinstance(self.i.path, bytes)

def test_fetched_item_path_is_bytestring(self):
i = list(self.lib.items())[0]
i = next(iter(self.lib.items()))
assert isinstance(i.path, bytes)

def test_unicode_path_becomes_bytestring(self):

@@ -1070,14 +1070,14 @@ class PathStringTest(BeetsTestCase):
""",
(self.i.id, "somepath"),
)
i = list(self.lib.items())[0]
i = next(iter(self.lib.items()))
assert isinstance(i.path, bytes)

def test_special_chars_preserved_in_database(self):
path = "b\xe1r".encode()
self.i.path = path
self.i.store()
i = list(self.lib.items())[0]
i = next(iter(self.lib.items()))
assert i.path == path

def test_special_char_path_added_to_database(self):

@@ -1086,7 +1086,7 @@ class PathStringTest(BeetsTestCase):
i = item()
i.path = path
self.lib.add(i)
i = list(self.lib.items())[0]
i = next(iter(self.lib.items()))
assert i.path == path

def test_destination_returns_bytestring(self):
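The repeated `list(self.lib.items())[0]` → `next(iter(...))` rewrite above is the RUF015 pattern: take the first element of an iterable without materializing the whole result as a list first. A minimal sketch:

```py
def first(results):
    # Returns the first element lazily; raises StopIteration if empty,
    # whereas list(results)[0] would build the entire list first.
    return next(iter(results))


assert first(range(3)) == 0
```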
@@ -19,6 +19,7 @@ import logging
import os
import pkgutil
import sys
from typing import ClassVar
from unittest.mock import ANY, Mock, patch

import pytest

@@ -46,7 +47,7 @@ from beets.util import PromptChoice, displayable_path, syspath

class TestPluginRegistration(PluginTestCase):
class RatingPlugin(plugins.BeetsPlugin):
item_types = {
item_types: ClassVar[dict[str, types.Type]] = {
"rating": types.Float(),
"multi_value": types.MULTI_VALUE_DSV,
}

@@ -70,7 +71,9 @@ class TestPluginRegistration(PluginTestCase):

def test_duplicate_type(self):
class DuplicateTypePlugin(plugins.BeetsPlugin):
item_types = {"rating": types.INTEGER}
item_types: ClassVar[dict[str, types.Type]] = {
"rating": types.INTEGER
}

self.register_plugin(DuplicateTypePlugin)
with pytest.raises(

@@ -308,7 +311,9 @@ class PromptChoicesTest(TerminalImportMixin, PluginImportTestCase):
"Enter search",
"enter Id",
"aBort",
) + ("Foo", "baR")
"Foo",
"baR",
)

self.importer.add_choice(Action.SKIP)
self.importer.run()

@@ -342,7 +347,9 @@ class PromptChoicesTest(TerminalImportMixin, PluginImportTestCase):
"Enter search",
"enter Id",
"aBort",
) + ("Foo", "baR")
"Foo",
"baR",
)

config["import"]["singletons"] = True
self.importer.add_choice(Action.SKIP)

@@ -381,7 +388,8 @@ class PromptChoicesTest(TerminalImportMixin, PluginImportTestCase):
"Enter search",
"enter Id",
"aBort",
) + ("baZ",)
"baZ",
)
self.importer.add_choice(Action.SKIP)
self.importer.run()
self.mock_input_options.assert_called_once_with(

@@ -416,7 +424,8 @@ class PromptChoicesTest(TerminalImportMixin, PluginImportTestCase):
"Enter search",
"enter Id",
"aBort",
) + ("Foo",)
"Foo",
)

# DummyPlugin.foo() should be called once
with patch.object(DummyPlugin, "foo", autospec=True) as mock_foo:

@@ -458,7 +467,8 @@ class PromptChoicesTest(TerminalImportMixin, PluginImportTestCase):
"Enter search",
"enter Id",
"aBort",
) + ("Foo",)
"Foo",
)

# DummyPlugin.foo() should be called once
with helper.control_stdin("f\n"):
@@ -56,7 +56,7 @@ class CompletionTest(IOMixin, TestPluginTestCase):
test_script_name = os.path.join(_common.RSRC, b"test_completion.sh")
with open(test_script_name, "rb") as test_script_file:
tester.stdin.writelines(test_script_file)
out, err = tester.communicate()
out, _ = tester.communicate()
assert tester.returncode == 0
assert out == b"completion tests passed\n", (
"test/test_completion.sh did not execute properly. "
@@ -190,27 +190,23 @@ class ModifyTest(BeetsTestCase):
assert mediafile.initial_key is None

def test_arg_parsing_colon_query(self):
(query, mods, dels) = modify_parse_args(
["title:oldTitle", "title=newTitle"]
)
query, mods, _ = modify_parse_args(["title:oldTitle", "title=newTitle"])
assert query == ["title:oldTitle"]
assert mods == {"title": "newTitle"}

def test_arg_parsing_delete(self):
(query, mods, dels) = modify_parse_args(["title:oldTitle", "title!"])
query, _, dels = modify_parse_args(["title:oldTitle", "title!"])
assert query == ["title:oldTitle"]
assert dels == ["title"]

def test_arg_parsing_query_with_exclaimation(self):
(query, mods, dels) = modify_parse_args(
query, mods, _ = modify_parse_args(
["title:oldTitle!", "title=newTitle!"]
)
assert query == ["title:oldTitle!"]
assert mods == {"title": "newTitle!"}

def test_arg_parsing_equals_in_value(self):
(query, mods, dels) = modify_parse_args(
["title:foo=bar", "title=newTitle"]
)
query, mods, _ = modify_parse_args(["title:foo=bar", "title=newTitle"])
assert query == ["title:foo=bar"]
assert mods == {"title": "newTitle"}
@@ -19,7 +19,7 @@ class QueryTest(BeetsTestCase):
)
item = library.Item.from_path(itempath)
self.lib.add(item)
return item, itempath
return item

def add_album(self, items):
album = self.lib.add_album(items)

@@ -47,13 +47,13 @@ class QueryTest(BeetsTestCase):
self.check_do_query(2, 0, album=False)

def test_query_album(self):
item, itempath = self.add_item()
item = self.add_item()
self.add_album([item])
self.check_do_query(1, 1, album=True)
self.check_do_query(0, 1, album=True, also_items=False)

item, itempath = self.add_item()
item2, itempath = self.add_item()
item = self.add_item()
item2 = self.add_item()
self.add_album([item, item2])
self.check_do_query(3, 2, album=True)
self.check_do_query(0, 2, album=True, also_items=False)
@@ -374,7 +374,7 @@ class ShowModelChangeTest(IOMixin, unittest.TestCase):
def test_both_values_shown(self):
self.a.title = "foo"
self.b.title = "bar"
change, out = self._show()
_, out = self._show()
assert "foo" in out
assert "bar" in out
@@ -10,26 +10,26 @@ from beets.util.id_extractors import extract_release_id
[
("spotify", "39WqpoPgZxygo6YQjehLJJ", "39WqpoPgZxygo6YQjehLJJ"),
("spotify", "blah blah", None),
("spotify", "https://open.spotify.com/album/39WqpoPgZxygo6YQjehLJJ", "39WqpoPgZxygo6YQjehLJJ"), # noqa: E501
("spotify", "https://open.spotify.com/album/39WqpoPgZxygo6YQjehLJJ", "39WqpoPgZxygo6YQjehLJJ"),
("deezer", "176356382", "176356382"),
("deezer", "blah blah", None),
("deezer", "https://www.deezer.com/album/176356382", "176356382"),
("beatport", "3089651", "3089651"),
("beatport", "blah blah", None),
("beatport", "https://www.beatport.com/release/album-name/3089651", "3089651"), # noqa: E501
("discogs", "http://www.discogs.com/G%C3%BCnther-Lause-Meru-Ep/release/4354798", "4354798"), # noqa: E501
("discogs", "http://www.discogs.com/release/4354798-G%C3%BCnther-Lause-Meru-Ep", "4354798"), # noqa: E501
("discogs", "http://www.discogs.com/G%C3%BCnther-4354798Lause-Meru-Ep/release/4354798", "4354798"), # noqa: E501
("discogs", "http://www.discogs.com/release/4354798-G%C3%BCnther-4354798Lause-Meru-Ep/", "4354798"), # noqa: E501
("beatport", "https://www.beatport.com/release/album-name/3089651", "3089651"),
("discogs", "http://www.discogs.com/G%C3%BCnther-Lause-Meru-Ep/release/4354798", "4354798"),
("discogs", "http://www.discogs.com/release/4354798-G%C3%BCnther-Lause-Meru-Ep", "4354798"),
("discogs", "http://www.discogs.com/G%C3%BCnther-4354798Lause-Meru-Ep/release/4354798", "4354798"),
("discogs", "http://www.discogs.com/release/4354798-G%C3%BCnther-4354798Lause-Meru-Ep/", "4354798"),
("discogs", "[r4354798]", "4354798"),
("discogs", "r4354798", "4354798"),
("discogs", "4354798", "4354798"),
("discogs", "yet-another-metadata-provider.org/foo/12345", None),
("discogs", "005b84a0-ecd6-39f1-b2f6-6eb48756b268", None),
("musicbrainz", "28e32c71-1450-463e-92bf-e0a46446fc11", "28e32c71-1450-463e-92bf-e0a46446fc11"), # noqa: E501
("musicbrainz", "28e32c71-1450-463e-92bf-e0a46446fc11", "28e32c71-1450-463e-92bf-e0a46446fc11"),
("musicbrainz", "blah blah", None),
("musicbrainz", "https://musicbrainz.org/entity/28e32c71-1450-463e-92bf-e0a46446fc11", "28e32c71-1450-463e-92bf-e0a46446fc11"), # noqa: E501
("bandcamp", "https://nameofartist.bandcamp.com/album/nameofalbum", "https://nameofartist.bandcamp.com/album/nameofalbum"), # noqa: E501
("musicbrainz", "https://musicbrainz.org/entity/28e32c71-1450-463e-92bf-e0a46446fc11", "28e32c71-1450-463e-92bf-e0a46446fc11"),
("bandcamp", "https://nameofartist.bandcamp.com/album/nameofalbum", "https://nameofartist.bandcamp.com/album/nameofalbum"),
],
) # fmt: skip
def test_extract_release_id(source, id_string, expected):