Mirror of https://github.com/beetbox/beets.git (synced 2026-01-12 11:14:42 +01:00)
Merge Discogs Fixes with master
Commit 4ac4e19f5d
34 changed files with 1664 additions and 589 deletions
|
|
@ -85,3 +85,5 @@ d93ddf8dd43e4f9ed072a03829e287c78d2570a2
|
|||
a59e41a88365e414db3282658d2aa456e0b3468a
|
||||
# pyupgrade Python 3.10
|
||||
301637a1609831947cb5dd90270ed46c24b1ab1b
|
||||
# Fix changelog formatting
|
||||
658b184c59388635787b447983ecd3a575f4fe56
|
||||
|
|
|
|||
4  .github/workflows/ci.yaml  vendored
|
|
@ -66,7 +66,7 @@ jobs:
|
|||
- if: ${{ env.IS_MAIN_PYTHON != 'true' }}
|
||||
name: Test without coverage
|
||||
run: |
|
||||
poetry install --without=lint --extras=autobpm --extras=lyrics --extras=replaygain --extras=reflink --extras=fetchart --extras=chroma --extras=sonosupdate --extras=parentwork
|
||||
poetry install --without=lint --extras=autobpm --extras=lyrics --extras=replaygain --extras=reflink --extras=fetchart --extras=chroma --extras=sonosupdate
|
||||
poe test
|
||||
|
||||
- if: ${{ env.IS_MAIN_PYTHON == 'true' }}
|
||||
|
|
@ -74,7 +74,7 @@ jobs:
|
|||
env:
|
||||
LYRICS_UPDATED: ${{ steps.lyrics-update.outputs.any_changed }}
|
||||
run: |
|
||||
poetry install --extras=autobpm --extras=lyrics --extras=docs --extras=replaygain --extras=reflink --extras=fetchart --extras=chroma --extras=sonosupdate --extras=parentwork
|
||||
poetry install --extras=autobpm --extras=lyrics --extras=docs --extras=replaygain --extras=reflink --extras=fetchart --extras=chroma --extras=sonosupdate
|
||||
poe docs
|
||||
poe test-with-coverage
|
||||
|
||||
|
|
|
|||
290  beetsplug/_utils/musicbrainz.py  Normal file
|
|
@ -0,0 +1,290 @@
|
|||
"""Helpers for communicating with the MusicBrainz webservice.
|
||||
|
||||
Provides a rate-limited HTTP session and convenience methods to fetch and
|
||||
normalize API responses.
|
||||
|
||||
This module centralizes request handling and response shaping so callers can
|
||||
work with consistently structured data without embedding HTTP or rate-limit
|
||||
logic throughout the codebase.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import operator
|
||||
from dataclasses import dataclass, field
|
||||
from functools import cached_property, singledispatchmethod, wraps
|
||||
from itertools import groupby
|
||||
from typing import TYPE_CHECKING, Any, Literal, ParamSpec, TypedDict, TypeVar
|
||||
|
||||
from requests_ratelimiter import LimiterMixin
|
||||
from typing_extensions import NotRequired, Unpack
|
||||
|
||||
from beets import config, logging
|
||||
|
||||
from .requests import RequestHandler, TimeoutAndRetrySession
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
from requests import Response
|
||||
|
||||
from .._typing import JSONDict
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LimiterTimeoutSession(LimiterMixin, TimeoutAndRetrySession):
|
||||
"""HTTP session that enforces rate limits."""
|
||||
|
||||
|
||||
Entity = Literal[
|
||||
"area",
|
||||
"artist",
|
||||
"collection",
|
||||
"event",
|
||||
"genre",
|
||||
"instrument",
|
||||
"label",
|
||||
"place",
|
||||
"recording",
|
||||
"release",
|
||||
"release-group",
|
||||
"series",
|
||||
"work",
|
||||
"url",
|
||||
]
|
||||
|
||||
|
||||
class LookupKwargs(TypedDict, total=False):
|
||||
includes: NotRequired[list[str]]
|
||||
|
||||
|
||||
class PagingKwargs(TypedDict, total=False):
|
||||
limit: NotRequired[int]
|
||||
offset: NotRequired[int]
|
||||
|
||||
|
||||
class SearchKwargs(PagingKwargs):
|
||||
query: NotRequired[str]
|
||||
|
||||
|
||||
class BrowseKwargs(LookupKwargs, PagingKwargs, total=False):
|
||||
pass
|
||||
|
||||
|
||||
class BrowseReleaseGroupsKwargs(BrowseKwargs, total=False):
|
||||
artist: NotRequired[str]
|
||||
collection: NotRequired[str]
|
||||
release: NotRequired[str]
|
||||
|
||||
|
||||
class BrowseRecordingsKwargs(BrowseReleaseGroupsKwargs, total=False):
|
||||
work: NotRequired[str]
|
||||
|
||||
|
||||
P = ParamSpec("P")
|
||||
R = TypeVar("R")
|
||||
|
||||
|
||||
def require_one_of(*keys: str) -> Callable[[Callable[P, R]], Callable[P, R]]:
|
||||
required = frozenset(keys)
|
||||
|
||||
def deco(func: Callable[P, R]) -> Callable[P, R]:
|
||||
@wraps(func)
|
||||
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||
# kwargs is a real dict at runtime; safe to inspect here
|
||||
if not required & kwargs.keys():
|
||||
required_str = ", ".join(sorted(required))
|
||||
raise ValueError(
|
||||
f"At least one of {required_str} filter is required"
|
||||
)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return deco
|
||||
|
||||
|
||||
@dataclass
|
||||
class MusicBrainzAPI(RequestHandler):
|
||||
"""High-level interface to the MusicBrainz WS/2 API.
|
||||
|
||||
Responsibilities:
|
||||
|
||||
- Configure the API host and request rate from application configuration.
|
||||
- Offer helpers to fetch common entity types and to run searches.
|
||||
- Normalize MusicBrainz responses so relation lists are grouped by target
|
||||
type for easier downstream consumption.
|
||||
|
||||
Documentation: https://musicbrainz.org/doc/MusicBrainz_API
|
||||
"""
|
||||
|
||||
api_host: str = field(init=False)
|
||||
rate_limit: float = field(init=False)
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
mb_config = config["musicbrainz"]
|
||||
mb_config.add(
|
||||
{
|
||||
"host": "musicbrainz.org",
|
||||
"https": False,
|
||||
"ratelimit": 1,
|
||||
"ratelimit_interval": 1,
|
||||
}
|
||||
)
|
||||
|
||||
hostname = mb_config["host"].as_str()
|
||||
if hostname == "musicbrainz.org":
|
||||
self.api_host, self.rate_limit = "https://musicbrainz.org", 1.0
|
||||
else:
|
||||
https = mb_config["https"].get(bool)
|
||||
self.api_host = f"http{'s' if https else ''}://{hostname}"
|
||||
self.rate_limit = (
|
||||
mb_config["ratelimit"].get(int)
|
||||
/ mb_config["ratelimit_interval"].as_number()
|
||||
)
|
||||
|
||||
@cached_property
|
||||
def api_root(self) -> str:
|
||||
return f"{self.api_host}/ws/2"
|
||||
|
||||
def create_session(self) -> LimiterTimeoutSession:
|
||||
return LimiterTimeoutSession(per_second=self.rate_limit)
|
||||
|
||||
def request(self, *args, **kwargs) -> Response:
|
||||
"""Ensure all requests specify JSON response format by default."""
|
||||
kwargs.setdefault("params", {})
|
||||
kwargs["params"]["fmt"] = "json"
|
||||
return super().request(*args, **kwargs)
|
||||
|
||||
def _get_resource(
|
||||
self, resource: str, includes: list[str] | None = None, **kwargs
|
||||
) -> JSONDict:
|
||||
"""Retrieve and normalize data from the API resource endpoint.
|
||||
|
||||
If requested, includes are appended to the request. The response is
|
||||
passed through a normalizer that groups relation entries by their
|
||||
target type so that callers receive a consistently structured mapping.
|
||||
"""
|
||||
if includes:
|
||||
kwargs["inc"] = "+".join(includes)
|
||||
|
||||
return self._group_relations(
|
||||
self.get_json(f"{self.api_root}/{resource}", params=kwargs)
|
||||
)
|
||||
|
||||
def _lookup(
|
||||
self, entity: Entity, id_: str, **kwargs: Unpack[LookupKwargs]
|
||||
) -> JSONDict:
|
||||
return self._get_resource(f"{entity}/{id_}", **kwargs)
|
||||
|
||||
def _browse(self, entity: Entity, **kwargs) -> list[JSONDict]:
|
||||
return self._get_resource(entity, **kwargs).get(f"{entity}s", [])
|
||||
|
||||
def search(
|
||||
self,
|
||||
entity: Entity,
|
||||
filters: dict[str, str],
|
||||
**kwargs: Unpack[SearchKwargs],
|
||||
) -> list[JSONDict]:
|
||||
"""Search for MusicBrainz entities matching the given filters.
|
||||
|
||||
* Query is constructed by combining the provided filters using AND logic
|
||||
* Each filter key-value pair is formatted as 'key:"value"' unless
|
||||
- 'key' is empty, in which case only the value is used, '"value"'
|
||||
- 'value' is empty, in which case the filter is ignored
|
||||
* Values are lowercased and stripped of whitespace.
|
||||
"""
|
||||
query = " AND ".join(
|
||||
":".join(filter(None, (k, f'"{_v}"')))
|
||||
for k, v in filters.items()
|
||||
if (_v := v.lower().strip())
|
||||
)
|
||||
log.debug("Searching for MusicBrainz {}s with: {!r}", entity, query)
|
||||
kwargs["query"] = query
|
||||
return self._get_resource(entity, **kwargs)[f"{entity}s"]
|
||||
|
||||
def get_release(self, id_: str, **kwargs: Unpack[LookupKwargs]) -> JSONDict:
|
||||
"""Retrieve a release by its MusicBrainz ID."""
|
||||
return self._lookup("release", id_, **kwargs)
|
||||
|
||||
def get_recording(
|
||||
self, id_: str, **kwargs: Unpack[LookupKwargs]
|
||||
) -> JSONDict:
|
||||
"""Retrieve a recording by its MusicBrainz ID."""
|
||||
return self._lookup("recording", id_, **kwargs)
|
||||
|
||||
def get_work(self, id_: str, **kwargs: Unpack[LookupKwargs]) -> JSONDict:
|
||||
"""Retrieve a work by its MusicBrainz ID."""
|
||||
return self._lookup("work", id_, **kwargs)
|
||||
|
||||
@require_one_of("artist", "collection", "release", "work")
|
||||
def browse_recordings(
|
||||
self, **kwargs: Unpack[BrowseRecordingsKwargs]
|
||||
) -> list[JSONDict]:
|
||||
"""Browse recordings related to the given entities.
|
||||
|
||||
At least one of artist, collection, release, or work must be provided.
|
||||
"""
|
||||
return self._browse("recording", **kwargs)
|
||||
|
||||
@require_one_of("artist", "collection", "release")
|
||||
def browse_release_groups(
|
||||
self, **kwargs: Unpack[BrowseReleaseGroupsKwargs]
|
||||
) -> list[JSONDict]:
|
||||
"""Browse release groups related to the given entities.
|
||||
|
||||
At least one of artist, collection, or release must be provided.
|
||||
"""
|
||||
return self._get_resource("release-group", **kwargs)["release-groups"]
|
||||
|
||||
@singledispatchmethod
|
||||
@classmethod
|
||||
def _group_relations(cls, data: Any) -> Any:
|
||||
"""Normalize MusicBrainz 'relations' into type-keyed fields recursively.
|
||||
|
||||
This helper rewrites payloads that use a generic 'relations' list into
|
||||
a structure that is easier to consume downstream. When a mapping
|
||||
contains 'relations', those entries are regrouped by their 'target-type'
|
||||
and stored under keys like '<target-type>-relations'. The original
|
||||
'relations' key is removed to avoid ambiguous access patterns.
|
||||
|
||||
The transformation is applied recursively so that nested objects and
|
||||
sequences are normalized consistently, while non-container values are
|
||||
left unchanged.
|
||||
"""
|
||||
return data
|
||||
|
||||
@_group_relations.register(list)
|
||||
@classmethod
|
||||
def _(cls, data: list[Any]) -> list[Any]:
|
||||
return [cls._group_relations(i) for i in data]
|
||||
|
||||
@_group_relations.register(dict)
|
||||
@classmethod
|
||||
def _(cls, data: JSONDict) -> JSONDict:
|
||||
for k, v in list(data.items()):
|
||||
if k == "relations":
|
||||
get_target_type = operator.methodcaller("get", "target-type")
|
||||
for target_type, group in groupby(
|
||||
sorted(v, key=get_target_type), get_target_type
|
||||
):
|
||||
relations = [
|
||||
{k: v for k, v in item.items() if k != "target-type"}
|
||||
for item in group
|
||||
]
|
||||
data[f"{target_type}-relations"] = cls._group_relations(
|
||||
relations
|
||||
)
|
||||
data.pop("relations")
|
||||
else:
|
||||
data[k] = cls._group_relations(v)
|
||||
return data
|
||||
|
||||
|
||||
class MusicBrainzAPIMixin:
|
||||
"""Mixin that provides a cached MusicBrainzAPI helper instance."""
|
||||
|
||||
@cached_property
|
||||
def mb_api(self) -> MusicBrainzAPI:
|
||||
return MusicBrainzAPI()
|
||||
|
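The plugins below consume this helper through the mixin's mb_api property. A minimal usage sketch (illustrative only: the entity names and methods come from the module above, while the filter values, MBID and includes are invented):

from beetsplug._utils.musicbrainz import MusicBrainzAPI

api = MusicBrainzAPI()  # host and rate limit are read from the beets config

# search() lowercases/strips the values and joins the filters with AND; an
# empty key contributes a bare quoted value, so this sends the query
# '"some title" AND release:"some album"'.
recordings = api.search(
    "recording",
    {"": "Some Title", "release": "Some Album"},
    limit=5,
)

# Lookups accept an `includes` list, joined into the `inc` parameter.
release = api.get_release("some-release-mbid", includes=["recordings", "artists"])

# Responses are normalized: a generic 'relations' list is regrouped under
# '<target-type>-relations' keys (e.g. release["url-relations"]), so callers
# never have to filter on 'target-type' themselves.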
|
@ -67,7 +67,7 @@ class TimeoutAndRetrySession(requests.Session, metaclass=SingletonMeta):
|
|||
|
||||
* default beets User-Agent header
|
||||
* default request timeout
|
||||
* automatic retries on transient connection errors
|
||||
* automatic retries on transient connection or server errors
|
||||
* raises exceptions for HTTP error status codes
|
||||
"""
|
||||
|
||||
|
|
@ -75,7 +75,18 @@ class TimeoutAndRetrySession(requests.Session, metaclass=SingletonMeta):
|
|||
super().__init__(*args, **kwargs)
|
||||
self.headers["User-Agent"] = f"beets/{__version__} https://beets.io/"
|
||||
|
||||
retry = Retry(connect=2, total=2, backoff_factor=1)
|
||||
retry = Retry(
|
||||
connect=2,
|
||||
total=2,
|
||||
backoff_factor=1,
|
||||
# Retry on server errors
|
||||
status_forcelist=[
|
||||
HTTPStatus.INTERNAL_SERVER_ERROR,
|
||||
HTTPStatus.BAD_GATEWAY,
|
||||
HTTPStatus.SERVICE_UNAVAILABLE,
|
||||
HTTPStatus.GATEWAY_TIMEOUT,
|
||||
],
|
||||
)
|
||||
adapter = HTTPAdapter(max_retries=retry)
|
||||
self.mount("https://", adapter)
|
||||
self.mount("http://", adapter)
|
||||
|
|
@ -102,18 +113,20 @@ class RequestHandler:
|
|||
subclasses.
|
||||
|
||||
Usage:
|
||||
Subclass and override :class:`RequestHandler.session_type`,
|
||||
Subclass and override :class:`RequestHandler.create_session`,
|
||||
:class:`RequestHandler.explicit_http_errors` or
|
||||
:class:`RequestHandler.status_to_error()` to customize behavior.
|
||||
|
||||
Use
|
||||
* :class:`RequestHandler.get_json()` to get JSON response data
|
||||
* :class:`RequestHandler.get()` to get HTTP response object
|
||||
* :class:`RequestHandler.request()` to invoke arbitrary HTTP methods
|
||||
Use
|
||||
|
||||
Feel free to define common methods that are used in multiple plugins.
|
||||
- :class:`RequestHandler.get_json()` to get JSON response data
|
||||
- :class:`RequestHandler.get()` to get HTTP response object
|
||||
- :class:`RequestHandler.request()` to invoke arbitrary HTTP methods
|
||||
|
||||
Feel free to define common methods that are used in multiple plugins.
|
||||
"""
|
||||
|
||||
#: List of custom exceptions to be raised for specific status codes.
|
||||
explicit_http_errors: ClassVar[list[type[BeetsHTTPError]]] = [
|
||||
HTTPNotFoundError
|
||||
]
|
||||
|
|
@ -127,7 +140,6 @@ class RequestHandler:
|
|||
|
||||
@cached_property
|
||||
def session(self) -> TimeoutAndRetrySession:
|
||||
"""Lazily initialize and cache the HTTP session."""
|
||||
return self.create_session()
|
||||
|
||||
def status_to_error(
|
||||
|
|
@ -155,6 +167,7 @@ class RequestHandler:
|
|||
except requests.exceptions.HTTPError as e:
|
||||
if beets_error := self.status_to_error(e.response.status_code):
|
||||
raise beets_error(response=e.response) from e
|
||||
|
||||
raise
|
||||
|
||||
def request(self, *args, **kwargs) -> requests.Response:
|
||||
|
|
@ -170,6 +183,14 @@ class RequestHandler:
|
|||
"""Perform HTTP GET request with automatic error handling."""
|
||||
return self.request("get", *args, **kwargs)
|
||||
|
||||
def put(self, *args, **kwargs) -> requests.Response:
|
||||
"""Perform HTTP PUT request with automatic error handling."""
|
||||
return self.request("put", *args, **kwargs)
|
||||
|
||||
def delete(self, *args, **kwargs) -> requests.Response:
|
||||
"""Perform HTTP DELETE request with automatic error handling."""
|
||||
return self.request("delete", *args, **kwargs)
|
||||
|
||||
def get_json(self, *args, **kwargs):
|
||||
"""Fetch and parse JSON data from an HTTP endpoint."""
|
||||
return self.get(*args, **kwargs).json()
|
||||
|
|
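The docstring above names the extension points; here is a hedged sketch of a custom handler, modeled on how MusicBrainzAPI uses the class (the service URL and endpoint are placeholders, not a real API):

from dataclasses import dataclass

from beetsplug._utils.requests import RequestHandler, TimeoutAndRetrySession


@dataclass
class ExampleAPI(RequestHandler):
    """Hypothetical JSON web service client."""

    def create_session(self) -> TimeoutAndRetrySession:
        # Override to customize the session; the base class caches the
        # result on the `session` property.
        return TimeoutAndRetrySession()

    def albums(self, query: str):
        # get_json() performs a GET with the default timeout/retry
        # behaviour and raises HTTPNotFoundError for 404 responses.
        return self.get_json("https://example.com/api/albums", params={"q": query})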
|
|||
|
|
@ -17,6 +17,7 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from functools import cached_property, lru_cache
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from beets import config, plugins, ui
|
||||
|
|
@ -26,6 +27,30 @@ if TYPE_CHECKING:
|
|||
from beets.library import Album, Item
|
||||
|
||||
|
||||
DEFAULT_BRACKET_KEYWORDS: tuple[str, ...] = (
|
||||
"abridged",
|
||||
"acapella",
|
||||
"club",
|
||||
"demo",
|
||||
"edit",
|
||||
"edition",
|
||||
"extended",
|
||||
"instrumental",
|
||||
"live",
|
||||
"mix",
|
||||
"radio",
|
||||
"release",
|
||||
"remaster",
|
||||
"remastered",
|
||||
"remix",
|
||||
"rmx",
|
||||
"unabridged",
|
||||
"unreleased",
|
||||
"version",
|
||||
"vip",
|
||||
)
|
||||
|
||||
|
||||
def split_on_feat(
|
||||
artist: str,
|
||||
for_artist: bool = True,
|
||||
|
|
@ -104,6 +129,40 @@ def _album_artist_no_feat(album: Album) -> str:
|
|||
|
||||
|
||||
class FtInTitlePlugin(plugins.BeetsPlugin):
|
||||
@cached_property
|
||||
def bracket_keywords(self) -> list[str]:
|
||||
return self.config["bracket_keywords"].as_str_seq()
|
||||
|
||||
@staticmethod
|
||||
@lru_cache(maxsize=256)
|
||||
def _bracket_position_pattern(keywords: tuple[str, ...]) -> re.Pattern[str]:
|
||||
"""
|
||||
Build a compiled regex to find the first bracketed segment that contains
|
||||
any of the provided keywords.
|
||||
|
||||
Cached by keyword tuple to avoid recompiling on every track/title.
|
||||
"""
|
||||
kw_inner = "|".join(map(re.escape, keywords))
|
||||
|
||||
# If we have keywords, require one of them to appear in the bracket text.
|
||||
# If kw == "" (no keywords), there is no lookahead constraint and any bracket content matches.
|
||||
kw = rf"\b(?={kw_inner})\b" if kw_inner else ""
|
||||
return re.compile(
|
||||
rf"""
|
||||
(?: # non-capturing group for the split
|
||||
\s*? # optional whitespace before brackets
|
||||
(?= # any bracket containing a keyword
|
||||
\([^)]*{kw}.*?\)
|
||||
| \[[^]]*{kw}.*?\]
|
||||
| <[^>]*{kw}.*? >
|
||||
| \{{[^}}]*{kw}.*?\}}
|
||||
| $ # or the end of the string
|
||||
)
|
||||
)
|
||||
""",
|
||||
re.IGNORECASE | re.VERBOSE,
|
||||
)
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
|
||||
|
|
@ -115,6 +174,7 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
|
|||
"keep_in_artist": False,
|
||||
"preserve_album_artist": True,
|
||||
"custom_words": [],
|
||||
"bracket_keywords": list(DEFAULT_BRACKET_KEYWORDS),
|
||||
}
|
||||
)
|
||||
|
||||
|
|
@ -216,8 +276,10 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
|
|||
# artist and if we do not drop featuring information.
|
||||
if not drop_feat and not contains_feat(item.title, custom_words):
|
||||
feat_format = self.config["format"].as_str()
|
||||
new_format = feat_format.format(feat_part)
|
||||
new_title = f"{item.title} {new_format}"
|
||||
formatted = feat_format.format(feat_part)
|
||||
new_title = self.insert_ft_into_title(
|
||||
item.title, formatted, self.bracket_keywords
|
||||
)
|
||||
self._log.info("title: {.title} -> {}", item, new_title)
|
||||
item.title = new_title
|
||||
|
||||
|
|
@ -262,3 +324,28 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
|
|||
item, feat_part, drop_feat, keep_in_artist_field, custom_words
|
||||
)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def find_bracket_position(
|
||||
title: str, keywords: list[str] | None = None
|
||||
) -> int | None:
|
||||
normalized = (
|
||||
DEFAULT_BRACKET_KEYWORDS if keywords is None else tuple(keywords)
|
||||
)
|
||||
pattern = FtInTitlePlugin._bracket_position_pattern(normalized)
|
||||
m: re.Match[str] | None = pattern.search(title)
|
||||
return m.start() if m else None
|
||||
|
||||
@classmethod
|
||||
def insert_ft_into_title(
|
||||
cls, title: str, feat_part: str, keywords: list[str] | None = None
|
||||
) -> str:
|
||||
"""Insert featured artist before the first bracket containing
|
||||
remix/edit keywords if present.
|
||||
"""
|
||||
normalized = (
|
||||
DEFAULT_BRACKET_KEYWORDS if keywords is None else tuple(keywords)
|
||||
)
|
||||
pattern = cls._bracket_position_pattern(normalized)
|
||||
parts = pattern.split(title, maxsplit=1)
|
||||
return f" {feat_part} ".join(parts).strip()
|
||||
|
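A quick illustration of the insertion helper above, mirroring the changelog example (the default bracket keywords are assumed):

# insert_ft_into_title() splits before the first bracket whose contents
# contain a keyword ("remix" here) and joins the parts around the credit.
FtInTitlePlugin.insert_ft_into_title("Song 1 (Carol Remix)", "ft. Bob")
# -> "Song 1 ft. Bob (Carol Remix)"

# Without a keyword bracket the pattern only matches at the end of the
# string, so the credit is simply appended.
FtInTitlePlugin.insert_ft_into_title("Song 2", "ft. Bob")
# -> "Song 2 ft. Bob"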
|
|
|||
|
|
@ -2,15 +2,16 @@
|
|||
|
||||
import datetime
|
||||
|
||||
import musicbrainzngs
|
||||
import requests
|
||||
|
||||
from beets import config, ui
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beetsplug.lastimport import process_tracks
|
||||
|
||||
from ._utils.musicbrainz import MusicBrainzAPIMixin
|
||||
|
||||
class ListenBrainzPlugin(BeetsPlugin):
|
||||
|
||||
class ListenBrainzPlugin(MusicBrainzAPIMixin, BeetsPlugin):
|
||||
"""A Beets plugin for interacting with ListenBrainz."""
|
||||
|
||||
ROOT = "http://api.listenbrainz.org/1/"
|
||||
|
|
@ -129,17 +130,16 @@ class ListenBrainzPlugin(BeetsPlugin):
|
|||
)
|
||||
return tracks
|
||||
|
||||
def get_mb_recording_id(self, track):
|
||||
def get_mb_recording_id(self, track) -> str | None:
|
||||
"""Returns the MusicBrainz recording ID for a track."""
|
||||
resp = musicbrainzngs.search_recordings(
|
||||
query=track["track_metadata"].get("track_name"),
|
||||
release=track["track_metadata"].get("release_name"),
|
||||
strict=True,
|
||||
results = self.mb_api.search(
|
||||
"recording",
|
||||
{
|
||||
"": track["track_metadata"].get("track_name"),
|
||||
"release": track["track_metadata"].get("release_name"),
|
||||
},
|
||||
)
|
||||
if resp.get("recording-count") == "1":
|
||||
return resp.get("recording-list")[0].get("id")
|
||||
else:
|
||||
return None
|
||||
return next((r["id"] for r in results), None)
|
||||
|
||||
def get_playlists_createdfor(self, username):
|
||||
"""Returns a list of playlists created by a user."""
|
||||
|
|
@ -207,17 +207,16 @@ class ListenBrainzPlugin(BeetsPlugin):
|
|||
track_info = []
|
||||
for track in tracks:
|
||||
identifier = track.get("identifier")
|
||||
resp = musicbrainzngs.get_recording_by_id(
|
||||
recording = self.mb_api.get_recording(
|
||||
identifier, includes=["releases", "artist-credits"]
|
||||
)
|
||||
recording = resp.get("recording")
|
||||
title = recording.get("title")
|
||||
artist_credit = recording.get("artist-credit", [])
|
||||
if artist_credit:
|
||||
artist = artist_credit[0].get("artist", {}).get("name")
|
||||
else:
|
||||
artist = None
|
||||
releases = recording.get("release-list", [])
|
||||
releases = recording.get("releases", [])
|
||||
if releases:
|
||||
album = releases[0].get("title")
|
||||
date = releases[0].get("date")
|
||||
|
|
|
|||
|
|
@ -13,48 +13,151 @@
|
|||
# included in all copies or substantial portions of the Software.
|
||||
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from dataclasses import dataclass, field
|
||||
from functools import cached_property
|
||||
from typing import TYPE_CHECKING, ClassVar
|
||||
|
||||
import musicbrainzngs
|
||||
from requests.auth import HTTPDigestAuth
|
||||
|
||||
from beets import config, ui
|
||||
from beets import __version__, config, ui
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beets.ui import Subcommand
|
||||
|
||||
SUBMISSION_CHUNK_SIZE = 200
|
||||
FETCH_CHUNK_SIZE = 100
|
||||
UUID_REGEX = r"^[a-f0-9]{8}(-[a-f0-9]{4}){3}-[a-f0-9]{12}$"
|
||||
from ._utils.musicbrainz import MusicBrainzAPI
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable, Iterator
|
||||
|
||||
from requests import Response
|
||||
|
||||
from beets.importer import ImportSession, ImportTask
|
||||
from beets.library import Album, Library
|
||||
|
||||
from ._typing import JSONDict
|
||||
|
||||
UUID_PAT = re.compile(r"^[a-f0-9]{8}(-[a-f0-9]{4}){3}-[a-f0-9]{12}$")
|
||||
|
||||
|
||||
def mb_call(func, *args, **kwargs):
|
||||
"""Call a MusicBrainz API function and catch exceptions."""
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except musicbrainzngs.AuthenticationError:
|
||||
raise ui.UserError("authentication with MusicBrainz failed")
|
||||
except (musicbrainzngs.ResponseError, musicbrainzngs.NetworkError) as exc:
|
||||
raise ui.UserError(f"MusicBrainz API error: {exc}")
|
||||
except musicbrainzngs.UsageError:
|
||||
raise ui.UserError("MusicBrainz credentials missing")
|
||||
@dataclass
|
||||
class MusicBrainzUserAPI(MusicBrainzAPI):
|
||||
"""MusicBrainz API client with user authentication.
|
||||
|
||||
In order to retrieve private user collections and modify them, we need to
|
||||
authenticate the requests with the user's MusicBrainz credentials.
|
||||
|
||||
def submit_albums(collection_id, release_ids):
|
||||
"""Add all of the release IDs to the indicated collection. Multiple
|
||||
requests are made if there are many release IDs to submit.
|
||||
See documentation for authentication details:
|
||||
https://musicbrainz.org/doc/MusicBrainz_API#Authentication
|
||||
|
||||
Note that the documentation misleadingly states HTTP 'basic' authentication,
|
||||
and I had to reverse-engineer musicbrainzngs to discover that it actually
|
||||
uses HTTP 'digest' authentication.
|
||||
"""
|
||||
for i in range(0, len(release_ids), SUBMISSION_CHUNK_SIZE):
|
||||
chunk = release_ids[i : i + SUBMISSION_CHUNK_SIZE]
|
||||
mb_call(musicbrainzngs.add_releases_to_collection, collection_id, chunk)
|
||||
|
||||
auth: HTTPDigestAuth = field(init=False)
|
||||
|
||||
class MusicBrainzCollectionPlugin(BeetsPlugin):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
def __post_init__(self) -> None:
|
||||
super().__post_init__()
|
||||
config["musicbrainz"]["pass"].redact = True
|
||||
musicbrainzngs.auth(
|
||||
self.auth = HTTPDigestAuth(
|
||||
config["musicbrainz"]["user"].as_str(),
|
||||
config["musicbrainz"]["pass"].as_str(),
|
||||
)
|
||||
|
||||
def request(self, *args, **kwargs) -> Response:
|
||||
"""Authenticate and include required client param in all requests."""
|
||||
kwargs.setdefault("params", {})
|
||||
kwargs["params"]["client"] = f"beets-{__version__}"
|
||||
kwargs["auth"] = self.auth
|
||||
return super().request(*args, **kwargs)
|
||||
|
||||
def browse_collections(self) -> list[JSONDict]:
|
||||
"""Get all collections for the authenticated user."""
|
||||
return self._browse("collection")
|
||||
|
||||
|
||||
@dataclass
|
||||
class MBCollection:
|
||||
"""Representation of a user's MusicBrainz collection.
|
||||
|
||||
Provides convenient, chunked operations for retrieving releases and updating
|
||||
the collection via the MusicBrainz web API. Fetch and submission limits are
|
||||
controlled by class-level constants to avoid oversized requests.
|
||||
"""
|
||||
|
||||
SUBMISSION_CHUNK_SIZE: ClassVar[int] = 200
|
||||
FETCH_CHUNK_SIZE: ClassVar[int] = 100
|
||||
|
||||
data: JSONDict
|
||||
mb_api: MusicBrainzUserAPI
|
||||
|
||||
@property
|
||||
def id(self) -> str:
|
||||
"""Unique identifier assigned to the collection by MusicBrainz."""
|
||||
return self.data["id"]
|
||||
|
||||
@property
|
||||
def release_count(self) -> int:
|
||||
"""Total number of releases recorded in the collection."""
|
||||
return self.data["release-count"]
|
||||
|
||||
@property
|
||||
def releases_url(self) -> str:
|
||||
"""Complete API endpoint URL for listing releases in this collection."""
|
||||
return f"{self.mb_api.api_root}/collection/{self.id}/releases"
|
||||
|
||||
@property
|
||||
def releases(self) -> list[JSONDict]:
|
||||
"""Retrieve all releases in the collection, fetched in successive pages.
|
||||
|
||||
The fetch is performed in chunks and returns a flattened sequence of
|
||||
release records.
|
||||
"""
|
||||
offsets = list(range(0, self.release_count, self.FETCH_CHUNK_SIZE))
|
||||
return [r for offset in offsets for r in self.get_releases(offset)]
|
||||
|
||||
def get_releases(self, offset: int) -> list[JSONDict]:
|
||||
"""Fetch a single page of releases beginning at a given position."""
|
||||
return self.mb_api.get_json(
|
||||
self.releases_url,
|
||||
params={"limit": self.FETCH_CHUNK_SIZE, "offset": offset},
|
||||
)["releases"]
|
||||
|
||||
@classmethod
|
||||
def get_id_chunks(cls, id_list: list[str]) -> Iterator[list[str]]:
|
||||
"""Yield successive sublists of identifiers sized for safe submission.
|
||||
|
||||
Splits a long sequence of identifiers into batches that respect the
|
||||
service's submission limits to avoid oversized requests.
|
||||
"""
|
||||
for i in range(0, len(id_list), cls.SUBMISSION_CHUNK_SIZE):
|
||||
yield id_list[i : i + cls.SUBMISSION_CHUNK_SIZE]
|
||||
|
||||
def add_releases(self, releases: list[str]) -> None:
|
||||
"""Add releases to the collection in batches."""
|
||||
for chunk in self.get_id_chunks(releases):
|
||||
# Need to escape semicolons: https://github.com/psf/requests/issues/6990
|
||||
self.mb_api.put(f"{self.releases_url}/{'%3B'.join(chunk)}")
|
||||
|
||||
def remove_releases(self, releases: list[str]) -> None:
|
||||
"""Remove releases from the collection in chunks."""
|
||||
for chunk in self.get_id_chunks(releases):
|
||||
# Need to escape semicolons: https://github.com/psf/requests/issues/6990
|
||||
self.mb_api.delete(f"{self.releases_url}/{'%3B'.join(chunk)}")
|
||||
|
||||
|
||||
def submit_albums(collection: MBCollection, release_ids):
|
||||
"""Add all of the release IDs to the indicated collection. Multiple
|
||||
requests are made if there are many release IDs to submit.
|
||||
"""
|
||||
collection.add_releases(release_ids)
|
||||
|
||||
|
||||
class MusicBrainzCollectionPlugin(BeetsPlugin):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.config.add(
|
||||
{
|
||||
"auto": False,
|
||||
|
|
@ -65,47 +168,32 @@ class MusicBrainzCollectionPlugin(BeetsPlugin):
|
|||
if self.config["auto"]:
|
||||
self.import_stages = [self.imported]
|
||||
|
||||
def _get_collection(self):
|
||||
collections = mb_call(musicbrainzngs.get_collections)
|
||||
if not collections["collection-list"]:
|
||||
@cached_property
|
||||
def mb_api(self) -> MusicBrainzUserAPI:
|
||||
return MusicBrainzUserAPI()
|
||||
|
||||
@cached_property
|
||||
def collection(self) -> MBCollection:
|
||||
if not (collections := self.mb_api.browse_collections()):
|
||||
raise ui.UserError("no collections exist for user")
|
||||
|
||||
# Get all release collection IDs, avoiding event collections
|
||||
collection_ids = [
|
||||
x["id"]
|
||||
for x in collections["collection-list"]
|
||||
if x["entity-type"] == "release"
|
||||
]
|
||||
if not collection_ids:
|
||||
if not (
|
||||
collection_by_id := {
|
||||
c["id"]: c for c in collections if c["entity-type"] == "release"
|
||||
}
|
||||
):
|
||||
raise ui.UserError("No release collection found.")
|
||||
|
||||
# Check that the collection exists so we can present a nice error
|
||||
collection = self.config["collection"].as_str()
|
||||
if collection:
|
||||
if collection not in collection_ids:
|
||||
raise ui.UserError(f"invalid collection ID: {collection}")
|
||||
return collection
|
||||
if collection_id := self.config["collection"].as_str():
|
||||
if not (collection := collection_by_id.get(collection_id)):
|
||||
raise ui.UserError(f"invalid collection ID: {collection_id}")
|
||||
else:
|
||||
# No specified collection; just use the first release collection
|
||||
collection = next(iter(collection_by_id.values()))
|
||||
|
||||
# No specified collection. Just return the first collection ID
|
||||
return collection_ids[0]
|
||||
|
||||
def _get_albums_in_collection(self, id):
|
||||
def _fetch(offset):
|
||||
res = mb_call(
|
||||
musicbrainzngs.get_releases_in_collection,
|
||||
id,
|
||||
limit=FETCH_CHUNK_SIZE,
|
||||
offset=offset,
|
||||
)["collection"]
|
||||
return [x["id"] for x in res["release-list"]], res["release-count"]
|
||||
|
||||
offset = 0
|
||||
albums_in_collection, release_count = _fetch(offset)
|
||||
for i in range(0, release_count, FETCH_CHUNK_SIZE):
|
||||
albums_in_collection += _fetch(offset)[0]
|
||||
offset += FETCH_CHUNK_SIZE
|
||||
|
||||
return albums_in_collection
|
||||
return MBCollection(collection, self.mb_api)
|
||||
|
||||
def commands(self):
|
||||
mbupdate = Subcommand("mbupdate", help="Update MusicBrainz collection")
|
||||
|
|
@ -120,45 +208,33 @@ class MusicBrainzCollectionPlugin(BeetsPlugin):
|
|||
mbupdate.func = self.update_collection
|
||||
return [mbupdate]
|
||||
|
||||
def remove_missing(self, collection_id, lib_albums):
|
||||
lib_ids = {x.mb_albumid for x in lib_albums}
|
||||
albums_in_collection = self._get_albums_in_collection(collection_id)
|
||||
remove_me = list(set(albums_in_collection) - lib_ids)
|
||||
for i in range(0, len(remove_me), FETCH_CHUNK_SIZE):
|
||||
chunk = remove_me[i : i + FETCH_CHUNK_SIZE]
|
||||
mb_call(
|
||||
musicbrainzngs.remove_releases_from_collection,
|
||||
collection_id,
|
||||
chunk,
|
||||
)
|
||||
|
||||
def update_collection(self, lib, opts, args):
|
||||
def update_collection(self, lib: Library, opts, args) -> None:
|
||||
self.config.set_args(opts)
|
||||
remove_missing = self.config["remove"].get(bool)
|
||||
self.update_album_list(lib, lib.albums(), remove_missing)
|
||||
|
||||
def imported(self, session, task):
|
||||
def imported(self, session: ImportSession, task: ImportTask) -> None:
|
||||
"""Add each imported album to the collection."""
|
||||
if task.is_album:
|
||||
self.update_album_list(session.lib, [task.album])
|
||||
self.update_album_list(
|
||||
session.lib, [task.album], remove_missing=False
|
||||
)
|
||||
|
||||
def update_album_list(self, lib, album_list, remove_missing=False):
|
||||
def update_album_list(
|
||||
self, lib: Library, albums: Iterable[Album], remove_missing: bool
|
||||
) -> None:
|
||||
"""Update the MusicBrainz collection from a list of Beets albums"""
|
||||
collection_id = self._get_collection()
|
||||
collection = self.collection
|
||||
|
||||
# Get a list of all the album IDs.
|
||||
album_ids = []
|
||||
for album in album_list:
|
||||
aid = album.mb_albumid
|
||||
if aid:
|
||||
if re.match(UUID_REGEX, aid):
|
||||
album_ids.append(aid)
|
||||
else:
|
||||
self._log.info("skipping invalid MBID: {}", aid)
|
||||
album_ids = [id_ for a in albums if UUID_PAT.match(id_ := a.mb_albumid)]
|
||||
|
||||
# Submit to MusicBrainz.
|
||||
self._log.info("Updating MusicBrainz collection {}...", collection_id)
|
||||
submit_albums(collection_id, album_ids)
|
||||
self._log.info("Updating MusicBrainz collection {}...", collection.id)
|
||||
collection.add_releases(album_ids)
|
||||
if remove_missing:
|
||||
self.remove_missing(collection_id, lib.albums())
|
||||
lib_ids = {x.mb_albumid for x in lib.albums()}
|
||||
albums_in_collection = {r["id"] for r in collection.releases}
|
||||
collection.remove_releases(list(albums_in_collection - lib_ids))
|
||||
|
||||
self._log.info("...MusicBrainz collection updated.")
|
||||
|
|
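The class-level chunk sizes above bound every request; a short sketch of the batching (the IDs are placeholders):

# get_id_chunks() yields SUBMISSION_CHUNK_SIZE (200) IDs at a time, so 450
# release IDs turn into three PUT requests of 200, 200 and 50 IDs.
ids = [f"id-{i}" for i in range(450)]
assert [len(chunk) for chunk in MBCollection.get_id_chunks(ids)] == [200, 200, 50]

# The `releases` property pages through the collection in FETCH_CHUNK_SIZE
# (100) steps, i.e. a 250-release collection is fetched at offsets 0, 100, 200.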
|
|||
|
|
@ -141,7 +141,7 @@ class MusicBrainzPseudoReleasePlugin(MusicBrainzPlugin):
|
|||
if (ids := self._intercept_mb_release(release)) and (
|
||||
album_id := self._extract_id(ids[0])
|
||||
):
|
||||
raw_pseudo_release = self.api.get_release(album_id)
|
||||
raw_pseudo_release = self.mb_api.get_release(album_id)
|
||||
pseudo_release = super().album_info(raw_pseudo_release)
|
||||
|
||||
if self.config["custom_tags_only"].get(bool):
|
||||
|
|
|
|||
|
|
@ -18,8 +18,7 @@
|
|||
from collections import defaultdict
|
||||
from collections.abc import Iterator
|
||||
|
||||
import musicbrainzngs
|
||||
from musicbrainzngs.musicbrainz import MusicBrainzError
|
||||
import requests
|
||||
|
||||
from beets import config, metadata_plugins
|
||||
from beets.dbcore import types
|
||||
|
|
@ -27,6 +26,8 @@ from beets.library import Album, Item, Library
|
|||
from beets.plugins import BeetsPlugin
|
||||
from beets.ui import Subcommand, print_
|
||||
|
||||
from ._utils.musicbrainz import MusicBrainzAPIMixin
|
||||
|
||||
MB_ARTIST_QUERY = r"mb_albumartistid::^\w{8}-\w{4}-\w{4}-\w{4}-\w{12}$"
|
||||
|
||||
|
||||
|
|
@ -85,7 +86,7 @@ def _item(track_info, album_info, album_id):
|
|||
)
|
||||
|
||||
|
||||
class MissingPlugin(BeetsPlugin):
|
||||
class MissingPlugin(MusicBrainzAPIMixin, BeetsPlugin):
|
||||
"""List missing tracks"""
|
||||
|
||||
album_types = {
|
||||
|
|
@ -189,19 +190,19 @@ class MissingPlugin(BeetsPlugin):
|
|||
calculating_total = self.config["total"].get()
|
||||
for (artist, artist_id), album_ids in album_ids_by_artist.items():
|
||||
try:
|
||||
resp = musicbrainzngs.browse_release_groups(artist=artist_id)
|
||||
except MusicBrainzError as err:
|
||||
resp = self.mb_api.browse_release_groups(artist=artist_id)
|
||||
except requests.exceptions.RequestException:
|
||||
self._log.info(
|
||||
"Couldn't fetch info for artist '{}' ({}) - '{}'",
|
||||
"Couldn't fetch info for artist '{}' ({})",
|
||||
artist,
|
||||
artist_id,
|
||||
err,
|
||||
exc_info=True,
|
||||
)
|
||||
continue
|
||||
|
||||
missing_titles = [
|
||||
f"{artist} - {rg['title']}"
|
||||
for rg in resp["release-group-list"]
|
||||
for rg in resp
|
||||
if rg["id"] not in album_ids
|
||||
]
|
||||
|
||||
|
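To make the comparison concrete, a hedged sketch of what the new code does with the browse result (the release-group payload is invented and abbreviated):

# browse_release_groups() returns a list of release-group dicts; only the
# "id" and "title" keys are used here.
release_groups = [
    {"id": "rg-1", "title": "First Album"},
    {"id": "rg-2", "title": "Second Album"},
]
album_ids = {"rg-1"}  # release groups already present in the library

missing_titles = [
    f"Some Artist - {rg['title']}"
    for rg in release_groups
    if rg["id"] not in album_ids
]
# -> ["Some Artist - Second Album"]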
|
|
|||
|
|
@ -16,17 +16,14 @@
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
import operator
|
||||
from collections import Counter
|
||||
from contextlib import suppress
|
||||
from dataclasses import dataclass
|
||||
from functools import cached_property, singledispatchmethod
|
||||
from itertools import groupby, product
|
||||
from functools import cached_property
|
||||
from itertools import product
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from confuse.exceptions import NotFoundError
|
||||
from requests_ratelimiter import LimiterMixin
|
||||
|
||||
import beets
|
||||
import beets.autotag.hooks
|
||||
|
|
@ -35,11 +32,8 @@ from beets.metadata_plugins import MetadataSourcePlugin
|
|||
from beets.util.deprecation import deprecate_for_user
|
||||
from beets.util.id_extractors import extract_release_id
|
||||
|
||||
from ._utils.requests import (
|
||||
HTTPNotFoundError,
|
||||
RequestHandler,
|
||||
TimeoutAndRetrySession,
|
||||
)
|
||||
from ._utils.musicbrainz import MusicBrainzAPIMixin
|
||||
from ._utils.requests import HTTPNotFoundError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable, Sequence
|
||||
|
|
@ -103,86 +97,6 @@ BROWSE_CHUNKSIZE = 100
|
|||
BROWSE_MAXTRACKS = 500
|
||||
|
||||
|
||||
class LimiterTimeoutSession(LimiterMixin, TimeoutAndRetrySession):
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class MusicBrainzAPI(RequestHandler):
|
||||
api_host: str
|
||||
rate_limit: float
|
||||
|
||||
def create_session(self) -> LimiterTimeoutSession:
|
||||
return LimiterTimeoutSession(per_second=self.rate_limit)
|
||||
|
||||
def get_entity(
|
||||
self, entity: str, inc_list: list[str] | None = None, **kwargs
|
||||
) -> JSONDict:
|
||||
if inc_list:
|
||||
kwargs["inc"] = "+".join(inc_list)
|
||||
|
||||
return self._group_relations(
|
||||
self.get_json(
|
||||
f"{self.api_host}/ws/2/{entity}",
|
||||
params={**kwargs, "fmt": "json"},
|
||||
)
|
||||
)
|
||||
|
||||
def get_release(self, id_: str) -> JSONDict:
|
||||
return self.get_entity(f"release/{id_}", inc_list=RELEASE_INCLUDES)
|
||||
|
||||
def get_recording(self, id_: str) -> JSONDict:
|
||||
return self.get_entity(f"recording/{id_}", inc_list=TRACK_INCLUDES)
|
||||
|
||||
def browse_recordings(self, **kwargs) -> list[JSONDict]:
|
||||
kwargs.setdefault("limit", BROWSE_CHUNKSIZE)
|
||||
kwargs.setdefault("inc_list", BROWSE_INCLUDES)
|
||||
return self.get_entity("recording", **kwargs)["recordings"]
|
||||
|
||||
@singledispatchmethod
|
||||
@classmethod
|
||||
def _group_relations(cls, data: Any) -> Any:
|
||||
"""Normalize MusicBrainz 'relations' into type-keyed fields recursively.
|
||||
|
||||
This helper rewrites payloads that use a generic 'relations' list into
|
||||
a structure that is easier to consume downstream. When a mapping
|
||||
contains 'relations', those entries are regrouped by their 'target-type'
|
||||
and stored under keys like '<target-type>-relations'. The original
|
||||
'relations' key is removed to avoid ambiguous access patterns.
|
||||
|
||||
The transformation is applied recursively so that nested objects and
|
||||
sequences are normalized consistently, while non-container values are
|
||||
left unchanged.
|
||||
"""
|
||||
return data
|
||||
|
||||
@_group_relations.register(list)
|
||||
@classmethod
|
||||
def _(cls, data: list[Any]) -> list[Any]:
|
||||
return [cls._group_relations(i) for i in data]
|
||||
|
||||
@_group_relations.register(dict)
|
||||
@classmethod
|
||||
def _(cls, data: JSONDict) -> JSONDict:
|
||||
for k, v in list(data.items()):
|
||||
if k == "relations":
|
||||
get_target_type = operator.methodcaller("get", "target-type")
|
||||
for target_type, group in groupby(
|
||||
sorted(v, key=get_target_type), get_target_type
|
||||
):
|
||||
relations = [
|
||||
{k: v for k, v in item.items() if k != "target-type"}
|
||||
for item in group
|
||||
]
|
||||
data[f"{target_type}-relations"] = cls._group_relations(
|
||||
relations
|
||||
)
|
||||
data.pop("relations")
|
||||
else:
|
||||
data[k] = cls._group_relations(v)
|
||||
return data
|
||||
|
||||
|
||||
def _preferred_alias(
|
||||
aliases: list[JSONDict], languages: list[str] | None = None
|
||||
) -> JSONDict | None:
|
||||
|
|
@ -405,25 +319,11 @@ def _merge_pseudo_and_actual_album(
|
|||
return merged
|
||||
|
||||
|
||||
class MusicBrainzPlugin(MetadataSourcePlugin):
|
||||
class MusicBrainzPlugin(MusicBrainzAPIMixin, MetadataSourcePlugin):
|
||||
@cached_property
|
||||
def genres_field(self) -> str:
|
||||
return f"{self.config['genres_tag'].as_choice(['genre', 'tag'])}s"
|
||||
|
||||
@cached_property
|
||||
def api(self) -> MusicBrainzAPI:
|
||||
hostname = self.config["host"].as_str()
|
||||
if hostname == "musicbrainz.org":
|
||||
hostname, rate_limit = "https://musicbrainz.org", 1.0
|
||||
else:
|
||||
https = self.config["https"].get(bool)
|
||||
hostname = f"http{'s' if https else ''}://{hostname}"
|
||||
rate_limit = (
|
||||
self.config["ratelimit"].get(int)
|
||||
/ self.config["ratelimit_interval"].as_number()
|
||||
)
|
||||
return MusicBrainzAPI(hostname, rate_limit)
|
||||
|
||||
def __init__(self):
|
||||
"""Set up the python-musicbrainz-ngs module according to settings
|
||||
from the beets configuration. This should be called at startup.
|
||||
|
|
@ -431,10 +331,6 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
super().__init__()
|
||||
self.config.add(
|
||||
{
|
||||
"host": "musicbrainz.org",
|
||||
"https": False,
|
||||
"ratelimit": 1,
|
||||
"ratelimit_interval": 1,
|
||||
"genres": False,
|
||||
"genres_tag": "genre",
|
||||
"external_ids": {
|
||||
|
|
@ -589,7 +485,9 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
for i in range(0, ntracks, BROWSE_CHUNKSIZE):
|
||||
self._log.debug("Retrieving tracks starting at {}", i)
|
||||
recording_list.extend(
|
||||
self.api.browse_recordings(release=release["id"], offset=i)
|
||||
self.mb_api.browse_recordings(
|
||||
release=release["id"], offset=i
|
||||
)
|
||||
)
|
||||
track_map = {r["id"]: r for r in recording_list}
|
||||
for medium in release["media"]:
|
||||
|
|
@ -853,17 +751,9 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
using the provided criteria. Handles API errors by converting them into
|
||||
MusicBrainzAPIError exceptions with contextual information.
|
||||
"""
|
||||
query = " AND ".join(
|
||||
f'{k}:"{_v}"'
|
||||
for k, v in filters.items()
|
||||
if (_v := v.lower().strip())
|
||||
return self.mb_api.search(
|
||||
query_type, filters, limit=self.config["search_limit"].get()
|
||||
)
|
||||
self._log.debug(
|
||||
"Searching for MusicBrainz {}s with: {!r}", query_type, query
|
||||
)
|
||||
return self.api.get_entity(
|
||||
query_type, query=query, limit=self.config["search_limit"].get()
|
||||
)[f"{query_type}s"]
|
||||
|
||||
def candidates(
|
||||
self,
|
||||
|
|
@ -901,7 +791,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
self._log.debug("Invalid MBID ({}).", album_id)
|
||||
return None
|
||||
|
||||
res = self.api.get_release(albumid)
|
||||
res = self.mb_api.get_release(albumid, includes=RELEASE_INCLUDES)
|
||||
|
||||
# resolve linked release relations
|
||||
actual_res = None
|
||||
|
|
@ -914,7 +804,9 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
rel["type"] == "transl-tracklisting"
|
||||
and rel["direction"] == "backward"
|
||||
):
|
||||
actual_res = self.api.get_release(rel["release"]["id"])
|
||||
actual_res = self.mb_api.get_release(
|
||||
rel["release"]["id"], includes=RELEASE_INCLUDES
|
||||
)
|
||||
|
||||
# release is potentially a pseudo release
|
||||
release = self.album_info(res)
|
||||
|
|
@ -937,6 +829,8 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
return None
|
||||
|
||||
with suppress(HTTPNotFoundError):
|
||||
return self.track_info(self.api.get_recording(trackid))
|
||||
return self.track_info(
|
||||
self.mb_api.get_recording(trackid, includes=TRACK_INCLUDES)
|
||||
)
|
||||
|
||||
return None
|
||||
|
|
|
|||
|
|
@ -16,59 +16,19 @@
|
|||
and work composition date
|
||||
"""
|
||||
|
||||
import musicbrainzngs
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import requests
|
||||
|
||||
from beets import ui
|
||||
from beets.plugins import BeetsPlugin
|
||||
|
||||
|
||||
def direct_parent_id(mb_workid, work_date=None):
|
||||
"""Given a Musicbrainz work id, find the id one of the works the work is
|
||||
part of and the first composition date it encounters.
|
||||
"""
|
||||
work_info = musicbrainzngs.get_work_by_id(
|
||||
mb_workid, includes=["work-rels", "artist-rels"]
|
||||
)
|
||||
if "artist-relation-list" in work_info["work"] and work_date is None:
|
||||
for artist in work_info["work"]["artist-relation-list"]:
|
||||
if artist["type"] == "composer":
|
||||
if "end" in artist.keys():
|
||||
work_date = artist["end"]
|
||||
|
||||
if "work-relation-list" in work_info["work"]:
|
||||
for direct_parent in work_info["work"]["work-relation-list"]:
|
||||
if (
|
||||
direct_parent["type"] == "parts"
|
||||
and direct_parent.get("direction") == "backward"
|
||||
):
|
||||
direct_id = direct_parent["work"]["id"]
|
||||
return direct_id, work_date
|
||||
return None, work_date
|
||||
from ._utils.musicbrainz import MusicBrainzAPIMixin
|
||||
|
||||
|
||||
def work_parent_id(mb_workid):
|
||||
"""Find the parent work id and composition date of a work given its id."""
|
||||
work_date = None
|
||||
while True:
|
||||
new_mb_workid, work_date = direct_parent_id(mb_workid, work_date)
|
||||
if not new_mb_workid:
|
||||
return mb_workid, work_date
|
||||
mb_workid = new_mb_workid
|
||||
return mb_workid, work_date
|
||||
|
||||
|
||||
def find_parentwork_info(mb_workid):
|
||||
"""Get the MusicBrainz information dict about a parent work, including
|
||||
the artist relations, and the composition date for a work's parent work.
|
||||
"""
|
||||
parent_id, work_date = work_parent_id(mb_workid)
|
||||
work_info = musicbrainzngs.get_work_by_id(
|
||||
parent_id, includes=["artist-rels"]
|
||||
)
|
||||
return work_info, work_date
|
||||
|
||||
|
||||
class ParentWorkPlugin(BeetsPlugin):
|
||||
class ParentWorkPlugin(MusicBrainzAPIMixin, BeetsPlugin):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
||||
|
|
@ -130,14 +90,13 @@ class ParentWorkPlugin(BeetsPlugin):
|
|||
parentwork_info = {}
|
||||
|
||||
composer_exists = False
|
||||
if "artist-relation-list" in work_info["work"]:
|
||||
for artist in work_info["work"]["artist-relation-list"]:
|
||||
if artist["type"] == "composer":
|
||||
composer_exists = True
|
||||
parent_composer.append(artist["artist"]["name"])
|
||||
parent_composer_sort.append(artist["artist"]["sort-name"])
|
||||
if "end" in artist.keys():
|
||||
parentwork_info["parentwork_date"] = artist["end"]
|
||||
for artist in work_info.get("artist-relations", []):
|
||||
if artist["type"] == "composer":
|
||||
composer_exists = True
|
||||
parent_composer.append(artist["artist"]["name"])
|
||||
parent_composer_sort.append(artist["artist"]["sort-name"])
|
||||
if "end" in artist.keys():
|
||||
parentwork_info["parentwork_date"] = artist["end"]
|
||||
|
||||
parentwork_info["parent_composer"] = ", ".join(parent_composer)
|
||||
parentwork_info["parent_composer_sort"] = ", ".join(
|
||||
|
|
@ -149,16 +108,14 @@ class ParentWorkPlugin(BeetsPlugin):
|
|||
"no composer for {}; add one at "
|
||||
"https://musicbrainz.org/work/{}",
|
||||
item,
|
||||
work_info["work"]["id"],
|
||||
work_info["id"],
|
||||
)
|
||||
|
||||
parentwork_info["parentwork"] = work_info["work"]["title"]
|
||||
parentwork_info["mb_parentworkid"] = work_info["work"]["id"]
|
||||
parentwork_info["parentwork"] = work_info["title"]
|
||||
parentwork_info["mb_parentworkid"] = work_info["id"]
|
||||
|
||||
if "disambiguation" in work_info["work"]:
|
||||
parentwork_info["parentwork_disambig"] = work_info["work"][
|
||||
"disambiguation"
|
||||
]
|
||||
if "disambiguation" in work_info:
|
||||
parentwork_info["parentwork_disambig"] = work_info["disambiguation"]
|
||||
|
||||
else:
|
||||
parentwork_info["parentwork_disambig"] = None
|
||||
|
|
@ -190,9 +147,9 @@ class ParentWorkPlugin(BeetsPlugin):
|
|||
work_changed = item.parentwork_workid_current != item.mb_workid
|
||||
if force or not hasparent or work_changed:
|
||||
try:
|
||||
work_info, work_date = find_parentwork_info(item.mb_workid)
|
||||
except musicbrainzngs.musicbrainz.WebServiceError as e:
|
||||
self._log.debug("error fetching work: {}", e)
|
||||
work_info, work_date = self.find_parentwork_info(item.mb_workid)
|
||||
except requests.exceptions.RequestException:
|
||||
self._log.debug("error fetching work", item, exc_info=True)
|
||||
return
|
||||
parent_info = self.get_info(item, work_info)
|
||||
parent_info["parentwork_workid_current"] = item.mb_workid
|
||||
|
|
@ -233,3 +190,37 @@ class ParentWorkPlugin(BeetsPlugin):
|
|||
"parentwork_date",
|
||||
],
|
||||
)
|
||||
|
||||
def find_parentwork_info(
|
||||
self, mb_workid: str
|
||||
) -> tuple[dict[str, Any], str | None]:
|
||||
"""Get the MusicBrainz information dict about a parent work, including
|
||||
the artist relations, and the composition date for a work's parent work.
|
||||
"""
|
||||
work_date = None
|
||||
|
||||
parent_id: str | None = mb_workid
|
||||
|
||||
while parent_id:
|
||||
current_id = parent_id
|
||||
work_info = self.mb_api.get_work(
|
||||
current_id, includes=["work-rels", "artist-rels"]
|
||||
)
|
||||
work_date = work_date or next(
|
||||
(
|
||||
end
|
||||
for a in work_info.get("artist-relations", [])
|
||||
if a["type"] == "composer" and (end := a.get("end"))
|
||||
),
|
||||
None,
|
||||
)
|
||||
parent_id = next(
|
||||
(
|
||||
w["work"]["id"]
|
||||
for w in work_info.get("work-relations", [])
|
||||
if w["type"] == "parts" and w["direction"] == "backward"
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
return work_info, work_date
|
||||
|
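A hedged illustration of the relation walk above, using an invented two-level work hierarchy:

# Invented payloads: a movement whose 'parts'/'backward' relation points at
# the full work, which in turn carries a composer end date.
movement = {
    "id": "movement-id",
    "title": "Movement I",
    "artist-relations": [],
    "work-relations": [
        {"type": "parts", "direction": "backward", "work": {"id": "parent-id"}},
    ],
}
parent = {
    "id": "parent-id",
    "title": "Full Work",
    "artist-relations": [
        {"type": "composer", "end": "1824", "artist": {"name": "Some Composer"}},
    ],
    "work-relations": [],
}

# Starting from "movement-id", the loop fetches the movement, follows the
# backward 'parts' relation to "parent-id", records the first composer end
# date it sees ("1824"), finds no further parent and returns the parent
# payload together with that date.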
|
|
|||
11  docs/_templates/autosummary/class.rst  vendored
|
|
@ -25,3 +25,14 @@
|
|||
{% endblock %}
|
||||
|
||||
.. rubric:: {{ _('Methods definition') }}
|
||||
|
||||
{% if objname in related_typeddicts %}
|
||||
Related TypedDicts
|
||||
------------------
|
||||
|
||||
{% for typeddict in related_typeddicts[objname] %}
|
||||
.. autotypeddict:: {{ typeddict }}
|
||||
:show-inheritance:
|
||||
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
|
|
|||
|
|
@ -6,4 +6,5 @@ API Reference
|
|||
:titlesonly:
|
||||
|
||||
plugins
|
||||
plugin_utilities
|
||||
database
|
||||
|
|
|
|||
16  docs/api/plugin_utilities.rst  Normal file
|
|
@ -0,0 +1,16 @@
|
|||
Plugin Utilities
|
||||
================
|
||||
|
||||
.. currentmodule:: beetsplug._utils.requests
|
||||
|
||||
.. autosummary::
|
||||
:toctree: generated/
|
||||
|
||||
RequestHandler
|
||||
|
||||
.. currentmodule:: beetsplug._utils.musicbrainz
|
||||
|
||||
.. autosummary::
|
||||
:toctree: generated/
|
||||
|
||||
MusicBrainzAPI
|
||||
|
|
@ -20,23 +20,30 @@ New features:
|
|||
- :doc:`plugins/ftintitle`: Added an argument to skip processing when the artist
|
||||
and album artist are the same.
|
||||
- :doc:`plugins/play`: Added `$playlist` marker to precisely edit the playlist
|
||||
filepath into the command calling the player program.
|
||||
- :doc:`plugins/lastgenre`: For tuning plugin settings ``-vvv`` can be passed
|
||||
to receive extra verbose logging around last.fm results and how they are
|
||||
resolved. The ``extended_debug`` config setting and ``--debug`` option
|
||||
have been removed.
|
||||
filepath into the command calling the player program.
|
||||
- :doc:`plugins/lastgenre`: For tuning plugin settings ``-vvv`` can be passed to
|
||||
receive extra verbose logging around last.fm results and how they are
|
||||
resolved. The ``extended_debug`` config setting and ``--debug`` option have
|
||||
been removed.
|
||||
- :doc:`plugins/importsource`: Added new plugin that tracks original import
|
||||
paths and optionally suggests removing source files when items are removed
|
||||
from the library.
|
||||
- :doc:`plugins/mbpseudo`: Add a new `mbpseudo` plugin to proactively receive
|
||||
MusicBrainz pseudo-releases as recommendations during import.
|
||||
MusicBrainz pseudo-releases as recommendations during import.
|
||||
- Added support for Python 3.13.
|
||||
- :doc:`/plugins/convert`: ``force`` can be passed to override checks like
|
||||
no_convert, never_convert_lossy_files, same format, and max_bitrate.
|
||||
- :doc:`plugins/titlecase`: Add the `titlecase` plugin to allow users to
|
||||
resolve differences in metadata source styles.
|
||||
- :doc:`plugins/titlecase`: Add the `titlecase` plugin to allow users to resolve
|
||||
differences in metadata source styles.
|
||||
- :doc:`plugins/spotify`: Added support for multi-artist albums and tracks,
|
||||
saving all contributing artists to the respective fields.
|
||||
saving all contributing artists to the respective fields.
|
||||
- :doc:`plugins/ftintitle`: Featured artists are now inserted before brackets
|
||||
containing remix/edit-related keywords (e.g., "Remix", "Live", "Edit") instead
|
||||
of being appended at the end. This improves formatting for titles like "Song 1
|
||||
(Carol Remix) ft. Bob" which becomes "Song 1 ft. Bob (Carol Remix)". A variety
|
||||
of brackets are supported and a new ``bracket_keywords`` configuration option
|
||||
allows customizing the keywords. Setting ``bracket_keywords`` to an empty list
|
||||
matches any bracket content regardless of keywords.
|
||||
- :doc:`plugins/discogs`: Added support for multi value fields. :bug:`6068`
|
||||
|
||||
Bug fixes:
|
||||
|
|
@ -87,10 +94,26 @@ For plugin developers:
|
|||
- A new plugin event, ``album_matched``, is sent when an album that is being
|
||||
imported has been matched to its metadata and the corresponding distance has
|
||||
been calculated.
|
||||
- Added a reusable requests handler which can be used by plugins to make HTTP
|
||||
requests with built-in retry and backoff logic. It uses the beets User-Agent and
|
||||
configures timeouts. See :class:`~beetsplug._utils.requests.RequestHandler`
|
||||
for documentation.
|
||||
- Replaced dependency on ``python-musicbrainzngs`` with a lightweight custom
|
||||
MusicBrainz client implementation and updated relevant plugins accordingly:
|
||||
|
||||
- :doc:`plugins/listenbrainz`
|
||||
- :doc:`plugins/mbcollection`
|
||||
- :doc:`plugins/mbpseudo`
|
||||
- :doc:`plugins/missing`
|
||||
- :doc:`plugins/musicbrainz`
|
||||
- :doc:`plugins/parentwork`
|
||||
|
||||
See :class:`~beetsplug._utils.musicbrainz.MusicBrainzAPI` for documentation.
|
||||
|
||||
For packagers:
|
||||
|
||||
- The minimum supported Python version is now 3.10.
|
||||
- An unused dependency on ``mock`` has been removed.
|
||||
|
||||
Other changes:
|
||||
|
||||
|
|
|
|||
13  docs/conf.py
|
|
@ -32,9 +32,22 @@ extensions = [
|
|||
"sphinx_design",
|
||||
"sphinx_copybutton",
|
||||
"conf",
|
||||
"sphinx_toolbox.more_autodoc.autotypeddict",
|
||||
]
|
||||
|
||||
autosummary_generate = True
|
||||
autosummary_context = {
|
||||
"related_typeddicts": {
|
||||
"MusicBrainzAPI": [
|
||||
"beetsplug._utils.musicbrainz.LookupKwargs",
|
||||
"beetsplug._utils.musicbrainz.SearchKwargs",
|
||||
"beetsplug._utils.musicbrainz.BrowseKwargs",
|
||||
"beetsplug._utils.musicbrainz.BrowseRecordingsKwargs",
|
||||
"beetsplug._utils.musicbrainz.BrowseReleaseGroupsKwargs",
|
||||
],
|
||||
}
|
||||
}
|
||||
autodoc_member_order = "bysource"
|
||||
exclude_patterns = ["_build"]
|
||||
templates_path = ["_templates"]
|
||||
source_suffix = {".rst": "restructuredtext", ".md": "markdown"}
|
||||
|
|
|
|||
|
|
@@ -32,6 +32,18 @@ file. The available options are:
  skip the ftintitle processing. Default: ``yes``.
- **custom_words**: List of additional words that will be treated as a marker
  for artist features. Default: ``[]``.
- **bracket_keywords**: Controls where the featuring text is inserted when the
  title includes bracketed qualifiers such as ``(Remix)`` or ``[Live]``.
  FtInTitle inserts the new text before the first bracket whose contents match
  any of these keywords. Supply a list of words to fine-tune the behavior, or
  set the list to ``[]`` to match *any* bracket regardless of its contents. A
  usage sketch follows this hunk. Default:

  ::

      ["abridged", "acapella", "club", "demo", "edit", "edition", "extended",
       "instrumental", "live", "mix", "radio", "release", "remaster",
       "remastered", "remix", "rmx", "unabridged", "unreleased",
       "version", "vip"]

Path Template Values
--------------------
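
As referenced in the ``bracket_keywords`` entry above, a small sketch of the
insertion behaviour using the helper exercised by the plugin's tests later in
this diff; the titles are illustrative.

.. code-block:: python

    from beetsplug import ftintitle

    # Default keywords: the credit moves before the matching "(… Remix)" bracket.
    ftintitle.FtInTitlePlugin.insert_ft_into_title(
        "Song 1 (Carol Remix)", "ft. Bob", None
    )
    # -> "Song 1 ft. Bob (Carol Remix)"

    # A custom keyword list that does not match: the credit is appended at the end.
    ftintitle.FtInTitlePlugin.insert_ft_into_title(
        "Song 1 (Carol Remix)", "ft. Bob", ["live"]
    )
    # -> "Song 1 (Carol Remix) ft. Bob"
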
@@ -6,15 +6,16 @@ ListenBrainz Plugin
The ListenBrainz plugin for beets allows you to interact with the ListenBrainz
service.

Installation
------------
Configuration
-------------

To use the ``listenbrainz`` plugin, first enable it in your configuration (see
:ref:`using-plugins`). Then, install ``beets`` with ``listenbrainz`` extra
To enable the ListenBrainz plugin, add the following to your beets configuration
file (config.yaml_):

.. code-block:: bash
.. code-block:: yaml

    pip install "beets[listenbrainz]"
    plugins:
      - listenbrainz

You can then configure the plugin by providing your Listenbrainz token (see
instructions here_) and username:
@@ -6,18 +6,9 @@ maintain your `music collection`_ list there.

.. _music collection: https://musicbrainz.org/doc/Collections

Installation
------------

To use the ``mbcollection`` plugin, first enable it in your configuration (see
:ref:`using-plugins`). Then, install ``beets`` with ``mbcollection`` extra

.. code-block:: bash

    pip install "beets[mbcollection]"

Then, add your MusicBrainz username and password to your :doc:`configuration
file </reference/config>` under a ``musicbrainz`` section:
To begin, just enable the ``mbcollection`` plugin in your configuration (see
:ref:`using-plugins`). Then, add your MusicBrainz username and password to your
:doc:`configuration file </reference/config>` under a ``musicbrainz`` section:

::

@@ -5,16 +5,6 @@ This plugin adds a new command, ``missing`` or ``miss``, which finds and lists
missing tracks for albums in your collection. Each album requires one network
call to the album data source.

Installation
------------

To use the ``missing`` plugin, first enable it in your configuration (see
:ref:`using-plugins`). Then, install ``beets`` with ``missing`` extra

.. code-block:: bash

    pip install "beets[missing]"

Usage
-----

@@ -38,16 +38,6 @@ This plugin adds seven tags:
  to keep track of recordings whose works have changed.
- **parentwork_date**: The composition date of the parent work.

Installation
------------

To use the ``parentwork`` plugin, first enable it in your configuration (see
:ref:`using-plugins`). Then, install ``beets`` with ``parentwork`` extra

.. code-block:: bash

    pip install "beets[parentwork]"

Configuration
-------------

437 poetry.lock generated

@ -49,6 +49,42 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
|
|||
[package.extras]
|
||||
trio = ["trio (>=0.31.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "apeye"
|
||||
version = "1.4.1"
|
||||
description = "Handy tools for working with URLs and APIs."
|
||||
optional = true
|
||||
python-versions = ">=3.6.1"
|
||||
files = [
|
||||
{file = "apeye-1.4.1-py3-none-any.whl", hash = "sha256:44e58a9104ec189bf42e76b3a7fe91e2b2879d96d48e9a77e5e32ff699c9204e"},
|
||||
{file = "apeye-1.4.1.tar.gz", hash = "sha256:14ea542fad689e3bfdbda2189a354a4908e90aee4bf84c15ab75d68453d76a36"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
apeye-core = ">=1.0.0b2"
|
||||
domdf-python-tools = ">=2.6.0"
|
||||
platformdirs = ">=2.3.0"
|
||||
requests = ">=2.24.0"
|
||||
|
||||
[package.extras]
|
||||
all = ["cachecontrol[filecache] (>=0.12.6)", "lockfile (>=0.12.2)"]
|
||||
limiter = ["cachecontrol[filecache] (>=0.12.6)", "lockfile (>=0.12.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "apeye-core"
|
||||
version = "1.1.5"
|
||||
description = "Core (offline) functionality for the apeye library."
|
||||
optional = true
|
||||
python-versions = ">=3.6.1"
|
||||
files = [
|
||||
{file = "apeye_core-1.1.5-py3-none-any.whl", hash = "sha256:dc27a93f8c9e246b3b238c5ea51edf6115ab2618ef029b9f2d9a190ec8228fbf"},
|
||||
{file = "apeye_core-1.1.5.tar.gz", hash = "sha256:5de72ed3d00cc9b20fea55e54b7ab8f5ef8500eb33a5368bc162a5585e238a55"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
domdf-python-tools = ">=2.6.0"
|
||||
idna = ">=2.5"
|
||||
|
||||
[[package]]
|
||||
name = "appdirs"
|
||||
version = "1.4.4"
|
||||
|
|
@ -138,6 +174,20 @@ gi = ["pygobject (>=3.54.2,<4.0.0)"]
|
|||
mad = ["pymad[mad] (>=0.11.3,<0.12.0)"]
|
||||
test = ["pytest (>=8.4.2)", "pytest-cov (>=7.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "autodocsumm"
|
||||
version = "0.2.14"
|
||||
description = "Extended sphinx autodoc including automatic autosummaries"
|
||||
optional = true
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "autodocsumm-0.2.14-py3-none-any.whl", hash = "sha256:3bad8717fc5190802c60392a7ab04b9f3c97aa9efa8b3780b3d81d615bfe5dc0"},
|
||||
{file = "autodocsumm-0.2.14.tar.gz", hash = "sha256:2839a9d4facc3c4eccd306c08695540911042b46eeafcdc3203e6d0bab40bc77"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
Sphinx = ">=4.0,<9.0"
|
||||
|
||||
[[package]]
|
||||
name = "babel"
|
||||
version = "2.17.0"
|
||||
|
|
@ -405,6 +455,27 @@ files = [
|
|||
[package.dependencies]
|
||||
cffi = ">=1.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "cachecontrol"
|
||||
version = "0.14.4"
|
||||
description = "httplib2 caching for requests"
|
||||
optional = true
|
||||
python-versions = ">=3.10"
|
||||
files = [
|
||||
{file = "cachecontrol-0.14.4-py3-none-any.whl", hash = "sha256:b7ac014ff72ee199b5f8af1de29d60239954f223e948196fa3d84adaffc71d2b"},
|
||||
{file = "cachecontrol-0.14.4.tar.gz", hash = "sha256:e6220afafa4c22a47dd0badb319f84475d79108100d04e26e8542ef7d3ab05a1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
filelock = {version = ">=3.8.0", optional = true, markers = "extra == \"filecache\""}
|
||||
msgpack = ">=0.5.2,<2.0.0"
|
||||
requests = ">=2.16.0"
|
||||
|
||||
[package.extras]
|
||||
dev = ["cachecontrol[filecache,redis]", "cheroot (>=11.1.2)", "cherrypy", "codespell", "furo", "mypy", "pytest", "pytest-cov", "ruff", "sphinx", "sphinx-copybutton", "types-redis", "types-requests"]
|
||||
filecache = ["filelock (>=3.8.0)"]
|
||||
redis = ["redis (>=2.10.5)"]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2025.10.5"
|
||||
|
|
@ -795,6 +866,24 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1
|
|||
[package.extras]
|
||||
toml = ["tomli"]
|
||||
|
||||
[[package]]
|
||||
name = "cssutils"
|
||||
version = "2.11.1"
|
||||
description = "A CSS Cascading Style Sheets library for Python"
|
||||
optional = true
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "cssutils-2.11.1-py3-none-any.whl", hash = "sha256:a67bfdfdff4f3867fab43698ec4897c1a828eca5973f4073321b3bccaf1199b1"},
|
||||
{file = "cssutils-2.11.1.tar.gz", hash = "sha256:0563a76513b6af6eebbe788c3bf3d01c920e46b3f90c8416738c5cfc773ff8e2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
more-itertools = "*"
|
||||
|
||||
[package.extras]
|
||||
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
|
||||
test = ["cssselect", "importlib-resources", "jaraco.test (>=5.1)", "lxml", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "dbus-python"
|
||||
version = "1.4.0"
|
||||
|
|
@ -820,6 +909,21 @@ files = [
|
|||
{file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dict2css"
|
||||
version = "0.3.0.post1"
|
||||
description = "A μ-library for constructing cascading style sheets from Python dictionaries."
|
||||
optional = true
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "dict2css-0.3.0.post1-py3-none-any.whl", hash = "sha256:f006a6b774c3e31869015122ae82c491fd25e7de4a75607a62aa3e798f837e0d"},
|
||||
{file = "dict2css-0.3.0.post1.tar.gz", hash = "sha256:89c544c21c4ca7472c3fffb9d37d3d926f606329afdb751dc1de67a411b70719"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cssutils = ">=2.2.0"
|
||||
domdf-python-tools = ">=2.2.0"
|
||||
|
||||
[[package]]
|
||||
name = "docstrfmt"
|
||||
version = "1.11.1"
|
||||
|
|
@ -860,6 +964,25 @@ files = [
|
|||
{file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "domdf-python-tools"
|
||||
version = "3.10.0"
|
||||
description = "Helpful functions for Python 🐍 🛠️"
|
||||
optional = true
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "domdf_python_tools-3.10.0-py3-none-any.whl", hash = "sha256:5e71c1be71bbcc1f881d690c8984b60e64298ec256903b3147f068bc33090c36"},
|
||||
{file = "domdf_python_tools-3.10.0.tar.gz", hash = "sha256:2ae308d2f4f1e9145f5f4ba57f840fbfd1c2983ee26e4824347789649d3ae298"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
natsort = ">=7.0.1"
|
||||
typing-extensions = ">=3.7.4.1"
|
||||
|
||||
[package.extras]
|
||||
all = ["pytz (>=2019.1)"]
|
||||
dates = ["pytz (>=2019.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
version = "1.3.0"
|
||||
|
|
@ -877,6 +1000,17 @@ typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""}
|
|||
[package.extras]
|
||||
test = ["pytest (>=6)"]
|
||||
|
||||
[[package]]
|
||||
name = "filelock"
|
||||
version = "3.20.2"
|
||||
description = "A platform independent file lock."
|
||||
optional = true
|
||||
python-versions = ">=3.10"
|
||||
files = [
|
||||
{file = "filelock-3.20.2-py3-none-any.whl", hash = "sha256:fbba7237d6ea277175a32c54bb71ef814a8546d8601269e1bfc388de333974e8"},
|
||||
{file = "filelock-3.20.2.tar.gz", hash = "sha256:a2241ff4ddde2a7cebddf78e39832509cb045d18ec1a09d7248d6bfc6bfbbe64"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "filetype"
|
||||
version = "1.2.0"
|
||||
|
|
@ -937,6 +1071,27 @@ files = [
|
|||
{file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "html5lib"
|
||||
version = "1.1"
|
||||
description = "HTML parser based on the WHATWG HTML specification"
|
||||
optional = true
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
files = [
|
||||
{file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"},
|
||||
{file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
six = ">=1.9"
|
||||
webencodings = "*"
|
||||
|
||||
[package.extras]
|
||||
all = ["chardet (>=2.2)", "genshi", "lxml"]
|
||||
chardet = ["chardet (>=2.2)"]
|
||||
genshi = ["genshi"]
|
||||
lxml = ["lxml"]
|
||||
|
||||
[[package]]
|
||||
name = "httpcore"
|
||||
version = "1.0.9"
|
||||
|
|
@ -1732,21 +1887,16 @@ mutagen = ">=1.46"
|
|||
test = ["tox"]
|
||||
|
||||
[[package]]
|
||||
name = "mock"
|
||||
version = "5.2.0"
|
||||
description = "Rolling backport of unittest.mock for all Pythons"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
name = "more-itertools"
|
||||
version = "10.8.0"
|
||||
description = "More routines for operating on iterables, beyond itertools"
|
||||
optional = true
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "mock-5.2.0-py3-none-any.whl", hash = "sha256:7ba87f72ca0e915175596069dbbcc7c75af7b5e9b9bc107ad6349ede0819982f"},
|
||||
{file = "mock-5.2.0.tar.gz", hash = "sha256:4e460e818629b4b173f32d08bf30d3af8123afbb8e04bb5707a1fd4799e503f0"},
|
||||
{file = "more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b"},
|
||||
{file = "more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
build = ["blurb", "twine", "wheel"]
|
||||
docs = ["sphinx"]
|
||||
test = ["pytest", "pytest-cov"]
|
||||
|
||||
[[package]]
|
||||
name = "msgpack"
|
||||
version = "1.1.2"
|
||||
|
|
@@ -1834,17 +1984,6 @@ check = ["check-manifest", "flake8", "flake8-black", "isort (>=5.0.3)", "pygment
test = ["coverage[toml] (>=5.2)", "coveralls (>=2.1.1)", "hypothesis", "pyannotate", "pytest", "pytest-cov"]
type = ["mypy", "mypy-extensions"]

[[package]]
name = "musicbrainzngs"
version = "0.7.1"
description = "Python bindings for the MusicBrainz NGS and the Cover Art Archive webservices"
optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
    {file = "musicbrainzngs-0.7.1-py2.py3-none-any.whl", hash = "sha256:e841a8f975104c0a72290b09f59326050194081a5ae62ee512f41915090e1a10"},
    {file = "musicbrainzngs-0.7.1.tar.gz", hash = "sha256:ab1c0100fd0b305852e65f2ed4113c6de12e68afd55186987b8ed97e0f98e627"},
]

[[package]]
name = "mutagen"
version = "1.47.0"

@ -1927,6 +2066,21 @@ files = [
|
|||
{file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "natsort"
|
||||
version = "8.4.0"
|
||||
description = "Simple yet flexible natural sorting in Python."
|
||||
optional = true
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c"},
|
||||
{file = "natsort-8.4.0.tar.gz", hash = "sha256:45312c4a0e5507593da193dedd04abb1469253b601ecaf63445ad80f0a1ea581"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
fast = ["fastnumbers (>=2.0.0)"]
|
||||
icu = ["PyICU (>=1.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "numba"
|
||||
version = "0.62.1"
|
||||
|
|
@ -3319,6 +3473,94 @@ files = [
|
|||
{file = "roman-5.1.tar.gz", hash = "sha256:3a86572e9bc9183e771769601189e5fa32f1620ffeceebb9eca836affb409986"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruamel-yaml"
|
||||
version = "0.18.16"
|
||||
description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
|
||||
optional = true
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "ruamel.yaml-0.18.16-py3-none-any.whl", hash = "sha256:048f26d64245bae57a4f9ef6feb5b552a386830ef7a826f235ffb804c59efbba"},
|
||||
{file = "ruamel.yaml-0.18.16.tar.gz", hash = "sha256:a6e587512f3c998b2225d68aa1f35111c29fad14aed561a26e73fab729ec5e5a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.14\""}
|
||||
|
||||
[package.extras]
|
||||
docs = ["mercurial (>5.7)", "ryd"]
|
||||
jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "ruamel-yaml-clib"
|
||||
version = "0.2.15"
|
||||
description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
|
||||
optional = true
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:88eea8baf72f0ccf232c22124d122a7f26e8a24110a0273d9bcddcb0f7e1fa03"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b6f7d74d094d1f3a4e157278da97752f16ee230080ae331fcc219056ca54f77"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4be366220090d7c3424ac2b71c90d1044ea34fca8c0b88f250064fd06087e614"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f66f600833af58bea694d5892453f2270695b92200280ee8c625ec5a477eed3"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da3d6adadcf55a93c214d23941aef4abfd45652110aed6580e814152f385b862"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e9fde97ecb7bb9c41261c2ce0da10323e9227555c674989f8d9eb7572fc2098d"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:05c70f7f86be6f7bee53794d80050a28ae7e13e4a0087c1839dcdefd68eb36b6"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f1d38cbe622039d111b69e9ca945e7e3efebb30ba998867908773183357f3ed"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp310-cp310-win32.whl", hash = "sha256:fe239bdfdae2302e93bd6e8264bd9b71290218fff7084a9db250b55caaccf43f"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp310-cp310-win_amd64.whl", hash = "sha256:468858e5cbde0198337e6a2a78eda8c3fb148bdf4c6498eaf4bc9ba3f8e780bd"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c583229f336682b7212a43d2fa32c30e643d3076178fb9f7a6a14dde85a2d8bd"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56ea19c157ed8c74b6be51b5fa1c3aff6e289a041575f0556f66e5fb848bb137"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5fea0932358e18293407feb921d4f4457db837b67ec1837f87074667449f9401"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef71831bd61fbdb7aa0399d5c4da06bea37107ab5c79ff884cc07f2450910262"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:617d35dc765715fa86f8c3ccdae1e4229055832c452d4ec20856136acc75053f"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b45498cc81a4724a2d42273d6cfc243c0547ad7c6b87b4f774cb7bcc131c98d"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:def5663361f6771b18646620fca12968aae730132e104688766cf8a3b1d65922"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:014181cdec565c8745b7cbc4de3bf2cc8ced05183d986e6d1200168e5bb59490"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp311-cp311-win32.whl", hash = "sha256:d290eda8f6ada19e1771b54e5706b8f9807e6bb08e873900d5ba114ced13e02c"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp311-cp311-win_amd64.whl", hash = "sha256:bdc06ad71173b915167702f55d0f3f027fc61abd975bd308a0968c02db4a4c3e"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cb15a2e2a90c8475df45c0949793af1ff413acfb0a716b8b94e488ea95ce7cff"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:64da03cbe93c1e91af133f5bec37fd24d0d4ba2418eaf970d7166b0a26a148a2"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f6d3655e95a80325b84c4e14c080b2470fe4f33b6846f288379ce36154993fb1"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71845d377c7a47afc6592aacfea738cc8a7e876d586dfba814501d8c53c1ba60"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e5499db1ccbc7f4b41f0565e4f799d863ea720e01d3e99fa0b7b5fcd7802c9"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4b293a37dc97e2b1e8a1aec62792d1e52027087c8eea4fc7b5abd2bdafdd6642"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:512571ad41bba04eac7268fe33f7f4742210ca26a81fe0c75357fa682636c690"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e5e9f630c73a490b758bf14d859a39f375e6999aea5ddd2e2e9da89b9953486a"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp312-cp312-win32.whl", hash = "sha256:f4421ab780c37210a07d138e56dd4b51f8642187cdfb433eb687fe8c11de0144"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp312-cp312-win_amd64.whl", hash = "sha256:2b216904750889133d9222b7b873c199d48ecbb12912aca78970f84a5aa1a4bc"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4dcec721fddbb62e60c2801ba08c87010bd6b700054a09998c4d09c08147b8fb"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:65f48245279f9bb301d1276f9679b82e4c080a1ae25e679f682ac62446fac471"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:46895c17ead5e22bea5e576f1db7e41cb273e8d062c04a6a49013d9f60996c25"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3eb199178b08956e5be6288ee0b05b2fb0b5c1f309725ad25d9c6ea7e27f962a"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d1032919280ebc04a80e4fb1e93f7a738129857eaec9448310e638c8bccefcf"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab0df0648d86a7ecbd9c632e8f8d6b21bb21b5fc9d9e095c796cacf32a728d2d"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:331fb180858dd8534f0e61aa243b944f25e73a4dae9962bd44c46d1761126bbf"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fd4c928ddf6bce586285daa6d90680b9c291cfd045fc40aad34e445d57b1bf51"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp313-cp313-win32.whl", hash = "sha256:bf0846d629e160223805db9fe8cc7aec16aaa11a07310c50c8c7164efa440aec"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp313-cp313-win_amd64.whl", hash = "sha256:45702dfbea1420ba3450bb3dd9a80b33f0badd57539c6aac09f42584303e0db6"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:753faf20b3a5906faf1fc50e4ddb8c074cb9b251e00b14c18b28492f933ac8ef"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:480894aee0b29752560a9de46c0e5f84a82602f2bc5c6cde8db9a345319acfdf"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4d3b58ab2454b4747442ac76fab66739c72b1e2bb9bd173d7694b9f9dbc9c000"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bfd309b316228acecfa30670c3887dcedf9b7a44ea39e2101e75d2654522acd4"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2812ff359ec1f30129b62372e5f22a52936fac13d5d21e70373dbca5d64bb97c"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7e74ea87307303ba91073b63e67f2c667e93f05a8c63079ee5b7a5c8d0d7b043"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:713cd68af9dfbe0bb588e144a61aad8dcc00ef92a82d2e87183ca662d242f524"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:542d77b72786a35563f97069b9379ce762944e67055bea293480f7734b2c7e5e"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp314-cp314-win32.whl", hash = "sha256:424ead8cef3939d690c4b5c85ef5b52155a231ff8b252961b6516ed7cf05f6aa"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp314-cp314-win_amd64.whl", hash = "sha256:ac9b8d5fa4bb7fd2917ab5027f60d4234345fd366fe39aa711d5dca090aa1467"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:923816815974425fbb1f1bf57e85eca6e14d8adc313c66db21c094927ad01815"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dcc7f3162d3711fd5d52e2267e44636e3e566d1e5675a5f0b30e98f2c4af7974"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5d3c9210219cbc0f22706f19b154c9a798ff65a6beeafbf77fc9c057ec806f7d"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bb7b728fd9f405aa00b4a0b17ba3f3b810d0ccc5f77f7373162e9b5f0ff75d5"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3cb75a3c14f1d6c3c2a94631e362802f70e83e20d1f2b2ef3026c05b415c4900"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:badd1d7283f3e5894779a6ea8944cc765138b96804496c91812b2829f70e18a7"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ba6604bbc3dfcef844631932d06a1a4dcac3fee904efccf582261948431628a"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8220fd4c6f98485e97aea65e1df76d4fed1678ede1fe1d0eed2957230d287c4"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp39-cp39-win32.whl", hash = "sha256:04d21dc9c57d9608225da28285900762befbb0165ae48482c15d8d4989d4af14"},
|
||||
{file = "ruamel_yaml_clib-0.2.15-cp39-cp39-win_amd64.whl", hash = "sha256:27dc656e84396e6d687f97c6e65fb284d100483628f02d95464fd731743a4afe"},
|
||||
{file = "ruamel_yaml_clib-0.2.15.tar.gz", hash = "sha256:46e4cc8c43ef6a94885f72512094e482114a8a706d3c555a34ed4b0d20200600"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.14.3"
|
||||
|
|
@ -3707,6 +3949,24 @@ docs = ["sphinxcontrib-websupport"]
|
|||
lint = ["flake8 (>=6.0)", "mypy (==1.11.1)", "pyright (==1.1.384)", "pytest (>=6.0)", "ruff (==0.6.9)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-Pillow (==10.2.0.20240822)", "types-Pygments (==2.18.0.20240506)", "types-colorama (==0.4.15.20240311)", "types-defusedxml (==0.7.0.20240218)", "types-docutils (==0.21.0.20241005)", "types-requests (==2.32.0.20240914)", "types-urllib3 (==1.26.25.14)"]
|
||||
test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"]
|
||||
|
||||
[[package]]
|
||||
name = "sphinx-autodoc-typehints"
|
||||
version = "3.0.1"
|
||||
description = "Type hints (PEP 484) support for the Sphinx autodoc extension"
|
||||
optional = true
|
||||
python-versions = ">=3.10"
|
||||
files = [
|
||||
{file = "sphinx_autodoc_typehints-3.0.1-py3-none-any.whl", hash = "sha256:4b64b676a14b5b79cefb6628a6dc8070e320d4963e8ff640a2f3e9390ae9045a"},
|
||||
{file = "sphinx_autodoc_typehints-3.0.1.tar.gz", hash = "sha256:b9b40dd15dee54f6f810c924f863f9cf1c54f9f3265c495140ea01be7f44fa55"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
sphinx = ">=8.1.3"
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo (>=2024.8.6)"]
|
||||
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "defusedxml (>=0.7.1)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "sphobjinv (>=2.3.1.2)", "typing-extensions (>=4.12.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "sphinx-copybutton"
|
||||
version = "0.5.2"
|
||||
|
|
@ -3750,6 +4010,22 @@ theme-pydata = ["pydata-sphinx-theme (>=0.15.2,<0.16.0)"]
|
|||
theme-rtd = ["sphinx-rtd-theme (>=2.0,<3.0)"]
|
||||
theme-sbt = ["sphinx-book-theme (>=1.1,<2.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "sphinx-jinja2-compat"
|
||||
version = "0.4.1"
|
||||
description = "Patches Jinja2 v3 to restore compatibility with earlier Sphinx versions."
|
||||
optional = true
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "sphinx_jinja2_compat-0.4.1-py3-none-any.whl", hash = "sha256:64ca0d46f0d8029fbe69ea612793a55e6ef0113e1bba4a85d402158c09f17a14"},
|
||||
{file = "sphinx_jinja2_compat-0.4.1.tar.gz", hash = "sha256:0188f0802d42c3da72997533b55a00815659a78d3f81d4b4747b1fb15a5728e6"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
jinja2 = ">=2.10"
|
||||
markupsafe = ">=1"
|
||||
standard-imghdr = {version = "3.10.14", markers = "python_version >= \"3.13\""}
|
||||
|
||||
[[package]]
|
||||
name = "sphinx-lint"
|
||||
version = "1.0.1"
|
||||
|
|
@ -3768,6 +4044,80 @@ regex = "*"
|
|||
[package.extras]
|
||||
tests = ["pytest", "pytest-cov"]
|
||||
|
||||
[[package]]
|
||||
name = "sphinx-prompt"
|
||||
version = "1.9.0"
|
||||
description = "Sphinx directive to add unselectable prompt"
|
||||
optional = true
|
||||
python-versions = ">=3.10"
|
||||
files = [
|
||||
{file = "sphinx_prompt-1.9.0-py3-none-any.whl", hash = "sha256:fd731446c03f043d1ff6df9f22414495b23067c67011cc21658ea8d36b3575fc"},
|
||||
{file = "sphinx_prompt-1.9.0.tar.gz", hash = "sha256:471b3c6d466dce780a9b167d9541865fd4e9a80ed46e31b06a52a0529ae995a1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = "*"
|
||||
docutils = "*"
|
||||
idna = "*"
|
||||
pygments = "*"
|
||||
Sphinx = ">=8.0.0,<9.0.0"
|
||||
urllib3 = "*"
|
||||
|
||||
[[package]]
|
||||
name = "sphinx-tabs"
|
||||
version = "3.4.5"
|
||||
description = "Tabbed views for Sphinx"
|
||||
optional = true
|
||||
python-versions = "~=3.7"
|
||||
files = [
|
||||
{file = "sphinx-tabs-3.4.5.tar.gz", hash = "sha256:ba9d0c1e3e37aaadd4b5678449eb08176770e0fc227e769b6ce747df3ceea531"},
|
||||
{file = "sphinx_tabs-3.4.5-py3-none-any.whl", hash = "sha256:92cc9473e2ecf1828ca3f6617d0efc0aa8acb06b08c56ba29d1413f2f0f6cf09"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
docutils = "*"
|
||||
pygments = "*"
|
||||
sphinx = "*"
|
||||
|
||||
[package.extras]
|
||||
code-style = ["pre-commit (==2.13.0)"]
|
||||
testing = ["bs4", "coverage", "pygments", "pytest (>=7.1,<8)", "pytest-cov", "pytest-regressions", "rinohtype"]
|
||||
|
||||
[[package]]
|
||||
name = "sphinx-toolbox"
|
||||
version = "4.1.1"
|
||||
description = "Box of handy tools for Sphinx 🧰 📔"
|
||||
optional = true
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "sphinx_toolbox-4.1.1-py3-none-any.whl", hash = "sha256:1ee2616091453430ffe41e8371e0ddd22a5c1f504ba2dfb306f50870f3f7672a"},
|
||||
{file = "sphinx_toolbox-4.1.1.tar.gz", hash = "sha256:1bb1750bf9e1f72a54161b0867caf3b6bf2ee216ecb9f8c519f0a9348824954a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
apeye = ">=0.4.0"
|
||||
autodocsumm = ">=0.2.0"
|
||||
beautifulsoup4 = ">=4.9.1"
|
||||
cachecontrol = {version = ">=0.13.0", extras = ["filecache"]}
|
||||
dict2css = ">=0.2.3"
|
||||
docutils = ">=0.16"
|
||||
domdf-python-tools = ">=2.9.0"
|
||||
filelock = ">=3.8.0"
|
||||
html5lib = ">=1.1"
|
||||
roman = ">4.0"
|
||||
"ruamel.yaml" = ">=0.16.12,<=0.18.16"
|
||||
sphinx = ">=3.2.0"
|
||||
sphinx-autodoc-typehints = ">=1.11.1"
|
||||
sphinx-jinja2-compat = ">=0.1.0"
|
||||
sphinx-prompt = ">=1.1.0"
|
||||
sphinx-tabs = ">=1.2.1,<3.4.7"
|
||||
tabulate = ">=0.8.7"
|
||||
typing-extensions = ">=3.7.4.3,<3.10.0.1 || >3.10.0.1"
|
||||
|
||||
[package.extras]
|
||||
all = ["coincidence (>=0.4.3)", "pygments (>=2.7.4,<=2.13.0)"]
|
||||
testing = ["coincidence (>=0.4.3)", "pygments (>=2.7.4,<=2.13.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "sphinxcontrib-applehelp"
|
||||
version = "2.0.0"
|
||||
|
|
@ -3888,6 +4238,17 @@ files = [
|
|||
{file = "standard_chunk-3.13.0.tar.gz", hash = "sha256:4ac345d37d7e686d2755e01836b8d98eda0d1a3ee90375e597ae43aaf064d654"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "standard-imghdr"
|
||||
version = "3.10.14"
|
||||
description = "Standard library imghdr redistribution. \"dead battery\"."
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "standard_imghdr-3.10.14-py3-none-any.whl", hash = "sha256:cdf6883163349624dee9a81d2853a20260337c4cd41c04e99c082e01833a08e2"},
|
||||
{file = "standard_imghdr-3.10.14.tar.gz", hash = "sha256:2598fe2e7c540dbda34b233295e10957ab8dc8ac6f3bd9eaa8d38be167232e52"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "standard-sunau"
|
||||
version = "3.13.0"
|
||||
|
|
@ -4063,17 +4424,6 @@ files = [
|
|||
{file = "types_html5lib-1.1.11.20251014.tar.gz", hash = "sha256:cc628d626e0111a2426a64f5f061ecfd113958b69ff6b3dc0eaaed2347ba9455"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "types-mock"
|
||||
version = "5.2.0.20250924"
|
||||
description = "Typing stubs for mock"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "types_mock-5.2.0.20250924-py3-none-any.whl", hash = "sha256:23617ffb4cf948c085db69ec90bd474afbce634ef74995045ae0a5748afbe57d"},
|
||||
{file = "types_mock-5.2.0.20250924.tar.gz", hash = "sha256:953197543b4183f00363e8e626f6c7abea1a3f7a4dd69d199addb70b01b6bb35"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "types-pillow"
|
||||
version = "10.2.0.20240822"
|
||||
|
|
@ -4160,6 +4510,17 @@ h2 = ["h2 (>=4,<5)"]
|
|||
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
zstd = ["zstandard (>=0.18.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "webencodings"
|
||||
version = "0.5.1"
|
||||
description = "Character encoding aliases for legacy web content"
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
|
||||
{file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "werkzeug"
|
||||
version = "3.1.3"
|
||||
|
|
@@ -4199,7 +4560,7 @@ beatport = ["requests-oauthlib"]
bpd = ["PyGObject"]
chroma = ["pyacoustid"]
discogs = ["python3-discogs-client"]
docs = ["docutils", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinx-design"]
docs = ["docutils", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx-toolbox"]
embedart = ["Pillow"]
embyupdate = ["requests"]
fetchart = ["Pillow", "beautifulsoup4", "langdetect", "requests"]

@@ -4207,13 +4568,9 @@ import = ["py7zr", "rarfile"]
kodiupdate = ["requests"]
lastgenre = ["pylast"]
lastimport = ["pylast"]
listenbrainz = ["musicbrainzngs"]
lyrics = ["beautifulsoup4", "langdetect", "requests"]
mbcollection = ["musicbrainzngs"]
metasync = ["dbus-python"]
missing = ["musicbrainzngs"]
mpdstats = ["python-mpd2"]
parentwork = ["musicbrainzngs"]
plexupdate = ["requests"]
reflink = ["reflink"]
replaygain = ["PyGObject"]

@@ -4226,4 +4583,4 @@ web = ["flask", "flask-cors"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<4"
content-hash = "8cf2ad0e6a842511e1215720a63bfdf9d5f49345410644cbb0b5fd8fb74f50d2"
content-hash = "8a1714daca55eab559558f2d4bd63d4857686eb607bf4b24f1ea6dbd412e6641"

@@ -69,7 +69,6 @@ scipy = [ # for librosa
    { python = "<3.13", version = ">=1.13.1", optional = true },
    { python = ">=3.13", version = ">=1.16.1", optional = true },
]
musicbrainzngs = { version = ">=0.4", optional = true }
numba = [ # for librosa
    { python = "<3.13", version = ">=0.60", optional = true },
    { python = ">=3.13", version = ">=0.62.1", optional = true },

@@ -94,6 +93,7 @@ pydata-sphinx-theme = { version = "*", optional = true }
sphinx = { version = "*", optional = true }
sphinx-design = { version = ">=0.6.1", optional = true }
sphinx-copybutton = { version = ">=0.5.2", optional = true }
sphinx-toolbox = { version = ">=4.1.0", optional = true }
titlecase = { version = "^2.4.1", optional = true }

[tool.poetry.group.test.dependencies]

@@ -101,7 +101,6 @@ beautifulsoup4 = "*"
codecov = ">=2.1.13"
flask = "*"
langdetect = "*"
mock = "*"
pylast = "*"
pytest = "*"
pytest-cov = "*"

@@ -125,7 +124,6 @@ sphinx-lint = ">=1.0.0"
mypy = "*"
types-beautifulsoup4 = "*"
types-docutils = ">=0.22.2.20251006"
types-mock = "*"
types-Flask-Cors = "*"
types-Pillow = "*"
types-PyYAML = "*"

@@ -154,6 +152,7 @@ docs = [
    "sphinx-lint",
    "sphinx-design",
    "sphinx-copybutton",
    "sphinx-toolbox",
]
discogs = ["python3-discogs-client"]
embedart = ["Pillow"] # ImageMagick

@@ -165,13 +164,9 @@ import = ["py7zr", "rarfile"]
kodiupdate = ["requests"]
lastgenre = ["pylast"]
lastimport = ["pylast"]
listenbrainz = ["musicbrainzngs"]
lyrics = ["beautifulsoup4", "langdetect", "requests"]
mbcollection = ["musicbrainzngs"]
metasync = ["dbus-python"]
missing = ["musicbrainzngs"]
mpdstats = ["python-mpd2"]
parentwork = ["musicbrainzngs"]
plexupdate = ["requests"]
reflink = ["reflink"]
replaygain = [

@@ -321,6 +316,7 @@ ignore = [

[tool.ruff.lint.per-file-ignores]
"beets/**" = ["PT"]
"test/plugins/test_ftintitle.py" = ["E501"]
"test/test_util.py" = ["E501"]
"test/ui/test_field_diff.py" = ["E501"]

22 test/plugins/conftest.py Normal file

@@ -0,0 +1,22 @@
from __future__ import annotations

from typing import TYPE_CHECKING

import pytest
import requests

if TYPE_CHECKING:
    from requests_mock import Mocker


@pytest.fixture
def requests_mock(requests_mock, monkeypatch) -> Mocker:
    """Use plain session wherever MB requests are mocked.

    This avoids rate limiting requests to speed up tests.
    """
    monkeypatch.setattr(
        "beetsplug._utils.musicbrainz.MusicBrainzAPI.create_session",
        lambda _: requests.Session(),
    )
    return requests_mock
|
|||
"""Tests for the 'ftintitle' plugin."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from typing import TypeAlias
|
||||
|
||||
import pytest
|
||||
|
||||
|
|
@ -22,6 +23,8 @@ from beets.library.models import Album, Item
|
|||
from beets.test.helper import PluginTestCase
|
||||
from beetsplug import ftintitle
|
||||
|
||||
ConfigValue: TypeAlias = str | bool | list[str]
|
||||
|
||||
|
||||
class FtInTitlePluginFunctional(PluginTestCase):
|
||||
plugin = "ftintitle"
|
||||
|
|
@ -39,7 +42,7 @@ def env() -> Generator[FtInTitlePluginFunctional, None, None]:
|
|||
|
||||
def set_config(
|
||||
env: FtInTitlePluginFunctional,
|
||||
cfg: dict[str, str | bool | list[str]] | None,
|
||||
cfg: dict[str, ConfigValue] | None,
|
||||
) -> None:
|
||||
cfg = {} if cfg is None else cfg
|
||||
defaults = {
|
||||
|
|
@ -246,6 +249,21 @@ def add_item(
|
|||
("Alice", "Song 1 feat. Bob"),
|
||||
id="skip-if-artist-and-album-artists-is-the-same-matching-match-b",
|
||||
),
|
||||
# ---- titles with brackets/parentheses ----
|
||||
pytest.param(
|
||||
{"format": "ft. {}", "bracket_keywords": ["mix"]},
|
||||
("ftintitle",),
|
||||
("Alice ft. Bob", "Song 1 (Club Mix)", "Alice"),
|
||||
("Alice", "Song 1 ft. Bob (Club Mix)"),
|
||||
id="ft-inserted-before-matching-bracket-keyword",
|
||||
),
|
||||
pytest.param(
|
||||
{"format": "ft. {}", "bracket_keywords": ["nomatch"]},
|
||||
("ftintitle",),
|
||||
("Alice ft. Bob", "Song 1 (Club Remix)", "Alice"),
|
||||
("Alice", "Song 1 (Club Remix) ft. Bob"),
|
||||
id="ft-inserted-at-end-no-bracket-keyword-match",
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_ftintitle_functional(
|
||||
|
|
@ -312,6 +330,66 @@ def test_split_on_feat(
|
|||
assert ftintitle.split_on_feat(given) == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"given,keywords,expected",
|
||||
[
|
||||
## default keywords
|
||||
# different braces and keywords
|
||||
("Song (Remix)", None, "Song ft. Bob (Remix)"),
|
||||
("Song [Version]", None, "Song ft. Bob [Version]"),
|
||||
("Song {Extended Mix}", None, "Song ft. Bob {Extended Mix}"),
|
||||
("Song <Instrumental>", None, "Song ft. Bob <Instrumental>"),
|
||||
# two keyword clauses
|
||||
("Song (Remix) (Live)", None, "Song ft. Bob (Remix) (Live)"),
|
||||
# brace insensitivity
|
||||
("Song (Live) [Remix]", None, "Song ft. Bob (Live) [Remix]"),
|
||||
("Song [Edit] (Remastered)", None, "Song ft. Bob [Edit] (Remastered)"),
|
||||
# negative cases
|
||||
("Song", None, "Song ft. Bob"), # no clause
|
||||
("Song (Arbitrary)", None, "Song (Arbitrary) ft. Bob"), # no keyword
|
||||
("Song (", None, "Song ( ft. Bob"), # no matching brace or keyword
|
||||
("Song (Live", None, "Song (Live ft. Bob"), # no matching brace with keyword
|
||||
# one keyword clause, one non-keyword clause
|
||||
("Song (Live) (Arbitrary)", None, "Song ft. Bob (Live) (Arbitrary)"),
|
||||
("Song (Arbitrary) (Remix)", None, "Song (Arbitrary) ft. Bob (Remix)"),
|
||||
# nested brackets - same type
|
||||
("Song (Remix (Extended))", None, "Song ft. Bob (Remix (Extended))"),
|
||||
("Song [Arbitrary [Description]]", None, "Song [Arbitrary [Description]] ft. Bob"),
|
||||
# nested brackets - different types
|
||||
("Song (Remix [Extended])", None, "Song ft. Bob (Remix [Extended])"),
|
||||
# nested - returns outer start position despite inner keyword
|
||||
("Song [Arbitrary {Extended}]", None, "Song ft. Bob [Arbitrary {Extended}]"),
|
||||
("Song {Live <Arbitrary>}", None, "Song ft. Bob {Live <Arbitrary>}"),
|
||||
("Song <Remaster (Arbitrary)>", None, "Song ft. Bob <Remaster (Arbitrary)>"),
|
||||
("Song <Extended> [Live]", None, "Song ft. Bob <Extended> [Live]"),
|
||||
("Song (Version) <Live>", None, "Song ft. Bob (Version) <Live>"),
|
||||
("Song (Arbitrary [Description])", None, "Song (Arbitrary [Description]) ft. Bob"),
|
||||
("Song [Description (Arbitrary)]", None, "Song [Description (Arbitrary)] ft. Bob"),
|
||||
## custom keywords
|
||||
("Song (Live)", ["live"], "Song ft. Bob (Live)"),
|
||||
("Song (Concert)", ["concert"], "Song ft. Bob (Concert)"),
|
||||
("Song (Remix)", ["custom"], "Song (Remix) ft. Bob"),
|
||||
("Song (Custom)", ["custom"], "Song ft. Bob (Custom)"),
|
||||
("Song", [], "Song ft. Bob"),
|
||||
("Song (", [], "Song ( ft. Bob"),
|
||||
# Multi-word keyword tests
|
||||
("Song (Club Mix)", ["club mix"], "Song ft. Bob (Club Mix)"), # Positive: matches multi-word
|
||||
("Song (Club Remix)", ["club mix"], "Song (Club Remix) ft. Bob"), # Negative: no match
|
||||
],
|
||||
) # fmt: skip
|
||||
def test_insert_ft_into_title(
|
||||
given: str,
|
||||
keywords: list[str] | None,
|
||||
expected: str,
|
||||
) -> None:
|
||||
assert (
|
||||
ftintitle.FtInTitlePlugin.insert_ft_into_title(
|
||||
given, "ft. Bob", keywords
|
||||
)
|
||||
== expected
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"given,expected",
|
||||
[
|
||||
|
|
|
|||
47 test/plugins/test_listenbrainz.py Normal file

@@ -0,0 +1,47 @@
import pytest

from beets.test.helper import ConfigMixin
from beetsplug.listenbrainz import ListenBrainzPlugin


class TestListenBrainzPlugin(ConfigMixin):
    @pytest.fixture(scope="class")
    def plugin(self) -> ListenBrainzPlugin:
        self.config["listenbrainz"]["token"] = "test_token"
        self.config["listenbrainz"]["username"] = "test_user"
        return ListenBrainzPlugin()

    @pytest.mark.parametrize(
        "search_response, expected_id",
        [([{"id": "id1"}], "id1"), ([], None)],
        ids=["found", "not_found"],
    )
    def test_get_mb_recording_id(
        self, plugin, requests_mock, search_response, expected_id
    ):
        requests_mock.get(
            "/ws/2/recording", json={"recordings": search_response}
        )
        track = {"track_metadata": {"track_name": "S", "release_name": "A"}}

        assert plugin.get_mb_recording_id(track) == expected_id

    def test_get_track_info(self, plugin, requests_mock):
        requests_mock.get(
            "/ws/2/recording/id1?inc=releases%2Bartist-credits",
            json={
                "title": "T",
                "artist-credit": [],
                "releases": [{"title": "Al", "date": "2023-01"}],
            },
        )

        assert plugin.get_track_info([{"identifier": "id1"}]) == [
            {
                "identifier": "id1",
                "title": "T",
                "artist": None,
                "album": "Al",
                "year": "2023",
            }
        ]

142
test/plugins/test_mbcollection.py
Normal file
142
test/plugins/test_mbcollection.py
Normal file
|
|
@ -0,0 +1,142 @@
|
|||
import re
|
||||
import uuid
|
||||
from contextlib import nullcontext as does_not_raise
|
||||
|
||||
import pytest
|
||||
|
||||
from beets.library import Album
|
||||
from beets.test.helper import PluginMixin, TestHelper
|
||||
from beets.ui import UserError
|
||||
from beetsplug import mbcollection
|
||||
|
||||
|
||||
class TestMbCollectionPlugin(PluginMixin, TestHelper):
|
||||
"""Tests for the MusicBrainzCollectionPlugin class methods."""
|
||||
|
||||
plugin = "mbcollection"
|
||||
|
||||
COLLECTION_ID = str(uuid.uuid4())
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def setup_config(self):
|
||||
self.config["musicbrainz"]["user"] = "testuser"
|
||||
self.config["musicbrainz"]["pass"] = "testpass"
|
||||
self.config["mbcollection"]["collection"] = self.COLLECTION_ID
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def helper(self):
|
||||
self.setup_beets()
|
||||
|
||||
yield self
|
||||
|
||||
self.teardown_beets()
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"user_collections,expectation",
|
||||
[
|
||||
(
|
||||
[],
|
||||
pytest.raises(
|
||||
UserError, match=r"no collections exist for user"
|
||||
),
|
||||
),
|
||||
(
|
||||
[{"id": "c1", "entity-type": "event"}],
|
||||
pytest.raises(UserError, match=r"No release collection found."),
|
||||
),
|
||||
(
|
||||
[{"id": "c1", "entity-type": "release"}],
|
||||
pytest.raises(UserError, match=r"invalid collection ID"),
|
||||
),
|
||||
(
|
||||
[{"id": COLLECTION_ID, "entity-type": "release"}],
|
||||
does_not_raise(),
|
||||
),
|
||||
],
|
||||
ids=["no collections", "no release collections", "invalid ID", "valid"],
|
||||
)
|
||||
def test_get_collection_validation(
|
||||
self, requests_mock, user_collections, expectation
|
||||
):
|
||||
requests_mock.get(
|
||||
"/ws/2/collection", json={"collections": user_collections}
|
||||
)
|
||||
|
||||
with expectation:
|
||||
mbcollection.MusicBrainzCollectionPlugin().collection
|
||||
|
||||
def test_mbupdate(self, helper, requests_mock, monkeypatch):
|
||||
"""Verify mbupdate sync of a MusicBrainz collection with the library.
|
||||
|
||||
This test ensures that the command:
|
||||
- fetches collection releases using paginated requests,
|
||||
- submits releases that exist locally but are missing from the remote
|
||||
collection
|
||||
- and removes releases from the remote collection that are not in the
|
||||
local library. Small chunk sizes are forced to exercise pagination and
|
||||
batching logic.
|
||||
"""
|
||||
for mb_albumid in [
|
||||
# already present in remote collection
|
||||
"in_collection1",
|
||||
"in_collection2",
|
||||
# two new albums not in remote collection
|
||||
"00000000-0000-0000-0000-000000000001",
|
||||
"00000000-0000-0000-0000-000000000002",
|
||||
]:
|
||||
helper.lib.add(Album(mb_albumid=mb_albumid))
|
||||
|
||||
# The relevant collection
|
||||
requests_mock.get(
|
||||
"/ws/2/collection",
|
||||
json={
|
||||
"collections": [
|
||||
{
|
||||
"id": self.COLLECTION_ID,
|
||||
"entity-type": "release",
|
||||
"release-count": 3,
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
collection_releases = f"/ws/2/collection/{self.COLLECTION_ID}/releases"
|
||||
# Force small fetch chunk to require multiple paged requests.
|
||||
monkeypatch.setattr(
|
||||
"beetsplug.mbcollection.MBCollection.FETCH_CHUNK_SIZE", 2
|
||||
)
|
||||
# 3 releases are fetched in two pages.
|
||||
requests_mock.get(
|
||||
re.compile(rf".*{collection_releases}\b.*&offset=0.*"),
|
||||
json={
|
||||
"releases": [{"id": "in_collection1"}, {"id": "not_in_library"}]
|
||||
},
|
||||
)
|
||||
requests_mock.get(
|
||||
re.compile(rf".*{collection_releases}\b.*&offset=2.*"),
|
||||
json={"releases": [{"id": "in_collection2"}]},
|
||||
)
|
||||
|
||||
# Force small submission chunk
|
||||
monkeypatch.setattr(
|
||||
"beetsplug.mbcollection.MBCollection.SUBMISSION_CHUNK_SIZE", 1
|
||||
)
|
||||
# so that releases are added using two requests
|
||||
requests_mock.put(
|
||||
re.compile(
|
||||
rf".*{collection_releases}/00000000-0000-0000-0000-000000000001"
|
||||
)
|
||||
)
|
||||
requests_mock.put(
|
||||
re.compile(
|
||||
rf".*{collection_releases}/00000000-0000-0000-0000-000000000002"
|
||||
)
|
||||
)
|
||||
# and finally, one release is removed
|
||||
requests_mock.delete(
|
||||
re.compile(rf".*{collection_releases}/not_in_library")
|
||||
)
|
||||
|
||||
helper.run_command("mbupdate", "--remove")
|
||||
|
||||
assert requests_mock.call_count == 6
|
||||
|
|
@ -94,7 +94,7 @@ class TestMBPseudoMixin(PluginMixin):
|
|||
@pytest.fixture(autouse=True)
|
||||
def patch_get_release(self, monkeypatch, pseudo_release: JSONDict):
|
||||
monkeypatch.setattr(
|
||||
"beetsplug.musicbrainz.MusicBrainzAPI.get_release",
|
||||
"beetsplug._utils.musicbrainz.MusicBrainzAPI.get_release",
|
||||
lambda _, album_id: deepcopy(
|
||||
{pseudo_release["id"]: pseudo_release}[album_id]
|
||||
),
|
||||
|
|
|
|||
61
test/plugins/test_missing.py
Normal file
61
test/plugins/test_missing.py
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
import uuid
|
||||
|
||||
import pytest
|
||||
|
||||
from beets.library import Album
|
||||
from beets.test.helper import PluginMixin, TestHelper
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def helper():
|
||||
helper = TestHelper()
|
||||
helper.setup_beets()
|
||||
|
||||
yield helper
|
||||
|
||||
helper.teardown_beets()
|
||||
|
||||
|
||||
class TestMissingAlbums(PluginMixin):
|
||||
plugin = "missing"
|
||||
album_in_lib = Album(
|
||||
album="Album",
|
||||
albumartist="Artist",
|
||||
mb_albumartistid=str(uuid.uuid4()),
|
||||
mb_albumid="album",
|
||||
)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"release_from_mb,expected_output",
|
||||
[
|
||||
pytest.param(
|
||||
{"id": "other", "title": "Other Album"},
|
||||
"Artist - Other Album\n",
|
||||
id="missing",
|
||||
),
|
||||
pytest.param(
|
||||
{"id": album_in_lib.mb_albumid, "title": album_in_lib.album},
|
||||
"",
|
||||
marks=pytest.mark.xfail(
|
||||
reason=(
|
||||
"Album in lib must not be reported as missing."
|
||||
" Needs fixing."
|
||||
)
|
||||
),
|
||||
id="not missing",
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_missing_artist_albums(
|
||||
self, requests_mock, helper, release_from_mb, expected_output
|
||||
):
|
||||
helper.lib.add(self.album_in_lib)
|
||||
requests_mock.get(
|
||||
f"/ws/2/release-group?artist={self.album_in_lib.mb_albumartistid}",
|
||||
json={"release-groups": [release_from_mb]},
|
||||
)
|
||||
|
||||
with self.configure_plugin({}):
|
||||
assert (
|
||||
helper.run_with_output("missing", "--album") == expected_output
|
||||
)
|
||||
|
|
@ -863,7 +863,7 @@ class MBLibraryTest(MusicBrainzTestCase):
|
|||
]
|
||||
|
||||
with mock.patch(
|
||||
"beetsplug.musicbrainz.MusicBrainzAPI.get_release"
|
||||
"beetsplug._utils.musicbrainz.MusicBrainzAPI.get_release"
|
||||
) as gp:
|
||||
gp.side_effect = side_effect
|
||||
album = self.mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02")
|
||||
|
|
@ -907,7 +907,7 @@ class MBLibraryTest(MusicBrainzTestCase):
|
|||
]
|
||||
|
||||
with mock.patch(
|
||||
"beetsplug.musicbrainz.MusicBrainzAPI.get_release"
|
||||
"beetsplug._utils.musicbrainz.MusicBrainzAPI.get_release"
|
||||
) as gp:
|
||||
gp.side_effect = side_effect
|
||||
album = self.mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02")
|
||||
|
|
@ -951,7 +951,7 @@ class MBLibraryTest(MusicBrainzTestCase):
|
|||
]
|
||||
|
||||
with mock.patch(
|
||||
"beetsplug.musicbrainz.MusicBrainzAPI.get_release"
|
||||
"beetsplug._utils.musicbrainz.MusicBrainzAPI.get_release"
|
||||
) as gp:
|
||||
gp.side_effect = side_effect
|
||||
album = self.mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02")
|
||||
|
|
@ -1004,7 +1004,7 @@ class MBLibraryTest(MusicBrainzTestCase):
|
|||
]
|
||||
|
||||
with mock.patch(
|
||||
"beetsplug.musicbrainz.MusicBrainzAPI.get_release"
|
||||
"beetsplug._utils.musicbrainz.MusicBrainzAPI.get_release"
|
||||
) as gp:
|
||||
gp.side_effect = side_effect
|
||||
album = self.mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02")
|
||||
|
|
@ -1055,7 +1055,7 @@ class TestMusicBrainzPlugin(PluginMixin):
|
|||
|
||||
def test_item_candidates(self, monkeypatch, mb):
|
||||
monkeypatch.setattr(
|
||||
"beetsplug.musicbrainz.MusicBrainzAPI.get_json",
|
||||
"beetsplug._utils.musicbrainz.MusicBrainzAPI.get_json",
|
||||
lambda *_, **__: {"recordings": [self.RECORDING]},
|
||||
)
|
||||
|
||||
|
|
@ -1066,11 +1066,11 @@ class TestMusicBrainzPlugin(PluginMixin):
|
|||
|
||||
def test_candidates(self, monkeypatch, mb):
|
||||
monkeypatch.setattr(
|
||||
"beetsplug.musicbrainz.MusicBrainzAPI.get_json",
|
||||
"beetsplug._utils.musicbrainz.MusicBrainzAPI.get_json",
|
||||
lambda *_, **__: {"releases": [{"id": self.mbid}]},
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
"beetsplug.musicbrainz.MusicBrainzAPI.get_release",
|
||||
"beetsplug._utils.musicbrainz.MusicBrainzAPI.get_release",
|
||||
lambda *_, **__: {
|
||||
"title": "hi",
|
||||
"id": self.mbid,
|
||||
|
|
@ -1099,84 +1099,3 @@ class TestMusicBrainzPlugin(PluginMixin):
|
|||
assert len(candidates) == 1
|
||||
assert candidates[0].tracks[0].track_id == self.RECORDING["id"]
|
||||
assert candidates[0].album == "hi"
|
||||
|
||||
|
||||
def test_group_relations():
|
||||
raw_release = {
|
||||
"id": "r1",
|
||||
"relations": [
|
||||
{"target-type": "artist", "type": "vocal", "name": "A"},
|
||||
{"target-type": "url", "type": "streaming", "url": "http://s"},
|
||||
{"target-type": "url", "type": "purchase", "url": "http://p"},
|
||||
{
|
||||
"target-type": "work",
|
||||
"type": "performance",
|
||||
"work": {
|
||||
"relations": [
|
||||
{
|
||||
"artist": {"name": "幾田りら"},
|
||||
"target-type": "artist",
|
||||
"type": "composer",
|
||||
},
|
||||
{
|
||||
"target-type": "url",
|
||||
"type": "lyrics",
|
||||
"url": {
|
||||
"resource": "https://utaten.com/lyric/tt24121002/"
|
||||
},
|
||||
},
|
||||
{
|
||||
"artist": {"name": "幾田りら"},
|
||||
"target-type": "artist",
|
||||
"type": "lyricist",
|
||||
},
|
||||
{
|
||||
"target-type": "url",
|
||||
"type": "lyrics",
|
||||
"url": {
|
||||
"resource": "https://www.uta-net.com/song/366579/"
|
||||
},
|
||||
},
|
||||
],
|
||||
"title": "百花繚乱",
|
||||
"type": "Song",
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
assert musicbrainz.MusicBrainzAPI._group_relations(raw_release) == {
|
||||
"id": "r1",
|
||||
"artist-relations": [{"type": "vocal", "name": "A"}],
|
||||
"url-relations": [
|
||||
{"type": "streaming", "url": "http://s"},
|
||||
{"type": "purchase", "url": "http://p"},
|
||||
],
|
||||
"work-relations": [
|
||||
{
|
||||
"type": "performance",
|
||||
"work": {
|
||||
"artist-relations": [
|
||||
{"type": "composer", "artist": {"name": "幾田りら"}},
|
||||
{"type": "lyricist", "artist": {"name": "幾田りら"}},
|
||||
],
|
||||
"url-relations": [
|
||||
{
|
||||
"type": "lyrics",
|
||||
"url": {
|
||||
"resource": "https://utaten.com/lyric/tt24121002/"
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lyrics",
|
||||
"url": {
|
||||
"resource": "https://www.uta-net.com/song/366579/"
|
||||
},
|
||||
},
|
||||
],
|
||||
"title": "百花繚乱",
|
||||
"type": "Song",
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,74 +14,10 @@
"""Tests for the 'parentwork' plugin."""

from unittest.mock import patch

import pytest

from beets.library import Item
from beets.test.helper import PluginTestCase
from beetsplug import parentwork

work = {
    "work": {
        "id": "1",
        "title": "work",
        "work-relation-list": [
            {"type": "parts", "direction": "backward", "work": {"id": "2"}}
        ],
        "artist-relation-list": [
            {
                "type": "composer",
                "artist": {
                    "name": "random composer",
                    "sort-name": "composer, random",
                },
            }
        ],
    }
}
dp_work = {
    "work": {
        "id": "2",
        "title": "directparentwork",
        "work-relation-list": [
            {"type": "parts", "direction": "backward", "work": {"id": "3"}}
        ],
        "artist-relation-list": [
            {
                "type": "composer",
                "artist": {
                    "name": "random composer",
                    "sort-name": "composer, random",
                },
            }
        ],
    }
}
p_work = {
    "work": {
        "id": "3",
        "title": "parentwork",
        "artist-relation-list": [
            {
                "type": "composer",
                "artist": {
                    "name": "random composer",
                    "sort-name": "composer, random",
                },
            }
        ],
    }
}


def mock_workid_response(mbid, includes):
    if mbid == "1":
        return work
    elif mbid == "2":
        return dp_work
    elif mbid == "3":
        return p_work


@pytest.mark.integration_test

@ -134,35 +70,56 @@ class ParentWorkIntegrationTest(PluginTestCase):
        item.load()
        assert item["mb_parentworkid"] == "XXX"

    # test different cases, still with Matthew Passion Ouverture or Mozart
    # requiem

    def test_direct_parent_work_real(self):
        mb_workid = "2e4a3668-458d-3b2a-8be2-0b08e0d8243a"
        assert (
            "f04b42df-7251-4d86-a5ee-67cfa49580d1"
            == parentwork.direct_parent_id(mb_workid)[0]
        )
        assert (
            "45afb3b2-18ac-4187-bc72-beb1b1c194ba"
            == parentwork.work_parent_id(mb_workid)[0]
        )


class ParentWorkTest(PluginTestCase):
    plugin = "parentwork"

    def setUp(self):
        """Set up configuration"""
        super().setUp()
        self.patcher = patch(
            "musicbrainzngs.get_work_by_id", side_effect=mock_workid_response
    @pytest.fixture(autouse=True)
    def patch_works(self, requests_mock):
        requests_mock.get(
            "/ws/2/work/1?inc=work-rels%2Bartist-rels",
            json={
                "id": "1",
                "title": "work",
                "work-relations": [
                    {
                        "type": "parts",
                        "direction": "backward",
                        "work": {"id": "2"},
                    }
                ],
            },
        )
        requests_mock.get(
            "/ws/2/work/2?inc=work-rels%2Bartist-rels",
            json={
                "id": "2",
                "title": "directparentwork",
                "work-relations": [
                    {
                        "type": "parts",
                        "direction": "backward",
                        "work": {"id": "3"},
                    }
                ],
            },
        )
        requests_mock.get(
            "/ws/2/work/3?inc=work-rels%2Bartist-rels",
            json={
                "id": "3",
                "title": "parentwork",
                "artist-relations": [
                    {
                        "type": "composer",
                        "artist": {
                            "name": "random composer",
                            "sort-name": "composer, random",
                        },
                    }
                ],
            },
        )
        self.patcher.start()

    def tearDown(self):
        super().tearDown()
        self.patcher.stop()

    def test_normal_case(self):
        item = Item(path="/file", mb_workid="1", parentwork_workid_current="1")

@ -204,7 +161,3 @@ class ParentWorkTest(PluginTestCase):
        item.load()
        assert item["mb_parentworkid"] == "XXX"

    def test_direct_parent_work(self):
        assert "2" == parentwork.direct_parent_id("1")[0]
        assert "3" == parentwork.work_parent_id("1")[0]

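The parentwork diff above swaps the musicbrainzngs patch for transport-level mocking: the requests_mock registrations use path-only URLs, which the library matches regardless of host, so the plugin exercises its real HTTP client code against canned JSON. A minimal standalone sketch of that matching behaviour, with an illustrative host and payload that are not part of the diff:

import requests
import requests_mock

with requests_mock.Mocker() as m:
    # Path-only registration: any host matches, the query string must agree.
    m.get("/ws/2/work/1?inc=work-rels%2Bartist-rels", json={"id": "1"})
    resp = requests.get(
        "https://musicbrainz.org/ws/2/work/1?inc=work-rels%2Bartist-rels"
    )
    assert resp.json()["id"] == "1"
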
0
test/plugins/utils/__init__.py
Normal file
82
test/plugins/utils/test_musicbrainz.py
Normal file
@ -0,0 +1,82 @@
from beetsplug._utils.musicbrainz import MusicBrainzAPI


def test_group_relations():
    raw_release = {
        "id": "r1",
        "relations": [
            {"target-type": "artist", "type": "vocal", "name": "A"},
            {"target-type": "url", "type": "streaming", "url": "http://s"},
            {"target-type": "url", "type": "purchase", "url": "http://p"},
            {
                "target-type": "work",
                "type": "performance",
                "work": {
                    "relations": [
                        {
                            "artist": {"name": "幾田りら"},
                            "target-type": "artist",
                            "type": "composer",
                        },
                        {
                            "target-type": "url",
                            "type": "lyrics",
                            "url": {
                                "resource": "https://utaten.com/lyric/tt24121002/"
                            },
                        },
                        {
                            "artist": {"name": "幾田りら"},
                            "target-type": "artist",
                            "type": "lyricist",
                        },
                        {
                            "target-type": "url",
                            "type": "lyrics",
                            "url": {
                                "resource": "https://www.uta-net.com/song/366579/"
                            },
                        },
                    ],
                    "title": "百花繚乱",
                    "type": "Song",
                },
            },
        ],
    }

    assert MusicBrainzAPI._group_relations(raw_release) == {
        "id": "r1",
        "artist-relations": [{"type": "vocal", "name": "A"}],
        "url-relations": [
            {"type": "streaming", "url": "http://s"},
            {"type": "purchase", "url": "http://p"},
        ],
        "work-relations": [
            {
                "type": "performance",
                "work": {
                    "artist-relations": [
                        {"type": "composer", "artist": {"name": "幾田りら"}},
                        {"type": "lyricist", "artist": {"name": "幾田りら"}},
                    ],
                    "url-relations": [
                        {
                            "type": "lyrics",
                            "url": {
                                "resource": "https://utaten.com/lyric/tt24121002/"
                            },
                        },
                        {
                            "type": "lyrics",
                            "url": {
                                "resource": "https://www.uta-net.com/song/366579/"
                            },
                        },
                    ],
                    "title": "百花繚乱",
                    "type": "Song",
                },
            },
        ],
    }

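The new test pins down how _group_relations reshapes a release: every entry of the flat "relations" list is folded into a "<target-type>-relations" list, the now-redundant "target-type" key is dropped, and a nested work's own relations are grouped the same way. The sketch below is an assumption reconstructed from the test's input and expected output, not the project's actual implementation, and the function name is made up:

from typing import Any


def group_relations(data: dict[str, Any]) -> dict[str, Any]:
    # Keep every key except the raw "relations" list, which gets regrouped.
    out = {k: v for k, v in data.items() if k != "relations"}
    for rel in data.get("relations", []):
        rel = dict(rel)
        target = rel.pop("target-type")
        if target == "work" and "work" in rel:
            # Nested works carry their own relations; group them recursively.
            rel["work"] = group_relations(rel["work"])
        out.setdefault(f"{target}-relations", []).append(rel)
    return out

Applied to the raw_release above, this yields the expected dictionary, since dict comparison ignores key order while each per-type list keeps the input ordering.
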
@ -48,11 +48,20 @@ class TestRequestHandlerRetry:
        assert response.status_code == HTTPStatus.OK

    @pytest.mark.parametrize(
        "last_response", [ConnectionResetError], ids=["conn_error"]
        "last_response",
        [
            ConnectionResetError,
            HTTPResponse(
                body=io.BytesIO(b"Server Error"),
                status=HTTPStatus.INTERNAL_SERVER_ERROR,
                preload_content=False,
            ),
        ],
        ids=["conn_error", "server_error"],
    )
    def test_retry_exhaustion(self, request_handler):
        """Verify that the handler raises an error after exhausting retries."""
        with pytest.raises(
            requests.exceptions.ConnectionError, match="Max retries exceeded"
            requests.exceptions.RequestException, match="Max retries exceeded"
        ):
            request_handler.get("http://example.com/api")

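The broader expectation fits how requests reports exhausted retries: a transport failure such as ConnectionResetError surfaces as requests.exceptions.ConnectionError, while a 500 retried via a status_forcelist typically surfaces as requests.exceptions.RetryError; both derive from RequestException and both carry urllib3's "Max retries exceeded" message. A minimal sketch of a session configured along those lines, as an illustrative stand-in rather than the project's TimeoutAndRetrySession:

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

session = requests.Session()
# Retry connection problems and selected 5xx responses alike, then give up.
retry = Retry(total=3, backoff_factor=0.1, status_forcelist=[500, 502, 503])
adapter = HTTPAdapter(max_retries=retry)
session.mount("http://", adapter)
session.mount("https://", adapter)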