diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a6720335f..748cf24d1 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -21,7 +21,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v0.1.1 + uses: BrandonLWhite/pipx-install-action@v1.0.1 - name: Setup Python with poetry caching # poetry cache requires poetry to already be installed, weirdly uses: actions/setup-python@v5 diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 1a848bde5..eae04d1d4 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -9,7 +9,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v0.1.1 + uses: BrandonLWhite/pipx-install-action@v1.0.1 - uses: actions/setup-python@v5 with: python-version: 3.9 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 9e2552ab1..16757da27 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -53,7 +53,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v0.1.1 + uses: BrandonLWhite/pipx-install-action@v1.0.1 - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} @@ -74,7 +74,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v0.1.1 + uses: BrandonLWhite/pipx-install-action@v1.0.1 - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} @@ -94,7 +94,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v0.1.1 + uses: BrandonLWhite/pipx-install-action@v1.0.1 - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} @@ -118,7 +118,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v0.1.1 + uses: BrandonLWhite/pipx-install-action@v1.0.1 - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/make_release.yaml b/.github/workflows/make_release.yaml index e54381392..7ea2d631c 100644 --- a/.github/workflows/make_release.yaml +++ b/.github/workflows/make_release.yaml @@ -19,7 +19,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v0.1.1 + uses: BrandonLWhite/pipx-install-action@v1.0.1 - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} @@ -50,7 +50,7 @@ jobs: ref: ${{ env.NEW_TAG }} - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v0.1.1 + uses: BrandonLWhite/pipx-install-action@v1.0.1 - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 9010db2c3..5fccb8e80 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -87,6 +87,15 @@ Install `poetry`_ and `poethepoet`_ using `pipx`_:: $ pipx install poetry poethepoet +.. admonition:: Check ``tool.pipx-install`` section in ``pyproject.toml`` to + see supported versions + + :: + + [tool.pipx-install] + poethepoet = ">=0.26" + poetry = "<2" + .. _pipx: https://pipx.pypa.io/stable .. 
_pipx-installation-instructions: https://pipx.pypa.io/stable/installation/ diff --git a/beets/__init__.py b/beets/__init__.py index 1bac81b65..8be305202 100644 --- a/beets/__init__.py +++ b/beets/__init__.py @@ -17,7 +17,7 @@ from sys import stderr import confuse -__version__ = "2.3.0" +__version__ = "2.3.1" __author__ = "Adrian Sampson " diff --git a/beets/autotag/hooks.py b/beets/autotag/hooks.py index 33606020d..641a6cb4f 100644 --- a/beets/autotag/hooks.py +++ b/beets/autotag/hooks.py @@ -18,17 +18,16 @@ from __future__ import annotations import re from functools import total_ordering -from typing import TYPE_CHECKING, Any, Callable, NamedTuple, TypeVar +from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar from jellyfish import levenshtein_distance from unidecode import unidecode -from beets import config, logging, plugins -from beets.autotag import mb +from beets import config, logging from beets.util import as_string, cached_classproperty if TYPE_CHECKING: - from collections.abc import Iterable, Iterator + from collections.abc import Iterator from beets.library import Item @@ -56,7 +55,7 @@ class AttrDict(dict[str, V]): return id(self) -class AlbumInfo(AttrDict): +class AlbumInfo(AttrDict[Any]): """Describes a canonical release that may be used to match a release in the library. Consists of these data members: @@ -166,7 +165,7 @@ class AlbumInfo(AttrDict): return dupe -class TrackInfo(AttrDict): +class TrackInfo(AttrDict[Any]): """Describes a canonical track present on a release. Appears as part of an AlbumInfo's ``tracks`` list. Consists of these data members: @@ -357,8 +356,8 @@ class Distance: for each individual penalty. """ - def __init__(self): - self._penalties = {} + def __init__(self) -> None: + self._penalties: dict[str, list[float]] = {} self.tracks: dict[TrackInfo, Distance] = {} @cached_classproperty @@ -591,99 +590,3 @@ class AlbumMatch(NamedTuple): class TrackMatch(NamedTuple): distance: Distance info: TrackInfo - - -# Aggregation of sources. - - -def album_for_mbid(release_id: str) -> AlbumInfo | None: - """Get an AlbumInfo object for a MusicBrainz release ID. Return None - if the ID is not found. - """ - try: - if album := mb.album_for_id(release_id): - plugins.send("albuminfo_received", info=album) - return album - except mb.MusicBrainzAPIError as exc: - exc.log(log) - return None - - -def track_for_mbid(recording_id: str) -> TrackInfo | None: - """Get a TrackInfo object for a MusicBrainz recording ID. Return None - if the ID is not found. - """ - try: - if track := mb.track_for_id(recording_id): - plugins.send("trackinfo_received", info=track) - return track - except mb.MusicBrainzAPIError as exc: - exc.log(log) - return None - - -def album_for_id(_id: str) -> AlbumInfo | None: - """Get AlbumInfo object for the given ID string.""" - return album_for_mbid(_id) or plugins.album_for_id(_id) - - -def track_for_id(_id: str) -> TrackInfo | None: - """Get TrackInfo object for the given ID string.""" - return track_for_mbid(_id) or plugins.track_for_id(_id) - - -def invoke_mb(call_func: Callable, *args): - try: - return call_func(*args) - except mb.MusicBrainzAPIError as exc: - exc.log(log) - return () - - -@plugins.notify_info_yielded("albuminfo_received") -def album_candidates( - items: list[Item], - artist: str, - album: str, - va_likely: bool, - extra_tags: dict, -) -> Iterable[tuple]: - """Search for album matches. ``items`` is a list of Item objects - that make up the album. 
``artist`` and ``album`` are the respective - names (strings), which may be derived from the item list or may be - entered by the user. ``va_likely`` is a boolean indicating whether - the album is likely to be a "various artists" release. ``extra_tags`` - is an optional dictionary of additional tags used to further - constrain the search. - """ - - if config["musicbrainz"]["enabled"]: - # Base candidates if we have album and artist to match. - if artist and album: - yield from invoke_mb( - mb.match_album, artist, album, len(items), extra_tags - ) - - # Also add VA matches from MusicBrainz where appropriate. - if va_likely and album: - yield from invoke_mb( - mb.match_album, None, album, len(items), extra_tags - ) - - # Candidates from plugins. - yield from plugins.candidates(items, artist, album, va_likely, extra_tags) - - -@plugins.notify_info_yielded("trackinfo_received") -def item_candidates(item: Item, artist: str, title: str) -> Iterable[tuple]: - """Search for item matches. ``item`` is the Item to be matched. - ``artist`` and ``title`` are strings and either reflect the item or - are specified by the user. - """ - - # MusicBrainz candidates. - if config["musicbrainz"]["enabled"] and artist and title: - yield from invoke_mb(mb.match_track, artist, title) - - # Plugin candidates. - yield from plugins.item_candidates(item, artist, title) diff --git a/beets/autotag/match.py b/beets/autotag/match.py index 433093def..91a315de0 100644 --- a/beets/autotag/match.py +++ b/beets/autotag/match.py @@ -335,8 +335,8 @@ def distance( return dist -def match_by_id(items: Iterable[Item]): - """If the items are tagged with a MusicBrainz album ID, returns an +def match_by_id(items: Iterable[Item]) -> AlbumInfo | None: + """If the items are tagged with an external source ID, return an AlbumInfo object for the corresponding album. Otherwise, returns None. """ @@ -356,7 +356,7 @@ def match_by_id(items: Iterable[Item]): return None # If all album IDs are equal, look up the album. log.debug("Searching for discovered album ID: {0}", first) - return hooks.album_for_mbid(first) + return plugins.album_for_id(first) def _recommendation( @@ -511,15 +511,14 @@ def tag_album( if search_ids: for search_id in search_ids: log.debug("Searching for album ID: {0}", search_id) - if info := hooks.album_for_id(search_id): + if info := plugins.album_for_id(search_id): _add_candidate(items, candidates, info) # Use existing metadata or text search. else: # Try search based on current ID. - id_info = match_by_id(items) - if id_info: - _add_candidate(items, candidates, id_info) + if info := match_by_id(items): + _add_candidate(items, candidates, info) rec = _recommendation(list(candidates.values())) log.debug("Album ID match recommendation is {0}", rec) if candidates and not config["import"]["timid"]: @@ -540,12 +539,6 @@ def tag_album( search_artist, search_album = cur_artist, cur_album log.debug("Search terms: {0} - {1}", search_artist, search_album) - extra_tags = None - if config["musicbrainz"]["extra_tags"]: - tag_list = config["musicbrainz"]["extra_tags"].get() - extra_tags = {k: v for (k, v) in likelies.items() if k in tag_list} - log.debug("Additional search terms: {0}", extra_tags) - # Is this album likely to be a "various artist" release? va_likely = ( (not consensus["artist"]) @@ -555,8 +548,8 @@ def tag_album( log.debug("Album might be VA: {0}", va_likely) # Get the results from the data sources. 
- for matched_candidate in hooks.album_candidates( - items, search_artist, search_album, va_likely, extra_tags + for matched_candidate in plugins.candidates( + items, search_artist, search_album, va_likely ): _add_candidate(items, candidates, matched_candidate) @@ -576,22 +569,21 @@ def tag_item( """Find metadata for a single track. Return a `Proposal` consisting of `TrackMatch` objects. - `search_artist` and `search_title` may be used - to override the current metadata for the purposes of the MusicBrainz - title. `search_ids` may be used for restricting the search to a list - of metadata backend IDs. + `search_artist` and `search_title` may be used to override the item + metadata in the search query. `search_ids` may be used for restricting the + search to a list of metadata backend IDs. """ # Holds candidates found so far: keys are MBIDs; values are # (distance, TrackInfo) pairs. candidates = {} rec: Recommendation | None = None - # First, try matching by MusicBrainz ID. + # First, try matching by the external source ID. trackids = search_ids or [t for t in [item.mb_trackid] if t] if trackids: for trackid in trackids: log.debug("Searching for track ID: {0}", trackid) - if info := hooks.track_for_id(trackid): + if info := plugins.track_for_id(trackid): dist = track_distance(item, info, incl_artist=True) candidates[info.track_id] = hooks.TrackMatch(dist, info) # If this is a good match, then don't keep searching. @@ -612,12 +604,14 @@ def tag_item( return Proposal([], Recommendation.none) # Search terms. - if not (search_artist and search_title): - search_artist, search_title = item.artist, item.title + search_artist = search_artist or item.artist + search_title = search_title or item.title log.debug("Item search terms: {0} - {1}", search_artist, search_title) # Get and evaluate candidate metadata. - for track_info in hooks.item_candidates(item, search_artist, search_title): + for track_info in plugins.item_candidates( + item, search_artist, search_title + ): dist = track_distance(item, track_info, incl_artist=True) candidates[track_info.track_id] = hooks.TrackMatch(dist, track_info) diff --git a/beets/autotag/mb.py b/beets/autotag/mb.py deleted file mode 100644 index 28cb66ca1..000000000 --- a/beets/autotag/mb.py +++ /dev/null @@ -1,891 +0,0 @@ -# This file is part of beets. -# Copyright 2016, Adrian Sampson. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. 
- -"""Searches for albums in the MusicBrainz database.""" - -from __future__ import annotations - -import re -import traceback -from collections import Counter -from itertools import product -from typing import TYPE_CHECKING, Any -from urllib.parse import urljoin - -import musicbrainzngs - -import beets -import beets.autotag.hooks -from beets import config, logging, plugins, util -from beets.plugins import MetadataSourcePlugin -from beets.util.id_extractors import ( - beatport_id_regex, - deezer_id_regex, - extract_discogs_id_regex, - spotify_id_regex, -) - -if TYPE_CHECKING: - from collections.abc import Iterator, Sequence - -VARIOUS_ARTISTS_ID = "89ad4ac3-39f7-470e-963a-56509c546377" - -BASE_URL = "https://musicbrainz.org/" - -SKIPPED_TRACKS = ["[data track]"] - -FIELDS_TO_MB_KEYS = { - "catalognum": "catno", - "country": "country", - "label": "label", - "barcode": "barcode", - "media": "format", - "year": "date", -} - -musicbrainzngs.set_useragent("beets", beets.__version__, "https://beets.io/") - - -class MusicBrainzAPIError(util.HumanReadableError): - """An error while talking to MusicBrainz. The `query` field is the - parameter to the action and may have any type. - """ - - def __init__(self, reason, verb, query, tb=None): - self.query = query - if isinstance(reason, musicbrainzngs.WebServiceError): - reason = "MusicBrainz not reachable" - super().__init__(reason, verb, tb) - - def get_message(self): - return "{} in {} with query {}".format( - self._reasonstr(), self.verb, repr(self.query) - ) - - -log = logging.getLogger("beets") - -RELEASE_INCLUDES = list( - { - "artists", - "media", - "recordings", - "release-groups", - "labels", - "artist-credits", - "aliases", - "recording-level-rels", - "work-rels", - "work-level-rels", - "artist-rels", - "isrcs", - "url-rels", - "release-rels", - "tags", - } - & set(musicbrainzngs.VALID_INCLUDES["release"]) -) - -TRACK_INCLUDES = list( - { - "artists", - "aliases", - "isrcs", - "work-level-rels", - "artist-rels", - } - & set(musicbrainzngs.VALID_INCLUDES["recording"]) -) - -BROWSE_INCLUDES = [ - "artist-credits", - "work-rels", - "artist-rels", - "recording-rels", - "release-rels", -] -if "work-level-rels" in musicbrainzngs.VALID_BROWSE_INCLUDES["recording"]: - BROWSE_INCLUDES.append("work-level-rels") -BROWSE_CHUNKSIZE = 100 -BROWSE_MAXTRACKS = 500 - - -def track_url(trackid: str) -> str: - return urljoin(BASE_URL, "recording/" + trackid) - - -def album_url(albumid: str) -> str: - return urljoin(BASE_URL, "release/" + albumid) - - -def configure(): - """Set up the python-musicbrainz-ngs module according to settings - from the beets configuration. This should be called at startup. - """ - hostname = config["musicbrainz"]["host"].as_str() - https = config["musicbrainz"]["https"].get(bool) - # Only call set_hostname when a custom server is configured. Since - # musicbrainz-ngs connects to musicbrainz.org with HTTPS by default - if hostname != "musicbrainz.org": - musicbrainzngs.set_hostname(hostname, https) - musicbrainzngs.set_rate_limit( - config["musicbrainz"]["ratelimit_interval"].as_number(), - config["musicbrainz"]["ratelimit"].get(int), - ) - - -def _preferred_alias(aliases: list): - """Given an list of alias structures for an artist credit, select - and return the user's preferred alias alias or None if no matching - alias is found. - """ - if not aliases: - return - - # Only consider aliases that have locales set. 
- aliases = [a for a in aliases if "locale" in a] - - # Get any ignored alias types and lower case them to prevent case issues - ignored_alias_types = config["import"]["ignored_alias_types"].as_str_seq() - ignored_alias_types = [a.lower() for a in ignored_alias_types] - - # Search configured locales in order. - for locale in config["import"]["languages"].as_str_seq(): - # Find matching primary aliases for this locale that are not - # being ignored - matches = [] - for a in aliases: - if ( - a["locale"] == locale - and "primary" in a - and a.get("type", "").lower() not in ignored_alias_types - ): - matches.append(a) - - # Skip to the next locale if we have no matches - if not matches: - continue - - return matches[0] - - -def _preferred_release_event( - release: dict[str, Any], -) -> tuple[str | None, str | None]: - """Given a release, select and return the user's preferred release - event as a tuple of (country, release_date). Fall back to the - default release event if a preferred event is not found. - """ - preferred_countries: Sequence[str] = config["match"]["preferred"][ - "countries" - ].as_str_seq() - - for country in preferred_countries: - for event in release.get("release-event-list", {}): - try: - if country in event["area"]["iso-3166-1-code-list"]: - return country, event["date"] - except KeyError: - pass - - return release.get("country"), release.get("date") - - -def _multi_artist_credit( - credit: list[dict], include_join_phrase: bool -) -> tuple[list[str], list[str], list[str]]: - """Given a list representing an ``artist-credit`` block, accumulate - data into a triple of joined artist name lists: canonical, sort, and - credit. - """ - artist_parts = [] - artist_sort_parts = [] - artist_credit_parts = [] - for el in credit: - if isinstance(el, str): - # Join phrase. - if include_join_phrase: - artist_parts.append(el) - artist_credit_parts.append(el) - artist_sort_parts.append(el) - - else: - alias = _preferred_alias(el["artist"].get("alias-list", ())) - - # An artist. - if alias: - cur_artist_name = alias["alias"] - else: - cur_artist_name = el["artist"]["name"] - artist_parts.append(cur_artist_name) - - # Artist sort name. - if alias: - artist_sort_parts.append(alias["sort-name"]) - elif "sort-name" in el["artist"]: - artist_sort_parts.append(el["artist"]["sort-name"]) - else: - artist_sort_parts.append(cur_artist_name) - - # Artist credit. - if "name" in el: - artist_credit_parts.append(el["name"]) - else: - artist_credit_parts.append(cur_artist_name) - - return ( - artist_parts, - artist_sort_parts, - artist_credit_parts, - ) - - -def _flatten_artist_credit(credit: list[dict]) -> tuple[str, str, str]: - """Given a list representing an ``artist-credit`` block, flatten the - data into a triple of joined artist name strings: canonical, sort, and - credit. - """ - artist_parts, artist_sort_parts, artist_credit_parts = _multi_artist_credit( - credit, include_join_phrase=True - ) - return ( - "".join(artist_parts), - "".join(artist_sort_parts), - "".join(artist_credit_parts), - ) - - -def _artist_ids(credit: list[dict]) -> list[str]: - """ - Given a list representing an ``artist-credit``, - return a list of artist IDs - """ - artist_ids: list[str] = [] - for el in credit: - if isinstance(el, dict): - artist_ids.append(el["artist"]["id"]) - - return artist_ids - - -def _get_related_artist_names(relations, relation_type): - """Given a list representing the artist relationships extract the names of - the remixers and concatenate them. 
- """ - related_artists = [] - - for relation in relations: - if relation["type"] == relation_type: - related_artists.append(relation["artist"]["name"]) - - return ", ".join(related_artists) - - -def track_info( - recording: dict, - index: int | None = None, - medium: int | None = None, - medium_index: int | None = None, - medium_total: int | None = None, -) -> beets.autotag.hooks.TrackInfo: - """Translates a MusicBrainz recording result dictionary into a beets - ``TrackInfo`` object. Three parameters are optional and are used - only for tracks that appear on releases (non-singletons): ``index``, - the overall track number; ``medium``, the disc number; - ``medium_index``, the track's index on its medium; ``medium_total``, - the number of tracks on the medium. Each number is a 1-based index. - """ - info = beets.autotag.hooks.TrackInfo( - title=recording["title"], - track_id=recording["id"], - index=index, - medium=medium, - medium_index=medium_index, - medium_total=medium_total, - data_source="MusicBrainz", - data_url=track_url(recording["id"]), - ) - - if recording.get("artist-credit"): - # Get the artist names. - ( - info.artist, - info.artist_sort, - info.artist_credit, - ) = _flatten_artist_credit(recording["artist-credit"]) - - ( - info.artists, - info.artists_sort, - info.artists_credit, - ) = _multi_artist_credit( - recording["artist-credit"], include_join_phrase=False - ) - - info.artists_ids = _artist_ids(recording["artist-credit"]) - info.artist_id = info.artists_ids[0] - - if recording.get("artist-relation-list"): - info.remixer = _get_related_artist_names( - recording["artist-relation-list"], relation_type="remixer" - ) - - if recording.get("length"): - info.length = int(recording["length"]) / 1000.0 - - info.trackdisambig = recording.get("disambiguation") - - if recording.get("isrc-list"): - info.isrc = ";".join(recording["isrc-list"]) - - lyricist = [] - composer = [] - composer_sort = [] - for work_relation in recording.get("work-relation-list", ()): - if work_relation["type"] != "performance": - continue - info.work = work_relation["work"]["title"] - info.mb_workid = work_relation["work"]["id"] - if "disambiguation" in work_relation["work"]: - info.work_disambig = work_relation["work"]["disambiguation"] - - for artist_relation in work_relation["work"].get( - "artist-relation-list", () - ): - if "type" in artist_relation: - type = artist_relation["type"] - if type == "lyricist": - lyricist.append(artist_relation["artist"]["name"]) - elif type == "composer": - composer.append(artist_relation["artist"]["name"]) - composer_sort.append(artist_relation["artist"]["sort-name"]) - if lyricist: - info.lyricist = ", ".join(lyricist) - if composer: - info.composer = ", ".join(composer) - info.composer_sort = ", ".join(composer_sort) - - arranger = [] - for artist_relation in recording.get("artist-relation-list", ()): - if "type" in artist_relation: - type = artist_relation["type"] - if type == "arranger": - arranger.append(artist_relation["artist"]["name"]) - if arranger: - info.arranger = ", ".join(arranger) - - # Supplementary fields provided by plugins - extra_trackdatas = plugins.send("mb_track_extract", data=recording) - for extra_trackdata in extra_trackdatas: - info.update(extra_trackdata) - - return info - - -def _set_date_str( - info: beets.autotag.hooks.AlbumInfo, - date_str: str, - original: bool = False, -): - """Given a (possibly partial) YYYY-MM-DD string and an AlbumInfo - object, set the object's release date fields appropriately. 
If - `original`, then set the original_year, etc., fields. - """ - if date_str: - date_parts = date_str.split("-") - for key in ("year", "month", "day"): - if date_parts: - date_part = date_parts.pop(0) - try: - date_num = int(date_part) - except ValueError: - continue - - if original: - key = "original_" + key - setattr(info, key, date_num) - - -def album_info(release: dict) -> beets.autotag.hooks.AlbumInfo: - """Takes a MusicBrainz release result dictionary and returns a beets - AlbumInfo object containing the interesting data about that release. - """ - # Get artist name using join phrases. - artist_name, artist_sort_name, artist_credit_name = _flatten_artist_credit( - release["artist-credit"] - ) - - ( - artists_names, - artists_sort_names, - artists_credit_names, - ) = _multi_artist_credit( - release["artist-credit"], include_join_phrase=False - ) - - ntracks = sum(len(m["track-list"]) for m in release["medium-list"]) - - # The MusicBrainz API omits 'artist-relation-list' and 'work-relation-list' - # when the release has more than 500 tracks. So we use browse_recordings - # on chunks of tracks to recover the same information in this case. - if ntracks > BROWSE_MAXTRACKS: - log.debug("Album {} has too many tracks", release["id"]) - recording_list = [] - for i in range(0, ntracks, BROWSE_CHUNKSIZE): - log.debug("Retrieving tracks starting at {}", i) - recording_list.extend( - musicbrainzngs.browse_recordings( - release=release["id"], - limit=BROWSE_CHUNKSIZE, - includes=BROWSE_INCLUDES, - offset=i, - )["recording-list"] - ) - track_map = {r["id"]: r for r in recording_list} - for medium in release["medium-list"]: - for recording in medium["track-list"]: - recording_info = track_map[recording["recording"]["id"]] - recording["recording"] = recording_info - - # Basic info. - track_infos = [] - index = 0 - for medium in release["medium-list"]: - disctitle = medium.get("title") - format = medium.get("format") - - if format in config["match"]["ignored_media"].as_str_seq(): - continue - - all_tracks = medium["track-list"] - if ( - "data-track-list" in medium - and not config["match"]["ignore_data_tracks"] - ): - all_tracks += medium["data-track-list"] - track_count = len(all_tracks) - - if "pregap" in medium: - all_tracks.insert(0, medium["pregap"]) - - for track in all_tracks: - if ( - "title" in track["recording"] - and track["recording"]["title"] in SKIPPED_TRACKS - ): - continue - - if ( - "video" in track["recording"] - and track["recording"]["video"] == "true" - and config["match"]["ignore_video_tracks"] - ): - continue - - # Basic information from the recording. - index += 1 - ti = track_info( - track["recording"], - index, - int(medium["position"]), - int(track["position"]), - track_count, - ) - ti.release_track_id = track["id"] - ti.disctitle = disctitle - ti.media = format - ti.track_alt = track["number"] - - # Prefer track data, where present, over recording data. - if track.get("title"): - ti.title = track["title"] - if track.get("artist-credit"): - # Get the artist names. 
- ( - ti.artist, - ti.artist_sort, - ti.artist_credit, - ) = _flatten_artist_credit(track["artist-credit"]) - - ( - ti.artists, - ti.artists_sort, - ti.artists_credit, - ) = _multi_artist_credit( - track["artist-credit"], include_join_phrase=False - ) - - ti.artists_ids = _artist_ids(track["artist-credit"]) - ti.artist_id = ti.artists_ids[0] - if track.get("length"): - ti.length = int(track["length"]) / (1000.0) - - track_infos.append(ti) - - album_artist_ids = _artist_ids(release["artist-credit"]) - info = beets.autotag.hooks.AlbumInfo( - album=release["title"], - album_id=release["id"], - artist=artist_name, - artist_id=album_artist_ids[0], - artists=artists_names, - artists_ids=album_artist_ids, - tracks=track_infos, - mediums=len(release["medium-list"]), - artist_sort=artist_sort_name, - artists_sort=artists_sort_names, - artist_credit=artist_credit_name, - artists_credit=artists_credit_names, - data_source="MusicBrainz", - data_url=album_url(release["id"]), - barcode=release.get("barcode"), - ) - info.va = info.artist_id == VARIOUS_ARTISTS_ID - if info.va: - info.artist = config["va_name"].as_str() - info.asin = release.get("asin") - info.releasegroup_id = release["release-group"]["id"] - info.albumstatus = release.get("status") - - if release["release-group"].get("title"): - info.release_group_title = release["release-group"].get("title") - - # Get the disambiguation strings at the release and release group level. - if release["release-group"].get("disambiguation"): - info.releasegroupdisambig = release["release-group"].get( - "disambiguation" - ) - if release.get("disambiguation"): - info.albumdisambig = release.get("disambiguation") - - # Get the "classic" Release type. This data comes from a legacy API - # feature before MusicBrainz supported multiple release types. - if "type" in release["release-group"]: - reltype = release["release-group"]["type"] - if reltype: - info.albumtype = reltype.lower() - - # Set the new-style "primary" and "secondary" release types. - albumtypes = [] - if "primary-type" in release["release-group"]: - rel_primarytype = release["release-group"]["primary-type"] - if rel_primarytype: - albumtypes.append(rel_primarytype.lower()) - if "secondary-type-list" in release["release-group"]: - if release["release-group"]["secondary-type-list"]: - for sec_type in release["release-group"]["secondary-type-list"]: - albumtypes.append(sec_type.lower()) - info.albumtypes = albumtypes - - # Release events. - info.country, release_date = _preferred_release_event(release) - release_group_date = release["release-group"].get("first-release-date") - if not release_date: - # Fall back if release-specific date is not available. - release_date = release_group_date - - if release_date: - _set_date_str(info, release_date, False) - _set_date_str(info, release_group_date, True) - - # Label name. - if release.get("label-info-list"): - label_info = release["label-info-list"][0] - if label_info.get("label"): - label = label_info["label"]["name"] - if label != "[no label]": - info.label = label - info.catalognum = label_info.get("catalog-number") - - # Text representation data. - if release.get("text-representation"): - rep = release["text-representation"] - info.script = rep.get("script") - info.language = rep.get("language") - - # Media (format). 
- if release["medium-list"]: - # If all media are the same, use that medium name - if len({m.get("format") for m in release["medium-list"]}) == 1: - info.media = release["medium-list"][0].get("format") - # Otherwise, let's just call it "Media" - else: - info.media = "Media" - - if config["musicbrainz"]["genres"]: - sources = [ - release["release-group"].get("tag-list", []), - release.get("tag-list", []), - ] - genres: Counter[str] = Counter() - for source in sources: - for genreitem in source: - genres[genreitem["name"]] += int(genreitem["count"]) - info.genre = "; ".join( - genre - for genre, _count in sorted(genres.items(), key=lambda g: -g[1]) - ) - - # We might find links to external sources (Discogs, Bandcamp, ...) - external_ids = config["musicbrainz"]["external_ids"].get() - wanted_sources = {site for site, wanted in external_ids.items() if wanted} - if wanted_sources and (url_rels := release.get("url-relation-list")): - urls = {} - - for source, url in product(wanted_sources, url_rels): - if f"{source}.com" in (target := url["target"]): - urls[source] = target - log.debug( - "Found link to {} release via MusicBrainz", - source.capitalize(), - ) - - if "discogs" in urls: - info.discogs_albumid = extract_discogs_id_regex(urls["discogs"]) - if "bandcamp" in urls: - info.bandcamp_album_id = urls["bandcamp"] - if "spotify" in urls: - info.spotify_album_id = MetadataSourcePlugin._get_id( - "album", urls["spotify"], spotify_id_regex - ) - if "deezer" in urls: - info.deezer_album_id = MetadataSourcePlugin._get_id( - "album", urls["deezer"], deezer_id_regex - ) - if "beatport" in urls: - info.beatport_album_id = MetadataSourcePlugin._get_id( - "album", urls["beatport"], beatport_id_regex - ) - if "tidal" in urls: - info.tidal_album_id = urls["tidal"].split("/")[-1] - - extra_albumdatas = plugins.send("mb_album_extract", data=release) - for extra_albumdata in extra_albumdatas: - info.update(extra_albumdata) - - return info - - -def match_album( - artist: str, - album: str, - tracks: int | None = None, - extra_tags: dict[str, Any] | None = None, -) -> Iterator[beets.autotag.hooks.AlbumInfo]: - """Searches for a single album ("release" in MusicBrainz parlance) - and returns an iterator over AlbumInfo objects. May raise a - MusicBrainzAPIError. - - The query consists of an artist name, an album name, and, - optionally, a number of tracks on the album and any other extra tags. - """ - # Build search criteria. - criteria = {"release": album.lower().strip()} - if artist is not None: - criteria["artist"] = artist.lower().strip() - else: - # Various Artists search. - criteria["arid"] = VARIOUS_ARTISTS_ID - if tracks is not None: - criteria["tracks"] = str(tracks) - - # Additional search cues from existing metadata. - if extra_tags: - for tag, value in extra_tags.items(): - key = FIELDS_TO_MB_KEYS[tag] - value = str(value).lower().strip() - if key == "catno": - value = value.replace(" ", "") - if value: - criteria[key] = value - - # Abort if we have no search terms. - if not any(criteria.values()): - return - - try: - log.debug("Searching for MusicBrainz releases with: {!r}", criteria) - res = musicbrainzngs.search_releases( - limit=config["musicbrainz"]["searchlimit"].get(int), **criteria - ) - except musicbrainzngs.MusicBrainzError as exc: - raise MusicBrainzAPIError( - exc, "release search", criteria, traceback.format_exc() - ) - for release in res["release-list"]: - # The search result is missing some data (namely, the tracks), - # so we just use the ID and fetch the rest of the information. 
- albuminfo = album_for_id(release["id"]) - if albuminfo is not None: - yield albuminfo - - -def match_track( - artist: str, - title: str, -) -> Iterator[beets.autotag.hooks.TrackInfo]: - """Searches for a single track and returns an iterable of TrackInfo - objects. May raise a MusicBrainzAPIError. - """ - criteria = { - "artist": artist.lower().strip(), - "recording": title.lower().strip(), - } - - if not any(criteria.values()): - return - - try: - res = musicbrainzngs.search_recordings( - limit=config["musicbrainz"]["searchlimit"].get(int), **criteria - ) - except musicbrainzngs.MusicBrainzError as exc: - raise MusicBrainzAPIError( - exc, "recording search", criteria, traceback.format_exc() - ) - for recording in res["recording-list"]: - yield track_info(recording) - - -def _parse_id(s: str) -> str | None: - """Search for a MusicBrainz ID in the given string and return it. If - no ID can be found, return None. - """ - # Find the first thing that looks like a UUID/MBID. - match = re.search("[a-f0-9]{8}(-[a-f0-9]{4}){3}-[a-f0-9]{12}", s) - if match is not None: - return match.group() if match else None - return None - - -def _is_translation(r): - _trans_key = "transl-tracklisting" - return r["type"] == _trans_key and r["direction"] == "backward" - - -def _find_actual_release_from_pseudo_release( - pseudo_rel: dict, -) -> dict | None: - try: - relations = pseudo_rel["release"]["release-relation-list"] - except KeyError: - return None - - # currently we only support trans(liter)ation's - translations = [r for r in relations if _is_translation(r)] - - if not translations: - return None - - actual_id = translations[0]["target"] - - return musicbrainzngs.get_release_by_id(actual_id, RELEASE_INCLUDES) - - -def _merge_pseudo_and_actual_album( - pseudo: beets.autotag.hooks.AlbumInfo, actual: beets.autotag.hooks.AlbumInfo -) -> beets.autotag.hooks.AlbumInfo | None: - """ - Merges a pseudo release with its actual release. - - This implementation is naive, it doesn't overwrite fields, - like status or ids. - - According to the ticket PICARD-145, the main release id should be used. - But the ticket has been in limbo since over a decade now. - It also suggests the introduction of the tag `musicbrainz_pseudoreleaseid`, - but as of this field can't be found in any official Picard docs, - hence why we did not implement that for now. - """ - merged = pseudo.copy() - from_actual = { - k: actual[k] - for k in [ - "media", - "mediums", - "country", - "catalognum", - "year", - "month", - "day", - "original_year", - "original_month", - "original_day", - "label", - "barcode", - "asin", - "style", - "genre", - ] - } - merged.update(from_actual) - return merged - - -def album_for_id(releaseid: str) -> beets.autotag.hooks.AlbumInfo | None: - """Fetches an album by its MusicBrainz ID and returns an AlbumInfo - object or None if the album is not found. May raise a - MusicBrainzAPIError. 
- """ - log.debug("Requesting MusicBrainz release {}", releaseid) - albumid = _parse_id(releaseid) - if not albumid: - log.debug("Invalid MBID ({0}).", releaseid) - return None - try: - res = musicbrainzngs.get_release_by_id(albumid, RELEASE_INCLUDES) - - # resolve linked release relations - actual_res = None - - if res["release"].get("status") == "Pseudo-Release": - actual_res = _find_actual_release_from_pseudo_release(res) - - except musicbrainzngs.ResponseError: - log.debug("Album ID match failed.") - return None - except musicbrainzngs.MusicBrainzError as exc: - raise MusicBrainzAPIError( - exc, "get release by ID", albumid, traceback.format_exc() - ) - - # release is potentially a pseudo release - release = album_info(res["release"]) - - # should be None unless we're dealing with a pseudo release - if actual_res is not None: - actual_release = album_info(actual_res["release"]) - return _merge_pseudo_and_actual_album(release, actual_release) - else: - return release - - -def track_for_id(releaseid: str) -> beets.autotag.hooks.TrackInfo | None: - """Fetches a track by its MusicBrainz ID. Returns a TrackInfo object - or None if no track is found. May raise a MusicBrainzAPIError. - """ - trackid = _parse_id(releaseid) - if not trackid: - log.debug("Invalid MBID ({0}).", releaseid) - return None - try: - res = musicbrainzngs.get_recording_by_id(trackid, TRACK_INCLUDES) - except musicbrainzngs.ResponseError: - log.debug("Track ID match failed.") - return None - except musicbrainzngs.MusicBrainzError as exc: - raise MusicBrainzAPIError( - exc, "get recording by ID", trackid, traceback.format_exc() - ) - return track_info(res["recording"]) diff --git a/beets/config_default.yaml b/beets/config_default.yaml index c5cebd441..d1329f494 100644 --- a/beets/config_default.yaml +++ b/beets/config_default.yaml @@ -6,7 +6,8 @@ statefile: state.pickle # --------------- Plugins --------------- -plugins: [] +plugins: [musicbrainz] + pluginpath: [] # --------------- Import --------------- @@ -163,22 +164,6 @@ sort_case_insensitive: yes overwrite_null: album: [] track: [] -musicbrainz: - enabled: yes - host: musicbrainz.org - https: no - ratelimit: 1 - ratelimit_interval: 1.0 - searchlimit: 5 - extra_tags: [] - genres: no - external_ids: - discogs: no - bandcamp: no - spotify: no - deezer: no - beatport: no - tidal: no match: strong_rec_thresh: 0.04 diff --git a/beets/plugins.py b/beets/plugins.py index d33458825..8751e11ad 100644 --- a/beets/plugins.py +++ b/beets/plugins.py @@ -22,7 +22,6 @@ import re import sys import traceback from collections import defaultdict -from collections.abc import Iterable from functools import wraps from typing import ( TYPE_CHECKING, @@ -46,14 +45,18 @@ else: if TYPE_CHECKING: + from collections.abc import Iterator + from confuse import ConfigView from beets.autotag import AlbumInfo, Distance, TrackInfo from beets.dbcore import Query - from beets.dbcore.db import FieldQueryType, SQLiteType + from beets.dbcore.db import FieldQueryType + from beets.dbcore.types import Type from beets.importer import ImportSession, ImportTask from beets.library import Album, Item, Library from beets.ui import Subcommand + from beets.util.id_extractors import RegexDict # TYPE_CHECKING guard is needed for any derived type # which uses an import from `beets.library` and `beets.imported` @@ -64,6 +67,11 @@ if TYPE_CHECKING: AnyModel = TypeVar("AnyModel", Album, Item) + P = ParamSpec("P") + Ret = TypeVar("Ret", bound=Any) + Listener = Callable[..., None] + IterF = Callable[P, Iterator[Ret]] + 
PLUGIN_NAMESPACE = "beetsplug" @@ -74,11 +82,6 @@ LASTFM_KEY = "2dc3914abf35f0d9c92d97d8f8e42b43" log = logging.getLogger("beets") -P = ParamSpec("P") -Ret = TypeVar("Ret", bound=Any) -Listener = Callable[..., None] - - class PluginConflictError(Exception): """Indicates that the services provided by one plugin conflict with those of another. @@ -224,7 +227,7 @@ class BeetsPlugin: def album_distance( self, - items: list[Item], + items: Sequence[Item], album_info: AlbumInfo, mapping: dict[Item, TrackInfo], ) -> Distance: @@ -242,22 +245,29 @@ class BeetsPlugin: album: str, va_likely: bool, extra_tags: dict[str, Any] | None = None, - ) -> Sequence[AlbumInfo]: - """Should return a sequence of AlbumInfo objects that match the - album whose items are provided. + ) -> Iterator[AlbumInfo]: + """Return :py:class:`AlbumInfo` candidates that match the given album. + + :param items: List of items in the album + :param artist: Album artist + :param album: Album name + :param va_likely: Whether the album is likely to be by various artists + :param extra_tags: is a an optional dictionary of extra tags to search. + Only relevant to :py:class:`MusicBrainzPlugin` autotagger and can be + ignored by other plugins """ - return () + yield from () def item_candidates( - self, - item: Item, - artist: str, - title: str, - ) -> Sequence[TrackInfo]: - """Should return a sequence of TrackInfo objects that match the - item provided. + self, item: Item, artist: str, title: str + ) -> Iterator[TrackInfo]: + """Return :py:class:`TrackInfo` candidates that match the given track. + + :param item: Track item + :param artist: Track artist + :param title: Track title """ - return () + yield from () def album_for_id(self, album_id: str) -> AlbumInfo | None: """Return an AlbumInfo object or None if no matching release was @@ -422,10 +432,10 @@ def queries() -> dict[str, type[Query]]: return out -def types(model_cls: type[AnyModel]) -> dict[str, type[SQLiteType]]: +def types(model_cls: type[AnyModel]) -> dict[str, Type]: # Gives us `item_types` and `album_types` attr_name = f"{model_cls.__name__.lower()}_types" - types: dict[str, type[SQLiteType]] = {} + types: dict[str, Type] = {} for plugin in find_plugins(): plugin_types = getattr(plugin, attr_name, {}) for field in plugin_types: @@ -462,7 +472,7 @@ def track_distance(item: Item, info: TrackInfo) -> Distance: def album_distance( - items: list[Item], + items: Sequence[Item], album_info: AlbumInfo, mapping: dict[Item, TrackInfo], ) -> Distance: @@ -475,24 +485,38 @@ def album_distance( return dist -def candidates( - items: list[Item], - artist: str, - album: str, - va_likely: bool, - extra_tags: dict[str, Any] | None = None, -) -> Iterable[AlbumInfo]: - """Gets MusicBrainz candidates for an album from each plugin.""" - for plugin in find_plugins(): - yield from plugin.candidates( - items, artist, album, va_likely, extra_tags - ) +def notify_info_yielded(event: str) -> Callable[[IterF[P, Ret]], IterF[P, Ret]]: + """Makes a generator send the event 'event' every time it yields. + This decorator is supposed to decorate a generator, but any function + returning an iterable should work. + Each yielded value is passed to plugins using the 'info' parameter of + 'send'. 
+ """ + + def decorator(func: IterF[P, Ret]) -> IterF[P, Ret]: + @wraps(func) + def wrapper(*args: P.args, **kwargs: P.kwargs) -> Iterator[Ret]: + for v in func(*args, **kwargs): + send(event, info=v) + yield v + + return wrapper + + return decorator -def item_candidates(item: Item, artist: str, title: str) -> Iterable[TrackInfo]: - """Gets MusicBrainz candidates for an item from the plugins.""" +@notify_info_yielded("albuminfo_received") +def candidates(*args, **kwargs) -> Iterator[AlbumInfo]: + """Return matching album candidates from all plugins.""" for plugin in find_plugins(): - yield from plugin.item_candidates(item, artist, title) + yield from plugin.candidates(*args, **kwargs) + + +@notify_info_yielded("trackinfo_received") +def item_candidates(*args, **kwargs) -> Iterator[TrackInfo]: + """Return matching track candidates from all plugins.""" + for plugin in find_plugins(): + yield from plugin.item_candidates(*args, **kwargs) def album_for_id(_id: str) -> AlbumInfo | None: @@ -673,7 +697,7 @@ def sanitize_pairs( ... ) [('foo', 'baz'), ('foo', 'bar'), ('key', 'value'), ('foo', 'foobar')] """ - pairs_all: list[tuple[str, str]] = list(pairs_all) + pairs_all = list(pairs_all) seen: set[tuple[str, str]] = set() others = [x for x in pairs_all if x not in pairs] res: list[tuple[str, str]] = [] @@ -695,32 +719,6 @@ def sanitize_pairs( return res -IterF = Callable[P, Iterable[Ret]] - - -def notify_info_yielded( - event: str, -) -> Callable[[IterF[P, Ret]], IterF[P, Ret]]: - """Makes a generator send the event 'event' every time it yields. - This decorator is supposed to decorate a generator, but any function - returning an iterable should work. - Each yielded value is passed to plugins using the 'info' parameter of - 'send'. - """ - - def decorator( - generator: IterF[P, Ret], - ) -> IterF[P, Ret]: - def decorated(*args: P.args, **kwargs: P.kwargs) -> Iterable[Ret]: - for v in generator(*args, **kwargs): - send(event, info=v) - yield v - - return decorated - - return decorator - - def get_distance( config: ConfigView, data_source: str, info: AlbumInfo | TrackInfo ) -> Distance: @@ -772,15 +770,6 @@ class Response(TypedDict): id: str -class RegexDict(TypedDict): - """A dictionary containing a regex pattern and the number of the - match group. - """ - - pattern: str - match_group: int - - R = TypeVar("R", bound=Response) @@ -828,9 +817,7 @@ class MetadataSourcePlugin(Generic[R], BeetsPlugin, metaclass=abc.ABCMeta): raise NotImplementedError @abc.abstractmethod - def track_for_id( - self, track_id: str | None = None, track_data: R | None = None - ) -> TrackInfo | None: + def track_for_id(self, track_id: str) -> TrackInfo | None: raise NotImplementedError @staticmethod @@ -911,44 +898,26 @@ class MetadataSourcePlugin(Generic[R], BeetsPlugin, metaclass=abc.ABCMeta): album: str, va_likely: bool, extra_tags: dict[str, Any] | None = None, - ) -> Sequence[AlbumInfo]: - """Returns a list of AlbumInfo objects for Search API results - matching an ``album`` and ``artist`` (if not various). - - :param items: List of items comprised by an album to be matched. - :param artist: The artist of the album to be matched. - :param album: The name of the album to be matched. - :param va_likely: True if the album to be matched likely has - Various Artists. 
- """ + ) -> Iterator[AlbumInfo]: query_filters = {"album": album} if not va_likely: query_filters["artist"] = artist - results = self._search_api(query_type="album", filters=query_filters) - albums = [self.album_for_id(album_id=r["id"]) for r in results] - return [a for a in albums if a is not None] + for result in self._search_api("album", query_filters): + if info := self.album_for_id(result["id"]): + yield info def item_candidates( self, item: Item, artist: str, title: str - ) -> Sequence[TrackInfo]: - """Returns a list of TrackInfo objects for Search API results - matching ``title`` and ``artist``. - - :param item: Singleton item to be matched. - :param artist: The artist of the track to be matched. - :param title: The title of the track to be matched. - """ - track_responses = self._search_api( - query_type="track", keywords=title, filters={"artist": artist} - ) - - tracks = [self.track_for_id(track_data=r) for r in track_responses] - - return [t for t in tracks if t is not None] + ) -> Iterator[TrackInfo]: + for result in self._search_api( + "track", {"artist": artist}, keywords=title + ): + if info := self.track_for_id(result["id"]): + yield info def album_distance( self, - items: list[Item], + items: Sequence[Item], album_info: AlbumInfo, mapping: dict[Item, TrackInfo], ) -> Distance: diff --git a/beets/test/helper.py b/beets/test/helper.py index 67ae1cfcf..66b4ddb71 100644 --- a/beets/test/helper.py +++ b/beets/test/helper.py @@ -35,6 +35,7 @@ import subprocess import sys import unittest from contextlib import contextmanager +from dataclasses import dataclass from enum import Enum from functools import cached_property from io import StringIO @@ -48,7 +49,7 @@ from mediafile import Image, MediaFile import beets import beets.plugins -from beets import autotag, importer, logging, util +from beets import importer, logging, util from beets.autotag.hooks import AlbumInfo, TrackInfo from beets.importer import ImportSession from beets.library import Album, Item, Library @@ -447,6 +448,11 @@ class PluginMixin(ConfigMixin): plugin: ClassVar[str] preload_plugin: ClassVar[bool] = True + original_item_types = dict(Item._types) + original_album_types = dict(Album._types) + original_item_queries = dict(Item._queries) + original_album_queries = dict(Album._queries) + def setup_beets(self): super().setup_beets() if self.preload_plugin: @@ -470,13 +476,8 @@ class PluginMixin(ConfigMixin): # Take a backup of the original _types and _queries to restore # when unloading. 
- Item._original_types = dict(Item._types) - Album._original_types = dict(Album._types) Item._types.update(beets.plugins.types(Item)) Album._types.update(beets.plugins.types(Album)) - - Item._original_queries = dict(Item._queries) - Album._original_queries = dict(Album._queries) Item._queries.update(beets.plugins.named_queries(Item)) Album._queries.update(beets.plugins.named_queries(Album)) @@ -488,10 +489,10 @@ class PluginMixin(ConfigMixin): self.config["plugins"] = [] beets.plugins._classes = set() beets.plugins._instances = {} - Item._types = getattr(Item, "_original_types", {}) - Album._types = getattr(Album, "_original_types", {}) - Item._queries = getattr(Item, "_original_queries", {}) - Album._queries = getattr(Album, "_original_queries", {}) + Item._types = self.original_item_types + Album._types = self.original_album_types + Item._queries = self.original_item_queries + Album._queries = self.original_album_queries @contextmanager def configure_plugin(self, config: Any): @@ -774,6 +775,7 @@ class TerminalImportMixin(ImportHelper): ) +@dataclass class AutotagStub: """Stub out MusicBrainz album and track matcher and control what the autotagger returns. @@ -784,47 +786,42 @@ class AutotagStub: GOOD = "GOOD" BAD = "BAD" MISSING = "MISSING" - """Generate an album match for all but one track - """ + matching: str length = 2 - matching = IDENT def install(self): - self.mb_match_album = autotag.mb.match_album - self.mb_match_track = autotag.mb.match_track - self.mb_album_for_id = autotag.mb.album_for_id - self.mb_track_for_id = autotag.mb.track_for_id - - autotag.mb.match_album = self.match_album - autotag.mb.match_track = self.match_track - autotag.mb.album_for_id = self.album_for_id - autotag.mb.track_for_id = self.track_for_id + self.patchers = [ + patch("beets.plugins.album_for_id", lambda *_: None), + patch("beets.plugins.track_for_id", lambda *_: None), + patch("beets.plugins.candidates", self.candidates), + patch("beets.plugins.item_candidates", self.item_candidates), + ] + for p in self.patchers: + p.start() return self def restore(self): - autotag.mb.match_album = self.mb_match_album - autotag.mb.match_track = self.mb_match_track - autotag.mb.album_for_id = self.mb_album_for_id - autotag.mb.track_for_id = self.mb_track_for_id + for p in self.patchers: + p.stop() - def match_album(self, albumartist, album, tracks, extra_tags): + def candidates(self, items, artist, album, va_likely, extra_tags=None): if self.matching == self.IDENT: - yield self._make_album_match(albumartist, album, tracks) + yield self._make_album_match(artist, album, len(items)) elif self.matching == self.GOOD: for i in range(self.length): - yield self._make_album_match(albumartist, album, tracks, i) + yield self._make_album_match(artist, album, len(items), i) elif self.matching == self.BAD: for i in range(self.length): - yield self._make_album_match(albumartist, album, tracks, i + 1) + yield self._make_album_match(artist, album, len(items), i + 1) elif self.matching == self.MISSING: - yield self._make_album_match(albumartist, album, tracks, missing=1) + yield self._make_album_match(artist, album, len(items), missing=1) - def match_track(self, artist, title): + def item_candidates(self, item, artist, title): yield TrackInfo( title=title.replace("Tag", "Applied"), track_id="trackid", @@ -834,12 +831,6 @@ class AutotagStub: index=0, ) - def album_for_id(self, mbid): - return None - - def track_for_id(self, mbid): - return None - def _make_track_match(self, artist, album, number): return TrackInfo( title="Applied 
Track %d" % number, @@ -877,6 +868,15 @@ class AutotagStub: ) +class AutotagImportTestCase(ImportTestCase): + matching = AutotagStub.IDENT + + def setUp(self): + super().setUp() + self.matcher = AutotagStub(self.matching).install() + self.addCleanup(self.matcher.restore) + + class FetchImageHelper: """Helper mixin for mocking requests when fetching images with remote art sources. diff --git a/beets/ui/__init__.py b/beets/ui/__init__.py index 8cc5de309..a6f615b45 100644 --- a/beets/ui/__init__.py +++ b/beets/ui/__init__.py @@ -17,6 +17,8 @@ interface. To invoke the CLI, just call beets.ui.main(). The actual CLI commands are implemented in the ui.commands module. """ +from __future__ import annotations + import errno import optparse import os.path @@ -27,17 +29,19 @@ import sys import textwrap import traceback from difflib import SequenceMatcher -from typing import Any, Callable +from typing import TYPE_CHECKING, Any, Callable import confuse from beets import config, library, logging, plugins, util -from beets.autotag import mb from beets.dbcore import db from beets.dbcore import query as db_query from beets.util import as_string from beets.util.functemplate import template +if TYPE_CHECKING: + from types import ModuleType + # On Windows platforms, use colorama to support "ANSI" terminal colors. if sys.platform == "win32": try: @@ -570,7 +574,7 @@ COLOR_NAMES = [ "text_diff_removed", "text_diff_changed", ] -COLORS = None +COLORS: dict[str, list[str]] | None = None def _colorize(color, text): @@ -1623,7 +1627,9 @@ optparse.Option.ALWAYS_TYPED_ACTIONS += ("callback",) # The main entry point and bootstrapping. -def _load_plugins(options, config): +def _load_plugins( + options: optparse.Values, config: confuse.LazyConfig +) -> ModuleType: """Load the plugins specified on the command line or in the configuration.""" paths = config["pluginpath"].as_str_seq(split=False) paths = [util.normpath(p) for p in paths] @@ -1648,6 +1654,11 @@ def _load_plugins(options, config): ) else: plugin_list = config["plugins"].as_str_seq() + # TODO: Remove in v2.4 or v3 + if "musicbrainz" in config and config["musicbrainz"].get().get( + "enabled" + ): + plugin_list.append("musicbrainz") # Exclude any plugins that were specified on the command line if options.exclude is not None: @@ -1664,9 +1675,6 @@ def _setup(options, lib=None): Returns a list of subcommands, a list of plugins, and a library instance. """ - # Configure the MusicBrainz API. - mb.configure() - config = _configure(options) plugins = _load_plugins(options, config) diff --git a/beets/ui/commands.py b/beets/ui/commands.py index 7b7554546..f42291019 100755 --- a/beets/ui/commands.py +++ b/beets/ui/commands.py @@ -363,7 +363,7 @@ class ChangeRepresentation: self.indent_header + f"Match ({dist_string(self.match.distance)}):" ) - if self.match.info.get("album"): + if isinstance(self.match.info, autotag.hooks.AlbumInfo): # Matching an album - print that artist_album_str = ( f"{self.match.info.artist}" + f" - {self.match.info.album}" diff --git a/beets/util/id_extractors.py b/beets/util/id_extractors.py index 04e9e94a7..4dbab087d 100644 --- a/beets/util/id_extractors.py +++ b/beets/util/id_extractors.py @@ -15,20 +15,31 @@ """Helpers around the extraction of album/track ID's from metadata sources.""" import re +from typing import TypedDict + + +class RegexDict(TypedDict): + """A dictionary containing a regex pattern and the number of the + match group. 
+ """ + + pattern: str + match_group: int + # Spotify IDs consist of 22 alphanumeric characters # (zero-left-padded base62 representation of randomly generated UUID4) -spotify_id_regex = { +spotify_id_regex: RegexDict = { "pattern": r"(^|open\.spotify\.com/{}/)([0-9A-Za-z]{{22}})", "match_group": 2, } -deezer_id_regex = { +deezer_id_regex: RegexDict = { "pattern": r"(^|deezer\.com/)([a-z]*/)?({}/)?(\d+)", "match_group": 4, } -beatport_id_regex = { +beatport_id_regex: RegexDict = { "pattern": r"(^|beatport\.com/release/.+/)(\d+)$", "match_group": 2, } diff --git a/beetsplug/albumtypes.py b/beetsplug/albumtypes.py index b1e143a88..180773f58 100644 --- a/beetsplug/albumtypes.py +++ b/beetsplug/albumtypes.py @@ -14,10 +14,11 @@ """Adds an album template field for formatted album types.""" -from beets.autotag.mb import VARIOUS_ARTISTS_ID from beets.library import Album from beets.plugins import BeetsPlugin +from .musicbrainz import VARIOUS_ARTISTS_ID + class AlbumTypesPlugin(BeetsPlugin): """Adds an album template field for formatted album types.""" diff --git a/beetsplug/chroma.py b/beetsplug/chroma.py index 369a3cc73..08fb97f59 100644 --- a/beetsplug/chroma.py +++ b/beetsplug/chroma.py @@ -18,13 +18,14 @@ autotagger. Requires the pyacoustid library. import re from collections import defaultdict -from functools import partial +from functools import cached_property, partial import acoustid import confuse from beets import config, plugins, ui, util -from beets.autotag import hooks +from beets.autotag.hooks import Distance +from beetsplug.musicbrainz import MusicBrainzPlugin API_KEY = "1vOwZtEn" SCORE_THRESH = 0.5 @@ -182,11 +183,15 @@ class AcoustidPlugin(plugins.BeetsPlugin): self.register_listener("import_task_start", self.fingerprint_task) self.register_listener("import_task_apply", apply_acoustid_metadata) + @cached_property + def mb(self) -> MusicBrainzPlugin: + return MusicBrainzPlugin() + def fingerprint_task(self, task, session): return fingerprint_task(self._log, task, session) def track_distance(self, item, info): - dist = hooks.Distance() + dist = Distance() if item.path not in _matches or not info.track_id: # Match failed or no track ID. 
return dist @@ -198,7 +203,7 @@ class AcoustidPlugin(plugins.BeetsPlugin): def candidates(self, items, artist, album, va_likely, extra_tags=None): albums = [] for relid in prefix(_all_releases(items), MAX_RELEASES): - album = hooks.album_for_mbid(relid) + album = self.mb.album_for_id(relid) if album: albums.append(album) @@ -212,7 +217,7 @@ class AcoustidPlugin(plugins.BeetsPlugin): recording_ids, _ = _matches[item.path] tracks = [] for recording_id in prefix(recording_ids, MAX_RECORDINGS): - track = hooks.track_for_mbid(recording_id) + track = self.mb.track_for_id(recording_id) if track: tracks.append(track) self._log.debug("acoustid item candidates: {0}", len(tracks)) diff --git a/beetsplug/mbsync.py b/beetsplug/mbsync.py index 2e62b7b7e..94870232c 100644 --- a/beetsplug/mbsync.py +++ b/beetsplug/mbsync.py @@ -16,8 +16,7 @@ from collections import defaultdict -from beets import autotag, library, ui, util -from beets.autotag import hooks +from beets import autotag, library, plugins, ui, util from beets.plugins import BeetsPlugin, apply_item_changes @@ -80,7 +79,7 @@ class MBSyncPlugin(BeetsPlugin): ) continue - if not (track_info := hooks.track_for_id(item.mb_trackid)): + if not (track_info := plugins.track_for_id(item.mb_trackid)): self._log.info( "Recording ID not found: {0.mb_trackid} for track {0}", item ) @@ -101,7 +100,7 @@ class MBSyncPlugin(BeetsPlugin): self._log.info("Skipping album with no mb_albumid: {}", album) continue - if not (album_info := hooks.album_for_id(album.mb_albumid)): + if not (album_info := plugins.album_for_id(album.mb_albumid)): self._log.info( "Release ID {0.mb_albumid} not found for album {0}", album ) diff --git a/beetsplug/missing.py b/beetsplug/missing.py index ccaa65320..c4bbb83fd 100644 --- a/beetsplug/missing.py +++ b/beetsplug/missing.py @@ -21,8 +21,7 @@ from collections.abc import Iterator import musicbrainzngs from musicbrainzngs.musicbrainz import MusicBrainzError -from beets import config -from beets.autotag import hooks +from beets import config, plugins from beets.dbcore import types from beets.library import Album, Item, Library from beets.plugins import BeetsPlugin @@ -223,7 +222,7 @@ class MissingPlugin(BeetsPlugin): item_mbids = {x.mb_trackid for x in album.items()} # fetch missing items # TODO: Implement caching that without breaking other stuff - if album_info := hooks.album_for_id(album.mb_albumid): + if album_info := plugins.album_for_id(album.mb_albumid): for track_info in album_info.tracks: if track_info.track_id not in item_mbids: self._log.debug( diff --git a/beetsplug/musicbrainz.py b/beetsplug/musicbrainz.py new file mode 100644 index 000000000..e1a640d84 --- /dev/null +++ b/beetsplug/musicbrainz.py @@ -0,0 +1,922 @@ +# This file is part of beets. +# Copyright 2016, Adrian Sampson. +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. 
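With the aggregation helpers gone from ``beets.autotag.hooks``, callers either hold their own ``MusicBrainzPlugin`` instance (as ``chroma`` now does via ``cached_property``) or go through the generic plugin hooks, as ``mbsync`` and ``missing`` do above. A hedged sketch of the new call path, assuming a configured beets install with the ``musicbrainz`` plugin enabled:

.. code-block:: python

    from beets import plugins

    # Placeholder MBID; plugins.album_for_id() asks the loaded metadata source
    # plugins and returns an AlbumInfo for the first match, or None.
    album_info = plugins.album_for_id("00000000-0000-0000-0000-000000000000")
    if album_info is not None:
        print(album_info.album, album_info.artist)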
+ +"""Searches for albums in the MusicBrainz database.""" + +from __future__ import annotations + +import re +import traceback +from collections import Counter +from itertools import product +from typing import TYPE_CHECKING, Any +from urllib.parse import urljoin + +import musicbrainzngs + +import beets +import beets.autotag.hooks +from beets import config, plugins, util +from beets.plugins import BeetsPlugin, MetadataSourcePlugin +from beets.util.id_extractors import ( + beatport_id_regex, + deezer_id_regex, + extract_discogs_id_regex, + spotify_id_regex, +) + +if TYPE_CHECKING: + from collections.abc import Iterator, Sequence + + from beets.library import Item + + from ._typing import JSONDict + +VARIOUS_ARTISTS_ID = "89ad4ac3-39f7-470e-963a-56509c546377" + +BASE_URL = "https://musicbrainz.org/" + +SKIPPED_TRACKS = ["[data track]"] + +FIELDS_TO_MB_KEYS = { + "catalognum": "catno", + "country": "country", + "label": "label", + "barcode": "barcode", + "media": "format", + "year": "date", +} + +musicbrainzngs.set_useragent("beets", beets.__version__, "https://beets.io/") + + +class MusicBrainzAPIError(util.HumanReadableError): + """An error while talking to MusicBrainz. The `query` field is the + parameter to the action and may have any type. + """ + + def __init__(self, reason, verb, query, tb=None): + self.query = query + if isinstance(reason, musicbrainzngs.WebServiceError): + reason = "MusicBrainz not reachable" + super().__init__(reason, verb, tb) + + def get_message(self): + return "{} in {} with query {}".format( + self._reasonstr(), self.verb, repr(self.query) + ) + + +RELEASE_INCLUDES = list( + { + "artists", + "media", + "recordings", + "release-groups", + "labels", + "artist-credits", + "aliases", + "recording-level-rels", + "work-rels", + "work-level-rels", + "artist-rels", + "isrcs", + "url-rels", + "release-rels", + "tags", + } + & set(musicbrainzngs.VALID_INCLUDES["release"]) +) + +TRACK_INCLUDES = list( + { + "artists", + "aliases", + "isrcs", + "work-level-rels", + "artist-rels", + } + & set(musicbrainzngs.VALID_INCLUDES["recording"]) +) + +BROWSE_INCLUDES = [ + "artist-credits", + "work-rels", + "artist-rels", + "recording-rels", + "release-rels", +] +if "work-level-rels" in musicbrainzngs.VALID_BROWSE_INCLUDES["recording"]: + BROWSE_INCLUDES.append("work-level-rels") +BROWSE_CHUNKSIZE = 100 +BROWSE_MAXTRACKS = 500 + + +def _preferred_alias(aliases: list[JSONDict]): + """Given an list of alias structures for an artist credit, select + and return the user's preferred alias alias or None if no matching + alias is found. + """ + if not aliases: + return + + # Only consider aliases that have locales set. + valid_aliases = [a for a in aliases if "locale" in a] + + # Get any ignored alias types and lower case them to prevent case issues + ignored_alias_types = config["import"]["ignored_alias_types"].as_str_seq() + ignored_alias_types = [a.lower() for a in ignored_alias_types] + + # Search configured locales in order. 
+ for locale in config["import"]["languages"].as_str_seq(): + # Find matching primary aliases for this locale that are not + # being ignored + matches = [] + for alias in valid_aliases: + if ( + alias["locale"] == locale + and "primary" in alias + and alias.get("type", "").lower() not in ignored_alias_types + ): + matches.append(alias) + + # Skip to the next locale if we have no matches + if not matches: + continue + + return matches[0] + + +def _multi_artist_credit( + credit: list[JSONDict], include_join_phrase: bool +) -> tuple[list[str], list[str], list[str]]: + """Given a list representing an ``artist-credit`` block, accumulate + data into a triple of joined artist name lists: canonical, sort, and + credit. + """ + artist_parts = [] + artist_sort_parts = [] + artist_credit_parts = [] + for el in credit: + if isinstance(el, str): + # Join phrase. + if include_join_phrase: + artist_parts.append(el) + artist_credit_parts.append(el) + artist_sort_parts.append(el) + + else: + alias = _preferred_alias(el["artist"].get("alias-list", ())) + + # An artist. + if alias: + cur_artist_name = alias["alias"] + else: + cur_artist_name = el["artist"]["name"] + artist_parts.append(cur_artist_name) + + # Artist sort name. + if alias: + artist_sort_parts.append(alias["sort-name"]) + elif "sort-name" in el["artist"]: + artist_sort_parts.append(el["artist"]["sort-name"]) + else: + artist_sort_parts.append(cur_artist_name) + + # Artist credit. + if "name" in el: + artist_credit_parts.append(el["name"]) + else: + artist_credit_parts.append(cur_artist_name) + + return ( + artist_parts, + artist_sort_parts, + artist_credit_parts, + ) + + +def track_url(trackid: str) -> str: + return urljoin(BASE_URL, "recording/" + trackid) + + +def _flatten_artist_credit(credit: list[JSONDict]) -> tuple[str, str, str]: + """Given a list representing an ``artist-credit`` block, flatten the + data into a triple of joined artist name strings: canonical, sort, and + credit. + """ + artist_parts, artist_sort_parts, artist_credit_parts = _multi_artist_credit( + credit, include_join_phrase=True + ) + return ( + "".join(artist_parts), + "".join(artist_sort_parts), + "".join(artist_credit_parts), + ) + + +def _artist_ids(credit: list[JSONDict]) -> list[str]: + """ + Given a list representing an ``artist-credit``, + return a list of artist IDs + """ + artist_ids: list[str] = [] + for el in credit: + if isinstance(el, dict): + artist_ids.append(el["artist"]["id"]) + + return artist_ids + + +def _get_related_artist_names(relations, relation_type): + """Given a list representing the artist relationships extract the names of + the remixers and concatenate them. + """ + related_artists = [] + + for relation in relations: + if relation["type"] == relation_type: + related_artists.append(relation["artist"]["name"]) + + return ", ".join(related_artists) + + +def album_url(albumid: str) -> str: + return urljoin(BASE_URL, "release/" + albumid) + + +def _preferred_release_event( + release: dict[str, Any], +) -> tuple[str | None, str | None]: + """Given a release, select and return the user's preferred release + event as a tuple of (country, release_date). Fall back to the + default release event if a preferred event is not found. 
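How the credit-flattening helpers behave on a hypothetical ``artist-credit`` list (join phrases are plain strings, artists are dicts, and a credited name in ``el["name"]`` wins over the canonical artist name for the credit string; alias handling omitted):

.. code-block:: python

    credit = [
        {"artist": {"id": "a1", "name": "Artist A", "sort-name": "A, Artist"}},
        " feat. ",
        {"name": "B (credited)", "artist": {"id": "b2", "name": "Artist B", "sort-name": "B, Artist"}},
    ]

    name = sort = cred = ""
    for el in credit:
        if isinstance(el, str):  # join phrase
            name += el
            sort += el
            cred += el
        else:
            artist = el["artist"]
            name += artist["name"]
            sort += artist["sort-name"]
            cred += el.get("name", artist["name"])

    print(name)  # Artist A feat. Artist B
    print(sort)  # A, Artist feat. B, Artist
    print(cred)  # Artist A feat. B (credited)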
+ """ + preferred_countries: Sequence[str] = config["match"]["preferred"][ + "countries" + ].as_str_seq() + + for country in preferred_countries: + for event in release.get("release-event-list", {}): + try: + if country in event["area"]["iso-3166-1-code-list"]: + return country, event["date"] + except KeyError: + pass + + return release.get("country"), release.get("date") + + +def _set_date_str( + info: beets.autotag.hooks.AlbumInfo, + date_str: str, + original: bool = False, +): + """Given a (possibly partial) YYYY-MM-DD string and an AlbumInfo + object, set the object's release date fields appropriately. If + `original`, then set the original_year, etc., fields. + """ + if date_str: + date_parts = date_str.split("-") + for key in ("year", "month", "day"): + if date_parts: + date_part = date_parts.pop(0) + try: + date_num = int(date_part) + except ValueError: + continue + + if original: + key = "original_" + key + setattr(info, key, date_num) + + +def _parse_id(s: str) -> str | None: + """Search for a MusicBrainz ID in the given string and return it. If + no ID can be found, return None. + """ + # Find the first thing that looks like a UUID/MBID. + match = re.search("[a-f0-9]{8}(-[a-f0-9]{4}){3}-[a-f0-9]{12}", s) + if match is not None: + return match.group() if match else None + return None + + +def _is_translation(r): + _trans_key = "transl-tracklisting" + return r["type"] == _trans_key and r["direction"] == "backward" + + +def _find_actual_release_from_pseudo_release( + pseudo_rel: JSONDict, +) -> JSONDict | None: + try: + relations = pseudo_rel["release"]["release-relation-list"] + except KeyError: + return None + + # currently we only support trans(liter)ation's + translations = [r for r in relations if _is_translation(r)] + + if not translations: + return None + + actual_id = translations[0]["target"] + + return musicbrainzngs.get_release_by_id(actual_id, RELEASE_INCLUDES) + + +def _merge_pseudo_and_actual_album( + pseudo: beets.autotag.hooks.AlbumInfo, actual: beets.autotag.hooks.AlbumInfo +) -> beets.autotag.hooks.AlbumInfo | None: + """ + Merges a pseudo release with its actual release. + + This implementation is naive, it doesn't overwrite fields, + like status or ids. + + According to the ticket PICARD-145, the main release id should be used. + But the ticket has been in limbo since over a decade now. + It also suggests the introduction of the tag `musicbrainz_pseudoreleaseid`, + but as of this field can't be found in any official Picard docs, + hence why we did not implement that for now. + """ + merged = pseudo.copy() + from_actual = { + k: actual[k] + for k in [ + "media", + "mediums", + "country", + "catalognum", + "year", + "month", + "day", + "original_year", + "original_month", + "original_day", + "label", + "barcode", + "asin", + "style", + "genre", + ] + } + merged.update(from_actual) + return merged + + +class MusicBrainzPlugin(BeetsPlugin): + data_source = "Musicbrainz" + + def __init__(self): + """Set up the python-musicbrainz-ngs module according to settings + from the beets configuration. This should be called at startup. 
+ """ + super().__init__() + self.config.add( + { + "host": "musicbrainz.org", + "https": False, + "ratelimit": 1, + "ratelimit_interval": 1, + "searchlimit": 5, + "genres": False, + "external_ids": { + "discogs": False, + "bandcamp": False, + "spotify": False, + "deezer": False, + "tidal": False, + }, + "extra_tags": {}, + }, + ) + hostname = self.config["host"].as_str() + https = self.config["https"].get(bool) + # Only call set_hostname when a custom server is configured. Since + # musicbrainz-ngs connects to musicbrainz.org with HTTPS by default + if hostname != "musicbrainz.org": + musicbrainzngs.set_hostname(hostname, https) + musicbrainzngs.set_rate_limit( + self.config["ratelimit_interval"].as_number(), + self.config["ratelimit"].get(int), + ) + + def track_info( + self, + recording: JSONDict, + index: int | None = None, + medium: int | None = None, + medium_index: int | None = None, + medium_total: int | None = None, + ) -> beets.autotag.hooks.TrackInfo: + """Translates a MusicBrainz recording result dictionary into a beets + ``TrackInfo`` object. Three parameters are optional and are used + only for tracks that appear on releases (non-singletons): ``index``, + the overall track number; ``medium``, the disc number; + ``medium_index``, the track's index on its medium; ``medium_total``, + the number of tracks on the medium. Each number is a 1-based index. + """ + info = beets.autotag.hooks.TrackInfo( + title=recording["title"], + track_id=recording["id"], + index=index, + medium=medium, + medium_index=medium_index, + medium_total=medium_total, + data_source="MusicBrainz", + data_url=track_url(recording["id"]), + ) + + if recording.get("artist-credit"): + # Get the artist names. + ( + info.artist, + info.artist_sort, + info.artist_credit, + ) = _flatten_artist_credit(recording["artist-credit"]) + + ( + info.artists, + info.artists_sort, + info.artists_credit, + ) = _multi_artist_credit( + recording["artist-credit"], include_join_phrase=False + ) + + info.artists_ids = _artist_ids(recording["artist-credit"]) + info.artist_id = info.artists_ids[0] + + if recording.get("artist-relation-list"): + info.remixer = _get_related_artist_names( + recording["artist-relation-list"], relation_type="remixer" + ) + + if recording.get("length"): + info.length = int(recording["length"]) / 1000.0 + + info.trackdisambig = recording.get("disambiguation") + + if recording.get("isrc-list"): + info.isrc = ";".join(recording["isrc-list"]) + + lyricist = [] + composer = [] + composer_sort = [] + for work_relation in recording.get("work-relation-list", ()): + if work_relation["type"] != "performance": + continue + info.work = work_relation["work"]["title"] + info.mb_workid = work_relation["work"]["id"] + if "disambiguation" in work_relation["work"]: + info.work_disambig = work_relation["work"]["disambiguation"] + + for artist_relation in work_relation["work"].get( + "artist-relation-list", () + ): + if "type" in artist_relation: + type = artist_relation["type"] + if type == "lyricist": + lyricist.append(artist_relation["artist"]["name"]) + elif type == "composer": + composer.append(artist_relation["artist"]["name"]) + composer_sort.append( + artist_relation["artist"]["sort-name"] + ) + if lyricist: + info.lyricist = ", ".join(lyricist) + if composer: + info.composer = ", ".join(composer) + info.composer_sort = ", ".join(composer_sort) + + arranger = [] + for artist_relation in recording.get("artist-relation-list", ()): + if "type" in artist_relation: + type = artist_relation["type"] + if type == "arranger": + 
arranger.append(artist_relation["artist"]["name"]) + if arranger: + info.arranger = ", ".join(arranger) + + # Supplementary fields provided by plugins + extra_trackdatas = plugins.send("mb_track_extract", data=recording) + for extra_trackdata in extra_trackdatas: + info.update(extra_trackdata) + + return info + + def album_info(self, release: JSONDict) -> beets.autotag.hooks.AlbumInfo: + """Takes a MusicBrainz release result dictionary and returns a beets + AlbumInfo object containing the interesting data about that release. + """ + # Get artist name using join phrases. + artist_name, artist_sort_name, artist_credit_name = ( + _flatten_artist_credit(release["artist-credit"]) + ) + + ( + artists_names, + artists_sort_names, + artists_credit_names, + ) = _multi_artist_credit( + release["artist-credit"], include_join_phrase=False + ) + + ntracks = sum(len(m["track-list"]) for m in release["medium-list"]) + + # The MusicBrainz API omits 'artist-relation-list' and 'work-relation-list' + # when the release has more than 500 tracks. So we use browse_recordings + # on chunks of tracks to recover the same information in this case. + if ntracks > BROWSE_MAXTRACKS: + self._log.debug("Album {} has too many tracks", release["id"]) + recording_list = [] + for i in range(0, ntracks, BROWSE_CHUNKSIZE): + self._log.debug("Retrieving tracks starting at {}", i) + recording_list.extend( + musicbrainzngs.browse_recordings( + release=release["id"], + limit=BROWSE_CHUNKSIZE, + includes=BROWSE_INCLUDES, + offset=i, + )["recording-list"] + ) + track_map = {r["id"]: r for r in recording_list} + for medium in release["medium-list"]: + for recording in medium["track-list"]: + recording_info = track_map[recording["recording"]["id"]] + recording["recording"] = recording_info + + # Basic info. + track_infos = [] + index = 0 + for medium in release["medium-list"]: + disctitle = medium.get("title") + format = medium.get("format") + + if format in config["match"]["ignored_media"].as_str_seq(): + continue + + all_tracks = medium["track-list"] + if ( + "data-track-list" in medium + and not config["match"]["ignore_data_tracks"] + ): + all_tracks += medium["data-track-list"] + track_count = len(all_tracks) + + if "pregap" in medium: + all_tracks.insert(0, medium["pregap"]) + + for track in all_tracks: + if ( + "title" in track["recording"] + and track["recording"]["title"] in SKIPPED_TRACKS + ): + continue + + if ( + "video" in track["recording"] + and track["recording"]["video"] == "true" + and config["match"]["ignore_video_tracks"] + ): + continue + + # Basic information from the recording. + index += 1 + ti = self.track_info( + track["recording"], + index, + int(medium["position"]), + int(track["position"]), + track_count, + ) + ti.release_track_id = track["id"] + ti.disctitle = disctitle + ti.media = format + ti.track_alt = track["number"] + + # Prefer track data, where present, over recording data. + if track.get("title"): + ti.title = track["title"] + if track.get("artist-credit"): + # Get the artist names. 
+ ( + ti.artist, + ti.artist_sort, + ti.artist_credit, + ) = _flatten_artist_credit(track["artist-credit"]) + + ( + ti.artists, + ti.artists_sort, + ti.artists_credit, + ) = _multi_artist_credit( + track["artist-credit"], include_join_phrase=False + ) + + ti.artists_ids = _artist_ids(track["artist-credit"]) + ti.artist_id = ti.artists_ids[0] + if track.get("length"): + ti.length = int(track["length"]) / (1000.0) + + track_infos.append(ti) + + album_artist_ids = _artist_ids(release["artist-credit"]) + info = beets.autotag.hooks.AlbumInfo( + album=release["title"], + album_id=release["id"], + artist=artist_name, + artist_id=album_artist_ids[0], + artists=artists_names, + artists_ids=album_artist_ids, + tracks=track_infos, + mediums=len(release["medium-list"]), + artist_sort=artist_sort_name, + artists_sort=artists_sort_names, + artist_credit=artist_credit_name, + artists_credit=artists_credit_names, + data_source="MusicBrainz", + data_url=album_url(release["id"]), + barcode=release.get("barcode"), + ) + info.va = info.artist_id == VARIOUS_ARTISTS_ID + if info.va: + info.artist = config["va_name"].as_str() + info.asin = release.get("asin") + info.releasegroup_id = release["release-group"]["id"] + info.albumstatus = release.get("status") + + if release["release-group"].get("title"): + info.release_group_title = release["release-group"].get("title") + + # Get the disambiguation strings at the release and release group level. + if release["release-group"].get("disambiguation"): + info.releasegroupdisambig = release["release-group"].get( + "disambiguation" + ) + if release.get("disambiguation"): + info.albumdisambig = release.get("disambiguation") + + # Get the "classic" Release type. This data comes from a legacy API + # feature before MusicBrainz supported multiple release types. + if "type" in release["release-group"]: + reltype = release["release-group"]["type"] + if reltype: + info.albumtype = reltype.lower() + + # Set the new-style "primary" and "secondary" release types. + albumtypes = [] + if "primary-type" in release["release-group"]: + rel_primarytype = release["release-group"]["primary-type"] + if rel_primarytype: + albumtypes.append(rel_primarytype.lower()) + if "secondary-type-list" in release["release-group"]: + if release["release-group"]["secondary-type-list"]: + for sec_type in release["release-group"]["secondary-type-list"]: + albumtypes.append(sec_type.lower()) + info.albumtypes = albumtypes + + # Release events. + info.country, release_date = _preferred_release_event(release) + release_group_date = release["release-group"].get("first-release-date") + if not release_date: + # Fall back if release-specific date is not available. + release_date = release_group_date + + if release_date: + _set_date_str(info, release_date, False) + _set_date_str(info, release_group_date, True) + + # Label name. + if release.get("label-info-list"): + label_info = release["label-info-list"][0] + if label_info.get("label"): + label = label_info["label"]["name"] + if label != "[no label]": + info.label = label + info.catalognum = label_info.get("catalog-number") + + # Text representation data. + if release.get("text-representation"): + rep = release["text-representation"] + info.script = rep.get("script") + info.language = rep.get("language") + + # Media (format). 
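Release dates often arrive partial ("1994" or "1994-09"), and the date handling only fills the fields that are actually present. A self-contained sketch of that split:

.. code-block:: python

    def split_date(date_str: str) -> dict:
        parts = date_str.split("-")
        out = {}
        for key in ("year", "month", "day"):
            if parts:
                part = parts.pop(0)
                try:
                    out[key] = int(part)
                except ValueError:
                    continue  # non-numeric part: skip this field
        return out

    print(split_date("1994-09"))  # {'year': 1994, 'month': 9}
    print(split_date("1994"))     # {'year': 1994}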
+ if release["medium-list"]: + # If all media are the same, use that medium name + if len({m.get("format") for m in release["medium-list"]}) == 1: + info.media = release["medium-list"][0].get("format") + # Otherwise, let's just call it "Media" + else: + info.media = "Media" + + if self.config["genres"]: + sources = [ + release["release-group"].get("tag-list", []), + release.get("tag-list", []), + ] + genres: Counter[str] = Counter() + for source in sources: + for genreitem in source: + genres[genreitem["name"]] += int(genreitem["count"]) + info.genre = "; ".join( + genre + for genre, _count in sorted(genres.items(), key=lambda g: -g[1]) + ) + + # We might find links to external sources (Discogs, Bandcamp, ...) + external_ids = self.config["external_ids"].get() + wanted_sources = { + site for site, wanted in external_ids.items() if wanted + } + if wanted_sources and (url_rels := release.get("url-relation-list")): + urls = {} + + for source, url in product(wanted_sources, url_rels): + if f"{source}.com" in (target := url["target"]): + urls[source] = target + self._log.debug( + "Found link to {} release via MusicBrainz", + source.capitalize(), + ) + + if "discogs" in urls: + info.discogs_albumid = extract_discogs_id_regex(urls["discogs"]) + if "bandcamp" in urls: + info.bandcamp_album_id = urls["bandcamp"] + if "spotify" in urls: + info.spotify_album_id = MetadataSourcePlugin._get_id( + "album", urls["spotify"], spotify_id_regex + ) + if "deezer" in urls: + info.deezer_album_id = MetadataSourcePlugin._get_id( + "album", urls["deezer"], deezer_id_regex + ) + if "beatport" in urls: + info.beatport_album_id = MetadataSourcePlugin._get_id( + "album", urls["beatport"], beatport_id_regex + ) + if "tidal" in urls: + info.tidal_album_id = urls["tidal"].split("/")[-1] + + extra_albumdatas = plugins.send("mb_album_extract", data=release) + for extra_albumdata in extra_albumdatas: + info.update(extra_albumdata) + + return info + + def candidates( + self, + items: list[Item], + artist: str, + album: str, + va_likely: bool, + extra_tags: dict[str, Any] | None = None, + ) -> Iterator[beets.autotag.hooks.AlbumInfo]: + """Searches for a single album ("release" in MusicBrainz parlance) + and returns an iterator over AlbumInfo objects. May raise a + MusicBrainzAPIError. + + The query consists of an artist name, an album name, and, + optionally, a number of tracks on the album and any other extra tags. + """ + # Build search criteria. + criteria = {"release": album.lower().strip()} + if artist is not None: + criteria["artist"] = artist.lower().strip() + else: + # Various Artists search. + criteria["arid"] = VARIOUS_ARTISTS_ID + if track_count := len(items): + criteria["tracks"] = str(track_count) + + if self.config["extra_tags"]: + tag_list = self.config["extra_tags"].get() + self._log.debug("Additional search terms: {0}", tag_list) + for tag, value in tag_list.items(): + if key := FIELDS_TO_MB_KEYS.get(tag): + value = str(value).lower().strip() + if key == "catno": + value = value.replace(" ", "") + if value: + criteria[key] = value + + # Abort if we have no search terms. 
+ if not any(criteria.values()): + return + + try: + self._log.debug( + "Searching for MusicBrainz releases with: {!r}", criteria + ) + res = musicbrainzngs.search_releases( + limit=self.config["searchlimit"].get(int), **criteria + ) + except musicbrainzngs.MusicBrainzError as exc: + raise MusicBrainzAPIError( + exc, "release search", criteria, traceback.format_exc() + ) + for release in res["release-list"]: + # The search result is missing some data (namely, the tracks), + # so we just use the ID and fetch the rest of the information. + albuminfo = self.album_for_id(release["id"]) + if albuminfo is not None: + yield albuminfo + + def item_candidates( + self, item: Item, artist: str, title: str + ) -> Iterator[beets.autotag.hooks.TrackInfo]: + """Searches for a single track and returns an iterable of TrackInfo + objects. May raise a MusicBrainzAPIError. + """ + criteria = { + "artist": artist.lower().strip(), + "recording": title.lower().strip(), + } + + if not any(criteria.values()): + return + + try: + res = musicbrainzngs.search_recordings( + limit=self.config["searchlimit"].get(int), **criteria + ) + except musicbrainzngs.MusicBrainzError as exc: + raise MusicBrainzAPIError( + exc, "recording search", criteria, traceback.format_exc() + ) + for recording in res["recording-list"]: + yield self.track_info(recording) + + def album_for_id( + self, album_id: str + ) -> beets.autotag.hooks.AlbumInfo | None: + """Fetches an album by its MusicBrainz ID and returns an AlbumInfo + object or None if the album is not found. May raise a + MusicBrainzAPIError. + """ + self._log.debug("Requesting MusicBrainz release {}", album_id) + albumid = _parse_id(album_id) + if not albumid: + self._log.debug("Invalid MBID ({0}).", album_id) + return None + try: + res = musicbrainzngs.get_release_by_id(albumid, RELEASE_INCLUDES) + + # resolve linked release relations + actual_res = None + + if res["release"].get("status") == "Pseudo-Release": + actual_res = _find_actual_release_from_pseudo_release(res) + + except musicbrainzngs.ResponseError: + self._log.debug("Album ID match failed.") + return None + except musicbrainzngs.MusicBrainzError as exc: + raise MusicBrainzAPIError( + exc, "get release by ID", albumid, traceback.format_exc() + ) + + # release is potentially a pseudo release + release = self.album_info(res["release"]) + + # should be None unless we're dealing with a pseudo release + if actual_res is not None: + actual_release = self.album_info(actual_res["release"]) + return _merge_pseudo_and_actual_album(release, actual_release) + else: + return release + + def track_for_id( + self, track_id: str + ) -> beets.autotag.hooks.TrackInfo | None: + """Fetches a track by its MusicBrainz ID. Returns a TrackInfo object + or None if no track is found. May raise a MusicBrainzAPIError. 
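Extra tags from the configuration are translated into MusicBrainz search keys via ``FIELDS_TO_MB_KEYS`` before the query is sent, and catalog numbers additionally lose their spaces. A sketch of the criteria that would be built for a hypothetical album:

.. code-block:: python

    FIELDS_TO_MB_KEYS = {
        "catalognum": "catno",
        "country": "country",
        "label": "label",
        "barcode": "barcode",
        "media": "format",
        "year": "date",
    }

    extra_tags = {"year": 1994, "catalognum": "CAD 4011", "media": "CD"}
    criteria = {"release": "an album", "artist": "an artist", "tracks": "12"}

    for tag, value in extra_tags.items():
        if key := FIELDS_TO_MB_KEYS.get(tag):
            value = str(value).lower().strip()
            if key == "catno":
                value = value.replace(" ", "")
            if value:
                criteria[key] = value

    print(criteria)
    # {'release': 'an album', 'artist': 'an artist', 'tracks': '12',
    #  'date': '1994', 'catno': 'cad4011', 'format': 'cd'}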
+ """ + trackid = _parse_id(track_id) + if not trackid: + self._log.debug("Invalid MBID ({0}).", track_id) + return None + try: + res = musicbrainzngs.get_recording_by_id(trackid, TRACK_INCLUDES) + except musicbrainzngs.ResponseError: + self._log.debug("Track ID match failed.") + return None + except musicbrainzngs.MusicBrainzError as exc: + raise MusicBrainzAPIError( + exc, "get recording by ID", trackid, traceback.format_exc() + ) + return self.track_info(res["recording"]) diff --git a/docs/changelog.rst b/docs/changelog.rst index ab0b9519d..0f84f2473 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -10,14 +10,23 @@ New features: Bug fixes: +For packagers: + +Other changes: + +2.3.1 (May 14, 2025) +-------------------- + +Bug fixes: * :doc:`/reference/pathformat`: Fixed a regression where path legalization incorrectly removed parts of user-configured path formats that followed a dot (**.**). :bug:`5771` For packagers: - -Other changes: +* Force ``poetry`` version below 2 to avoid it mangling file modification times + in ``sdist`` package. + :bug:`5770` 2.3.0 (May 07, 2025) -------------------- @@ -27,6 +36,13 @@ been dropped. New features: +* :doc:`plugins/musicbrainz`: The MusicBrainz autotagger has been moved to + a separate plugin. The default :ref:`plugins-config` includes `musicbrainz`, + but if you've customized your `plugins` list in your configuration, you'll + need to explicitly add `musicbrainz` to continue using this functionality. + Configuration option `musicbrainz.enabled` has thus been deprecated. + :bug:`2686` + :bug:`4605` * :doc:`plugins/lastgenre`: The new configuration option, ``keep_existing``, provides more fine-grained control over how pre-populated genre tags are handled. The ``force`` option now behaves in a more conventional manner. @@ -115,8 +131,8 @@ Other changes: :bug:`5539` * :doc:`/plugins/smartplaylist`: URL-encode additional item `fields` within generated EXTM3U playlists instead of JSON-encoding them. -* typehints: `./beets/importer.py` file now has improved typehints. -* typehints: `./beets/plugins.py` file now includes typehints. +* typehints: `./beets/importer.py` file now has improved typehints. +* typehints: `./beets/plugins.py` file now includes typehints. * :doc:`plugins/ftintitle`: Optimize the plugin by avoiding unnecessary writes to the database. * Database models are now serializable with pickle. diff --git a/docs/conf.py b/docs/conf.py index fafabef70..497c5e71e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,7 +12,7 @@ project = "beets" copyright = "2016, Adrian Sampson" version = "2.3" -release = "2.3.0" +release = "2.3.1" pygments_style = "sphinx" diff --git a/docs/plugins/index.rst b/docs/plugins/index.rst index b7998ef19..82fa94281 100644 --- a/docs/plugins/index.rst +++ b/docs/plugins/index.rst @@ -13,7 +13,9 @@ Using Plugins ------------- To use one of the plugins included with beets (see the rest of this page for a -list), just use the ``plugins`` option in your :doc:`config.yaml ` file, like so:: +list), just use the ``plugins`` option in your :doc:`config.yaml ` file: + +.. code-block:: sh plugins: inline convert web @@ -21,7 +23,9 @@ The value for ``plugins`` can be a space-separated list of plugin names or a YAML list like ``[foo, bar]``. You can see which plugins are currently enabled by typing ``beet version``. -Each plugin has its own set of options that can be defined in a section bearing its name:: +Each plugin has its own set of options that can be defined in a section bearing its name: + +.. 
code-block:: yaml plugins: inline convert web @@ -30,10 +34,11 @@ Each plugin has its own set of options that can be defined in a section bearing Some plugins have special dependencies that you'll need to install. The documentation page for each plugin will list them in the setup instructions. -For some, you can use ``pip``'s "extras" feature to install the dependencies, -like this:: +For some, you can use ``pip``'s "extras" feature to install the dependencies: - pip install beets[fetchart,lyrics,lastgenre] +.. code-block:: sh + + pip install "beets[fetchart,lyrics,lastgenre]" .. _metadata-source-plugin-configuration: @@ -48,7 +53,9 @@ plugins share the following configuration option: Default: ``0.5``. For example, to equally consider matches from Discogs and MusicBrainz add the -following to your configuration:: +following to your configuration: + +.. code-block:: yaml plugins: discogs @@ -111,6 +118,7 @@ following to your configuration:: missing mpdstats mpdupdate + musicbrainz parentwork permissions play @@ -142,21 +150,26 @@ Autotagger Extensions Use acoustic fingerprinting to identify audio files with missing or incorrect metadata. -:doc:`discogs ` - Search for releases in the `Discogs`_ database. - -:doc:`spotify ` - Search for releases in the `Spotify`_ database. - :doc:`deezer ` Search for releases in the `Deezer`_ database. +:doc:`discogs ` + Search for releases in the `Discogs`_ database. + :doc:`fromfilename ` Guess metadata for untagged tracks from their filenames. -.. _Discogs: https://www.discogs.com/ +:doc:`musicbrainz ` + Search for releases in the `MusicBrainz`_ database. + +:doc:`spotify ` + Search for releases in the `Spotify`_ database. + + +.. _Deezer: https://www.deezer.com +.. _Discogs: https://www.discogs.com +.. _MusicBrainz: https://www.musicbrainz.com .. _Spotify: https://www.spotify.com -.. _Deezer: https://www.deezer.com/ Metadata -------- @@ -465,6 +478,10 @@ Here are a few of the plugins written by the beets community: `dsedivec`_ Has two plugins: ``edit`` and ``moveall``. +`beets-filetote`_ + Helps bring non-music extra files, attachments, and artifacts during + imports and CLI file manipulation actions (`beet move`, etc.). + `beets-follow`_ Lets you check for new albums from artists you like. @@ -560,6 +577,7 @@ Here are a few of the plugins written by the beets community: .. _cmus: http://cmus.sourceforge.net/ .. _beet-amazon: https://github.com/jmwatte/beet-amazon .. _beets-alternatives: https://github.com/geigerzaehler/beets-alternatives +.. _beets-filetote: https://github.com/gtronset/beets-filetote .. _beets-follow: https://github.com/nolsto/beets-follow .. _beets-ibroadcast: https://github.com/ctrueden/beets-ibroadcast .. _iBroadcast: https://ibroadcast.com/ diff --git a/docs/plugins/musicbrainz.rst b/docs/plugins/musicbrainz.rst new file mode 100644 index 000000000..ef10be66d --- /dev/null +++ b/docs/plugins/musicbrainz.rst @@ -0,0 +1,153 @@ +MusicBrainz Plugin +================== + +The ``musicbrainz`` plugin extends the autotagger's search capabilities to +include matches from the `MusicBrainz`_ database. + +.. _MusicBrainz: https://musicbrainz.org/ + +Installation +------------ + +To use the ``musicbrainz`` plugin, enable it in your configuration (see +:ref:`using-plugins`) + +.. _musicbrainz-config: + +Configuration +------------- + +Default +^^^^^^^ + +.. 
code-block:: yaml + + musicbrainz: + host: musicbrainz.org + https: no + ratelimit: 1 + ratelimit_interval: 1.0 + searchlimit: 5 + extra_tags: [] + genres: no + external_ids: + discogs: no + bandcamp: no + spotify: no + deezer: no + beatport: no + tidal: no + + +You can instruct beets to use `your own MusicBrainz database`_ instead of +the `main server`_. Use the ``host``, ``https`` and ``ratelimit`` options +under a ``musicbrainz:`` header, like so + +.. code-block:: yaml + + musicbrainz: + host: localhost:5000 + https: no + ratelimit: 100 + +The ``host`` key, of course, controls the Web server hostname (and port, +optionally) that will be contacted by beets (default: musicbrainz.org). +The ``https`` key makes the client use HTTPS instead of HTTP. This setting applies +only to custom servers. The official MusicBrainz server always uses HTTPS. (Default: no.) +The server must have search indices enabled (see `Building search indexes`_). + +The ``ratelimit`` option, an integer, controls the number of Web service requests +per second (default: 1). **Do not change the rate limit setting** if you're +using the main MusicBrainz server---on this public server, you're `limited`_ +to one request per second. + +.. _your own MusicBrainz database: https://musicbrainz.org/doc/MusicBrainz_Server/Setup +.. _main server: https://musicbrainz.org/ +.. _limited: https://musicbrainz.org/doc/XML_Web_Service/Rate_Limiting +.. _Building search indexes: https://musicbrainz.org/doc/Development/Search_server_setup + +.. _musicbrainz.enabled: + +enabled +~~~~~~~ + +.. deprecated:: 2.3 + Add `musicbrainz` to the `plugins` list instead. + +This option allows you to disable using MusicBrainz as a metadata source. This applies +if you use plugins that fetch data from alternative sources and should make the import +process quicker. + +Default: ``yes``. + +.. _searchlimit: + +searchlimit +~~~~~~~~~~~ + +The number of matches returned when sending search queries to the +MusicBrainz server. + +Default: ``5``. + +.. _extra_tags: + +extra_tags +~~~~~~~~~~ + +By default, beets will use only the artist, album, and track count to query +MusicBrainz. Additional tags to be queried can be supplied with the +``extra_tags`` setting. For example + +.. code-block:: yaml + + musicbrainz: + extra_tags: [year, catalognum, country, media, label] + +This setting should improve the autotagger results if the metadata with the +given tags match the metadata returned by MusicBrainz. + +Note that the only tags supported by this setting are the ones listed in the +above example. + +Default: ``[]`` + +.. _genres: + +genres +~~~~~~ + +Use MusicBrainz genre tags to populate (and replace if it's already set) the +``genre`` tag. This will make it a list of all the genres tagged for the +release and the release-group on MusicBrainz, separated by "; " and sorted by +the total number of votes. +Default: ``no`` + +.. _musicbrainz.external_ids: + +external_ids +~~~~~~~~~~~~ + +Set any of the ``external_ids`` options to ``yes`` to enable the MusicBrainz +importer to look for links to related metadata sources. If such a link is +available the release ID will be extracted from the URL provided and imported +to the beets library + +.. 
code-block:: yaml + + musicbrainz: + external_ids: + discogs: yes + spotify: yes + bandcamp: yes + beatport: yes + deezer: yes + tidal: yes + + +The library fields of the corresponding :ref:`autotagger_extensions` are used +to save the data (``discogs_albumid``, ``bandcamp_album_id``, +``spotify_album_id``, ``beatport_album_id``, ``deezer_album_id``, +``tidal_album_id``). On re-imports existing data will be overwritten. + +The default of all options is ``no``. diff --git a/docs/reference/config.rst b/docs/reference/config.rst index 234185e79..7e93b00ff 100644 --- a/docs/reference/config.rst +++ b/docs/reference/config.rst @@ -58,6 +58,8 @@ directory The directory to which files will be copied/moved when adding them to the library. Defaults to a folder called ``Music`` in your home directory. +.. _plugins-config: + plugins ~~~~~~~ @@ -874,115 +876,6 @@ This feature is currently supported by the :doc:`/plugins/discogs` and the Default: ``yes``. -.. _musicbrainz-config: - -MusicBrainz Options -------------------- - -You can instruct beets to use `your own MusicBrainz database`_ instead of -the `main server`_. Use the ``host``, ``https`` and ``ratelimit`` options -under a ``musicbrainz:`` header, like so:: - - musicbrainz: - host: localhost:5000 - https: no - ratelimit: 100 - -The ``host`` key, of course, controls the Web server hostname (and port, -optionally) that will be contacted by beets (default: musicbrainz.org). -The ``https`` key makes the client use HTTPS instead of HTTP. This setting applies -only to custom servers. The official MusicBrainz server always uses HTTPS. (Default: no.) -The server must have search indices enabled (see `Building search indexes`_). - -The ``ratelimit`` option, an integer, controls the number of Web service requests -per second (default: 1). **Do not change the rate limit setting** if you're -using the main MusicBrainz server---on this public server, you're `limited`_ -to one request per second. - -.. _your own MusicBrainz database: https://musicbrainz.org/doc/MusicBrainz_Server/Setup -.. _main server: https://musicbrainz.org/ -.. _limited: https://musicbrainz.org/doc/XML_Web_Service/Rate_Limiting -.. _Building search indexes: https://musicbrainz.org/doc/Development/Search_server_setup - -.. _musicbrainz.enabled: - -enabled -~~~~~~~ - -This option allows you to disable using MusicBrainz as a metadata source. This applies -if you use plugins that fetch data from alternative sources and should make the import -process quicker. - -Default: ``yes``. - -.. _searchlimit: - -searchlimit -~~~~~~~~~~~ - -The number of matches returned when sending search queries to the -MusicBrainz server. - -Default: ``5``. - -.. _extra_tags: - -extra_tags -~~~~~~~~~~ - -By default, beets will use only the artist, album, and track count to query -MusicBrainz. Additional tags to be queried can be supplied with the -``extra_tags`` setting. For example:: - - musicbrainz: - extra_tags: [year, catalognum, country, media, label] - -This setting should improve the autotagger results if the metadata with the -given tags match the metadata returned by MusicBrainz. - -Note that the only tags supported by this setting are the ones listed in the -above example. - -Default: ``[]`` - -.. _genres: - -genres -~~~~~~ - -Use MusicBrainz genre tags to populate (and replace if it's already set) the -``genre`` tag. This will make it a list of all the genres tagged for the -release and the release-group on MusicBrainz, separated by "; " and sorted by -the total number of votes. -Default: ``no`` - -.. 
_musicbrainz.external_ids: - -external_ids -~~~~~~~~~~~~ - -Set any of the ``external_ids`` options to ``yes`` to enable the MusicBrainz -importer to look for links to related metadata sources. If such a link is -available the release ID will be extracted from the URL provided and imported -to the beets library:: - - musicbrainz: - external_ids: - discogs: yes - spotify: yes - bandcamp: yes - beatport: yes - deezer: yes - tidal: yes - - -The library fields of the corresponding :ref:`autotagger_extensions` are used -to save the data (``discogs_albumid``, ``bandcamp_album_id``, -``spotify_album_id``, ``beatport_album_id``, ``deezer_album_id``, -``tidal_album_id``). On re-imports existing data will be overwritten. - -The default of all options is ``no``. - .. _match-config: Autotagger Matching Options diff --git a/poetry.lock b/poetry.lock index fe9dec791..bdd0ee0ca 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "accessible-pygments" @@ -6,8 +6,6 @@ version = "0.0.5" description = "A collection of accessible pygments styles" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7"}, {file = "accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872"}, @@ -26,8 +24,6 @@ version = "0.7.16" description = "A light, configurable Sphinx theme" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, @@ -39,7 +35,6 @@ version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" -groups = ["main", "test"] files = [ {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, @@ -53,7 +48,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -62,8 +57,6 @@ version = "1.4.4" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"sonosupdate\"" files = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, @@ -75,8 +68,6 @@ version = "3.0.1" description = "Multi-library, cross-platform audio decoding." optional = true python-versions = ">=3.6" -groups = ["main"] -markers = "extra == \"autobpm\" or extra == \"chroma\"" files = [ {file = "audioread-3.0.1-py3-none-any.whl", hash = "sha256:4cdce70b8adc0da0a3c9e0d85fb10b3ace30fbdf8d1670fd443929b61d117c33"}, {file = "audioread-3.0.1.tar.gz", hash = "sha256:ac5460a5498c48bdf2e8e767402583a4dcd13f4414d286f42ce4379e8b35066d"}, @@ -91,15 +82,13 @@ version = "2.17.0" description = "Internationalization utilities" optional = true python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] -dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] [[package]] name = "beautifulsoup4" @@ -107,7 +96,6 @@ version = "4.13.4" description = "Screen-scraping library" optional = false python-versions = ">=3.7.0" -groups = ["main", "test"] files = [ {file = "beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b"}, {file = "beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195"}, @@ -130,7 +118,6 @@ version = "1.9.0" description = "Fast, simple object-to-object and broadcast signaling" optional = false python-versions = ">=3.9" -groups = ["main", "test", "typing"] files = [ {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}, {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}, @@ -142,8 +129,6 @@ version = "1.1.0" description = "Python bindings for the Brotli compression library" optional = false python-versions = "*" -groups = ["main", "test"] -markers = "platform_python_implementation == \"CPython\"" files = [ {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"}, {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"}, @@ -278,8 +263,6 @@ version = "1.1.0.0" description = "Python CFFI bindings to the Brotli library" optional = false python-versions = ">=3.7" -groups = ["main", "test"] -markers = "platform_python_implementation == \"PyPy\"" files = [ {file = "brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851"}, {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b"}, @@ -319,7 +302,6 @@ version = "2025.4.26" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" -groups = ["main", "test"] files = [ {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, @@ -331,7 +313,6 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -401,7 +382,6 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] -markers = {main = "extra == \"autobpm\" or extra == \"reflink\" or platform_python_implementation == \"PyPy\"", test = "platform_python_implementation == \"PyPy\""} [package.dependencies] pycparser = "*" @@ -412,7 +392,6 @@ version = "3.4.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" -groups = ["main", "test"] files = [ {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, @@ -514,7 +493,6 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" -groups = ["main", "release", "test", "typing"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -529,7 +507,6 @@ version = "2.1.13" description = "Hosted coverage reports for GitHub, Bitbucket and Gitlab" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["test"] files = [ {file = "codecov-2.1.13-py2.py3-none-any.whl", hash = "sha256:c2ca5e51bba9ebb43644c43d0690148a55086f7f5e6fd36170858fa4206744d5"}, {file = "codecov-2.1.13.tar.gz", hash = "sha256:2362b685633caeaf45b9951a9b76ce359cd3581dd515b430c6c3f5dfb4d92a8c"}, @@ -545,8 +522,6 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main", "release", "test", "typing"] -markers = "sys_platform == \"win32\" or platform_system == \"Windows\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -558,7 +533,6 @@ version = "2.0.1" description = "Painless YAML configuration." optional = false python-versions = ">=3.6" -groups = ["main"] files = [ {file = "confuse-2.0.1-py3-none-any.whl", hash = "sha256:9b9e5bbc70e2cb9b318bcab14d917ec88e21bf1b724365e3815eb16e37aabd2a"}, {file = "confuse-2.0.1.tar.gz", hash = "sha256:7379a2ad49aaa862b79600cc070260c1b7974d349f4fa5e01f9afa6c4dd0611f"}, @@ -573,7 +547,6 @@ version = "7.8.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" -groups = ["test"] files = [ {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, @@ -644,7 +617,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +toml = ["tomli"] [[package]] name = "dbus-python" @@ -652,8 +625,6 @@ version = "1.4.0" description = "Python bindings for libdbus" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"metasync\"" files = [ {file = "dbus-python-1.4.0.tar.gz", hash = "sha256:991666e498f60dbf3e49b8b7678f5559b8a65034fdf61aae62cdecdb7d89c770"}, ] @@ -668,8 +639,6 @@ version = "5.2.1" description = "Decorators for Humans" optional = true python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, @@ -681,8 +650,6 @@ version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, @@ -690,17 +657,18 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.2" +version = "1.3.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" -groups = ["main", "test"] -markers = "python_version < \"3.11\"" files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = 
"sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + [package.extras] test = ["pytest (>=6)"] @@ -710,7 +678,6 @@ version = "1.2.0" description = "Infer file type and MIME type of any file/buffer. No external dependencies." optional = false python-versions = "*" -groups = ["main"] files = [ {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, @@ -718,23 +685,23 @@ files = [ [[package]] name = "flask" -version = "3.1.0" +version = "3.1.1" description = "A simple framework for building complex web applications." optional = false python-versions = ">=3.9" -groups = ["main", "test", "typing"] files = [ - {file = "flask-3.1.0-py3-none-any.whl", hash = "sha256:d667207822eb83f1c4b50949b1623c8fc8d51f2341d65f72e1a1815397551136"}, - {file = "flask-3.1.0.tar.gz", hash = "sha256:5f873c5184c897c8d9d1b05df1e3d01b14910ce69607a117bd3277098a5836ac"}, + {file = "flask-3.1.1-py3-none-any.whl", hash = "sha256:07aae2bb5eaf77993ef57e357491839f5fd9f4dc281593a81a9e4d79a24f295c"}, + {file = "flask-3.1.1.tar.gz", hash = "sha256:284c7b8f2f58cb737f0cf1c30fd7eaf0ccfcde196099d24ecede3fc2005aa59e"}, ] [package.dependencies] -blinker = ">=1.9" +blinker = ">=1.9.0" click = ">=8.1.3" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -itsdangerous = ">=2.2" -Jinja2 = ">=3.1.2" -Werkzeug = ">=3.1" +importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} +itsdangerous = ">=2.2.0" +jinja2 = ">=3.1.2" +markupsafe = ">=2.1.1" +werkzeug = ">=3.1.0" [package.extras] async = ["asgiref (>=3.2)"] @@ -746,8 +713,6 @@ version = "5.0.1" description = "A Flask extension simplifying CORS support" optional = true python-versions = "<4.0,>=3.9" -groups = ["main"] -markers = "extra == \"aura\" or extra == \"web\"" files = [ {file = "flask_cors-5.0.1-py3-none-any.whl", hash = "sha256:fa5cb364ead54bbf401a26dbf03030c6b18fb2fcaf70408096a572b409586b0c"}, {file = "flask_cors-5.0.1.tar.gz", hash = "sha256:6ccb38d16d6b72bbc156c1c3f192bc435bfcc3c2bc864b2df1eb9b2d97b2403c"}, @@ -763,7 +728,6 @@ version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, @@ -775,7 +739,6 @@ version = "1.0.9" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, @@ -797,7 +760,6 @@ version = "0.28.1" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -810,7 +772,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -822,7 +784,6 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["main", "test"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -837,8 +798,6 @@ version = "0.2.0" description = "Cross-platform network interface and IP address enumeration library" optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"sonosupdate\"" files = [ {file = "ifaddr-0.2.0-py3-none-any.whl", hash = "sha256:085e0305cfe6f16ab12d72e2024030f5d52674afad6911bb1eee207177b8a748"}, {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, @@ -850,8 +809,6 @@ version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, @@ -863,8 +820,6 @@ version = "8.7.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.9" -groups = ["main", "test", "typing"] -markers = "python_version == \"3.9\"" files = [ {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, @@ -874,12 +829,12 @@ files = [ zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -888,7 +843,6 @@ version = "1.0.1" description = "deflate64 compression/decompression library" optional = false python-versions = ">=3.9" -groups = ["main", "test"] files = [ {file = 
"inflate64-1.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5122a188995e47a735ab969edc9129d42bbd97b993df5a3f0819b87205ce81b4"}, {file = "inflate64-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:975ed694c680e46a5c0bb872380a9c9da271a91f9c0646561c58e8f3714347d4"}, @@ -944,7 +898,6 @@ version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" -groups = ["test"] files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, @@ -956,7 +909,6 @@ version = "2.2.0" description = "Safely pass data to untrusted environments and back." optional = false python-versions = ">=3.8" -groups = ["main", "test", "typing"] files = [ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, @@ -968,7 +920,6 @@ version = "1.2.0" description = "Approximate and phonetic matching of strings." optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "jellyfish-1.2.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:50b6d2a123d3e0929cf136c6c26a6774338be7eb9d140a94223f56e3339b2f80"}, {file = "jellyfish-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:baa1e44244cba9da6d67a50d3076dd7567e3b91caa9887bb68e20f321e0d2500"}, @@ -1053,7 +1004,6 @@ version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" -groups = ["main", "test", "typing"] files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, @@ -1071,8 +1021,6 @@ version = "1.5.0" description = "Lightweight pipelining with Python functions" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "joblib-1.5.0-py3-none-any.whl", hash = "sha256:206144b320246485b712fc8cc51f017de58225fa8b414a1fe1764a7231aca491"}, {file = "joblib-1.5.0.tar.gz", hash = "sha256:d8757f955389a3dd7a23152e43bc297c2e0c2d3060056dad0feefc88a06939b5"}, @@ -1084,8 +1032,6 @@ version = "1.0.9" description = "Language detection library ported from Google's language-detection." optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"fetchart\" or extra == \"lyrics\"" files = [ {file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"}, {file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"}, @@ -1100,7 +1046,6 @@ version = "0.5.12" description = "Linear Assignment Problem solver (LAPJV/LAPMOD)." optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "lap-0.5.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c3a38070b24531949e30d7ebc83ca533fcbef6b1d6562f035cae3b44dfbd5ec"}, {file = "lap-0.5.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a301dc9b8a30e41e4121635a0e3d0f6374a08bb9509f618d900e18d209b815c4"}, @@ -1167,8 +1112,6 @@ version = "0.4" description = "Makes it easy to load subpackages and functions on demand." 
optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "lazy_loader-0.4-py3-none-any.whl", hash = "sha256:342aa8e14d543a154047afb4ba8ef17f5563baad3fc610d7b15b213b0f119efc"}, {file = "lazy_loader-0.4.tar.gz", hash = "sha256:47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1"}, @@ -1188,8 +1131,6 @@ version = "0.10.2.post1" description = "Python module for audio and music processing" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "librosa-0.10.2.post1-py3-none-any.whl", hash = "sha256:dc882750e8b577a63039f25661b7e39ec4cfbacc99c1cffba666cd664fb0a7a0"}, {file = "librosa-0.10.2.post1.tar.gz", hash = "sha256:cd99f16717cbcd1e0983e37308d1db46a6f7dfc2e396e5a9e61e6821e44bd2e7"}, @@ -1221,8 +1162,6 @@ version = "0.43.0" description = "lightweight wrapper around basic LLVM functionality" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"}, {file = "llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"}, @@ -1253,8 +1192,6 @@ version = "5.4.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = true python-versions = ">=3.6" -groups = ["main"] -markers = "extra == \"sonosupdate\"" files = [ {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c"}, {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7"}, @@ -1400,7 +1337,6 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" -groups = ["main", "test", "typing"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1471,7 +1407,6 @@ version = "0.13.0" description = "Handles low-level interfacing for files' tags. 
Wraps Mutagen to" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "mediafile-0.13.0-py3-none-any.whl", hash = "sha256:cd8d183d0e0671b5203a86e92cf4e3338ecc892a1ec9dcd7ec0ed87779e514cb"}, {file = "mediafile-0.13.0.tar.gz", hash = "sha256:de71063e1bffe9733d6ccad526ea7dac8a9ce760105827f81ab0cb034c729a6d"}, @@ -1490,7 +1425,6 @@ version = "5.2.0" description = "Rolling backport of unittest.mock for all Pythons" optional = false python-versions = ">=3.6" -groups = ["test"] files = [ {file = "mock-5.2.0-py3-none-any.whl", hash = "sha256:7ba87f72ca0e915175596069dbbcc7c75af7b5e9b9bc107ad6349ede0819982f"}, {file = "mock-5.2.0.tar.gz", hash = "sha256:4e460e818629b4b173f32d08bf30d3af8123afbb8e04bb5707a1fd4799e503f0"}, @@ -1507,8 +1441,6 @@ version = "1.1.0" description = "MessagePack serializer" optional = true python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, @@ -1582,7 +1514,6 @@ version = "0.2.3" description = "multi volume file wrapper library" optional = false python-versions = ">=3.6" -groups = ["main", "test"] files = [ {file = "multivolumefile-0.2.3-py3-none-any.whl", hash = "sha256:237f4353b60af1703087cf7725755a1f6fcaeeea48421e1896940cd1c920d678"}, {file = "multivolumefile-0.2.3.tar.gz", hash = "sha256:a0648d0aafbc96e59198d5c17e9acad7eb531abea51035d08ce8060dcad709d6"}, @@ -1599,7 +1530,6 @@ version = "0.7.1" description = "Python bindings for the MusicBrainz NGS and the Cover Art Archive webservices" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["main"] files = [ {file = "musicbrainzngs-0.7.1-py2.py3-none-any.whl", hash = "sha256:e841a8f975104c0a72290b09f59326050194081a5ae62ee512f41915090e1a10"}, {file = "musicbrainzngs-0.7.1.tar.gz", hash = "sha256:ab1c0100fd0b305852e65f2ed4113c6de12e68afd55186987b8ed97e0f98e627"}, @@ -1611,7 +1541,6 @@ version = "1.47.0" description = "read and write audio tags for many formats" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "mutagen-1.47.0-py3-none-any.whl", hash = "sha256:edd96f50c5907a9539d8e5bba7245f62c9f520aef333d13392a79a4f70aca719"}, {file = "mutagen-1.47.0.tar.gz", hash = "sha256:719fadef0a978c31b4cf3c956261b3c58b6948b32023078a2117b1de09f0fc99"}, @@ -1623,7 +1552,6 @@ version = "1.15.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" -groups = ["typing"] files = [ {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, @@ -1677,7 +1605,6 @@ version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = false python-versions = ">=3.8" -groups = ["typing"] files = [ {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, @@ -1689,8 +1616,6 @@ version = "0.60.0" description = "compiling Python code using LLVM" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}, {file = "numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}, @@ -1725,7 +1650,6 @@ version = "2.0.2" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, @@ -1780,7 +1704,6 @@ version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = false python-versions = ">=3.6" -groups = ["main", "test"] files = [ {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, @@ -1797,7 +1720,6 @@ version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["main", "release", "test"] files = [ {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, @@ -1809,8 +1731,6 @@ version = "11.2.1" description = "Python Imaging Library (Fork)" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"aura\" or extra == \"embedart\" or extra == \"fetchart\" or extra == \"thumbnails\"" files = [ {file = "pillow-11.2.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:d57a75d53922fc20c165016a20d9c44f73305e67c351bbc60d1adaf662e74047"}, {file = "pillow-11.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:127bf6ac4a5b58b3d32fc8289656f77f80567d65660bc46f72c0d77e6600cc95"}, @@ -1901,19 +1821,18 @@ fpx = ["olefile"] mic = ["olefile"] test-arrow = ["pyarrow"] tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"] -typing = ["typing-extensions ; python_version < \"3.10\""] +typing = ["typing-extensions"] xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "4.3.7" +version = "4.3.8" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.9" -groups = ["main"] files = [ - {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, - {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, + {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, + {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, ] [package.extras] @@ -1927,7 +1846,6 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" -groups = ["test"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -1943,8 +1861,6 @@ version = "1.8.2" description = "A friend to fetch your data files" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "pooch-1.8.2-py3-none-any.whl", hash = "sha256:3529a57096f7198778a5ceefd5ac3ef0e4d06a6ddaf9fc2d609b806f25302c47"}, {file = "pooch-1.8.2.tar.gz", hash = "sha256:76561f0de68a01da4df6af38e9955c4c9d1a5c90da73f7e40276a5728ec83d10"}, @@ -1966,8 +1882,6 @@ version = "7.0.0" description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." optional = false python-versions = ">=3.6" -groups = ["main", "test"] -markers = "sys_platform != \"cygwin\"" files = [ {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, @@ -1991,7 +1905,6 @@ version = "0.22.0" description = "Pure python 7-zip library" optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "py7zr-0.22.0-py3-none-any.whl", hash = "sha256:993b951b313500697d71113da2681386589b7b74f12e48ba13cc12beca79d078"}, {file = "py7zr-0.22.0.tar.gz", hash = "sha256:c6c7aea5913535184003b73938490f9a4d8418598e533f9ca991d3b8e45a139e"}, @@ -2022,8 +1935,6 @@ version = "1.3.0" description = "bindings for Chromaprint acoustic fingerprinting and the Acoustid API" optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"chroma\"" files = [ {file = "pyacoustid-1.3.0.tar.gz", hash = "sha256:5f4f487191c19ebb908270b1b7b5297f132da332b1568b96a914574c079ed177"}, ] @@ -2038,7 +1949,6 @@ version = "1.0.6" description = "bcj filter library" optional = false python-versions = ">=3.9" -groups = ["main", "test"] files = [ {file = "pybcj-1.0.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0fc8eda59e9e52d807f411de6db30aadd7603aa0cb0a830f6f45226b74be1926"}, {file = "pybcj-1.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0495443e8691510129f0c589ed956af4962c22b7963c5730b0c80c9c5b818c06"}, @@ -2098,8 +2008,6 @@ version = "1.28.0" description = "Python interface for cairo" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"bpd\" or extra == \"replaygain\"" files = [ {file = "pycairo-1.28.0-cp310-cp310-win32.whl", hash = 
"sha256:53e6dbc98456f789965dad49ef89ce2c62f9a10fc96c8d084e14da0ffb73d8a6"}, {file = "pycairo-1.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:c8ab91a75025f984bc327ada335c787efb61c929ea0512063793cb36cee503d4"}, @@ -2125,12 +2033,10 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] -markers = {main = "extra == \"autobpm\" or extra == \"reflink\" or platform_python_implementation == \"PyPy\"", test = "platform_python_implementation == \"PyPy\""} [[package]] name = "pycryptodomex" @@ -2138,7 +2044,6 @@ version = "3.22.0" description = "Cryptographic library for Python" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main", "test"] files = [ {file = "pycryptodomex-3.22.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:41673e5cc39a8524557a0472077635d981172182c9fe39ce0b5f5c19381ffaff"}, {file = "pycryptodomex-3.22.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:276be1ed006e8fd01bba00d9bd9b60a0151e478033e86ea1cb37447bbc057edc"}, @@ -2177,8 +2082,6 @@ version = "0.16.1" description = "Bootstrap-based Sphinx theme from the PyData community" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "pydata_sphinx_theme-0.16.1-py3-none-any.whl", hash = "sha256:225331e8ac4b32682c18fcac5a57a6f717c4e632cea5dd0e247b55155faeccde"}, {file = "pydata_sphinx_theme-0.16.1.tar.gz", hash = "sha256:a08b7f0b7f70387219dc659bff0893a7554d5eb39b59d3b8ef37b8401b7642d7"}, @@ -2206,8 +2109,6 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = true python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -2222,8 +2123,6 @@ version = "3.52.3" description = "Python bindings for GObject Introspection" optional = true python-versions = "<4.0,>=3.9" -groups = ["main"] -markers = "extra == \"bpd\" or extra == \"replaygain\"" files = [ {file = "pygobject-3.52.3.tar.gz", hash = "sha256:00e427d291e957462a8fad659a9f9c8be776ff82a8b76bdf402f1eaeec086d82"}, ] @@ -2237,7 +2136,6 @@ version = "5.5.0" description = "A Python interface to Last.fm and Libre.fm" optional = false python-versions = ">=3.9" -groups = ["main", "test"] files = [ {file = "pylast-5.5.0-py3-none-any.whl", hash = "sha256:a28b5dbf69ef71b868e42ce27c74e4feea5277fbee26960549604ce34d631bbe"}, {file = "pylast-5.5.0.tar.gz", hash = "sha256:b6e95cf11fb99779cd451afd5dd68c4036c44f88733cf2346ba27317c1869da4"}, @@ -2255,7 +2153,6 @@ version = "1.1.1" description = "PPMd compression/decompression library" optional = false python-versions = ">=3.9" -groups = ["main", "test"] files = [ {file = "pyppmd-1.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:406b184132c69e3f60ea9621b69eaa0c5494e83f82c307b3acce7b86a4f8f888"}, {file = "pyppmd-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2cf003bb184adf306e1ac1828107307927737dde63474715ba16462e266cbef"}, @@ -2326,7 +2223,6 @@ version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" -groups = ["test"] files = [ {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, @@ -2349,7 +2245,6 @@ version = "6.1.1" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.9" -groups = ["test"] files = [ {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"}, {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"}, @@ -2368,7 +2263,6 @@ version = "1.3.0" description = "A set of py.test fixtures to test Flask applications." 
optional = false python-versions = ">=3.7" -groups = ["test"] files = [ {file = "pytest-flask-1.3.0.tar.gz", hash = "sha256:58be1c97b21ba3c4d47e0a7691eb41007748506c36bf51004f78df10691fa95e"}, {file = "pytest_flask-1.3.0-py3-none-any.whl", hash = "sha256:c0e36e6b0fddc3b91c4362661db83fa694d1feb91fa505475be6732b5bc8c253"}, @@ -2388,7 +2282,6 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main", "test"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -2403,7 +2296,6 @@ version = "3.1.1" description = "A Python MPD client library" optional = false python-versions = ">=3.6" -groups = ["main", "test"] files = [ {file = "python-mpd2-3.1.1.tar.gz", hash = "sha256:4baec3584cc43ed9948d5559079fafc2679b06b2ade273e909b3582654b2b3f5"}, {file = "python_mpd2-3.1.1-py2.py3-none-any.whl", hash = "sha256:86bf1100a0b135959d74a9a7a58cf0515bf30bb54eb25ae6fb8e175e50300fc3"}, @@ -2418,7 +2310,6 @@ version = "2.8" description = "Python API client for Discogs" optional = false python-versions = "*" -groups = ["main", "test"] files = [ {file = "python3_discogs_client-2.8-py3-none-any.whl", hash = "sha256:60d63a613da73afeb818015e680fa5f007ffaa94d97578070e7ee4f11dc1b1b3"}, {file = "python3_discogs_client-2.8.tar.gz", hash = "sha256:0f2c77f4ff491a6ef60fe892032028df899808e65efcd48249b4ecf21146b33b"}, @@ -2438,7 +2329,6 @@ version = "0.28" description = "PyXDG contains implementations of freedesktop.org standards in python." optional = false python-versions = "*" -groups = ["main", "test"] files = [ {file = "pyxdg-0.28-py2.py3-none-any.whl", hash = "sha256:bdaf595999a0178ecea4052b7f4195569c1ff4d344567bccdc12dfdf02d545ab"}, {file = "pyxdg-0.28.tar.gz", hash = "sha256:3267bb3074e934df202af2ee0868575484108581e6f3cb006af1da35395e88b4"}, @@ -2450,7 +2340,6 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -2509,104 +2398,116 @@ files = [ [[package]] name = "pyzstd" -version = "0.16.2" +version = "0.17.0" description = "Python bindings to Zstandard (zstd) compression library." 
optional = false python-versions = ">=3.5" -groups = ["main", "test"] files = [ - {file = "pyzstd-0.16.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:637376c8f8cbd0afe1cab613f8c75fd502bd1016bf79d10760a2d5a00905fe62"}, - {file = "pyzstd-0.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3e7a7118cbcfa90ca2ddbf9890c7cb582052a9a8cf2b7e2c1bbaf544bee0f16a"}, - {file = "pyzstd-0.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a74cb1ba05876179525144511eed3bd5a509b0ab2b10632c1215a85db0834dfd"}, - {file = "pyzstd-0.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c084dde218ffbf112e507e72cbf626b8f58ce9eb23eec129809e31037984662"}, - {file = "pyzstd-0.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4646459ebd3d7a59ddbe9312f020bcf7cdd1f059a2ea07051258f7af87a0b31"}, - {file = "pyzstd-0.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14bfc2833cc16d7657fc93259edeeaa793286e5031b86ca5dc861ba49b435fce"}, - {file = "pyzstd-0.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f27d488f19e5bf27d1e8aa1ae72c6c0a910f1e1ffbdf3c763d02ab781295dd27"}, - {file = "pyzstd-0.16.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e134ca968ff7dcfa8b7d433318f01d309b74ee87e0d2bcadc117c08e1c80db"}, - {file = "pyzstd-0.16.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6b5f64cd3963c58b8f886eb6139bb8d164b42a74f8a1bb95d49b4804f4592d61"}, - {file = "pyzstd-0.16.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0b4a8266871b9e0407f9fd8e8d077c3558cf124d174e6357b523d14f76971009"}, - {file = "pyzstd-0.16.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1bb19f7acac30727354c25125922aa59f44d82e0e6a751df17d0d93ff6a73853"}, - {file = "pyzstd-0.16.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3008325b7368e794d66d4d98f2ee1d867ef5afd09fd388646ae02b25343c420d"}, - {file = "pyzstd-0.16.2-cp310-cp310-win32.whl", hash = "sha256:66f2d5c0bbf5bf32c577aa006197b3525b80b59804450e2c32fbcc2d16e850fd"}, - {file = "pyzstd-0.16.2-cp310-cp310-win_amd64.whl", hash = "sha256:5fe5f5459ebe1161095baa7a86d04ab625b35148f6c425df0347ed6c90a2fd58"}, - {file = "pyzstd-0.16.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c1bdbe7f01c7f37d5cd07be70e32a84010d7dfd6677920c0de04cf7d245b60d"}, - {file = "pyzstd-0.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1882a3ceaaf9adc12212d587d150ec5e58cfa9a765463d803d739abbd3ac0f7a"}, - {file = "pyzstd-0.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea46a8b9d60f6a6eba29facba54c0f0d70328586f7ef0da6f57edf7e43db0303"}, - {file = "pyzstd-0.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d7865bc06589cdcecdede0deefe3da07809d5b7ad9044c224d7b2a0867256957"}, - {file = "pyzstd-0.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52f938a65b409c02eb825e8c77fc5ea54508b8fc44b5ce226db03011691ae8cc"}, - {file = "pyzstd-0.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e97620d3f53a0282947304189deef7ca7f7d0d6dfe15033469dc1c33e779d5e5"}, - {file = "pyzstd-0.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c40e9983d017108670dc8df68ceef14c7c1cf2d19239213274783041d0e64c"}, - {file = "pyzstd-0.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:7cd4b3b2c6161066e4bde6af1cf78ed3acf5d731884dd13fdf31f1db10830080"}, - {file = "pyzstd-0.16.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:454f31fd84175bb203c8c424f2255a343fa9bd103461a38d1bf50487c3b89508"}, - {file = "pyzstd-0.16.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5ef754a93743f08fb0386ce3596780bfba829311b49c8f4107af1a4bcc16935d"}, - {file = "pyzstd-0.16.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:be81081db9166e10846934f0e3576a263cbe18d81eca06e6a5c23533f8ce0dc6"}, - {file = "pyzstd-0.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:738bcb2fa1e5f1868986f5030955e64de53157fa1141d01f3a4daf07a1aaf644"}, - {file = "pyzstd-0.16.2-cp311-cp311-win32.whl", hash = "sha256:0ea214c9b97046867d1657d55979021028d583704b30c481a9c165191b08d707"}, - {file = "pyzstd-0.16.2-cp311-cp311-win_amd64.whl", hash = "sha256:c17c0fc02f0e75b0c7cd21f8eaf4c6ce4112333b447d93da1773a5f705b2c178"}, - {file = "pyzstd-0.16.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d4081fd841a9efe9ded7290ee7502dbf042c4158b90edfadea3b8a072c8ec4e1"}, - {file = "pyzstd-0.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fd3fa45d2aeb65367dd702806b2e779d13f1a3fa2d13d5ec777cfd09de6822de"}, - {file = "pyzstd-0.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8b5f0d2c07994a5180d8259d51df6227a57098774bb0618423d7eb4a7303467"}, - {file = "pyzstd-0.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60c9d25b15c7ae06ed5d516d096a0d8254f9bed4368b370a09cccf191eaab5cb"}, - {file = "pyzstd-0.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29acf31ce37254f6cad08deb24b9d9ba954f426fa08f8fae4ab4fdc51a03f4ae"}, - {file = "pyzstd-0.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec77612a17697a9f7cf6634ffcee616eba9b997712fdd896e77fd19ab3a0618"}, - {file = "pyzstd-0.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:313ea4974be93be12c9a640ab40f0fc50a023178aae004a8901507b74f190173"}, - {file = "pyzstd-0.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e91acdefc8c2c6c3b8d5b1b5fe837dce4e591ecb7c0a2a50186f552e57d11203"}, - {file = "pyzstd-0.16.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:929bd91a403539e72b5b5cb97f725ac4acafe692ccf52f075e20cd9bf6e5493d"}, - {file = "pyzstd-0.16.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:740837a379aa32d110911ebcbbc524f9a9b145355737527543a884bd8777ca4f"}, - {file = "pyzstd-0.16.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:adfc0e80dd157e6d1e0b0112c8ecc4b58a7a23760bd9623d74122ef637cfbdb6"}, - {file = "pyzstd-0.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:79b183beae1c080ad3dca39019e49b7785391947f9aab68893ad85d27828c6e7"}, - {file = "pyzstd-0.16.2-cp312-cp312-win32.whl", hash = "sha256:b8d00631a3c466bc313847fab2a01f6b73b3165de0886fb03210e08567ae3a89"}, - {file = "pyzstd-0.16.2-cp312-cp312-win_amd64.whl", hash = "sha256:c0d43764e9a60607f35d8cb3e60df772a678935ab0e02e2804d4147377f4942c"}, - {file = "pyzstd-0.16.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3ae9ae7ad730562810912d7ecaf1fff5eaf4c726f4b4dfe04784ed5f06d7b91f"}, - {file = "pyzstd-0.16.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2ce8d3c213f76a564420f3d0137066ac007ce9fb4e156b989835caef12b367a7"}, - {file = "pyzstd-0.16.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c2c14dac23c865e2d78cebd9087e148674b7154f633afd4709b4cd1520b99a61"}, - {file = "pyzstd-0.16.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4527969d66a943e36ef374eda847e918077de032d58b5df84d98ffd717b6fa77"}, - {file = "pyzstd-0.16.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd8256149b88e657e99f31e6d4b114c8ff2935951f1d8bb8e1fe501b224999c0"}, - {file = "pyzstd-0.16.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bd1f1822d65c9054bf36d35307bf8ed4aa2d2d6827431761a813628ff671b1d"}, - {file = "pyzstd-0.16.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6733f4d373ec9ad2c1976cf06f973a3324c1f9abe236d114d6bb91165a397d"}, - {file = "pyzstd-0.16.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7bec165ab6524663f00b69bfefd13a46a69fed3015754abaf81b103ec73d92c6"}, - {file = "pyzstd-0.16.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e4460fa6949aac6528a1ad0de8871079600b12b3ef4db49316306786a3598321"}, - {file = "pyzstd-0.16.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75df79ea0315c97d88337953a17daa44023dbf6389f8151903d371513f503e3c"}, - {file = "pyzstd-0.16.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:93e1d45f4a196afb6f18682c79bdd5399277ead105b67f30b35c04c207966071"}, - {file = "pyzstd-0.16.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:075e18b871f38a503b5d23e40a661adfc750bd4bd0bb8b208c1e290f3ceb8fa2"}, - {file = "pyzstd-0.16.2-cp313-cp313-win32.whl", hash = "sha256:9e4295eb299f8d87e3487852bca033d30332033272a801ca8130e934475e07a9"}, - {file = "pyzstd-0.16.2-cp313-cp313-win_amd64.whl", hash = "sha256:18deedc70f858f4cf574e59f305d2a0678e54db2751a33dba9f481f91bc71c28"}, - {file = "pyzstd-0.16.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a9892b707ef52f599098b1e9528df0e7849c5ec01d3e8035fb0e67de4b464839"}, - {file = "pyzstd-0.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4fbd647864341f3c174c4a6d7f20e6ea6b4be9d840fb900dc0faf0849561badc"}, - {file = "pyzstd-0.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ac2c15656cc6194c4fed1cb0e8159f9394d4ea1d58be755448743d2ec6c9c4"}, - {file = "pyzstd-0.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b239fb9a20c1be3374b9a2bd183ba624fd22ad7a3f67738c0d80cda68b4ae1d3"}, - {file = "pyzstd-0.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc52400412cdae2635e0978b8d6bcc0028cc638fdab2fd301f6d157675d26896"}, - {file = "pyzstd-0.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b766a6aeb8dbb6c46e622e7a1aebfa9ab03838528273796941005a5ce7257b1"}, - {file = "pyzstd-0.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd4b8676052f9d59579242bf3cfe5fd02532b6a9a93ab7737c118ae3b8509dc"}, - {file = "pyzstd-0.16.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1c6c0a677aac7c0e3d2d2605d4d68ffa9893fdeeb2e071040eb7c8750969d463"}, - {file = "pyzstd-0.16.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:15f9c2d612e7e2023d68d321d1b479846751f792af89141931d44e82ae391394"}, - {file = "pyzstd-0.16.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:11740bff847aad23beef4085a1bb767d101895881fe891f0a911aa27d43c372c"}, - {file = "pyzstd-0.16.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:b9067483ebe860e4130a03ee665b3d7be4ec1608b208e645d5e7eb3492379464"}, - {file = 
"pyzstd-0.16.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:988f0ba19b14c2fe0afefc444ac1edfb2f497b7d7c3212b2f587504cc2ec804e"}, - {file = "pyzstd-0.16.2-cp39-cp39-win32.whl", hash = "sha256:8855acb1c3e3829030b9e9e9973b19e2d70f33efb14ad5c474b4d086864c959c"}, - {file = "pyzstd-0.16.2-cp39-cp39-win_amd64.whl", hash = "sha256:018e88378df5e76f5e1d8cf4416576603b6bc4a103cbc66bb593eaac54c758de"}, - {file = "pyzstd-0.16.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4b631117b97a42ff6dfd0ffc885a92fff462d7c34766b28383c57b996f863338"}, - {file = "pyzstd-0.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:56493a3fbe1b651a02102dd0902b0aa2377a732ff3544fb6fb3f114ca18db52f"}, - {file = "pyzstd-0.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1eae9bdba4a1e5d3181331f403114ff5b8ce0f4b569f48eba2b9beb2deef1e4"}, - {file = "pyzstd-0.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1be6972391c8aeecc7e61feb96ffc8e77a401bcba6ed994e7171330c45a1948"}, - {file = "pyzstd-0.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:761439d687e3a5687c2ff5c6a1190e1601362a4a3e8c6c82ff89719d51d73e19"}, - {file = "pyzstd-0.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f5fbdb8cf31b60b2dc586fecb9b73e2f172c21a0b320ed275f7b8d8a866d9003"}, - {file = "pyzstd-0.16.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:183f26e34f9becf0f2db38be9c0bfb136753d228bcb47c06c69175901bea7776"}, - {file = "pyzstd-0.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:88318b64b5205a67748148d6d244097fa6cf61fcea02ad3435511b9e7155ae16"}, - {file = "pyzstd-0.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73142aa2571b6480136a1865ebda8257e09eabbc8bcd54b222202f6fa4febe1e"}, - {file = "pyzstd-0.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d3f8877c29a97f1b1bba16f3d3ab01ad10ad3da7bad317aecf36aaf8848b37c"}, - {file = "pyzstd-0.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f25754562473ac7de856b8331ebd5964f5d85601045627a5f0bb0e4e899990"}, - {file = "pyzstd-0.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6ce17e84310080c55c02827ad9bb17893c00a845c8386a328b346f814aabd2c1"}, - {file = "pyzstd-0.16.2.tar.gz", hash = "sha256:179c1a2ea1565abf09c5f2fd72f9ce7c54b2764cf7369e05c0bfd8f1f67f63d2"}, + {file = "pyzstd-0.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8ac857abb4c4daea71f134e74af7fe16bcfeec40911d13cf9128ddc600d46d92"}, + {file = "pyzstd-0.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2d84e8d1cbecd3b661febf5ca8ce12c5e112cfeb8401ceedfb84ab44365298ac"}, + {file = "pyzstd-0.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f829fa1e7daac2e45b46656bdee13923150f329e53554aeaef75cceec706dd8c"}, + {file = "pyzstd-0.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:994de7a13bb683c190a1b2a0fb99fe0c542126946f0345360582d7d5e8ce8cda"}, + {file = "pyzstd-0.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3eb213a22823e2155aa252d9093c62ac12d7a9d698a4b37c5613f99cb9de327"}, + {file = "pyzstd-0.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c451cfa31e70860334cc7dffe46e5178de1756642d972bc3a570fc6768673868"}, + {file = 
"pyzstd-0.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d66dc6f15249625e537ea4e5e64c195f50182556c3731f260b13c775b7888d6b"}, + {file = "pyzstd-0.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:308d4888083913fac2b7b6f4a88f67c0773d66db37e6060971c3f173cfa92d1e"}, + {file = "pyzstd-0.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a3b636f37af9de52efb7dd2d2f15deaeabdeeacf8e69c29bf3e7e731931e6d66"}, + {file = "pyzstd-0.17.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4c07391c67b496d851b18aa29ff552a552438187900965df57f64d5cf2100c40"}, + {file = "pyzstd-0.17.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e8bd12a13313ffa27347d7abe20840dcd2092852ab835a8e86008f38f11bd5ac"}, + {file = "pyzstd-0.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e27bfab45f9cdab0c336c747f493a00680a52a018a8bb7a1f787ddde4b29410"}, + {file = "pyzstd-0.17.0-cp310-cp310-win32.whl", hash = "sha256:7370c0978edfcb679419f43ec504c128463858a7ea78cf6d0538c39dfb36fce3"}, + {file = "pyzstd-0.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:564f7aa66cda4acd9b2a8461ff0c6a6e39a977be3e2e7317411a9f7860d7338d"}, + {file = "pyzstd-0.17.0-cp310-cp310-win_arm64.whl", hash = "sha256:fccff3a37fa4c513fe1ebf94cb9dc0369c714da22b5671f78ddcbc7ec8f581cc"}, + {file = "pyzstd-0.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06d1e7afafe86b90f3d763f83d2f6b6a437a8d75119fe1ff52b955eb9df04eaa"}, + {file = "pyzstd-0.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc827657f644e4510211b49f5dab6b04913216bc316206d98f9a75214361f16e"}, + {file = "pyzstd-0.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecffadaa2ee516ecea3e432ebf45348fa8c360017f03b88800dd312d62ecb063"}, + {file = "pyzstd-0.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:596de361948d3aad98a837c98fcee4598e51b608f7e0912e0e725f82e013f00f"}, + {file = "pyzstd-0.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd3a8d0389c103e93853bf794b9a35ac5d0d11ca3e7e9f87e3305a10f6dfa6b2"}, + {file = "pyzstd-0.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1356f72c7b8bb99b942d582b61d1a93c5065e66b6df3914dac9f2823136c3228"}, + {file = "pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f514c339b013b0b0a2ed8ea6e44684524223bd043267d7644d7c3a70e74a0dd"}, + {file = "pyzstd-0.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d4de16306821021c2d82a45454b612e2a8683d99bfb98cff51a883af9334bea0"}, + {file = "pyzstd-0.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:aeb9759c04b6a45c1b56be21efb0a738e49b0b75c4d096a38707497a7ff2be82"}, + {file = "pyzstd-0.17.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7a5b31ddeada0027e67464d99f09167cf08bab5f346c3c628b2d3c84e35e239a"}, + {file = "pyzstd-0.17.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8338e4e91c52af839abcf32f1f65f3b21e2597ffe411609bdbdaf10274991bd0"}, + {file = "pyzstd-0.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:628e93862feb372b4700085ec4d1d389f1283ac31900af29591ae01019910ff3"}, + {file = "pyzstd-0.17.0-cp311-cp311-win32.whl", hash = "sha256:c27773f9c95ebc891cfcf1ef282584d38cde0a96cb8d64127953ad752592d3d7"}, + {file = "pyzstd-0.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:c043a5766e00a2b7844705c8fa4563b7c195987120afee8f4cf594ecddf7e9ac"}, + {file = "pyzstd-0.17.0-cp311-cp311-win_arm64.whl", hash = 
"sha256:efd371e41153ef55bf51f97e1ce4c1c0b05ceb59ed1d8972fc9aa1e9b20a790f"}, + {file = "pyzstd-0.17.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2ac330fc4f64f97a411b6f3fc179d2fe3050b86b79140e75a9a6dd9d6d82087f"}, + {file = "pyzstd-0.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:725180c0c4eb2e643b7048ebfb45ddf43585b740535907f70ff6088f5eda5096"}, + {file = "pyzstd-0.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c20fe0a60019685fa1f7137cb284f09e3f64680a503d9c0d50be4dd0a3dc5ec"}, + {file = "pyzstd-0.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d97f7aaadc3b6e2f8e51bfa6aa203ead9c579db36d66602382534afaf296d0db"}, + {file = "pyzstd-0.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42dcb34c5759b59721997036ff2d94210515d3ef47a9de84814f1c51a1e07e8a"}, + {file = "pyzstd-0.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6bf05e18be6f6c003c7129e2878cffd76fcbebda4e7ebd7774e34ae140426cbf"}, + {file = "pyzstd-0.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f7c3a5144aa4fbccf37c30411f6b1db4c0f2cb6ad4df470b37929bffe6ca0"}, + {file = "pyzstd-0.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9efd4007f8369fd0890701a4fc77952a0a8c4cb3bd30f362a78a1adfb3c53c12"}, + {file = "pyzstd-0.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5f8add139b5fd23b95daa844ca13118197f85bd35ce7507e92fcdce66286cc34"}, + {file = "pyzstd-0.17.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:259a60e8ce9460367dcb4b34d8b66e44ca3d8c9c30d53ed59ae7037622b3bfc7"}, + {file = "pyzstd-0.17.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:86011a93cc3455c5d2e35988feacffbf2fa106812a48e17eb32c2a52d25a95b3"}, + {file = "pyzstd-0.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:425c31bc3de80313054e600398e4f1bd229ee61327896d5d015e2cd0283c9012"}, + {file = "pyzstd-0.17.0-cp312-cp312-win32.whl", hash = "sha256:7c4b88183bb36eb2cebbc0352e6e9fe8e2d594f15859ae1ef13b63ebc58be158"}, + {file = "pyzstd-0.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c31947e0120468342d74e0fa936d43f7e1dad66a2262f939735715aa6c730e8"}, + {file = "pyzstd-0.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:1d0346418abcef11507356a31bef5470520f6a5a786d4e2c69109408361b1020"}, + {file = "pyzstd-0.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6cd1a1d37a7abe9c01d180dad699e3ac3889e4f48ac5dcca145cc46b04e9abd2"}, + {file = "pyzstd-0.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a44fd596eda06b6265dc0358d5b309715a93f8e96e8a4b5292c2fe0e14575b3"}, + {file = "pyzstd-0.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a99b37453f92f0691b2454d0905bbf2f430522612f6f12bbc81133ad947eb97"}, + {file = "pyzstd-0.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63d864e9f9e624a466070a121ace9d9cbf579eac4ed575dee3b203ab1b3cbeee"}, + {file = "pyzstd-0.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e58bc02b055f96d1f83c791dd197d8c80253275a56cd84f917a006e9f528420d"}, + {file = "pyzstd-0.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e62df7c0ba74618481149c849bc3ed7d551b9147e1274b4b3170bbcc0bfcc0a"}, + {file = "pyzstd-0.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:42ecdd7136294f1becb8e57441df00eaa6dfd7444a8b0c96a1dfba5c81b066e7"}, + {file = "pyzstd-0.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:be07a57af75f99fc39b8e2d35f8fb823ecd7ef099cd1f6203829a5094a991ae2"}, + {file = "pyzstd-0.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0d41e6f7ec2a70dab4982157a099562de35a6735c890945b4cebb12fb7eb0be0"}, + {file = "pyzstd-0.17.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f482d906426756e7cc9a43f500fee907e1b3b4e9c04d42d58fb1918c6758759b"}, + {file = "pyzstd-0.17.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:827327b35605265e1d05a2f6100244415e8f2728bb75c951736c9288415908d7"}, + {file = "pyzstd-0.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a55008f80e3390e4f37bd9353830f1675f271d13d6368d2f1dc413b7c6022b3"}, + {file = "pyzstd-0.17.0-cp313-cp313-win32.whl", hash = "sha256:a4be186c0df86d4d95091c759a06582654f2b93690503b1c24d77f537d0cf5d0"}, + {file = "pyzstd-0.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:251a0b599bd224ec66f39165ddb2f959d0a523938e3bbfa82d8188dc03a271a2"}, + {file = "pyzstd-0.17.0-cp313-cp313-win_arm64.whl", hash = "sha256:ce6d5fd908fd3ddec32d1c1a5a7a15b9d7737d0ef2ab20fe1e8261da61395017"}, + {file = "pyzstd-0.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d5cb23c3c4ba4105a518cfbe8a566f9482da26f4bc8c1c865fd66e8e266be071"}, + {file = "pyzstd-0.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:10b5d9215890a24f22505b68add26beeb2e3858bbe738a7ee339f0db8e29d033"}, + {file = "pyzstd-0.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db1cff52fd24caf42a2cfb7e5d8dc822b93e9fac5dab505d0bd22e302061e2d2"}, + {file = "pyzstd-0.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3caad3106e0e80f76acbb19c15e1e834ba6fd44dd4c82719ef8e3374f7fafd3"}, + {file = "pyzstd-0.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7e52e1de31b935e27568742145d8b4d0f204a1605e36f4e1e2846e0d39bed98"}, + {file = "pyzstd-0.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaa046bc9e751c4083102f3624a52bbb66e20e7aa3e28673543b22e69d9b57cd"}, + {file = "pyzstd-0.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cc9310bdb7cf2c70098aab40fb6bf68faaf0149110c6ef668996e7957e0147a"}, + {file = "pyzstd-0.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3619075966456783818904f9d9e213c6fe2e583d5beb545fa1968b1848781e0f"}, + {file = "pyzstd-0.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3844f8c7d7850580423b1b33601b016b3b913d18deb6fe14a7641b9c2714275c"}, + {file = "pyzstd-0.17.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab53f91280b7b639c47bb2048e01182230e7cf3f0f0980bdb405b4241cfb705e"}, + {file = "pyzstd-0.17.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:75252ee53e53a819ea7ac4271f66686018bc8b98ef12628269f099c10d881077"}, + {file = "pyzstd-0.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0795afdaa34e1ed7f3d7552100cd57a1cef9d7310b386a893e0890e9a585b427"}, + {file = "pyzstd-0.17.0-cp39-cp39-win32.whl", hash = "sha256:f7316be5a5246b6bbdd807c7a4f10382b6b02c3afc5ae6acd2e266a84f715493"}, + {file = "pyzstd-0.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:121e8fac3e24b881fed59d638100b80c34f6347c02d2f24580f633451939f2d7"}, + {file = "pyzstd-0.17.0-cp39-cp39-win_arm64.whl", hash = "sha256:fe36ccda67f73e909ac305984fe13b7b5a79296706d095a80472ada4413174c2"}, + {file = 
"pyzstd-0.17.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c56f99c697130f39702e07ab9fa0bb4c929c7bfe47c0a488dea732bd8a8752a"}, + {file = "pyzstd-0.17.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:152bae1b2197bcd41fc143f93acd23d474f590162547484ca04ce5874c4847de"}, + {file = "pyzstd-0.17.0-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2ddbbd7614922e52018ba3e7bb4cbe6f25b230096831d97916b8b89be8cd0cb"}, + {file = "pyzstd-0.17.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f6f3f152888825f71fd2cf2499f093fac252a5c1fa15ab8747110b3dc095f6b"}, + {file = "pyzstd-0.17.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d00a2d2bddf51c7bf32c17dc47f0f49f47ebae07c2528b9ee8abf1f318ac193"}, + {file = "pyzstd-0.17.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d79e3eff07217707a92c1a6a9841c2466bfcca4d00fea0bea968f4034c27a256"}, + {file = "pyzstd-0.17.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3ce6bac0c4c032c5200647992a8efcb9801c918633ebe11cceba946afea152d9"}, + {file = "pyzstd-0.17.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:a00998144b35be7c485a383f739fe0843a784cd96c3f1f2f53f1a249545ce49a"}, + {file = "pyzstd-0.17.0-pp311-pypy311_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8521d7bbd00e0e1c1fd222c1369a7600fba94d24ba380618f9f75ee0c375c277"}, + {file = "pyzstd-0.17.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da65158c877eac78dcc108861d607c02fb3703195c3a177f2687e0bcdfd519d0"}, + {file = "pyzstd-0.17.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:226ca0430e2357abae1ade802585231a2959b010ec9865600e416652121ba80b"}, + {file = "pyzstd-0.17.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e3a19e8521c145a0e2cd87ca464bf83604000c5454f7e0746092834fd7de84d1"}, + {file = "pyzstd-0.17.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:56ed2de4717844ffdebb5c312ec7e7b8eb2b69eb72883bbfe472ba2c872419e6"}, + {file = "pyzstd-0.17.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:fc61c47ca631241081c0c99895a1feb56dab4beab37cac7d1f9f18aff06962eb"}, + {file = "pyzstd-0.17.0-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd61757a4020590dad6c20fdbf37c054ed9f349591a0d308c3c03c0303ce221"}, + {file = "pyzstd-0.17.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d6cce91a8ac8ae1aab06684a8bf0dee088405de7f451e1e89776ddc1f40074"}, + {file = "pyzstd-0.17.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc668b67a13bf6213d0a9c09edc1f4842ed680b92fc3c9361f55a904903bfd1f"}, + {file = "pyzstd-0.17.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a67d7ef18715875b31127eb90075c03ced722fd87902b34bca4b807a2ce1e4d9"}, + {file = "pyzstd-0.17.0.tar.gz", hash = "sha256:d84271f8baa66c419204c1dd115a4dec8b266f8a2921da21b81764fa208c1db6"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.13.2", markers = "python_version < \"3.13\""} + [[package]] name = "rarfile" version = "4.2" description = "RAR archive reader for Python" optional = false python-versions = ">=3.6" -groups = ["main", "test"] files = [ {file = "rarfile-4.2-py3-none-any.whl", hash = "sha256:8757e1e3757e32962e229cab2432efc1f15f210823cc96ccba0f6a39d17370c9"}, {file = 
"rarfile-4.2.tar.gz", hash = "sha256:8e1c8e72d0845ad2b32a47ab11a719bc2e41165ec101fd4d3fe9e92aa3f469ef"}, @@ -2618,8 +2519,6 @@ version = "0.2.2" description = "Python reflink wraps around platform specific reflink implementations" optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"reflink\"" files = [ {file = "reflink-0.2.2-cp36-cp36m-win32.whl", hash = "sha256:8435c7153af4d6e66dc8acb48a9372c8ec6f978a09cdf7b57cd6656d969e343a"}, {file = "reflink-0.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:be4787c6208faf7fc892390909cf01e34e650ea67c37bf345addefd597ed90e1"}, @@ -2635,7 +2534,6 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -2657,7 +2555,6 @@ version = "1.12.1" description = "Mock out responses from the requests package" optional = false python-versions = ">=3.5" -groups = ["test"] files = [ {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, @@ -2675,7 +2572,6 @@ version = "2.0.0" description = "OAuthlib authentication support for Requests." optional = false python-versions = ">=3.4" -groups = ["main", "test"] files = [ {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, @@ -2694,8 +2590,6 @@ version = "0.4.3" description = "Efficient signal resampling" optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "resampy-0.4.3-py3-none-any.whl", hash = "sha256:ad2ed64516b140a122d96704e32bc0f92b23f45419e8b8f478e5a05f83edcebd"}, {file = "resampy-0.4.3.tar.gz", hash = "sha256:a0d1c28398f0e55994b739650afef4e3974115edbe96cd4bb81968425e916e47"}, @@ -2716,7 +2610,6 @@ version = "0.25.7" description = "A utility library for mocking out the `requests` Python library." optional = false python-versions = ">=3.8" -groups = ["test"] files = [ {file = "responses-0.25.7-py3-none-any.whl", hash = "sha256:92ca17416c90fe6b35921f52179bff29332076bb32694c0df02dcac2c6bc043c"}, {file = "responses-0.25.7.tar.gz", hash = "sha256:8ebae11405d7a5df79ab6fd54277f6f2bc29b2d002d0dd2d5c632594d1ddcedb"}, @@ -2728,34 +2621,33 @@ requests = ">=2.30.0,<3.0" urllib3 = ">=1.25.10,<3.0" [package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli ; python_version < \"3.11\"", "tomli-w", "types-PyYAML", "types-requests"] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] [[package]] name = "ruff" -version = "0.11.8" +version = "0.11.9" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" -groups = ["lint"] files = [ - {file = "ruff-0.11.8-py3-none-linux_armv6l.whl", hash = "sha256:896a37516c594805e34020c4a7546c8f8a234b679a7716a3f08197f38913e1a3"}, - {file = "ruff-0.11.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab86d22d3d721a40dd3ecbb5e86ab03b2e053bc93c700dc68d1c3346b36ce835"}, - {file = "ruff-0.11.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:258f3585057508d317610e8a412788cf726efeefa2fec4dba4001d9e6f90d46c"}, - {file = "ruff-0.11.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727d01702f7c30baed3fc3a34901a640001a2828c793525043c29f7614994a8c"}, - {file = "ruff-0.11.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3dca977cc4fc8f66e89900fa415ffe4dbc2e969da9d7a54bfca81a128c5ac219"}, - {file = "ruff-0.11.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c657fa987d60b104d2be8b052d66da0a2a88f9bd1d66b2254333e84ea2720c7f"}, - {file = "ruff-0.11.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f2e74b021d0de5eceb8bd32919f6ff8a9b40ee62ed97becd44993ae5b9949474"}, - {file = "ruff-0.11.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9b5ef39820abc0f2c62111f7045009e46b275f5b99d5e59dda113c39b7f4f38"}, - {file = "ruff-0.11.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1dba3135ca503727aa4648152c0fa67c3b1385d3dc81c75cd8a229c4b2a1458"}, - {file = "ruff-0.11.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f024d32e62faad0f76b2d6afd141b8c171515e4fb91ce9fd6464335c81244e5"}, - {file = "ruff-0.11.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d365618d3ad747432e1ae50d61775b78c055fee5936d77fb4d92c6f559741948"}, - {file = "ruff-0.11.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4d9aaa91035bdf612c8ee7266153bcf16005c7c7e2f5878406911c92a31633cb"}, - {file = "ruff-0.11.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0eba551324733efc76116d9f3a0d52946bc2751f0cd30661564117d6fd60897c"}, - {file = "ruff-0.11.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:161eb4cff5cfefdb6c9b8b3671d09f7def2f960cee33481dd898caf2bcd02304"}, - {file = "ruff-0.11.8-py3-none-win32.whl", hash = "sha256:5b18caa297a786465cc511d7f8be19226acf9c0a1127e06e736cd4e1878c3ea2"}, - {file = "ruff-0.11.8-py3-none-win_amd64.whl", hash = "sha256:6e70d11043bef637c5617297bdedec9632af15d53ac1e1ba29c448da9341b0c4"}, - {file = "ruff-0.11.8-py3-none-win_arm64.whl", hash = "sha256:304432e4c4a792e3da85b7699feb3426a0908ab98bf29df22a31b0cdd098fac2"}, - {file = "ruff-0.11.8.tar.gz", hash = "sha256:6d742d10626f9004b781f4558154bb226620a7242080e11caeffab1a40e99df8"}, + {file = "ruff-0.11.9-py3-none-linux_armv6l.whl", hash = "sha256:a31a1d143a5e6f499d1fb480f8e1e780b4dfdd580f86e05e87b835d22c5c6f8c"}, + {file = "ruff-0.11.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:66bc18ca783b97186a1f3100e91e492615767ae0a3be584e1266aa9051990722"}, + {file = "ruff-0.11.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:bd576cd06962825de8aece49f28707662ada6a1ff2db848d1348e12c580acbf1"}, + {file = "ruff-0.11.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b1d18b4be8182cc6fddf859ce432cc9631556e9f371ada52f3eaefc10d878de"}, + {file = "ruff-0.11.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0f3f46f759ac623e94824b1e5a687a0df5cd7f5b00718ff9c24f0a894a683be7"}, + {file = "ruff-0.11.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f34847eea11932d97b521450cf3e1d17863cfa5a94f21a056b93fb86f3f3dba2"}, + {file = "ruff-0.11.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f33b15e00435773df97cddcd263578aa83af996b913721d86f47f4e0ee0ff271"}, + {file = "ruff-0.11.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b27613a683b086f2aca8996f63cb3dd7bc49e6eccf590563221f7b43ded3f65"}, + {file = "ruff-0.11.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e0d88756e63e8302e630cee3ce2ffb77859797cc84a830a24473939e6da3ca6"}, + {file = "ruff-0.11.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:537c82c9829d7811e3aa680205f94c81a2958a122ac391c0eb60336ace741a70"}, + {file = "ruff-0.11.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:440ac6a7029f3dee7d46ab7de6f54b19e34c2b090bb4f2480d0a2d635228f381"}, + {file = "ruff-0.11.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:71c539bac63d0788a30227ed4d43b81353c89437d355fdc52e0cda4ce5651787"}, + {file = "ruff-0.11.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c67117bc82457e4501473c5f5217d49d9222a360794bfb63968e09e70f340abd"}, + {file = "ruff-0.11.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e4b78454f97aa454586e8a5557facb40d683e74246c97372af3c2d76901d697b"}, + {file = "ruff-0.11.9-py3-none-win32.whl", hash = "sha256:7fe1bc950e7d7b42caaee2a8a3bc27410547cc032c9558ee2e0f6d3b209e845a"}, + {file = "ruff-0.11.9-py3-none-win_amd64.whl", hash = "sha256:52edaa4a6d70f8180343a5b7f030c7edd36ad180c9f4d224959c2d689962d964"}, + {file = "ruff-0.11.9-py3-none-win_arm64.whl", hash = "sha256:bcf42689c22f2e240f496d0c183ef2c6f7b35e809f12c1db58f75d9aa8d630ca"}, + {file = "ruff-0.11.9.tar.gz", hash = "sha256:ebd58d4f67a00afb3a30bf7d383e52d0e036e6195143c6db7019604a05335517"}, ] [[package]] @@ -2764,8 +2656,6 @@ version = "1.6.1" description = "A set of python modules for machine learning and data mining" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "scikit_learn-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d056391530ccd1e501056160e3c9673b4da4805eb67eb2bdf4e983e1f9c9204e"}, {file = "scikit_learn-1.6.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0c8d036eb937dbb568c6242fa598d551d88fb4399c0344d95c001980ec1c7d36"}, @@ -2820,8 +2710,6 @@ version = "1.13.1" description = "Fundamental algorithms for scientific computing in Python" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"}, {file = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"}, @@ -2864,7 +2752,6 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main", "test"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -2876,7 +2763,6 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" -groups = ["main", "test"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = 
"sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -2884,28 +2770,24 @@ files = [ [[package]] name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +version = "3.0.1" +description = "This package provides 32 stemmers for 30 languages generated from Snowball algorithms." optional = true -python-versions = "*" -groups = ["main"] -markers = "extra == \"docs\"" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*" files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, + {file = "snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064"}, + {file = "snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895"}, ] [[package]] name = "soco" -version = "0.30.9" +version = "0.30.10" description = "SoCo (Sonos Controller) is a simple library to control Sonos speakers." optional = true python-versions = ">=3.6" -groups = ["main"] -markers = "extra == \"sonosupdate\"" files = [ - {file = "soco-0.30.9-py2.py3-none-any.whl", hash = "sha256:cf06a56c7431e06fe923dfd58a1217f25e7a1737b74525850859f6d30dc86a24"}, - {file = "soco-0.30.9.tar.gz", hash = "sha256:21f7a3b3f0e65aadfc90aaef69a5a428205597271b09c3d99bea8b5cb00df9da"}, + {file = "soco-0.30.10-py2.py3-none-any.whl", hash = "sha256:f62ea676e4457223a8fc5192ffe91f795f6a4a18da8aa686ef20ce6657056a0f"}, + {file = "soco-0.30.10.tar.gz", hash = "sha256:a9c8ddb53836d18a0bbb881224cc6818e1ef1b28791637378ab25ff1eb1a87c3"}, ] [package.dependencies] @@ -2917,7 +2799,7 @@ xmltodict = "*" [package.extras] events-asyncio = ["aiohttp"] -testing = ["black (>=22.12.0) ; python_version >= \"3.7\"", "coveralls", "flake8", "graphviz", "importlib-metadata (<5) ; python_version == \"3.7\"", "pylint", "pytest (>=2.5)", "pytest-cov (<2.6.0)", "requests-mock", "sphinx (==4.5.0)", "sphinx_rtd_theme", "twine", "wheel"] +testing = ["black (>=22.12.0)", "coveralls", "flake8", "graphviz", "importlib-metadata (<5)", "pylint", "pytest (>=2.5)", "pytest-cov (<2.6.0)", "requests-mock", "sphinx (==4.5.0)", "sphinx_rtd_theme", "twine", "wheel"] [[package]] name = "soundfile" @@ -2925,8 +2807,6 @@ version = "0.13.1" description = "An audio library based on libsndfile, CFFI and NumPy" optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "soundfile-0.13.1-py2.py3-none-any.whl", hash = "sha256:a23c717560da2cf4c7b5ae1142514e0fd82d6bbd9dfc93a50423447142f2c445"}, {file = "soundfile-0.13.1-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:82dc664d19831933fe59adad199bf3945ad06d84bc111a5b4c0d3089a5b9ec33"}, @@ -2948,7 +2828,6 @@ version = "2.7" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4"}, {file = "soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a"}, @@ -2960,8 +2839,6 @@ version = "0.5.0.post1" description = "High quality, one-dimensional sample-rate conversion library" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "soxr-0.5.0.post1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:7406d782d85f8cf64e66b65e6b7721973de8a1dc50b9e88bc2288c343a987484"}, {file = "soxr-0.5.0.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa0a382fb8d8e2afed2c1642723b2d2d1b9a6728ff89f77f3524034c8885b8c9"}, @@ -2999,8 +2876,6 @@ version = "7.4.7" description = "Python documentation generator" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, @@ -3037,8 +2912,6 @@ version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, @@ -3055,8 +2928,6 @@ version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, @@ -3073,8 +2944,6 @@ version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, @@ -3091,8 +2960,6 @@ version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = true python-versions = ">=3.5" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -3107,8 +2974,6 @@ version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = 
"sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -3125,8 +2990,6 @@ version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -3143,7 +3006,6 @@ version = "1.7.0" description = "module to create simple ASCII tables" optional = false python-versions = "*" -groups = ["main", "test"] files = [ {file = "texttable-1.7.0-py2.py3-none-any.whl", hash = "sha256:72227d592c82b3d7f672731ae73e4d1f88cd8e2ef5b075a7a7f01a23a3743917"}, {file = "texttable-1.7.0.tar.gz", hash = "sha256:2d2068fb55115807d3ac77a4ca68fa48803e84ebb0ee2340f858107a36522638"}, @@ -3155,8 +3017,6 @@ version = "3.6.0" description = "threadpoolctl" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"autobpm\"" files = [ {file = "threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb"}, {file = "threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"}, @@ -3168,7 +3028,6 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" -groups = ["main", "release", "test", "typing"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -3210,7 +3069,6 @@ version = "4.12.0.20250204" description = "Typing stubs for beautifulsoup4" optional = false python-versions = ">=3.9" -groups = ["typing"] files = [ {file = "types_beautifulsoup4-4.12.0.20250204-py3-none-any.whl", hash = "sha256:57ce9e75717b63c390fd789c787d267a67eb01fa6d800a03b9bdde2e877ed1eb"}, {file = "types_beautifulsoup4-4.12.0.20250204.tar.gz", hash = "sha256:f083d8edcbd01279f8c3995b56cfff2d01f1bb894c3b502ba118d36fbbc495bf"}, @@ -3225,7 +3083,6 @@ version = "5.0.0.20250413" description = "Typing stubs for Flask-Cors" optional = false python-versions = ">=3.9" -groups = ["typing"] files = [ {file = "types_flask_cors-5.0.0.20250413-py3-none-any.whl", hash = "sha256:8183fdba764d45a5b40214468a1d5daa0e86c4ee6042d13f38cc428308f27a64"}, {file = "types_flask_cors-5.0.0.20250413.tar.gz", hash = "sha256:b346d052f4ef3b606b73faf13e868e458f1efdbfedcbe1aba739eb2f54a6cf5f"}, @@ -3240,7 +3097,6 @@ version = "1.1.11.20241018" description = "Typing stubs for html5lib" optional = false python-versions = ">=3.8" -groups = ["typing"] files = [ {file = "types-html5lib-1.1.11.20241018.tar.gz", hash = "sha256:98042555ff78d9e3a51c77c918b1041acbb7eb6c405408d8a9e150ff5beccafa"}, {file = "types_html5lib-1.1.11.20241018-py3-none-any.whl", hash = "sha256:3f1e064d9ed2c289001ae6392c84c93833abb0816165c6ff0abfc304a779f403"}, @@ -3252,7 +3108,6 @@ version = "5.2.0.20250306" description = "Typing stubs for mock" optional = 
false python-versions = ">=3.9" -groups = ["typing"] files = [ {file = "types_mock-5.2.0.20250306-py3-none-any.whl", hash = "sha256:eb69fec98b8de26be1d7121623d05a2f117d1ea2e01dd30c123d07d204a15c95"}, {file = "types_mock-5.2.0.20250306.tar.gz", hash = "sha256:15882cb5cf9980587a7607e31890801223801d7997f559686805ce09b6536087"}, @@ -3264,7 +3119,6 @@ version = "10.2.0.20240822" description = "Typing stubs for Pillow" optional = false python-versions = ">=3.8" -groups = ["typing"] files = [ {file = "types-Pillow-10.2.0.20240822.tar.gz", hash = "sha256:559fb52a2ef991c326e4a0d20accb3bb63a7ba8d40eb493e0ecb0310ba52f0d3"}, {file = "types_Pillow-10.2.0.20240822-py3-none-any.whl", hash = "sha256:d9dab025aba07aeb12fd50a6799d4eac52a9603488eca09d7662543983f16c5d"}, @@ -3276,7 +3130,6 @@ version = "6.0.12.20250402" description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.9" -groups = ["typing"] files = [ {file = "types_pyyaml-6.0.12.20250402-py3-none-any.whl", hash = "sha256:652348fa9e7a203d4b0d21066dfb00760d3cbd5a15ebb7cf8d33c88a49546681"}, {file = "types_pyyaml-6.0.12.20250402.tar.gz", hash = "sha256:d7c13c3e6d335b6af4b0122a01ff1d270aba84ab96d1a1a1063ecba3e13ec075"}, @@ -3288,7 +3141,6 @@ version = "2.32.0.20250328" description = "Typing stubs for requests" optional = false python-versions = ">=3.9" -groups = ["typing"] files = [ {file = "types_requests-2.32.0.20250328-py3-none-any.whl", hash = "sha256:72ff80f84b15eb3aa7a8e2625fffb6a93f2ad5a0c20215fc1dcfa61117bcb2a2"}, {file = "types_requests-2.32.0.20250328.tar.gz", hash = "sha256:c9e67228ea103bd811c96984fac36ed2ae8da87a36a633964a21f199d60baf32"}, @@ -3303,7 +3155,6 @@ version = "1.26.25.14" description = "Typing stubs for urllib3" optional = false python-versions = "*" -groups = ["typing"] files = [ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, @@ -3315,7 +3166,6 @@ version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" -groups = ["main", "test", "typing"] files = [ {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, @@ -3327,7 +3177,6 @@ version = "1.4.0" description = "ASCII transliterations of Unicode text" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "Unidecode-1.4.0-py3-none-any.whl", hash = "sha256:c3c7606c27503ad8d501270406e345ddb480a7b5f38827eafe4fa82a137f0021"}, {file = "Unidecode-1.4.0.tar.gz", hash = "sha256:ce35985008338b676573023acc382d62c264f307c8f7963733405add37ea2b23"}, @@ -3339,14 +3188,13 @@ version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" -groups = ["main", "test", "typing"] files = [ {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, ] [package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -3357,7 +3205,6 @@ version = "3.1.3" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.9" -groups = ["main", "test", "typing"] files = [ {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, @@ -3375,8 +3222,6 @@ version = "0.14.2" description = "Makes working with XML feel like you are working with JSON" optional = true python-versions = ">=3.6" -groups = ["main"] -markers = "extra == \"sonosupdate\"" files = [ {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, @@ -3388,19 +3233,17 @@ version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" -groups = ["main", "test", "typing"] -markers = "python_version == \"3.9\"" files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [extras] @@ -3431,6 +3274,6 @@ thumbnails = ["Pillow", "pyxdg"] web = ["flask", "flask-cors"] [metadata] -lock-version = "2.1" +lock-version = "2.0" python-versions = ">=3.9,<4" content-hash = "d609e83f7ffeefc12e28d627e5646aa5c1a6f5a56d7013bb649a468069550dba" diff --git a/pyproject.toml b/pyproject.toml index f83c174b4..8b817a078 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "beets" -version = "2.3.0" +version = "2.3.1" description = "music tagger and library organizer" authors = ["Adrian Sampson "] maintainers = ["Serene-Arc"] @@ -159,7 +159,7 @@ build-backend = "poetry.core.masonry.api" [tool.pipx-install] poethepoet = ">=0.26" -poetry = ">=1.8" +poetry = ">=1.8,<2" [tool.poe.tasks.build] help = "Build the package" diff --git 
a/test/plugins/test_albumtypes.py b/test/plugins/test_albumtypes.py index 8be1ff011..0a9d53349 100644 --- a/test/plugins/test_albumtypes.py +++ b/test/plugins/test_albumtypes.py @@ -16,9 +16,9 @@ from collections.abc import Sequence -from beets.autotag.mb import VARIOUS_ARTISTS_ID from beets.test.helper import PluginTestCase from beetsplug.albumtypes import AlbumTypesPlugin +from beetsplug.musicbrainz import VARIOUS_ARTISTS_ID class AlbumTypesPluginTest(PluginTestCase): diff --git a/test/plugins/test_edit.py b/test/plugins/test_edit.py index 2d557d623..278e04b9e 100644 --- a/test/plugins/test_edit.py +++ b/test/plugins/test_edit.py @@ -19,9 +19,9 @@ from beets.dbcore.query import TrueQuery from beets.library import Item from beets.test import _common from beets.test.helper import ( + AutotagImportTestCase, AutotagStub, BeetsTestCase, - ImportTestCase, PluginMixin, TerminalImportMixin, control_stdin, @@ -316,10 +316,12 @@ class EditCommandTest(EditMixin, BeetsTestCase): @_common.slow_test() class EditDuringImporterTestCase( - EditMixin, TerminalImportMixin, ImportTestCase + EditMixin, TerminalImportMixin, AutotagImportTestCase ): """TODO""" + matching = AutotagStub.GOOD + IGNORED = ["added", "album_id", "id", "mtime", "path"] def setUp(self): @@ -327,12 +329,6 @@ class EditDuringImporterTestCase( # Create some mediafiles, and store them for comparison. self.prepare_album_for_import(1) self.items_orig = [Item.from_path(f.path) for f in self.import_media] - self.matcher = AutotagStub().install() - self.matcher.matching = AutotagStub.GOOD - - def tearDown(self): - super().tearDown() - self.matcher.restore() @_common.slow_test() diff --git a/test/plugins/test_importadded.py b/test/plugins/test_importadded.py index 5c26fdca4..d54c04b0e 100644 --- a/test/plugins/test_importadded.py +++ b/test/plugins/test_importadded.py @@ -20,7 +20,7 @@ import os import pytest from beets import importer -from beets.test.helper import AutotagStub, ImportTestCase, PluginMixin +from beets.test.helper import AutotagImportTestCase, PluginMixin from beets.util import displayable_path, syspath from beetsplug.importadded import ImportAddedPlugin @@ -41,7 +41,7 @@ def modify_mtimes(paths, offset=-60000): os.utime(syspath(path), (mstat.st_atime, mstat.st_mtime + offset * i)) -class ImportAddedTest(PluginMixin, ImportTestCase): +class ImportAddedTest(PluginMixin, AutotagImportTestCase): # The minimum mtime of the files to be imported plugin = "importadded" min_mtime = None @@ -56,15 +56,9 @@ class ImportAddedTest(PluginMixin, ImportTestCase): self.min_mtime = min( os.path.getmtime(mfile.path) for mfile in self.import_media ) - self.matcher = AutotagStub().install() - self.matcher.matching = AutotagStub.IDENT self.importer = self.setup_importer() self.importer.add_choice(importer.Action.APPLY) - def tearDown(self): - super().tearDown() - self.matcher.restore() - def find_media_file(self, item): """Find the pre-import MediaFile for an Item""" for m in self.import_media: diff --git a/test/plugins/test_mbsubmit.py b/test/plugins/test_mbsubmit.py index 04b1b736e..712c90866 100644 --- a/test/plugins/test_mbsubmit.py +++ b/test/plugins/test_mbsubmit.py @@ -14,8 +14,7 @@ from beets.test.helper import ( - AutotagStub, - ImportTestCase, + AutotagImportTestCase, PluginMixin, TerminalImportMixin, capture_stdout, @@ -23,23 +22,18 @@ from beets.test.helper import ( ) -class MBSubmitPluginTest(PluginMixin, TerminalImportMixin, ImportTestCase): +class MBSubmitPluginTest( + PluginMixin, TerminalImportMixin, AutotagImportTestCase +): plugin 
= "mbsubmit" def setUp(self): super().setUp() self.prepare_album_for_import(2) self.setup_importer() - self.matcher = AutotagStub().install() - - def tearDown(self): - super().tearDown() - self.matcher.restore() def test_print_tracks_output(self): """Test the output of the "print tracks" choice.""" - self.matcher.matching = AutotagStub.BAD - with capture_stdout() as output: with control_stdin("\n".join(["p", "s"])): # Print tracks; Skip @@ -55,8 +49,6 @@ class MBSubmitPluginTest(PluginMixin, TerminalImportMixin, ImportTestCase): def test_print_tracks_output_as_tracks(self): """Test the output of the "print tracks" choice, as singletons.""" - self.matcher.matching = AutotagStub.BAD - with capture_stdout() as output: with control_stdin("\n".join(["t", "s", "p", "s"])): # as Tracks; Skip; Print tracks; Skip diff --git a/test/test_mb.py b/test/plugins/test_musicbrainz.py similarity index 90% rename from test/test_mb.py rename to test/plugins/test_musicbrainz.py index 37b5c0fff..b8640c870 100644 --- a/test/test_mb.py +++ b/test/plugins/test_musicbrainz.py @@ -17,11 +17,17 @@ from unittest import mock from beets import config -from beets.autotag import mb from beets.test.helper import BeetsTestCase +from beetsplug import musicbrainz -class MBAlbumInfoTest(BeetsTestCase): +class MusicBrainzTestCase(BeetsTestCase): + def setUp(self): + super().setUp() + self.mb = musicbrainz.MusicBrainzPlugin() + + +class MBAlbumInfoTest(MusicBrainzTestCase): def _make_release( self, date_str="2009", @@ -210,7 +216,7 @@ class MBAlbumInfoTest(BeetsTestCase): def test_parse_release_with_year(self): release = self._make_release("1984") - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.album == "ALBUM TITLE" assert d.album_id == "ALBUM ID" assert d.artist == "ARTIST NAME" @@ -221,12 +227,12 @@ class MBAlbumInfoTest(BeetsTestCase): def test_parse_release_type(self): release = self._make_release("1984") - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.albumtype == "album" def test_parse_release_full_date(self): release = self._make_release("1987-03-31") - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.original_year == 1987 assert d.original_month == 3 assert d.original_day == 31 @@ -238,7 +244,7 @@ class MBAlbumInfoTest(BeetsTestCase): ] release = self._make_release(tracks=tracks) - d = mb.album_info(release) + d = self.mb.album_info(release) t = d.tracks assert len(t) == 2 assert t[0].title == "TITLE ONE" @@ -255,7 +261,7 @@ class MBAlbumInfoTest(BeetsTestCase): ] release = self._make_release(tracks=tracks) - d = mb.album_info(release) + d = self.mb.album_info(release) t = d.tracks assert t[0].medium_index == 1 assert t[0].index == 1 @@ -269,7 +275,7 @@ class MBAlbumInfoTest(BeetsTestCase): ] release = self._make_release(tracks=tracks) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.mediums == 1 t = d.tracks assert t[0].medium == 1 @@ -296,7 +302,7 @@ class MBAlbumInfoTest(BeetsTestCase): } ) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.mediums == 2 t = d.tracks assert t[0].medium == 1 @@ -308,79 +314,81 @@ class MBAlbumInfoTest(BeetsTestCase): def test_parse_release_year_month_only(self): release = self._make_release("1987-03") - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.original_year == 1987 assert d.original_month == 3 def test_no_durations(self): tracks = [self._make_track("TITLE", "ID", None)] release = self._make_release(tracks=tracks) - d = mb.album_info(release) + d 
= self.mb.album_info(release) assert d.tracks[0].length is None def test_track_length_overrides_recording_length(self): tracks = [self._make_track("TITLE", "ID", 1.0 * 1000.0)] release = self._make_release(tracks=tracks, track_length=2.0 * 1000.0) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.tracks[0].length == 2.0 def test_no_release_date(self): release = self._make_release(None) - d = mb.album_info(release) + d = self.mb.album_info(release) assert not d.original_year assert not d.original_month assert not d.original_day def test_various_artists_defaults_false(self): release = self._make_release(None) - d = mb.album_info(release) + d = self.mb.album_info(release) assert not d.va def test_detect_various_artists(self): release = self._make_release(None) - release["artist-credit"][0]["artist"]["id"] = mb.VARIOUS_ARTISTS_ID - d = mb.album_info(release) + release["artist-credit"][0]["artist"]["id"] = ( + musicbrainz.VARIOUS_ARTISTS_ID + ) + d = self.mb.album_info(release) assert d.va def test_parse_artist_sort_name(self): release = self._make_release(None) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.artist_sort == "ARTIST SORT NAME" def test_parse_releasegroupid(self): release = self._make_release(None) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.releasegroup_id == "RELEASE GROUP ID" def test_parse_asin(self): release = self._make_release(None) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.asin == "ALBUM ASIN" def test_parse_catalognum(self): release = self._make_release(None) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.catalognum == "CATALOG NUMBER" def test_parse_textrepr(self): release = self._make_release(None) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.script == "SCRIPT" assert d.language == "LANGUAGE" def test_parse_country(self): release = self._make_release(None) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.country == "COUNTRY" def test_parse_status(self): release = self._make_release(None) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.albumstatus == "STATUS" def test_parse_barcode(self): release = self._make_release(None) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.barcode == "BARCODE" def test_parse_media(self): @@ -389,12 +397,12 @@ class MBAlbumInfoTest(BeetsTestCase): self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0), ] release = self._make_release(None, tracks=tracks) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.media == "FORMAT" def test_parse_disambig(self): release = self._make_release(None) - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.albumdisambig == "R_DISAMBIGUATION" assert d.releasegroupdisambig == "RG_DISAMBIGUATION" @@ -404,7 +412,7 @@ class MBAlbumInfoTest(BeetsTestCase): self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0), ] release = self._make_release(None, tracks=tracks) - d = mb.album_info(release) + d = self.mb.album_info(release) t = d.tracks assert t[0].disctitle == "MEDIUM TITLE" assert t[1].disctitle == "MEDIUM TITLE" @@ -412,13 +420,13 @@ class MBAlbumInfoTest(BeetsTestCase): def test_missing_language(self): release = self._make_release(None) del release["text-representation"]["language"] - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.language is None def test_parse_recording_artist(self): tracks = [self._make_track("a", "b", 1, 
True)] release = self._make_release(None, tracks=tracks) - track = mb.album_info(release).tracks[0] + track = self.mb.album_info(release).tracks[0] assert track.artist == "RECORDING ARTIST NAME" assert track.artist_id == "RECORDING ARTIST ID" assert track.artist_sort == "RECORDING ARTIST SORT NAME" @@ -427,7 +435,7 @@ class MBAlbumInfoTest(BeetsTestCase): def test_parse_recording_artist_multi(self): tracks = [self._make_track("a", "b", 1, True, multi_artist_credit=True)] release = self._make_release(None, tracks=tracks) - track = mb.album_info(release).tracks[0] + track = self.mb.album_info(release).tracks[0] assert track.artist == "RECORDING ARTIST NAME & RECORDING ARTIST 2 NAME" assert track.artist_id == "RECORDING ARTIST ID" assert ( @@ -459,7 +467,7 @@ class MBAlbumInfoTest(BeetsTestCase): def test_track_artist_overrides_recording_artist(self): tracks = [self._make_track("a", "b", 1, True)] release = self._make_release(None, tracks=tracks, track_artist=True) - track = mb.album_info(release).tracks[0] + track = self.mb.album_info(release).tracks[0] assert track.artist == "TRACK ARTIST NAME" assert track.artist_id == "TRACK ARTIST ID" assert track.artist_sort == "TRACK ARTIST SORT NAME" @@ -470,7 +478,7 @@ class MBAlbumInfoTest(BeetsTestCase): release = self._make_release( None, tracks=tracks, track_artist=True, multi_artist_credit=True ) - track = mb.album_info(release).tracks[0] + track = self.mb.album_info(release).tracks[0] assert track.artist == "TRACK ARTIST NAME & TRACK ARTIST 2 NAME" assert track.artist_id == "TRACK ARTIST ID" assert ( @@ -495,12 +503,12 @@ class MBAlbumInfoTest(BeetsTestCase): def test_parse_recording_remixer(self): tracks = [self._make_track("a", "b", 1, remixer=True)] release = self._make_release(None, tracks=tracks) - track = mb.album_info(release).tracks[0] + track = self.mb.album_info(release).tracks[0] assert track.remixer == "RECORDING REMIXER ARTIST NAME" def test_data_source(self): release = self._make_release() - d = mb.album_info(release) + d = self.mb.album_info(release) assert d.data_source == "MusicBrainz" def test_ignored_media(self): @@ -510,7 +518,7 @@ class MBAlbumInfoTest(BeetsTestCase): self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0), ] release = self._make_release(tracks=tracks, medium_format="IGNORED1") - d = mb.album_info(release) + d = self.mb.album_info(release) assert len(d.tracks) == 0 def test_no_ignored_media(self): @@ -520,7 +528,7 @@ class MBAlbumInfoTest(BeetsTestCase): self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0), ] release = self._make_release(tracks=tracks, medium_format="NON-IGNORED") - d = mb.album_info(release) + d = self.mb.album_info(release) assert len(d.tracks) == 2 def test_skip_data_track(self): @@ -530,7 +538,7 @@ class MBAlbumInfoTest(BeetsTestCase): self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0), ] release = self._make_release(tracks=tracks) - d = mb.album_info(release) + d = self.mb.album_info(release) assert len(d.tracks) == 2 assert d.tracks[0].title == "TITLE ONE" assert d.tracks[1].title == "TITLE TWO" @@ -546,7 +554,7 @@ class MBAlbumInfoTest(BeetsTestCase): ) ] release = self._make_release(tracks=tracks, data_tracks=data_tracks) - d = mb.album_info(release) + d = self.mb.album_info(release) assert len(d.tracks) == 2 assert d.tracks[0].title == "TITLE ONE" assert d.tracks[1].title == "TITLE TWO" @@ -563,7 +571,7 @@ class MBAlbumInfoTest(BeetsTestCase): ) ] release = self._make_release(tracks=tracks, data_tracks=data_tracks) - d = mb.album_info(release) + d = 
self.mb.album_info(release) assert len(d.tracks) == 3 assert d.tracks[0].title == "TITLE ONE" assert d.tracks[1].title == "TITLE TWO" @@ -578,7 +586,7 @@ class MBAlbumInfoTest(BeetsTestCase): self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0), ] release = self._make_release(tracks=tracks) - d = mb.album_info(release) + d = self.mb.album_info(release) assert len(d.tracks) == 2 assert d.tracks[0].title == "TITLE ONE" assert d.tracks[1].title == "TITLE TWO" @@ -594,7 +602,7 @@ class MBAlbumInfoTest(BeetsTestCase): ) ] release = self._make_release(tracks=tracks, data_tracks=data_tracks) - d = mb.album_info(release) + d = self.mb.album_info(release) assert len(d.tracks) == 2 assert d.tracks[0].title == "TITLE ONE" assert d.tracks[1].title == "TITLE TWO" @@ -610,7 +618,7 @@ class MBAlbumInfoTest(BeetsTestCase): self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0), ] release = self._make_release(tracks=tracks) - d = mb.album_info(release) + d = self.mb.album_info(release) assert len(d.tracks) == 3 assert d.tracks[0].title == "TITLE ONE" assert d.tracks[1].title == "TITLE VIDEO" @@ -629,7 +637,7 @@ class MBAlbumInfoTest(BeetsTestCase): ) ] release = self._make_release(tracks=tracks, data_tracks=data_tracks) - d = mb.album_info(release) + d = self.mb.album_info(release) assert len(d.tracks) == 3 assert d.tracks[0].title == "TITLE ONE" assert d.tracks[1].title == "TITLE TWO" @@ -647,7 +655,7 @@ class MBAlbumInfoTest(BeetsTestCase): ] release = self._make_release(tracks=tracks) - d = mb.album_info(release) + d = self.mb.album_info(release) t = d.tracks assert len(t) == 2 assert t[0].trackdisambig is None @@ -657,18 +665,18 @@ class MBAlbumInfoTest(BeetsTestCase): class ParseIDTest(BeetsTestCase): def test_parse_id_correct(self): id_string = "28e32c71-1450-463e-92bf-e0a46446fc11" - out = mb._parse_id(id_string) + out = musicbrainz._parse_id(id_string) assert out == id_string def test_parse_id_non_id_returns_none(self): id_string = "blah blah" - out = mb._parse_id(id_string) + out = musicbrainz._parse_id(id_string) assert out is None def test_parse_id_url_finds_id(self): id_string = "28e32c71-1450-463e-92bf-e0a46446fc11" id_url = "https://musicbrainz.org/entity/%s" % id_string - out = mb._parse_id(id_url) + out = musicbrainz._parse_id(id_url) assert out == id_string @@ -696,24 +704,28 @@ class ArtistFlatteningTest(BeetsTestCase): def test_single_artist(self): credit = [self._credit_dict()] - a, s, c = mb._flatten_artist_credit(credit) + a, s, c = musicbrainz._flatten_artist_credit(credit) assert a == "NAME" assert s == "SORT" assert c == "CREDIT" - a, s, c = mb._multi_artist_credit(credit, include_join_phrase=False) + a, s, c = musicbrainz._multi_artist_credit( + credit, include_join_phrase=False + ) assert a == ["NAME"] assert s == ["SORT"] assert c == ["CREDIT"] def test_two_artists(self): credit = [self._credit_dict("a"), " AND ", self._credit_dict("b")] - a, s, c = mb._flatten_artist_credit(credit) + a, s, c = musicbrainz._flatten_artist_credit(credit) assert a == "NAMEa AND NAMEb" assert s == "SORTa AND SORTb" assert c == "CREDITa AND CREDITb" - a, s, c = mb._multi_artist_credit(credit, include_join_phrase=False) + a, s, c = musicbrainz._multi_artist_credit( + credit, include_join_phrase=False + ) assert a == ["NAMEa", "NAMEb"] assert s == ["SORTa", "SORTb"] assert c == ["CREDITa", "CREDITb"] @@ -730,36 +742,36 @@ class ArtistFlatteningTest(BeetsTestCase): # test no alias config["import"]["languages"] = [""] - flat = mb._flatten_artist_credit([credit_dict]) + flat = 
musicbrainz._flatten_artist_credit([credit_dict]) assert flat == ("NAME", "SORT", "CREDIT") # test en primary config["import"]["languages"] = ["en"] - flat = mb._flatten_artist_credit([credit_dict]) + flat = musicbrainz._flatten_artist_credit([credit_dict]) assert flat == ("ALIASen", "ALIASSORTen", "CREDIT") # test en_GB en primary config["import"]["languages"] = ["en_GB", "en"] - flat = mb._flatten_artist_credit([credit_dict]) + flat = musicbrainz._flatten_artist_credit([credit_dict]) assert flat == ("ALIASen_GB", "ALIASSORTen_GB", "CREDIT") # test en en_GB primary config["import"]["languages"] = ["en", "en_GB"] - flat = mb._flatten_artist_credit([credit_dict]) + flat = musicbrainz._flatten_artist_credit([credit_dict]) assert flat == ("ALIASen", "ALIASSORTen", "CREDIT") # test fr primary config["import"]["languages"] = ["fr"] - flat = mb._flatten_artist_credit([credit_dict]) + flat = musicbrainz._flatten_artist_credit([credit_dict]) assert flat == ("ALIASfr_P", "ALIASSORTfr_P", "CREDIT") # test for not matching non-primary config["import"]["languages"] = ["pt_BR", "fr"] - flat = mb._flatten_artist_credit([credit_dict]) + flat = musicbrainz._flatten_artist_credit([credit_dict]) assert flat == ("ALIASfr_P", "ALIASSORTfr_P", "CREDIT") -class MBLibraryTest(BeetsTestCase): +class MBLibraryTest(MusicBrainzTestCase): def test_match_track(self): with mock.patch("musicbrainzngs.search_recordings") as p: p.return_value = { @@ -771,13 +783,13 @@ class MBLibraryTest(BeetsTestCase): } ], } - ti = list(mb.match_track("hello", "there"))[0] + ti = list(self.mb.item_candidates(None, "hello", "there"))[0] p.assert_called_with(artist="hello", recording="there", limit=5) assert ti.title == "foo" assert ti.track_id == "bar" - def test_match_album(self): + def test_candidates(self): mbid = "d2a6f856-b553-40a0-ac54-a321e8e2da99" with mock.patch("musicbrainzngs.search_releases") as sp: sp.return_value = { @@ -824,7 +836,7 @@ class MBLibraryTest(BeetsTestCase): } } - ai = list(mb.match_album("hello", "there"))[0] + ai = list(self.mb.candidates([], "hello", "there", False))[0] sp.assert_called_with(artist="hello", release="there", limit=5) gp.assert_called_with(mbid, mock.ANY) @@ -833,13 +845,13 @@ class MBLibraryTest(BeetsTestCase): def test_match_track_empty(self): with mock.patch("musicbrainzngs.search_recordings") as p: - til = list(mb.match_track(" ", " ")) + til = list(self.mb.item_candidates(None, " ", " ")) assert not p.called assert til == [] - def test_match_album_empty(self): + def test_candidates_empty(self): with mock.patch("musicbrainzngs.search_releases") as p: - ail = list(mb.match_album(" ", " ")) + ail = list(self.mb.candidates([], " ", " ", False)) assert not p.called assert ail == [] @@ -927,7 +939,7 @@ class MBLibraryTest(BeetsTestCase): with mock.patch("musicbrainzngs.get_release_by_id") as gp: gp.side_effect = side_effect - album = mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02") + album = self.mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02") assert album.country == "COUNTRY" def test_pseudo_releases_with_empty_links(self): @@ -972,7 +984,7 @@ class MBLibraryTest(BeetsTestCase): with mock.patch("musicbrainzngs.get_release_by_id") as gp: gp.side_effect = side_effect - album = mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02") + album = self.mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02") assert album.country is None def test_pseudo_releases_without_links(self): @@ -1016,7 +1028,7 @@ class MBLibraryTest(BeetsTestCase): with 
mock.patch("musicbrainzngs.get_release_by_id") as gp: gp.side_effect = side_effect - album = mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02") + album = self.mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02") assert album.country is None def test_pseudo_releases_with_unsupported_links(self): @@ -1067,5 +1079,5 @@ class MBLibraryTest(BeetsTestCase): with mock.patch("musicbrainzngs.get_release_by_id") as gp: gp.side_effect = side_effect - album = mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02") + album = self.mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02") assert album.country is None diff --git a/test/plugins/test_scrub.py b/test/plugins/test_scrub.py new file mode 100644 index 000000000..129d91a22 --- /dev/null +++ b/test/plugins/test_scrub.py @@ -0,0 +1,37 @@ +import os + +from mediafile import MediaFile + +from beets.test.helper import AsIsImporterMixin, ImportTestCase, PluginMixin + + +class ScrubbedImportTest(AsIsImporterMixin, PluginMixin, ImportTestCase): + db_on_disk = True + plugin = "scrub" + + def test_tags_not_scrubbed(self): + with self.configure_plugin({"auto": False}): + self.run_asis_importer(write=True) + + for item in self.lib.items(): + imported_file = MediaFile(os.path.join(item.path)) + assert imported_file.artist == "Tag Artist" + assert imported_file.album == "Tag Album" + + def test_tags_restored(self): + with self.configure_plugin({"auto": True}): + self.run_asis_importer(write=True) + + for item in self.lib.items(): + imported_file = MediaFile(os.path.join(item.path)) + assert imported_file.artist == "Tag Artist" + assert imported_file.album == "Tag Album" + + def test_tags_not_restored(self): + with self.configure_plugin({"auto": True}): + self.run_asis_importer(write=False) + + for item in self.lib.items(): + imported_file = MediaFile(os.path.join(item.path)) + assert imported_file.artist is None + assert imported_file.album is None diff --git a/test/test_importer.py b/test/test_importer.py index 34dea6df8..993362254 100644 --- a/test/test_importer.py +++ b/test/test_importer.py @@ -39,6 +39,7 @@ from beets.test import _common from beets.test.helper import ( NEEDS_REFLINK, AsIsImporterMixin, + AutotagImportTestCase, AutotagStub, BeetsTestCase, ImportTestCase, @@ -49,53 +50,6 @@ from beets.test.helper import ( from beets.util import bytestring_path, displayable_path, syspath -class ScrubbedImportTest(AsIsImporterMixin, PluginMixin, ImportTestCase): - db_on_disk = True - plugin = "scrub" - - def test_tags_not_scrubbed(self): - config["plugins"] = ["scrub"] - config["scrub"]["auto"] = False - config["import"]["write"] = True - for mediafile in self.import_media: - assert mediafile.artist == "Tag Artist" - assert mediafile.album == "Tag Album" - self.run_asis_importer() - for item in self.lib.items(): - imported_file = os.path.join(item.path) - imported_file = MediaFile(imported_file) - assert imported_file.artist == "Tag Artist" - assert imported_file.album == "Tag Album" - - def test_tags_restored(self): - config["plugins"] = ["scrub"] - config["scrub"]["auto"] = True - config["import"]["write"] = True - for mediafile in self.import_media: - assert mediafile.artist == "Tag Artist" - assert mediafile.album == "Tag Album" - self.run_asis_importer() - for item in self.lib.items(): - imported_file = os.path.join(item.path) - imported_file = MediaFile(imported_file) - assert imported_file.artist == "Tag Artist" - assert imported_file.album == "Tag Album" - - def test_tags_not_restored(self): - config["plugins"] = ["scrub"] - 
config["scrub"]["auto"] = True - config["import"]["write"] = False - for mediafile in self.import_media: - assert mediafile.artist == "Tag Artist" - assert mediafile.album == "Tag Album" - self.run_asis_importer() - for item in self.lib.items(): - imported_file = os.path.join(item.path) - imported_file = MediaFile(imported_file) - assert imported_file.artist is None - assert imported_file.album is None - - @_common.slow_test() class NonAutotaggedImportTest(AsIsImporterMixin, ImportTestCase): db_on_disk = True @@ -306,7 +260,7 @@ class ImportPasswordRarTest(ImportZipTest): return os.path.join(_common.RSRC, b"password.rar") -class ImportSingletonTest(ImportTestCase): +class ImportSingletonTest(AutotagImportTestCase): """Test ``APPLY`` and ``ASIS`` choices for an import session with singletons config set to True. """ @@ -315,11 +269,6 @@ class ImportSingletonTest(ImportTestCase): super().setUp() self.prepare_album_for_import(1) self.importer = self.setup_singleton_importer() - self.matcher = AutotagStub().install() - - def tearDown(self): - super().tearDown() - self.matcher.restore() def test_apply_asis_adds_track(self): assert self.lib.items().get() is None @@ -432,19 +381,13 @@ class ImportSingletonTest(ImportTestCase): assert item.disc == disc -class ImportTest(ImportTestCase): +class ImportTest(AutotagImportTestCase): """Test APPLY, ASIS and SKIP choices.""" def setUp(self): super().setUp() self.prepare_album_for_import(1) self.setup_importer() - self.matcher = AutotagStub().install() - self.matcher.matching = AutotagStub.IDENT - - def tearDown(self): - super().tearDown() - self.matcher.restore() def test_apply_asis_adds_album(self): assert self.lib.albums().get() is None @@ -639,18 +582,13 @@ class ImportTest(ImportTestCase): assert item.disc == disc -class ImportTracksTest(ImportTestCase): +class ImportTracksTest(AutotagImportTestCase): """Test TRACKS and APPLY choice.""" def setUp(self): super().setUp() self.prepare_album_for_import(1) self.setup_importer() - self.matcher = AutotagStub().install() - - def tearDown(self): - super().tearDown() - self.matcher.restore() def test_apply_tracks_adds_singleton_track(self): assert self.lib.items().get() is None @@ -673,18 +611,13 @@ class ImportTracksTest(ImportTestCase): self.assert_file_in_lib(b"singletons", b"Applied Track 1.mp3") -class ImportCompilationTest(ImportTestCase): +class ImportCompilationTest(AutotagImportTestCase): """Test ASIS import of a folder containing tracks with different artists.""" def setUp(self): super().setUp() self.prepare_album_for_import(3) self.setup_importer() - self.matcher = AutotagStub().install() - - def tearDown(self): - super().tearDown() - self.matcher.restore() def test_asis_homogenous_sets_albumartist(self): self.importer.add_choice(importer.Action.ASIS) @@ -783,21 +716,16 @@ class ImportCompilationTest(ImportTestCase): assert asserted_multi_artists_1 -class ImportExistingTest(ImportTestCase): +class ImportExistingTest(AutotagImportTestCase): """Test importing files that are already in the library directory.""" def setUp(self): super().setUp() self.prepare_album_for_import(1) - self.matcher = AutotagStub().install() self.reimporter = self.setup_importer(import_dir=self.libdir) self.importer = self.setup_importer() - def tearDown(self): - super().tearDown() - self.matcher.restore() - def test_does_not_duplicate_item(self): self.importer.run() assert len(self.lib.items()) == 1 @@ -904,12 +832,12 @@ class ImportExistingTest(ImportTestCase): self.assertNotExists(self.import_media[0].path) -class 
GroupAlbumsImportTest(ImportTestCase): +class GroupAlbumsImportTest(AutotagImportTestCase): + matching = AutotagStub.NONE + def setUp(self): super().setUp() self.prepare_album_for_import(3) - self.matcher = AutotagStub().install() - self.matcher.matching = AutotagStub.NONE self.setup_importer() # Split tracks into two albums and use both as-is @@ -917,10 +845,6 @@ class GroupAlbumsImportTest(ImportTestCase): self.importer.add_choice(importer.Action.ASIS) self.importer.add_choice(importer.Action.ASIS) - def tearDown(self): - super().tearDown() - self.matcher.restore() - def test_add_album_for_different_artist_and_different_album(self): self.import_media[0].artist = "Artist B" self.import_media[0].album = "Album B" @@ -976,17 +900,13 @@ class GlobalGroupAlbumsImportTest(GroupAlbumsImportTest): config["import"]["group_albums"] = True -class ChooseCandidateTest(ImportTestCase): +class ChooseCandidateTest(AutotagImportTestCase): + matching = AutotagStub.BAD + def setUp(self): super().setUp() self.prepare_album_for_import(1) self.setup_importer() - self.matcher = AutotagStub().install() - self.matcher.matching = AutotagStub.BAD - - def tearDown(self): - super().tearDown() - self.matcher.restore() def test_choose_first_candidate(self): self.importer.add_choice(1) @@ -1094,26 +1014,22 @@ class InferAlbumDataTest(BeetsTestCase): assert not self.items[0].comp -def match_album_mock(*args, **kwargs): +def album_candidates_mock(*args, **kwargs): """Create an AlbumInfo object for testing.""" - track_info = TrackInfo( - title="new title", - track_id="trackid", - index=0, - ) - album_info = AlbumInfo( + yield AlbumInfo( artist="artist", album="album", - tracks=[track_info], + tracks=[TrackInfo(title="new title", track_id="trackid", index=0)], album_id="albumid", artist_id="artistid", flex="flex", ) - return iter([album_info]) -@patch("beets.autotag.mb.match_album", Mock(side_effect=match_album_mock)) -class ImportDuplicateAlbumTest(ImportTestCase): +@patch("beets.plugins.candidates", Mock(side_effect=album_candidates_mock)) +class ImportDuplicateAlbumTest(PluginMixin, ImportTestCase): + plugin = "musicbrainz" + def setUp(self): super().setUp() @@ -1219,20 +1135,16 @@ class ImportDuplicateAlbumTest(ImportTestCase): return album -def match_track_mock(*args, **kwargs): - return iter( - [ - TrackInfo( - artist="artist", - title="title", - track_id="new trackid", - index=0, - ) - ] +def item_candidates_mock(*args, **kwargs): + yield TrackInfo( + artist="artist", + title="title", + track_id="new trackid", + index=0, ) -@patch("beets.autotag.mb.match_track", Mock(side_effect=match_track_mock)) +@patch("beets.plugins.item_candidates", Mock(side_effect=item_candidates_mock)) class ImportDuplicateSingletonTest(ImportTestCase): def setUp(self): super().setUp() @@ -1566,7 +1478,7 @@ class MultiDiscAlbumsInDirTest(BeetsTestCase): assert len(items) == 3 -class ReimportTest(ImportTestCase): +class ReimportTest(AutotagImportTestCase): """Test "re-imports", in which the autotagging machinery is used for music that's already in the library. @@ -1575,6 +1487,8 @@ class ReimportTest(ImportTestCase): attributes and the added date. """ + matching = AutotagStub.GOOD + def setUp(self): super().setUp() @@ -1589,14 +1503,6 @@ class ReimportTest(ImportTestCase): item.added = 4747.0 item.store() - # Set up an import pipeline with a "good" match. 
- self.matcher = AutotagStub().install() - self.matcher.matching = AutotagStub.GOOD - - def tearDown(self): - super().tearDown() - self.matcher.restore() - def _setup_session(self, singletons=False): self.setup_importer(import_dir=self.libdir, singletons=singletons) self.importer.add_choice(importer.Action.APPLY) @@ -1672,27 +1578,22 @@ class ReimportTest(ImportTestCase): def test_reimported_album_not_preserves_flexattr(self): self._setup_session() - assert self._album().data_source == "original_source" + self.importer.run() assert self._album().data_source == "match_source" -class ImportPretendTest(ImportTestCase): +class ImportPretendTest(AutotagImportTestCase): """Test the pretend commandline option""" def setUp(self): super().setUp() - self.matcher = AutotagStub().install() self.io.install() self.album_track_path = self.prepare_album_for_import(1)[0] self.single_path = self.prepare_track_for_import(2, self.import_path) self.album_path = self.album_track_path.parent - def tearDown(self): - super().tearDown() - self.matcher.restore() - def __run(self, importer): with capture_log() as logs: importer.run() @@ -1701,6 +1602,7 @@ class ImportPretendTest(ImportTestCase): assert len(self.lib.albums()) == 0 return [line for line in logs if not line.startswith("Sending event:")] + assert self._album().data_source == "original_source" def test_import_singletons_pretend(self): assert self.__run(self.setup_singleton_importer(pretend=True)) == [ @@ -1725,112 +1627,64 @@ class ImportPretendTest(ImportTestCase): assert self.__run(importer) == [f"No files imported from {empty_path}"] -# Helpers for ImportMusicBrainzIdTest. +def mocked_get_album_by_id(id_): + """Return album candidate for the given id. - -def mocked_get_release_by_id( - id_, includes=[], release_status=[], release_type=[] -): - """Mimic musicbrainzngs.get_release_by_id, accepting only a restricted list - of MB ids (ID_RELEASE_0, ID_RELEASE_1). The returned dict differs only in - the release title and artist name, so that ID_RELEASE_0 is a closer match - to the items created by ImportHelper.prepare_album_for_import().""" + The two albums differ only in the release title and artist name, so that + ID_RELEASE_0 is a closer match to the items created by + ImportHelper.prepare_album_for_import(). + """ # Map IDs to (release title, artist), so the distances are different. 
-    releases = {
-        ImportMusicBrainzIdTest.ID_RELEASE_0: ("VALID_RELEASE_0", "TAG ARTIST"),
-        ImportMusicBrainzIdTest.ID_RELEASE_1: (
-            "VALID_RELEASE_1",
-            "DISTANT_MATCH",
-        ),
-    }
+    album, artist = {
+        ImportIdTest.ID_RELEASE_0: ("VALID_RELEASE_0", "TAG ARTIST"),
+        ImportIdTest.ID_RELEASE_1: ("VALID_RELEASE_1", "DISTANT_MATCH"),
+    }[id_]

-    return {
-        "release": {
-            "title": releases[id_][0],
-            "id": id_,
-            "medium-list": [
-                {
-                    "track-list": [
-                        {
-                            "id": "baz",
-                            "recording": {
-                                "title": "foo",
-                                "id": "bar",
-                                "length": 59,
-                            },
-                            "position": 9,
-                            "number": "A2",
-                        }
-                    ],
-                    "position": 5,
-                }
-            ],
-            "artist-credit": [
-                {
-                    "artist": {
-                        "name": releases[id_][1],
-                        "id": "some-id",
-                    },
-                }
-            ],
-            "release-group": {
-                "id": "another-id",
-            },
-            "status": "Official",
-        }
-    }
+    return AlbumInfo(
+        album_id=id_,
+        album=album,
+        artist_id="some-id",
+        artist=artist,
+        albumstatus="Official",
+        tracks=[
+            TrackInfo(
+                track_id="bar",
+                title="foo",
+                artist_id="some-id",
+                artist=artist,
+                length=59,
+                index=9,
+                track_alt="A2",
+            )
+        ],
+    )


-def mocked_get_recording_by_id(
-    id_, includes=[], release_status=[], release_type=[]
-):
-    """Mimic musicbrainzngs.get_recording_by_id, accepting only a restricted
-    list of MB ids (ID_RECORDING_0, ID_RECORDING_1). The returned dict differs
-    only in the recording title and artist name, so that ID_RECORDING_0 is a
-    closer match to the items created by ImportHelper.prepare_album_for_import()."""
+def mocked_get_track_by_id(id_):
+    """Return track candidate for the given id.
+
+    The two tracks differ only in the recording title and artist name, so that
+    ID_RECORDING_0 is a closer match to the items created by
+    ImportHelper.prepare_album_for_import().
+    """
     # Map IDs to (recording title, artist), so the distances are different.
-    releases = {
-        ImportMusicBrainzIdTest.ID_RECORDING_0: (
-            "VALID_RECORDING_0",
-            "TAG ARTIST",
-        ),
-        ImportMusicBrainzIdTest.ID_RECORDING_1: (
-            "VALID_RECORDING_1",
-            "DISTANT_MATCH",
-        ),
-    }
+    title, artist = {
+        ImportIdTest.ID_RECORDING_0: ("VALID_RECORDING_0", "TAG ARTIST"),
+        ImportIdTest.ID_RECORDING_1: ("VALID_RECORDING_1", "DISTANT_MATCH"),
+    }[id_]

-    return {
-        "recording": {
-            "title": releases[id_][0],
-            "id": id_,
-            "length": 59,
-            "artist-credit": [
-                {
-                    "artist": {
-                        "name": releases[id_][1],
-                        "id": "some-id",
-                    },
-                }
-            ],
-        }
-    }
+    return TrackInfo(
+        track_id=id_,
+        title=title,
+        artist_id="some-id",
+        artist=artist,
+        length=59,
+    )


-@patch(
-    "musicbrainzngs.get_recording_by_id",
-    Mock(side_effect=mocked_get_recording_by_id),
-)
-@patch(
-    "musicbrainzngs.get_release_by_id",
-    Mock(side_effect=mocked_get_release_by_id),
-)
-class ImportMusicBrainzIdTest(ImportTestCase):
-    """Test the --musicbrainzid argument."""
-
-    MB_RELEASE_PREFIX = "https://musicbrainz.org/release/"
-    MB_RECORDING_PREFIX = "https://musicbrainz.org/recording/"
+@patch("beets.plugins.track_for_id", Mock(side_effect=mocked_get_track_by_id))
+@patch("beets.plugins.album_for_id", Mock(side_effect=mocked_get_album_by_id))
+class ImportIdTest(ImportTestCase):
     ID_RELEASE_0 = "00000000-0000-0000-0000-000000000000"
     ID_RELEASE_1 = "11111111-1111-1111-1111-111111111111"
     ID_RECORDING_0 = "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"
@@ -1841,21 +1695,14 @@ class ImportMusicBrainzIdTest(ImportTestCase):
         self.prepare_album_for_import(1)

     def test_one_mbid_one_album(self):
-        self.setup_importer(
-            search_ids=[self.MB_RELEASE_PREFIX + self.ID_RELEASE_0]
-        )
+        self.setup_importer(search_ids=[self.ID_RELEASE_0])
         self.importer.add_choice(importer.Action.APPLY)
         self.importer.run()
         assert self.lib.albums().get().album == "VALID_RELEASE_0"

     def test_several_mbid_one_album(self):
-        self.setup_importer(
-            search_ids=[
-                self.MB_RELEASE_PREFIX + self.ID_RELEASE_0,
-                self.MB_RELEASE_PREFIX + self.ID_RELEASE_1,
-            ]
-        )
+        self.setup_importer(search_ids=[self.ID_RELEASE_0, self.ID_RELEASE_1])
         self.importer.add_choice(2)  # Pick the 2nd best match (release 1).
         self.importer.add_choice(importer.Action.APPLY)
@@ -1863,9 +1710,7 @@
         assert self.lib.albums().get().album == "VALID_RELEASE_1"

     def test_one_mbid_one_singleton(self):
-        self.setup_singleton_importer(
-            search_ids=[self.MB_RECORDING_PREFIX + self.ID_RECORDING_0]
-        )
+        self.setup_singleton_importer(search_ids=[self.ID_RECORDING_0])
         self.importer.add_choice(importer.Action.APPLY)
         self.importer.run()
@@ -1873,10 +1718,7 @@
     def test_several_mbid_one_singleton(self):
         self.setup_singleton_importer(
-            search_ids=[
-                self.MB_RECORDING_PREFIX + self.ID_RECORDING_0,
-                self.MB_RECORDING_PREFIX + self.ID_RECORDING_1,
-            ]
+            search_ids=[self.ID_RECORDING_0, self.ID_RECORDING_1]
         )
         self.importer.add_choice(2)  # Pick the 2nd best match (recording 1).
@@ -1889,11 +1731,7 @@ class ImportMusicBrainzIdTest(ImportTestCase):
         task = importer.ImportTask(
             paths=self.import_dir, toppath="top path", items=[_common.item()]
         )
-        task.search_ids = [
-            self.MB_RELEASE_PREFIX + self.ID_RELEASE_0,
-            self.MB_RELEASE_PREFIX + self.ID_RELEASE_1,
-            "an invalid and discarded id",
-        ]
+        task.search_ids = [self.ID_RELEASE_0, self.ID_RELEASE_1]

         task.lookup_candidates()
         assert {"VALID_RELEASE_0", "VALID_RELEASE_1"} == {
@@ -1905,11 +1743,7 @@ class ImportMusicBrainzIdTest(ImportTestCase):
         task = importer.SingletonImportTask(
             toppath="top path", item=_common.item()
         )
-        task.search_ids = [
-            self.MB_RECORDING_PREFIX + self.ID_RECORDING_0,
-            self.MB_RECORDING_PREFIX + self.ID_RECORDING_1,
-            "an invalid and discarded id",
-        ]
+        task.search_ids = [self.ID_RECORDING_0, self.ID_RECORDING_1]

         task.lookup_candidates()
         assert {"VALID_RECORDING_0", "VALID_RECORDING_1"} == {
diff --git a/test/test_plugins.py b/test/test_plugins.py
index 25f1f3c66..417debbdd 100644
--- a/test/test_plugins.py
+++ b/test/test_plugins.py
@@ -347,7 +347,8 @@ class PromptChoicesTest(TerminalImportMixin, PluginImportTestCase):
     def setUp(self):
         super().setUp()
         self.setup_importer()
-        self.matcher = AutotagStub().install()
+        self.matcher = AutotagStub(AutotagStub.IDENT).install()
+        self.addCleanup(self.matcher.restore)
         # keep track of ui.input_option() calls
         self.input_options_patcher = patch(
             "beets.ui.input_options", side_effect=ui.input_options
@@ -357,7 +358,6 @@ class PromptChoicesTest(TerminalImportMixin, PluginImportTestCase):
     def tearDown(self):
         super().tearDown()
         self.input_options_patcher.stop()
-        self.matcher.restore()

     def test_plugin_choices_in_ui_input_options_album(self):
         """Test the presence of plugin choices on the prompt (album)."""