diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index c6ec4cb5f..14b50859f 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -57,3 +57,19 @@ c490ac5810b70f3cf5fd8649669838e8fdb19f4d 769dcdc88a1263638ae25944ba6b2be3e8933666 # Reformat all docs using docstrfmt ab5acaabb3cd24c482adb7fa4800c89fd6a2f08d +# Replace format calls with f-strings +4a361bd501e85de12c91c2474c423559ca672852 +# Replace percent formatting +9352a79e4108bd67f7e40b1e944c01e0a7353272 +# Replace string concatenation (' + ') +1c16b2b3087e9c3635d68d41c9541c4319d0bdbe +# Do not use backslashes to deal with long strings +2fccf64efe82851861e195b521b14680b480a42a +# Do not use explicit indices for logging args when not needed +d93ddf8dd43e4f9ed072a03829e287c78d2570a2 +# Moved dev docs +07549ed896d9649562d40b75cd30702e6fa6e975 +# Moved plugin docs Further Reading chapter +33f1a5d0bef8ca08be79ee7a0d02a018d502680d +# Moved art.py utility module from beets into beetsplug +28aee0fde463f1e18dfdba1994e2bdb80833722f diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..bb888d520 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,5 @@ +# assign the entire repo to the maintainers team +* @beetbox/maintainers + +# Specific ownerships: +/beets/metadata_plugins.py @semohr \ No newline at end of file diff --git a/.github/workflows/changelog_reminder.yaml b/.github/workflows/changelog_reminder.yaml index a9c26c1f5..380d89996 100644 --- a/.github/workflows/changelog_reminder.yaml +++ b/.github/workflows/changelog_reminder.yaml @@ -10,7 +10,7 @@ jobs: check_changes: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Get all updated Python files id: changed-python-files diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 80826f468..e8a532956 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -20,17 +20,17 @@ jobs: fail-fast: false matrix: platform: [ubuntu-latest, windows-latest] - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] runs-on: ${{ matrix.platform }} env: IS_MAIN_PYTHON: ${{ matrix.python-version == '3.9' && matrix.platform == 'ubuntu-latest' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Install Python tools uses: BrandonLWhite/pipx-install-action@v1.0.3 - name: Setup Python with poetry caching # poetry cache requires poetry to already be installed, weirdly - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: poetry @@ -39,7 +39,15 @@ jobs: if: matrix.platform == 'ubuntu-latest' run: | sudo apt update - sudo apt install --yes --no-install-recommends ffmpeg gobject-introspection gstreamer1.0-plugins-base python3-gst-1.0 libcairo2-dev libgirepository-2.0-dev pandoc imagemagick + sudo apt install --yes --no-install-recommends \ + ffmpeg \ + gobject-introspection \ + gstreamer1.0-plugins-base \ + python3-gst-1.0 \ + libcairo2-dev \ + libgirepository-2.0-dev \ + pandoc \ + imagemagick - name: Get changed lyrics files id: lyrics-update @@ -90,10 +98,10 @@ jobs: permissions: id-token: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Get the coverage report - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v5 with: name: coverage-report diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index f88864c48..8c7e44d7a 100644 --- 
a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -7,10 +7,10 @@ jobs: test_integration: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Install Python tools uses: BrandonLWhite/pipx-install-action@v1.0.3 - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: python-version: 3.9 cache: poetry diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 0048a8f6e..dcc5d0f12 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -24,7 +24,7 @@ jobs: changed_doc_files: ${{ steps.changed-doc-files.outputs.all_changed_files }} changed_python_files: ${{ steps.changed-python-files.outputs.all_changed_files }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Get changed docs files id: changed-doc-files uses: tj-actions/changed-files@v46 @@ -56,10 +56,10 @@ jobs: name: Check formatting needs: changed-files steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Install Python tools uses: BrandonLWhite/pipx-install-action@v1.0.3 - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: python-version: ${{ env.PYTHON_VERSION }} cache: poetry @@ -77,10 +77,10 @@ jobs: name: Check linting needs: changed-files steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Install Python tools uses: BrandonLWhite/pipx-install-action@v1.0.3 - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: python-version: ${{ env.PYTHON_VERSION }} cache: poetry @@ -97,10 +97,10 @@ jobs: name: Check types with mypy needs: changed-files steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Install Python tools uses: BrandonLWhite/pipx-install-action@v1.0.3 - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: python-version: ${{ env.PYTHON_VERSION }} cache: poetry @@ -120,10 +120,10 @@ jobs: name: Check docs needs: changed-files steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Install Python tools uses: BrandonLWhite/pipx-install-action@v1.0.3 - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: python-version: ${{ env.PYTHON_VERSION }} cache: poetry @@ -143,4 +143,4 @@ jobs: run: poe lint-docs - name: Build docs - run: poe docs -e 'SPHINXOPTS=--fail-on-warning --keep-going' + run: poe docs -- -e 'SPHINXOPTS=--fail-on-warning --keep-going' diff --git a/.github/workflows/make_release.yaml b/.github/workflows/make_release.yaml index b18dded8d..5a8abe5bb 100644 --- a/.github/workflows/make_release.yaml +++ b/.github/workflows/make_release.yaml @@ -17,10 +17,10 @@ jobs: name: Bump version, commit and create tag runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Install Python tools uses: BrandonLWhite/pipx-install-action@v1.0.3 - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: python-version: ${{ env.PYTHON_VERSION }} cache: poetry @@ -45,13 +45,13 @@ jobs: outputs: changelog: ${{ steps.generate_changelog.outputs.changelog }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ env.NEW_TAG }} - name: Install Python tools uses: BrandonLWhite/pipx-install-action@v1.0.3 - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: python-version: ${{ env.PYTHON_VERSION }} cache: poetry @@ -92,7 +92,7 @@ jobs: id-token: write steps: - name: Download all the dists - uses: 
actions/download-artifact@v4 + uses: actions/download-artifact@v5 with: name: python-package-distributions path: dist/ @@ -107,7 +107,7 @@ CHANGELOG: ${{ needs.build.outputs.changelog }} steps: - name: Download all the dists - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v5 with: name: python-package-distributions path: dist/ diff --git a/.gitignore b/.gitignore index 90ef7387d..138965b22 100644 --- a/.gitignore +++ b/.gitignore @@ -94,3 +94,6 @@ ENV/ # pyright pyrightconfig.json + +# Pyrefly +pyrefly.toml diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 92375b465..d19a376b3 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -180,8 +180,7 @@ Your First Contribution If this is your first time contributing to an open source project, welcome! If you are confused at all about how to contribute or what to contribute, take a look at `this great tutorial `__, or stop by our -`discussion board `__ if you have -any questions. +`discussion board`_ if you have any questions. We maintain a list of issues we reserved for those new to open source labeled `first timers only`_. Since the goal of these issues is to get users comfortable @@ -216,6 +215,15 @@ will ship in no time. Remember, code contributions have four parts: the code, the tests, the documentation, and the changelog entry. Thank you for contributing! +.. admonition:: Ownership + + If you are the owner of a plugin, please consider reviewing pull requests + that affect your plugin. If you are not the owner of a plugin, please + consider becoming one! You can do so by adding an entry to + ``.github/CODEOWNERS``. This way, you will automatically receive a review + request for pull requests that adjust the code that you own. If you have any + questions, please ask on our `discussion board`_. + The Code -------- @@ -238,25 +246,22 @@ There are a few coding conventions we use in beets: .. code-block:: python with g.lib.transaction() as tx: - rows = tx.query( - "SELECT DISTINCT '{0}' FROM '{1}' ORDER BY '{2}'".format( - field, model._table, sort_field - ) - ) + rows = tx.query(f"SELECT DISTINCT {field} FROM {model._table} ORDER BY {sort_field}") To fetch Item objects from the database, use lib.items(…) and supply a query as an argument. Resist the urge to write raw SQL for your query. If you must - use lower-level queries into the database, do this: + use lower-level queries into the database, do this, for example: .. code-block:: python with lib.transaction() as tx: - rows = tx.query("SELECT …") + rows = tx.query("SELECT path FROM items WHERE album_id = ?", (album_id,)) Transaction objects help control concurrent access to the database and assist in debugging conflicting accesses. -- ``str.format()`` should be used instead of the ``%`` operator +- f-strings should be used instead of the ``%`` operator and ``str.format()`` + calls. - Never ``print`` informational messages; use the `logging `__ module instead. In particular, we have our own logging shim, so you'll see ``from beets import @@ -264,7 +269,7 @@ There are a few coding conventions we use in beets: - The loggers use `str.format `__-style logging - instead of ``%``-style, so you can type ``log.debug("{0}", obj)`` to do your + instead of ``%``-style, so you can type ``log.debug("{}", obj)`` to do your formatting. - Exception handlers must use ``except A as B:`` instead of ``except A, B:``. @@ -281,31 +286,6 @@ according to the specifications required by the project.
Similarly, run ``poe format-docs`` and ``poe lint-docs`` to ensure consistent documentation formatting and check for any issues. -Handling Paths -~~~~~~~~~~~~~~ - -A great deal of convention deals with the handling of **paths**. Paths are -stored internally—in the database, for instance—as byte strings (i.e., ``bytes`` -instead of ``str`` in Python 3). This is because POSIX operating systems’ path -names are only reliably usable as byte strings—operating systems typically -recommend but do not require that filenames use a given encoding, so violations -of any reported encoding are inevitable. On Windows, the strings are always -encoded with UTF-8; on Unix, the encoding is controlled by the filesystem. Here -are some guidelines to follow: - -- If you have a Unicode path or you’re not sure whether something is Unicode or - not, pass it through ``bytestring_path`` function in the ``beets.util`` module - to convert it to bytes. -- Pass every path name through the ``syspath`` function (also in ``beets.util``) - before sending it to any *operating system* file operation (``open``, for - example). This is necessary to use long filenames (which, maddeningly, must be - Unicode) on Windows. This allows us to consistently store bytes in the - database but use the native encoding rule on both POSIX and Windows. -- Similarly, the ``displayable_path`` utility function converts bytestring paths - to a Unicode string for displaying to the user. Every time you want to print - out a string to the terminal or log it with the ``logging`` module, feed it - through this function. - Editor Settings ~~~~~~~~~~~~~~~ @@ -397,6 +377,8 @@ This way, the test will be run only in the integration test suite. .. _codecov: https://codecov.io/github/beetbox/beets +.. _discussion board: https://github.com/beetbox/beets/discussions + .. _documentation: https://beets.readthedocs.io/en/stable/ .. _https://github.com/beetbox/beets/blob/master/test/test_template.py#l224: https://github.com/beetbox/beets/blob/master/test/test_template.py#L224 diff --git a/README.rst b/README.rst index e8cec8ce9..3d5a84712 100644 --- a/README.rst +++ b/README.rst @@ -17,7 +17,7 @@ Beets is the media library management system for obsessive music geeks. The purpose of beets is to get your music collection right once and for all. It catalogs your collection, automatically improving its metadata as it goes. It -then provides a bouquet of tools for manipulating and accessing your music. +then provides a suite of tools for manipulating and accessing your music. Here's an example of beets' brainy tag corrector doing its thing: diff --git a/beets/__init__.py b/beets/__init__.py index 8be305202..d448d8c49 100644 --- a/beets/__init__.py +++ b/beets/__init__.py @@ -17,10 +17,25 @@ from sys import stderr import confuse -__version__ = "2.3.1" +from .util import deprecate_imports + +__version__ = "2.5.1" __author__ = "Adrian Sampson " +def __getattr__(name: str): + """Handle deprecated imports.""" + return deprecate_imports( + old_module=__name__, + new_module_by_name={ + "art": "beetsplug._utils", + "vfs": "beetsplug._utils", + }, + name=name, + version="3.0.0", + ) + + class IncludeLazyConfig(confuse.LazyConfig): """A version of Confuse's LazyConfig that also merges in data from YAML files specified in an `include` setting. 
@@ -35,7 +50,7 @@ class IncludeLazyConfig(confuse.LazyConfig): except confuse.NotFoundError: pass except confuse.ConfigReadError as err: - stderr.write("configuration `import` failed: {}".format(err.reason)) + stderr.write(f"configuration `import` failed: {err.reason}") config = IncludeLazyConfig("beets", __name__) diff --git a/beets/autotag/__init__.py b/beets/autotag/__init__.py index 4d107b3a1..319f7f522 100644 --- a/beets/autotag/__init__.py +++ b/beets/autotag/__init__.py @@ -261,7 +261,7 @@ def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]): continue for suffix in "year", "month", "day": - key = prefix + suffix + key = f"{prefix}{suffix}" value = getattr(album_info, key) or 0 # If we don't even have a year, apply nothing. diff --git a/beets/autotag/distance.py b/beets/autotag/distance.py index 39d16858f..37c6f84f4 100644 --- a/beets/autotag/distance.py +++ b/beets/autotag/distance.py @@ -78,10 +78,10 @@ def string_dist(str1: str | None, str2: str | None) -> float: # example, "the something" should be considered equal to # "something, the". for word in SD_END_WORDS: - if str1.endswith(", %s" % word): - str1 = "{} {}".format(word, str1[: -len(word) - 2]) - if str2.endswith(", %s" % word): - str2 = "{} {}".format(word, str2[: -len(word) - 2]) + if str1.endswith(f", {word}"): + str1 = f"{word} {str1[: -len(word) - 2]}" + if str2.endswith(f", {word}"): + str2 = f"{word} {str2[: -len(word) - 2]}" # Perform a couple of basic normalizing substitutions. for pat, repl in SD_REPLACE: @@ -230,7 +230,7 @@ class Distance: """Adds all the distance penalties from `dist`.""" if not isinstance(dist, Distance): raise ValueError( - "`dist` must be a Distance object, not {}".format(type(dist)) + f"`dist` must be a Distance object, not {type(dist)}" ) for key, penalties in dist._penalties.items(): self._penalties.setdefault(key, []).extend(penalties) @@ -345,6 +345,12 @@ class Distance: dist = string_dist(str1, str2) self.add(key, dist) + def add_data_source(self, before: str | None, after: str | None) -> None: + if before != after and ( + before or len(metadata_plugins.find_metadata_source_plugins()) > 1 + ): + self.add("data_source", metadata_plugins.get_penalty(after)) + @cache def get_track_length_grace() -> float: @@ -408,8 +414,7 @@ def track_distance( if track_info.medium and item.disc: dist.add_expr("medium", item.disc != track_info.medium) - # Plugins. - dist.update(metadata_plugins.track_distance(item, track_info)) + dist.add_data_source(item.get("data_source"), track_info.data_source) return dist @@ -444,7 +449,7 @@ def distance( # Preferred media options. media_patterns: Sequence[str] = preferred_config["media"].as_str_seq() options = [ - re.compile(r"(\d+x)?(%s)" % pat, re.I) for pat in media_patterns + re.compile(rf"(\d+x)?({pat})", re.I) for pat in media_patterns ] if options: dist.add_priority("media", album_info.media, options) @@ -525,7 +530,6 @@ def distance( for _ in range(len(items) - len(mapping)): dist.add("unmatched_tracks", 1.0) - # Plugins. 
- dist.update(metadata_plugins.album_distance(items, album_info, mapping)) + dist.add_data_source(likelies["data_source"], album_info.data_source) return dist diff --git a/beets/autotag/hooks.py b/beets/autotag/hooks.py index 7cd215fc4..b809609ea 100644 --- a/beets/autotag/hooks.py +++ b/beets/autotag/hooks.py @@ -16,236 +16,201 @@ from __future__ import annotations +from copy import deepcopy from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar -from beets import logging +from typing_extensions import Self if TYPE_CHECKING: from beets.library import Item from .distance import Distance -log = logging.getLogger("beets") - V = TypeVar("V") # Classes used to represent candidate options. class AttrDict(dict[str, V]): - """A dictionary that supports attribute ("dot") access, so `d.field` - is equivalent to `d['field']`. - """ + """Mapping enabling attribute-style access to stored metadata values.""" + + def copy(self) -> Self: + return deepcopy(self) def __getattr__(self, attr: str) -> V: if attr in self: return self[attr] - else: - raise AttributeError - def __setattr__(self, key: str, value: V): + raise AttributeError( + f"'{self.__class__.__name__}' object has no attribute '{attr}'" + ) + + def __setattr__(self, key: str, value: V) -> None: self.__setitem__(key, value) - def __hash__(self): + def __hash__(self) -> int: # type: ignore[override] return id(self) -class AlbumInfo(AttrDict[Any]): - """Describes a canonical release that may be used to match a release - in the library. Consists of these data members: +class Info(AttrDict[Any]): + """Container for metadata about a musical entity.""" - - ``album``: the release title - - ``album_id``: MusicBrainz ID; UUID fragment only - - ``artist``: name of the release's primary artist - - ``artist_id`` - - ``tracks``: list of TrackInfo objects making up the release + def __init__( + self, + album: str | None = None, + artist_credit: str | None = None, + artist_id: str | None = None, + artist: str | None = None, + artists_credit: list[str] | None = None, + artists_ids: list[str] | None = None, + artists: list[str] | None = None, + artist_sort: str | None = None, + artists_sort: list[str] | None = None, + data_source: str | None = None, + data_url: str | None = None, + genre: str | None = None, + media: str | None = None, + **kwargs, + ) -> None: + self.album = album + self.artist = artist + self.artist_credit = artist_credit + self.artist_id = artist_id + self.artists = artists or [] + self.artists_credit = artists_credit or [] + self.artists_ids = artists_ids or [] + self.artist_sort = artist_sort + self.artists_sort = artists_sort or [] + self.data_source = data_source + self.data_url = data_url + self.genre = genre + self.media = media + self.update(kwargs) - ``mediums`` along with the fields up through ``tracks`` are required. - The others are optional and may be None. + +class AlbumInfo(Info): + """Metadata snapshot representing a single album candidate. + + Aggregates track entries and album-wide context gathered from an external + provider. Used during matching to evaluate similarity against a group of + user items, and later to drive tagging decisions once selected. """ - # TYPING: are all of these correct? 
I've assumed optional strings def __init__( self, tracks: list[TrackInfo], - album: str | None = None, + *, album_id: str | None = None, - artist: str | None = None, - artist_id: str | None = None, - artists: list[str] | None = None, - artists_ids: list[str] | None = None, - asin: str | None = None, + albumdisambig: str | None = None, + albumstatus: str | None = None, albumtype: str | None = None, albumtypes: list[str] | None = None, + asin: str | None = None, + barcode: str | None = None, + catalognum: str | None = None, + country: str | None = None, + day: int | None = None, + discogs_albumid: str | None = None, + discogs_artistid: str | None = None, + discogs_labelid: str | None = None, + label: str | None = None, + language: str | None = None, + mediums: int | None = None, + month: int | None = None, + original_day: int | None = None, + original_month: int | None = None, + original_year: int | None = None, + release_group_title: str | None = None, + releasegroup_id: str | None = None, + releasegroupdisambig: str | None = None, + script: str | None = None, + style: str | None = None, va: bool = False, year: int | None = None, - month: int | None = None, - day: int | None = None, - label: str | None = None, - barcode: str | None = None, - mediums: int | None = None, - artist_sort: str | None = None, - artists_sort: list[str] | None = None, - releasegroup_id: str | None = None, - release_group_title: str | None = None, - catalognum: str | None = None, - script: str | None = None, - language: str | None = None, - country: str | None = None, - style: str | None = None, - genre: str | None = None, - albumstatus: str | None = None, - media: str | None = None, - albumdisambig: str | None = None, - releasegroupdisambig: str | None = None, - artist_credit: str | None = None, - artists_credit: list[str] | None = None, - original_year: int | None = None, - original_month: int | None = None, - original_day: int | None = None, - data_source: str | None = None, - data_url: str | None = None, - discogs_albumid: str | None = None, - discogs_labelid: str | None = None, - discogs_artistid: str | None = None, **kwargs, - ): - self.album = album - self.album_id = album_id - self.artist = artist - self.artist_id = artist_id - self.artists = artists or [] - self.artists_ids = artists_ids or [] + ) -> None: self.tracks = tracks - self.asin = asin + self.album_id = album_id + self.albumdisambig = albumdisambig + self.albumstatus = albumstatus self.albumtype = albumtype self.albumtypes = albumtypes or [] + self.asin = asin + self.barcode = barcode + self.catalognum = catalognum + self.country = country + self.day = day + self.discogs_albumid = discogs_albumid + self.discogs_artistid = discogs_artistid + self.discogs_labelid = discogs_labelid + self.label = label + self.language = language + self.mediums = mediums + self.month = month + self.original_day = original_day + self.original_month = original_month + self.original_year = original_year + self.release_group_title = release_group_title + self.releasegroup_id = releasegroup_id + self.releasegroupdisambig = releasegroupdisambig + self.script = script + self.style = style self.va = va self.year = year - self.month = month - self.day = day - self.label = label - self.barcode = barcode - self.mediums = mediums - self.artist_sort = artist_sort - self.artists_sort = artists_sort or [] - self.releasegroup_id = releasegroup_id - self.release_group_title = release_group_title - self.catalognum = catalognum - self.script = script - self.language = language - 
self.country = country - self.style = style - self.genre = genre - self.albumstatus = albumstatus - self.media = media - self.albumdisambig = albumdisambig - self.releasegroupdisambig = releasegroupdisambig - self.artist_credit = artist_credit - self.artists_credit = artists_credit or [] - self.original_year = original_year - self.original_month = original_month - self.original_day = original_day - self.data_source = data_source - self.data_url = data_url - self.discogs_albumid = discogs_albumid - self.discogs_labelid = discogs_labelid - self.discogs_artistid = discogs_artistid - self.update(kwargs) - - def copy(self) -> AlbumInfo: - dupe = AlbumInfo([]) - dupe.update(self) - dupe.tracks = [track.copy() for track in self.tracks] - return dupe + super().__init__(**kwargs) -class TrackInfo(AttrDict[Any]): - """Describes a canonical track present on a release. Appears as part - of an AlbumInfo's ``tracks`` list. Consists of these data members: +class TrackInfo(Info): + """Metadata snapshot for a single track candidate. - - ``title``: name of the track - - ``track_id``: MusicBrainz ID; UUID fragment only - - Only ``title`` and ``track_id`` are required. The rest of the fields - may be None. The indices ``index``, ``medium``, and ``medium_index`` - are all 1-based. + Captures identifying details and creative credits used to compare against + a user's item. Instances often originate within an AlbumInfo but may also + stand alone for singleton matching. """ - # TYPING: are all of these correct? I've assumed optional strings def __init__( self, - title: str | None = None, - track_id: str | None = None, - release_track_id: str | None = None, - artist: str | None = None, - artist_id: str | None = None, - artists: list[str] | None = None, - artists_ids: list[str] | None = None, - length: float | None = None, + *, + arranger: str | None = None, + bpm: str | None = None, + composer: str | None = None, + composer_sort: str | None = None, + disctitle: str | None = None, index: int | None = None, + initial_key: str | None = None, + length: float | None = None, + lyricist: str | None = None, + mb_workid: str | None = None, medium: int | None = None, medium_index: int | None = None, medium_total: int | None = None, - artist_sort: str | None = None, - artists_sort: list[str] | None = None, - disctitle: str | None = None, - artist_credit: str | None = None, - artists_credit: list[str] | None = None, - data_source: str | None = None, - data_url: str | None = None, - media: str | None = None, - lyricist: str | None = None, - composer: str | None = None, - composer_sort: str | None = None, - arranger: str | None = None, + release_track_id: str | None = None, + title: str | None = None, track_alt: str | None = None, + track_id: str | None = None, work: str | None = None, - mb_workid: str | None = None, work_disambig: str | None = None, - bpm: str | None = None, - initial_key: str | None = None, - genre: str | None = None, - album: str | None = None, **kwargs, - ): - self.title = title - self.track_id = track_id - self.release_track_id = release_track_id - self.artist = artist - self.artist_id = artist_id - self.artists = artists or [] - self.artists_ids = artists_ids or [] - self.length = length + ) -> None: + self.arranger = arranger + self.bpm = bpm + self.composer = composer + self.composer_sort = composer_sort + self.disctitle = disctitle self.index = index - self.media = media + self.initial_key = initial_key + self.length = length + self.lyricist = lyricist + self.mb_workid = mb_workid self.medium = medium 
self.medium_index = medium_index self.medium_total = medium_total - self.artist_sort = artist_sort - self.artists_sort = artists_sort or [] - self.disctitle = disctitle - self.artist_credit = artist_credit - self.artists_credit = artists_credit or [] - self.data_source = data_source - self.data_url = data_url - self.lyricist = lyricist - self.composer = composer - self.composer_sort = composer_sort - self.arranger = arranger + self.release_track_id = release_track_id + self.title = title self.track_alt = track_alt + self.track_id = track_id self.work = work - self.mb_workid = mb_workid self.work_disambig = work_disambig - self.bpm = bpm - self.initial_key = initial_key - self.genre = genre - self.album = album - self.update(kwargs) - - def copy(self) -> TrackInfo: - dupe = TrackInfo() - dupe.update(self) - return dupe + super().__init__(**kwargs) # Structures that compose all the information for a candidate match. diff --git a/beets/autotag/match.py b/beets/autotag/match.py index e74d21755..8fec844a6 100644 --- a/beets/autotag/match.py +++ b/beets/autotag/match.py @@ -118,7 +118,7 @@ def match_by_id(items: Iterable[Item]) -> AlbumInfo | None: log.debug("No album ID consensus.") return None # If all album IDs are equal, look up the album. - log.debug("Searching for discovered album ID: {0}", first) + log.debug("Searching for discovered album ID: {}", first) return metadata_plugins.album_for_id(first) @@ -197,9 +197,7 @@ def _add_candidate( checking the track count, ordering the items, checking for duplicates, and calculating the distance. """ - log.debug( - "Candidate: {0} - {1} ({2})", info.artist, info.album, info.album_id - ) + log.debug("Candidate: {0.artist} - {0.album} ({0.album_id})", info) # Discard albums with zero tracks. if not info.tracks: @@ -215,7 +213,7 @@ def _add_candidate( required_tags: Sequence[str] = config["match"]["required"].as_str_seq() for req_tag in required_tags: if getattr(info, req_tag) is None: - log.debug("Ignored. Missing required tag: {0}", req_tag) + log.debug("Ignored. Missing required tag: {}", req_tag) return # Find mapping between the items and the track info. @@ -229,10 +227,10 @@ def _add_candidate( ignored_tags: Sequence[str] = config["match"]["ignored"].as_str_seq() for penalty in ignored_tags: if penalty in penalties: - log.debug("Ignored. Penalty: {0}", penalty) + log.debug("Ignored. Penalty: {}", penalty) return - log.debug("Success. Distance: {0}", dist) + log.debug("Success. Distance: {}", dist) results[info.album_id] = hooks.AlbumMatch( dist, info, mapping, extra_items, extra_tracks ) @@ -265,7 +263,7 @@ def tag_album( likelies, consensus = get_most_common_tags(items) cur_artist: str = likelies["artist"] cur_album: str = likelies["album"] - log.debug("Tagging {0} - {1}", cur_artist, cur_album) + log.debug("Tagging {} - {}", cur_artist, cur_album) # The output result, keys are the MB album ID. candidates: dict[Any, AlbumMatch] = {} @@ -273,7 +271,7 @@ def tag_album( # Search by explicit ID. 
if search_ids: for search_id in search_ids: - log.debug("Searching for album ID: {0}", search_id) + log.debug("Searching for album ID: {}", search_id) if info := metadata_plugins.album_for_id(search_id): _add_candidate(items, candidates, info) @@ -283,7 +281,7 @@ def tag_album( if info := match_by_id(items): _add_candidate(items, candidates, info) rec = _recommendation(list(candidates.values())) - log.debug("Album ID match recommendation is {0}", rec) + log.debug("Album ID match recommendation is {}", rec) if candidates and not config["import"]["timid"]: # If we have a very good MBID match, return immediately. # Otherwise, this match will compete against metadata-based @@ -300,7 +298,7 @@ def tag_album( if not (search_artist and search_album): # No explicit search terms -- use current metadata. search_artist, search_album = cur_artist, cur_album - log.debug("Search terms: {0} - {1}", search_artist, search_album) + log.debug("Search terms: {} - {}", search_artist, search_album) # Is this album likely to be a "various artist" release? va_likely = ( @@ -308,7 +306,7 @@ def tag_album( or (search_artist.lower() in VA_ARTISTS) or any(item.comp for item in items) ) - log.debug("Album might be VA: {0}", va_likely) + log.debug("Album might be VA: {}", va_likely) # Get the results from the data sources. for matched_candidate in metadata_plugins.candidates( @@ -316,7 +314,7 @@ def tag_album( ): _add_candidate(items, candidates, matched_candidate) - log.debug("Evaluating {0} candidates.", len(candidates)) + log.debug("Evaluating {} candidates.", len(candidates)) # Sort and get the recommendation. candidates_sorted = _sort_candidates(candidates.values()) rec = _recommendation(candidates_sorted) @@ -345,7 +343,7 @@ def tag_item( trackids = search_ids or [t for t in [item.mb_trackid] if t] if trackids: for trackid in trackids: - log.debug("Searching for track ID: {0}", trackid) + log.debug("Searching for track ID: {}", trackid) if info := metadata_plugins.track_for_id(trackid): dist = track_distance(item, info, incl_artist=True) candidates[info.track_id] = hooks.TrackMatch(dist, info) @@ -369,7 +367,7 @@ def tag_item( # Search terms. search_artist = search_artist or item.artist search_title = search_title or item.title - log.debug("Item search terms: {0} - {1}", search_artist, search_title) + log.debug("Item search terms: {} - {}", search_artist, search_title) # Get and evaluate candidate metadata. for track_info in metadata_plugins.item_candidates( @@ -379,7 +377,7 @@ def tag_item( candidates[track_info.track_id] = hooks.TrackMatch(dist, track_info) # Sort by distance and return with recommendation. 
- log.debug("Found {0} candidates.", len(candidates)) + log.debug("Found {} candidates.", len(candidates)) candidates_sorted = _sort_candidates(candidates.values()) rec = _recommendation(candidates_sorted) return Proposal(candidates_sorted, rec) diff --git a/beets/config_default.yaml b/beets/config_default.yaml index d1329f494..c0bab8056 100644 --- a/beets/config_default.yaml +++ b/beets/config_default.yaml @@ -127,19 +127,12 @@ ui: action_default: ['bold', 'cyan'] action: ['bold', 'cyan'] # New Colors - text: ['normal'] text_faint: ['faint'] import_path: ['bold', 'blue'] import_path_items: ['bold', 'blue'] - added: ['green'] - removed: ['red'] changed: ['yellow'] - added_highlight: ['bold', 'green'] - removed_highlight: ['bold', 'red'] - changed_highlight: ['bold', 'yellow'] - text_diff_added: ['bold', 'red'] + text_diff_added: ['bold', 'green'] text_diff_removed: ['bold', 'red'] - text_diff_changed: ['bold', 'red'] action_description: ['white'] import: indentation: @@ -173,7 +166,7 @@ match: missing_tracks: medium unmatched_tracks: medium distance_weights: - source: 2.0 + data_source: 2.0 artist: 3.0 album: 3.0 media: 1.0 diff --git a/beets/dbcore/db.py b/beets/dbcore/db.py index 81c1be4b9..afae6e906 100755 --- a/beets/dbcore/db.py +++ b/beets/dbcore/db.py @@ -17,15 +17,17 @@ from __future__ import annotations import contextlib +import functools import os import re import sqlite3 +import sys import threading import time from abc import ABC from collections import defaultdict from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence -from sqlite3 import Connection +from sqlite3 import Connection, sqlite_version_info from typing import TYPE_CHECKING, Any, AnyStr, Callable, Generic from typing_extensions import TypeVar # default value support @@ -64,6 +66,16 @@ class DBAccessError(Exception): """ +class DBCustomFunctionError(Exception): + """A sqlite function registered by beets failed.""" + + def __init__(self): + super().__init__( + "beets defined SQLite function failed; " + "see the other errors above for details" + ) + + class FormattedMapping(Mapping[str, str]): """A `dict`-like formatted view of a model. @@ -390,9 +402,9 @@ class Model(ABC, Generic[D]): return obj def __repr__(self) -> str: - return "{}({})".format( - type(self).__name__, - ", ".join(f"{k}={v!r}" for k, v in dict(self).items()), + return ( + f"{type(self).__name__}" + f"({', '.join(f'{k}={v!r}' for k, v in dict(self).items())})" ) def clear_dirty(self): @@ -409,9 +421,9 @@ class Model(ABC, Generic[D]): exception is raised otherwise. """ if not self._db: - raise ValueError("{} has no database".format(type(self).__name__)) + raise ValueError(f"{type(self).__name__} has no database") if need_id and not self.id: - raise ValueError("{} has no id".format(type(self).__name__)) + raise ValueError(f"{type(self).__name__} has no id") return self._db @@ -588,16 +600,14 @@ class Model(ABC, Generic[D]): for key in fields: if key != "id" and key in self._dirty: self._dirty.remove(key) - assignments.append(key + "=?") + assignments.append(f"{key}=?") value = self._type(key).to_sql(self[key]) subvars.append(value) with db.transaction() as tx: # Main table update. if assignments: - query = "UPDATE {} SET {} WHERE id=?".format( - self._table, ",".join(assignments) - ) + query = f"UPDATE {self._table} SET {','.join(assignments)} WHERE id=?" 
subvars.append(self.id) tx.mutate(query, subvars) @@ -607,9 +617,9 @@ class Model(ABC, Generic[D]): self._dirty.remove(key) value = self._type(key).to_sql(value) tx.mutate( - "INSERT INTO {} " + f"INSERT INTO {self._flex_table} " "(entity_id, key, value) " - "VALUES (?, ?, ?);".format(self._flex_table), + "VALUES (?, ?, ?);", (self.id, key, value), ) @@ -930,10 +940,10 @@ class Transaction: def __exit__( self, - exc_type: type[Exception], - exc_value: Exception, - traceback: TracebackType, - ): + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> bool | None: """Complete a transaction. This must be the most recently entered but not yet exited transaction. If it is the last active transaction, the database updates are committed. @@ -949,6 +959,14 @@ class Transaction: self._mutated = False self.db._db_lock.release() + if ( + isinstance(exc_value, sqlite3.OperationalError) + and exc_value.args[0] == "user-defined function raised exception" + ): + raise DBCustomFunctionError() + + return None + def query( self, statement: str, subvals: Sequence[SQLiteType] = () ) -> list[sqlite3.Row]: @@ -1009,6 +1027,13 @@ class Database: "sqlite3 must be compiled with multi-threading support" ) + # Print tracebacks for exceptions in user defined functions + # See also `self.add_functions` and `DBCustomFunctionError`. + # + # `if`: use feature detection because PyPy doesn't support this. + if hasattr(sqlite3, "enable_callback_tracebacks"): + sqlite3.enable_callback_tracebacks(True) + self.path = path self.timeout = timeout @@ -1104,9 +1129,16 @@ class Database: return bytestring - conn.create_function("regexp", 2, regexp) - conn.create_function("unidecode", 1, unidecode) - conn.create_function("bytelower", 1, bytelower) + create_function = conn.create_function + if sys.version_info >= (3, 8) and sqlite_version_info >= (3, 8, 3): + # Let sqlite make extra optimizations + create_function = functools.partial( + conn.create_function, deterministic=True + ) + + create_function("regexp", 2, regexp) + create_function("unidecode", 1, unidecode) + create_function("bytelower", 1, bytelower) def _close(self): """Close the all connections to the underlying SQLite database @@ -1160,7 +1192,7 @@ class Database: """ # Get current schema. with self.transaction() as tx: - rows = tx.query("PRAGMA table_info(%s)" % table) + rows = tx.query(f"PRAGMA table_info({table})") current_fields = {row[1] for row in rows} field_names = set(fields.keys()) @@ -1173,9 +1205,7 @@ class Database: columns = [] for name, typ in fields.items(): columns.append(f"{name} {typ.sql}") - setup_sql = "CREATE TABLE {} ({});\n".format( - table, ", ".join(columns) - ) + setup_sql = f"CREATE TABLE {table} ({', '.join(columns)});\n" else: # Table exists does not match the field set. @@ -1183,8 +1213,8 @@ class Database: for name, typ in fields.items(): if name in current_fields: continue - setup_sql += "ALTER TABLE {} ADD COLUMN {} {};\n".format( - table, name, typ.sql + setup_sql += ( + f"ALTER TABLE {table} ADD COLUMN {name} {typ.sql};\n" ) with self.transaction() as tx: @@ -1195,18 +1225,16 @@ class Database: for the given entity (if they don't exist). 
""" with self.transaction() as tx: - tx.script( - """ - CREATE TABLE IF NOT EXISTS {0} ( + tx.script(f""" + CREATE TABLE IF NOT EXISTS {flex_table} ( id INTEGER PRIMARY KEY, entity_id INTEGER, key TEXT, value TEXT, UNIQUE(entity_id, key) ON CONFLICT REPLACE); - CREATE INDEX IF NOT EXISTS {0}_by_entity - ON {0} (entity_id); - """.format(flex_table) - ) + CREATE INDEX IF NOT EXISTS {flex_table}_by_entity + ON {flex_table} (entity_id); + """) # Querying. diff --git a/beets/dbcore/query.py b/beets/dbcore/query.py index 49d7f6428..dfeb42707 100644 --- a/beets/dbcore/query.py +++ b/beets/dbcore/query.py @@ -190,7 +190,7 @@ class MatchQuery(FieldQuery[AnySQLiteType]): """A query that looks for exact matches in an Model field.""" def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: - return self.field + " = ?", [self.pattern] + return f"{self.field} = ?", [self.pattern] @classmethod def value_match(cls, pattern: AnySQLiteType, value: Any) -> bool: @@ -204,7 +204,7 @@ class NoneQuery(FieldQuery[None]): super().__init__(field, None, fast) def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: - return self.field + " IS NULL", () + return f"{self.field} IS NULL", () def match(self, obj: Model) -> bool: return obj.get(self.field_name) is None @@ -246,7 +246,7 @@ class StringQuery(StringFieldQuery[str]): .replace("%", "\\%") .replace("_", "\\_") ) - clause = self.field + " like ? escape '\\'" + clause = f"{self.field} like ? escape '\\'" subvals = [search] return clause, subvals @@ -264,8 +264,8 @@ class SubstringQuery(StringFieldQuery[str]): .replace("%", "\\%") .replace("_", "\\_") ) - search = "%" + pattern + "%" - clause = self.field + " like ? escape '\\'" + search = f"%{pattern}%" + clause = f"{self.field} like ? escape '\\'" subvals = [search] return clause, subvals @@ -471,11 +471,11 @@ class NumericQuery(FieldQuery[str]): def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: if self.point is not None: - return self.field + "=?", (self.point,) + return f"{self.field}=?", (self.point,) else: if self.rangemin is not None and self.rangemax is not None: return ( - "{0} >= ? AND {0} <= ?".format(self.field), + f"{self.field} >= ? AND {self.field} <= ?", (self.rangemin, self.rangemax), ) elif self.rangemin is not None: @@ -549,9 +549,9 @@ class CollectionQuery(Query): if not subq_clause: # Fall back to slow query. return None, () - clause_parts.append("(" + subq_clause + ")") + clause_parts.append(f"({subq_clause})") subvals += subq_subvals - clause = (" " + joiner + " ").join(clause_parts) + clause = f" {joiner} ".join(clause_parts) return clause, subvals def __repr__(self) -> str: @@ -690,9 +690,7 @@ class Period: ("%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"), # second ) relative_units = {"y": 365, "m": 30, "w": 7, "d": 1} - relative_re = ( - "(?P[+|-]?)(?P[0-9]+)" + "(?P[y|m|w|d])" - ) + relative_re = "(?P[+|-]?)(?P[0-9]+)(?P[y|m|w|d])" def __init__(self, date: datetime, precision: str): """Create a period with the given date (a `datetime` object) and @@ -800,9 +798,7 @@ class DateInterval: def __init__(self, start: datetime | None, end: datetime | None): if start is not None and end is not None and not start < end: - raise ValueError( - "start date {} is not before end date {}".format(start, end) - ) + raise ValueError(f"start date {start} is not before end date {end}") self.start = start self.end = end @@ -850,8 +846,6 @@ class DateQuery(FieldQuery[str]): date = datetime.fromtimestamp(timestamp) return self.interval.contains(date) - _clause_tmpl = "{0} {1} ?" 
- def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: clause_parts = [] subvals = [] @@ -859,11 +853,11 @@ class DateQuery(FieldQuery[str]): # Convert the `datetime` objects to an integer number of seconds since # the (local) Unix epoch using `datetime.timestamp()`. if self.interval.start: - clause_parts.append(self._clause_tmpl.format(self.field, ">=")) + clause_parts.append(f"{self.field} >= ?") subvals.append(int(self.interval.start.timestamp())) if self.interval.end: - clause_parts.append(self._clause_tmpl.format(self.field, "<")) + clause_parts.append(f"{self.field} < ?") subvals.append(int(self.interval.end.timestamp())) if clause_parts: @@ -1074,9 +1068,9 @@ class FixedFieldSort(FieldSort): if self.case_insensitive: field = ( "(CASE " - "WHEN TYPEOF({0})='text' THEN LOWER({0}) " - "WHEN TYPEOF({0})='blob' THEN LOWER({0}) " - "ELSE {0} END)".format(self.field) + f"WHEN TYPEOF({self.field})='text' THEN LOWER({self.field}) " + f"WHEN TYPEOF({self.field})='blob' THEN LOWER({self.field}) " + f"ELSE {self.field} END)" ) else: field = self.field diff --git a/beets/dbcore/types.py b/beets/dbcore/types.py index 1b8434a0b..3b4badd33 100644 --- a/beets/dbcore/types.py +++ b/beets/dbcore/types.py @@ -194,7 +194,7 @@ class BasePaddedInt(BaseInteger[N]): self.digits = digits def format(self, value: int | N) -> str: - return "{0:0{1}d}".format(value or 0, self.digits) + return f"{value or 0:0{self.digits}d}" class PaddedInt(BasePaddedInt[int]): @@ -219,7 +219,7 @@ class ScaledInt(Integer): self.suffix = suffix def format(self, value: int) -> str: - return "{}{}".format((value or 0) // self.unit, self.suffix) + return f"{(value or 0) // self.unit}{self.suffix}" class Id(NullInteger): @@ -249,7 +249,7 @@ class BaseFloat(Type[float, N]): self.digits = digits def format(self, value: float | N) -> str: - return "{0:.{1}f}".format(value or 0, self.digits) + return f"{value or 0:.{self.digits}f}" class Float(BaseFloat[float]): diff --git a/beets/importer/session.py b/beets/importer/session.py index e45644fa3..46277837e 100644 --- a/beets/importer/session.py +++ b/beets/importer/session.py @@ -150,7 +150,7 @@ class ImportSession: """Log a message about a given album to the importer log. The status should reflect the reason the album couldn't be tagged. """ - self.logger.info("{0} {1}", status, displayable_path(paths)) + self.logger.info("{} {}", status, displayable_path(paths)) def log_choice(self, task: ImportTask, duplicate=False): """Logs the task's current choice if it should be logged. If @@ -187,7 +187,7 @@ class ImportSession: def run(self): """Run the import task.""" - self.logger.info("import started {0}", time.asctime()) + self.logger.info("import started {}", time.asctime()) self.set_config(config["import"]) # Set up the pipeline. @@ -297,7 +297,7 @@ class ImportSession: # Either accept immediately or prompt for input to decide. 
if self.want_resume is True or self.should_resume(toppath): log.warning( - "Resuming interrupted import of {0}", + "Resuming interrupted import of {}", util.displayable_path(toppath), ) self._is_resuming[toppath] = True diff --git a/beets/importer/stages.py b/beets/importer/stages.py index 24ff815f3..d99b742a2 100644 --- a/beets/importer/stages.py +++ b/beets/importer/stages.py @@ -58,11 +58,11 @@ def read_tasks(session: ImportSession): skipped += task_factory.skipped if not task_factory.imported: - log.warning("No files imported from {0}", displayable_path(toppath)) + log.warning("No files imported from {}", displayable_path(toppath)) # Show skipped directories (due to incremental/resume). if skipped: - log.info("Skipped {0} paths.", skipped) + log.info("Skipped {} paths.", skipped) def query_tasks(session: ImportSession): @@ -82,10 +82,7 @@ def query_tasks(session: ImportSession): # Search for albums. for album in session.lib.albums(session.query): log.debug( - "yielding album {0}: {1} - {2}", - album.id, - album.albumartist, - album.album, + "yielding album {0.id}: {0.albumartist} - {0.album}", album ) items = list(album.items()) _freshen_items(items) @@ -140,7 +137,7 @@ def lookup_candidates(session: ImportSession, task: ImportTask): return plugins.send("import_task_start", session=session, task=task) - log.debug("Looking up: {0}", displayable_path(task.paths)) + log.debug("Looking up: {}", displayable_path(task.paths)) # Restrict the initial lookup to IDs specified by the user via the -m # option. Currently all the IDs are passed onto the tasks directly. @@ -259,11 +256,11 @@ def plugin_stage( def log_files(session: ImportSession, task: ImportTask): """A coroutine (pipeline stage) to log each file to be imported.""" if isinstance(task, SingletonImportTask): - log.info("Singleton: {0}", displayable_path(task.item["path"])) + log.info("Singleton: {}", displayable_path(task.item["path"])) elif task.items: - log.info("Album: {0}", displayable_path(task.paths[0])) + log.info("Album: {}", displayable_path(task.paths[0])) for item in task.items: - log.info(" {0}", displayable_path(item["path"])) + log.info(" {}", displayable_path(item["path"])) # --------------------------------- Consumer --------------------------------- # @@ -341,9 +338,7 @@ def _resolve_duplicates(session: ImportSession, task: ImportTask): if task.choice_flag in (Action.ASIS, Action.APPLY, Action.RETAG): found_duplicates = task.find_duplicates(session.lib) if found_duplicates: - log.debug( - "found duplicates: {}".format([o.id for o in found_duplicates]) - ) + log.debug("found duplicates: {}", [o.id for o in found_duplicates]) # Get the default action to follow from config. duplicate_action = config["import"]["duplicate_action"].as_choice( @@ -355,7 +350,7 @@ def _resolve_duplicates(session: ImportSession, task: ImportTask): "ask": "a", } ) - log.debug("default action for duplicates: {0}", duplicate_action) + log.debug("default action for duplicates: {}", duplicate_action) if duplicate_action == "s": # Skip new. diff --git a/beets/importer/state.py b/beets/importer/state.py index fccb7c282..fde26c606 100644 --- a/beets/importer/state.py +++ b/beets/importer/state.py @@ -87,7 +87,7 @@ class ImportState: # unpickling, including ImportError. We use a catch-all # exception to avoid enumerating them all (the docs don't even have a # full list!). 
- log.debug("state file could not be read: {0}", exc) + log.debug("state file could not be read: {}", exc) def _save(self): try: @@ -100,7 +100,7 @@ class ImportState: f, ) except OSError as exc: - log.error("state file could not be written: {0}", exc) + log.error("state file could not be written: {}", exc) # -------------------------------- Tagprogress ------------------------------- # diff --git a/beets/importer/tasks.py b/beets/importer/tasks.py index abe2ca8a9..710f4da50 100644 --- a/beets/importer/tasks.py +++ b/beets/importer/tasks.py @@ -51,15 +51,16 @@ SINGLE_ARTIST_THRESH = 0.25 # def extend_reimport_fresh_fields_item(): # importer.REIMPORT_FRESH_FIELDS_ITEM.extend(['tidal_track_popularity'] # ) -REIMPORT_FRESH_FIELDS_ALBUM = [ +REIMPORT_FRESH_FIELDS_ITEM = [ "data_source", "bandcamp_album_id", "spotify_album_id", "deezer_album_id", "beatport_album_id", "tidal_album_id", + "data_url", ] -REIMPORT_FRESH_FIELDS_ITEM = list(REIMPORT_FRESH_FIELDS_ALBUM) +REIMPORT_FRESH_FIELDS_ALBUM = [*REIMPORT_FRESH_FIELDS_ITEM, "media"] # Global logger. log = logging.getLogger("beets") @@ -267,13 +268,11 @@ class ImportTask(BaseImportTask): def remove_duplicates(self, lib: library.Library): duplicate_items = self.duplicate_items(lib) - log.debug("removing {0} old duplicated items", len(duplicate_items)) + log.debug("removing {} old duplicated items", len(duplicate_items)) for item in duplicate_items: item.remove() if lib.directory in util.ancestry(item.path): - log.debug( - "deleting duplicate {0}", util.displayable_path(item.path) - ) + log.debug("deleting duplicate {.filepath}", item) util.remove(item.path) util.prune_dirs(os.path.dirname(item.path), lib.directory) @@ -285,10 +284,10 @@ class ImportTask(BaseImportTask): for field, view in config["import"]["set_fields"].items(): value = str(view.get()) log.debug( - "Set field {1}={2} for {0}", - util.displayable_path(self.paths), + "Set field {}={} for {}", field, value, + util.displayable_path(self.paths), ) self.album.set_parse(field, format(self.album, value)) for item in items: @@ -554,12 +553,11 @@ class ImportTask(BaseImportTask): ] if overwritten_fields: log.debug( - "Reimported {} {}. Not preserving flexible attributes {}. " - "Path: {}", + "Reimported {0} {1.id}. Not preserving flexible attributes {2}. " + "Path: {1.filepath}", noun, - new_obj.id, + new_obj, overwritten_fields, - util.displayable_path(new_obj.path), ) for key in overwritten_fields: del existing_fields[key] @@ -578,17 +576,15 @@ class ImportTask(BaseImportTask): self.album.artpath = replaced_album.artpath self.album.store() log.debug( - "Reimported album {}. Preserving attribute ['added']. " - "Path: {}", - self.album.id, - util.displayable_path(self.album.path), + "Reimported album {0.album.id}. Preserving attribute ['added']. " + "Path: {0.album.filepath}", + self, ) log.debug( - "Reimported album {}. Preserving flexible attributes {}. " - "Path: {}", - self.album.id, + "Reimported album {0.album.id}. Preserving flexible" + " attributes {1}. Path: {0.album.filepath}", + self, list(album_fields.keys()), - util.displayable_path(self.album.path), ) for item in self.imported_items(): @@ -597,21 +593,19 @@ class ImportTask(BaseImportTask): if dup_item.added and dup_item.added != item.added: item.added = dup_item.added log.debug( - "Reimported item {}. Preserving attribute ['added']. " - "Path: {}", - item.id, - util.displayable_path(item.path), + "Reimported item {0.id}. Preserving attribute ['added']. 
" + "Path: {0.filepath}", + item, ) item_fields = _reduce_and_log( item, dup_item._values_flex, REIMPORT_FRESH_FIELDS_ITEM ) item.update(item_fields) log.debug( - "Reimported item {}. Preserving flexible attributes {}. " - "Path: {}", - item.id, + "Reimported item {0.id}. Preserving flexible attributes {1}. " + "Path: {0.filepath}", + item, list(item_fields.keys()), - util.displayable_path(item.path), ) item.store() @@ -621,14 +615,10 @@ class ImportTask(BaseImportTask): """ for item in self.imported_items(): for dup_item in self.replaced_items[item]: - log.debug( - "Replacing item {0}: {1}", - dup_item.id, - util.displayable_path(item.path), - ) + log.debug("Replacing item {.id}: {.filepath}", dup_item, item) dup_item.remove() log.debug( - "{0} of {1} items replaced", + "{} of {} items replaced", sum(bool(v) for v in self.replaced_items.values()), len(self.imported_items()), ) @@ -747,10 +737,10 @@ class SingletonImportTask(ImportTask): for field, view in config["import"]["set_fields"].items(): value = str(view.get()) log.debug( - "Set field {1}={2} for {0}", - util.displayable_path(self.paths), + "Set field {}={} for {}", field, value, + util.displayable_path(self.paths), ) self.item.set_parse(field, format(self.item, value)) self.item.store() @@ -870,7 +860,7 @@ class ArchiveImportTask(SentinelImportTask): """Removes the temporary directory the archive was extracted to.""" if self.extracted and self.toppath: log.debug( - "Removing extracted directory: {0}", + "Removing extracted directory: {}", util.displayable_path(self.toppath), ) shutil.rmtree(util.syspath(self.toppath)) @@ -1002,7 +992,7 @@ class ImportTaskFactory: """Return a `SingletonImportTask` for the music file.""" if self.session.already_imported(self.toppath, [path]): log.debug( - "Skipping previously-imported path: {0}", + "Skipping previously-imported path: {}", util.displayable_path(path), ) self.skipped += 1 @@ -1026,7 +1016,7 @@ class ImportTaskFactory: if self.session.already_imported(self.toppath, dirs): log.debug( - "Skipping previously-imported path: {0}", + "Skipping previously-imported path: {}", util.displayable_path(dirs), ) self.skipped += 1 @@ -1063,19 +1053,17 @@ class ImportTaskFactory: ) return - log.debug( - "Extracting archive: {0}", util.displayable_path(self.toppath) - ) + log.debug("Extracting archive: {}", util.displayable_path(self.toppath)) archive_task = ArchiveImportTask(self.toppath) try: archive_task.extract() except Exception as exc: - log.error("extraction failed: {0}", exc) + log.error("extraction failed: {}", exc) return # Now read albums from the extracted directory. self.toppath = archive_task.toppath - log.debug("Archive extracted to: {0}", self.toppath) + log.debug("Archive extracted to: {.toppath}", self) return archive_task def read_item(self, path: util.PathBytes): @@ -1091,10 +1079,10 @@ class ImportTaskFactory: # Silently ignore non-music files. pass elif isinstance(exc.reason, mediafile.UnreadableFileError): - log.warning("unreadable file: {0}", util.displayable_path(path)) + log.warning("unreadable file: {}", util.displayable_path(path)) else: log.error( - "error reading {0}: {1}", util.displayable_path(path), exc + "error reading {}: {}", util.displayable_path(path), exc ) diff --git a/beets/library/exceptions.py b/beets/library/exceptions.py index 7f117a2fe..0dc874c2a 100644 --- a/beets/library/exceptions.py +++ b/beets/library/exceptions.py @@ -28,11 +28,11 @@ class ReadError(FileOperationError): """An error while reading a file (i.e. 
in `Item.read`).""" def __str__(self): - return "error reading " + str(super()) + return f"error reading {super()}" class WriteError(FileOperationError): """An error while writing a file (i.e. in `Item.write`).""" def __str__(self): - return "error writing " + str(super()) + return f"error writing {super()}" diff --git a/beets/library/models.py b/beets/library/models.py index 7501513a1..cbee2a411 100644 --- a/beets/library/models.py +++ b/beets/library/models.py @@ -425,7 +425,7 @@ class Album(LibModel): new_art = util.unique_path(new_art) log.debug( - "moving album art {0} to {1}", + "moving album art {} to {}", util.displayable_path(old_art), util.displayable_path(new_art), ) @@ -482,7 +482,7 @@ class Album(LibModel): """ item = self.items().get() if not item: - raise ValueError("empty album for album id %d" % self.id) + raise ValueError(f"empty album for album id {self.id}") return os.path.dirname(item.path) def _albumtotal(self): @@ -844,12 +844,9 @@ class Item(LibModel): # This must not use `with_album=True`, because that might access # the database. When debugging, that is not guaranteed to succeed, and # can even deadlock due to the database lock. - return "{}({})".format( - type(self).__name__, - ", ".join( - "{}={!r}".format(k, self[k]) - for k in self.keys(with_album=False) - ), + return ( + f"{type(self).__name__}" + f"({', '.join(f'{k}={self[k]!r}' for k in self.keys(with_album=False))})" ) def keys(self, computed=False, with_album=True): @@ -995,7 +992,7 @@ class Item(LibModel): self.write(*args, **kwargs) return True except FileOperationError as exc: - log.error("{0}", exc) + log.error("{}", exc) return False def try_sync(self, write, move, with_album=True): @@ -1015,10 +1012,7 @@ class Item(LibModel): if move: # Check whether this file is inside the library directory. if self._db and self._db.directory in util.ancestry(self.path): - log.debug( - "moving {0} to synchronize path", - util.displayable_path(self.path), - ) + log.debug("moving {.filepath} to synchronize path", self) self.move(with_album=with_album) self.store() @@ -1090,7 +1084,7 @@ class Item(LibModel): try: return os.path.getsize(syspath(self.path)) except (OSError, Exception) as exc: - log.warning("could not get filesize: {0}", exc) + log.warning("could not get filesize: {}", exc) return 0 # Model methods. diff --git a/beets/logging.py b/beets/logging.py index fd8b1962f..3ed5e5a84 100644 --- a/beets/logging.py +++ b/beets/logging.py @@ -20,6 +20,8 @@ use {}-style formatting and can interpolate keywords arguments to the logging calls (`debug`, `info`, etc). """ +from __future__ import annotations + import threading from copy import copy from logging import ( @@ -32,8 +34,10 @@ from logging import ( Handler, Logger, NullHandler, + RootLogger, StreamHandler, ) +from typing import TYPE_CHECKING, Any, Mapping, TypeVar, Union, overload __all__ = [ "DEBUG", @@ -49,8 +53,20 @@ __all__ = [ "getLogger", ] +if TYPE_CHECKING: + T = TypeVar("T") + from types import TracebackType -def logsafe(val): + # see https://github.com/python/typeshed/blob/main/stdlib/logging/__init__.pyi + _SysExcInfoType = Union[ + tuple[type[BaseException], BaseException, Union[TracebackType, None]], + tuple[None, None, None], + ] + _ExcInfoType = Union[None, bool, _SysExcInfoType, BaseException] + _ArgsType = Union[tuple[object, ...], Mapping[str, object]] + + +def _logsafe(val: T) -> str | T: """Coerce `bytes` to `str` to avoid crashes solely due to logging. This is particularly relevant for bytestring paths. 
Much of our code @@ -83,40 +99,45 @@ class StrFormatLogger(Logger): """ class _LogMessage: - def __init__(self, msg, args, kwargs): + def __init__( + self, + msg: str, + args: _ArgsType, + kwargs: dict[str, Any], + ): self.msg = msg self.args = args self.kwargs = kwargs def __str__(self): - args = [logsafe(a) for a in self.args] - kwargs = {k: logsafe(v) for (k, v) in self.kwargs.items()} + args = [_logsafe(a) for a in self.args] + kwargs = {k: _logsafe(v) for (k, v) in self.kwargs.items()} return self.msg.format(*args, **kwargs) def _log( self, - level, - msg, - args, - exc_info=None, - extra=None, - stack_info=False, + level: int, + msg: object, + args: _ArgsType, + exc_info: _ExcInfoType = None, + extra: Mapping[str, Any] | None = None, + stack_info: bool = False, + stacklevel: int = 1, **kwargs, ): """Log msg.format(*args, **kwargs)""" - m = self._LogMessage(msg, args, kwargs) - stacklevel = kwargs.pop("stacklevel", 1) - stacklevel = {"stacklevel": stacklevel} + if isinstance(msg, str): + msg = self._LogMessage(msg, args, kwargs) return super()._log( level, - m, + msg, (), exc_info=exc_info, extra=extra, stack_info=stack_info, - **stacklevel, + stacklevel=stacklevel, ) @@ -156,9 +177,12 @@ my_manager = copy(Logger.manager) my_manager.loggerClass = BeetsLogger -# Override the `getLogger` to use our machinery. -def getLogger(name=None): # noqa +@overload +def getLogger(name: str) -> BeetsLogger: ... +@overload +def getLogger(name: None = ...) -> RootLogger: ... +def getLogger(name=None) -> BeetsLogger | RootLogger: # noqa: N802 if name: - return my_manager.getLogger(name) + return my_manager.getLogger(name) # type: ignore[return-value] else: return Logger.root diff --git a/beets/metadata_plugins.py b/beets/metadata_plugins.py index 9d69633d6..b865167e4 100644 --- a/beets/metadata_plugins.py +++ b/beets/metadata_plugins.py @@ -8,11 +8,12 @@ implemented as plugins. from __future__ import annotations import abc -import inspect import re -import warnings +from functools import cache, cached_property from typing import TYPE_CHECKING, Generic, Literal, Sequence, TypedDict, TypeVar +import unidecode +from confuse import NotFoundError from typing_extensions import NotRequired from beets.util import cached_classproperty @@ -23,36 +24,14 @@ from .plugins import BeetsPlugin, find_plugins, notify_info_yielded, send if TYPE_CHECKING: from collections.abc import Iterable - from confuse import ConfigView - - from .autotag import Distance from .autotag.hooks import AlbumInfo, Item, TrackInfo +@cache def find_metadata_source_plugins() -> list[MetadataSourcePlugin]: - """Returns a list of MetadataSourcePlugin subclass instances - - Resolved from all currently loaded beets plugins. - """ - - all_plugins = find_plugins() - metadata_plugins: list[MetadataSourcePlugin | BeetsPlugin] = [] - for plugin in all_plugins: - if isinstance(plugin, MetadataSourcePlugin): - metadata_plugins.append(plugin) - elif hasattr(plugin, "data_source"): - # TODO: Remove this in the future major release, v3.0.0 - warnings.warn( - f"{plugin.__class__.__name__} is used as a legacy metadata source. " - "It should extend MetadataSourcePlugin instead of BeetsPlugin. 
" - "Support for this will be removed in the v3.0.0 release!", - DeprecationWarning, - stacklevel=2, - ) - metadata_plugins.append(plugin) - - # typeignore: BeetsPlugin is not a MetadataSourcePlugin (legacy support) - return metadata_plugins # type: ignore[return-value] + """Return a list of all loaded metadata source plugins.""" + # TODO: Make this an isinstance(MetadataSourcePlugin, ...) check in v3.0.0 + return [p for p in find_plugins() if hasattr(p, "data_source")] # type: ignore[misc] @notify_info_yielded("albuminfo_received") @@ -95,46 +74,17 @@ def track_for_id(_id: str) -> TrackInfo | None: return None -def track_distance(item: Item, info: TrackInfo) -> Distance: - """Returns the track distance for an item and trackinfo. - - Returns a Distance object is populated by all metadata source plugins - that implement the :py:meth:`MetadataSourcePlugin.track_distance` method. - """ - from beets.autotag.distance import Distance - - dist = Distance() - for plugin in find_metadata_source_plugins(): - dist.update(plugin.track_distance(item, info)) - return dist - - -def album_distance( - items: Sequence[Item], - album_info: AlbumInfo, - mapping: dict[Item, TrackInfo], -) -> Distance: - """Returns the album distance calculated by plugins.""" - from beets.autotag.distance import Distance - - dist = Distance() - for plugin in find_metadata_source_plugins(): - dist.update(plugin.album_distance(items, album_info, mapping)) - return dist - - -def _get_distance( - config: ConfigView, data_source: str, info: AlbumInfo | TrackInfo -) -> Distance: - """Returns the ``data_source`` weight and the maximum source weight - for albums or individual tracks. - """ - from beets.autotag.distance import Distance - - dist = Distance() - if info.data_source == data_source: - dist.add("source", config["source_weight"].as_number()) - return dist +@cache +def get_penalty(data_source: str | None) -> float: + """Get the penalty value for the given data source.""" + return next( + ( + p.data_source_mismatch_penalty + for p in find_metadata_source_plugins() + if p.data_source == data_source + ), + MetadataSourcePlugin.DEFAULT_DATA_SOURCE_MISMATCH_PENALTY, + ) class MetadataSourcePlugin(BeetsPlugin, metaclass=abc.ABCMeta): @@ -145,9 +95,31 @@ class MetadataSourcePlugin(BeetsPlugin, metaclass=abc.ABCMeta): and tracks, and to retrieve album and track information by ID. """ + DEFAULT_DATA_SOURCE_MISMATCH_PENALTY = 0.5 + + @cached_classproperty + def data_source(cls) -> str: + """The data source name for this plugin. + + This is inferred from the plugin name. 
+ """ + return cls.__name__.replace("Plugin", "") # type: ignore[attr-defined] + + @cached_property + def data_source_mismatch_penalty(self) -> float: + try: + return self.config["source_weight"].as_number() + except NotFoundError: + return self.config["data_source_mismatch_penalty"].as_number() + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) - self.config.add({"source_weight": 0.5}) + self.config.add( + { + "search_limit": 5, + "data_source_mismatch_penalty": self.DEFAULT_DATA_SOURCE_MISMATCH_PENALTY, # noqa: E501 + } + ) @abc.abstractmethod def album_for_id(self, album_id: str) -> AlbumInfo | None: @@ -219,35 +191,6 @@ class MetadataSourcePlugin(BeetsPlugin, metaclass=abc.ABCMeta): return (self.track_for_id(id) for id in ids) - def album_distance( - self, - items: Sequence[Item], - album_info: AlbumInfo, - mapping: dict[Item, TrackInfo], - ) -> Distance: - """Calculate the distance for an album based on its items and album info.""" - return _get_distance( - data_source=self.data_source, info=album_info, config=self.config - ) - - def track_distance( - self, - item: Item, - info: TrackInfo, - ) -> Distance: - """Calculate the distance for a track based on its item and track info.""" - return _get_distance( - data_source=self.data_source, info=info, config=self.config - ) - - @cached_classproperty - def data_source(cls) -> str: - """The data source name for this plugin. - - This is inferred from the plugin name. - """ - return cls.__name__.replace("Plugin", "") # type: ignore[attr-defined] - def _extract_id(self, url: str) -> str | None: """Extract an ID from a URL for this metadata source plugin. @@ -266,10 +209,9 @@ class MetadataSourcePlugin(BeetsPlugin, metaclass=abc.ABCMeta): """Returns an artist string (all artists) and an artist_id (the main artist) for a list of artist object dicts. - For each artist, this function moves articles (such as 'a', 'an', - and 'the') to the front and strips trailing disambiguation numbers. It - returns a tuple containing the comma-separated string of all - normalized artists and the ``id`` of the main/first artist. + For each artist, this function moves articles (such as 'a', 'an', and 'the') + to the front. It returns a tuple containing the comma-separated string + of all normalized artists and the ``id`` of the main/first artist. Alternatively a keyword can be used to combine artists together into a single string by passing the join_key argument. @@ -293,8 +235,6 @@ class MetadataSourcePlugin(BeetsPlugin, metaclass=abc.ABCMeta): if not artist_id: artist_id = artist[id_key] name = artist[name_key] - # Strip disambiguation number. - name = re.sub(r" \(\d+\)$", "", name) # Move articles to the front. name = re.sub(r"^(.*?), (a|an|the)$", r"\2 \1", name, flags=re.I) # Use a join keyword if requested and available. @@ -334,18 +274,26 @@ class SearchApiMetadataSourcePlugin( of identifiers for the requested type (album or track). """ + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self.config.add( + { + "search_query_ascii": False, + } + ) + @abc.abstractmethod def _search_api( self, query_type: Literal["album", "track"], filters: SearchFilter, - keywords: str = "", + query_string: str = "", ) -> Sequence[R]: """Perform a search on the API. :param query_type: The type of query to perform. :param filters: A dictionary of filters to apply to the search. - :param keywords: Additional keywords to include in the search. + :param query_string: Additional query to include in the search. 
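The filters/query_string split in _search_api feeds the query format assembled by _construct_search_query just below. A minimal standalone sketch of that format, with made-up example values:

from typing import Mapping


def construct_search_query(filters: Mapping[str, str], query_string: str = "") -> str:
    # Free-text keywords first, then field filters such as artist:'...'.
    components = [query_string, *(f"{k}:'{v}'" for k, v in filters.items())]
    return " ".join(filter(None, components))


assert (
    construct_search_query({"artist": "Radiohead"}, "OK Computer")
    == "OK Computer artist:'Radiohead'"
)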
        Should return a list of identifiers for the requested type (album or track).
        """
@@ -358,7 +306,9 @@ def candidates(
        self,
        items: Sequence[Item],
        artist: str,
        album: str,
        va_likely: bool,
    ) -> Iterable[AlbumInfo]:
-        query_filters: SearchFilter = {"album": album}
+        query_filters: SearchFilter = {}
+        if album:
+            query_filters["album"] = album
        if not va_likely:
            query_filters["artist"] = artist
@@ -373,7 +323,9 @@ def item_candidates(
        self, item: Item, artist: str, title: str
    ) -> Iterable[TrackInfo]:
-        results = self._search_api("track", {"artist": artist}, keywords=title)
+        results = self._search_api(
+            "track", {"artist": artist}, query_string=title
+        )
        if not results:
            return []
@@ -382,12 +334,26 @@
            self.tracks_for_ids([result["id"] for result in results if result]),
        )
+    def _construct_search_query(
+        self, filters: SearchFilter, query_string: str
+    ) -> str:
+        """Construct a query string with the specified filters and keywords to
+        be provided to the Spotify (or similar) search API.
-# Dynamically copy methods to BeetsPlugin for legacy support
-# TODO: Remove this in the future major release, v3.0.0
+        The returned format was initially designed for Spotify's search API,
+        but we found it is also useful with other APIs that support similar
+        query structures.
+        See `Spotify `_
+        and `Deezer `_.
-for name, method in inspect.getmembers(
-    MetadataSourcePlugin, predicate=inspect.isfunction
-):
-    if not hasattr(BeetsPlugin, name):
-        setattr(BeetsPlugin, name, method)
+        :param filters: Field filters to apply.
+        :param query_string: Query keywords to use.
+        :return: Query string to be provided to the search API.
+        """
+
+        components = [query_string, *(f"{k}:'{v}'" for k, v in filters.items())]
+        query = " ".join(filter(None, components))
+
+        if self.config["search_query_ascii"].get():
+            query = unidecode.unidecode(query)
+
+        return query
diff --git a/beets/plugins.py b/beets/plugins.py
index c5c5b2c53..e10dcf80c 100644
--- a/beets/plugins.py
+++ b/beets/plugins.py
@@ -20,8 +20,10 @@ import abc
import inspect
import re
import sys
+import warnings
from collections import defaultdict
-from functools import wraps
+from functools import cached_property, wraps
+from importlib import import_module
from pathlib import Path
from types import GenericAlias
from typing import TYPE_CHECKING, Any, ClassVar, Literal, TypeVar
@@ -130,9 +132,9 @@ class PluginLogFilter(logging.Filter):
    def filter(self, record):
        if hasattr(record.msg, "msg") and isinstance(record.msg.msg, str):
            # A _LogMessage from our hacked-up Logging replacement.
-            record.msg.msg = self.prefix + record.msg.msg
+            record.msg.msg = f"{self.prefix}{record.msg.msg}"
        elif isinstance(record.msg, str):
-            record.msg = self.prefix + record.msg
+            record.msg = f"{self.prefix}{record.msg}"
        return True
@@ -158,6 +160,59 @@ class BeetsPlugin(metaclass=abc.ABCMeta):
    early_import_stages: list[ImportStageFunc]
    import_stages: list[ImportStageFunc]
+    def __init_subclass__(cls) -> None:
+        """Enable legacy metadata-source plugins to work with the new interface.
+
+        When a plugin subclass of BeetsPlugin defines a `data_source` attribute
+        but does not inherit from MetadataSourcePlugin, this hook:
+
+        1. Skips abstract classes.
+        2. Warns that the class should extend MetadataSourcePlugin (deprecation).
+        3. Copies any non-abstract methods from MetadataSourcePlugin onto the
+           subclass to provide the full plugin API.
+
+        This compatibility layer will be removed in the v3.0.0 release.
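A standalone sketch of the subclass hook described in the docstring above, using made-up ModernInterface/Base/LegacyPlugin names rather than the real beets classes:

import inspect
import warnings


class ModernInterface:
    def lookup(self):
        return f"lookup via {self.data_source}"


class Base:
    def __init_subclass__(cls) -> None:
        super().__init_subclass__()
        # Legacy subclasses declare `data_source` without inheriting the
        # modern interface; warn and copy the missing methods onto them.
        if hasattr(cls, "data_source") and not issubclass(cls, ModernInterface):
            warnings.warn(f"{cls.__name__} uses a legacy interface", DeprecationWarning)
            for name, func in inspect.getmembers(ModernInterface, inspect.isfunction):
                if not hasattr(cls, name):
                    setattr(cls, name, func)


class LegacyPlugin(Base):
    data_source = "Legacy"


assert LegacyPlugin().lookup() == "lookup via Legacy"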
+ """ + # TODO: Remove in v3.0.0 + if inspect.isabstract(cls): + return + + from beets.metadata_plugins import MetadataSourcePlugin + + if issubclass(cls, MetadataSourcePlugin) or not hasattr( + cls, "data_source" + ): + return + + warnings.warn( + f"{cls.__name__} is used as a legacy metadata source. " + "It should extend MetadataSourcePlugin instead of BeetsPlugin. " + "Support for this will be removed in the v3.0.0 release!", + DeprecationWarning, + stacklevel=3, + ) + + method: property | cached_property[Any] | Callable[..., Any] + for name, method in inspect.getmembers( + MetadataSourcePlugin, + predicate=lambda f: ( # type: ignore[arg-type] + ( + isinstance(f, (property, cached_property)) + and not hasattr( + BeetsPlugin, + getattr(f, "attrname", None) or f.fget.__name__, # type: ignore[union-attr] + ) + ) + or ( + inspect.isfunction(f) + and f.__name__ + and not getattr(f, "__isabstractmethod__", False) + and not hasattr(BeetsPlugin, f.__name__) + ) + ), + ): + setattr(cls, name, method) + def __init__(self, name: str | None = None): """Perform one-time plugin setup.""" @@ -181,6 +236,37 @@ class BeetsPlugin(metaclass=abc.ABCMeta): if not any(isinstance(f, PluginLogFilter) for f in self._log.filters): self._log.addFilter(PluginLogFilter(self)) + # In order to verify the config we need to make sure the plugin is fully + # configured (plugins usually add the default configuration *after* + # calling super().__init__()). + self.register_listener("pluginload", self._verify_config) + + def _verify_config(self, *_, **__) -> None: + """Verify plugin configuration. + + If deprecated 'source_weight' option is explicitly set by the user, they + will see a warning in the logs. Otherwise, this must be configured by + a third party plugin, thus we raise a deprecation warning which won't be + shown to user but will be visible to plugin developers. + """ + # TODO: Remove in v3.0.0 + if ( + not hasattr(self, "data_source") + or "source_weight" not in self.config + ): + return + + message = ( + "'source_weight' configuration option is deprecated and will be" + " removed in v3.0.0. Use 'data_source_mismatch_penalty' instead" + ) + for source in self.config.root().sources: + if "source_weight" in (source.get(self.name) or {}): + if source.filename: # user config + self._log.warning(message) + else: # 3rd-party plugin config + warnings.warn(message, DeprecationWarning, stacklevel=0) + def commands(self) -> Sequence[Subcommand]: """Should return a list of beets.ui.Subcommand objects for commands that should be added to beets' CLI. @@ -347,14 +433,20 @@ def _get_plugin(name: str) -> BeetsPlugin | None: Attempts to import the plugin module, locate the appropriate plugin class within it, and return an instance. Handles import failures gracefully and logs warnings for missing plugins or loading errors. + + Note we load the *last* plugin class found in the plugin namespace. This + allows plugins to define helper classes that inherit from BeetsPlugin + without those being loaded as the main plugin class. + + Returns None if the plugin could not be loaded for any reason. 
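A standalone sketch of the namespace scan described above: take the last plugin-like class that is defined in the plugin's own module (or a submodule). The module and class names here are made up:

import types


class BasePlugin: ...
class HelperPlugin(BasePlugin): ...   # helper imported from another module
class MyPlugin(BasePlugin): ...       # the class we actually want to load

HelperPlugin.__module__ = "beetsplug.other"
MyPlugin.__module__ = "beetsplug.my"

namespace = types.ModuleType("beetsplug.my")
namespace.HelperPlugin = HelperPlugin
namespace.MyPlugin = MyPlugin

plugin_cls = next(
    obj
    for obj in reversed(list(vars(namespace).values()))
    if isinstance(obj, type)
    and issubclass(obj, BasePlugin)
    and obj is not BasePlugin
    # only classes defined in this plugin's own module or its submodules
    and (
        obj.__module__ == namespace.__name__
        or obj.__module__.startswith(f"{namespace.__name__}.")
    )
)
assert plugin_cls is MyPlugin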
""" try: try: - namespace = __import__(f"{PLUGIN_NAMESPACE}.{name}", None, None) + namespace = import_module(f"{PLUGIN_NAMESPACE}.{name}") except Exception as exc: raise PluginImportError(name) from exc - for obj in getattr(namespace, name).__dict__.values(): + for obj in reversed(namespace.__dict__.values()): if ( inspect.isclass(obj) and not isinstance( @@ -363,6 +455,12 @@ def _get_plugin(name: str) -> BeetsPlugin | None: and issubclass(obj, BeetsPlugin) and obj != BeetsPlugin and not inspect.isabstract(obj) + # Only consider this plugin's module or submodules to avoid + # conflicts when plugins import other BeetsPlugin classes + and ( + obj.__module__ == namespace.__name__ + or obj.__module__.startswith(f"{namespace.__name__}.") + ) ): return obj() @@ -384,7 +482,7 @@ def load_plugins() -> None: """ if not _instances: names = get_plugin_names() - log.info("Loading plugins: {}", ", ".join(sorted(names))) + log.debug("Loading plugins: {}", ", ".join(sorted(names))) _instances.extend(filter(None, map(_get_plugin, names))) send("pluginload") @@ -424,9 +522,9 @@ def types(model_cls: type[AnyModel]) -> dict[str, Type]: for field in plugin_types: if field in types and plugin_types[field] != types[field]: raise PluginConflictError( - "Plugin {} defines flexible field {} " + f"Plugin {plugin.name} defines flexible field {field} " "which has already been defined with " - "another type.".format(plugin.name, field) + "another type." ) types.update(plugin_types) return types @@ -543,7 +641,7 @@ def send(event: EventType, **arguments: Any) -> list[Any]: Return a list of non-None values returned from the handlers. """ - log.debug("Sending event: {0}", event) + log.debug("Sending event: {}", event) return [ r for handler in BeetsPlugin.listeners[event] @@ -551,17 +649,21 @@ def send(event: EventType, **arguments: Any) -> list[Any]: ] -def feat_tokens(for_artist: bool = True) -> str: +def feat_tokens( + for_artist: bool = True, custom_words: list[str] | None = None +) -> str: """Return a regular expression that matches phrases like "featuring" that separate a main artist or a song title from secondary artists. The `for_artist` option determines whether the regex should be suitable for matching artist fields (the default) or title fields. """ feat_words = ["ft", "featuring", "feat", "feat.", "ft."] + if isinstance(custom_words, list): + feat_words += custom_words if for_artist: feat_words += ["with", "vs", "and", "con", "&"] - return r"(?<=[\s(\[])(?:{})(?=\s)".format( - "|".join(re.escape(x) for x in feat_words) + return ( + rf"(?<=[\s(\[])(?:{'|'.join(re.escape(x) for x in feat_words)})(?=\s)" ) diff --git a/beets/test/_common.py b/beets/test/_common.py index d70f9ec80..ffb2bfd65 100644 --- a/beets/test/_common.py +++ b/beets/test/_common.py @@ -153,7 +153,7 @@ class DummyIn: self.out = out def add(self, s): - self.buf.append(s + "\n") + self.buf.append(f"{s}\n") def close(self): pass diff --git a/beets/test/helper.py b/beets/test/helper.py index f1633c110..ea08ec840 100644 --- a/beets/test/helper.py +++ b/beets/test/helper.py @@ -58,7 +58,6 @@ from beets.ui.commands import TerminalImportSession from beets.util import ( MoveOperation, bytestring_path, - cached_classproperty, clean_module_tempdir, syspath, ) @@ -267,7 +266,7 @@ class TestHelper(ConfigMixin): The item is attached to the database from `self.lib`. 
""" values_ = { - "title": "t\u00eftle {0}", + "title": "t\u00eftle {}", "artist": "the \u00e4rtist", "album": "the \u00e4lbum", "track": 1, @@ -278,7 +277,7 @@ class TestHelper(ConfigMixin): values_["db"] = self.lib item = Item(**values_) if "path" not in values: - item["path"] = "audio." + item["format"].lower() + item["path"] = f"audio.{item['format'].lower()}" # mtime needs to be set last since other assignments reset it. item.mtime = 12345 return item @@ -310,7 +309,7 @@ class TestHelper(ConfigMixin): item = self.create_item(**values) extension = item["format"].lower() item["path"] = os.path.join( - _common.RSRC, util.bytestring_path("min." + extension) + _common.RSRC, util.bytestring_path(f"min.{extension}") ) item.add(self.lib) item.move(operation=MoveOperation.COPY) @@ -325,7 +324,7 @@ class TestHelper(ConfigMixin): """Add a number of items with files to the database.""" # TODO base this on `add_item()` items = [] - path = os.path.join(_common.RSRC, util.bytestring_path("full." + ext)) + path = os.path.join(_common.RSRC, util.bytestring_path(f"full.{ext}")) for i in range(count): item = Item.from_path(path) item.album = f"\u00e4lbum {i}" # Check unicode paths @@ -372,7 +371,7 @@ class TestHelper(ConfigMixin): specified extension a cover art image is added to the media file. """ - src = os.path.join(_common.RSRC, util.bytestring_path("full." + ext)) + src = os.path.join(_common.RSRC, util.bytestring_path(f"full.{ext}")) handle, path = mkstemp(dir=self.temp_dir) path = bytestring_path(path) os.close(handle) @@ -495,7 +494,6 @@ class PluginMixin(ConfigMixin): # FIXME this should eventually be handled by a plugin manager plugins = (self.plugin,) if hasattr(self, "plugin") else plugins self.config["plugins"] = plugins - cached_classproperty.cache.clear() beets.plugins.load_plugins() def unload_plugins(self) -> None: @@ -570,7 +568,7 @@ class ImportHelper(TestHelper): medium = MediaFile(track_path) medium.update( { - "album": "Tag Album" + (f" {album_id}" if album_id else ""), + "album": f"Tag Album{f' {album_id}' if album_id else ''}", "albumartist": None, "mb_albumid": None, "comp": None, @@ -831,23 +829,21 @@ class AutotagStub: def _make_track_match(self, artist, album, number): return TrackInfo( - title="Applied Track %d" % number, - track_id="match %d" % number, + title=f"Applied Track {number}", + track_id=f"match {number}", artist=artist, length=1, index=0, ) def _make_album_match(self, artist, album, tracks, distance=0, missing=0): - if distance: - id = " " + "M" * distance - else: - id = "" + id = f" {'M' * distance}" if distance else "" + if artist is None: artist = "Various Artists" else: - artist = artist.replace("Tag", "Applied") + id - album = album.replace("Tag", "Applied") + id + artist = f"{artist.replace('Tag', 'Applied')}{id}" + album = f"{album.replace('Tag', 'Applied')}{id}" track_infos = [] for i in range(tracks - missing): @@ -858,8 +854,8 @@ class AutotagStub: album=album, tracks=track_infos, va=False, - album_id="albumid" + id, - artist_id="artistid" + id, + album_id=f"albumid{id}", + artist_id=f"artistid{id}", albumtype="soundtrack", data_source="match_source", bandcamp_album_id="bc_url", @@ -885,7 +881,7 @@ class FetchImageHelper: super().run(*args, **kwargs) IMAGEHEADER: dict[str, bytes] = { - "image/jpeg": b"\xff\xd8\xff" + b"\x00" * 3 + b"JFIF", + "image/jpeg": b"\xff\xd8\xff\x00\x00\x00JFIF", "image/png": b"\211PNG\r\n\032\n", "image/gif": b"GIF89a", # dummy type that is definitely not a valid image content type diff --git a/beets/ui/__init__.py 
b/beets/ui/__init__.py index 01030a977..fe980bb5c 100644 --- a/beets/ui/__init__.py +++ b/beets/ui/__init__.py @@ -23,14 +23,16 @@ import errno import optparse import os.path import re +import shutil import sqlite3 -import struct import sys import textwrap import traceback import warnings from difflib import SequenceMatcher -from typing import Any, Callable +from functools import cache +from itertools import chain +from typing import Any, Callable, Literal import confuse @@ -125,7 +127,7 @@ def print_(*strings: str, end: str = "\n") -> None: The `end` keyword argument behaves similarly to the built-in `print` (it defaults to a newline). """ - txt = " ".join(strings or ("",)) + end + txt = f"{' '.join(strings or ('',))}{end}" # Encode the string and write it to stdout. # On Python 3, sys.stdout expects text strings and uses the @@ -269,7 +271,7 @@ def input_options( ) ): # The first option is the default; mark it. - show_letter = "[%s]" % found_letter.upper() + show_letter = f"[{found_letter.upper()}]" is_default = True else: show_letter = found_letter.upper() @@ -308,9 +310,9 @@ def input_options( if isinstance(default, int): default_name = str(default) default_name = colorize("action_default", default_name) - tmpl = "# selection (default %s)" - prompt_parts.append(tmpl % default_name) - prompt_part_lengths.append(len(tmpl % str(default))) + tmpl = "# selection (default {})" + prompt_parts.append(tmpl.format(default_name)) + prompt_part_lengths.append(len(tmpl) - 2 + len(str(default))) else: prompt_parts.append("# selection") prompt_part_lengths.append(len(prompt_parts[-1])) @@ -338,7 +340,7 @@ def input_options( if line_length != 0: # Not the beginning of the line; need a space. - part = " " + part + part = f" {part}" length += 1 prompt += part @@ -349,8 +351,8 @@ def input_options( if not fallback_prompt: fallback_prompt = "Enter one of " if numrange: - fallback_prompt += "%i-%i, " % numrange - fallback_prompt += ", ".join(display_letters) + ":" + fallback_prompt += "{}-{}, ".format(*numrange) + fallback_prompt += f"{', '.join(display_letters)}:" resp = input_(prompt) while True: @@ -406,7 +408,7 @@ def input_select_objects(prompt, objs, rep, prompt_all=None): objects individually. """ choice = input_options( - ("y", "n", "s"), False, "%s? (Yes/no/select)" % (prompt_all or prompt) + ("y", "n", "s"), False, f"{prompt_all or prompt}? (Yes/no/select)" ) print() # Blank line. @@ -420,7 +422,7 @@ def input_select_objects(prompt, objs, rep, prompt_all=None): answer = input_options( ("y", "n", "q"), True, - "%s? (yes/no/quit)" % prompt, + f"{prompt}? (yes/no/quit)", "Enter Y or N:", ) if answer == "y": @@ -438,7 +440,7 @@ def input_select_objects(prompt, objs, rep, prompt_all=None): # ANSI terminal colorization code heavily inspired by pygments: # https://bitbucket.org/birkenfeld/pygments-main/src/default/pygments/console.py # (pygments is by Tim Hatch, Armin Ronacher, et al.) -COLOR_ESCAPE = "\x1b[" +COLOR_ESCAPE = "\x1b" LEGACY_COLORS = { "black": ["black"], "darkred": ["red"], @@ -463,7 +465,7 @@ LEGACY_COLORS = { "white": ["bold", "white"], } # All ANSI Colors. -ANSI_CODES = { +CODE_BY_COLOR = { # Styles. 
"normal": 0, "bold": 1, @@ -494,11 +496,17 @@ ANSI_CODES = { "bg_cyan": 46, "bg_white": 47, } -RESET_COLOR = COLOR_ESCAPE + "39;49;00m" - -# These abstract COLOR_NAMES are lazily mapped on to the actual color in COLORS -# as they are defined in the configuration files, see function: colorize -COLOR_NAMES = [ +RESET_COLOR = f"{COLOR_ESCAPE}[39;49;00m" +# Precompile common ANSI-escape regex patterns +ANSI_CODE_REGEX = re.compile(rf"({COLOR_ESCAPE}\[[;0-9]*m)") +ESC_TEXT_REGEX = re.compile( + rf"""(?P[^{COLOR_ESCAPE}]*) + (?P(?:{ANSI_CODE_REGEX.pattern})+) + (?P[^{COLOR_ESCAPE}]+)(?P{re.escape(RESET_COLOR)}) + (?P[^{COLOR_ESCAPE}]*)""", + re.VERBOSE, +) +ColorName = Literal[ "text_success", "text_warning", "text_error", @@ -507,76 +515,54 @@ COLOR_NAMES = [ "action_default", "action", # New Colors - "text", "text_faint", "import_path", "import_path_items", "action_description", - "added", - "removed", "changed", - "added_highlight", - "removed_highlight", - "changed_highlight", "text_diff_added", "text_diff_removed", - "text_diff_changed", ] -COLORS: dict[str, list[str]] | None = None -def _colorize(color, text): - """Returns a string that prints the given text in the given color - in a terminal that is ANSI color-aware. The color must be a list of strings - from ANSI_CODES. +@cache +def get_color_config() -> dict[ColorName, str]: + """Parse and validate color configuration, converting names to ANSI codes. + + Processes the UI color configuration, handling both new list format and + legacy single-color format. Validates all color names against known codes + and raises an error for any invalid entries. """ - # Construct escape sequence to be put before the text by iterating - # over all "ANSI codes" in `color`. - escape = "" - for code in color: - escape = escape + COLOR_ESCAPE + "%im" % ANSI_CODES[code] - return escape + text + RESET_COLOR + colors_by_color_name: dict[ColorName, list[str]] = { + k: (v if isinstance(v, list) else LEGACY_COLORS.get(v, [v])) + for k, v in config["ui"]["colors"].flatten().items() + } + + if invalid_colors := ( + set(chain.from_iterable(colors_by_color_name.values())) + - CODE_BY_COLOR.keys() + ): + raise UserError( + f"Invalid color(s) in configuration: {', '.join(invalid_colors)}" + ) + + return { + n: ";".join(str(CODE_BY_COLOR[c]) for c in colors) + for n, colors in colors_by_color_name.items() + } -def colorize(color_name, text): - """Colorize text if colored output is enabled. (Like _colorize but - conditional.) +def colorize(color_name: ColorName, text: str) -> str: + """Apply ANSI color formatting to text based on configuration settings. + + Returns colored text when color output is enabled and NO_COLOR environment + variable is not set, otherwise returns plain text unchanged. """ if config["ui"]["color"] and "NO_COLOR" not in os.environ: - global COLORS - if not COLORS: - # Read all color configurations and set global variable COLORS. - COLORS = dict() - for name in COLOR_NAMES: - # Convert legacy color definitions (strings) into the new - # list-based color definitions. Do this by trying to read the - # color definition from the configuration as unicode - if this - # is successful, the color definition is a legacy definition - # and has to be converted. - try: - color_def = config["ui"]["colors"][name].get(str) - except (confuse.ConfigTypeError, NameError): - # Normal color definition (type: list of unicode). - color_def = config["ui"]["colors"][name].get(list) - else: - # Legacy color definition (type: unicode). Convert. 
- if color_def in LEGACY_COLORS: - color_def = LEGACY_COLORS[color_def] - else: - raise UserError("no such color %s", color_def) - for code in color_def: - if code not in ANSI_CODES.keys(): - raise ValueError("no such ANSI code %s", code) - COLORS[name] = color_def - # In case a 3rd party plugin is still passing the actual color ('red') - # instead of the abstract color name ('text_error') - color = COLORS.get(color_name) - if not color: - log.debug("Invalid color_name: {0}", color_name) - color = color_name - return _colorize(color, text) - else: - return text + color_code = get_color_config()[color_name] + return f"{COLOR_ESCAPE}[{color_code}m{text}{RESET_COLOR}" + + return text def uncolorize(colored_text): @@ -589,26 +575,22 @@ def uncolorize(colored_text): # [;\d]* - matches a sequence consisting of one or more digits or # semicola # [A-Za-z] - matches a letter - ansi_code_regex = re.compile(r"\x1b\[[;\d]*[A-Za-z]", re.VERBOSE) - # Strip ANSI codes from `colored_text` using the regular expression. - text = ansi_code_regex.sub("", colored_text) - return text + return ANSI_CODE_REGEX.sub("", colored_text) def color_split(colored_text, index): - ansi_code_regex = re.compile(r"(\x1b\[[;\d]*[A-Za-z])", re.VERBOSE) length = 0 pre_split = "" post_split = "" found_color_code = None found_split = False - for part in ansi_code_regex.split(colored_text): + for part in ANSI_CODE_REGEX.split(colored_text): # Count how many real letters we have passed length += color_len(part) if found_split: post_split += part else: - if ansi_code_regex.match(part): + if ANSI_CODE_REGEX.match(part): # This is a color code if part == RESET_COLOR: found_color_code = None @@ -621,8 +603,8 @@ def color_split(colored_text, index): split_index = index - (length - color_len(part)) found_split = True if found_color_code: - pre_split += part[:split_index] + RESET_COLOR - post_split += found_color_code + part[split_index:] + pre_split += f"{part[:split_index]}{RESET_COLOR}" + post_split += f"{found_color_code}{part[split_index:]}" else: pre_split += part[:split_index] post_split += part[split_index:] @@ -642,7 +624,7 @@ def color_len(colored_text): return len(uncolorize(colored_text)) -def _colordiff(a, b): +def _colordiff(a: Any, b: Any) -> tuple[str, str]: """Given two values, return the same pair of strings except with their differences highlighted in the specified color. Strings are highlighted intelligently to show differences; other values are @@ -664,35 +646,21 @@ def _colordiff(a, b): colorize("text_diff_added", str(b)), ) - a_out = [] - b_out = [] + before = "" + after = "" matcher = SequenceMatcher(lambda x: False, a, b) for op, a_start, a_end, b_start, b_end in matcher.get_opcodes(): - if op == "equal": - # In both strings. - a_out.append(a[a_start:a_end]) - b_out.append(b[b_start:b_end]) - elif op == "insert": - # Right only. - b_out.append(colorize("text_diff_added", b[b_start:b_end])) - elif op == "delete": - # Left only. - a_out.append(colorize("text_diff_removed", a[a_start:a_end])) - elif op == "replace": - # Right and left differ. Colorise with second highlight if - # it's just a case change. 
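The color helpers in this hunk boil down to joining numeric codes with ";" between the escape introducer and a reset. A minimal sketch under that assumption, with codes taken from the table above:

COLOR_ESCAPE = "\x1b"
RESET_COLOR = f"{COLOR_ESCAPE}[39;49;00m"


def ansi(text: str, *codes: int) -> str:
    # Join the numeric codes with ";" and wrap the text with a reset.
    return f"{COLOR_ESCAPE}[{';'.join(str(c) for c in codes)}m{text}{RESET_COLOR}"


# 1 is "bold" and 31 is "red" in the code table above.
print(ansi("warning", 1, 31))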
- if a[a_start:a_end].lower() != b[b_start:b_end].lower(): - a_color = "text_diff_removed" - b_color = "text_diff_added" - else: - a_color = b_color = "text_highlight_minor" - a_out.append(colorize(a_color, a[a_start:a_end])) - b_out.append(colorize(b_color, b[b_start:b_end])) - else: - assert False + before_part, after_part = a[a_start:a_end], b[b_start:b_end] + if op in {"delete", "replace"}: + before_part = colorize("text_diff_removed", before_part) + if op in {"insert", "replace"}: + after_part = colorize("text_diff_added", after_part) - return "".join(a_out), "".join(b_out) + before += before_part + after += after_part + + return before, after def colordiff(a, b): @@ -726,32 +694,16 @@ def get_replacements(): replacements.append((re.compile(pattern), repl)) except re.error: raise UserError( - "malformed regular expression in replace: {}".format(pattern) + f"malformed regular expression in replace: {pattern}" ) return replacements -def term_width(): +@cache +def term_width() -> int: """Get the width (columns) of the terminal.""" - fallback = config["ui"]["terminal_width"].get(int) - - # The fcntl and termios modules are not available on non-Unix - # platforms, so we fall back to a constant. - try: - import fcntl - import termios - except ImportError: - return fallback - - try: - buf = fcntl.ioctl(0, termios.TIOCGWINSZ, " " * 4) - except OSError: - return fallback - try: - height, width = struct.unpack("hh", buf) - except struct.error: - return fallback - return width + columns, _ = shutil.get_terminal_size(fallback=(0, 0)) + return columns if columns else config["ui"]["terminal_width"].get(int) def split_into_lines(string, width_tuple): @@ -765,19 +717,13 @@ def split_into_lines(string, width_tuple): """ first_width, middle_width, last_width = width_tuple words = [] - esc_text = re.compile( - r"""(?P[^\x1b]*) - (?P(?:\x1b\[[;\d]*[A-Za-z])+) - (?P[^\x1b]+)(?P\x1b\[39;49;00m) - (?P[^\x1b]*)""", - re.VERBOSE, - ) + if uncolorize(string) == string: # No colors in string words = string.split() else: # Use a regex to find escapes and the text within them. - for m in esc_text.finditer(string): + for m in ESC_TEXT_REGEX.finditer(string): # m contains four groups: # pretext - any text before escape sequence # esc - intitial escape sequence @@ -806,17 +752,17 @@ def split_into_lines(string, width_tuple): # Colorize each word with pre/post escapes # Reconstruct colored words words += [ - m.group("esc") + raw_word + RESET_COLOR + f"{m['esc']}{raw_word}{RESET_COLOR}" for raw_word in raw_words ] elif raw_words: # Pretext stops mid-word if m.group("esc") != RESET_COLOR: # Add the rest of the current word, with a reset after it - words[-1] += m.group("esc") + raw_words[0] + RESET_COLOR + words[-1] += f"{m['esc']}{raw_words[0]}{RESET_COLOR}" # Add the subsequent colored words: words += [ - m.group("esc") + raw_word + RESET_COLOR + f"{m['esc']}{raw_word}{RESET_COLOR}" for raw_word in raw_words[1:] ] else: @@ -907,18 +853,12 @@ def print_column_layout( With subsequent lines (i.e. {lhs1}, {rhs1} onwards) being the rest of contents, wrapped if the width would be otherwise exceeded. """ - if right["prefix"] + right["contents"] + right["suffix"] == "": + if f"{right['prefix']}{right['contents']}{right['suffix']}" == "": # No right hand information, so we don't need a separator. 
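The rewritten _colordiff above walks SequenceMatcher opcodes and highlights only the spans that changed. The same idea in a standalone sketch that marks spans with brackets instead of ANSI colors:

from difflib import SequenceMatcher


def mark_diff(a: str, b: str) -> tuple[str, str]:
    """Wrap removed spans of `a` and added spans of `b` in brackets."""
    before, after = "", ""
    for op, a_start, a_end, b_start, b_end in SequenceMatcher(None, a, b).get_opcodes():
        before_part, after_part = a[a_start:a_end], b[b_start:b_end]
        if op in {"delete", "replace"}:
            before_part = f"[{before_part}]"
        if op in {"insert", "replace"}:
            after_part = f"[{after_part}]"
        before += before_part
        after += after_part
    return before, after


print(mark_diff("The Beatles", "The Beetles"))  # ('The Be[a]tles', 'The Be[e]etles'.replace("ee", "e") if False else 'The Be[e]tles')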
separator = "" first_line_no_wrap = ( - indent_str - + left["prefix"] - + left["contents"] - + left["suffix"] - + separator - + right["prefix"] - + right["contents"] - + right["suffix"] + f"{indent_str}{left['prefix']}{left['contents']}{left['suffix']}" + f"{separator}{right['prefix']}{right['contents']}{right['suffix']}" ) if color_len(first_line_no_wrap) < max_width: # Everything fits, print out line. @@ -1044,18 +984,12 @@ def print_newline_layout( If {lhs0} would go over the maximum width, the subsequent lines are indented a second time for ease of reading. """ - if right["prefix"] + right["contents"] + right["suffix"] == "": + if f"{right['prefix']}{right['contents']}{right['suffix']}" == "": # No right hand information, so we don't need a separator. separator = "" first_line_no_wrap = ( - indent_str - + left["prefix"] - + left["contents"] - + left["suffix"] - + separator - + right["prefix"] - + right["contents"] - + right["suffix"] + f"{indent_str}{left['prefix']}{left['contents']}{left['suffix']}" + f"{separator}{right['prefix']}{right['contents']}{right['suffix']}" ) if color_len(first_line_no_wrap) < max_width: # Everything fits, print out line. @@ -1069,7 +1003,7 @@ def print_newline_layout( empty_space - len(indent_str), empty_space - len(indent_str), ) - left_str = left["prefix"] + left["contents"] + left["suffix"] + left_str = f"{left['prefix']}{left['contents']}{left['suffix']}" left_split = split_into_lines(left_str, left_width_tuple) # Repeat calculations for rhs, including separator on first line right_width_tuple = ( @@ -1077,19 +1011,19 @@ def print_newline_layout( empty_space - len(indent_str), empty_space - len(indent_str), ) - right_str = right["prefix"] + right["contents"] + right["suffix"] + right_str = f"{right['prefix']}{right['contents']}{right['suffix']}" right_split = split_into_lines(right_str, right_width_tuple) for i, line in enumerate(left_split): if i == 0: - print_(indent_str + line) + print_(f"{indent_str}{line}") elif line != "": # Ignore empty lines - print_(indent_str * 2 + line) + print_(f"{indent_str * 2}{line}") for i, line in enumerate(right_split): if i == 0: - print_(indent_str + separator + line) + print_(f"{indent_str}{separator}{line}") elif line != "": - print_(indent_str * 2 + line) + print_(f"{indent_str * 2}{line}") FLOAT_EPSILON = 0.01 @@ -1122,13 +1056,15 @@ def _field_diff(field, old, old_fmt, new, new_fmt): if isinstance(oldval, str): oldstr, newstr = colordiff(oldval, newstr) else: - oldstr = colorize("text_error", oldstr) - newstr = colorize("text_error", newstr) + oldstr = colorize("text_diff_removed", oldstr) + newstr = colorize("text_diff_added", newstr) return f"{oldstr} -> {newstr}" -def show_model_changes(new, old=None, fields=None, always=False): +def show_model_changes( + new, old=None, fields=None, always=False, print_obj: bool = True +): """Given a Model object, print a list of changes from its pristine version stored in the database. Return a boolean indicating whether any changes were found. @@ -1163,11 +1099,11 @@ def show_model_changes(new, old=None, fields=None, always=False): continue changes.append( - " {}: {}".format(field, colorize("text_highlight", new_fmt[field])) + f" {field}: {colorize('text_highlight', new_fmt[field])}" ) # Print changes. 
- if changes or always: + if print_obj and (changes or always): print_(format(old)) if changes: print_("\n".join(changes)) @@ -1204,22 +1140,16 @@ def show_path_changes(path_changes): # Print every change over two lines for source, dest in zip(sources, destinations): color_source, color_dest = colordiff(source, dest) - print_("{0} \n -> {1}".format(color_source, color_dest)) + print_(f"{color_source} \n -> {color_dest}") else: # Print every change on a single line, and add a header title_pad = max_width - len("Source ") + len(" -> ") - print_("Source {0} Destination".format(" " * title_pad)) + print_(f"Source {' ' * title_pad} Destination") for source, dest in zip(sources, destinations): pad = max_width - len(source) color_source, color_dest = colordiff(source, dest) - print_( - "{0} {1} -> {2}".format( - color_source, - " " * pad, - color_dest, - ) - ) + print_(f"{color_source} {' ' * pad} -> {color_dest}") # Helper functions for option parsing. @@ -1245,9 +1175,7 @@ def _store_dict(option, opt_str, value, parser): raise ValueError except ValueError: raise UserError( - "supplied argument `{}' is not of the form `key=value'".format( - value - ) + f"supplied argument `{value}' is not of the form `key=value'" ) option_values[key] = value @@ -1426,8 +1354,8 @@ class Subcommand: @root_parser.setter def root_parser(self, root_parser): self._root_parser = root_parser - self.parser.prog = "{} {}".format( - as_string(root_parser.get_prog_name()), self.name + self.parser.prog = ( + f"{as_string(root_parser.get_prog_name())} {self.name}" ) @@ -1483,7 +1411,7 @@ class SubcommandsOptionParser(CommonOptionsParser): for subcommand in subcommands: name = subcommand.name if subcommand.aliases: - name += " (%s)" % ", ".join(subcommand.aliases) + name += f" ({', '.join(subcommand.aliases)})" disp_names.append(name) # Set the help position based on the max width. @@ -1496,32 +1424,24 @@ class SubcommandsOptionParser(CommonOptionsParser): # Lifted directly from optparse.py. name_width = help_position - formatter.current_indent - 2 if len(name) > name_width: - name = "%*s%s\n" % (formatter.current_indent, "", name) + name = f"{' ' * formatter.current_indent}{name}\n" indent_first = help_position else: - name = "%*s%-*s " % ( - formatter.current_indent, - "", - name_width, - name, - ) + name = f"{' ' * formatter.current_indent}{name:<{name_width}}\n" indent_first = 0 result.append(name) help_width = formatter.width - help_position help_lines = textwrap.wrap(subcommand.help, help_width) help_line = help_lines[0] if help_lines else "" - result.append("%*s%s\n" % (indent_first, "", help_line)) + result.append(f"{' ' * indent_first}{help_line}\n") result.extend( - [ - "%*s%s\n" % (help_position, "", line) - for line in help_lines[1:] - ] + [f"{' ' * help_position}{line}\n" for line in help_lines[1:]] ) formatter.dedent() # Concatenate the original help message with the subcommand # list. 
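The help-formatting conversion just above relies on f-string field widths reproducing the old %-style padding. A quick standalone check of that equivalence:

indent, width, name = 2, 12, "import"

# "%*s" pads with `indent` spaces; "%-*s" left-justifies `name` to `width`.
old_style = "%*s%-*s" % (indent, "", width, name)
new_style = f"{' ' * indent}{name:<{width}}"

assert old_style == new_style
assert len(new_style) == indent + width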
- return out + "".join(result) + return f"{out}{''.join(result)}" def _subcommand_for_name(self, name): """Return the subcommand in self.subcommands matching the @@ -1615,19 +1535,19 @@ def _configure(options): if overlay_path: log.debug( - "overlaying configuration: {0}", util.displayable_path(overlay_path) + "overlaying configuration: {}", util.displayable_path(overlay_path) ) config_path = config.user_config_path() if os.path.isfile(config_path): - log.debug("user configuration: {0}", util.displayable_path(config_path)) + log.debug("user configuration: {}", util.displayable_path(config_path)) else: log.debug( - "no user configuration found at {0}", + "no user configuration found at {}", util.displayable_path(config_path), ) - log.debug("data directory: {0}", util.displayable_path(config.config_dir())) + log.debug("data directory: {}", util.displayable_path(config.config_dir())) return config @@ -1637,10 +1557,8 @@ def _ensure_db_directory_exists(path): newpath = os.path.dirname(path) if not os.path.isdir(newpath): if input_yn( - "The database directory {} does not \ - exist. Create it (Y/n)?".format( - util.displayable_path(newpath) - ) + f"The database directory {util.displayable_path(newpath)} does not" + " exist. Create it (Y/n)?" ): os.makedirs(newpath) @@ -1660,12 +1578,11 @@ def _open_library(config: confuse.LazyConfig) -> library.Library: except (sqlite3.OperationalError, sqlite3.DatabaseError) as db_error: log.debug("{}", traceback.format_exc()) raise UserError( - "database file {} cannot not be opened: {}".format( - util.displayable_path(dbpath), db_error - ) + f"database file {util.displayable_path(dbpath)} cannot not be" + f" opened: {db_error}" ) log.debug( - "library database: {0}\nlibrary directory: {1}", + "library database: {}\nlibrary directory: {}", util.displayable_path(lib.path), util.displayable_path(lib.directory), ) @@ -1782,7 +1699,7 @@ def main(args=None): _raw_main(args) except UserError as exc: message = exc.args[0] if exc.args else None - log.error("error: {0}", message) + log.error("error: {}", message) sys.exit(1) except util.HumanReadableError as exc: exc.log(log) @@ -1794,10 +1711,10 @@ def main(args=None): log.error("{}", exc) sys.exit(1) except confuse.ConfigError as exc: - log.error("configuration error: {0}", exc) + log.error("configuration error: {}", exc) sys.exit(1) except db_query.InvalidQueryError as exc: - log.error("invalid query: {0}", exc) + log.error("invalid query: {}", exc) sys.exit(1) except OSError as exc: if exc.errno == errno.EPIPE: @@ -1810,7 +1727,7 @@ def main(args=None): log.debug("{}", traceback.format_exc()) except db.DBAccessError as exc: log.error( - "database access error: {0}\n" + "database access error: {}\n" "the library file might have a permissions problem", exc, ) diff --git a/beets/ui/commands.py b/beets/ui/commands.py index 12a8d6875..b52e965b7 100755 --- a/beets/ui/commands.py +++ b/beets/ui/commands.py @@ -18,8 +18,10 @@ interface. 
import os import re +import textwrap from collections import Counter from collections.abc import Sequence +from functools import cached_property from itertools import chain from platform import python_version from typing import Any, NamedTuple @@ -112,15 +114,11 @@ def _parse_logfiles(logfiles): yield from _paths_from_logfile(syspath(normpath(logfile))) except ValueError as err: raise ui.UserError( - "malformed logfile {}: {}".format( - util.displayable_path(logfile), str(err) - ) + f"malformed logfile {util.displayable_path(logfile)}: {err}" ) from err except OSError as err: raise ui.UserError( - "unreadable logfile {}: {}".format( - util.displayable_path(logfile), str(err) - ) + f"unreadable logfile {util.displayable_path(logfile)}: {err}" ) from err @@ -132,13 +130,13 @@ def _print_keys(query): returned row, with indentation of 2 spaces. """ for row in query: - print_(" " * 2 + row["key"]) + print_(f" {row['key']}") def fields_func(lib, opts, args): def _print_rows(names): names.sort() - print_(" " + "\n ".join(names)) + print_(textwrap.indent("\n".join(names), " ")) print_("Item fields:") _print_rows(library.Item.all_keys()) @@ -148,13 +146,13 @@ def fields_func(lib, opts, args): with lib.transaction() as tx: # The SQL uses the DISTINCT to get unique values from the query - unique_fields = "SELECT DISTINCT key FROM (%s)" + unique_fields = "SELECT DISTINCT key FROM ({})" print_("Item flexible attributes:") - _print_keys(tx.query(unique_fields % library.Item._flex_table)) + _print_keys(tx.query(unique_fields.format(library.Item._flex_table))) print_("Album flexible attributes:") - _print_keys(tx.query(unique_fields % library.Album._flex_table)) + _print_keys(tx.query(unique_fields.format(library.Album._flex_table))) fields_cmd = ui.Subcommand( @@ -213,10 +211,10 @@ def get_singleton_disambig_fields(info: hooks.TrackInfo) -> Sequence[str]: out = [] chosen_fields = config["match"]["singleton_disambig_fields"].as_str_seq() calculated_values = { - "index": "Index {}".format(str(info.index)), - "track_alt": "Track {}".format(info.track_alt), + "index": f"Index {info.index}", + "track_alt": f"Track {info.track_alt}", "album": ( - "[{}]".format(info.album) + f"[{info.album}]" if ( config["import"]["singleton_album_disambig"].get() and info.get("album") @@ -242,7 +240,7 @@ def get_album_disambig_fields(info: hooks.AlbumInfo) -> Sequence[str]: chosen_fields = config["match"]["album_disambig_fields"].as_str_seq() calculated_values = { "media": ( - "{}x{}".format(info.mediums, info.media) + f"{info.mediums}x{info.media}" if (info.mediums and info.mediums > 1) else info.media ), @@ -277,7 +275,7 @@ def dist_string(dist): """Formats a distance (a float) as a colorized similarity percentage string. """ - string = "{:.1f}%".format(((1 - dist) * 100)) + string = f"{(1 - dist) * 100:.1f}%" return dist_colorize(string, dist) @@ -295,7 +293,7 @@ def penalty_string(distance, limit=None): if limit and len(penalties) > limit: penalties = penalties[:limit] + ["..."] # Prefix penalty string with U+2260: Not Equal To - penalty_string = "\u2260 {}".format(", ".join(penalties)) + penalty_string = f"\u2260 {', '.join(penalties)}" return ui.colorize("changed", penalty_string) @@ -306,6 +304,10 @@ class ChangeRepresentation: TrackMatch object, accordingly. 
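Stripped of colorization, the strings built by dist_string and penalty_string above reduce to plain formatting. A small standalone check with made-up values:

distance, penalties = 0.125, ["tracks", "year"]

similarity = f"{(1 - distance) * 100:.1f}%"
penalty_line = f"\u2260 {', '.join(penalties)}"

assert similarity == "87.5%"
assert penalty_line == "≠ tracks, year"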
""" + @cached_property + def changed_prefix(self) -> str: + return ui.colorize("changed", "\u2260") + cur_artist = None # cur_album set if album, cur_title set if singleton cur_album = None @@ -360,18 +362,18 @@ class ChangeRepresentation: # 'Match' line and similarity. print_( - self.indent_header + f"Match ({dist_string(self.match.distance)}):" + f"{self.indent_header}Match ({dist_string(self.match.distance)}):" ) if isinstance(self.match.info, autotag.hooks.AlbumInfo): # Matching an album - print that artist_album_str = ( - f"{self.match.info.artist}" + f" - {self.match.info.album}" + f"{self.match.info.artist} - {self.match.info.album}" ) else: # Matching a single track artist_album_str = ( - f"{self.match.info.artist}" + f" - {self.match.info.title}" + f"{self.match.info.artist} - {self.match.info.title}" ) print_( self.indent_header @@ -381,17 +383,17 @@ class ChangeRepresentation: # Penalties. penalties = penalty_string(self.match.distance) if penalties: - print_(self.indent_header + penalties) + print_(f"{self.indent_header}{penalties}") # Disambiguation. disambig = disambig_string(self.match.info) if disambig: - print_(self.indent_header + disambig) + print_(f"{self.indent_header}{disambig}") # Data URL. if self.match.info.data_url: url = ui.colorize("text_faint", f"{self.match.info.data_url}") - print_(self.indent_header + url) + print_(f"{self.indent_header}{url}") def show_match_details(self): """Print out the details of the match, including changes in album name @@ -404,9 +406,8 @@ class ChangeRepresentation: artist_l, artist_r = "", "" if artist_l != artist_r: artist_l, artist_r = ui.colordiff(artist_l, artist_r) - # Prefix with U+2260: Not Equal To left = { - "prefix": ui.colorize("changed", "\u2260") + " Artist: ", + "prefix": f"{self.changed_prefix} Artist: ", "contents": artist_l, "suffix": "", } @@ -414,7 +415,7 @@ class ChangeRepresentation: self.print_layout(self.indent_detail, left, right) else: - print_(self.indent_detail + "*", "Artist:", artist_r) + print_(f"{self.indent_detail}*", "Artist:", artist_r) if self.cur_album: # Album @@ -424,31 +425,29 @@ class ChangeRepresentation: and self.match.info.album != VARIOUS_ARTISTS ): album_l, album_r = ui.colordiff(album_l, album_r) - # Prefix with U+2260: Not Equal To left = { - "prefix": ui.colorize("changed", "\u2260") + " Album: ", + "prefix": f"{self.changed_prefix} Album: ", "contents": album_l, "suffix": "", } right = {"prefix": "", "contents": album_r, "suffix": ""} self.print_layout(self.indent_detail, left, right) else: - print_(self.indent_detail + "*", "Album:", album_r) + print_(f"{self.indent_detail}*", "Album:", album_r) elif self.cur_title: # Title - for singletons title_l, title_r = self.cur_title or "", self.match.info.title if self.cur_title != self.match.info.title: title_l, title_r = ui.colordiff(title_l, title_r) - # Prefix with U+2260: Not Equal To left = { - "prefix": ui.colorize("changed", "\u2260") + " Title: ", + "prefix": f"{self.changed_prefix} Title: ", "contents": title_l, "suffix": "", } right = {"prefix": "", "contents": title_r, "suffix": ""} self.print_layout(self.indent_detail, left, right) else: - print_(self.indent_detail + "*", "Title:", title_r) + print_(f"{self.indent_detail}*", "Title:", title_r) def make_medium_info_line(self, track_info): """Construct a line with the current medium's info.""" @@ -490,7 +489,6 @@ class ChangeRepresentation: """Format colored track indices.""" cur_track = self.format_index(item) new_track = self.format_index(track_info) - templ = "(#{})" changed = 
False # Choose color based on change. if cur_track != new_track: @@ -502,10 +500,8 @@ class ChangeRepresentation: else: highlight_color = "text_faint" - cur_track = templ.format(cur_track) - new_track = templ.format(new_track) - lhs_track = ui.colorize(highlight_color, cur_track) - rhs_track = ui.colorize(highlight_color, new_track) + lhs_track = ui.colorize(highlight_color, f"(#{cur_track})") + rhs_track = ui.colorize(highlight_color, f"(#{new_track})") return lhs_track, rhs_track, changed @staticmethod @@ -573,11 +569,10 @@ class ChangeRepresentation: # the case, thus the 'info' dictionary is unneeded. # penalties = penalty_string(self.match.distance.tracks[track_info]) - prefix = ui.colorize("changed", "\u2260 ") if changed else "* " lhs = { - "prefix": prefix + lhs_track + " ", + "prefix": f"{self.changed_prefix if changed else '*'} {lhs_track} ", "contents": lhs_title, - "suffix": " " + lhs_length, + "suffix": f" {lhs_length}", } rhs = {"prefix": "", "contents": "", "suffix": ""} if not changed: @@ -586,9 +581,9 @@ class ChangeRepresentation: else: # Construct a dictionary for the "changed to" side rhs = { - "prefix": rhs_track + " ", + "prefix": f"{rhs_track} ", "contents": rhs_title, - "suffix": " " + rhs_length, + "suffix": f" {rhs_length}", } return (lhs, rhs) @@ -681,7 +676,7 @@ class AlbumChange(ChangeRepresentation): # Print tracks from previous medium self.print_tracklist(lines) lines = [] - print_(self.indent_detail + header) + print_(f"{self.indent_detail}{header}") # Save new medium details for future comparison. medium, disctitle = track_info.medium, track_info.disctitle @@ -697,11 +692,9 @@ class AlbumChange(ChangeRepresentation): # Missing and unmatched tracks. if self.match.extra_tracks: print_( - "Missing tracks ({0}/{1} - {2:.1%}):".format( - len(self.match.extra_tracks), - len(self.match.info.tracks), - len(self.match.extra_tracks) / len(self.match.info.tracks), - ) + "Missing tracks" + f" ({len(self.match.extra_tracks)}/{len(self.match.info.tracks)} -" + f" {len(self.match.extra_tracks) / len(self.match.info.tracks):.1%}):" ) for track_info in self.match.extra_tracks: line = f" ! {track_info.title} (#{self.format_index(track_info)})" @@ -711,9 +704,9 @@ class AlbumChange(ChangeRepresentation): if self.match.extra_items: print_(f"Unmatched tracks ({len(self.match.extra_items)}):") for item in self.match.extra_items: - line = " ! {} (#{})".format(item.title, self.format_index(item)) + line = f" ! 
{item.title} (#{self.format_index(item)})" if item.length: - line += " ({})".format(human_seconds_short(item.length)) + line += f" ({human_seconds_short(item.length)})" print_(ui.colorize("text_warning", line)) @@ -769,7 +762,7 @@ def summarize_items(items, singleton): """ summary_parts = [] if not singleton: - summary_parts.append("{} items".format(len(items))) + summary_parts.append(f"{len(items)} items") format_counts = {} for item in items: @@ -789,10 +782,11 @@ def summarize_items(items, singleton): average_bitrate = sum([item.bitrate for item in items]) / len(items) total_duration = sum([item.length for item in items]) total_filesize = sum([item.filesize for item in items]) - summary_parts.append("{}kbps".format(int(average_bitrate / 1000))) + summary_parts.append(f"{int(average_bitrate / 1000)}kbps") if items[0].format == "FLAC": - sample_bits = "{}kHz/{} bit".format( - round(int(items[0].samplerate) / 1000, 1), items[0].bitdepth + sample_bits = ( + f"{round(int(items[0].samplerate) / 1000, 1)}kHz" + f"/{items[0].bitdepth} bit" ) summary_parts.append(sample_bits) summary_parts.append(human_seconds_short(total_duration)) @@ -885,7 +879,7 @@ def choose_candidate( if singleton: print_("No matching recordings found.") else: - print_("No matching release found for {} tracks.".format(itemcount)) + print_(f"No matching release found for {itemcount} tracks.") print_( "For help, see: " "https://beets.readthedocs.org/en/latest/faq.html#nomatch" @@ -910,40 +904,38 @@ def choose_candidate( # Display list of candidates. print_("") print_( - 'Finding tags for {} "{} - {}".'.format( - "track" if singleton else "album", - item.artist if singleton else cur_artist, - item.title if singleton else cur_album, - ) + f"Finding tags for {'track' if singleton else 'album'} " + f'"{item.artist if singleton else cur_artist} -' + f' {item.title if singleton else cur_album}".' ) - print_(ui.indent(2) + "Candidates:") + print_(" Candidates:") for i, match in enumerate(candidates): # Index, metadata, and distance. - index0 = "{0}.".format(i + 1) + index0 = f"{i + 1}." index = dist_colorize(index0, match.distance) - dist = "({:.1f}%)".format((1 - match.distance) * 100) + dist = f"({(1 - match.distance) * 100:.1f}%)" distance = dist_colorize(dist, match.distance) - metadata = "{0} - {1}".format( - match.info.artist, - match.info.title if singleton else match.info.album, + metadata = ( + f"{match.info.artist} -" + f" {match.info.title if singleton else match.info.album}" ) if i == 0: metadata = dist_colorize(metadata, match.distance) else: metadata = ui.colorize("text_highlight_minor", metadata) line1 = [index, distance, metadata] - print_(ui.indent(2) + " ".join(line1)) + print_(f" {' '.join(line1)}") # Penalties. penalties = penalty_string(match.distance, 3) if penalties: - print_(ui.indent(13) + penalties) + print_(f"{' ' * 13}{penalties}") # Disambiguation disambig = disambig_string(match.info) if disambig: - print_(ui.indent(13) + disambig) + print_(f"{' ' * 13}{disambig}") # Ask the user for a choice. sel = ui.input_options(choice_opts, numrange=(1, len(candidates))) @@ -1015,7 +1007,7 @@ def manual_id(session, task): Input an ID, either for an album ("release") or a track ("recording"). 
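For reference, the bitrate and sample-rate parts of the summary built by summarize_items above come down to the following formatting; the numbers here are made up:

average_bitrate, samplerate, bitdepth = 1_411_000, 44100, 16

parts = [
    f"{int(average_bitrate / 1000)}kbps",
    f"{round(samplerate / 1000, 1)}kHz/{bitdepth} bit",
]
assert ", ".join(parts) == "1411kbps, 44.1kHz/16 bit"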
""" - prompt = "Enter {} ID:".format("release" if task.is_album else "recording") + prompt = f"Enter {'release' if task.is_album else 'recording'} ID:" search_id = input_(prompt).strip() if task.is_album: @@ -1043,7 +1035,7 @@ class TerminalImportSession(importer.ImportSession): path_str0 = displayable_path(task.paths, "\n") path_str = ui.colorize("import_path", path_str0) - items_str0 = "({} items)".format(len(task.items)) + items_str0 = f"({len(task.items)} items)" items_str = ui.colorize("import_path_items", items_str0) print_(" ".join([path_str, items_str])) @@ -1156,7 +1148,7 @@ class TerminalImportSession(importer.ImportSession): that's already in the library. """ log.warning( - "This {0} is already in the library!", + "This {} is already in the library!", ("album" if task.is_album else "item"), ) @@ -1217,8 +1209,8 @@ class TerminalImportSession(importer.ImportSession): def should_resume(self, path): return ui.input_yn( - "Import of the directory:\n{}\n" - "was interrupted. Resume (Y/n)?".format(displayable_path(path)) + f"Import of the directory:\n{displayable_path(path)}\n" + "was interrupted. Resume (Y/n)?" ) def _get_choices(self, task): @@ -1288,11 +1280,10 @@ class TerminalImportSession(importer.ImportSession): dup_choices = [c for c in all_choices if c.short == short] for c in dup_choices[1:]: log.warning( - "Prompt choice '{0}' removed due to conflict " - "with '{1}' (short letter: '{2}')", - c.long, - dup_choices[0].long, - c.short, + "Prompt choice '{0.long}' removed due to conflict " + "with '{1[0].long}' (short letter: '{0.short}')", + c, + dup_choices, ) extra_choices.remove(c) @@ -1317,7 +1308,8 @@ def import_files(lib, paths: list[bytes], query): loghandler = logging.FileHandler(logpath, encoding="utf-8") except OSError: raise ui.UserError( - f"Could not open log file for writing: {displayable_path(logpath)}" + "Could not open log file for writing:" + f" {displayable_path(logpath)}" ) else: loghandler = None @@ -1362,9 +1354,7 @@ def import_func(lib, opts, args: list[str]): for path in byte_paths: if not os.path.exists(syspath(normpath(path))): raise ui.UserError( - "no such file or directory: {}".format( - displayable_path(path) - ) + f"no such file or directory: {displayable_path(path)}" ) # Check the directories from the logfiles, but don't throw an error in @@ -1374,9 +1364,7 @@ def import_func(lib, opts, args: list[str]): for path in paths_from_logfiles: if not os.path.exists(syspath(normpath(path))): log.warning( - "No such file or directory: {}".format( - displayable_path(path) - ) + "No such file or directory: {}", displayable_path(path) ) continue @@ -1650,9 +1638,8 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None): # Did the item change since last checked? if item.current_mtime() <= item.mtime: log.debug( - "skipping {0} because mtime is up to date ({1})", - displayable_path(item.path), - item.mtime, + "skipping {0.filepath} because mtime is up to date ({0.mtime})", + item, ) continue @@ -1660,9 +1647,7 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None): try: item.read() except library.ReadError as exc: - log.error( - "error reading {0}: {1}", displayable_path(item.path), exc - ) + log.error("error reading {.filepath}: {}", item, exc) continue # Special-case album artist when it matches track artist. 
(Hacky @@ -1703,7 +1688,7 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None): continue album = lib.get_album(album_id) if not album: # Empty albums have already been removed. - log.debug("emptied album {0}", album_id) + log.debug("emptied album {}", album_id) continue first_item = album.items().get() @@ -1714,7 +1699,7 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None): # Move album art (and any inconsistent items). if move and lib.directory in ancestry(first_item.path): - log.debug("moving album {0}", album_id) + log.debug("moving album {}", album_id) # Manually moving and storing the album. items = list(album.items()) @@ -1808,7 +1793,7 @@ def remove_items(lib, query, album, delete, force): if not force: # Prepare confirmation with user. album_str = ( - " in {} album{}".format(len(albums), "s" if len(albums) > 1 else "") + f" in {len(albums)} album{'s' if len(albums) > 1 else ''}" if album else "" ) @@ -1816,14 +1801,17 @@ def remove_items(lib, query, album, delete, force): if delete: fmt = "$path - $title" prompt = "Really DELETE" - prompt_all = "Really DELETE {} file{}{}".format( - len(items), "s" if len(items) > 1 else "", album_str + prompt_all = ( + "Really DELETE" + f" {len(items)} file{'s' if len(items) > 1 else ''}{album_str}" ) else: fmt = "" prompt = "Really remove from the library?" - prompt_all = "Really remove {} item{}{} from the library?".format( - len(items), "s" if len(items) > 1 else "", album_str + prompt_all = ( + "Really remove" + f" {len(items)} item{'s' if len(items) > 1 else ''}{album_str}" + " from the library?" ) # Helpers for printing affected items @@ -1892,7 +1880,7 @@ def show_stats(lib, query, exact): try: total_size += os.path.getsize(syspath(item.path)) except OSError as exc: - log.info("could not get size of {}: {}", item.path, exc) + log.info("could not get size of {.path}: {}", item, exc) else: total_size += int(item.length * item.bitrate / 8) total_time += item.length @@ -1902,27 +1890,17 @@ def show_stats(lib, query, exact): if item.album_id: albums.add(item.album_id) - size_str = "" + human_bytes(total_size) + size_str = human_bytes(total_size) if exact: size_str += f" ({total_size} bytes)" - print_( - """Tracks: {} -Total time: {}{} -{}: {} -Artists: {} -Albums: {} -Album artists: {}""".format( - total_items, - human_seconds(total_time), - f" ({total_time:.2f} seconds)" if exact else "", - "Total size" if exact else "Approximate total size", - size_str, - len(artists), - len(albums), - len(album_artists), - ), - ) + print_(f"""Tracks: {total_items} +Total time: {human_seconds(total_time)} +{f" ({total_time:.2f} seconds)" if exact else ""} +{"Total size" if exact else "Approximate total size"}: {size_str} +Artists: {len(artists)} +Albums: {len(albums)} +Album artists: {len(album_artists)}""") def stats_func(lib, opts, args): @@ -1943,7 +1921,7 @@ default_commands.append(stats_cmd) def show_version(lib, opts, args): - print_("beets version %s" % beets.__version__) + print_(f"beets version {beets.__version__}") print_(f"Python version {python_version()}") # Show plugins. names = sorted(p.name for p in plugins.find_plugins()) @@ -1977,7 +1955,7 @@ def modify_items(lib, mods, dels, query, write, move, album, confirm, inherit): # Apply changes *temporarily*, preview them, and collect modified # objects. 
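# Illustrative sketch (editor's aside, not part of the patch): the prompts in
# this hunk build pluralisation and optional suffixes with conditional
# expressions inside f-string replacement fields. A minimal, self-contained
# example of the same pattern; the function and argument names are invented.
def removal_prompt(n_items: int, n_albums: int, delete: bool) -> str:
    album_str = (
        f" in {n_albums} album{'s' if n_albums > 1 else ''}" if n_albums else ""
    )
    verb = "DELETE" if delete else "remove"
    return f"Really {verb} {n_items} item{'s' if n_items > 1 else ''}{album_str}?"

assert removal_prompt(1, 0, False) == "Really remove 1 item?"
assert removal_prompt(3, 2, True) == "Really DELETE 3 items in 2 albums?"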
- print_("Modifying {} {}s.".format(len(objs), "album" if album else "item")) + print_(f"Modifying {len(objs)} {'album' if album else 'item'}s.") changed = [] templates = { key: functemplate.template(value) for key, value in mods.items() @@ -2007,7 +1985,7 @@ def modify_items(lib, mods, dels, query, write, move, album, confirm, inherit): extra = "" changed = ui.input_select_objects( - "Really modify%s" % extra, + f"Really modify{extra}", changed, lambda o: print_and_modify(o, mods, dels), ) @@ -2159,7 +2137,7 @@ def move_items( act = "copy" if copy else "move" entity = "album" if album else "item" log.info( - "{0} {1} {2}{3}{4}.", + "{} {} {}{}{}.", action, len(objs), entity, @@ -2185,7 +2163,7 @@ def move_items( else: if confirm: objs = ui.input_select_objects( - "Really %s" % act, + f"Really {act}", objs, lambda o: show_path_changes( [(o.path, o.destination(basedir=dest))] @@ -2193,7 +2171,7 @@ def move_items( ) for obj in objs: - log.debug("moving: {0}", util.displayable_path(obj.path)) + log.debug("moving: {.filepath}", obj) if export: # Copy without affecting the database. @@ -2213,9 +2191,7 @@ def move_func(lib, opts, args): if dest is not None: dest = normpath(dest) if not os.path.isdir(syspath(dest)): - raise ui.UserError( - "no such directory: {}".format(displayable_path(dest)) - ) + raise ui.UserError(f"no such directory: {displayable_path(dest)}") move_items( lib, @@ -2278,16 +2254,14 @@ def write_items(lib, query, pretend, force): for item in items: # Item deleted? if not os.path.exists(syspath(item.path)): - log.info("missing file: {0}", util.displayable_path(item.path)) + log.info("missing file: {.filepath}", item) continue # Get an Item object reflecting the "clean" (on-disk) state. try: clean_item = library.Item.from_path(item.path) except library.ReadError as exc: - log.error( - "error reading {0}: {1}", displayable_path(item.path), exc - ) + log.error("error reading {.filepath}: {}", item, exc) continue # Check for and display changes. 
@@ -2480,30 +2454,27 @@ def completion_script(commands): yield "_beet() {\n" # Command names - yield " local commands='%s'\n" % " ".join(command_names) + yield f" local commands={' '.join(command_names)!r}\n" yield "\n" # Command aliases - yield " local aliases='%s'\n" % " ".join(aliases.keys()) + yield f" local aliases={' '.join(aliases.keys())!r}\n" for alias, cmd in aliases.items(): - yield " local alias__{}={}\n".format(alias.replace("-", "_"), cmd) + yield f" local alias__{alias.replace('-', '_')}={cmd}\n" yield "\n" # Fields - yield " fields='%s'\n" % " ".join( - set( - list(library.Item._fields.keys()) - + list(library.Album._fields.keys()) - ) - ) + fields = library.Item._fields.keys() | library.Album._fields.keys() + yield f" fields={' '.join(fields)!r}\n" # Command options for cmd, opts in options.items(): for option_type, option_list in opts.items(): if option_list: option_list = " ".join(option_list) - yield " local {}__{}='{}'\n".format( - option_type, cmd.replace("-", "_"), option_list + yield ( + " local" + f" {option_type}__{cmd.replace('-', '_')}='{option_list}'\n" ) yield " _beet_dispatch\n" diff --git a/beets/util/__init__.py b/beets/util/__init__.py index e2f7f46bd..fc05e4997 100644 --- a/beets/util/__init__.py +++ b/beets/util/__init__.py @@ -47,6 +47,7 @@ from typing import ( NamedTuple, TypeVar, Union, + cast, ) from unidecode import unidecode @@ -112,7 +113,7 @@ class HumanReadableError(Exception): elif hasattr(self.reason, "strerror"): # i.e., EnvironmentError return self.reason.strerror else: - return '"{}"'.format(str(self.reason)) + return f'"{self.reason}"' def get_message(self): """Create the human-readable description of the error, sans @@ -126,7 +127,7 @@ class HumanReadableError(Exception): """ if self.tb: logger.debug(self.tb) - logger.error("{0}: {1}", self.error_kind, self.args[0]) + logger.error("{0.error_kind}: {0.args[0]}", self) class FilesystemError(HumanReadableError): @@ -142,18 +143,16 @@ class FilesystemError(HumanReadableError): def get_message(self): # Use a nicer English phrasing for some specific verbs. if self.verb in ("move", "copy", "rename"): - clause = "while {} {} to {}".format( - self._gerund(), - displayable_path(self.paths[0]), - displayable_path(self.paths[1]), + clause = ( + f"while {self._gerund()} {displayable_path(self.paths[0])} to" + f" {displayable_path(self.paths[1])}" ) elif self.verb in ("delete", "write", "create", "read"): - clause = "while {} {}".format( - self._gerund(), displayable_path(self.paths[0]) - ) + clause = f"while {self._gerund()} {displayable_path(self.paths[0])}" else: - clause = "during {} of paths {}".format( - self.verb, ", ".join(displayable_path(p) for p in self.paths) + clause = ( + f"during {self.verb} of paths" + f" {', '.join(displayable_path(p) for p in self.paths)}" ) return f"{self._reasonstr()} {clause}" @@ -223,12 +222,12 @@ def sorted_walk( # Get all the directories and files at this level. try: contents = os.listdir(syspath(bytes_path)) - except OSError as exc: + except OSError: if logger: logger.warning( - "could not list directory {}: {}".format( - displayable_path(bytes_path), exc.strerror - ) + "could not list directory {}", + displayable_path(bytes_path), + exc_info=True, ) return dirs = [] @@ -436,8 +435,8 @@ def syspath(path: PathLike, prefix: bool = True) -> str: if prefix and not str_path.startswith(WINDOWS_MAGIC_PREFIX): if str_path.startswith("\\\\"): # UNC path. Final path should look like \\?\UNC\... 
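# Illustrative sketch (not from the patch): what the UNC branch below produces,
# assuming WINDOWS_MAGIC_PREFIX is the extended-length prefix "\\\\?\\"
# referred to in the comment above.
WINDOWS_MAGIC_PREFIX = "\\\\?\\"

def long_unc(str_path: str) -> str:
    if str_path.startswith("\\\\"):
        # \\server\share -> UNC\server\share
        str_path = f"UNC{str_path[1:]}"
    return f"{WINDOWS_MAGIC_PREFIX}{str_path}"

assert long_unc("\\\\server\\share\\music") == "\\\\?\\UNC\\server\\share\\music"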
- str_path = "UNC" + str_path[1:] - str_path = WINDOWS_MAGIC_PREFIX + str_path + str_path = f"UNC{str_path[1:]}" + str_path = f"{WINDOWS_MAGIC_PREFIX}{str_path}" return str_path @@ -509,8 +508,8 @@ def move(path: bytes, dest: bytes, replace: bool = False): basename = os.path.basename(bytestring_path(dest)) dirname = os.path.dirname(bytestring_path(dest)) tmp = tempfile.NamedTemporaryFile( - suffix=syspath(b".beets", prefix=False), - prefix=syspath(b"." + basename + b".", prefix=False), + suffix=".beets", + prefix=f".{os.fsdecode(basename)}.", dir=syspath(dirname), delete=False, ) @@ -719,7 +718,7 @@ def truncate_path(str_path: str) -> str: path = Path(str_path) parent_parts = [truncate_str(p, max_length) for p in path.parts[:-1]] stem = truncate_str(path.stem, max_length - len(path.suffix)) - return str(Path(*parent_parts, stem)) + path.suffix + return f"{Path(*parent_parts, stem)}{path.suffix}" def _legalize_stage( @@ -838,9 +837,10 @@ def get_most_common_tags( "country", "media", "albumdisambig", + "data_source", ] for field in fields: - values = [item[field] for item in items if item] + values = [item.get(field) for item in items if item] likelies[field], freq = plurality(values) consensus[field] = freq == len(values) @@ -1053,7 +1053,7 @@ def par_map(transform: Callable[[T], Any], items: Sequence[T]) -> None: pool.join() -class cached_classproperty: +class cached_classproperty(Generic[T]): """Descriptor implementing cached class properties. Provides class-level dynamic property behavior where the getter function is @@ -1061,9 +1061,9 @@ class cached_classproperty: instance properties, this operates on the class rather than instances. """ - cache: ClassVar[dict[tuple[Any, str], Any]] = {} + cache: ClassVar[dict[tuple[type[object], str], object]] = {} - name: str + name: str = "" # Ideally, we would like to use `Callable[[type[T]], Any]` here, # however, `mypy` is unable to see this as a **class** property, and thinks @@ -1079,21 +1079,21 @@ class cached_classproperty: # "Callable[[Album], ...]"; expected "Callable[[type[Album]], ...]" # # Therefore, we just use `Any` here, which is not ideal, but works. 
- def __init__(self, getter: Callable[[Any], Any]) -> None: + def __init__(self, getter: Callable[..., T]) -> None: """Initialize the descriptor with the property getter function.""" - self.getter = getter + self.getter: Callable[..., T] = getter - def __set_name__(self, owner: Any, name: str) -> None: + def __set_name__(self, owner: object, name: str) -> None: """Capture the attribute name this descriptor is assigned to.""" self.name = name - def __get__(self, instance: Any, owner: type[Any]) -> Any: + def __get__(self, instance: object, owner: type[object]) -> T: """Compute and cache if needed, and return the property value.""" - key = owner, self.name + key: tuple[type[object], str] = owner, self.name if key not in self.cache: self.cache[key] = self.getter(owner) - return self.cache[key] + return cast(T, self.cache[key]) class LazySharedInstance(Generic[T]): diff --git a/beets/util/artresizer.py b/beets/util/artresizer.py index fe67c506e..5ecde5140 100644 --- a/beets/util/artresizer.py +++ b/beets/util/artresizer.py @@ -54,7 +54,7 @@ def resize_url(url: str, maxwidth: int, quality: int = 0) -> str: if quality > 0: params["q"] = quality - return "{}?{}".format(PROXY_URL, urlencode(params)) + return f"{PROXY_URL}?{urlencode(params)}" class LocalBackendNotAvailableError(Exception): @@ -255,7 +255,7 @@ class IMBackend(LocalBackend): path_out = get_temp_filename(__name__, "resize_IM_", path_in) log.debug( - "artresizer: ImageMagick resizing {0} to {1}", + "artresizer: ImageMagick resizing {} to {}", displayable_path(path_in), displayable_path(path_out), ) @@ -287,7 +287,7 @@ class IMBackend(LocalBackend): util.command_output(cmd) except subprocess.CalledProcessError: log.warning( - "artresizer: IM convert failed for {0}", + "artresizer: IM convert failed for {}", displayable_path(path_in), ) return path_in @@ -306,9 +306,9 @@ class IMBackend(LocalBackend): except subprocess.CalledProcessError as exc: log.warning("ImageMagick size query failed") log.debug( - "`convert` exited with (status {}) when " + "`convert` exited with (status {.returncode}) when " "getting size with command {}:\n{}", - exc.returncode, + exc, cmd, exc.output.strip(), ) @@ -441,8 +441,8 @@ class IMBackend(LocalBackend): convert_proc.wait() if convert_proc.returncode: log.debug( - "ImageMagick convert failed with status {}: {!r}", - convert_proc.returncode, + "ImageMagick convert failed with status {.returncode}: {!r}", + convert_proc, convert_stderr, ) return None @@ -452,7 +452,7 @@ class IMBackend(LocalBackend): if compare_proc.returncode: if compare_proc.returncode != 1: log.debug( - "ImageMagick compare failed: {0}, {1}", + "ImageMagick compare failed: {}, {}", displayable_path(im2), displayable_path(im1), ) @@ -472,7 +472,7 @@ class IMBackend(LocalBackend): log.debug("IM output is not a number: {0!r}", out_str) return None - log.debug("ImageMagick compare score: {0}", phash_diff) + log.debug("ImageMagick compare score: {}", phash_diff) return phash_diff <= compare_threshold @property @@ -523,7 +523,7 @@ class PILBackend(LocalBackend): from PIL import Image log.debug( - "artresizer: PIL resizing {0} to {1}", + "artresizer: PIL resizing {} to {}", displayable_path(path_in), displayable_path(path_out), ) @@ -552,7 +552,7 @@ class PILBackend(LocalBackend): for i in range(5): # 5 attempts is an arbitrary choice filesize = os.stat(syspath(path_out)).st_size - log.debug("PIL Pass {0} : Output size: {1}B", i, filesize) + log.debug("PIL Pass {} : Output size: {}B", i, filesize) if filesize <= max_filesize: return path_out # 
The relationship between filesize & quality will be @@ -569,7 +569,7 @@ class PILBackend(LocalBackend): progressive=False, ) log.warning( - "PIL Failed to resize file to below {0}B", max_filesize + "PIL Failed to resize file to below {}B", max_filesize ) return path_out @@ -577,7 +577,7 @@ class PILBackend(LocalBackend): return path_out except OSError: log.error( - "PIL cannot create thumbnail for '{0}'", + "PIL cannot create thumbnail for '{}'", displayable_path(path_in), ) return path_in @@ -696,7 +696,7 @@ class ArtResizer: for backend_cls in BACKEND_CLASSES: try: self.local_method = backend_cls() - log.debug(f"artresizer: method is {self.local_method.NAME}") + log.debug("artresizer: method is {.local_method.NAME}", self) break except LocalBackendNotAvailableError: continue diff --git a/beets/util/bluelet.py b/beets/util/bluelet.py index b81b389e0..3f3a88b1e 100644 --- a/beets/util/bluelet.py +++ b/beets/util/bluelet.py @@ -559,7 +559,7 @@ def spawn(coro): and child coroutines run concurrently. """ if not isinstance(coro, types.GeneratorType): - raise ValueError("%s is not a coroutine" % coro) + raise ValueError(f"{coro} is not a coroutine") return SpawnEvent(coro) @@ -569,7 +569,7 @@ def call(coro): returns a value using end(), then this event returns that value. """ if not isinstance(coro, types.GeneratorType): - raise ValueError("%s is not a coroutine" % coro) + raise ValueError(f"{coro} is not a coroutine") return DelegationEvent(coro) diff --git a/beets/util/functemplate.py b/beets/util/functemplate.py index b0daefac2..5d85530a1 100644 --- a/beets/util/functemplate.py +++ b/beets/util/functemplate.py @@ -136,7 +136,7 @@ class Symbol: self.original = original def __repr__(self): - return "Symbol(%s)" % repr(self.ident) + return f"Symbol({self.ident!r})" def evaluate(self, env): """Evaluate the symbol in the environment, returning a Unicode @@ -152,7 +152,7 @@ class Symbol: def translate(self): """Compile the variable lookup.""" ident = self.ident - expr = ex_rvalue(VARIABLE_PREFIX + ident) + expr = ex_rvalue(f"{VARIABLE_PREFIX}{ident}") return [expr], {ident}, set() @@ -165,9 +165,7 @@ class Call: self.original = original def __repr__(self): - return "Call({}, {}, {})".format( - repr(self.ident), repr(self.args), repr(self.original) - ) + return f"Call({self.ident!r}, {self.args!r}, {self.original!r})" def evaluate(self, env): """Evaluate the function call in the environment, returning a @@ -180,7 +178,7 @@ class Call: except Exception as exc: # Function raised exception! Maybe inlining the name of # the exception will help debug. - return "<%s>" % str(exc) + return f"<{exc}>" return str(out) else: return self.original @@ -213,7 +211,7 @@ class Call: ) ) - subexpr_call = ex_call(FUNCTION_PREFIX + self.ident, arg_exprs) + subexpr_call = ex_call(f"{FUNCTION_PREFIX}{self.ident}", arg_exprs) return [subexpr_call], varnames, funcnames @@ -226,7 +224,7 @@ class Expression: self.parts = parts def __repr__(self): - return "Expression(%s)" % (repr(self.parts)) + return f"Expression({self.parts!r})" def evaluate(self, env): """Evaluate the entire expression in the environment, returning @@ -298,9 +296,6 @@ class Parser: GROUP_CLOSE, ESCAPE_CHAR, ) - special_char_re = re.compile( - r"[%s]|\Z" % "".join(re.escape(c) for c in special_chars) - ) escapable_chars = (SYMBOL_DELIM, FUNC_DELIM, GROUP_CLOSE, ARG_SEP) terminator_chars = (GROUP_CLOSE,) @@ -312,24 +307,18 @@ class Parser: """ # Append comma (ARG_SEP) to the list of special characters only when # parsing function arguments. 
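# Illustrative sketch (not from the patch): the pattern built below matches the
# next special character or end-of-string, escaping each character
# individually. Assuming a small set of specials for demonstration:
import re

special_chars = ("$", "{", "}", ",")
special_char_re = re.compile(rf"[{''.join(map(re.escape, special_chars))}]|\Z")

m = special_char_re.search("artist - $title")
assert m is not None and m.group(0) == "$"
assert special_char_re.search("no specials here").group(0) == ""  # matched \Z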
- extra_special_chars = () - special_char_re = self.special_char_re - if self.in_argument: - extra_special_chars = (ARG_SEP,) - special_char_re = re.compile( - r"[%s]|\Z" - % "".join( - re.escape(c) - for c in self.special_chars + extra_special_chars - ) - ) + extra_special_chars = (ARG_SEP,) if self.in_argument else () + special_chars = (*self.special_chars, *extra_special_chars) + special_char_re = re.compile( + rf"[{''.join(map(re.escape, special_chars))}]|\Z" + ) text_parts = [] while self.pos < len(self.string): char = self.string[self.pos] - if char not in self.special_chars + extra_special_chars: + if char not in special_chars: # A non-special character. Skip to the next special # character, treating the interstice as literal text. next_pos = ( @@ -566,9 +555,9 @@ class Template: argnames = [] for varname in varnames: - argnames.append(VARIABLE_PREFIX + varname) + argnames.append(f"{VARIABLE_PREFIX}{varname}") for funcname in funcnames: - argnames.append(FUNCTION_PREFIX + funcname) + argnames.append(f"{FUNCTION_PREFIX}{funcname}") func = compile_func( argnames, @@ -578,9 +567,9 @@ class Template: def wrapper_func(values={}, functions={}): args = {} for varname in varnames: - args[VARIABLE_PREFIX + varname] = values[varname] + args[f"{VARIABLE_PREFIX}{varname}"] = values[varname] for funcname in funcnames: - args[FUNCTION_PREFIX + funcname] = functions[funcname] + args[f"{FUNCTION_PREFIX}{funcname}"] = functions[funcname] parts = func(**args) return "".join(parts) diff --git a/beets/util/id_extractors.py b/beets/util/id_extractors.py index 6cdb787d1..f66f1690f 100644 --- a/beets/util/id_extractors.py +++ b/beets/util/id_extractors.py @@ -58,7 +58,8 @@ def extract_release_id(source: str, id_: str) -> str | None: source_pattern = PATTERN_BY_SOURCE[source.lower()] except KeyError: log.debug( - f"Unknown source '{source}' for ID extraction. Returning id/url as-is." + "Unknown source '{}' for ID extraction. Returning id/url as-is.", + source, ) return id_ diff --git a/beets/util/units.py b/beets/util/units.py index d07d42546..f5fcb743b 100644 --- a/beets/util/units.py +++ b/beets/util/units.py @@ -19,7 +19,7 @@ def human_seconds_short(interval): string. """ interval = int(interval) - return "%i:%02i" % (interval // 60, interval % 60) + return f"{interval // 60}:{interval % 60:02d}" def human_bytes(size): diff --git a/beetsplug/_utils/__init__.py b/beetsplug/_utils/__init__.py new file mode 100644 index 000000000..7453f88bf --- /dev/null +++ b/beetsplug/_utils/__init__.py @@ -0,0 +1,3 @@ +from . import art, vfs + +__all__ = ["art", "vfs"] diff --git a/beets/art.py b/beetsplug/_utils/art.py similarity index 84% rename from beets/art.py rename to beetsplug/_utils/art.py index 2ff58c309..656c303ce 100644 --- a/beets/art.py +++ b/beetsplug/_utils/art.py @@ -38,11 +38,7 @@ def get_art(log, item): try: mf = mediafile.MediaFile(syspath(item.path)) except mediafile.UnreadableFileError as exc: - log.warning( - "Could not extract art from {0}: {1}", - displayable_path(item.path), - exc, - ) + log.warning("Could not extract art from {.filepath}: {}", item, exc) return return mf.art @@ -83,16 +79,16 @@ def embed_item( # Get the `Image` object from the file. 
try: - log.debug("embedding {0}", displayable_path(imagepath)) + log.debug("embedding {}", displayable_path(imagepath)) image = mediafile_image(imagepath, maxwidth) except OSError as exc: - log.warning("could not read image file: {0}", exc) + log.warning("could not read image file: {}", exc) return # Make sure the image kind is safe (some formats only support PNG # and JPEG). if image.mime_type not in ("image/jpeg", "image/png"): - log.info("not embedding image of unsupported type: {}", image.mime_type) + log.info("not embedding image of unsupported type: {.mime_type}", image) return item.try_write(path=itempath, tags={"images": [image]}, id3v23=id3v23) @@ -110,11 +106,11 @@ def embed_album( """Embed album art into all of the album's items.""" imagepath = album.artpath if not imagepath: - log.info("No album art present for {0}", album) + log.info("No album art present for {}", album) return if not os.path.isfile(syspath(imagepath)): log.info( - "Album art not found at {0} for {1}", + "Album art not found at {} for {}", displayable_path(imagepath), album, ) @@ -122,7 +118,7 @@ def embed_album( if maxwidth: imagepath = resize_image(log, imagepath, maxwidth, quality) - log.info("Embedding album art into {0}", album) + log.info("Embedding album art into {}", album) for item in album.items(): embed_item( @@ -143,8 +139,7 @@ def resize_image(log, imagepath, maxwidth, quality): specified quality level. """ log.debug( - "Resizing album art to {0} pixels wide and encoding at quality \ - level {1}", + "Resizing album art to {} pixels wide and encoding at quality level {}", maxwidth, quality, ) @@ -184,18 +179,18 @@ def extract(log, outpath, item): art = get_art(log, item) outpath = bytestring_path(outpath) if not art: - log.info("No album art present in {0}, skipping.", item) + log.info("No album art present in {}, skipping.", item) return # Add an extension to the filename. ext = mediafile.image_extension(art) if not ext: - log.warning("Unknown image type in {0}.", displayable_path(item.path)) + log.warning("Unknown image type in {.filepath}.", item) return - outpath += bytestring_path("." + ext) + outpath += bytestring_path(f".{ext}") log.info( - "Extracting album art from: {0} to: {1}", + "Extracting album art from: {} to: {}", item, displayable_path(outpath), ) @@ -213,7 +208,7 @@ def extract_first(log, outpath, items): def clear(log, lib, query): items = lib.items(query) - log.info("Clearing album art from {0} items", len(items)) + log.info("Clearing album art from {} items", len(items)) for item in items: - log.debug("Clearing art for {0}", item) + log.debug("Clearing art for {}", item) item.try_write(tags={"images": None}) diff --git a/beets/vfs.py b/beetsplug/_utils/vfs.py similarity index 82% rename from beets/vfs.py rename to beetsplug/_utils/vfs.py index 4fd133f5a..6294b644c 100644 --- a/beets/vfs.py +++ b/beetsplug/_utils/vfs.py @@ -16,17 +16,25 @@ libraries. """ -from typing import Any, NamedTuple +from __future__ import annotations + +from typing import TYPE_CHECKING, NamedTuple from beets import util +if TYPE_CHECKING: + from beets.library import Library + class Node(NamedTuple): - files: dict[str, Any] - dirs: dict[str, Any] + files: dict[str, int] + # Maps filenames to Item ids. + + dirs: dict[str, Node] + # Maps directory names to child nodes. -def _insert(node, path, itemid): +def _insert(node: Node, path: list[str], itemid: int): """Insert an item into a virtual filesystem node.""" if len(path) == 1: # Last component. Insert file. 
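# Illustrative sketch (not from the patch): how the Node tree above is meant to
# be used. A root node maps directory names to child Nodes and file names to
# item ids; paths are inserted one component at a time. The insert() helper is
# a simplified stand-in for the module's _insert().
from typing import NamedTuple

class Node(NamedTuple):
    files: dict[str, int]
    dirs: dict[str, "Node"]

def insert(node: Node, path: list[str], itemid: int) -> None:
    head, *rest = path
    if not rest:
        node.files[head] = itemid  # last component: a file
    else:
        child = node.dirs.setdefault(head, Node({}, {}))
        insert(child, rest, itemid)  # recurse into the directory

root = Node({}, {})
insert(root, ["Artist", "Album", "01 Song.flac"], 42)
assert root.dirs["Artist"].dirs["Album"].files["01 Song.flac"] == 42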
@@ -40,7 +48,7 @@ def _insert(node, path, itemid): _insert(node.dirs[dirname], rest, itemid) -def libtree(lib): +def libtree(lib: Library) -> Node: """Generates a filesystem-like directory tree for the files contained in `lib`. Filesystem nodes are (files, dirs) named tuples in which both components are dictionaries. The first diff --git a/beetsplug/absubmit.py b/beetsplug/absubmit.py index c02a1c923..62a248482 100644 --- a/beetsplug/absubmit.py +++ b/beetsplug/absubmit.py @@ -42,9 +42,7 @@ def call(args): try: return util.command_output(args).stdout except subprocess.CalledProcessError as e: - raise ABSubmitError( - "{} exited with status {}".format(args[0], e.returncode) - ) + raise ABSubmitError(f"{args[0]} exited with status {e.returncode}") class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin): @@ -63,9 +61,7 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin): # Explicit path to extractor if not os.path.isfile(self.extractor): raise ui.UserError( - "Extractor command does not exist: {0}.".format( - self.extractor - ) + f"Extractor command does not exist: {self.extractor}." ) else: # Implicit path to extractor, search for it in path @@ -101,8 +97,8 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin): "with an HTTP scheme" ) elif base_url[-1] != "/": - base_url = base_url + "/" - self.url = base_url + "{mbid}/low-level" + base_url = f"{base_url}/" + self.url = f"{base_url}{{mbid}}/low-level" def commands(self): cmd = ui.Subcommand( @@ -122,8 +118,10 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin): dest="pretend_fetch", action="store_true", default=False, - help="pretend to perform action, but show \ -only files which would be processed", + help=( + "pretend to perform action, but show only files which would be" + " processed" + ), ) cmd.func = self.command return [cmd] diff --git a/beetsplug/acousticbrainz.py b/beetsplug/acousticbrainz.py index 56ac0f6c5..92a1976a1 100644 --- a/beetsplug/acousticbrainz.py +++ b/beetsplug/acousticbrainz.py @@ -97,7 +97,7 @@ class AcousticPlugin(plugins.BeetsPlugin): "with an HTTP scheme" ) elif self.base_url[-1] != "/": - self.base_url = self.base_url + "/" + self.base_url = f"{self.base_url}/" if self.config["auto"]: self.register_listener("import_task_files", self.import_task_files) @@ -153,7 +153,7 @@ class AcousticPlugin(plugins.BeetsPlugin): try: data.update(res.json()) except ValueError: - self._log.debug("Invalid Response: {}", res.text) + self._log.debug("Invalid Response: {.text}", res) return {} return data @@ -300,4 +300,4 @@ class AcousticPlugin(plugins.BeetsPlugin): def _generate_urls(base_url, mbid): """Generates AcousticBrainz end point urls for given `mbid`.""" for level in LEVELS: - yield base_url + mbid + level + yield f"{base_url}{mbid}{level}" diff --git a/beetsplug/aura.py b/beetsplug/aura.py index 53458d7ee..7b75f31e5 100644 --- a/beetsplug/aura.py +++ b/beetsplug/aura.py @@ -236,14 +236,14 @@ class AURADocument: # Not the last page so work out links.next url if not self.args: # No existing arguments, so current page is 0 - next_url = request.url + "?page=1" + next_url = f"{request.url}?page=1" elif not self.args.get("page", None): # No existing page argument, so add one to the end - next_url = request.url + "&page=1" + next_url = f"{request.url}&page=1" else: # Increment page token by 1 next_url = request.url.replace( - f"page={page}", "page={}".format(page + 1) + f"page={page}", f"page={page + 1}" ) # Get only the items in the page range data = [ @@ -427,9 +427,7 @@ class TrackDocument(AURADocument): return 
self.error( "404 Not Found", "No track with the requested id.", - "There is no track with an id of {} in the library.".format( - track_id - ), + f"There is no track with an id of {track_id} in the library.", ) return self.single_resource_document( self.get_resource_object(self.lib, track) @@ -513,9 +511,7 @@ class AlbumDocument(AURADocument): return self.error( "404 Not Found", "No album with the requested id.", - "There is no album with an id of {} in the library.".format( - album_id - ), + f"There is no album with an id of {album_id} in the library.", ) return self.single_resource_document( self.get_resource_object(self.lib, album) @@ -600,9 +596,7 @@ class ArtistDocument(AURADocument): return self.error( "404 Not Found", "No artist with the requested id.", - "There is no artist with an id of {} in the library.".format( - artist_id - ), + f"There is no artist with an id of {artist_id} in the library.", ) return self.single_resource_document(artist_resource) @@ -703,7 +697,7 @@ class ImageDocument(AURADocument): relationships = {} # Split id into [parent_type, parent_id, filename] id_split = image_id.split("-") - relationships[id_split[0] + "s"] = { + relationships[f"{id_split[0]}s"] = { "data": [{"type": id_split[0], "id": id_split[1]}] } @@ -727,9 +721,7 @@ class ImageDocument(AURADocument): return self.error( "404 Not Found", "No image with the requested id.", - "There is no image with an id of {} in the library.".format( - image_id - ), + f"There is no image with an id of {image_id} in the library.", ) return self.single_resource_document(image_resource) @@ -775,9 +767,7 @@ def audio_file(track_id): return AURADocument.error( "404 Not Found", "No track with the requested id.", - "There is no track with an id of {} in the library.".format( - track_id - ), + f"There is no track with an id of {track_id} in the library.", ) path = os.fsdecode(track.path) @@ -785,9 +775,8 @@ def audio_file(track_id): return AURADocument.error( "404 Not Found", "No audio file for the requested track.", - ( - "There is no audio file for track {} at the expected location" - ).format(track_id), + f"There is no audio file for track {track_id} at the expected" + " location", ) file_mimetype = guess_type(path)[0] @@ -795,10 +784,8 @@ def audio_file(track_id): return AURADocument.error( "500 Internal Server Error", "Requested audio file has an unknown mimetype.", - ( - "The audio file for track {} has an unknown mimetype. " - "Its file extension is {}." - ).format(track_id, path.split(".")[-1]), + f"The audio file for track {track_id} has an unknown mimetype. " + f"Its file extension is {path.split('.')[-1]}.", ) # Check that the Accept header contains the file's mimetype @@ -810,10 +797,8 @@ def audio_file(track_id): return AURADocument.error( "406 Not Acceptable", "Unsupported MIME type or bitrate parameter in Accept header.", - ( - "The audio file for track {} is only available as {} and " - "bitrate parameters are not supported." 
- ).format(track_id, file_mimetype), + f"The audio file for track {track_id} is only available as" + f" {file_mimetype} and bitrate parameters are not supported.", ) return send_file( @@ -896,9 +881,7 @@ def image_file(image_id): return AURADocument.error( "404 Not Found", "No image with the requested id.", - "There is no image with an id of {} in the library".format( - image_id - ), + f"There is no image with an id of {image_id} in the library", ) return send_file(img_path) diff --git a/beetsplug/badfiles.py b/beetsplug/badfiles.py index 0511d960d..070008be8 100644 --- a/beetsplug/badfiles.py +++ b/beetsplug/badfiles.py @@ -110,9 +110,7 @@ class BadFiles(BeetsPlugin): self._log.debug("checking path: {}", dpath) if not os.path.exists(item.path): ui.print_( - "{}: file does not exist".format( - ui.colorize("text_error", dpath) - ) + f"{ui.colorize('text_error', dpath)}: file does not exist" ) # Run the checker against the file if one is found @@ -129,37 +127,32 @@ class BadFiles(BeetsPlugin): except CheckerCommandError as e: if e.errno == errno.ENOENT: self._log.error( - "command not found: {} when validating file: {}", - e.checker, - e.path, + "command not found: {0.checker} when validating file: {0.path}", + e, ) else: - self._log.error("error invoking {}: {}", e.checker, e.msg) + self._log.error("error invoking {0.checker}: {0.msg}", e) return [] error_lines = [] if status > 0: error_lines.append( - "{}: checker exited with status {}".format( - ui.colorize("text_error", dpath), status - ) + f"{ui.colorize('text_error', dpath)}: checker exited with" + f" status {status}" ) for line in output: error_lines.append(f" {line}") elif errors > 0: error_lines.append( - "{}: checker found {} errors or warnings".format( - ui.colorize("text_warning", dpath), errors - ) + f"{ui.colorize('text_warning', dpath)}: checker found" + f" {status} errors or warnings" ) for line in output: error_lines.append(f" {line}") elif self.verbose: - error_lines.append( - "{}: ok".format(ui.colorize("text_success", dpath)) - ) + error_lines.append(f"{ui.colorize('text_success', dpath)}: ok") return error_lines @@ -180,9 +173,8 @@ class BadFiles(BeetsPlugin): def on_import_task_before_choice(self, task, session): if hasattr(task, "_badfiles_checks_failed"): ui.print_( - "{} one or more files failed checks:".format( - ui.colorize("text_warning", "BAD") - ) + f"{ui.colorize('text_warning', 'BAD')} one or more files failed" + " checks:" ) for error in task._badfiles_checks_failed: for error_line in error: diff --git a/beetsplug/beatport.py b/beetsplug/beatport.py index 16e0dc896..c07cce72f 100644 --- a/beetsplug/beatport.py +++ b/beetsplug/beatport.py @@ -110,7 +110,7 @@ class BeatportClient: :returns: OAuth resource owner key and secret as unicode """ self.api.parse_authorization_response( - "https://beets.io/auth?" + auth_data + f"https://beets.io/auth?{auth_data}" ) access_data = self.api.fetch_access_token( self._make_url("/identity/1/oauth/access-token") @@ -200,8 +200,8 @@ class BeatportClient: def _make_url(self, endpoint: str) -> str: """Get complete URL for a given API endpoint.""" if not endpoint.startswith("/"): - endpoint = "/" + endpoint - return self._api_base + endpoint + endpoint = f"/{endpoint}" + return f"{self._api_base}{endpoint}" def _get(self, endpoint: str, **kwargs) -> list[JSONDict]: """Perform a GET request on a given API endpoint. 
@@ -212,14 +212,10 @@ class BeatportClient: try: response = self.api.get(self._make_url(endpoint), params=kwargs) except Exception as e: - raise BeatportAPIError( - "Error connecting to Beatport API: {}".format(e) - ) + raise BeatportAPIError(f"Error connecting to Beatport API: {e}") if not response: raise BeatportAPIError( - "Error {0.status_code} for '{0.request.path_url}".format( - response - ) + f"Error {response.status_code} for '{response.request.path_url}" ) return response.json()["results"] @@ -275,15 +271,14 @@ class BeatportRelease(BeatportObject): self.genre = data.get("genre") if "slug" in data: - self.url = "https://beatport.com/release/{}/{}".format( - data["slug"], data["id"] + self.url = ( + f"https://beatport.com/release/{data['slug']}/{data['id']}" ) def __str__(self) -> str: - return "".format( - self.artists_str(), - self.name, - self.catalog_number, + return ( + "" ) @@ -311,9 +306,7 @@ class BeatportTrack(BeatportObject): except ValueError: pass if "slug" in data: - self.url = "https://beatport.com/track/{}/{}".format( - data["slug"], data["id"] - ) + self.url = f"https://beatport.com/track/{data['slug']}/{data['id']}" self.track_number = data.get("trackNumber") self.bpm = data.get("bpm") self.initial_key = str((data.get("key") or {}).get("shortName")) @@ -335,7 +328,6 @@ class BeatportPlugin(MetadataSourcePlugin): "apikey": "57713c3906af6f5def151b33601389176b37b429", "apisecret": "b3fe08c93c80aefd749fe871a16cd2bb32e2b954", "tokenfile": "beatport_token.json", - "source_weight": 0.5, } ) self.config["apikey"].redact = True @@ -373,7 +365,7 @@ class BeatportPlugin(MetadataSourcePlugin): try: url = auth_client.get_authorize_url() except AUTH_ERRORS as e: - self._log.debug("authentication error: {0}", e) + self._log.debug("authentication error: {}", e) raise beets.ui.UserError("communication with Beatport failed") beets.ui.print_("To authenticate with Beatport, visit:") @@ -384,11 +376,11 @@ class BeatportPlugin(MetadataSourcePlugin): try: token, secret = auth_client.get_access_token(data) except AUTH_ERRORS as e: - self._log.debug("authentication error: {0}", e) + self._log.debug("authentication error: {}", e) raise beets.ui.UserError("Beatport token request failed") # Save the token for later use. - self._log.debug("Beatport token {0}, secret {1}", token, secret) + self._log.debug("Beatport token {}, secret {}", token, secret) with open(self._tokenfile(), "w") as f: json.dump({"token": token, "secret": secret}, f) @@ -412,7 +404,7 @@ class BeatportPlugin(MetadataSourcePlugin): try: yield from self._get_releases(query) except BeatportAPIError as e: - self._log.debug("API Error: {0} (query: {1})", e, query) + self._log.debug("API Error: {} (query: {})", e, query) return def item_candidates( @@ -422,14 +414,14 @@ class BeatportPlugin(MetadataSourcePlugin): try: return self._get_tracks(query) except BeatportAPIError as e: - self._log.debug("API Error: {0} (query: {1})", e, query) + self._log.debug("API Error: {} (query: {})", e, query) return [] def album_for_id(self, album_id: str): """Fetches a release by its Beatport ID and returns an AlbumInfo object or None if the query is not a valid ID or release is not found. 
""" - self._log.debug("Searching for release {0}", album_id) + self._log.debug("Searching for release {}", album_id) if not (release_id := self._extract_id(album_id)): self._log.debug("Not a valid Beatport release ID.") @@ -444,7 +436,7 @@ class BeatportPlugin(MetadataSourcePlugin): """Fetches a track by its Beatport ID and returns a TrackInfo object or None if the track is not a valid Beatport ID or track is not found. """ - self._log.debug("Searching for track {0}", track_id) + self._log.debug("Searching for track {}", track_id) # TODO: move to extractor match = re.search(r"(^|beatport\.com/track/.+/)(\d+)$", track_id) if not match: diff --git a/beetsplug/bench.py b/beetsplug/bench.py index cf72527e8..d77f1f92a 100644 --- a/beetsplug/bench.py +++ b/beetsplug/bench.py @@ -17,10 +17,11 @@ import cProfile import timeit -from beets import importer, library, plugins, ui, vfs +from beets import importer, library, plugins, ui from beets.autotag import match from beets.plugins import BeetsPlugin from beets.util.functemplate import Template +from beetsplug._utils import vfs def aunique_benchmark(lib, prof): diff --git a/beetsplug/bpd/__init__.py b/beetsplug/bpd/__init__.py index a2ad2835c..1a4f505dd 100644 --- a/beetsplug/bpd/__init__.py +++ b/beetsplug/bpd/__init__.py @@ -30,10 +30,11 @@ from typing import TYPE_CHECKING import beets import beets.ui -from beets import dbcore, logging, vfs +from beets import dbcore, logging from beets.library import Item from beets.plugins import BeetsPlugin from beets.util import as_string, bluelet +from beetsplug._utils import vfs if TYPE_CHECKING: from beets.dbcore.query import Query @@ -52,7 +53,7 @@ except ImportError as e: PROTOCOL_VERSION = "0.16.0" BUFSIZE = 1024 -HELLO = "OK MPD %s" % PROTOCOL_VERSION +HELLO = f"OK MPD {PROTOCOL_VERSION}" CLIST_BEGIN = "command_list_begin" CLIST_VERBOSE_BEGIN = "command_list_ok_begin" CLIST_END = "command_list_end" @@ -282,7 +283,7 @@ class BaseServer: if not self.ctrl_sock: self.ctrl_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.ctrl_sock.connect((self.ctrl_host, self.ctrl_port)) - self.ctrl_sock.sendall((message + "\n").encode("utf-8")) + self.ctrl_sock.sendall((f"{message}\n").encode("utf-8")) def _send_event(self, event): """Notify subscribed connections of an event.""" @@ -376,13 +377,13 @@ class BaseServer: if self.password and not conn.authenticated: # Not authenticated. Show limited list of commands. for cmd in SAFE_COMMANDS: - yield "command: " + cmd + yield f"command: {cmd}" else: # Authenticated. Show all commands. for func in dir(self): if func.startswith("cmd_"): - yield "command: " + func[4:] + yield f"command: {func[4:]}" def cmd_notcommands(self, conn): """Lists all unavailable commands.""" @@ -392,7 +393,7 @@ class BaseServer: if func.startswith("cmd_"): cmd = func[4:] if cmd not in SAFE_COMMANDS: - yield "command: " + cmd + yield f"command: {cmd}" else: # Authenticated. No commands are unavailable. @@ -406,22 +407,22 @@ class BaseServer: playlist, playlistlength, and xfade. 
""" yield ( - "repeat: " + str(int(self.repeat)), - "random: " + str(int(self.random)), - "consume: " + str(int(self.consume)), - "single: " + str(int(self.single)), - "playlist: " + str(self.playlist_version), - "playlistlength: " + str(len(self.playlist)), - "mixrampdb: " + str(self.mixrampdb), + f"repeat: {int(self.repeat)}", + f"random: {int(self.random)}", + f"consume: {int(self.consume)}", + f"single: {int(self.single)}", + f"playlist: {self.playlist_version}", + f"playlistlength: {len(self.playlist)}", + f"mixrampdb: {self.mixrampdb}", ) if self.volume > 0: - yield "volume: " + str(self.volume) + yield f"volume: {self.volume}" if not math.isnan(self.mixrampdelay): - yield "mixrampdelay: " + str(self.mixrampdelay) + yield f"mixrampdelay: {self.mixrampdelay}" if self.crossfade > 0: - yield "xfade: " + str(self.crossfade) + yield f"xfade: {self.crossfade}" if self.current_index == -1: state = "stop" @@ -429,20 +430,20 @@ class BaseServer: state = "pause" else: state = "play" - yield "state: " + state + yield f"state: {state}" if self.current_index != -1: # i.e., paused or playing current_id = self._item_id(self.playlist[self.current_index]) - yield "song: " + str(self.current_index) - yield "songid: " + str(current_id) + yield f"song: {self.current_index}" + yield f"songid: {current_id}" if len(self.playlist) > self.current_index + 1: # If there's a next song, report its index too. next_id = self._item_id(self.playlist[self.current_index + 1]) - yield "nextsong: " + str(self.current_index + 1) - yield "nextsongid: " + str(next_id) + yield f"nextsong: {self.current_index + 1}" + yield f"nextsongid: {next_id}" if self.error: - yield "error: " + self.error + yield f"error: {self.error}" def cmd_clearerror(self, conn): """Removes the persistent error state of the server. This @@ -522,7 +523,7 @@ class BaseServer: def cmd_replay_gain_status(self, conn): """Get the replaygain mode.""" - yield "replay_gain_mode: " + str(self.replay_gain_mode) + yield f"replay_gain_mode: {self.replay_gain_mode}" def cmd_clear(self, conn): """Clear the playlist.""" @@ -643,8 +644,8 @@ class BaseServer: Also a dummy implementation. 
""" for idx, track in enumerate(self.playlist): - yield "cpos: " + str(idx) - yield "Id: " + str(track.id) + yield f"cpos: {idx}" + yield f"Id: {track.id}" def cmd_currentsong(self, conn): """Sends information about the currently-playing song.""" @@ -759,11 +760,11 @@ class Connection: """Create a new connection for the accepted socket `client`.""" self.server = server self.sock = sock - self.address = "{}:{}".format(*sock.sock.getpeername()) + self.address = ":".join(map(str, sock.sock.getpeername())) def debug(self, message, kind=" "): """Log a debug message about this connection.""" - self.server._log.debug("{}[{}]: {}", kind, self.address, message) + self.server._log.debug("{}[{.address}]: {}", kind, self, message) def run(self): pass @@ -899,9 +900,7 @@ class MPDConnection(Connection): return except BPDIdleError as e: self.idle_subscriptions = e.subsystems - self.debug( - "awaiting: {}".format(" ".join(e.subsystems)), kind="z" - ) + self.debug(f"awaiting: {' '.join(e.subsystems)}", kind="z") yield bluelet.call(self.server.dispatch_events()) @@ -913,7 +912,7 @@ class ControlConnection(Connection): super().__init__(server, sock) def debug(self, message, kind=" "): - self.server._log.debug("CTRL {}[{}]: {}", kind, self.address, message) + self.server._log.debug("CTRL {}[{.address}]: {}", kind, self, message) def run(self): """Listen for control commands and delegate to `ctrl_*` methods.""" @@ -933,7 +932,7 @@ class ControlConnection(Connection): func = command.delegate("ctrl_", self) yield bluelet.call(func(*command.args)) except (AttributeError, TypeError) as e: - yield self.send("ERROR: {}".format(e.args[0])) + yield self.send(f"ERROR: {e.args[0]}") except Exception: yield self.send( ["ERROR: server error", traceback.format_exc().rstrip()] @@ -992,7 +991,7 @@ class Command: of arguments. """ # Attempt to get correct command function. - func_name = prefix + self.name + func_name = f"{prefix}{self.name}" if not hasattr(target, func_name): raise AttributeError(f'unknown command "{self.name}"') func = getattr(target, func_name) @@ -1011,7 +1010,7 @@ class Command: # If the command accepts a variable number of arguments skip the check. if wrong_num and not argspec.varargs: raise TypeError( - 'wrong number of arguments for "{}"'.format(self.name), + f'wrong number of arguments for "{self.name}"', self.name, ) @@ -1110,10 +1109,8 @@ class Server(BaseServer): self.lib = library self.player = gstplayer.GstPlayer(self.play_finished) self.cmd_update(None) - log.info("Server ready and listening on {}:{}".format(host, port)) - log.debug( - "Listening for control signals on {}:{}".format(host, ctrl_port) - ) + log.info("Server ready and listening on {}:{}", host, port) + log.debug("Listening for control signals on {}:{}", host, ctrl_port) def run(self): self.player.run() @@ -1128,23 +1125,21 @@ class Server(BaseServer): def _item_info(self, item): info_lines = [ - "file: " + as_string(item.destination(relative_to_libdir=True)), - "Time: " + str(int(item.length)), - "duration: " + f"{item.length:.3f}", - "Id: " + str(item.id), + f"file: {as_string(item.destination(relative_to_libdir=True))}", + f"Time: {int(item.length)}", + "duration: {item.length:.3f}", + f"Id: {item.id}", ] try: pos = self._id_to_index(item.id) - info_lines.append("Pos: " + str(pos)) + info_lines.append(f"Pos: {pos}") except ArgumentNotFoundError: # Don't include position if not in playlist. 
pass for tagtype, field in self.tagtype_map.items(): - info_lines.append( - "{}: {}".format(tagtype, str(getattr(item, field))) - ) + info_lines.append(f"{tagtype}: {getattr(item, field)}") return info_lines @@ -1207,7 +1202,7 @@ class Server(BaseServer): def _path_join(self, p1, p2): """Smashes together two BPD paths.""" - out = p1 + "/" + p2 + out = f"{p1}/{p2}" return out.replace("//", "/").replace("//", "/") def cmd_lsinfo(self, conn, path="/"): @@ -1225,7 +1220,7 @@ class Server(BaseServer): if dirpath.startswith("/"): # Strip leading slash (libmpc rejects this). dirpath = dirpath[1:] - yield "directory: %s" % dirpath + yield f"directory: {dirpath}" def _listall(self, basepath, node, info=False): """Helper function for recursive listing. If info, show @@ -1237,7 +1232,7 @@ class Server(BaseServer): item = self.lib.get_item(node) yield self._item_info(item) else: - yield "file: " + basepath + yield f"file: {basepath}" else: # List a directory. Recurse into both directories and files. for name, itemid in sorted(node.files.items()): @@ -1246,7 +1241,7 @@ class Server(BaseServer): yield from self._listall(newpath, itemid, info) for name, subdir in sorted(node.dirs.items()): newpath = self._path_join(basepath, name) - yield "directory: " + newpath + yield f"directory: {newpath}" yield from self._listall(newpath, subdir, info) def cmd_listall(self, conn, path="/"): @@ -1280,7 +1275,7 @@ class Server(BaseServer): for item in self._all_items(self._resolve_path(path)): self.playlist.append(item) if send_id: - yield "Id: " + str(item.id) + yield f"Id: {item.id}" self.playlist_version += 1 self._send_event("playlist") @@ -1302,20 +1297,13 @@ class Server(BaseServer): item = self.playlist[self.current_index] yield ( - "bitrate: " + str(item.bitrate / 1000), - "audio: {}:{}:{}".format( - str(item.samplerate), - str(item.bitdepth), - str(item.channels), - ), + f"bitrate: {item.bitrate / 1000}", + f"audio: {item.samplerate}:{item.bitdepth}:{item.channels}", ) (pos, total) = self.player.time() yield ( - "time: {}:{}".format( - str(int(pos)), - str(int(total)), - ), + f"time: {int(pos)}:{int(total)}", "elapsed: " + f"{pos:.3f}", "duration: " + f"{total:.3f}", ) @@ -1335,13 +1323,13 @@ class Server(BaseServer): artists, albums, songs, totaltime = tx.query(statement)[0] yield ( - "artists: " + str(artists), - "albums: " + str(albums), - "songs: " + str(songs), - "uptime: " + str(int(time.time() - self.startup_time)), - "playtime: " + "0", # Missing. - "db_playtime: " + str(int(totaltime)), - "db_update: " + str(int(self.updated_time)), + f"artists: {artists}", + f"albums: {albums}", + f"songs: {songs}", + f"uptime: {int(time.time() - self.startup_time)}", + "playtime: 0", # Missing. + f"db_playtime: {int(totaltime)}", + f"db_update: {int(self.updated_time)}", ) def cmd_decoders(self, conn): @@ -1383,7 +1371,7 @@ class Server(BaseServer): searching. """ for tag in self.tagtype_map: - yield "tagtype: " + tag + yield f"tagtype: {tag}" def _tagtype_lookup(self, tag): """Uses `tagtype_map` to look up the beets column name for an @@ -1458,12 +1446,9 @@ class Server(BaseServer): clause, subvals = query.clause() statement = ( - "SELECT DISTINCT " - + show_key - + " FROM items WHERE " - + clause - + " ORDER BY " - + show_key + f"SELECT DISTINCT {show_key}" + f" FROM items WHERE {clause}" + f" ORDER BY {show_key}" ) self._log.debug(statement) with self.lib.transaction() as tx: @@ -1473,7 +1458,7 @@ class Server(BaseServer): if not row[0]: # Skip any empty values of the field. 
continue - yield show_tag_canon + ": " + str(row[0]) + yield f"{show_tag_canon}: {row[0]}" def cmd_count(self, conn, tag, value): """Returns the number and total time of songs matching the @@ -1487,8 +1472,8 @@ class Server(BaseServer): ): songs += 1 playtime += item.length - yield "songs: " + str(songs) - yield "playtime: " + str(int(playtime)) + yield f"songs: {songs}" + yield f"playtime: {int(playtime)}" # Persistent playlist manipulation. In MPD this is an optional feature so # these dummy implementations match MPD's behaviour with the feature off. diff --git a/beetsplug/bpd/gstplayer.py b/beetsplug/bpd/gstplayer.py index 03fb179aa..fa23f2b0e 100644 --- a/beetsplug/bpd/gstplayer.py +++ b/beetsplug/bpd/gstplayer.py @@ -129,7 +129,7 @@ class GstPlayer: self.player.set_state(Gst.State.NULL) if isinstance(path, str): path = path.encode("utf-8") - uri = "file://" + urllib.parse.quote(path) + uri = f"file://{urllib.parse.quote(path)}" self.player.set_property("uri", uri) self.player.set_state(Gst.State.PLAYING) self.playing = True diff --git a/beetsplug/bpm.py b/beetsplug/bpm.py index 145986a95..d49963b72 100644 --- a/beetsplug/bpm.py +++ b/beetsplug/bpm.py @@ -73,12 +73,12 @@ class BPMPlugin(BeetsPlugin): item = items[0] if item["bpm"]: - self._log.info("Found bpm {0}", item["bpm"]) + self._log.info("Found bpm {}", item["bpm"]) if not overwrite: return self._log.info( - "Press Enter {0} times to the rhythm or Ctrl-D to exit", + "Press Enter {} times to the rhythm or Ctrl-D to exit", self.config["max_strokes"].get(int), ) new_bpm = bpm(self.config["max_strokes"].get(int)) @@ -86,4 +86,4 @@ class BPMPlugin(BeetsPlugin): if write: item.try_write() item.store() - self._log.info("Added new bpm {0}", item["bpm"]) + self._log.info("Added new bpm {}", item["bpm"]) diff --git a/beetsplug/bpsync.py b/beetsplug/bpsync.py index ccd781b28..9ae6d47d5 100644 --- a/beetsplug/bpsync.py +++ b/beetsplug/bpsync.py @@ -82,8 +82,8 @@ class BPSyncPlugin(BeetsPlugin): if not self.is_beatport_track(item): self._log.info( - "Skipping non-{} singleton: {}", - self.beatport_plugin.data_source, + "Skipping non-{.beatport_plugin.data_source} singleton: {}", + self, item, ) continue @@ -107,8 +107,8 @@ class BPSyncPlugin(BeetsPlugin): return False if not album.mb_albumid.isnumeric(): self._log.info( - "Skipping album with invalid {} ID: {}", - self.beatport_plugin.data_source, + "Skipping album with invalid {.beatport_plugin.data_source} ID: {}", + self, album, ) return False @@ -117,8 +117,8 @@ class BPSyncPlugin(BeetsPlugin): return items if not all(self.is_beatport_track(item) for item in items): self._log.info( - "Skipping non-{} release: {}", - self.beatport_plugin.data_source, + "Skipping non-{.beatport_plugin.data_source} release: {}", + self, album, ) return False @@ -139,9 +139,7 @@ class BPSyncPlugin(BeetsPlugin): albuminfo = self.beatport_plugin.album_for_id(album.mb_albumid) if not albuminfo: self._log.info( - "Release ID {} not found for album {}", - album.mb_albumid, - album, + "Release ID {0.mb_albumid} not found for album {0}", album ) continue diff --git a/beetsplug/bucket.py b/beetsplug/bucket.py index 9246539fc..40369f74a 100644 --- a/beetsplug/bucket.py +++ b/beetsplug/bucket.py @@ -41,7 +41,7 @@ def span_from_str(span_str): def normalize_year(d, yearfrom): """Convert string to a 4 digits year""" if yearfrom < 100: - raise BucketError("%d must be expressed on 4 digits" % yearfrom) + raise BucketError(f"{yearfrom} must be expressed on 4 digits") # if two digits only, pick closest year that ends by 
these two # digits starting from yearfrom @@ -55,14 +55,13 @@ def span_from_str(span_str): years = [int(x) for x in re.findall(r"\d+", span_str)] if not years: raise ui.UserError( - "invalid range defined for year bucket '%s': no " - "year found" % span_str + f"invalid range defined for year bucket {span_str!r}: no year found" ) try: years = [normalize_year(x, years[0]) for x in years] except BucketError as exc: raise ui.UserError( - "invalid range defined for year bucket '%s': %s" % (span_str, exc) + f"invalid range defined for year bucket {span_str!r}: {exc}" ) res = {"from": years[0], "str": span_str} @@ -125,22 +124,19 @@ def str2fmt(s): "fromnchars": len(m.group("fromyear")), "tonchars": len(m.group("toyear")), } - res["fmt"] = "{}%s{}{}{}".format( - m.group("bef"), - m.group("sep"), - "%s" if res["tonchars"] else "", - m.group("after"), + res["fmt"] = ( + f"{m['bef']}{{}}{m['sep']}{'{}' if res['tonchars'] else ''}{m['after']}" ) return res def format_span(fmt, yearfrom, yearto, fromnchars, tonchars): """Return a span string representation.""" - args = str(yearfrom)[-fromnchars:] + args = [str(yearfrom)[-fromnchars:]] if tonchars: - args = (str(yearfrom)[-fromnchars:], str(yearto)[-tonchars:]) + args.append(str(yearto)[-tonchars:]) - return fmt % args + return fmt.format(*args) def extract_modes(spans): @@ -169,14 +165,12 @@ def build_alpha_spans(alpha_spans_str, alpha_regexs): else: raise ui.UserError( "invalid range defined for alpha bucket " - "'%s': no alphanumeric character found" % elem + f"'{elem}': no alphanumeric character found" ) spans.append( re.compile( - "^[" - + ASCII_DIGITS[begin_index : end_index + 1] - + ASCII_DIGITS[begin_index : end_index + 1].upper() - + "]" + rf"^[{ASCII_DIGITS[begin_index : end_index + 1]}]", + re.IGNORECASE, ) ) return spans diff --git a/beetsplug/chroma.py b/beetsplug/chroma.py index f90877113..192310fb8 100644 --- a/beetsplug/chroma.py +++ b/beetsplug/chroma.py @@ -90,7 +90,7 @@ def acoustid_match(log, path): duration, fp = acoustid.fingerprint_file(util.syspath(path)) except acoustid.FingerprintGenerationError as exc: log.error( - "fingerprinting of {0} failed: {1}", + "fingerprinting of {} failed: {}", util.displayable_path(repr(path)), exc, ) @@ -98,15 +98,17 @@ def acoustid_match(log, path): fp = fp.decode() _fingerprints[path] = fp try: - res = acoustid.lookup(API_KEY, fp, duration, meta="recordings releases") + res = acoustid.lookup( + API_KEY, fp, duration, meta="recordings releases", timeout=10 + ) except acoustid.AcoustidError as exc: log.debug( - "fingerprint matching {0} failed: {1}", + "fingerprint matching {} failed: {}", util.displayable_path(repr(path)), exc, ) return None - log.debug("chroma: fingerprinted {0}", util.displayable_path(repr(path))) + log.debug("chroma: fingerprinted {}", util.displayable_path(repr(path))) # Ensure the response is usable and parse it. 
if res["status"] != "ok" or not res.get("results"): @@ -144,7 +146,7 @@ def acoustid_match(log, path): release_ids = [rel["id"] for rel in releases] log.debug( - "matched recordings {0} on releases {1}", recording_ids, release_ids + "matched recordings {} on releases {}", recording_ids, release_ids ) _matches[path] = recording_ids, release_ids @@ -209,7 +211,7 @@ class AcoustidPlugin(MetadataSourcePlugin): if album: albums.append(album) - self._log.debug("acoustid album candidates: {0}", len(albums)) + self._log.debug("acoustid album candidates: {}", len(albums)) return albums def item_candidates(self, item, artist, title) -> Iterable[TrackInfo]: @@ -222,7 +224,7 @@ class AcoustidPlugin(MetadataSourcePlugin): track = self.mb.track_for_id(recording_id) if track: tracks.append(track) - self._log.debug("acoustid item candidates: {0}", len(tracks)) + self._log.debug("acoustid item candidates: {}", len(tracks)) return tracks def album_for_id(self, *args, **kwargs): @@ -290,11 +292,11 @@ def submit_items(log, userkey, items, chunksize=64): def submit_chunk(): """Submit the current accumulated fingerprint data.""" - log.info("submitting {0} fingerprints", len(data)) + log.info("submitting {} fingerprints", len(data)) try: - acoustid.submit(API_KEY, userkey, data) + acoustid.submit(API_KEY, userkey, data, timeout=10) except acoustid.AcoustidError as exc: - log.warning("acoustid submission error: {0}", exc) + log.warning("acoustid submission error: {}", exc) del data[:] for item in items: @@ -341,31 +343,23 @@ def fingerprint_item(log, item, write=False): """ # Get a fingerprint and length for this track. if not item.length: - log.info("{0}: no duration available", util.displayable_path(item.path)) + log.info("{.filepath}: no duration available", item) elif item.acoustid_fingerprint: if write: - log.info( - "{0}: fingerprint exists, skipping", - util.displayable_path(item.path), - ) + log.info("{.filepath}: fingerprint exists, skipping", item) else: - log.info( - "{0}: using existing fingerprint", - util.displayable_path(item.path), - ) + log.info("{.filepath}: using existing fingerprint", item) return item.acoustid_fingerprint else: - log.info("{0}: fingerprinting", util.displayable_path(item.path)) + log.info("{.filepath}: fingerprinting", item) try: _, fp = acoustid.fingerprint_file(util.syspath(item.path)) item.acoustid_fingerprint = fp.decode() if write: - log.info( - "{0}: writing fingerprint", util.displayable_path(item.path) - ) + log.info("{.filepath}: writing fingerprint", item) item.try_write() if item._db: item.store() return item.acoustid_fingerprint except acoustid.FingerprintGenerationError as exc: - log.info("fingerprint generation failed: {0}", exc) + log.info("fingerprint generation failed: {}", exc) diff --git a/beetsplug/convert.py b/beetsplug/convert.py index c4df9ab57..e72f8c75a 100644 --- a/beetsplug/convert.py +++ b/beetsplug/convert.py @@ -25,12 +25,13 @@ from string import Template import mediafile from confuse import ConfigTypeError, Optional -from beets import art, config, plugins, ui, util +from beets import config, plugins, ui, util from beets.library import Item, parse_query_string from beets.plugins import BeetsPlugin from beets.util import par_map from beets.util.artresizer import ArtResizer from beets.util.m3u import M3UFile +from beetsplug._utils import art _fs_lock = threading.Lock() _temp_files = [] # Keep track of temporary transcoded files for deletion. 
@@ -64,9 +65,7 @@ def get_format(fmt=None): command = format_info["command"] extension = format_info.get("extension", fmt) except KeyError: - raise ui.UserError( - 'convert: format {} needs the "command" field'.format(fmt) - ) + raise ui.UserError(f'convert: format {fmt} needs the "command" field') except ConfigTypeError: command = config["convert"]["formats"][fmt].get(str) extension = fmt @@ -77,8 +76,8 @@ def get_format(fmt=None): command = config["convert"]["command"].as_str() elif "opts" in keys: # Undocumented option for backwards compatibility with < 1.3.1. - command = "ffmpeg -i $source -y {} $dest".format( - config["convert"]["opts"].as_str() + command = ( + f"ffmpeg -i $source -y {config['convert']['opts'].as_str()} $dest" ) if "extension" in keys: extension = config["convert"]["extension"].as_str() @@ -123,20 +122,28 @@ class ConvertPlugin(BeetsPlugin): "threads": os.cpu_count(), "format": "mp3", "id3v23": "inherit", + "write_metadata": True, "formats": { "aac": { - "command": "ffmpeg -i $source -y -vn -acodec aac " - "-aq 1 $dest", + "command": ( + "ffmpeg -i $source -y -vn -acodec aac -aq 1 $dest" + ), "extension": "m4a", }, "alac": { - "command": "ffmpeg -i $source -y -vn -acodec alac $dest", + "command": ( + "ffmpeg -i $source -y -vn -acodec alac $dest" + ), "extension": "m4a", }, "flac": "ffmpeg -i $source -y -vn -acodec flac $dest", "mp3": "ffmpeg -i $source -y -vn -aq 2 $dest", - "opus": "ffmpeg -i $source -y -vn -acodec libopus -ab 96k $dest", - "ogg": "ffmpeg -i $source -y -vn -acodec libvorbis -aq 3 $dest", + "opus": ( + "ffmpeg -i $source -y -vn -acodec libopus -ab 96k $dest" + ), + "ogg": ( + "ffmpeg -i $source -y -vn -acodec libvorbis -aq 3 $dest" + ), "wma": "ffmpeg -i $source -y -vn -acodec wmav2 -vn $dest", }, "max_bitrate": None, @@ -171,16 +178,17 @@ class ConvertPlugin(BeetsPlugin): "--threads", action="store", type="int", - help="change the number of threads, \ - defaults to maximum available processors", + help=( + "change the number of threads, defaults to maximum available" + " processors" + ), ) cmd.parser.add_option( "-k", "--keep-new", action="store_true", dest="keep_new", - help="keep only the converted \ - and move the old files", + help="keep only the converted and move the old files", ) cmd.parser.add_option( "-d", "--dest", action="store", help="set the destination directory" @@ -204,16 +212,16 @@ class ConvertPlugin(BeetsPlugin): "--link", action="store_true", dest="link", - help="symlink files that do not \ - need transcoding.", + help="symlink files that do not need transcoding.", ) cmd.parser.add_option( "-H", "--hardlink", action="store_true", dest="hardlink", - help="hardlink files that do not \ - need transcoding. Overrides --link.", + help=( + "hardlink files that do not need transcoding. Overrides --link." + ), ) cmd.parser.add_option( "-m", @@ -282,7 +290,7 @@ class ConvertPlugin(BeetsPlugin): quiet = self.config["quiet"].get(bool) if not quiet and not pretend: - self._log.info("Encoding {0}", util.displayable_path(source)) + self._log.info("Encoding {}", util.displayable_path(source)) command = os.fsdecode(command) source = os.fsdecode(source) @@ -301,7 +309,7 @@ class ConvertPlugin(BeetsPlugin): encode_cmd.append(os.fsdecode(args[i])) if pretend: - self._log.info("{0}", " ".join(args)) + self._log.info("{}", " ".join(args)) return try: @@ -309,26 +317,25 @@ class ConvertPlugin(BeetsPlugin): except subprocess.CalledProcessError as exc: # Something went wrong (probably Ctrl+C), remove temporary files self._log.info( - "Encoding {0} failed. 
Cleaning up...", + "Encoding {} failed. Cleaning up...", util.displayable_path(source), ) self._log.debug( - "Command {0} exited with status {1}: {2}", + "Command {0} exited with status {1.returncode}: {1.output}", args, - exc.returncode, - exc.output, + exc, ) util.remove(dest) util.prune_dirs(os.path.dirname(dest)) raise except OSError as exc: raise ui.UserError( - "convert: couldn't invoke '{}': {}".format(" ".join(args), exc) + f"convert: couldn't invoke {' '.join(args)!r}: {exc}" ) if not quiet and not pretend: self._log.info( - "Finished encoding {0}", util.displayable_path(source) + "Finished encoding {}", util.displayable_path(source) ) def convert_item( @@ -356,7 +363,7 @@ class ConvertPlugin(BeetsPlugin): try: mediafile.MediaFile(util.syspath(item.path)) except mediafile.UnreadableFileError as exc: - self._log.error("Could not open file to convert: {0}", exc) + self._log.error("Could not open file to convert: {}", exc) continue # When keeping the new file in the library, we first move the @@ -382,21 +389,20 @@ class ConvertPlugin(BeetsPlugin): if os.path.exists(util.syspath(dest)): self._log.info( - "Skipping {0} (target file exists)", - util.displayable_path(item.path), + "Skipping {.filepath} (target file exists)", item ) continue if keep_new: if pretend: self._log.info( - "mv {0} {1}", - util.displayable_path(item.path), + "mv {.filepath} {}", + item, util.displayable_path(original), ) else: self._log.info( - "Moving to {0}", util.displayable_path(original) + "Moving to {}", util.displayable_path(original) ) util.move(item.path, original) @@ -412,10 +418,10 @@ class ConvertPlugin(BeetsPlugin): msg = "ln" if hardlink else ("ln -s" if link else "cp") self._log.info( - "{2} {0} {1}", + "{} {} {}", + msg, util.displayable_path(original), util.displayable_path(converted), - msg, ) else: # No transcoding necessary. @@ -425,9 +431,7 @@ class ConvertPlugin(BeetsPlugin): else ("Linking" if link else "Copying") ) - self._log.info( - "{1} {0}", util.displayable_path(item.path), msg - ) + self._log.info("{} {.filepath}", msg, item) if hardlink: util.hardlink(original, converted) @@ -443,8 +447,9 @@ class ConvertPlugin(BeetsPlugin): if id3v23 == "inherit": id3v23 = None - # Write tags from the database to the converted file. - item.try_write(path=converted, id3v23=id3v23) + # Write tags from the database to the file if requested + if self.config["write_metadata"].get(bool): + item.try_write(path=converted, id3v23=id3v23) if keep_new: # If we're keeping the transcoded file, read it again (after @@ -458,8 +463,7 @@ class ConvertPlugin(BeetsPlugin): if album and album.artpath: maxwidth = self._get_art_resize(album.artpath) self._log.debug( - "embedding album art from {}", - util.displayable_path(album.artpath), + "embedding album art from {.art_filepath}", album ) art.embed_item( self._log, @@ -517,8 +521,7 @@ class ConvertPlugin(BeetsPlugin): if os.path.exists(util.syspath(dest)): self._log.info( - "Skipping {0} (target file exists)", - util.displayable_path(album.artpath), + "Skipping {.art_filepath} (target file exists)", album ) return @@ -528,8 +531,8 @@ class ConvertPlugin(BeetsPlugin): # Either copy or resize (while copying) the image. 
if maxwidth is not None: self._log.info( - "Resizing cover art from {0} to {1}", - util.displayable_path(album.artpath), + "Resizing cover art from {.art_filepath} to {}", + album, util.displayable_path(dest), ) if not pretend: @@ -539,10 +542,10 @@ class ConvertPlugin(BeetsPlugin): msg = "ln" if hardlink else ("ln -s" if link else "cp") self._log.info( - "{2} {0} {1}", - util.displayable_path(album.artpath), - util.displayable_path(dest), + "{} {.art_filepath} {}", msg, + album, + util.displayable_path(dest), ) else: msg = ( @@ -552,10 +555,10 @@ class ConvertPlugin(BeetsPlugin): ) self._log.info( - "{2} cover art from {0} to {1}", - util.displayable_path(album.artpath), - util.displayable_path(dest), + "{} cover art from {.art_filepath} to {}", msg, + album, + util.displayable_path(dest), ) if hardlink: util.hardlink(album.artpath, dest) @@ -616,7 +619,7 @@ class ConvertPlugin(BeetsPlugin): # Playlist paths are understood as relative to the dest directory. pl_normpath = util.normpath(playlist) pl_dir = os.path.dirname(pl_normpath) - self._log.info("Creating playlist file {0}", pl_normpath) + self._log.info("Creating playlist file {}", pl_normpath) # Generates a list of paths to media files, ensures the paths are # relative to the playlist's location and translates the unicode # strings we get from item.destination to bytes. @@ -644,7 +647,7 @@ class ConvertPlugin(BeetsPlugin): tmpdir = self.config["tmpdir"].get() if tmpdir: tmpdir = os.fsdecode(util.bytestring_path(tmpdir)) - fd, dest = tempfile.mkstemp(os.fsdecode(b"." + ext), dir=tmpdir) + fd, dest = tempfile.mkstemp(f".{os.fsdecode(ext)}", dir=tmpdir) os.close(fd) dest = util.bytestring_path(dest) _temp_files.append(dest) # Delete the transcode later. @@ -666,7 +669,7 @@ class ConvertPlugin(BeetsPlugin): if self.config["delete_originals"]: self._log.log( logging.DEBUG if self.config["quiet"] else logging.INFO, - "Removing original file {0}", + "Removing original file {}", source_path, ) util.remove(source_path, False) diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py index 8815e3d59..3eaca1e05 100644 --- a/beetsplug/deezer.py +++ b/beetsplug/deezer.py @@ -21,7 +21,6 @@ import time from typing import TYPE_CHECKING, Literal, Sequence import requests -import unidecode from beets import ui from beets.autotag import AlbumInfo, TrackInfo @@ -50,6 +49,9 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]): album_url = "https://api.deezer.com/album/" track_url = "https://api.deezer.com/track/" + def __init__(self) -> None: + super().__init__() + def commands(self): """Add beet UI commands to interact with Deezer.""" deezer_update_cmd = ui.Subcommand( @@ -97,7 +99,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]): f"Invalid `release_date` returned by {self.data_source} API: " f"{release_date!r}" ) - tracks_obj = self.fetch_data(self.album_url + deezer_id + "/tracks") + tracks_obj = self.fetch_data(f"{self.album_url}{deezer_id}/tracks") if tracks_obj is None: return None try: @@ -170,7 +172,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]): # the track's disc). 
if not ( album_tracks_obj := self.fetch_data( - self.album_url + str(track_data["album"]["id"]) + "/tracks" + f"{self.album_url}{track_data['album']['id']}/tracks" ) ): return None @@ -216,27 +218,6 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]): deezer_updated=time.time(), ) - @staticmethod - def _construct_search_query( - filters: SearchFilter, keywords: str = "" - ) -> str: - """Construct a query string with the specified filters and keywords to - be provided to the Deezer Search API - (https://developers.deezer.com/api/search). - - :param filters: Field filters to apply. - :param keywords: (Optional) Query keywords to use. - :return: Query string to be provided to the Search API. - """ - query_components = [ - keywords, - " ".join(f'{k}:"{v}"' for k, v in filters.items()), - ] - query = " ".join([q for q in query_components if q]) - if not isinstance(query, str): - query = query.decode("utf8") - return unidecode.unidecode(query) - def _search_api( self, query_type: Literal[ @@ -250,37 +231,42 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]): "user", ], filters: SearchFilter, - keywords="", + query_string: str = "", ) -> Sequence[IDResponse]: - """Query the Deezer Search API for the specified ``keywords``, applying + """Query the Deezer Search API for the specified ``query_string``, applying the provided ``filters``. :param filters: Field filters to apply. - :param keywords: Query keywords to use. + :param query_string: Additional query to include in the search. :return: JSON data for the class:`Response ` object or None if no search results are returned. """ - query = self._construct_search_query(keywords=keywords, filters=filters) - self._log.debug(f"Searching {self.data_source} for '{query}'") + query = self._construct_search_query( + query_string=query_string, filters=filters + ) + self._log.debug("Searching {.data_source} for '{}'", self, query) try: response = requests.get( - self.search_url + query_type, - params={"q": query}, + f"{self.search_url}{query_type}", + params={ + "q": query, + "limit": self.config["search_limit"].get(), + }, timeout=10, ) response.raise_for_status() except requests.exceptions.RequestException as e: self._log.error( - "Error fetching data from {} API\n Error: {}", - self.data_source, + "Error fetching data from {.data_source} API\n Error: {}", + self, e, ) return () response_data: Sequence[IDResponse] = response.json().get("data", []) self._log.debug( - "Found {} result(s) from {} for '{}'", + "Found {} result(s) from {.data_source} for '{}'", len(response_data), - self.data_source, + self, query, ) return response_data diff --git a/beetsplug/discogs.py b/beetsplug/discogs.py index ac7421c5f..be1cf97fa 100644 --- a/beetsplug/discogs.py +++ b/beetsplug/discogs.py @@ -27,13 +27,13 @@ import time import traceback from functools import cache from string import ascii_lowercase -from typing import TYPE_CHECKING, Sequence +from typing import TYPE_CHECKING, Sequence, cast import confuse from discogs_client import Client, Master, Release from discogs_client.exceptions import DiscogsAPIError from requests.exceptions import ConnectionError -from typing_extensions import TypedDict +from typing_extensions import NotRequired, TypedDict import beets import beets.ui @@ -76,6 +76,8 @@ TRACK_INDEX_RE = re.compile( re.VERBOSE, ) +DISAMBIGUATION_RE = re.compile(r" \(\d+\)") + class ReleaseFormat(TypedDict): name: str @@ -83,6 +85,42 @@ class ReleaseFormat(TypedDict): descriptions: list[str] | None +class Artist(TypedDict): + name: str + 
anv: str + join: str + role: str + tracks: str + id: str + resource_url: str + + +class Track(TypedDict): + position: str + type_: str + title: str + duration: str + artists: list[Artist] + extraartists: NotRequired[list[Artist]] + + +class TrackWithSubtracks(Track): + sub_tracks: list[TrackWithSubtracks] + + +class IntermediateTrackInfo(TrackInfo): + """Allows work with string mediums from + get_track_info""" + + def __init__( + self, + medium_str: str | None, + **kwargs, + ) -> None: + self.medium_str = medium_str + super().__init__(**kwargs) + + class DiscogsPlugin(MetadataSourcePlugin): def __init__(self): super().__init__() @@ -91,12 +129,17 @@ class DiscogsPlugin(MetadataSourcePlugin): "apikey": API_KEY, "apisecret": API_SECRET, "tokenfile": "discogs_token.json", - "source_weight": 0.5, "user_token": "", "separator": ", ", "index_tracks": False, "append_style_genre": False, - "search_limit": 5, + "strip_disambiguation": True, + "featured_string": "Feat.", + "anv": { + "artist_credit": True, + "artist": False, + "album_artist": False, + }, } ) self.config["apikey"].redact = True @@ -104,7 +147,7 @@ class DiscogsPlugin(MetadataSourcePlugin): self.config["user_token"].redact = True self.setup() - def setup(self, session=None): + def setup(self, session=None) -> None: """Create the `discogs_client` field. Authenticate if necessary.""" c_key = self.config["apikey"].as_str() c_secret = self.config["apisecret"].as_str() @@ -130,22 +173,22 @@ class DiscogsPlugin(MetadataSourcePlugin): self.discogs_client = Client(USER_AGENT, c_key, c_secret, token, secret) - def reset_auth(self): + def reset_auth(self) -> None: """Delete token file & redo the auth steps.""" os.remove(self._tokenfile()) self.setup() - def _tokenfile(self): + def _tokenfile(self) -> str: """Get the path to the JSON file for storing the OAuth token.""" return self.config["tokenfile"].get(confuse.Filename(in_app_dir=True)) - def authenticate(self, c_key, c_secret): + def authenticate(self, c_key: str, c_secret: str) -> tuple[str, str]: # Get the link for the OAuth page. auth_client = Client(USER_AGENT, c_key, c_secret) try: _, _, url = auth_client.get_authorize_url() except CONNECTION_ERRORS as e: - self._log.debug("connection error: {0}", e) + self._log.debug("connection error: {}", e) raise beets.ui.UserError("communication with Discogs failed") beets.ui.print_("To authenticate with Discogs, visit:") @@ -158,11 +201,11 @@ class DiscogsPlugin(MetadataSourcePlugin): except DiscogsAPIError: raise beets.ui.UserError("Discogs authorization failed") except CONNECTION_ERRORS as e: - self._log.debug("connection error: {0}", e) + self._log.debug("connection error: {}", e) raise beets.ui.UserError("Discogs token request failed") # Save the token for later use. - self._log.debug("Discogs token {0}, secret {1}", token, secret) + self._log.debug("Discogs token {}, secret {}", token, secret) with open(self._tokenfile(), "w") as f: json.dump({"token": token, "secret": secret}, f) @@ -202,7 +245,7 @@ class DiscogsPlugin(MetadataSourcePlugin): """Fetches an album by its Discogs ID and returns an AlbumInfo object or None if the album is not found. 
""" - self._log.debug("Searching for release {0}", album_id) + self._log.debug("Searching for release {}", album_id) discogs_id = self._extract_id(album_id) @@ -216,7 +259,7 @@ class DiscogsPlugin(MetadataSourcePlugin): except DiscogsAPIError as e: if e.status_code != 404: self._log.debug( - "API Error: {0} (query: {1})", + "API Error: {} (query: {})", e, result.data["resource_url"], ) @@ -250,7 +293,7 @@ class DiscogsPlugin(MetadataSourcePlugin): try: results = self.discogs_client.search(query, type="release") - results.per_page = self.config["search_limit"].as_number() + results.per_page = self.config["search_limit"].get() releases = results.page(1) except CONNECTION_ERRORS: self._log.debug( @@ -266,7 +309,7 @@ class DiscogsPlugin(MetadataSourcePlugin): """Fetches a master release given its Discogs ID and returns its year or None if the master release is not found. """ - self._log.debug("Getting master release {0}", master_id) + self._log.debug("Getting master release {}", master_id) result = Master(self.discogs_client, {"id": master_id}) try: @@ -274,7 +317,7 @@ class DiscogsPlugin(MetadataSourcePlugin): except DiscogsAPIError as e: if e.status_code != 404: self._log.debug( - "API Error: {0} (query: {1})", + "API Error: {} (query: {})", e, result.data["resource_url"], ) @@ -300,7 +343,26 @@ class DiscogsPlugin(MetadataSourcePlugin): return media, albumtype - def get_album_info(self, result): + def get_artist_with_anv( + self, artists: list[Artist], use_anv: bool = False + ) -> tuple[str, str | None]: + """Iterates through a discogs result, fetching data + if the artist anv is to be used, maps that to the name. + Calls the parent class get_artist method.""" + artist_list: list[dict[str | int, str]] = [] + for artist_data in artists: + a: dict[str | int, str] = { + "name": artist_data["name"], + "id": artist_data["id"], + "join": artist_data.get("join", ""), + } + if use_anv and (anv := artist_data.get("anv", "")): + a["name"] = anv + artist_list.append(a) + artist, artist_id = self.get_artist(artist_list, join_key="join") + return self.strip_disambiguation(artist), artist_id + + def get_album_info(self, result: Release) -> AlbumInfo | None: """Returns an AlbumInfo object for a discogs Release object.""" # Explicitly reload the `Release` fields, as they might not be yet # present if the result is from a `discogs_client.search()`. @@ -328,16 +390,29 @@ class DiscogsPlugin(MetadataSourcePlugin): self._log.warning("Release does not contain the required fields") return None - artist, artist_id = self.get_artist( - [a.data for a in result.artists], join_key="join" + artist_data = [a.data for a in result.artists] + album_artist, album_artist_id = self.get_artist_with_anv(artist_data) + album_artist_anv, _ = self.get_artist_with_anv( + artist_data, use_anv=True ) + artist_credit = album_artist_anv + album = re.sub(r" +", " ", result.title) album_id = result.data["id"] # Use `.data` to access the tracklist directly instead of the # convenient `.tracklist` property, which will strip out useful artist # information and leave us with skeleton `Artist` objects that will # each make an API call just to get the same data back. 
- tracks = self.get_tracks(result.data["tracklist"]) + tracks = self.get_tracks( + result.data["tracklist"], + (album_artist, album_artist_anv, album_artist_id), + ) + + # Assign ANV to the proper fields for tagging + if not self.config["anv"]["artist_credit"]: + artist_credit = album_artist + if self.config["anv"]["album_artist"]: + album_artist = album_artist_anv # Extract information for the optional AlbumInfo fields, if possible. va = result.data["artists"][0].get("name", "").lower() == "various" @@ -363,15 +438,20 @@ class DiscogsPlugin(MetadataSourcePlugin): label = catalogno = labelid = None if result.data.get("labels"): - label = result.data["labels"][0].get("name") + label = self.strip_disambiguation( + result.data["labels"][0].get("name") + ) catalogno = result.data["labels"][0].get("catno") labelid = result.data["labels"][0].get("id") cover_art_url = self.select_cover_art(result) - # Additional cleanups (various artists name, catalog number, media). + # Additional cleanups + # (various artists name, catalog number, media, disambiguation). if va: - artist = config["va_name"].as_str() + va_name = config["va_name"].as_str() + album_artist = va_name + artist_credit = va_name if catalogno == "none": catalogno = None # Explicitly set the `media` for the tracks, since it is expected by @@ -379,13 +459,9 @@ class DiscogsPlugin(MetadataSourcePlugin): for track in tracks: track.media = media track.medium_total = mediums.count(track.medium) - if not track.artist: # get_track_info often fails to find artist - track.artist = artist - if not track.artist_id: - track.artist_id = artist_id # Discogs does not have track IDs. Invent our own IDs as proposed # in #2336. - track.track_id = str(album_id) + "-" + track.track_alt + track.track_id = f"{album_id}-{track.track_alt}" track.data_url = data_url track.data_source = "Discogs" @@ -398,8 +474,9 @@ class DiscogsPlugin(MetadataSourcePlugin): return AlbumInfo( album=album, album_id=album_id, - artist=artist, - artist_id=artist_id, + artist=album_artist, + artist_credit=artist_credit, + artist_id=album_artist_id, tracks=tracks, albumtype=albumtype, va=va, @@ -417,11 +494,11 @@ class DiscogsPlugin(MetadataSourcePlugin): data_url=data_url, discogs_albumid=discogs_albumid, discogs_labelid=labelid, - discogs_artistid=artist_id, + discogs_artistid=album_artist_id, cover_art_url=cover_art_url, ) - def select_cover_art(self, result): + def select_cover_art(self, result: Release) -> str | None: """Returns the best candidate image, if any, from a Discogs `Release` object.""" if result.data.get("images") and len(result.data.get("images")) > 0: # The first image in this list appears to be the one displayed first @@ -431,7 +508,7 @@ class DiscogsPlugin(MetadataSourcePlugin): return None - def format(self, classification): + def format(self, classification: Iterable[str]) -> str | None: if classification: return ( self.config["separator"].as_str().join(sorted(classification)) @@ -439,22 +516,17 @@ class DiscogsPlugin(MetadataSourcePlugin): else: return None - def get_tracks(self, tracklist): - """Returns a list of TrackInfo objects for a discogs tracklist.""" - try: - clean_tracklist = self.coalesce_tracks(tracklist) - except Exception as exc: - # FIXME: this is an extra precaution for making sure there are no - # side effects after #2222. It should be removed after further - # testing. 
- self._log.debug("{}", traceback.format_exc()) - self._log.error("uncaught exception in coalesce_tracks: {}", exc) - clean_tracklist = tracklist - tracks = [] + def _process_clean_tracklist( + self, + clean_tracklist: list[Track], + album_artist_data: tuple[str, str, str | None], + ) -> tuple[list[TrackInfo], dict[int, str], int, list[str], list[str]]: + # Distinct works and intra-work divisions, as defined by index tracks. + tracks: list[TrackInfo] = [] index_tracks = {} index = 0 - # Distinct works and intra-work divisions, as defined by index tracks. - divisions, next_divisions = [], [] + divisions: list[str] = [] + next_divisions: list[str] = [] for track in clean_tracklist: # Only real tracks have `position`. Otherwise, it's an index track. if track["position"]: @@ -464,7 +536,9 @@ class DiscogsPlugin(MetadataSourcePlugin): # divisions. divisions += next_divisions del next_divisions[:] - track_info = self.get_track_info(track, index, divisions) + track_info = self.get_track_info( + track, index, divisions, album_artist_data + ) track_info.track_alt = track["position"] tracks.append(track_info) else: @@ -476,7 +550,29 @@ class DiscogsPlugin(MetadataSourcePlugin): except IndexError: pass index_tracks[index + 1] = track["title"] + return tracks, index_tracks, index, divisions, next_divisions + def get_tracks( + self, + tracklist: list[Track], + album_artist_data: tuple[str, str, str | None], + ) -> list[TrackInfo]: + """Returns a list of TrackInfo objects for a discogs tracklist.""" + try: + clean_tracklist: list[Track] = self.coalesce_tracks( + cast(list[TrackWithSubtracks], tracklist) + ) + except Exception as exc: + # FIXME: this is an extra precaution for making sure there are no + # side effects after #2222. It should be removed after further + # testing. + self._log.debug("{}", traceback.format_exc()) + self._log.error("uncaught exception in coalesce_tracks: {}", exc) + clean_tracklist = tracklist + processed = self._process_clean_tracklist( + clean_tracklist, album_artist_data + ) + tracks, index_tracks, index, divisions, next_divisions = processed # Fix up medium and medium_index for each track. Discogs position is # unreliable, but tracks are in order. medium = None @@ -485,8 +581,8 @@ class DiscogsPlugin(MetadataSourcePlugin): # If a medium has two sides (ie. vinyl or cassette), each pair of # consecutive sides should belong to the same medium. - if all([track.medium is not None for track in tracks]): - m = sorted({track.medium.lower() for track in tracks}) + if all([track.medium_str is not None for track in tracks]): + m = sorted({track.medium_str.lower() for track in tracks}) # If all track.medium are single consecutive letters, assume it is # a 2-sided medium. if "".join(m) in ascii_lowercase: @@ -500,17 +596,17 @@ class DiscogsPlugin(MetadataSourcePlugin): # side_count is the number of mediums or medium sides (in the case # of two-sided mediums) that were seen before. medium_is_index = ( - track.medium + track.medium_str and not track.medium_index and ( - len(track.medium) != 1 + len(track.medium_str) != 1 or # Not within standard incremental medium values (A, B, C, ...). - ord(track.medium) - 64 != side_count + 1 + ord(track.medium_str) - 64 != side_count + 1 ) ) - if not medium_is_index and medium != track.medium: + if not medium_is_index and medium != track.medium_str: side_count += 1 if sides_per_medium == 2: if side_count % sides_per_medium: @@ -521,7 +617,7 @@ class DiscogsPlugin(MetadataSourcePlugin): # Medium changed. Reset index_count. 
medium_count += 1 index_count = 0 - medium = track.medium + medium = track.medium_str index_count += 1 medium_count = 1 if medium_count == 0 else medium_count @@ -537,22 +633,27 @@ class DiscogsPlugin(MetadataSourcePlugin): disctitle = None track.disctitle = disctitle - return tracks + return cast(list[TrackInfo], tracks) - def coalesce_tracks(self, raw_tracklist): + def coalesce_tracks( + self, raw_tracklist: list[TrackWithSubtracks] + ) -> list[Track]: """Pre-process a tracklist, merging subtracks into a single track. The title for the merged track is the one from the previous index track, if present; otherwise it is a combination of the subtracks titles. """ - def add_merged_subtracks(tracklist, subtracks): + def add_merged_subtracks( + tracklist: list[TrackWithSubtracks], + subtracks: list[TrackWithSubtracks], + ) -> None: """Modify `tracklist` in place, merging a list of `subtracks` into a single track into `tracklist`.""" # Calculate position based on first subtrack, without subindex. idx, medium_idx, sub_idx = self.get_track_index( subtracks[0]["position"] ) - position = "{}{}".format(idx or "", medium_idx or "") + position = f"{idx or ''}{medium_idx or ''}" if tracklist and not tracklist[-1]["position"]: # Assume the previous index track contains the track title. @@ -574,8 +675,8 @@ class DiscogsPlugin(MetadataSourcePlugin): # option is set if self.config["index_tracks"]: for subtrack in subtracks: - subtrack["title"] = "{}: {}".format( - index_track["title"], subtrack["title"] + subtrack["title"] = ( + f"{index_track['title']}: {subtrack['title']}" ) tracklist.extend(subtracks) else: @@ -585,8 +686,8 @@ class DiscogsPlugin(MetadataSourcePlugin): tracklist.append(track) # Pre-process the tracklist, trying to identify subtracks. - subtracks = [] - tracklist = [] + subtracks: list[TrackWithSubtracks] = [] + tracklist: list[TrackWithSubtracks] = [] prev_subindex = "" for track in raw_tracklist: # Regular subtrack (track with subindex). @@ -621,10 +722,32 @@ class DiscogsPlugin(MetadataSourcePlugin): if subtracks: add_merged_subtracks(tracklist, subtracks) - return tracklist + return cast(list[Track], tracklist) - def get_track_info(self, track, index, divisions): + def strip_disambiguation(self, text: str) -> str: + """Removes discogs specific disambiguations from a string. + Turns 'Label Name (5)' to 'Label Name' or 'Artist (1) & Another Artist (2)' + to 'Artist & Another Artist'. 
Does nothing if strip_disambiguation is False.""" + if not self.config["strip_disambiguation"]: + return text + return DISAMBIGUATION_RE.sub("", text) + + def get_track_info( + self, + track: Track, + index: int, + divisions: list[str], + album_artist_data: tuple[str, str, str | None], + ) -> IntermediateTrackInfo: """Returns a TrackInfo object for a discogs track.""" + + artist, artist_anv, artist_id = album_artist_data + artist_credit = artist_anv + if not self.config["anv"]["artist_credit"]: + artist_credit = artist + if self.config["anv"]["artist"]: + artist = artist_anv + title = track["title"] if self.config["index_tracks"]: prefix = ", ".join(divisions) @@ -632,18 +755,44 @@ class DiscogsPlugin(MetadataSourcePlugin): title = f"{prefix}: {title}" track_id = None medium, medium_index, _ = self.get_track_index(track["position"]) - artist, artist_id = self.get_artist( - track.get("artists", []), join_key="join" - ) + + # If artists are found on the track, we will use those instead + if artists := track.get("artists", []): + artist, artist_id = self.get_artist_with_anv( + artists, self.config["anv"]["artist"] + ) + artist_credit, _ = self.get_artist_with_anv( + artists, self.config["anv"]["artist_credit"] + ) length = self.get_track_length(track["duration"]) - return TrackInfo( + + # Add featured artists + if extraartists := track.get("extraartists", []): + featured_list = [ + artist + for artist in extraartists + if "Featuring" in artist["role"] + ] + featured, _ = self.get_artist_with_anv( + featured_list, self.config["anv"]["artist"] + ) + featured_credit, _ = self.get_artist_with_anv( + featured_list, self.config["anv"]["artist_credit"] + ) + if featured: + artist += f" {self.config['featured_string']} {featured}" + artist_credit += ( + f" {self.config['featured_string']} {featured_credit}" + ) + return IntermediateTrackInfo( title=title, track_id=track_id, + artist_credit=artist_credit, artist=artist, artist_id=artist_id, length=length, index=index, - medium=medium, + medium_str=medium, medium_index=medium_index, ) @@ -664,7 +813,7 @@ class DiscogsPlugin(MetadataSourcePlugin): return medium or None, index or None, subindex or None - def get_track_length(self, duration): + def get_track_length(self, duration: str) -> int | None: """Returns the track length in seconds for a discogs duration.""" try: length = time.strptime(duration, "%M:%S") diff --git a/beetsplug/duplicates.py b/beetsplug/duplicates.py index ea7abaaff..904e19262 100644 --- a/beetsplug/duplicates.py +++ b/beetsplug/duplicates.py @@ -150,7 +150,7 @@ class DuplicatesPlugin(BeetsPlugin): count = self.config["count"].get(bool) delete = self.config["delete"].get(bool) remove = self.config["remove"].get(bool) - fmt = self.config["format"].get(str) + fmt_tmpl = self.config["format"].get(str) full = self.config["full"].get(bool) keys = self.config["keys"].as_str_seq() merge = self.config["merge"].get(bool) @@ -175,15 +175,14 @@ class DuplicatesPlugin(BeetsPlugin): return if path: - fmt = "$path" + fmt_tmpl = "$path" # Default format string for count mode. 
- if count and not fmt: + if count and not fmt_tmpl: if album: - fmt = "$albumartist - $album" + fmt_tmpl = "$albumartist - $album" else: - fmt = "$albumartist - $album - $title" - fmt += ": {0}" + fmt_tmpl = "$albumartist - $album - $title" if checksum: for i in items: @@ -207,7 +206,7 @@ class DuplicatesPlugin(BeetsPlugin): delete=delete, remove=remove, tag=tag, - fmt=fmt.format(obj_count), + fmt=f"{fmt_tmpl}: {obj_count}", ) self._command.func = _dup @@ -255,28 +254,24 @@ class DuplicatesPlugin(BeetsPlugin): checksum = getattr(item, key, False) if not checksum: self._log.debug( - "key {0} on item {1} not cached:computing checksum", + "key {} on item {.filepath} not cached:computing checksum", key, - displayable_path(item.path), + item, ) try: checksum = command_output(args).stdout setattr(item, key, checksum) item.store() self._log.debug( - "computed checksum for {0} using {1}", item.title, key + "computed checksum for {.title} using {}", item, key ) except subprocess.CalledProcessError as e: - self._log.debug( - "failed to checksum {0}: {1}", - displayable_path(item.path), - e, - ) + self._log.debug("failed to checksum {.filepath}: {}", item, e) else: self._log.debug( - "key {0} on item {1} cached:not computing checksum", + "key {} on item {.filepath} cached:not computing checksum", key, - displayable_path(item.path), + item, ) return key, checksum @@ -294,15 +289,15 @@ class DuplicatesPlugin(BeetsPlugin): values = [v for v in values if v not in (None, "")] if strict and len(values) < len(keys): self._log.debug( - "some keys {0} on item {1} are null or empty: skipping", + "some keys {} on item {.filepath} are null or empty: skipping", keys, - displayable_path(obj.path), + obj, ) elif not strict and not len(values): self._log.debug( - "all keys {0} on item {1} are null or empty: skipping", + "all keys {} on item {.filepath} are null or empty: skipping", keys, - displayable_path(obj.path), + obj, ) else: key = tuple(values) @@ -360,11 +355,11 @@ class DuplicatesPlugin(BeetsPlugin): value = getattr(o, f, None) if value: self._log.debug( - "key {0} on item {1} is null " - "or empty: setting from item {2}", + "key {} on item {} is null " + "or empty: setting from item {.filepath}", f, displayable_path(objs[0].path), - displayable_path(o.path), + o, ) setattr(objs[0], f, value) objs[0].store() @@ -384,11 +379,11 @@ class DuplicatesPlugin(BeetsPlugin): missing.album_id = objs[0].id missing.add(i._db) self._log.debug( - "item {0} missing from album {1}:" - " merging from {2} into {3}", + "item {} missing from album {}:" + " merging from {.filepath} into {}", missing, objs[0], - displayable_path(o.path), + o, displayable_path(missing.destination()), ) missing.move(operation=MoveOperation.COPY) diff --git a/beetsplug/edit.py b/beetsplug/edit.py index 52387c314..f6fadefd0 100644 --- a/beetsplug/edit.py +++ b/beetsplug/edit.py @@ -46,9 +46,7 @@ def edit(filename, log): try: subprocess.call(cmd) except OSError as exc: - raise ui.UserError( - "could not run editor command {!r}: {}".format(cmd[0], exc) - ) + raise ui.UserError(f"could not run editor command {cmd[0]!r}: {exc}") def dump(arg): @@ -71,9 +69,7 @@ def load(s): for d in yaml.safe_load_all(s): if not isinstance(d, dict): raise ParseError( - "each entry must be a dictionary; found {}".format( - type(d).__name__ - ) + f"each entry must be a dictionary; found {type(d).__name__}" ) # Convert all keys to strings. 
They started out as strings, diff --git a/beetsplug/embedart.py b/beetsplug/embedart.py index 8df3c3c05..cbf40f570 100644 --- a/beetsplug/embedart.py +++ b/beetsplug/embedart.py @@ -20,11 +20,12 @@ from mimetypes import guess_extension import requests -from beets import art, config, ui +from beets import config, ui from beets.plugins import BeetsPlugin from beets.ui import print_ from beets.util import bytestring_path, displayable_path, normpath, syspath from beets.util.artresizer import ArtResizer +from beetsplug._utils import art def _confirm(objs, album): @@ -35,8 +36,9 @@ def _confirm(objs, album): to items). """ noun = "album" if album else "file" - prompt = "Modify artwork for {} {}{} (Y/n)?".format( - len(objs), noun, "s" if len(objs) > 1 else "" + prompt = ( + "Modify artwork for" + f" {len(objs)} {noun}{'s' if len(objs) > 1 else ''} (Y/n)?" ) # Show all the items or albums. @@ -110,9 +112,7 @@ class EmbedCoverArtPlugin(BeetsPlugin): imagepath = normpath(opts.file) if not os.path.isfile(syspath(imagepath)): raise ui.UserError( - "image file {} not found".format( - displayable_path(imagepath) - ) + f"image file {displayable_path(imagepath)} not found" ) items = lib.items(args) @@ -137,7 +137,7 @@ class EmbedCoverArtPlugin(BeetsPlugin): response = requests.get(opts.url, timeout=5) response.raise_for_status() except requests.exceptions.RequestException as e: - self._log.error("{}".format(e)) + self._log.error("{}", e) return extension = guess_extension(response.headers["Content-Type"]) if extension is None: @@ -149,7 +149,7 @@ class EmbedCoverArtPlugin(BeetsPlugin): with open(tempimg, "wb") as f: f.write(response.content) except Exception as e: - self._log.error("Unable to save image: {}".format(e)) + self._log.error("Unable to save image: {}", e) return items = lib.items(args) # Confirm with user. @@ -274,7 +274,7 @@ class EmbedCoverArtPlugin(BeetsPlugin): """ if self.config["remove_art_file"] and album.artpath: if os.path.isfile(syspath(album.artpath)): - self._log.debug("Removing album art file for {0}", album) + self._log.debug("Removing album art file for {}", album) os.remove(syspath(album.artpath)) album.artpath = None album.store() diff --git a/beetsplug/embyupdate.py b/beetsplug/embyupdate.py index c696f39f3..25f3ed8b3 100644 --- a/beetsplug/embyupdate.py +++ b/beetsplug/embyupdate.py @@ -38,9 +38,7 @@ def api_url(host, port, endpoint): hostname_list.insert(0, "http://") hostname = "".join(hostname_list) - joined = urljoin( - "{hostname}:{port}".format(hostname=hostname, port=port), endpoint - ) + joined = urljoin(f"{hostname}:{port}", endpoint) scheme, netloc, path, query_string, fragment = urlsplit(joined) query_params = parse_qs(query_string) @@ -81,12 +79,12 @@ def create_headers(user_id, token=None): headers = {} authorization = ( - 'MediaBrowser UserId="{user_id}", ' + f'MediaBrowser UserId="{user_id}", ' 'Client="other", ' 'Device="beets", ' 'DeviceId="beets", ' 'Version="0.0.0"' - ).format(user_id=user_id) + ) headers["x-emby-authorization"] = authorization @@ -186,7 +184,7 @@ class EmbyUpdate(BeetsPlugin): # Get user information from the Emby API. user = get_user(host, port, username) if not user: - self._log.warning(f"User {username} could not be found.") + self._log.warning("User {} could not be found.", username) return userid = user[0]["Id"] @@ -198,7 +196,7 @@ class EmbyUpdate(BeetsPlugin): # Get authentication token. 
token = get_token(host, port, headers, auth_data) if not token: - self._log.warning("Could not get token for user {0}", username) + self._log.warning("Could not get token for user {}", username) return # Recreate headers with a token. diff --git a/beetsplug/export.py b/beetsplug/export.py index 05ca3f24a..e6c2b88c7 100644 --- a/beetsplug/export.py +++ b/beetsplug/export.py @@ -150,7 +150,7 @@ class ExportPlugin(BeetsPlugin): try: data, item = data_emitter(included_keys or "*") except (mediafile.UnreadableFileError, OSError) as ex: - self._log.error("cannot read file: {0}", ex) + self._log.error("cannot read file: {}", ex) continue for key, value in data.items(): diff --git a/beetsplug/fetchart.py b/beetsplug/fetchart.py index e1ec5aa09..37e7426f6 100644 --- a/beetsplug/fetchart.py +++ b/beetsplug/fetchart.py @@ -36,10 +36,10 @@ from beets.util.config import sanitize_pairs if TYPE_CHECKING: from collections.abc import Iterable, Iterator, Sequence - from logging import Logger from beets.importer import ImportSession, ImportTask from beets.library import Album, Library + from beets.logging import BeetsLogger as Logger try: from bs4 import BeautifulSoup, Tag @@ -133,7 +133,7 @@ class Candidate: # get_size returns None if no local imaging backend is available if not self.size: self.size = ArtResizer.shared.get_size(self.path) - self._log.debug("image size: {}", self.size) + self._log.debug("image size: {.size}", self) if not self.size: self._log.warning( @@ -151,7 +151,7 @@ class Candidate: # Check minimum dimension. if plugin.minwidth and self.size[0] < plugin.minwidth: self._log.debug( - "image too small ({} < {})", self.size[0], plugin.minwidth + "image too small ({} < {.minwidth})", self.size[0], plugin ) return ImageAction.BAD @@ -162,10 +162,10 @@ class Candidate: if edge_diff > plugin.margin_px: self._log.debug( "image is not close enough to being " - "square, ({} - {} > {})", + "square, ({} - {} > {.margin_px})", long_edge, short_edge, - plugin.margin_px, + plugin, ) return ImageAction.BAD elif plugin.margin_percent: @@ -190,7 +190,7 @@ class Candidate: downscale = False if plugin.maxwidth and self.size[0] > plugin.maxwidth: self._log.debug( - "image needs rescaling ({} > {})", self.size[0], plugin.maxwidth + "image needs rescaling ({} > {.maxwidth})", self.size[0], plugin ) downscale = True @@ -200,9 +200,9 @@ class Candidate: filesize = os.stat(syspath(self.path)).st_size if filesize > plugin.max_filesize: self._log.debug( - "image needs resizing ({}B > {}B)", + "image needs resizing ({}B > {.max_filesize}B)", filesize, - plugin.max_filesize, + plugin, ) downsize = True @@ -213,9 +213,9 @@ class Candidate: reformat = fmt != plugin.cover_format if reformat: self._log.debug( - "image needs reformatting: {} -> {}", + "image needs reformatting: {} -> {.cover_format}", fmt, - plugin.cover_format, + plugin, ) skip_check_for = skip_check_for or [] @@ -329,7 +329,7 @@ def _logged_get(log: Logger, *args, **kwargs) -> requests.Response: prepped.url, {}, None, None, None ) send_kwargs.update(settings) - log.debug("{}: {}", message, prepped.url) + log.debug("{}: {.url}", message, prepped) return s.send(prepped, **send_kwargs) @@ -542,14 +542,14 @@ class CoverArtArchive(RemoteArtSource): try: response = self.request(url) except requests.RequestException: - self._log.debug("{}: error receiving response", self.NAME) + self._log.debug("{.NAME}: error receiving response", self) return try: data = response.json() except ValueError: self._log.debug( - "{}: error loading response: {}", self.NAME, 
response.text + "{.NAME}: error loading response: {.text}", self, response ) return @@ -593,7 +593,7 @@ class CoverArtArchive(RemoteArtSource): class Amazon(RemoteArtSource): NAME = "Amazon" ID = "amazon" - URL = "https://images.amazon.com/images/P/%s.%02i.LZZZZZZZ.jpg" + URL = "https://images.amazon.com/images/P/{}.{:02d}.LZZZZZZZ.jpg" INDICES = (1, 2) def get( @@ -606,7 +606,7 @@ class Amazon(RemoteArtSource): if album.asin: for index in self.INDICES: yield self._candidate( - url=self.URL % (album.asin, index), + url=self.URL.format(album.asin, index), match=MetadataMatch.EXACT, ) @@ -629,7 +629,7 @@ class AlbumArtOrg(RemoteArtSource): # Get the page from albumart.org. try: resp = self.request(self.URL, params={"asin": album.asin}) - self._log.debug("scraped art URL: {}", resp.url) + self._log.debug("scraped art URL: {.url}", resp) except requests.RequestException: self._log.debug("error scraping art page") return @@ -682,7 +682,7 @@ class GoogleImages(RemoteArtSource): """ if not (album.albumartist and album.album): return - search_string = (album.albumartist + "," + album.album).encode("utf-8") + search_string = f"{album.albumartist},{album.album}".encode("utf-8") try: response = self.request( @@ -702,7 +702,7 @@ class GoogleImages(RemoteArtSource): try: data = response.json() except ValueError: - self._log.debug("google: error loading response: {}", response.text) + self._log.debug("google: error loading response: {.text}", response) return if "error" in data: @@ -723,7 +723,7 @@ class FanartTV(RemoteArtSource): NAME = "fanart.tv" ID = "fanarttv" API_URL = "https://webservice.fanart.tv/v3/" - API_ALBUMS = API_URL + "music/albums/" + API_ALBUMS = f"{API_URL}music/albums/" PROJECT_KEY = "61a7d0ab4e67162b7a0c7c35915cd48e" def __init__(self, *args, **kwargs): @@ -750,7 +750,7 @@ class FanartTV(RemoteArtSource): try: response = self.request( - self.API_ALBUMS + album.mb_releasegroupid, + f"{self.API_ALBUMS}{album.mb_releasegroupid}", headers={ "api-key": self.PROJECT_KEY, "client-key": self.client_key, @@ -764,7 +764,7 @@ class FanartTV(RemoteArtSource): data = response.json() except ValueError: self._log.debug( - "fanart.tv: error loading response: {}", response.text + "fanart.tv: error loading response: {.text}", response ) return @@ -820,7 +820,7 @@ class ITunesStore(RemoteArtSource): return payload = { - "term": album.albumartist + " " + album.album, + "term": f"{album.albumartist} {album.album}", "entity": "album", "media": "music", "limit": 200, @@ -947,14 +947,14 @@ class Wikipedia(RemoteArtSource): data = dbpedia_response.json() results = data["results"]["bindings"] if results: - cover_filename = "File:" + results[0]["coverFilename"]["value"] + cover_filename = f"File:{results[0]['coverFilename']['value']}" page_id = results[0]["pageId"]["value"] else: self._log.debug("wikipedia: album not found on dbpedia") except (ValueError, KeyError, IndexError): self._log.debug( - "wikipedia: error scraping dbpedia response: {}", - dbpedia_response.text, + "wikipedia: error scraping dbpedia response: {.text}", + dbpedia_response, ) # Ensure we have a filename before attempting to query wikipedia @@ -996,7 +996,7 @@ class Wikipedia(RemoteArtSource): results = data["query"]["pages"][page_id]["images"] for result in results: if re.match( - re.escape(lpart) + r".*?\." 
+ re.escape(rpart), + rf"{re.escape(lpart)}.*?\.{re.escape(rpart)}", result["title"], ): cover_filename = result["title"] @@ -1179,7 +1179,7 @@ class LastFM(RemoteArtSource): if "error" in data: if data["error"] == 6: self._log.debug( - "lastfm: no results for {}", album.mb_albumid + "lastfm: no results for {.mb_albumid}", album ) else: self._log.error( @@ -1200,7 +1200,7 @@ class LastFM(RemoteArtSource): url=images[size], size=self.SIZES[size] ) except ValueError: - self._log.debug("lastfm: error loading response: {}", response.text) + self._log.debug("lastfm: error loading response: {.text}", response) return @@ -1227,7 +1227,7 @@ class Spotify(RemoteArtSource): paths: None | Sequence[bytes], ) -> Iterator[Candidate]: try: - url = self.SPOTIFY_ALBUM_URL + album.items().get().spotify_album_id + url = f"{self.SPOTIFY_ALBUM_URL}{album.items().get().spotify_album_id}" except AttributeError: self._log.debug("Fetchart: no Spotify album ID found") return @@ -1244,7 +1244,7 @@ class Spotify(RemoteArtSource): soup = BeautifulSoup(html, "html.parser") except ValueError: self._log.debug( - "Spotify: error loading response: {}", response.text + "Spotify: error loading response: {.text}", response ) return @@ -1541,9 +1541,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin): out = candidate assert out.path is not None # help mypy self._log.debug( - "using {0.LOC} image {1}", - source, - util.displayable_path(out.path), + "using {.LOC} image {.path}", source, out ) break # Remove temporary files for invalid candidates. @@ -1576,7 +1574,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin): message = ui.colorize( "text_highlight_minor", "has album art" ) - self._log.info("{0}: {1}", album, message) + self._log.info("{}: {}", album, message) else: # In ordinary invocations, look for images on the # filesystem. When forcing, however, always go to the Web @@ -1589,4 +1587,4 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin): message = ui.colorize("text_success", "found album art") else: message = ui.colorize("text_error", "no art found") - self._log.info("{0}: {1}", album, message) + self._log.info("{}: {}", album, message) diff --git a/beetsplug/fish.py b/beetsplug/fish.py index 4cf9b60a1..b1518f1c4 100644 --- a/beetsplug/fish.py +++ b/beetsplug/fish.py @@ -89,8 +89,9 @@ class FishPlugin(BeetsPlugin): "-o", "--output", default="~/.config/fish/completions/beet.fish", - help="where to save the script. default: " - "~/.config/fish/completions", + help=( + "where to save the script. 
default: ~/.config/fish/completions" + ), ) return [cmd] @@ -122,23 +123,13 @@ class FishPlugin(BeetsPlugin): for name in names: cmd_names_help.append((name, cmd.help)) # Concatenate the string - totstring = HEAD + "\n" + totstring = f"{HEAD}\n" totstring += get_cmds_list([name[0] for name in cmd_names_help]) totstring += "" if nobasicfields else get_standard_fields(fields) totstring += get_extravalues(lib, extravalues) if extravalues else "" - totstring += ( - "\n" - + "# ====== {} =====".format("setup basic beet completion") - + "\n" * 2 - ) + totstring += "\n# ====== setup basic beet completion =====\n\n" totstring += get_basic_beet_options() - totstring += ( - "\n" - + "# ====== {} =====".format( - "setup field completion for subcommands" - ) - + "\n" - ) + totstring += "\n# ====== setup field completion for subcommands =====\n" totstring += get_subcommands(cmd_names_help, nobasicfields, extravalues) # Set up completion for all the command options totstring += get_all_commands(beetcmds) @@ -150,23 +141,19 @@ class FishPlugin(BeetsPlugin): def _escape(name): # Escape ? in fish if name == "?": - name = "\\" + name + name = f"\\{name}" return name def get_cmds_list(cmds_names): # Make a list of all Beets core & plugin commands - substr = "" - substr += "set CMDS " + " ".join(cmds_names) + ("\n" * 2) - return substr + return f"set CMDS {' '.join(cmds_names)}\n\n" def get_standard_fields(fields): # Make a list of album/track fields and append with ':' - fields = (field + ":" for field in fields) - substr = "" - substr += "set FIELDS " + " ".join(fields) + ("\n" * 2) - return substr + fields = (f"{field}:" for field in fields) + return f"set FIELDS {' '.join(fields)}\n\n" def get_extravalues(lib, extravalues): @@ -175,14 +162,8 @@ def get_extravalues(lib, extravalues): word = "" values_set = get_set_of_values_for_field(lib, extravalues) for fld in extravalues: - extraname = fld.upper() + "S" - word += ( - "set " - + extraname - + " " - + " ".join(sorted(values_set[fld])) - + ("\n" * 2) - ) + extraname = f"{fld.upper()}S" + word += f"set {extraname} {' '.join(sorted(values_set[fld]))}\n\n" return word @@ -226,35 +207,29 @@ def get_subcommands(cmd_name_and_help, nobasicfields, extravalues): for cmdname, cmdhelp in cmd_name_and_help: cmdname = _escape(cmdname) - word += ( - "\n" - + "# ------ {} -------".format("fieldsetups for " + cmdname) - + "\n" - ) + word += f"\n# ------ fieldsetups for {cmdname} -------\n" word += BL_NEED2.format( - ("-a " + cmdname), ("-f " + "-d " + wrap(clean_whitespace(cmdhelp))) + f"-a {cmdname}", f"-f -d {wrap(clean_whitespace(cmdhelp))}" ) if nobasicfields is False: word += BL_USE3.format( cmdname, - ("-a " + wrap("$FIELDS")), - ("-f " + "-d " + wrap("fieldname")), + f"-a {wrap('$FIELDS')}", + f"-f -d {wrap('fieldname')}", ) if extravalues: for f in extravalues: - setvar = wrap("$" + f.upper() + "S") - word += ( - " ".join( - BL_EXTRA3.format( - (cmdname + " " + f + ":"), - ("-f " + "-A " + "-a " + setvar), - ("-d " + wrap(f)), - ).split() - ) - + "\n" + setvar = wrap(f"${f.upper()}S") + word += " ".join( + BL_EXTRA3.format( + f"{cmdname} {f}:", + f"-f -A -a {setvar}", + f"-d {wrap(f)}", + ).split() ) + word += "\n" return word @@ -267,59 +242,44 @@ def get_all_commands(beetcmds): for name in names: name = _escape(name) - word += "\n" - word += ( - ("\n" * 2) - + "# ====== {} =====".format("completions for " + name) - + "\n" - ) + word += f"\n\n\n# ====== completions for {name} =====\n" for option in cmd.parser._get_all_options()[1:]: cmd_l = ( - (" -l " + 
option._long_opts[0].replace("--", ""))
+                    f" -l {option._long_opts[0].replace('--', '')}"
                     if option._long_opts
                     else ""
                 )
                 cmd_s = (
-                    (" -s " + option._short_opts[0].replace("-", ""))
+                    f" -s {option._short_opts[0].replace('-', '')}"
                     if option._short_opts
                     else ""
                 )
                 cmd_need_arg = " -r " if option.nargs in [1] else ""
                 cmd_helpstr = (
-                    (" -d " + wrap(" ".join(option.help.split())))
+                    f" -d {wrap(' '.join(option.help.split()))}"
                     if option.help
                     else ""
                 )
                 cmd_arglist = (
-                    (" -a " + wrap(" ".join(option.choices)))
+                    f" -a {wrap(' '.join(option.choices))}"
                     if option.choices
                     else ""
                 )

-                word += (
-                    " ".join(
-                        BL_USE3.format(
-                            name,
-                            (
-                                cmd_need_arg
-                                + cmd_s
-                                + cmd_l
-                                + " -f "
-                                + cmd_arglist
-                            ),
-                            cmd_helpstr,
-                        ).split()
-                    )
-                    + "\n"
+                word += " ".join(
+                    BL_USE3.format(
+                        name,
+                        f"{cmd_need_arg}{cmd_s}{cmd_l} -f {cmd_arglist}",
+                        cmd_helpstr,
+                    ).split()
                 )
+                word += "\n"

-        word = word + " ".join(
-            BL_USE3.format(
-                name,
-                ("-s " + "h " + "-l " + "help" + " -f "),
-                ("-d " + wrap("print help") + "\n"),
-            ).split()
+        word = word + BL_USE3.format(
+            name,
+            "-s h -l help -f",
+            f"-d {wrap('print help')}",
         )
     return word
@@ -332,9 +292,9 @@ def clean_whitespace(word):
 def wrap(word):
     # Need " or ' around strings but watch out if they're in the string
     sptoken = '"'
-    if ('"') in word and ("'") in word:
+    if '"' in word and ("'") in word:
         word.replace('"', sptoken)
-        return '"' + word + '"'
+        return f'"{word}"'

     tok = '"' if "'" in word else "'"
-    return tok + word + tok
+    return f"{tok}{word}{tok}"
diff --git a/beetsplug/fromfilename.py b/beetsplug/fromfilename.py
index 103e82901..c3fb4bc6b 100644
--- a/beetsplug/fromfilename.py
+++ b/beetsplug/fromfilename.py
@@ -12,8 +12,8 @@
 # The above copyright notice and this permission notice shall be
 # included in all copies or substantial portions of the Software.

-"""If the title is empty, try to extract track and title from the
-filename.
+"""If the title is empty, try to extract it from the filename
+(possibly also extract track and artist)
 """

 import os
@@ -25,12 +25,12 @@ from beets.util import displayable_path
 # Filename field extraction patterns.
 PATTERNS = [
     # Useful patterns.
-    r"^(?P<artist>.+)[\-_](?P<title>.+)[\-_](?P<tag>.*)$",
-    r"^(?P<track>\d+)[\s.\-_]+(?P<artist>.+)[\-_](?P<title>.+)[\-_](?P<tag>.*)$",
-    r"^(?P<artist>.+)[\-_](?P<title>.+)$",
-    r"^(?P<track>\d+)[\s.\-_]+(?P<artist>.+)[\-_](?P<title>.+)$",
-    r"^(?P<track>\d+)[\s.\-_]+(?P<title>.+)$",
-    r"^(?P<track>\d+)\s+(?P<title>.+)$",
+    (
+        r"^(?P<track>\d+)\.?\s*-\s*(?P<artist>.+?)\s*-\s*(?P<title>.+?)"
+        r"(\s*-\s*(?P<tag>.*))?$"
+    ),
+    r"^(?P<artist>.+?)\s*-\s*(?P<title>.+?)(\s*-\s*(?P<tag>.*))?$",
+    r"^(?P<track>\d+)\.?[\s_-]+(?P<title>.+)$",
     r"^(?P<title>.+) by (?P<artist>.+)$",
     r"^(?P<track>\d+).*$",
     r"^(?P<title>.+)$",
@@ -98,6 +98,7 @@ def apply_matches(d, log):
     # Given both an "artist" and "title" field, assume that one is
     # *actually* the artist, which must be uniform, and use the other
     # for the title. This, of course, won't work for VA albums.
+    # Only check for "artist": patterns containing it, also contain "title"
     if "artist" in keys:
         if equal_fields(d, "artist"):
             artist = some_map["artist"]
@@ -112,21 +113,22 @@ def apply_matches(d, log):
         for item in d:
             if not item.artist:
                 item.artist = artist
-                log.info("Artist replaced with: {}".format(item.artist))
-
-    # No artist field: remaining field is the title.
- else: + log.info("Artist replaced with: {.artist}", item) + # otherwise, if the pattern contains "title", use that for title_field + elif "title" in keys: title_field = "title" + else: + title_field = None - # Apply the title and track. + # Apply the title and track, if any. for item in d: - if bad_title(item.title): + if title_field and bad_title(item.title): item.title = str(d[item][title_field]) - log.info("Title replaced with: {}".format(item.title)) + log.info("Title replaced with: {.title}", item) if "track" in d[item] and item.track == 0: item.track = int(d[item]["track"]) - log.info("Track replaced with: {}".format(item.track)) + log.info("Track replaced with: {.track}", item) # Plugin structure and hook into import process. @@ -160,6 +162,7 @@ class FromFilenamePlugin(plugins.BeetsPlugin): # Look for useful information in the filenames. for pattern in PATTERNS: + self._log.debug(f"Trying pattern: {pattern}") d = all_matches(names, pattern) if d: apply_matches(d, self._log) diff --git a/beetsplug/ftintitle.py b/beetsplug/ftintitle.py index 150f230aa..dd681a972 100644 --- a/beetsplug/ftintitle.py +++ b/beetsplug/ftintitle.py @@ -20,21 +20,26 @@ import re from typing import TYPE_CHECKING from beets import plugins, ui -from beets.util import displayable_path if TYPE_CHECKING: from beets.importer import ImportSession, ImportTask from beets.library import Item -def split_on_feat(artist: str) -> tuple[str, str | None]: +def split_on_feat( + artist: str, + for_artist: bool = True, + custom_words: list[str] | None = None, +) -> tuple[str, str | None]: """Given an artist string, split the "main" artist from any artist on the right-hand side of a string like "feat". Return the main artist, which is always a string, and the featuring artist, which may be a string or None if none is present. """ # split on the first "feat". - regex = re.compile(plugins.feat_tokens(), re.IGNORECASE) + regex = re.compile( + plugins.feat_tokens(for_artist, custom_words), re.IGNORECASE + ) parts = tuple(s.strip() for s in regex.split(artist, 1)) if len(parts) == 1: return parts[0], None @@ -43,43 +48,54 @@ def split_on_feat(artist: str) -> tuple[str, str | None]: return parts -def contains_feat(title: str) -> bool: +def contains_feat(title: str, custom_words: list[str] | None = None) -> bool: """Determine whether the title contains a "featured" marker.""" return bool( re.search( - plugins.feat_tokens(for_artist=False), + plugins.feat_tokens(for_artist=False, custom_words=custom_words), title, flags=re.IGNORECASE, ) ) -def find_feat_part(artist: str, albumartist: str) -> str | None: +def find_feat_part( + artist: str, + albumartist: str | None, + custom_words: list[str] | None = None, +) -> str | None: """Attempt to find featured artists in the item's artist fields and return the results. Returns None if no featured artist found. """ - # Look for the album artist in the artist field. If it's not - # present, give up. - albumartist_split = artist.split(albumartist, 1) - if len(albumartist_split) <= 1: - return None + # Handle a wider variety of extraction cases if the album artist is + # contained within the track artist. + if albumartist and albumartist in artist: + albumartist_split = artist.split(albumartist, 1) - # If the last element of the split (the right-hand side of the - # album artist) is nonempty, then it probably contains the - # featured artist. - elif albumartist_split[1] != "": - # Extract the featured artist from the right-hand side. 
- _, feat_part = split_on_feat(albumartist_split[1]) - return feat_part + # If the last element of the split (the right-hand side of the + # album artist) is nonempty, then it probably contains the + # featured artist. + if albumartist_split[1] != "": + # Extract the featured artist from the right-hand side. + _, feat_part = split_on_feat( + albumartist_split[1], custom_words=custom_words + ) + return feat_part - # Otherwise, if there's nothing on the right-hand side, look for a - # featuring artist on the left-hand side. - else: - lhs, rhs = split_on_feat(albumartist_split[0]) - if lhs: - return lhs + # Otherwise, if there's nothing on the right-hand side, + # look for a featuring artist on the left-hand side. + else: + lhs, _ = split_on_feat( + albumartist_split[0], custom_words=custom_words + ) + if lhs: + return lhs - return None + # Fall back to conservative handling of the track artist without relying + # on albumartist, which covers compilations using a 'Various Artists' + # albumartist and album tracks by a guest artist featuring a third artist. + _, feat_part = split_on_feat(artist, False, custom_words) + return feat_part class FtInTitlePlugin(plugins.BeetsPlugin): @@ -90,8 +106,10 @@ class FtInTitlePlugin(plugins.BeetsPlugin): { "auto": True, "drop": False, - "format": "feat. {0}", + "format": "feat. {}", "keep_in_artist": False, + "preserve_album_artist": True, + "custom_words": [], } ) @@ -116,10 +134,20 @@ class FtInTitlePlugin(plugins.BeetsPlugin): self.config.set_args(opts) drop_feat = self.config["drop"].get(bool) keep_in_artist_field = self.config["keep_in_artist"].get(bool) + preserve_album_artist = self.config["preserve_album_artist"].get( + bool + ) + custom_words = self.config["custom_words"].get(list) write = ui.should_write() for item in lib.items(args): - if self.ft_in_title(item, drop_feat, keep_in_artist_field): + if self.ft_in_title( + item, + drop_feat, + keep_in_artist_field, + preserve_album_artist, + custom_words, + ): item.store() if write: item.try_write() @@ -131,9 +159,17 @@ class FtInTitlePlugin(plugins.BeetsPlugin): """Import hook for moving featuring artist automatically.""" drop_feat = self.config["drop"].get(bool) keep_in_artist_field = self.config["keep_in_artist"].get(bool) + preserve_album_artist = self.config["preserve_album_artist"].get(bool) + custom_words = self.config["custom_words"].get(list) for item in task.imported_items(): - if self.ft_in_title(item, drop_feat, keep_in_artist_field): + if self.ft_in_title( + item, + drop_feat, + keep_in_artist_field, + preserve_album_artist, + custom_words, + ): item.store() def update_metadata( @@ -142,6 +178,7 @@ class FtInTitlePlugin(plugins.BeetsPlugin): feat_part: str, drop_feat: bool, keep_in_artist_field: bool, + custom_words: list[str], ) -> None: """Choose how to add new artists to the title and set the new metadata. Also, print out messages about any changes that are made. @@ -151,23 +188,28 @@ class FtInTitlePlugin(plugins.BeetsPlugin): # In case the artist is kept, do not update the artist fields. 
if keep_in_artist_field: self._log.info( - "artist: {0} (Not changing due to keep_in_artist)", item.artist + "artist: {.artist} (Not changing due to keep_in_artist)", item ) else: - self._log.info("artist: {0} -> {1}", item.artist, item.albumartist) - item.artist = item.albumartist + track_artist, _ = split_on_feat( + item.artist, custom_words=custom_words + ) + self._log.info("artist: {0.artist} -> {1}", item, track_artist) + item.artist = track_artist if item.artist_sort: # Just strip the featured artist from the sort name. - item.artist_sort, _ = split_on_feat(item.artist_sort) + item.artist_sort, _ = split_on_feat( + item.artist_sort, custom_words=custom_words + ) # Only update the title if it does not already contain a featured # artist and if we do not drop featuring information. - if not drop_feat and not contains_feat(item.title): + if not drop_feat and not contains_feat(item.title, custom_words): feat_format = self.config["format"].as_str() new_format = feat_format.format(feat_part) new_title = f"{item.title} {new_format}" - self._log.info("title: {0} -> {1}", item.title, new_title) + self._log.info("title: {.title} -> {}", item, new_title) item.title = new_title def ft_in_title( @@ -175,6 +217,8 @@ class FtInTitlePlugin(plugins.BeetsPlugin): item: Item, drop_feat: bool, keep_in_artist_field: bool, + preserve_album_artist: bool, + custom_words: list[str], ) -> bool: """Look for featured artists in the item's artist fields and move them to the title. @@ -188,22 +232,24 @@ class FtInTitlePlugin(plugins.BeetsPlugin): # Check whether there is a featured artist on this track and the # artist field does not exactly match the album artist field. In # that case, we attempt to move the featured artist to the title. - if not albumartist or albumartist == artist: + if preserve_album_artist and albumartist and artist == albumartist: return False - _, featured = split_on_feat(artist) + _, featured = split_on_feat(artist, custom_words=custom_words) if not featured: return False - self._log.info("{}", displayable_path(item.path)) + self._log.info("{.filepath}", item) # Attempt to find the featured artist. - feat_part = find_feat_part(artist, albumartist) + feat_part = find_feat_part(artist, albumartist, custom_words) if not feat_part: self._log.info("no featuring artists found") return False # If we have a featuring artist, move it to the title. 
- self.update_metadata(item, feat_part, drop_feat, keep_in_artist_field) + self.update_metadata( + item, feat_part, drop_feat, keep_in_artist_field, custom_words + ) return True diff --git a/beetsplug/hook.py b/beetsplug/hook.py index 90d66553a..b8869eca4 100644 --- a/beetsplug/hook.py +++ b/beetsplug/hook.py @@ -62,7 +62,7 @@ class HookPlugin(BeetsPlugin): def create_and_register_hook(self, event, command): def hook_function(**kwargs): if command is None or len(command) == 0: - self._log.error('invalid command "{0}"', command) + self._log.error('invalid command "{}"', command) return # For backwards compatibility, use a string formatter that decodes @@ -74,7 +74,7 @@ class HookPlugin(BeetsPlugin): ] self._log.debug( - 'running command "{0}" for event {1}', + 'running command "{}" for event {}', " ".join(command_pieces), event, ) @@ -83,9 +83,9 @@ class HookPlugin(BeetsPlugin): subprocess.check_call(command_pieces) except subprocess.CalledProcessError as exc: self._log.error( - "hook for {0} exited with status {1}", event, exc.returncode + "hook for {} exited with status {.returncode}", event, exc ) except OSError as exc: - self._log.error("hook for {0} failed: {1}", event, exc) + self._log.error("hook for {} failed: {}", event, exc) self.register_listener(event, hook_function) diff --git a/beetsplug/ihate.py b/beetsplug/ihate.py index d6357294d..54a61384c 100644 --- a/beetsplug/ihate.py +++ b/beetsplug/ihate.py @@ -70,10 +70,10 @@ class IHatePlugin(BeetsPlugin): self._log.debug("processing your hate") if self.do_i_hate_this(task, skip_queries): task.choice_flag = Action.SKIP - self._log.info("skipped: {0}", summary(task)) + self._log.info("skipped: {}", summary(task)) return if self.do_i_hate_this(task, warn_queries): - self._log.info("you may hate this: {0}", summary(task)) + self._log.info("you may hate this: {}", summary(task)) else: self._log.debug("nothing to do") else: diff --git a/beetsplug/importadded.py b/beetsplug/importadded.py index 2564f26b2..f728a104f 100644 --- a/beetsplug/importadded.py +++ b/beetsplug/importadded.py @@ -94,7 +94,7 @@ class ImportAddedPlugin(BeetsPlugin): mtime = os.stat(util.syspath(source)).st_mtime self.item_mtime[destination] = mtime self._log.debug( - "Recorded mtime {0} for item '{1}' imported from '{2}'", + "Recorded mtime {} for item '{}' imported from '{}'", mtime, util.displayable_path(destination), util.displayable_path(source), @@ -103,9 +103,9 @@ class ImportAddedPlugin(BeetsPlugin): def update_album_times(self, lib, album): if self.reimported_album(album): self._log.debug( - "Album '{0}' is reimported, skipping import of " + "Album '{.filepath}' is reimported, skipping import of " "added dates for the album and its items.", - util.displayable_path(album.path), + album, ) return @@ -119,18 +119,17 @@ class ImportAddedPlugin(BeetsPlugin): item.store() album.added = min(album_mtimes) self._log.debug( - "Import of album '{0}', selected album.added={1} " + "Import of album '{0.album}', selected album.added={0.added} " "from item file mtimes.", - album.album, - album.added, + album, ) album.store() def update_item_times(self, lib, item): if self.reimported_item(item): self._log.debug( - "Item '{0}' is reimported, skipping import of added date.", - util.displayable_path(item.path), + "Item '{.filepath}' is reimported, skipping import of added date.", + item, ) return mtime = self.item_mtime.pop(item.path, None) @@ -139,9 +138,8 @@ class ImportAddedPlugin(BeetsPlugin): if self.config["preserve_mtimes"].get(bool): self.write_item_mtime(item, mtime) 
self._log.debug( - "Import of item '{0}', selected item.added={1}", - util.displayable_path(item.path), - item.added, + "Import of item '{0.filepath}', selected item.added={0.added}", + item, ) item.store() @@ -153,7 +151,6 @@ class ImportAddedPlugin(BeetsPlugin): if self.config["preserve_write_mtimes"].get(bool): self.write_item_mtime(item, item.added) self._log.debug( - "Write of item '{0}', selected item.added={1}", - util.displayable_path(item.path), - item.added, + "Write of item '{0.filepath}', selected item.added={0.added}", + item, ) diff --git a/beetsplug/importfeeds.py b/beetsplug/importfeeds.py index 0a5a6afe4..a74746f8b 100644 --- a/beetsplug/importfeeds.py +++ b/beetsplug/importfeeds.py @@ -50,7 +50,7 @@ def _build_m3u_filename(basename): path = normpath( os.path.join( config["importfeeds"]["dir"].as_filename(), - date + "_" + basename + ".m3u", + f"{date}_{basename}.m3u", ) ) return path @@ -136,7 +136,7 @@ class ImportFeedsPlugin(BeetsPlugin): if "echo" in formats: self._log.info("Location of imported music:") for path in paths: - self._log.info(" {0}", path) + self._log.info(" {}", path) def album_imported(self, lib, album): self._record_items(lib, album.album, album.items()) diff --git a/beetsplug/info.py b/beetsplug/info.py index c4d5aacbf..cc78aaffe 100644 --- a/beetsplug/info.py +++ b/beetsplug/info.py @@ -117,7 +117,6 @@ def print_data(data, item=None, fmt=None): return maxwidth = max(len(key) for key in formatted) - lineformat = f"{{0:>{maxwidth}}}: {{1}}" if path: ui.print_(displayable_path(path)) @@ -126,7 +125,7 @@ def print_data(data, item=None, fmt=None): value = formatted[field] if isinstance(value, list): value = "; ".join(value) - ui.print_(lineformat.format(field, value)) + ui.print_(f"{field:>{maxwidth}}: {value}") def print_data_keys(data, item=None): @@ -139,12 +138,11 @@ def print_data_keys(data, item=None): if len(formatted) == 0: return - line_format = "{0}{{0}}".format(" " * 4) if path: ui.print_(displayable_path(path)) for field in sorted(formatted): - ui.print_(line_format.format(field)) + ui.print_(f" {field}") class InfoPlugin(BeetsPlugin): @@ -221,7 +219,7 @@ class InfoPlugin(BeetsPlugin): try: data, item = data_emitter(included_keys or "*") except (mediafile.UnreadableFileError, OSError) as ex: - self._log.error("cannot read file: {0}", ex) + self._log.error("cannot read file: {}", ex) continue if opts.summarize: diff --git a/beetsplug/inline.py b/beetsplug/inline.py index c4258fc83..e9a94ac38 100644 --- a/beetsplug/inline.py +++ b/beetsplug/inline.py @@ -28,8 +28,7 @@ class InlineError(Exception): def __init__(self, code, exc): super().__init__( - ("error in inline path field code:\n%s\n%s: %s") - % (code, type(exc).__name__, str(exc)) + f"error in inline path field code:\n{code}\n{type(exc).__name__}: {exc}" ) @@ -37,7 +36,8 @@ def _compile_func(body): """Given Python code for a function body, return a compiled callable that invokes that code. """ - body = "def {}():\n {}".format(FUNC_NAME, body.replace("\n", "\n ")) + body = body.replace("\n", "\n ") + body = f"def {FUNC_NAME}():\n {body}" code = compile(body, "inline", "exec") env = {} eval(code, env) @@ -60,14 +60,14 @@ class InlinePlugin(BeetsPlugin): for key, view in itertools.chain( config["item_fields"].items(), config["pathfields"].items() ): - self._log.debug("adding item field {0}", key) + self._log.debug("adding item field {}", key) func = self.compile_inline(view.as_str(), False) if func is not None: self.template_fields[key] = func # Album fields. 
for key, view in config["album_fields"].items(): - self._log.debug("adding album field {0}", key) + self._log.debug("adding album field {}", key) func = self.compile_inline(view.as_str(), True) if func is not None: self.album_template_fields[key] = func @@ -87,7 +87,7 @@ class InlinePlugin(BeetsPlugin): func = _compile_func(python_code) except SyntaxError: self._log.error( - "syntax error in inline field definition:\n{0}", + "syntax error in inline field definition:\n{}", traceback.format_exc(), ) return diff --git a/beetsplug/ipfs.py b/beetsplug/ipfs.py index 3c6425c06..8b6d57fd3 100644 --- a/beetsplug/ipfs.py +++ b/beetsplug/ipfs.py @@ -77,7 +77,7 @@ class IPFSPlugin(BeetsPlugin): for album in lib.albums(args): if len(album.items()) == 0: self._log.info( - "{0} does not contain items, aborting", album + "{} does not contain items, aborting", album ) self.ipfs_add(album) @@ -122,13 +122,13 @@ class IPFSPlugin(BeetsPlugin): return False try: if album.ipfs: - self._log.debug("{0} already added", album_dir) + self._log.debug("{} already added", album_dir) # Already added to ipfs return False except AttributeError: pass - self._log.info("Adding {0} to ipfs", album_dir) + self._log.info("Adding {} to ipfs", album_dir) if self.config["nocopy"]: cmd = "ipfs add --nocopy -q -r".split() @@ -138,7 +138,7 @@ class IPFSPlugin(BeetsPlugin): try: output = util.command_output(cmd).stdout.split() except (OSError, subprocess.CalledProcessError) as exc: - self._log.error("Failed to add {0}, error: {1}", album_dir, exc) + self._log.error("Failed to add {}, error: {}", album_dir, exc) return False length = len(output) @@ -146,12 +146,12 @@ class IPFSPlugin(BeetsPlugin): line = line.strip() if linenr == length - 1: # last printed line is the album hash - self._log.info("album: {0}", line) + self._log.info("album: {}", line) album.ipfs = line else: try: item = album.items()[linenr] - self._log.info("item: {0}", line) + self._log.info("item: {}", line) item.ipfs = line item.store() except IndexError: @@ -180,11 +180,11 @@ class IPFSPlugin(BeetsPlugin): util.command_output(cmd) except (OSError, subprocess.CalledProcessError) as err: self._log.error( - "Failed to get {0} from ipfs.\n{1}", _hash, err.output + "Failed to get {} from ipfs.\n{.output}", _hash, err ) return False - self._log.info("Getting {0} from ipfs", _hash) + self._log.info("Getting {} from ipfs", _hash) imp = ui.commands.TerminalImportSession( lib, loghandler=None, query=None, paths=[_hash] ) @@ -208,7 +208,7 @@ class IPFSPlugin(BeetsPlugin): msg = f"Failed to publish library. 
Error: {err}" self._log.error(msg) return False - self._log.info("hash of library: {0}", output) + self._log.info("hash of library: {}", output) def ipfs_import(self, lib, args): _hash = args[0] @@ -232,7 +232,7 @@ class IPFSPlugin(BeetsPlugin): try: util.command_output(cmd) except (OSError, subprocess.CalledProcessError): - self._log.error(f"Could not import {_hash}") + self._log.error("Could not import {}", _hash) return False # add all albums from remotes into a combined library @@ -306,7 +306,7 @@ class IPFSPlugin(BeetsPlugin): items.append(item) if len(items) < 1: return False - self._log.info("Adding '{0}' to temporary library", album) + self._log.info("Adding '{}' to temporary library", album) new_album = tmplib.add_album(items) new_album.ipfs = album.ipfs new_album.store(inherit=False) diff --git a/beetsplug/keyfinder.py b/beetsplug/keyfinder.py index 00b688d4f..e2aff24e5 100644 --- a/beetsplug/keyfinder.py +++ b/beetsplug/keyfinder.py @@ -65,7 +65,7 @@ class KeyFinderPlugin(BeetsPlugin): command + [util.syspath(item.path)] ).stdout except (subprocess.CalledProcessError, OSError) as exc: - self._log.error("execution failed: {0}", exc) + self._log.error("execution failed: {}", exc) continue try: @@ -73,7 +73,7 @@ class KeyFinderPlugin(BeetsPlugin): except IndexError: # Sometimes keyfinder-cli returns 0 but with no key, usually # when the file is silent or corrupt, so we log and skip. - self._log.error("no key returned for path: {0}", item.path) + self._log.error("no key returned for path: {.path}", item) continue try: @@ -84,9 +84,7 @@ class KeyFinderPlugin(BeetsPlugin): item["initial_key"] = key self._log.info( - "added computed initial key {0} for {1}", - key, - util.displayable_path(item.path), + "added computed initial key {} for {.filepath}", key, item ) if write: diff --git a/beetsplug/kodiupdate.py b/beetsplug/kodiupdate.py index 2f679c38b..890ab16c4 100644 --- a/beetsplug/kodiupdate.py +++ b/beetsplug/kodiupdate.py @@ -96,10 +96,10 @@ class KodiUpdate(BeetsPlugin): continue self._log.info( - "Kodi update triggered for {0}:{1}", + "Kodi update triggered for {}:{}", instance["host"], instance["port"], ) except requests.exceptions.RequestException as e: - self._log.warning("Kodi update failed: {0}", str(e)) + self._log.warning("Kodi update failed: {}", str(e)) continue diff --git a/beetsplug/lastgenre/__init__.py b/beetsplug/lastgenre/__init__.py index dbab96cf8..3b04e65d6 100644 --- a/beetsplug/lastgenre/__init__.py +++ b/beetsplug/lastgenre/__init__.py @@ -22,17 +22,23 @@ The scraper script used is available here: https://gist.github.com/1241307 """ -import codecs +from __future__ import annotations + import os import traceback -from typing import Union +from functools import singledispatchmethod +from pathlib import Path +from typing import TYPE_CHECKING, Union import pylast import yaml from beets import config, library, plugins, ui from beets.library import Album, Item -from beets.util import normpath, plurality, unique_list +from beets.util import plurality, unique_list + +if TYPE_CHECKING: + from beets.library import LibModel LASTFM = pylast.LastFMNetwork(api_key=plugins.LASTFM_KEY) @@ -42,10 +48,6 @@ PYLAST_EXCEPTIONS = ( pylast.NetworkError, ) -REPLACE = { - "\u2010": "-", -} - # Canonicalization tree processing. 
@@ -104,7 +106,7 @@ class LastGenrePlugin(plugins.BeetsPlugin): "separator": ", ", "prefer_specific": False, "title_case": True, - "extended_debug": False, + "pretend": False, } ) self.setup() @@ -115,39 +117,54 @@ class LastGenrePlugin(plugins.BeetsPlugin): self.import_stages = [self.imported] self._genre_cache = {} + self.whitelist = self._load_whitelist() + self.c14n_branches, self.canonicalize = self._load_c14n_tree() - # Read the whitelist file if enabled. - self.whitelist = set() + def _load_whitelist(self) -> set[str]: + """Load the whitelist from a text file. + + Default whitelist is used if config is True, empty string or set to "nothing". + """ + whitelist = set() wl_filename = self.config["whitelist"].get() - if wl_filename in (True, ""): # Indicates the default whitelist. + if wl_filename in (True, "", None): # Indicates the default whitelist. wl_filename = WHITELIST if wl_filename: - wl_filename = normpath(wl_filename) - with open(wl_filename, "rb") as f: - for line in f: - line = line.decode("utf-8").strip().lower() - if line and not line.startswith("#"): - self.whitelist.add(line) + self._log.debug("Loading whitelist {}", wl_filename) + text = Path(wl_filename).expanduser().read_text(encoding="utf-8") + for line in text.splitlines(): + if (line := line.strip().lower()) and not line.startswith("#"): + whitelist.add(line) - # Read the genres tree for canonicalization if enabled. - self.c14n_branches = [] + return whitelist + + def _load_c14n_tree(self) -> tuple[list[list[str]], bool]: + """Load the canonicalization tree from a YAML file. + + Default tree is used if config is True, empty string, set to "nothing" + or if prefer_specific is enabled. + """ + c14n_branches: list[list[str]] = [] c14n_filename = self.config["canonical"].get() - self.canonicalize = c14n_filename is not False - + canonicalize = c14n_filename is not False # Default tree - if c14n_filename in (True, ""): - c14n_filename = C14N_TREE - elif not self.canonicalize and self.config["prefer_specific"].get(): + if c14n_filename in (True, "", None) or ( # prefer_specific requires a tree, load default tree + not canonicalize and self.config["prefer_specific"].get() + ): c14n_filename = C14N_TREE - # Read the tree if c14n_filename: - self._log.debug("Loading canonicalization tree {0}", c14n_filename) - c14n_filename = normpath(c14n_filename) - with codecs.open(c14n_filename, "r", encoding="utf-8") as f: + self._log.debug("Loading canonicalization tree {}", c14n_filename) + with Path(c14n_filename).expanduser().open(encoding="utf-8") as f: genres_tree = yaml.safe_load(f) - flatten_tree(genres_tree, [], self.c14n_branches) + flatten_tree(genres_tree, [], c14n_branches) + return c14n_branches, canonicalize + + def _tunelog(self, msg, *args, **kwargs): + """Log tuning messages at DEBUG level when verbosity level is high enough.""" + if config["verbose"].as_number() >= 3: + self._log.debug(msg, *args, **kwargs) @property def sources(self) -> tuple[str, ...]: @@ -184,31 +201,28 @@ class LastGenrePlugin(plugins.BeetsPlugin): return [p[1] for p in depth_tag_pairs] def _resolve_genres(self, tags: list[str]) -> list[str]: - """Filter, deduplicate, sort, canonicalize provided genres list. + """Canonicalize, sort and filter a list of genres. - Returns an empty list if the input tags list is empty. - If canonicalization is enabled, it extends the list by incorporating parent genres from the canonicalization tree. 
When a whitelist is set, only parent tags that pass a validity check (_is_valid) are included; - otherwise, it adds the oldest ancestor. - - During canonicalization, it stops adding parent tags if the count of - tags reaches the configured limit (count). + otherwise, it adds the oldest ancestor. Adding parent tags is stopped + when the count of tags reaches the configured limit (count). - The tags list is then deduplicated to ensure only unique genres are retained. - - Optionally, if the 'prefer_specific' configuration is enabled, the - list is sorted by the specificity (depth in the canonicalization tree) - of the genres. - - The method then filters the tag list, ensuring that only valid - genres (those that pass the _is_valid method) are kept. If a - whitelist is set, only genres in the whitelist are considered valid - (which may even result in no genres at all being retained). - - Finally, the filtered list of genres, limited to - the configured count is returned. + - If the 'prefer_specific' configuration is enabled, the list is sorted + by the specificity (depth in the canonicalization tree) of the genres. + - Finally applies whitelist filtering to ensure that only valid + genres are kept. (This may result in no genres at all being retained). + - Returns the filtered list of genres, limited to the configured count. """ if not tags: return [] count = self.config["count"].get(int) + + # Canonicalization (if enabled) if self.canonicalize: # Extend the list to consider tags parents in the c14n tree tags_all = [] @@ -242,8 +256,8 @@ class LastGenrePlugin(plugins.BeetsPlugin): # c14n only adds allowed genres but we may have had forbidden genres in # the original tags list - valid_tags = self._filter_valid_genres(tags) - return valid_tags[: self.config["count"].get(int)] + valid_tags = [t for t in tags if self._is_valid(t)] + return valid_tags[:count] def fetch_genre(self, lastfm_obj): """Return the genre for a pylast entity or None if no suitable genre @@ -252,12 +266,6 @@ class LastGenrePlugin(plugins.BeetsPlugin): min_weight = self.config["min_weight"].get(int) return self._tags_for(lastfm_obj, min_weight) - def _filter_valid_genres(self, genres: list[str]) -> list[str]: - """Filter list of genres, only keep valid.""" - if not genres: - return [] - return [x for x in genres if self._is_valid(x)] - def _is_valid(self, genre: str) -> bool: """Check if the genre is valid. @@ -281,7 +289,7 @@ class LastGenrePlugin(plugins.BeetsPlugin): """ # Shortcut if we're missing metadata. 
if any(not s for s in args): - return None + return [] key = f"{entity}.{'-'.join(str(a) for a in args)}" if key not in self._genre_cache: @@ -289,34 +297,27 @@ class LastGenrePlugin(plugins.BeetsPlugin): self._genre_cache[key] = self.fetch_genre(method(*args)) genre = self._genre_cache[key] - if self.config["extended_debug"]: - self._log.debug(f"last.fm (unfiltered) {entity} tags: {genre}") + self._tunelog("last.fm (unfiltered) {} tags: {}", entity, genre) return genre def fetch_album_genre(self, obj): - """Return the album genre for this Item or Album.""" - return self._filter_valid_genres( - self._last_lookup( - "album", LASTFM.get_album, obj.albumartist, obj.album - ) + """Return raw album genres from Last.fm for this Item or Album.""" + return self._last_lookup( + "album", LASTFM.get_album, obj.albumartist, obj.album ) def fetch_album_artist_genre(self, obj): - """Return the album artist genre for this Item or Album.""" - return self._filter_valid_genres( - self._last_lookup("artist", LASTFM.get_artist, obj.albumartist) - ) + """Return raw album artist genres from Last.fm for this Item or Album.""" + return self._last_lookup("artist", LASTFM.get_artist, obj.albumartist) def fetch_artist_genre(self, item): - """Returns the track artist genre for this Item.""" - return self._filter_valid_genres( - self._last_lookup("artist", LASTFM.get_artist, item.artist) - ) + """Returns raw track artist genres from Last.fm for this Item.""" + return self._last_lookup("artist", LASTFM.get_artist, item.artist) def fetch_track_genre(self, obj): - """Returns the track genre for this Item.""" - return self._filter_valid_genres( - self._last_lookup("track", LASTFM.get_track, obj.artist, obj.title) + """Returns raw track genres from Last.fm for this Item.""" + return self._last_lookup( + "track", LASTFM.get_track, obj.artist, obj.title ) # Main processing: _get_genre() and helpers. @@ -330,7 +331,7 @@ class LastGenrePlugin(plugins.BeetsPlugin): return self.config["separator"].as_str().join(formatted) - def _get_existing_genres(self, obj: Union[Album, Item]) -> list[str]: + def _get_existing_genres(self, obj: LibModel) -> list[str]: """Return a list of genres for this Item or Album. Empty string genres are removed.""" separator = self.config["separator"].get() @@ -346,14 +347,12 @@ class LastGenrePlugin(plugins.BeetsPlugin): self, old: list[str], new: list[str] ) -> list[str]: """Combine old and new genres and process via _resolve_genres.""" - self._log.debug(f"valid last.fm tags: {new}") - self._log.debug(f"existing genres taken into account: {old}") + self._log.debug("raw last.fm tags: {}", new) + self._log.debug("existing genres taken into account: {}", old) combined = old + new return self._resolve_genres(combined) - def _get_genre( - self, obj: Union[Album, Item] - ) -> tuple[Union[str, None], ...]: + def _get_genre(self, obj: LibModel) -> tuple[Union[str, None], ...]: """Get the final genre string for an Album or Item object. `self.sources` specifies allowed genre sources. Starting with the first @@ -372,9 +371,22 @@ class LastGenrePlugin(plugins.BeetsPlugin): applied, while "artist, any" means only new last.fm genres are included and the whitelist feature was disabled. 
""" + + def _try_resolve_stage(stage_label: str, keep_genres, new_genres): + """Try to resolve genres for a given stage and log the result.""" + resolved_genres = self._combine_resolve_and_log( + keep_genres, new_genres + ) + if resolved_genres: + suffix = "whitelist" if self.whitelist else "any" + label = f"{stage_label}, {suffix}" + if keep_genres: + label = f"keep + {label}" + return self._format_and_stringify(resolved_genres), label + return None + keep_genres = [] new_genres = [] - label = "" genres = self._get_existing_genres(obj) if genres and not self.config["force"]: @@ -394,20 +406,26 @@ class LastGenrePlugin(plugins.BeetsPlugin): # album artist, or most popular track genre. if isinstance(obj, library.Item) and "track" in self.sources: if new_genres := self.fetch_track_genre(obj): - label = "track" + if result := _try_resolve_stage( + "track", keep_genres, new_genres + ): + return result - if not new_genres and "album" in self.sources: + if "album" in self.sources: if new_genres := self.fetch_album_genre(obj): - label = "album" + if result := _try_resolve_stage( + "album", keep_genres, new_genres + ): + return result - if not new_genres and "artist" in self.sources: + if "artist" in self.sources: new_genres = [] if isinstance(obj, library.Item): new_genres = self.fetch_artist_genre(obj) - label = "artist" + stage_label = "artist" elif obj.albumartist != config["va_name"].as_str(): new_genres = self.fetch_album_artist_genre(obj) - label = "album artist" + stage_label = "album artist" else: # For "Various Artists", pick the most popular track genre. item_genres = [] @@ -422,24 +440,18 @@ class LastGenrePlugin(plugins.BeetsPlugin): if item_genres: most_popular, rank = plurality(item_genres) new_genres = [most_popular] - label = "most popular track" + stage_label = "most popular track" self._log.debug( 'Most popular track genre "{}" ({}) for VA album.', most_popular, rank, ) - # Return with a combined or freshly fetched genre list. - if new_genres: - resolved_genres = self._combine_resolve_and_log( - keep_genres, new_genres - ) - if resolved_genres: - suffix = "whitelist" if self.whitelist else "any" - label += f", {suffix}" - if keep_genres: - label = f"keep + {label}" - return self._format_and_stringify(resolved_genres), label + if new_genres: + if result := _try_resolve_stage( + stage_label, keep_genres, new_genres + ): + return result # Nothing found, leave original if configured and valid. if obj.genre and self.config["keep_existing"]: @@ -455,8 +467,47 @@ class LastGenrePlugin(plugins.BeetsPlugin): # Beets plugin hooks and CLI. 
+ def _fetch_and_log_genre(self, obj: LibModel) -> None: + """Fetch genre and log it.""" + self._log.info(str(obj)) + obj.genre, label = self._get_genre(obj) + self._log.debug("Resolved ({}): {}", label, obj.genre) + + ui.show_model_changes(obj, fields=["genre"], print_obj=False) + + @singledispatchmethod + def _process(self, obj: LibModel, write: bool) -> None: + """Process an object, dispatching to the appropriate method.""" + raise NotImplementedError + + @_process.register + def _process_track(self, obj: Item, write: bool) -> None: + """Process a single track/item.""" + self._fetch_and_log_genre(obj) + if not self.config["pretend"]: + obj.try_sync(write=write, move=False) + + @_process.register + def _process_album(self, obj: Album, write: bool) -> None: + """Process an entire album.""" + self._fetch_and_log_genre(obj) + if "track" in self.sources: + for item in obj.items(): + self._process(item, write) + + if not self.config["pretend"]: + obj.try_sync( + write=write, move=False, inherit="track" not in self.sources + ) + def commands(self): lastgenre_cmd = ui.Subcommand("lastgenre", help="fetch genres") + lastgenre_cmd.parser.add_option( + "-p", + "--pretend", + action="store_true", + help="show actions but do nothing", + ) lastgenre_cmd.parser.add_option( "-f", "--force", @@ -506,94 +557,20 @@ class LastGenrePlugin(plugins.BeetsPlugin): dest="album", help="match albums instead of items (default)", ) - lastgenre_cmd.parser.add_option( - "-d", - "--debug", - action="store_true", - dest="extended_debug", - help="extended last.fm debug logging", - ) lastgenre_cmd.parser.set_defaults(album=True) def lastgenre_func(lib, opts, args): - write = ui.should_write() self.config.set_args(opts) - if opts.album: - # Fetch genres for whole albums - for album in lib.albums(args): - album.genre, src = self._get_genre(album) - self._log.info( - 'genre for album "{0.album}" ({1}): {0.genre}', - album, - src, - ) - if "track" in self.sources: - album.store(inherit=False) - else: - album.store() - - for item in album.items(): - # If we're using track-level sources, also look up each - # track on the album. - if "track" in self.sources: - item.genre, src = self._get_genre(item) - item.store() - self._log.info( - 'genre for track "{0.title}" ({1}): {0.genre}', - item, - src, - ) - - if write: - item.try_write() - else: - # Just query singletons, i.e. items that are not part of - # an album - for item in lib.items(args): - item.genre, src = self._get_genre(item) - item.store() - self._log.info( - "genre for track {0.title} ({1}): {0.genre}", item, src - ) + method = lib.albums if opts.album else lib.items + for obj in method(args): + self._process(obj, write=ui.should_write()) lastgenre_cmd.func = lastgenre_func return [lastgenre_cmd] def imported(self, session, task): - """Event hook called when an import task finishes.""" - if task.is_album: - album = task.album - album.genre, src = self._get_genre(album) - self._log.debug( - 'genre for album "{0.album}" ({1}): {0.genre}', album, src - ) - - # If we're using track-level sources, store the album genre only, - # then also look up individual track genres. - if "track" in self.sources: - album.store(inherit=False) - for item in album.items(): - item.genre, src = self._get_genre(item) - self._log.debug( - 'genre for track "{0.title}" ({1}): {0.genre}', - item, - src, - ) - item.store() - # Store the album genre and inherit to tracks. 
- else: - album.store() - - else: - item = task.item - item.genre, src = self._get_genre(item) - self._log.debug( - 'genre for track "{0.title}" ({1}): {0.genre}', - item, - src, - ) - item.store() + self._process(task.album if task.is_album else task.item, write=False) def _tags_for(self, obj, min_weight=None): """Core genre identification routine. @@ -613,12 +590,12 @@ class LastGenrePlugin(plugins.BeetsPlugin): try: res = obj.get_top_tags() except PYLAST_EXCEPTIONS as exc: - self._log.debug("last.fm error: {0}", exc) + self._log.debug("last.fm error: {}", exc) return [] except Exception as exc: # Isolate bugs in pylast. self._log.debug("{}", traceback.format_exc()) - self._log.error("error in pylast library: {0}", exc) + self._log.error("error in pylast library: {}", exc) return [] # Filter by weight (optionally). diff --git a/beetsplug/lastgenre/genres-tree.yaml b/beetsplug/lastgenre/genres-tree.yaml index c8ae42478..d7acfbc1f 100644 --- a/beetsplug/lastgenre/genres-tree.yaml +++ b/beetsplug/lastgenre/genres-tree.yaml @@ -9,6 +9,7 @@ - cape jazz - chimurenga - coupé-décalé + - egyptian - fuji music - genge - highlife @@ -35,6 +36,7 @@ - sega - seggae - semba + - shangaan electro - soukous - taarab - zouglou @@ -133,6 +135,7 @@ - chutney - chutney soca - compas + - folklore argentino - mambo - merengue - méringue @@ -185,6 +188,7 @@ - humor - parody music - stand-up + - kabarett - country: - alternative country: - cowpunk @@ -250,7 +254,6 @@ - acid breaks - baltimore club - big beat - - breakbeat hardcore - broken beat - florida breaks - nu skool breaks @@ -287,12 +290,15 @@ - jump-up - liquid funk - neurofunk - - oldschool jungle: + - jungle: - darkside jungle - ragga jungle + - oldschool jungle - raggacore - sambass - techstep + - leftfield + - halftime - electro: - crunk - electro backbeat @@ -343,6 +349,7 @@ - hardcore: - bouncy house - bouncy techno + - breakbeat hardcore - breakcore - digital hardcore - doomcore @@ -400,6 +407,8 @@ - power electronics - power noise - witch house + - juke: + - footwork - post-disco: - boogie - dance-pop @@ -414,6 +423,7 @@ - techno: - acid techno - detroit techno + - dub techno - free tekno - ghettotech - minimal @@ -469,7 +479,6 @@ - chap hop - christian hip hop - conscious hip hop - - country-rap - crunkcore - cumbia rap - east coast hip hop: @@ -481,6 +490,7 @@ - freestyle rap - g-funk - gangsta rap + - glitch hop - golden age hip hop - hip hop soul - hip pop @@ -521,11 +531,14 @@ - west coast hip hop: - chicano rap - jerkin' + - austrian hip hop + - german hip hop - jazz: - asian american jazz - avant-garde jazz - bebop - boogie-woogie + - brass band - british dance band - chamber jazz - continental jazz @@ -568,14 +581,13 @@ - vocal jazz - west coast gypsy jazz - west coast jazz -- other: - - worldbeat +- kids music: + - kinderlieder - pop: - adult contemporary - arab pop - baroque pop - bubblegum pop - - chanson - christian pop - classical crossover - europop: @@ -640,6 +652,7 @@ - beat music - chinese rock - christian rock + - classic rock - dark cabaret - desert rock - experimental rock @@ -720,6 +733,7 @@ - art punk - christian punk - deathrock + - deutschpunk - folk punk: - celtic punk - gypsy punk @@ -762,5 +776,18 @@ - dancehall - ska: - 2 tone - - dub - rocksteady + - dub +- soundtrack: +- singer-songwriter: + - cantautorato + - cantautor + - cantautora + - chanson + - canción de autor + - nueva canción +- world: + - world dub + - world fusion + - worldbeat + diff --git a/beetsplug/lastgenre/genres.txt b/beetsplug/lastgenre/genres.txt 
index 28b1225c3..571b6f350 100644 --- a/beetsplug/lastgenre/genres.txt +++ b/beetsplug/lastgenre/genres.txt @@ -160,10 +160,14 @@ calypso jazz calypso-style baila campursari canatronic +canción de autor candombe canon canrock cantata +cantautorato +cantautor +cantautora cante chico cante jondo canterbury scene @@ -371,6 +375,7 @@ desert rock desi detroit blues detroit techno +dub techno dhamar dhimotiká dhrupad @@ -684,7 +689,7 @@ indo rock indonesian pop indoyíftika industrial death metal -industrial hip-hop +industrial hip hop industrial metal industrial music industrial musical @@ -1069,10 +1074,10 @@ nortec norteño northern soul nota -nu breaks nu jazz nu metal nu soul +nu skool breaks nueva canción nyatiti néo kýma diff --git a/beetsplug/lastimport.py b/beetsplug/lastimport.py index 122e5f9cd..baa522d14 100644 --- a/beetsplug/lastimport.py +++ b/beetsplug/lastimport.py @@ -70,7 +70,7 @@ class CustomUser(pylast.User): tuple with the total number of pages of results. Includes an MBID, if found. """ - doc = self._request(self.ws_prefix + "." + method, cacheable, params) + doc = self._request(f"{self.ws_prefix}.{method}", cacheable, params) toptracks_node = doc.getElementsByTagName("toptracks")[0] total_pages = int(toptracks_node.getAttribute("totalPages")) @@ -120,7 +120,7 @@ def import_lastfm(lib, log): if not user: raise ui.UserError("You must specify a user name for lastimport") - log.info("Fetching last.fm library for @{0}", user) + log.info("Fetching last.fm library for @{}", user) page_total = 1 page_current = 0 @@ -130,7 +130,7 @@ def import_lastfm(lib, log): # Iterate through a yet to be known page total count while page_current < page_total: log.info( - "Querying page #{0}{1}...", + "Querying page #{}{}...", page_current + 1, f"/{page_total}" if page_total > 1 else "", ) @@ -147,27 +147,27 @@ def import_lastfm(lib, log): unknown_total += unknown break else: - log.error("ERROR: unable to read page #{0}", page_current + 1) + log.error("ERROR: unable to read page #{}", page_current + 1) if retry < retry_limit: log.info( - "Retrying page #{0}... ({1}/{2} retry)", + "Retrying page #{}... ({}/{} retry)", page_current + 1, retry + 1, retry_limit, ) else: log.error( - "FAIL: unable to fetch page #{0}, ", - "tried {1} times", + "FAIL: unable to fetch page #{}, ", + "tried {} times", page_current, retry + 1, ) page_current += 1 log.info("... 
done!") - log.info("finished processing {0} song pages", page_total) - log.info("{0} unknown play-counts", unknown_total) - log.info("{0} play-counts imported", found_total) + log.info("finished processing {} song pages", page_total) + log.info("{} unknown play-counts", unknown_total) + log.info("{} play-counts imported", found_total) def fetch_tracks(user, page, limit): @@ -201,7 +201,7 @@ def process_tracks(lib, tracks, log): total = len(tracks) total_found = 0 total_fails = 0 - log.info("Received {0} tracks in this page, processing...", total) + log.info("Received {} tracks in this page, processing...", total) for num in range(0, total): song = None @@ -220,7 +220,7 @@ def process_tracks(lib, tracks, log): else None ) - log.debug("query: {0} - {1} ({2})", artist, title, album) + log.debug("query: {} - {} ({})", artist, title, album) # First try to query by musicbrainz's trackid if trackid: @@ -231,7 +231,7 @@ def process_tracks(lib, tracks, log): # If not, try just album/title if song is None: log.debug( - "no album match, trying by album/title: {0} - {1}", album, title + "no album match, trying by album/title: {} - {}", album, title ) query = dbcore.AndQuery( [ @@ -268,10 +268,9 @@ def process_tracks(lib, tracks, log): count = int(song.get("play_count", 0)) new_count = int(tracks[num].get("playcount", 1)) log.debug( - "match: {0} - {1} ({2}) updating: play_count {3} => {4}", - song.artist, - song.title, - song.album, + "match: {0.artist} - {0.title} ({0.album}) updating:" + " play_count {1} => {2}", + song, count, new_count, ) @@ -280,11 +279,11 @@ def process_tracks(lib, tracks, log): total_found += 1 else: total_fails += 1 - log.info(" - No match: {0} - {1} ({2})", artist, title, album) + log.info(" - No match: {} - {} ({})", artist, title, album) if total_fails > 0: log.info( - "Acquired {0}/{1} play-counts ({2} unknown)", + "Acquired {}/{} play-counts ({} unknown)", total_found, total, total_fails, diff --git a/beetsplug/listenbrainz.py b/beetsplug/listenbrainz.py index c579645db..2aa4e7ad6 100644 --- a/beetsplug/listenbrainz.py +++ b/beetsplug/listenbrainz.py @@ -13,7 +13,6 @@ from beetsplug.lastimport import process_tracks class ListenBrainzPlugin(BeetsPlugin): """A Beets plugin for interacting with ListenBrainz.""" - data_source = "ListenBrainz" ROOT = "http://api.listenbrainz.org/1/" def __init__(self): @@ -27,7 +26,7 @@ class ListenBrainzPlugin(BeetsPlugin): def commands(self): """Add beet UI commands to interact with ListenBrainz.""" lbupdate_cmd = ui.Subcommand( - "lbimport", help=f"Import {self.data_source} history" + "lbimport", help="Import ListenBrainz history" ) def func(lib, opts, args): @@ -42,14 +41,14 @@ class ListenBrainzPlugin(BeetsPlugin): unknown_total = 0 ls = self.get_listens() tracks = self.get_tracks_from_listens(ls) - log.info(f"Found {len(ls)} listens") + log.info("Found {} listens", len(ls)) if tracks: found, unknown = process_tracks(lib, tracks, log) found_total += found unknown_total += unknown log.info("... 
done!") - log.info("{0} unknown play-counts", unknown_total) - log.info("{0} play-counts imported", found_total) + log.info("{} unknown play-counts", unknown_total) + log.info("{} play-counts imported", found_total) def _make_request(self, url, params=None): """Makes a request to the ListenBrainz API.""" @@ -63,7 +62,7 @@ class ListenBrainzPlugin(BeetsPlugin): response.raise_for_status() return response.json() except requests.exceptions.RequestException as e: - self._log.debug(f"Invalid Search Error: {e}") + self._log.debug("Invalid Search Error: {}", e) return None def get_listens(self, min_ts=None, max_ts=None, count=None): @@ -156,7 +155,7 @@ class ListenBrainzPlugin(BeetsPlugin): playlist_info = playlist.get("playlist") if playlist_info.get("creator") == "listenbrainz": title = playlist_info.get("title") - self._log.debug(f"Playlist title: {title}") + self._log.debug("Playlist title: {}", title) playlist_type = ( "Exploration" if "Exploration" in title else "Jams" ) @@ -179,9 +178,7 @@ class ListenBrainzPlugin(BeetsPlugin): listenbrainz_playlists, key=lambda x: x["date"], reverse=True ) for playlist in listenbrainz_playlists: - self._log.debug( - f"Playlist: {playlist['type']} - {playlist['date']}" - ) + self._log.debug("Playlist: {0[type]} - {0[date]}", playlist) return listenbrainz_playlists def get_playlist(self, identifier): diff --git a/beetsplug/lyrics.py b/beetsplug/lyrics.py index f1c40ab24..4c35d8a2e 100644 --- a/beetsplug/lyrics.py +++ b/beetsplug/lyrics.py @@ -42,10 +42,9 @@ from beets.autotag.distance import string_dist from beets.util.config import sanitize_choices if TYPE_CHECKING: - from logging import Logger - from beets.importer import ImportTask from beets.library import Item, Library + from beets.logging import BeetsLogger as Logger from ._typing import ( GeniusAPI, @@ -154,7 +153,7 @@ def search_pairs(item): # examples include (live), (remix), and (acoustic). r"(.+?)\s+[(].*[)]$", # Remove any featuring artists from the title - r"(.*?) {}".format(plugins.feat_tokens(for_artist=False)), + rf"(.*?) {plugins.feat_tokens(for_artist=False)}", # Remove part of title after colon ':' for songs with subtitles r"(.+?)\s*:.*", ] @@ -186,7 +185,7 @@ def slug(text: str) -> str: class RequestHandler: - _log: beets.logging.Logger + _log: Logger def debug(self, message: str, *args) -> None: """Log a debug message with the class name.""" @@ -508,9 +507,9 @@ class SearchBackend(SoupMixin, Backend): # log out the candidate that did not make it but was close. # This may show a matching candidate with some noise in the name self.debug( - "({}, {}) does not match ({}, {}) but dist was close: {:.2f}", - result.artist, - result.title, + "({0.artist}, {0.title}) does not match ({1}, {2}) but dist" + " was close: {3:.2f}", + result, target_artist, target_title, max_dist, @@ -582,7 +581,7 @@ class Tekstowo(SearchBackend): """Fetch lyrics from Tekstowo.pl.""" BASE_URL = "https://www.tekstowo.pl" - SEARCH_URL = BASE_URL + "/szukaj,{}.html" + SEARCH_URL = f"{BASE_URL}/szukaj,{{}}.html" def build_url(self, artist, title): artistitle = f"{artist.title()} {title.title()}" @@ -644,7 +643,7 @@ class Google(SearchBackend): re.IGNORECASE | re.VERBOSE, ) #: Split cleaned up URL title into artist and title parts. 
- URL_TITLE_PARTS_RE = re.compile(r" +(?:[ :|-]+|par|by) +") + URL_TITLE_PARTS_RE = re.compile(r" +(?:[ :|-]+|par|by) +|, ") SOURCE_DIST_FACTOR = {"www.azlyrics.com": 0.5, "www.songlyrics.com": 0.6} @@ -702,8 +701,8 @@ class Google(SearchBackend): result_artist, result_title = "", parts[0] else: # sort parts by their similarity to the artist - parts.sort(key=lambda p: cls.get_part_dist(artist, title, p)) - result_artist, result_title = parts[0], " ".join(parts[1:]) + result_artist = min(parts, key=lambda p: string_dist(artist, p)) + result_title = min(parts, key=lambda p: string_dist(title, p)) return SearchResult(result_artist, result_title, item["link"]) @@ -746,7 +745,9 @@ class Translator(RequestHandler): TRANSLATE_URL = "https://api.cognitive.microsofttranslator.com/translate" LINE_PARTS_RE = re.compile(r"^(\[\d\d:\d\d.\d\d\]|) *(.*)$") SEPARATOR = " | " - remove_translations = partial(re.compile(r" / [^\n]+").sub, "") + remove_translations = staticmethod( + partial(re.compile(r" / [^\n]+").sub, "") + ) _log: Logger api_key: str @@ -838,15 +839,16 @@ class Translator(RequestHandler): lyrics_language = langdetect.detect(new_lyrics).upper() if lyrics_language == self.to_language: self.info( - "🔵 Lyrics are already in the target language {}", - self.to_language, + "🔵 Lyrics are already in the target language {.to_language}", + self, ) return new_lyrics if self.from_languages and lyrics_language not in self.from_languages: self.info( - "🔵 Configuration {} does not permit translating from {}", - self.from_languages, + "🔵 Configuration {.from_languages} does not permit translating" + " from {}", + self, lyrics_language, ) return new_lyrics @@ -854,7 +856,7 @@ class Translator(RequestHandler): lyrics, *url = new_lyrics.split("\n\nSource: ") with self.handle_request(): translated_lines = self.append_translations(lyrics.splitlines()) - self.info("🟢 Translated lyrics to {}", self.to_language) + self.info("🟢 Translated lyrics to {.to_language}", self) return "\n\nSource: ".join(["\n".join(translated_lines), *url]) @@ -1090,7 +1092,7 @@ class LyricsPlugin(RequestHandler, plugins.BeetsPlugin): return if lyrics := self.find_lyrics(item): - self.info("🟢 Found lyrics: {0}", item) + self.info("🟢 Found lyrics: {}", item) if translator := self.translator: lyrics = translator.translate(lyrics, item.lyrics) else: diff --git a/beetsplug/mbcollection.py b/beetsplug/mbcollection.py index 7a1289d1b..2f9ef709e 100644 --- a/beetsplug/mbcollection.py +++ b/beetsplug/mbcollection.py @@ -83,9 +83,7 @@ class MusicBrainzCollectionPlugin(BeetsPlugin): collection = self.config["collection"].as_str() if collection: if collection not in collection_ids: - raise ui.UserError( - "invalid collection ID: {}".format(collection) - ) + raise ui.UserError(f"invalid collection ID: {collection}") return collection # No specified collection. Just return the first collection ID @@ -156,10 +154,10 @@ class MusicBrainzCollectionPlugin(BeetsPlugin): if re.match(UUID_REGEX, aid): album_ids.append(aid) else: - self._log.info("skipping invalid MBID: {0}", aid) + self._log.info("skipping invalid MBID: {}", aid) # Submit to MusicBrainz. 
- self._log.info("Updating MusicBrainz collection {0}...", collection_id) + self._log.info("Updating MusicBrainz collection {}...", collection_id) submit_albums(collection_id, album_ids) if remove_missing: self.remove_missing(collection_id, lib.albums()) diff --git a/beetsplug/mbsubmit.py b/beetsplug/mbsubmit.py index e23c0d610..93e88dc9e 100644 --- a/beetsplug/mbsubmit.py +++ b/beetsplug/mbsubmit.py @@ -73,7 +73,7 @@ class MBSubmitPlugin(BeetsPlugin): subprocess.Popen([picard_path] + paths) self._log.info("launched picard from\n{}", picard_path) except OSError as exc: - self._log.error(f"Could not open picard, got error:\n{exc}") + self._log.error("Could not open picard, got error:\n{}", exc) def print_tracks(self, session, task): for i in sorted(task.items, key=lambda i: i.track): diff --git a/beetsplug/metasync/__init__.py b/beetsplug/metasync/__init__.py index f99e820b5..d4e31851e 100644 --- a/beetsplug/metasync/__init__.py +++ b/beetsplug/metasync/__init__.py @@ -49,7 +49,7 @@ def load_meta_sources(): meta_sources = {} for module_path, class_name in SOURCES.items(): - module = import_module(METASYNC_MODULE + "." + module_path) + module = import_module(f"{METASYNC_MODULE}.{module_path}") meta_sources[class_name.lower()] = getattr(module, class_name) return meta_sources @@ -117,13 +117,13 @@ class MetaSyncPlugin(BeetsPlugin): try: cls = META_SOURCES[player] except KeyError: - self._log.error("Unknown metadata source '{}'".format(player)) + self._log.error("Unknown metadata source '{}'", player) try: meta_source_instances[player] = cls(self.config, self._log) except (ImportError, ConfigValueError) as e: self._log.error( - f"Failed to instantiate metadata source {player!r}: {e}" + "Failed to instantiate metadata source {!r}: {}", player, e ) # Avoid needlessly iterating over items diff --git a/beetsplug/metasync/amarok.py b/beetsplug/metasync/amarok.py index 9afe6dbca..47e6a1a65 100644 --- a/beetsplug/metasync/amarok.py +++ b/beetsplug/metasync/amarok.py @@ -44,11 +44,12 @@ class Amarok(MetaSource): "amarok_lastplayed": types.DATE, } - query_xml = '<query version="1.0"> \ - <filters> \ - <and><include field="filename" value=%s /></and> \ - </filters> \ - </query>' + query_xml = """ + <query version="1.0"> + <filters> + <and><include field="filename" value={} /></and> + </filters> + </query>""" def __init__(self, config, log): super().__init__(config, log) @@ -68,7 +69,7 @@ class Amarok(MetaSource): # of the result set. 
So query for the filename and then try to match # the correct item from the results we get back results = self.collection.Query( - self.query_xml % quoteattr(basename(path)) + self.query_xml.format(quoteattr(basename(path))) ) for result in results: if result["xesam:url"] != path: diff --git a/beetsplug/metasync/itunes.py b/beetsplug/metasync/itunes.py index f777d0d55..6f441ef8b 100644 --- a/beetsplug/metasync/itunes.py +++ b/beetsplug/metasync/itunes.py @@ -76,12 +76,12 @@ class Itunes(MetaSource): library_path = config["itunes"]["library"].as_filename() try: - self._log.debug(f"loading iTunes library from {library_path}") + self._log.debug("loading iTunes library from {}", library_path) with create_temporary_copy(library_path) as library_copy: with open(library_copy, "rb") as library_copy_f: raw_library = plistlib.load(library_copy_f) except OSError as e: - raise ConfigValueError("invalid iTunes library: " + e.strerror) + raise ConfigValueError(f"invalid iTunes library: {e.strerror}") except Exception: # It's likely the user configured their '.itl' library (<> xml) if os.path.splitext(library_path)[1].lower() != ".xml": @@ -91,7 +91,7 @@ class Itunes(MetaSource): ) else: hint = "" - raise ConfigValueError("invalid iTunes library" + hint) + raise ConfigValueError(f"invalid iTunes library{hint}") # Make the iTunes library queryable using the path self.collection = { @@ -104,7 +104,7 @@ class Itunes(MetaSource): result = self.collection.get(util.bytestring_path(item.path).lower()) if not result: - self._log.warning(f"no iTunes match found for {item}") + self._log.warning("no iTunes match found for {}", item) return item.itunes_rating = result.get("Rating") diff --git a/beetsplug/missing.py b/beetsplug/missing.py index d0e956930..cbdda4599 100644 --- a/beetsplug/missing.py +++ b/beetsplug/missing.py @@ -226,8 +226,8 @@ class MissingPlugin(BeetsPlugin): for track_info in album_info.tracks: if track_info.track_id not in item_mbids: self._log.debug( - "track {0} in album {1}", - track_info.track_id, - album_info.album_id, + "track {.track_id} in album {.album_id}", + track_info, + album_info, ) yield _item(track_info, album_info, album.id) diff --git a/beetsplug/mpdstats.py b/beetsplug/mpdstats.py index 52ae88e1f..0a3e1de02 100644 --- a/beetsplug/mpdstats.py +++ b/beetsplug/mpdstats.py @@ -51,8 +51,8 @@ class MPDClientWrapper: if not self.strip_path.endswith("/"): self.strip_path += "/" - self._log.debug("music_directory: {0}", self.music_directory) - self._log.debug("strip_path: {0}", self.strip_path) + self._log.debug("music_directory: {.music_directory}", self) + self._log.debug("strip_path: {.strip_path}", self) self.client = mpd.MPDClient() @@ -64,7 +64,7 @@ class MPDClientWrapper: if host[0] in ["/", "~"]: host = os.path.expanduser(host) - self._log.info("connecting to {0}:{1}", host, port) + self._log.info("connecting to {}:{}", host, port) try: self.client.connect(host, port) except OSError as e: @@ -89,7 +89,7 @@ class MPDClientWrapper: try: return getattr(self.client, command)() except (OSError, mpd.ConnectionError) as err: - self._log.error("{0}", err) + self._log.error("{}", err) if retries <= 0: # if we exited without breaking, we couldn't reconnect in time :( @@ -123,7 +123,7 @@ class MPDClientWrapper: result = os.path.join(self.music_directory, file) else: result = entry["file"] - self._log.debug("returning: {0}", result) + self._log.debug("returning: {}", result) return result, entry.get("id") def status(self): @@ -169,7 +169,7 @@ class MPDStats: if item: return item else: - 
self._log.info("item not found: {0}", displayable_path(path)) + self._log.info("item not found: {}", displayable_path(path)) def update_item(self, item, attribute, value=None, increment=None): """Update the beets item. Set attribute to value or increment the value @@ -188,10 +188,10 @@ class MPDStats: item.store() self._log.debug( - "updated: {0} = {1} [{2}]", + "updated: {} = {} [{.filepath}]", attribute, item[attribute], - displayable_path(item.path), + item, ) def update_rating(self, item, skipped): @@ -234,12 +234,12 @@ class MPDStats: def handle_played(self, song): """Updates the play count of a song.""" self.update_item(song["beets_item"], "play_count", increment=1) - self._log.info("played {0}", displayable_path(song["path"])) + self._log.info("played {}", displayable_path(song["path"])) def handle_skipped(self, song): """Updates the skip count of a song.""" self.update_item(song["beets_item"], "skip_count", increment=1) - self._log.info("skipped {0}", displayable_path(song["path"])) + self._log.info("skipped {}", displayable_path(song["path"])) def on_stop(self, status): self._log.info("stop") @@ -278,11 +278,11 @@ class MPDStats: self.handle_song_change(self.now_playing) if is_url(path): - self._log.info("playing stream {0}", displayable_path(path)) + self._log.info("playing stream {}", displayable_path(path)) self.now_playing = None return - self._log.info("playing {0}", displayable_path(path)) + self._log.info("playing {}", displayable_path(path)) self.now_playing = { "started": time.time(), @@ -307,12 +307,12 @@ class MPDStats: if "player" in events: status = self.mpd.status() - handler = getattr(self, "on_" + status["state"], None) + handler = getattr(self, f"on_{status['state']}", None) if handler: handler(status) else: - self._log.debug('unhandled status "{0}"', status) + self._log.debug('unhandled status "{}"', status) events = self.mpd.events() diff --git a/beetsplug/mpdupdate.py b/beetsplug/mpdupdate.py index cb53afaa5..5d8fc598b 100644 --- a/beetsplug/mpdupdate.py +++ b/beetsplug/mpdupdate.py @@ -101,8 +101,8 @@ class MPDUpdatePlugin(BeetsPlugin): try: s = BufferedSocket(host, port) - except OSError as e: - self._log.warning("MPD connection failed: {0}", str(e.strerror)) + except OSError: + self._log.warning("MPD connection failed", exc_info=True) return resp = s.readline() @@ -111,7 +111,7 @@ class MPDUpdatePlugin(BeetsPlugin): return if password: - s.send(b'password "%s"\n' % password.encode("utf8")) + s.send(f'password "{password}"\n'.encode()) resp = s.readline() if b"OK" not in resp: self._log.warning("Authentication failed: {0!r}", resp) diff --git a/beetsplug/musicbrainz.py b/beetsplug/musicbrainz.py index b52e44b23..8e259e94b 100644 --- a/beetsplug/musicbrainz.py +++ b/beetsplug/musicbrainz.py @@ -18,12 +18,14 @@ from __future__ import annotations import traceback from collections import Counter +from contextlib import suppress from functools import cached_property from itertools import product from typing import TYPE_CHECKING, Any, Iterable, Sequence from urllib.parse import urljoin import musicbrainzngs +from confuse.exceptions import NotFoundError import beets import beets.autotag.hooks @@ -68,9 +70,7 @@ class MusicBrainzAPIError(util.HumanReadableError): super().__init__(reason, verb, tb) def get_message(self): - return "{} in {} with query {}".format( - self._reasonstr(), self.verb, repr(self.query) - ) + return f"{self._reasonstr()} in {self.verb} with query {self.query!r}" RELEASE_INCLUDES = list( @@ -203,7 +203,7 @@ def _multi_artist_credit( def 
track_url(trackid: str) -> str: - return urljoin(BASE_URL, "recording/" + trackid) + return urljoin(BASE_URL, f"recording/{trackid}") def _flatten_artist_credit(credit: list[JSONDict]) -> tuple[str, str, str]: @@ -248,7 +248,7 @@ def _get_related_artist_names(relations, relation_type): def album_url(albumid: str) -> str: - return urljoin(BASE_URL, "release/" + albumid) + return urljoin(BASE_URL, f"release/{albumid}") def _preferred_release_event( @@ -293,7 +293,7 @@ def _set_date_str( continue if original: - key = "original_" + key + key = f"original_{key}" setattr(info, key, date_num) @@ -373,7 +373,6 @@ class MusicBrainzPlugin(MetadataSourcePlugin): "https": False, "ratelimit": 1, "ratelimit_interval": 1, - "searchlimit": 5, "genres": False, "external_ids": { "discogs": False, @@ -385,6 +384,15 @@ class MusicBrainzPlugin(MetadataSourcePlugin): "extra_tags": [], }, ) + # TODO: Remove in 3.0.0 + with suppress(NotFoundError): + self.config["search_limit"] = self.config["match"][ + "searchlimit" + ].get() + self._log.warning( + "'musicbrainz.searchlimit' option is deprecated and will be " + "removed in 3.0.0. Use 'musicbrainz.search_limit' instead." + ) hostname = self.config["host"].as_str() https = self.config["https"].get(bool) # Only call set_hostname when a custom server is configured. Since @@ -801,7 +809,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin): ) try: method = getattr(musicbrainzngs, f"search_{query_type}s") - res = method(limit=self.config["searchlimit"].get(int), **filters) + res = method(limit=self.config["search_limit"].get(), **filters) except musicbrainzngs.MusicBrainzError as exc: raise MusicBrainzAPIError( exc, f"{query_type} search", filters, traceback.format_exc() @@ -838,7 +846,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin): """ self._log.debug("Requesting MusicBrainz release {}", album_id) if not (albumid := self._extract_id(album_id)): - self._log.debug("Invalid MBID ({0}).", album_id) + self._log.debug("Invalid MBID ({}).", album_id) return None try: @@ -875,7 +883,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin): or None if no track is found. May raise a MusicBrainzAPIError. """ if not (trackid := self._extract_id(track_id)): - self._log.debug("Invalid MBID ({0}).", track_id) + self._log.debug("Invalid MBID ({}).", track_id) return None try: diff --git a/beetsplug/parentwork.py b/beetsplug/parentwork.py index ab2d39b2b..eb2fd8f11 100644 --- a/beetsplug/parentwork.py +++ b/beetsplug/parentwork.py @@ -179,10 +179,8 @@ class ParentWorkPlugin(BeetsPlugin): if not item.mb_workid: self._log.info( - "No work for {}, \ -add one at https://musicbrainz.org/recording/{}", + "No work for {0}, add one at https://musicbrainz.org/recording/{0.mb_trackid}", item, - item.mb_trackid, ) return diff --git a/beetsplug/play.py b/beetsplug/play.py index 3e7ba0a9e..8fb146213 100644 --- a/beetsplug/play.py +++ b/beetsplug/play.py @@ -28,6 +28,11 @@ from beets.util import get_temp_filename # If this is missing, they're placed at the end. ARGS_MARKER = "$args" +# Indicate where the playlist file (with absolute path) should be inserted into +# the command string. If this is missing, its placed at the end, but before +# arguments. +PLS_MARKER = "$playlist" + def play( command_str, @@ -43,7 +48,7 @@ def play( """ # Print number of tracks or albums to be played, log command to be run. 
item_type += "s" if len(selection) > 1 else "" - ui.print_("Playing {} {}.".format(len(selection), item_type)) + ui.print_(f"Playing {len(selection)} {item_type}.") log.debug("executing command: {} {!r}", command_str, open_args) try: @@ -132,8 +137,23 @@ class PlayPlugin(BeetsPlugin): return open_args = self._playlist_or_paths(paths) + open_args_str = [ + p.decode("utf-8") for p in open_args + ] command_str = self._command_str(opts.args) + if PLS_MARKER in command_str: + if not config["play"]["raw"]: + command_str = command_str.replace( + PLS_MARKER, "".join(open_args_str) + ) + self._log.debug( + "command altered by PLS_MARKER to: {}", command_str + ) + open_args = [] + else: + command_str = command_str.replace(PLS_MARKER, " ") + # Check if the selection exceeds configured threshold. If True, # cancel, otherwise proceed with play command. if opts.yes or not self._exceeds_threshold( @@ -154,7 +174,7 @@ return f"{command_str} {args}" else: # Don't include the marker in the command. - return command_str.replace(" " + ARGS_MARKER, "") + return command_str.replace(f" {ARGS_MARKER}", "") def _playlist_or_paths(self, paths): """Return either the raw paths of items or a playlist of the items.""" @@ -162,6 +182,6 @@ return paths else: return [self._create_tmp_playlist(paths)] def _exceeds_threshold( self, selection, command_str, open_args, item_type="track" @@ -179,9 +200,7 @@ ui.print_( ui.colorize( "text_warning", - "You are about to queue {} {}.".format( - len(selection), item_type - ), + f"You are about to queue {len(selection)} {item_type}.", ) ) diff --git a/beetsplug/playlist.py b/beetsplug/playlist.py index 7a27b02a3..07c12e0e0 100644 --- a/beetsplug/playlist.py +++ b/beetsplug/playlist.py @@ -123,7 +123,7 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin): def cli_exit(self, lib): for playlist in self.find_playlists(): - self._log.info(f"Updating playlist: {playlist}") + self._log.info("Updating playlist: {}", playlist) base_dir = beets.util.bytestring_path( self.relative_to if self.relative_to @@ -133,21 +133,16 @@ try: self.update_playlist(playlist, base_dir) except beets.util.FilesystemError: - self._log.error( - "Failed to update playlist: {}".format( - beets.util.displayable_path(playlist) - ) - ) + self._log.error("Failed to update playlist: {}", playlist) def find_playlists(self): """Find M3U playlists in the playlist directory.""" + playlist_dir = beets.util.syspath(self.playlist_dir) try: - dir_contents = os.listdir(beets.util.syspath(self.playlist_dir)) + dir_contents = os.listdir(playlist_dir) except OSError: self._log.warning( - "Unable to open playlist directory {}".format( - beets.util.displayable_path(self.playlist_dir) - ) + "Unable to open playlist directory {.playlist_dir}", self ) return @@ -195,9 +190,10 @@ if changes or deletions: self._log.info( - "Updated playlist {} ({} changes, {} deletions)".format( - filename, changes, deletions - ) + "Updated playlist {} ({} changes, {} deletions)", + filename, + changes, + deletions, ) beets.util.copy(new_playlist, filename, replace=True) beets.util.remove(new_playlist) diff --git a/beetsplug/plexupdate.py b/beetsplug/plexupdate.py index 9b4419c71..5e255d45b 100644 --- a/beetsplug/plexupdate.py +++ b/beetsplug/plexupdate.py @@ -22,9 +22,7 @@ def
get_music_section( ): """Getting the section key for the music library in Plex.""" api_endpoint = append_token("library/sections", token) - url = urljoin( - "{}://{}:{}".format(get_protocol(secure), host, port), api_endpoint - ) + url = urljoin(f"{get_protocol(secure)}://{host}:{port}", api_endpoint) # Sends request. r = requests.get( @@ -54,9 +52,7 @@ def update_plex(host, port, token, library_name, secure, ignore_cert_errors): ) api_endpoint = f"library/sections/{section_key}/refresh" api_endpoint = append_token(api_endpoint, token) - url = urljoin( - "{}://{}:{}".format(get_protocol(secure), host, port), api_endpoint - ) + url = urljoin(f"{get_protocol(secure)}://{host}:{port}", api_endpoint) # Sends request and returns requests object. r = requests.get( @@ -70,7 +66,7 @@ def update_plex(host, port, token, library_name, secure, ignore_cert_errors): def append_token(url, token): """Appends the Plex Home token to the api call if required.""" if token: - url += "?" + urlencode({"X-Plex-Token": token}) + url += f"?{urlencode({'X-Plex-Token': token})}" return url diff --git a/beetsplug/replaygain.py b/beetsplug/replaygain.py index 96c854314..3e777d977 100644 --- a/beetsplug/replaygain.py +++ b/beetsplug/replaygain.py @@ -70,9 +70,7 @@ def call(args: list[str], log: Logger, **kwargs: Any): return command_output(args, **kwargs) except subprocess.CalledProcessError as e: log.debug(e.output.decode("utf8", "ignore")) - raise ReplayGainError( - "{} exited with status {}".format(args[0], e.returncode) - ) + raise ReplayGainError(f"{args[0]} exited with status {e.returncode}") def db_to_lufs(db: float) -> float: @@ -143,9 +141,8 @@ class RgTask: item.rg_track_peak = track_gain.peak item.store() self._log.debug( - "applied track gain {0} LU, peak {1} of FS", - item.rg_track_gain, - item.rg_track_peak, + "applied track gain {0.rg_track_gain} LU, peak {0.rg_track_peak} of FS", + item, ) def _store_album_gain(self, item: Item, album_gain: Gain): @@ -157,9 +154,8 @@ class RgTask: item.rg_album_peak = album_gain.peak item.store() self._log.debug( - "applied album gain {0} LU, peak {1} of FS", - item.rg_album_gain, - item.rg_album_peak, + "applied album gain {0.rg_album_gain} LU, peak {0.rg_album_peak} of FS", + item, ) def _store_track(self, write: bool): @@ -170,15 +166,14 @@ class RgTask: # `track_gains` without throwing FatalReplayGainError # => raise non-fatal exception & continue raise ReplayGainError( - "ReplayGain backend `{}` failed for track {}".format( - self.backend_name, item - ) + f"ReplayGain backend `{self.backend_name}` failed for track" + f" {item}" ) self._store_track_gain(item, self.track_gains[0]) if write: item.try_write() - self._log.debug("done analyzing {0}", item) + self._log.debug("done analyzing {}", item) def _store_album(self, write: bool): """Store track/album gains for all tracks of the task in the database.""" @@ -191,17 +186,15 @@ class RgTask: # `album_gain` without throwing FatalReplayGainError # => raise non-fatal exception & continue raise ReplayGainError( - "ReplayGain backend `{}` failed " - "for some tracks in album {}".format( - self.backend_name, self.album - ) + f"ReplayGain backend `{self.backend_name}` failed " + f"for some tracks in album {self.album}" ) for item, track_gain in zip(self.items, self.track_gains): self._store_track_gain(item, track_gain) self._store_album_gain(item, self.album_gain) if write: item.try_write() - self._log.debug("done analyzing {0}", item) + self._log.debug("done analyzing {}", item) def store(self, write: bool): """Store computed 
gains for the items of this task in the database.""" @@ -235,7 +228,7 @@ class R128Task(RgTask): def _store_track_gain(self, item: Item, track_gain: Gain): item.r128_track_gain = track_gain.gain item.store() - self._log.debug("applied r128 track gain {0} LU", item.r128_track_gain) + self._log.debug("applied r128 track gain {.r128_track_gain} LU", item) def _store_album_gain(self, item: Item, album_gain: Gain): """ @@ -244,7 +237,7 @@ class R128Task(RgTask): """ item.r128_album_gain = album_gain.gain item.store() - self._log.debug("applied r128 album gain {0} LU", item.r128_album_gain) + self._log.debug("applied r128 album gain {.r128_album_gain} LU", item) AnyRgTask = TypeVar("AnyRgTask", bound=RgTask) @@ -385,10 +378,7 @@ class FfmpegBackend(Backend): album_gain = target_level_lufs - album_gain self._log.debug( - "{}: gain {} LU, peak {}", - task.album, - album_gain, - album_peak, + "{.album}: gain {} LU, peak {}", task, album_gain, album_peak ) task.album_gain = Gain(album_gain, album_peak) @@ -431,9 +421,9 @@ class FfmpegBackend(Backend): target_level_lufs = db_to_lufs(target_level) # call ffmpeg - self._log.debug(f"analyzing {item}") + self._log.debug("analyzing {}", item) cmd = self._construct_cmd(item, peak_method) - self._log.debug("executing {0}", " ".join(map(displayable_path, cmd))) + self._log.debug("executing {}", " ".join(map(displayable_path, cmd))) output = call(cmd, self._log).stderr.splitlines() # parse output @@ -501,12 +491,10 @@ class FfmpegBackend(Backend): if self._parse_float(b"M: " + line[1]) >= gating_threshold: n_blocks += 1 self._log.debug( - "{}: {} blocks over {} LUFS".format( - item, n_blocks, gating_threshold - ) + "{}: {} blocks over {} LUFS", item, n_blocks, gating_threshold ) - self._log.debug("{}: gain {} LU, peak {}".format(item, gain, peak)) + self._log.debug("{}: gain {} LU, peak {}", item, gain, peak) return Gain(gain, peak), n_blocks @@ -526,9 +514,7 @@ class FfmpegBackend(Backend): if output[i].startswith(search): return i raise ReplayGainError( - "ffmpeg output: missing {} after line {}".format( - repr(search), start_line - ) + f"ffmpeg output: missing {search!r} after line {start_line}" ) def _parse_float(self, line: bytes) -> float: @@ -575,7 +561,7 @@ class CommandBackend(Backend): # Explicit executable path. if not os.path.isfile(self.command): raise FatalReplayGainError( - "replaygain command does not exist: {}".format(self.command) + f"replaygain command does not exist: {self.command}" ) else: # Check whether the program is in $PATH. 
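The logging changes in the surrounding replaygain hunks replace explicit positional fields such as "{0}" and "{1:.2f}" with auto-numbered fields and attribute access such as "{0.rg_track_gain}" or "{.album}". Beets' plugin logger defers message formatting to str.format, so these templates follow the standard format-string rules; the short standalone sketch below (plain Python, illustrative only) shows the semantics those calls rely on, including the colon that a precision spec still needs.

from types import SimpleNamespace

# Auto-numbered fields ("{}") are filled left to right; a field can also drill
# into its argument with attribute or index access ("{0.rg_track_gain}",
# "{.album}", "{[key]}"). A precision spec always follows a colon: "{:.2f}".
item = SimpleNamespace(rg_track_gain=-3.21456, rg_track_peak=0.98712)
print("applied track gain {0.rg_track_gain:.2f} LU, peak {0.rg_track_peak:.2f} of FS".format(item))
print("ReplayGain for track {}: {:.2f}, {:.2f}".format("song.flac", -3.21456, 0.98712))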
@@ -663,8 +649,8 @@ class CommandBackend(Backend): cmd = cmd + ["-d", str(int(target_level - 89))] cmd = cmd + [syspath(i.path) for i in items] - self._log.debug("analyzing {0} files", len(items)) - self._log.debug("executing {0}", " ".join(map(displayable_path, cmd))) + self._log.debug("analyzing {} files", len(items)) + self._log.debug("executing {}", " ".join(map(displayable_path, cmd))) output = call(cmd, self._log).stdout self._log.debug("analysis finished") return self.parse_tool_output( @@ -680,7 +666,7 @@ for line in text.split(b"\n")[1 : num_lines + 1]: parts = line.split(b"\t") if len(parts) != 6 or parts[0] == b"File": - self._log.debug("bad tool output: {0}", text) + self._log.debug("bad tool output: {}", text) raise ReplayGainError("mp3gain failed") # _file = parts[0] @@ -1105,9 +1091,8 @@ ) self._log.debug( - "ReplayGain for track {0} - {1}: {2:.2f}, {3:.2f}", - item.artist, - item.title, + "ReplayGain for track {0.artist} - {0.title}: {1:.2f}, {2:.2f}", + item, rg_track_gain, rg_track_peak, ) @@ -1132,7 +1117,7 @@ ) track_gains.append(Gain(gain=rg_track_gain, peak=rg_track_peak)) self._log.debug( - "ReplayGain for track {0}: {1:.2f}, {2:.2f}", + "ReplayGain for track {}: {:.2f}, {:.2f}", item, rg_track_gain, rg_track_peak, ) @@ -1145,8 +1130,8 @@ rg_album_gain, task.target_level ) self._log.debug( - "ReplayGain for album {0}: {1:.2f}, {2:.2f}", - task.items[0].album, + "ReplayGain for album {.items[0].album}: {:.2f}, {:.2f}", + task, rg_album_gain, rg_album_peak, ) @@ -1229,10 +1214,8 @@ if self.backend_name not in BACKENDS: raise ui.UserError( - "Selected ReplayGain backend {} is not supported. " - "Please select one of: {}".format( - self.backend_name, ", ".join(BACKENDS.keys()) - ) + f"Selected ReplayGain backend {self.backend_name} is not" + f" supported. Please select one of: {', '.join(BACKENDS)}" ) # FIXME: Consider renaming the configuration option to 'peak_method' @@ -1240,10 +1223,9 @@ peak_method = self.config["peak"].as_str() if peak_method not in PeakMethod.__members__: raise ui.UserError( - "Selected ReplayGain peak method {} is not supported. " - "Please select one of: {}".format( - peak_method, ", ".join(PeakMethod.__members__) - ) + f"Selected ReplayGain peak method {peak_method} is not" + " supported. Please select one of:" + f" {', '.join(PeakMethod.__members__)}" ) # This only applies to plain old rg tags, r128 doesn't store peak # values. @@ -1348,19 +1330,19 @@ items, nothing is done.
""" if not force and not self.album_requires_gain(album): - self._log.info("Skipping album {0}", album) + self._log.info("Skipping album {}", album) return items_iter = iter(album.items()) use_r128 = self.should_use_r128(next(items_iter)) if any(use_r128 != self.should_use_r128(i) for i in items_iter): self._log.error( - "Cannot calculate gain for album {0} (incompatible formats)", + "Cannot calculate gain for album {} (incompatible formats)", album, ) return - self._log.info("analyzing {0}", album) + self._log.info("analyzing {}", album) discs: dict[int, list[Item]] = {} if self.config["per_disc"].get(bool): @@ -1384,7 +1366,7 @@ class ReplayGainPlugin(BeetsPlugin): callback=store_cb, ) except ReplayGainError as e: - self._log.info("ReplayGain error: {0}", e) + self._log.info("ReplayGain error: {}", e) except FatalReplayGainError as e: raise ui.UserError(f"Fatal replay gain error: {e}") @@ -1396,7 +1378,7 @@ class ReplayGainPlugin(BeetsPlugin): in the item, nothing is done. """ if not force and not self.track_requires_gain(item): - self._log.info("Skipping track {0}", item) + self._log.info("Skipping track {}", item) return use_r128 = self.should_use_r128(item) @@ -1413,7 +1395,7 @@ class ReplayGainPlugin(BeetsPlugin): callback=store_cb, ) except ReplayGainError as e: - self._log.info("ReplayGain error: {0}", e) + self._log.info("ReplayGain error: {}", e) except FatalReplayGainError as e: raise ui.UserError(f"Fatal replay gain error: {e}") @@ -1526,18 +1508,16 @@ class ReplayGainPlugin(BeetsPlugin): if opts.album: albums = lib.albums(args) self._log.info( - "Analyzing {} albums ~ {} backend...".format( - len(albums), self.backend_name - ) + f"Analyzing {len(albums)} albums ~" + f" {self.backend_name} backend..." ) for album in albums: self.handle_album(album, write, force) else: items = lib.items(args) self._log.info( - "Analyzing {} tracks ~ {} backend...".format( - len(items), self.backend_name - ) + f"Analyzing {len(items)} tracks ~" + f" {self.backend_name} backend..." 
) for item in items: self.handle_track(item, write, force) @@ -1556,8 +1536,10 @@ class ReplayGainPlugin(BeetsPlugin): "--threads", dest="threads", type=int, - help="change the number of threads, \ - defaults to maximum available processors", + help=( + "change the number of threads, defaults to maximum available" + " processors" + ), ) cmd.parser.add_option( "-f", @@ -1565,8 +1547,10 @@ class ReplayGainPlugin(BeetsPlugin): dest="force", action="store_true", default=False, - help="analyze all files, including those that " - "already have ReplayGain metadata", + help=( + "analyze all files, including those that already have" + " ReplayGain metadata" + ), ) cmd.parser.add_option( "-w", diff --git a/beetsplug/rewrite.py b/beetsplug/rewrite.py index 83829d657..1cc21ad75 100644 --- a/beetsplug/rewrite.py +++ b/beetsplug/rewrite.py @@ -57,9 +57,9 @@ class RewritePlugin(BeetsPlugin): raise ui.UserError("invalid rewrite specification") if fieldname not in library.Item._fields: raise ui.UserError( - "invalid field name (%s) in rewriter" % fieldname + f"invalid field name ({fieldname}) in rewriter" ) - self._log.debug("adding template field {0}", key) + self._log.debug("adding template field {}", key) pattern = re.compile(pattern.lower()) rules[fieldname].append((pattern, value)) if fieldname == "artist": diff --git a/beetsplug/scrub.py b/beetsplug/scrub.py index 813effb5f..c39894137 100644 --- a/beetsplug/scrub.py +++ b/beetsplug/scrub.py @@ -59,9 +59,7 @@ class ScrubPlugin(BeetsPlugin): def scrub_func(lib, opts, args): # Walk through matching files and remove tags. for item in lib.items(args): - self._log.info( - "scrubbing: {0}", util.displayable_path(item.path) - ) + self._log.info("scrubbing: {.filepath}", item) self._scrub_item(item, opts.write) scrub_cmd = ui.Subcommand("scrub", help="clean audio tags") @@ -110,7 +108,7 @@ class ScrubPlugin(BeetsPlugin): f.save() except (OSError, mutagen.MutagenError) as exc: self._log.error( - "could not scrub {0}: {1}", util.displayable_path(path), exc + "could not scrub {}: {}", util.displayable_path(path), exc ) def _scrub_item(self, item, restore): @@ -124,7 +122,7 @@ class ScrubPlugin(BeetsPlugin): util.syspath(item.path), config["id3v23"].get(bool) ) except mediafile.UnreadableFileError as exc: - self._log.error("could not open file to scrub: {0}", exc) + self._log.error("could not open file to scrub: {}", exc) return images = mf.images @@ -144,12 +142,10 @@ class ScrubPlugin(BeetsPlugin): mf.images = images mf.save() except mediafile.UnreadableFileError as exc: - self._log.error("could not write tags: {0}", exc) + self._log.error("could not write tags: {}", exc) def import_task_files(self, session, task): """Automatically scrub imported files.""" for item in task.imported_items(): - self._log.debug( - "auto-scrubbing {0}", util.displayable_path(item.path) - ) + self._log.debug("auto-scrubbing {.filepath}", item) self._scrub_item(item, ui.should_write()) diff --git a/beetsplug/smartplaylist.py b/beetsplug/smartplaylist.py index e65d59649..8203ce4ef 100644 --- a/beetsplug/smartplaylist.py +++ b/beetsplug/smartplaylist.py @@ -138,10 +138,9 @@ class SmartPlaylistPlugin(BeetsPlugin): if name in args } if not playlists: + unmatched = [name for name, _, _ in self._unmatched_playlists] raise ui.UserError( - "No playlist matching any of {} found".format( - [name for name, _, _ in self._unmatched_playlists] - ) + f"No playlist matching any of {unmatched} found" ) self._matched_playlists = playlists @@ -235,7 +234,7 @@ class SmartPlaylistPlugin(BeetsPlugin): for 
playlist in self._unmatched_playlists: n, (q, _), (a_q, _) = playlist if self.matches(model, q, a_q): - self._log.debug("{0} will be updated because of {1}", n, model) + self._log.debug("{} will be updated because of {}", n, model) self._matched_playlists.add(playlist) self.register_listener("cli_exit", self.update_playlists) @@ -244,12 +243,12 @@ class SmartPlaylistPlugin(BeetsPlugin): def update_playlists(self, lib, pretend=False): if pretend: self._log.info( - "Showing query results for {0} smart playlists...", + "Showing query results for {} smart playlists...", len(self._matched_playlists), ) else: self._log.info( - "Updating {0} smart playlists...", len(self._matched_playlists) + "Updating {} smart playlists...", len(self._matched_playlists) ) playlist_dir = self.config["playlist_dir"].as_filename() @@ -268,7 +267,7 @@ class SmartPlaylistPlugin(BeetsPlugin): if pretend: self._log.info("Results for playlist {}:", name) else: - self._log.info("Creating playlist {0}", name) + self._log.info("Creating playlist {}", name) items = [] if query: @@ -331,8 +330,9 @@ class SmartPlaylistPlugin(BeetsPlugin): for key, value in attr ] attrs = "".join(al) - comment = "#EXTINF:{}{},{} - {}\n".format( - int(item.length), attrs, item.artist, item.title + comment = ( + f"#EXTINF:{int(item.length)}{attrs}," + f"{item.artist} - {item.title}\n" ) f.write(comment.encode("utf-8") + entry.uri + b"\n") # Send an event when playlists were updated. @@ -340,13 +340,11 @@ class SmartPlaylistPlugin(BeetsPlugin): if pretend: self._log.info( - "Displayed results for {0} playlists", + "Displayed results for {} playlists", len(self._matched_playlists), ) else: - self._log.info( - "{0} playlists updated", len(self._matched_playlists) - ) + self._log.info("{} playlists updated", len(self._matched_playlists)) class PlaylistItem: diff --git a/beetsplug/spotify.py b/beetsplug/spotify.py index fa5dc5c52..7cb9e330d 100644 --- a/beetsplug/spotify.py +++ b/beetsplug/spotify.py @@ -29,7 +29,6 @@ from typing import TYPE_CHECKING, Any, Literal, Sequence, Union import confuse import requests -import unidecode from beets import ui from beets.autotag.hooks import AlbumInfo, TrackInfo @@ -131,15 +130,11 @@ class SpotifyPlugin( "mode": "list", "tiebreak": "popularity", "show_failures": False, - "artist_field": "albumartist", - "album_field": "album", - "track_field": "title", "region_filter": None, "regex": [], "client_id": "4e414367a1d14c75a5c5129a627fcab8", "client_secret": "f82bdc09b2254f1a8286815d02fd46dc", "tokenfile": "spotify_token.json", - "search_query_ascii": False, } ) self.config["client_id"].redact = True @@ -170,8 +165,9 @@ class SpotifyPlugin( c_secret: str = self.config["client_secret"].as_str() headers = { - "Authorization": "Basic {}".format( - base64.b64encode(f"{c_id}:{c_secret}".encode()).decode() + "Authorization": ( + "Basic" + f" {base64.b64encode(f'{c_id}:{c_secret}'.encode()).decode()}" ) } response = requests.post( @@ -184,14 +180,12 @@ class SpotifyPlugin( response.raise_for_status() except requests.exceptions.HTTPError as e: raise ui.UserError( - "Spotify authorization failed: {}\n{}".format(e, response.text) + f"Spotify authorization failed: {e}\n{response.text}" ) self.access_token = response.json()["access_token"] # Save the token for later use. 
- self._log.debug( - "{} access token: {}", self.data_source, self.access_token - ) + self._log.debug("{0.data_source} access token: {0.access_token}", self) with open(self._tokenfile(), "w") as f: json.dump({"access_token": self.access_token}, f) @@ -229,16 +223,16 @@ class SpotifyPlugin( self._log.error("ReadTimeout.") raise APIError("Request timed out.") except requests.exceptions.ConnectionError as e: - self._log.error(f"Network error: {e}") + self._log.error("Network error: {}", e) raise APIError("Network error.") except requests.exceptions.RequestException as e: if e.response is None: - self._log.error(f"Request failed: {e}") + self._log.error("Request failed: {}", e) raise APIError("Request failed.") if e.response.status_code == 401: self._log.debug( - f"{self.data_source} access token has expired. " - f"Reauthenticating." + "{.data_source} access token has expired. Reauthenticating.", + self, ) self._authenticate() return self._handle_response( @@ -257,7 +251,7 @@ class SpotifyPlugin( "Retry-After", DEFAULT_WAITING_TIME ) self._log.debug( - f"Too many API requests. Retrying after {seconds} seconds." + "Too many API requests. Retrying after {} seconds.", seconds ) time.sleep(int(seconds) + 1) return self._handle_response( @@ -278,7 +272,7 @@ class SpotifyPlugin( f"URL:\n{url}\nparams:\n{params}" ) else: - self._log.error(f"Request failed. Error: {e}") + self._log.error("Request failed. Error: {}", e) raise APIError("Request failed.") def album_for_id(self, album_id: str) -> AlbumInfo | None: @@ -293,7 +287,9 @@ class SpotifyPlugin( if not (spotify_id := self._extract_id(album_id)): return None - album_data = self._handle_response("get", self.album_url + spotify_id) + album_data = self._handle_response( + "get", f"{self.album_url}{spotify_id}" + ) if album_data["name"] == "": self._log.debug("Album removed from Spotify: {}", album_id) return None @@ -316,9 +312,7 @@ class SpotifyPlugin( else: raise ui.UserError( "Invalid `release_date_precision` returned " - "by {} API: '{}'".format( - self.data_source, release_date_precision - ) + f"by {self.data_source} API: '{release_date_precision}'" ) tracks_data = album_data["tracks"] @@ -411,7 +405,7 @@ class SpotifyPlugin( # release) and `track.medium_total` (total number of tracks on # the track's disc). album_data = self._handle_response( - "get", self.album_url + track_data["album"]["id"] + "get", f"{self.album_url}{track_data['album']['id']}" ) medium_total = 0 for i, track_data in enumerate(album_data["tracks"]["items"], start=1): @@ -422,62 +416,43 @@ class SpotifyPlugin( track.medium_total = medium_total return track - def _construct_search_query( - self, filters: SearchFilter, keywords: str = "" - ) -> str: - """Construct a query string with the specified filters and keywords to - be provided to the Spotify Search API - (https://developer.spotify.com/documentation/web-api/reference/search). - - :param filters: (Optional) Field filters to apply. - :param keywords: (Optional) Query keywords to use. - :return: Query string to be provided to the Search API. 
- """ - - query_components = [ - keywords, - " ".join(f"{k}:{v}" for k, v in filters.items()), - ] - query = " ".join([q for q in query_components if q]) - if not isinstance(query, str): - query = query.decode("utf8") - - if self.config["search_query_ascii"].get(): - query = unidecode.unidecode(query) - - return query - def _search_api( self, query_type: Literal["album", "track"], filters: SearchFilter, - keywords: str = "", + query_string: str = "", ) -> Sequence[SearchResponseAlbums | SearchResponseTracks]: - """Query the Spotify Search API for the specified ``keywords``, + """Query the Spotify Search API for the specified ``query_string``, applying the provided ``filters``. :param query_type: Item type to search across. Valid types are: 'album', 'artist', 'playlist', and 'track'. - :param filters: (Optional) Field filters to apply. - :param keywords: (Optional) Query keywords to use. + :param filters: Field filters to apply. + :param query_string: Additional query to include in the search. """ - query = self._construct_search_query(keywords=keywords, filters=filters) + query = self._construct_search_query( + filters=filters, query_string=query_string + ) - self._log.debug(f"Searching {self.data_source} for '{query}'") + self._log.debug("Searching {.data_source} for '{}'", self, query) try: response = self._handle_response( "get", self.search_url, - params={"q": query, "type": query_type}, + params={ + "q": query, + "type": query_type, + "limit": self.config["search_limit"].get(), + }, ) except APIError as e: self._log.debug("Spotify API error: {}", e) return () - response_data = response.get(query_type + "s", {}).get("items", []) + response_data = response.get(f"{query_type}s", {}).get("items", []) self._log.debug( - "Found {} result(s) from {} for '{}'", + "Found {} result(s) from {.data_source} for '{}'", len(response_data), - self.data_source, + self, query, ) return response_data @@ -497,17 +472,17 @@ class SpotifyPlugin( "-m", "--mode", action="store", - help='"open" to open {} with playlist, ' - '"list" to print (default)'.format(self.data_source), + help=( + f'"open" to open {self.data_source} with playlist, ' + '"list" to print (default)' + ), ) spotify_cmd.parser.add_option( "-f", "--show-failures", action="store_true", dest="show_failures", - help="list tracks that did not match a {} ID".format( - self.data_source - ), + help=f"list tracks that did not match a {self.data_source} ID", ) spotify_cmd.func = queries @@ -540,7 +515,7 @@ class SpotifyPlugin( if self.config["mode"].get() not in ["list", "open"]: self._log.warning( - "{0} is not a valid mode", self.config["mode"].get() + "{} is not a valid mode", self.config["mode"].get() ) return False @@ -563,8 +538,8 @@ class SpotifyPlugin( if not items: self._log.debug( - "Your beets query returned no items, skipping {}.", - self.data_source, + "Your beets query returned no items, skipping {.data_source}.", + self, ) return @@ -585,19 +560,25 @@ class SpotifyPlugin( regex["search"], regex["replace"], value ) - # Custom values can be passed in the config (just in case) - artist = item[self.config["artist_field"].get()] - album = item[self.config["album_field"].get()] - keywords = item[self.config["track_field"].get()] + artist = item["artist"] or item["albumartist"] + album = item["album"] + query_string = item["title"] # Query the Web API for each track, look for the items' JSON data - query_filters: SearchFilter = {"artist": artist, "album": album} + query_filters: SearchFilter = {} + if artist: + query_filters["artist"] = artist 
+ if album: + query_filters["album"] = album + response_data_tracks = self._search_api( - query_type="track", keywords=keywords, filters=query_filters + query_type="track", + query_string=query_string, + filters=query_filters, ) if not response_data_tracks: query = self._construct_search_query( - keywords=keywords, filters=query_filters + query_string=query_string, filters=query_filters ) failures.append(query) @@ -617,8 +598,8 @@ class SpotifyPlugin( or self.config["tiebreak"].get() == "first" ): self._log.debug( - "{} track(s) found, count: {}", - self.data_source, + "{.data_source} track(s) found, count: {}", + self, len(response_data_tracks), ) chosen_result = response_data_tracks[0] @@ -641,19 +622,19 @@ class SpotifyPlugin( if failure_count > 0: if self.config["show_failures"].get(): self._log.info( - "{} track(s) did not match a {} ID:", + "{} track(s) did not match a {.data_source} ID:", failure_count, - self.data_source, + self, ) for track in failures: self._log.info("track: {}", track) self._log.info("") else: self._log.warning( - "{} track(s) did not match a {} ID:\n" + "{} track(s) did not match a {.data_source} ID:\n" "use --show-failures to display", failure_count, - self.data_source, + self, ) return results @@ -670,20 +651,18 @@ class SpotifyPlugin( spotify_ids = [track_data["id"] for track_data in results] if self.config["mode"].get() == "open": self._log.info( - "Attempting to open {} with playlist".format( - self.data_source - ) + "Attempting to open {.data_source} with playlist", self ) - spotify_url = "spotify:trackset:Playlist:" + ",".join( - spotify_ids + spotify_url = ( + f"spotify:trackset:Playlist:{','.join(spotify_ids)}" ) webbrowser.open(spotify_url) else: for spotify_id in spotify_ids: - print(self.open_track_url + spotify_id) + print(f"{self.open_track_url}{spotify_id}") else: self._log.warning( - f"No {self.data_source} tracks found from beets query" + "No {.data_source} tracks found from beets query", self ) def _fetch_info(self, items, write, force): @@ -715,12 +694,10 @@ class SpotifyPlugin( audio_features = self.track_audio_features(spotify_track_id) if audio_features is None: self._log.info("No audio features found for: {}", item) - continue - for feature in audio_features.keys(): - if feature in self.spotify_audio_features.keys(): - item[self.spotify_audio_features[feature]] = audio_features[ - feature - ] + else: + for feature, value in audio_features.items(): + if feature in self.spotify_audio_features: + item[self.spotify_audio_features[feature]] = value item["spotify_updated"] = time.time() item.store() if write: @@ -728,7 +705,7 @@ class SpotifyPlugin( def track_info(self, track_id: str): """Fetch a track's popularity and external IDs using its Spotify ID.""" - track_data = self._handle_response("get", self.track_url + track_id) + track_data = self._handle_response("get", f"{self.track_url}{track_id}") external_ids = track_data.get("external_ids", {}) popularity = track_data.get("popularity") self._log.debug( @@ -747,7 +724,7 @@ class SpotifyPlugin( """Fetch track audio features by its Spotify ID.""" try: return self._handle_response( - "get", self.audio_features_url + track_id + "get", f"{self.audio_features_url}{track_id}" ) except APIError as e: self._log.debug("Spotify API error: {}", e) diff --git a/beetsplug/subsonicplaylist.py b/beetsplug/subsonicplaylist.py index 9b4a7778c..6c11ab918 100644 --- a/beetsplug/subsonicplaylist.py +++ b/beetsplug/subsonicplaylist.py @@ -168,9 +168,7 @@ class SubsonicPlaylistPlugin(BeetsPlugin): params["v"] = 
"1.12.0" params["c"] = "beets" resp = requests.get( - "{}/rest/{}?{}".format( - self.config["base_url"].get(), endpoint, urlencode(params) - ), + f"{self.config['base_url'].get()}/rest/{endpoint}?{urlencode(params)}", timeout=10, ) return resp @@ -182,5 +180,5 @@ class SubsonicPlaylistPlugin(BeetsPlugin): for track in tracks: if track not in output: output[track] = ";" - output[track] += name + ";" + output[track] += f"{name};" return output diff --git a/beetsplug/subsonicupdate.py b/beetsplug/subsonicupdate.py index ce888cb76..673cc94a8 100644 --- a/beetsplug/subsonicupdate.py +++ b/beetsplug/subsonicupdate.py @@ -74,7 +74,7 @@ class SubsonicUpdate(BeetsPlugin): # Pick the random sequence and salt the password r = string.ascii_letters + string.digits salt = "".join([random.choice(r) for _ in range(6)]) - salted_password = password + salt + salted_password = f"{password}{salt}" token = hashlib.md5(salted_password.encode("utf-8")).hexdigest() # Put together the payload of the request to the server and the URL @@ -101,14 +101,14 @@ class SubsonicUpdate(BeetsPlugin): context_path = "" url = f"http://{host}:{port}{context_path}" - return url + f"/rest/{endpoint}" + return f"{url}/rest/{endpoint}" def start_scan(self): user = self.config["user"].as_str() auth = self.config["auth"].as_str() url = self.__format_url("startScan") - self._log.debug("URL is {0}", url) - self._log.debug("auth type is {0}", self.config["auth"]) + self._log.debug("URL is {}", url) + self._log.debug("auth type is {.config[auth]}", self) if auth == "token": salt, token = self.__create_token() @@ -145,14 +145,15 @@ class SubsonicUpdate(BeetsPlugin): and json["subsonic-response"]["status"] == "ok" ): count = json["subsonic-response"]["scanStatus"]["count"] - self._log.info(f"Updating Subsonic; scanning {count} tracks") + self._log.info("Updating Subsonic; scanning {} tracks", count) elif ( response.status_code == 200 and json["subsonic-response"]["status"] == "failed" ): - error_message = json["subsonic-response"]["error"]["message"] - self._log.error(f"Error: {error_message}") + self._log.error( + "Error: {[subsonic-response][error][message]}", json + ) else: - self._log.error("Error: {0}", json) + self._log.error("Error: {}", json) except Exception as error: - self._log.error(f"Error: {error}") + self._log.error("Error: {}", error) diff --git a/beetsplug/the.py b/beetsplug/the.py index 802b0a3db..664d4c01e 100644 --- a/beetsplug/the.py +++ b/beetsplug/the.py @@ -23,7 +23,7 @@ __version__ = "1.1" PATTERN_THE = "^the\\s" PATTERN_A = "^[a][n]?\\s" -FORMAT = "{0}, {1}" +FORMAT = "{}, {}" class ThePlugin(BeetsPlugin): @@ -38,7 +38,7 @@ class ThePlugin(BeetsPlugin): { "the": True, "a": True, - "format": "{0}, {1}", + "format": "{}, {}", "strip": False, "patterns": [], } @@ -50,11 +50,11 @@ class ThePlugin(BeetsPlugin): try: re.compile(p) except re.error: - self._log.error("invalid pattern: {0}", p) + self._log.error("invalid pattern: {}", p) else: if not (p.startswith("^") or p.endswith("$")): self._log.warning( - 'warning: "{0}" will not match string start/end', + 'warning: "{}" will not match string start/end', p, ) if self.config["a"]: @@ -94,7 +94,7 @@ class ThePlugin(BeetsPlugin): for p in self.patterns: r = self.unthe(text, p) if r != text: - self._log.debug('"{0}" -> "{1}"', text, r) + self._log.debug('"{}" -> "{}"', text, r) break return r else: diff --git a/beetsplug/thumbnails.py b/beetsplug/thumbnails.py index 5460d3fec..651eaf3ac 100644 --- a/beetsplug/thumbnails.py +++ b/beetsplug/thumbnails.py @@ -104,21 +104,21 
@@ class ThumbnailsPlugin(BeetsPlugin): f"Thumbnails: ArtResizer backend {ArtResizer.shared.method}" f" unexpectedly cannot write image metadata." ) - self._log.debug(f"using {ArtResizer.shared.method} to write metadata") + self._log.debug("using {.shared.method} to write metadata", ArtResizer) uri_getter = GioURI() if not uri_getter.available: uri_getter = PathlibURI() - self._log.debug("using {0.name} to compute URIs", uri_getter) + self._log.debug("using {.name} to compute URIs", uri_getter) self.get_uri = uri_getter.uri return True def process_album(self, album): """Produce thumbnails for the album folder.""" - self._log.debug("generating thumbnail for {0}", album) + self._log.debug("generating thumbnail for {}", album) if not album.artpath: - self._log.info("album {0} has no art", album) + self._log.info("album {} has no art", album) return if self.config["dolphin"]: @@ -127,7 +127,7 @@ class ThumbnailsPlugin(BeetsPlugin): size = ArtResizer.shared.get_size(album.artpath) if not size: self._log.warning( - "problem getting the picture size for {0}", album.artpath + "problem getting the picture size for {.artpath}", album ) return @@ -137,9 +137,9 @@ class ThumbnailsPlugin(BeetsPlugin): wrote &= self.make_cover_thumbnail(album, 128, NORMAL_DIR) if wrote: - self._log.info("wrote thumbnail for {0}", album) + self._log.info("wrote thumbnail for {}", album) else: - self._log.info("nothing to do for {0}", album) + self._log.info("nothing to do for {}", album) def make_cover_thumbnail(self, album, size, target_dir): """Make a thumbnail of given size for `album` and put it in @@ -154,16 +154,16 @@ class ThumbnailsPlugin(BeetsPlugin): ): if self.config["force"]: self._log.debug( - "found a suitable {1}x{1} thumbnail for {0}, " + "found a suitable {0}x{0} thumbnail for {1}, " "forcing regeneration", - album, size, + album, ) else: self._log.debug( - "{1}x{1} thumbnail for {0} exists and is recent enough", - album, + "{0}x{0} thumbnail for {1} exists and is recent enough", size, + album, ) return False resized = ArtResizer.shared.resize(size, album.artpath, target) @@ -192,7 +192,7 @@ class ThumbnailsPlugin(BeetsPlugin): ArtResizer.shared.write_metadata(image_path, metadata) except Exception: self._log.exception( - "could not write metadata to {0}", displayable_path(image_path) + "could not write metadata to {}", displayable_path(image_path) ) def make_dolphin_cover_thumbnail(self, album): @@ -202,9 +202,9 @@ class ThumbnailsPlugin(BeetsPlugin): artfile = os.path.split(album.artpath)[1] with open(syspath(outfilename), "w") as f: f.write("[Desktop Entry]\n") - f.write("Icon=./{}".format(artfile.decode("utf-8"))) + f.write(f"Icon=./{artfile.decode('utf-8')}") f.close() - self._log.debug("Wrote file {0}", displayable_path(outfilename)) + self._log.debug("Wrote file {}", displayable_path(outfilename)) class URIGetter: @@ -230,8 +230,7 @@ def copy_c_string(c_string): # This is a pretty dumb way to get a string copy, but it seems to # work. A more surefire way would be to allocate a ctypes buffer and copy # the data with `memcpy` or somesuch. 
- s = ctypes.cast(c_string, ctypes.c_char_p).value - return b"" + s + return ctypes.cast(c_string, ctypes.c_char_p).value class GioURI(URIGetter): @@ -266,9 +265,7 @@ class GioURI(URIGetter): g_file_ptr = self.libgio.g_file_new_for_path(path) if not g_file_ptr: raise RuntimeError( - "No gfile pointer received for {}".format( - displayable_path(path) - ) + f"No gfile pointer received for {displayable_path(path)}" ) try: diff --git a/beetsplug/types.py b/beetsplug/types.py index 9bdfdecee..561ce6828 100644 --- a/beetsplug/types.py +++ b/beetsplug/types.py @@ -44,6 +44,6 @@ class TypesPlugin(BeetsPlugin): mytypes[key] = types.DATE else: raise ConfigValueError( - "unknown type '{}' for the '{}' field".format(value, key) + f"unknown type '{value}' for the '{key}' field" ) return mytypes diff --git a/beetsplug/unimported.py b/beetsplug/unimported.py index 2d09ab4b7..21dc26aa3 100644 --- a/beetsplug/unimported.py +++ b/beetsplug/unimported.py @@ -61,7 +61,7 @@ class Unimported(BeetsPlugin): def commands(self): def print_unimported(lib, opts, args): ignore_exts = [ - ("." + x).encode() + f".{x}".encode() for x in self.config["ignore_extensions"].as_str_seq() ] in_folder = set() diff --git a/beetsplug/web/__init__.py b/beetsplug/web/__init__.py index 559f0622c..7b13cf016 100644 --- a/beetsplug/web/__init__.py +++ b/beetsplug/web/__init__.py @@ -77,7 +77,7 @@ def json_generator(items, root, expand=False): representation :returns: generator that yields strings """ - yield '{"%s":[' % root + yield f'{{"{root}":[' first = True for item in items: if first: @@ -232,9 +232,7 @@ def _get_unique_table_field_values(model, field, sort_field): raise KeyError with g.lib.transaction() as tx: rows = tx.query( - "SELECT DISTINCT '{}' FROM '{}' ORDER BY '{}'".format( - field, model._table, sort_field - ) + f"SELECT DISTINCT '{field}' FROM '{model._table}' ORDER BY '{sort_field}'" ) return [row[0] for row in rows] @@ -476,7 +474,7 @@ class WebPlugin(BeetsPlugin): # Enable CORS if required. if self.config["cors"]: self._log.info( - "Enabling CORS with origin: {0}", self.config["cors"] + "Enabling CORS with origin: {}", self.config["cors"] ) from flask_cors import CORS diff --git a/beetsplug/web/static/beets.js b/beetsplug/web/static/beets.js index eace4d27d..0600d09d0 100644 --- a/beetsplug/web/static/beets.js +++ b/beetsplug/web/static/beets.js @@ -241,6 +241,11 @@ var AppView = Backbone.View.extend({ 'pause': _.bind(this.audioPause, this), 'ended': _.bind(this.audioEnded, this) }); + if ("mediaSession" in navigator) { + navigator.mediaSession.setActionHandler("nexttrack", () => { + this.playNext(); + }); + } }, showItems: function(items) { this.shownItems = items; @@ -306,7 +311,9 @@ var AppView = Backbone.View.extend({ }, audioEnded: function() { this.playingItem.entryView.setPlaying(false); - + this.playNext(); + }, + playNext: function(){ // Try to play the next track. var idx = this.shownItems.indexOf(this.playingItem); if (idx == -1) { diff --git a/beetsplug/zero.py b/beetsplug/zero.py index 05e55bfcd..ab1bfa5ca 100644 --- a/beetsplug/zero.py +++ b/beetsplug/zero.py @@ -41,6 +41,7 @@ class ZeroPlugin(BeetsPlugin): "fields": [], "keep_fields": [], "update_database": False, + "omit_single_disc": False, } ) @@ -90,10 +91,10 @@ class ZeroPlugin(BeetsPlugin): Do some sanity checks then compile the regexes. 
""" if field not in MediaFile.fields(): - self._log.error("invalid field: {0}", field) + self._log.error("invalid field: {}", field) elif field in ("id", "path", "album_id"): self._log.warning( - "field '{0}' ignored, zeroing it would be dangerous", field + "field '{}' ignored, zeroing it would be dangerous", field ) else: try: @@ -123,9 +124,14 @@ class ZeroPlugin(BeetsPlugin): """ fields_set = False + if "disc" in tags and self.config["omit_single_disc"].get(bool): + if item.disctotal == 1: + fields_set = True + self._log.debug("disc: {.disc} -> None", item) + tags["disc"] = None + if not self.fields_to_progs: - self._log.warning("no fields, nothing to do") - return False + self._log.warning("no fields list to remove") for field, progs in self.fields_to_progs.items(): if field in tags: @@ -137,7 +143,7 @@ class ZeroPlugin(BeetsPlugin): if match: fields_set = True - self._log.debug("{0}: {1} -> None", field, value) + self._log.debug("{}: {} -> None", field, value) tags[field] = None if self.config["update_database"]: item[field] = None diff --git a/docs/_templates/autosummary/class.rst b/docs/_templates/autosummary/class.rst index fdf251b15..586b207b7 100644 --- a/docs/_templates/autosummary/class.rst +++ b/docs/_templates/autosummary/class.rst @@ -1,4 +1,4 @@ -{{ fullname | escape | underline}} +{{ name | escape | underline}} .. currentmodule:: {{ module }} diff --git a/docs/api/index.rst b/docs/api/index.rst new file mode 100644 index 000000000..edec5fe96 --- /dev/null +++ b/docs/api/index.rst @@ -0,0 +1,9 @@ +API Reference +============= + +.. toctree:: + :maxdepth: 2 + :titlesonly: + + plugins + database diff --git a/docs/api/plugins.rst b/docs/api/plugins.rst index 9320425db..2ce8dbed6 100644 --- a/docs/api/plugins.rst +++ b/docs/api/plugins.rst @@ -7,3 +7,11 @@ Plugins :toctree: generated/ BeetsPlugin + +.. currentmodule:: beets.metadata_plugins + +.. autosummary:: + :toctree: generated/ + + MetadataSourcePlugin + SearchApiMetadataSourcePlugin diff --git a/docs/changelog.rst b/docs/changelog.rst index a5af0a26d..9d0881a63 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,16 +9,161 @@ Unreleased New features: +- :doc:`plugins/ftintitle`: Added argument for custom feat. words in ftintitle. +- :doc:`plugins/ftintitle`: Added argument to skip the processing of artist and + album artist are the same in ftintitle. +- :doc:`plugins/play`: Added `$playlist` marker to precisely edit the playlist + filepath into the command calling the player program. +- :doc:`plugins/lastgenre`: For tuning plugin settings ``-vvv`` can be passed + to receive extra verbose logging around last.fm results and how they are + resolved. The ``extended_debug`` config setting and ``--debug`` option + have been removed. +- Added support for Python 3.13. + +Bug fixes: + +For packagers: + +Other changes: + +- The documentation chapter :doc:`dev/paths` has been moved to the "For + Developers" section and revised to reflect current best practices (pathlib + usage). + +2.5.1 (October 14, 2025) +------------------------ + +New features: + +- :doc:`plugins/zero`: Add new configuration option, ``omit_single_disc``, to + allow zeroing the disc number on write for single-disc albums. Defaults to + False. + +Bug fixes: + +- |BeetsPlugin|: load the last plugin class defined in the plugin namespace. + :bug:`6093` + +For packagers: + +- Fixed issue with legacy metadata plugins not copying properties from the base + class. 
+- Reverted the following: When installing ``beets`` via git or locally the + version string now reflects the current git branch and commit hash. + :bug:`6089` + +Other changes: + +- Removed outdated mailing list contact information from the documentation + :bug:`5462`. +- :doc:`guides/main`: Modernized the *Getting Started* guide with tabbed + sections and dropdown menus. Installation instructions have been streamlined, + and a new subpage now provides additional setup details. +- Documentation: introduced a new role ``conf`` for documenting configuration + options. This role provides consistent formatting and creates references + automatically. Applied it to :doc:`plugins/deezer`, :doc:`plugins/discogs`, + :doc:`plugins/musicbrainz` and :doc:`plugins/spotify` plugins documentation. + +2.5.0 (October 11, 2025) +------------------------ + +New features: + +- :doc:`plugins/lastgenre`: Add a ``--pretend`` option to preview genre changes + without storing or writing them. +- :doc:`plugins/convert`: Add a config option to disable writing metadata to + converted files. +- :doc:`plugins/discogs`: New config option + :conf:`plugins.discogs:strip_disambiguation` to toggle stripping discogs + numeric disambiguation on artist and label fields. +- :doc:`plugins/discogs` Added support for featured artists. :bug:`6038` +- :doc:`plugins/discogs` New configuration option + :conf:`plugins.discogs:featured_string` to change the default string used to + join featured artists. The default string is `Feat.`. +- :doc:`plugins/discogs` Support for `artist_credit` in Discogs tags. + :bug:`3354` +- :doc:`plugins/discogs` Support for name variations and config options to + specify where the variations are written. :bug:`3354` +- :doc:`plugins/web` Support for `nexttrack` keyboard press + +Bug fixes: + +- :doc:`plugins/musicbrainz` Refresh flexible MusicBrainz metadata on reimport + so format changes are applied. :bug:`6036` +- :doc:`plugins/spotify` Ensure ``spotifysync`` keeps popularity, ISRC, and + related fields current even when audio features requests fail. :bug:`6061` +- :doc:`plugins/spotify` Fixed an issue where track matching and lookups could + return incorrect or misleading results when using the Spotify plugin. The + problem occurred primarily when no album was provided or when the album field + was an empty string. :bug:`5189` +- :doc:`plugins/spotify` Removed old and undocumented config options + `artist_field`, `album_field` and `track` that were causing issues with track + matching. :bug:`5189` +- :doc:`plugins/spotify` Fixed an issue where candidate lookup would not find + matches due to query escaping (single vs double quotes). +- :doc:`plugins/discogs` Fixed inconsistency in stripping disambiguation from + artists but not labels. :bug:`5366` +- :doc:`plugins/chroma` :doc:`plugins/bpsync` Fix plugin loading issue caused by + an import of another |BeetsPlugin| class. :bug:`6033` +- :doc:`/plugins/fromfilename`: Fix :bug:`5218`, improve the code (refactor + regexps, allow for more cases, add some logging), add tests. +- Metadata source plugins: Fixed data source penalty calculation that was + incorrectly applied during import matching. The + :conf:`plugins.index:source_weight` configuration option has been renamed to + :conf:`plugins.index:data_source_mismatch_penalty` to better reflect its + purpose. 
:bug:`6066` + +Other changes: + +- :doc:`plugins/index`: Clarify that musicbrainz must be mentioned if plugin + list modified :bug:`6020` +- :doc:`/faq`: Add check for musicbrainz plugin if auto-tagger can't find a + match :bug:`6020` +- :doc:`guides/tagger`: Section on no matching release found, related to + possibly disabled musicbrainz plugin :bug:`6020` +- Moved ``art.py`` utility module from ``beets`` into ``beetsplug`` namespace as + it is not used in the core beets codebase. It can now be found in + ``beetsplug._utils``. +- Moved ``vfs.py`` utility module from ``beets`` into ``beetsplug`` namespace as + it is not used in the core beets codebase. It can now be found in + ``beetsplug._utils``. +- :class:`beets.metadata_plugin.MetadataSourcePlugin`: Remove discogs specific + disambiguation stripping. +- When installing ``beets`` via git or locally the version string now reflects + the current git branch and commit hash. :bug:`4448` +- :ref:`match-config`: ``match.distance_weights.source`` configuration has been + renamed to ``match.distance_weights.data_source`` for consistency with the + name of the field it refers to. + +For developers and plugin authors: + +- Typing improvements in ``beets/logging.py``: ``getLogger`` now returns + ``BeetsLogger`` when called with a name, or ``RootLogger`` when called without + a name. +- The ``track_distance()`` and ``album_distance()`` methods have been removed + from ``MetadataSourcePlugin``. Distance calculation for data source mismatches + is now handled automatically by the core matching logic. This change + simplifies the plugin architecture and fixes incorrect penalty calculations. + :bug:`6066` +- Metadata source plugins are now registered globally when instantiated, which + makes their handling slightly more efficient. + +2.4.0 (September 13, 2025) +-------------------------- + +New features: + - :doc:`plugins/musicbrainz`: The MusicBrainz autotagger has been moved to a separate plugin. The default :ref:`plugins-config` includes ``musicbrainz``, but if you've customized your ``plugins`` list in your configuration, you'll need to explicitly add ``musicbrainz`` to continue using this functionality. - Configuration option ``musicbrainz.enabled`` has thus been deprecated. - :bug:`2686` :bug:`4605` + Configuration option :conf:`plugins.musicbrainz:enabled` has thus been + deprecated. :bug:`2686` :bug:`4605` - :doc:`plugins/web`: Show notifications when a track plays. This uses the Media Session API to customize media notifications. -- :doc:`plugins/discogs`: Add configurable ``search_limit`` option to limit the - number of results returned by the Discogs metadata search queries. +- :doc:`plugins/discogs`: Add configurable :conf:`plugins.discogs:search_limit` + option to limit the number of results returned by the Discogs metadata search + queries. - :doc:`plugins/discogs`: Implement ``track_for_id`` method to allow retrieving singletons by their Discogs ID. :bug:`4661` - :doc:`plugins/replace`: Add new plugin. @@ -34,11 +179,14 @@ New features: - :doc:`plugins/web`: Display artist and album as part of the search results. - :doc:`/plugins/unimported`: Add ``ignore_as_globs`` option to use globbing for the ``ignore_subdirectories`` option. +- :doc:`plugins/spotify` :doc:`plugins/deezer`: Add new configuration option + :conf:`plugins.index:search_limit` to limit the number of results returned by + search queries. Bug fixes: - :doc:`plugins/musicbrainz`: fix regression where user configured - ``extra_tags`` have been read incorrectly. 
:bug:`5788` + :conf:`plugins.musicbrainz:extra_tags` have been read incorrectly. :bug:`5788` - tests: Fix library tests failing on Windows when run from outside ``D:/``. :bug:`5802` - Fix an issue where calling ``Library.add`` would cause the ``database_change`` @@ -54,12 +202,26 @@ Bug fixes: the config option ``spotify.search_query_ascii: yes``. :bug:`5699` - :doc:`plugins/discogs`: Beets will no longer crash if a release has been deleted, and returns a 404. +- :doc:`plugins/lastgenre`: Fix the issue introduced in Beets 2.3.0 where + non-whitelisted last.fm genres were not canonicalized to parent genres. + :bug:`5930` +- :doc:`plugins/chroma`: AcoustID lookup HTTP requests will now time out after + 10 seconds, rather than hanging the entire import process. +- :doc:`/plugins/deezer`: Fix the issue with that every query to deezer was + ascii encoded. This resulted in bad matches for queries that contained special + e.g. non latin characters as 盗作. If you want to keep the legacy behavior set + the config option ``deezer.search_query_ascii: yes``. :bug:`5860` +- Fixed regression with :doc:`/plugins/listenbrainz` where the plugin could not + be loaded :bug:`5975` +- :doc:`/plugins/fromfilename`: Beets will no longer crash if a track's title + field is missing. For packagers: -- Optional ``extra_tags`` parameter has been removed from - ``BeetsPlugin.candidates`` method signature since it is never passed in. If - you override this method in your plugin, feel free to remove this parameter. +- Optional :conf:`plugins.musicbrainz:extra_tags` parameter has been removed + from ``BeetsPlugin.candidates`` method signature since it is never passed in. + If you override this method in your plugin, feel free to remove this + parameter. - Loosened ``typing_extensions`` dependency in pyproject.toml to apply to every python version. @@ -70,8 +232,8 @@ For plugin developers: art sources might need to be adapted. - We split the responsibilities of plugins into two base classes - 1. :class:`beets.plugins.BeetsPlugin` is the base class for all plugins, any - plugin needs to inherit from this class. + 1. |BeetsPlugin| is the base class for all plugins, any plugin needs to + inherit from this class. 2. :class:`beets.metadata_plugin.MetadataSourcePlugin` allows plugins to act like metadata sources. E.g. used by the MusicBrainz plugin. All plugins in the beets repo are opted into this class where applicable. If you are @@ -116,9 +278,21 @@ Other changes: Autogenerated API references are now located in the ``docs/api`` subdirectory. - :doc:`/plugins/substitute`: Fix rST formatting for example cases so that each case is shown on separate lines. +- :doc:`/plugins/ftintitle`: Process items whose albumartist is not contained in + the artist field, including compilations using Various Artists as an + albumartist and album tracks by guest artists featuring a third artist. - Refactored library.py file by splitting it into multiple modules within the beets/library directory. - Added a test to check that all plugins can be imported without errors. +- :doc:`/guides/main`: Add instructions to install beets on Void Linux. +- :doc:`plugins/lastgenre`: Refactor loading whitelist and canonicalization + file. 
:bug:`5979` +- :doc:`plugins/lastgenre`: Updated and streamlined the genre whitelist and + canonicalization tree :bug:`5977` +- UI: Update default ``text_diff_added`` color from **bold red** to **bold + green.** +- UI: Use ``text_diff_added`` and ``text_diff_removed`` colors in **all** diff + comparisons, including case differences. 2.3.1 (May 14, 2025) -------------------- @@ -403,8 +577,9 @@ New features: :bug:`4348` - Create the parental directories for database if they do not exist. :bug:`3808` :bug:`4327` -- :ref:`musicbrainz-config`: a new :ref:`musicbrainz.enabled` option allows - disabling the MusicBrainz metadata source during the autotagging process +- :ref:`musicbrainz-config`: a new :conf:`plugins.musicbrainz:enabled` option + allows disabling the MusicBrainz metadata source during the autotagging + process - :doc:`/plugins/kodiupdate`: Now supports multiple kodi instances :bug:`4101` - Add the item fields ``bitrate_mode``, ``encoder_info`` and ``encoder_settings``. @@ -437,8 +612,8 @@ New features: :bug:`4561` :bug:`4600` - :ref:`musicbrainz-config`: MusicBrainz release pages often link to related metadata sources like Discogs, Bandcamp, Spotify, Deezer and Beatport. When - enabled via the :ref:`musicbrainz.external_ids` options, release ID's will be - extracted from those URL's and imported to the library. :bug:`4220` + enabled via the :conf:`plugins.musicbrainz:external_ids` options, release ID's + will be extracted from those URL's and imported to the library. :bug:`4220` - :doc:`/plugins/convert`: Add support for generating m3u8 playlists together with converted media files. :bug:`4373` - Fetch the ``release_group_title`` field from MusicBrainz. :bug:`4809` @@ -792,8 +967,9 @@ Other new things: - ``beet remove`` now also allows interactive selection of items from the query, similar to ``beet modify``. -- Enable HTTPS for MusicBrainz by default and add configuration option ``https`` - for custom servers. See :ref:`musicbrainz-config` for more details. +- Enable HTTPS for MusicBrainz by default and add configuration option + :conf:`plugins.musicbrainz:https` for custom servers. See + :ref:`musicbrainz-config` for more details. - :doc:`/plugins/mpdstats`: Add a new ``strip_path`` option to help build the right local path from MPD information. - :doc:`/plugins/convert`: Conversion can now parallelize conversion jobs on @@ -813,8 +989,8 @@ Other new things: server. - :doc:`/plugins/subsonicupdate`: The plugin now automatically chooses between token- and password-based authentication based on the server version. -- A new :ref:`extra_tags` configuration option lets you use more metadata in - MusicBrainz queries to further narrow the search. +- A new :conf:`plugins.musicbrainz:extra_tags` configuration option lets you use + more metadata in MusicBrainz queries to further narrow the search. - A new :doc:`/plugins/fish` adds `Fish shell`_ tab autocompletion to beets. - :doc:`plugins/fetchart` and :doc:`plugins/embedart`: Added a new ``quality`` option that controls the quality of the image output when the image is @@ -868,9 +1044,9 @@ Other new things: (and now deprecated) separate ``host``, ``port``, and ``contextpath`` config options. As a consequence, the plugin can now talk to Subsonic over HTTPS. Thanks to :user:`jef`. :bug:`3449` -- :doc:`/plugins/discogs`: The new ``index_tracks`` option enables incorporation - of work names and intra-work divisions into imported track titles. Thanks to - :user:`cole-miller`. 
:bug:`3459` +- :doc:`/plugins/discogs`: The new :conf:`plugins.discogs:index_tracks` option + enables incorporation of work names and intra-work divisions into imported + track titles. Thanks to :user:`cole-miller`. :bug:`3459` - :doc:`/plugins/web`: The query API now interprets backslashes as path separators to support path queries. Thanks to :user:`nmeum`. :bug:`3567` - ``beet import`` now handles tar archives with bzip2 or gzip compression. @@ -884,9 +1060,9 @@ Other new things: :user:`logan-arens`. :bug:`2947` - There is a new ``--plugins`` (or ``-p``) CLI flag to specify a list of plugins to load. -- A new :ref:`genres` option fetches genre information from MusicBrainz. This - functionality depends on functionality that is currently unreleased in the - python-musicbrainzngs_ library: see PR `#266 +- A new :conf:`plugins.musicbrainz:genres` option fetches genre information from + MusicBrainz. This functionality depends on functionality that is currently + unreleased in the python-musicbrainzngs_ library: see PR `#266 <https://github.com/alastair/python-musicbrainzngs/pull/266>`_. Thanks to :user:`aereaux`. - :doc:`/plugins/replaygain`: Analysis now happens in parallel using the @@ -926,9 +1102,10 @@ Fixes: :bug:`3867` - :doc:`/plugins/web`: Fixed a small bug that caused the album art path to be redacted even when ``include_paths`` option is set. :bug:`3866` -- :doc:`/plugins/discogs`: Fixed a bug with the ``index_tracks`` option that - sometimes caused the index to be discarded. Also, remove the extra semicolon - that was added when there is no index track. +- :doc:`/plugins/discogs`: Fixed a bug with the + :conf:`plugins.discogs:index_tracks` option that sometimes caused the index to + be discarded. Also, remove the extra semicolon that was added when there is no + index track. - :doc:`/plugins/subsonicupdate`: The API client was using the ``POST`` method rather the ``GET`` method. Also includes better exception handling, response parsing, and tests. @@ -2544,9 +2721,9 @@ Major new features and bigger changes: analysis tool. Thanks to :user:`jmwatte`. :bug:`1343` - A new ``filesize`` field on items indicates the number of bytes in the file. :bug:`1291` -- A new :ref:`searchlimit` configuration option allows you to specify how many - search results you wish to see when looking up releases at MusicBrainz during - import. :bug:`1245` +- A new :conf:`plugins.index:search_limit` configuration option allows you to + specify how many search results you wish to see when looking up releases at + MusicBrainz during import. :bug:`1245` - The importer now records the data source for a match in a new flexible attribute ``data_source`` on items and albums. :bug:`1311` - The colors used in the terminal interface are now configurable via the new @@ -4168,7 +4345,7 @@ fetching cover art for your music, enable this plugin after upgrading to beets "database is locked"). This release synchronizes access to the database to avoid internal SQLite contention, which should avoid this error. - Plugins can now add parallel stages to the import pipeline. See - :ref:`writing-plugins`. + :ref:`basic-plugin-setup`. - Beets now prints out an error when you use an unrecognized field name in a query: for example, when running ``beet ls -a artist:foo`` (because ``artist`` is an item-level field). @@ -4351,7 +4528,7 @@ to come in the next couple of releases. addition to replacing them) if the special string ``<strip>`` is specified as the replacement. 
- New plugin API: plugins can now add fields to the MediaFile tag abstraction - layer. See :ref:`writing-plugins`. + layer. See :ref:`basic-plugin-setup`. - A reasonable error message is now shown when the import log file cannot be opened. - The import log file is now flushed and closed properly so that it can be used @@ -4395,7 +4572,7 @@ filenames that would otherwise conflict. Three new plugins (``inline``, naming rules: for example, ``%upper{%left{$artist,1}}`` will insert the capitalized first letter of the track's artist. For more details, see :doc:`/reference/pathformat`. If you're interested in adding your own template - functions via a plugin, see :ref:`writing-plugins`. + functions via a plugin, see :ref:`basic-plugin-setup`. - Plugins can also now define new path *fields* in addition to functions. - The new :doc:`/plugins/inline` lets you **use Python expressions to customize path formats** by defining new fields in the config file. @@ -4942,7 +5119,7 @@ BPD). To "upgrade" an old database, you can use the included ``albumify`` plugin list of plugin names) and ``pluginpath`` (a colon-separated list of directories to search beyond ``sys.path``). Plugins are just Python modules under the ``beetsplug`` namespace package containing subclasses of - ``beets.plugins.BeetsPlugin``. See `the beetsplug directory`_ for examples or + |BeetsPlugin|. See `the beetsplug directory`_ for examples or :doc:`/plugins/index` for instructions. - As a consequence of adding album art, the database was significantly refactored to keep track of some information at an album (rather than item) diff --git a/docs/code_of_conduct.rst b/docs/code_of_conduct.rst index 772800d44..76e57d0e6 100644 --- a/docs/code_of_conduct.rst +++ b/docs/code_of_conduct.rst @@ -1,4 +1,3 @@ -.. - code_of_conduct: +.. code_of_conduct: .. include:: ../CODE_OF_CONDUCT.rst diff --git a/docs/conf.py b/docs/conf.py index d0f8cdffe..8d2bae130 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -6,6 +6,11 @@ # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information +import sys +from pathlib import Path + +# Add custom extensions directory to path +sys.path.insert(0, str(Path(__file__).parent / "extensions")) project = "beets" AUTHOR = "Adrian Sampson" @@ -13,8 +18,8 @@ copyright = "2016, Adrian Sampson" master_doc = "index" language = "en" -version = "2.3" -release = "2.3.1" +version = "2.5" +release = "2.5.1" # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration @@ -23,13 +28,17 @@ extensions = [ "sphinx.ext.autodoc", "sphinx.ext.autosummary", "sphinx.ext.extlinks", + "sphinx.ext.viewcode", + "sphinx_design", + "sphinx_copybutton", + "conf", ] + autosummary_generate = True exclude_patterns = ["_build"] templates_path = ["_templates"] source_suffix = {".rst": "restructuredtext", ".md": "markdown"} - pygments_style = "sphinx" # External links to the bug tracker and other sites. @@ -75,13 +84,30 @@ man_pages = [ ), ] +# Global substitutions that can be used anywhere in the documentation. +rst_epilog = """ +.. |Album| replace:: :class:`~beets.library.models.Album` +.. |AlbumInfo| replace:: :class:`beets.autotag.hooks.AlbumInfo` +.. |BeetsPlugin| replace:: :class:`beets.plugins.BeetsPlugin` +.. |ImportSession| replace:: :class:`~beets.importer.session.ImportSession` +.. 
|ImportTask| replace:: :class:`~beets.importer.tasks.ImportTask` +.. |Item| replace:: :class:`~beets.library.models.Item` +.. |Library| replace:: :class:`~beets.library.library.Library` +.. |Model| replace:: :class:`~beets.dbcore.db.Model` +.. |TrackInfo| replace:: :class:`beets.autotag.hooks.TrackInfo` +""" # -- Options for HTML output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output html_theme = "pydata_sphinx_theme" -html_theme_options = {"collapse_navigation": True, "logo": {"text": "beets"}} +html_theme_options = { + "collapse_navigation": False, + "logo": {"text": "beets"}, + "show_nav_level": 2, # How many levels in left sidebar to show automatically + "navigation_depth": 4, # How many levels of navigation to expand +} html_title = "beets" html_logo = "_static/beets_logo_nobg.png" html_static_path = ["_static"] diff --git a/docs/contributing.rst b/docs/contributing.rst index 6c71b2ce0..6af7deaef 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -1,4 +1,3 @@ -.. - contributing: +.. contributing: .. include:: ../CONTRIBUTING.rst diff --git a/docs/dev/index.rst b/docs/dev/index.rst index 7f8af5276..f22aa8c56 100644 --- a/docs/dev/index.rst +++ b/docs/dev/index.rst @@ -4,22 +4,21 @@ For Developers This section contains information for developers. Read on if you're interested in hacking beets itself or creating plugins for it. -See also the documentation for MediaFile_, the library used by beets to read and -write metadata tags in media files. +See also the documentation for the MediaFile_ and Confuse_ libraries. These are +maintained by the beets team and used to read and write metadata tags and manage +configuration files, respectively. + +.. _confuse: https://confuse.readthedocs.io/en/latest/ .. _mediafile: https://mediafile.readthedocs.io/en/latest/ .. toctree:: - :maxdepth: 1 + :maxdepth: 3 + :titlesonly: - plugins + plugins/index library + paths importer cli - -.. toctree:: - :maxdepth: 1 - :caption: API Reference - - ../api/plugins - ../api/database + ../api/index diff --git a/docs/dev/library.rst b/docs/dev/library.rst index 0f7554aac..8b854937d 100644 --- a/docs/dev/library.rst +++ b/docs/dev/library.rst @@ -7,18 +7,18 @@ This page describes the internal API of beets' core database features. It doesn't exhaustively document the API, but is aimed at giving an overview of the architecture to orient anyone who wants to dive into the code. -The :class:`Library` object is the central repository for data in beets. It -represents a database containing songs, which are :class:`Item` instances, and -groups of items, which are :class:`Album` instances. +The |Library| object is the central repository for data in beets. It represents +a database containing songs, which are |Item| instances, and groups of items, +which are |Album| instances. The Library Class ----------------- -The :class:`Library` is typically instantiated as a singleton. A single -invocation of beets usually has only one :class:`Library`. It's powered by -:class:`dbcore.Database` under the hood, which handles the SQLite_ abstraction, -something like a very minimal ORM_. The library is also responsible for handling -queries to retrieve stored objects. +The |Library| is typically instantiated as a singleton. A single invocation of +beets usually has only one |Library|. It's powered by :class:`dbcore.Database` +under the hood, which handles the SQLite_ abstraction, something like a very +minimal ORM_. 
The library is also responsible for handling queries to retrieve +stored objects. Overview ~~~~~~~~ @@ -40,10 +40,9 @@ which you can get using the :py:meth:`Library.transaction` context manager. Model Classes ------------- -The two model entities in beets libraries, :class:`Item` and :class:`Album`, -share a base class, :class:`LibModel`, that provides common functionality. That -class itself specialises :class:`beets.dbcore.Model` which provides an ORM-like -abstraction. +The two model entities in beets libraries, |Item| and |Album|, share a base +class, :class:`LibModel`, that provides common functionality. That class itself +specialises :class:`beets.dbcore.Model` which provides an ORM-like abstraction. To get or change the metadata of a model (an item or album), either access its attributes (e.g., ``print(album.year)`` or ``album.year = 2012``) or use the @@ -56,8 +55,7 @@ Models use dirty-flags to track when the object's metadata goes out of sync with the database. The dirty dictionary maps field names to booleans indicating whether the field has been written since the object was last synchronized (via load or store) with the database. This logic is implemented in the model base -class :class:`LibModel` and is inherited by both :class:`Item` and -:class:`Album`. +class :class:`LibModel` and is inherited by both |Item| and |Album|. We provide CRUD-like methods for interacting with the database: @@ -77,10 +75,10 @@ normal the normal mapping API is supported: Item ~~~~ -Each :class:`Item` object represents a song or track. (We use the more generic -term item because, one day, beets might support non-music media.) An item can -either be purely abstract, in which case it's just a bag of metadata fields, or -it can have an associated file (indicated by ``item.path``). +Each |Item| object represents a song or track. (We use the more generic term +item because, one day, beets might support non-music media.) An item can either +be purely abstract, in which case it's just a bag of metadata fields, or it can +have an associated file (indicated by ``item.path``). In terms of the underlying SQLite database, items are backed by a single table called items with one column per metadata fields. The metadata fields currently @@ -97,12 +95,12 @@ become out of sync with on-disk metadata, mainly to speed up the :ref:`update-cmd` (which needs to check whether the database is in sync with the filesystem). This feature turns out to be sort of complicated. -For any :class:`Item`, there are two mtimes: the on-disk mtime (maintained by -the OS) and the database mtime (maintained by beets). Correspondingly, there is -on-disk metadata (ID3 tags, for example) and DB metadata. The goal with the -mtime is to ensure that the on-disk and DB mtimes match when the on-disk and DB -metadata are in sync; this lets beets do a quick mtime check and avoid rereading -files in some circumstances. +For any |Item|, there are two mtimes: the on-disk mtime (maintained by the OS) +and the database mtime (maintained by beets). Correspondingly, there is on-disk +metadata (ID3 tags, for example) and DB metadata. The goal with the mtime is to +ensure that the on-disk and DB mtimes match when the on-disk and DB metadata are +in sync; this lets beets do a quick mtime check and avoid rereading files in +some circumstances. Specifically, beets attempts to maintain the following invariant: @@ -126,14 +124,14 @@ This leads to the following implementation policy: Album ~~~~~ -An :class:`Album` is a collection of Items in the database. 
Every item in the -database has either zero or one associated albums (accessible via -``item.album_id``). An item that has no associated album is called a singleton. -Changing fields on an album (e.g. ``album.year = 2012``) updates the album -itself and also changes the same field in all associated items. +An |Album| is a collection of Items in the database. Every item in the database +has either zero or one associated albums (accessible via ``item.album_id``). An +item that has no associated album is called a singleton. Changing fields on an +album (e.g. ``album.year = 2012``) updates the album itself and also changes the +same field in all associated items. -An :class:`Album` object keeps track of album-level metadata, which is (mostly) -a subset of the track-level metadata. The album-level metadata fields are listed +An |Album| object keeps track of album-level metadata, which is (mostly) a +subset of the track-level metadata. The album-level metadata fields are listed in ``Album._fields``. For those fields that are both item-level and album-level (e.g., ``year`` or ``albumartist``), every item in an album should share the same value. Albums use an SQLite table called ``albums``, in which each column @@ -147,7 +145,7 @@ is an album metadata field. Transactions ~~~~~~~~~~~~ -The :class:`Library` class provides the basic methods necessary to access and +The |Library| class provides the basic methods necessary to access and manipulate its contents. To perform more complicated operations atomically, or to interact directly with the underlying SQLite database, you must use a *transaction* (see this `blog post`_ for motivation). For example @@ -181,8 +179,8 @@ matching items/albums. The ``clause()`` method should return an SQLite ``WHERE`` clause that matches appropriate albums/items. This allows for efficient batch queries. -Correspondingly, the ``match(item)`` method should take an :class:`Item` object -and return a boolean, indicating whether or not a specific item matches the +Correspondingly, the ``match(item)`` method should take an |Item| object and +return a boolean, indicating whether or not a specific item matches the criterion. This alternate implementation allows clients to determine whether items that have already been fetched from the database match the query. @@ -194,4 +192,4 @@ together, matching only albums/items that match all constituent queries. Beets has a human-writable plain-text query syntax that can be parsed into :class:`Query` objects. Calling ``AndQuery.from_strings`` parses a list of query -parts into a query object that can then be used with :class:`Library` objects. +parts into a query object that can then be used with |Library| objects. diff --git a/docs/dev/paths.rst b/docs/dev/paths.rst new file mode 100644 index 000000000..a593580f6 --- /dev/null +++ b/docs/dev/paths.rst @@ -0,0 +1,64 @@ +Handling Paths +============== + +``pathlib`` provides a clean, cross-platform API for working with filesystem +paths. + +Use the ``.filepath`` property on ``Item`` and ``Album`` library objects to +access paths as ``pathlib.Path`` objects. This produces a readable, native +representation suitable for printing, logging, or further processing. + +Normalize paths using ``Path(...).expanduser().resolve()``, which expands ``~`` +and resolves symlinks. + +Cross-platform differences—such as path separators, Unicode handling, and +long-path support (Windows) are automatically managed by ``pathlib``. 
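For illustration, here is a minimal sketch of this style in practice. The database location and the query string are hypothetical, and ``Item.filepath`` is the property described above:

.. code-block:: python

    from pathlib import Path

    from beets.library import Library

    # Hypothetical database location, expanded explicitly as recommended above.
    lib = Library(str(Path("~/beets/library.db").expanduser()))

    for item in lib.items("artist:Beatles"):  # hypothetical query
        # ``.filepath`` is a native ``pathlib.Path`` object, safe to print,
        # join, or inspect directly.
        print(item.filepath.parent, item.filepath.suffix)

    # Normalize a user-supplied path before using it.
    music_dir = Path("~/Music/../Music").expanduser().resolve()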
+ +When storing paths in the database, however, convert them to bytes with +``bytestring_path()``. Paths in Beets are currently stored as bytes, although +there are plans to eventually store ``pathlib.Path`` objects directly. To access +media file paths in their stored form, use the ``.path`` property on ``Item`` +and ``Album``. + +Legacy utilities +---------------- + +Historically, Beets used custom utilities to ensure consistent behavior across +Linux, macOS, and Windows before ``pathlib`` became reliable: + +- ``syspath()``: worked around Windows Unicode and long-path limitations by + converting to a system-safe string (adding the ``\\?\`` prefix where needed). +- ``normpath()``: normalized slashes and removed ``./`` or ``..`` parts but did + not expand ``~``. +- ``bytestring_path()``: converted paths to bytes for database storage (still + used for that purpose today). +- ``displayable_path()``: converted byte paths to Unicode for display or + logging. + +These functions remain safe to use in legacy code, but new code should rely +solely on ``pathlib.Path``. + +Examples +-------- + +Old style + +.. code-block:: python + + displayable_path(item.path) + normpath("~/Music/../Artist") + syspath(path) + +New style + +.. code-block:: python + + item.filepath + Path("~/Music/../Artist").expanduser().resolve() + Path(path) + +When storing paths in the database + +.. code-block:: python + + path_bytes = bytestring_path(Path("/some/path/to/file.mp3")) diff --git a/docs/dev/plugins.rst b/docs/dev/plugins.rst deleted file mode 100644 index 620e1caec..000000000 --- a/docs/dev/plugins.rst +++ /dev/null @@ -1,653 +0,0 @@ -Plugin Development Guide -======================== - -Beets plugins are Python modules or packages that extend the core functionality -of beets. The plugin system is designed to be flexible, allowing developers to -add virtually any type of features. - -.. _writing-plugins: - -Writing Plugins ---------------- - -A beets plugin is just a Python module or package inside the ``beetsplug`` -namespace package. (Check out `this article`_ and `this Stack Overflow -question`_ if you haven't heard about namespace packages.) So, to make one, -create a directory called ``beetsplug`` and add either your plugin module: - -:: - - beetsplug/ - myawesomeplugin.py - -or your plugin subpackage: - -:: - - beetsplug/ - myawesomeplugin/ - __init__.py - myawesomeplugin.py - -.. attention:: - - You do not anymore need to add a ``__init__.py`` file to the ``beetsplug`` - directory. Python treats your plugin as a namespace package automatically, - thus we do not depend on ``pkgutil``-based setup in the ``__init__.py`` file - anymore. - -.. _this article: https://realpython.com/python-namespace-package/#setting-up-some-namespace-packages - -.. _this stack overflow question: https://stackoverflow.com/a/27586272/9582674 - -The meat of your plugin goes in ``myawesomeplugin.py``. There, you'll have to -import ``BeetsPlugin`` from ``beets.plugins`` and subclass it, for example - -.. code-block:: python - - from beets.plugins import BeetsPlugin - - - class MyAwesomePlugin(BeetsPlugin): - pass - -Once you have your ``BeetsPlugin`` subclass, there's a variety of things your -plugin can do. (Read on!) - -To use your new plugin, package your plugin (see how to do this with poetry_ or -setuptools_, for example) and install it into your ``beets`` virtual -environment. Then, add your plugin to beets configuration - -.. _poetry: https://python-poetry.org/docs/pyproject/#packages - -.. 
_setuptools: https://setuptools.pypa.io/en/latest/userguide/package_discovery.html#finding-simple-packages - -.. code-block:: yaml - - # config.yaml - plugins: - - myawesomeplugin - -and you're good to go! - -.. _add_subcommands: - -Add Commands to the CLI -~~~~~~~~~~~~~~~~~~~~~~~ - -Plugins can add new subcommands to the ``beet`` command-line interface. Define -the plugin class' ``commands()`` method to return a list of ``Subcommand`` -objects. (The ``Subcommand`` class is defined in the ``beets.ui`` module.) -Here's an example plugin that adds a simple command: - -:: - - from beets.plugins import BeetsPlugin - from beets.ui import Subcommand - - my_super_command = Subcommand('super', help='do something super') - def say_hi(lib, opts, args): - print("Hello everybody! I'm a plugin!") - my_super_command.func = say_hi - - class SuperPlug(BeetsPlugin): - def commands(self): - return [my_super_command] - -To make a subcommand, invoke the constructor like so: ``Subcommand(name, parser, -help, aliases)``. The ``name`` parameter is the only required one and should -just be the name of your command. ``parser`` can be an `OptionParser instance`_, -but it defaults to an empty parser (you can extend it later). ``help`` is a -description of your command, and ``aliases`` is a list of shorthand versions of -your command name. - -.. _optionparser instance: https://docs.python.org/library/optparse.html - -You'll need to add a function to your command by saying ``mycommand.func = -myfunction``. This function should take the following parameters: ``lib`` (a -beets ``Library`` object) and ``opts`` and ``args`` (command-line options and -arguments as returned by OptionParser.parse_args_). - -.. _optionparser.parse_args: https://docs.python.org/library/optparse.html#parsing-arguments - -The function should use any of the utility functions defined in ``beets.ui``. -Try running ``pydoc beets.ui`` to see what's available. - -You can add command-line options to your new command using the ``parser`` member -of the ``Subcommand`` class, which is a ``CommonOptionsParser`` instance. Just -use it like you would a normal ``OptionParser`` in an independent script. Note -that it offers several methods to add common options: ``--album``, ``--path`` -and ``--format``. This feature is versatile and extensively documented, try -``pydoc beets.ui.CommonOptionsParser`` for more information. - -.. _plugin_events: - -Listen for Events -~~~~~~~~~~~~~~~~~ - -Event handlers allow plugins to run code whenever something happens in beets' -operation. For instance, a plugin could write a log message every time an album -is successfully autotagged or update MPD's index whenever the database is -changed. - -You can "listen" for events using ``BeetsPlugin.register_listener``. Here's an -example: - -:: - - from beets.plugins import BeetsPlugin - - def loaded(): - print 'Plugin loaded!' - - class SomePlugin(BeetsPlugin): - def __init__(self): - super().__init__() - self.register_listener('pluginload', loaded) - -Note that if you want to access an attribute of your plugin (e.g. ``config`` or -``log``) you'll have to define a method and not a function. 
Here is the usual -registration process in this case: - -:: - - from beets.plugins import BeetsPlugin - - class SomePlugin(BeetsPlugin): - def __init__(self): - super().__init__() - self.register_listener('pluginload', self.loaded) - - def loaded(self): - self._log.info('Plugin loaded!') - -The events currently available are: - -- ``pluginload``: called after all the plugins have been loaded after the - ``beet`` command starts -- ``import``: called after a ``beet import`` command finishes (the ``lib`` - keyword argument is a Library object; ``paths`` is a list of paths (strings) - that were imported) -- ``album_imported``: called with an ``Album`` object every time the ``import`` - command finishes adding an album to the library. Parameters: ``lib``, - ``album`` -- ``album_removed``: called with an ``Album`` object every time an album is - removed from the library (even when its file is not deleted from disk). -- ``item_copied``: called with an ``Item`` object whenever its file is copied. - Parameters: ``item``, ``source`` path, ``destination`` path -- ``item_imported``: called with an ``Item`` object every time the importer adds - a singleton to the library (not called for full-album imports). Parameters: - ``lib``, ``item`` -- ``before_item_moved``: called with an ``Item`` object immediately before its - file is moved. Parameters: ``item``, ``source`` path, ``destination`` path -- ``item_moved``: called with an ``Item`` object whenever its file is moved. - Parameters: ``item``, ``source`` path, ``destination`` path -- ``item_linked``: called with an ``Item`` object whenever a symlink is created - for a file. Parameters: ``item``, ``source`` path, ``destination`` path -- ``item_hardlinked``: called with an ``Item`` object whenever a hardlink is - created for a file. Parameters: ``item``, ``source`` path, ``destination`` - path -- ``item_reflinked``: called with an ``Item`` object whenever a reflink is - created for a file. Parameters: ``item``, ``source`` path, ``destination`` - path -- ``item_removed``: called with an ``Item`` object every time an item (singleton - or album's part) is removed from the library (even when its file is not - deleted from disk). -- ``write``: called with an ``Item`` object, a ``path``, and a ``tags`` - dictionary just before a file's metadata is written to disk (i.e., just before - the file on disk is opened). Event handlers may change the ``tags`` dictionary - to customize the tags that are written to the media file. Event handlers may - also raise a ``library.FileOperationError`` exception to abort the write - operation. Beets will catch that exception, print an error message and - continue. -- ``after_write``: called with an ``Item`` object after a file's metadata is - written to disk (i.e., just after the file on disk is closed). -- ``import_task_created``: called immediately after an import task is - initialized. Plugins can use this to, for example, change imported files of a - task before anything else happens. It's also possible to replace the task with - another task by returning a list of tasks. This list can contain zero or more - ``ImportTask``. Returning an empty list will stop the task. Parameters: - ``task`` (an ``ImportTask``) and ``session`` (an ``ImportSession``). -- ``import_task_start``: called when before an import task begins processing. - Parameters: ``task`` and ``session``. -- ``import_task_apply``: called after metadata changes have been applied in an - import task. 
This is called on the same thread as the UI, so use this - sparingly and only for tasks that can be done quickly. For most plugins, an - import pipeline stage is a better choice (see :ref:`plugin-stage`). - Parameters: ``task`` and ``session``. -- ``import_task_before_choice``: called after candidate search for an import - task before any decision is made about how/if to import or tag. Can be used to - present information about the task or initiate interaction with the user - before importing occurs. Return an importer action to take a specific action. - Only one handler may return a non-None result. Parameters: ``task`` and - ``session`` -- ``import_task_choice``: called after a decision has been made about an import - task. This event can be used to initiate further interaction with the user. - Use ``task.choice_flag`` to determine or change the action to be taken. - Parameters: ``task`` and ``session``. -- ``import_task_files``: called after an import task finishes manipulating the - filesystem (copying and moving files, writing metadata tags). Parameters: - ``task`` and ``session``. -- ``library_opened``: called after beets starts up and initializes the main - Library object. Parameter: ``lib``. -- ``database_change``: a modification has been made to the library database. The - change might not be committed yet. Parameters: ``lib`` and ``model``. -- ``cli_exit``: called just before the ``beet`` command-line program exits. - Parameter: ``lib``. -- ``import_begin``: called just before a ``beet import`` session starts up. - Parameter: ``session``. -- ``trackinfo_received``: called after metadata for a track item has been - fetched from a data source, such as MusicBrainz. You can modify the tags that - the rest of the pipeline sees on a ``beet import`` operation or during later - adjustments, such as ``mbsync``. Slow handlers of the event can impact the - operation, since the event is fired for any fetched possible match ``before`` - the user (or the autotagger machinery) gets to see the match. Parameter: - ``info``. -- ``albuminfo_received``: like ``trackinfo_received``, the event indicates new - metadata for album items. The parameter is an ``AlbumInfo`` object instead of - a ``TrackInfo``. Parameter: ``info``. -- ``before_choose_candidate``: called before the user is prompted for a decision - during a ``beet import`` interactive session. Plugins can use this event for - :ref:`appending choices to the prompt <append_prompt_choices>` by returning a - list of ``PromptChoices``. Parameters: ``task`` and ``session``. -- ``mb_track_extract``: called after the metadata is obtained from MusicBrainz. - The parameter is a ``dict`` containing the tags retrieved from MusicBrainz for - a track. Plugins must return a new (potentially empty) ``dict`` with - additional ``field: value`` pairs, which the autotagger will apply to the - item, as flexible attributes if ``field`` is not a hardcoded field. Fields - already present on the track are overwritten. Parameter: ``data`` -- ``mb_album_extract``: Like ``mb_track_extract``, but for album tags. - Overwrites tags set at the track level, if they have the same ``field``. - Parameter: ``data`` - -The included ``mpdupdate`` plugin provides an example use case for event -listeners. - -Extend the Autotagger -~~~~~~~~~~~~~~~~~~~~~ - -Plugins can also enhance the functionality of the autotagger. For a -comprehensive example, try looking at the ``chroma`` plugin, which is included -with beets. 
- -A plugin can extend three parts of the autotagger's process: the track distance -function, the album distance function, and the initial MusicBrainz search. The -distance functions determine how "good" a match is at the track and album -levels; the initial search controls which candidates are presented to the -matching algorithm. Plugins implement these extensions by implementing four -methods on the plugin class: - -- ``track_distance(self, item, info)``: adds a component to the distance - function (i.e., the similarity metric) for individual tracks. ``item`` is the - track to be matched (an Item object) and ``info`` is the TrackInfo object that - is proposed as a match. Should return a ``(dist, dist_max)`` pair of floats - indicating the distance. -- ``album_distance(self, items, album_info, mapping)``: like the above, but - compares a list of items (representing an album) to an album-level MusicBrainz - entry. ``items`` is a list of Item objects; ``album_info`` is an AlbumInfo - object; and ``mapping`` is a dictionary that maps Items to their corresponding - TrackInfo objects. -- ``candidates(self, items, artist, album, va_likely)``: given a list of items - comprised by an album to be matched, return a list of ``AlbumInfo`` objects - for candidate albums to be compared and matched. -- ``item_candidates(self, item, artist, album)``: given a *singleton* item, - return a list of ``TrackInfo`` objects for candidate tracks to be compared and - matched. -- ``album_for_id(self, album_id)``: given an ID from user input or an album's - tags, return a candidate AlbumInfo object (or None). -- ``track_for_id(self, track_id)``: given an ID from user input or a file's - tags, return a candidate TrackInfo object (or None). - -When implementing these functions, you may want to use the functions from the -``beets.autotag`` and ``beets.autotag.mb`` modules, both of which have somewhat -helpful docstrings. - -Read Configuration Options -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Plugins can configure themselves using the ``config.yaml`` file. You can read -configuration values in two ways. The first is to use ``self.config`` within -your plugin class. This gives you a view onto the configuration values in a -section with the same name as your plugin's module. For example, if your plugin -is in ``greatplugin.py``, then ``self.config`` will refer to options under the -``greatplugin:`` section of the config file. - -For example, if you have a configuration value called "foo", then users can put -this in their ``config.yaml``: - -:: - - greatplugin: - foo: bar - -To access this value, say ``self.config['foo'].get()`` at any point in your -plugin's code. The ``self.config`` object is a *view* as defined by the Confuse_ -library. - -.. _confuse: https://confuse.readthedocs.io/en/latest/ - -If you want to access configuration values *outside* of your plugin's section, -import the ``config`` object from the ``beets`` module. That is, just put ``from -beets import config`` at the top of your plugin and access values from there. - -If your plugin provides configuration values for sensitive data (e.g., -passwords, API keys, ...), you should add these to the config so they can be -redacted automatically when users dump their config. This can be done by setting -each value's ``redact`` flag, like so: - -:: - - self.config['password'].redact = True - -Add Path Format Functions and Fields -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Beets supports *function calls* in its path format syntax (see -:doc:`/reference/pathformat`). 
Beets includes a few built-in functions, but -plugins can register new functions by adding them to the ``template_funcs`` -dictionary. - -Here's an example: - -:: - - class MyPlugin(BeetsPlugin): - def __init__(self): - super().__init__() - self.template_funcs['initial'] = _tmpl_initial - - def _tmpl_initial(text: str) -> str: - if text: - return text[0].upper() - else: - return u'' - -This plugin provides a function ``%initial`` to path templates where -``%initial{$artist}`` expands to the artist's initial (its capitalized first -character). - -Plugins can also add template *fields*, which are computed values referenced as -``$name`` in templates. To add a new field, add a function that takes an -``Item`` object to the ``template_fields`` dictionary on the plugin object. -Here's an example that adds a ``$disc_and_track`` field: - -:: - - class MyPlugin(BeetsPlugin): - def __init__(self): - super().__init__() - self.template_fields['disc_and_track'] = _tmpl_disc_and_track - - def _tmpl_disc_and_track(item: Item) -> str: - """Expand to the disc number and track number if this is a - multi-disc release. Otherwise, just expands to the track - number. - """ - if item.disctotal > 1: - return u'%02i.%02i' % (item.disc, item.track) - else: - return u'%02i' % (item.track) - -With this plugin enabled, templates can reference ``$disc_and_track`` as they -can any standard metadata field. - -This field works for *item* templates. Similarly, you can register *album* -template fields by adding a function accepting an ``Album`` argument to the -``album_template_fields`` dict. - -Extend MediaFile -~~~~~~~~~~~~~~~~ - -MediaFile_ is the file tag abstraction layer that beets uses to make -cross-format metadata manipulation simple. Plugins can add fields to MediaFile -to extend the kinds of metadata that they can easily manage. - -The ``MediaFile`` class uses ``MediaField`` descriptors to provide access to -file tags. If you have created a descriptor you can add it through your plugins -:py:meth:`beets.plugins.BeetsPlugin.add_media_field()` method. - -.. _mediafile: https://mediafile.readthedocs.io/en/latest/ - -Here's an example plugin that provides a meaningless new field "foo": - -:: - - class FooPlugin(BeetsPlugin): - def __init__(self): - field = mediafile.MediaField( - mediafile.MP3DescStorageStyle(u'foo'), - mediafile.StorageStyle(u'foo') - ) - self.add_media_field('foo', field) - - FooPlugin() - item = Item.from_path('/path/to/foo/tag.mp3') - assert item['foo'] == 'spam' - - item['foo'] == 'ham' - item.write() - # The "foo" tag of the file is now "ham" - -.. _plugin-stage: - -Add Import Pipeline Stages -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Many plugins need to add high-latency operations to the import workflow. For -example, a plugin that fetches lyrics from the Web would, ideally, not block the -progress of the rest of the importer. Beets allows plugins to add stages to the -parallel import pipeline. - -Each stage is run in its own thread. Plugin stages run after metadata changes -have been applied to a unit of music (album or track) and before file -manipulation has occurred (copying and moving files, writing tags to disk). -Multiple stages run in parallel but each stage processes only one task at a time -and each task is processed by only one stage at a time. - -Plugins provide stages as functions that take two arguments: ``config`` and -``task``, which are ``ImportSession`` and ``ImportTask`` objects (both defined -in ``beets.importer``). 
Add such a function to the plugin's ``import_stages`` -field to register it: - -:: - - from beets.plugins import BeetsPlugin - class ExamplePlugin(BeetsPlugin): - def __init__(self): - super().__init__() - self.import_stages = [self.stage] - def stage(self, session, task): - print('Importing something!') - -It is also possible to request your function to run early in the pipeline by -adding the function to the plugin's ``early_import_stages`` field instead: - -:: - - self.early_import_stages = [self.stage] - -.. _extend-query: - -Extend the Query Syntax -~~~~~~~~~~~~~~~~~~~~~~~ - -You can add new kinds of queries to beets' :doc:`query syntax -</reference/query>`. There are two ways to add custom queries: using a prefix -and using a name. Prefix-based query extension can apply to *any* field, while -named queries are not associated with any field. For example, beets already -supports regular expression queries, which are indicated by a colon -prefix---plugins can do the same. - -For either kind of query extension, define a subclass of the ``Query`` type from -the ``beets.dbcore.query`` module. Then: - -- To define a prefix-based query, define a ``queries`` method in your plugin - class. Return from this method a dictionary mapping prefix strings to query - classes. -- To define a named query, defined dictionaries named either ``item_queries`` or - ``album_queries``. These should map names to query types. So if you use ``{ - "foo": FooQuery }``, then the query ``foo:bar`` will construct a query like - ``FooQuery("bar")``. - -For prefix-based queries, you will want to extend ``FieldQuery``, which -implements string comparisons on fields. To use it, create a subclass inheriting -from that class and override the ``value_match`` class method. (Remember the -``@classmethod`` decorator!) The following example plugin declares a query using -the ``@`` prefix to delimit exact string matches. The plugin will be used if we -issue a command like ``beet ls @something`` or ``beet ls artist:@something``: - -:: - - from beets.plugins import BeetsPlugin - from beets.dbcore import FieldQuery - - class ExactMatchQuery(FieldQuery): - @classmethod - def value_match(self, pattern, val): - return pattern == val - - class ExactMatchPlugin(BeetsPlugin): - def queries(self): - return { - '@': ExactMatchQuery - } - -Flexible Field Types -~~~~~~~~~~~~~~~~~~~~ - -If your plugin uses flexible fields to store numbers or other non-string values, -you can specify the types of those fields. A rating plugin, for example, might -want to declare that the ``rating`` field should have an integer type: - -:: - - from beets.plugins import BeetsPlugin - from beets.dbcore import types - - class RatingPlugin(BeetsPlugin): - item_types = {'rating': types.INTEGER} - - @property - def album_types(self): - return {'rating': types.INTEGER} - -A plugin may define two attributes: ``item_types`` and ``album_types``. Each of -those attributes is a dictionary mapping a flexible field name to a type -instance. You can find the built-in types in the ``beets.dbcore.types`` and -``beets.library`` modules or implement your own type by inheriting from the -``Type`` class. - -Specifying types has several advantages: - -- Code that accesses the field like ``item['my_field']`` gets the right type - (instead of just a string). -- You can use advanced queries (like :ref:`ranges <numericquery>`) from the - command line. -- User input for flexible fields may be validated and converted. 
-- Items missing the given field can use an appropriate null value for querying - and sorting purposes. - -.. _plugin-logging: - -Logging -~~~~~~~ - -Each plugin object has a ``_log`` attribute, which is a ``Logger`` from the -`standard Python logging module`_. The logger is set up to `PEP 3101`_, -str.format-style string formatting. So you can write logging calls like this: - -:: - - self._log.debug(u'Processing {0.title} by {0.artist}', item) - -.. _pep 3101: https://www.python.org/dev/peps/pep-3101/ - -.. _standard python logging module: https://docs.python.org/2/library/logging.html - -When beets is in verbose mode, plugin messages are prefixed with the plugin name -to make them easier to see. - -Which messages will be logged depends on the logging level and the action -performed: - -- Inside import stages and event handlers, the default is ``WARNING`` messages - and above. -- Everywhere else, the default is ``INFO`` or above. - -The verbosity can be increased with ``--verbose`` (``-v``) flags: each flags -lowers the level by a notch. That means that, with a single ``-v`` flag, event -handlers won't have their ``DEBUG`` messages displayed, but command functions -(for example) will. With ``-vv`` on the command line, ``DEBUG`` messages will be -displayed everywhere. - -This addresses a common pattern where plugins need to use the same code for a -command and an import stage, but the command needs to print more messages than -the import stage. (For example, you'll want to log "found lyrics for this song" -when you're run explicitly as a command, but you don't want to noisily interrupt -the importer interface when running automatically.) - -.. _append_prompt_choices: - -Append Prompt Choices -~~~~~~~~~~~~~~~~~~~~~ - -Plugins can also append choices to the prompt presented to the user during an -import session. - -To do so, add a listener for the ``before_choose_candidate`` event, and return a -list of ``PromptChoices`` that represent the additional choices that your plugin -shall expose to the user: - -:: - - from beets.plugins import BeetsPlugin - from beets.ui.commands import PromptChoice - - class ExamplePlugin(BeetsPlugin): - def __init__(self): - super().__init__() - self.register_listener('before_choose_candidate', - self.before_choose_candidate_event) - - def before_choose_candidate_event(self, session, task): - return [PromptChoice('p', 'Print foo', self.foo), - PromptChoice('d', 'Do bar', self.bar)] - - def foo(self, session, task): - print('User has chosen "Print foo"!') - - def bar(self, session, task): - print('User has chosen "Do bar"!') - -The previous example modifies the standard prompt: - -:: - - # selection (default 1), Skip, Use as-is, as Tracks, Group albums, - Enter search, enter Id, aBort? - -by appending two additional options (``Print foo`` and ``Do bar``): - -:: - - # selection (default 1), Skip, Use as-is, as Tracks, Group albums, - Enter search, enter Id, aBort, Print foo, Do bar? - -If the user selects a choice, the ``callback`` attribute of the corresponding -``PromptChoice`` will be called. It is the responsibility of the plugin to check -for the status of the import session and decide the choices to be appended: for -example, if a particular choice should only be presented if the album has no -candidates, the relevant checks against ``task.candidates`` should be performed -inside the plugin's ``before_choose_candidate_event`` accordingly. 
- -Please make sure that the short letter for each of the choices provided by the -plugin is not already in use: the importer will emit a warning and discard all -but one of the choices using the same letter, giving priority to the core -importer prompt choices. As a reference, the following characters are used by -the choices on the core importer prompt, and hence should not be used: ``a``, -``s``, ``u``, ``t``, ``g``, ``e``, ``i``, ``b``. - -Additionally, the callback function can optionally specify the next action to be -performed by returning a ``importer.Action`` value. It may also return a -``autotag.Proposal`` value to update the set of current proposals to be -considered. diff --git a/docs/dev/plugins/autotagger.rst b/docs/dev/plugins/autotagger.rst new file mode 100644 index 000000000..8b6df6fb5 --- /dev/null +++ b/docs/dev/plugins/autotagger.rst @@ -0,0 +1,107 @@ +Extending the Autotagger +======================== + +.. currentmodule:: beets.metadata_plugins + +Beets supports **metadata source plugins**, which allow it to fetch and match +metadata from external services (such as Spotify, Discogs, or Deezer). This +guide explains how to build your own metadata source plugin by extending the +:py:class:`MetadataSourcePlugin`. + +These plugins integrate directly with the autotagger, providing candidate +metadata during lookups. To implement one, you must subclass +:py:class:`MetadataSourcePlugin` and implement its abstract methods. + +Overview +-------- + +Creating a metadata source plugin is very similar to writing a standard plugin +(see :ref:`basic-plugin-setup`). The main difference is that your plugin must: + +1. Subclass :py:class:`MetadataSourcePlugin`. +2. Implement all required abstract methods. + +Here`s a minimal example: + +.. code-block:: python + + # beetsplug/myawesomeplugin.py + from typing import Sequence + from beets.autotag.hooks import Item + from beets.metadata_plugin import MetadataSourcePlugin + + + class MyAwesomePlugin(MetadataSourcePlugin): + + def candidates( + self, + items: Sequence[Item], + artist: str, + album: str, + va_likely: bool, + ): ... + + def item_candidates(self, item: Item, artist: str, title: str): ... + + def track_for_id(self, track_id: str): ... + + def album_for_id(self, album_id: str): ... + +Each metadata source plugin automatically gets a unique identifier. You can +access this identifier using the :py:meth:`~MetadataSourcePlugin.data_source` +class property to tell plugins apart. + +Metadata lookup +--------------- + +When beets runs the autotagger, it queries **all enabled metadata source +plugins** for potential matches: + +- For **albums**, it calls :py:meth:`~MetadataSourcePlugin.candidates`. +- For **singletons**, it calls :py:meth:`~MetadataSourcePlugin.item_candidates`. + +The results are combined and scored. By default, candidate ranking is handled +automatically by the beets core, but you can customize weighting by overriding: + +- :py:meth:`~MetadataSourcePlugin.album_distance` +- :py:meth:`~MetadataSourcePlugin.track_distance` + +This is optional, if not overridden, both methods return a constant distance of +`0.5`. + +ID-based lookups +---------------- + +Your plugin must also define: + +- :py:meth:`~MetadataSourcePlugin.album_for_id` — fetch album metadata by ID. +- :py:meth:`~MetadataSourcePlugin.track_for_id` — fetch track metadata by ID. + +IDs are expected to be strings. If your source uses specific formats, consider +contributing an extractor regex to the core module: +:py:mod:`beets.util.id_extractors`. 
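As a rough sketch (not a canonical implementation), the ID-based hooks for the hypothetical ``MyAwesomePlugin`` above might look like the following. ``_fetch_album`` and ``_fetch_track`` stand in for whatever client calls your service provides, and the ``AlbumInfo``/``TrackInfo`` fields used are the standard ones from :py:mod:`beets.autotag.hooks`:

.. code-block:: python

    from __future__ import annotations

    from beets.autotag.hooks import AlbumInfo, TrackInfo
    from beets.metadata_plugins import MetadataSourcePlugin


    class MyAwesomePlugin(MetadataSourcePlugin):
        # candidates() and item_candidates() omitted for brevity.

        def album_for_id(self, album_id: str) -> AlbumInfo | None:
            data = self._fetch_album(album_id)  # hypothetical API helper
            if data is None:
                return None
            tracks = [
                TrackInfo(title=t["title"], track_id=t["id"], index=i)
                for i, t in enumerate(data["tracks"], start=1)
            ]
            return AlbumInfo(
                tracks=tracks,
                album=data["title"],
                album_id=album_id,
                artist=data["artist"],
                data_source=self.data_source,
            )

        def track_for_id(self, track_id: str) -> TrackInfo | None:
            data = self._fetch_track(track_id)  # hypothetical API helper
            if data is None:
                return None
            return TrackInfo(
                title=data["title"],
                track_id=track_id,
                artist=data["artist"],
                data_source=self.data_source,
            )

Returning ``None`` when the service has no match lets the autotagger fall back to other metadata sources; ``self.data_source`` uses the class property described above so candidates are attributed to your plugin.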
+ +Best practices +-------------- + +Beets already ships with several metadata source plugins. Studying these +implementations can help you follow conventions and avoid pitfalls. Good +starting points include: + +- ``spotify`` +- ``deezer`` +- ``discogs`` + +Migration guidance +------------------ + +Older metadata plugins that extend |BeetsPlugin| should be migrated to +:py:class:`MetadataSourcePlugin`. Legacy support will be removed in **beets +v3.0.0**. + +.. seealso:: + + - :py:mod:`beets.autotag` + - :py:mod:`beets.metadata_plugins` + - :ref:`autotagger_extensions` + - :ref:`using-the-auto-tagger` diff --git a/docs/dev/plugins/commands.rst b/docs/dev/plugins/commands.rst new file mode 100644 index 000000000..f39578f11 --- /dev/null +++ b/docs/dev/plugins/commands.rst @@ -0,0 +1,54 @@ +.. _add_subcommands: + +Add Commands to the CLI +======================= + +Plugins can add new subcommands to the ``beet`` command-line interface. Define +the plugin class' ``commands()`` method to return a list of ``Subcommand`` +objects. (The ``Subcommand`` class is defined in the ``beets.ui`` module.) +Here's an example plugin that adds a simple command: + +.. code-block:: python + + from beets.plugins import BeetsPlugin + from beets.ui import Subcommand + + my_super_command = Subcommand("super", help="do something super") + + + def say_hi(lib, opts, args): + print("Hello everybody! I'm a plugin!") + + + my_super_command.func = say_hi + + + class SuperPlug(BeetsPlugin): + def commands(self): + return [my_super_command] + +To make a subcommand, invoke the constructor like so: ``Subcommand(name, parser, +help, aliases)``. The ``name`` parameter is the only required one and should +just be the name of your command. ``parser`` can be an `OptionParser instance`_, +but it defaults to an empty parser (you can extend it later). ``help`` is a +description of your command, and ``aliases`` is a list of shorthand versions of +your command name. + +.. _optionparser instance: https://docs.python.org/library/optparse.html + +You'll need to add a function to your command by saying ``mycommand.func = +myfunction``. This function should take the following parameters: ``lib`` (a +beets ``Library`` object) and ``opts`` and ``args`` (command-line options and +arguments as returned by OptionParser.parse_args_). + +.. _optionparser.parse_args: https://docs.python.org/library/optparse.html#parsing-arguments + +The function should use any of the utility functions defined in ``beets.ui``. +Try running ``pydoc beets.ui`` to see what's available. + +You can add command-line options to your new command using the ``parser`` member +of the ``Subcommand`` class, which is a ``CommonOptionsParser`` instance. Just +use it like you would a normal ``OptionParser`` in an independent script. Note +that it offers several methods to add common options: ``--album``, ``--path`` +and ``--format``. This feature is versatile and extensively documented, try +``pydoc beets.ui.CommonOptionsParser`` for more information. diff --git a/docs/dev/plugins/events.rst b/docs/dev/plugins/events.rst new file mode 100644 index 000000000..68773db3b --- /dev/null +++ b/docs/dev/plugins/events.rst @@ -0,0 +1,199 @@ +.. _plugin_events: + +Listen for Events +================= + +.. currentmodule:: beets.plugins + +Event handlers allow plugins to hook into whenever something happens in beets' +operations. For instance, a plugin could write a log message every time an album +is successfully autotagged or update MPD's index whenever the database is +changed. 
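+
+For instance, the album-logging idea above might look roughly like this (a
+sketch only: the plugin name is made up, the registration mechanism is
+explained just below, and the ``album_imported`` event and its parameters are
+listed in the table further down):
+
+.. code-block:: python
+
+    from beets.plugins import BeetsPlugin
+
+
+    class AlbumLoggerPlugin(BeetsPlugin):
+        def __init__(self):
+            super().__init__()
+            self.register_listener("album_imported", self.album_imported)
+
+        def album_imported(self, lib, album):
+            # The event passes the Library and the newly added Album.
+            self._log.info("imported {0.albumartist} - {0.album}", album)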
+ +You can "listen" for events using :py:meth:`BeetsPlugin.register_listener`. +Here's an example: + +.. code-block:: python + + from beets.plugins import BeetsPlugin + + + def loaded(): + print("Plugin loaded!") + + + class SomePlugin(BeetsPlugin): + def __init__(self): + super().__init__() + self.register_listener("pluginload", loaded) + +Note that if you want to access an attribute of your plugin (e.g. ``config`` or +``log``) you'll have to define a method and not a function. Here is the usual +registration process in this case: + +.. code-block:: python + + from beets.plugins import BeetsPlugin + + + class SomePlugin(BeetsPlugin): + def __init__(self): + super().__init__() + self.register_listener("pluginload", self.loaded) + + def loaded(self): + self._log.info("Plugin loaded!") + +.. rubric:: Plugin Events + +``pluginload`` + :Parameters: (none) + :Description: Called after all plugins have been loaded after the ``beet`` + command starts. + +``import`` + :Parameters: ``lib`` (|Library|), ``paths`` (list of path strings) + :Description: Called after the ``import`` command finishes. + +``album_imported`` + :Parameters: ``lib`` (|Library|), ``album`` (|Album|) + :Description: Called every time the importer finishes adding an album to the + library. + +``album_removed`` + :Parameters: ``lib`` (|Library|), ``album`` (|Album|) + :Description: Called every time an album is removed from the library (even + when its files are not deleted from disk). + +``item_copied`` + :Parameters: ``item`` (|Item|), ``source`` (path), ``destination`` (path) + :Description: Called whenever an item file is copied. + +``item_imported`` + :Parameters: ``lib`` (|Library|), ``item`` (|Item|) + :Description: Called every time the importer adds a singleton to the library + (not called for full-album imports). + +``before_item_imported`` + :Parameters: ``item`` (|Item|), ``source`` (path), ``destination`` (path) + :Description: Called with an ``Item`` object immediately before it is + imported. + +``before_item_moved`` + :Parameters: ``item`` (|Item|), ``source`` (path), ``destination`` (path) + :Description: Called with an ``Item`` object immediately before its file is + moved. + +``item_moved`` + :Parameters: ``item`` (|Item|), ``source`` (path), ``destination`` (path) + :Description: Called with an ``Item`` object whenever its file is moved. + +``item_linked`` + :Parameters: ``item`` (|Item|), ``source`` (path), ``destination`` (path) + :Description: Called with an ``Item`` object whenever a symlink is created + for a file. + +``item_hardlinked`` + :Parameters: ``item`` (|Item|), ``source`` (path), ``destination`` (path) + :Description: Called with an ``Item`` object whenever a hardlink is created + for a file. + +``item_reflinked`` + :Parameters: ``item`` (|Item|), ``source`` (path), ``destination`` (path) + :Description: Called with an ``Item`` object whenever a reflink is created + for a file. + +``item_removed`` + :Parameters: ``item`` (|Item|) + :Description: Called with an ``Item`` object every time an item (singleton + or part of an album) is removed from the library (even when its file is + not deleted from disk). + +``write`` + :Parameters: ``item`` (|Item|), ``path`` (path), ``tags`` (dict) + :Description: Called just before a file's metadata is written to disk. + Handlers may modify ``tags`` or raise ``library.FileOperationError`` to + abort. + +``after_write`` + :Parameters: ``item`` (|Item|) + :Description: Called after a file's metadata is written to disk. 
+ +``import_task_created`` + :Parameters: ``task`` (|ImportTask|), ``session`` (|ImportSession|) + :Description: Called immediately after an import task is initialized. May + return a list (possibly empty) of replacement tasks. + +``import_task_start`` + :Parameters: ``task`` (|ImportTask|), ``session`` (|ImportSession|) + :Description: Called before an import task begins processing. + +``import_task_apply`` + :Parameters: ``task`` (|ImportTask|), ``session`` (|ImportSession|) + :Description: Called after metadata changes have been applied in an import + task (on the UI thread; keep fast). Prefer a pipeline stage otherwise + (see :ref:`plugin-stage`). + +``import_task_before_choice`` + :Parameters: ``task`` (|ImportTask|), ``session`` (|ImportSession|) + :Description: Called after candidate search and before deciding how to + import. May return an importer action (only one handler may return + non-None). + +``import_task_choice`` + :Parameters: ``task`` (|ImportTask|), ``session`` (|ImportSession|) + :Description: Called after a decision has been made about an import task. + Use ``task.choice_flag`` to inspect or change the action. + +``import_task_files`` + :Parameters: ``task`` (|ImportTask|), ``session`` (|ImportSession|) + :Description: Called after filesystem manipulation (copy/move/write) for an + import task. + +``library_opened`` + :Parameters: ``lib`` (|Library|) + :Description: Called after beets starts and initializes the main Library + object. + +``database_change`` + :Parameters: ``lib`` (|Library|), ``model`` (|Model|) + :Description: A modification has been made to the library database (may not + yet be committed). + +``cli_exit`` + :Parameters: ``lib`` (|Library|) + :Description: Called just before the ``beet`` command-line program exits. + +``import_begin`` + :Parameters: ``session`` (|ImportSession|) + :Description: Called just before a ``beet import`` session starts. + +``trackinfo_received`` + :Parameters: ``info`` (|TrackInfo|) + :Description: Called after metadata for a track is fetched (e.g., from + MusicBrainz). Handlers can modify the tags seen by later pipeline stages + or adjustments (e.g., ``mbsync``). + +``albuminfo_received`` + :Parameters: ``info`` (|AlbumInfo|) + :Description: Like ``trackinfo_received`` but for album-level metadata. + +``before_choose_candidate`` + :Parameters: ``task`` (|ImportTask|), ``session`` (|ImportSession|) + :Description: Called before prompting the user during interactive import. + May return a list of ``PromptChoices`` to append to the prompt (see + :ref:`append_prompt_choices`). + +``mb_track_extract`` + :Parameters: ``data`` (dict) + :Description: Called after metadata is obtained from MusicBrainz for a + track. Must return a (possibly empty) dict of additional ``field: + value`` pairs to apply (overwriting existing fields). + +``mb_album_extract`` + :Parameters: ``data`` (dict) + :Description: Like ``mb_track_extract`` but for album tags. Overwrites tags + set at the track level with the same field. + +The included ``mpdupdate`` plugin provides an example use case for event +listeners. diff --git a/docs/dev/plugins/index.rst b/docs/dev/plugins/index.rst new file mode 100644 index 000000000..a8feb32d9 --- /dev/null +++ b/docs/dev/plugins/index.rst @@ -0,0 +1,109 @@ +Plugin Development +================== + +Beets plugins are Python modules or packages that extend the core functionality +of beets. The plugin system is designed to be flexible, allowing developers to +add virtually any type of features to beets. 
+ +For instance you can create plugins that add new commands to the command-line +interface, listen for events in the beets lifecycle or extend the autotagger +with new metadata sources. + +.. _basic-plugin-setup: + +Basic Plugin Setup +------------------ + +A beets plugin is just a Python module or package inside the ``beetsplug`` +namespace [1]_ package. To create the basic plugin layout, create a directory +called ``beetsplug`` and add either your plugin module: + +.. code-block:: shell + + beetsplug/ + └── myawesomeplugin.py + +or your plugin subpackage + +.. code-block:: shell + + beetsplug/ + └── myawesomeplugin/ + ├── __init__.py + └── myawesomeplugin.py + +.. attention:: + + You do not need to add an ``__init__.py`` file to the ``beetsplug`` + directory. Python treats your plugin as a namespace package automatically, + thus we do not depend on ``pkgutil``-based setup in the ``__init__.py`` file + anymore. + +The meat of your plugin goes in ``myawesomeplugin.py``. Every plugin has to +extend the |BeetsPlugin| abstract base class [2]_ . For instance, a minimal +plugin without any functionality would look like this: + +.. code-block:: python + + # beetsplug/myawesomeplugin.py + from beets.plugins import BeetsPlugin + + + class MyAwesomePlugin(BeetsPlugin): + pass + +.. attention:: + + If your plugin is composed of intermediate |BeetsPlugin| subclasses, make + sure that your plugin is defined *last* in the namespace. We only load the + last subclass of |BeetsPlugin| we find in your plugin namespace. + +To use your new plugin, you need to package [3]_ your plugin and install it into +your ``beets`` (virtual) environment. To enable your plugin, add it it to the +beets configuration + +.. code-block:: yaml + + # config.yaml + plugins: + - myawesomeplugin + +and you're good to go! + +.. [1] Check out `this article`_ and `this Stack Overflow question`_ if you + haven't heard about namespace packages. + +.. [2] Abstract base classes allow us to define a contract which any plugin must + follow. This is a common paradigm in object-oriented programming, and it + helps to ensure that plugins are implemented in a consistent way. For more + information, see for example pep-3119_. + +.. [3] There are a variety of packaging tools available for python, for example + you can use poetry_, setuptools_ or hatchling_. + +.. _hatchling: https://hatch.pypa.io/latest/config/build/#build-system + +.. _pep-3119: https://peps.python.org/pep-3119/#rationale + +.. _poetry: https://python-poetry.org/docs/pyproject/#packages + +.. _setuptools: https://setuptools.pypa.io/en/latest/userguide/package_discovery.html#finding-simple-packages + +.. _this article: https://realpython.com/python-namespace-package/#setting-up-some-namespace-packages + +.. _this stack overflow question: https://stackoverflow.com/a/27586272/9582674 + +More information +---------------- + +For more information on writing plugins, feel free to check out the following +resources: + +.. toctree:: + :maxdepth: 3 + :includehidden: + + commands + events + autotagger + other/index diff --git a/docs/dev/plugins/other/config.rst b/docs/dev/plugins/other/config.rst new file mode 100644 index 000000000..7c529af93 --- /dev/null +++ b/docs/dev/plugins/other/config.rst @@ -0,0 +1,36 @@ +Read Configuration Options +========================== + +Plugins can configure themselves using the ``config.yaml`` file. You can read +configuration values in two ways. The first is to use ``self.config`` within +your plugin class. 
This gives you a view onto the configuration values in a +section with the same name as your plugin's module. For example, if your plugin +is in ``greatplugin.py``, then ``self.config`` will refer to options under the +``greatplugin:`` section of the config file. + +For example, if you have a configuration value called "foo", then users can put +this in their ``config.yaml``: + +:: + + greatplugin: + foo: bar + +To access this value, say ``self.config['foo'].get()`` at any point in your +plugin's code. The ``self.config`` object is a *view* as defined by the Confuse_ +library. + +.. _confuse: https://confuse.readthedocs.io/en/latest/ + +If you want to access configuration values *outside* of your plugin's section, +import the ``config`` object from the ``beets`` module. That is, just put ``from +beets import config`` at the top of your plugin and access values from there. + +If your plugin provides configuration values for sensitive data (e.g., +passwords, API keys, ...), you should add these to the config so they can be +redacted automatically when users dump their config. This can be done by setting +each value's ``redact`` flag, like so: + +:: + + self.config['password'].redact = True diff --git a/docs/dev/plugins/other/fields.rst b/docs/dev/plugins/other/fields.rst new file mode 100644 index 000000000..6ee570043 --- /dev/null +++ b/docs/dev/plugins/other/fields.rst @@ -0,0 +1,35 @@ +Flexible Field Types +==================== + +If your plugin uses flexible fields to store numbers or other non-string values, +you can specify the types of those fields. A rating plugin, for example, might +want to declare that the ``rating`` field should have an integer type: + +.. code-block:: python + + from beets.plugins import BeetsPlugin + from beets.dbcore import types + + + class RatingPlugin(BeetsPlugin): + item_types = {"rating": types.INTEGER} + + @property + def album_types(self): + return {"rating": types.INTEGER} + +A plugin may define two attributes: ``item_types`` and ``album_types``. Each of +those attributes is a dictionary mapping a flexible field name to a type +instance. You can find the built-in types in the ``beets.dbcore.types`` and +``beets.library`` modules or implement your own type by inheriting from the +``Type`` class. + +Specifying types has several advantages: + +- Code that accesses the field like ``item['my_field']`` gets the right type + (instead of just a string). +- You can use advanced queries (like :ref:`ranges <numericquery>`) from the + command line. +- User input for flexible fields may be validated and converted. +- Items missing the given field can use an appropriate null value for querying + and sorting purposes. diff --git a/docs/dev/plugins/other/import.rst b/docs/dev/plugins/other/import.rst new file mode 100644 index 000000000..706a520b7 --- /dev/null +++ b/docs/dev/plugins/other/import.rst @@ -0,0 +1,88 @@ +.. _plugin-stage: + +Add Import Pipeline Stages +========================== + +Many plugins need to add high-latency operations to the import workflow. For +example, a plugin that fetches lyrics from the Web would, ideally, not block the +progress of the rest of the importer. Beets allows plugins to add stages to the +parallel import pipeline. + +Each stage is run in its own thread. Plugin stages run after metadata changes +have been applied to a unit of music (album or track) and before file +manipulation has occurred (copying and moving files, writing tags to disk). 
+Multiple stages run in parallel but each stage processes only one task at a time +and each task is processed by only one stage at a time. + +Plugins provide stages as functions that take two arguments: ``config`` and +``task``, which are ``ImportSession`` and ``ImportTask`` objects (both defined +in ``beets.importer``). Add such a function to the plugin's ``import_stages`` +field to register it: + +.. code-block:: python + + from beets.importer import ImportSession, ImportTask + from beets.plugins import BeetsPlugin + + + class ExamplePlugin(BeetsPlugin): + + def __init__(self): + super().__init__() + self.import_stages = [self.stage] + + def stage(self, session: ImportSession, task: ImportTask): + print("Importing something!") + +It is also possible to request your function to run early in the pipeline by +adding the function to the plugin's ``early_import_stages`` field instead: + +.. code-block:: python + + self.early_import_stages = [self.stage] + +.. _extend-query: + +Extend the Query Syntax +----------------------- + +You can add new kinds of queries to beets' :doc:`query syntax +</reference/query>`. There are two ways to add custom queries: using a prefix +and using a name. Prefix-based query extension can apply to *any* field, while +named queries are not associated with any field. For example, beets already +supports regular expression queries, which are indicated by a colon +prefix---plugins can do the same. + +For either kind of query extension, define a subclass of the ``Query`` type from +the ``beets.dbcore.query`` module. Then: + +- To define a prefix-based query, define a ``queries`` method in your plugin + class. Return from this method a dictionary mapping prefix strings to query + classes. +- To define a named query, defined dictionaries named either ``item_queries`` or + ``album_queries``. These should map names to query types. So if you use ``{ + "foo": FooQuery }``, then the query ``foo:bar`` will construct a query like + ``FooQuery("bar")``. + +For prefix-based queries, you will want to extend ``FieldQuery``, which +implements string comparisons on fields. To use it, create a subclass inheriting +from that class and override the ``value_match`` class method. (Remember the +``@classmethod`` decorator!) The following example plugin declares a query using +the ``@`` prefix to delimit exact string matches. The plugin will be used if we +issue a command like ``beet ls @something`` or ``beet ls artist:@something``: + +.. code-block:: python + + from beets.plugins import BeetsPlugin + from beets.dbcore import FieldQuery + + + class ExactMatchQuery(FieldQuery): + @classmethod + def value_match(self, pattern, val): + return pattern == val + + + class ExactMatchPlugin(BeetsPlugin): + def queries(self): + return {"@": ExactMatchQuery} diff --git a/docs/dev/plugins/other/index.rst b/docs/dev/plugins/other/index.rst new file mode 100644 index 000000000..595139042 --- /dev/null +++ b/docs/dev/plugins/other/index.rst @@ -0,0 +1,16 @@ +Further Reading +=============== + +For more information on writing plugins, feel free to check out the following +resources: + +.. toctree:: + :maxdepth: 2 + + config + templates + mediafile + import + fields + logging + prompts diff --git a/docs/dev/plugins/other/logging.rst b/docs/dev/plugins/other/logging.rst new file mode 100644 index 000000000..a26f0c4c0 --- /dev/null +++ b/docs/dev/plugins/other/logging.rst @@ -0,0 +1,38 @@ +.. 
_plugin-logging: + +Logging +======= + +Each plugin object has a ``_log`` attribute, which is a ``Logger`` from the +`standard Python logging module`_. The logger is set up to use `PEP 3101`_, +str.format-style string formatting. So you can write logging calls like this: + +.. code-block:: python + + self._log.debug("Processing {0.title} by {0.artist}", item) + +.. _pep 3101: https://www.python.org/dev/peps/pep-3101/ + +.. _standard python logging module: https://docs.python.org/3/library/logging.html + +When beets is in verbose mode, plugin messages are prefixed with the plugin name +to make them easier to see. + +Which messages will be logged depends on the logging level and the action +performed: + +- Inside import stages and event handlers, the default is ``WARNING`` messages + and above. +- Everywhere else, the default is ``INFO`` or above. + +The verbosity can be increased with ``--verbose`` (``-v``) flags: each flag +lowers the level by a notch. That means that, with a single ``-v`` flag, event +handlers won't have their ``DEBUG`` messages displayed, but command functions +(for example) will. With ``-vv`` on the command line, ``DEBUG`` messages will be +displayed everywhere. + +This addresses a common pattern where plugins need to use the same code for a +command and an import stage, but the command needs to print more messages than +the import stage. (For example, you'll want to log "found lyrics for this song" +when your plugin is run explicitly as a command, but you don't want to noisily interrupt +the importer interface when running automatically.) diff --git a/docs/dev/plugins/other/mediafile.rst b/docs/dev/plugins/other/mediafile.rst new file mode 100644 index 000000000..8fa22ceca --- /dev/null +++ b/docs/dev/plugins/other/mediafile.rst @@ -0,0 +1,32 @@ +Extend MediaFile +================ + +MediaFile_ is the file tag abstraction layer that beets uses to make +cross-format metadata manipulation simple. Plugins can add fields to MediaFile +to extend the kinds of metadata that they can easily manage. + +The ``MediaFile`` class uses ``MediaField`` descriptors to provide access to +file tags. If you have created a descriptor, you can add it through your plugin's +:py:meth:`beets.plugins.BeetsPlugin.add_media_field` method. + +.. _mediafile: https://mediafile.readthedocs.io/en/latest/ + +Here's an example plugin that provides a meaningless new field "foo": + +.. code-block:: python + + import mediafile + from beets.library import Item + from beets.plugins import BeetsPlugin + + + class FooPlugin(BeetsPlugin): + def __init__(self): + super().__init__() + field = mediafile.MediaField( + mediafile.MP3DescStorageStyle("foo"), mediafile.StorageStyle("foo") + ) + self.add_media_field("foo", field) + + + FooPlugin() + item = Item.from_path("/path/to/foo/tag.mp3") + assert item["foo"] == "spam" + + item["foo"] = "ham" + item.write() + # The "foo" tag of the file is now "ham" diff --git a/docs/dev/plugins/other/prompts.rst b/docs/dev/plugins/other/prompts.rst new file mode 100644 index 000000000..f734f0de3 --- /dev/null +++ b/docs/dev/plugins/other/prompts.rst @@ -0,0 +1,69 @@ +.. _append_prompt_choices: + +Append Prompt Choices +===================== + +Plugins can also append choices to the prompt presented to the user during an +import session. + +To do so, add a listener for the ``before_choose_candidate`` event, and return a +list of ``PromptChoices`` that represent the additional choices that your plugin +shall expose to the user: + +.. 
code-block:: python + + from beets.plugins import BeetsPlugin + from beets.ui.commands import PromptChoice + + + class ExamplePlugin(BeetsPlugin): + def __init__(self): + super().__init__() + self.register_listener( + "before_choose_candidate", self.before_choose_candidate_event + ) + + def before_choose_candidate_event(self, session, task): + return [ + PromptChoice("p", "Print foo", self.foo), + PromptChoice("d", "Do bar", self.bar), + ] + + def foo(self, session, task): + print('User has chosen "Print foo"!') + + def bar(self, session, task): + print('User has chosen "Do bar"!') + +The previous example modifies the standard prompt: + +.. code-block:: shell + + # selection (default 1), Skip, Use as-is, as Tracks, Group albums, + Enter search, enter Id, aBort? + +by appending two additional options (``Print foo`` and ``Do bar``): + +.. code-block:: shell + + # selection (default 1), Skip, Use as-is, as Tracks, Group albums, + Enter search, enter Id, aBort, Print foo, Do bar? + +If the user selects a choice, the ``callback`` attribute of the corresponding +``PromptChoice`` will be called. It is the responsibility of the plugin to check +for the status of the import session and decide the choices to be appended: for +example, if a particular choice should only be presented if the album has no +candidates, the relevant checks against ``task.candidates`` should be performed +inside the plugin's ``before_choose_candidate_event`` accordingly. + +Please make sure that the short letter for each of the choices provided by the +plugin is not already in use: the importer will emit a warning and discard all +but one of the choices using the same letter, giving priority to the core +importer prompt choices. As a reference, the following characters are used by +the choices on the core importer prompt, and hence should not be used: ``a``, +``s``, ``u``, ``t``, ``g``, ``e``, ``i``, ``b``. + +Additionally, the callback function can optionally specify the next action to be +performed by returning a ``importer.Action`` value. It may also return a +``autotag.Proposal`` value to update the set of current proposals to be +considered. diff --git a/docs/dev/plugins/other/templates.rst b/docs/dev/plugins/other/templates.rst new file mode 100644 index 000000000..89509dcb7 --- /dev/null +++ b/docs/dev/plugins/other/templates.rst @@ -0,0 +1,57 @@ +Add Path Format Functions and Fields +==================================== + +Beets supports *function calls* in its path format syntax (see +:doc:`/reference/pathformat`). Beets includes a few built-in functions, but +plugins can register new functions by adding them to the ``template_funcs`` +dictionary. + +Here's an example: + +.. code-block:: python + + class MyPlugin(BeetsPlugin): + def __init__(self): + super().__init__() + self.template_funcs["initial"] = _tmpl_initial + + + def _tmpl_initial(text: str) -> str: + if text: + return text[0].upper() + else: + return "" + +This plugin provides a function ``%initial`` to path templates where +``%initial{$artist}`` expands to the artist's initial (its capitalized first +character). + +Plugins can also add template *fields*, which are computed values referenced as +``$name`` in templates. To add a new field, add a function that takes an +``Item`` object to the ``template_fields`` dictionary on the plugin object. +Here's an example that adds a ``$disc_and_track`` field: + +.. 
code-block:: python + + class MyPlugin(BeetsPlugin): + def __init__(self): + super().__init__() + self.template_fields["disc_and_track"] = _tmpl_disc_and_track + + + def _tmpl_disc_and_track(item: Item) -> str: + """Expand to the disc number and track number if this is a + multi-disc release. Otherwise, just expands to the track + number. + """ + if item.disctotal > 1: + return "%02i.%02i" % (item.disc, item.track) + else: + return "%02i" % (item.track) + +With this plugin enabled, templates can reference ``$disc_and_track`` as they +can any standard metadata field. + +This field works for *item* templates. Similarly, you can register *album* +template fields by adding a function accepting an ``Album`` argument to the +``album_template_fields`` dict. diff --git a/docs/extensions/conf.py b/docs/extensions/conf.py new file mode 100644 index 000000000..308d28be2 --- /dev/null +++ b/docs/extensions/conf.py @@ -0,0 +1,142 @@ +"""Sphinx extension for simple configuration value documentation.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, ClassVar + +from docutils import nodes +from docutils.parsers.rst import directives +from sphinx import addnodes +from sphinx.directives import ObjectDescription +from sphinx.domains import Domain, ObjType +from sphinx.roles import XRefRole +from sphinx.util.nodes import make_refnode + +if TYPE_CHECKING: + from collections.abc import Iterable, Sequence + + from docutils.nodes import Element + from docutils.parsers.rst.states import Inliner + from sphinx.addnodes import desc_signature, pending_xref + from sphinx.application import Sphinx + from sphinx.builders import Builder + from sphinx.environment import BuildEnvironment + from sphinx.util.typing import ExtensionMetadata, OptionSpec + + +class Conf(ObjectDescription[str]): + """Directive for documenting a single configuration value.""" + + option_spec: ClassVar[OptionSpec] = { + "default": directives.unchanged, + } + + def handle_signature(self, sig: str, signode: desc_signature) -> str: + """Process the directive signature (the config name).""" + signode += addnodes.desc_name(sig, sig) + + # Add default value if provided + if "default" in self.options: + signode += nodes.Text(" ") + default_container = nodes.inline("", "") + default_container += nodes.Text("(default: ") + default_container += nodes.literal("", self.options["default"]) + default_container += nodes.Text(")") + signode += default_container + + return sig + + def add_target_and_index( + self, name: str, sig: str, signode: desc_signature + ) -> None: + """Add cross-reference target and index entry.""" + target = f"conf-{name}" + if target not in self.state.document.ids: + signode["ids"].append(target) + self.state.document.note_explicit_target(signode) + + # A unique full name which includes the document name + index_name = f"{self.env.docname.replace('/', '.')}:{name}" + # Register with the conf domain + domain = self.env.get_domain("conf") + domain.data["objects"][index_name] = (self.env.docname, target) + + # Add to index + self.indexnode["entries"].append( + ("single", f"{name} (configuration value)", target, "", None) + ) + + +class ConfDomain(Domain): + """Domain for simple configuration values.""" + + name = "conf" + label = "Simple Configuration" + object_types = {"conf": ObjType("conf", "conf")} + directives = {"conf": Conf} + roles = {"conf": XRefRole()} + initial_data: dict[str, Any] = {"objects": {}} + + def get_objects(self) -> Iterable[tuple[str, str, str, str, str, int]]: + """Return an iterable 
of object tuples for the inventory.""" + for name, (docname, targetname) in self.data["objects"].items(): + # Remove the document name prefix for display + display_name = name.split(":")[-1] + yield (name, display_name, "conf", docname, targetname, 1) + + def resolve_xref( + self, + env: BuildEnvironment, + fromdocname: str, + builder: Builder, + typ: str, + target: str, + node: pending_xref, + contnode: Element, + ) -> Element | None: + if entry := self.data["objects"].get(target): + docname, targetid = entry + return make_refnode( + builder, fromdocname, docname, targetid, contnode + ) + + return None + + +# sphinx.util.typing.RoleFunction +def conf_role( + name: str, + rawtext: str, + text: str, + lineno: int, + inliner: Inliner, + /, + options: dict[str, Any] | None = None, + content: Sequence[str] = (), +) -> tuple[list[nodes.Node], list[nodes.system_message]]: + """Role for referencing configuration values.""" + node = addnodes.pending_xref( + "", + refdomain="conf", + reftype="conf", + reftarget=text, + refwarn=True, + **(options or {}), + ) + node += nodes.literal(text, text.split(":")[-1]) + return [node], [] + + +def setup(app: Sphinx) -> ExtensionMetadata: + app.add_domain(ConfDomain) + + # register a top-level directive so users can use ".. conf:: ..." + app.add_directive("conf", Conf) + + # Register role with short name + app.add_role("conf", conf_role) + return { + "version": "0.1", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/docs/faq.rst b/docs/faq.rst index 718356e42..287dc88af 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -163,7 +163,7 @@ documentation </dev/index>` pages. .. _bugs: …report a bug in beets? -~~~~~~~~~~~~~~~~~~~~~~~ +----------------------- We use the `issue tracker`_ on GitHub where you can `open a new ticket`_. Please follow these guidelines when reporting an issue: @@ -171,7 +171,7 @@ follow these guidelines when reporting an issue: - Most importantly: if beets is crashing, please `include the traceback <https://imgur.com/jacoj>`__. Tracebacks can be more readable if you put them in a pastebin (e.g., `Gist <https://gist.github.com/>`__ or `Hastebin - <https://hastebin.com/>`__), especially when communicating over IRC or email. + <https://hastebin.com/>`__), especially when communicating over IRC. - Turn on beets' debug output (using the -v option: for example, ``beet -v import ...``) and include that with your bug report. Look through this verbose output for any red flags that might point to the problem. @@ -236,7 +236,9 @@ Why does beets… There are a number of possibilities: -- First, make sure the album is in `the MusicBrainz database +- First, make sure you have at least one autotagger extension/plugin enabled. + See :ref:`autotagger_extensions` for a list of valid plugins. +- Check that the album is in `the MusicBrainz database <https://musicbrainz.org/>`__. You can search on their site to make sure it's cataloged there. (If not, anyone can edit MusicBrainz---so consider adding the data yourself.) diff --git a/docs/guides/index.rst b/docs/guides/index.rst index 08685abba..0695e9ff8 100644 --- a/docs/guides/index.rst +++ b/docs/guides/index.rst @@ -9,5 +9,6 @@ guide. :maxdepth: 1 main + installation tagger advanced diff --git a/docs/guides/installation.rst b/docs/guides/installation.rst new file mode 100644 index 000000000..648a72d0b --- /dev/null +++ b/docs/guides/installation.rst @@ -0,0 +1,179 @@ +Installation +============ + +Beets requires `Python 3.9 or later`_. 
You can install it with pipx_, pip_, or your operating system's package manager. + +.. _python 3.9 or later: https://python.org/download/ + +Using ``pipx`` or ``pip`` +------------------------- + +We recommend installing with pipx_ as it isolates beets and its dependencies +from your system Python and other Python packages. This helps avoid dependency +conflicts and keeps your system clean. + +.. <!-- start-quick-install --> + +.. tab-set:: + + .. tab-item:: pipx + + .. code-block:: console + + pipx install beets + + .. tab-item:: pip + + .. code-block:: console + + pip install beets + + .. tab-item:: pip (user install) + + .. code-block:: console + + pip install --user beets + +.. <!-- end-quick-install --> + +If you don't have pipx_ installed, you can follow the instructions on the `pipx +installation page`_ to get it set up. + +.. _pip: https://pip.pypa.io/en/ + +.. _pipx: https://pipx.pypa.io/stable + +.. _pipx installation page: https://pipx.pypa.io/stable/installation/ + +Using a Package Manager +----------------------- + +Depending on your operating system, you may be able to install beets using a +package manager. Here are some common options: + +.. attention:: + + Package manager installations may not provide the latest version of beets. + + Release cycles for package managers vary, and they may not always have the + most recent version of beets. If you want the latest features and fixes, + consider using pipx_ or pip_ as described above. + + Additionally, installing external beets plugins may be surprisingly + difficult when using a package manager. + +- On **Debian or Ubuntu**, depending on the version, beets is available as an + official package (`Debian details`_, `Ubuntu details`_), so try typing: + ``apt-get install beets``. But the version in the repositories might lag + behind, so make sure you read the right version of these docs. If you want the + latest version, you can get everything you need to install with pip as + described above by running: ``apt-get install python-dev python-pip`` +- On **Arch Linux**, `beets is in [extra] <arch extra_>`_, so just run ``pacman + -S beets``. (There's also a bleeding-edge `dev package <aur_>`_ in the AUR, + which will probably set your computer on fire.) +- On **Alpine Linux**, `beets is in the community repository <alpine package_>`_ + and can be installed with ``apk add beets``. +- On **Void Linux**, `beets is in the official repository <void package_>`_ and + can be installed with ``xbps-install -S beets``. +- For **Gentoo Linux**, beets is in Portage as ``media-sound/beets``. Just run + ``emerge beets`` to install. There are several USE flags available for + optional plugin dependencies. +- On **FreeBSD**, there's a `beets port <freebsd_>`_ at ``audio/beets``. +- On **OpenBSD**, there's a `beets port <openbsd_>`_ that can be installed with + ``pkg_add beets``. +- On **Fedora** 22 or later, there's a `DNF package`_ you can install with + ``sudo dnf install beets beets-plugins beets-doc``. +- On **Solus**, run ``eopkg install beets``. +- On **NixOS**, there's a `package <nixos_>`_ you can install with ``nix-env -i + beets``. +- Using **MacPorts**, run ``port install beets`` or ``port install beets-full`` + to include many third-party plugins. + +.. _alpine package: https://pkgs.alpinelinux.org/package/edge/community/x86_64/beets + +.. _arch extra: https://archlinux.org/packages/extra/any/beets/ + +.. _aur: https://aur.archlinux.org/packages/beets-git/ + +.. _debian details: https://tracker.debian.org/pkg/beets + +.. 
_dnf package: https://packages.fedoraproject.org/pkgs/beets/ + +.. _freebsd: http://portsmon.freebsd.org/portoverview.py?category=audio&portname=beets + +.. _nixos: https://github.com/NixOS/nixpkgs/tree/master/pkgs/tools/audio/beets + +.. _openbsd: http://openports.se/audio/beets + +.. _ubuntu details: https://launchpad.net/ubuntu/+source/beets + +.. _void package: https://github.com/void-linux/void-packages/tree/master/srcpkgs/beets + +Installation FAQ +---------------- + +MacOS Installation +~~~~~~~~~~~~~~~~~~ + +**Q: I'm getting permission errors on macOS. What should I do?** + +Due to System Integrity Protection on macOS 10.11+, you may need to install for +your user only: + +.. code-block:: console + + pip install --user beets + +You might need to also add ``~/Library/Python/3.x/bin`` to your ``$PATH``. + +Windows Installation +~~~~~~~~~~~~~~~~~~~~ + +**Q: What's the process for installing on Windows?** + +Installing beets on Windows can be tricky. Following these steps might help you +get it right: + +1. `Install Python`_ (check "Add Python to PATH" skip to 3) +2. Ensure Python is in your ``PATH`` (add if needed): + + - Settings → System → About → Advanced system settings → Environment + Variables + - Edit "PATH" and add: `;C:\Python39;C:\Python39\Scripts` + - *Guide: [Adding Python to + PATH](https://realpython.com/add-python-to-path/)* + +3. Now install beets by running: ``pip install beets`` +4. You're all set! Type ``beet version`` in a new command prompt to verify the + installation. + +**Bonus: Windows Context Menu Integration** + +Windows users may also want to install a context menu item for importing files +into beets. Download the beets.reg_ file and open it in a text file to make sure +the paths to Python match your system. Then double-click the file add the +necessary keys to your registry. You can then right-click a directory and choose +"Import with beets". + +.. _beets.reg: https://github.com/beetbox/beets/blob/master/extra/beets.reg + +.. _install pip: https://pip.pypa.io/en/stable/installing/ + +.. _install python: https://python.org/download/ + +ARM Installation +~~~~~~~~~~~~~~~~ + +**Q: Can I run beets on a Raspberry Pi or other ARM device?** + +Yes, but with some considerations: Beets on ARM devices is not recommended for +Linux novices. If you are comfortable with troubleshooting tools like ``pip``, +``make``, and binary dependencies (e.g. ``ffmpeg`` and ``ImageMagick``), you +will be fine. We have `notes for ARM`_ and an `older ARM reference`_. Beets is +generally developed on x86-64 based devices, and most plugins target that +platform as well. + +.. _notes for arm: https://github.com/beetbox/beets/discussions/4910 + +.. _older arm reference: https://discourse.beets.io/t/diary-of-beets-on-arm-odroid-hc4-armbian/1993 diff --git a/docs/guides/main.rst b/docs/guides/main.rst index 3e9c880ff..48b248927 100644 --- a/docs/guides/main.rst +++ b/docs/guides/main.rst @@ -1,319 +1,310 @@ Getting Started =============== -Welcome to beets_! This guide will help you begin using it to make your music -collection better. +Welcome to beets_! This guide will help get started with improving and +organizing your music collection. .. _beets: https://beets.io/ -Installing ----------- +Quick Installation +------------------ -You will need Python. Beets works on Python 3.8 or later. +Beets is distributed via PyPI_ and can be installed by most users with a single +command: -- **macOS** 11 (Big Sur) includes Python 3.8 out of the box. 
You can opt for a - more recent Python installing it via Homebrew_ (``brew install python3``). - There's also a MacPorts_ port. Run ``port install beets`` or ``port install - beets-full`` to include many third-party plugins. -- On **Debian or Ubuntu**, depending on the version, beets is available as an - official package (`Debian details`_, `Ubuntu details`_), so try typing: - ``apt-get install beets``. But the version in the repositories might lag - behind, so make sure you read the right version of these docs. If you want the - latest version, you can get everything you need to install with pip as - described below by running: ``apt-get install python-dev python-pip`` -- On **Arch Linux**, `beets is in [extra] <arch extra_>`_, so just run ``pacman - -S beets``. (There's also a bleeding-edge `dev package <aur_>`_ in the AUR, - which will probably set your computer on fire.) -- On **Alpine Linux**, `beets is in the community repository <alpine package_>`_ - and can be installed with ``apk add beets``. -- For **Gentoo Linux**, beets is in Portage as ``media-sound/beets``. Just run - ``emerge beets`` to install. There are several USE flags available for - optional plugin dependencies. -- On **FreeBSD**, there's a `beets port <freebsd_>`_ at ``audio/beets``. -- On **OpenBSD**, there's a `beets port <openbsd_>`_ can be installed with - ``pkg_add beets``. -- For **Slackware**, there's a SlackBuild_ available. -- On **Fedora** 22 or later, there's a `DNF package`_ you can install with - ``sudo dnf install beets beets-plugins beets-doc``. -- On **Solus**, run ``eopkg install beets``. -- On **NixOS**, there's a `package <nixos_>`_ you can install with ``nix-env -i - beets``. +.. include:: installation.rst + :start-after: <!-- start-quick-install --> + :end-before: <!-- end-quick-install --> -.. _alpine package: https://pkgs.alpinelinux.org/package/edge/community/x86_64/beets +.. admonition:: Need more installation options? -.. _arch extra: https://archlinux.org/packages/extra/any/beets/ + Having trouble with the commands above? Looking for package manager + instructions? See the :doc:`complete installation guide + </guides/installation>` for: -.. _aur: https://aur.archlinux.org/packages/beets-git/ + - Operating system specific instructions + - Package manager options + - Troubleshooting help -.. _debian details: https://tracker.debian.org/pkg/beets +.. _pypi: https://pypi.org/project/beets/ -.. _dnf package: https://packages.fedoraproject.org/pkgs/beets/ +Basic Configuration +------------------- -.. _freebsd: http://portsmon.freebsd.org/portoverview.py?category=audio&portname=beets +Before using beets, you'll need a configuration file. This YAML file tells beets +where to store your music and how to organize it. -.. _macports: https://www.macports.org +While beets is highly configurable, you only need a few basic settings to get +started. -.. _nixos: https://github.com/NixOS/nixpkgs/tree/master/pkgs/tools/audio/beets +1. **Open the config file:** + .. code-block:: console -.. _openbsd: http://openports.se/audio/beets + beet config -e -.. _slackbuild: https://slackbuilds.org/repository/14.2/multimedia/beets/ + This creates the file (if needed) and opens it in your default editor. + You can also find its location with ``beet config -p``. +2. **Add required settings:** + In the config file, set the ``directory`` option to the path where you + want beets to store your music files. Set the ``library`` option to the + path where you want beets to store its database file. -.. 
_ubuntu details: https://launchpad.net/ubuntu/+source/beets + .. code-block:: yaml -If you have pip_, just say ``pip install beets`` (or ``pip install --user -beets`` if you run into permissions problems). + directory: ~/music + library: ~/data/musiclibrary.db +3. **Choose your import style** (pick one): + Beets offers flexible import strategies to match your workflow. Choose + one of the following approaches and put one of the following in your + config file: -To install without pip, download beets from `its PyPI page`_ and run ``python -setup.py install`` in the directory therein. + .. tab-set:: -.. _its pypi page: https://pypi.org/project/beets/#files + .. tab-item:: Copy Files (Default) -.. _pip: https://pip.pypa.io + This is the default configuration and assumes you want to start a new organized music folder (inside ``directory`` above). During import we will *copy* cleaned-up music into that empty folder. -The best way to upgrade beets to a new version is by running ``pip install -U -beets``. You may want to follow `@b33ts`_ on Twitter to hear about progress on -new versions. + .. code-block:: yaml -.. _@b33ts: https://twitter.com/b33ts + import: + copy: yes # Copy files to new location -Installing by Hand on macOS 10.11 and Higher -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Starting with version 10.11 (El Capitan), macOS has a new security feature -called `System Integrity Protection`_ (SIP) that prevents you from modifying -some parts of the system. This means that some ``pip`` commands may fail with a -permissions error. (You probably *won't* run into this if you've installed -Python yourself with Homebrew_ or otherwise. You can also try MacPorts_.) + .. tab-item:: Move Files -If this happens, you can install beets for the current user only by typing ``pip -install --user beets``. If you do that, you might want to add -``~/Library/Python/3.6/bin`` to your ``$PATH``. + Start with a new empty directory, but *move* new music in instead of copying it (saving disk space). -.. _homebrew: https://brew.sh + .. code-block:: yaml -.. _system integrity protection: https://support.apple.com/en-us/HT204899 + import: + move: yes # Move files to new location -Installing on Windows -~~~~~~~~~~~~~~~~~~~~~ + .. tab-item:: Use Existing Structure -Installing beets on Windows can be tricky. Following these steps might help you -get it right: + Keep your current directory structure; importing should never move or copy files but instead just correct the tags on music. Make sure to point ``directory`` at the place where your music is currently stored. -1. If you don't have it, `install Python`_ (you want at least Python 3.8). The - installer should give you the option to "add Python to PATH." Check this box. - If you do that, you can skip the next step. -2. If you haven't done so already, set your ``PATH`` environment variable to - include Python and its scripts. To do so, open the "Settings" application, - then access the "System" screen, then access the "About" tab, and then hit - "Advanced system settings" located on the right side of the screen. This - should open the "System Properties" screen, then select the "Advanced" tab, - then hit the "Environmental Variables..." button, and then look for the PATH - variable in the table. Add the following to the end of the variable's value: - ``;C:\Python38;C:\Python38\Scripts``. You may need to adjust these paths to - point to your Python installation. -3. Now install beets by running: ``pip install beets`` -4. You're all set! 
Type ``beet`` at the command prompt to make sure everything's - in order. + .. code-block:: yaml -Windows users may also want to install a context menu item for importing files -into beets. Download the beets.reg_ file and open it in a text file to make sure -the paths to Python match your system. Then double-click the file add the -necessary keys to your registry. You can then right-click a directory and choose -"Import with beets". + import: + copy: no # Use files in place -Because I don't use Windows myself, I may have missed something. If you have -trouble or you have more detail to contribute here, please direct it to `the -mailing list`_. + .. tab-item:: Read-Only Mode -.. _beets.reg: https://github.com/beetbox/beets/blob/master/extra/beets.reg + Keep everything exactly as-is; only track metadata in database. (Corrected tags will still be stored in beets' database, and you can use them to do renaming or tag changes later.) -.. _get-pip.py: https://bootstrap.pypa.io/get-pip.py + .. code-block:: yaml -.. _install pip: https://pip.pypa.io/en/stable/installing/ + import: + copy: no # Use files in place + write: no # Don't modify tags +4. **Add customization via plugins (optional):** + Beets comes with many plugins that extend its functionality. You can + enable plugins by adding a `plugins` section to your config file. -.. _install python: https://python.org/download/ + We recommend adding at least one :ref:`Autotagger Plugin + <autotagger_extensions>` to help with fetching metadata during import. + For getting started, :doc:`MusicBrainz </plugins/musicbrainz>` is a good + choice. -Installing on ARM (Raspberry Pi and similar) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + .. code-block:: yaml -Beets on ARM devices is not recommended for Linux novices. If you are -comfortable with light troubleshooting in tools like ``pip``, ``make``, and -beets' command-line binary dependencies (e.g. ``ffmpeg`` and ``ImageMagick``), -you will probably be okay on ARM devices like the Raspberry Pi. We have `notes -for ARM`_ and an `older ARM reference`_. Beets is generally developed on x86-64 -based devices, and most plugins target that platform as well. + plugins: + - musicbrainz # Example plugin for fetching metadata + - ... other plugins you want ... -.. _notes for arm: https://github.com/beetbox/beets/discussions/4910 - -.. _older arm reference: https://discourse.beets.io/t/diary-of-beets-on-arm-odroid-hc4-armbian/1993 - -Configuring ------------ - -You'll want to set a few basic options before you start using beets. The -:doc:`configuration </reference/config>` is stored in a text file. You can show -its location by running ``beet config -p``, though it may not exist yet. Run -``beet config -e`` to edit the configuration in your favorite text editor. The -file will start out empty, but here's good place to start: - -:: - - directory: ~/music - library: ~/data/musiclibrary.db - -Change that first path to a directory where you'd like to keep your music. Then, -for ``library``, choose a good place to keep a database file that keeps an index -of your music. (The config's format is YAML_. You'll want to configure your text -editor to use spaces, not real tabs, for indentation. Also, ``~`` means your -home directory in these paths, even on Windows.) - -The default configuration assumes you want to start a new organized music folder -(that ``directory`` above) and that you'll *copy* cleaned-up music into that -empty folder using beets' ``import`` command (see below). 
But you can configure -beets to behave many other ways: - -- Start with a new empty directory, but *move* new music in instead of copying - it (saving disk space). Put this in your config file: - - :: - - import: - move: yes - -- Keep your current directory structure; importing should never move or copy - files but instead just correct the tags on music. Put the line ``copy: no`` - under the ``import:`` heading in your config file to disable any copying or - renaming. Make sure to point ``directory`` at the place where your music is - currently stored. -- Keep your current directory structure and *do not* correct files' tags: leave - files completely unmodified on your disk. (Corrected tags will still be stored - in beets' database, and you can use them to do renaming or tag changes later.) - Put this in your config file: - - :: - - import: - copy: no - write: no - - to disable renaming and tag-writing. - -There are approximately six million other configuration options you can set -here, including the directory and file naming scheme. See -:doc:`/reference/config` for a full reference. + You can find a list of available plugins in the :doc:`plugins index + </plugins/index>`. .. _yaml: https://yaml.org/ -To check that you've set up your configuration how you want it, you can type -``beet version`` to see a list of enabled plugins or ``beet config`` to get a -complete listing of your current configuration. +To validate that you've set up your configuration and it is valid YAML, you can +type ``beet version`` to see a list of enabled plugins or ``beet config`` to get +a complete listing of your current configuration. -Importing Your Library ----------------------- +.. dropdown:: Minimal configuration -The next step is to import your music files into the beets library database. -Because this can involve modifying files and moving them around, data loss is -always a possibility, so now would be a good time to make sure you have a recent -backup of all your music. We'll wait. + Here's a sample configuration file that includes the settings mentioned above: -There are two good ways to bring your existing library into beets. You can -either: (a) quickly bring all your files with all their current metadata into -beets' database, or (b) use beets' highly-refined autotagger to find canonical -metadata for every album you import. Option (a) is really fast, but option (b) -makes sure all your songs' tags are exactly right from the get-go. The point -about speed bears repeating: using the autotagger on a large library can take a -very long time, and it's an interactive process. So set aside a good chunk of -time if you're going to go that route. For more on the interactive tagging -process, see :doc:`tagger`. + .. code-block:: yaml -If you've got time and want to tag all your music right once and for all, do -this: + directory: ~/music + library: ~/data/musiclibrary.db -:: + import: + move: yes # Move files to new location + # copy: no # Use files in place + # write: no # Don't modify tags - $ beet import /path/to/my/music + plugins: + - musicbrainz # Example plugin for fetching metadata + # - ... other plugins you want ... -(Note that by default, this command will *copy music into the directory you -specified above*. If you want to use your current directory structure, set the -``import.copy`` config option.) To take the fast, un-autotagged path, just say: + You can copy and paste this into your config file and modify it as needed. -:: +.. admonition:: Ready for more? 
- $ beet import -A /my/huge/mp3/library + For a complete reference of all configuration options, see the + :doc:`configuration reference </reference/config>`. -Note that you just need to add ``-A`` for "don't autotag". +Importing Your Music +-------------------- -Adding More Music ------------------ +Now you're ready to import your music into beets! -If you've ripped or... otherwise obtained some new music, you can add it with -the ``beet import`` command, the same way you imported your library. Like so: +.. important:: -:: + Importing can modify and move your music files. **Make sure you have a + recent backup** before proceeding. - $ beet import ~/some_great_album +Choose Your Import Method +~~~~~~~~~~~~~~~~~~~~~~~~~ -This will attempt to autotag the new album (interactively) and add it to your -library. There are, of course, more options for this command---just type ``beet -help import`` to see what's available. +There are two good ways to bring your *existing* library into beets database. + +.. tab-set:: + + .. tab-item:: Autotag (Recommended) + + This method uses beets' autotagger to find canonical metadata for every album you import. It may take a while, especially for large libraries, and it's an interactive process. But it ensures all your songs' tags are exactly right from the get-go. + + .. code-block:: console + + beet import /a/chunk/of/my/library + + .. warning:: + + The point about speed bears repeating: using the autotagger on a large library can take a + very long time, and it's an interactive process. So set aside a good chunk of + time if you're going to go that route. + + We also recommend importing smaller batches of music at a time (e.g., a few albums) to make the process more manageable. For more on the interactive tagging + process, see :doc:`tagger`. + + + .. tab-item:: Quick Import + + This method quickly brings all your files with all their current metadata into beets' database without any changes. It's really fast, but it doesn't clean up or correct any tags. + + To use this method, run: + + .. code-block:: console + + beet import --noautotag /my/huge/mp3/library + + The ``--noautotag`` / ``-A`` flag skips autotagging and uses your files' current metadata. + +.. admonition:: More Import Options + + The ``beet import`` command has many options to customize its behavior. For + a full list, type ``beet help import`` or see the :ref:`import command + reference <import-cmd>`. + +Adding More Music Later +~~~~~~~~~~~~~~~~~~~~~~~ + +When you acquire new music, use the same ``beet import`` command to add it to +your library: + +.. code-block:: console + + beet import ~/new_totally_not_ripped_album + +This will apply the same autotagging process to your new additions. For +alternative import behaviors, consult the options mentioned above. Seeing Your Music ----------------- -If you want to query your music library, the ``beet list`` (shortened to ``beet -ls``) command is for you. You give it a :doc:`query string </reference/query>`, -which is formatted something like a Google search, and it gives you a list of -songs. Thus: +Once you've imported music into beets, you'll want to explore and query your +library. Beets provides several commands for searching, browsing, and getting +statistics about your collection. -:: +Basic Searching +~~~~~~~~~~~~~~~ + +The ``beet list`` command (shortened to ``beet ls``) lets you search your music +library using :doc:`query string </reference/query>` similar to web searches: + +.. 
code-block:: console $ beet ls the magnetic fields The Magnetic Fields - Distortion - Three-Way - The Magnetic Fields - Distortion - California Girls + The Magnetic Fields - Dist The Magnetic Fields - Distortion - Old Fools + +.. code-block:: console + $ beet ls hissing gronlandic of Montreal - Hissing Fauna, Are You the Destroyer? - Gronlandic Edit + +.. code-block:: console + $ beet ls bird The Knife - The Knife - Bird The Mae Shi - Terrorbird - Revelation Six + +By default, search terms match against :ref:`common attributes <keywordquery>` +of songs, and multiple terms are combined with AND logic (a track must match +*all* criteria). + +Searching Specific Fields +~~~~~~~~~~~~~~~~~~~~~~~~~ + +To narrow a search term to a particular metadata field, prefix the term with the +field name followed by a colon. For example, ``album:bird`` searches for "bird" +only in the "album" field of your songs. For more details, see +:doc:`/reference/query/`. + +.. code-block:: console + $ beet ls album:bird The Mae Shi - Terrorbird - Revelation Six -By default, a search term will match any of a handful of :ref:`common attributes -<keywordquery>` of songs. (They're also implicitly joined by ANDs: a track must -match *all* criteria in order to match the query.) To narrow a search term to a -particular metadata field, just put the field before the term, separated by a : -character. So ``album:bird`` only looks for ``bird`` in the "album" field of -your songs. (Need to know more? :doc:`/reference/query/` will answer all your -questions.) +This searches only the ``album`` field for the term ``bird``. + +Searching for Albums +~~~~~~~~~~~~~~~~~~~~ The ``beet list`` command also has an ``-a`` option, which searches for albums instead of songs: -:: +.. code-block:: console $ beet ls -a forever Bon Iver - For Emma, Forever Ago Freezepop - Freezepop Forever +Custom Output Formatting +~~~~~~~~~~~~~~~~~~~~~~~~ + There's also an ``-f`` option (for *format*) that lets you specify what gets displayed in the results of a search: -:: +.. code-block:: console $ beet ls -a forever -f "[$format] $album ($year) - $artist - $title" [MP3] For Emma, Forever Ago (2009) - Bon Iver - Flume [AAC] Freezepop Forever (2011) - Freezepop - Harebrained Scheme -In the format option, field references like ``$format`` and ``$year`` are filled -in with data from each result. You can see a full list of available fields by -running ``beet fields``. +In the format string, field references like ``$format``, ``$year``, ``$album``, +etc., are replaced with data from each result. -Beets also has a ``stats`` command, just in case you want to see how much music -you have: +.. dropdown:: Available fields for formatting -:: + To see all available fields you can use in custom formats, run: + + .. code-block:: console + + beet fields + + This will display a comprehensive list of metadata fields available for your music. + +Library Statistics +~~~~~~~~~~~~~~~~~~ + +Beets can also show you statistics about your music collection: + +.. code-block:: console $ beet stats Tracks: 13019 @@ -322,31 +313,107 @@ you have: Artists: 548 Albums: 1094 +.. admonition:: Ready for more advanced queries? + + The ``beet list`` command has many additional options for sorting, limiting + results, and more complex queries. For a complete reference, run: + + .. code-block:: console + + beet help list + + Or see the :ref:`list command reference <list-cmd>`. + Keep Playing ------------ -This is only the beginning of your long and prosperous journey with beets. 
To -keep learning, take a look at :doc:`advanced` for a sampling of what else is -possible. You'll also want to glance over the :doc:`/reference/cli` page for a -more detailed description of all of beets' functionality. (Like deleting music! -That's important.) +Congratulations! You've now mastered the basics of beets. But this is only the +beginning; beets has many more powerful features to explore. -Also, check out :doc:`beets' plugins </plugins/index>`. The real power of beets -is in its extensibility---with plugins, beets can do almost anything for your -music collection. +Continue Your Learning Journey +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You can always get help using the ``beet help`` command. The plain ``beet help`` -command lists all the available commands; then, for example, ``beet help -import`` gives more specific help about the ``import`` command. +*I was there to push people beyond what's expected of them.* -If you need more of a walkthrough, you can read an illustrated one `on the beets -blog <https://beets.io/blog/walkthrough.html>`_. +.. grid:: 2 + :gutter: 3 -Please let us know what you think of beets via `the discussion board`_ or -Mastodon_. + .. grid-item-card:: :octicon:`zap` Advanced Techniques + :link: advanced + :link-type: doc -.. _mastodon: https://fosstodon.org/@beets + Explore sophisticated beets workflows including: -.. _the discussion board: https://github.com/beetbox/beets/discussions + - Advanced tagging strategies + - Complex import scenarios + - Custom metadata management + - Workflow automation -.. _the mailing list: https://groups.google.com/group/beets-users + .. grid-item-card:: :octicon:`terminal` Command Reference + :link: /reference/cli + :link-type: doc + + Comprehensive guide to all beets commands: + + - Complete command syntax + - All available options + - Usage examples + - **Important operations like deleting music** + + .. grid-item-card:: :octicon:`plug` Plugin Ecosystem + :link: /plugins/index + :link-type: doc + + Discover beets' true power through plugins: + + - Metadata fetching from multiple sources + - Audio analysis and processing + - Streaming service integration + - Custom export formats + + .. grid-item-card:: :octicon:`question` Illustrated Walkthrough + :link: https://beets.io/blog/walkthrough.html + :link-type: url + + Visual, step-by-step guide covering: + + - Real-world import examples + - Screenshots of interactive tagging + - Common workflow patterns + - Troubleshooting tips + +.. admonition:: Need Help? + + Remember, you can always use ``beet help`` to see all available commands, or + ``beet help [command]`` for detailed help on specific commands. + +Join the Community +~~~~~~~~~~~~~~~~~~ + +We'd love to hear about your experience with beets! + +.. grid:: 2 + :gutter: 2 + + .. grid-item-card:: :octicon:`comment-discussion` Discussion Board + :link: https://github.com/beetbox/beets/discussions + :link-type: url + + - Ask questions + - Share tips and tricks + - Discuss feature ideas + - Get help from other users + + .. grid-item-card:: :octicon:`git-pull-request` Developer Resources + :link: /dev/index + :link-type: doc + + - Contribute code + - Report issues + - Review pull requests + - Join development discussions + +.. admonition:: Found a Bug? + + If you encounter any issues, please report them on our `GitHub Issues page + <https://github.com/beetbox/beets/issues>`_. 
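Following up on the query and formatting examples above: the same template passed to ``beet ls -f`` can be made the default by setting the global ``format_item`` and ``format_album`` options (the same options the missing plugin section later in this patch refers to). A minimal sketch, with illustrative templates built only from fields reported by ``beet fields``:

.. code-block:: yaml

    # config.yaml: make a custom listing format the default for `beet ls`
    format_item: '[$format] $album ($year) - $artist - $title'
    format_album: '$albumartist - $album ($year)'

With something like this in place, plain ``beet ls`` and ``beet ls -a`` should print results in these formats without passing ``-f`` each time.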
diff --git a/docs/guides/tagger.rst b/docs/guides/tagger.rst index dea1713f3..f43c1608c 100644 --- a/docs/guides/tagger.rst +++ b/docs/guides/tagger.rst @@ -1,3 +1,5 @@ +.. _using-the-auto-tagger: + Using the Auto-Tagger ===================== @@ -287,11 +289,21 @@ MusicBrainz---so consider adding the data yourself. .. _the musicbrainz database: https://musicbrainz.org/ +If you receive a "No matching release found" message from the Auto-Tagger for an +album you know is present in MusicBrainz, check that ``musicbrainz`` is in your +plugin list. Until version v2.4.0_, the :doc:`musicbrainz plugin +</plugins/musicbrainz>` was the Auto-Tagger's default metadata source and was +enabled automatically; it had to be disabled manually if you did not want it. +Since then, if you set the plugin list yourself, ``musicbrainz`` must be +included for it to keep contributing results to the Auto-Tagger. + If you think beets is ignoring an album that's listed in MusicBrainz, please `file a bug report`_. .. _file a bug report: https://github.com/beetbox/beets/issues +.. _v2.4.0: https://github.com/beetbox/beets/releases/tag/v2.4.0 + I Hope That Makes Sense ----------------------- @@ -299,5 +311,3 @@ If we haven't made the process clear, please post on `the discussion board`_ and we'll try to improve this guide. .. _the discussion board: https://github.com/beetbox/beets/discussions/ - -.. _the mailing list: https://groups.google.com/group/beets-users diff --git a/docs/index.rst b/docs/index.rst index 2b2c2e723..e9dd3b34f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -13,9 +13,8 @@ Then you can get a more detailed look at beets' features in the be interested in exploring the :doc:`plugins </plugins/index>`. If you still need help, you can drop by the ``#beets`` IRC channel on -Libera.Chat, drop by `the discussion board`_, send email to `the mailing list`_, -or `file a bug`_ in the issue tracker. Please let us know where you think this -documentation can be improved. +Libera.Chat, drop by `the discussion board`_ or `file a bug`_ in the issue +tracker. Please let us know where you think this documentation can be improved. .. _beets: https://beets.io/ @@ -23,8 +22,6 @@ documentation can be improved. .. _the discussion board: https://github.com/beetbox/beets/discussions/ -.. _the mailing list: https://groups.google.com/group/beets-users - Contents -------- diff --git a/docs/plugins/convert.rst b/docs/plugins/convert.rst index 8917422c5..ecf60a85b 100644 --- a/docs/plugins/convert.rst +++ b/docs/plugins/convert.rst @@ -97,6 +97,8 @@ The available options are: - **embed**: Embed album art in converted items. Default: ``yes``. - **id3v23**: Can be used to override the global ``id3v23`` option. Default: ``inherit``. +- **write_metadata**: Can be used to disable writing metadata to converted + files. Default: ``true``. - **max_bitrate**: By default, the plugin does not transcode files that are already in the destination format. This option instead also transcodes files with high bitrates, even if they are already in the same format as the output. diff --git a/docs/plugins/deezer.rst b/docs/plugins/deezer.rst index e58252e84..d44a565ce 100644 --- a/docs/plugins/deezer.rst +++ b/docs/plugins/deezer.rst @@ -29,6 +29,30 @@ Configuration This plugin can be configured like other metadata source plugins as described in :ref:`metadata-source-plugin-configuration`. +Default +~~~~~~~ + +.. code-block:: yaml + + deezer: + search_query_ascii: no + data_source_mismatch_penalty: 0.5 + search_limit: 5 + +.. 
conf:: search_query_ascii + :default: no + + If enabled, the search query will be converted to ASCII before being sent to + Deezer. Converting searches to ASCII can enhance search results in some cases, + but in general, it is not recommended. For instance, ``artist:deadmau5 + album:4×4`` will be converted to ``artist:deadmau5 album:4x4`` (notice + ``×!=x``). + +.. include:: ./shared_metadata_source_config.rst + +Commands +-------- + The ``deezer`` plugin provides an additional command ``deezerupdate`` to update the ``rank`` information from Deezer. The ``rank`` (ranges from 0 to 1M) is a global indicator of a song's popularity on Deezer that is updated daily based on diff --git a/docs/plugins/discogs.rst b/docs/plugins/discogs.rst index e6b93961a..780042026 100644 --- a/docs/plugins/discogs.rst +++ b/docs/plugins/discogs.rst @@ -65,50 +65,99 @@ Configuration This plugin can be configured like other metadata source plugins as described in :ref:`metadata-source-plugin-configuration`. -There is one additional option in the ``discogs:`` section, ``index_tracks``. -Index tracks (see the `Discogs guidelines`_) along with headers, mark divisions -between distinct works on the same release or within works. When -``index_tracks`` is enabled: +Default +~~~~~~~ .. code-block:: yaml discogs: - index_tracks: yes + apikey: REDACTED + apisecret: REDACTED + tokenfile: discogs_token.json + user_token: + index_tracks: no + append_style_genre: no + separator: ', ' + strip_disambiguation: yes + featured_string: Feat. + anv: + artist_credit: yes + artist: no + album_artist: no + data_source_mismatch_penalty: 0.5 + search_limit: 5 -beets will incorporate the names of the divisions containing each track into the -imported track's title. +.. conf:: index_tracks + :default: no -For example, importing `divisions album`_ would result in track names like: + Index tracks (see the `Discogs guidelines`_) along with headers, mark divisions + between distinct works on the same release or within works. When enabled, + beets will incorporate the names of the divisions containing each track into the + imported track's title. -.. code-block:: text + For example, importing `divisions album`_ would result in track names like: - Messiah, Part I: No.1: Sinfony - Messiah, Part II: No.22: Chorus- Behold The Lamb Of God - Athalia, Act I, Scene I: Sinfonia + .. code-block:: text -whereas with ``index_tracks`` disabled you'd get: + Messiah, Part I: No.1: Sinfony + Messiah, Part II: No.22: Chorus- Behold The Lamb Of God + Athalia, Act I, Scene I: Sinfonia -.. code-block:: text + whereas with ``index_tracks`` disabled you'd get: - No.1: Sinfony - No.22: Chorus- Behold The Lamb Of God - Sinfonia + .. code-block:: text -This option is useful when importing classical music. + No.1: Sinfony + No.22: Chorus- Behold The Lamb Of God + Sinfonia -Other configurations available under ``discogs:`` are: + This option is useful when importing classical music. -- **append_style_genre**: Appends the Discogs style (if found) to the genre tag. - This can be useful if you want more granular genres to categorize your music. - For example, a release in Discogs might have a genre of "Electronic" and a - style of "Techno": enabling this setting would set the genre to be - "Electronic, Techno" (assuming default separator of ``", "``) instead of just - "Electronic". Default: ``False`` -- **separator**: How to join multiple genre and style values from Discogs into a - string. 
Default: ``", "`` -- **search_limit**: The maximum number of results to return from Discogs. This - is useful if you want to limit the number of results returned to speed up - searches. Default: ``5`` +.. conf:: append_style_genre + :default: no + + Appends the Discogs style (if found) to the genre tag. This can be useful if + you want more granular genres to categorize your music. For example, + a release in Discogs might have a genre of "Electronic" and a style of + "Techno": enabling this setting would set the genre to be "Electronic, + Techno" (assuming default separator of ``", "``) instead of just + "Electronic". + +.. conf:: separator + :default: ", " + + How to join multiple genre and style values from Discogs into a string. + +.. conf:: strip_disambiguation + :default: yes + + Discogs uses strings like ``"(4)"`` to mark distinct artists and labels with + the same name. If you'd like to use the Discogs disambiguation in your tags, + you can disable this option. + +.. conf:: featured_string + :default: Feat. + + Configure the string used for noting featured artists. Useful if you prefer ``Featuring`` or ``ft.``. + +.. conf:: anv + + This configuration option is dedicated to handling Artist Name + Variations (ANVs). Sometimes a release credits artists differently compared to + the majority of their work. For example, "Basement Jaxx" may be credited as + "Tha Jaxx" or "The Basement Jaxx". You can select any combination of these + config options to control where beets writes and stores the variation credit. + The default, shown below, writes variations to the artist_credit field. + + .. code-block:: yaml + + discogs: + anv: + artist_credit: yes + artist: no + album_artist: no + +.. include:: ./shared_metadata_source_config.rst .. _discogs guidelines: https://support.discogs.com/hc/en-us/articles/360005055373-Database-Guidelines-12-Tracklisting#Index_Tracks_And_Headings diff --git a/docs/plugins/export.rst b/docs/plugins/export.rst index a5fa78617..b8e14ef22 100644 --- a/docs/plugins/export.rst +++ b/docs/plugins/export.rst @@ -70,7 +70,7 @@ These options match the options from the `Python csv module`_. .. _python csv module: https://docs.python.org/3/library/csv.html#csv-fmt-params -.. _python json module: https://docs.python.org/2/library/json.html#basic-usage +.. _python json module: https://docs.python.org/3/library/json.html#basic-usage The default options look like this: diff --git a/docs/plugins/ftintitle.rst b/docs/plugins/ftintitle.rst index 90b89ae89..1d2ec5c20 100644 --- a/docs/plugins/ftintitle.rst +++ b/docs/plugins/ftintitle.rst @@ -28,6 +28,10 @@ file. The available options are: - **keep_in_artist**: Keep the featuring X part in the artist field. This can be useful if you still want to be able to search for features in the artist field. Default: ``no``. +- **preserve_album_artist**: If the artist and the album artist are the same, + skip the ftintitle processing. Default: ``yes``. +- **custom_words**: List of additional words that will be treated as a marker + for artist features. Default: ``[]``. Running Manually ---------------- diff --git a/docs/plugins/index.rst b/docs/plugins/index.rst index 1dfa3aae2..2c9d94dfd 100644 --- a/docs/plugins/index.rst +++ b/docs/plugins/index.rst @@ -5,7 +5,7 @@ Plugins extend beets' core functionality. They add new commands, fetch additional data during import, provide new metadata sources, and much more. 
If beets by itself doesn't do what you want it to, you may just need to enable a plugin---or, if you want to do something new, :doc:`writing a plugin -</dev/plugins>` is easy if you know a little Python. +</dev/plugins/index>` is easy if you know a little Python. .. _using-plugins: @@ -18,7 +18,7 @@ list), just use the ``plugins`` option in your :doc:`config.yaml .. code-block:: sh - plugins: inline convert web + plugins: musicbrainz inline convert web The value for ``plugins`` can be a space-separated list of plugin names or a YAML list like ``[foo, bar]``. You can see which plugins are currently enabled @@ -29,7 +29,7 @@ its name: .. code-block:: yaml - plugins: inline convert web + plugins: musicbrainz inline convert web convert: auto: true @@ -47,21 +47,10 @@ some, you can use ``pip``'s "extras" feature to install the dependencies: Using Metadata Source Plugins ----------------------------- -Some plugins provide sources for metadata in addition to MusicBrainz. These -plugins share the following configuration option: +We provide several :ref:`autotagger_extensions` that fetch metadata from online +databases. They share the following configuration options: -- **source_weight**: Penalty applied to matches during import. Set to 0.0 to - disable. Default: ``0.5``. - -For example, to equally consider matches from Discogs and MusicBrainz add the -following to your configuration: - -.. code-block:: yaml - - plugins: discogs - - discogs: - source_weight: 0.0 +.. include:: ./shared_metadata_source_config.rst .. toctree:: :hidden: diff --git a/docs/plugins/inline.rst b/docs/plugins/inline.rst index 46ee3d634..d653b6d52 100644 --- a/docs/plugins/inline.rst +++ b/docs/plugins/inline.rst @@ -20,8 +20,7 @@ Here are a couple of examples of expressions: item_fields: initial: albumartist[0].upper() + u'.' - disc_and_track: u'%02i.%02i' % (disc, track) if - disctotal > 1 else u'%02i' % (track) + disc_and_track: f"{disc:02d}.{track:02d}" if disctotal > 1 else f"{track:02d}" Note that YAML syntax allows newlines in values if the subsequent lines are indented. diff --git a/docs/plugins/lastgenre.rst b/docs/plugins/lastgenre.rst index 68d4a60a7..ace7caaf0 100644 --- a/docs/plugins/lastgenre.rst +++ b/docs/plugins/lastgenre.rst @@ -124,7 +124,7 @@ tags** and will only **fetch new genres for empty tags**. When ``force`` is ``yes`` the setting of the ``whitelist`` option (as documented in Usage_) applies to any existing or newly fetched genres. -The follwing configurations are possible: +The following configurations are possible: **Setup 1** (default) @@ -147,8 +147,9 @@ Add new last.fm genres when **empty**. Any present tags stay **untouched**. **Setup 3** **Combine** genres in present tags with new ones (be aware of that with an -enabled ``whitelist`` setting, of course some genres might get cleaned up. To -make sure any existing genres remain, set ``whitelist: no``). +enabled ``whitelist`` setting, of course some genres might get cleaned up - +existing genres take precedence over new ones though. To make sure any existing +genres remain, set ``whitelist: no``). .. code-block:: yaml @@ -196,11 +197,6 @@ file. The available options are: internal whitelist, or ``no`` to consider all genres valid. Default: ``yes``. - **title_case**: Convert the new tags to TitleCase before saving. Default: ``yes``. -- **extended_debug**: Add additional debug logging messages that show what - last.fm tags were fetched for tracks, albums and artists. This is done before - any canonicalization and whitelist filtering is applied. 
It's useful for - tuning the plugin's settings and understanding how it works, but it can be - quite verbose. Default: ``no``. Running Manually ---------------- @@ -212,5 +208,19 @@ fetch genres for albums or items matching a certain query. By default, ``beet lastgenre`` matches albums. To match individual tracks or singletons, use the ``-A`` switch: ``beet lastgenre -A [QUERY]``. +To preview the changes that would be made without applying them, use the ``-p`` +or ``--pretend`` flag. This shows which genres would be set but does not write +or store any changes. + To disable automatic genre fetching on import, set the ``auto`` config option to false. + +Tuning Logs +----------- + +To enable tuning logs, run ``beet -vvv lastgenre ...`` or ``beet -vvv import +...``. This enables additional messages at the ``DEBUG`` log level, showing for +example what data was received from last.fm at each stage of genre fetching +(artist, album, and track levels) before any canonicalization or whitelist +filtering is applied. Tuning logs are useful for adjusting the plugin’s settings +and understanding its behavior, though they can be quite verbose. diff --git a/docs/plugins/missing.rst b/docs/plugins/missing.rst index 7764f5fe1..10842933c 100644 --- a/docs/plugins/missing.rst +++ b/docs/plugins/missing.rst @@ -39,21 +39,27 @@ Configuration To configure the plugin, make a ``missing:`` section in your configuration file. The available options are: -- **count**: Print a count of missing tracks per album, with ``format`` - defaulting to ``$albumartist - $album: $missing``. Default: ``no``. -- **format**: A specific format with which to print every track. This uses the - same template syntax as beets' :doc:`path formats </reference/pathformat>`. - The usage is inspired by, and therefore similar to, the :ref:`list <list-cmd>` - command. Default: :ref:`format_item`. +- **count**: Print a count of missing tracks per album, with the global + ``format_album`` used for formatting. Default: ``no``. - **total**: Print a single count of missing tracks in all albums. Default: ``no``. +Formatting +~~~~~~~~~~ + +- This plugin uses global formatting options from the main configuration; see + :ref:`format_item` and :ref:`format_album`: +- :ref:`format_item`: Used when listing missing tracks (default item format). +- :ref:`format_album`: Used when showing counts (``-c``) or missing albums + (``-a``). + Here's an example :: + format_album: $albumartist - $album + format_item: $artist - $album - $title missing: - format: $albumartist - $album - $title count: no total: no diff --git a/docs/plugins/musicbrainz.rst b/docs/plugins/musicbrainz.rst index fe22335b0..00c553d8b 100644 --- a/docs/plugins/musicbrainz.rst +++ b/docs/plugins/musicbrainz.rst @@ -17,6 +17,9 @@ To use the ``musicbrainz`` plugin, enable it in your configuration (see Configuration ------------- +This plugin can be configured like other metadata source plugins as described in +:ref:`metadata-source-plugin-configuration`. + Default ~~~~~~~ @@ -27,7 +30,6 @@ Default https: no ratelimit: 1 ratelimit_interval: 1.0 - searchlimit: 5 extra_tags: [] genres: no external_ids: @@ -37,117 +39,107 @@ Default deezer: no beatport: no tidal: no + data_source_mismatch_penalty: 0.5 + search_limit: 5 -You can instruct beets to use `your own MusicBrainz database -<https://musicbrainz.org/doc/MusicBrainz_Server/Setup>`__ instead of the +.. conf:: host + :default: musicbrainz.org -`main server`_. 
Use the ``host``, ``https`` and ``ratelimit`` options under a -``musicbrainz:`` header, like so + The Web server hostname (and port, optionally) that will be contacted by beets. + You can use this to configure beets to use `your own MusicBrainz database + <https://musicbrainz.org/doc/MusicBrainz_Server/Setup>`__ instead of the + `main server`_. -.. code-block:: yaml + The server must have search indices enabled (see `Building search indexes`_). - musicbrainz: - host: localhost:5000 - https: no - ratelimit: 100 + Example: -The ``host`` key, of course, controls the Web server hostname (and port, -optionally) that will be contacted by beets (default: musicbrainz.org). The -``https`` key makes the client use HTTPS instead of HTTP. This setting applies -only to custom servers. The official MusicBrainz server always uses HTTPS. -(Default: no.) The server must have search indices enabled (see `Building search -indexes`_). + .. code-block:: yaml -The ``ratelimit`` option, an integer, controls the number of Web service -requests per second (default: 1). **Do not change the rate limit setting** if -you're using the main MusicBrainz server---on this public server, you're -limited_ to one request per second. + musicbrainz: + host: localhost:5000 + +.. conf:: https + :default: no + + Makes the client use HTTPS instead of HTTP. This setting applies only to custom + servers. The official MusicBrainz server always uses HTTPS. + +.. conf:: ratelimit + :default: 1 + + Controls the number of Web service requests per second. + + **Do not change the rate limit setting** if you're using the main MusicBrainz + server---on this public server, you're limited_ to one request per second. + +.. conf:: ratelimit_interval + :default: 1.0 + + The time interval (in seconds) for the rate limit. + +.. conf:: enabled + :default: yes + + .. deprecated:: 2.4 Add ``musicbrainz`` to the ``plugins`` list instead. + +.. conf:: extra_tags + :default: [] + + By default, beets will use only the artist, album, and track count to query + MusicBrainz. Additional tags to be queried can be supplied with the + ``extra_tags`` setting. + + This setting should improve the autotagger results if the metadata with the + given tags match the metadata returned by MusicBrainz. + + Note that the only tags supported by this setting are: ``barcode``, + ``catalognum``, ``country``, ``label``, ``media``, and ``year``. + + Example: + + .. code-block:: yaml + + musicbrainz: + extra_tags: [barcode, catalognum, country, label, media, year] + +.. conf:: genres + :default: no + + Use MusicBrainz genre tags to populate (and replace if it's already set) the + ``genre`` tag. This will make it a list of all the genres tagged for the release + and the release-group on MusicBrainz, separated by "; " and sorted by the total + number of votes. + +.. conf:: external_ids + + **Default** + + .. code-block:: yaml + + musicbrainz: + external_ids: + discogs: no + spotify: no + bandcamp: no + beatport: no + deezer: no + tidal: no + + Set any of the ``external_ids`` options to ``yes`` to enable the MusicBrainz + importer to look for links to related metadata sources. If such a link is + available the release ID will be extracted from the URL provided and imported to + the beets library. + + The library fields of the corresponding :ref:`autotagger_extensions` are used to + save the data as flexible attributes (``discogs_album_id``, ``bandcamp_album_id``, ``spotify_album_id``, + ``beatport_album_id``, ``deezer_album_id``, ``tidal_album_id``). 
On re-imports + existing data will be overwritten. + +.. include:: ./shared_metadata_source_config.rst .. _building search indexes: https://musicbrainz.org/doc/Development/Search_server_setup .. _limited: https://musicbrainz.org/doc/XML_Web_Service/Rate_Limiting .. _main server: https://musicbrainz.org/ - -.. _musicbrainz.enabled: - -enabled -+++++++ - -.. deprecated:: 2.3 Add ``musicbrainz`` to the ``plugins`` list instead. - -This option allows you to disable using MusicBrainz as a metadata source. This -applies if you use plugins that fetch data from alternative sources and should -make the import process quicker. - -Default: ``yes``. - -.. _searchlimit: - -searchlimit -+++++++++++ - -The number of matches returned when sending search queries to the MusicBrainz -server. - -Default: ``5``. - -.. _extra_tags: - -extra_tags -++++++++++ - -By default, beets will use only the artist, album, and track count to query -MusicBrainz. Additional tags to be queried can be supplied with the -``extra_tags`` setting. For example - -.. code-block:: yaml - - musicbrainz: - extra_tags: [barcode, catalognum, country, label, media, year] - -This setting should improve the autotagger results if the metadata with the -given tags match the metadata returned by MusicBrainz. - -Note that the only tags supported by this setting are the ones listed in the -above example. - -Default: ``[]`` - -.. _genres: - -genres -++++++ - -Use MusicBrainz genre tags to populate (and replace if it's already set) the -``genre`` tag. This will make it a list of all the genres tagged for the release -and the release-group on MusicBrainz, separated by "; " and sorted by the total -number of votes. Default: ``no`` - -.. _musicbrainz.external_ids: - -external_ids -++++++++++++ - -Set any of the ``external_ids`` options to ``yes`` to enable the MusicBrainz -importer to look for links to related metadata sources. If such a link is -available the release ID will be extracted from the URL provided and imported to -the beets library - -.. code-block:: yaml - - musicbrainz: - external_ids: - discogs: yes - spotify: yes - bandcamp: yes - beatport: yes - deezer: yes - tidal: yes - -The library fields of the corresponding :ref:`autotagger_extensions` are used to -save the data (``discogs_albumid``, ``bandcamp_album_id``, ``spotify_album_id``, -``beatport_album_id``, ``deezer_album_id``, ``tidal_album_id``). On re-imports -existing data will be overwritten. - -The default of all options is ``no``. diff --git a/docs/plugins/play.rst b/docs/plugins/play.rst index 2bc825773..f06eb4cb3 100644 --- a/docs/plugins/play.rst +++ b/docs/plugins/play.rst @@ -107,6 +107,15 @@ string, use ``$args`` to indicate where to insert them. For example: indicates that you need to insert extra arguments before specifying the playlist. +Some players require a different syntax. For example, with ``mpv`` the optional +``$playlist`` variable can be used to match the syntax of the ``--playlist`` +option: + +:: + + play: + command: mpv $args --playlist=$playlist + The ``--yes`` (or ``-y``) flag to the ``play`` command will skip the warning message if you choose to play more items than the **warning_threshold** value usually allows. @@ -123,4 +132,4 @@ until they are externally wiped could be an issue for privacy or storage reasons. If this is the case for you, you might want to use the ``raw`` config option described above. -.. _tempfile.tempdir: https://docs.python.org/2/library/tempfile.html#tempfile.tempdir +.. 
_tempfile.tempdir: https://docs.python.org/3/library/tempfile.html#tempfile.tempdir diff --git a/docs/plugins/shared_metadata_source_config.rst b/docs/plugins/shared_metadata_source_config.rst new file mode 100644 index 000000000..609c7afd2 --- /dev/null +++ b/docs/plugins/shared_metadata_source_config.rst @@ -0,0 +1,65 @@ +.. _data_source_mismatch_penalty: + +.. conf:: data_source_mismatch_penalty + :default: 0.5 + + Penalty applied when the data source of a + match candidate differs from the original source of your existing tracks. Any + decimal number between 0.0 and 1.0 + + This setting controls how much to penalize matches from different metadata + sources during import. The penalty is applied when beets detects that a match + candidate comes from a different data source than what appears to be the + original source of your music collection. + + **Example configurations:** + + .. code-block:: yaml + + # Prefer MusicBrainz over Discogs when sources don't match + plugins: musicbrainz discogs + + musicbrainz: + data_source_mismatch_penalty: 0.3 # Lower penalty = preferred + discogs: + data_source_mismatch_penalty: 0.8 # Higher penalty = less preferred + + .. code-block:: yaml + + # Do not penalise candidates from Discogs at all + plugins: musicbrainz discogs + + musicbrainz: + data_source_mismatch_penalty: 0.5 + discogs: + data_source_mismatch_penalty: 0.0 + + .. code-block:: yaml + + # Disable cross-source penalties entirely + plugins: musicbrainz discogs + + musicbrainz: + data_source_mismatch_penalty: 0.0 + discogs: + data_source_mismatch_penalty: 0.0 + + .. tip:: + + The last configuration is equivalent to setting: + + .. code-block:: yaml + + match: + distance_weights: + data_source: 0.0 # Disable data source matching + +.. conf:: source_weight + :default: 0.5 + + .. deprecated:: 2.5 Use `data_source_mismatch_penalty`_ instead. + +.. conf:: search_limit + :default: 5 + + Maximum number of search results to return. diff --git a/docs/plugins/spotify.rst b/docs/plugins/spotify.rst index be929adf7..f0d6ac2ef 100644 --- a/docs/plugins/spotify.rst +++ b/docs/plugins/spotify.rst @@ -65,64 +65,84 @@ Configuration ------------- This plugin can be configured like other metadata source plugins as described in -:ref:`metadata-source-plugin-configuration`. In addition, the following -configuration options are provided. +:ref:`metadata-source-plugin-configuration`. -The default options should work as-is, but there are some options you can put in -config.yaml under the ``spotify:`` section: +Default +~~~~~~~ -- **mode**: One of the following: - - - ``list``: Print out the playlist as a list of links. This list can then - be pasted in to a new or existing Spotify playlist. - - ``open``: This mode actually sends a link to your default browser with - instructions to open Spotify with the playlist you created. Until this - has been tested on all platforms, it will remain optional. - - Default: ``list``. - -- **region_filter**: A two-character country abbreviation, to limit results to - that market. Default: None. -- **show_failures**: List each lookup that does not return a Spotify ID (and - therefore cannot be added to a playlist). Default: ``no``. -- **tiebreak**: How to choose the track if there is more than one identical - result. For example, there might be multiple releases of the same album. The - options are ``popularity`` and ``first`` (to just choose the first match - returned). Default: ``popularity``. 
-- **regex**: An array of regex transformations to perform on the - track/album/artist fields before sending them to Spotify. Can be useful for - changing certain abbreviations, like ft. -> feat. See the examples below. - Default: None. -- **search_query_ascii**: If set to ``yes``, the search query will be converted - to ASCII before being sent to Spotify. Converting searches to ASCII can - enhance search results in some cases, but in general, it is not recommended. - For instance ``artist:deadmau5 album:4×4`` will be converted to - ``artist:deadmau5 album:4x4`` (notice ``×!=x``). Default: ``no``. - -Here's an example: - -:: +.. code-block:: yaml spotify: - source_weight: 0.7 - mode: open - region_filter: US - show_failures: on - tiebreak: first + mode: list + region_filter: + show_failures: no + tiebreak: popularity + regex: [] search_query_ascii: no + client_id: REDACTED + client_secret: REDACTED + tokenfile: spotify_token.json + data_source_mismatch_penalty: 0.5 + search_limit: 5 - regex: [ - { - field: "albumartist", # Field in the item object to regex. - search: "Something", # String to look for. - replace: "Replaced" # Replacement value. - }, - { - field: "title", - search: "Something Else", - replace: "AlsoReplaced" - } - ] +.. conf:: mode + :default: list + + Controls how the playlist is output: + + - ``list``: Print out the playlist as a list of links. This list can then + be pasted in to a new or existing Spotify playlist. + - ``open``: This mode actually sends a link to your default browser with + instructions to open Spotify with the playlist you created. Until this + has been tested on all platforms, it will remain optional. + +.. conf:: region_filter + :default: + + A two-character country abbreviation, to limit results to that market. + +.. conf:: show_failures + :default: no + + List each lookup that does not return a Spotify ID (and therefore cannot be + added to a playlist). + +.. conf:: tiebreak + :default: popularity + + How to choose the candidate if there is more than one identical result. For + example, there might be multiple releases of the same album. + + - ``popularity``: pick the more popular candidate + - ``first``: pick the first candidate + +.. conf:: regex + :default: [] + + An array of regex transformations to perform on the track/album/artist fields + before sending them to Spotify. Can be useful for changing certain + abbreviations, like ft. -> feat. For example: + + .. code-block:: yaml + + regex: + - field: albumartist + search: Something + replace: Replaced + - field: title + search: Something Else + replace: AlsoReplaced + +.. conf:: search_query_ascii + :default: no + + If enabled, the search query will be converted to ASCII before being sent to + Spotify. Converting searches to ASCII can enhance search results in some + cases, but in general, it is not recommended. For instance, + ``artist:deadmau5 album:4×4`` will be converted to ``artist:deadmau5 + album:4x4`` (notice ``×!=x``). + +.. include:: ./shared_metadata_source_config.rst Obtaining Track Popularity and Audio Features from Spotify ---------------------------------------------------------- diff --git a/docs/plugins/zero.rst b/docs/plugins/zero.rst index 6ed9427d9..bf134e664 100644 --- a/docs/plugins/zero.rst +++ b/docs/plugins/zero.rst @@ -31,6 +31,9 @@ to nullify and the conditions for nullifying them: ``keep_fields``---not both! - To conditionally filter a field, use ``field: [regexp, regexp]`` to specify regular expressions. 
+- Set ``omit_single_disc`` to ``True`` to omit writing the ``disc`` number for + albums with only a single disc (``disctotal == 1``). By default, beets will + number the disc even if the album contains only one disc in total. - By default this plugin only affects files' tags; the beets database is left unchanged. To update the tags in the database, set the ``update_database`` option to true. diff --git a/docs/reference/cli.rst b/docs/reference/cli.rst index f1ef041b6..c0274553a 100644 --- a/docs/reference/cli.rst +++ b/docs/reference/cli.rst @@ -323,7 +323,7 @@ update :: - beet update [-F] FIELD [-e] EXCLUDE_FIELD [-aM] QUERY + beet update [-F] FIELD [-e] EXCLUDE_FIELD [-aMp] QUERY Update the library (and, by default, move files) to reflect out-of-band metadata changes and file deletions. diff --git a/docs/reference/config.rst b/docs/reference/config.rst index d4f5b3674..b4874416c 100644 --- a/docs/reference/config.rst +++ b/docs/reference/config.rst @@ -77,10 +77,10 @@ pluginpath ~~~~~~~~~~ Directories to search for plugins. Each Python file or directory in a plugin -path represents a plugin and should define a subclass of :class:`BeetsPlugin`. A -plugin can then be loaded by adding the filename to the ``plugins`` -configuration. The plugin path can either be a single string or a list of -strings---so, if you have multiple paths, format them as a YAML list like so: +path represents a plugin and should define a subclass of |BeetsPlugin|. A plugin +can then be loaded by adding the plugin name to the ``plugins`` configuration. +The plugin path can either be a single string or a list of strings---so, if you +have multiple paths, format them as a YAML list like so: :: @@ -376,7 +376,7 @@ terminal_encoding ~~~~~~~~~~~~~~~~~ The text encoding, as `known to Python -<https://docs.python.org/2/library/codecs.html#standard-encodings>`__, to use +<https://docs.python.org/3/library/codecs.html#standard-encodings>`__, to use for messages printed to the standard output. It's also used to read messages from the standard input. By default, this is determined automatically from the locale environment variables. @@ -429,20 +429,11 @@ UI Options The options that allow for customization of the visual appearance of the console interface. -These options are available in this section: - color ~~~~~ -Either ``yes`` or ``no``; whether to use color in console output (currently only -in the ``import`` command). Turn this off if your terminal doesn't support ANSI -colors. - -.. note:: - - The ``color`` option was previously a top-level configuration. This is still - respected, but a deprecation message will be shown until your top-level - ``color`` configuration has been nested under ``ui``. +Either ``yes`` or ``no``; whether to use color in console output. Turn this off +if your terminal doesn't support ANSI colors. .. _colors: @@ -450,10 +441,9 @@ colors ~~~~~~ The colors that are used throughout the user interface. These are only used if -the ``color`` option is set to ``yes``. For example, you might have a section in -your configuration file that looks like this: +the ``color`` option is set to ``yes``. See the default configuration: -:: +.. 
code-block:: yaml ui: colors: @@ -465,28 +455,26 @@ your configuration file that looks like this: action_default: ['bold', 'cyan'] action: ['bold', 'cyan'] # New colors after UI overhaul - text: ['normal'] text_faint: ['faint'] import_path: ['bold', 'blue'] import_path_items: ['bold', 'blue'] - added: ['green'] - removed: ['red'] changed: ['yellow'] - added_highlight: ['bold', 'green'] - removed_highlight: ['bold', 'red'] - changed_highlight: ['bold', 'yellow'] - text_diff_added: ['bold', 'red'] + text_diff_added: ['bold', 'green'] text_diff_removed: ['bold', 'red'] - text_diff_changed: ['bold', 'red'] action_description: ['white'] -Available colors: black, darkred, darkgreen, brown (darkyellow), darkblue, -purple (darkmagenta), teal (darkcyan), lightgray, darkgray, red, green, yellow, -blue, fuchsia (magenta), turquoise (cyan), white +Available attributes: -Legacy UI colors config directive used strings. If any colors value is still a -string instead of a list, it will be translated to list automatically. For -example ``blue`` will become ``['blue']``. +Foreground colors + ``black``, ``red``, ``green``, ``yellow``, ``blue``, ``magenta``, ``cyan``, + ``white`` + +Background colors + ``bg_black``, ``bg_red``, ``bg_green``, ``bg_yellow``, ``bg_blue``, + ``bg_magenta``, ``bg_cyan``, ``bg_white`` + +Text styles + ``normal``, ``bold``, ``faint``, ``underline``, ``reverse`` terminal_width ~~~~~~~~~~~~~~ @@ -495,7 +483,7 @@ Controls line wrapping on non-Unix systems. On Unix systems, the width of the terminal is detected automatically. If this fails, or on non-Unix systems, the specified value is used as a fallback. Defaults to ``80`` characters: -:: +.. code-block:: yaml ui: terminal_width: 80 @@ -511,7 +499,7 @@ different track lengths are colored with ``text_highlight_minor``. matching or distance score calculation (see :ref:`match-config`, ``distance_weights`` and :ref:`colors`): -:: +.. code-block:: yaml ui: length_diff_thresh: 10.0 @@ -523,18 +511,18 @@ When importing, beets will read several options to configure the visuals of the import dialogue. There are two layouts controlling how horizontal space and line wrapping is dealt with: ``column`` and ``newline``. The indentation of the respective elements of the import UI can also be configured. For example setting -``4`` for ``match_header`` will indent the very first block of a proposed match -by five characters in the terminal: +``2`` for ``match_header`` will indent the very first block of a proposed match +by two characters in the terminal: -:: +.. code-block:: yaml ui: import: indentation: - match_header: 4 - match_details: 4 - match_tracklist: 7 - layout: newline + match_header: 2 + match_details: 2 + match_tracklist: 5 + layout: column Importer Options ---------------- @@ -947,7 +935,7 @@ can be one of ``none``, ``low``, ``medium`` or ``strong``. When the maximum recommendation is ``strong``, no "downgrading" occurs. The available penalty names here are: -- source +- data_source - artist - album - media diff --git a/docs/reference/pathformat.rst b/docs/reference/pathformat.rst index 1fc204b62..30871cf55 100644 --- a/docs/reference/pathformat.rst +++ b/docs/reference/pathformat.rst @@ -289,4 +289,4 @@ constructs include: The :doc:`/plugins/inline` lets you define template fields in your beets configuration file using Python snippets. And for more advanced processing, you can go all-in and write a dedicated plugin to register your own fields and -functions (see :ref:`writing-plugins`). +functions (see :ref:`basic-plugin-setup`). 
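To see how the UI options above fit together, here is a small combined ``ui:`` sketch. It uses only keys documented in this section; the particular values (width, threshold, colors, indentation) are illustrative rather than recommendations:

.. code-block:: yaml

    ui:
      color: yes                 # use ANSI colors in console output
      terminal_width: 100        # fallback width on non-Unix systems
      length_diff_thresh: 10.0   # threshold for highlighting track length differences
      colors:
        text_diff_added: ['bold', 'green']
        text_diff_removed: ['bold', 'red']
        changed: ['yellow']
      import:
        layout: column           # the other documented layout is `newline`
        indentation:
          match_header: 2
          match_details: 2
          match_tracklist: 5

A partial ``colors:`` block like this should only override the named entries, with the remaining colors keeping their defaults.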
diff --git a/extra/release.py b/extra/release.py index 647cc49c9..e16814960 100755 --- a/extra/release.py +++ b/extra/release.py @@ -19,6 +19,8 @@ from packaging.version import Version, parse from sphinx.ext import intersphinx from typing_extensions import TypeAlias +from docs.conf import rst_epilog + BASE = Path(__file__).parent.parent.absolute() PYPROJECT = BASE / "pyproject.toml" CHANGELOG = BASE / "docs" / "changelog.rst" @@ -104,11 +106,21 @@ def create_rst_replacements() -> list[Replacement]: plugins = "|".join( r.split("/")[-1] for r in refs if r.startswith("plugins/") ) + explicit_replacements = dict( + line.removeprefix(".. ").split(" replace:: ") + for line in filter(None, rst_epilog.splitlines()) + ) return [ - # Replace Sphinx :ref: and :doc: directives by documentation URLs + # Replace explicitly defined substitutions from rst_epilog + # |BeetsPlugin| -> :class:`beets.plugins.BeetsPlugin` + ( + r"\|\w[^ ]*\|", + lambda m: explicit_replacements.get(m[0], m[0]), + ), + # Replace Sphinx directives by documentation URLs, e.g., # :ref:`/plugins/autobpm` -> [AutoBPM Plugin](DOCS/plugins/autobpm.html) ( - r":(?:ref|doc):`+(?:([^`<]+)<)?/?([\w./_-]+)>?`+", + r":(?:ref|doc|class|conf):`+(?:([^`<]+)<)?/?([\w.:/_-]+)>?`+", lambda m: make_ref_link(m[2], m[1]), ), # Convert command references to documentation URLs diff --git a/poetry.lock b/poetry.lock index 25d9448ba..568b20d7d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -63,18 +63,82 @@ files = [ ] [[package]] -name = "audioread" -version = "3.0.1" -description = "Multi-library, cross-platform audio decoding." +name = "audioop-lts" +version = "0.2.2" +description = "LTS Port of Python audioop" optional = true -python-versions = ">=3.6" +python-versions = ">=3.13" files = [ - {file = "audioread-3.0.1-py3-none-any.whl", hash = "sha256:4cdce70b8adc0da0a3c9e0d85fb10b3ace30fbdf8d1670fd443929b61d117c33"}, - {file = "audioread-3.0.1.tar.gz", hash = "sha256:ac5460a5498c48bdf2e8e767402583a4dcd13f4414d286f42ce4379e8b35066d"}, + {file = "audioop_lts-0.2.2-cp313-abi3-macosx_10_13_universal2.whl", hash = "sha256:fd3d4602dc64914d462924a08c1a9816435a2155d74f325853c1f1ac3b2d9800"}, + {file = "audioop_lts-0.2.2-cp313-abi3-macosx_10_13_x86_64.whl", hash = "sha256:550c114a8df0aafe9a05442a1162dfc8fec37e9af1d625ae6060fed6e756f303"}, + {file = "audioop_lts-0.2.2-cp313-abi3-macosx_11_0_arm64.whl", hash = "sha256:9a13dc409f2564de15dd68be65b462ba0dde01b19663720c68c1140c782d1d75"}, + {file = "audioop_lts-0.2.2-cp313-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:51c916108c56aa6e426ce611946f901badac950ee2ddaf302b7ed35d9958970d"}, + {file = "audioop_lts-0.2.2-cp313-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:47eba38322370347b1c47024defbd36374a211e8dd5b0dcbce7b34fdb6f8847b"}, + {file = "audioop_lts-0.2.2-cp313-abi3-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba7c3a7e5f23e215cb271516197030c32aef2e754252c4c70a50aaff7031a2c8"}, + {file = "audioop_lts-0.2.2-cp313-abi3-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:def246fe9e180626731b26e89816e79aae2276f825420a07b4a647abaa84becc"}, + {file = "audioop_lts-0.2.2-cp313-abi3-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e160bf9df356d841bb6c180eeeea1834085464626dc1b68fa4e1d59070affdc3"}, + {file = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4b4cd51a57b698b2d06cb9993b7ac8dfe89a3b2878e96bc7948e9f19ff51dba6"}, 
+ {file = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_ppc64le.whl", hash = "sha256:4a53aa7c16a60a6857e6b0b165261436396ef7293f8b5c9c828a3a203147ed4a"}, + {file = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_riscv64.whl", hash = "sha256:3fc38008969796f0f689f1453722a0f463da1b8a6fbee11987830bfbb664f623"}, + {file = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_s390x.whl", hash = "sha256:15ab25dd3e620790f40e9ead897f91e79c0d3ce65fe193c8ed6c26cffdd24be7"}, + {file = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:03f061a1915538fd96272bac9551841859dbb2e3bf73ebe4a23ef043766f5449"}, + {file = "audioop_lts-0.2.2-cp313-abi3-win32.whl", hash = "sha256:3bcddaaf6cc5935a300a8387c99f7a7fbbe212a11568ec6cf6e4bc458c048636"}, + {file = "audioop_lts-0.2.2-cp313-abi3-win_amd64.whl", hash = "sha256:a2c2a947fae7d1062ef08c4e369e0ba2086049a5e598fda41122535557012e9e"}, + {file = "audioop_lts-0.2.2-cp313-abi3-win_arm64.whl", hash = "sha256:5f93a5db13927a37d2d09637ccca4b2b6b48c19cd9eda7b17a2e9f77edee6a6f"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:73f80bf4cd5d2ca7814da30a120de1f9408ee0619cc75da87d0641273d202a09"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:106753a83a25ee4d6f473f2be6b0966fc1c9af7e0017192f5531a3e7463dce58"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fbdd522624141e40948ab3e8cdae6e04c748d78710e9f0f8d4dae2750831de19"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:143fad0311e8209ece30a8dbddab3b65ab419cbe8c0dde6e8828da25999be911"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dfbbc74ec68a0fd08cfec1f4b5e8cca3d3cd7de5501b01c4b5d209995033cde9"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cfcac6aa6f42397471e4943e0feb2244549db5c5d01efcd02725b96af417f3fe"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:752d76472d9804ac60f0078c79cdae8b956f293177acd2316cd1e15149aee132"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:83c381767e2cc10e93e40281a04852facc4cd9334550e0f392f72d1c0a9c5753"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c0022283e9556e0f3643b7c3c03f05063ca72b3063291834cca43234f20c60bb"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:a2d4f1513d63c795e82948e1305f31a6d530626e5f9f2605408b300ae6095093"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:c9c8e68d8b4a56fda8c025e538e639f8c5953f5073886b596c93ec9b620055e7"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:96f19de485a2925314f5020e85911fb447ff5fbef56e8c7c6927851b95533a1c"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e541c3ef484852ef36545f66209444c48b28661e864ccadb29daddb6a4b8e5f5"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-win32.whl", hash = "sha256:d5e73fa573e273e4f2e5ff96f9043858a5e9311e94ffefd88a3186a910c70917"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9191d68659eda01e448188f60364c7763a7ca6653ed3f87ebb165822153a8547"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-win_arm64.whl", hash = 
"sha256:c174e322bb5783c099aaf87faeb240c8d210686b04bd61dfd05a8e5a83d88969"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f9ee9b52f5f857fbaf9d605a360884f034c92c1c23021fb90b2e39b8e64bede6"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:49ee1a41738a23e98d98b937a0638357a2477bc99e61b0f768a8f654f45d9b7a"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5b00be98ccd0fc123dcfad31d50030d25fcf31488cde9e61692029cd7394733b"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a6d2e0f9f7a69403e388894d4ca5ada5c47230716a03f2847cfc7bd1ecb589d6"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9b0b8a03ef474f56d1a842af1a2e01398b8f7654009823c6d9e0ecff4d5cfbf"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2b267b70747d82125f1a021506565bdc5609a2b24bcb4773c16d79d2bb260bbd"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0337d658f9b81f4cd0fdb1f47635070cc084871a3d4646d9de74fdf4e7c3d24a"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:167d3b62586faef8b6b2275c3218796b12621a60e43f7e9d5845d627b9c9b80e"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0d9385e96f9f6da847f4d571ce3cb15b5091140edf3db97276872647ce37efd7"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:48159d96962674eccdca9a3df280e864e8ac75e40a577cc97c5c42667ffabfc5"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8fefe5868cd082db1186f2837d64cfbfa78b548ea0d0543e9b28935ccce81ce9"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:58cf54380c3884fb49fdd37dfb7a772632b6701d28edd3e2904743c5e1773602"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:088327f00488cdeed296edd9215ca159f3a5a5034741465789cad403fcf4bec0"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-win32.whl", hash = "sha256:068aa17a38b4e0e7de771c62c60bbca2455924b67a8814f3b0dee92b5820c0b3"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:a5bf613e96f49712073de86f20dbdd4014ca18efd4d34ed18c75bd808337851b"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:b492c3b040153e68b9fdaff5913305aaaba5bb433d8a7f73d5cf6a64ed3cc1dd"}, + {file = "audioop_lts-0.2.2.tar.gz", hash = "sha256:64d0c62d88e67b98a1a5e71987b7aa7b5bcffc7dcee65b635823dbdd0a8dbbd0"}, ] +[[package]] +name = "audioread" +version = "3.1.0" +description = "Multi-library, cross-platform audio decoding." 
+optional = true +python-versions = ">=3.9" +files = [ + {file = "audioread-3.1.0-py3-none-any.whl", hash = "sha256:b30d1df6c5d3de5dcef0fb0e256f6ea17bdcf5f979408df0297d8a408e2971b4"}, + {file = "audioread-3.1.0.tar.gz", hash = "sha256:1c4ab2f2972764c896a8ac61ac53e261c8d29f0c6ccd652f84e18f08a4cab190"}, +] + +[package.dependencies] +standard-aifc = {version = "*", markers = "python_version >= \"3.13\""} +standard-sunau = {version = "*", markers = "python_version >= \"3.13\""} + [package.extras] -test = ["tox"] +gi = ["pygobject (>=3.54.2,<4.0.0)"] +mad = ["pymad[mad] (>=0.11.3,<0.12.0)"] +test = ["pytest (>=8.4.2)", "pytest-cov (>=7.0.0)"] [[package]] name = "babel" @@ -575,13 +639,13 @@ files = [ [[package]] name = "confuse" -version = "2.0.1" -description = "Painless YAML configuration." +version = "2.1.0" +description = "Painless YAML config files" optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" files = [ - {file = "confuse-2.0.1-py3-none-any.whl", hash = "sha256:9b9e5bbc70e2cb9b318bcab14d917ec88e21bf1b724365e3815eb16e37aabd2a"}, - {file = "confuse-2.0.1.tar.gz", hash = "sha256:7379a2ad49aaa862b79600cc070260c1b7974d349f4fa5e01f9afa6c4dd0611f"}, + {file = "confuse-2.1.0-py3-none-any.whl", hash = "sha256:502be1299aa6bf7c48f7719f56795720c073fb28550c0c7a37394366c9d30316"}, + {file = "confuse-2.1.0.tar.gz", hash = "sha256:abb9674a99c7a6efaef84e2fc84403ecd2dd304503073ff76ea18ed4176e218d"}, ] [package.dependencies] @@ -696,28 +760,29 @@ files = [ [[package]] name = "docstrfmt" -version = "1.10.0" +version = "1.11.1" description = "docstrfmt: A formatter for Sphinx flavored reStructuredText." optional = false -python-versions = "<4,>=3.8" +python-versions = ">=3.9" files = [ - {file = "docstrfmt-1.10.0-py3-none-any.whl", hash = "sha256:a34ef6f3d8ab3233a7d0b3d1c2f3c66f8acbb3917df5ed2f3e34c1629ac29cef"}, - {file = "docstrfmt-1.10.0.tar.gz", hash = "sha256:9da96e71552937f4b49ae2d6ab1c118ffa8ad6968082e6b8fd978b01d1bc0066"}, + {file = "docstrfmt-1.11.1-py3-none-any.whl", hash = "sha256:6782d8663321c3a7c40be08a36fbcb1ea9e46d1efba85411ba807d97f384871a"}, + {file = "docstrfmt-1.11.1.tar.gz", hash = "sha256:d41e19d6c5d524cc7f8ff6cbfecb8762d77e696b9fe4f5057269051fb966fc80"}, ] [package.dependencies] -black = "==24.*" -click = "==8.*" -docutils = "==0.20.*" -libcst = "==1.*" -platformdirs = "==4.*" -sphinx = ">=7,<9" -tabulate = "==0.9.*" -toml = "==0.10.*" +black = ">=24" +click = ">=8" +docutils = ">=0.20" +libcst = ">=1" +platformdirs = ">=4" +roman = "*" +sphinx = ">=7" +tabulate = ">=0.9" +toml = {version = ">=0.10", markers = "python_version < \"3.11\""} [package.extras] ci = ["coveralls"] -d = ["aiohttp (==3.*)"] +d = ["aiohttp (>=3)"] dev = ["docstrfmt[lint]", "docstrfmt[test]", "packaging"] lint = ["pre-commit", "ruff (>=0.0.292)"] test = ["pytest", "pytest-aiohttp"] @@ -1095,13 +1160,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "joblib" -version = "1.5.1" +version = "1.5.2" description = "Lightweight pipelining with Python functions" optional = true python-versions = ">=3.9" files = [ - {file = "joblib-1.5.1-py3-none-any.whl", hash = "sha256:4719a31f054c7d766948dcd83e9613686b27114f190f717cec7eaa2084f8a74a"}, - {file = "joblib-1.5.1.tar.gz", hash = "sha256:f4f86e351f39fe3d0d32a9f2c3d8af1ee4cec285aafcb27003dda5205576b444"}, + {file = "joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241"}, + {file = "joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55"}, ] 
[[package]] @@ -1280,33 +1345,35 @@ typing-extensions = {version = "*", markers = "python_version < \"3.10\""} [[package]] name = "librosa" -version = "0.10.2.post1" +version = "0.11.0" description = "Python module for audio and music processing" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "librosa-0.10.2.post1-py3-none-any.whl", hash = "sha256:dc882750e8b577a63039f25661b7e39ec4cfbacc99c1cffba666cd664fb0a7a0"}, - {file = "librosa-0.10.2.post1.tar.gz", hash = "sha256:cd99f16717cbcd1e0983e37308d1db46a6f7dfc2e396e5a9e61e6821e44bd2e7"}, + {file = "librosa-0.11.0-py3-none-any.whl", hash = "sha256:0b6415c4fd68bff4c29288abe67c6d80b587e0e1e2cfb0aad23e4559504a7fa1"}, + {file = "librosa-0.11.0.tar.gz", hash = "sha256:f5ed951ca189b375bbe2e33b2abd7e040ceeee302b9bbaeeffdfddb8d0ace908"}, ] [package.dependencies] audioread = ">=2.1.9" decorator = ">=4.3.0" -joblib = ">=0.14" -lazy-loader = ">=0.1" +joblib = ">=1.0" +lazy_loader = ">=0.1" msgpack = ">=1.0" numba = ">=0.51.0" -numpy = ">=1.20.3,<1.22.0 || >1.22.0,<1.22.1 || >1.22.1,<1.22.2 || >1.22.2" +numpy = ">=1.22.3" pooch = ">=1.1" -scikit-learn = ">=0.20.0" -scipy = ">=1.2.0" +scikit-learn = ">=1.1.0" +scipy = ">=1.6.0" soundfile = ">=0.12.1" soxr = ">=0.3.2" -typing-extensions = ">=4.1.1" +standard-aifc = {version = "*", markers = "python_version >= \"3.13\""} +standard-sunau = {version = "*", markers = "python_version >= \"3.13\""} +typing_extensions = ">=4.1.1" [package.extras] display = ["matplotlib (>=3.5.0)"] -docs = ["ipython (>=7.0)", "matplotlib (>=3.5.0)", "mir-eval (>=0.5)", "numba (>=0.51)", "numpydoc", "presets", "sphinx (!=1.3.1)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.7)", "sphinx-multiversion (>=0.2.3)", "sphinx-rtd-theme (>=1.2.0)", "sphinxcontrib-svg2pdfconverter"] +docs = ["ipython (>=7.0)", "matplotlib (>=3.5.0)", "mir_eval (>=0.5)", "numba (>=0.51)", "numpydoc", "presets", "sphinx (!=1.3.1)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.7)", "sphinx-multiversion (>=0.2.3)", "sphinx_rtd_theme (>=1.2.0)", "sphinxcontrib-googleanalytics (>=0.4)", "sphinxcontrib-svg2pdfconverter"] tests = ["matplotlib (>=3.5.0)", "packaging (>=20.0)", "pytest", "pytest-cov", "pytest-mpl", "resampy (>=0.2.2)", "samplerate", "types-decorator"] [[package]] @@ -1339,6 +1406,36 @@ files = [ {file = "llvmlite-0.43.0.tar.gz", hash = "sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5"}, ] +[[package]] +name = "llvmlite" +version = "0.45.1" +description = "lightweight wrapper around basic LLVM functionality" +optional = true +python-versions = ">=3.10" +files = [ + {file = "llvmlite-0.45.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:1b1af0c910af0978aa55fa4f60bbb3e9f39b41e97c2a6d94d199897be62ba07a"}, + {file = "llvmlite-0.45.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02a164db2d79088bbd6e0d9633b4fe4021d6379d7e4ac7cc85ed5f44b06a30c5"}, + {file = "llvmlite-0.45.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f2d47f34e4029e6df3395de34cc1c66440a8d72712993a6e6168db228686711b"}, + {file = "llvmlite-0.45.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f7319e5f9f90720578a7f56fbc805bdfb4bc071b507c7611f170d631c3c0f1e0"}, + {file = "llvmlite-0.45.1-cp310-cp310-win_amd64.whl", hash = "sha256:4edb62e685867799e336723cb9787ec6598d51d0b1ed9af0f38e692aa757e898"}, + {file = "llvmlite-0.45.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:60f92868d5d3af30b4239b50e1717cb4e4e54f6ac1c361a27903b318d0f07f42"}, + 
{file = "llvmlite-0.45.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98baab513e19beb210f1ef39066288784839a44cd504e24fff5d17f1b3cf0860"}, + {file = "llvmlite-0.45.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3adc2355694d6a6fbcc024d59bb756677e7de506037c878022d7b877e7613a36"}, + {file = "llvmlite-0.45.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2f3377a6db40f563058c9515dedcc8a3e562d8693a106a28f2ddccf2c8fcf6ca"}, + {file = "llvmlite-0.45.1-cp311-cp311-win_amd64.whl", hash = "sha256:f9c272682d91e0d57f2a76c6d9ebdfccc603a01828cdbe3d15273bdca0c3363a"}, + {file = "llvmlite-0.45.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:28e763aba92fe9c72296911e040231d486447c01d4f90027c8e893d89d49b20e"}, + {file = "llvmlite-0.45.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1a53f4b74ee9fd30cb3d27d904dadece67a7575198bd80e687ee76474620735f"}, + {file = "llvmlite-0.45.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b3796b1b1e1c14dcae34285d2f4ea488402fbd2c400ccf7137603ca3800864f"}, + {file = "llvmlite-0.45.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:779e2f2ceefef0f4368548685f0b4adde34e5f4b457e90391f570a10b348d433"}, + {file = "llvmlite-0.45.1-cp312-cp312-win_amd64.whl", hash = "sha256:9e6c9949baf25d9aa9cd7cf0f6d011b9ca660dd17f5ba2b23bdbdb77cc86b116"}, + {file = "llvmlite-0.45.1-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:d9ea9e6f17569a4253515cc01dade70aba536476e3d750b2e18d81d7e670eb15"}, + {file = "llvmlite-0.45.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:c9f3cadee1630ce4ac18ea38adebf2a4f57a89bd2740ce83746876797f6e0bfb"}, + {file = "llvmlite-0.45.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:57c48bf2e1083eedbc9406fb83c4e6483017879714916fe8be8a72a9672c995a"}, + {file = "llvmlite-0.45.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3aa3dfceda4219ae39cf18806c60eeb518c1680ff834b8b311bd784160b9ce40"}, + {file = "llvmlite-0.45.1-cp313-cp313-win_amd64.whl", hash = "sha256:080e6f8d0778a8239cd47686d402cb66eb165e421efa9391366a9b7e5810a38b"}, + {file = "llvmlite-0.45.1.tar.gz", hash = "sha256:09430bb9d0bb58fc45a45a57c7eae912850bedc095cd0810a57de109c69e1c32"}, +] + [[package]] name = "lxml" version = "6.0.0" @@ -1554,70 +1651,73 @@ test = ["pytest", "pytest-cov"] [[package]] name = "msgpack" -version = "1.1.1" +version = "1.1.2" description = "MessagePack serializer" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "msgpack-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:353b6fc0c36fde68b661a12949d7d49f8f51ff5fa019c1e47c87c4ff34b080ed"}, - {file = "msgpack-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:79c408fcf76a958491b4e3b103d1c417044544b68e96d06432a189b43d1215c8"}, - {file = "msgpack-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78426096939c2c7482bf31ef15ca219a9e24460289c00dd0b94411040bb73ad2"}, - {file = "msgpack-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b17ba27727a36cb73aabacaa44b13090feb88a01d012c0f4be70c00f75048b4"}, - {file = "msgpack-1.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a17ac1ea6ec3c7687d70201cfda3b1e8061466f28f686c24f627cae4ea8efd0"}, - {file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:88d1e966c9235c1d4e2afac21ca83933ba59537e2e2727a999bf3f515ca2af26"}, - {file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f6d58656842e1b2ddbe07f43f56b10a60f2ba5826164910968f5933e5178af75"}, - {file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96decdfc4adcbc087f5ea7ebdcfd3dee9a13358cae6e81d54be962efc38f6338"}, - {file = "msgpack-1.1.1-cp310-cp310-win32.whl", hash = "sha256:6640fd979ca9a212e4bcdf6eb74051ade2c690b862b679bfcb60ae46e6dc4bfd"}, - {file = "msgpack-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:8b65b53204fe1bd037c40c4148d00ef918eb2108d24c9aaa20bc31f9810ce0a8"}, - {file = "msgpack-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:71ef05c1726884e44f8b1d1773604ab5d4d17729d8491403a705e649116c9558"}, - {file = "msgpack-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:36043272c6aede309d29d56851f8841ba907a1a3d04435e43e8a19928e243c1d"}, - {file = "msgpack-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a32747b1b39c3ac27d0670122b57e6e57f28eefb725e0b625618d1b59bf9d1e0"}, - {file = "msgpack-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a8b10fdb84a43e50d38057b06901ec9da52baac6983d3f709d8507f3889d43f"}, - {file = "msgpack-1.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0c325c3f485dc54ec298d8b024e134acf07c10d494ffa24373bea729acf704"}, - {file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:88daaf7d146e48ec71212ce21109b66e06a98e5e44dca47d853cbfe171d6c8d2"}, - {file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8b55ea20dc59b181d3f47103f113e6f28a5e1c89fd5b67b9140edb442ab67f2"}, - {file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a28e8072ae9779f20427af07f53bbb8b4aa81151054e882aee333b158da8752"}, - {file = "msgpack-1.1.1-cp311-cp311-win32.whl", hash = "sha256:7da8831f9a0fdb526621ba09a281fadc58ea12701bc709e7b8cbc362feabc295"}, - {file = "msgpack-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fd1b58e1431008a57247d6e7cc4faa41c3607e8e7d4aaf81f7c29ea013cb458"}, - {file = "msgpack-1.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae497b11f4c21558d95de9f64fff7053544f4d1a17731c866143ed6bb4591238"}, - {file = "msgpack-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33be9ab121df9b6b461ff91baac6f2731f83d9b27ed948c5b9d1978ae28bf157"}, - {file = "msgpack-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f64ae8fe7ffba251fecb8408540c34ee9df1c26674c50c4544d72dbf792e5ce"}, - {file = "msgpack-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a494554874691720ba5891c9b0b39474ba43ffb1aaf32a5dac874effb1619e1a"}, - {file = "msgpack-1.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb643284ab0ed26f6957d969fe0dd8bb17beb567beb8998140b5e38a90974f6c"}, - {file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d275a9e3c81b1093c060c3837e580c37f47c51eca031f7b5fb76f7b8470f5f9b"}, - {file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fd6b577e4541676e0cc9ddc1709d25014d3ad9a66caa19962c4f5de30fc09ef"}, - {file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb29aaa613c0a1c40d1af111abf025f1732cab333f96f285d6a93b934738a68a"}, - {file = "msgpack-1.1.1-cp312-cp312-win32.whl", hash = 
"sha256:870b9a626280c86cff9c576ec0d9cbcc54a1e5ebda9cd26dab12baf41fee218c"}, - {file = "msgpack-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:5692095123007180dca3e788bb4c399cc26626da51629a31d40207cb262e67f4"}, - {file = "msgpack-1.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3765afa6bd4832fc11c3749be4ba4b69a0e8d7b728f78e68120a157a4c5d41f0"}, - {file = "msgpack-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8ddb2bcfd1a8b9e431c8d6f4f7db0773084e107730ecf3472f1dfe9ad583f3d9"}, - {file = "msgpack-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:196a736f0526a03653d829d7d4c5500a97eea3648aebfd4b6743875f28aa2af8"}, - {file = "msgpack-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d592d06e3cc2f537ceeeb23d38799c6ad83255289bb84c2e5792e5a8dea268a"}, - {file = "msgpack-1.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4df2311b0ce24f06ba253fda361f938dfecd7b961576f9be3f3fbd60e87130ac"}, - {file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e4141c5a32b5e37905b5940aacbc59739f036930367d7acce7a64e4dec1f5e0b"}, - {file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b1ce7f41670c5a69e1389420436f41385b1aa2504c3b0c30620764b15dded2e7"}, - {file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4147151acabb9caed4e474c3344181e91ff7a388b888f1e19ea04f7e73dc7ad5"}, - {file = "msgpack-1.1.1-cp313-cp313-win32.whl", hash = "sha256:500e85823a27d6d9bba1d057c871b4210c1dd6fb01fbb764e37e4e8847376323"}, - {file = "msgpack-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:6d489fba546295983abd142812bda76b57e33d0b9f5d5b71c09a583285506f69"}, - {file = "msgpack-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bba1be28247e68994355e028dcd668316db30c1f758d3241a7b903ac78dcd285"}, - {file = "msgpack-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8f93dcddb243159c9e4109c9750ba5b335ab8d48d9522c5308cd05d7e3ce600"}, - {file = "msgpack-1.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fbbc0b906a24038c9958a1ba7ae0918ad35b06cb449d398b76a7d08470b0ed9"}, - {file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:61e35a55a546a1690d9d09effaa436c25ae6130573b6ee9829c37ef0f18d5e78"}, - {file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:1abfc6e949b352dadf4bce0eb78023212ec5ac42f6abfd469ce91d783c149c2a"}, - {file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:996f2609ddf0142daba4cefd767d6db26958aac8439ee41db9cc0db9f4c4c3a6"}, - {file = "msgpack-1.1.1-cp38-cp38-win32.whl", hash = "sha256:4d3237b224b930d58e9d83c81c0dba7aacc20fcc2f89c1e5423aa0529a4cd142"}, - {file = "msgpack-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:da8f41e602574ece93dbbda1fab24650d6bf2a24089f9e9dbb4f5730ec1e58ad"}, - {file = "msgpack-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5be6b6bc52fad84d010cb45433720327ce886009d862f46b26d4d154001994b"}, - {file = "msgpack-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a89cd8c087ea67e64844287ea52888239cbd2940884eafd2dcd25754fb72232"}, - {file = "msgpack-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d75f3807a9900a7d575d8d6674a3a47e9f227e8716256f35bc6f03fc597ffbf"}, - {file = "msgpack-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d182dac0221eb8faef2e6f44701812b467c02674a322c739355c39e94730cdbf"}, - {file = "msgpack-1.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b13fe0fb4aac1aa5320cd693b297fe6fdef0e7bea5518cbc2dd5299f873ae90"}, - {file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:435807eeb1bc791ceb3247d13c79868deb22184e1fc4224808750f0d7d1affc1"}, - {file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4835d17af722609a45e16037bb1d4d78b7bdf19d6c0128116d178956618c4e88"}, - {file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8ef6e342c137888ebbfb233e02b8fbd689bb5b5fcc59b34711ac47ebd504478"}, - {file = "msgpack-1.1.1-cp39-cp39-win32.whl", hash = "sha256:61abccf9de335d9efd149e2fff97ed5974f2481b3353772e8e2dd3402ba2bd57"}, - {file = "msgpack-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:40eae974c873b2992fd36424a5d9407f93e97656d999f43fca9d29f820899084"}, - {file = "msgpack-1.1.1.tar.gz", hash = "sha256:77b79ce34a2bdab2594f490c8e80dd62a02d650b91a75159a63ec413b8d104cd"}, + {file = "msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2"}, + {file = "msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87"}, + {file = "msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b696e83c9f1532b4af884045ba7f3aa741a63b2bc22617293a2c6a7c645f251"}, + {file = "msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a"}, + {file = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:41d1a5d875680166d3ac5c38573896453bbbea7092936d2e107214daf43b1d4f"}, + {file = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354e81bcdebaab427c3df4281187edc765d5d76bfb3a7c125af9da7a27e8458f"}, + {file = "msgpack-1.1.2-cp310-cp310-win32.whl", hash = "sha256:e64c8d2f5e5d5fda7b842f55dec6133260ea8f53c4257d64494c534f306bf7a9"}, + {file = "msgpack-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:db6192777d943bdaaafb6ba66d44bf65aa0e9c5616fa1d2da9bb08828c6b39aa"}, + {file = "msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c"}, + {file = "msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0"}, + {file = "msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296"}, + {file = "msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef"}, + {file = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c"}, + {file = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e"}, + {file = "msgpack-1.1.2-cp311-cp311-win32.whl", hash = "sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e"}, + {file = "msgpack-1.1.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68"}, + {file = "msgpack-1.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406"}, + {file = "msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa"}, + {file = "msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb"}, + {file = "msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f"}, + {file = "msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42"}, + {file = "msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9"}, + {file = "msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620"}, + {file = "msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029"}, + {file = "msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b"}, + {file = "msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69"}, + {file = "msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf"}, + {file = "msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7"}, + {file = "msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999"}, + {file = "msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e"}, + {file = "msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162"}, + {file = "msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794"}, + {file = "msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c"}, + {file = "msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9"}, + {file = "msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84"}, + {file = "msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00"}, + {file = "msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939"}, + {file = "msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e"}, + {file = "msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931"}, + {file = "msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014"}, + {file = "msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2"}, + {file = "msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717"}, + {file = "msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b"}, + {file = "msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af"}, + {file = "msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a"}, + {file = "msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b"}, + {file = "msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245"}, + {file = "msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90"}, + {file = "msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20"}, + {file = "msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27"}, + {file = "msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b"}, + {file = "msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff"}, + {file = "msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46"}, + {file = "msgpack-1.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ea5405c46e690122a76531ab97a079e184c0daf491e588592d6a23d3e32af99e"}, + {file = "msgpack-1.1.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fba231af7a933400238cb357ecccf8ab5d51535ea95d94fc35b7806218ff844"}, + {file = "msgpack-1.1.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a8f6e7d30253714751aa0b0c84ae28948e852ee7fb0524082e6716769124bc23"}, + {file = "msgpack-1.1.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:94fd7dc7d8cb0a54432f296f2246bc39474e017204ca6f4ff345941d4ed285a7"}, + {file = "msgpack-1.1.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:350ad5353a467d9e3b126d8d1b90fe05ad081e2e1cef5753f8c345217c37e7b8"}, + {file = "msgpack-1.1.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6bde749afe671dc44893f8d08e83bf475a1a14570d67c4bb5cec5573463c8833"}, + {file = "msgpack-1.1.2-cp39-cp39-win32.whl", hash = "sha256:ad09b984828d6b7bb52d1d1d0c9be68ad781fa004ca39216c8a1e63c0f34ba3c"}, + {file = "msgpack-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:67016ae8c8965124fdede9d3769528ad8284f14d635337ffa6a713a580f6c030"}, + {file = "msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e"}, ] [[package]] @@ -1757,6 +1857,40 @@ files = [ 
llvmlite = "==0.43.*" numpy = ">=1.22,<2.1" +[[package]] +name = "numba" +version = "0.62.1" +description = "compiling Python code using LLVM" +optional = true +python-versions = ">=3.10" +files = [ + {file = "numba-0.62.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a323df9d36a0da1ca9c592a6baaddd0176d9f417ef49a65bb81951dce69d941a"}, + {file = "numba-0.62.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1e1f4781d3f9f7c23f16eb04e76ca10b5a3516e959634bd226fc48d5d8e7a0a"}, + {file = "numba-0.62.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:14432af305ea68627a084cd702124fd5d0c1f5b8a413b05f4e14757202d1cf6c"}, + {file = "numba-0.62.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f180922adf159ae36c2fe79fb94ffaa74cf5cb3688cb72dba0a904b91e978507"}, + {file = "numba-0.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:f41834909d411b4b8d1c68f745144136f21416547009c1e860cc2098754b4ca7"}, + {file = "numba-0.62.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:f43e24b057714e480fe44bc6031de499e7cf8150c63eb461192caa6cc8530bc8"}, + {file = "numba-0.62.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:57cbddc53b9ee02830b828a8428757f5c218831ccc96490a314ef569d8342b7b"}, + {file = "numba-0.62.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:604059730c637c7885386521bb1b0ddcbc91fd56131a6dcc54163d6f1804c872"}, + {file = "numba-0.62.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6c540880170bee817011757dc9049dba5a29db0c09b4d2349295991fe3ee55f"}, + {file = "numba-0.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:03de6d691d6b6e2b76660ba0f38f37b81ece8b2cc524a62f2a0cfae2bfb6f9da"}, + {file = "numba-0.62.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:1b743b32f8fa5fff22e19c2e906db2f0a340782caf024477b97801b918cf0494"}, + {file = "numba-0.62.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90fa21b0142bcf08ad8e32a97d25d0b84b1e921bc9423f8dda07d3652860eef6"}, + {file = "numba-0.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6ef84d0ac19f1bf80431347b6f4ce3c39b7ec13f48f233a48c01e2ec06ecbc59"}, + {file = "numba-0.62.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9315cc5e441300e0ca07c828a627d92a6802bcbf27c5487f31ae73783c58da53"}, + {file = "numba-0.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:44e3aa6228039992f058f5ebfcfd372c83798e9464297bdad8cc79febcf7891e"}, + {file = "numba-0.62.1-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:b72489ba8411cc9fdcaa2458d8f7677751e94f0109eeb53e5becfdc818c64afb"}, + {file = "numba-0.62.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:44a1412095534a26fb5da2717bc755b57da5f3053965128fe3dc286652cc6a92"}, + {file = "numba-0.62.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8c9460b9e936c5bd2f0570e20a0a5909ee6e8b694fd958b210e3bde3a6dba2d7"}, + {file = "numba-0.62.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:728f91a874192df22d74e3fd42c12900b7ce7190b1aad3574c6c61b08313e4c5"}, + {file = "numba-0.62.1-cp313-cp313-win_amd64.whl", hash = "sha256:bbf3f88b461514287df66bc8d0307e949b09f2b6f67da92265094e8fa1282dd8"}, + {file = "numba-0.62.1.tar.gz", hash = "sha256:7b774242aa890e34c21200a1fc62e5b5757d5286267e71103257f4e2af0d5161"}, +] + +[package.dependencies] +llvmlite = "==0.45.*" +numpy = ">=1.22,<2.4" + [[package]] name = "numpy" version = "2.0.2" @@ -1811,6 +1945,89 @@ files = [ {file = "numpy-2.0.2.tar.gz", hash = 
"sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, ] +[[package]] +name = "numpy" +version = "2.3.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.11" +files = [ + {file = "numpy-2.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e78aecd2800b32e8347ce49316d3eaf04aed849cd5b38e0af39f829a4e59f5eb"}, + {file = "numpy-2.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7fd09cc5d65bda1e79432859c40978010622112e9194e581e3415a3eccc7f43f"}, + {file = "numpy-2.3.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1b219560ae2c1de48ead517d085bc2d05b9433f8e49d0955c82e8cd37bd7bf36"}, + {file = "numpy-2.3.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:bafa7d87d4c99752d07815ed7a2c0964f8ab311eb8168f41b910bd01d15b6032"}, + {file = "numpy-2.3.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36dc13af226aeab72b7abad501d370d606326a0029b9f435eacb3b8c94b8a8b7"}, + {file = "numpy-2.3.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7b2f9a18b5ff9824a6af80de4f37f4ec3c2aab05ef08f51c77a093f5b89adda"}, + {file = "numpy-2.3.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9984bd645a8db6ca15d850ff996856d8762c51a2239225288f08f9050ca240a0"}, + {file = "numpy-2.3.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:64c5825affc76942973a70acf438a8ab618dbd692b84cd5ec40a0a0509edc09a"}, + {file = "numpy-2.3.4-cp311-cp311-win32.whl", hash = "sha256:ed759bf7a70342f7817d88376eb7142fab9fef8320d6019ef87fae05a99874e1"}, + {file = "numpy-2.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:faba246fb30ea2a526c2e9645f61612341de1a83fb1e0c5edf4ddda5a9c10996"}, + {file = "numpy-2.3.4-cp311-cp311-win_arm64.whl", hash = "sha256:4c01835e718bcebe80394fd0ac66c07cbb90147ebbdad3dcecd3f25de2ae7e2c"}, + {file = "numpy-2.3.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ef1b5a3e808bc40827b5fa2c8196151a4c5abe110e1726949d7abddfe5c7ae11"}, + {file = "numpy-2.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2f91f496a87235c6aaf6d3f3d89b17dba64996abadccb289f48456cff931ca9"}, + {file = "numpy-2.3.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f77e5b3d3da652b474cc80a14084927a5e86a5eccf54ca8ca5cbd697bf7f2667"}, + {file = "numpy-2.3.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ab1c5f5ee40d6e01cbe96de5863e39b215a4d24e7d007cad56c7184fdf4aeef"}, + {file = "numpy-2.3.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77b84453f3adcb994ddbd0d1c5d11db2d6bda1a2b7fd5ac5bd4649d6f5dc682e"}, + {file = "numpy-2.3.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4121c5beb58a7f9e6dfdee612cb24f4df5cd4db6e8261d7f4d7450a997a65d6a"}, + {file = "numpy-2.3.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65611ecbb00ac9846efe04db15cbe6186f562f6bb7e5e05f077e53a599225d16"}, + {file = "numpy-2.3.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dabc42f9c6577bcc13001b8810d300fe814b4cfbe8a92c873f269484594f9786"}, + {file = "numpy-2.3.4-cp312-cp312-win32.whl", hash = "sha256:a49d797192a8d950ca59ee2d0337a4d804f713bb5c3c50e8db26d49666e351dc"}, + {file = "numpy-2.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:985f1e46358f06c2a09921e8921e2c98168ed4ae12ccd6e5e87a4f1857923f32"}, + {file = "numpy-2.3.4-cp312-cp312-win_arm64.whl", hash = "sha256:4635239814149e06e2cb9db3dd584b2fa64316c96f10656983b8026a82e6e4db"}, + {file = "numpy-2.3.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:c090d4860032b857d94144d1a9976b8e36709e40386db289aaf6672de2a81966"}, + {file = "numpy-2.3.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a13fc473b6db0be619e45f11f9e81260f7302f8d180c49a22b6e6120022596b3"}, + {file = "numpy-2.3.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:3634093d0b428e6c32c3a69b78e554f0cd20ee420dcad5a9f3b2a63762ce4197"}, + {file = "numpy-2.3.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:043885b4f7e6e232d7df4f51ffdef8c36320ee9d5f227b380ea636722c7ed12e"}, + {file = "numpy-2.3.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ee6a571d1e4f0ea6d5f22d6e5fbd6ed1dc2b18542848e1e7301bd190500c9d7"}, + {file = "numpy-2.3.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fc8a63918b04b8571789688b2780ab2b4a33ab44bfe8ccea36d3eba51228c953"}, + {file = "numpy-2.3.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:40cc556d5abbc54aabe2b1ae287042d7bdb80c08edede19f0c0afb36ae586f37"}, + {file = "numpy-2.3.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ecb63014bb7f4ce653f8be7f1df8cbc6093a5a2811211770f6606cc92b5a78fd"}, + {file = "numpy-2.3.4-cp313-cp313-win32.whl", hash = "sha256:e8370eb6925bb8c1c4264fec52b0384b44f675f191df91cbe0140ec9f0955646"}, + {file = "numpy-2.3.4-cp313-cp313-win_amd64.whl", hash = "sha256:56209416e81a7893036eea03abcb91c130643eb14233b2515c90dcac963fe99d"}, + {file = "numpy-2.3.4-cp313-cp313-win_arm64.whl", hash = "sha256:a700a4031bc0fd6936e78a752eefb79092cecad2599ea9c8039c548bc097f9bc"}, + {file = "numpy-2.3.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:86966db35c4040fdca64f0816a1c1dd8dbd027d90fca5a57e00e1ca4cd41b879"}, + {file = "numpy-2.3.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:838f045478638b26c375ee96ea89464d38428c69170360b23a1a50fa4baa3562"}, + {file = "numpy-2.3.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d7315ed1dab0286adca467377c8381cd748f3dc92235f22a7dfc42745644a96a"}, + {file = "numpy-2.3.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:84f01a4d18b2cc4ade1814a08e5f3c907b079c847051d720fad15ce37aa930b6"}, + {file = "numpy-2.3.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:817e719a868f0dacde4abdfc5c1910b301877970195db9ab6a5e2c4bd5b121f7"}, + {file = "numpy-2.3.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85e071da78d92a214212cacea81c6da557cab307f2c34b5f85b628e94803f9c0"}, + {file = "numpy-2.3.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2ec646892819370cf3558f518797f16597b4e4669894a2ba712caccc9da53f1f"}, + {file = "numpy-2.3.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:035796aaaddfe2f9664b9a9372f089cfc88bd795a67bd1bfe15e6e770934cf64"}, + {file = "numpy-2.3.4-cp313-cp313t-win32.whl", hash = "sha256:fea80f4f4cf83b54c3a051f2f727870ee51e22f0248d3114b8e755d160b38cfb"}, + {file = "numpy-2.3.4-cp313-cp313t-win_amd64.whl", hash = "sha256:15eea9f306b98e0be91eb344a94c0e630689ef302e10c2ce5f7e11905c704f9c"}, + {file = "numpy-2.3.4-cp313-cp313t-win_arm64.whl", hash = "sha256:b6c231c9c2fadbae4011ca5e7e83e12dc4a5072f1a1d85a0a7b3ed754d145a40"}, + {file = "numpy-2.3.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:81c3e6d8c97295a7360d367f9f8553973651b76907988bb6066376bc2252f24e"}, + {file = "numpy-2.3.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7c26b0b2bf58009ed1f38a641f3db4be8d960a417ca96d14e5b06df1506d41ff"}, + {file = "numpy-2.3.4-cp314-cp314-macosx_14_0_arm64.whl", hash = 
"sha256:62b2198c438058a20b6704351b35a1d7db881812d8512d67a69c9de1f18ca05f"}, + {file = "numpy-2.3.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:9d729d60f8d53a7361707f4b68a9663c968882dd4f09e0d58c044c8bf5faee7b"}, + {file = "numpy-2.3.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd0c630cf256b0a7fd9d0a11c9413b42fef5101219ce6ed5a09624f5a65392c7"}, + {file = "numpy-2.3.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5e081bc082825f8b139f9e9fe42942cb4054524598aaeb177ff476cc76d09d2"}, + {file = "numpy-2.3.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:15fb27364ed84114438fff8aaf998c9e19adbeba08c0b75409f8c452a8692c52"}, + {file = "numpy-2.3.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:85d9fb2d8cd998c84d13a79a09cc0c1091648e848e4e6249b0ccd7f6b487fa26"}, + {file = "numpy-2.3.4-cp314-cp314-win32.whl", hash = "sha256:e73d63fd04e3a9d6bc187f5455d81abfad05660b212c8804bf3b407e984cd2bc"}, + {file = "numpy-2.3.4-cp314-cp314-win_amd64.whl", hash = "sha256:3da3491cee49cf16157e70f607c03a217ea6647b1cea4819c4f48e53d49139b9"}, + {file = "numpy-2.3.4-cp314-cp314-win_arm64.whl", hash = "sha256:6d9cd732068e8288dbe2717177320723ccec4fb064123f0caf9bbd90ab5be868"}, + {file = "numpy-2.3.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:22758999b256b595cf0b1d102b133bb61866ba5ceecf15f759623b64c020c9ec"}, + {file = "numpy-2.3.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9cb177bc55b010b19798dc5497d540dea67fd13a8d9e882b2dae71de0cf09eb3"}, + {file = "numpy-2.3.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0f2bcc76f1e05e5ab58893407c63d90b2029908fa41f9f1cc51eecce936c3365"}, + {file = "numpy-2.3.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:8dc20bde86802df2ed8397a08d793da0ad7a5fd4ea3ac85d757bf5dd4ad7c252"}, + {file = "numpy-2.3.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e199c087e2aa71c8f9ce1cb7a8e10677dc12457e7cc1be4798632da37c3e86e"}, + {file = "numpy-2.3.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85597b2d25ddf655495e2363fe044b0ae999b75bc4d630dc0d886484b03a5eb0"}, + {file = "numpy-2.3.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04a69abe45b49c5955923cf2c407843d1c85013b424ae8a560bba16c92fe44a0"}, + {file = "numpy-2.3.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e1708fac43ef8b419c975926ce1eaf793b0c13b7356cfab6ab0dc34c0a02ac0f"}, + {file = "numpy-2.3.4-cp314-cp314t-win32.whl", hash = "sha256:863e3b5f4d9915aaf1b8ec79ae560ad21f0b8d5e3adc31e73126491bb86dee1d"}, + {file = "numpy-2.3.4-cp314-cp314t-win_amd64.whl", hash = "sha256:962064de37b9aef801d33bc579690f8bfe6c5e70e29b61783f60bcba838a14d6"}, + {file = "numpy-2.3.4-cp314-cp314t-win_arm64.whl", hash = "sha256:8b5a9a39c45d852b62693d9b3f3e0fe052541f804296ff401a72a1b60edafb29"}, + {file = "numpy-2.3.4-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6e274603039f924c0fe5cb73438fa9246699c78a6df1bd3decef9ae592ae1c05"}, + {file = "numpy-2.3.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d149aee5c72176d9ddbc6803aef9c0f6d2ceeea7626574fc68518da5476fa346"}, + {file = "numpy-2.3.4-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:6d34ed9db9e6395bb6cd33286035f73a59b058169733a9db9f85e650b88df37e"}, + {file = "numpy-2.3.4-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:fdebe771ca06bb8d6abce84e51dca9f7921fe6ad34a0c914541b063e9a68928b"}, + {file = "numpy-2.3.4-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:957e92defe6c08211eb77902253b14fe5b480ebc5112bc741fd5e9cd0608f847"}, + {file = "numpy-2.3.4-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13b9062e4f5c7ee5c7e5be96f29ba71bc5a37fed3d1d77c37390ae00724d296d"}, + {file = "numpy-2.3.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:81b3a59793523e552c4a96109dde028aa4448ae06ccac5a76ff6532a85558a7f"}, + {file = "numpy-2.3.4.tar.gz", hash = "sha256:a7d018bfedb375a8d979ac758b120ba846a7fe764911a64465fd87b8729f4a6a"}, +] + [[package]] name = "oauthlib" version = "3.3.1" @@ -2921,6 +3138,17 @@ urllib3 = ">=1.25.10,<3.0" [package.extras] tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] +[[package]] +name = "roman" +version = "5.1" +description = "Integer to Roman numerals converter" +optional = false +python-versions = ">=3.9" +files = [ + {file = "roman-5.1-py3-none-any.whl", hash = "sha256:bf595d8a9bc4a8e8b1dfa23e1d4def0251b03b494786df6b8c3d3f1635ce285a"}, + {file = "roman-5.1.tar.gz", hash = "sha256:3a86572e9bc9183e771769601189e5fa32f1620ffeceebb9eca836affb409986"}, +] + [[package]] name = "ruff" version = "0.12.3" @@ -3044,6 +3272,84 @@ dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pyde doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +[[package]] +name = "scipy" +version = "1.16.2" +description = "Fundamental algorithms for scientific computing in Python" +optional = true +python-versions = ">=3.11" +files = [ + {file = "scipy-1.16.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:6ab88ea43a57da1af33292ebd04b417e8e2eaf9d5aa05700be8d6e1b6501cd92"}, + {file = "scipy-1.16.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c95e96c7305c96ede73a7389f46ccd6c659c4da5ef1b2789466baeaed3622b6e"}, + {file = "scipy-1.16.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:87eb178db04ece7c698220d523c170125dbffebb7af0345e66c3554f6f60c173"}, + {file = "scipy-1.16.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:4e409eac067dcee96a57fbcf424c13f428037827ec7ee3cb671ff525ca4fc34d"}, + {file = "scipy-1.16.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e574be127bb760f0dad24ff6e217c80213d153058372362ccb9555a10fc5e8d2"}, + {file = "scipy-1.16.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f5db5ba6188d698ba7abab982ad6973265b74bb40a1efe1821b58c87f73892b9"}, + {file = "scipy-1.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec6e74c4e884104ae006d34110677bfe0098203a3fec2f3faf349f4cb05165e3"}, + {file = "scipy-1.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:912f46667d2d3834bc3d57361f854226475f695eb08c08a904aadb1c936b6a88"}, + {file = "scipy-1.16.2-cp311-cp311-win_amd64.whl", hash = "sha256:91e9e8a37befa5a69e9cacbe0bcb79ae5afb4a0b130fd6db6ee6cc0d491695fa"}, + {file = "scipy-1.16.2-cp311-cp311-win_arm64.whl", hash = "sha256:f3bf75a6dcecab62afde4d1f973f1692be013110cad5338007927db8da73249c"}, + {file = "scipy-1.16.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:89d6c100fa5c48472047632e06f0876b3c4931aac1f4291afc81a3644316bb0d"}, + {file = 
"scipy-1.16.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ca748936cd579d3f01928b30a17dc474550b01272d8046e3e1ee593f23620371"}, + {file = "scipy-1.16.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:fac4f8ce2ddb40e2e3d0f7ec36d2a1e7f92559a2471e59aec37bd8d9de01fec0"}, + {file = "scipy-1.16.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:033570f1dcefd79547a88e18bccacff025c8c647a330381064f561d43b821232"}, + {file = "scipy-1.16.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ea3421209bf00c8a5ef2227de496601087d8f638a2363ee09af059bd70976dc1"}, + {file = "scipy-1.16.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f66bd07ba6f84cd4a380b41d1bf3c59ea488b590a2ff96744845163309ee8e2f"}, + {file = "scipy-1.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e9feab931bd2aea4a23388c962df6468af3d808ddf2d40f94a81c5dc38f32ef"}, + {file = "scipy-1.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03dfc75e52f72cf23ec2ced468645321407faad8f0fe7b1f5b49264adbc29cb1"}, + {file = "scipy-1.16.2-cp312-cp312-win_amd64.whl", hash = "sha256:0ce54e07bbb394b417457409a64fd015be623f36e330ac49306433ffe04bc97e"}, + {file = "scipy-1.16.2-cp312-cp312-win_arm64.whl", hash = "sha256:2a8ffaa4ac0df81a0b94577b18ee079f13fecdb924df3328fc44a7dc5ac46851"}, + {file = "scipy-1.16.2-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:84f7bf944b43e20b8a894f5fe593976926744f6c185bacfcbdfbb62736b5cc70"}, + {file = "scipy-1.16.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:5c39026d12edc826a1ef2ad35ad1e6d7f087f934bb868fc43fa3049c8b8508f9"}, + {file = "scipy-1.16.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e52729ffd45b68777c5319560014d6fd251294200625d9d70fd8626516fc49f5"}, + {file = "scipy-1.16.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:024dd4a118cccec09ca3209b7e8e614931a6ffb804b2a601839499cb88bdf925"}, + {file = "scipy-1.16.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7a5dc7ee9c33019973a470556081b0fd3c9f4c44019191039f9769183141a4d9"}, + {file = "scipy-1.16.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c2275ff105e508942f99d4e3bc56b6ef5e4b3c0af970386ca56b777608ce95b7"}, + {file = "scipy-1.16.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:af80196eaa84f033e48444d2e0786ec47d328ba00c71e4299b602235ffef9acb"}, + {file = "scipy-1.16.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9fb1eb735fe3d6ed1f89918224e3385fbf6f9e23757cacc35f9c78d3b712dd6e"}, + {file = "scipy-1.16.2-cp313-cp313-win_amd64.whl", hash = "sha256:fda714cf45ba43c9d3bae8f2585c777f64e3f89a2e073b668b32ede412d8f52c"}, + {file = "scipy-1.16.2-cp313-cp313-win_arm64.whl", hash = "sha256:2f5350da923ccfd0b00e07c3e5cfb316c1c0d6c1d864c07a72d092e9f20db104"}, + {file = "scipy-1.16.2-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:53d8d2ee29b925344c13bda64ab51785f016b1b9617849dac10897f0701b20c1"}, + {file = "scipy-1.16.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:9e05e33657efb4c6a9d23bd8300101536abd99c85cca82da0bffff8d8764d08a"}, + {file = "scipy-1.16.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:7fe65b36036357003b3ef9d37547abeefaa353b237e989c21027b8ed62b12d4f"}, + {file = "scipy-1.16.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6406d2ac6d40b861cccf57f49592f9779071655e9f75cd4f977fa0bdd09cb2e4"}, + {file = "scipy-1.16.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ff4dc42bd321991fbf611c23fc35912d690f731c9914bf3af8f417e64aca0f21"}, + {file 
= "scipy-1.16.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:654324826654d4d9133e10675325708fb954bc84dae6e9ad0a52e75c6b1a01d7"}, + {file = "scipy-1.16.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63870a84cd15c44e65220eaed2dac0e8f8b26bbb991456a033c1d9abfe8a94f8"}, + {file = "scipy-1.16.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:fa01f0f6a3050fa6a9771a95d5faccc8e2f5a92b4a2e5440a0fa7264a2398472"}, + {file = "scipy-1.16.2-cp313-cp313t-win_amd64.whl", hash = "sha256:116296e89fba96f76353a8579820c2512f6e55835d3fad7780fece04367de351"}, + {file = "scipy-1.16.2-cp313-cp313t-win_arm64.whl", hash = "sha256:98e22834650be81d42982360382b43b17f7ba95e0e6993e2a4f5b9ad9283a94d"}, + {file = "scipy-1.16.2-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:567e77755019bb7461513c87f02bb73fb65b11f049aaaa8ca17cfaa5a5c45d77"}, + {file = "scipy-1.16.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:17d9bb346194e8967296621208fcdfd39b55498ef7d2f376884d5ac47cec1a70"}, + {file = "scipy-1.16.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:0a17541827a9b78b777d33b623a6dcfe2ef4a25806204d08ead0768f4e529a88"}, + {file = "scipy-1.16.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:d7d4c6ba016ffc0f9568d012f5f1eb77ddd99412aea121e6fa8b4c3b7cbad91f"}, + {file = "scipy-1.16.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9702c4c023227785c779cba2e1d6f7635dbb5b2e0936cdd3a4ecb98d78fd41eb"}, + {file = "scipy-1.16.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d1cdf0ac28948d225decdefcc45ad7dd91716c29ab56ef32f8e0d50657dffcc7"}, + {file = "scipy-1.16.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:70327d6aa572a17c2941cdfb20673f82e536e91850a2e4cb0c5b858b690e1548"}, + {file = "scipy-1.16.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5221c0b2a4b58aa7c4ed0387d360fd90ee9086d383bb34d9f2789fafddc8a936"}, + {file = "scipy-1.16.2-cp314-cp314-win_amd64.whl", hash = "sha256:f5a85d7b2b708025af08f060a496dd261055b617d776fc05a1a1cc69e09fe9ff"}, + {file = "scipy-1.16.2-cp314-cp314-win_arm64.whl", hash = "sha256:2cc73a33305b4b24556957d5857d6253ce1e2dcd67fa0ff46d87d1670b3e1e1d"}, + {file = "scipy-1.16.2-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:9ea2a3fed83065d77367775d689401a703d0f697420719ee10c0780bcab594d8"}, + {file = "scipy-1.16.2-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:7280d926f11ca945c3ef92ba960fa924e1465f8d07ce3a9923080363390624c4"}, + {file = "scipy-1.16.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:8afae1756f6a1fe04636407ef7dbece33d826a5d462b74f3d0eb82deabefd831"}, + {file = "scipy-1.16.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:5c66511f29aa8d233388e7416a3f20d5cae7a2744d5cee2ecd38c081f4e861b3"}, + {file = "scipy-1.16.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:efe6305aeaa0e96b0ccca5ff647a43737d9a092064a3894e46c414db84bc54ac"}, + {file = "scipy-1.16.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7f3a337d9ae06a1e8d655ee9d8ecb835ea5ddcdcbd8d23012afa055ab014f374"}, + {file = "scipy-1.16.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bab3605795d269067d8ce78a910220262711b753de8913d3deeaedb5dded3bb6"}, + {file = "scipy-1.16.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b0348d8ddb55be2a844c518cd8cc8deeeb8aeba707cf834db5758fc89b476a2c"}, + {file = "scipy-1.16.2-cp314-cp314t-win_amd64.whl", hash = "sha256:26284797e38b8a75e14ea6631d29bda11e76ceaa6ddb6fdebbfe4c4d90faf2f9"}, 
+ {file = "scipy-1.16.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d2a4472c231328d4de38d5f1f68fdd6d28a615138f842580a8a321b5845cf779"}, + {file = "scipy-1.16.2.tar.gz", hash = "sha256:af029b153d243a80afb6eabe40b0a07f8e35c9adc269c019f364ad747f826a6b"}, +] + +[package.dependencies] +numpy = ">=1.25.2,<2.6" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] +doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.19.1)", "jupytext", "linkify-it-py", "matplotlib (>=3.5)", "myst-nb (>=1.2.0)", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.2.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] +test = ["Cython", "array-api-strict (>=2.3.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest (>=8.0.0)", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + [[package]] name = "six" version = "1.17.0" @@ -3133,32 +3439,37 @@ files = [ [[package]] name = "soxr" -version = "0.5.0.post1" +version = "1.0.0" description = "High quality, one-dimensional sample-rate conversion library" optional = true python-versions = ">=3.9" files = [ - {file = "soxr-0.5.0.post1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:7406d782d85f8cf64e66b65e6b7721973de8a1dc50b9e88bc2288c343a987484"}, - {file = "soxr-0.5.0.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa0a382fb8d8e2afed2c1642723b2d2d1b9a6728ff89f77f3524034c8885b8c9"}, - {file = "soxr-0.5.0.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b01d3efb95a2851f78414bcd00738b0253eec3f5a1e5482838e965ffef84969"}, - {file = "soxr-0.5.0.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcc049b0a151a65aa75b92f0ac64bb2dba785d16b78c31c2b94e68c141751d6d"}, - {file = "soxr-0.5.0.post1-cp310-cp310-win_amd64.whl", hash = "sha256:97f269bc26937c267a2ace43a77167d0c5c8bba5a2b45863bb6042b5b50c474e"}, - {file = "soxr-0.5.0.post1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:6fb77b626773a966e3d8f6cb24f6f74b5327fa5dc90f1ff492450e9cdc03a378"}, - {file = "soxr-0.5.0.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:39e0f791ba178d69cd676485dbee37e75a34f20daa478d90341ecb7f6d9d690f"}, - {file = "soxr-0.5.0.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f0b558f445ba4b64dbcb37b5f803052eee7d93b1dbbbb97b3ec1787cb5a28eb"}, - {file = "soxr-0.5.0.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca6903671808e0a6078b0d146bb7a2952b118dfba44008b2aa60f221938ba829"}, - {file = "soxr-0.5.0.post1-cp311-cp311-win_amd64.whl", hash = "sha256:c4d8d5283ed6f5efead0df2c05ae82c169cfdfcf5a82999c2d629c78b33775e8"}, - {file = "soxr-0.5.0.post1-cp312-abi3-macosx_10_14_x86_64.whl", hash = "sha256:fef509466c9c25f65eae0ce1e4b9ac9705d22c6038c914160ddaf459589c6e31"}, - {file = "soxr-0.5.0.post1-cp312-abi3-macosx_11_0_arm64.whl", hash = "sha256:4704ba6b13a3f1e41d12acf192878384c1c31f71ce606829c64abdf64a8d7d32"}, - {file = "soxr-0.5.0.post1-cp312-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd052a66471a7335b22a6208601a9d0df7b46b8d087dce4ff6e13eed6a33a2a1"}, - {file = "soxr-0.5.0.post1-cp312-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3f16810dd649ab1f433991d2a9661e9e6a116c2b4101039b53b3c3e90a094fc"}, - {file = 
"soxr-0.5.0.post1-cp312-abi3-win_amd64.whl", hash = "sha256:b1be9fee90afb38546bdbd7bde714d1d9a8c5a45137f97478a83b65e7f3146f6"}, - {file = "soxr-0.5.0.post1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:c5af7b355959061beb90a1d73c4834ece4549f07b708f8c73c088153cec29935"}, - {file = "soxr-0.5.0.post1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e1dda616fc797b1507b65486f3116ed2c929f13c722922963dd419d64ada6c07"}, - {file = "soxr-0.5.0.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94de2812368e98cb42b4eaeddf8ee1657ecc19bd053f8e67b9b5aa12a3592012"}, - {file = "soxr-0.5.0.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8e9c980637e03d3f345a4fd81d56477a58c294fb26205fa121bc4eb23d9d01"}, - {file = "soxr-0.5.0.post1-cp39-cp39-win_amd64.whl", hash = "sha256:7e71b0b0db450f36de70f1047505231db77a713f8c47df9342582ae8a4b828f2"}, - {file = "soxr-0.5.0.post1.tar.gz", hash = "sha256:7092b9f3e8a416044e1fa138c8172520757179763b85dc53aa9504f4813cff73"}, + {file = "soxr-1.0.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:b876a3156f67c76aef0cff1084eaf4088d9ca584bb569cb993f89a52ec5f399f"}, + {file = "soxr-1.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4d3b957a7b0cc19ae6aa45d40b2181474e53a8dd00efd7bce6bcf4e60e020892"}, + {file = "soxr-1.0.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89685faedebc45af71f08f9957b61cc6143bc94ba43fe38e97067f81e272969"}, + {file = "soxr-1.0.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d255741b2f0084fd02d4a2ddd77cd495be9e7e7b6f9dba1c9494f86afefac65b"}, + {file = "soxr-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:158a4a9055958c4b95ef91dbbe280cabb00946b5423b25a9b0ce31bd9e0a271e"}, + {file = "soxr-1.0.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:28e19d74a5ef45c0d7000f3c70ec1719e89077379df2a1215058914d9603d2d8"}, + {file = "soxr-1.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8dc69fc18884e53b72f6141fdf9d80997edbb4fec9dc2942edcb63abbe0d023"}, + {file = "soxr-1.0.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f15450e6f65f22f02fcd4c5a9219c873b1e583a73e232805ff160c759a6b586"}, + {file = "soxr-1.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f73f57452f9df37b4de7a4052789fcbd474a5b28f38bba43278ae4b489d4384"}, + {file = "soxr-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f417c3d69236051cf5a1a7bad7c4bff04eb3d8fcaa24ac1cb06e26c8d48d8dc"}, + {file = "soxr-1.0.0-cp312-abi3-macosx_10_14_x86_64.whl", hash = "sha256:abecf4e39017f3fadb5e051637c272ae5778d838e5c3926a35db36a53e3a607f"}, + {file = "soxr-1.0.0-cp312-abi3-macosx_11_0_arm64.whl", hash = "sha256:e973d487ee46aa8023ca00a139db6e09af053a37a032fe22f9ff0cc2e19c94b4"}, + {file = "soxr-1.0.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e8ce273cca101aff3d8c387db5a5a41001ba76ef1837883438d3c652507a9ccc"}, + {file = "soxr-1.0.0-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8f2a69686f2856d37823bbb7b78c3d44904f311fe70ba49b893af11d6b6047b"}, + {file = "soxr-1.0.0-cp312-abi3-win_amd64.whl", hash = "sha256:2a3b77b115ae7c478eecdbd060ed4f61beda542dfb70639177ac263aceda42a2"}, + {file = "soxr-1.0.0-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:392a5c70c04eb939c9c176bd6f654dec9a0eaa9ba33d8f1024ed63cf68cdba0a"}, + {file = "soxr-1.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:fdc41a1027ba46777186f26a8fba7893be913383414135577522da2fcc684490"}, + {file = "soxr-1.0.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:449acd1dfaf10f0ce6dfd75c7e2ef984890df94008765a6742dafb42061c1a24"}, + {file = "soxr-1.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:38b35c99e408b8f440c9376a5e1dd48014857cd977c117bdaa4304865ae0edd0"}, + {file = "soxr-1.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:a39b519acca2364aa726b24a6fd55acf29e4c8909102e0b858c23013c38328e5"}, + {file = "soxr-1.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:c120775b7d0ef9e974a5797a4695861e88653f7ecd0a2a532f089bc4452ba130"}, + {file = "soxr-1.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e59e5f648bd6144e79a6e0596aa486218876293f5ddce3ca84b9d8f8aa34d6d"}, + {file = "soxr-1.0.0-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bb86c342862697dbd4a44043f275e5196f2d2c49dca374c78f19b7893988675d"}, + {file = "soxr-1.0.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3d2a4fadd88207c2991fb08c29fc189e7b2e298b598a94ea1747e42c8acb7a01"}, + {file = "soxr-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:c7f5ace8f04f924b21caedeeb69f2a7b3d83d2d436639498c08b2cebe181af14"}, + {file = "soxr-1.0.0.tar.gz", hash = "sha256:e07ee6c1d659bc6957034f4800c60cb8b98de798823e34d2a2bba1caa85a4509"}, ] [package.dependencies] @@ -3204,6 +3515,49 @@ docs = ["sphinxcontrib-websupport"] lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] +[[package]] +name = "sphinx-copybutton" +version = "0.5.2" +description = "Add a copy button to each of your code cells." +optional = true +python-versions = ">=3.7" +files = [ + {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, + {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, +] + +[package.dependencies] +sphinx = ">=1.8" + +[package.extras] +code-style = ["pre-commit (==2.12.1)"] +rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] + +[[package]] +name = "sphinx-design" +version = "0.6.1" +description = "A sphinx extension for designing beautiful, view size responsive web components." 
+optional = true +python-versions = ">=3.9" +files = [ + {file = "sphinx_design-0.6.1-py3-none-any.whl", hash = "sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c"}, + {file = "sphinx_design-0.6.1.tar.gz", hash = "sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632"}, +] + +[package.dependencies] +sphinx = ">=6,<9" + +[package.extras] +code-style = ["pre-commit (>=3,<4)"] +rtd = ["myst-parser (>=2,<4)"] +testing = ["defusedxml", "myst-parser (>=2,<4)", "pytest (>=8.3,<9.0)", "pytest-cov", "pytest-regressions"] +testing-no-myst = ["defusedxml", "pytest (>=8.3,<9.0)", "pytest-cov", "pytest-regressions"] +theme-furo = ["furo (>=2024.7.18,<2024.8.0)"] +theme-im = ["sphinx-immaterial (>=0.12.2,<0.13.0)"] +theme-pydata = ["pydata-sphinx-theme (>=0.15.2,<0.16.0)"] +theme-rtd = ["sphinx-rtd-theme (>=2.0,<3.0)"] +theme-sbt = ["sphinx-book-theme (>=1.1,<2.0)"] + [[package]] name = "sphinx-lint" version = "1.0.0" @@ -3316,6 +3670,46 @@ lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] +[[package]] +name = "standard-aifc" +version = "3.13.0" +description = "Standard library aifc redistribution. \"dead battery\"." +optional = true +python-versions = "*" +files = [ + {file = "standard_aifc-3.13.0-py3-none-any.whl", hash = "sha256:f7ae09cc57de1224a0dd8e3eb8f73830be7c3d0bc485de4c1f82b4a7f645ac66"}, + {file = "standard_aifc-3.13.0.tar.gz", hash = "sha256:64e249c7cb4b3daf2fdba4e95721f811bde8bdfc43ad9f936589b7bb2fae2e43"}, +] + +[package.dependencies] +audioop-lts = {version = "*", markers = "python_version >= \"3.13\""} +standard-chunk = {version = "*", markers = "python_version >= \"3.13\""} + +[[package]] +name = "standard-chunk" +version = "3.13.0" +description = "Standard library chunk redistribution. \"dead battery\"." +optional = true +python-versions = "*" +files = [ + {file = "standard_chunk-3.13.0-py3-none-any.whl", hash = "sha256:17880a26c285189c644bd5bd8f8ed2bdb795d216e3293e6dbe55bbd848e2982c"}, + {file = "standard_chunk-3.13.0.tar.gz", hash = "sha256:4ac345d37d7e686d2755e01836b8d98eda0d1a3ee90375e597ae43aaf064d654"}, +] + +[[package]] +name = "standard-sunau" +version = "3.13.0" +description = "Standard library sunau redistribution. \"dead battery\"." 
+optional = true +python-versions = "*" +files = [ + {file = "standard_sunau-3.13.0-py3-none-any.whl", hash = "sha256:53af624a9529c41062f4c2fd33837f297f3baa196b0cfceffea6555654602622"}, + {file = "standard_sunau-3.13.0.tar.gz", hash = "sha256:b319a1ac95a09a2378a8442f403c66f4fd4b36616d6df6ae82b8e536ee790908"}, +] + +[package.dependencies] +audioop-lts = {version = "*", markers = "python_version >= \"3.13\""} + [[package]] name = "tabulate" version = "0.9.0" @@ -3418,6 +3812,17 @@ files = [ [package.dependencies] types-html5lib = "*" +[[package]] +name = "types-docutils" +version = "0.22.2.20251006" +description = "Typing stubs for docutils" +optional = false +python-versions = ">=3.9" +files = [ + {file = "types_docutils-0.22.2.20251006-py3-none-any.whl", hash = "sha256:1e61afdeb4fab4ae802034deea3e853ced5c9b5e1d156179000cb68c85daf384"}, + {file = "types_docutils-0.22.2.20251006.tar.gz", hash = "sha256:c36c0459106eda39e908e9147bcff9dbd88535975cde399433c428a517b9e3b2"}, +] + [[package]] name = "types-flask-cors" version = "6.0.0.20250520" @@ -3503,13 +3908,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.14.1" +version = "4.15.0" description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" files = [ - {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, - {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] [[package]] @@ -3595,7 +4000,7 @@ beatport = ["requests-oauthlib"] bpd = ["PyGObject"] chroma = ["pyacoustid"] discogs = ["python3-discogs-client"] -docs = ["pydata-sphinx-theme", "sphinx"] +docs = ["docutils", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinx-design"] embedart = ["Pillow"] embyupdate = ["requests"] fetchart = ["Pillow", "beautifulsoup4", "langdetect", "requests"] @@ -3617,4 +4022,4 @@ web = ["flask", "flask-cors"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4" -content-hash = "daa6c3c2b5bee3180f74f4186bb29ee1ad825870b5b9f6c2b743fcaa61b34c8c" +content-hash = "be135ccdcad615804f5fc96290d5d8e6ad51a244599356133c2b68bb030f640f" diff --git a/pyproject.toml b/pyproject.toml index dbe8e568a..78e85286b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "beets" -version = "2.3.1" +version = "2.5.1" description = "music tagger and library organizer" authors = ["Adrian Sampson <adrian@radbox.org>"] maintainers = ["Serene-Arc"] @@ -21,6 +21,7 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Programming Language :: Python :: Implementation :: CPython", ] packages = [ @@ -44,12 +45,15 @@ Changelog = "https://github.com/beetbox/beets/blob/master/docs/changelog.rst" python = ">=3.9,<4" colorama = { version = "*", markers = "sys_platform == 'win32'" } -confuse = ">=1.5.0" +confuse = ">=2.1.0" jellyfish = "*" lap = ">=0.5.12" mediafile = ">=0.12.0" musicbrainzngs = ">=0.4" -numpy = ">=1.24.4" +numpy = [ + { python = "<3.13", version = ">=2.0.2" }, + { python = ">=3.13", version = ">=2.3.4" }, +] 
platformdirs = ">=3.5.0" pyyaml = "*" typing_extensions = "*" @@ -60,7 +64,15 @@ dbus-python = { version = "*", optional = true } flask = { version = "*", optional = true } flask-cors = { version = "*", optional = true } langdetect = { version = "*", optional = true } -librosa = { version = "^0.10.2.post1", optional = true } +librosa = { version = ">=0.11", optional = true } +scipy = [ # for librosa + { python = "<3.13", version = ">=1.13.1", optional = true }, + { python = ">=3.13", version = ">=1.16.1", optional = true }, +] +numba = [ # for librosa + { python = "<3.13", version = ">=0.60", optional = true }, + { python = ">=3.13", version = ">=0.62.1", optional = true }, +] mutagen = { version = ">=1.33", optional = true } Pillow = { version = "*", optional = true } py7zr = { version = "*", optional = true } @@ -77,8 +89,11 @@ resampy = { version = ">=0.4.3", optional = true } requests-oauthlib = { version = ">=0.6.1", optional = true } soco = { version = "*", optional = true } +docutils = { version = ">=0.20.1", optional = true } pydata-sphinx-theme = { version = "*", optional = true } sphinx = { version = "*", optional = true } +sphinx-design = { version = ">=0.6.1", optional = true } +sphinx-copybutton = { version = ">=0.5.2", optional = true } [tool.poetry.group.test.dependencies] beautifulsoup4 = "*" @@ -100,13 +115,14 @@ requests_oauthlib = "*" responses = ">=0.3.0" [tool.poetry.group.lint.dependencies] -docstrfmt = ">=1.10.0" +docstrfmt = ">=1.11.1" ruff = ">=0.6.4" sphinx-lint = ">=1.0.0" [tool.poetry.group.typing.dependencies] mypy = "*" types-beautifulsoup4 = "*" +types-docutils = ">=0.22.2.20251006" types-mock = "*" types-Flask-Cors = "*" types-Pillow = "*" @@ -129,7 +145,14 @@ beatport = ["requests-oauthlib"] bpd = ["PyGObject"] # gobject-introspection, gstreamer1.0-plugins-base, python3-gst-1.0 chroma = ["pyacoustid"] # chromaprint or fpcalc # convert # ffmpeg -docs = ["pydata-sphinx-theme", "sphinx", "sphinx-lint"] +docs = [ + "docutils", + "pydata-sphinx-theme", + "sphinx", + "sphinx-lint", + "sphinx-design", + "sphinx-copybutton", +] discogs = ["python3-discogs-client"] embedart = ["Pillow"] # ImageMagick embyupdate = ["requests"] @@ -196,7 +219,8 @@ cmd = "mypy" [tool.poe.tasks.docs] help = "Build documentation" -cmd = "make -C docs html" +args = [{ name = "COMMANDS", positional = true, multiple = true, default = "html" }] +cmd = "make -C docs $COMMANDS" [tool.poe.tasks.format] help = "Format the codebase" @@ -212,7 +236,7 @@ cmd = "ruff check" [tool.poe.tasks.lint-docs] help = "Lint the documentation" -shell = "sphinx-lint --enable all $(git ls-files '*.rst')" +shell = "sphinx-lint --enable all --disable default-role $(git ls-files '*.rst')" [tool.poe.tasks.update-dependencies] help = "Update dependencies to their latest versions." 
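Aside (illustrative only, not part of the patch): the pyproject.toml hunk above declares different minimum versions for numpy, and for the optional scipy/numba companions that librosa needs, depending on whether the interpreter is Python 3.13 or newer. Below is a minimal sketch, using only the standard library, for checking which floor applies in the current environment and what is actually installed; the floors are simply copied from the hunk above, and the script assumes nothing about which beets extra pulled the packages in.

# Illustrative sketch -- not part of the diff above. Prints the Python-version-
# dependent minimums declared in pyproject.toml next to whatever is installed.
import sys
from importlib.metadata import PackageNotFoundError, version

# Floors mirror the constraints in the hunk above (assumption: they stay in sync).
floors = {
    "numpy": ">=2.3.4" if sys.version_info >= (3, 13) else ">=2.0.2",
    "scipy": ">=1.16.1" if sys.version_info >= (3, 13) else ">=1.13.1",
    "numba": ">=0.62.1" if sys.version_info >= (3, 13) else ">=0.60",
}
for name, floor in floors.items():
    try:
        print(f"{name}: installed {version(name)}, declared floor {floor}")
    except PackageNotFoundError:
        print(f"{name}: not installed (optional; pulled in alongside librosa)")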
@@ -275,11 +299,15 @@ select = [ "E", # pycodestyle "F", # pyflakes # "B", # flake8-bugbear + "G", # flake8-logging-format "I", # isort + "ISC", # flake8-implicit-str-concat "N", # pep8-naming "PT", # flake8-pytest-style # "RUF", # ruff # "UP", # pyupgrade + "UP031", # do not use percent formatting + "UP032", # use f-string instead of format call "TCH", # flake8-type-checking "W", # pycodestyle ] diff --git a/test/autotag/test_distance.py b/test/autotag/test_distance.py index e3ce9f891..213d32956 100644 --- a/test/autotag/test_distance.py +++ b/test/autotag/test_distance.py @@ -10,24 +10,23 @@ from beets.autotag.distance import ( track_distance, ) from beets.library import Item +from beets.metadata_plugins import MetadataSourcePlugin, get_penalty +from beets.plugins import BeetsPlugin from beets.test.helper import ConfigMixin _p = pytest.param class TestDistance: - @pytest.fixture(scope="class") - def config(self): - return ConfigMixin().config - - @pytest.fixture - def dist(self, config): - config["match"]["distance_weights"]["source"] = 2.0 + @pytest.fixture(autouse=True, scope="class") + def setup_config(self): + config = ConfigMixin().config + config["match"]["distance_weights"]["data_source"] = 2.0 config["match"]["distance_weights"]["album"] = 4.0 config["match"]["distance_weights"]["medium"] = 2.0 - Distance.__dict__["_weights"].cache = {} - + @pytest.fixture + def dist(self): return Distance() def test_add(self, dist): @@ -103,7 +102,7 @@ class TestDistance: assert dist["media"] == 1 / 6 def test_operators(self, dist): - dist.add("source", 0.0) + dist.add("data_source", 0.0) dist.add("album", 0.5) dist.add("medium", 0.25) dist.add("medium", 0.75) @@ -162,10 +161,8 @@ class TestTrackDistance: def test_track_distance(self, info, title, artist, expected_penalty): item = Item(artist=artist, title=title) - assert ( - bool(track_distance(item, info, incl_artist=True)) - == expected_penalty - ) + dist = track_distance(item, info, incl_artist=True) + assert bool(dist) == expected_penalty, dist._penalties class TestAlbumDistance: @@ -297,3 +294,66 @@ class TestStringDistance: string_dist("The ", "") string_dist("(EP)", "(EP)") string_dist(", An", "") + + +class TestDataSourceDistance: + MATCH = 0.0 + MISMATCH = 0.125 + + @pytest.fixture(autouse=True) + def setup(self, monkeypatch, penalty, weight, multiple_data_sources): + monkeypatch.setitem(Distance._weights, "data_source", weight) + get_penalty.cache_clear() + + class TestMetadataSourcePlugin(MetadataSourcePlugin): + def album_for_id(self, *args, **kwargs): ... + def track_for_id(self, *args, **kwargs): ... + def candidates(self, *args, **kwargs): ... + def item_candidates(self, *args, **kwargs): ... 
+ + # We use BeetsPlugin here to check if our compatibility layer + # for pre 2.4.0 MetadataPlugins is working as expected + # TODO: Replace BeetsPlugin with TestMetadataSourcePlugin in v3.0.0 + with pytest.deprecated_call(): + + class OriginalPlugin(BeetsPlugin): + data_source = "Original" + + class OtherPlugin(TestMetadataSourcePlugin): + @property + def data_source_mismatch_penalty(self): + return penalty + + monkeypatch.setattr( + "beets.metadata_plugins.find_metadata_source_plugins", + lambda: ( + [OriginalPlugin(), OtherPlugin()] + if multiple_data_sources + else [OtherPlugin()] + ), + ) + + @pytest.mark.parametrize( + "item,info,penalty,weight,multiple_data_sources,expected_distance", + [ + _p("Original", "Original", 0.5, 1.0, True, MATCH, id="match"), + _p("Original", "Other", 0.5, 1.0, True, MISMATCH, id="mismatch"), + _p("Other", "Original", 0.5, 1.0, True, MISMATCH, id="mismatch"), + _p("Original", "unknown", 0.5, 1.0, True, MISMATCH, id="mismatch-unknown"), # noqa: E501 + _p("Original", None, 0.5, 1.0, True, MISMATCH, id="mismatch-no-info"), # noqa: E501 + _p(None, "Other", 0.5, 1.0, True, MISMATCH, id="mismatch-no-original-multiple-sources"), # noqa: E501 + _p(None, "Other", 0.5, 1.0, False, MATCH, id="match-no-original-but-single-source"), # noqa: E501 + _p("unknown", "unknown", 0.5, 1.0, True, MATCH, id="match-unknown"), + _p("Original", "Other", 1.0, 1.0, True, 0.25, id="mismatch-max-penalty"), # noqa: E501 + _p("Original", "Other", 0.5, 5.0, True, 0.3125, id="mismatch-high-weight"), # noqa: E501 + _p("Original", "Other", 0.0, 1.0, True, MATCH, id="match-no-penalty"), # noqa: E501 + _p("Original", "Other", 0.5, 0.0, True, MATCH, id="match-no-weight"), # noqa: E501 + ], + ) # fmt: skip + def test_distance(self, item, info, expected_distance): + item = Item(data_source=item) + info = TrackInfo(data_source=info, title="") + + dist = track_distance(item, info) + + assert dist.distance == expected_distance diff --git a/test/conftest.py b/test/conftest.py index 3107ad690..eb46b94b0 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -3,7 +3,9 @@ import os import pytest +from beets.autotag.distance import Distance from beets.dbcore.query import Query +from beets.util import cached_classproperty def skip_marked_items(items: list[pytest.Item], marker_name: str, reason: str): @@ -41,3 +43,13 @@ def pytest_make_parametrize_id(config, val, argname): return inspect.getsource(val).split("lambda")[-1][:30] return repr(val) + + +def pytest_assertrepr_compare(op, left, right): + if isinstance(left, Distance) or isinstance(right, Distance): + return [f"Comparing Distance: {float(left)} {op} {float(right)}"] + + +@pytest.fixture(autouse=True) +def clear_cached_classproperty(): + cached_classproperty.cache.clear() diff --git a/test/plugins/lyrics_pages.py b/test/plugins/lyrics_pages.py index e1806b167..15cb812a1 100644 --- a/test/plugins/lyrics_pages.py +++ b/test/plugins/lyrics_pages.py @@ -128,6 +128,7 @@ lyrics_pages = [ artist="Atlanta", track_title="Mergaitės Nori Mylėt", url_title="Mergaitės nori mylėt – Atlanta | Dainų Žodžiai", + marks=[xfail_on_ci("Expired SSL certificate")], ), LyricsPage.make( "https://genius.com/The-beatles-lady-madonna-lyrics", @@ -328,34 +329,40 @@ lyrics_pages = [ url_title="The Beatles - Lady Madonna Lyrics", ), LyricsPage.make( - "https://www.lyricsmode.com/lyrics/b/beatles/lady_madonna.html", + "https://www.lyricsmode.com/lyrics/b/beatles/mother_natures_son.html", """ - Lady Madonna, children at your feet. - Wonder how you manage to make ends meet. 
- Who finds the money? When you pay the rent? - Did you think that money was heaven sent? + Born a poor young country boy, Mother Nature's son + All day long I'm sitting singing songs for everyone - Friday night arrives without a suitcase. - Sunday morning creep in like a nun. - Mondays child has learned to tie his bootlace. - See how they run. + Sit beside a mountain stream, see her waters rise + Listen to the pretty sound of music as she flies - Lady Madonna, baby at your breast. - Wonder how you manage to feed the rest. + Doo doo doo doo doo doo doo doo doo doo doo + Doo doo doo doo doo doo doo doo doo + Doo doo doo - See how they run. - Lady Madonna, lying on the bed, - Listen to the music playing in your head. + Find me in my field of grass, Mother Nature's son + Swaying daises sing a lazy song beneath the sun - Tuesday afternoon is never ending. - Wednesday morning papers didn't come. - Thursday night you stockings needed mending. - See how they run. + Doo doo doo doo doo doo doo doo doo doo doo + Doo doo doo doo doo doo doo doo doo + Doo doo doo doo doo doo + Yeah yeah yeah - Lady Madonna, children at your feet. - Wonder how you manage to make ends meet. + Mm mm mm mm mm mm mm + Mm mm mm, ooh ooh ooh + Mm mm mm mm mm mm mm + Mm mm mm mm, wah wah wah + + Wah, Mother Nature's son """, - url_title="Lady Madonna lyrics by The Beatles - original song full text. Official Lady Madonna lyrics, 2024 version | LyricsMode.com", # noqa: E501 + artist="The Beatles", + track_title="Mother Nature's Son", + url_title=( + "Mother Nature's Son lyrics by The Beatles - original song full" + " text. Official Mother Nature's Son lyrics, 2025 version" + " | LyricsMode.com" + ), ), LyricsPage.make( "https://www.lyricsontop.com/amy-winehouse-songs/jazz-n-blues-lyrics.html", @@ -528,6 +535,7 @@ lyrics_pages = [ Wonder how you manage to make ends meet. 
""", url_title="The Beatles - Lady Madonna", + marks=[xfail_on_ci("Sweetslyrics also fails with 403 FORBIDDEN in CI")], ), LyricsPage.make( "https://www.tekstowo.pl/piosenka,the_beatles,lady_madonna.html", diff --git a/test/plugins/test_art.py b/test/plugins/test_art.py index 38f8c7559..285bb70e5 100644 --- a/test/plugins/test_art.py +++ b/test/plugins/test_art.py @@ -89,11 +89,11 @@ class CAAHelper: MBID_RELASE = "rid" MBID_GROUP = "rgid" - RELEASE_URL = "coverartarchive.org/release/{}".format(MBID_RELASE) - GROUP_URL = "coverartarchive.org/release-group/{}".format(MBID_GROUP) + RELEASE_URL = f"coverartarchive.org/release/{MBID_RELASE}" + GROUP_URL = f"coverartarchive.org/release-group/{MBID_GROUP}" - RELEASE_URL = "https://" + RELEASE_URL - GROUP_URL = "https://" + GROUP_URL + RELEASE_URL = f"https://{RELEASE_URL}" + GROUP_URL = f"https://{GROUP_URL}" RESPONSE_RELEASE = """{ "images": [ @@ -305,10 +305,8 @@ class FSArtTest(UseThePlugin): class CombinedTest(FetchImageTestCase, CAAHelper): ASIN = "xxxx" MBID = "releaseid" - AMAZON_URL = "https://images.amazon.com/images/P/{}.01.LZZZZZZZ.jpg".format( - ASIN - ) - AAO_URL = "https://www.albumart.org/index_detail.php?asin={}".format(ASIN) + AMAZON_URL = f"https://images.amazon.com/images/P/{ASIN}.01.LZZZZZZZ.jpg" + AAO_URL = f"https://www.albumart.org/index_detail.php?asin={ASIN}" def setUp(self): super().setUp() @@ -708,7 +706,7 @@ class FanartTVTest(UseThePlugin): def test_fanarttv_finds_image(self): album = _common.Bag(mb_releasegroupid="thereleasegroupid") self.mock_response( - fetchart.FanartTV.API_ALBUMS + "thereleasegroupid", + f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid", self.RESPONSE_MULTIPLE, ) candidate = next(self.source.get(album, self.settings, [])) @@ -717,7 +715,7 @@ class FanartTVTest(UseThePlugin): def test_fanarttv_returns_no_result_when_error_received(self): album = _common.Bag(mb_releasegroupid="thereleasegroupid") self.mock_response( - fetchart.FanartTV.API_ALBUMS + "thereleasegroupid", + f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid", self.RESPONSE_ERROR, ) with pytest.raises(StopIteration): @@ -726,7 +724,7 @@ class FanartTVTest(UseThePlugin): def test_fanarttv_returns_no_result_with_malformed_response(self): album = _common.Bag(mb_releasegroupid="thereleasegroupid") self.mock_response( - fetchart.FanartTV.API_ALBUMS + "thereleasegroupid", + f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid", self.RESPONSE_MALFORMED, ) with pytest.raises(StopIteration): @@ -736,7 +734,7 @@ class FanartTVTest(UseThePlugin): # The source used to fail when there were images present, but no cover album = _common.Bag(mb_releasegroupid="thereleasegroupid") self.mock_response( - fetchart.FanartTV.API_ALBUMS + "thereleasegroupid", + f"{fetchart.FanartTV.API_ALBUMS}thereleasegroupid", self.RESPONSE_NO_ART, ) with pytest.raises(StopIteration): diff --git a/test/plugins/test_convert.py b/test/plugins/test_convert.py index dcf684ccc..1452686a7 100644 --- a/test/plugins/test_convert.py +++ b/test/plugins/test_convert.py @@ -49,14 +49,12 @@ class ConvertMixin: """ if re.search("[^a-zA-Z0-9]", tag): raise ValueError( - "tag '{}' must only contain letters and digits".format(tag) + f"tag '{tag}' must only contain letters and digits" ) # A Python script that copies the file and appends a tag. 
stub = os.path.join(_common.RSRC, b"convert_stub.py").decode("utf-8") - return "{} {} $source $dest {}".format( - shell_quote(sys.executable), shell_quote(stub), tag - ) + return f"{shell_quote(sys.executable)} {shell_quote(stub)} $source $dest {tag}" def file_endswith(self, path: Path, tag: str): """Check the path is a file and if its content ends with `tag`.""" diff --git a/test/plugins/test_discogs.py b/test/plugins/test_discogs.py index c31ac7511..eb65bc588 100644 --- a/test/plugins/test_discogs.py +++ b/test/plugins/test_discogs.py @@ -82,7 +82,7 @@ class DGAlbumInfoTest(BeetsTestCase): """Return a Bag that mimics a discogs_client.Release with a tracklist where tracks have the specified `positions`.""" tracks = [ - self._make_track("TITLE%s" % i, position) + self._make_track(f"TITLE{i}", position) for (i, position) in enumerate(positions, start=1) ] return self._make_release(tracks) @@ -374,6 +374,245 @@ class DGAlbumInfoTest(BeetsTestCase): assert d.genre == "GENRE1, GENRE2" assert d.style is None + def test_strip_disambiguation(self): + """Test removing disambiguation from all disambiguated fields.""" + data = { + "id": 123, + "uri": "https://www.discogs.com/release/123456-something", + "tracklist": [ + { + "title": "track", + "position": "A", + "type_": "track", + "duration": "5:44", + "artists": [ + {"name": "TEST ARTIST (5)", "tracks": "", "id": 11146} + ], + } + ], + "artists": [ + {"name": "ARTIST NAME (2)", "id": 321, "join": "&"}, + {"name": "OTHER ARTIST (5)", "id": 321, "join": ""}, + ], + "title": "title", + "labels": [ + { + "name": "LABEL NAME (5)", + "catno": "catalog number", + } + ], + } + release = Bag( + data=data, + title=data["title"], + artists=[Bag(data=d) for d in data["artists"]], + ) + d = DiscogsPlugin().get_album_info(release) + assert d.artist == "ARTIST NAME & OTHER ARTIST" + assert d.tracks[0].artist == "TEST ARTIST" + assert d.label == "LABEL NAME" + + def test_strip_disambiguation_false(self): + """Test disabling disambiguation removal from all disambiguated fields.""" + config["discogs"]["strip_disambiguation"] = False + data = { + "id": 123, + "uri": "https://www.discogs.com/release/123456-something", + "tracklist": [ + { + "title": "track", + "position": "A", + "type_": "track", + "duration": "5:44", + "artists": [ + {"name": "TEST ARTIST (5)", "tracks": "", "id": 11146} + ], + } + ], + "artists": [ + {"name": "ARTIST NAME (2)", "id": 321, "join": "&"}, + {"name": "OTHER ARTIST (5)", "id": 321, "join": ""}, + ], + "title": "title", + "labels": [ + { + "name": "LABEL NAME (5)", + "catno": "catalog number", + } + ], + } + release = Bag( + data=data, + title=data["title"], + artists=[Bag(data=d) for d in data["artists"]], + ) + d = DiscogsPlugin().get_album_info(release) + assert d.artist == "ARTIST NAME (2) & OTHER ARTIST (5)" + assert d.tracks[0].artist == "TEST ARTIST (5)" + assert d.label == "LABEL NAME (5)" + config["discogs"]["strip_disambiguation"] = True + + +@pytest.mark.parametrize( + "track_artist_anv,track_artist", + [(False, "ARTIST Feat. PERFORMER"), (True, "VARIATION Feat. VARIATION")], +) +@pytest.mark.parametrize( + "album_artist_anv,album_artist", + [(False, "ARTIST & SOLOIST"), (True, "VARIATION & VARIATION")], +) +@pytest.mark.parametrize( + "artist_credit_anv,track_artist_credit,album_artist_credit", + [ + (False, "ARTIST Feat. PERFORMER", "ARTIST & SOLOIST"), + (True, "VARIATION Feat. 
VARIATION", "VARIATION & VARIATION"), + ], +) +@patch("beetsplug.discogs.DiscogsPlugin.setup", Mock()) +def test_anv( + track_artist_anv, + track_artist, + album_artist_anv, + album_artist, + artist_credit_anv, + track_artist_credit, + album_artist_credit, +): + """Test using artist name variations.""" + data = { + "id": 123, + "uri": "https://www.discogs.com/release/123456-something", + "tracklist": [ + { + "title": "track", + "position": "A", + "type_": "track", + "duration": "5:44", + "artists": [ + { + "name": "ARTIST", + "tracks": "", + "anv": "VARIATION", + "id": 11146, + } + ], + "extraartists": [ + { + "name": "PERFORMER", + "role": "Featuring", + "anv": "VARIATION", + "id": 787, + } + ], + } + ], + "artists": [ + {"name": "ARTIST (4)", "anv": "VARIATION", "id": 321, "join": "&"}, + {"name": "SOLOIST", "anv": "VARIATION", "id": 445, "join": ""}, + ], + "title": "title", + } + release = Bag( + data=data, + title=data["title"], + artists=[Bag(data=d) for d in data["artists"]], + ) + config["discogs"]["anv"]["album_artist"] = album_artist_anv + config["discogs"]["anv"]["artist"] = track_artist_anv + config["discogs"]["anv"]["artist_credit"] = artist_credit_anv + r = DiscogsPlugin().get_album_info(release) + assert r.artist == album_artist + assert r.artist_credit == album_artist_credit + assert r.tracks[0].artist == track_artist + assert r.tracks[0].artist_credit == track_artist_credit + + +@patch("beetsplug.discogs.DiscogsPlugin.setup", Mock()) +def test_anv_album_artist(): + """Test using artist name variations when the album artist + is the same as the track artist, but only the track artist + should use the artist name variation.""" + data = { + "id": 123, + "uri": "https://www.discogs.com/release/123456-something", + "tracklist": [ + { + "title": "track", + "position": "A", + "type_": "track", + "duration": "5:44", + } + ], + "artists": [ + {"name": "ARTIST (4)", "anv": "VARIATION", "id": 321}, + ], + "title": "title", + } + release = Bag( + data=data, + title=data["title"], + artists=[Bag(data=d) for d in data["artists"]], + ) + config["discogs"]["anv"]["album_artist"] = False + config["discogs"]["anv"]["artist"] = True + config["discogs"]["anv"]["artist_credit"] = False + r = DiscogsPlugin().get_album_info(release) + assert r.artist == "ARTIST" + assert r.artist_credit == "ARTIST" + assert r.tracks[0].artist == "VARIATION" + assert r.tracks[0].artist_credit == "ARTIST" + + +@pytest.mark.parametrize( + "track, expected_artist", + [ + ( + { + "type_": "track", + "title": "track", + "position": "1", + "duration": "5:00", + "artists": [ + {"name": "NEW ARTIST", "tracks": "", "id": 11146}, + {"name": "VOCALIST", "tracks": "", "id": 344, "join": "&"}, + ], + "extraartists": [ + { + "name": "SOLOIST", + "id": 3, + "role": "Featuring", + }, + { + "name": "PERFORMER (1)", + "id": 5, + "role": "Other Role, Featuring", + }, + { + "name": "RANDOM", + "id": 8, + "role": "Written-By", + }, + { + "name": "MUSICIAN", + "id": 10, + "role": "Featuring [Uncredited]", + }, + ], + }, + "NEW ARTIST, VOCALIST Feat. SOLOIST, PERFORMER, MUSICIAN", + ), + ], +) +@patch("beetsplug.discogs.DiscogsPlugin.setup", Mock()) +def test_parse_featured_artists(track, expected_artist): + """Tests the plugins ability to parse a featured artist. + Initial check with one featured artist, two featured artists, + and three. 
Ignores artists that are not listed as featured.""" + t = DiscogsPlugin().get_track_info( + track, 1, 1, ("ARTIST", "ARTIST CREDIT", 2) + ) + assert t.artist == expected_artist + @pytest.mark.parametrize( "formats, expected_media, expected_albumtype", diff --git a/test/plugins/test_embedart.py b/test/plugins/test_embedart.py index 62b2bb7d1..d40025374 100644 --- a/test/plugins/test_embedart.py +++ b/test/plugins/test_embedart.py @@ -23,7 +23,7 @@ from unittest.mock import MagicMock, patch import pytest from mediafile import MediaFile -from beets import art, config, logging, ui +from beets import config, logging, ui from beets.test import _common from beets.test.helper import ( BeetsTestCase, @@ -33,6 +33,7 @@ from beets.test.helper import ( ) from beets.util import bytestring_path, displayable_path, syspath from beets.util.artresizer import ArtResizer +from beetsplug._utils import art from test.test_art_resize import DummyIMBackend @@ -144,9 +145,7 @@ class EmbedartCliTest(IOMixin, PluginMixin, FetchImageHelper, BeetsTestCase): if os.path.isfile(syspath(tmp_path)): os.remove(syspath(tmp_path)) self.fail( - "Artwork file {} was not deleted".format( - displayable_path(tmp_path) - ) + f"Artwork file {displayable_path(tmp_path)} was not deleted" ) def test_art_file_missing(self): @@ -285,7 +284,7 @@ class DummyArtResizer(ArtResizer): @patch("beets.util.artresizer.subprocess") -@patch("beets.art.extract") +@patch("beetsplug._utils.art.extract") class ArtSimilarityTest(unittest.TestCase): def setUp(self): self.item = _common.item() diff --git a/test/plugins/test_fromfilename.py b/test/plugins/test_fromfilename.py new file mode 100644 index 000000000..f13e88aea --- /dev/null +++ b/test/plugins/test_fromfilename.py @@ -0,0 +1,99 @@ +# This file is part of beets. +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. + +"""Tests for the fromfilename plugin.""" + +import pytest + +from beetsplug import fromfilename + + +class Session: + pass + + +class Item: + def __init__(self, path): + self.path = path + self.track = 0 + self.artist = "" + self.title = "" + + +class Task: + def __init__(self, items): + self.items = items + self.is_album = True + + +@pytest.mark.parametrize( + "song1, song2", + [ + ( + ( + "/tmp/01 - The Artist - Song One.m4a", + 1, + "The Artist", + "Song One", + ), + ( + "/tmp/02. - The Artist - Song Two.m4a", + 2, + "The Artist", + "Song Two", + ), + ), + ( + ("/tmp/01-The_Artist-Song_One.m4a", 1, "The_Artist", "Song_One"), + ("/tmp/02.-The_Artist-Song_Two.m4a", 2, "The_Artist", "Song_Two"), + ), + ( + ("/tmp/01 - Song_One.m4a", 1, "", "Song_One"), + ("/tmp/02. 
- Song_Two.m4a", 2, "", "Song_Two"), + ), + ( + ("/tmp/Song One by The Artist.m4a", 0, "The Artist", "Song One"), + ("/tmp/Song Two by The Artist.m4a", 0, "The Artist", "Song Two"), + ), + (("/tmp/01.m4a", 1, "", "01"), ("/tmp/02.m4a", 2, "", "02")), + ( + ("/tmp/Song One.m4a", 0, "", "Song One"), + ("/tmp/Song Two.m4a", 0, "", "Song Two"), + ), + ], +) +def test_fromfilename(song1, song2): + """ + Each "song" is a tuple of path, expected track number, expected artist, + expected title. + + We use two songs for each test for two reasons: + - The plugin needs more than one item to look for uniform strings in paths + in order to guess if the string describes an artist or a title. + - Sometimes we allow for an optional "." after the track number in paths. + """ + + session = Session() + item1 = Item(song1[0]) + item2 = Item(song2[0]) + task = Task([item1, item2]) + + f = fromfilename.FromFilenamePlugin() + f.filename_task(task, session) + + assert task.items[0].track == song1[1] + assert task.items[0].artist == song1[2] + assert task.items[0].title == song1[3] + assert task.items[1].track == song2[1] + assert task.items[1].artist == song2[2] + assert task.items[1].title == song2[3] diff --git a/test/plugins/test_ftintitle.py b/test/plugins/test_ftintitle.py index 1dbe4a727..56c82b9d2 100644 --- a/test/plugins/test_ftintitle.py +++ b/test/plugins/test_ftintitle.py @@ -14,8 +14,11 @@ """Tests for the 'ftintitle' plugin.""" -import unittest +from typing import Dict, Generator, Optional, Tuple, Union +import pytest + +from beets.library.models import Item from beets.test.helper import PluginTestCase from beetsplug import ftintitle @@ -23,169 +26,341 @@ from beetsplug import ftintitle class FtInTitlePluginFunctional(PluginTestCase): plugin = "ftintitle" - def _ft_add_item(self, path, artist, title, aartist): - return self.add_item( - path=path, - artist=artist, - artist_sort=artist, - title=title, - albumartist=aartist, - ) - def _ft_set_config( - self, ftformat, drop=False, auto=True, keep_in_artist=False - ): - self.config["ftintitle"]["format"] = ftformat - self.config["ftintitle"]["drop"] = drop - self.config["ftintitle"]["auto"] = auto - self.config["ftintitle"]["keep_in_artist"] = keep_in_artist - - def test_functional_drop(self): - item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "Alice") - self.run_command("ftintitle", "-d") - item.load() - assert item["artist"] == "Alice" - assert item["title"] == "Song 1" - - def test_functional_not_found(self): - item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "George") - self.run_command("ftintitle", "-d") - item.load() - # item should be unchanged - assert item["artist"] == "Alice ft Bob" - assert item["title"] == "Song 1" - - def test_functional_custom_format(self): - self._ft_set_config("feat. {0}") - item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "Alice") - self.run_command("ftintitle") - item.load() - assert item["artist"] == "Alice" - assert item["title"] == "Song 1 feat. Bob" - - self._ft_set_config("featuring {0}") - item = self._ft_add_item("/", "Alice feat. Bob", "Song 1", "Alice") - self.run_command("ftintitle") - item.load() - assert item["artist"] == "Alice" - assert item["title"] == "Song 1 featuring Bob" - - self._ft_set_config("with {0}") - item = self._ft_add_item("/", "Alice feat Bob", "Song 1", "Alice") - self.run_command("ftintitle") - item.load() - assert item["artist"] == "Alice" - assert item["title"] == "Song 1 with Bob" - - def test_functional_keep_in_artist(self): - self._ft_set_config("feat. 
{0}", keep_in_artist=True) - item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "Alice") - self.run_command("ftintitle") - item.load() - assert item["artist"] == "Alice ft Bob" - assert item["title"] == "Song 1 feat. Bob" - - item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "Alice") - self.run_command("ftintitle", "-d") - item.load() - assert item["artist"] == "Alice ft Bob" - assert item["title"] == "Song 1" +@pytest.fixture +def env() -> Generator[FtInTitlePluginFunctional, None, None]: + case = FtInTitlePluginFunctional(methodName="runTest") + case.setUp() + try: + yield case + finally: + case.tearDown() -class FtInTitlePluginTest(unittest.TestCase): - def setUp(self): - """Set up configuration""" - ftintitle.FtInTitlePlugin() +def set_config( + env: FtInTitlePluginFunctional, + cfg: Optional[Dict[str, Union[str, bool, list[str]]]], +) -> None: + cfg = {} if cfg is None else cfg + defaults = { + "drop": False, + "auto": True, + "keep_in_artist": False, + "custom_words": [], + } + env.config["ftintitle"].set(defaults) + env.config["ftintitle"].set(cfg) - def test_find_feat_part(self): - test_cases = [ + +def add_item( + env: FtInTitlePluginFunctional, + path: str, + artist: str, + title: str, + albumartist: Optional[str], +) -> Item: + return env.add_item( + path=path, + artist=artist, + artist_sort=artist, + title=title, + albumartist=albumartist, + ) + + +@pytest.mark.parametrize( + "cfg, cmd_args, given, expected", + [ + pytest.param( + None, + ("ftintitle",), + ("Alice", "Song 1", "Alice"), + ("Alice", "Song 1"), + id="no-featured-artist", + ), + pytest.param( + {"format": "feat {0}"}, + ("ftintitle",), + ("Alice ft. Bob", "Song 1", None), + ("Alice", "Song 1 feat Bob"), + id="no-albumartist-custom-format", + ), + pytest.param( + None, + ("ftintitle",), + ("Alice", "Song 1", None), + ("Alice", "Song 1"), + id="no-albumartist-no-feature", + ), + pytest.param( + {"format": "featuring {0}"}, + ("ftintitle",), + ("Alice ft Bob", "Song 1", "George"), + ("Alice", "Song 1 featuring Bob"), + id="guest-artist-custom-format", + ), + pytest.param( + None, + ("ftintitle",), + ("Alice", "Song 1", "George"), + ("Alice", "Song 1"), + id="guest-artist-no-feature", + ), + # ---- drop (-d) variants ---- + pytest.param( + None, + ("ftintitle", "-d"), + ("Alice ft Bob", "Song 1", "Alice"), + ("Alice", "Song 1"), + id="drop-self-ft", + ), + pytest.param( + None, + ("ftintitle", "-d"), + ("Alice", "Song 1", "Alice"), + ("Alice", "Song 1"), + id="drop-self-no-ft", + ), + pytest.param( + None, + ("ftintitle", "-d"), + ("Alice ft Bob", "Song 1", "George"), + ("Alice", "Song 1"), + id="drop-guest-ft", + ), + pytest.param( + None, + ("ftintitle", "-d"), + ("Alice", "Song 1", "George"), + ("Alice", "Song 1"), + id="drop-guest-no-ft", + ), + # ---- custom format variants ---- + pytest.param( + {"format": "feat. {}"}, + ("ftintitle",), + ("Alice ft Bob", "Song 1", "Alice"), + ("Alice", "Song 1 feat. Bob"), + id="custom-format-feat-dot", + ), + pytest.param( + {"format": "featuring {}"}, + ("ftintitle",), + ("Alice feat. Bob", "Song 1", "Alice"), + ("Alice", "Song 1 featuring Bob"), + id="custom-format-featuring", + ), + pytest.param( + {"format": "with {}"}, + ("ftintitle",), + ("Alice feat Bob", "Song 1", "Alice"), + ("Alice", "Song 1 with Bob"), + id="custom-format-with", + ), + # ---- keep_in_artist variants ---- + pytest.param( + {"format": "feat. {}", "keep_in_artist": True}, + ("ftintitle",), + ("Alice ft Bob", "Song 1", "Alice"), + ("Alice ft Bob", "Song 1 feat. 
Bob"), + id="keep-in-artist-add-to-title", + ), + pytest.param( + {"format": "feat. {}", "keep_in_artist": True}, + ("ftintitle", "-d"), + ("Alice ft Bob", "Song 1", "Alice"), + ("Alice ft Bob", "Song 1"), + id="keep-in-artist-drop-from-title", + ), + # ---- custom_words variants ---- + pytest.param( + {"format": "featuring {}", "custom_words": ["med"]}, + ("ftintitle",), + ("Alice med Bob", "Song 1", "Alice"), + ("Alice", "Song 1 featuring Bob"), + id="custom-feat-words", + ), + pytest.param( { - "artist": "Alice ft. Bob", - "album_artist": "Alice", - "feat_part": "Bob", + "format": "featuring {}", + "keep_in_artist": True, + "custom_words": ["med"], }, + ("ftintitle",), + ("Alice med Bob", "Song 1", "Alice"), + ("Alice med Bob", "Song 1 featuring Bob"), + id="custom-feat-words-keep-in-artists", + ), + pytest.param( { - "artist": "Alice feat Bob", - "album_artist": "Alice", - "feat_part": "Bob", + "format": "featuring {}", + "keep_in_artist": True, + "custom_words": ["med"], }, + ( + "ftintitle", + "-d", + ), + ("Alice med Bob", "Song 1", "Alice"), + ("Alice med Bob", "Song 1"), + id="custom-feat-words-keep-in-artists-drop-from-title", + ), + # ---- preserve_album_artist variants ---- + pytest.param( { - "artist": "Alice featuring Bob", - "album_artist": "Alice", - "feat_part": "Bob", + "format": "feat. {}", + "preserve_album_artist": True, }, + ("ftintitle",), + ("Alice feat. Bob", "Song 1", "Alice"), + ("Alice", "Song 1 feat. Bob"), + id="skip-if-artist-and-album-artists-is-the-same-different-match", + ), + pytest.param( { - "artist": "Alice & Bob", - "album_artist": "Alice", - "feat_part": "Bob", + "format": "feat. {}", + "preserve_album_artist": False, }, + ("ftintitle",), + ("Alice feat. Bob", "Song 1", "Alice"), + ("Alice", "Song 1 feat. Bob"), + id="skip-if-artist-and-album-artists-is-the-same-different-match-b", + ), + pytest.param( { - "artist": "Alice and Bob", - "album_artist": "Alice", - "feat_part": "Bob", + "format": "feat. {}", + "preserve_album_artist": True, }, + ("ftintitle",), + ("Alice feat. Bob", "Song 1", "Alice feat. Bob"), + ("Alice feat. Bob", "Song 1"), + id="skip-if-artist-and-album-artists-is-the-same-matching-match", + ), + pytest.param( { - "artist": "Alice With Bob", - "album_artist": "Alice", - "feat_part": "Bob", + "format": "feat. {}", + "preserve_album_artist": False, }, - { - "artist": "Alice defeat Bob", - "album_artist": "Alice", - "feat_part": None, - }, - { - "artist": "Alice & Bob", - "album_artist": "Bob", - "feat_part": "Alice", - }, - { - "artist": "Alice ft. Bob", - "album_artist": "Bob", - "feat_part": "Alice", - }, - { - "artist": "Alice ft. Carol", - "album_artist": "Bob", - "feat_part": None, - }, - ] + ("ftintitle",), + ("Alice feat. Bob", "Song 1", "Alice feat. Bob"), + ("Alice", "Song 1 feat. Bob"), + id="skip-if-artist-and-album-artists-is-the-same-matching-match-b", + ), + ], +) +def test_ftintitle_functional( + env: FtInTitlePluginFunctional, + cfg: Optional[Dict[str, Union[str, bool, list[str]]]], + cmd_args: Tuple[str, ...], + given: Tuple[str, str, Optional[str]], + expected: Tuple[str, str], +) -> None: + set_config(env, cfg) + ftintitle.FtInTitlePlugin() - for test_case in test_cases: - feat_part = ftintitle.find_feat_part( - test_case["artist"], test_case["album_artist"] - ) - assert feat_part == test_case["feat_part"] + artist, title, albumartist = given + item = add_item(env, "/", artist, title, albumartist) - def test_split_on_feat(self): - parts = ftintitle.split_on_feat("Alice ft. 
Bob") - assert parts == ("Alice", "Bob") - parts = ftintitle.split_on_feat("Alice feat Bob") - assert parts == ("Alice", "Bob") - parts = ftintitle.split_on_feat("Alice feat. Bob") - assert parts == ("Alice", "Bob") - parts = ftintitle.split_on_feat("Alice featuring Bob") - assert parts == ("Alice", "Bob") - parts = ftintitle.split_on_feat("Alice & Bob") - assert parts == ("Alice", "Bob") - parts = ftintitle.split_on_feat("Alice and Bob") - assert parts == ("Alice", "Bob") - parts = ftintitle.split_on_feat("Alice With Bob") - assert parts == ("Alice", "Bob") - parts = ftintitle.split_on_feat("Alice defeat Bob") - assert parts == ("Alice defeat Bob", None) + env.run_command(*cmd_args) + item.load() - def test_contains_feat(self): - assert ftintitle.contains_feat("Alice ft. Bob") - assert ftintitle.contains_feat("Alice feat. Bob") - assert ftintitle.contains_feat("Alice feat Bob") - assert ftintitle.contains_feat("Alice featuring Bob") - assert ftintitle.contains_feat("Alice (ft. Bob)") - assert ftintitle.contains_feat("Alice (feat. Bob)") - assert ftintitle.contains_feat("Alice [ft. Bob]") - assert ftintitle.contains_feat("Alice [feat. Bob]") - assert not ftintitle.contains_feat("Alice defeat Bob") - assert not ftintitle.contains_feat("Aliceft.Bob") - assert not ftintitle.contains_feat("Alice (defeat Bob)") - assert not ftintitle.contains_feat("Live and Let Go") - assert not ftintitle.contains_feat("Come With Me") + expected_artist, expected_title = expected + assert item["artist"] == expected_artist + assert item["title"] == expected_title + + +@pytest.mark.parametrize( + "artist,albumartist,expected", + [ + ("Alice ft. Bob", "Alice", "Bob"), + ("Alice feat Bob", "Alice", "Bob"), + ("Alice featuring Bob", "Alice", "Bob"), + ("Alice & Bob", "Alice", "Bob"), + ("Alice and Bob", "Alice", "Bob"), + ("Alice With Bob", "Alice", "Bob"), + ("Alice defeat Bob", "Alice", None), + ("Alice & Bob", "Bob", "Alice"), + ("Alice ft. Bob", "Bob", "Alice"), + ("Alice ft. Carol", "Bob", "Carol"), + ], +) +def test_find_feat_part( + artist: str, + albumartist: str, + expected: Optional[str], +) -> None: + assert ftintitle.find_feat_part(artist, albumartist) == expected + + +@pytest.mark.parametrize( + "given,expected", + [ + ("Alice ft. Bob", ("Alice", "Bob")), + ("Alice feat Bob", ("Alice", "Bob")), + ("Alice feat. Bob", ("Alice", "Bob")), + ("Alice featuring Bob", ("Alice", "Bob")), + ("Alice & Bob", ("Alice", "Bob")), + ("Alice and Bob", ("Alice", "Bob")), + ("Alice With Bob", ("Alice", "Bob")), + ("Alice defeat Bob", ("Alice defeat Bob", None)), + ], +) +def test_split_on_feat( + given: str, + expected: Tuple[str, Optional[str]], +) -> None: + assert ftintitle.split_on_feat(given) == expected + + +@pytest.mark.parametrize( + "given,expected", + [ + ("Alice ft. Bob", True), + ("Alice feat. Bob", True), + ("Alice feat Bob", True), + ("Alice featuring Bob", True), + ("Alice (ft. Bob)", True), + ("Alice (feat. Bob)", True), + ("Alice [ft. Bob]", True), + ("Alice [feat. Bob]", True), + ("Alice defeat Bob", False), + ("Aliceft.Bob", False), + ("Alice (defeat Bob)", False), + ("Live and Let Go", False), + ("Come With Me", False), + ], +) +def test_contains_feat(given: str, expected: bool) -> None: + assert ftintitle.contains_feat(given) is expected + + +@pytest.mark.parametrize( + "given,custom_words,expected", + [ + ("Alice ft. Bob", [], True), + ("Alice feat. Bob", [], True), + ("Alice feat Bob", [], True), + ("Alice featuring Bob", [], True), + ("Alice (ft. Bob)", [], True), + ("Alice (feat. 
Bob)", [], True), + ("Alice [ft. Bob]", [], True), + ("Alice [feat. Bob]", [], True), + ("Alice defeat Bob", [], False), + ("Aliceft.Bob", [], False), + ("Alice (defeat Bob)", [], False), + ("Live and Let Go", [], False), + ("Come With Me", [], False), + ("Alice x Bob", ["x"], True), + ("Alice x Bob", ["X"], True), + ("Alice och Xavier", ["x"], False), + ("Alice ft. Xavier", ["x"], True), + ("Alice med Carol", ["med"], True), + ("Alice med Carol", [], False), + ], +) +def test_custom_words( + given: str, custom_words: Optional[list[str]], expected: bool +) -> None: + if custom_words is None: + custom_words = [] + assert ftintitle.contains_feat(given, custom_words) is expected diff --git a/test/plugins/test_importadded.py b/test/plugins/test_importadded.py index 1b198b31d..352471f9b 100644 --- a/test/plugins/test_importadded.py +++ b/test/plugins/test_importadded.py @@ -65,7 +65,7 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase): if m.title.replace("Tag", "Applied") == item.title: return m raise AssertionError( - "No MediaFile found for Item " + displayable_path(item.path) + f"No MediaFile found for Item {displayable_path(item.path)}" ) def test_import_album_with_added_dates(self): @@ -117,7 +117,7 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase): for item_path, added_after in items_added_after.items(): assert items_added_before[item_path] == pytest.approx( added_after, rel=1e-4 - ), "reimport modified Item.added for " + displayable_path(item_path) + ), f"reimport modified Item.added for {displayable_path(item_path)}" def test_import_singletons_with_added_dates(self): self.config["import"]["singletons"] = True @@ -157,4 +157,4 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase): for item_path, added_after in items_added_after.items(): assert items_added_before[item_path] == pytest.approx( added_after, rel=1e-4 - ), "reimport modified Item.added for " + displayable_path(item_path) + ), f"reimport modified Item.added for {displayable_path(item_path)}" diff --git a/test/plugins/test_ipfs.py b/test/plugins/test_ipfs.py index 096bc393b..b94bd551b 100644 --- a/test/plugins/test_ipfs.py +++ b/test/plugins/test_ipfs.py @@ -37,7 +37,7 @@ class IPFSPluginTest(PluginTestCase): try: if check_item.get("ipfs", with_album=False): ipfs_item = os.fsdecode(os.path.basename(want_item.path)) - want_path = "/ipfs/{}/{}".format(test_album.ipfs, ipfs_item) + want_path = f"/ipfs/{test_album.ipfs}/{ipfs_item}" want_path = bytestring_path(want_path) assert check_item.path == want_path assert ( diff --git a/test/plugins/test_lastgenre.py b/test/plugins/test_lastgenre.py index be145d811..12ff30f8e 100644 --- a/test/plugins/test_lastgenre.py +++ b/test/plugins/test_lastgenre.py @@ -14,16 +14,18 @@ """Tests for the 'lastgenre' plugin.""" -from unittest.mock import Mock +from unittest.mock import Mock, patch import pytest from beets.test import _common -from beets.test.helper import BeetsTestCase +from beets.test.helper import PluginTestCase from beetsplug import lastgenre -class LastGenrePluginTest(BeetsTestCase): +class LastGenrePluginTest(PluginTestCase): + plugin = "lastgenre" + def setUp(self): super().setUp() self.plugin = lastgenre.LastGenrePlugin() @@ -131,6 +133,33 @@ class LastGenrePluginTest(BeetsTestCase): "math rock", ] + @patch("beets.ui.should_write", Mock(return_value=True)) + @patch( + "beetsplug.lastgenre.LastGenrePlugin._get_genre", + Mock(return_value=("Mock Genre", "mock stage")), + ) + def test_pretend_option_skips_library_updates(self): + item = self.create_item( + 
album="Pretend Album", + albumartist="Pretend Artist", + artist="Pretend Artist", + title="Pretend Track", + genre="Original Genre", + ) + album = self.lib.add_album([item]) + + def unexpected_store(*_, **__): + raise AssertionError("Unexpected store call") + + # Verify that try_write was never called (file operations skipped) + with patch("beetsplug.lastgenre.Item.store", unexpected_store): + output = self.run_with_output("lastgenre", "--pretend") + + assert "Mock Genre" in output + album.load() + assert album.genre == "Original Genre" + assert album.items()[0].genre == "Original Genre" + def test_no_duplicate(self): """Remove duplicated genres.""" self._setup_config(count=99) @@ -441,6 +470,77 @@ class LastGenrePluginTest(BeetsTestCase): }, ("Jazz", "keep + artist, whitelist"), ), + # 13 - canonicalization transforms non-whitelisted genres to canonical forms + # + # "Acid Techno" is not in the default whitelist, thus gets resolved "up" in the + # tree to "Techno" and "Electronic". + ( + { + "force": True, + "keep_existing": False, + "source": "album", + "whitelist": True, + "canonical": True, + "prefer_specific": False, + "count": 10, + }, + "", + { + "album": ["acid techno"], + }, + ("Techno, Electronic", "album, whitelist"), + ), + # 14 - canonicalization transforms whitelisted genres to canonical forms and + # includes originals + # + # "Detroit Techno" is in the default whitelist, thus it stays and and also gets + # resolved "up" in the tree to "Techno" and "Electronic". The same happens for + # newly fetched genre "Acid House". + ( + { + "force": True, + "keep_existing": True, + "source": "album", + "whitelist": True, + "canonical": True, + "prefer_specific": False, + "count": 10, + "extended_debug": True, + }, + "detroit techno", + { + "album": ["acid house"], + }, + ( + "Detroit Techno, Techno, Electronic, Acid House, House", + "keep + album, whitelist", + ), + ), + # 15 - canonicalization transforms non-whitelisted original genres to canonical + # forms and deduplication works. + # + # "Cosmic Disco" is not in the default whitelist, thus gets resolved "up" in the + # tree to "Disco" and "Electronic". New genre "Detroit Techno" resolves to + # "Techno". Both resolve to "Electronic" which gets deduplicated. + ( + { + "force": True, + "keep_existing": True, + "source": "album", + "whitelist": True, + "canonical": True, + "prefer_specific": False, + "count": 10, + }, + "Cosmic Disco", + { + "album": ["Detroit Techno"], + }, + ( + "Disco, Electronic, Detroit Techno, Techno", + "keep + album, whitelist", + ), + ), ], ) def test_get_genre(config_values, item_genre, mock_genres, expected_result): @@ -466,6 +566,7 @@ def test_get_genre(config_values, item_genre, mock_genres, expected_result): plugin = lastgenre.LastGenrePlugin() # Configure plugin.config.set(config_values) + plugin.setup() # Loads default whitelist and canonicalization tree item = _common.item() item.genre = item_genre diff --git a/test/plugins/test_limit.py b/test/plugins/test_limit.py index 12700295e..d77e47ca8 100644 --- a/test/plugins/test_limit.py +++ b/test/plugins/test_limit.py @@ -42,8 +42,8 @@ class LimitPluginTest(PluginTestCase): # a subset of tests has only `num_limit` results, identified by a # range filter on the track number - self.track_head_range = "track:.." + str(self.num_limit) - self.track_tail_range = "track:" + str(self.num_limit + 1) + ".." 
+ self.track_head_range = f"track:..{self.num_limit}" + self.track_tail_range = f"track:{self.num_limit + 1}{'..'}" def test_no_limit(self): """Returns all when there is no limit or filter.""" @@ -82,13 +82,13 @@ class LimitPluginTest(PluginTestCase): def test_prefix_when_correctly_ordered(self): """Returns the expected number with the query prefix and filter when the prefix portion (correctly) appears last.""" - correct_order = self.track_tail_range + " " + self.num_limit_prefix + correct_order = f"{self.track_tail_range} {self.num_limit_prefix}" result = self.lib.items(correct_order) assert len(result) == self.num_limit def test_prefix_when_incorrectly_ordred(self): """Returns no results with the query prefix and filter when the prefix portion (incorrectly) appears first.""" - incorrect_order = self.num_limit_prefix + " " + self.track_tail_range + incorrect_order = f"{self.num_limit_prefix} {self.track_tail_range}" result = self.lib.items(incorrect_order) assert len(result) == 0 diff --git a/test/plugins/test_mpdstats.py b/test/plugins/test_mpdstats.py index dcaf196ef..6f5d3f3ce 100644 --- a/test/plugins/test_mpdstats.py +++ b/test/plugins/test_mpdstats.py @@ -77,7 +77,7 @@ class MPDStatsTest(PluginTestCase): except KeyboardInterrupt: pass - log.debug.assert_has_calls([call('unhandled status "{0}"', ANY)]) + log.debug.assert_has_calls([call('unhandled status "{}"', ANY)]) log.info.assert_has_calls( - [call("pause"), call("playing {0}", ANY), call("stop")] + [call("pause"), call("playing {}", ANY), call("stop")] ) diff --git a/test/plugins/test_musicbrainz.py b/test/plugins/test_musicbrainz.py index aea05bc20..844b2ad4e 100644 --- a/test/plugins/test_musicbrainz.py +++ b/test/plugins/test_musicbrainz.py @@ -99,7 +99,7 @@ class MBAlbumInfoTest(MusicBrainzTestCase): for recording in tracks: i += 1 track = { - "id": "RELEASE TRACK ID %d" % i, + "id": f"RELEASE TRACK ID {i}", "recording": recording, "position": i, "number": "A1", @@ -140,7 +140,7 @@ class MBAlbumInfoTest(MusicBrainzTestCase): for recording in data_tracks: i += 1 data_track = { - "id": "RELEASE TRACK ID %d" % i, + "id": f"RELEASE TRACK ID {i}", "recording": recording, "position": i, "number": "A1", @@ -670,17 +670,17 @@ class ArtistFlatteningTest(unittest.TestCase): def _credit_dict(self, suffix=""): return { "artist": { - "name": "NAME" + suffix, - "sort-name": "SORT" + suffix, + "name": f"NAME{suffix}", + "sort-name": f"SORT{suffix}", }, - "name": "CREDIT" + suffix, + "name": f"CREDIT{suffix}", } def _add_alias(self, credit_dict, suffix="", locale="", primary=False): alias = { - "alias": "ALIAS" + suffix, + "alias": f"ALIAS{suffix}", "locale": locale, - "sort-name": "ALIASSORT" + suffix, + "sort-name": f"ALIASSORT{suffix}", } if primary: alias["primary"] = "primary" diff --git a/test/plugins/test_parentwork.py b/test/plugins/test_parentwork.py index 99267f6ff..1abe25709 100644 --- a/test/plugins/test_parentwork.py +++ b/test/plugins/test_parentwork.py @@ -93,8 +93,7 @@ class ParentWorkIntegrationTest(PluginTestCase): item = Item( path="/file", mb_workid="e27bda6e-531e-36d3-9cd7-b8ebc18e8c53", - parentwork_workid_current="e27bda6e-531e-36d3-9cd7-\ - b8ebc18e8c53", + parentwork_workid_current="e27bda6e-531e-36d3-9cd7-b8ebc18e8c53", ) item.add(self.lib) @@ -109,8 +108,7 @@ class ParentWorkIntegrationTest(PluginTestCase): path="/file", mb_workid="e27bda6e-531e-36d3-9cd7-b8ebc18e8c53", mb_parentworkid="XXX", - parentwork_workid_current="e27bda6e-531e-36d3-9cd7-\ - b8ebc18e8c53", + 
parentwork_workid_current="e27bda6e-531e-36d3-9cd7-b8ebc18e8c53", parentwork="whatever", ) item.add(self.lib) @@ -124,11 +122,9 @@ class ParentWorkIntegrationTest(PluginTestCase): self.config["parentwork"]["force"] = False item = Item( path="/file", - mb_workid="e27bda6e-531e-36d3-9cd7-\ - b8ebc18e8c53", + mb_workid="e27bda6e-531e-36d3-9cd7-b8ebc18e8c53", mb_parentworkid="XXX", - parentwork_workid_current="e27bda6e-531e-36d3-9cd7-\ - b8ebc18e8c53", + parentwork_workid_current="e27bda6e-531e-36d3-9cd7-b8ebc18e8c53", parentwork="whatever", ) item.add(self.lib) diff --git a/test/plugins/test_play.py b/test/plugins/test_play.py index 571af95dd..b184db63f 100644 --- a/test/plugins/test_play.py +++ b/test/plugins/test_play.py @@ -49,7 +49,7 @@ class PlayPluginTest(CleanupModulesMixin, PluginTestCase): open_mock.assert_called_once_with(ANY, expected_cmd) expected_playlist = expected_playlist or self.item.path.decode("utf-8") - exp_playlist = expected_playlist + "\n" + exp_playlist = f"{expected_playlist}\n" with open(open_mock.call_args[0][0][0], "rb") as playlist: assert exp_playlist == playlist.read().decode("utf-8") @@ -96,9 +96,7 @@ class PlayPluginTest(CleanupModulesMixin, PluginTestCase): open_mock.assert_called_once_with(ANY, open_anything()) with open(open_mock.call_args[0][0][0], "rb") as f: playlist = f.read().decode("utf-8") - assert ( - f"{os.path.dirname(self.item.path.decode('utf-8'))}\n" == playlist - ) + assert f"{self.item.filepath.parent}\n" == playlist def test_raw(self, open_mock): self.config["play"]["raw"] = True @@ -107,6 +105,19 @@ class PlayPluginTest(CleanupModulesMixin, PluginTestCase): open_mock.assert_called_once_with([self.item.path], "echo") + def test_pls_marker(self, open_mock): + self.config["play"]["command"] = ( + "echo --some params --playlist=$playlist --some-more params" + ) + + self.run_command("play", "nice") + + open_mock.assert_called_once + + commandstr = open_mock.call_args_list[0][0][1] + assert commandstr.startswith("echo --some params --playlist=") + assert commandstr.endswith(" --some-more params") + def test_not_found(self, open_mock): self.run_command("play", "not found") @@ -125,9 +136,7 @@ class PlayPluginTest(CleanupModulesMixin, PluginTestCase): self.config["play"]["warning_threshold"] = 1 self.other_item = self.add_item(title="another NiceTitle") - expected_playlist = "{}\n{}".format( - self.item.path.decode("utf-8"), self.other_item.path.decode("utf-8") - ) + expected_playlist = f"{self.item.filepath}\n{self.other_item.filepath}" with control_stdin("a"): self.run_and_assert( diff --git a/test/plugins/test_playlist.py b/test/plugins/test_playlist.py index 9d9ce0303..a8c145696 100644 --- a/test/plugins/test_playlist.py +++ b/test/plugins/test_playlist.py @@ -91,14 +91,7 @@ class PlaylistQueryTest: assert {i.title for i in results} == {"some item", "another item"} def test_path_query_with_absolute_paths_in_playlist(self): - q = "playlist:{}".format( - quote( - os.path.join( - self.playlist_dir, - "absolute.m3u", - ) - ) - ) + q = f"playlist:{quote(os.path.join(self.playlist_dir, 'absolute.m3u'))}" results = self.lib.items(q) assert {i.title for i in results} == {"some item", "another item"} @@ -108,14 +101,7 @@ class PlaylistQueryTest: assert {i.title for i in results} == {"some item", "another item"} def test_path_query_with_relative_paths_in_playlist(self): - q = "playlist:{}".format( - quote( - os.path.join( - self.playlist_dir, - "relative.m3u", - ) - ) - ) + q = f"playlist:{quote(os.path.join(self.playlist_dir, 'relative.m3u'))}" results 
         results = self.lib.items(q)
         assert {i.title for i in results} == {"some item", "another item"}

@@ -125,15 +111,7 @@ class PlaylistQueryTest:
         assert set(results) == set()

     def test_path_query_with_nonexisting_playlist(self):
-        q = "playlist:{}".format(
-            quote(
-                os.path.join(
-                    self.playlist_dir,
-                    self.playlist_dir,
-                    "nonexisting.m3u",
-                )
-            )
-        )
+        q = f"playlist:{os.path.join(self.playlist_dir, 'nonexisting.m3u')!r}"
         results = self.lib.items(q)
         assert set(results) == set()

@@ -141,20 +119,22 @@ class PlaylistTestRelativeToLib(PlaylistQueryTest, PlaylistTestCase):
     def setup_test(self):
         with open(os.path.join(self.playlist_dir, "absolute.m3u"), "w") as f:
-            f.write(
-                "{}\n".format(os.path.join(self.music_dir, "a", "b", "c.mp3"))
-            )
-            f.write(
-                "{}\n".format(os.path.join(self.music_dir, "d", "e", "f.mp3"))
-            )
-            f.write(
-                "{}\n".format(os.path.join(self.music_dir, "nonexisting.mp3"))
+            f.writelines(
+                [
+                    os.path.join(self.music_dir, "a", "b", "c.mp3") + "\n",
+                    os.path.join(self.music_dir, "d", "e", "f.mp3") + "\n",
+                    os.path.join(self.music_dir, "nonexisting.mp3") + "\n",
+                ]
             )

         with open(os.path.join(self.playlist_dir, "relative.m3u"), "w") as f:
-            f.write("{}\n".format(os.path.join("a", "b", "c.mp3")))
-            f.write("{}\n".format(os.path.join("d", "e", "f.mp3")))
-            f.write("{}\n".format("nonexisting.mp3"))
+            f.writelines(
+                [
+                    os.path.join("a", "b", "c.mp3") + "\n",
+                    os.path.join("d", "e", "f.mp3") + "\n",
+                    "nonexisting.mp3\n",
+                ]
+            )

         self.config["playlist"]["relative_to"] = "library"

@@ -162,20 +142,22 @@ class PlaylistTestRelativeToLib(PlaylistQueryTest, PlaylistTestCase):
 class PlaylistTestRelativeToDir(PlaylistQueryTest, PlaylistTestCase):
     def setup_test(self):
         with open(os.path.join(self.playlist_dir, "absolute.m3u"), "w") as f:
-            f.write(
-                "{}\n".format(os.path.join(self.music_dir, "a", "b", "c.mp3"))
-            )
-            f.write(
-                "{}\n".format(os.path.join(self.music_dir, "d", "e", "f.mp3"))
-            )
-            f.write(
-                "{}\n".format(os.path.join(self.music_dir, "nonexisting.mp3"))
+            f.writelines(
+                [
+                    os.path.join(self.music_dir, "a", "b", "c.mp3") + "\n",
+                    os.path.join(self.music_dir, "d", "e", "f.mp3") + "\n",
+                    os.path.join(self.music_dir, "nonexisting.mp3") + "\n",
+                ]
             )

         with open(os.path.join(self.playlist_dir, "relative.m3u"), "w") as f:
-            f.write("{}\n".format(os.path.join("a", "b", "c.mp3")))
-            f.write("{}\n".format(os.path.join("d", "e", "f.mp3")))
-            f.write("{}\n".format("nonexisting.mp3"))
+            f.writelines(
+                [
+                    os.path.join("a", "b", "c.mp3") + "\n",
+                    os.path.join("d", "e", "f.mp3") + "\n",
+                    "nonexisting.mp3\n",
+                ]
+            )

         self.config["playlist"]["relative_to"] = self.music_dir

@@ -183,40 +165,33 @@ class PlaylistTestRelativeToDir(PlaylistQueryTest, PlaylistTestCase):
 class PlaylistTestRelativeToPls(PlaylistQueryTest, PlaylistTestCase):
     def setup_test(self):
         with open(os.path.join(self.playlist_dir, "absolute.m3u"), "w") as f:
-            f.write(
-                "{}\n".format(os.path.join(self.music_dir, "a", "b", "c.mp3"))
-            )
-            f.write(
-                "{}\n".format(os.path.join(self.music_dir, "d", "e", "f.mp3"))
-            )
-            f.write(
-                "{}\n".format(os.path.join(self.music_dir, "nonexisting.mp3"))
+            f.writelines(
+                [
+                    os.path.join(self.music_dir, "a", "b", "c.mp3") + "\n",
+                    os.path.join(self.music_dir, "d", "e", "f.mp3") + "\n",
+                    os.path.join(self.music_dir, "nonexisting.mp3") + "\n",
+                ]
             )

         with open(os.path.join(self.playlist_dir, "relative.m3u"), "w") as f:
-            f.write(
-                "{}\n".format(
+            f.writelines(
+                [
                     os.path.relpath(
                         os.path.join(self.music_dir, "a", "b", "c.mp3"),
                         start=self.playlist_dir,
                     )
-                )
-            )
-            f.write(
-                "{}\n".format(
+                    + "\n",
                     os.path.relpath(
                         os.path.join(self.music_dir, "d", "e", "f.mp3"),
                         start=self.playlist_dir,
                     )
-                )
-            )
-            f.write(
-                "{}\n".format(
+                    + "\n",
                     os.path.relpath(
                         os.path.join(self.music_dir, "nonexisting.mp3"),
                         start=self.playlist_dir,
                     )
-                )
+                    + "\n",
+                ]
             )

         self.config["playlist"]["relative_to"] = "playlist"

@@ -226,20 +201,22 @@ class PlaylistTestRelativeToPls(PlaylistQueryTest, PlaylistTestCase):
 class PlaylistUpdateTest:
     def setup_test(self):
         with open(os.path.join(self.playlist_dir, "absolute.m3u"), "w") as f:
-            f.write(
-                "{}\n".format(os.path.join(self.music_dir, "a", "b", "c.mp3"))
-            )
-            f.write(
-                "{}\n".format(os.path.join(self.music_dir, "d", "e", "f.mp3"))
-            )
-            f.write(
-                "{}\n".format(os.path.join(self.music_dir, "nonexisting.mp3"))
+            f.writelines(
+                [
+                    os.path.join(self.music_dir, "a", "b", "c.mp3") + "\n",
+                    os.path.join(self.music_dir, "d", "e", "f.mp3") + "\n",
+                    os.path.join(self.music_dir, "nonexisting.mp3") + "\n",
+                ]
             )

         with open(os.path.join(self.playlist_dir, "relative.m3u"), "w") as f:
-            f.write("{}\n".format(os.path.join("a", "b", "c.mp3")))
-            f.write("{}\n".format(os.path.join("d", "e", "f.mp3")))
-            f.write("{}\n".format("nonexisting.mp3"))
+            f.writelines(
+                [
+                    os.path.join("a", "b", "c.mp3") + "\n",
+                    os.path.join("d", "e", "f.mp3") + "\n",
+                    "nonexisting.mp3\n",
+                ]
+            )

         self.config["playlist"]["auto"] = True
         self.config["playlist"]["relative_to"] = "library"

@@ -249,9 +226,7 @@ class PlaylistTestItemMoved(PlaylistUpdateTest, PlaylistTestCase):
     def test_item_moved(self):
         # Emit item_moved event for an item that is in a playlist
         results = self.lib.items(
-            "path:{}".format(
-                quote(os.path.join(self.music_dir, "d", "e", "f.mp3"))
-            )
+            f"path:{quote(os.path.join(self.music_dir, 'd', 'e', 'f.mp3'))}"
         )
         item = results[0]
         beets.plugins.send(
@@ -265,9 +240,7 @@

         # Emit item_moved event for an item that is not in a playlist
         results = self.lib.items(
-            "path:{}".format(
-                quote(os.path.join(self.music_dir, "x", "y", "z.mp3"))
-            )
+            f"path:{quote(os.path.join(self.music_dir, 'x', 'y', 'z.mp3'))}"
         )
         item = results[0]
         beets.plugins.send(
@@ -309,18 +282,14 @@ class PlaylistTestItemRemoved(PlaylistUpdateTest, PlaylistTestCase):
     def test_item_removed(self):
         # Emit item_removed event for an item that is in a playlist
         results = self.lib.items(
-            "path:{}".format(
-                quote(os.path.join(self.music_dir, "d", "e", "f.mp3"))
-            )
+            f"path:{quote(os.path.join(self.music_dir, 'd', 'e', 'f.mp3'))}"
         )
         item = results[0]
         beets.plugins.send("item_removed", item=item)

         # Emit item_removed event for an item that is not in a playlist
         results = self.lib.items(
-            "path:{}".format(
-                quote(os.path.join(self.music_dir, "x", "y", "z.mp3"))
-            )
+            f"path:{quote(os.path.join(self.music_dir, 'x', 'y', 'z.mp3'))}"
         )
         item = results[0]
         beets.plugins.send("item_removed", item=item)
diff --git a/test/plugins/test_plexupdate.py b/test/plugins/test_plexupdate.py
index f319db6ce..ab53d8c2e 100644
--- a/test/plugins/test_plexupdate.py
+++ b/test/plugins/test_plexupdate.py
@@ -29,7 +29,7 @@ class PlexUpdateTest(PluginTestCase):
             "</Directory>"
             '<Directory allowSync="0" art="/:/resources/artist-fanart.jpg" '
             'filters="1" refreshing="0" thumb="/:/resources/artist.png" '
-            'key="2" type="artist" title="' + escaped_section_name + '" '
+            f'key="2" type="artist" title="{escaped_section_name}" '
             'composite="/library/sections/2/composite/1416929243" '
             'agent="com.plexapp.agents.lastfm" scanner="Plex Music Scanner" '
             'language="en" uuid="90897c95-b3bd-4778-a9c8-1f43cb78f047" '
diff --git a/test/plugins/test_plugin_mediafield.py b/test/plugins/test_plugin_mediafield.py
index 898e891ce..84565b47b 100644
--- a/test/plugins/test_plugin_mediafield.py
+++ b/test/plugins/test_plugin_mediafield.py
@@ -43,7 +43,7 @@ list_field_extension = mediafile.ListMediaField(

 class ExtendedFieldTestMixin(BeetsTestCase):
     def _mediafile_fixture(self, name, extension="mp3"):
-        name = bytestring_path(name + "." + extension)
+        name = bytestring_path(f"{name}.{extension}")
         src = os.path.join(_common.RSRC, name)
         target = os.path.join(self.temp_dir, name)
         shutil.copy(syspath(src), syspath(target))
diff --git a/test/plugins/test_random.py b/test/plugins/test_random.py
index 5bff1ee5e..9bcf8e59b 100644
--- a/test/plugins/test_random.py
+++ b/test/plugins/test_random.py
@@ -69,7 +69,7 @@ class RandomTest(TestHelper, unittest.TestCase):
             # Print a histogram (useful for debugging).
             if histogram:
                 for i in range(len(self.items)):
-                    print("{:2d} {}".format(i, "*" * positions.count(i)))
+                    print(f"{i:2d} {'*' * positions.count(i)}")
             return self._stats(positions)

         mean1, stdev1, median1 = experiment("artist")
diff --git a/test/plugins/test_replaygain.py b/test/plugins/test_replaygain.py
index 091298766..094349b25 100644
--- a/test/plugins/test_replaygain.py
+++ b/test/plugins/test_replaygain.py
@@ -204,9 +204,7 @@ class ReplayGainCliTest:
             # This test is a lot less interesting if the backend cannot write
             # both tag types.
             self.skipTest(
-                "r128 tags for opus not supported on backend {}".format(
-                    self.backend
-                )
+                f"r128 tags for opus not supported on backend {self.backend}"
             )

         album_rg = self._add_album(1)
@@ -263,9 +261,7 @@ class ReplayGainCliTest:
     def test_cli_writes_only_r128_tags(self):
         if not self.has_r128_support:
             self.skipTest(
-                "r128 tags for opus not supported on backend {}".format(
-                    self.backend
-                )
+                f"r128 tags for opus not supported on backend {self.backend}"
             )

         album = self._add_album(2, ext="opus")
@@ -299,9 +295,7 @@ class ReplayGainCliTest:
     def test_r128_targetlevel_has_effect(self):
         if not self.has_r128_support:
             self.skipTest(
-                "r128 tags for opus not supported on backend {}".format(
-                    self.backend
-                )
+                f"r128 tags for opus not supported on backend {self.backend}"
             )

         album = self._add_album(1, ext="opus")
diff --git a/test/plugins/test_smartplaylist.py b/test/plugins/test_smartplaylist.py
index c8e516e8b..d3569d836 100644
--- a/test/plugins/test_smartplaylist.py
+++ b/test/plugins/test_smartplaylist.py
@@ -227,11 +227,10 @@ class SmartPlaylistTest(BeetsTestCase):
         content = m3u_filepath.read_bytes()
         rmtree(syspath(dir))

-        assert (
-            content
-            == b"#EXTM3U\n"
-            + b"#EXTINF:300,fake artist - fake title\n"
-            + b"http://beets:8337/files/tagada.mp3\n"
+        assert content == (
+            b"#EXTM3U\n"
+            b"#EXTINF:300,fake artist - fake title\n"
+            b"http://beets:8337/files/tagada.mp3\n"
         )

     def test_playlist_update_output_extm3u_fields(self):
@@ -278,11 +277,10 @@ class SmartPlaylistTest(BeetsTestCase):
         content = m3u_filepath.read_bytes()
         rmtree(syspath(dir))

-        assert (
-            content
-            == b"#EXTM3U\n"
-            + b'#EXTINF:300 id="456" genre="Fake%20Genre",Fake Artist - fake Title\n'
-            + b"/tagada.mp3\n"
+        assert content == (
+            b"#EXTM3U\n"
+            b'#EXTINF:300 id="456" genre="Fake%20Genre",Fake Artist - fake Title\n'
+            b"/tagada.mp3\n"
         )

     def test_playlist_update_uri_format(self):
diff --git a/test/plugins/test_spotify.py b/test/plugins/test_spotify.py
index a2fb26f4b..bc55485c6 100644
--- a/test/plugins/test_spotify.py
+++ b/test/plugins/test_spotify.py
@@ -82,8 +82,8 @@ class SpotifyPluginTest(PluginTestCase):
         params = _params(responses.calls[0].request.url)
         query = params["q"][0]
         assert "duifhjslkef" in query
-        assert "artist:ujydfsuihse" in query
-        assert "album:lkajsdflakjsd" in query
+        assert "artist:'ujydfsuihse'" in query
+        assert "album:'lkajsdflakjsd'" in query
         assert params["type"] == ["track"]

     @responses.activate
@@ -117,8 +117,8 @@ class SpotifyPluginTest(PluginTestCase):
         params = _params(responses.calls[0].request.url)
         query = params["q"][0]
         assert "Happy" in query
-        assert "artist:Pharrell Williams" in query
-        assert "album:Despicable Me 2" in query
+        assert "artist:'Pharrell Williams'" in query
+        assert "album:'Despicable Me 2'" in query
         assert params["type"] == ["track"]

     @responses.activate
@@ -132,7 +132,7 @@ class SpotifyPluginTest(PluginTestCase):

         responses.add(
             responses.GET,
-            spotify.SpotifyPlugin.track_url + "6NPVjNh8Jhru9xOmyQigds",
+            f"{spotify.SpotifyPlugin.track_url}6NPVjNh8Jhru9xOmyQigds",
             body=response_body,
             status=200,
             content_type="application/json",
@@ -145,7 +145,7 @@ class SpotifyPluginTest(PluginTestCase):

         responses.add(
             responses.GET,
-            spotify.SpotifyPlugin.album_url + "5l3zEmMrOhOzG8d8s83GOL",
+            f"{spotify.SpotifyPlugin.album_url}5l3zEmMrOhOzG8d8s83GOL",
             body=response_body,
             status=200,
             content_type="application/json",
@@ -233,8 +233,8 @@ class SpotifyPluginTest(PluginTestCase):
         params = _params(responses.calls[0].request.url)
         query = params["q"][0]
         assert item.title in query
-        assert f"artist:{item.albumartist}" in query
-        assert f"album:{item.album}" in query
+        assert f"artist:'{item.albumartist}'" in query
+        assert f"album:'{item.album}'" in query
         assert not query.isascii()

         # Is not found in the library if ascii encoding is enabled
diff --git a/test/plugins/test_substitute.py b/test/plugins/test_substitute.py
index 48014e231..fc3789c0b 100644
--- a/test/plugins/test_substitute.py
+++ b/test/plugins/test_substitute.py
@@ -55,8 +55,10 @@ class SubstitutePluginTest(PluginTestCase):
     [
         ("King Creosote & Jon Hopkins", "King Creosote"),
         (
-            "Michael Hurley, The Holy Modal Rounders, Jeffrey Frederick & "
-            + "The Clamtones",
+            (
+                "Michael Hurley, The Holy Modal Rounders, Jeffrey"
+                " Frederick & The Clamtones"
+            ),
             "Michael Hurley",
         ),
         ("James Yorkston and the Athletes", "James Yorkston"),
diff --git a/test/plugins/test_web.py b/test/plugins/test_web.py
index 2ad07bbe5..9fc3d109d 100644
--- a/test/plugins/test_web.py
+++ b/test/plugins/test_web.py
@@ -142,7 +142,7 @@ class WebPluginTest(ItemInDBTestCase):
     def test_get_single_item_by_path(self):
         data_path = os.path.join(_common.RSRC, b"full.mp3")
         self.lib.add(Item.from_path(data_path))
-        response = self.client.get("/item/path/" + data_path.decode("utf-8"))
+        response = self.client.get(f"/item/path/{data_path.decode('utf-8')}")
         res_json = json.loads(response.data.decode("utf-8"))

         assert response.status_code == 200
@@ -152,12 +152,11 @@ class WebPluginTest(ItemInDBTestCase):
         data_path = os.path.join(_common.RSRC, b"full.mp3")
         # data_path points to a valid file, but we have not added the file
         # to the library.
-        response = self.client.get("/item/path/" + data_path.decode("utf-8"))
+        response = self.client.get(f"/item/path/{data_path.decode('utf-8')}")

         assert response.status_code == 404

     def test_get_item_empty_query(self):
-        """testing item query: <empty>"""
         response = self.client.get("/item/query/")
         res_json = json.loads(response.data.decode("utf-8"))

@@ -165,7 +164,6 @@ class WebPluginTest(ItemInDBTestCase):
         assert len(res_json["items"]) == 3

     def test_get_simple_item_query(self):
-        """testing item query: another"""
         response = self.client.get("/item/query/another")
         res_json = json.loads(response.data.decode("utf-8"))

@@ -174,8 +172,7 @@ class WebPluginTest(ItemInDBTestCase):
         assert res_json["results"][0]["title"] == "another title"

     def test_query_item_string(self):
-        """testing item query: testattr:ABC"""
-        response = self.client.get("/item/query/testattr%3aABC")
+        response = self.client.get("/item/query/testattr%3aABC")  # testattr:ABC
         res_json = json.loads(response.data.decode("utf-8"))

         assert response.status_code == 200
@@ -183,8 +180,9 @@ class WebPluginTest(ItemInDBTestCase):
         assert res_json["results"][0]["title"] == "and a third"

     def test_query_item_regex(self):
-        """testing item query: testattr::[A-C]+"""
-        response = self.client.get("/item/query/testattr%3a%3a[A-C]%2b")
+        response = self.client.get(
+            "/item/query/testattr%3a%3a[A-C]%2b"
+        )  # testattr::[A-C]+
         res_json = json.loads(response.data.decode("utf-8"))

         assert response.status_code == 200
@@ -192,8 +190,9 @@ class WebPluginTest(ItemInDBTestCase):
         assert res_json["results"][0]["title"] == "and a third"

     def test_query_item_regex_backslash(self):
-        # """ testing item query: testattr::\w+ """
-        response = self.client.get("/item/query/testattr%3a%3a%5cw%2b")
+        response = self.client.get(
+            "/item/query/testattr%3a%3a%5cw%2b"
+        )  # testattr::\w+
         res_json = json.loads(response.data.decode("utf-8"))

         assert response.status_code == 200
@@ -201,7 +200,6 @@ class WebPluginTest(ItemInDBTestCase):
         assert res_json["results"][0]["title"] == "and a third"

     def test_query_item_path(self):
-        # """ testing item query: path:\somewhere\a """
         """Note: path queries are special: the query item must match the path
         from the root all the way to a directory, so this matches 1 item"""
         """ Note: filesystem separators in the query must be '\' """
@@ -267,8 +265,9 @@ class WebPluginTest(ItemInDBTestCase):
         assert response_track_titles == {"title", "and a third"}

     def test_query_album_string(self):
-        """testing query: albumtest:xy"""
-        response = self.client.get("/album/query/albumtest%3axy")
+        response = self.client.get(
+            "/album/query/albumtest%3axy"
+        )  # albumtest:xy
         res_json = json.loads(response.data.decode("utf-8"))

         assert response.status_code == 200
@@ -276,8 +275,9 @@ class WebPluginTest(ItemInDBTestCase):
         assert res_json["results"][0]["album"] == "album"

     def test_query_album_artpath_regex(self):
-        """testing query: artpath::art_"""
-        response = self.client.get("/album/query/artpath%3a%3aart_")
+        response = self.client.get(
+            "/album/query/artpath%3a%3aart_"
+        )  # artpath::art_
         res_json = json.loads(response.data.decode("utf-8"))

         assert response.status_code == 200
@@ -285,8 +285,9 @@ class WebPluginTest(ItemInDBTestCase):
         assert res_json["results"][0]["album"] == "other album"

     def test_query_album_regex_backslash(self):
-        # """ testing query: albumtest::\w+ """
-        response = self.client.get("/album/query/albumtest%3a%3a%5cw%2b")
+        response = self.client.get(
+            "/album/query/albumtest%3a%3a%5cw%2b"
+        )  # albumtest::\w+
         res_json = json.loads(response.data.decode("utf-8"))

         assert response.status_code == 200
@@ -310,18 +311,18 @@ class WebPluginTest(ItemInDBTestCase):
         )

         # Check we can find the temporary item we just created
-        response = self.client.get("/item/" + str(item_id))
+        response = self.client.get(f"/item/{item_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200
         assert res_json["id"] == item_id

         # Delete item by id
-        response = self.client.delete("/item/" + str(item_id))
+        response = self.client.delete(f"/item/{item_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200

         # Check the item has gone
-        response = self.client.get("/item/" + str(item_id))
+        response = self.client.get(f"/item/{item_id}")
         assert response.status_code == 404
         # Note: if this fails, the item may still be around
         # and may cause other tests to fail
@@ -336,18 +337,18 @@
         item_id = self.lib.add(Item.from_path(ipath))

         # Check we can find the temporary item we just created
-        response = self.client.get("/item/" + str(item_id))
+        response = self.client.get(f"/item/{item_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200
         assert res_json["id"] == item_id

         # Delete item by id, without deleting file
-        response = self.client.delete("/item/" + str(item_id))
+        response = self.client.delete(f"/item/{item_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200

         # Check the item has gone
-        response = self.client.get("/item/" + str(item_id))
+        response = self.client.get(f"/item/{item_id}")
         assert response.status_code == 404

         # Check the file has not gone
@@ -364,18 +365,18 @@
         item_id = self.lib.add(Item.from_path(ipath))

         # Check we can find the temporary item we just created
-        response = self.client.get("/item/" + str(item_id))
+        response = self.client.get(f"/item/{item_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200
         assert res_json["id"] == item_id

         # Delete item by id, with file
-        response = self.client.delete("/item/" + str(item_id) + "?delete")
+        response = self.client.delete(f"/item/{item_id}?delete")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200

         # Check the item has gone
-        response = self.client.get("/item/" + str(item_id))
+        response = self.client.get(f"/item/{item_id}")
         assert response.status_code == 404

         # Check the file has gone
@@ -427,17 +428,17 @@
         )

         # Check we can find the temporary item we just created
-        response = self.client.get("/item/" + str(item_id))
+        response = self.client.get(f"/item/{item_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200
         assert res_json["id"] == item_id

         # Try to delete item by id
-        response = self.client.delete("/item/" + str(item_id))
+        response = self.client.delete(f"/item/{item_id}")
         assert response.status_code == 405

         # Check the item has not gone
-        response = self.client.get("/item/" + str(item_id))
+        response = self.client.get(f"/item/{item_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200
         assert res_json["id"] == item_id
@@ -481,18 +482,18 @@
         )

         # Check we can find the temporary album we just created
-        response = self.client.get("/album/" + str(album_id))
+        response = self.client.get(f"/album/{album_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200
         assert res_json["id"] == album_id

         # Delete album by id
-        response = self.client.delete("/album/" + str(album_id))
+        response = self.client.delete(f"/album/{album_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200

         # Check the album has gone
-        response = self.client.get("/album/" + str(album_id))
+        response = self.client.get(f"/album/{album_id}")
         assert response.status_code == 404
         # Note: if this fails, the album may still be around
         # and may cause other tests to fail
@@ -543,17 +544,17 @@ class WebPluginTest(ItemInDBTestCase):
         )

         # Check we can find the temporary album we just created
-        response = self.client.get("/album/" + str(album_id))
+        response = self.client.get(f"/album/{album_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200
         assert res_json["id"] == album_id

         # Try to delete album by id
-        response = self.client.delete("/album/" + str(album_id))
+        response = self.client.delete(f"/album/{album_id}")
         assert response.status_code == 405

         # Check the item has not gone
-        response = self.client.get("/album/" + str(album_id))
+        response = self.client.get(f"/album/{album_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200
         assert res_json["id"] == album_id
@@ -603,7 +604,7 @@ class WebPluginTest(ItemInDBTestCase):
         )

         # Check we can find the temporary item we just created
-        response = self.client.get("/item/" + str(item_id))
+        response = self.client.get(f"/item/{item_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200
         assert res_json["id"] == item_id
@@ -613,7 +614,7 @@
         # Patch item by id
         # patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"}]})
         response = self.client.patch(
-            "/item/" + str(item_id), json={"test_patch_f2": "New"}
+            f"/item/{item_id}", json={"test_patch_f2": "New"}
         )
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200
@@ -622,7 +623,7 @@
         assert res_json["test_patch_f2"] == "New"

         # Check the update has really worked
-        response = self.client.get("/item/" + str(item_id))
+        response = self.client.get(f"/item/{item_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200
         assert res_json["id"] == item_id
@@ -647,7 +648,7 @@
         )

         # Check we can find the temporary item we just created
-        response = self.client.get("/item/" + str(item_id))
+        response = self.client.get(f"/item/{item_id}")
         res_json = json.loads(response.data.decode("utf-8"))
         assert response.status_code == 200
         assert res_json["id"] == item_id
@@ -657,7 +658,7 @@
         # Patch item by id
         # patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"})
         response = self.client.patch(
-            "/item/" + str(item_id), json={"test_patch_f2": "New"}
+            f"/item/{item_id}", json={"test_patch_f2": "New"}
         )
         assert response.status_code == 405

@@ -670,6 +671,6 @@
         assert os.path.exists(ipath)
         item_id = self.lib.add(Item.from_path(ipath))

-        response = self.client.get("/item/" + str(item_id) + "/file")
+        response = self.client.get(f"/item/{item_id}/file")

         assert response.status_code == 200
diff --git a/test/plugins/test_zero.py b/test/plugins/test_zero.py
index 51913c8e0..b08bf0dca 100644
--- a/test/plugins/test_zero.py
+++ b/test/plugins/test_zero.py
@@ -249,6 +249,54 @@ class ZeroPluginTest(PluginTestCase):
         assert "id" not in z.fields_to_progs

+    def test_omit_single_disc_with_tags_single(self):
+        item = self.add_item_fixture(
+            disctotal=1, disc=1, comments="test comment"
+        )
+        item.write()
+        with self.configure_plugin(
+            {"omit_single_disc": True, "fields": ["comments"]}
+        ):
+            item.write()
+
+        mf = MediaFile(syspath(item.path))
+        assert mf.comments is None
+        assert mf.disc == 0
+
+    def test_omit_single_disc_with_tags_multi(self):
+        item = self.add_item_fixture(
+            disctotal=4, disc=1, comments="test comment"
+        )
+        item.write()
+        with self.configure_plugin(
+            {"omit_single_disc": True, "fields": ["comments"]}
+        ):
+            item.write()
+
+        mf = MediaFile(syspath(item.path))
+        assert mf.comments is None
+        assert mf.disc == 1
+
+    def test_omit_single_disc_only_change_single(self):
+        item = self.add_item_fixture(disctotal=1, disc=1)
+        item.write()
+
+        with self.configure_plugin({"omit_single_disc": True}):
+            item.write()
+
+        mf = MediaFile(syspath(item.path))
+        assert mf.disc == 0
+
+    def test_omit_single_disc_only_change_multi(self):
+        item = self.add_item_fixture(disctotal=4, disc=1)
+        item.write()
+
+        with self.configure_plugin({"omit_single_disc": True}):
+            item.write()
+
+        mf = MediaFile(syspath(item.path))
+        assert mf.disc == 1
+
     def test_empty_query_n_response_no_changes(self):
         item = self.add_item_fixture(
             year=2016, day=13, month=3, comments="test comment"
         )
diff --git a/test/test_vfs.py b/test/plugins/utils/test_vfs.py
similarity index 97%
rename from test/test_vfs.py
rename to test/plugins/utils/test_vfs.py
index 7f75fbd83..9505075f9 100644
--- a/test/test_vfs.py
+++ b/test/plugins/utils/test_vfs.py
@@ -14,9 +14,9 @@

 """Tests for the virtual filesystem builder.."""

-from beets import vfs
 from beets.test import _common
 from beets.test.helper import BeetsTestCase
+from beetsplug._utils import vfs


 class VFSTest(BeetsTestCase):
diff --git a/test/rsrc/lyrics/examplecom/beetssong.txt b/test/rsrc/lyrics/examplecom/beetssong.txt
index c546dd602..436612ce0 100644
--- a/test/rsrc/lyrics/examplecom/beetssong.txt
+++ b/test/rsrc/lyrics/examplecom/beetssong.txt
@@ -221,7 +221,7 @@ e9.size = "120x600, 160x600";
 <h2>John Doe <br> beets song lyrics</h2>
 <img src="images/phone-left.gif" alt="Ringtones left icon" width="16" height="17"> <a href="http://www.ringtonematcher.com/go/?sid=LBSMros&artist=The+John Doe&song=Beets+Song" target="_blank"><b><font size="+1" color="red" face="arial">Send "beets song" Ringtone to your Cell</font></b></a> <img src="images/phone-right.gif" alt="Ringtones right icon" width="16" height="17"><br><br><center>Beets is the media library management system for obsessive music geeks.<br>
-The purpose of beets is to get your music collection right once and for all. It catalogs your collection, automatically improving its metadata as it goes. It then provides a bouquet of tools for manipulating and accessing your music.<br>
+The purpose of beets is to get your music collection right once and for all. It catalogs your collection, automatically improving its metadata as it goes. It then provides a suite of tools for manipulating and accessing your music.<br>
 <div class='flow breaker'> </div>
 Here's an example of beets' brainy tag corrector doing its thing:
 Because beets is designed as a library, it can do almost anything you can imagine for your music collection. Via plugins, beets becomes a panacea</center>
diff --git a/test/test_art_resize.py b/test/test_art_resize.py
index 34bf810b9..0ccbb0eae 100644
--- a/test/test_art_resize.py
+++ b/test/test_art_resize.py
@@ -150,9 +150,5 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, BeetsTestCase):
         metadata = {"a": "A", "b": "B"}
         im = DummyIMBackend()
         im.write_metadata("foo", metadata)
-        try:
-            command = im.convert_cmd + "foo -set a A -set b B foo".split()
-            mock_util.command_output.assert_called_once_with(command)
-        except AssertionError:
-            command = im.convert_cmd + "foo -set b B -set a A foo".split()
-            mock_util.command_output.assert_called_once_with(command)
+        command = [*im.convert_cmd, *"foo -set a A -set b B foo".split()]
+        mock_util.command_output.assert_called_once_with(command)
diff --git a/test/test_datequery.py b/test/test_datequery.py
index 1063a62c1..d73fca45f 100644
--- a/test/test_datequery.py
+++ b/test/test_datequery.py
@@ -186,37 +186,37 @@ class DateQueryTestRelativeMore(ItemInDBTestCase):
     def test_relative(self):
         for timespan in ["d", "w", "m", "y"]:
-            query = DateQuery("added", "-4" + timespan + "..+4" + timespan)
+            query = DateQuery("added", f"-4{timespan}..+4{timespan}")
             matched = self.lib.items(query)
             assert len(matched) == 1

     def test_relative_fail(self):
         for timespan in ["d", "w", "m", "y"]:
-            query = DateQuery("added", "-2" + timespan + "..-1" + timespan)
+            query = DateQuery("added", f"-2{timespan}..-1{timespan}")
             matched = self.lib.items(query)
             assert len(matched) == 0

     def test_start_relative(self):
         for timespan in ["d", "w", "m", "y"]:
-            query = DateQuery("added", "-4" + timespan + "..")
+            query = DateQuery("added", f"-4{timespan}..")
             matched = self.lib.items(query)
             assert len(matched) == 1

     def test_start_relative_fail(self):
         for timespan in ["d", "w", "m", "y"]:
-            query = DateQuery("added", "4" + timespan + "..")
+            query = DateQuery("added", f"4{timespan}..")
             matched = self.lib.items(query)
             assert len(matched) == 0

     def test_end_relative(self):
         for timespan in ["d", "w", "m", "y"]:
-            query = DateQuery("added", "..+4" + timespan)
+            query = DateQuery("added", f"..+4{timespan}")
             matched = self.lib.items(query)
             assert len(matched) == 1

     def test_end_relative_fail(self):
         for timespan in ["d", "w", "m", "y"]:
-            query = DateQuery("added", "..-4" + timespan)
+            query = DateQuery("added", f"..-4{timespan}")
             matched = self.lib.items(query)
             assert len(matched) == 0
diff --git a/test/test_dbcore.py b/test/test_dbcore.py
index 3f9a9d45e..653adf298 100644
--- a/test/test_dbcore.py
+++ b/test/test_dbcore.py
@@ -23,6 +23,7 @@ from tempfile import mkstemp
 import pytest

 from beets import dbcore
+from beets.dbcore.db import DBCustomFunctionError
 from beets.library import LibModel
 from beets.test import _common
 from beets.util import cached_classproperty
@@ -31,6 +32,13 @@ from beets.util import cached_classproperty
 # have multiple models with different numbers of fields.


+@pytest.fixture
+def db(model):
+    db = model(":memory:")
+    yield db
+    db._connection().close()
+
+
 class SortFixture(dbcore.query.FieldSort):
     pass

@@ -81,7 +89,6 @@ class ModelFixture1(LibModel):

 class DatabaseFixture1(dbcore.Database):
     _models = (ModelFixture1,)
-    pass


 class ModelFixture2(ModelFixture1):
@@ -94,7 +101,6 @@ class ModelFixture2(ModelFixture1):

 class DatabaseFixture2(dbcore.Database):
     _models = (ModelFixture2,)
-    pass


 class ModelFixture3(ModelFixture1):
@@ -108,7 +114,6 @@

 class DatabaseFixture3(dbcore.Database):
     _models = (ModelFixture3,)
-    pass


 class ModelFixture4(ModelFixture1):
@@ -123,7 +128,6 @@

 class DatabaseFixture4(dbcore.Database):
     _models = (ModelFixture4,)
-    pass


 class AnotherModelFixture(ModelFixture1):
@@ -145,12 +149,10 @@ class ModelFixture5(ModelFixture1):

 class DatabaseFixture5(dbcore.Database):
     _models = (ModelFixture5,)
-    pass


 class DatabaseFixtureTwoModels(dbcore.Database):
     _models = (ModelFixture2, AnotherModelFixture)
-    pass


 class ModelFixtureWithGetters(dbcore.Model):
@@ -256,7 +258,7 @@ class TransactionTest(unittest.TestCase):
     def test_query_no_increase_revision(self):
         old_rev = self.db.revision
         with self.db.transaction() as tx:
-            tx.query("PRAGMA table_info(%s)" % ModelFixture1._table)
+            tx.query(f"PRAGMA table_info({ModelFixture1._table})")

         assert self.db.revision == old_rev

@@ -784,3 +786,25 @@ class ResultsIteratorTest(unittest.TestCase):
             self.db._fetch(ModelFixture1, dbcore.query.FalseQuery()).get()
             is None
         )
+
+
+class TestException:
+    @pytest.mark.parametrize("model", [DatabaseFixture1])
+    @pytest.mark.filterwarnings(
+        "ignore: .*plz_raise.*: pytest.PytestUnraisableExceptionWarning"
+    )
+    @pytest.mark.filterwarnings(
+        "error: .*: pytest.PytestUnraisableExceptionWarning"
+    )
+    def test_custom_function_error(self, db: DatabaseFixture1):
+        def plz_raise():
+            raise Exception("i haz raized")
+
+        db._connection().create_function("plz_raise", 0, plz_raise)
+
+        with db.transaction() as tx:
+            tx.mutate("insert into test (field_one) values (1)")
+
+        with pytest.raises(DBCustomFunctionError):
+            with db.transaction() as tx:
+                tx.query("select * from test where plz_raise()")
diff --git a/test/test_library.py b/test/test_library.py
index 35791bad7..7c0529001 100644
--- a/test/test_library.py
+++ b/test/test_library.py
@@ -1033,7 +1033,7 @@ class ArtDestinationTest(BeetsTestCase):

     def test_art_filename_respects_setting(self):
         art = self.ai.art_destination("something.jpg")
-        new_art = bytestring_path("%sartimage.jpg" % os.path.sep)
+        new_art = bytestring_path(f"{os.path.sep}artimage.jpg")
         assert new_art in art

     def test_art_path_in_item_dir(self):
diff --git a/test/test_logging.py b/test/test_logging.py
index 1859ea2dd..48f9cbfd8 100644
--- a/test/test_logging.py
+++ b/test/test_logging.py
@@ -3,18 +3,21 @@
 import logging as log
 import sys
 import threading
-import unittest
-from io import StringIO
+from types import ModuleType
+from unittest.mock import patch
+
+import pytest

 import beets.logging as blog
-import beetsplug
 from beets import plugins, ui
 from beets.test import _common, helper
 from beets.test.helper import AsIsImporterMixin, ImportTestCase, PluginMixin


-class LoggingTest(unittest.TestCase):
-    def test_logging_management(self):
+class TestStrFormatLogger:
+    """Tests for the custom str-formatting logger."""
+
+    def test_logger_creation(self):
         l1 = log.getLogger("foo123")
         l2 = blog.getLogger("foo123")
         assert l1 == l2
@@ -34,49 +37,76 @@
         l6 = blog.getLogger()
         assert l1 != l6

-    def test_str_format_logging(self):
-        logger = blog.getLogger("baz123")
-        stream = StringIO()
-        handler = log.StreamHandler(stream)
+    @pytest.mark.parametrize(
+        "level", [log.DEBUG, log.INFO, log.WARNING, log.ERROR]
+    )
+    @pytest.mark.parametrize(
+        "msg, args, kwargs, expected",
+        [
+            ("foo {} bar {}", ("oof", "baz"), {}, "foo oof bar baz"),
+            (
+                "foo {bar} baz {foo}",
+                (),
+                {"foo": "oof", "bar": "baz"},
+                "foo baz baz oof",
+            ),
+            ("no args", (), {}, "no args"),
+            ("foo {} bar {baz}", ("oof",), {"baz": "baz"}, "foo oof bar baz"),
+        ],
+    )
+    def test_str_format_logging(
+        self, level, msg, args, kwargs, expected, caplog
+    ):
+        logger = blog.getLogger("test_logger")
+        logger.setLevel(level)

-        logger.addHandler(handler)
-        logger.propagate = False
+        with caplog.at_level(level, logger="test_logger"):
+            logger.log(level, msg, *args, **kwargs)

-        logger.warning("foo {0} {bar}", "oof", bar="baz")
-        handler.flush()
-        assert stream.getvalue(), "foo oof baz"
+        assert caplog.records, "No log records were captured"
+        assert str(caplog.records[0].msg) == expected
+
+
+class DummyModule(ModuleType):
+    class DummyPlugin(plugins.BeetsPlugin):
+        def __init__(self):
+            plugins.BeetsPlugin.__init__(self, "dummy")
+            self.import_stages = [self.import_stage]
+            self.register_listener("dummy_event", self.listener)
+
+        def log_all(self, name):
+            self._log.debug("debug {}", name)
+            self._log.info("info {}", name)
+            self._log.warning("warning {}", name)
+
+        def commands(self):
+            cmd = ui.Subcommand("dummy")
+            cmd.func = lambda _, __, ___: self.log_all("cmd")
+            return (cmd,)
+
+        def import_stage(self, session, task):
+            self.log_all("import_stage")
+
+        def listener(self):
+            self.log_all("listener")
+
+    def __init__(self, *_, **__):
+        module_name = "beetsplug.dummy"
+        super().__init__(module_name)
+        self.DummyPlugin.__module__ = module_name
+        self.DummyPlugin = self.DummyPlugin


 class LoggingLevelTest(AsIsImporterMixin, PluginMixin, ImportTestCase):
     plugin = "dummy"

-    class DummyModule:
-        class DummyPlugin(plugins.BeetsPlugin):
-            def __init__(self):
-                plugins.BeetsPlugin.__init__(self, "dummy")
-                self.import_stages = [self.import_stage]
-                self.register_listener("dummy_event", self.listener)
+    @classmethod
+    def setUpClass(cls):
+        patcher = patch.dict(sys.modules, {"beetsplug.dummy": DummyModule()})
+        patcher.start()
+        cls.addClassCleanup(patcher.stop)

-            def log_all(self, name):
-                self._log.debug("debug " + name)
-                self._log.info("info " + name)
-                self._log.warning("warning " + name)
-
-            def commands(self):
-                cmd = ui.Subcommand("dummy")
-                cmd.func = lambda _, __, ___: self.log_all("cmd")
-                return (cmd,)
-
-            def import_stage(self, session, task):
-                self.log_all("import_stage")
-
-            def listener(self):
-                self.log_all("listener")
-
-    def setUp(self):
-        sys.modules["beetsplug.dummy"] = self.DummyModule
-        beetsplug.dummy = self.DummyModule
-        super().setUp()
+        super().setUpClass()

     def test_command_level0(self):
         self.config["verbose"] = 0
@@ -172,9 +202,9 @@ class ConcurrentEventsTest(AsIsImporterMixin, ImportTestCase):
             self.t1_step = self.t2_step = 0

         def log_all(self, name):
-            self._log.debug("debug " + name)
-            self._log.info("info " + name)
-            self._log.warning("warning " + name)
+            self._log.debug("debug {}", name)
+            self._log.info("info {}", name)
+            self._log.warning("warning {}", name)

         def listener1(self):
             try:
diff --git a/test/test_plugins.py b/test/test_plugins.py
index df338f924..07bbf0966 100644
--- a/test/test_plugins.py
+++ b/test/test_plugins.py
@@ -523,3 +523,23 @@ class TestImportPlugin(PluginMixin):
         assert "PluginImportError" not in caplog.text, (
             f"Plugin '{plugin_name}' has issues during import."
         )
+
+
+class TestDeprecationCopy:
+    # TODO: remove this test in Beets 3.0.0
+    def test_legacy_metadata_plugin_deprecation(self):
+        """Test that a MetadataSourcePlugin with 'legacy' data_source
+        raises a deprecation warning and all function and properties are
+        copied from the base class.
+        """
+        with pytest.warns(DeprecationWarning, match="LegacyMetadataPlugin"):
+
+            class LegacyMetadataPlugin(plugins.BeetsPlugin):
+                data_source = "legacy"
+
+        # Assert all methods are present
+        assert hasattr(LegacyMetadataPlugin, "albums_for_ids")
+        assert hasattr(LegacyMetadataPlugin, "tracks_for_ids")
+        assert hasattr(LegacyMetadataPlugin, "data_source_mismatch_penalty")
+        assert hasattr(LegacyMetadataPlugin, "_extract_id")
+        assert hasattr(LegacyMetadataPlugin, "get_artist")
diff --git a/test/test_ui.py b/test/test_ui.py
index 664323e2a..534d0e466 100644
--- a/test/test_ui.py
+++ b/test/test_ui.py
@@ -1020,7 +1020,7 @@ class ConfigTest(TestPluginTestCase):

     def test_cli_config_file_loads_plugin_commands(self):
         with open(self.cli_config_path, "w") as file:
-            file.write("pluginpath: %s\n" % _common.PLUGINPATH)
+            file.write(f"pluginpath: {_common.PLUGINPATH}\n")
             file.write("plugins: test")

         self.run_command("--config", self.cli_config_path, "plugin", lib=None)
@@ -1257,11 +1257,10 @@ class ShowChangeTest(IOMixin, unittest.TestCase):
         with patch("beets.ui.commands.ui.term_width", return_value=30):
             # Test newline layout
             config["ui"]["import"]["layout"] = "newline"
-            long_name = "another artist with a" + (" very" * 10) + " long name"
+            long_name = f"another artist with a{' very' * 10} long name"
             msg = self._show_change(
                 cur_artist=long_name, cur_album="another album"
             )
-            # _common.log.info("Message:{}".format(msg))
             assert "artist: another artist" in msg
             assert " -> the artist" in msg
             assert "another album -> the album" not in msg
@@ -1271,7 +1270,7 @@ class ShowChangeTest(IOMixin, unittest.TestCase):
         with patch("beets.ui.commands.ui.term_width", return_value=54):
             # Test Column layout
             config["ui"]["import"]["layout"] = "column"
-            long_title = "a track with a" + (" very" * 10) + " long name"
+            long_title = f"a track with a{' very' * 10} long name"
             self.items[0].title = long_title
             msg = self._show_change()
             assert "(#1) a track (1:00) -> (#1) the title (0:00)" in msg
@@ -1280,7 +1279,7 @@
         # Patch ui.term_width to force wrapping
         with patch("beets.ui.commands.ui.term_width", return_value=30):
             config["ui"]["import"]["layout"] = "newline"
-            long_title = "a track with a" + (" very" * 10) + " long name"
+            long_title = f"a track with a{' very' * 10} long name"
             self.items[0].title = long_title
             msg = self._show_change()
             assert "(#1) a track with" in msg