From d5bd24bb648dea3880f32d40b4a7c73422a8b6ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 22 Jun 2025 17:47:30 +0100 Subject: [PATCH 01/95] Update pipx-install-action to fix caching errors --- .github/workflows/ci.yaml | 2 +- .github/workflows/integration_test.yaml | 2 +- .github/workflows/lint.yml | 8 ++++---- .github/workflows/make_release.yaml | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 333706dc7..ac3263bcd 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -21,7 +21,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v1.0.1 + uses: BrandonLWhite/pipx-install-action@v1.0.3 - name: Setup Python with poetry caching # poetry cache requires poetry to already be installed, weirdly uses: actions/setup-python@v5 diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index eae04d1d4..f88864c48 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -9,7 +9,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v1.0.1 + uses: BrandonLWhite/pipx-install-action@v1.0.3 - uses: actions/setup-python@v5 with: python-version: 3.9 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 16757da27..c9b66f402 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -53,7 +53,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v1.0.1 + uses: BrandonLWhite/pipx-install-action@v1.0.3 - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} @@ -74,7 +74,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v1.0.1 + uses: BrandonLWhite/pipx-install-action@v1.0.3 - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} @@ -94,7 +94,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v1.0.1 + uses: BrandonLWhite/pipx-install-action@v1.0.3 - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} @@ -118,7 +118,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v1.0.1 + uses: BrandonLWhite/pipx-install-action@v1.0.3 - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/make_release.yaml b/.github/workflows/make_release.yaml index 7ea2d631c..b18dded8d 100644 --- a/.github/workflows/make_release.yaml +++ b/.github/workflows/make_release.yaml @@ -19,7 +19,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v1.0.1 + uses: BrandonLWhite/pipx-install-action@v1.0.3 - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} @@ -50,7 +50,7 @@ jobs: ref: ${{ env.NEW_TAG }} - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v1.0.1 + uses: BrandonLWhite/pipx-install-action@v1.0.3 - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} From ab9b2e0b69210d733b1bf80d63380a279233c7d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 22 Jun 2025 18:07:22 +0100 Subject: [PATCH 02/95] Try using 
threeal/pipx-install-action@v1.0.0 for CI tests I have reported the issue with BrandonLWhite/pipx-install-action@v1.0.3 failing on Windows here: https://github.com/BrandonLWhite/pipx-install-action/issues/62 --- .github/workflows/ci.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ac3263bcd..9d0e67d5f 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -21,7 +21,10 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - uses: BrandonLWhite/pipx-install-action@v1.0.3 + # BrandonLWhite/pipx-install-action@v1.0.3 fails on Windows, thus we're using an alternative action here + uses: threeal/pipx-install-action@v1.0.0 + with: + packages: poethepoet>=0.26 poetry<2 - name: Setup Python with poetry caching # poetry cache requires poetry to already be installed, weirdly uses: actions/setup-python@v5 From 9926a1ac3cb8d4877061df2fae94ddb8b2aab7b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 29 Jun 2025 13:43:48 +0100 Subject: [PATCH 03/95] Revert "Try using threeal/pipx-install-action@v1.0.0 for CI tests" This reverts commit ab9b2e0b69210d733b1bf80d63380a279233c7d9. --- .github/workflows/ci.yaml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9d0e67d5f..ac3263bcd 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -21,10 +21,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Python tools - # BrandonLWhite/pipx-install-action@v1.0.3 fails on Windows, thus we're using an alternative action here - uses: threeal/pipx-install-action@v1.0.0 - with: - packages: poethepoet>=0.26 poetry<2 + uses: BrandonLWhite/pipx-install-action@v1.0.3 - name: Setup Python with poetry caching # poetry cache requires poetry to already be installed, weirdly uses: actions/setup-python@v5 From dd6cb538ac343c837348a0e4b404bc994ed97db1 Mon Sep 17 00:00:00 2001 From: dhruvravii <122979040+dhruvravii@users.noreply.github.com> Date: Tue, 1 Jul 2025 14:38:54 +0530 Subject: [PATCH 04/95] Fix: Spotify plugin unable to recognize Chinese and Japanese albums. (#5705) Fixes an issue where each spotify query was converted to ascii before sending. Adds a new config option to enable legacy behaviour. A file called japanese_track_request.json was made to mimic the Spotify API response since I don't have the credentials. Entries in that will need to be modified with the actual entries. 
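For anyone who prefers the old behaviour, a minimal configuration sketch (option name and default are taken from the documentation change in this patch; the exact placement in config.yaml may differ):

```yaml
spotify:
    # opt back in to the legacy ASCII-folding of search queries (default: no)
    search_query_ascii: yes
```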
Co-authored-by: Sebastian Mohr Co-authored-by: Sebastian Mohr <39738318+semohr@users.noreply.github.com> Co-authored-by: J0J0 Todos <2733783+JOJ0@users.noreply.github.com> --- beetsplug/spotify.py | 12 ++- docs/changelog.rst | 7 +- docs/plugins/spotify.rst | 8 ++ test/plugins/test_spotify.py | 79 +++++++++++++++- test/rsrc/spotify/japanese_track_request.json | 89 +++++++++++++++++++ 5 files changed, 188 insertions(+), 7 deletions(-) create mode 100644 test/rsrc/spotify/japanese_track_request.json diff --git a/beetsplug/spotify.py b/beetsplug/spotify.py index 9d285928a..76ceeed68 100644 --- a/beetsplug/spotify.py +++ b/beetsplug/spotify.py @@ -106,6 +106,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): "client_id": "4e414367a1d14c75a5c5129a627fcab8", "client_secret": "f82bdc09b2254f1a8286815d02fd46dc", "tokenfile": "spotify_token.json", + "search_query_ascii": False, } ) self.config["client_id"].redact = True @@ -388,9 +389,8 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): track.medium_total = medium_total return track - @staticmethod def _construct_search_query( - filters: dict[str, str], keywords: str = "" + self, filters: dict[str, str], keywords: str = "" ) -> str: """Construct a query string with the specified filters and keywords to be provided to the Spotify Search API @@ -407,7 +407,11 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): query = " ".join([q for q in query_components if q]) if not isinstance(query, str): query = query.decode("utf8") - return unidecode.unidecode(query) + + if self.config["search_query_ascii"].get(): + query = unidecode.unidecode(query) + + return query def _search_api( self, @@ -424,6 +428,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): :param keywords: (Optional) Query keywords to use. """ query = self._construct_search_query(keywords=keywords, filters=filters) + self._log.debug(f"Searching {self.data_source} for '{query}'") try: response = self._handle_response( @@ -560,6 +565,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): query = self._construct_search_query( keywords=keywords, filters=query_filters ) + failures.append(query) continue diff --git a/docs/changelog.rst b/docs/changelog.rst index 88b82e4da..1baa54011 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -39,7 +39,12 @@ Bug fixes: :bug:`5797` * :doc:`plugins/musicbrainz`: Fix the MusicBrainz search not taking into account the album/recording aliases - +* :doc:`/plugins/spotify`: Fix the issue with that every query to spotify was + ascii encoded. This resulted in bad matches for queries that contained special + e.g. non latin characters as 盗作. If you want to keep the legacy behavior + set the config option ``spotify.search_query_ascii: yes``. + :bug:`5699` + For packagers: * Optional ``extra_tags`` parameter has been removed from diff --git a/docs/plugins/spotify.rst b/docs/plugins/spotify.rst index 233d00726..c5aff8ef3 100644 --- a/docs/plugins/spotify.rst +++ b/docs/plugins/spotify.rst @@ -83,6 +83,13 @@ in config.yaml under the ``spotify:`` section: track/album/artist fields before sending them to Spotify. Can be useful for changing certain abbreviations, like ft. -> feat. See the examples below. Default: None. +- **search_query_ascii**: If set to ``yes``, the search query will be converted to + ASCII before being sent to Spotify. Converting searches to ASCII can + enhance search results in some cases, but in general, it is not recommended. 
+ For instance `artist:deadmau5 album:4×4` will be converted to + `artist:deadmau5 album:4x4` (notice `×!=x`). + Default: ``no``. + Here's an example:: @@ -92,6 +99,7 @@ Here's an example:: region_filter: US show_failures: on tiebreak: first + search_query_ascii: no regex: [ { diff --git a/test/plugins/test_spotify.py b/test/plugins/test_spotify.py index a2336df10..a2fb26f4b 100644 --- a/test/plugins/test_spotify.py +++ b/test/plugins/test_spotify.py @@ -7,7 +7,7 @@ import responses from beets.library import Item from beets.test import _common -from beets.test.helper import BeetsTestCase +from beets.test.helper import PluginTestCase from beetsplug import spotify @@ -23,10 +23,11 @@ def _params(url): return parse_qs(urlparse(url).query) -class SpotifyPluginTest(BeetsTestCase): +class SpotifyPluginTest(PluginTestCase): + plugin = "spotify" + @responses.activate def setUp(self): - super().setUp() responses.add( responses.POST, spotify.SpotifyPlugin.oauth_token_url, @@ -39,6 +40,7 @@ class SpotifyPluginTest(BeetsTestCase): "scope": "", }, ) + super().setUp() self.spotify = spotify.SpotifyPlugin() opts = ArgumentsMock("list", False) self.spotify._parse_opts(opts) @@ -176,3 +178,74 @@ class SpotifyPluginTest(BeetsTestCase): results = self.spotify._match_library_tracks(self.lib, "Happy") assert 1 == len(results) assert "6NPVjNh8Jhru9xOmyQigds" == results[0]["id"] + + @responses.activate + def test_japanese_track(self): + """Ensure non-ASCII characters remain unchanged in search queries""" + + # Path to the mock JSON file for the Japanese track + json_file = os.path.join( + _common.RSRC, b"spotify", b"japanese_track_request.json" + ) + + # Load the mock JSON response + with open(json_file, "rb") as f: + response_body = f.read() + + # Mock Spotify Search API response + responses.add( + responses.GET, + spotify.SpotifyPlugin.search_url, + body=response_body, + status=200, + content_type="application/json", + ) + + # Create a mock item with Japanese metadata + item = Item( + mb_trackid="56789", + album="盗作", + albumartist="ヨルシカ", + title="思想犯", + length=10, + ) + item.add(self.lib) + + # Search without ascii encoding + + with self.configure_plugin( + { + "search_query_ascii": False, + } + ): + assert self.spotify.config["search_query_ascii"].get() is False + # Call the method to match library tracks + results = self.spotify._match_library_tracks(self.lib, item.title) + + # Assertions to verify results + assert results is not None + assert 1 == len(results) + assert results[0]["name"] == item.title + assert results[0]["artists"][0]["name"] == item.albumartist + assert results[0]["album"]["name"] == item.album + + # Verify search query parameters + params = _params(responses.calls[0].request.url) + query = params["q"][0] + assert item.title in query + assert f"artist:{item.albumartist}" in query + assert f"album:{item.album}" in query + assert not query.isascii() + + # Is not found in the library if ascii encoding is enabled + with self.configure_plugin( + { + "search_query_ascii": True, + } + ): + assert self.spotify.config["search_query_ascii"].get() is True + results = self.spotify._match_library_tracks(self.lib, item.title) + params = _params(responses.calls[1].request.url) + query = params["q"][0] + + assert query.isascii() diff --git a/test/rsrc/spotify/japanese_track_request.json b/test/rsrc/spotify/japanese_track_request.json new file mode 100644 index 000000000..04559588e --- /dev/null +++ b/test/rsrc/spotify/japanese_track_request.json @@ -0,0 +1,89 @@ +{ + "tracks":{ + 
"href":"https://api.spotify.com/v1/search?query=Happy+album%3ADespicable+Me+2+artist%3APharrell+Williams&offset=0&limit=20&type=track", + "items":[ + { + "album":{ + "album_type":"compilation", + "available_markets":[ + "AD", "AR", "AT", "AU", "BE", "BG", "BO", "BR", "CA", + "CH", "CL", "CO", "CR", "CY", "CZ", "DE", "DK", "DO", + "EC", "EE", "ES", "FI", "FR", "GB", "GR", "GT", "HK", + "HN", "HU", "IE", "IS", "IT", "LI", "LT", "LU", "LV", + "MC", "MT", "MX", "MY", "NI", "NL", "NO", "NZ", "PA", + "PE", "PH", "PL", "PT", "PY", "RO", "SE", "SG", "SI", + "SK", "SV", "TR", "TW", "US", "UY" + ], + "external_urls":{ + "spotify":"https://open.spotify.com/album/5l3zEmMrOhOzG8d8s83GOL" + }, + "href":"https://api.spotify.com/v1/albums/5l3zEmMrOhOzG8d8s83GOL", + "id":"5l3zEmMrOhOzG8d8s83GOL", + "images":[ + { + "height":640, + "width":640, + "url":"https://i.scdn.co/image/cb7905340c132365bbaee3f17498f062858382e8" + }, + { + "height":300, + "width":300, + "url":"https://i.scdn.co/image/af369120f0b20099d6784ab31c88256113f10ffb" + }, + { + "height":64, + "width":64, + "url":"https://i.scdn.co/image/9dad385ddf2e7db0bef20cec1fcbdb08689d9ae8" + } + ], + "name":"盗作", + "type":"album", + "uri":"spotify:album:5l3zEmMrOhOzG8d8s83GOL" + }, + "artists":[ + { + "external_urls":{ + "spotify":"https://open.spotify.com/artist/2RdwBSPQiwcmiDo9kixcl8" + }, + "href":"https://api.spotify.com/v1/artists/2RdwBSPQiwcmiDo9kixcl8", + "id":"2RdwBSPQiwcmiDo9kixcl8", + "name":"ヨルシカ", + "type":"artist", + "uri":"spotify:artist:2RdwBSPQiwcmiDo9kixcl8" + } + ], + "available_markets":[ + "AD", "AR", "AT", "AU", "BE", "BG", "BO", "BR", "CA", + "CH", "CL", "CO", "CR", "CY", "CZ", "DE", "DK", "DO", + "EC", "EE", "ES", "FI", "FR", "GB", "GR", "GT", "HK", + "HN", "HU", "IE", "IS", "IT", "LI", "LT", "LU", "LV", + "MC", "MT", "MX", "MY", "NI", "NL", "NO", "NZ", "PA", + "PE", "PH", "PL", "PT", "PY", "RO", "SE", "SG", "SI", + "SK", "SV", "TR", "TW", "US", "UY" + ], + "disc_number":1, + "duration_ms":233305, + "explicit":false, + "external_ids":{ + "isrc":"USQ4E1300686" + }, + "external_urls":{ + "spotify":"https://open.spotify.com/track/6NPVjNh8Jhru9xOmyQigds" + }, + "href":"https://api.spotify.com/v1/tracks/6NPVjNh8Jhru9xOmyQigds", + "id":"6NPVjNh8Jhru9xOmyQigds", + "name":"思想犯", + "popularity":89, + "preview_url":"https://p.scdn.co/mp3-preview/6b00000be293e6b25f61c33e206a0c522b5cbc87", + "track_number":4, + "type":"track", + "uri":"spotify:track:6NPVjNh8Jhru9xOmyQigds" + } + ], + "limit":20, + "next":null, + "offset":0, + "previous":null, + "total":1 + } +} From ac96b9b64e97cdca0be6305e939e902e8742f147 Mon Sep 17 00:00:00 2001 From: Noor Date: Wed, 2 Jul 2025 20:40:37 +0200 Subject: [PATCH 05/95] Preserve line breaks for example cases in substitution plugin docs (#5846) ## Description Adds line block markup to example substitutions in the plugin documentation, so that each case is shown on a separate line: > The replacement can be an expression utilising the matched regex, allowing us to create more general rules. Say for example, we want to sort all albums by multiple artists into the directory of the first artist. We can thus capture everything before the first ,, `` &`` or `` and``, and use this capture group in the output, discarding the rest of the string. 
> > ```yaml > substitute: > ^(.*?)(,| &| and).*: \1 > ``` > > This would handle all the below cases in a single rule: > >> Bob Dylan and The Band -> Bob Dylan >> Neil Young & Crazy Horse -> Neil Young >> James Yorkston, Nina Persson & The Second Hand Orchestra -> James Yorkston --- docs/changelog.rst | 2 ++ docs/plugins/substitute.rst | 6 +++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 1baa54011..0e5799846 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -61,6 +61,8 @@ Other changes: * Documentation structure for auto generated API references changed slightly. Autogenerated API references are now located in the `docs/api` subdirectory. +* :doc:`/plugins/substitute`: Fix rST formatting for example cases so that each + case is shown on separate lines. 2.3.1 (May 14, 2025) -------------------- diff --git a/docs/plugins/substitute.rst b/docs/plugins/substitute.rst index 87ee2ad45..c6fec8054 100644 --- a/docs/plugins/substitute.rst +++ b/docs/plugins/substitute.rst @@ -31,9 +31,9 @@ group in the output, discarding the rest of the string. This would handle all the below cases in a single rule: - Bob Dylan and The Band -> Bob Dylan - Neil Young & Crazy Horse -> Neil Young - James Yorkston, Nina Persson & The Second Hand Orchestra -> James Yorkston + | Bob Dylan and The Band -> Bob Dylan + | Neil Young & Crazy Horse -> Neil Young + | James Yorkston, Nina Persson & The Second Hand Orchestra -> James Yorkston To apply the substitution, you have to call the function ``%substitute{}`` in the paths section. For example: From 537a71ff8286461440ee883ff2c077636bf7e120 Mon Sep 17 00:00:00 2001 From: J0J0 Todos Date: Mon, 6 Nov 2023 14:47:34 +0100 Subject: [PATCH 06/95] duplicates: Add --remove option to duplicates plugin Removes from library but keeps files. 
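Roughly, the new `-r`/`--remove` switch (and the matching `remove` config option) prunes matched items from the library without touching the media files, in contrast to `--delete`, which also deletes them from disk. A minimal config sketch, based on the option added below:

```yaml
duplicates:
    # drop matched duplicates from the library but keep the files on disk
    remove: yes
```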
--- beetsplug/duplicates.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/beetsplug/duplicates.py b/beetsplug/duplicates.py index fadb29845..76441133f 100644 --- a/beetsplug/duplicates.py +++ b/beetsplug/duplicates.py @@ -53,6 +53,7 @@ class DuplicatesPlugin(BeetsPlugin): "tiebreak": {}, "strict": False, "tag": "", + "remove": False, } ) @@ -131,6 +132,13 @@ class DuplicatesPlugin(BeetsPlugin): action="store", help="tag matched items with 'k=v' attribute", ) + self._command.parser.add_option( + "-r", + "--remove", + dest="remove", + action="store_true", + help="remove items from library", + ) self._command.parser.add_all_common_options() def commands(self): @@ -141,6 +149,7 @@ class DuplicatesPlugin(BeetsPlugin): copy = bytestring_path(self.config["copy"].as_str()) count = self.config["count"].get(bool) delete = self.config["delete"].get(bool) + remove = self.config["remove"].get(bool) fmt = self.config["format"].get(str) full = self.config["full"].get(bool) keys = self.config["keys"].as_str_seq() @@ -196,6 +205,7 @@ class DuplicatesPlugin(BeetsPlugin): copy=copy, move=move, delete=delete, + remove=remove, tag=tag, fmt=fmt.format(obj_count), ) @@ -204,7 +214,14 @@ class DuplicatesPlugin(BeetsPlugin): return [self._command] def _process_item( - self, item, copy=False, move=False, delete=False, tag=False, fmt="" + self, + item, + copy=False, + move=False, + delete=False, + tag=False, + fmt="", + remove=False, ): """Process Item `item`.""" print_(format(item, fmt)) @@ -216,6 +233,8 @@ class DuplicatesPlugin(BeetsPlugin): item.store() if delete: item.remove(delete=True) + if remove: + item.remove(delete=False) if tag: try: k, v = tag.split("=") From 549847bfd8c64ef68c65b36f652a5d01031e4294 Mon Sep 17 00:00:00 2001 From: J0J0 Todos Date: Sun, 22 Jun 2025 08:48:10 +0200 Subject: [PATCH 07/95] duplicates: Add docs for --remove option --- docs/plugins/duplicates.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/plugins/duplicates.rst b/docs/plugins/duplicates.rst index 8b11b6661..8ce0e4578 100644 --- a/docs/plugins/duplicates.rst +++ b/docs/plugins/duplicates.rst @@ -34,6 +34,7 @@ duplicates themselves via command-line switches :: -o DEST, --copy=DEST copy items to dest -p, --path print paths for matched items or albums -t TAG, --tag=TAG tag matched items with 'k=v' attribute + -r, --remove remove items from library Configuration ------------- @@ -57,7 +58,7 @@ file. The available options mirror the command-line options: ``$albumartist - $album - $title: $count`` (for tracks) or ``$albumartist - $album: $count`` (for albums). Default: ``no``. -- **delete**: Removes matched items from the library and from the disk. +- **delete**: Remove matched items from the library and from the disk. Default: ``no`` - **format**: A specific format with which to print every track or album. This uses the same template syntax as beets' @@ -92,6 +93,8 @@ file. The available options mirror the command-line options: set. If you would like to consider the lower bitrates as duplicates, for example, set ``tiebreak: items: [bitrate]``. Default: ``{}``. +- **remove**: Remove matched items from the library, but not from the disk. + Default: ``no``. 
Examples -------- From 47eee070ba426fc61bfd3e72d166917c60076b8a Mon Sep 17 00:00:00 2001 From: J0J0 Todos <2733783+JOJ0@users.noreply.github.com> Date: Wed, 2 Jul 2025 07:10:46 +0200 Subject: [PATCH 08/95] duplicates: remove or delete options mutually exclusive Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- beetsplug/duplicates.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/beetsplug/duplicates.py b/beetsplug/duplicates.py index 76441133f..5a2be0cd2 100644 --- a/beetsplug/duplicates.py +++ b/beetsplug/duplicates.py @@ -233,7 +233,7 @@ class DuplicatesPlugin(BeetsPlugin): item.store() if delete: item.remove(delete=True) - if remove: + elif remove: item.remove(delete=False) if tag: try: From 7c22cd635c502e2f3ccb71803c3034bc2bb2ec11 Mon Sep 17 00:00:00 2001 From: J0J0 Todos Date: Sat, 5 Jul 2025 07:24:26 +0200 Subject: [PATCH 09/95] duplicates: Add changelog for --remove option --- docs/changelog.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 0e5799846..d1a477cb5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -23,6 +23,9 @@ New features: singletons by their Discogs ID. :bug:`4661` * :doc:`plugins/replace`: Add new plugin. +* :doc:`plugins/duplicates`: Add ``--remove`` option, allowing to remove from + the library without deleting media files. + :bug:`5832` Bug fixes: From 1a045c91668c771686f4c871c84f1680af2e944b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Mon, 12 May 2025 12:17:35 +0100 Subject: [PATCH 10/95] Copy paste query, types from library to dbcore --- beets/dbcore/query.py | 115 +++++++++++++++ beets/dbcore/types.py | 158 ++++++++++++++++++-- beets/library.py | 276 ++--------------------------------- beets/ui/__init__.py | 8 - beets/ui/commands.py | 10 +- beets/util/__init__.py | 8 + beetsplug/deezer.py | 3 +- beetsplug/metasync/amarok.py | 5 +- beetsplug/metasync/itunes.py | 7 +- beetsplug/mpdstats.py | 7 +- beetsplug/playlist.py | 3 +- beetsplug/spotify.py | 4 +- beetsplug/types.py | 3 +- beetsplug/web/__init__.py | 3 +- test/test_library.py | 54 ------- test/test_query.py | 12 +- test/test_types.py | 59 ++++++++ 17 files changed, 367 insertions(+), 368 deletions(-) create mode 100644 test/test_types.py diff --git a/beets/dbcore/query.py b/beets/dbcore/query.py index c7ca44452..9812a7528 100644 --- a/beets/dbcore/query.py +++ b/beets/dbcore/query.py @@ -16,6 +16,7 @@ from __future__ import annotations +import os import re import unicodedata from abc import ABC, abstractmethod @@ -36,6 +37,11 @@ if TYPE_CHECKING: else: P = TypeVar("P") +# To use the SQLite "blob" type, it doesn't suffice to provide a byte +# string; SQLite treats that as encoded text. Wrapping it in a +# `memoryview` tells it that we actually mean non-text data. +BLOB_TYPE = memoryview + class ParsingError(ValueError): """Abstract class for any unparsable user-requested album/query @@ -267,6 +273,97 @@ class SubstringQuery(StringFieldQuery[str]): return pattern.lower() in value.lower() +class PathQuery(FieldQuery[bytes]): + """A query that matches all items under a given path. + + Matching can either be case-insensitive or case-sensitive. By + default, the behavior depends on the OS: case-insensitive on Windows + and case-sensitive otherwise. + """ + + # For tests + force_implicit_query_detection = False + + def __init__(self, field, pattern, fast=True, case_sensitive=None): + """Create a path query. + + `pattern` must be a path, either to a file or a directory. 
+ + `case_sensitive` can be a bool or `None`, indicating that the + behavior should depend on the filesystem. + """ + super().__init__(field, pattern, fast) + + path = util.normpath(pattern) + + # By default, the case sensitivity depends on the filesystem + # that the query path is located on. + if case_sensitive is None: + case_sensitive = util.case_sensitive(path) + self.case_sensitive = case_sensitive + + # Use a normalized-case pattern for case-insensitive matches. + if not case_sensitive: + # We need to lowercase the entire path, not just the pattern. + # In particular, on Windows, the drive letter is otherwise not + # lowercased. + # This also ensures that the `match()` method below and the SQL + # from `col_clause()` do the same thing. + path = path.lower() + + # Match the path as a single file. + self.file_path = path + # As a directory (prefix). + self.dir_path = os.path.join(path, b"") + + @classmethod + def is_path_query(cls, query_part): + """Try to guess whether a unicode query part is a path query. + + Condition: separator precedes colon and the file exists. + """ + colon = query_part.find(":") + if colon != -1: + query_part = query_part[:colon] + + # Test both `sep` and `altsep` (i.e., both slash and backslash on + # Windows). + if not ( + os.sep in query_part or (os.altsep and os.altsep in query_part) + ): + return False + + if cls.force_implicit_query_detection: + return True + return os.path.exists(util.syspath(util.normpath(query_part))) + + def match(self, item): + path = item.path if self.case_sensitive else item.path.lower() + return (path == self.file_path) or path.startswith(self.dir_path) + + def col_clause(self): + file_blob = BLOB_TYPE(self.file_path) + dir_blob = BLOB_TYPE(self.dir_path) + + if self.case_sensitive: + query_part = "({0} = ?) || (substr({0}, 1, ?) = ?)" + else: + query_part = "(BYTELOWER({0}) = BYTELOWER(?)) || \ + (substr(BYTELOWER({0}), 1, ?) = BYTELOWER(?))" + + return query_part.format(self.field), ( + file_blob, + len(dir_blob), + dir_blob, + ) + + def __repr__(self) -> str: + return ( + f"{self.__class__.__name__}({self.field!r}, {self.pattern!r}, " + f"fast={self.fast}, case_sensitive={self.case_sensitive})" + ) + + class RegexpQuery(StringFieldQuery[Pattern[str]]): """A query that matches a regular expression in a specific Model field. @@ -844,6 +941,24 @@ class DurationQuery(NumericQuery): ) +class SingletonQuery(FieldQuery[str]): + """This query is responsible for the 'singleton' lookup. + + It is based on the FieldQuery and constructs a SQL clause + 'album_id is NULL' which yields the same result as the previous filter + in Python but is more performant since it's done in SQL. + + Using util.str2bool ensures that lookups like singleton:true, singleton:1 + and singleton:false, singleton:0 are handled consistently. + """ + + def __new__(cls, field: str, value: str, *args, **kwargs): + query = NoneQuery("album_id") + if util.str2bool(value): + return query + return NotQuery(query) + + # Sorting. diff --git a/beets/dbcore/types.py b/beets/dbcore/types.py index 2a64b2ed9..27cd04b92 100644 --- a/beets/dbcore/types.py +++ b/beets/dbcore/types.py @@ -16,19 +16,18 @@ from __future__ import annotations +import re +import time import typing from abc import ABC from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast -from beets.util import str2bool +from beets import util -from .query import ( - BooleanQuery, - FieldQueryType, - NumericQuery, - SQLiteType, - SubstringQuery, -) +from . 
import query + +SQLiteType = query.SQLiteType +BLOB_TYPE = query.BLOB_TYPE class ModelType(typing.Protocol): @@ -61,7 +60,7 @@ class Type(ABC, Generic[T, N]): """The SQLite column type for the value. """ - query: FieldQueryType = SubstringQuery + query: query.FieldQueryType = query.SubstringQuery """The `Query` subclass to be used when querying the field. """ @@ -160,7 +159,7 @@ class BaseInteger(Type[int, N]): """A basic integer type.""" sql = "INTEGER" - query = NumericQuery + query = query.NumericQuery model_type = int def normalize(self, value: Any) -> int | N: @@ -241,7 +240,7 @@ class BaseFloat(Type[float, N]): """ sql = "REAL" - query: FieldQueryType = NumericQuery + query: query.FieldQueryType = query.NumericQuery model_type = float def __init__(self, digits: int = 1): @@ -271,7 +270,7 @@ class BaseString(Type[T, N]): """A Unicode string type.""" sql = "TEXT" - query = SubstringQuery + query = query.SubstringQuery def normalize(self, value: Any) -> T | N: if value is None: @@ -312,14 +311,144 @@ class Boolean(Type): """A boolean type.""" sql = "INTEGER" - query = BooleanQuery + query = query.BooleanQuery model_type = bool def format(self, value: bool) -> str: return str(bool(value)) def parse(self, string: str) -> bool: - return str2bool(string) + return util.str2bool(string) + + +class DateType(Float): + # TODO representation should be `datetime` object + # TODO distinguish between date and time types + query = query.DateQuery + + def format(self, value): + return time.strftime( + beets.config["time_format"].as_str(), time.localtime(value or 0) + ) + + def parse(self, string): + try: + # Try a formatted date string. + return time.mktime( + time.strptime(string, beets.config["time_format"].as_str()) + ) + except ValueError: + # Fall back to a plain timestamp number. + try: + return float(string) + except ValueError: + return self.null + + +class PathType(Type[bytes, bytes]): + """A dbcore type for filesystem paths. + + These are represented as `bytes` objects, in keeping with + the Unix filesystem abstraction. + """ + + sql = "BLOB" + query = query.PathQuery + model_type = bytes + + def __init__(self, nullable=False): + """Create a path type object. + + `nullable` controls whether the type may be missing, i.e., None. + """ + self.nullable = nullable + + @property + def null(self): + if self.nullable: + return None + else: + return b"" + + def format(self, value): + return util.displayable_path(value) + + def parse(self, string): + return util.normpath(util.bytestring_path(string)) + + def normalize(self, value): + if isinstance(value, str): + # Paths stored internally as encoded bytes. + return util.bytestring_path(value) + + elif isinstance(value, BLOB_TYPE): + # We unwrap buffers to bytes. + return bytes(value) + + else: + return value + + def from_sql(self, sql_value): + return self.normalize(sql_value) + + def to_sql(self, value): + if isinstance(value, bytes): + value = BLOB_TYPE(value) + return value + + +class MusicalKey(String): + """String representing the musical key of a song. + + The standard format is C, Cm, C#, C#m, etc. 
+ """ + + ENHARMONIC = { + r"db": "c#", + r"eb": "d#", + r"gb": "f#", + r"ab": "g#", + r"bb": "a#", + } + + null = None + + def parse(self, key): + key = key.lower() + for flat, sharp in self.ENHARMONIC.items(): + key = re.sub(flat, sharp, key) + key = re.sub(r"[\W\s]+minor", "m", key) + key = re.sub(r"[\W\s]+major", "", key) + return key.capitalize() + + def normalize(self, key): + if key is None: + return None + else: + return self.parse(key) + + +class DurationType(Float): + """Human-friendly (M:SS) representation of a time interval.""" + + query = query.DurationQuery + + def format(self, value): + if not beets.config["format_raw_length"].get(bool): + return util.human_seconds_short(value or 0.0) + else: + return value + + def parse(self, string): + try: + # Try to format back hh:ss to seconds. + return util.raw_seconds_short(string) + except ValueError: + # Fall back to a plain float. + try: + return float(string) + except ValueError: + return self.null # Shared instances of common types. @@ -331,6 +460,7 @@ FLOAT = Float() NULL_FLOAT = NullFloat() STRING = String() BOOLEAN = Boolean() +DATE = DateType() SEMICOLON_SPACE_DSV = DelimitedString(delimiter="; ") # Will set the proper null char in mediafile diff --git a/beets/library.py b/beets/library.py index 271059c69..5a692ef1c 100644 --- a/beets/library.py +++ b/beets/library.py @@ -17,7 +17,6 @@ from __future__ import annotations import os -import re import shlex import string import sys @@ -46,259 +45,9 @@ from beets.util.functemplate import Template, template if TYPE_CHECKING: from .dbcore.query import FieldQuery, FieldQueryType -# To use the SQLite "blob" type, it doesn't suffice to provide a byte -# string; SQLite treats that as encoded text. Wrapping it in a -# `memoryview` tells it that we actually mean non-text data. -BLOB_TYPE = memoryview - log = logging.getLogger("beets") -# Library-specific query types. - - -class SingletonQuery(dbcore.FieldQuery[str]): - """This query is responsible for the 'singleton' lookup. - - It is based on the FieldQuery and constructs a SQL clause - 'album_id is NULL' which yields the same result as the previous filter - in Python but is more performant since it's done in SQL. - - Using util.str2bool ensures that lookups like singleton:true, singleton:1 - and singleton:false, singleton:0 are handled consistently. - """ - - def __new__(cls, field: str, value: str, *args, **kwargs): - query = dbcore.query.NoneQuery("album_id") - if util.str2bool(value): - return query - return dbcore.query.NotQuery(query) - - -class PathQuery(dbcore.FieldQuery[bytes]): - """A query that matches all items under a given path. - - Matching can either be case-insensitive or case-sensitive. By - default, the behavior depends on the OS: case-insensitive on Windows - and case-sensitive otherwise. - """ - - # For tests - force_implicit_query_detection = False - - def __init__(self, field, pattern, fast=True, case_sensitive=None): - """Create a path query. - - `pattern` must be a path, either to a file or a directory. - - `case_sensitive` can be a bool or `None`, indicating that the - behavior should depend on the filesystem. - """ - super().__init__(field, pattern, fast) - - path = util.normpath(pattern) - - # By default, the case sensitivity depends on the filesystem - # that the query path is located on. - if case_sensitive is None: - case_sensitive = util.case_sensitive(path) - self.case_sensitive = case_sensitive - - # Use a normalized-case pattern for case-insensitive matches. 
- if not case_sensitive: - # We need to lowercase the entire path, not just the pattern. - # In particular, on Windows, the drive letter is otherwise not - # lowercased. - # This also ensures that the `match()` method below and the SQL - # from `col_clause()` do the same thing. - path = path.lower() - - # Match the path as a single file. - self.file_path = path - # As a directory (prefix). - self.dir_path = os.path.join(path, b"") - - @classmethod - def is_path_query(cls, query_part): - """Try to guess whether a unicode query part is a path query. - - Condition: separator precedes colon and the file exists. - """ - colon = query_part.find(":") - if colon != -1: - query_part = query_part[:colon] - - # Test both `sep` and `altsep` (i.e., both slash and backslash on - # Windows). - if not ( - os.sep in query_part or (os.altsep and os.altsep in query_part) - ): - return False - - if cls.force_implicit_query_detection: - return True - return os.path.exists(syspath(normpath(query_part))) - - def match(self, item): - path = item.path if self.case_sensitive else item.path.lower() - return (path == self.file_path) or path.startswith(self.dir_path) - - def col_clause(self): - file_blob = BLOB_TYPE(self.file_path) - dir_blob = BLOB_TYPE(self.dir_path) - - if self.case_sensitive: - query_part = "({0} = ?) || (substr({0}, 1, ?) = ?)" - else: - query_part = "(BYTELOWER({0}) = BYTELOWER(?)) || \ - (substr(BYTELOWER({0}), 1, ?) = BYTELOWER(?))" - - return query_part.format(self.field), ( - file_blob, - len(dir_blob), - dir_blob, - ) - - def __repr__(self) -> str: - return ( - f"{self.__class__.__name__}({self.field!r}, {self.pattern!r}, " - f"fast={self.fast}, case_sensitive={self.case_sensitive})" - ) - - -# Library-specific field types. - - -class DateType(types.Float): - # TODO representation should be `datetime` object - # TODO distinguish between date and time types - query = dbcore.query.DateQuery - - def format(self, value): - return time.strftime( - beets.config["time_format"].as_str(), time.localtime(value or 0) - ) - - def parse(self, string): - try: - # Try a formatted date string. - return time.mktime( - time.strptime(string, beets.config["time_format"].as_str()) - ) - except ValueError: - # Fall back to a plain timestamp number. - try: - return float(string) - except ValueError: - return self.null - - -class PathType(types.Type[bytes, bytes]): - """A dbcore type for filesystem paths. - - These are represented as `bytes` objects, in keeping with - the Unix filesystem abstraction. - """ - - sql = "BLOB" - query = PathQuery - model_type = bytes - - def __init__(self, nullable=False): - """Create a path type object. - - `nullable` controls whether the type may be missing, i.e., None. - """ - self.nullable = nullable - - @property - def null(self): - if self.nullable: - return None - else: - return b"" - - def format(self, value): - return util.displayable_path(value) - - def parse(self, string): - return normpath(bytestring_path(string)) - - def normalize(self, value): - if isinstance(value, str): - # Paths stored internally as encoded bytes. - return bytestring_path(value) - - elif isinstance(value, BLOB_TYPE): - # We unwrap buffers to bytes. - return bytes(value) - - else: - return value - - def from_sql(self, sql_value): - return self.normalize(sql_value) - - def to_sql(self, value): - if isinstance(value, bytes): - value = BLOB_TYPE(value) - return value - - -class MusicalKey(types.String): - """String representing the musical key of a song. - - The standard format is C, Cm, C#, C#m, etc. 
- """ - - ENHARMONIC = { - r"db": "c#", - r"eb": "d#", - r"gb": "f#", - r"ab": "g#", - r"bb": "a#", - } - - null = None - - def parse(self, key): - key = key.lower() - for flat, sharp in self.ENHARMONIC.items(): - key = re.sub(flat, sharp, key) - key = re.sub(r"[\W\s]+minor", "m", key) - key = re.sub(r"[\W\s]+major", "", key) - return key.capitalize() - - def normalize(self, key): - if key is None: - return None - else: - return self.parse(key) - - -class DurationType(types.Float): - """Human-friendly (M:SS) representation of a time interval.""" - - query = dbcore.query.DurationQuery - - def format(self, value): - if not beets.config["format_raw_length"].get(bool): - return beets.ui.human_seconds_short(value or 0.0) - else: - return value - - def parse(self, string): - try: - # Try to format back hh:ss to seconds. - return util.raw_seconds_short(string) - except ValueError: - # Fall back to a plain float. - try: - return float(string) - except ValueError: - return self.null - - # Special path format key. PF_KEY_DEFAULT = "default" @@ -517,7 +266,7 @@ class Item(LibModel): _flex_table = "item_attributes" _fields = { "id": types.PRIMARY_ID, - "path": PathType(), + "path": types.PathType(), "album_id": types.FOREIGN_ID, "title": types.STRING, "artist": types.STRING, @@ -596,8 +345,8 @@ class Item(LibModel): "original_year": types.PaddedInt(4), "original_month": types.PaddedInt(2), "original_day": types.PaddedInt(2), - "initial_key": MusicalKey(), - "length": DurationType(), + "initial_key": types.MusicalKey(), + "length": types.DurationType(), "bitrate": types.ScaledInt(1000, "kbps"), "bitrate_mode": types.STRING, "encoder_info": types.STRING, @@ -606,8 +355,8 @@ class Item(LibModel): "samplerate": types.ScaledInt(1000, "kHz"), "bitdepth": types.INTEGER, "channels": types.INTEGER, - "mtime": DateType(), - "added": DateType(), + "mtime": types.DATE, + "added": types.DATE, } _search_fields = ( @@ -641,7 +390,7 @@ class Item(LibModel): _sorts = {"artist": dbcore.query.SmartArtistSort} - _queries = {"singleton": SingletonQuery} + _queries = {"singleton": dbcore.query.SingletonQuery} _format_config_key = "format_item" @@ -717,7 +466,7 @@ class Item(LibModel): if key == "path": if isinstance(value, str): value = bytestring_path(value) - elif isinstance(value, BLOB_TYPE): + elif isinstance(value, types.BLOB_TYPE): value = bytes(value) elif key == "album_id": self._cached_album = None @@ -1161,8 +910,8 @@ class Album(LibModel): _always_dirty = True _fields = { "id": types.PRIMARY_ID, - "artpath": PathType(True), - "added": DateType(), + "artpath": types.PathType(True), + "added": types.DATE, "albumartist": types.STRING, "albumartist_sort": types.STRING, "albumartist_credit": types.STRING, @@ -1208,7 +957,7 @@ class Album(LibModel): _search_fields = ("album", "albumartist", "genre") _types = { - "path": PathType(), + "path": types.PathType(), "data_source": types.STRING, } @@ -1563,7 +1312,10 @@ def parse_query_parts(parts, model_cls): # Special-case path-like queries, which are non-field queries # containing path separators (/). 
- parts = [f"path:{s}" if PathQuery.is_path_query(s) else s for s in parts] + parts = [ + f"path:{s}" if dbcore.query.PathQuery.is_path_query(s) else s + for s in parts + ] case_insensitive = beets.config["sort_case_insensitive"].get(bool) diff --git a/beets/ui/__init__.py b/beets/ui/__init__.py index a6f615b45..f1aac766f 100644 --- a/beets/ui/__init__.py +++ b/beets/ui/__init__.py @@ -477,14 +477,6 @@ def human_seconds(interval): return f"{interval:3.1f} {suffix}s" -def human_seconds_short(interval): - """Formats a number of seconds as a short human-readable M:SS - string. - """ - interval = int(interval) - return "%i:%02i" % (interval // 60, interval % 60) - - # Colorization. # ANSI terminal colorization code heavily inspired by pygments: diff --git a/beets/ui/commands.py b/beets/ui/commands.py index f42291019..fb9ca8b89 100755 --- a/beets/ui/commands.py +++ b/beets/ui/commands.py @@ -541,8 +541,8 @@ class ChangeRepresentation: cur_length0 = item.length if item.length else 0 new_length0 = track_info.length if track_info.length else 0 # format into string - cur_length = f"({ui.human_seconds_short(cur_length0)})" - new_length = f"({ui.human_seconds_short(new_length0)})" + cur_length = f"({util.human_seconds_short(cur_length0)})" + new_length = f"({util.human_seconds_short(new_length0)})" # colorize lhs_length = ui.colorize(highlight_color, cur_length) rhs_length = ui.colorize(highlight_color, new_length) @@ -706,14 +706,14 @@ class AlbumChange(ChangeRepresentation): for track_info in self.match.extra_tracks: line = f" ! {track_info.title} (#{self.format_index(track_info)})" if track_info.length: - line += f" ({ui.human_seconds_short(track_info.length)})" + line += f" ({util.human_seconds_short(track_info.length)})" print_(ui.colorize("text_warning", line)) if self.match.extra_items: print_(f"Unmatched tracks ({len(self.match.extra_items)}):") for item in self.match.extra_items: line = " ! {} (#{})".format(item.title, self.format_index(item)) if item.length: - line += " ({})".format(ui.human_seconds_short(item.length)) + line += " ({})".format(util.human_seconds_short(item.length)) print_(ui.colorize("text_warning", line)) @@ -795,7 +795,7 @@ def summarize_items(items, singleton): round(int(items[0].samplerate) / 1000, 1), items[0].bitdepth ) summary_parts.append(sample_bits) - summary_parts.append(ui.human_seconds_short(total_duration)) + summary_parts.append(util.human_seconds_short(total_duration)) summary_parts.append(ui.human_bytes(total_filesize)) return ", ".join(summary_parts) diff --git a/beets/util/__init__.py b/beets/util/__init__.py index 9bd7451f8..4572b27f9 100644 --- a/beets/util/__init__.py +++ b/beets/util/__init__.py @@ -1032,6 +1032,14 @@ def raw_seconds_short(string: str) -> float: return float(minutes * 60 + seconds) +def human_seconds_short(interval): + """Formats a number of seconds as a short human-readable M:SS + string. + """ + interval = int(interval) + return "%i:%02i" % (interval // 60, interval % 60) + + def asciify_path(path: str, sep_replace: str) -> str: """Decodes all unicode characters in a path into ASCII equivalents. 
diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py index 2e5d8473a..89f7436f8 100644 --- a/beetsplug/deezer.py +++ b/beetsplug/deezer.py @@ -25,7 +25,6 @@ import unidecode from beets import ui from beets.autotag import AlbumInfo, TrackInfo from beets.dbcore import types -from beets.library import DateType from beets.plugins import BeetsPlugin, MetadataSourcePlugin @@ -35,7 +34,7 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): item_types = { "deezer_track_rank": types.INTEGER, "deezer_track_id": types.INTEGER, - "deezer_updated": DateType(), + "deezer_updated": types.DATE, } # Base URLs for the Deezer API diff --git a/beetsplug/metasync/amarok.py b/beetsplug/metasync/amarok.py index f8dcbe3f3..9afe6dbca 100644 --- a/beetsplug/metasync/amarok.py +++ b/beetsplug/metasync/amarok.py @@ -20,7 +20,6 @@ from time import mktime from xml.sax.saxutils import quoteattr from beets.dbcore import types -from beets.library import DateType from beets.util import displayable_path from beetsplug.metasync import MetaSource @@ -41,8 +40,8 @@ class Amarok(MetaSource): "amarok_score": types.FLOAT, "amarok_uid": types.STRING, "amarok_playcount": types.INTEGER, - "amarok_firstplayed": DateType(), - "amarok_lastplayed": DateType(), + "amarok_firstplayed": types.DATE, + "amarok_lastplayed": types.DATE, } query_xml = ' \ diff --git a/beetsplug/metasync/itunes.py b/beetsplug/metasync/itunes.py index 02f592fdc..f777d0d55 100644 --- a/beetsplug/metasync/itunes.py +++ b/beetsplug/metasync/itunes.py @@ -26,7 +26,6 @@ from confuse import ConfigValueError from beets import util from beets.dbcore import types -from beets.library import DateType from beets.util import bytestring_path, syspath from beetsplug.metasync import MetaSource @@ -63,9 +62,9 @@ class Itunes(MetaSource): "itunes_rating": types.INTEGER, # 0..100 scale "itunes_playcount": types.INTEGER, "itunes_skipcount": types.INTEGER, - "itunes_lastplayed": DateType(), - "itunes_lastskipped": DateType(), - "itunes_dateadded": DateType(), + "itunes_lastplayed": types.DATE, + "itunes_lastskipped": types.DATE, + "itunes_dateadded": types.DATE, } def __init__(self, config, log): diff --git a/beetsplug/mpdstats.py b/beetsplug/mpdstats.py index 6d4c269d1..20faf225f 100644 --- a/beetsplug/mpdstats.py +++ b/beetsplug/mpdstats.py @@ -18,8 +18,9 @@ import time import mpd -from beets import config, library, plugins, ui +from beets import config, plugins, ui from beets.dbcore import types +from beets.dbcore.query import PathQuery from beets.util import displayable_path # If we lose the connection, how many times do we want to retry and how @@ -160,7 +161,7 @@ class MPDStats: def get_item(self, path): """Return the beets item related to path.""" - query = library.PathQuery("path", path) + query = PathQuery("path", path) item = self.lib.items(query).get() if item: return item @@ -321,7 +322,7 @@ class MPDStatsPlugin(plugins.BeetsPlugin): item_types = { "play_count": types.INTEGER, "skip_count": types.INTEGER, - "last_played": library.DateType(), + "last_played": types.DATE, "rating": types.FLOAT, } diff --git a/beetsplug/playlist.py b/beetsplug/playlist.py index cf1d500e8..cb16fb5bc 100644 --- a/beetsplug/playlist.py +++ b/beetsplug/playlist.py @@ -18,8 +18,7 @@ import tempfile from collections.abc import Sequence import beets -from beets.dbcore.query import InQuery -from beets.library import BLOB_TYPE +from beets.dbcore.query import BLOB_TYPE, InQuery from beets.util import path_as_posix diff --git a/beetsplug/spotify.py b/beetsplug/spotify.py index 
76ceeed68..595da4892 100644 --- a/beetsplug/spotify.py +++ b/beetsplug/spotify.py @@ -34,10 +34,10 @@ import unidecode from beets import ui from beets.autotag.hooks import AlbumInfo, TrackInfo from beets.dbcore import types -from beets.library import DateType, Library from beets.plugins import BeetsPlugin, MetadataSourcePlugin, Response if TYPE_CHECKING: + from beets.library import Library from beetsplug._typing import JSONDict DEFAULT_WAITING_TIME = 5 @@ -64,7 +64,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): "spotify_tempo": types.FLOAT, "spotify_time_signature": types.INTEGER, "spotify_valence": types.FLOAT, - "spotify_updated": DateType(), + "spotify_updated": types.DATE, } # Base URLs for the Spotify API diff --git a/beetsplug/types.py b/beetsplug/types.py index 9ba3aac66..9bdfdecee 100644 --- a/beetsplug/types.py +++ b/beetsplug/types.py @@ -15,7 +15,6 @@ from confuse import ConfigValueError -from beets import library from beets.dbcore import types from beets.plugins import BeetsPlugin @@ -42,7 +41,7 @@ class TypesPlugin(BeetsPlugin): elif value.get() == "bool": mytypes[key] = types.BOOLEAN elif value.get() == "date": - mytypes[key] = library.DateType() + mytypes[key] = types.DATE else: raise ConfigValueError( "unknown type '{}' for the '{}' field".format(value, key) diff --git a/beetsplug/web/__init__.py b/beetsplug/web/__init__.py index 175cec4a9..c1b0b5029 100644 --- a/beetsplug/web/__init__.py +++ b/beetsplug/web/__init__.py @@ -25,6 +25,7 @@ from werkzeug.routing import BaseConverter, PathConverter import beets.library from beets import ui, util +from beets.dbcore.query import PathQuery from beets.plugins import BeetsPlugin # Utilities. @@ -342,7 +343,7 @@ def item_query(queries): @app.route("/item/path/") def item_at_path(path): - query = beets.library.PathQuery("path", path.encode("utf-8")) + query = PathQuery("path", path.encode("utf-8")) item = g.lib.items(query).get() if item: return flask.jsonify(_rep(item)) diff --git a/test/test_library.py b/test/test_library.py index 36322cfec..2d232c88f 100644 --- a/test/test_library.py +++ b/test/test_library.py @@ -19,7 +19,6 @@ import os.path import re import shutil import stat -import time import unicodedata import unittest from unittest.mock import patch @@ -1320,56 +1319,3 @@ class ParseQueryTest(unittest.TestCase): def test_parse_bytes(self): with pytest.raises(AssertionError): beets.library.parse_query_string(b"query", None) - - -class LibraryFieldTypesTest(unittest.TestCase): - """Test format() and parse() for library-specific field types""" - - def test_datetype(self): - t = beets.library.DateType() - - # format - time_format = beets.config["time_format"].as_str() - time_local = time.strftime(time_format, time.localtime(123456789)) - assert time_local == t.format(123456789) - # parse - assert 123456789.0 == t.parse(time_local) - assert 123456789.0 == t.parse("123456789.0") - assert t.null == t.parse("not123456789.0") - assert t.null == t.parse("1973-11-29") - - def test_pathtype(self): - t = beets.library.PathType() - - # format - assert "/tmp" == t.format("/tmp") - assert "/tmp/\xe4lbum" == t.format("/tmp/\u00e4lbum") - # parse - assert np(b"/tmp") == t.parse("/tmp") - assert np(b"/tmp/\xc3\xa4lbum") == t.parse("/tmp/\u00e4lbum/") - - def test_musicalkey(self): - t = beets.library.MusicalKey() - - # parse - assert "C#m" == t.parse("c#m") - assert "Gm" == t.parse("g minor") - assert "Not c#m" == t.parse("not C#m") - - def test_durationtype(self): - t = beets.library.DurationType() - - # format - assert "1:01" 
== t.format(61.23) - assert "60:01" == t.format(3601.23) - assert "0:00" == t.format(None) - # parse - assert 61.0 == t.parse("1:01") - assert 61.23 == t.parse("61.23") - assert 3601.0 == t.parse("60:01") - assert t.null == t.parse("1:00:01") - assert t.null == t.parse("not61.23") - # config format_raw_length - beets.config["format_raw_length"] = True - assert 61.23 == t.format(61.23) - assert 3601.23 == t.format(3601.23) diff --git a/test/test_query.py b/test/test_query.py index 22c2710de..6546cb4df 100644 --- a/test/test_query.py +++ b/test/test_query.py @@ -466,9 +466,9 @@ class PathQueryTest(ItemInDBTestCase, AssertsMixin): # Unadorned path queries with path separators in them are considered # path queries only when the path in question actually exists. So we # mock the existence check to return true. - beets.library.PathQuery.force_implicit_query_detection = True + beets.dbcore.query.PathQuery.force_implicit_query_detection = True yield - beets.library.PathQuery.force_implicit_query_detection = False + beets.dbcore.query.PathQuery.force_implicit_query_detection = False def test_path_exact_match(self): q = "path:/a/b/c.mp3" @@ -609,7 +609,7 @@ class PathQueryTest(ItemInDBTestCase, AssertsMixin): def test_case_sensitivity(self): self.add_album(path=b"/A/B/C2.mp3", title="caps path") - makeq = partial(beets.library.PathQuery, "path", "/A/B") + makeq = partial(beets.dbcore.query.PathQuery, "path", "/A/B") results = self.lib.items(makeq(case_sensitive=True)) self.assert_items_matched(results, ["caps path"]) @@ -621,7 +621,7 @@ class PathQueryTest(ItemInDBTestCase, AssertsMixin): # both os.sep and os.altsep @unittest.skipIf(sys.platform == "win32", "win32") def test_path_sep_detection(self): - is_path_query = beets.library.PathQuery.is_path_query + is_path_query = beets.dbcore.query.PathQuery.is_path_query with self.force_implicit_query_detection(): assert is_path_query("/foo/bar") @@ -641,7 +641,7 @@ class PathQueryTest(ItemInDBTestCase, AssertsMixin): Thus, don't use the `force_implicit_query_detection()` contextmanager which would disable the existence check. """ - is_path_query = beets.library.PathQuery.is_path_query + is_path_query = beets.dbcore.query.PathQuery.is_path_query path = self.touch(os.path.join(b"foo", b"bar")) assert os.path.isabs(util.syspath(path)) @@ -664,7 +664,7 @@ class PathQueryTest(ItemInDBTestCase, AssertsMixin): Thus, don't use the `force_implicit_query_detection()` contextmanager which would disable the existence check. 
""" - is_path_query = beets.library.PathQuery.is_path_query + is_path_query = beets.dbcore.query.PathQuery.is_path_query self.touch(os.path.join(b"foo", b"bar")) diff --git a/test/test_types.py b/test/test_types.py new file mode 100644 index 000000000..8a6acd0dc --- /dev/null +++ b/test/test_types.py @@ -0,0 +1,59 @@ +import time +import unittest + +import beets +from beets.dbcore import types +from beets.util import normpath + + +class LibraryFieldTypesTest(unittest.TestCase): + """Test format() and parse() for library-specific field types""" + + def test_datetype(self): + t = types.DATE + + # format + time_format = beets.config["time_format"].as_str() + time_local = time.strftime(time_format, time.localtime(123456789)) + assert time_local == t.format(123456789) + # parse + assert 123456789.0 == t.parse(time_local) + assert 123456789.0 == t.parse("123456789.0") + assert t.null == t.parse("not123456789.0") + assert t.null == t.parse("1973-11-29") + + def test_pathtype(self): + t = types.PathType() + + # format + assert "/tmp" == t.format("/tmp") + assert "/tmp/\xe4lbum" == t.format("/tmp/\u00e4lbum") + # parse + assert normpath(b"/tmp") == t.parse("/tmp") + assert normpath(b"/tmp/\xc3\xa4lbum") == t.parse("/tmp/\u00e4lbum/") + + def test_musicalkey(self): + t = types.MusicalKey() + + # parse + assert "C#m" == t.parse("c#m") + assert "Gm" == t.parse("g minor") + assert "Not c#m" == t.parse("not C#m") + + def test_durationtype(self): + t = types.DurationType() + + # format + assert "1:01" == t.format(61.23) + assert "60:01" == t.format(3601.23) + assert "0:00" == t.format(None) + # parse + assert 61.0 == t.parse("1:01") + assert 61.23 == t.parse("61.23") + assert 3601.0 == t.parse("60:01") + assert t.null == t.parse("1:00:01") + assert t.null == t.parse("not61.23") + # config format_raw_length + beets.config["format_raw_length"] = True + assert 61.23 == t.format(61.23) + assert 3601.23 == t.format(3601.23) From b40ce836d5b7cf57a1c4ac836b9eee252aeb70e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Mon, 12 May 2025 12:19:00 +0100 Subject: [PATCH 11/95] Add NullPathType and types to PathType --- .git-blame-ignore-revs | 4 +- beets/dbcore/query.py | 2 + beets/dbcore/types.py | 46 +++++++++++----------- beets/library.py | 2 +- test/test_types.py | 87 +++++++++++++++++++++--------------------- 5 files changed, 73 insertions(+), 68 deletions(-) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 4703203ba..5441940a4 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -48,4 +48,6 @@ f36bc497c8c8f89004f3f6879908d3f0b25123e1 # Fix formatting c490ac5810b70f3cf5fd8649669838e8fdb19f4d # Importer restructure -9147577b2b19f43ca827e9650261a86fb0450cef \ No newline at end of file +9147577b2b19f43ca827e9650261a86fb0450cef +# Copy paste query, types from library to dbcore +1a045c91668c771686f4c871c84f1680af2e944b diff --git a/beets/dbcore/query.py b/beets/dbcore/query.py index 9812a7528..3243445cb 100644 --- a/beets/dbcore/query.py +++ b/beets/dbcore/query.py @@ -40,6 +40,8 @@ else: # To use the SQLite "blob" type, it doesn't suffice to provide a byte # string; SQLite treats that as encoded text. Wrapping it in a # `memoryview` tells it that we actually mean non-text data. +# needs to be defined in here due to circular import. 
+# TODO: remove it from this module and define it in dbcore/types.py instead BLOB_TYPE = memoryview diff --git a/beets/dbcore/types.py b/beets/dbcore/types.py index 27cd04b92..be28f6891 100644 --- a/beets/dbcore/types.py +++ b/beets/dbcore/types.py @@ -22,6 +22,7 @@ import typing from abc import ABC from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast +import beets from beets import util from . import query @@ -345,7 +346,7 @@ class DateType(Float): return self.null -class PathType(Type[bytes, bytes]): +class BasePathType(Type[bytes, N]): """A dbcore type for filesystem paths. These are represented as `bytes` objects, in keeping with @@ -356,27 +357,10 @@ class PathType(Type[bytes, bytes]): query = query.PathQuery model_type = bytes - def __init__(self, nullable=False): - """Create a path type object. + def parse(self, string: str) -> bytes: + return util.normpath(string) - `nullable` controls whether the type may be missing, i.e., None. - """ - self.nullable = nullable - - @property - def null(self): - if self.nullable: - return None - else: - return b"" - - def format(self, value): - return util.displayable_path(value) - - def parse(self, string): - return util.normpath(util.bytestring_path(string)) - - def normalize(self, value): + def normalize(self, value: Any) -> bytes | N: if isinstance(value, str): # Paths stored internally as encoded bytes. return util.bytestring_path(value) @@ -391,12 +375,30 @@ class PathType(Type[bytes, bytes]): def from_sql(self, sql_value): return self.normalize(sql_value) - def to_sql(self, value): + def to_sql(self, value: bytes) -> BLOB_TYPE: if isinstance(value, bytes): value = BLOB_TYPE(value) return value +class NullPathType(BasePathType[None]): + @property + def null(self) -> None: + return None + + def format(self, value: bytes | None) -> str: + return util.displayable_path(value or b"") + + +class PathType(BasePathType[bytes]): + @property + def null(self) -> bytes: + return b"" + + def format(self, value: bytes) -> str: + return util.displayable_path(value or b"") + + class MusicalKey(String): """String representing the musical key of a song. 
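
The split above keeps parsing and SQL conversion in BasePathType and moves only the null handling into the two concrete classes. As a rough illustration of the resulting behaviour (an informal sketch rather than patch content; the variable names are made up and it assumes a checkout with this hunk applied):

    from beets.dbcore import types

    nullable = types.NullPathType()  # adopted for Album.artpath in the library.py hunk below
    strict = types.PathType()

    # The nullable variant represents a missing path as None ...
    assert nullable.null is None
    # ... while the strict variant keeps the empty bytestring as its "missing" value.
    assert strict.null == b""

    # Both render a missing value as an empty display string.
    assert nullable.format(None) == ""
    assert strict.format(b"") == ""
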
diff --git a/beets/library.py b/beets/library.py index 5a692ef1c..9223b3209 100644 --- a/beets/library.py +++ b/beets/library.py @@ -910,7 +910,7 @@ class Album(LibModel): _always_dirty = True _fields = { "id": types.PRIMARY_ID, - "artpath": types.PathType(True), + "artpath": types.NullPathType(), "added": types.DATE, "albumartist": types.STRING, "albumartist_sort": types.STRING, diff --git a/test/test_types.py b/test/test_types.py index 8a6acd0dc..6727917d8 100644 --- a/test/test_types.py +++ b/test/test_types.py @@ -1,59 +1,58 @@ import time -import unittest import beets from beets.dbcore import types from beets.util import normpath -class LibraryFieldTypesTest(unittest.TestCase): - """Test format() and parse() for library-specific field types""" +def test_datetype(): + t = types.DATE - def test_datetype(self): - t = types.DATE + # format + time_format = beets.config["time_format"].as_str() + time_local = time.strftime(time_format, time.localtime(123456789)) + assert time_local == t.format(123456789) + # parse + assert 123456789.0 == t.parse(time_local) + assert 123456789.0 == t.parse("123456789.0") + assert t.null == t.parse("not123456789.0") + assert t.null == t.parse("1973-11-29") - # format - time_format = beets.config["time_format"].as_str() - time_local = time.strftime(time_format, time.localtime(123456789)) - assert time_local == t.format(123456789) - # parse - assert 123456789.0 == t.parse(time_local) - assert 123456789.0 == t.parse("123456789.0") - assert t.null == t.parse("not123456789.0") - assert t.null == t.parse("1973-11-29") - def test_pathtype(self): - t = types.PathType() +def test_pathtype(): + t = types.PathType() - # format - assert "/tmp" == t.format("/tmp") - assert "/tmp/\xe4lbum" == t.format("/tmp/\u00e4lbum") - # parse - assert normpath(b"/tmp") == t.parse("/tmp") - assert normpath(b"/tmp/\xc3\xa4lbum") == t.parse("/tmp/\u00e4lbum/") + # format + assert "/tmp" == t.format("/tmp") + assert "/tmp/\xe4lbum" == t.format("/tmp/\u00e4lbum") + # parse + assert normpath(b"/tmp") == t.parse("/tmp") + assert normpath(b"/tmp/\xc3\xa4lbum") == t.parse("/tmp/\u00e4lbum/") - def test_musicalkey(self): - t = types.MusicalKey() - # parse - assert "C#m" == t.parse("c#m") - assert "Gm" == t.parse("g minor") - assert "Not c#m" == t.parse("not C#m") +def test_musicalkey(): + t = types.MusicalKey() - def test_durationtype(self): - t = types.DurationType() + # parse + assert "C#m" == t.parse("c#m") + assert "Gm" == t.parse("g minor") + assert "Not c#m" == t.parse("not C#m") - # format - assert "1:01" == t.format(61.23) - assert "60:01" == t.format(3601.23) - assert "0:00" == t.format(None) - # parse - assert 61.0 == t.parse("1:01") - assert 61.23 == t.parse("61.23") - assert 3601.0 == t.parse("60:01") - assert t.null == t.parse("1:00:01") - assert t.null == t.parse("not61.23") - # config format_raw_length - beets.config["format_raw_length"] = True - assert 61.23 == t.format(61.23) - assert 3601.23 == t.format(3601.23) + +def test_durationtype(): + t = types.DurationType() + + # format + assert "1:01" == t.format(61.23) + assert "60:01" == t.format(3601.23) + assert "0:00" == t.format(None) + # parse + assert 61.0 == t.parse("1:01") + assert 61.23 == t.parse("61.23") + assert 3601.0 == t.parse("60:01") + assert t.null == t.parse("1:00:01") + assert t.null == t.parse("not61.23") + # config format_raw_length + beets.config["format_raw_length"] = True + assert 61.23 == t.format(61.23) + assert 3601.23 == t.format(3601.23) From a38918380d3654e65cc7d19776e9c426050a7464 Mon Sep 17 00:00:00 
2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Mon, 12 May 2025 09:53:01 +0100 Subject: [PATCH 12/95] Rewrite path query tests using pytest.mark.parametrize And remove `force_implicit_query_detection` attribute from `PathQuery` class. --- beets/dbcore/query.py | 5 - test/test_query.py | 350 +++++++++++++----------------------------- 2 files changed, 105 insertions(+), 250 deletions(-) diff --git a/beets/dbcore/query.py b/beets/dbcore/query.py index 3243445cb..e02ebb76a 100644 --- a/beets/dbcore/query.py +++ b/beets/dbcore/query.py @@ -283,9 +283,6 @@ class PathQuery(FieldQuery[bytes]): and case-sensitive otherwise. """ - # For tests - force_implicit_query_detection = False - def __init__(self, field, pattern, fast=True, case_sensitive=None): """Create a path query. @@ -335,8 +332,6 @@ class PathQuery(FieldQuery[bytes]): ): return False - if cls.force_implicit_query_detection: - return True return os.path.exists(util.syspath(util.normpath(query_part))) def match(self, item): diff --git a/test/test_query.py b/test/test_query.py index 6546cb4df..11537e039 100644 --- a/test/test_query.py +++ b/test/test_query.py @@ -14,26 +14,23 @@ """Various tests for querying the library database.""" -import os import sys import unittest -from contextlib import contextmanager -from functools import partial +from pathlib import Path import pytest from mock import patch -import beets.library -from beets import dbcore, util +from beets import dbcore from beets.dbcore import types from beets.dbcore.query import ( InvalidQueryArgumentValueError, NoneQuery, ParsingError, + PathQuery, ) from beets.test import _common -from beets.test.helper import BeetsTestCase, ItemInDBTestCase -from beets.util import syspath +from beets.test.helper import BeetsTestCase, TestHelper # Because the absolute path begins with something like C:, we # can't disambiguate it from an ordinary query. @@ -442,244 +439,6 @@ class MatchTest(unittest.TestCase): assert q3 != q4 -class PathQueryTest(ItemInDBTestCase, AssertsMixin): - def setUp(self): - super().setUp() - - # This is the item we'll try to match. - self.i.path = util.normpath("/a/b/c.mp3") - self.i.title = "path item" - self.i.album = "path album" - self.i.store() - self.lib.add_album([self.i]) - - # A second item for testing exclusion. - i2 = _common.item() - i2.path = util.normpath("/x/y/z.mp3") - i2.title = "another item" - i2.album = "another album" - self.lib.add(i2) - self.lib.add_album([i2]) - - @contextmanager - def force_implicit_query_detection(self): - # Unadorned path queries with path separators in them are considered - # path queries only when the path in question actually exists. So we - # mock the existence check to return true. 
- beets.dbcore.query.PathQuery.force_implicit_query_detection = True - yield - beets.dbcore.query.PathQuery.force_implicit_query_detection = False - - def test_path_exact_match(self): - q = "path:/a/b/c.mp3" - results = self.lib.items(q) - self.assert_items_matched(results, ["path item"]) - - results = self.lib.albums(q) - self.assert_albums_matched(results, ["path album"]) - - # FIXME: fails on windows - @unittest.skipIf(sys.platform == "win32", "win32") - def test_parent_directory_no_slash(self): - q = "path:/a" - results = self.lib.items(q) - self.assert_items_matched(results, ["path item"]) - - results = self.lib.albums(q) - self.assert_albums_matched(results, ["path album"]) - - # FIXME: fails on windows - @unittest.skipIf(sys.platform == "win32", "win32") - def test_parent_directory_with_slash(self): - q = "path:/a/" - results = self.lib.items(q) - self.assert_items_matched(results, ["path item"]) - - results = self.lib.albums(q) - self.assert_albums_matched(results, ["path album"]) - - def test_no_match(self): - q = "path:/xyzzy/" - results = self.lib.items(q) - self.assert_items_matched(results, []) - - results = self.lib.albums(q) - self.assert_albums_matched(results, []) - - def test_fragment_no_match(self): - q = "path:/b/" - results = self.lib.items(q) - self.assert_items_matched(results, []) - - results = self.lib.albums(q) - self.assert_albums_matched(results, []) - - def test_nonnorm_path(self): - q = "path:/x/../a/b" - results = self.lib.items(q) - self.assert_items_matched(results, ["path item"]) - - results = self.lib.albums(q) - self.assert_albums_matched(results, ["path album"]) - - @unittest.skipIf(sys.platform == "win32", WIN32_NO_IMPLICIT_PATHS) - def test_slashed_query_matches_path(self): - with self.force_implicit_query_detection(): - q = "/a/b" - results = self.lib.items(q) - self.assert_items_matched(results, ["path item"]) - - results = self.lib.albums(q) - self.assert_albums_matched(results, ["path album"]) - - @unittest.skipIf(sys.platform == "win32", WIN32_NO_IMPLICIT_PATHS) - def test_path_query_in_or_query(self): - with self.force_implicit_query_detection(): - q = "/a/b , /a/b" - results = self.lib.items(q) - self.assert_items_matched(results, ["path item"]) - - def test_non_slashed_does_not_match_path(self): - with self.force_implicit_query_detection(): - q = "c.mp3" - results = self.lib.items(q) - self.assert_items_matched(results, []) - - results = self.lib.albums(q) - self.assert_albums_matched(results, []) - - def test_slashes_in_explicit_field_does_not_match_path(self): - with self.force_implicit_query_detection(): - q = "title:/a/b" - results = self.lib.items(q) - self.assert_items_matched(results, []) - - def test_path_item_regex(self): - q = "path::c\\.mp3$" - results = self.lib.items(q) - self.assert_items_matched(results, ["path item"]) - - results = self.lib.albums(q) - self.assert_albums_matched(results, ["path album"]) - - def test_path_album_regex(self): - q = "path::b" - results = self.lib.albums(q) - self.assert_albums_matched(results, ["path album"]) - - def test_escape_underscore(self): - self.add_album( - path=b"/a/_/title.mp3", - title="with underscore", - album="album with underscore", - ) - q = "path:/a/_" - results = self.lib.items(q) - self.assert_items_matched(results, ["with underscore"]) - - results = self.lib.albums(q) - self.assert_albums_matched(results, ["album with underscore"]) - - def test_escape_percent(self): - self.add_album( - path=b"/a/%/title.mp3", - title="with percent", - album="album with percent", - ) - q = 
"path:/a/%" - results = self.lib.items(q) - self.assert_items_matched(results, ["with percent"]) - - results = self.lib.albums(q) - self.assert_albums_matched(results, ["album with percent"]) - - def test_escape_backslash(self): - self.add_album( - path=rb"/a/\x/title.mp3", - title="with backslash", - album="album with backslash", - ) - q = "path:/a/\\\\x" - results = self.lib.items(q) - self.assert_items_matched(results, ["with backslash"]) - - results = self.lib.albums(q) - self.assert_albums_matched(results, ["album with backslash"]) - - def test_case_sensitivity(self): - self.add_album(path=b"/A/B/C2.mp3", title="caps path") - - makeq = partial(beets.dbcore.query.PathQuery, "path", "/A/B") - - results = self.lib.items(makeq(case_sensitive=True)) - self.assert_items_matched(results, ["caps path"]) - - results = self.lib.items(makeq(case_sensitive=False)) - self.assert_items_matched(results, ["path item", "caps path"]) - - # FIXME: Also create a variant of this test for windows, which tests - # both os.sep and os.altsep - @unittest.skipIf(sys.platform == "win32", "win32") - def test_path_sep_detection(self): - is_path_query = beets.dbcore.query.PathQuery.is_path_query - - with self.force_implicit_query_detection(): - assert is_path_query("/foo/bar") - assert is_path_query("foo/bar") - assert is_path_query("foo/") - assert not is_path_query("foo") - assert is_path_query("foo/:bar") - assert not is_path_query("foo:bar/") - assert not is_path_query("foo:/bar") - - # FIXME: shouldn't this also work on windows? - @unittest.skipIf(sys.platform == "win32", WIN32_NO_IMPLICIT_PATHS) - def test_detect_absolute_path(self): - """Test detection of implicit path queries based on whether or - not the path actually exists, when using an absolute path query. - - Thus, don't use the `force_implicit_query_detection()` - contextmanager which would disable the existence check. - """ - is_path_query = beets.dbcore.query.PathQuery.is_path_query - - path = self.touch(os.path.join(b"foo", b"bar")) - assert os.path.isabs(util.syspath(path)) - path_str = path.decode("utf-8") - - # The file itself. - assert is_path_query(path_str) - - # The parent directory. - parent = os.path.dirname(path_str) - assert is_path_query(parent) - - # Some non-existent path. - assert not is_path_query(f"{path_str}baz") - - def test_detect_relative_path(self): - """Test detection of implicit path queries based on whether or - not the path actually exists, when using a relative path query. - - Thus, don't use the `force_implicit_query_detection()` - contextmanager which would disable the existence check. - """ - is_path_query = beets.dbcore.query.PathQuery.is_path_query - - self.touch(os.path.join(b"foo", b"bar")) - - # Temporarily change directory so relative paths work. 
- cur_dir = os.getcwd() - try: - os.chdir(syspath(self.temp_dir)) - assert is_path_query("foo/") - assert is_path_query("foo/bar") - assert is_path_query("foo/bar:tagada") - assert not is_path_query("bar") - finally: - os.chdir(cur_dir) - - class IntQueryTest(BeetsTestCase): def test_exact_value_match(self): item = self.add_item(bpm=120) @@ -1104,3 +863,104 @@ class RelatedQueriesTest(BeetsTestCase, AssertsMixin): q = "artpath::A Album1" results = self.lib.items(q) self.assert_items_matched(results, ["Album1 Item1", "Album1 Item2"]) + + +@pytest.fixture(scope="class") +def helper(): + helper = TestHelper() + helper.setup_beets() + + yield helper + + helper.teardown_beets() + + +class TestPathQuery: + _p = pytest.param + + @pytest.fixture(scope="class") + def lib(self, helper): + helper.add_item(path=b"/a/b/c.mp3", title="path item") + helper.add_item(path=b"/x/y/z.mp3", title="another item") + helper.add_item(path=b"/c/_/title.mp3", title="with underscore") + helper.add_item(path=b"/c/%/title.mp3", title="with percent") + helper.add_item(path=rb"/c/\x/title.mp3", title="with backslash") + helper.add_item(path=b"/A/B/C2.mp3", title="caps path") + + return helper.lib + + @pytest.mark.parametrize( + "q, expected_titles", + [ + _p("path:/a/b/c.mp3", ["path item"], id="exact-match"), + _p("path:/a", ["path item"], id="parent-dir-no-slash"), + _p("path:/a/", ["path item"], id="parent-dir-with-slash"), + _p("path:/xyzzy/", [], id="no-match"), + _p("path:/b/", [], id="fragment-no-match"), + _p("path:/x/../a/b", ["path item"], id="non-normalized"), + _p("path::c\\.mp3$", ["path item"], id="regex"), + _p("path:/c/_", ["with underscore"], id="underscore-escaped"), + _p("path:/c/%", ["with percent"], id="percent-escaped"), + _p("path:/c/\\\\x", ["with backslash"], id="backslash-escaped"), + ], + ) + def test_explicit(self, lib, q, expected_titles): + assert {i.title for i in lib.items(q)} == set(expected_titles) + + @pytest.mark.skipif(sys.platform == "win32", reason=WIN32_NO_IMPLICIT_PATHS) + @pytest.mark.parametrize( + "q, expected_titles", + [ + _p("/a/b", ["path item"], id="slashed-query"), + _p("/a/b , /a/b", ["path item"], id="path-in-or-query"), + _p("c.mp3", [], id="no-slash-no-match"), + _p("title:/a/b", [], id="slash-with-explicit-field-no-match"), + ], + ) + def test_implicit(self, monkeypatch, lib, q, expected_titles): + monkeypatch.setattr( + "beets.dbcore.query.PathQuery.is_path_query", lambda path: True + ) + + assert {i.title for i in lib.items(q)} == set(expected_titles) + + @pytest.mark.parametrize( + "case_sensitive, expected_titles", + [ + _p(True, [], id="non-caps-dont-match-caps"), + _p(False, ["caps path"], id="non-caps-match-caps"), + ], + ) + def test_case_sensitivity( + self, lib, monkeypatch, case_sensitive, expected_titles + ): + q = "path:/a/b/c2.mp3" + monkeypatch.setattr( + "beets.util.case_sensitive", lambda *_: case_sensitive + ) + + assert {i.title for i in lib.items(q)} == set(expected_titles) + + # FIXME: Also create a variant of this test for windows, which tests + # both os.sep and os.altsep + @pytest.mark.skipif(sys.platform == "win32", reason=WIN32_NO_IMPLICIT_PATHS) + @pytest.mark.parametrize( + "q, is_path_query", + [ + ("/foo/bar", True), + ("foo/bar", True), + ("foo/", True), + ("foo", False), + ("foo/:bar", True), + ("foo:bar/", False), + ("foo:/bar", False), + ], + ) + def test_path_sep_detection(self, monkeypatch, tmp_path, q, is_path_query): + monkeypatch.chdir(tmp_path) + (tmp_path / "foo").mkdir() + (tmp_path / "foo" / "bar").touch() + if 
Path(q).is_absolute(): + q = str(tmp_path / q[1:]) + + assert PathQuery.is_path_query(q) == is_path_query From 45f92ac6416e1200d941b410e42041dc147ecaa5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Mon, 12 May 2025 09:48:40 +0100 Subject: [PATCH 13/95] Remove case_sensitive from PathQuery.__init__ The case_sensitive parameter was only used in tests, which now use monkeypatch to control the behavior of util.case_sensitive() instead. This simplifies the PathQuery initialization logic while maintaining test coverage. --- beets/dbcore/query.py | 14 ++++---------- test/test_query.py | 4 +++- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/beets/dbcore/query.py b/beets/dbcore/query.py index e02ebb76a..c814c5966 100644 --- a/beets/dbcore/query.py +++ b/beets/dbcore/query.py @@ -283,26 +283,20 @@ class PathQuery(FieldQuery[bytes]): and case-sensitive otherwise. """ - def __init__(self, field, pattern, fast=True, case_sensitive=None): + def __init__(self, field, pattern, fast=True): """Create a path query. `pattern` must be a path, either to a file or a directory. - - `case_sensitive` can be a bool or `None`, indicating that the - behavior should depend on the filesystem. """ super().__init__(field, pattern, fast) path = util.normpath(pattern) - # By default, the case sensitivity depends on the filesystem - # that the query path is located on. - if case_sensitive is None: - case_sensitive = util.case_sensitive(path) - self.case_sensitive = case_sensitive + # Case sensitivity depends on the filesystem that the query path is located on. + self.case_sensitive = util.case_sensitive(path) # Use a normalized-case pattern for case-insensitive matches. - if not case_sensitive: + if not self.case_sensitive: # We need to lowercase the entire path, not just the pattern. # In particular, on Windows, the drive letter is otherwise not # lowercased. 
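
With the parameter gone, callers that need deterministic case handling pin the filesystem probe itself; the test hunk below does exactly that through pytest's monkeypatch fixture. A minimal standalone sketch of the same pattern (illustrative only: the test names are invented, and it assumes a checkout with this change applied):

    from beets.dbcore.query import PathQuery

    def test_case_insensitive_filesystem(monkeypatch):
        # Pin the probe so the outcome does not depend on the host filesystem.
        monkeypatch.setattr("beets.util.case_sensitive", lambda *_: False)
        assert PathQuery("path", "/A/B").case_sensitive is False

    def test_case_sensitive_filesystem(monkeypatch):
        monkeypatch.setattr("beets.util.case_sensitive", lambda *_: True)
        assert PathQuery("path", "/A/B").case_sensitive is True

Since beets.dbcore.query looks the helper up through the module (util.case_sensitive), patching the attribute on beets.util is enough for PathQuery to pick the stub up.
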
diff --git a/test/test_query.py b/test/test_query.py index 11537e039..a8646f1bb 100644 --- a/test/test_query.py +++ b/test/test_query.py @@ -904,7 +904,9 @@ class TestPathQuery: _p("path:/c/\\\\x", ["with backslash"], id="backslash-escaped"), ], ) - def test_explicit(self, lib, q, expected_titles): + def test_explicit(self, monkeypatch, lib, q, expected_titles): + monkeypatch.setattr("beets.util.case_sensitive", lambda *_: True) + assert {i.title for i in lib.items(q)} == set(expected_titles) @pytest.mark.skipif(sys.platform == "win32", reason=WIN32_NO_IMPLICIT_PATHS) From 8937978d5f607885609d6809aca23d84cee063db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sat, 31 May 2025 18:57:09 +0100 Subject: [PATCH 14/95] Refactor PathQuery and add docs --- beets/dbcore/query.py | 90 ++++++++++++++++++++++--------------------- test/test_query.py | 15 ++++---- 2 files changed, 55 insertions(+), 50 deletions(-) diff --git a/beets/dbcore/query.py b/beets/dbcore/query.py index c814c5966..9cff082a3 100644 --- a/beets/dbcore/query.py +++ b/beets/dbcore/query.py @@ -22,7 +22,7 @@ import unicodedata from abc import ABC, abstractmethod from collections.abc import Iterator, MutableSequence, Sequence from datetime import datetime, timedelta -from functools import reduce +from functools import cached_property, reduce from operator import mul, or_ from re import Pattern from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union @@ -30,8 +30,7 @@ from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union from beets import util if TYPE_CHECKING: - from beets.dbcore import Model - from beets.dbcore.db import AnyModel + from beets.dbcore.db import AnyModel, Model P = TypeVar("P", default=Any) else: @@ -283,13 +282,11 @@ class PathQuery(FieldQuery[bytes]): and case-sensitive otherwise. """ - def __init__(self, field, pattern, fast=True): + def __init__(self, field: str, pattern: bytes, fast: bool = True) -> None: """Create a path query. `pattern` must be a path, either to a file or a directory. """ - super().__init__(field, pattern, fast) - path = util.normpath(pattern) # Case sensitivity depends on the filesystem that the query path is located on. @@ -304,50 +301,57 @@ class PathQuery(FieldQuery[bytes]): # from `col_clause()` do the same thing. path = path.lower() - # Match the path as a single file. - self.file_path = path - # As a directory (prefix). - self.dir_path = os.path.join(path, b"") + super().__init__(field, path, fast) - @classmethod - def is_path_query(cls, query_part): + @cached_property + def dir_path(self) -> bytes: + return os.path.join(self.pattern, b"") + + @staticmethod + def is_path_query(query_part: str) -> bool: """Try to guess whether a unicode query part is a path query. - Condition: separator precedes colon and the file exists. + The path query must + 1. precede the colon in the query, if a colon is present + 2. contain either ``os.sep`` or ``os.altsep`` (Windows) + 3. this path must exist on the filesystem. """ - colon = query_part.find(":") - if colon != -1: - query_part = query_part[:colon] + query_part = query_part.split(":")[0] - # Test both `sep` and `altsep` (i.e., both slash and backslash on - # Windows). 
- if not ( - os.sep in query_part or (os.altsep and os.altsep in query_part) - ): - return False - - return os.path.exists(util.syspath(util.normpath(query_part))) - - def match(self, item): - path = item.path if self.case_sensitive else item.path.lower() - return (path == self.file_path) or path.startswith(self.dir_path) - - def col_clause(self): - file_blob = BLOB_TYPE(self.file_path) - dir_blob = BLOB_TYPE(self.dir_path) - - if self.case_sensitive: - query_part = "({0} = ?) || (substr({0}, 1, ?) = ?)" - else: - query_part = "(BYTELOWER({0}) = BYTELOWER(?)) || \ - (substr(BYTELOWER({0}), 1, ?) = BYTELOWER(?))" - - return query_part.format(self.field), ( - file_blob, - len(dir_blob), - dir_blob, + return ( + # make sure the query part contains a path separator + bool(set(query_part) & {os.sep, os.altsep}) + and os.path.exists(util.normpath(query_part)) ) + def match(self, obj: Model) -> bool: + """Check whether a model object's path matches this query. + + Performs either an exact match against the pattern or checks if the path + starts with the given directory path. Case sensitivity depends on the object's + filesystem as determined during initialization. + """ + path = obj.path if self.case_sensitive else obj.path.lower() + return (path == self.pattern) or path.startswith(self.dir_path) + + def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: + """Generate an SQL clause that implements path matching in the database. + + Returns a tuple of SQL clause string and parameter values list that matches + paths either exactly or by directory prefix. Handles case sensitivity + appropriately using BYTELOWER for case-insensitive matches. + """ + if self.case_sensitive: + left, right = self.field, "?" + else: + left, right = f"BYTELOWER({self.field})", "BYTELOWER(?)" + + return f"({left} = {right}) || (substr({left}, 1, ?) 
= {right})", [ + BLOB_TYPE(self.pattern), + len(dir_blob := BLOB_TYPE(self.dir_path)), + dir_blob, + ] + def __repr__(self) -> str: return ( f"{self.__class__.__name__}({self.field!r}, {self.pattern!r}, " diff --git a/test/test_query.py b/test/test_query.py index a8646f1bb..776bfd6f6 100644 --- a/test/test_query.py +++ b/test/test_query.py @@ -880,7 +880,7 @@ class TestPathQuery: @pytest.fixture(scope="class") def lib(self, helper): - helper.add_item(path=b"/a/b/c.mp3", title="path item") + helper.add_item(path=b"/aaa/bb/c.mp3", title="path item") helper.add_item(path=b"/x/y/z.mp3", title="another item") helper.add_item(path=b"/c/_/title.mp3", title="with underscore") helper.add_item(path=b"/c/%/title.mp3", title="with percent") @@ -892,12 +892,13 @@ class TestPathQuery: @pytest.mark.parametrize( "q, expected_titles", [ - _p("path:/a/b/c.mp3", ["path item"], id="exact-match"), - _p("path:/a", ["path item"], id="parent-dir-no-slash"), - _p("path:/a/", ["path item"], id="parent-dir-with-slash"), + _p("path:/aaa/bb/c.mp3", ["path item"], id="exact-match"), + _p("path:/aaa", ["path item"], id="parent-dir-no-slash"), + _p("path:/aaa/", ["path item"], id="parent-dir-with-slash"), + _p("path:/aa", [], id="no-match-does-not-match-parent-dir"), _p("path:/xyzzy/", [], id="no-match"), _p("path:/b/", [], id="fragment-no-match"), - _p("path:/x/../a/b", ["path item"], id="non-normalized"), + _p("path:/x/../aaa/bb", ["path item"], id="non-normalized"), _p("path::c\\.mp3$", ["path item"], id="regex"), _p("path:/c/_", ["with underscore"], id="underscore-escaped"), _p("path:/c/%", ["with percent"], id="percent-escaped"), @@ -913,8 +914,8 @@ class TestPathQuery: @pytest.mark.parametrize( "q, expected_titles", [ - _p("/a/b", ["path item"], id="slashed-query"), - _p("/a/b , /a/b", ["path item"], id="path-in-or-query"), + _p("/aaa/bb", ["path item"], id="slashed-query"), + _p("/aaa/bb , /aaa", ["path item"], id="path-in-or-query"), _p("c.mp3", [], id="no-slash-no-match"), _p("title:/a/b", [], id="slash-with-explicit-field-no-match"), ], From 9d088ab69f499dd54597dd8af941746acfee3e27 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sat, 5 Jul 2025 20:46:27 +0100 Subject: [PATCH 15/95] Move human formatting functions under beets.util.units --- beets/dbcore/types.py | 5 ++-- beets/ui/__init__.py | 42 ---------------------------- beets/ui/commands.py | 17 ++++++------ beets/util/__init__.py | 21 -------------- beets/util/units.py | 61 +++++++++++++++++++++++++++++++++++++++++ test/test_ui_init.py | 38 +------------------------ test/util/test_units.py | 43 +++++++++++++++++++++++++++++ 7 files changed, 117 insertions(+), 110 deletions(-) create mode 100644 beets/util/units.py create mode 100644 test/util/test_units.py diff --git a/beets/dbcore/types.py b/beets/dbcore/types.py index be28f6891..30cabf42f 100644 --- a/beets/dbcore/types.py +++ b/beets/dbcore/types.py @@ -24,6 +24,7 @@ from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast import beets from beets import util +from beets.util.units import human_seconds_short, raw_seconds_short from . import query @@ -437,14 +438,14 @@ class DurationType(Float): def format(self, value): if not beets.config["format_raw_length"].get(bool): - return util.human_seconds_short(value or 0.0) + return human_seconds_short(value or 0.0) else: return value def parse(self, string): try: # Try to format back hh:ss to seconds. - return util.raw_seconds_short(string) + return raw_seconds_short(string) except ValueError: # Fall back to a plain float. 
try: diff --git a/beets/ui/__init__.py b/beets/ui/__init__.py index f1aac766f..b7033e41b 100644 --- a/beets/ui/__init__.py +++ b/beets/ui/__init__.py @@ -435,48 +435,6 @@ def input_select_objects(prompt, objs, rep, prompt_all=None): return [] -# Human output formatting. - - -def human_bytes(size): - """Formats size, a number of bytes, in a human-readable way.""" - powers = ["", "K", "M", "G", "T", "P", "E", "Z", "Y", "H"] - unit = "B" - for power in powers: - if size < 1024: - return f"{size:3.1f} {power}{unit}" - size /= 1024.0 - unit = "iB" - return "big" - - -def human_seconds(interval): - """Formats interval, a number of seconds, as a human-readable time - interval using English words. - """ - units = [ - (1, "second"), - (60, "minute"), - (60, "hour"), - (24, "day"), - (7, "week"), - (52, "year"), - (10, "decade"), - ] - for i in range(len(units) - 1): - increment, suffix = units[i] - next_increment, _ = units[i + 1] - interval /= float(increment) - if interval < next_increment: - break - else: - # Last unit. - increment, suffix = units[-1] - interval /= float(increment) - - return f"{interval:3.1f} {suffix}s" - - # Colorization. # ANSI terminal colorization code heavily inspired by pygments: diff --git a/beets/ui/commands.py b/beets/ui/commands.py index fb9ca8b89..3117262f1 100755 --- a/beets/ui/commands.py +++ b/beets/ui/commands.py @@ -43,6 +43,7 @@ from beets.util import ( normpath, syspath, ) +from beets.util.units import human_bytes, human_seconds, human_seconds_short from . import _store_dict @@ -541,8 +542,8 @@ class ChangeRepresentation: cur_length0 = item.length if item.length else 0 new_length0 = track_info.length if track_info.length else 0 # format into string - cur_length = f"({util.human_seconds_short(cur_length0)})" - new_length = f"({util.human_seconds_short(new_length0)})" + cur_length = f"({human_seconds_short(cur_length0)})" + new_length = f"({human_seconds_short(new_length0)})" # colorize lhs_length = ui.colorize(highlight_color, cur_length) rhs_length = ui.colorize(highlight_color, new_length) @@ -706,14 +707,14 @@ class AlbumChange(ChangeRepresentation): for track_info in self.match.extra_tracks: line = f" ! {track_info.title} (#{self.format_index(track_info)})" if track_info.length: - line += f" ({util.human_seconds_short(track_info.length)})" + line += f" ({human_seconds_short(track_info.length)})" print_(ui.colorize("text_warning", line)) if self.match.extra_items: print_(f"Unmatched tracks ({len(self.match.extra_items)}):") for item in self.match.extra_items: line = " ! 
{} (#{})".format(item.title, self.format_index(item)) if item.length: - line += " ({})".format(util.human_seconds_short(item.length)) + line += " ({})".format(human_seconds_short(item.length)) print_(ui.colorize("text_warning", line)) @@ -795,8 +796,8 @@ def summarize_items(items, singleton): round(int(items[0].samplerate) / 1000, 1), items[0].bitdepth ) summary_parts.append(sample_bits) - summary_parts.append(util.human_seconds_short(total_duration)) - summary_parts.append(ui.human_bytes(total_filesize)) + summary_parts.append(human_seconds_short(total_duration)) + summary_parts.append(human_bytes(total_filesize)) return ", ".join(summary_parts) @@ -1906,7 +1907,7 @@ def show_stats(lib, query, exact): if item.album_id: albums.add(item.album_id) - size_str = "" + ui.human_bytes(total_size) + size_str = "" + human_bytes(total_size) if exact: size_str += f" ({total_size} bytes)" @@ -1918,7 +1919,7 @@ Artists: {} Albums: {} Album artists: {}""".format( total_items, - ui.human_seconds(total_time), + human_seconds(total_time), f" ({total_time:.2f} seconds)" if exact else "", "Total size" if exact else "Approximate total size", size_str, diff --git a/beets/util/__init__.py b/beets/util/__init__.py index 4572b27f9..c1c76c860 100644 --- a/beets/util/__init__.py +++ b/beets/util/__init__.py @@ -1019,27 +1019,6 @@ def case_sensitive(path: bytes) -> bool: return not os.path.samefile(lower_sys, upper_sys) -def raw_seconds_short(string: str) -> float: - """Formats a human-readable M:SS string as a float (number of seconds). - - Raises ValueError if the conversion cannot take place due to `string` not - being in the right format. - """ - match = re.match(r"^(\d+):([0-5]\d)$", string) - if not match: - raise ValueError("String not in M:SS format") - minutes, seconds = map(int, match.groups()) - return float(minutes * 60 + seconds) - - -def human_seconds_short(interval): - """Formats a number of seconds as a short human-readable M:SS - string. - """ - interval = int(interval) - return "%i:%02i" % (interval // 60, interval % 60) - - def asciify_path(path: str, sep_replace: str) -> str: """Decodes all unicode characters in a path into ASCII equivalents. diff --git a/beets/util/units.py b/beets/util/units.py new file mode 100644 index 000000000..d07d42546 --- /dev/null +++ b/beets/util/units.py @@ -0,0 +1,61 @@ +import re + + +def raw_seconds_short(string: str) -> float: + """Formats a human-readable M:SS string as a float (number of seconds). + + Raises ValueError if the conversion cannot take place due to `string` not + being in the right format. + """ + match = re.match(r"^(\d+):([0-5]\d)$", string) + if not match: + raise ValueError("String not in M:SS format") + minutes, seconds = map(int, match.groups()) + return float(minutes * 60 + seconds) + + +def human_seconds_short(interval): + """Formats a number of seconds as a short human-readable M:SS + string. + """ + interval = int(interval) + return "%i:%02i" % (interval // 60, interval % 60) + + +def human_bytes(size): + """Formats size, a number of bytes, in a human-readable way.""" + powers = ["", "K", "M", "G", "T", "P", "E", "Z", "Y", "H"] + unit = "B" + for power in powers: + if size < 1024: + return f"{size:3.1f} {power}{unit}" + size /= 1024.0 + unit = "iB" + return "big" + + +def human_seconds(interval): + """Formats interval, a number of seconds, as a human-readable time + interval using English words. 
+ """ + units = [ + (1, "second"), + (60, "minute"), + (60, "hour"), + (24, "day"), + (7, "week"), + (52, "year"), + (10, "decade"), + ] + for i in range(len(units) - 1): + increment, suffix = units[i] + next_increment, _ = units[i + 1] + interval /= float(increment) + if interval < next_increment: + break + else: + # Last unit. + increment, suffix = units[-1] + interval /= float(increment) + + return f"{interval:3.1f} {suffix}s" diff --git a/test/test_ui_init.py b/test/test_ui_init.py index a6f06c494..df21b300c 100644 --- a/test/test_ui_init.py +++ b/test/test_ui_init.py @@ -21,7 +21,7 @@ from random import random from beets import config, ui from beets.test import _common -from beets.test.helper import BeetsTestCase, ItemInDBTestCase, control_stdin +from beets.test.helper import BeetsTestCase, control_stdin class InputMethodsTest(BeetsTestCase): @@ -88,42 +88,6 @@ class InputMethodsTest(BeetsTestCase): assert items == ["1", "3"] -class InitTest(ItemInDBTestCase): - def test_human_bytes(self): - tests = [ - (0, "0.0 B"), - (30, "30.0 B"), - (pow(2, 10), "1.0 KiB"), - (pow(2, 20), "1.0 MiB"), - (pow(2, 30), "1.0 GiB"), - (pow(2, 40), "1.0 TiB"), - (pow(2, 50), "1.0 PiB"), - (pow(2, 60), "1.0 EiB"), - (pow(2, 70), "1.0 ZiB"), - (pow(2, 80), "1.0 YiB"), - (pow(2, 90), "1.0 HiB"), - (pow(2, 100), "big"), - ] - for i, h in tests: - assert h == ui.human_bytes(i) - - def test_human_seconds(self): - tests = [ - (0, "0.0 seconds"), - (30, "30.0 seconds"), - (60, "1.0 minutes"), - (90, "1.5 minutes"), - (125, "2.1 minutes"), - (3600, "1.0 hours"), - (86400, "1.0 days"), - (604800, "1.0 weeks"), - (31449600, "1.0 years"), - (314496000, "1.0 decades"), - ] - for i, h in tests: - assert h == ui.human_seconds(i) - - class ParentalDirCreation(BeetsTestCase): def test_create_yes(self): non_exist_path = _common.os.fsdecode( diff --git a/test/util/test_units.py b/test/util/test_units.py new file mode 100644 index 000000000..26f4d3eca --- /dev/null +++ b/test/util/test_units.py @@ -0,0 +1,43 @@ +import pytest + +from beets.util.units import human_bytes, human_seconds + + +@pytest.mark.parametrize( + "input_bytes,expected", + [ + (0, "0.0 B"), + (30, "30.0 B"), + (pow(2, 10), "1.0 KiB"), + (pow(2, 20), "1.0 MiB"), + (pow(2, 30), "1.0 GiB"), + (pow(2, 40), "1.0 TiB"), + (pow(2, 50), "1.0 PiB"), + (pow(2, 60), "1.0 EiB"), + (pow(2, 70), "1.0 ZiB"), + (pow(2, 80), "1.0 YiB"), + (pow(2, 90), "1.0 HiB"), + (pow(2, 100), "big"), + ], +) +def test_human_bytes(input_bytes, expected): + assert human_bytes(input_bytes) == expected + + +@pytest.mark.parametrize( + "input_seconds,expected", + [ + (0, "0.0 seconds"), + (30, "30.0 seconds"), + (60, "1.0 minutes"), + (90, "1.5 minutes"), + (125, "2.1 minutes"), + (3600, "1.0 hours"), + (86400, "1.0 days"), + (604800, "1.0 weeks"), + (31449600, "1.0 years"), + (314496000, "1.0 decades"), + ], +) +def test_human_seconds(input_seconds, expected): + assert human_seconds(input_seconds) == expected From 2b306de0fef3e5778cf440096f650788d4b9d84e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Mon, 5 Aug 2024 18:08:17 +0100 Subject: [PATCH 16/95] Replace assertInResult and assertNotInResult --- test/test_query.py | 46 +++++++++++++++++++--------------------------- 1 file changed, 19 insertions(+), 27 deletions(-) diff --git a/test/test_query.py b/test/test_query.py index 776bfd6f6..ecaa19514 100644 --- a/test/test_query.py +++ b/test/test_query.py @@ -44,14 +44,6 @@ class AssertsMixin: def assert_albums_matched(self, results, albums): assert {a.album for 
a in results} == set(albums) - def assertInResult(self, item, results): - result_ids = [i.id for i in results] - assert item.id in result_ids - - def assertNotInResult(self, item, results): - result_ids = [i.id for i in results] - assert item.id not in result_ids - # A test case class providing a library with some dummy data and some # assertions involving that data. @@ -477,44 +469,44 @@ class BoolQueryTest(BeetsTestCase, AssertsMixin): item_true = self.add_item(comp=True) item_false = self.add_item(comp=False) matched = self.lib.items("comp:true") - self.assertInResult(item_true, matched) - self.assertNotInResult(item_false, matched) + assert item_true.id in {i.id for i in matched} + assert item_false.id not in {i.id for i in matched} def test_flex_parse_true(self): item_true = self.add_item(flexbool=True) item_false = self.add_item(flexbool=False) matched = self.lib.items("flexbool:true") - self.assertInResult(item_true, matched) - self.assertNotInResult(item_false, matched) + assert item_true.id in {i.id for i in matched} + assert item_false.id not in {i.id for i in matched} def test_flex_parse_false(self): item_true = self.add_item(flexbool=True) item_false = self.add_item(flexbool=False) matched = self.lib.items("flexbool:false") - self.assertInResult(item_false, matched) - self.assertNotInResult(item_true, matched) + assert item_false.id in {i.id for i in matched} + assert item_true.id not in {i.id for i in matched} def test_flex_parse_1(self): item_true = self.add_item(flexbool=True) item_false = self.add_item(flexbool=False) matched = self.lib.items("flexbool:1") - self.assertInResult(item_true, matched) - self.assertNotInResult(item_false, matched) + assert item_true.id in {i.id for i in matched} + assert item_false.id not in {i.id for i in matched} def test_flex_parse_0(self): item_true = self.add_item(flexbool=True) item_false = self.add_item(flexbool=False) matched = self.lib.items("flexbool:0") - self.assertInResult(item_false, matched) - self.assertNotInResult(item_true, matched) + assert item_false.id in {i.id for i in matched} + assert item_true.id not in {i.id for i in matched} def test_flex_parse_any_string(self): # TODO this should be the other way around item_true = self.add_item(flexbool=True) item_false = self.add_item(flexbool=False) matched = self.lib.items("flexbool:something") - self.assertInResult(item_false, matched) - self.assertNotInResult(item_true, matched) + assert item_false.id in {i.id for i in matched} + assert item_true.id not in {i.id for i in matched} class DefaultSearchFieldsTest(DummyDataTestCase): @@ -541,33 +533,33 @@ class NoneQueryTest(BeetsTestCase, AssertsMixin): album_item = self.add_album().items().get() matched = self.lib.items(NoneQuery("album_id")) - self.assertInResult(singleton, matched) - self.assertNotInResult(album_item, matched) + assert singleton.id in {i.id for i in matched} + assert album_item.id not in {i.id for i in matched} def test_match_after_set_none(self): item = self.add_item(rg_track_gain=0) matched = self.lib.items(NoneQuery("rg_track_gain")) - self.assertNotInResult(item, matched) + assert item.id not in {i.id for i in matched} item["rg_track_gain"] = None item.store() matched = self.lib.items(NoneQuery("rg_track_gain")) - self.assertInResult(item, matched) + assert item.id in {i.id for i in matched} def test_match_slow(self): item = self.add_item() matched = self.lib.items(NoneQuery("rg_track_peak", fast=False)) - self.assertInResult(item, matched) + assert item.id in {i.id for i in matched} def 
test_match_slow_after_set_none(self): item = self.add_item(rg_track_gain=0) matched = self.lib.items(NoneQuery("rg_track_gain", fast=False)) - self.assertNotInResult(item, matched) + assert item.id not in {i.id for i in matched} item["rg_track_gain"] = None item.store() matched = self.lib.items(NoneQuery("rg_track_gain", fast=False)) - self.assertInResult(item, matched) + assert item.id in {i.id for i in matched} class NotQueryMatchTest(unittest.TestCase): From 2c6f314f4febd37314ab333a1c4ce29a05cf736b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Wed, 7 Aug 2024 15:26:33 +0100 Subject: [PATCH 17/95] Replace assertNegationProperties --- test/test_query.py | 189 +++++++++++++++++++++------------------------ 1 file changed, 90 insertions(+), 99 deletions(-) diff --git a/test/test_query.py b/test/test_query.py index ecaa19514..252367c14 100644 --- a/test/test_query.py +++ b/test/test_query.py @@ -627,119 +627,110 @@ class NotQueryMatchTest(unittest.TestCase): dbcore.query.NotQuery(q) -class NotQueryTest(DummyDataTestCase): - """Test `query.NotQuery` against the dummy data: - - `test_type_xxx`: tests for the negation of a particular XxxQuery class. - - `test_get_yyy`: tests on query strings (similar to `GetTest`) - """ +class TestNotQuery: + """Test `query.NotQuery` against the dummy data.""" - def assertNegationProperties(self, q): - """Given a Query `q`, assert that: - - q OR not(q) == all items - - q AND not(q) == 0 - - not(not(q)) == q - """ + @pytest.fixture(autouse=True, scope="class") + def lib(self): + test_case = DummyDataTestCase() + test_case.setUp() + return test_case.lib + + @pytest.mark.parametrize( + "q, expected_results", + [ + ( + dbcore.query.BooleanQuery("comp", True), + {"beets 4 eva"}, + ), + ( + dbcore.query.DateQuery("added", "2000-01-01"), + {"foo bar", "baz qux", "beets 4 eva"}, + ), + ( + dbcore.query.FalseQuery(), + {"foo bar", "baz qux", "beets 4 eva"}, + ), + ( + dbcore.query.MatchQuery("year", "2003"), + {"foo bar", "baz qux"}, + ), + ( + dbcore.query.NoneQuery("rg_track_gain"), + set(), + ), + ( + dbcore.query.NumericQuery("year", "2001..2002"), + {"beets 4 eva"}, + ), + ( + dbcore.query.AnyFieldQuery( + "baz", ["album"], dbcore.query.MatchQuery + ), + {"beets 4 eva"}, + ), + ( + dbcore.query.AndQuery( + [ + dbcore.query.BooleanQuery("comp", True), + dbcore.query.NumericQuery("year", "2002"), + ] + ), + {"foo bar", "beets 4 eva"}, + ), + ( + dbcore.query.OrQuery( + [ + dbcore.query.BooleanQuery("comp", True), + dbcore.query.NumericQuery("year", "2002"), + ] + ), + {"beets 4 eva"}, + ), + ( + dbcore.query.RegexpQuery("artist", "^t"), + {"foo bar"}, + ), + ( + dbcore.query.SubstringQuery("album", "ba"), + {"beets 4 eva"}, + ), + ( + dbcore.query.TrueQuery(), + set(), + ), + ], + ids=lambda x: x.__class__ if isinstance(x, dbcore.query.Query) else "", + ) + def test_query_type(self, lib, q, expected_results): + def get_results(*args): + return {i.title for i in lib.items(*args)} + + # not(a and b) <-> not(a) or not(b) not_q = dbcore.query.NotQuery(q) + not_q_results = get_results(not_q) + assert not_q_results == expected_results + # assert using OrQuery, AndQuery q_or = dbcore.query.OrQuery([q, not_q]) + q_and = dbcore.query.AndQuery([q, not_q]) - self.assert_items_matched_all(self.lib.items(q_or)) - self.assert_items_matched(self.lib.items(q_and), []) + assert get_results(q_or) == {"foo bar", "baz qux", "beets 4 eva"} + assert get_results(q_and) == set() # assert manually checking the item titles - all_titles = {i.title for i in 
self.lib.items()} - q_results = {i.title for i in self.lib.items(q)} - not_q_results = {i.title for i in self.lib.items(not_q)} + all_titles = get_results() + q_results = get_results(q) assert q_results.union(not_q_results) == all_titles assert q_results.intersection(not_q_results) == set() # round trip not_not_q = dbcore.query.NotQuery(not_q) - assert {i.title for i in self.lib.items(q)} == { - i.title for i in self.lib.items(not_not_q) - } + assert get_results(q) == get_results(not_not_q) - def test_type_and(self): - # not(a and b) <-> not(a) or not(b) - q = dbcore.query.AndQuery( - [ - dbcore.query.BooleanQuery("comp", True), - dbcore.query.NumericQuery("year", "2002"), - ], - ) - not_results = self.lib.items(dbcore.query.NotQuery(q)) - self.assert_items_matched(not_results, ["foo bar", "beets 4 eva"]) - self.assertNegationProperties(q) - def test_type_boolean(self): - q = dbcore.query.BooleanQuery("comp", True) - not_results = self.lib.items(dbcore.query.NotQuery(q)) - self.assert_items_matched(not_results, ["beets 4 eva"]) - self.assertNegationProperties(q) - - def test_type_date(self): - q = dbcore.query.DateQuery("added", "2000-01-01") - not_results = self.lib.items(dbcore.query.NotQuery(q)) - # query date is in the past, thus the 'not' results should contain all - # items - self.assert_items_matched( - not_results, ["foo bar", "baz qux", "beets 4 eva"] - ) - self.assertNegationProperties(q) - - def test_type_false(self): - q = dbcore.query.FalseQuery() - not_results = self.lib.items(dbcore.query.NotQuery(q)) - self.assert_items_matched_all(not_results) - self.assertNegationProperties(q) - - def test_type_match(self): - q = dbcore.query.MatchQuery("year", "2003") - not_results = self.lib.items(dbcore.query.NotQuery(q)) - self.assert_items_matched(not_results, ["foo bar", "baz qux"]) - self.assertNegationProperties(q) - - def test_type_none(self): - q = dbcore.query.NoneQuery("rg_track_gain") - not_results = self.lib.items(dbcore.query.NotQuery(q)) - self.assert_items_matched(not_results, []) - self.assertNegationProperties(q) - - def test_type_numeric(self): - q = dbcore.query.NumericQuery("year", "2001..2002") - not_results = self.lib.items(dbcore.query.NotQuery(q)) - self.assert_items_matched(not_results, ["beets 4 eva"]) - self.assertNegationProperties(q) - - def test_type_or(self): - # not(a or b) <-> not(a) and not(b) - q = dbcore.query.OrQuery( - [ - dbcore.query.BooleanQuery("comp", True), - dbcore.query.NumericQuery("year", "2002"), - ] - ) - not_results = self.lib.items(dbcore.query.NotQuery(q)) - self.assert_items_matched(not_results, ["beets 4 eva"]) - self.assertNegationProperties(q) - - def test_type_regexp(self): - q = dbcore.query.RegexpQuery("artist", "^t") - not_results = self.lib.items(dbcore.query.NotQuery(q)) - self.assert_items_matched(not_results, ["foo bar"]) - self.assertNegationProperties(q) - - def test_type_substring(self): - q = dbcore.query.SubstringQuery("album", "ba") - not_results = self.lib.items(dbcore.query.NotQuery(q)) - self.assert_items_matched(not_results, ["beets 4 eva"]) - self.assertNegationProperties(q) - - def test_type_true(self): - q = dbcore.query.TrueQuery() - not_results = self.lib.items(dbcore.query.NotQuery(q)) - self.assert_items_matched(not_results, []) - self.assertNegationProperties(q) +class NegationPrefixTest(DummyDataTestCase): + """Tests negation prefixes.""" def test_get_prefixes_keyed(self): """Test both negation prefixes on a keyed query.""" From 09b22949c0c9c6d41b5869fd0b5bf58ee0a763b6 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 1 Jun 2025 13:53:34 +0100 Subject: [PATCH 18/95] Refactor test_query And rewrite test_query.py --- beets/test/_common.py | 5 +- test/conftest.py | 20 + test/test_query.py | 1206 +++++++++++++---------------------------- 3 files changed, 413 insertions(+), 818 deletions(-) diff --git a/beets/test/_common.py b/beets/test/_common.py index 86319c011..da81a587c 100644 --- a/beets/test/_common.py +++ b/beets/test/_common.py @@ -63,8 +63,8 @@ HAVE_SYMLINK = sys.platform != "win32" HAVE_HARDLINK = sys.platform != "win32" -def item(lib=None): - i = beets.library.Item( +def item(lib=None, **kwargs): + defaults = dict( title="the title", artist="the artist", albumartist="the album artist", @@ -99,6 +99,7 @@ def item(lib=None): album_id=None, mtime=12345, ) + i = beets.library.Item(**{**defaults, **kwargs}) if lib: lib.add(i) return i diff --git a/test/conftest.py b/test/conftest.py index 95509bdb6..3107ad690 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -1,7 +1,10 @@ +import inspect import os import pytest +from beets.dbcore.query import Query + def skip_marked_items(items: list[pytest.Item], marker_name: str, reason: str): for item in (i for i in items if i.get_closest_marker(marker_name)): @@ -21,3 +24,20 @@ def pytest_collection_modifyitems( skip_marked_items( items, "on_lyrics_update", "No change in lyrics source code" ) + + +def pytest_make_parametrize_id(config, val, argname): + """Generate readable test identifiers for pytest parametrized tests. + + Provides custom string representations for: + - Query classes/instances: use class name + - Lambda functions: show abbreviated source + - Other values: use standard repr() + """ + if inspect.isclass(val) and issubclass(val, Query): + return val.__name__ + + if inspect.isfunction(val) and val.__name__ == "": + return inspect.getsource(val).split("lambda")[-1][:30] + + return repr(val) diff --git a/test/test_query.py b/test/test_query.py index 252367c14..0ddf83e3a 100644 --- a/test/test_query.py +++ b/test/test_query.py @@ -15,837 +15,39 @@ """Various tests for querying the library database.""" import sys -import unittest +from functools import partial from pathlib import Path import pytest -from mock import patch -from beets import dbcore from beets.dbcore import types from beets.dbcore.query import ( - InvalidQueryArgumentValueError, + AndQuery, + BooleanQuery, + DateQuery, + FalseQuery, + MatchQuery, NoneQuery, + NotQuery, + NumericQuery, + OrQuery, ParsingError, PathQuery, + RegexpQuery, + StringFieldQuery, + StringQuery, + SubstringQuery, + TrueQuery, ) +from beets.library import Item from beets.test import _common -from beets.test.helper import BeetsTestCase, TestHelper +from beets.test.helper import TestHelper # Because the absolute path begins with something like C:, we # can't disambiguate it from an ordinary query. WIN32_NO_IMPLICIT_PATHS = "Implicit paths are not supported on Windows" - -class AssertsMixin: - def assert_items_matched(self, results, titles): - assert {i.title for i in results} == set(titles) - - def assert_albums_matched(self, results, albums): - assert {a.album for a in results} == set(albums) - - -# A test case class providing a library with some dummy data and some -# assertions involving that data. 
-class DummyDataTestCase(BeetsTestCase, AssertsMixin): - def setUp(self): - super().setUp() - items = [_common.item() for _ in range(3)] - items[0].title = "foo bar" - items[0].artist = "one" - items[0].artists = ["one", "eleven"] - items[0].album = "baz" - items[0].year = 2001 - items[0].comp = True - items[0].genre = "rock" - items[1].title = "baz qux" - items[1].artist = "two" - items[1].artists = ["two", "twelve"] - items[1].album = "baz" - items[1].year = 2002 - items[1].comp = True - items[1].genre = "Rock" - items[2].title = "beets 4 eva" - items[2].artist = "three" - items[2].artists = ["three", "one"] - items[2].album = "foo" - items[2].year = 2003 - items[2].comp = False - items[2].genre = "Hard Rock" - for item in items: - self.lib.add(item) - self.album = self.lib.add_album(items[:2]) - - def assert_items_matched_all(self, results): - self.assert_items_matched( - results, - [ - "foo bar", - "baz qux", - "beets 4 eva", - ], - ) - - -class GetTest(DummyDataTestCase): - def test_get_empty(self): - q = "" - results = self.lib.items(q) - self.assert_items_matched_all(results) - - def test_get_none(self): - q = None - results = self.lib.items(q) - self.assert_items_matched_all(results) - - def test_get_one_keyed_term(self): - q = "title:qux" - results = self.lib.items(q) - self.assert_items_matched(results, ["baz qux"]) - - def test_get_one_keyed_exact(self): - q = "genre:=rock" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar"]) - q = "genre:=Rock" - results = self.lib.items(q) - self.assert_items_matched(results, ["baz qux"]) - q = 'genre:="Hard Rock"' - results = self.lib.items(q) - self.assert_items_matched(results, ["beets 4 eva"]) - - def test_get_one_keyed_exact_nocase(self): - q = 'genre:=~"hard rock"' - results = self.lib.items(q) - self.assert_items_matched(results, ["beets 4 eva"]) - - def test_get_one_keyed_regexp(self): - q = "artist::t.+r" - results = self.lib.items(q) - self.assert_items_matched(results, ["beets 4 eva"]) - - def test_get_one_unkeyed_term(self): - q = "three" - results = self.lib.items(q) - self.assert_items_matched(results, ["beets 4 eva"]) - - def test_get_one_unkeyed_exact(self): - q = "=rock" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar"]) - - def test_get_one_unkeyed_exact_nocase(self): - q = '=~"hard rock"' - results = self.lib.items(q) - self.assert_items_matched(results, ["beets 4 eva"]) - - def test_get_one_unkeyed_regexp(self): - q = ":x$" - results = self.lib.items(q) - self.assert_items_matched(results, ["baz qux"]) - - def test_get_no_matches(self): - q = "popebear" - results = self.lib.items(q) - self.assert_items_matched(results, []) - - def test_invalid_key(self): - q = "pope:bear" - results = self.lib.items(q) - # Matches nothing since the flexattr is not present on the - # objects. 
- self.assert_items_matched(results, []) - - def test_get_no_matches_exact(self): - q = 'genre:="hard rock"' - results = self.lib.items(q) - self.assert_items_matched(results, []) - - def test_term_case_insensitive(self): - q = "oNE" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar"]) - - def test_regexp_case_sensitive(self): - q = ":oNE" - results = self.lib.items(q) - self.assert_items_matched(results, []) - q = ":one" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar"]) - - def test_term_case_insensitive_with_key(self): - q = "artist:thrEE" - results = self.lib.items(q) - self.assert_items_matched(results, ["beets 4 eva"]) - - def test_term_case_regex_with_multi_key_matches(self): - q = "artists::eleven" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar"]) - - def test_term_case_regex_with_multi_key_matches_multiple_columns(self): - q = "artists::one" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar", "beets 4 eva"]) - - def test_key_case_insensitive(self): - q = "ArTiST:three" - results = self.lib.items(q) - self.assert_items_matched(results, ["beets 4 eva"]) - - def test_keyed_matches_exact_nocase(self): - q = "genre:=~rock" - results = self.lib.items(q) - self.assert_items_matched( - results, - [ - "foo bar", - "baz qux", - ], - ) - - def test_unkeyed_term_matches_multiple_columns(self): - q = "baz" - results = self.lib.items(q) - self.assert_items_matched( - results, - [ - "foo bar", - "baz qux", - ], - ) - - def test_unkeyed_regexp_matches_multiple_columns(self): - q = ":z$" - results = self.lib.items(q) - self.assert_items_matched( - results, - [ - "foo bar", - "baz qux", - ], - ) - - def test_keyed_term_matches_only_one_column(self): - q = "title:baz" - results = self.lib.items(q) - self.assert_items_matched(results, ["baz qux"]) - - def test_keyed_regexp_matches_only_one_column(self): - q = "title::baz" - results = self.lib.items(q) - self.assert_items_matched( - results, - [ - "baz qux", - ], - ) - - def test_multiple_terms_narrow_search(self): - q = "qux baz" - results = self.lib.items(q) - self.assert_items_matched( - results, - [ - "baz qux", - ], - ) - - def test_multiple_regexps_narrow_search(self): - q = ":baz :qux" - results = self.lib.items(q) - self.assert_items_matched(results, ["baz qux"]) - - def test_mixed_terms_regexps_narrow_search(self): - q = ":baz qux" - results = self.lib.items(q) - self.assert_items_matched(results, ["baz qux"]) - - def test_single_year(self): - q = "year:2001" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar"]) - - def test_year_range(self): - q = "year:2000..2002" - results = self.lib.items(q) - self.assert_items_matched( - results, - [ - "foo bar", - "baz qux", - ], - ) - - def test_singleton_true(self): - q = "singleton:true" - results = self.lib.items(q) - self.assert_items_matched(results, ["beets 4 eva"]) - - def test_singleton_1(self): - q = "singleton:1" - results = self.lib.items(q) - self.assert_items_matched(results, ["beets 4 eva"]) - - def test_singleton_false(self): - q = "singleton:false" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar", "baz qux"]) - - def test_singleton_0(self): - q = "singleton:0" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar", "baz qux"]) - - def test_compilation_true(self): - q = "comp:true" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar", "baz qux"]) - - def 
test_compilation_false(self): - q = "comp:false" - results = self.lib.items(q) - self.assert_items_matched(results, ["beets 4 eva"]) - - def test_unknown_field_name_no_results(self): - q = "xyzzy:nonsense" - results = self.lib.items(q) - titles = [i.title for i in results] - assert titles == [] - - def test_unknown_field_name_no_results_in_album_query(self): - q = "xyzzy:nonsense" - results = self.lib.albums(q) - names = [a.album for a in results] - assert names == [] - - def test_item_field_name_matches_nothing_in_album_query(self): - q = "format:nonsense" - results = self.lib.albums(q) - names = [a.album for a in results] - assert names == [] - - def test_unicode_query(self): - item = self.lib.items().get() - item.title = "caf\xe9" - item.store() - - q = "title:caf\xe9" - results = self.lib.items(q) - self.assert_items_matched(results, ["caf\xe9"]) - - def test_numeric_search_positive(self): - q = dbcore.query.NumericQuery("year", "2001") - results = self.lib.items(q) - assert results - - def test_numeric_search_negative(self): - q = dbcore.query.NumericQuery("year", "1999") - results = self.lib.items(q) - assert not results - - def test_album_field_fallback(self): - self.album["albumflex"] = "foo" - self.album.store() - - q = "albumflex:foo" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar", "baz qux"]) - - def test_invalid_query(self): - with pytest.raises(InvalidQueryArgumentValueError, match="not an int"): - dbcore.query.NumericQuery("year", "199a") - - msg_match = r"not a regular expression.*unterminated subpattern" - with pytest.raises(ParsingError, match=msg_match): - dbcore.query.RegexpQuery("year", "199(") - - -class MatchTest(unittest.TestCase): - def setUp(self): - super().setUp() - self.item = _common.item() - - def test_regex_match_positive(self): - q = dbcore.query.RegexpQuery("album", "^the album$") - assert q.match(self.item) - - def test_regex_match_negative(self): - q = dbcore.query.RegexpQuery("album", "^album$") - assert not q.match(self.item) - - def test_regex_match_non_string_value(self): - q = dbcore.query.RegexpQuery("disc", "^6$") - assert q.match(self.item) - - def test_substring_match_positive(self): - q = dbcore.query.SubstringQuery("album", "album") - assert q.match(self.item) - - def test_substring_match_negative(self): - q = dbcore.query.SubstringQuery("album", "ablum") - assert not q.match(self.item) - - def test_substring_match_non_string_value(self): - q = dbcore.query.SubstringQuery("disc", "6") - assert q.match(self.item) - - def test_exact_match_nocase_positive(self): - q = dbcore.query.StringQuery("genre", "the genre") - assert q.match(self.item) - q = dbcore.query.StringQuery("genre", "THE GENRE") - assert q.match(self.item) - - def test_exact_match_nocase_negative(self): - q = dbcore.query.StringQuery("genre", "genre") - assert not q.match(self.item) - - def test_year_match_positive(self): - q = dbcore.query.NumericQuery("year", "1") - assert q.match(self.item) - - def test_year_match_negative(self): - q = dbcore.query.NumericQuery("year", "10") - assert not q.match(self.item) - - def test_bitrate_range_positive(self): - q = dbcore.query.NumericQuery("bitrate", "100000..200000") - assert q.match(self.item) - - def test_bitrate_range_negative(self): - q = dbcore.query.NumericQuery("bitrate", "200000..300000") - assert not q.match(self.item) - - def test_open_range(self): - dbcore.query.NumericQuery("bitrate", "100000..") - - def test_eq(self): - q1 = dbcore.query.MatchQuery("foo", "bar") - q2 = 
dbcore.query.MatchQuery("foo", "bar") - q3 = dbcore.query.MatchQuery("foo", "baz") - q4 = dbcore.query.StringFieldQuery("foo", "bar") - assert q1 == q2 - assert q1 != q3 - assert q1 != q4 - assert q3 != q4 - - -class IntQueryTest(BeetsTestCase): - def test_exact_value_match(self): - item = self.add_item(bpm=120) - matched = self.lib.items("bpm:120").get() - assert item.id == matched.id - - def test_range_match(self): - item = self.add_item(bpm=120) - self.add_item(bpm=130) - - matched = self.lib.items("bpm:110..125") - assert 1 == len(matched) - assert item.id == matched.get().id - - @patch("beets.library.Item._types", {"myint": types.Integer()}) - def test_flex_range_match(self): - item = self.add_item(myint=2) - matched = self.lib.items("myint:2").get() - assert item.id == matched.id - - @patch("beets.library.Item._types", {"myint": types.Integer()}) - def test_flex_dont_match_missing(self): - self.add_item() - matched = self.lib.items("myint:2").get() - assert matched is None - - def test_no_substring_match(self): - self.add_item(bpm=120) - matched = self.lib.items("bpm:12").get() - assert matched is None - - -@patch("beets.library.Item._types", {"flexbool": types.Boolean()}) -class BoolQueryTest(BeetsTestCase, AssertsMixin): - def test_parse_true(self): - item_true = self.add_item(comp=True) - item_false = self.add_item(comp=False) - matched = self.lib.items("comp:true") - assert item_true.id in {i.id for i in matched} - assert item_false.id not in {i.id for i in matched} - - def test_flex_parse_true(self): - item_true = self.add_item(flexbool=True) - item_false = self.add_item(flexbool=False) - matched = self.lib.items("flexbool:true") - assert item_true.id in {i.id for i in matched} - assert item_false.id not in {i.id for i in matched} - - def test_flex_parse_false(self): - item_true = self.add_item(flexbool=True) - item_false = self.add_item(flexbool=False) - matched = self.lib.items("flexbool:false") - assert item_false.id in {i.id for i in matched} - assert item_true.id not in {i.id for i in matched} - - def test_flex_parse_1(self): - item_true = self.add_item(flexbool=True) - item_false = self.add_item(flexbool=False) - matched = self.lib.items("flexbool:1") - assert item_true.id in {i.id for i in matched} - assert item_false.id not in {i.id for i in matched} - - def test_flex_parse_0(self): - item_true = self.add_item(flexbool=True) - item_false = self.add_item(flexbool=False) - matched = self.lib.items("flexbool:0") - assert item_false.id in {i.id for i in matched} - assert item_true.id not in {i.id for i in matched} - - def test_flex_parse_any_string(self): - # TODO this should be the other way around - item_true = self.add_item(flexbool=True) - item_false = self.add_item(flexbool=False) - matched = self.lib.items("flexbool:something") - assert item_false.id in {i.id for i in matched} - assert item_true.id not in {i.id for i in matched} - - -class DefaultSearchFieldsTest(DummyDataTestCase): - def test_albums_matches_album(self): - albums = list(self.lib.albums("baz")) - assert len(albums) == 1 - - def test_albums_matches_albumartist(self): - albums = list(self.lib.albums(["album artist"])) - assert len(albums) == 1 - - def test_items_matches_title(self): - items = self.lib.items("beets") - self.assert_items_matched(items, ["beets 4 eva"]) - - def test_items_does_not_match_year(self): - items = self.lib.items("2001") - self.assert_items_matched(items, []) - - -class NoneQueryTest(BeetsTestCase, AssertsMixin): - def test_match_singletons(self): - singleton = self.add_item() - 
album_item = self.add_album().items().get() - - matched = self.lib.items(NoneQuery("album_id")) - assert singleton.id in {i.id for i in matched} - assert album_item.id not in {i.id for i in matched} - - def test_match_after_set_none(self): - item = self.add_item(rg_track_gain=0) - matched = self.lib.items(NoneQuery("rg_track_gain")) - assert item.id not in {i.id for i in matched} - - item["rg_track_gain"] = None - item.store() - matched = self.lib.items(NoneQuery("rg_track_gain")) - assert item.id in {i.id for i in matched} - - def test_match_slow(self): - item = self.add_item() - matched = self.lib.items(NoneQuery("rg_track_peak", fast=False)) - assert item.id in {i.id for i in matched} - - def test_match_slow_after_set_none(self): - item = self.add_item(rg_track_gain=0) - matched = self.lib.items(NoneQuery("rg_track_gain", fast=False)) - assert item.id not in {i.id for i in matched} - - item["rg_track_gain"] = None - item.store() - matched = self.lib.items(NoneQuery("rg_track_gain", fast=False)) - assert item.id in {i.id for i in matched} - - -class NotQueryMatchTest(unittest.TestCase): - """Test `query.NotQuery` matching against a single item, using the same - cases and assertions as on `MatchTest`, plus assertion on the negated - queries (ie. assert q -> assert not NotQuery(q)). - """ - - def setUp(self): - super().setUp() - self.item = _common.item() - - def test_regex_match_positive(self): - q = dbcore.query.RegexpQuery("album", "^the album$") - assert q.match(self.item) - assert not dbcore.query.NotQuery(q).match(self.item) - - def test_regex_match_negative(self): - q = dbcore.query.RegexpQuery("album", "^album$") - assert not q.match(self.item) - assert dbcore.query.NotQuery(q).match(self.item) - - def test_regex_match_non_string_value(self): - q = dbcore.query.RegexpQuery("disc", "^6$") - assert q.match(self.item) - assert not dbcore.query.NotQuery(q).match(self.item) - - def test_substring_match_positive(self): - q = dbcore.query.SubstringQuery("album", "album") - assert q.match(self.item) - assert not dbcore.query.NotQuery(q).match(self.item) - - def test_substring_match_negative(self): - q = dbcore.query.SubstringQuery("album", "ablum") - assert not q.match(self.item) - assert dbcore.query.NotQuery(q).match(self.item) - - def test_substring_match_non_string_value(self): - q = dbcore.query.SubstringQuery("disc", "6") - assert q.match(self.item) - assert not dbcore.query.NotQuery(q).match(self.item) - - def test_year_match_positive(self): - q = dbcore.query.NumericQuery("year", "1") - assert q.match(self.item) - assert not dbcore.query.NotQuery(q).match(self.item) - - def test_year_match_negative(self): - q = dbcore.query.NumericQuery("year", "10") - assert not q.match(self.item) - assert dbcore.query.NotQuery(q).match(self.item) - - def test_bitrate_range_positive(self): - q = dbcore.query.NumericQuery("bitrate", "100000..200000") - assert q.match(self.item) - assert not dbcore.query.NotQuery(q).match(self.item) - - def test_bitrate_range_negative(self): - q = dbcore.query.NumericQuery("bitrate", "200000..300000") - assert not q.match(self.item) - assert dbcore.query.NotQuery(q).match(self.item) - - def test_open_range(self): - q = dbcore.query.NumericQuery("bitrate", "100000..") - dbcore.query.NotQuery(q) - - -class TestNotQuery: - """Test `query.NotQuery` against the dummy data.""" - - @pytest.fixture(autouse=True, scope="class") - def lib(self): - test_case = DummyDataTestCase() - test_case.setUp() - return test_case.lib - - @pytest.mark.parametrize( - "q, expected_results", 
- [ - ( - dbcore.query.BooleanQuery("comp", True), - {"beets 4 eva"}, - ), - ( - dbcore.query.DateQuery("added", "2000-01-01"), - {"foo bar", "baz qux", "beets 4 eva"}, - ), - ( - dbcore.query.FalseQuery(), - {"foo bar", "baz qux", "beets 4 eva"}, - ), - ( - dbcore.query.MatchQuery("year", "2003"), - {"foo bar", "baz qux"}, - ), - ( - dbcore.query.NoneQuery("rg_track_gain"), - set(), - ), - ( - dbcore.query.NumericQuery("year", "2001..2002"), - {"beets 4 eva"}, - ), - ( - dbcore.query.AnyFieldQuery( - "baz", ["album"], dbcore.query.MatchQuery - ), - {"beets 4 eva"}, - ), - ( - dbcore.query.AndQuery( - [ - dbcore.query.BooleanQuery("comp", True), - dbcore.query.NumericQuery("year", "2002"), - ] - ), - {"foo bar", "beets 4 eva"}, - ), - ( - dbcore.query.OrQuery( - [ - dbcore.query.BooleanQuery("comp", True), - dbcore.query.NumericQuery("year", "2002"), - ] - ), - {"beets 4 eva"}, - ), - ( - dbcore.query.RegexpQuery("artist", "^t"), - {"foo bar"}, - ), - ( - dbcore.query.SubstringQuery("album", "ba"), - {"beets 4 eva"}, - ), - ( - dbcore.query.TrueQuery(), - set(), - ), - ], - ids=lambda x: x.__class__ if isinstance(x, dbcore.query.Query) else "", - ) - def test_query_type(self, lib, q, expected_results): - def get_results(*args): - return {i.title for i in lib.items(*args)} - - # not(a and b) <-> not(a) or not(b) - not_q = dbcore.query.NotQuery(q) - not_q_results = get_results(not_q) - assert not_q_results == expected_results - - # assert using OrQuery, AndQuery - q_or = dbcore.query.OrQuery([q, not_q]) - - q_and = dbcore.query.AndQuery([q, not_q]) - assert get_results(q_or) == {"foo bar", "baz qux", "beets 4 eva"} - assert get_results(q_and) == set() - - # assert manually checking the item titles - all_titles = get_results() - q_results = get_results(q) - assert q_results.union(not_q_results) == all_titles - assert q_results.intersection(not_q_results) == set() - - # round trip - not_not_q = dbcore.query.NotQuery(not_q) - assert get_results(q) == get_results(not_not_q) - - -class NegationPrefixTest(DummyDataTestCase): - """Tests negation prefixes.""" - - def test_get_prefixes_keyed(self): - """Test both negation prefixes on a keyed query.""" - q0 = "-title:qux" - q1 = "^title:qux" - results0 = self.lib.items(q0) - results1 = self.lib.items(q1) - self.assert_items_matched(results0, ["foo bar", "beets 4 eva"]) - self.assert_items_matched(results1, ["foo bar", "beets 4 eva"]) - - def test_get_prefixes_unkeyed(self): - """Test both negation prefixes on an unkeyed query.""" - q0 = "-qux" - q1 = "^qux" - results0 = self.lib.items(q0) - results1 = self.lib.items(q1) - self.assert_items_matched(results0, ["foo bar", "beets 4 eva"]) - self.assert_items_matched(results1, ["foo bar", "beets 4 eva"]) - - def test_get_one_keyed_regexp(self): - q = "-artist::t.+r" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar", "baz qux"]) - - def test_get_one_unkeyed_regexp(self): - q = "-:x$" - results = self.lib.items(q) - self.assert_items_matched(results, ["foo bar", "beets 4 eva"]) - - def test_get_multiple_terms(self): - q = "baz -bar" - results = self.lib.items(q) - self.assert_items_matched(results, ["baz qux"]) - - def test_get_mixed_terms(self): - q = "baz -title:bar" - results = self.lib.items(q) - self.assert_items_matched(results, ["baz qux"]) - - def test_fast_vs_slow(self): - """Test that the results are the same regardless of the `fast` flag - for negated `FieldQuery`s. 
- - TODO: investigate NoneQuery(fast=False), as it is raising - AttributeError: type object 'NoneQuery' has no attribute 'field' - at NoneQuery.match() (due to being @classmethod, and no self?) - """ - classes = [ - (dbcore.query.DateQuery, ["added", "2001-01-01"]), - (dbcore.query.MatchQuery, ["artist", "one"]), - # (dbcore.query.NoneQuery, ['rg_track_gain']), - (dbcore.query.NumericQuery, ["year", "2002"]), - (dbcore.query.StringFieldQuery, ["year", "2001"]), - (dbcore.query.RegexpQuery, ["album", "^.a"]), - (dbcore.query.SubstringQuery, ["title", "x"]), - ] - - for klass, args in classes: - q_fast = dbcore.query.NotQuery(klass(*(args + [True]))) - q_slow = dbcore.query.NotQuery(klass(*(args + [False]))) - - try: - assert [i.title for i in self.lib.items(q_fast)] == [ - i.title for i in self.lib.items(q_slow) - ] - except NotImplementedError: - # ignore classes that do not provide `fast` implementation - pass - - -class RelatedQueriesTest(BeetsTestCase, AssertsMixin): - """Test album-level queries with track-level filters and vice-versa.""" - - def setUp(self): - super().setUp() - - albums = [] - for album_idx in range(1, 3): - album_name = f"Album{album_idx}" - album_items = [] - for item_idx in range(1, 3): - item = _common.item() - item.album = album_name - item.title = f"{album_name} Item{item_idx}" - self.lib.add(item) - album_items.append(item) - album = self.lib.add_album(album_items) - album.artpath = f"{album_name} Artpath" - album.catalognum = "ABC" - album.store() - albums.append(album) - - self.album, self.another_album = albums - - def test_get_albums_filter_by_track_field(self): - q = "title:Album1" - results = self.lib.albums(q) - self.assert_albums_matched(results, ["Album1"]) - - def test_get_items_filter_by_album_field(self): - q = "artpath::Album1" - results = self.lib.items(q) - self.assert_items_matched(results, ["Album1 Item1", "Album1 Item2"]) - - def test_filter_albums_by_common_field(self): - # title:Album1 ensures that the items table is joined for the query - q = "title:Album1 Album1" - results = self.lib.albums(q) - self.assert_albums_matched(results, ["Album1"]) - - def test_filter_items_by_common_field(self): - # artpath::A ensures that the albums table is joined for the query - q = "artpath::A Album1" - results = self.lib.items(q) - self.assert_items_matched(results, ["Album1 Item1", "Album1 Item2"]) +_p = pytest.param @pytest.fixture(scope="class") @@ -858,8 +60,228 @@ def helper(): helper.teardown_beets() +class TestGet: + @pytest.fixture(scope="class") + def lib(self, helper): + album_items = [ + helper.create_item( + title="first", + artist="one", + artists=["one", "eleven"], + album="baz", + year=2001, + comp=True, + genre="rock", + ), + helper.create_item( + title="second", + artist="two", + artists=["two", "twelve"], + album="baz", + year=2002, + comp=True, + genre="Rock", + ), + ] + album = helper.lib.add_album(album_items) + album.albumflex = "foo" + album.store() + + helper.add_item( + title="third", + artist="three", + artists=["three", "one"], + album="foo", + year=2003, + comp=False, + genre="Hard Rock", + comments="caf\xe9", + ) + + return helper.lib + + @pytest.mark.parametrize( + "q, expected_titles", + [ + ("", ["first", "second", "third"]), + (None, ["first", "second", "third"]), + (":oNE", []), + (":one", ["first"]), + (":sec :ond", ["second"]), + (":second", ["second"]), + ("=rock", ["first"]), + ('=~"hard rock"', ["third"]), + (":t$", ["first"]), + ("oNE", ["first"]), + ("baz", ["first", "second"]), + ("sec ond", ["second"]), + 
("three", ["third"]), + ("albumflex:foo", ["first", "second"]), + ("artist::t.+r", ["third"]), + ("artist:thrEE", ["third"]), + ("artists::eleven", ["first"]), + ("artists::one", ["first", "third"]), + ("ArTiST:three", ["third"]), + ("comments:caf\xe9", ["third"]), + ("comp:true", ["first", "second"]), + ("comp:false", ["third"]), + ("genre:=rock", ["first"]), + ("genre:=Rock", ["second"]), + ('genre:="Hard Rock"', ["third"]), + ('genre:=~"hard rock"', ["third"]), + ("genre:=~rock", ["first", "second"]), + ('genre:="hard rock"', []), + ("popebear", []), + ("pope:bear", []), + ("singleton:true", ["third"]), + ("singleton:1", ["third"]), + ("singleton:false", ["first", "second"]), + ("singleton:0", ["first", "second"]), + ("title:ond", ["second"]), + ("title::sec", ["second"]), + ("year:2001", ["first"]), + ("year:2000..2002", ["first", "second"]), + ("xyzzy:nonsense", []), + ], + ) + def test_get_query(self, lib, q, expected_titles): + assert {i.title for i in lib.items(q)} == set(expected_titles) + + @pytest.mark.parametrize( + "q, expected_titles", + [ + (BooleanQuery("comp", True), ("third",)), + (DateQuery("added", "2000-01-01"), ("first", "second", "third")), + (FalseQuery(), ("first", "second", "third")), + (MatchQuery("year", "2003"), ("first", "second")), + (NoneQuery("rg_track_gain"), ()), + (NumericQuery("year", "2001..2002"), ("third",)), + ( + AndQuery( + [BooleanQuery("comp", True), NumericQuery("year", "2002")] + ), + ("first", "third"), + ), + ( + OrQuery( + [BooleanQuery("comp", True), NumericQuery("year", "2002")] + ), + ("third",), + ), + (RegexpQuery("artist", "^t"), ("first",)), + (SubstringQuery("album", "ba"), ("third",)), + (TrueQuery(), ()), + ], + ) + def test_query_logic(self, lib, q, expected_titles): + def get_results(*args): + return {i.title for i in lib.items(*args)} + + # not(a and b) <-> not(a) or not(b) + not_q = NotQuery(q) + not_q_results = get_results(not_q) + assert not_q_results == set(expected_titles) + + # assert using OrQuery, AndQuery + q_or = OrQuery([q, not_q]) + + q_and = AndQuery([q, not_q]) + assert get_results(q_or) == {"first", "second", "third"} + assert get_results(q_and) == set() + + # assert manually checking the item titles + all_titles = get_results() + q_results = get_results(q) + assert q_results.union(not_q_results) == all_titles + assert q_results.intersection(not_q_results) == set() + + # round trip + not_not_q = NotQuery(not_q) + assert get_results(q) == get_results(not_not_q) + + @pytest.mark.parametrize( + "q, expected_titles", + [ + ("-artist::t.+r", ["first", "second"]), + ("-:t$", ["second", "third"]), + ("sec -bar", ["second"]), + ("sec -title:bar", ["second"]), + ("-ond", ["first", "third"]), + ("^ond", ["first", "third"]), + ("^title:sec", ["first", "third"]), + ("-title:sec", ["first", "third"]), + ], + ) + def test_negation_prefix(self, lib, q, expected_titles): + actual_titles = {i.title for i in lib.items(q)} + assert actual_titles == set(expected_titles) + + @pytest.mark.parametrize( + "make_q", + [ + partial(DateQuery, "added", "2001-01-01"), + partial(MatchQuery, "artist", "one"), + partial(NoneQuery, "rg_track_gain"), + partial(NumericQuery, "year", "2002"), + partial(StringQuery, "year", "2001"), + partial(RegexpQuery, "album", "^.a"), + partial(SubstringQuery, "title", "x"), + ], + ) + def test_fast_vs_slow(self, lib, make_q): + """Test that the results are the same regardless of the `fast` flag + for negated `FieldQuery`s. 
+ """ + q_fast = make_q(True) + q_slow = make_q(False) + + assert list(map(dict, lib.items(q_fast))) == list( + map(dict, lib.items(q_slow)) + ) + + +class TestMatch: + @pytest.fixture(scope="class") + def item(self): + return _common.item( + album="the album", + disc=6, + genre="the genre", + year=1, + bitrate=128000, + ) + + @pytest.mark.parametrize( + "q, should_match", + [ + (RegexpQuery("album", "^the album$"), True), + (RegexpQuery("album", "^album$"), False), + (RegexpQuery("disc", "^6$"), True), + (SubstringQuery("album", "album"), True), + (SubstringQuery("album", "ablum"), False), + (SubstringQuery("disc", "6"), True), + (StringQuery("genre", "the genre"), True), + (StringQuery("genre", "THE GENRE"), True), + (StringQuery("genre", "genre"), False), + (NumericQuery("year", "1"), True), + (NumericQuery("year", "10"), False), + (NumericQuery("bitrate", "100000..200000"), True), + (NumericQuery("bitrate", "200000..300000"), False), + (NumericQuery("bitrate", "100000.."), True), + ], + ) + def test_match(self, item, q, should_match): + assert q.match(item) == should_match + assert not NotQuery(q).match(item) == should_match + + class TestPathQuery: - _p = pytest.param + """Tests for path-based querying functionality in the database system. + + Verifies that path queries correctly match items by their file paths, + handling special characters, case sensitivity, parent directories, + and path separator detection across different platforms. + """ @pytest.fixture(scope="class") def lib(self, helper): @@ -889,6 +311,7 @@ class TestPathQuery: ], ) def test_explicit(self, monkeypatch, lib, q, expected_titles): + """Test explicit path queries with different path specifications.""" monkeypatch.setattr("beets.util.case_sensitive", lambda *_: True) assert {i.title for i in lib.items(q)} == set(expected_titles) @@ -904,6 +327,7 @@ class TestPathQuery: ], ) def test_implicit(self, monkeypatch, lib, q, expected_titles): + """Test implicit path detection when queries contain path separators.""" monkeypatch.setattr( "beets.dbcore.query.PathQuery.is_path_query", lambda path: True ) @@ -920,6 +344,7 @@ class TestPathQuery: def test_case_sensitivity( self, lib, monkeypatch, case_sensitive, expected_titles ): + """Test path matching with different case sensitivity settings.""" q = "path:/a/b/c2.mp3" monkeypatch.setattr( "beets.util.case_sensitive", lambda *_: case_sensitive @@ -943,6 +368,7 @@ class TestPathQuery: ], ) def test_path_sep_detection(self, monkeypatch, tmp_path, q, is_path_query): + """Test detection of path queries based on the presence of path separators.""" monkeypatch.chdir(tmp_path) (tmp_path / "foo").mkdir() (tmp_path / "foo" / "bar").touch() @@ -950,3 +376,151 @@ class TestPathQuery: q = str(tmp_path / q[1:]) assert PathQuery.is_path_query(q) == is_path_query + + +class TestQuery: + ALBUM = "album title" + SINGLE = "singleton" + + @pytest.fixture(scope="class") + def lib(self, helper): + helper.add_album( + title=self.ALBUM, + comp=True, + flexbool=True, + bpm=120, + flexint=2, + rg_track_gain=0, + ) + helper.add_item( + title=self.SINGLE, comp=False, flexbool=False, rg_track_gain=None + ) + + with pytest.MonkeyPatch.context() as monkeypatch: + monkeypatch.setattr( + Item, + "_types", + {"flexbool": types.Boolean(), "flexint": types.Integer()}, + ) + yield helper.lib + + @pytest.mark.parametrize("query_class", [MatchQuery, StringFieldQuery]) + def test_equality(self, query_class): + assert query_class("foo", "bar") == query_class("foo", "bar") + + @pytest.mark.parametrize( + "make_q, 
expected_msg", + [ + (lambda: NumericQuery("year", "199a"), "not an int"), + (lambda: RegexpQuery("year", "199("), r"not a regular expression.*unterminated subpattern"), # noqa: E501 + ] + ) # fmt: skip + def test_invalid_query(self, make_q, expected_msg): + with pytest.raises(ParsingError, match=expected_msg): + make_q() + + @pytest.mark.parametrize( + "q, expected_titles", + [ + # Boolean value + _p("comp:true", {ALBUM}, id="parse-true"), + _p("flexbool:true", {ALBUM}, id="flex-parse-true"), + _p("flexbool:false", {SINGLE}, id="flex-parse-false"), + _p("flexbool:1", {ALBUM}, id="flex-parse-1"), + _p("flexbool:0", {SINGLE}, id="flex-parse-0"), + # TODO: shouldn't this match 1 / true instead? + _p("flexbool:something", {SINGLE}, id="flex-parse-true"), + # Integer value + _p("bpm:120", {ALBUM}, id="int-exact-value"), + _p("bpm:110..125", {ALBUM}, id="int-range"), + _p("flexint:2", {ALBUM}, id="int-flex"), + _p("flexint:3", set(), id="int-no-match"), + _p("bpm:12", set(), id="int-dont-match-substring"), + # None value + _p(NoneQuery("album_id"), {SINGLE}, id="none-match-singleton"), + _p(NoneQuery("rg_track_gain"), {SINGLE}, id="none-value"), + ], + ) + def test_value_type(self, lib, q, expected_titles): + assert {i.title for i in lib.items(q)} == expected_titles + + +class TestDefaultSearchFields: + @pytest.fixture(scope="class") + def lib(self, helper): + helper.add_album( + title="title", + album="album", + albumartist="albumartist", + catalognum="catalognum", + year=2001, + ) + + return helper.lib + + @pytest.mark.parametrize( + "entity, q, should_match", + [ + _p("albums", "album", True, id="album-match-album"), + _p("albums", "albumartist", True, id="album-match-albumartist"), + _p("albums", "catalognum", False, id="album-dont-match-catalognum"), + _p("items", "title", True, id="item-match-title"), + _p("items", "2001", False, id="item-dont-match-year"), + ], + ) + def test_search(self, lib, entity, q, should_match): + assert bool(getattr(lib, entity)(q)) == should_match + + +class TestRelatedQueries: + """Test album-level queries with track-level filters and vice-versa.""" + + @pytest.fixture(scope="class") + def lib(self, helper): + for album_idx in range(1, 3): + album_name = f"Album{album_idx}" + items = [ + helper.create_item( + album=album_name, title=f"{album_name} Item{idx}" + ) + for idx in range(1, 3) + ] + album = helper.lib.add_album(items) + album.artpath = f"{album_name} Artpath" + album.catalognum = "ABC" + album.store() + + return helper.lib + + @pytest.mark.parametrize( + "q, expected_titles, expected_albums", + [ + _p( + "title:Album1", + ["Album1 Item1", "Album1 Item2"], + ["Album1"], + id="match-album-with-item-field-query", + ), + _p( + "title:Item2", + ["Album1 Item2", "Album2 Item2"], + ["Album1", "Album2"], + id="match-albums-with-item-field-query", + ), + _p( + "artpath::Album1", + ["Album1 Item1", "Album1 Item2"], + ["Album1"], + id="match-items-with-album-field-query", + ), + _p( + "catalognum:ABC Album1", + ["Album1 Item1", "Album1 Item2"], + ["Album1"], + id="query-field-common-to-album-and-item", + ), + ], + ) + def test_related_query(self, lib, q, expected_titles, expected_albums): + assert {i.album for i in lib.albums(q)} == set(expected_albums) + assert {i.title for i in lib.items(q)} == set(expected_titles) From 443ed578dcb717dcd51a354a4e48e7e9669f30bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 1 Jun 2025 13:56:15 +0100 Subject: [PATCH 19/95] Standardize abstract methods for coverage --- beets/dbcore/query.py | 9 
++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/beets/dbcore/query.py b/beets/dbcore/query.py index 9cff082a3..7d9f0cee7 100644 --- a/beets/dbcore/query.py +++ b/beets/dbcore/query.py @@ -85,6 +85,7 @@ class Query(ABC): """Return a set with field names that this query operates on.""" return set() + @abstractmethod def clause(self) -> tuple[str | None, Sequence[Any]]: """Generate an SQLite expression implementing the query. @@ -95,14 +96,12 @@ class Query(ABC): The default implementation returns None, falling back to a slow query using `match()`. """ - return None, () @abstractmethod def match(self, obj: Model): """Check whether this query matches a given Model. Can be used to perform queries on arbitrary sets of Model. """ - ... def __and__(self, other: Query) -> AndQuery: return AndQuery([self, other]) @@ -152,7 +151,7 @@ class FieldQuery(Query, Generic[P]): self.fast = fast def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: - return self.field, () + raise NotImplementedError def clause(self) -> tuple[str | None, Sequence[SQLiteType]]: if self.fast: @@ -164,7 +163,7 @@ class FieldQuery(Query, Generic[P]): @classmethod def value_match(cls, pattern: P, value: Any): """Determine whether the value matches the pattern.""" - raise NotImplementedError() + raise NotImplementedError def match(self, obj: Model) -> bool: return self.value_match(self.pattern, obj.get(self.field_name)) @@ -234,7 +233,7 @@ class StringFieldQuery(FieldQuery[P]): """Determine whether the value matches the pattern. Both arguments are strings. Subclasses implement this method. """ - raise NotImplementedError() + raise NotImplementedError class StringQuery(StringFieldQuery[str]): From dcd3a9f7f493d66e1e5df318b057af31325218a3 Mon Sep 17 00:00:00 2001 From: J0J0 Todos Date: Wed, 22 Nov 2023 00:03:02 +0100 Subject: [PATCH 20/95] playlist: Support m3u8 ending in playlist plugin --- beetsplug/playlist.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/beetsplug/playlist.py b/beetsplug/playlist.py index cb16fb5bc..8f1a2413d 100644 --- a/beetsplug/playlist.py +++ b/beetsplug/playlist.py @@ -12,7 +12,6 @@ # included in all copies or substantial portions of the Software. 
-import fnmatch import os import tempfile from collections.abc import Sequence @@ -22,6 +21,10 @@ from beets.dbcore.query import BLOB_TYPE, InQuery from beets.util import path_as_posix +def is_m3u_file(path): + return os.path.splitext(path)[1].lower() in {".m3u", ".m3u8"} + + class PlaylistQuery(InQuery[bytes]): """Matches files listed by a playlist file.""" @@ -45,7 +48,7 @@ class PlaylistQuery(InQuery[bytes]): paths = [] for playlist_path in playlist_paths: - if not fnmatch.fnmatch(playlist_path, "*.[mM]3[uU]"): + if not is_m3u_file(playlist_path): # This is not am M3U playlist, skip this candidate continue @@ -148,7 +151,7 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin): return for filename in dir_contents: - if fnmatch.fnmatch(filename, "*.[mM]3[uU]"): + if is_m3u_file(filename): yield os.path.join(self.playlist_dir, filename) def update_playlist(self, filename, base_dir): From 257991c73de79820819275a60af2d697a3b42c34 Mon Sep 17 00:00:00 2001 From: J0J0 Todos Date: Thu, 3 Jul 2025 10:52:41 +0200 Subject: [PATCH 21/95] playlist: Changelog for #5829 --- docs/changelog.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index d1a477cb5..5cf0557cc 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -26,6 +26,8 @@ New features: * :doc:`plugins/duplicates`: Add ``--remove`` option, allowing to remove from the library without deleting media files. :bug:`5832` +* :doc:`plugins/playlist`: Support files with the `.m3u8` extension. + :bug:`5829` Bug fixes: From cf557fb41b28ef254881cf40656a3012618647fa Mon Sep 17 00:00:00 2001 From: J0J0 Todos <2733783+JOJ0@users.noreply.github.com> Date: Sun, 6 Jul 2025 08:57:58 +0200 Subject: [PATCH 22/95] playlist: Use pathlib.Path and add types for is_m3u_file() MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Šarūnas Nejus --- beetsplug/playlist.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/beetsplug/playlist.py b/beetsplug/playlist.py index 8f1a2413d..7a27b02a3 100644 --- a/beetsplug/playlist.py +++ b/beetsplug/playlist.py @@ -15,14 +15,15 @@ import os import tempfile from collections.abc import Sequence +from pathlib import Path import beets from beets.dbcore.query import BLOB_TYPE, InQuery from beets.util import path_as_posix -def is_m3u_file(path): - return os.path.splitext(path)[1].lower() in {".m3u", ".m3u8"} +def is_m3u_file(path: str) -> bool: + return Path(path).suffix.lower() in {".m3u", ".m3u8"} class PlaylistQuery(InQuery[bytes]): From 59ecfd9a492bbf3997070e134c3d2bad6344a113 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Tue, 24 Jun 2025 16:56:12 +0200 Subject: [PATCH 23/95] Moved fetch_data and _get_track function. 
--- beetsplug/deezer.py | 82 ++++++++++++++++++++++----------------------- 1 file changed, 41 insertions(+), 41 deletions(-) diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py index 89f7436f8..f8d161759 100644 --- a/beetsplug/deezer.py +++ b/beetsplug/deezer.py @@ -60,19 +60,6 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): return [deezer_update_cmd] - def fetch_data(self, url): - try: - response = requests.get(url, timeout=10) - response.raise_for_status() - data = response.json() - except requests.exceptions.RequestException as e: - self._log.error("Error fetching data from {}\n Error: {}", url, e) - return None - if "error" in data: - self._log.debug("Deezer API error: {}", data["error"]["message"]) - return None - return data - def album_for_id(self, album_id: str) -> AlbumInfo | None: """Fetch an album by its Deezer ID or URL.""" if not (deezer_id := self._get_id(album_id)): @@ -156,34 +143,6 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): cover_art_url=album_data.get("cover_xl"), ) - def _get_track(self, track_data): - """Convert a Deezer track object dict to a TrackInfo object. - - :param track_data: Deezer Track object dict - :type track_data: dict - :return: TrackInfo object for track - :rtype: beets.autotag.hooks.TrackInfo - """ - artist, artist_id = self.get_artist( - track_data.get("contributors", [track_data["artist"]]) - ) - return TrackInfo( - title=track_data["title"], - track_id=track_data["id"], - deezer_track_id=track_data["id"], - isrc=track_data.get("isrc"), - artist=artist, - artist_id=artist_id, - length=track_data["duration"], - index=track_data.get("track_position"), - medium=track_data.get("disk_number"), - deezer_track_rank=track_data.get("rank"), - medium_index=track_data.get("track_position"), - data_source=self.data_source, - data_url=track_data["link"], - deezer_updated=time.time(), - ) - def track_for_id(self, track_id=None, track_data=None): """Fetch a track by its Deezer ID or URL and return a TrackInfo object or None if the track is not found. @@ -229,6 +188,34 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): track.medium_total = medium_total return track + def _get_track(self, track_data: JSONDict) -> TrackInfo: + """Convert a Deezer track object dict to a TrackInfo object. 
+ + :param track_data: Deezer Track object dict + :type track_data: dict + :return: TrackInfo object for track + :rtype: beets.autotag.hooks.TrackInfo + """ + artist, artist_id = self.get_artist( + track_data.get("contributors", [track_data["artist"]]) + ) + return TrackInfo( + title=track_data["title"], + track_id=track_data["id"], + deezer_track_id=track_data["id"], + isrc=track_data.get("isrc"), + artist=artist, + artist_id=artist_id, + length=track_data["duration"], + index=track_data.get("track_position"), + medium=track_data.get("disk_number"), + deezer_track_rank=track_data.get("rank"), + medium_index=track_data.get("track_position"), + data_source=self.data_source, + data_url=track_data["link"], + deezer_updated=time.time(), + ) + @staticmethod def _construct_search_query(filters=None, keywords=""): """Construct a query string with the specified filters and keywords to @@ -320,3 +307,16 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): item.deezer_updated = time.time() if write: item.try_write() + + def fetch_data(self, url: str): + try: + response = requests.get(url, timeout=10) + response.raise_for_status() + data = response.json() + except requests.exceptions.RequestException as e: + self._log.error("Error fetching data from {}\n Error: {}", url, e) + return None + if "error" in data: + self._log.debug("Deezer API error: {}", data["error"]["message"]) + return None + return data From 085b89b70bd347caa9078c1a8cb948efbc384b56 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 2 Jun 2025 13:21:21 +0200 Subject: [PATCH 24/95] Minor improvements to deezer plugin typing. --- beets/plugins.py | 3 +- beetsplug/deezer.py | 78 ++++++++++++++++++++++----------------------- 2 files changed, 41 insertions(+), 40 deletions(-) diff --git a/beets/plugins.py b/beets/plugins.py index 1ae672e20..6ca2462e3 100644 --- a/beets/plugins.py +++ b/beets/plugins.py @@ -28,6 +28,7 @@ from typing import ( Any, Callable, Generic, + Literal, Sequence, TypedDict, TypeVar, @@ -737,7 +738,7 @@ class MetadataSourcePlugin(Generic[R], BeetsPlugin, metaclass=abc.ABCMeta): @abc.abstractmethod def _search_api( self, - query_type: str, + query_type: Literal["album", "track"], filters: dict[str, str] | None, keywords: str = "", ) -> Sequence[R]: diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py index f8d161759..a6962ec56 100644 --- a/beetsplug/deezer.py +++ b/beetsplug/deezer.py @@ -18,6 +18,7 @@ from __future__ import annotations import collections import time +from typing import TYPE_CHECKING, Literal, Sequence import requests import unidecode @@ -25,10 +26,14 @@ import unidecode from beets import ui from beets.autotag import AlbumInfo, TrackInfo from beets.dbcore import types -from beets.plugins import BeetsPlugin, MetadataSourcePlugin +from beets.library import DateType, Item, Library +from beets.plugins import BeetsPlugin, MetadataSourcePlugin, Response + +if TYPE_CHECKING: + from beetsplug._typing import JSONDict -class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): +class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): data_source = "Deezer" item_types = { @@ -37,12 +42,6 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): "deezer_updated": types.DATE, } - # Base URLs for the Deezer API - # Documentation: https://developers.deezer.com/api/ - search_url = "https://api.deezer.com/search/" - album_url = "https://api.deezer.com/album/" - track_url = "https://api.deezer.com/track/" - def __init__(self): super().__init__() @@ -52,9 +51,9 @@ class DeezerPlugin(MetadataSourcePlugin, 
BeetsPlugin): "deezerupdate", help=f"Update {self.data_source} rank" ) - def func(lib, opts, args): + def func(lib: Library, opts, args): items = lib.items(ui.decargs(args)) - self.deezerupdate(items, ui.should_write()) + self.deezerupdate(list(items), ui.should_write()) deezer_update_cmd.func = func @@ -143,24 +142,23 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): cover_art_url=album_data.get("cover_xl"), ) - def track_for_id(self, track_id=None, track_data=None): + def track_for_id(self, track_id: str) -> None | TrackInfo: """Fetch a track by its Deezer ID or URL and return a TrackInfo object or None if the track is not found. :param track_id: (Optional) Deezer ID or URL for the track. Either ``track_id`` or ``track_data`` must be provided. - :type track_id: str :param track_data: (Optional) Simplified track object dict. May be provided instead of ``track_id`` to avoid unnecessary API calls. - :type track_data: dict :return: TrackInfo object for track - :rtype: beets.autotag.hooks.TrackInfo or None """ - if track_data is None: - if not (deezer_id := self._get_id(track_id)) or not ( - track_data := self.fetch_data(f"{self.track_url}{deezer_id}") - ): - return None + if not (deezer_id := self._get_id(track_id)): + self._log.debug("Invalid Deezer track_id: {}", track_id) + return None + + if not (track_data := self.fetch_data(f"{self.track_url}{deezer_id}")): + self._log.debug("Track not found: {}", track_id) + return None track = self._get_track(track_data) @@ -192,9 +190,7 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): """Convert a Deezer track object dict to a TrackInfo object. :param track_data: Deezer Track object dict - :type track_data: dict :return: TrackInfo object for track - :rtype: beets.autotag.hooks.TrackInfo """ artist, artist_id = self.get_artist( track_data.get("contributors", [track_data["artist"]]) @@ -216,18 +212,24 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): deezer_updated=time.time(), ) + # ------------------------------- Data fetching ------------------------------ # + # Base URLs for the Deezer API + # Documentation: https://developers.deezer.com/api/ + search_url = "https://api.deezer.com/search/" + album_url = "https://api.deezer.com/album/" + track_url = "https://api.deezer.com/track/" + @staticmethod - def _construct_search_query(filters=None, keywords=""): + def _construct_search_query( + filters: dict[str, str], keywords: str = "" + ) -> str: """Construct a query string with the specified filters and keywords to be provided to the Deezer Search API (https://developers.deezer.com/api/search). - :param filters: (Optional) Field filters to apply. - :type filters: dict + :param filters: Field filters to apply. :param keywords: (Optional) Query keywords to use. - :type keywords: str :return: Query string to be provided to the Search API. - :rtype: str """ query_components = [ keywords, @@ -238,25 +240,23 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): query = query.decode("utf8") return unidecode.unidecode(query) - def _search_api(self, query_type, filters=None, keywords=""): + def _search_api( + self, + query_type: Literal["album", "track"], + filters: dict[str, str], + keywords="", + ) -> Sequence[Response]: """Query the Deezer Search API for the specified ``keywords``, applying the provided ``filters``. - :param query_type: The Deezer Search API method to use. Valid types - are: 'album', 'artist', 'history', 'playlist', 'podcast', - 'radio', 'track', 'user', and 'track'. 
- :type query_type: str - :param filters: (Optional) Field filters to apply. - :type filters: dict + :param query_type: The Deezer Search API method to use. + Valid types are: 'album', 'artist', 'history', 'playlist', + 'podcast', 'radio', 'track', 'user', and 'track'. :param keywords: (Optional) Query keywords to use. - :type keywords: str :return: JSON data for the class:`Response ` object or None if no search results are returned. - :rtype: dict or None """ query = self._construct_search_query(keywords=keywords, filters=filters) - if not query: - return None self._log.debug(f"Searching {self.data_source} for '{query}'") try: response = requests.get( @@ -271,7 +271,7 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): self.data_source, e, ) - return None + return () response_data = response.json().get("data", []) self._log.debug( "Found {} result(s) from {} for '{}'", @@ -281,7 +281,7 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): ) return response_data - def deezerupdate(self, items, write): + def deezerupdate(self, items: Sequence[Item], write: bool): """Obtain rank information from Deezer.""" for index, item in enumerate(items, start=1): self._log.info( From 6ab0f8d3a774e1e4b5d1bdf2b1efdfa5377bc7d6 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 2 Jun 2025 13:44:38 +0200 Subject: [PATCH 25/95] Removed old docstring. --- beetsplug/deezer.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py index a6962ec56..85970663c 100644 --- a/beetsplug/deezer.py +++ b/beetsplug/deezer.py @@ -143,14 +143,9 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): ) def track_for_id(self, track_id: str) -> None | TrackInfo: - """Fetch a track by its Deezer ID or URL and return a - TrackInfo object or None if the track is not found. + """Fetch a track by its Deezer ID or URL. - :param track_id: (Optional) Deezer ID or URL for the track. Either - ``track_id`` or ``track_data`` must be provided. - :param track_data: (Optional) Simplified track object dict. May be - provided instead of ``track_id`` to avoid unnecessary API calls. - :return: TrackInfo object for track + Returns a TrackInfo object or None if the track is not found. """ if not (deezer_id := self._get_id(track_id)): self._log.debug("Invalid Deezer track_id: {}", track_id) From 1f15598294cf21eacd0dd8d034947e0e60777958 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Wed, 18 Jun 2025 14:12:18 +0200 Subject: [PATCH 26/95] Moved constants back to top. 
--- beetsplug/deezer.py | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py index 85970663c..0ffaeda0b 100644 --- a/beetsplug/deezer.py +++ b/beetsplug/deezer.py @@ -41,9 +41,11 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): "deezer_track_id": types.INTEGER, "deezer_updated": types.DATE, } - - def __init__(self): - super().__init__() + # Base URLs for the Deezer API + # Documentation: https://developers.deezer.com/api/ + search_url = "https://api.deezer.com/search/" + album_url = "https://api.deezer.com/album/" + track_url = "https://api.deezer.com/track/" def commands(self): """Add beet UI commands to interact with Deezer.""" @@ -207,13 +209,6 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): deezer_updated=time.time(), ) - # ------------------------------- Data fetching ------------------------------ # - # Base URLs for the Deezer API - # Documentation: https://developers.deezer.com/api/ - search_url = "https://api.deezer.com/search/" - album_url = "https://api.deezer.com/album/" - track_url = "https://api.deezer.com/track/" - @staticmethod def _construct_search_query( filters: dict[str, str], keywords: str = "" From 04a3dd21698f7da74a9db4cc40285db47988c0fd Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Wed, 18 Jun 2025 14:19:27 +0200 Subject: [PATCH 27/95] Adjusted typehint for search api. Removed optional none from filter. --- beets/plugins.py | 2 +- beetsplug/deezer.py | 13 ++++++++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/beets/plugins.py b/beets/plugins.py index 6ca2462e3..983d15402 100644 --- a/beets/plugins.py +++ b/beets/plugins.py @@ -739,7 +739,7 @@ class MetadataSourcePlugin(Generic[R], BeetsPlugin, metaclass=abc.ABCMeta): def _search_api( self, query_type: Literal["album", "track"], - filters: dict[str, str] | None, + filters: dict[str, str], keywords: str = "", ) -> Sequence[R]: raise NotImplementedError diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py index 0ffaeda0b..38a269b50 100644 --- a/beetsplug/deezer.py +++ b/beetsplug/deezer.py @@ -232,7 +232,16 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): def _search_api( self, - query_type: Literal["album", "track"], + query_type: Literal[ + "album", + "track", + "artist", + "history", + "playlist", + "podcast", + "radio", + "user", + ], filters: dict[str, str], keywords="", ) -> Sequence[Response]: @@ -240,8 +249,6 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): the provided ``filters``. :param query_type: The Deezer Search API method to use. - Valid types are: 'album', 'artist', 'history', 'playlist', - 'podcast', 'radio', 'track', 'user', and 'track'. :param keywords: (Optional) Query keywords to use. :return: JSON data for the class:`Response ` object or None if no search results are returned. From 50604b05109055821ae00d1a786a9d7c03fb4d69 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 11:40:51 +0200 Subject: [PATCH 28/95] Fixed linting issue after rebase. 
--- beetsplug/deezer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py index 38a269b50..c9703c4dd 100644 --- a/beetsplug/deezer.py +++ b/beetsplug/deezer.py @@ -26,10 +26,10 @@ import unidecode from beets import ui from beets.autotag import AlbumInfo, TrackInfo from beets.dbcore import types -from beets.library import DateType, Item, Library from beets.plugins import BeetsPlugin, MetadataSourcePlugin, Response if TYPE_CHECKING: + from beets.library import Item, Library from beetsplug._typing import JSONDict From eb497eee1a2070faa230ba8e999755090c33c988 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20M=C3=A9meint?= Date: Sat, 5 Jul 2025 14:10:34 +0200 Subject: [PATCH 29/95] Only consider release collections in mbcollection plugin --- beetsplug/mbcollection.py | 10 +++++++--- docs/changelog.rst | 2 ++ 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/beetsplug/mbcollection.py b/beetsplug/mbcollection.py index 1c010bf50..7a1289d1b 100644 --- a/beetsplug/mbcollection.py +++ b/beetsplug/mbcollection.py @@ -70,10 +70,14 @@ class MusicBrainzCollectionPlugin(BeetsPlugin): if not collections["collection-list"]: raise ui.UserError("no collections exist for user") - # Get all collection IDs, avoiding event collections - collection_ids = [x["id"] for x in collections["collection-list"]] + # Get all release collection IDs, avoiding event collections + collection_ids = [ + x["id"] + for x in collections["collection-list"] + if x["entity-type"] == "release" + ] if not collection_ids: - raise ui.UserError("No collection found.") + raise ui.UserError("No release collection found.") # Check that the collection exists so we can present a nice error collection = self.config["collection"].as_str() diff --git a/docs/changelog.rst b/docs/changelog.rst index 5cf0557cc..ec2fdd9e4 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -28,6 +28,8 @@ New features: :bug:`5832` * :doc:`plugins/playlist`: Support files with the `.m3u8` extension. :bug:`5829` +* :doc:`plugins/mbcollection`: When getting the user collections, only consider + collections of releases, and ignore collections of other entity types. Bug fixes: From 98377ab5f6fc1829d79211b376bfd8d82bafaf33 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 15:45:55 +0200 Subject: [PATCH 30/95] Split library file into different files inside library folder. 
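This should be transparent to importers: the new beets/library/__init__.py
re-exports the public names, so existing call sites keep working. A minimal
sketch of the (unchanged) import surface, assuming only the re-exports listed
in the new package __init__ below:

    # now resolved via beets/library/__init__.py instead of the old beets/library.py
    from beets.library import Album, Item, Library, parse_query_string
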
--- beets/library/__init__.py | 16 + beets/library/exceptions.py | 38 + beets/library/library.py | 148 ++++ beets/{library.py => library/models.py} | 1048 +++++++++-------------- beets/library/queries.py | 61 ++ 5 files changed, 663 insertions(+), 648 deletions(-) create mode 100644 beets/library/__init__.py create mode 100644 beets/library/exceptions.py create mode 100644 beets/library/library.py rename beets/{library.py => library/models.py} (86%) create mode 100644 beets/library/queries.py diff --git a/beets/library/__init__.py b/beets/library/__init__.py new file mode 100644 index 000000000..286b84189 --- /dev/null +++ b/beets/library/__init__.py @@ -0,0 +1,16 @@ +from .exceptions import FileOperationError, ReadError, WriteError +from .library import Library +from .models import Album, Item, LibModel +from .queries import parse_query_parts, parse_query_string + +__all__ = [ + "Library", + "LibModel", + "Album", + "Item", + "parse_query_parts", + "parse_query_string", + "FileOperationError", + "ReadError", + "WriteError", +] diff --git a/beets/library/exceptions.py b/beets/library/exceptions.py new file mode 100644 index 000000000..7f117a2fe --- /dev/null +++ b/beets/library/exceptions.py @@ -0,0 +1,38 @@ +from beets import util + + +class FileOperationError(Exception): + """Indicate an error when interacting with a file on disk. + + Possibilities include an unsupported media type, a permissions + error, and an unhandled Mutagen exception. + """ + + def __init__(self, path, reason): + """Create an exception describing an operation on the file at + `path` with the underlying (chained) exception `reason`. + """ + super().__init__(path, reason) + self.path = path + self.reason = reason + + def __str__(self): + """Get a string representing the error. + + Describe both the underlying reason and the file path in question. + """ + return f"{util.displayable_path(self.path)}: {self.reason}" + + +class ReadError(FileOperationError): + """An error while reading a file (i.e. in `Item.read`).""" + + def __str__(self): + return "error reading " + str(super()) + + +class WriteError(FileOperationError): + """An error while writing a file (i.e. in `Item.write`).""" + + def __str__(self): + return "error writing " + str(super()) diff --git a/beets/library/library.py b/beets/library/library.py new file mode 100644 index 000000000..7370f7ecd --- /dev/null +++ b/beets/library/library.py @@ -0,0 +1,148 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +import platformdirs + +import beets +from beets import dbcore +from beets.util import normpath + +from .models import Album, Item +from .queries import PF_KEY_DEFAULT, parse_query_parts, parse_query_string + +if TYPE_CHECKING: + from beets.dbcore import Results + + +class Library(dbcore.Database): + """A database of music containing songs and albums.""" + + _models = (Item, Album) + + def __init__( + self, + path="library.blb", + directory: str | None = None, + path_formats=((PF_KEY_DEFAULT, "$artist/$album/$track $title"),), + replacements=None, + ): + timeout = beets.config["timeout"].as_number() + super().__init__(path, timeout=timeout) + + self.directory = normpath(directory or platformdirs.user_music_path()) + + self.path_formats = path_formats + self.replacements = replacements + + # Used for template substitution performance. + self._memotable: dict[tuple[str, ...], str] = {} + + # Adding objects to the database. + + def add(self, obj): + """Add the :class:`Item` or :class:`Album` object to the library + database. 
+ + Return the object's new id. + """ + obj.add(self) + self._memotable = {} + return obj.id + + def add_album(self, items): + """Create a new album consisting of a list of items. + + The items are added to the database if they don't yet have an + ID. Return a new :class:`Album` object. The list items must not + be empty. + """ + if not items: + raise ValueError("need at least one item") + + # Create the album structure using metadata from the first item. + values = {key: items[0][key] for key in Album.item_keys} + album = Album(self, **values) + + # Add the album structure and set the items' album_id fields. + # Store or add the items. + with self.transaction(): + album.add(self) + for item in items: + item.album_id = album.id + if item.id is None: + item.add(self) + else: + item.store() + + return album + + # Querying. + + def _fetch(self, model_cls, query, sort=None): + """Parse a query and fetch. + + If an order specification is present in the query string + the `sort` argument is ignored. + """ + # Parse the query, if necessary. + try: + parsed_sort = None + if isinstance(query, str): + query, parsed_sort = parse_query_string(query, model_cls) + elif isinstance(query, (list, tuple)): + query, parsed_sort = parse_query_parts(query, model_cls) + except dbcore.query.InvalidQueryArgumentValueError as exc: + raise dbcore.InvalidQueryError(query, exc) + + # Any non-null sort specified by the parsed query overrides the + # provided sort. + if parsed_sort and not isinstance(parsed_sort, dbcore.query.NullSort): + sort = parsed_sort + + return super()._fetch(model_cls, query, sort) + + @staticmethod + def get_default_album_sort(): + """Get a :class:`Sort` object for albums from the config option.""" + return dbcore.sort_from_strings( + Album, beets.config["sort_album"].as_str_seq() + ) + + @staticmethod + def get_default_item_sort(): + """Get a :class:`Sort` object for items from the config option.""" + return dbcore.sort_from_strings( + Item, beets.config["sort_item"].as_str_seq() + ) + + def albums(self, query=None, sort=None) -> Results[Album]: + """Get :class:`Album` objects matching the query.""" + return self._fetch(Album, query, sort or self.get_default_album_sort()) + + def items(self, query=None, sort=None) -> Results[Item]: + """Get :class:`Item` objects matching the query.""" + return self._fetch(Item, query, sort or self.get_default_item_sort()) + + # Convenience accessors. + + def get_item(self, id): + """Fetch a :class:`Item` by its ID. + + Return `None` if no match is found. + """ + return self._get(Item, id) + + def get_album(self, item_or_id): + """Given an album ID or an item associated with an album, return + a :class:`Album` object for the album. + + If no such album exists, return `None`. + """ + if isinstance(item_or_id, int): + album_id = item_or_id + else: + album_id = item_or_id.album_id + if album_id is None: + return None + return self._get(Album, album_id) diff --git a/beets/library.py b/beets/library/models.py similarity index 86% rename from beets/library.py rename to beets/library/models.py index 9223b3209..efa0f9694 100644 --- a/beets/library.py +++ b/beets/library/models.py @@ -1,23 +1,6 @@ -# This file is part of beets. -# Copyright 2016, Adrian Sampson. 
-# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. - -"""The core data store and collection logic for beets.""" - from __future__ import annotations import os -import shlex import string import sys import time @@ -26,12 +9,11 @@ from functools import cached_property from pathlib import Path from typing import TYPE_CHECKING -import platformdirs from mediafile import MediaFile, UnreadableFileError import beets from beets import dbcore, logging, plugins, util -from beets.dbcore import Results, types +from beets.dbcore import types from beets.util import ( MoveOperation, bytestring_path, @@ -42,57 +24,16 @@ from beets.util import ( ) from beets.util.functemplate import Template, template +from .exceptions import FileOperationError, ReadError, WriteError +from .queries import PF_KEY_DEFAULT, parse_query_string + if TYPE_CHECKING: - from .dbcore.query import FieldQuery, FieldQueryType + from ..dbcore.query import FieldQuery, FieldQueryType + from .library import Library # noqa: F401 log = logging.getLogger("beets") -# Special path format key. -PF_KEY_DEFAULT = "default" - - -# Exceptions. -class FileOperationError(Exception): - """Indicate an error when interacting with a file on disk. - - Possibilities include an unsupported media type, a permissions - error, and an unhandled Mutagen exception. - """ - - def __init__(self, path, reason): - """Create an exception describing an operation on the file at - `path` with the underlying (chained) exception `reason`. - """ - super().__init__(path, reason) - self.path = path - self.reason = reason - - def __str__(self): - """Get a string representing the error. - - Describe both the underlying reason and the file path in question. - """ - return f"{util.displayable_path(self.path)}: {self.reason}" - - -class ReadError(FileOperationError): - """An error while reading a file (i.e. in `Item.read`).""" - - def __str__(self): - return "error reading " + str(super()) - - -class WriteError(FileOperationError): - """An error while writing a file (i.e. in `Item.write`).""" - - def __str__(self): - return "error writing " + str(super()) - - -# Item and Album model classes. - - class LibModel(dbcore.Model["Library"]): """Shared concrete functionality for Items and Albums.""" @@ -259,6 +200,400 @@ class FormattedItemMapping(dbcore.db.FormattedMapping): return len(self.all_keys) +class Album(LibModel): + """Provide access to information about albums stored in a + library. + + Reflects the library's "albums" table, including album art. 
+ """ + + _table = "albums" + _flex_table = "album_attributes" + _always_dirty = True + _fields = { + "id": types.PRIMARY_ID, + "artpath": types.NullPathType(), + "added": types.DATE, + "albumartist": types.STRING, + "albumartist_sort": types.STRING, + "albumartist_credit": types.STRING, + "albumartists": types.MULTI_VALUE_DSV, + "albumartists_sort": types.MULTI_VALUE_DSV, + "albumartists_credit": types.MULTI_VALUE_DSV, + "album": types.STRING, + "genre": types.STRING, + "style": types.STRING, + "discogs_albumid": types.INTEGER, + "discogs_artistid": types.INTEGER, + "discogs_labelid": types.INTEGER, + "year": types.PaddedInt(4), + "month": types.PaddedInt(2), + "day": types.PaddedInt(2), + "disctotal": types.PaddedInt(2), + "comp": types.BOOLEAN, + "mb_albumid": types.STRING, + "mb_albumartistid": types.STRING, + "mb_albumartistids": types.MULTI_VALUE_DSV, + "albumtype": types.STRING, + "albumtypes": types.SEMICOLON_SPACE_DSV, + "label": types.STRING, + "barcode": types.STRING, + "mb_releasegroupid": types.STRING, + "release_group_title": types.STRING, + "asin": types.STRING, + "catalognum": types.STRING, + "script": types.STRING, + "language": types.STRING, + "country": types.STRING, + "albumstatus": types.STRING, + "albumdisambig": types.STRING, + "releasegroupdisambig": types.STRING, + "rg_album_gain": types.NULL_FLOAT, + "rg_album_peak": types.NULL_FLOAT, + "r128_album_gain": types.NULL_FLOAT, + "original_year": types.PaddedInt(4), + "original_month": types.PaddedInt(2), + "original_day": types.PaddedInt(2), + } + + _search_fields = ("album", "albumartist", "genre") + + _types = { + "path": types.PathType(), + "data_source": types.STRING, + } + + _sorts = { + "albumartist": dbcore.query.SmartArtistSort, + "artist": dbcore.query.SmartArtistSort, + } + + # List of keys that are set on an album's items. + item_keys = [ + "added", + "albumartist", + "albumartists", + "albumartist_sort", + "albumartists_sort", + "albumartist_credit", + "albumartists_credit", + "album", + "genre", + "style", + "discogs_albumid", + "discogs_artistid", + "discogs_labelid", + "year", + "month", + "day", + "disctotal", + "comp", + "mb_albumid", + "mb_albumartistid", + "mb_albumartistids", + "albumtype", + "albumtypes", + "label", + "barcode", + "mb_releasegroupid", + "asin", + "catalognum", + "script", + "language", + "country", + "albumstatus", + "albumdisambig", + "releasegroupdisambig", + "release_group_title", + "rg_album_gain", + "rg_album_peak", + "r128_album_gain", + "original_year", + "original_month", + "original_day", + ] + + _format_config_key = "format_album" + + @cached_classproperty + def _relation(cls) -> type[Item]: + return Item + + @cached_classproperty + def relation_join(cls) -> str: + """Return FROM clause which joins on related album items. + + Use LEFT join to select all albums, including those that do not have + any items. + """ + return ( + f"LEFT JOIN {cls._relation._table} " + f"ON {cls._table}.id = {cls._relation._table}.album_id" + ) + + @classmethod + def _getters(cls): + # In addition to plugin-provided computed fields, also expose + # the album's directory as `path`. + getters = plugins.album_field_getters() + getters["path"] = Album.item_dir + getters["albumtotal"] = Album._albumtotal + return getters + + def items(self): + """Return an iterable over the items associated with this + album. + + This method conflicts with :meth:`LibModel.items`, which is + inherited from :meth:`beets.dbcore.Model.items`. 
+ Since :meth:`Album.items` predates these methods, and is + likely to be used by plugins, we keep this interface as-is. + """ + return self._db.items(dbcore.MatchQuery("album_id", self.id)) + + def remove(self, delete=False, with_items=True): + """Remove this album and all its associated items from the + library. + + If delete, then the items' files are also deleted from disk, + along with any album art. The directories containing the album are + also removed (recursively) if empty. + + Set with_items to False to avoid removing the album's items. + """ + super().remove() + + # Send a 'album_removed' signal to plugins + plugins.send("album_removed", album=self) + + # Delete art file. + if delete: + artpath = self.artpath + if artpath: + util.remove(artpath) + + # Remove (and possibly delete) the constituent items. + if with_items: + for item in self.items(): + item.remove(delete, False) + + def move_art(self, operation=MoveOperation.MOVE): + """Move, copy, link or hardlink (depending on `operation`) any + existing album art so that it remains in the same directory as + the items. + + `operation` should be an instance of `util.MoveOperation`. + """ + old_art = self.artpath + if not old_art: + return + + if not os.path.exists(syspath(old_art)): + log.error( + "removing reference to missing album art file {}", + util.displayable_path(old_art), + ) + self.artpath = None + return + + new_art = self.art_destination(old_art) + if new_art == old_art: + return + + new_art = util.unique_path(new_art) + log.debug( + "moving album art {0} to {1}", + util.displayable_path(old_art), + util.displayable_path(new_art), + ) + if operation == MoveOperation.MOVE: + util.move(old_art, new_art) + util.prune_dirs(os.path.dirname(old_art), self._db.directory) + elif operation == MoveOperation.COPY: + util.copy(old_art, new_art) + elif operation == MoveOperation.LINK: + util.link(old_art, new_art) + elif operation == MoveOperation.HARDLINK: + util.hardlink(old_art, new_art) + elif operation == MoveOperation.REFLINK: + util.reflink(old_art, new_art, fallback=False) + elif operation == MoveOperation.REFLINK_AUTO: + util.reflink(old_art, new_art, fallback=True) + else: + assert False, "unknown MoveOperation" + self.artpath = new_art + + def move(self, operation=MoveOperation.MOVE, basedir=None, store=True): + """Move, copy, link or hardlink (depending on `operation`) + all items to their destination. Any album art moves along with them. + + `basedir` overrides the library base directory for the destination. + + `operation` should be an instance of `util.MoveOperation`. + + By default, the album is stored to the database, persisting any + modifications to its metadata. If `store` is `False` however, + the album is not stored automatically, and it will have to be manually + stored after invoking this method. + """ + basedir = basedir or self._db.directory + + # Ensure new metadata is available to items for destination + # computation. + if store: + self.store() + + # Move items. + items = list(self.items()) + for item in items: + item.move(operation, basedir=basedir, with_album=False, store=store) + + # Move art. + self.move_art(operation) + if store: + self.store() + + def item_dir(self): + """Return the directory containing the album's first item, + provided that such an item exists. 
+ """ + item = self.items().get() + if not item: + raise ValueError("empty album for album id %d" % self.id) + return os.path.dirname(item.path) + + def _albumtotal(self): + """Return the total number of tracks on all discs on the album.""" + if self.disctotal == 1 or not beets.config["per_disc_numbering"]: + return self.items()[0].tracktotal + + counted = [] + total = 0 + + for item in self.items(): + if item.disc in counted: + continue + + total += item.tracktotal + counted.append(item.disc) + + if len(counted) == self.disctotal: + break + + return total + + def art_destination(self, image, item_dir=None): + """Return a path to the destination for the album art image + for the album. + + `image` is the path of the image that will be + moved there (used for its extension). + + The path construction uses the existing path of the album's + items, so the album must contain at least one item or + item_dir must be provided. + """ + image = bytestring_path(image) + item_dir = item_dir or self.item_dir() + + filename_tmpl = template(beets.config["art_filename"].as_str()) + subpath = self.evaluate_template(filename_tmpl, True) + if beets.config["asciify_paths"]: + subpath = util.asciify_path( + subpath, beets.config["path_sep_replace"].as_str() + ) + subpath = util.sanitize_path( + subpath, replacements=self._db.replacements + ) + subpath = bytestring_path(subpath) + + _, ext = os.path.splitext(image) + dest = os.path.join(item_dir, subpath + ext) + + return bytestring_path(dest) + + def set_art(self, path, copy=True): + """Set the album's cover art to the image at the given path. + + The image is copied (or moved) into place, replacing any + existing art. + + Send an 'art_set' event with `self` as the sole argument. + """ + path = bytestring_path(path) + oldart = self.artpath + artdest = self.art_destination(path) + + if oldart and samefile(path, oldart): + # Art already set. + return + elif samefile(path, artdest): + # Art already in place. + self.artpath = path + return + + # Normal operation. + if oldart == artdest: + util.remove(oldart) + artdest = util.unique_path(artdest) + if copy: + util.copy(path, artdest) + else: + util.move(path, artdest) + self.artpath = artdest + + plugins.send("art_set", album=self) + + def store(self, fields=None, inherit=True): + """Update the database with the album information. + + `fields` represents the fields to be stored. If not specified, + all fields will be. + + The album's tracks are also updated when the `inherit` flag is enabled. + This applies to fixed attributes as well as flexible ones. The `id` + attribute of the album will never be inherited. + """ + # Get modified track fields. + track_updates = {} + track_deletes = set() + for key in self._dirty: + if inherit: + if key in self.item_keys: # is a fixed attribute + track_updates[key] = self[key] + elif key not in self: # is a fixed or a flexible attribute + track_deletes.add(key) + elif key != "id": # is a flexible attribute + track_updates[key] = self[key] + + with self._db.transaction(): + super().store(fields) + if track_updates: + for item in self.items(): + for key, value in track_updates.items(): + item[key] = value + item.store() + if track_deletes: + for item in self.items(): + for key in track_deletes: + if key in item: + del item[key] + item.store() + + def try_sync(self, write, move, inherit=True): + """Synchronize the album and its items with the database. + Optionally, also write any new tags into the files and update + their paths. 
+ + `write` indicates whether to write tags to the item files, and + `move` controls whether files (both audio and album art) are + moved. + """ + self.store(inherit=inherit) + for item in self.items(): + item.try_sync(write, move) + + class Item(LibModel): """Represent a song or track.""" @@ -898,589 +1233,6 @@ class Item(LibModel): return normpath(os.path.join(basedir, lib_path_bytes)) -class Album(LibModel): - """Provide access to information about albums stored in a - library. - - Reflects the library's "albums" table, including album art. - """ - - _table = "albums" - _flex_table = "album_attributes" - _always_dirty = True - _fields = { - "id": types.PRIMARY_ID, - "artpath": types.NullPathType(), - "added": types.DATE, - "albumartist": types.STRING, - "albumartist_sort": types.STRING, - "albumartist_credit": types.STRING, - "albumartists": types.MULTI_VALUE_DSV, - "albumartists_sort": types.MULTI_VALUE_DSV, - "albumartists_credit": types.MULTI_VALUE_DSV, - "album": types.STRING, - "genre": types.STRING, - "style": types.STRING, - "discogs_albumid": types.INTEGER, - "discogs_artistid": types.INTEGER, - "discogs_labelid": types.INTEGER, - "year": types.PaddedInt(4), - "month": types.PaddedInt(2), - "day": types.PaddedInt(2), - "disctotal": types.PaddedInt(2), - "comp": types.BOOLEAN, - "mb_albumid": types.STRING, - "mb_albumartistid": types.STRING, - "mb_albumartistids": types.MULTI_VALUE_DSV, - "albumtype": types.STRING, - "albumtypes": types.SEMICOLON_SPACE_DSV, - "label": types.STRING, - "barcode": types.STRING, - "mb_releasegroupid": types.STRING, - "release_group_title": types.STRING, - "asin": types.STRING, - "catalognum": types.STRING, - "script": types.STRING, - "language": types.STRING, - "country": types.STRING, - "albumstatus": types.STRING, - "albumdisambig": types.STRING, - "releasegroupdisambig": types.STRING, - "rg_album_gain": types.NULL_FLOAT, - "rg_album_peak": types.NULL_FLOAT, - "r128_album_gain": types.NULL_FLOAT, - "original_year": types.PaddedInt(4), - "original_month": types.PaddedInt(2), - "original_day": types.PaddedInt(2), - } - - _search_fields = ("album", "albumartist", "genre") - - _types = { - "path": types.PathType(), - "data_source": types.STRING, - } - - _sorts = { - "albumartist": dbcore.query.SmartArtistSort, - "artist": dbcore.query.SmartArtistSort, - } - - # List of keys that are set on an album's items. - item_keys = [ - "added", - "albumartist", - "albumartists", - "albumartist_sort", - "albumartists_sort", - "albumartist_credit", - "albumartists_credit", - "album", - "genre", - "style", - "discogs_albumid", - "discogs_artistid", - "discogs_labelid", - "year", - "month", - "day", - "disctotal", - "comp", - "mb_albumid", - "mb_albumartistid", - "mb_albumartistids", - "albumtype", - "albumtypes", - "label", - "barcode", - "mb_releasegroupid", - "asin", - "catalognum", - "script", - "language", - "country", - "albumstatus", - "albumdisambig", - "releasegroupdisambig", - "release_group_title", - "rg_album_gain", - "rg_album_peak", - "r128_album_gain", - "original_year", - "original_month", - "original_day", - ] - - _format_config_key = "format_album" - - @cached_classproperty - def _relation(cls) -> type[Item]: - return Item - - @cached_classproperty - def relation_join(cls) -> str: - """Return FROM clause which joins on related album items. - - Use LEFT join to select all albums, including those that do not have - any items. 
- """ - return ( - f"LEFT JOIN {cls._relation._table} " - f"ON {cls._table}.id = {cls._relation._table}.album_id" - ) - - @classmethod - def _getters(cls): - # In addition to plugin-provided computed fields, also expose - # the album's directory as `path`. - getters = plugins.album_field_getters() - getters["path"] = Album.item_dir - getters["albumtotal"] = Album._albumtotal - return getters - - def items(self): - """Return an iterable over the items associated with this - album. - - This method conflicts with :meth:`LibModel.items`, which is - inherited from :meth:`beets.dbcore.Model.items`. - Since :meth:`Album.items` predates these methods, and is - likely to be used by plugins, we keep this interface as-is. - """ - return self._db.items(dbcore.MatchQuery("album_id", self.id)) - - def remove(self, delete=False, with_items=True): - """Remove this album and all its associated items from the - library. - - If delete, then the items' files are also deleted from disk, - along with any album art. The directories containing the album are - also removed (recursively) if empty. - - Set with_items to False to avoid removing the album's items. - """ - super().remove() - - # Send a 'album_removed' signal to plugins - plugins.send("album_removed", album=self) - - # Delete art file. - if delete: - artpath = self.artpath - if artpath: - util.remove(artpath) - - # Remove (and possibly delete) the constituent items. - if with_items: - for item in self.items(): - item.remove(delete, False) - - def move_art(self, operation=MoveOperation.MOVE): - """Move, copy, link or hardlink (depending on `operation`) any - existing album art so that it remains in the same directory as - the items. - - `operation` should be an instance of `util.MoveOperation`. - """ - old_art = self.artpath - if not old_art: - return - - if not os.path.exists(syspath(old_art)): - log.error( - "removing reference to missing album art file {}", - util.displayable_path(old_art), - ) - self.artpath = None - return - - new_art = self.art_destination(old_art) - if new_art == old_art: - return - - new_art = util.unique_path(new_art) - log.debug( - "moving album art {0} to {1}", - util.displayable_path(old_art), - util.displayable_path(new_art), - ) - if operation == MoveOperation.MOVE: - util.move(old_art, new_art) - util.prune_dirs(os.path.dirname(old_art), self._db.directory) - elif operation == MoveOperation.COPY: - util.copy(old_art, new_art) - elif operation == MoveOperation.LINK: - util.link(old_art, new_art) - elif operation == MoveOperation.HARDLINK: - util.hardlink(old_art, new_art) - elif operation == MoveOperation.REFLINK: - util.reflink(old_art, new_art, fallback=False) - elif operation == MoveOperation.REFLINK_AUTO: - util.reflink(old_art, new_art, fallback=True) - else: - assert False, "unknown MoveOperation" - self.artpath = new_art - - def move(self, operation=MoveOperation.MOVE, basedir=None, store=True): - """Move, copy, link or hardlink (depending on `operation`) - all items to their destination. Any album art moves along with them. - - `basedir` overrides the library base directory for the destination. - - `operation` should be an instance of `util.MoveOperation`. - - By default, the album is stored to the database, persisting any - modifications to its metadata. If `store` is `False` however, - the album is not stored automatically, and it will have to be manually - stored after invoking this method. 
- """ - basedir = basedir or self._db.directory - - # Ensure new metadata is available to items for destination - # computation. - if store: - self.store() - - # Move items. - items = list(self.items()) - for item in items: - item.move(operation, basedir=basedir, with_album=False, store=store) - - # Move art. - self.move_art(operation) - if store: - self.store() - - def item_dir(self): - """Return the directory containing the album's first item, - provided that such an item exists. - """ - item = self.items().get() - if not item: - raise ValueError("empty album for album id %d" % self.id) - return os.path.dirname(item.path) - - def _albumtotal(self): - """Return the total number of tracks on all discs on the album.""" - if self.disctotal == 1 or not beets.config["per_disc_numbering"]: - return self.items()[0].tracktotal - - counted = [] - total = 0 - - for item in self.items(): - if item.disc in counted: - continue - - total += item.tracktotal - counted.append(item.disc) - - if len(counted) == self.disctotal: - break - - return total - - def art_destination(self, image, item_dir=None): - """Return a path to the destination for the album art image - for the album. - - `image` is the path of the image that will be - moved there (used for its extension). - - The path construction uses the existing path of the album's - items, so the album must contain at least one item or - item_dir must be provided. - """ - image = bytestring_path(image) - item_dir = item_dir or self.item_dir() - - filename_tmpl = template(beets.config["art_filename"].as_str()) - subpath = self.evaluate_template(filename_tmpl, True) - if beets.config["asciify_paths"]: - subpath = util.asciify_path( - subpath, beets.config["path_sep_replace"].as_str() - ) - subpath = util.sanitize_path( - subpath, replacements=self._db.replacements - ) - subpath = bytestring_path(subpath) - - _, ext = os.path.splitext(image) - dest = os.path.join(item_dir, subpath + ext) - - return bytestring_path(dest) - - def set_art(self, path, copy=True): - """Set the album's cover art to the image at the given path. - - The image is copied (or moved) into place, replacing any - existing art. - - Send an 'art_set' event with `self` as the sole argument. - """ - path = bytestring_path(path) - oldart = self.artpath - artdest = self.art_destination(path) - - if oldart and samefile(path, oldart): - # Art already set. - return - elif samefile(path, artdest): - # Art already in place. - self.artpath = path - return - - # Normal operation. - if oldart == artdest: - util.remove(oldart) - artdest = util.unique_path(artdest) - if copy: - util.copy(path, artdest) - else: - util.move(path, artdest) - self.artpath = artdest - - plugins.send("art_set", album=self) - - def store(self, fields=None, inherit=True): - """Update the database with the album information. - - `fields` represents the fields to be stored. If not specified, - all fields will be. - - The album's tracks are also updated when the `inherit` flag is enabled. - This applies to fixed attributes as well as flexible ones. The `id` - attribute of the album will never be inherited. - """ - # Get modified track fields. 
- track_updates = {} - track_deletes = set() - for key in self._dirty: - if inherit: - if key in self.item_keys: # is a fixed attribute - track_updates[key] = self[key] - elif key not in self: # is a fixed or a flexible attribute - track_deletes.add(key) - elif key != "id": # is a flexible attribute - track_updates[key] = self[key] - - with self._db.transaction(): - super().store(fields) - if track_updates: - for item in self.items(): - for key, value in track_updates.items(): - item[key] = value - item.store() - if track_deletes: - for item in self.items(): - for key in track_deletes: - if key in item: - del item[key] - item.store() - - def try_sync(self, write, move, inherit=True): - """Synchronize the album and its items with the database. - Optionally, also write any new tags into the files and update - their paths. - - `write` indicates whether to write tags to the item files, and - `move` controls whether files (both audio and album art) are - moved. - """ - self.store(inherit=inherit) - for item in self.items(): - item.try_sync(write, move) - - -# Query construction helpers. - - -def parse_query_parts(parts, model_cls): - """Given a beets query string as a list of components, return the - `Query` and `Sort` they represent. - - Like `dbcore.parse_sorted_query`, with beets query prefixes and - ensuring that implicit path queries are made explicit with 'path::' - """ - # Get query types and their prefix characters. - prefixes = { - ":": dbcore.query.RegexpQuery, - "=~": dbcore.query.StringQuery, - "=": dbcore.query.MatchQuery, - } - prefixes.update(plugins.queries()) - - # Special-case path-like queries, which are non-field queries - # containing path separators (/). - parts = [ - f"path:{s}" if dbcore.query.PathQuery.is_path_query(s) else s - for s in parts - ] - - case_insensitive = beets.config["sort_case_insensitive"].get(bool) - - query, sort = dbcore.parse_sorted_query( - model_cls, parts, prefixes, case_insensitive - ) - log.debug("Parsed query: {!r}", query) - log.debug("Parsed sort: {!r}", sort) - return query, sort - - -def parse_query_string(s, model_cls): - """Given a beets query string, return the `Query` and `Sort` they - represent. - - The string is split into components using shell-like syntax. - """ - message = f"Query is not unicode: {s!r}" - assert isinstance(s, str), message - try: - parts = shlex.split(s) - except ValueError as exc: - raise dbcore.InvalidQueryError(s, exc) - return parse_query_parts(parts, model_cls) - - -# The Library: interface to the database. - - -class Library(dbcore.Database): - """A database of music containing songs and albums.""" - - _models = (Item, Album) - - def __init__( - self, - path="library.blb", - directory: str | None = None, - path_formats=((PF_KEY_DEFAULT, "$artist/$album/$track $title"),), - replacements=None, - ): - timeout = beets.config["timeout"].as_number() - super().__init__(path, timeout=timeout) - - self.directory = normpath(directory or platformdirs.user_music_path()) - - self.path_formats = path_formats - self.replacements = replacements - - # Used for template substitution performance. - self._memotable: dict[tuple[str, ...], str] = {} - - # Adding objects to the database. - - def add(self, obj): - """Add the :class:`Item` or :class:`Album` object to the library - database. - - Return the object's new id. - """ - obj.add(self) - self._memotable = {} - return obj.id - - def add_album(self, items): - """Create a new album consisting of a list of items. 
- - The items are added to the database if they don't yet have an - ID. Return a new :class:`Album` object. The list items must not - be empty. - """ - if not items: - raise ValueError("need at least one item") - - # Create the album structure using metadata from the first item. - values = {key: items[0][key] for key in Album.item_keys} - album = Album(self, **values) - - # Add the album structure and set the items' album_id fields. - # Store or add the items. - with self.transaction(): - album.add(self) - for item in items: - item.album_id = album.id - if item.id is None: - item.add(self) - else: - item.store() - - return album - - # Querying. - - def _fetch(self, model_cls, query, sort=None): - """Parse a query and fetch. - - If an order specification is present in the query string - the `sort` argument is ignored. - """ - # Parse the query, if necessary. - try: - parsed_sort = None - if isinstance(query, str): - query, parsed_sort = parse_query_string(query, model_cls) - elif isinstance(query, (list, tuple)): - query, parsed_sort = parse_query_parts(query, model_cls) - except dbcore.query.InvalidQueryArgumentValueError as exc: - raise dbcore.InvalidQueryError(query, exc) - - # Any non-null sort specified by the parsed query overrides the - # provided sort. - if parsed_sort and not isinstance(parsed_sort, dbcore.query.NullSort): - sort = parsed_sort - - return super()._fetch(model_cls, query, sort) - - @staticmethod - def get_default_album_sort(): - """Get a :class:`Sort` object for albums from the config option.""" - return dbcore.sort_from_strings( - Album, beets.config["sort_album"].as_str_seq() - ) - - @staticmethod - def get_default_item_sort(): - """Get a :class:`Sort` object for items from the config option.""" - return dbcore.sort_from_strings( - Item, beets.config["sort_item"].as_str_seq() - ) - - def albums(self, query=None, sort=None) -> Results[Album]: - """Get :class:`Album` objects matching the query.""" - return self._fetch(Album, query, sort or self.get_default_album_sort()) - - def items(self, query=None, sort=None) -> Results[Item]: - """Get :class:`Item` objects matching the query.""" - return self._fetch(Item, query, sort or self.get_default_item_sort()) - - # Convenience accessors. - - def get_item(self, id): - """Fetch a :class:`Item` by its ID. - - Return `None` if no match is found. - """ - return self._get(Item, id) - - def get_album(self, item_or_id): - """Given an album ID or an item associated with an album, return - a :class:`Album` object for the album. - - If no such album exists, return `None`. - """ - if isinstance(item_or_id, int): - album_id = item_or_id - else: - album_id = item_or_id.album_id - if album_id is None: - return None - return self._get(Album, album_id) - - -# Default path template resources. - - def _int_arg(s): """Convert a string argument to an integer for use in a template function. diff --git a/beets/library/queries.py b/beets/library/queries.py new file mode 100644 index 000000000..7c9d688cd --- /dev/null +++ b/beets/library/queries.py @@ -0,0 +1,61 @@ +from __future__ import annotations + +import shlex + +import beets +from beets import dbcore, logging, plugins + +log = logging.getLogger("beets") + + +# Special path format key. +PF_KEY_DEFAULT = "default" + +# Query construction helpers. + + +def parse_query_parts(parts, model_cls): + """Given a beets query string as a list of components, return the + `Query` and `Sort` they represent. 
+ + Like `dbcore.parse_sorted_query`, with beets query prefixes and + ensuring that implicit path queries are made explicit with 'path::' + """ + # Get query types and their prefix characters. + prefixes = { + ":": dbcore.query.RegexpQuery, + "=~": dbcore.query.StringQuery, + "=": dbcore.query.MatchQuery, + } + prefixes.update(plugins.queries()) + + # Special-case path-like queries, which are non-field queries + # containing path separators (/). + parts = [ + f"path:{s}" if dbcore.query.PathQuery.is_path_query(s) else s + for s in parts + ] + + case_insensitive = beets.config["sort_case_insensitive"].get(bool) + + query, sort = dbcore.parse_sorted_query( + model_cls, parts, prefixes, case_insensitive + ) + log.debug("Parsed query: {!r}", query) + log.debug("Parsed sort: {!r}", sort) + return query, sort + + +def parse_query_string(s, model_cls): + """Given a beets query string, return the `Query` and `Sort` they + represent. + + The string is split into components using shell-like syntax. + """ + message = f"Query is not unicode: {s!r}" + assert isinstance(s, str), message + try: + parts = shlex.split(s) + except ValueError as exc: + raise dbcore.InvalidQueryError(s, exc) + return parse_query_parts(parts, model_cls) From 9cd1a5078587ac8f0db765e89221819a3bfbe089 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 15:47:16 +0200 Subject: [PATCH 31/95] Added git blame ignore --- .git-blame-ignore-revs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 5441940a4..975c884d6 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -51,3 +51,5 @@ c490ac5810b70f3cf5fd8649669838e8fdb19f4d 9147577b2b19f43ca827e9650261a86fb0450cef # Copy paste query, types from library to dbcore 1a045c91668c771686f4c871c84f1680af2e944b +# Library restructure (split library.py into multiple modules) +0ad4e19d4f870db757373f44d12ff3be2441363a From e6016c125b3d1975ef9453d9a3f50f965296a380 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Tue, 1 Jul 2025 12:32:38 +0200 Subject: [PATCH 32/95] Added changelog entry. --- docs/changelog.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index ec2fdd9e4..09f2fcbb0 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -70,6 +70,8 @@ Other changes: Autogenerated API references are now located in the `docs/api` subdirectory. * :doc:`/plugins/substitute`: Fix rST formatting for example cases so that each case is shown on separate lines. +* Refactored library.py file by splitting it into multiple modules within the + beets/library directory. 
2.3.1 (May 14, 2025) -------------------- From 7cada1c9f82f1eff3aefab07b52481b54ac29be7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 17 Jun 2025 12:16:58 +0100 Subject: [PATCH 33/95] Remove no-op decargs --- beets/ui/__init__.py | 9 +-------- beets/ui/commands.py | 17 ++++++++--------- beetsplug/absubmit.py | 2 +- beetsplug/acousticbrainz.py | 2 +- beetsplug/badfiles.py | 2 +- beetsplug/bareasc.py | 4 ++-- beetsplug/bench.py | 2 +- beetsplug/bpm.py | 3 +-- beetsplug/bpsync.py | 2 +- beetsplug/chroma.py | 4 ++-- beetsplug/convert.py | 10 ++++------ beetsplug/deezer.py | 2 +- beetsplug/duplicates.py | 6 +++--- beetsplug/edit.py | 2 +- beetsplug/embedart.py | 16 ++++++++-------- beetsplug/export.py | 2 +- beetsplug/fetchart.py | 4 +--- beetsplug/ftintitle.py | 2 +- beetsplug/info.py | 4 ++-- beetsplug/ipfs.py | 10 +++++----- beetsplug/keyfinder.py | 2 +- beetsplug/lastgenre/__init__.py | 4 ++-- beetsplug/limit.py | 4 ++-- beetsplug/mbsubmit.py | 2 +- beetsplug/mbsync.py | 2 +- beetsplug/metasync/__init__.py | 2 +- beetsplug/missing.py | 4 ++-- beetsplug/parentwork.py | 2 +- beetsplug/play.py | 4 ++-- beetsplug/random.py | 4 ++-- beetsplug/replaygain.py | 4 ++-- beetsplug/scrub.py | 2 +- beetsplug/smartplaylist.py | 2 +- beetsplug/spotify.py | 4 ++-- beetsplug/thumbnails.py | 4 ++-- beetsplug/web/__init__.py | 2 +- beetsplug/zero.py | 6 +++--- test/plugins/test_thumbnails.py | 4 +--- 38 files changed, 74 insertions(+), 89 deletions(-) diff --git a/beets/ui/__init__.py b/beets/ui/__init__.py index b7033e41b..109e39f4d 100644 --- a/beets/ui/__init__.py +++ b/beets/ui/__init__.py @@ -104,13 +104,6 @@ def _stream_encoding(stream, default="utf-8"): return stream.encoding or default -def decargs(arglist): - """Given a list of command-line argument bytestrings, attempts to - decode them to Unicode strings when running under Python 2. 
- """ - return arglist - - def print_(*strings, **kwargs): """Like print, but rather than raising an error when a character is not in the terminal's encoding's character set, just silently @@ -1311,7 +1304,7 @@ class CommonOptionsParser(optparse.OptionParser): if fmt: value = fmt elif value: - (value,) = decargs([value]) + (value,) = [value] else: value = "" diff --git a/beets/ui/commands.py b/beets/ui/commands.py index 3117262f1..7153f30be 100755 --- a/beets/ui/commands.py +++ b/beets/ui/commands.py @@ -28,7 +28,6 @@ import beets from beets import autotag, config, importer, library, logging, plugins, ui, util from beets.autotag import Recommendation, hooks from beets.ui import ( - decargs, input_, print_, print_column_layout, @@ -1343,7 +1342,7 @@ def import_func(lib, opts, args): config["import"]["move"] = False if opts.library: - query = decargs(args) + query = args paths = [] else: query = None @@ -1596,7 +1595,7 @@ def list_items(lib, query, album, fmt=""): def list_func(lib, opts, args): - list_items(lib, decargs(args), opts.album) + list_items(lib, args, opts.album) list_cmd = ui.Subcommand("list", help="query the library", aliases=("ls",)) @@ -1739,7 +1738,7 @@ def update_func(lib, opts, args): return update_items( lib, - decargs(args), + args, opts.album, ui.should_move(opts.move), opts.pretend, @@ -1861,7 +1860,7 @@ def remove_items(lib, query, album, delete, force): def remove_func(lib, opts, args): - remove_items(lib, decargs(args), opts.album, opts.delete, opts.force) + remove_items(lib, args, opts.album, opts.delete, opts.force) remove_cmd = ui.Subcommand( @@ -1931,7 +1930,7 @@ Album artists: {}""".format( def stats_func(lib, opts, args): - show_stats(lib, decargs(args), opts.exact) + show_stats(lib, args, opts.exact) stats_cmd = ui.Subcommand( @@ -2059,7 +2058,7 @@ def modify_parse_args(args): def modify_func(lib, opts, args): - query, mods, dels = modify_parse_args(decargs(args)) + query, mods, dels = modify_parse_args(args) if not mods and not dels: raise ui.UserError("no modifications specified") modify_items( @@ -2217,7 +2216,7 @@ def move_func(lib, opts, args): move_items( lib, dest, - decargs(args), + args, opts.copy, opts.album, opts.pretend, @@ -2298,7 +2297,7 @@ def write_items(lib, query, pretend, force): def write_func(lib, opts, args): - write_items(lib, decargs(args), opts.pretend, opts.force) + write_items(lib, args, opts.pretend, opts.force) write_cmd = ui.Subcommand("write", help="write tag information to files") diff --git a/beetsplug/absubmit.py b/beetsplug/absubmit.py index 3c48f8897..3d3227ed2 100644 --- a/beetsplug/absubmit.py +++ b/beetsplug/absubmit.py @@ -137,7 +137,7 @@ only files which would be processed", ) else: # Get items from arguments - items = lib.items(ui.decargs(args)) + items = lib.items(args) self.opts = opts util.par_map(self.analyze_submit, items) diff --git a/beetsplug/acousticbrainz.py b/beetsplug/acousticbrainz.py index 714751ac9..56ac0f6c5 100644 --- a/beetsplug/acousticbrainz.py +++ b/beetsplug/acousticbrainz.py @@ -116,7 +116,7 @@ class AcousticPlugin(plugins.BeetsPlugin): ) def func(lib, opts, args): - items = lib.items(ui.decargs(args)) + items = lib.items(args) self._fetch_info( items, ui.should_write(), diff --git a/beetsplug/badfiles.py b/beetsplug/badfiles.py index 0903ebabf..0511d960d 100644 --- a/beetsplug/badfiles.py +++ b/beetsplug/badfiles.py @@ -204,7 +204,7 @@ class BadFiles(BeetsPlugin): def command(self, lib, opts, args): # Get items from arguments - items = lib.items(ui.decargs(args)) + items = lib.items(args) 
self.verbose = opts.verbose def check_and_print(item): diff --git a/beetsplug/bareasc.py b/beetsplug/bareasc.py index 3a52c41dd..ed1057b20 100644 --- a/beetsplug/bareasc.py +++ b/beetsplug/bareasc.py @@ -23,7 +23,7 @@ from unidecode import unidecode from beets import ui from beets.dbcore.query import StringFieldQuery from beets.plugins import BeetsPlugin -from beets.ui import decargs, print_ +from beets.ui import print_ class BareascQuery(StringFieldQuery[str]): @@ -83,7 +83,7 @@ class BareascPlugin(BeetsPlugin): def unidecode_list(self, lib, opts, args): """Emulate normal 'list' command but with unidecode output.""" - query = decargs(args) + query = args album = opts.album # Copied from commands.py - list_items if album: diff --git a/beetsplug/bench.py b/beetsplug/bench.py index 62d512ce7..cf72527e8 100644 --- a/beetsplug/bench.py +++ b/beetsplug/bench.py @@ -125,7 +125,7 @@ class BenchmarkPlugin(BeetsPlugin): "-i", "--id", default=None, help="album ID to match against" ) match_bench_cmd.func = lambda lib, opts, args: match_benchmark( - lib, opts.profile, ui.decargs(args), opts.id + lib, opts.profile, args, opts.id ) return [aunique_bench_cmd, match_bench_cmd] diff --git a/beetsplug/bpm.py b/beetsplug/bpm.py index 946769cdc..145986a95 100644 --- a/beetsplug/bpm.py +++ b/beetsplug/bpm.py @@ -63,9 +63,8 @@ class BPMPlugin(BeetsPlugin): return [cmd] def command(self, lib, opts, args): - items = lib.items(ui.decargs(args)) write = ui.should_write() - self.get_bpm(items, write) + self.get_bpm(lib.items(args), write) def get_bpm(self, items, write=False): overwrite = self.config["overwrite"].get(bool) diff --git a/beetsplug/bpsync.py b/beetsplug/bpsync.py index 05be94c99..656f30425 100644 --- a/beetsplug/bpsync.py +++ b/beetsplug/bpsync.py @@ -65,7 +65,7 @@ class BPSyncPlugin(BeetsPlugin): move = ui.should_move(opts.move) pretend = opts.pretend write = ui.should_write(opts.write) - query = ui.decargs(args) + query = args self.singletons(lib, query, move, pretend, write) self.albums(lib, query, move, pretend, write) diff --git a/beetsplug/chroma.py b/beetsplug/chroma.py index 5c718154b..de3ac525a 100644 --- a/beetsplug/chroma.py +++ b/beetsplug/chroma.py @@ -233,7 +233,7 @@ class AcoustidPlugin(plugins.BeetsPlugin): apikey = config["acoustid"]["apikey"].as_str() except confuse.NotFoundError: raise ui.UserError("no Acoustid user API key provided") - submit_items(self._log, apikey, lib.items(ui.decargs(args))) + submit_items(self._log, apikey, lib.items(args)) submit_cmd.func = submit_cmd_func @@ -242,7 +242,7 @@ class AcoustidPlugin(plugins.BeetsPlugin): ) def fingerprint_cmd_func(lib, opts, args): - for item in lib.items(ui.decargs(args)): + for item in lib.items(args): fingerprint_item(self._log, item, write=ui.should_write()) fingerprint_cmd.func = fingerprint_cmd_func diff --git a/beetsplug/convert.py b/beetsplug/convert.py index 7586c2a1b..c4df9ab57 100644 --- a/beetsplug/convert.py +++ b/beetsplug/convert.py @@ -301,7 +301,7 @@ class ConvertPlugin(BeetsPlugin): encode_cmd.append(os.fsdecode(args[i])) if pretend: - self._log.info("{0}", " ".join(ui.decargs(args))) + self._log.info("{0}", " ".join(args)) return try: @@ -323,9 +323,7 @@ class ConvertPlugin(BeetsPlugin): raise except OSError as exc: raise ui.UserError( - "convert: couldn't invoke '{}': {}".format( - " ".join(ui.decargs(args)), exc - ) + "convert: couldn't invoke '{}': {}".format(" ".join(args), exc) ) if not quiet and not pretend: @@ -579,13 +577,13 @@ class ConvertPlugin(BeetsPlugin): ) = self._get_opts_and_config(opts) if 
opts.album: - albums = lib.albums(ui.decargs(args)) + albums = lib.albums(args) items = [i for a in albums for i in a.items()] if not pretend: for a in albums: ui.print_(format(a, "")) else: - items = list(lib.items(ui.decargs(args))) + items = list(lib.items(args)) if not pretend: for i in items: ui.print_(format(i, "")) diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py index c9703c4dd..7e4896437 100644 --- a/beetsplug/deezer.py +++ b/beetsplug/deezer.py @@ -54,7 +54,7 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): ) def func(lib: Library, opts, args): - items = lib.items(ui.decargs(args)) + items = lib.items(args) self.deezerupdate(list(items), ui.should_write()) deezer_update_cmd.func = func diff --git a/beetsplug/duplicates.py b/beetsplug/duplicates.py index 5a2be0cd2..ea7abaaff 100644 --- a/beetsplug/duplicates.py +++ b/beetsplug/duplicates.py @@ -19,7 +19,7 @@ import shlex from beets.library import Album, Item from beets.plugins import BeetsPlugin -from beets.ui import Subcommand, UserError, decargs, print_ +from beets.ui import Subcommand, UserError, print_ from beets.util import ( MoveOperation, bytestring_path, @@ -163,11 +163,11 @@ class DuplicatesPlugin(BeetsPlugin): if album: if not keys: keys = ["mb_albumid"] - items = lib.albums(decargs(args)) + items = lib.albums(args) else: if not keys: keys = ["mb_trackid", "mb_albumid"] - items = lib.items(decargs(args)) + items = lib.items(args) # If there's nothing to do, return early. The code below assumes # `items` to be non-empty. diff --git a/beetsplug/edit.py b/beetsplug/edit.py index b92c48839..e0c6509c8 100644 --- a/beetsplug/edit.py +++ b/beetsplug/edit.py @@ -180,7 +180,7 @@ class EditPlugin(plugins.BeetsPlugin): def _edit_command(self, lib, opts, args): """The CLI command function for the `beet edit` command.""" # Get the objects to edit. - query = ui.decargs(args) + query = args items, albums = _do_query(lib, query, opts.album, False) objs = albums if opts.album else items if not objs: diff --git a/beetsplug/embedart.py b/beetsplug/embedart.py index 2a4e06a93..8df3c3c05 100644 --- a/beetsplug/embedart.py +++ b/beetsplug/embedart.py @@ -22,7 +22,7 @@ import requests from beets import art, config, ui from beets.plugins import BeetsPlugin -from beets.ui import decargs, print_ +from beets.ui import print_ from beets.util import bytestring_path, displayable_path, normpath, syspath from beets.util.artresizer import ArtResizer @@ -115,7 +115,7 @@ class EmbedCoverArtPlugin(BeetsPlugin): ) ) - items = lib.items(decargs(args)) + items = lib.items(args) # Confirm with user. if not opts.yes and not _confirm(items, not opts.file): @@ -151,7 +151,7 @@ class EmbedCoverArtPlugin(BeetsPlugin): except Exception as e: self._log.error("Unable to save image: {}".format(e)) return - items = lib.items(decargs(args)) + items = lib.items(args) # Confirm with user. if not opts.yes and not _confirm(items, not opts.url): os.remove(tempimg) @@ -169,7 +169,7 @@ class EmbedCoverArtPlugin(BeetsPlugin): ) os.remove(tempimg) else: - albums = lib.albums(decargs(args)) + albums = lib.albums(args) # Confirm with user. 
if not opts.yes and not _confirm(albums, not opts.file): return @@ -212,7 +212,7 @@ class EmbedCoverArtPlugin(BeetsPlugin): def extract_func(lib, opts, args): if opts.outpath: art.extract_first( - self._log, normpath(opts.outpath), lib.items(decargs(args)) + self._log, normpath(opts.outpath), lib.items(args) ) else: filename = bytestring_path( @@ -223,7 +223,7 @@ class EmbedCoverArtPlugin(BeetsPlugin): "Only specify a name rather than a path for -n" ) return - for album in lib.albums(decargs(args)): + for album in lib.albums(args): artpath = normpath(os.path.join(album.path, filename)) artpath = art.extract_first( self._log, artpath, album.items() @@ -244,11 +244,11 @@ class EmbedCoverArtPlugin(BeetsPlugin): ) def clear_func(lib, opts, args): - items = lib.items(decargs(args)) + items = lib.items(args) # Confirm with user. if not opts.yes and not _confirm(items, False): return - art.clear(self._log, lib, decargs(args)) + art.clear(self._log, lib, args) clear_cmd.func = clear_func diff --git a/beetsplug/export.py b/beetsplug/export.py index 9b8ad3580..05ca3f24a 100644 --- a/beetsplug/export.py +++ b/beetsplug/export.py @@ -144,7 +144,7 @@ class ExportPlugin(BeetsPlugin): items = [] for data_emitter in data_collector( lib, - ui.decargs(args), + args, album=opts.album, ): try: diff --git a/beetsplug/fetchart.py b/beetsplug/fetchart.py index b442633da..e1ec5aa09 100644 --- a/beetsplug/fetchart.py +++ b/beetsplug/fetchart.py @@ -1503,9 +1503,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin): ) def func(lib: Library, opts, args) -> None: - self.batch_fetch_art( - lib, lib.albums(ui.decargs(args)), opts.force, opts.quiet - ) + self.batch_fetch_art(lib, lib.albums(args), opts.force, opts.quiet) cmd.func = func return [cmd] diff --git a/beetsplug/ftintitle.py b/beetsplug/ftintitle.py index a85aa9719..150f230aa 100644 --- a/beetsplug/ftintitle.py +++ b/beetsplug/ftintitle.py @@ -118,7 +118,7 @@ class FtInTitlePlugin(plugins.BeetsPlugin): keep_in_artist_field = self.config["keep_in_artist"].get(bool) write = ui.should_write() - for item in lib.items(ui.decargs(args)): + for item in lib.items(args): if self.ft_in_title(item, drop_feat, keep_in_artist_field): item.store() if write: diff --git a/beetsplug/info.py b/beetsplug/info.py index d759d6066..c4d5aacbf 100644 --- a/beetsplug/info.py +++ b/beetsplug/info.py @@ -215,7 +215,7 @@ class InfoPlugin(BeetsPlugin): summary = {} for data_emitter in data_collector( lib, - ui.decargs(args), + args, album=opts.album, ): try: @@ -232,7 +232,7 @@ class InfoPlugin(BeetsPlugin): if opts.keys_only: print_data_keys(data, item) else: - fmt = ui.decargs([opts.format])[0] if opts.format else None + fmt = [opts.format][0] if opts.format else None print_data(data, item, fmt) first = False diff --git a/beetsplug/ipfs.py b/beetsplug/ipfs.py index 1c8c89aa9..3c6425c06 100644 --- a/beetsplug/ipfs.py +++ b/beetsplug/ipfs.py @@ -74,7 +74,7 @@ class IPFSPlugin(BeetsPlugin): def func(lib, opts, args): if opts.add: - for album in lib.albums(ui.decargs(args)): + for album in lib.albums(args): if len(album.items()) == 0: self._log.info( "{0} does not contain items, aborting", album @@ -84,19 +84,19 @@ class IPFSPlugin(BeetsPlugin): album.store() if opts.get: - self.ipfs_get(lib, ui.decargs(args)) + self.ipfs_get(lib, args) if opts.publish: self.ipfs_publish(lib) if opts._import: - self.ipfs_import(lib, ui.decargs(args)) + self.ipfs_import(lib, args) if opts._list: - self.ipfs_list(lib, ui.decargs(args)) + self.ipfs_list(lib, args) if opts.play: - self.ipfs_play(lib, 
opts, ui.decargs(args)) + self.ipfs_play(lib, opts, args) cmd.func = func return [cmd] diff --git a/beetsplug/keyfinder.py b/beetsplug/keyfinder.py index 87f0cc427..00b688d4f 100644 --- a/beetsplug/keyfinder.py +++ b/beetsplug/keyfinder.py @@ -43,7 +43,7 @@ class KeyFinderPlugin(BeetsPlugin): return [cmd] def command(self, lib, opts, args): - self.find_key(lib.items(ui.decargs(args)), write=ui.should_write()) + self.find_key(lib.items(args), write=ui.should_write()) def imported(self, session, task): self.find_key(task.imported_items()) diff --git a/beetsplug/lastgenre/__init__.py b/beetsplug/lastgenre/__init__.py index 30b44e187..b67f1fae2 100644 --- a/beetsplug/lastgenre/__init__.py +++ b/beetsplug/lastgenre/__init__.py @@ -521,7 +521,7 @@ class LastGenrePlugin(plugins.BeetsPlugin): if opts.album: # Fetch genres for whole albums - for album in lib.albums(ui.decargs(args)): + for album in lib.albums(args): album.genre, src = self._get_genre(album) self._log.info( 'genre for album "{0.album}" ({1}): {0.genre}', @@ -550,7 +550,7 @@ class LastGenrePlugin(plugins.BeetsPlugin): else: # Just query singletons, i.e. items that are not part of # an album - for item in lib.items(ui.decargs(args)): + for item in lib.items(args): item.genre, src = self._get_genre(item) item.store() self._log.info( diff --git a/beetsplug/limit.py b/beetsplug/limit.py index 0a13a78aa..2d5a30f24 100644 --- a/beetsplug/limit.py +++ b/beetsplug/limit.py @@ -25,7 +25,7 @@ from itertools import islice from beets.dbcore import FieldQuery from beets.plugins import BeetsPlugin -from beets.ui import Subcommand, decargs, print_ +from beets.ui import Subcommand, print_ def lslimit(lib, opts, args): @@ -36,7 +36,7 @@ def lslimit(lib, opts, args): if (opts.head or opts.tail or 0) < 0: raise ValueError("Limit value must be non-negative") - query = decargs(args) + query = args if opts.album: objs = lib.albums(query) else: diff --git a/beetsplug/mbsubmit.py b/beetsplug/mbsubmit.py index d215e616c..e23c0d610 100644 --- a/beetsplug/mbsubmit.py +++ b/beetsplug/mbsubmit.py @@ -86,7 +86,7 @@ class MBSubmitPlugin(BeetsPlugin): ) def func(lib, opts, args): - items = lib.items(ui.decargs(args)) + items = lib.items(args) self._mbsubmit(items) mbsubmit_cmd.func = func diff --git a/beetsplug/mbsync.py b/beetsplug/mbsync.py index 94870232c..36e8cbd47 100644 --- a/beetsplug/mbsync.py +++ b/beetsplug/mbsync.py @@ -63,7 +63,7 @@ class MBSyncPlugin(BeetsPlugin): move = ui.should_move(opts.move) pretend = opts.pretend write = ui.should_write(opts.write) - query = ui.decargs(args) + query = args self.singletons(lib, query, move, pretend, write) self.albums(lib, query, move, pretend, write) diff --git a/beetsplug/metasync/__init__.py b/beetsplug/metasync/__init__.py index 2466efe54..4c7aac1c0 100644 --- a/beetsplug/metasync/__init__.py +++ b/beetsplug/metasync/__init__.py @@ -97,7 +97,7 @@ class MetaSyncPlugin(BeetsPlugin): def func(self, lib, opts, args): """Command handler for the metasync function.""" pretend = opts.pretend - query = ui.decargs(args) + query = args sources = [] for source in opts.sources: diff --git a/beetsplug/missing.py b/beetsplug/missing.py index c4bbb83fd..8c328e647 100644 --- a/beetsplug/missing.py +++ b/beetsplug/missing.py @@ -25,7 +25,7 @@ from beets import config, plugins from beets.dbcore import types from beets.library import Album, Item, Library from beets.plugins import BeetsPlugin -from beets.ui import Subcommand, decargs, print_ +from beets.ui import Subcommand, print_ MB_ARTIST_QUERY = 
r"mb_albumartistid::^\w{8}-\w{4}-\w{4}-\w{4}-\w{12}$" @@ -135,7 +135,7 @@ class MissingPlugin(BeetsPlugin): albms = self.config["album"].get() helper = self._missing_albums if albms else self._missing_tracks - helper(lib, decargs(args)) + helper(lib, args) self._command.func = _miss return [self._command] diff --git a/beetsplug/parentwork.py b/beetsplug/parentwork.py index 463a455f5..ab2d39b2b 100644 --- a/beetsplug/parentwork.py +++ b/beetsplug/parentwork.py @@ -88,7 +88,7 @@ class ParentWorkPlugin(BeetsPlugin): force_parent = self.config["force"].get(bool) write = ui.should_write() - for item in lib.items(ui.decargs(args)): + for item in lib.items(args): changed = self.find_work(item, force_parent, verbose=True) if changed: item.store() diff --git a/beetsplug/play.py b/beetsplug/play.py index ddebd7d41..3e7ba0a9e 100644 --- a/beetsplug/play.py +++ b/beetsplug/play.py @@ -107,7 +107,7 @@ class PlayPlugin(BeetsPlugin): # Perform search by album and add folders rather than tracks to # playlist. if opts.album: - selection = lib.albums(ui.decargs(args)) + selection = lib.albums(args) paths = [] sort = lib.get_default_album_sort() @@ -120,7 +120,7 @@ class PlayPlugin(BeetsPlugin): # Perform item query and add tracks to playlist. else: - selection = lib.items(ui.decargs(args)) + selection = lib.items(args) paths = [item.path for item in selection] item_type = "track" diff --git a/beetsplug/random.py b/beetsplug/random.py index 05f2cdf77..55a9f40e5 100644 --- a/beetsplug/random.py +++ b/beetsplug/random.py @@ -16,13 +16,13 @@ from beets.plugins import BeetsPlugin from beets.random import random_objs -from beets.ui import Subcommand, decargs, print_ +from beets.ui import Subcommand, print_ def random_func(lib, opts, args): """Select some random items or albums and print the results.""" # Fetch all the objects matching the query into a list. - query = decargs(args) + query = args if opts.album: objs = list(lib.albums(query)) else: diff --git a/beetsplug/replaygain.py b/beetsplug/replaygain.py index 3aad8cd89..df37717b9 100644 --- a/beetsplug/replaygain.py +++ b/beetsplug/replaygain.py @@ -1530,7 +1530,7 @@ class ReplayGainPlugin(BeetsPlugin): self.open_pool(threads) if opts.album: - albums = lib.albums(ui.decargs(args)) + albums = lib.albums(args) self._log.info( "Analyzing {} albums ~ {} backend...".format( len(albums), self.backend_name @@ -1539,7 +1539,7 @@ class ReplayGainPlugin(BeetsPlugin): for album in albums: self.handle_album(album, write, force) else: - items = lib.items(ui.decargs(args)) + items = lib.items(args) self._log.info( "Analyzing {} tracks ~ {} backend...".format( len(items), self.backend_name diff --git a/beetsplug/scrub.py b/beetsplug/scrub.py index 630a4e6e6..813effb5f 100644 --- a/beetsplug/scrub.py +++ b/beetsplug/scrub.py @@ -58,7 +58,7 @@ class ScrubPlugin(BeetsPlugin): def commands(self): def scrub_func(lib, opts, args): # Walk through matching files and remove tags. 
- for item in lib.items(ui.decargs(args)): + for item in lib.items(args): self._log.info( "scrubbing: {0}", util.displayable_path(item.path) ) diff --git a/beetsplug/smartplaylist.py b/beetsplug/smartplaylist.py index 5ea3c6bff..e65d59649 100644 --- a/beetsplug/smartplaylist.py +++ b/beetsplug/smartplaylist.py @@ -127,7 +127,7 @@ class SmartPlaylistPlugin(BeetsPlugin): def update_cmd(self, lib, opts, args): self.build_queries() if args: - args = set(ui.decargs(args)) + args = set(args) for a in list(args): if not a.endswith(".m3u"): args.add(f"{a}.m3u") diff --git a/beetsplug/spotify.py b/beetsplug/spotify.py index 595da4892..36790b56b 100644 --- a/beetsplug/spotify.py +++ b/beetsplug/spotify.py @@ -453,7 +453,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): def queries(lib, opts, args): success = self._parse_opts(opts) if success: - results = self._match_library_tracks(lib, ui.decargs(args)) + results = self._match_library_tracks(lib, args) self._output_match_results(results) spotify_cmd = ui.Subcommand( @@ -491,7 +491,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): ) def func(lib, opts, args): - items = lib.items(ui.decargs(args)) + items = lib.items(args) self._fetch_info(items, ui.should_write(), opts.force_refetch) sync_cmd.func = func diff --git a/beetsplug/thumbnails.py b/beetsplug/thumbnails.py index e11b75390..5460d3fec 100644 --- a/beetsplug/thumbnails.py +++ b/beetsplug/thumbnails.py @@ -28,7 +28,7 @@ from pathlib import PurePosixPath from xdg import BaseDirectory from beets.plugins import BeetsPlugin -from beets.ui import Subcommand, decargs +from beets.ui import Subcommand from beets.util import bytestring_path, displayable_path, syspath from beets.util.artresizer import ArtResizer @@ -78,7 +78,7 @@ class ThumbnailsPlugin(BeetsPlugin): def process_query(self, lib, opts, args): self.config.set_args(opts) if self._check_local_ok(): - for album in lib.albums(decargs(args)): + for album in lib.albums(args): self.process_album(album) def _check_local_ok(self): diff --git a/beetsplug/web/__init__.py b/beetsplug/web/__init__.py index c1b0b5029..f05d1903e 100644 --- a/beetsplug/web/__init__.py +++ b/beetsplug/web/__init__.py @@ -470,7 +470,7 @@ class WebPlugin(BeetsPlugin): ) def func(lib, opts, args): - args = ui.decargs(args) + args = args if args: self.config["host"] = args.pop(0) if args: diff --git a/beetsplug/zero.py b/beetsplug/zero.py index 7ee624ce7..05e55bfcd 100644 --- a/beetsplug/zero.py +++ b/beetsplug/zero.py @@ -21,7 +21,7 @@ from mediafile import MediaFile from beets.importer import Action from beets.plugins import BeetsPlugin -from beets.ui import Subcommand, decargs, input_yn +from beets.ui import Subcommand, input_yn __author__ = "baobab@heresiarch.info" @@ -75,11 +75,11 @@ class ZeroPlugin(BeetsPlugin): zero_command = Subcommand("zero", help="set fields to null") def zero_fields(lib, opts, args): - if not decargs(args) and not input_yn( + if not args and not input_yn( "Remove fields for all items? 
(Y/n)", True ): return - for item in lib.items(decargs(args)): + for item in lib.items(args): self.process_item(item) zero_command.func = zero_fields diff --git a/test/plugins/test_thumbnails.py b/test/plugins/test_thumbnails.py index bd3e22714..fadac34c2 100644 --- a/test/plugins/test_thumbnails.py +++ b/test/plugins/test_thumbnails.py @@ -232,8 +232,7 @@ class ThumbnailsTest(BeetsTestCase): ) @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok", Mock()) - @patch("beetsplug.thumbnails.decargs") - def test_invokations(self, mock_decargs): + def test_invokations(self): plugin = ThumbnailsPlugin() plugin.process_album = Mock() album = Mock() @@ -243,7 +242,6 @@ class ThumbnailsTest(BeetsTestCase): album2 = Mock() lib.albums.return_value = [album, album2] plugin.process_query(lib, Mock(), None) - lib.albums.assert_called_once_with(mock_decargs.return_value) plugin.process_album.assert_has_calls( [call(album), call(album2)], any_order=True ) From 4260162d4437f329e9e19aa6efa01718c36be658 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Mon, 7 Jul 2025 11:21:18 +0100 Subject: [PATCH 34/95] Remove all Python 2 references --- beets/dbcore/query.py | 33 --------------------------------- beets/importer/tasks.py | 7 +++---- beets/ui/__init__.py | 12 ++---------- beets/ui/commands.py | 16 ++++++---------- beets/util/__init__.py | 22 ++++++++-------------- beets/util/artresizer.py | 17 +++++++++-------- beetsplug/hook.py | 26 ++++++++++---------------- beetsplug/replaygain.py | 19 ++++++------------- beetsplug/web/__init__.py | 14 ++------------ 9 files changed, 46 insertions(+), 120 deletions(-) diff --git a/beets/dbcore/query.py b/beets/dbcore/query.py index 7d9f0cee7..ae8e0ddf6 100644 --- a/beets/dbcore/query.py +++ b/beets/dbcore/query.py @@ -411,39 +411,6 @@ class BooleanQuery(MatchQuery[int]): super().__init__(field_name, pattern_int, fast) -class BytesQuery(FieldQuery[bytes]): - """Match a raw bytes field (i.e., a path). This is a necessary hack - to work around the `sqlite3` module's desire to treat `bytes` and - `unicode` equivalently in Python 2. Always use this query instead of - `MatchQuery` when matching on BLOB values. - """ - - def __init__(self, field_name: str, pattern: bytes | str | memoryview): - # Use a buffer/memoryview representation of the pattern for SQLite - # matching. This instructs SQLite to treat the blob as binary - # rather than encoded Unicode. - if isinstance(pattern, (str, bytes)): - if isinstance(pattern, str): - bytes_pattern = pattern.encode("utf-8") - else: - bytes_pattern = pattern - self.buf_pattern = memoryview(bytes_pattern) - elif isinstance(pattern, memoryview): - self.buf_pattern = pattern - bytes_pattern = bytes(pattern) - else: - raise ValueError("pattern must be bytes, str, or memoryview") - - super().__init__(field_name, bytes_pattern) - - def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: - return self.field + " = ?", [self.buf_pattern] - - @classmethod - def value_match(cls, pattern: bytes, value: Any) -> bool: - return pattern == value - - class NumericQuery(FieldQuery[str]): """Matches numeric fields. A syntax using Ruby-style range ellipses (``..``) lets users specify one- or two-sided ranges. 
For example, diff --git a/beets/importer/tasks.py b/beets/importer/tasks.py index 75f04cf5a..441224b6b 100644 --- a/beets/importer/tasks.py +++ b/beets/importer/tasks.py @@ -26,7 +26,8 @@ from typing import TYPE_CHECKING, Callable, Iterable, Sequence import mediafile -from beets import autotag, config, dbcore, library, plugins, util +from beets import autotag, config, library, plugins, util +from beets.dbcore.query import PathQuery from .state import ImportState @@ -520,9 +521,7 @@ class ImportTask(BaseImportTask): ) replaced_album_ids = set() for item in self.imported_items(): - dup_items = list( - lib.items(query=dbcore.query.BytesQuery("path", item.path)) - ) + dup_items = list(lib.items(query=PathQuery("path", item.path))) self.replaced_items[item] = dup_items for dup_item in dup_items: if ( diff --git a/beets/ui/__init__.py b/beets/ui/__init__.py index 109e39f4d..4f4236ff9 100644 --- a/beets/ui/__init__.py +++ b/beets/ui/__init__.py @@ -104,23 +104,15 @@ def _stream_encoding(stream, default="utf-8"): return stream.encoding or default -def print_(*strings, **kwargs): +def print_(*strings: str, end: str = "\n") -> None: """Like print, but rather than raising an error when a character is not in the terminal's encoding's character set, just silently replaces it. - The arguments must be Unicode strings: `unicode` on Python 2; `str` on - Python 3. - The `end` keyword argument behaves similarly to the built-in `print` (it defaults to a newline). """ - if not strings: - strings = [""] - assert isinstance(strings[0], str) - - txt = " ".join(strings) - txt += kwargs.get("end", "\n") + txt = " ".join(strings or ("",)) + end # Encode the string and write it to stdout. # On Python 3, sys.stdout expects text strings and uses the diff --git a/beets/ui/commands.py b/beets/ui/commands.py index 7153f30be..25af95646 100755 --- a/beets/ui/commands.py +++ b/beets/ui/commands.py @@ -1302,7 +1302,7 @@ class TerminalImportSession(importer.ImportSession): # The import command. -def import_files(lib, paths, query): +def import_files(lib, paths: list[bytes], query): """Import the files in the given list of paths or matching the query. """ @@ -1333,7 +1333,7 @@ def import_files(lib, paths, query): plugins.send("import", lib=lib, paths=paths) -def import_func(lib, opts, args): +def import_func(lib, opts, args: list[str]): config["import"].set_args(opts) # Special case: --copy flag suppresses import_move (which would @@ -1355,15 +1355,11 @@ def import_func(lib, opts, args): if not paths and not paths_from_logfiles: raise ui.UserError("no path specified") - # On Python 2, we used to get filenames as raw bytes, which is - # what we need. On Python 3, we need to undo the "helpful" - # conversion to Unicode strings to get the real bytestring - # filename. - paths = [os.fsencode(p) for p in paths] + byte_paths = [os.fsencode(p) for p in paths] paths_from_logfiles = [os.fsencode(p) for p in paths_from_logfiles] # Check the user-specified directories. 
- for path in paths: + for path in byte_paths: if not os.path.exists(syspath(normpath(path))): raise ui.UserError( "no such file or directory: {}".format( @@ -1384,14 +1380,14 @@ def import_func(lib, opts, args): ) continue - paths.append(path) + byte_paths.append(path) # If all paths were read from a logfile, and none of them exist, throw # an error if not paths: raise ui.UserError("none of the paths are importable") - import_files(lib, paths, query) + import_files(lib, byte_paths, query) import_cmd = ui.Subcommand( diff --git a/beets/util/__init__.py b/beets/util/__init__.py index c1c76c860..00c9ce05d 100644 --- a/beets/util/__init__.py +++ b/beets/util/__init__.py @@ -28,6 +28,7 @@ import sys import tempfile import traceback from collections import Counter +from collections.abc import Sequence from contextlib import suppress from enum import Enum from functools import cache @@ -41,7 +42,6 @@ from typing import ( AnyStr, Callable, Generic, - Iterable, NamedTuple, TypeVar, Union, @@ -53,23 +53,17 @@ import beets from beets.util import hidden if TYPE_CHECKING: - from collections.abc import Iterator, Sequence + from collections.abc import Iterable, Iterator from logging import Logger from beets.library import Item -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias - MAX_FILENAME_LENGTH = 200 WINDOWS_MAGIC_PREFIX = "\\\\?\\" T = TypeVar("T") -BytesOrStr = Union[str, bytes] -PathLike = Union[BytesOrStr, Path] -Replacements: TypeAlias = "Sequence[tuple[Pattern[str], str]]" +PathLike = Union[str, bytes, Path] +Replacements = Sequence[tuple[Pattern[str], str]] # Here for now to allow for a easy replace later on # once we can move to a PathLike (mainly used in importer) @@ -860,7 +854,9 @@ class CommandOutput(NamedTuple): stderr: bytes -def command_output(cmd: list[BytesOrStr], shell: bool = False) -> CommandOutput: +def command_output( + cmd: list[str] | list[bytes], shell: bool = False +) -> CommandOutput: """Runs the command and returns its output after it has exited. Returns a CommandOutput. The attributes ``stdout`` and ``stderr`` contain @@ -878,8 +874,6 @@ def command_output(cmd: list[BytesOrStr], shell: bool = False) -> CommandOutput: This replaces `subprocess.check_output` which can have problems if lots of output is sent to stderr. """ - converted_cmd = [os.fsdecode(a) for a in cmd] - devnull = subprocess.DEVNULL proc = subprocess.Popen( @@ -894,7 +888,7 @@ def command_output(cmd: list[BytesOrStr], shell: bool = False) -> CommandOutput: if proc.returncode: raise subprocess.CalledProcessError( returncode=proc.returncode, - cmd=" ".join(converted_cmd), + cmd=" ".join(map(os.fsdecode, cmd)), output=stdout + stderr, ) return CommandOutput(stdout, stderr) diff --git a/beets/util/artresizer.py b/beets/util/artresizer.py index 33b98c413..fe67c506e 100644 --- a/beets/util/artresizer.py +++ b/beets/util/artresizer.py @@ -214,9 +214,9 @@ class IMBackend(LocalBackend): else: return cls._version - convert_cmd: list[str | bytes] - identify_cmd: list[str | bytes] - compare_cmd: list[str | bytes] + convert_cmd: list[str] + identify_cmd: list[str] + compare_cmd: list[str] def __init__(self) -> None: """Initialize a wrapper around ImageMagick for local image operations. @@ -265,7 +265,7 @@ class IMBackend(LocalBackend): # with regards to the height. # ImageMagick already seems to default to no interlace, but we include # it here for the sake of explicitness. 
- cmd: list[str | bytes] = self.convert_cmd + [ + cmd: list[str] = self.convert_cmd + [ syspath(path_in, prefix=False), "-resize", f"{maxwidth}x>", @@ -295,7 +295,7 @@ class IMBackend(LocalBackend): return path_out def get_size(self, path_in: bytes) -> tuple[int, int] | None: - cmd: list[str | bytes] = self.identify_cmd + [ + cmd: list[str] = self.identify_cmd + [ "-format", "%w %h", syspath(path_in, prefix=False), @@ -480,10 +480,11 @@ class IMBackend(LocalBackend): return True def write_metadata(self, file: bytes, metadata: Mapping[str, str]) -> None: - assignments = list( - chain.from_iterable(("-set", k, v) for k, v in metadata.items()) + assignments = chain.from_iterable( + ("-set", k, v) for k, v in metadata.items() ) - command = self.convert_cmd + [file, *assignments, file] + str_file = os.fsdecode(file) + command = self.convert_cmd + [str_file, *assignments, str_file] util.command_output(command) diff --git a/beetsplug/hook.py b/beetsplug/hook.py index 5ce5ef828..90d66553a 100644 --- a/beetsplug/hook.py +++ b/beetsplug/hook.py @@ -14,27 +14,21 @@ """Allows custom commands to be run when an event is emitted by beets""" +from __future__ import annotations + +import os import shlex import string import subprocess -import sys +from typing import Any from beets.plugins import BeetsPlugin -class CodingFormatter(string.Formatter): - """A variant of `string.Formatter` that converts everything to `unicode` - strings. +class BytesToStrFormatter(string.Formatter): + """A variant of `string.Formatter` that converts `bytes` to `str`.""" - This was necessary on Python 2, in needs to be kept for backwards - compatibility. - """ - - def __init__(self, coding): - """Creates a new coding formatter with the provided coding.""" - self._coding = coding - - def convert_field(self, value, conversion): + def convert_field(self, value: Any, conversion: str | None) -> Any: """Converts the provided value given a conversion type. This method decodes the converted value using the formatter's coding. @@ -42,7 +36,7 @@ class CodingFormatter(string.Formatter): converted = super().convert_field(value, conversion) if isinstance(converted, bytes): - return converted.decode(self._coding) + return os.fsdecode(converted) return converted @@ -72,8 +66,8 @@ class HookPlugin(BeetsPlugin): return # For backwards compatibility, use a string formatter that decodes - # bytes (in particular, paths) to unicode strings. - formatter = CodingFormatter(sys.getfilesystemencoding()) + # bytes (in particular, paths) to strings. + formatter = BytesToStrFormatter() command_pieces = [ formatter.format(piece, event=event, **kwargs) for piece in shlex.split(command) diff --git a/beetsplug/replaygain.py b/beetsplug/replaygain.py index df37717b9..00b651d99 100644 --- a/beetsplug/replaygain.py +++ b/beetsplug/replaygain.py @@ -62,7 +62,7 @@ class FatalGstreamerPluginReplayGainError(FatalReplayGainError): loading the required plugins.""" -def call(args: list[Any], log: Logger, **kwargs: Any): +def call(args: list[str], log: Logger, **kwargs: Any): """Execute the command and return its output or raise a ReplayGainError on failure. """ @@ -73,11 +73,6 @@ def call(args: list[Any], log: Logger, **kwargs: Any): raise ReplayGainError( "{} exited with status {}".format(args[0], e.returncode) ) - except UnicodeEncodeError: - # Due to a bug in Python 2's subprocess on Windows, Unicode - # filenames can fail to encode on that platform. 
See: - # https://github.com/google-code-export/beets/issues/499 - raise ReplayGainError("argument encoding failed") def db_to_lufs(db: float) -> float: @@ -403,20 +398,18 @@ class FfmpegBackend(Backend): def _construct_cmd( self, item: Item, peak_method: PeakMethod | None - ) -> list[str | bytes]: + ) -> list[str]: """Construct the shell command to analyse items.""" return [ self._ffmpeg_path, "-nostats", "-hide_banner", "-i", - item.path, + str(item.filepath), "-map", "a:0", "-filter", - "ebur128=peak={}".format( - "none" if peak_method is None else peak_method.name - ), + f"ebur128=peak={'none' if peak_method is None else peak_method.name}", "-f", "null", "-", @@ -660,7 +653,7 @@ class CommandBackend(Backend): # tag-writing; this turns the mp3gain/aacgain tool into a gain # calculator rather than a tag manipulator because we take care # of changing tags ourselves. - cmd: list[bytes | str] = [self.command, "-o", "-s", "s"] + cmd: list[str] = [self.command, "-o", "-s", "s"] if self.noclip: # Adjust to avoid clipping. cmd = cmd + ["-k"] @@ -1039,7 +1032,7 @@ class AudioToolsBackend(Backend): os.fsdecode(syspath(item.path)) ) except OSError: - raise ReplayGainError(f"File {item.path} was not found") + raise ReplayGainError(f"File {item.filepath} was not found") except self._mod_audiotools.UnsupportedFile: raise ReplayGainError(f"Unsupported file type {item.format}") diff --git a/beetsplug/web/__init__.py b/beetsplug/web/__init__.py index f05d1903e..559f0622c 100644 --- a/beetsplug/web/__init__.py +++ b/beetsplug/web/__init__.py @@ -308,18 +308,8 @@ def all_items(): def item_file(item_id): item = g.lib.get_item(item_id) - # On Windows under Python 2, Flask wants a Unicode path. On Python 3, it - # *always* wants a Unicode path. - if os.name == "nt": - item_path = util.syspath(item.path) - else: - item_path = os.fsdecode(item.path) - + item_path = util.syspath(item.path) base_filename = os.path.basename(item_path) - if isinstance(base_filename, bytes): - unicode_base_filename = util.displayable_path(base_filename) - else: - unicode_base_filename = base_filename try: # Imitate http.server behaviour @@ -327,7 +317,7 @@ def item_file(item_id): except UnicodeError: safe_filename = unidecode(base_filename) else: - safe_filename = unicode_base_filename + safe_filename = base_filename response = flask.send_file( item_path, as_attachment=True, download_name=safe_filename From afe97cf31eb4ed9297c1ba37c85727221758c905 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 8 Jul 2025 11:27:46 +0100 Subject: [PATCH 35/95] Do not assign args to query --- beets/ui/__init__.py | 9 ++------- beetsplug/bareasc.py | 5 ++--- beetsplug/bpsync.py | 5 ++--- beetsplug/edit.py | 3 +-- beetsplug/limit.py | 5 ++--- beetsplug/mbsync.py | 5 ++--- beetsplug/metasync/__init__.py | 3 +-- beetsplug/random.py | 5 ++--- 8 files changed, 14 insertions(+), 26 deletions(-) diff --git a/beets/ui/__init__.py b/beets/ui/__init__.py index 4f4236ff9..74dee550c 100644 --- a/beets/ui/__init__.py +++ b/beets/ui/__init__.py @@ -1293,14 +1293,9 @@ class CommonOptionsParser(optparse.OptionParser): setattr(parser.values, option.dest, True) # Use the explicitly specified format, or the string from the option. 
- if fmt: - value = fmt - elif value: - (value,) = [value] - else: - value = "" - + value = fmt or value or "" parser.values.format = value + if target: config[target._format_config_key].set(value) else: diff --git a/beetsplug/bareasc.py b/beetsplug/bareasc.py index ed1057b20..d2852bb1d 100644 --- a/beetsplug/bareasc.py +++ b/beetsplug/bareasc.py @@ -83,14 +83,13 @@ class BareascPlugin(BeetsPlugin): def unidecode_list(self, lib, opts, args): """Emulate normal 'list' command but with unidecode output.""" - query = args album = opts.album # Copied from commands.py - list_items if album: - for album in lib.albums(query): + for album in lib.albums(args): bare = unidecode(str(album)) print_(bare) else: - for item in lib.items(query): + for item in lib.items(args): bare = unidecode(str(item)) print_(bare) diff --git a/beetsplug/bpsync.py b/beetsplug/bpsync.py index 656f30425..ccd781b28 100644 --- a/beetsplug/bpsync.py +++ b/beetsplug/bpsync.py @@ -65,10 +65,9 @@ class BPSyncPlugin(BeetsPlugin): move = ui.should_move(opts.move) pretend = opts.pretend write = ui.should_write(opts.write) - query = args - self.singletons(lib, query, move, pretend, write) - self.albums(lib, query, move, pretend, write) + self.singletons(lib, args, move, pretend, write) + self.albums(lib, args, move, pretend, write) def singletons(self, lib, query, move, pretend, write): """Retrieve and apply info from the autotagger for items matched by diff --git a/beetsplug/edit.py b/beetsplug/edit.py index e0c6509c8..52387c314 100644 --- a/beetsplug/edit.py +++ b/beetsplug/edit.py @@ -180,8 +180,7 @@ class EditPlugin(plugins.BeetsPlugin): def _edit_command(self, lib, opts, args): """The CLI command function for the `beet edit` command.""" # Get the objects to edit. - query = args - items, albums = _do_query(lib, query, opts.album, False) + items, albums = _do_query(lib, args, opts.album, False) objs = albums if opts.album else items if not objs: ui.print_("Nothing to edit.") diff --git a/beetsplug/limit.py b/beetsplug/limit.py index 2d5a30f24..aae99a717 100644 --- a/beetsplug/limit.py +++ b/beetsplug/limit.py @@ -36,11 +36,10 @@ def lslimit(lib, opts, args): if (opts.head or opts.tail or 0) < 0: raise ValueError("Limit value must be non-negative") - query = args if opts.album: - objs = lib.albums(query) + objs = lib.albums(args) else: - objs = lib.items(query) + objs = lib.items(args) if opts.head is not None: objs = islice(objs, opts.head) diff --git a/beetsplug/mbsync.py b/beetsplug/mbsync.py index 36e8cbd47..d38b25e9f 100644 --- a/beetsplug/mbsync.py +++ b/beetsplug/mbsync.py @@ -63,10 +63,9 @@ class MBSyncPlugin(BeetsPlugin): move = ui.should_move(opts.move) pretend = opts.pretend write = ui.should_write(opts.write) - query = args - self.singletons(lib, query, move, pretend, write) - self.albums(lib, query, move, pretend, write) + self.singletons(lib, args, move, pretend, write) + self.albums(lib, args, move, pretend, write) def singletons(self, lib, query, move, pretend, write): """Retrieve and apply info from the autotagger for items matched by diff --git a/beetsplug/metasync/__init__.py b/beetsplug/metasync/__init__.py index 4c7aac1c0..f99e820b5 100644 --- a/beetsplug/metasync/__init__.py +++ b/beetsplug/metasync/__init__.py @@ -97,7 +97,6 @@ class MetaSyncPlugin(BeetsPlugin): def func(self, lib, opts, args): """Command handler for the metasync function.""" pretend = opts.pretend - query = args sources = [] for source in opts.sources: @@ -106,7 +105,7 @@ class MetaSyncPlugin(BeetsPlugin): sources = sources or 
self.config["source"].as_str_seq() meta_source_instances = {} - items = lib.items(query) + items = lib.items(args) # Avoid needlessly instantiating meta sources (can be expensive) if not items: diff --git a/beetsplug/random.py b/beetsplug/random.py index 55a9f40e5..c791af414 100644 --- a/beetsplug/random.py +++ b/beetsplug/random.py @@ -22,11 +22,10 @@ from beets.ui import Subcommand, print_ def random_func(lib, opts, args): """Select some random items or albums and print the results.""" # Fetch all the objects matching the query into a list. - query = args if opts.album: - objs = list(lib.albums(query)) + objs = list(lib.albums(args)) else: - objs = list(lib.items(query)) + objs = list(lib.items(args)) # Print a random subset. objs = random_objs( From 605cea9bdc2b030f1ee00d0854d2a37c9c722759 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Mon, 16 Sep 2024 16:17:42 +0100 Subject: [PATCH 36/95] Rip away io stuff from TestHelper to IOMixin --- beets/test/helper.py | 20 +++++++++++++++----- test/plugins/test_embedart.py | 13 +++++++------ test/test_importer.py | 5 ++--- test/test_ui.py | 26 +++++++------------------- test/test_ui_commands.py | 13 ++----------- test/test_ui_init.py | 8 ++------ 6 files changed, 35 insertions(+), 50 deletions(-) diff --git a/beets/test/helper.py b/beets/test/helper.py index b86db5b23..7ce3d06d7 100644 --- a/beets/test/helper.py +++ b/beets/test/helper.py @@ -163,6 +163,20 @@ NEEDS_REFLINK = unittest.skipUnless( ) +class IOMixin: + @cached_property + def io(self) -> _common.DummyIO: + return _common.DummyIO() + + def setUp(self): + super().setUp() + self.io.install() + + def tearDown(self): + super().tearDown() + self.io.restore() + + class TestHelper(_common.Assertions, ConfigMixin): """Helper mixin for high-level cli and plugin tests. @@ -215,12 +229,8 @@ class TestHelper(_common.Assertions, ConfigMixin): dbpath = ":memory:" self.lib = Library(dbpath, self.libdir) - # Initialize, but don't install, a DummyIO. 
- self.io = _common.DummyIO() - def teardown_beets(self): self.env_patcher.stop() - self.io.restore() self.lib._close() self.remove_temp_dir() @@ -759,7 +769,7 @@ class TerminalImportSessionFixture(TerminalImportSession): self._add_choice_input() -class TerminalImportMixin(ImportHelper): +class TerminalImportMixin(IOMixin, ImportHelper): """Provides_a terminal importer for the import session.""" io: _common.DummyIO diff --git a/test/plugins/test_embedart.py b/test/plugins/test_embedart.py index f2f02137b..2cada1d5b 100644 --- a/test/plugins/test_embedart.py +++ b/test/plugins/test_embedart.py @@ -24,7 +24,12 @@ from mediafile import MediaFile from beets import art, config, logging, ui from beets.test import _common -from beets.test.helper import BeetsTestCase, FetchImageHelper, PluginMixin +from beets.test.helper import ( + BeetsTestCase, + FetchImageHelper, + IOMixin, + PluginMixin, +) from beets.util import bytestring_path, displayable_path, syspath from beets.util.artresizer import ArtResizer from test.test_art_resize import DummyIMBackend @@ -68,17 +73,13 @@ def require_artresizer_compare(test): return wrapper -class EmbedartCliTest(PluginMixin, FetchImageHelper, BeetsTestCase): +class EmbedartCliTest(IOMixin, PluginMixin, FetchImageHelper, BeetsTestCase): plugin = "embedart" small_artpath = os.path.join(_common.RSRC, b"image-2x3.jpg") abbey_artpath = os.path.join(_common.RSRC, b"abbey.jpg") abbey_similarpath = os.path.join(_common.RSRC, b"abbey-similar.jpg") abbey_differentpath = os.path.join(_common.RSRC, b"abbey-different.jpg") - def setUp(self): - super().setUp() # Converter is threaded - self.io.install() - def _setup_data(self, artpath=None): if not artpath: artpath = self.small_artpath diff --git a/test/test_importer.py b/test/test_importer.py index 9bb0e8a63..fc4141c5b 100644 --- a/test/test_importer.py +++ b/test/test_importer.py @@ -43,6 +43,7 @@ from beets.test.helper import ( AutotagStub, BeetsTestCase, ImportTestCase, + IOMixin, PluginMixin, capture_log, has_program, @@ -1588,13 +1589,11 @@ class ReimportTest(AutotagImportTestCase): assert self._album().data_source == "match_source" -class ImportPretendTest(AutotagImportTestCase): +class ImportPretendTest(IOMixin, AutotagImportTestCase): """Test the pretend commandline option""" def setUp(self): super().setUp() - self.io.install() - self.album_track_path = self.prepare_album_for_import(1)[0] self.single_path = self.prepare_track_for_import(2, self.import_path) self.album_path = self.album_track_path.parent diff --git a/test/test_ui.py b/test/test_ui.py index 8bb0218d5..519962b41 100644 --- a/test/test_ui.py +++ b/test/test_ui.py @@ -32,6 +32,7 @@ from beets.autotag.match import distance from beets.test import _common from beets.test.helper import ( BeetsTestCase, + IOMixin, PluginTestCase, capture_stdout, control_stdin, @@ -107,12 +108,10 @@ class ListTest(BeetsTestCase): assert "the album" not in stdout.getvalue() -class RemoveTest(BeetsTestCase): +class RemoveTest(IOMixin, BeetsTestCase): def setUp(self): super().setUp() - self.io.install() - # Copy a file into the library. 
self.item_path = os.path.join(_common.RSRC, b"full.mp3") self.i = library.Item.from_path(self.item_path) @@ -444,8 +443,6 @@ class MoveTest(BeetsTestCase): def setUp(self): super().setUp() - self.io.install() - self.itempath = os.path.join(self.libdir, b"srcfile") shutil.copy( syspath(os.path.join(_common.RSRC, b"full.mp3")), @@ -544,12 +541,10 @@ class MoveTest(BeetsTestCase): self.assertNotExists(self.otherdir) -class UpdateTest(BeetsTestCase): +class UpdateTest(IOMixin, BeetsTestCase): def setUp(self): super().setUp() - self.io.install() - # Copy a file into the library. item_path = os.path.join(_common.RSRC, b"full.mp3") item_path_two = os.path.join(_common.RSRC, b"full.flac") @@ -742,11 +737,7 @@ class UpdateTest(BeetsTestCase): assert item.lyrics != "new lyrics" -class PrintTest(BeetsTestCase): - def setUp(self): - super().setUp() - self.io.install() - +class PrintTest(IOMixin, BeetsTestCase): def test_print_without_locale(self): lang = os.environ.get("LANG") if lang: @@ -1120,10 +1111,9 @@ class ConfigTest(TestPluginTestCase): ) -class ShowModelChangeTest(BeetsTestCase): +class ShowModelChangeTest(IOMixin, BeetsTestCase): def setUp(self): super().setUp() - self.io.install() self.a = _common.item() self.b = _common.item() self.a.path = self.b.path @@ -1172,10 +1162,9 @@ class ShowModelChangeTest(BeetsTestCase): assert "bar" in out -class ShowChangeTest(BeetsTestCase): +class ShowChangeTest(IOMixin, BeetsTestCase): def setUp(self): super().setUp() - self.io.install() self.items = [_common.item()] self.items[0].track = 1 @@ -1397,7 +1386,7 @@ class PluginTest(TestPluginTestCase): os.environ.get("GITHUB_ACTIONS") == "true" and sys.platform == "linux", reason="Completion is for some reason unhappy on Ubuntu 24.04 in CI", ) -class CompletionTest(TestPluginTestCase): +class CompletionTest(IOMixin, TestPluginTestCase): def test_completion(self): # Do not load any other bash completion scripts on the system. env = dict(os.environ) @@ -1427,7 +1416,6 @@ class CompletionTest(TestPluginTestCase): self.skipTest("could not read bash-completion script") # Load completion script. 
- self.io.install() self.run_command("completion", lib=None) completion_script = self.io.getoutput().encode("utf-8") self.io.restore() diff --git a/test/test_ui_commands.py b/test/test_ui_commands.py index 897cba8a1..412ddc2b7 100644 --- a/test/test_ui_commands.py +++ b/test/test_ui_commands.py @@ -21,7 +21,7 @@ import pytest from beets import library, ui from beets.test import _common -from beets.test.helper import BeetsTestCase, ItemInDBTestCase +from beets.test.helper import BeetsTestCase, IOMixin, ItemInDBTestCase from beets.ui import commands from beets.util import syspath @@ -75,16 +75,7 @@ class QueryTest(BeetsTestCase): self.check_do_query(0, 2, album=True, also_items=False) -class FieldsTest(ItemInDBTestCase): - def setUp(self): - super().setUp() - - self.io.install() - - def tearDown(self): - super().tearDown() - self.io.restore() - +class FieldsTest(IOMixin, ItemInDBTestCase): def remove_keys(self, keys, text): for i in text: try: diff --git a/test/test_ui_init.py b/test/test_ui_init.py index df21b300c..0f42d7258 100644 --- a/test/test_ui_init.py +++ b/test/test_ui_init.py @@ -21,14 +21,10 @@ from random import random from beets import config, ui from beets.test import _common -from beets.test.helper import BeetsTestCase, control_stdin +from beets.test.helper import BeetsTestCase, IOMixin, control_stdin -class InputMethodsTest(BeetsTestCase): - def setUp(self): - super().setUp() - self.io.install() - +class InputMethodsTest(IOMixin, BeetsTestCase): def _print_helper(self, s): print(s) From 9e4b11745490e26b98c830f66d832770a7faffa1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Mon, 26 May 2025 11:23:10 +0100 Subject: [PATCH 37/95] Speed up tests that only need IOMixin --- test/test_ui.py | 6 +++--- test/test_ui_init.py | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/test/test_ui.py b/test/test_ui.py index 519962b41..8c93a83ea 100644 --- a/test/test_ui.py +++ b/test/test_ui.py @@ -737,7 +737,7 @@ class UpdateTest(IOMixin, BeetsTestCase): assert item.lyrics != "new lyrics" -class PrintTest(IOMixin, BeetsTestCase): +class PrintTest(IOMixin, unittest.TestCase): def test_print_without_locale(self): lang = os.environ.get("LANG") if lang: @@ -1111,7 +1111,7 @@ class ConfigTest(TestPluginTestCase): ) -class ShowModelChangeTest(IOMixin, BeetsTestCase): +class ShowModelChangeTest(IOMixin, unittest.TestCase): def setUp(self): super().setUp() self.a = _common.item() @@ -1162,7 +1162,7 @@ class ShowModelChangeTest(IOMixin, BeetsTestCase): assert "bar" in out -class ShowChangeTest(IOMixin, BeetsTestCase): +class ShowChangeTest(IOMixin, unittest.TestCase): def setUp(self): super().setUp() diff --git a/test/test_ui_init.py b/test/test_ui_init.py index 0f42d7258..f6c9fe245 100644 --- a/test/test_ui_init.py +++ b/test/test_ui_init.py @@ -16,6 +16,7 @@ import os import shutil +import unittest from copy import deepcopy from random import random @@ -24,7 +25,7 @@ from beets.test import _common from beets.test.helper import BeetsTestCase, IOMixin, control_stdin -class InputMethodsTest(IOMixin, BeetsTestCase): +class InputMethodsTest(IOMixin, unittest.TestCase): def _print_helper(self, s): print(s) From edd3df99ba69a0aeb5037506fe0ab8920514c8ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sat, 31 May 2025 23:19:59 +0100 Subject: [PATCH 38/95] Introduce TestHelper.temp_dir_path --- beets/test/helper.py | 23 +++++++++++++---------- test/plugins/test_hook.py | 5 ++--- test/plugins/test_importfeeds.py | 16 ++++++---------- 
test/plugins/test_player.py | 4 ++-- test/plugins/test_playlist.py | 10 ++++------ test/test_importer.py | 5 ++--- test/test_library.py | 2 +- 7 files changed, 30 insertions(+), 35 deletions(-) diff --git a/beets/test/helper.py b/beets/test/helper.py index 7ce3d06d7..b6bd23f5c 100644 --- a/beets/test/helper.py +++ b/beets/test/helper.py @@ -186,6 +186,14 @@ class TestHelper(_common.Assertions, ConfigMixin): db_on_disk: ClassVar[bool] = False + @cached_property + def temp_dir_path(self) -> Path: + return Path(self.create_temp_dir()) + + @cached_property + def temp_dir(self) -> bytes: + return util.bytestring_path(self.temp_dir_path) + # TODO automate teardown through hook registration def setup_beets(self): @@ -208,8 +216,7 @@ class TestHelper(_common.Assertions, ConfigMixin): Make sure you call ``teardown_beets()`` afterwards. """ - self.create_temp_dir() - temp_dir_str = os.fsdecode(self.temp_dir) + temp_dir_str = str(self.temp_dir_path) self.env_patcher = patch.dict( "os.environ", { @@ -394,16 +401,12 @@ class TestHelper(_common.Assertions, ConfigMixin): # Safe file operations - def create_temp_dir(self, **kwargs): - """Create a temporary directory and assign it into - `self.temp_dir`. Call `remove_temp_dir` later to delete it. - """ - temp_dir = mkdtemp(**kwargs) - self.temp_dir = util.bytestring_path(temp_dir) + def create_temp_dir(self, **kwargs) -> str: + return mkdtemp(**kwargs) def remove_temp_dir(self): """Delete the temporary directory created by `create_temp_dir`.""" - shutil.rmtree(syspath(self.temp_dir)) + shutil.rmtree(self.temp_dir_path) def touch(self, path, dir=None, content=""): """Create a file at `path` with given content. @@ -541,7 +544,7 @@ class ImportHelper(TestHelper): @cached_property def import_path(self) -> Path: - import_path = Path(os.fsdecode(self.temp_dir)) / "import" + import_path = self.temp_dir_path / "import" import_path.mkdir(exist_ok=True) return import_path diff --git a/test/plugins/test_hook.py b/test/plugins/test_hook.py index 993b95911..d15de1cec 100644 --- a/test/plugins/test_hook.py +++ b/test/plugins/test_hook.py @@ -15,7 +15,7 @@ from __future__ import annotations -import os.path +import os import sys import unittest from contextlib import contextmanager @@ -74,8 +74,7 @@ class HookCommandTest(HookTestCase): def setUp(self): super().setUp() - temp_dir = os.fsdecode(self.temp_dir) - self.paths = [os.path.join(temp_dir, e) for e in self.events] + self.paths = [str(self.temp_dir_path / e) for e in self.events] def _test_command( self, diff --git a/test/plugins/test_importfeeds.py b/test/plugins/test_importfeeds.py index 5f1f915ad..d525bd801 100644 --- a/test/plugins/test_importfeeds.py +++ b/test/plugins/test_importfeeds.py @@ -12,8 +12,8 @@ class ImportfeedsTestTest(BeetsTestCase): def setUp(self): super().setUp() self.importfeeds = ImportFeedsPlugin() - self.feeds_dir = os.path.join(os.fsdecode(self.temp_dir), "importfeeds") - config["importfeeds"]["dir"] = self.feeds_dir + self.feeds_dir = self.temp_dir_path / "importfeeds" + config["importfeeds"]["dir"] = str(self.feeds_dir) def test_multi_format_album_playlist(self): config["importfeeds"]["formats"] = "m3u_multi" @@ -24,10 +24,8 @@ class ImportfeedsTestTest(BeetsTestCase): self.lib.add(item) self.importfeeds.album_imported(self.lib, album) - playlist_path = os.path.join( - self.feeds_dir, os.listdir(self.feeds_dir)[0] - ) - assert playlist_path.endswith("album_name.m3u") + playlist_path = self.feeds_dir / next(self.feeds_dir.iterdir()) + assert str(playlist_path).endswith("album_name.m3u") 
with open(playlist_path) as playlist: assert item_path in playlist.read() @@ -43,9 +41,7 @@ class ImportfeedsTestTest(BeetsTestCase): self.lib.add(item) self.importfeeds.album_imported(self.lib, album) - playlist = os.path.join( - self.feeds_dir, config["importfeeds"]["m3u_name"].get() - ) + playlist = self.feeds_dir / config["importfeeds"]["m3u_name"].get() playlist_subdir = os.path.dirname(playlist) assert os.path.isdir(playlist_subdir) assert os.path.isfile(playlist) @@ -62,7 +58,7 @@ class ImportfeedsTestTest(BeetsTestCase): self.importfeeds.import_begin(self) self.importfeeds.album_imported(self.lib, album) date = datetime.datetime.now().strftime("%Y%m%d_%Hh%M") - playlist = os.path.join(self.feeds_dir, f"imports_{date}.m3u") + playlist = self.feeds_dir / f"imports_{date}.m3u" assert os.path.isfile(playlist) with open(playlist) as playlist_contents: assert item_path in playlist_contents.read() diff --git a/test/plugins/test_player.py b/test/plugins/test_player.py index b17a78c17..a7c613d8f 100644 --- a/test/plugins/test_player.py +++ b/test/plugins/test_player.py @@ -311,7 +311,7 @@ class BPDTestHelper(PluginTestCase): """ # Create a config file: config = { - "pluginpath": [os.fsdecode(self.temp_dir)], + "pluginpath": [str(self.temp_dir_path)], "plugins": "bpd", # use port 0 to let the OS choose a free port "bpd": {"host": host, "port": 0, "control_port": 0}, @@ -320,7 +320,7 @@ class BPDTestHelper(PluginTestCase): config["bpd"]["password"] = password config_file = tempfile.NamedTemporaryFile( mode="wb", - dir=os.fsdecode(self.temp_dir), + dir=str(self.temp_dir_path), suffix=".yaml", delete=False, ) diff --git a/test/plugins/test_playlist.py b/test/plugins/test_playlist.py index ee4059b70..9d9ce0303 100644 --- a/test/plugins/test_playlist.py +++ b/test/plugins/test_playlist.py @@ -72,12 +72,10 @@ class PlaylistTestCase(PluginTestCase): self.lib.add(i3) self.lib.add_album([i3]) - self.playlist_dir = os.path.join( - os.fsdecode(self.temp_dir), "playlists" - ) - os.makedirs(self.playlist_dir) + self.playlist_dir = self.temp_dir_path / "playlists" + self.playlist_dir.mkdir(parents=True, exist_ok=True) self.config["directory"] = self.music_dir - self.config["playlist"]["playlist_dir"] = self.playlist_dir + self.config["playlist"]["playlist_dir"] = str(self.playlist_dir) self.setup_test() self.load_plugins() @@ -222,7 +220,7 @@ class PlaylistTestRelativeToPls(PlaylistQueryTest, PlaylistTestCase): ) self.config["playlist"]["relative_to"] = "playlist" - self.config["playlist"]["playlist_dir"] = self.playlist_dir + self.config["playlist"]["playlist_dir"] = str(self.playlist_dir) class PlaylistUpdateTest: diff --git a/test/test_importer.py b/test/test_importer.py index fc4141c5b..3e362a179 100644 --- a/test/test_importer.py +++ b/test/test_importer.py @@ -23,7 +23,6 @@ import sys import unicodedata import unittest from io import StringIO -from pathlib import Path from tarfile import TarFile from tempfile import mkstemp from unittest.mock import Mock, patch @@ -194,7 +193,7 @@ class NonAutotaggedImportTest(AsIsImporterMixin, ImportTestCase): def create_archive(session): - (handle, path) = mkstemp(dir=os.fsdecode(session.temp_dir)) + handle, path = mkstemp(dir=session.temp_dir_path) path = bytestring_path(path) os.close(handle) archive = ZipFile(os.fsdecode(path), mode="w") @@ -1623,7 +1622,7 @@ class ImportPretendTest(IOMixin, AutotagImportTestCase): ] def test_import_pretend_empty(self): - empty_path = Path(os.fsdecode(self.temp_dir)) / "empty" + empty_path = self.temp_dir_path / "empty" 
empty_path.mkdir() importer = self.setup_importer(pretend=True, import_dir=empty_path) diff --git a/test/test_library.py b/test/test_library.py index 2d232c88f..35791bad7 100644 --- a/test/test_library.py +++ b/test/test_library.py @@ -194,7 +194,7 @@ class DestinationTest(BeetsTestCase): def create_temp_dir(self, **kwargs): kwargs["prefix"] = "." - super().create_temp_dir(**kwargs) + return super().create_temp_dir(**kwargs) def setUp(self): super().setUp() From d017270196dc8e0e2a4051afa5d05213946cbbbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 27 May 2025 13:03:30 +0100 Subject: [PATCH 39/95] Use pathlib.Path in test_smartplaylist.py --- test/plugins/test_smartplaylist.py | 61 ++++++++++++++---------------- 1 file changed, 28 insertions(+), 33 deletions(-) diff --git a/test/plugins/test_smartplaylist.py b/test/plugins/test_smartplaylist.py index ade745c17..c8e516e8b 100644 --- a/test/plugins/test_smartplaylist.py +++ b/test/plugins/test_smartplaylist.py @@ -13,7 +13,8 @@ # included in all copies or substantial portions of the Software. -from os import fsdecode, path, remove +from os import path, remove +from pathlib import Path from shutil import rmtree from tempfile import mkdtemp from unittest.mock import MagicMock, Mock, PropertyMock @@ -26,7 +27,7 @@ from beets.dbcore.query import FixedFieldSort, MultipleSort, NullSort from beets.library import Album, Item, parse_query_string from beets.test.helper import BeetsTestCase, PluginTestCase from beets.ui import UserError -from beets.util import CHAR_REPLACE, bytestring_path, syspath +from beets.util import CHAR_REPLACE, syspath from beetsplug.smartplaylist import SmartPlaylistPlugin @@ -165,9 +166,9 @@ class SmartPlaylistTest(BeetsTestCase): pl = b"$title-my.m3u", (q, None), (a_q, None) spl._matched_playlists = [pl] - dir = bytestring_path(mkdtemp()) + dir = mkdtemp() config["smartplaylist"]["relative_to"] = False - config["smartplaylist"]["playlist_dir"] = fsdecode(dir) + config["smartplaylist"]["playlist_dir"] = str(dir) try: spl.update_playlists(lib) except Exception: @@ -177,10 +178,9 @@ class SmartPlaylistTest(BeetsTestCase): lib.items.assert_called_once_with(q, None) lib.albums.assert_called_once_with(a_q, None) - m3u_filepath = path.join(dir, b"ta_ga_da-my_playlist_.m3u") - self.assertExists(m3u_filepath) - with open(syspath(m3u_filepath), "rb") as f: - content = f.read() + m3u_filepath = Path(dir, "ta_ga_da-my_playlist_.m3u") + assert m3u_filepath.exists() + content = m3u_filepath.read_bytes() rmtree(syspath(dir)) assert content == b"/tagada.mp3\n" @@ -208,11 +208,11 @@ class SmartPlaylistTest(BeetsTestCase): pl = b"$title-my.m3u", (q, None), (a_q, None) spl._matched_playlists = [pl] - dir = bytestring_path(mkdtemp()) + dir = mkdtemp() config["smartplaylist"]["output"] = "extm3u" config["smartplaylist"]["prefix"] = "http://beets:8337/files" config["smartplaylist"]["relative_to"] = False - config["smartplaylist"]["playlist_dir"] = fsdecode(dir) + config["smartplaylist"]["playlist_dir"] = str(dir) try: spl.update_playlists(lib) except Exception: @@ -222,10 +222,9 @@ class SmartPlaylistTest(BeetsTestCase): lib.items.assert_called_once_with(q, None) lib.albums.assert_called_once_with(a_q, None) - m3u_filepath = path.join(dir, b"ta_ga_da-my_playlist_.m3u") - self.assertExists(m3u_filepath) - with open(syspath(m3u_filepath), "rb") as f: - content = f.read() + m3u_filepath = Path(dir, "ta_ga_da-my_playlist_.m3u") + assert m3u_filepath.exists() + content = m3u_filepath.read_bytes() rmtree(syspath(dir)) 
assert ( @@ -260,10 +259,10 @@ class SmartPlaylistTest(BeetsTestCase): pl = b"$title-my.m3u", (q, None), (a_q, None) spl._matched_playlists = [pl] - dir = bytestring_path(mkdtemp()) + dir = mkdtemp() config["smartplaylist"]["output"] = "extm3u" config["smartplaylist"]["relative_to"] = False - config["smartplaylist"]["playlist_dir"] = fsdecode(dir) + config["smartplaylist"]["playlist_dir"] = str(dir) config["smartplaylist"]["fields"] = ["id", "genre"] try: spl.update_playlists(lib) @@ -274,10 +273,9 @@ class SmartPlaylistTest(BeetsTestCase): lib.items.assert_called_once_with(q, None) lib.albums.assert_called_once_with(a_q, None) - m3u_filepath = path.join(dir, b"ta_ga_da-my_playlist_.m3u") - self.assertExists(m3u_filepath) - with open(syspath(m3u_filepath), "rb") as f: - content = f.read() + m3u_filepath = Path(dir, "ta_ga_da-my_playlist_.m3u") + assert m3u_filepath.exists() + content = m3u_filepath.read_bytes() rmtree(syspath(dir)) assert ( @@ -307,10 +305,10 @@ class SmartPlaylistTest(BeetsTestCase): pl = b"$title-my.m3u", (q, None), (a_q, None) spl._matched_playlists = [pl] - dir = bytestring_path(mkdtemp()) + dir = mkdtemp() tpl = "http://beets:8337/item/$id/file" config["smartplaylist"]["uri_format"] = tpl - config["smartplaylist"]["playlist_dir"] = fsdecode(dir) + config["smartplaylist"]["playlist_dir"] = dir # The following options should be ignored when uri_format is set config["smartplaylist"]["relative_to"] = "/data" config["smartplaylist"]["prefix"] = "/prefix" @@ -324,10 +322,9 @@ class SmartPlaylistTest(BeetsTestCase): lib.items.assert_called_once_with(q, None) lib.albums.assert_called_once_with(a_q, None) - m3u_filepath = path.join(dir, b"ta_ga_da-my_playlist_.m3u") - self.assertExists(m3u_filepath) - with open(syspath(m3u_filepath), "rb") as f: - content = f.read() + m3u_filepath = Path(dir, "ta_ga_da-my_playlist_.m3u") + assert m3u_filepath.exists() + content = m3u_filepath.read_bytes() rmtree(syspath(dir)) assert content == b"http://beets:8337/item/3/file\n" @@ -346,22 +343,20 @@ class SmartPlaylistCLITest(PluginTestCase): {"name": "all.m3u", "query": ""}, ] ) - config["smartplaylist"]["playlist_dir"].set(fsdecode(self.temp_dir)) + config["smartplaylist"]["playlist_dir"].set(str(self.temp_dir_path)) def test_splupdate(self): with pytest.raises(UserError): self.run_with_output("splupdate", "tagada") self.run_with_output("splupdate", "my_playlist") - m3u_path = path.join(self.temp_dir, b"my_playlist.m3u") - self.assertExists(m3u_path) - with open(syspath(m3u_path), "rb") as f: - assert f.read() == self.item.path + b"\n" + m3u_path = self.temp_dir_path / "my_playlist.m3u" + assert m3u_path.exists() + assert m3u_path.read_bytes() == self.item.path + b"\n" remove(syspath(m3u_path)) self.run_with_output("splupdate", "my_playlist.m3u") - with open(syspath(m3u_path), "rb") as f: - assert f.read() == self.item.path + b"\n" + assert m3u_path.read_bytes() == self.item.path + b"\n" remove(syspath(m3u_path)) self.run_with_output("splupdate") From e40c7fd71cfc9a32d6d926a253aa20eee0ca7c9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 1 Jun 2025 08:17:24 +0100 Subject: [PATCH 40/95] Introduce Album.art_filepath to simplify existence checks --- beets/library/models.py | 17 ++++++++++---- test/plugins/test_art.py | 11 ++++----- test/plugins/test_embedart.py | 6 ++--- test/test_files.py | 44 ++++++++++++++++++----------------- test/test_importer.py | 7 +++--- test/test_ui.py | 6 ++--- 6 files changed, 49 insertions(+), 42 deletions(-) diff --git 
a/beets/library/models.py b/beets/library/models.py index efa0f9694..68c80b934 100644 --- a/beets/library/models.py +++ b/beets/library/models.py @@ -45,6 +45,11 @@ class LibModel(dbcore.Model["Library"]): def writable_media_fields(cls) -> set[str]: return set(MediaFile.fields()) & cls._fields.keys() + @property + def filepath(self) -> Path: + """The path to the entity as pathlib.Path.""" + return Path(os.fsdecode(self.path)) + def _template_funcs(self): funcs = DefaultTemplateFunctions(self, self._db).functions() funcs.update(plugins.template_funcs()) @@ -207,6 +212,8 @@ class Album(LibModel): Reflects the library's "albums" table, including album art. """ + artpath: bytes + _table = "albums" _flex_table = "album_attributes" _always_dirty = True @@ -331,6 +338,11 @@ class Album(LibModel): f"ON {cls._table}.id = {cls._relation._table}.album_id" ) + @property + def art_filepath(self) -> Path | None: + """The path to album's cover picture as pathlib.Path.""" + return Path(os.fsdecode(self.artpath)) if self.artpath else None + @classmethod def _getters(cls): # In addition to plugin-provided computed fields, also expose @@ -748,11 +760,6 @@ class Item(LibModel): f"ON {cls._table}.album_id = {cls._relation._table}.id" ) - @property - def filepath(self) -> Path: - """The path to the item's file as pathlib.Path.""" - return Path(os.fsdecode(self.path)) - @property def _cached_album(self): """The Album object that this item belongs to, if any, or diff --git a/test/plugins/test_art.py b/test/plugins/test_art.py index 6577b54fc..9f5817108 100644 --- a/test/plugins/test_art.py +++ b/test/plugins/test_art.py @@ -19,6 +19,7 @@ from __future__ import annotations import os import shutil import unittest +from pathlib import Path from typing import TYPE_CHECKING from unittest.mock import patch @@ -804,12 +805,10 @@ class ArtImporterTest(UseThePlugin): self.plugin.fetch_art(self.session, self.task) self.plugin.assign_art(self.session, self.task) - artpath = self.lib.albums()[0].artpath + artpath = self.lib.albums()[0].art_filepath if should_exist: - assert artpath == os.path.join( - os.path.dirname(self.i.path), b"cover.jpg" - ) - self.assertExists(artpath) + assert artpath == self.i.filepath.parent / "cover.jpg" + assert artpath.exists() else: assert artpath is None return artpath @@ -861,7 +860,7 @@ class ArtImporterTest(UseThePlugin): self.plugin.batch_fetch_art( self.lib, self.lib.albums(), force=False, quiet=False ) - self.assertExists(self.album.artpath) + assert self.album.art_filepath.exists() class ArtForAlbumTest(UseThePlugin): diff --git a/test/plugins/test_embedart.py b/test/plugins/test_embedart.py index 2cada1d5b..1b8528cb7 100644 --- a/test/plugins/test_embedart.py +++ b/test/plugins/test_embedart.py @@ -203,23 +203,21 @@ class EmbedartCliTest(IOMixin, PluginMixin, FetchImageHelper, BeetsTestCase): resource_path = os.path.join(_common.RSRC, b"image.mp3") album = self.add_album_fixture() trackpath = album.items()[0].path - albumpath = album.path shutil.copy(syspath(resource_path), syspath(trackpath)) self.run_command("extractart", "-n", "extracted") - self.assertExists(os.path.join(albumpath, b"extracted.png")) + self.assertExists(album.filepath / "extracted.png") def test_extracted_extension(self): resource_path = os.path.join(_common.RSRC, b"image-jpeg.mp3") album = self.add_album_fixture() trackpath = album.items()[0].path - albumpath = album.path shutil.copy(syspath(resource_path), syspath(trackpath)) self.run_command("extractart", "-n", "extracted") - 
self.assertExists(os.path.join(albumpath, b"extracted.jpg")) + self.assertExists(album.filepath / "extracted.jpg") def test_clear_art_with_yes_input(self): self._setup_data() diff --git a/test/test_files.py b/test/test_files.py index 8be94f328..55865f4d4 100644 --- a/test/test_files.py +++ b/test/test_files.py @@ -19,6 +19,7 @@ import shutil import stat import unittest from os.path import join +from pathlib import Path import pytest @@ -314,9 +315,10 @@ class ArtFileTest(BeetsTestCase): # Make an album. self.ai = self.lib.add_album((self.i,)) # Make an art file too. - self.art = self.lib.get_album(self.i).art_destination("something.jpg") - touch(self.art) - self.ai.artpath = self.art + art_bytes = self.lib.get_album(self.i).art_destination("something.jpg") + self.art = Path(os.fsdecode(art_bytes)) + self.art.touch() + self.ai.artpath = art_bytes self.ai.store() # Alternate destination dir. self.otherdir = os.path.join(self.temp_dir, b"testotherdir") @@ -345,10 +347,10 @@ class ArtFileTest(BeetsTestCase): self.i.load() # Art should be in new directory. - self.assertNotExists(self.art) - newart = self.lib.get_album(self.i).artpath - self.assertExists(newart) - assert b"testotherdir" in newart + assert not self.art.exists() + newart = self.lib.get_album(self.i).art_filepath + assert newart.exists() + assert "testotherdir" in str(newart) def test_setart_copies_image(self): util.remove(self.art) @@ -363,7 +365,7 @@ class ArtFileTest(BeetsTestCase): assert ai.artpath is None ai.set_art(newart) - self.assertExists(ai.artpath) + assert ai.art_filepath.exists() def test_setart_to_existing_art_works(self): util.remove(self.art) @@ -380,7 +382,7 @@ class ArtFileTest(BeetsTestCase): # Set the art again. ai.set_art(ai.artpath) - self.assertExists(ai.artpath) + assert ai.art_filepath.exists() def test_setart_to_existing_but_unset_art_works(self): newart = os.path.join(self.libdir, b"newart.jpg") @@ -397,7 +399,7 @@ class ArtFileTest(BeetsTestCase): # Set the art again. 
ai.set_art(artdest) - self.assertExists(ai.artpath) + assert ai.art_filepath.exists() def test_setart_to_conflicting_file_gets_new_path(self): newart = os.path.join(self.libdir, b"newart.jpg") @@ -442,34 +444,34 @@ class ArtFileTest(BeetsTestCase): os.chmod(syspath(ai.artpath), 0o777) def test_move_last_file_moves_albumart(self): - oldartpath = self.lib.albums()[0].artpath - self.assertExists(oldartpath) + oldartpath = self.lib.albums()[0].art_filepath + assert oldartpath.exists() self.ai.album = "different_album" self.ai.store() self.ai.items()[0].move() - artpath = self.lib.albums()[0].artpath - assert b"different_album" in artpath - self.assertExists(artpath) - self.assertNotExists(oldartpath) + artpath = self.lib.albums()[0].art_filepath + assert "different_album" in str(artpath) + assert artpath.exists() + assert not oldartpath.exists() def test_move_not_last_file_does_not_move_albumart(self): i2 = item() i2.albumid = self.ai.id self.lib.add(i2) - oldartpath = self.lib.albums()[0].artpath - self.assertExists(oldartpath) + oldartpath = self.lib.albums()[0].art_filepath + assert oldartpath.exists() self.i.album = "different_album" self.i.album_id = None # detach from album self.i.move() - artpath = self.lib.albums()[0].artpath - assert b"different_album" not in artpath + artpath = self.lib.albums()[0].art_filepath + assert "different_album" not in str(artpath) assert artpath == oldartpath - self.assertExists(oldartpath) + assert oldartpath.exists() class RemoveTest(BeetsTestCase): diff --git a/test/test_importer.py b/test/test_importer.py index 3e362a179..c23b56d7a 100644 --- a/test/test_importer.py +++ b/test/test_importer.py @@ -23,6 +23,7 @@ import sys import unicodedata import unittest from io import StringIO +from pathlib import Path from tarfile import TarFile from tempfile import mkstemp from unittest.mock import Mock, patch @@ -1566,14 +1567,14 @@ class ReimportTest(AutotagImportTestCase): replaced_album = self._album() replaced_album.set_art(art_source) replaced_album.store() - old_artpath = replaced_album.artpath + old_artpath = replaced_album.art_filepath self.importer.run() new_album = self._album() new_artpath = new_album.art_destination(art_source) assert new_album.artpath == new_artpath - self.assertExists(new_artpath) + assert new_album.art_filepath.exists() if new_artpath != old_artpath: - self.assertNotExists(old_artpath) + assert not old_artpath.exists() def test_reimported_album_has_new_flexattr(self): self._setup_session() diff --git a/test/test_ui.py b/test/test_ui.py index 8c93a83ea..7a394a3d9 100644 --- a/test/test_ui.py +++ b/test/test_ui.py @@ -601,12 +601,12 @@ class UpdateTest(IOMixin, BeetsTestCase): assert not self.lib.albums() def test_delete_removes_album_art(self): - artpath = self.album.artpath - self.assertExists(artpath) + art_filepath = self.album.art_filepath + assert art_filepath.exists() util.remove(self.i.path) util.remove(self.i2.path) self._update() - self.assertNotExists(artpath) + assert not art_filepath.exists() def test_modified_metadata_detected(self): mf = MediaFile(syspath(self.i.path)) From c706f62fb218591b0467372ea429b26fab729247 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 1 Jun 2025 08:26:01 +0100 Subject: [PATCH 41/95] Replace assertExists in test_files --- test/test_files.py | 165 +++++++++++++++++++++------------------------ 1 file changed, 78 insertions(+), 87 deletions(-) diff --git a/test/test_files.py b/test/test_files.py index 55865f4d4..266e4f4d0 100644 --- a/test/test_files.py +++ 
b/test/test_files.py @@ -28,7 +28,7 @@ from beets import util from beets.test import _common from beets.test._common import item, touch from beets.test.helper import NEEDS_REFLINK, BeetsTestCase -from beets.util import MoveOperation, bytestring_path, syspath +from beets.util import MoveOperation, syspath class MoveTest(BeetsTestCase): @@ -36,11 +36,8 @@ class MoveTest(BeetsTestCase): super().setUp() # make a temporary file - self.path = join(self.temp_dir, b"temp.mp3") - shutil.copy( - syspath(join(_common.RSRC, b"full.mp3")), - syspath(self.path), - ) + self.path = self.temp_dir_path / "temp.mp3" + shutil.copy(self.resource_path, self.path) # add it to a temporary library self.i = beets.library.Item.from_path(self.path) @@ -53,57 +50,57 @@ class MoveTest(BeetsTestCase): self.i.artist = "one" self.i.album = "two" self.i.title = "three" - self.dest = join(self.libdir, b"one", b"two", b"three.mp3") + self.dest = self.lib_path / "one" / "two" / "three.mp3" - self.otherdir = join(self.temp_dir, b"testotherdir") + self.otherdir = self.temp_dir_path / "testotherdir" def test_move_arrives(self): self.i.move() - self.assertExists(self.dest) + assert self.dest.exists() def test_move_to_custom_dir(self): - self.i.move(basedir=self.otherdir) - self.assertExists(join(self.otherdir, b"one", b"two", b"three.mp3")) + self.i.move(basedir=os.fsencode(self.otherdir)) + assert (self.otherdir / "one" / "two" / "three.mp3").exists() def test_move_departs(self): self.i.move() - self.assertNotExists(self.path) + assert not self.path.exists() def test_move_in_lib_prunes_empty_dir(self): self.i.move() - old_path = self.i.path - self.assertExists(old_path) + old_path = self.i.filepath + assert old_path.exists() self.i.artist = "newArtist" self.i.move() - self.assertNotExists(old_path) - self.assertNotExists(os.path.dirname(old_path)) + assert not old_path.exists() + assert not old_path.parent.exists() def test_copy_arrives(self): self.i.move(operation=MoveOperation.COPY) - self.assertExists(self.dest) + assert self.dest.exists() def test_copy_does_not_depart(self): self.i.move(operation=MoveOperation.COPY) - self.assertExists(self.path) + assert self.path.exists() def test_reflink_arrives(self): self.i.move(operation=MoveOperation.REFLINK_AUTO) - self.assertExists(self.dest) + assert self.dest.exists() def test_reflink_does_not_depart(self): self.i.move(operation=MoveOperation.REFLINK_AUTO) - self.assertExists(self.path) + assert self.path.exists() @NEEDS_REFLINK def test_force_reflink_arrives(self): self.i.move(operation=MoveOperation.REFLINK) - self.assertExists(self.dest) + assert self.dest.exists() @NEEDS_REFLINK def test_force_reflink_does_not_depart(self): self.i.move(operation=MoveOperation.REFLINK) - self.assertExists(self.path) + assert self.path.exists() def test_move_changes_path(self): self.i.move() @@ -165,14 +162,14 @@ class MoveTest(BeetsTestCase): @unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks") def test_link_arrives(self): self.i.move(operation=MoveOperation.LINK) - self.assertExists(self.dest) + assert self.dest.exists() assert os.path.islink(syspath(self.dest)) - assert bytestring_path(os.readlink(syspath(self.dest))) == self.path + assert self.dest.resolve() == self.path @unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks") def test_link_does_not_depart(self): self.i.move(operation=MoveOperation.LINK) - self.assertExists(self.path) + assert self.path.exists() @unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks") def test_link_changes_path(self): @@ -182,7 +179,7 @@ class 
MoveTest(BeetsTestCase): @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks") def test_hardlink_arrives(self): self.i.move(operation=MoveOperation.HARDLINK) - self.assertExists(self.dest) + assert self.dest.exists() s1 = os.stat(syspath(self.path)) s2 = os.stat(syspath(self.dest)) assert (s1[stat.ST_INO], s1[stat.ST_DEV]) == ( @@ -193,7 +190,7 @@ class MoveTest(BeetsTestCase): @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks") def test_hardlink_does_not_depart(self): self.i.move(operation=MoveOperation.HARDLINK) - self.assertExists(self.path) + assert self.path.exists() @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks") def test_hardlink_changes_path(self): @@ -265,24 +262,24 @@ class AlbumFileTest(BeetsTestCase): assert b"newAlbumName" in self.i.path def test_albuminfo_move_moves_file(self): - oldpath = self.i.path + oldpath = self.i.filepath self.ai.album = "newAlbumName" self.ai.move() self.ai.store() self.i.load() - self.assertNotExists(oldpath) - self.assertExists(self.i.path) + assert not oldpath.exists() + assert self.i.filepath.exists() def test_albuminfo_move_copies_file(self): - oldpath = self.i.path + oldpath = self.i.filepath self.ai.album = "newAlbumName" self.ai.move(operation=MoveOperation.COPY) self.ai.store() self.i.load() - self.assertExists(oldpath) - self.assertExists(self.i.path) + assert oldpath.exists() + assert self.i.filepath.exists() @NEEDS_REFLINK def test_albuminfo_move_reflinks_file(self): @@ -324,21 +321,21 @@ class ArtFileTest(BeetsTestCase): self.otherdir = os.path.join(self.temp_dir, b"testotherdir") def test_art_deleted_when_items_deleted(self): - self.assertExists(self.art) + assert self.art.exists() self.ai.remove(True) - self.assertNotExists(self.art) + assert not self.art.exists() def test_art_moves_with_album(self): - self.assertExists(self.art) + assert self.art.exists() oldpath = self.i.path self.ai.album = "newAlbum" self.ai.move() self.i.load() assert self.i.path != oldpath - self.assertNotExists(self.art) + assert not self.art.exists() newart = self.lib.get_album(self.i).art_destination(self.art) - self.assertExists(newart) + assert Path(os.fsdecode(newart)).exists() def test_art_moves_with_album_to_custom_dir(self): # Move the album to another directory. 
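# Aside (not part of the patch): a minimal, runnable sketch of the conversion
# pattern these hunks apply, assuming the legacy helpers may still hand back
# bytes paths. A bytes path is decoded with os.fsdecode(), wrapped in
# pathlib.Path, and checked with plain asserts in place of assertExists /
# assertNotExists. The file name below is invented for illustration only.
import os
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    legacy_path = os.path.join(os.fsencode(tmp), b"three.mp3")  # old-style bytes path
    filepath = Path(os.fsdecode(legacy_path))  # new-style pathlib handle
    filepath.touch()

    assert filepath.exists()          # replaces self.assertExists(path)
    assert filepath.parent.exists()   # directory-pruning checks use .parent
    filepath.unlink()
    assert not filepath.exists()      # replaces self.assertNotExists(path)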
@@ -488,37 +485,32 @@ class RemoveTest(BeetsTestCase): self.ai = self.lib.add_album((self.i,)) def test_removing_last_item_prunes_empty_dir(self): - parent = os.path.dirname(self.i.path) - self.assertExists(parent) + assert self.i.filepath.parent.exists() self.i.remove(True) - self.assertNotExists(parent) + assert not self.i.filepath.parent.exists() def test_removing_last_item_preserves_nonempty_dir(self): - parent = os.path.dirname(self.i.path) - touch(os.path.join(parent, b"dummy.txt")) + (self.i.filepath.parent / "dummy.txt").touch() self.i.remove(True) - self.assertExists(parent) + assert self.i.filepath.parent.exists() def test_removing_last_item_prunes_dir_with_blacklisted_file(self): - parent = os.path.dirname(self.i.path) - touch(os.path.join(parent, b".DS_Store")) + (self.i.filepath.parent / ".DS_Store").touch() self.i.remove(True) - self.assertNotExists(parent) + assert not self.i.filepath.parent.exists() def test_removing_without_delete_leaves_file(self): - path = self.i.path self.i.remove(False) - self.assertExists(path) + assert self.i.filepath.parent.exists() def test_removing_last_item_preserves_library_dir(self): self.i.remove(True) - self.assertExists(self.libdir) + assert self.lib_path.exists() def test_removing_item_outside_of_library_deletes_nothing(self): self.lib.directory = os.path.join(self.temp_dir, b"xxx") - parent = os.path.dirname(self.i.path) self.i.remove(True) - self.assertExists(parent) + assert self.i.filepath.parent.exists() def test_removing_last_item_in_album_with_albumart_prunes_dir(self): artfile = os.path.join(self.temp_dir, b"testart.jpg") @@ -526,55 +518,54 @@ class RemoveTest(BeetsTestCase): self.ai.set_art(artfile) self.ai.store() - parent = os.path.dirname(self.i.path) self.i.remove(True) - self.assertNotExists(parent) + assert not self.i.filepath.parent.exists() -# Tests that we can "delete" nonexistent files. -class SoftRemoveTest(BeetsTestCase): +class FilePathTestCase(BeetsTestCase): def setUp(self): super().setUp() - self.path = os.path.join(self.temp_dir, b"testfile") - touch(self.path) + self.path = self.temp_dir_path / "testfile" + self.path.touch() + +# Tests that we can "delete" nonexistent files. 
+class SoftRemoveTest(FilePathTestCase): def test_soft_remove_deletes_file(self): util.remove(self.path, True) - self.assertNotExists(self.path) + assert not self.path.exists() def test_soft_remove_silent_on_no_file(self): try: - util.remove(self.path + b"XXX", True) + util.remove(self.path / "XXX", True) except OSError: self.fail("OSError when removing path") -class SafeMoveCopyTest(BeetsTestCase): +class SafeMoveCopyTest(FilePathTestCase): def setUp(self): super().setUp() - self.path = os.path.join(self.temp_dir, b"testfile") - touch(self.path) - self.otherpath = os.path.join(self.temp_dir, b"testfile2") - touch(self.otherpath) - self.dest = self.path + b".dest" + self.otherpath = self.temp_dir_path / "testfile2" + self.otherpath.touch() + self.dest = Path(f"{self.path}.dest") def test_successful_move(self): util.move(self.path, self.dest) - self.assertExists(self.dest) - self.assertNotExists(self.path) + assert self.dest.exists() + assert not self.path.exists() def test_successful_copy(self): util.copy(self.path, self.dest) - self.assertExists(self.dest) - self.assertExists(self.path) + assert self.dest.exists() + assert self.path.exists() @NEEDS_REFLINK def test_successful_reflink(self): util.reflink(self.path, self.dest) - self.assertExists(self.dest) - self.assertExists(self.path) + assert self.dest.exists() + assert self.path.exists() def test_unsuccessful_move(self): with pytest.raises(util.FilesystemError): @@ -590,31 +581,31 @@ class SafeMoveCopyTest(BeetsTestCase): def test_self_move(self): util.move(self.path, self.path) - self.assertExists(self.path) + assert self.path.exists() def test_self_copy(self): util.copy(self.path, self.path) - self.assertExists(self.path) + assert self.path.exists() class PruneTest(BeetsTestCase): def setUp(self): super().setUp() - self.base = os.path.join(self.temp_dir, b"testdir") - os.mkdir(syspath(self.base)) - self.sub = os.path.join(self.base, b"subdir") - os.mkdir(syspath(self.sub)) + self.base = self.temp_dir_path / "testdir" + self.base.mkdir() + self.sub = self.base / "subdir" + self.sub.mkdir() def test_prune_existent_directory(self): util.prune_dirs(self.sub, self.base) - self.assertExists(self.base) - self.assertNotExists(self.sub) + assert self.base.exists() + assert not self.sub.exists() def test_prune_nonexistent_directory(self): - util.prune_dirs(os.path.join(self.sub, b"another"), self.base) - self.assertExists(self.base) - self.assertNotExists(self.sub) + util.prune_dirs(self.sub / "another", self.base) + assert self.base.exists() + assert not self.sub.exists() class WalkTest(BeetsTestCase): @@ -681,11 +672,11 @@ class UniquePathTest(BeetsTestCase): class MkDirAllTest(BeetsTestCase): def test_parent_exists(self): - path = os.path.join(self.temp_dir, b"foo", b"bar", b"baz", b"qux.mp3") + path = self.temp_dir_path / "foo" / "bar" / "baz" / "qux.mp3" util.mkdirall(path) - self.assertIsDir(os.path.join(self.temp_dir, b"foo", b"bar", b"baz")) + self.assertIsDir(self.temp_dir_path / "foo" / "bar" / "baz") def test_child_does_not_exist(self): - path = os.path.join(self.temp_dir, b"foo", b"bar", b"baz", b"qux.mp3") + path = self.temp_dir_path / "foo" / "bar" / "baz" / "qux.mp3" util.mkdirall(path) - self.assertNotExists(path) + assert not path.exists() From 31dbd512221f6f787304bed15a7e8dec6ce14a6c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 1 Jun 2025 10:05:09 +0100 Subject: [PATCH 42/95] Replace assertExists and assertNotExist --- beets/test/_common.py | 21 +++------ beets/test/helper.py | 23 +++++++--- 
beets/ui/commands.py | 10 ++++- test/plugins/test_art.py | 18 ++++---- test/plugins/test_convert.py | 11 +++-- test/plugins/test_embedart.py | 5 ++- test/test_art_resize.py | 9 ++-- test/test_importer.py | 85 +++++++++++++++++------------------ test/test_ui.py | 64 +++++++++++++------------- 9 files changed, 126 insertions(+), 120 deletions(-) diff --git a/beets/test/_common.py b/beets/test/_common.py index da81a587c..7c7defb02 100644 --- a/beets/test/_common.py +++ b/beets/test/_common.py @@ -18,6 +18,7 @@ import os import sys import unittest from contextlib import contextmanager +from pathlib import Path import beets import beets.library @@ -114,23 +115,15 @@ def import_session(lib=None, loghandler=None, paths=[], query=[], cli=False): class Assertions: """A mixin with additional unit test assertions.""" - def assertExists(self, path): - assert os.path.exists(syspath(path)), f"file does not exist: {path!r}" - - def assertNotExists(self, path): - assert not os.path.exists(syspath(path)), f"file exists: {path!r}" - def assertIsFile(self, path): - self.assertExists(path) - assert os.path.isfile(syspath(path)), ( - "path exists, but is not a regular file: {!r}".format(path) - ) + path = Path(os.fsdecode(path)) + assert path.exists() + assert path.is_file() def assertIsDir(self, path): - self.assertExists(path) - assert os.path.isdir(syspath(path)), ( - "path exists, but is not a directory: {!r}".format(path) - ) + path = Path(os.fsdecode(path)) + assert path.exists() + assert path.is_dir() def assert_equal_path(self, a, b): """Check that two paths are equal.""" diff --git a/beets/test/helper.py b/beets/test/helper.py index b6bd23f5c..6027aaede 100644 --- a/beets/test/helper.py +++ b/beets/test/helper.py @@ -184,6 +184,8 @@ class TestHelper(_common.Assertions, ConfigMixin): fixtures. """ + resource_path = Path(os.fsdecode(_common.RSRC)) / "full.mp3" + db_on_disk: ClassVar[bool] = False @cached_property @@ -194,6 +196,16 @@ class TestHelper(_common.Assertions, ConfigMixin): def temp_dir(self) -> bytes: return util.bytestring_path(self.temp_dir_path) + @cached_property + def lib_path(self) -> Path: + lib_path = self.temp_dir_path / "libdir" + lib_path.mkdir(exist_ok=True) + return lib_path + + @cached_property + def libdir(self) -> bytes: + return bytestring_path(self.lib_path) + # TODO automate teardown through hook registration def setup_beets(self): @@ -226,9 +238,7 @@ class TestHelper(_common.Assertions, ConfigMixin): ) self.env_patcher.start() - self.libdir = os.path.join(self.temp_dir, b"libdir") - os.mkdir(syspath(self.libdir)) - self.config["directory"] = os.fsdecode(self.libdir) + self.config["directory"] = str(self.lib_path) if self.db_on_disk: dbpath = util.bytestring_path(self.config["library"].as_filename()) @@ -527,7 +537,6 @@ class ImportHelper(TestHelper): autotagging library and several assertions for the library. """ - resource_path = syspath(os.path.join(_common.RSRC, b"full.mp3")) default_import_config = { "autotag": True, "copy": True, @@ -612,7 +621,7 @@ class ImportHelper(TestHelper): ] def prepare_albums_for_import(self, count: int = 1) -> None: - album_dirs = Path(os.fsdecode(self.import_dir)).glob("album_*") + album_dirs = self.import_path.glob("album_*") base_idx = int(str(max(album_dirs, default="0")).split("_")[-1]) + 1 for album_id in range(base_idx, count + base_idx): @@ -640,13 +649,13 @@ class ImportHelper(TestHelper): """Join the ``segments`` and assert that this path exists in the library directory. 
""" - self.assertExists(os.path.join(self.libdir, *segments)) + assert self.lib_path.joinpath(*segments).exists() def assert_file_not_in_lib(self, *segments): """Join the ``segments`` and assert that this path does not exist in the library directory. """ - self.assertNotExists(os.path.join(self.libdir, *segments)) + assert not self.lib_path.joinpath(*segments).exists() def assert_lib_dir_empty(self): assert not os.listdir(syspath(self.libdir)) diff --git a/beets/ui/commands.py b/beets/ui/commands.py index 25af95646..7b22c2462 100755 --- a/beets/ui/commands.py +++ b/beets/ui/commands.py @@ -2122,12 +2122,20 @@ default_commands.append(modify_cmd) def move_items( - lib, dest, query, copy, album, pretend, confirm=False, export=False + lib, + dest_path: util.PathLike, + query, + copy, + album, + pretend, + confirm=False, + export=False, ): """Moves or copies items to a new base directory, given by dest. If dest is None, then the library's base directory is used, making the command "consolidate" files. """ + dest = os.fsencode(dest_path) if dest_path else dest_path items, albums = _do_query(lib, query, album, False) objs = albums if album else items num_objs = len(objs) diff --git a/test/plugins/test_art.py b/test/plugins/test_art.py index 9f5817108..45effa9b9 100644 --- a/test/plugins/test_art.py +++ b/test/plugins/test_art.py @@ -245,13 +245,13 @@ class FetchImageTest(FetchImageTestCase): self.mock_response(self.URL, "image/png") self.source.fetch_image(self.candidate, self.settings) assert os.path.splitext(self.candidate.path)[1] == b".png" - self.assertExists(self.candidate.path) + assert Path(os.fsdecode(self.candidate.path)).exists() def test_does_not_rely_on_server_content_type(self): self.mock_response(self.URL, "image/jpeg", "image/png") self.source.fetch_image(self.candidate, self.settings) assert os.path.splitext(self.candidate.path)[1] == b".png" - self.assertExists(self.candidate.path) + assert Path(os.fsdecode(self.candidate.path)).exists() class FSArtTest(UseThePlugin): @@ -749,8 +749,8 @@ class ArtImporterTest(UseThePlugin): super().setUp() # Mock the album art fetcher to always return our test file. 
- self.art_file = os.path.join(self.temp_dir, b"tmpcover.jpg") - _common.touch(self.art_file) + self.art_file = self.temp_dir_path / "tmpcover.jpg" + self.art_file.touch() self.old_afa = self.plugin.art_for_album self.afa_response = fetchart.Candidate( logger, @@ -827,20 +827,20 @@ class ArtImporterTest(UseThePlugin): def test_leave_original_file_in_place(self): self._fetch_art(True) - self.assertExists(self.art_file) + assert self.art_file.exists() def test_delete_original_file(self): prev_move = config["import"]["move"].get() try: config["import"]["move"] = True self._fetch_art(True) - self.assertNotExists(self.art_file) + assert not self.art_file.exists() finally: config["import"]["move"] = prev_move def test_do_not_delete_original_if_already_in_place(self): artdest = os.path.join(os.path.dirname(self.i.path), b"cover.jpg") - shutil.copyfile(syspath(self.art_file), syspath(artdest)) + shutil.copyfile(self.art_file, syspath(artdest)) self.afa_response = fetchart.Candidate( logger, source_name="test", @@ -899,7 +899,7 @@ class ArtForAlbumTest(UseThePlugin): super().tearDown() def assertImageIsValidArt(self, image_file, should_exist): - self.assertExists(image_file) + assert Path(os.fsdecode(image_file)).exists() self.image_file = image_file candidate = self.plugin.art_for_album(self.album, [""], True) @@ -907,7 +907,7 @@ class ArtForAlbumTest(UseThePlugin): if should_exist: assert candidate is not None assert candidate.path == self.image_file - self.assertExists(candidate.path) + assert Path(os.fsdecode(candidate.path)).exists() else: assert candidate is None diff --git a/test/plugins/test_convert.py b/test/plugins/test_convert.py index 6dd28337a..c57f0c935 100644 --- a/test/plugins/test_convert.py +++ b/test/plugins/test_convert.py @@ -18,6 +18,7 @@ import os.path import re import sys import unittest +from pathlib import Path import pytest from mediafile import MediaFile @@ -190,8 +191,9 @@ class ConvertCliTest(ConvertTestCase, ConvertCommand): def test_reject_confirmation(self): with control_stdin("n"): self.run_convert() - converted = os.path.join(self.convert_dest, b"converted.mp3") - self.assertNotExists(converted) + assert not ( + Path(os.fsdecode(self.convert_dest)) / "converted.mp3" + ).exists() def test_convert_keep_new(self): assert os.path.splitext(self.item.path)[1] == b".ogg" @@ -231,8 +233,9 @@ class ConvertCliTest(ConvertTestCase, ConvertCommand): def test_pretend(self): self.run_convert("--pretend") - converted = os.path.join(self.convert_dest, b"converted.mp3") - self.assertNotExists(converted) + assert not ( + Path(os.fsdecode(self.convert_dest)) / "converted.mp3" + ).exists() def test_empty_query(self): with capture_log("beets.convert") as logs: diff --git a/test/plugins/test_embedart.py b/test/plugins/test_embedart.py index 1b8528cb7..62b2bb7d1 100644 --- a/test/plugins/test_embedart.py +++ b/test/plugins/test_embedart.py @@ -13,6 +13,7 @@ # included in all copies or substantial portions of the Software. 
+import os import os.path import shutil import tempfile @@ -207,7 +208,7 @@ class EmbedartCliTest(IOMixin, PluginMixin, FetchImageHelper, BeetsTestCase): self.run_command("extractart", "-n", "extracted") - self.assertExists(album.filepath / "extracted.png") + assert (album.filepath / "extracted.png").exists() def test_extracted_extension(self): resource_path = os.path.join(_common.RSRC, b"image-jpeg.mp3") @@ -217,7 +218,7 @@ class EmbedartCliTest(IOMixin, PluginMixin, FetchImageHelper, BeetsTestCase): self.run_command("extractart", "-n", "extracted") - self.assertExists(album.filepath / "extracted.jpg") + assert (album.filepath / "extracted.jpg").exists() def test_clear_art_with_yes_input(self): self._setup_data() diff --git a/test/test_art_resize.py b/test/test_art_resize.py index 8dd4d0e89..34bf810b9 100644 --- a/test/test_art_resize.py +++ b/test/test_art_resize.py @@ -16,6 +16,7 @@ import os import unittest +from pathlib import Path from unittest.mock import patch from beets.test import _common @@ -65,7 +66,7 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, BeetsTestCase): max_filesize=0, ) # check valid path returned - max_filesize hasn't broken resize command - self.assertExists(im_95_qual) + assert Path(os.fsdecode(im_95_qual)).exists() # Attempt a lower filesize with same quality im_a = backend.resize( @@ -74,7 +75,7 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, BeetsTestCase): quality=95, max_filesize=0.9 * os.stat(syspath(im_95_qual)).st_size, ) - self.assertExists(im_a) + assert Path(os.fsdecode(im_a)).exists() # target size was achieved assert ( os.stat(syspath(im_a)).st_size @@ -88,7 +89,7 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, BeetsTestCase): quality=75, max_filesize=0, ) - self.assertExists(im_75_qual) + assert Path(os.fsdecode(im_75_qual)).exists() im_b = backend.resize( 225, @@ -96,7 +97,7 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, BeetsTestCase): quality=95, max_filesize=0.9 * os.stat(syspath(im_75_qual)).st_size, ) - self.assertExists(im_b) + assert Path(os.fsdecode(im_b)).exists() # Check high (initial) quality still gives a smaller filesize assert ( os.stat(syspath(im_b)).st_size diff --git a/test/test_importer.py b/test/test_importer.py index c23b56d7a..4fbcbf9dd 100644 --- a/test/test_importer.py +++ b/test/test_importer.py @@ -83,23 +83,25 @@ class NonAutotaggedImportTest(AsIsImporterMixin, ImportTestCase): def test_import_with_move_deletes_import_files(self): for mediafile in self.import_media: - self.assertExists(mediafile.path) + assert Path(mediafile.path).exists() self.run_asis_importer(move=True) for mediafile in self.import_media: - self.assertNotExists(mediafile.path) + assert not Path(mediafile.path).exists() def test_import_with_move_prunes_directory_empty(self): - self.assertExists(os.path.join(self.import_dir, b"album")) + album_path = self.import_path / "album" + assert album_path.exists() self.run_asis_importer(move=True) - self.assertNotExists(os.path.join(self.import_dir, b"album")) + assert not album_path.exists() def test_import_with_move_prunes_with_extra_clutter(self): self.touch(os.path.join(self.import_dir, b"album", b"alog.log")) config["clutter"] = ["*.log"] - self.assertExists(os.path.join(self.import_dir, b"album")) + album_path = self.import_path / "album" + assert album_path.exists() self.run_asis_importer(move=True) - self.assertNotExists(os.path.join(self.import_dir, b"album")) + assert not album_path.exists() def test_threaded_import_move_arrives(self): self.run_asis_importer(move=True, threaded=True) 
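# Aside (not part of the patch): a self-contained sketch of the hard-link
# identity check that the surrounding importer-test hunks express via pathlib,
# assuming the filesystem supports os.link(). Two names refer to the same file
# when their inode and device numbers match; the file names here are made up.
import os
import stat
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    source = Path(tmp) / "source.mp3"
    source.write_bytes(b"fake audio data")
    linked = Path(tmp) / "linked.mp3"
    os.link(source, linked)  # create a hard link next to the source

    s1, s2 = source.stat(), linked.stat()  # Path.stat() returns os.stat_result
    assert (s1[stat.ST_INO], s1[stat.ST_DEV]) == (s2[stat.ST_INO], s2[stat.ST_DEV])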
@@ -113,22 +115,23 @@ class NonAutotaggedImportTest(AsIsImporterMixin, ImportTestCase): def test_threaded_import_move_deletes_import(self): self.run_asis_importer(move=True, threaded=True) for mediafile in self.import_media: - self.assertNotExists(mediafile.path) + assert not Path(mediafile.path).exists() def test_import_without_delete_retains_files(self): self.run_asis_importer(delete=False) for mediafile in self.import_media: - self.assertExists(mediafile.path) + assert Path(mediafile.path).exists() def test_import_with_delete_removes_files(self): self.run_asis_importer(delete=True) for mediafile in self.import_media: - self.assertNotExists(mediafile.path) + assert not Path(mediafile.path).exists() def test_import_with_delete_prunes_directory_empty(self): - self.assertExists(os.path.join(self.import_dir, b"album")) + album_path = self.import_path / "album" + assert album_path.exists() self.run_asis_importer(delete=True) - self.assertNotExists(os.path.join(self.import_dir, b"album")) + assert not album_path.exists() def test_album_mb_albumartistids(self): self.run_asis_importer() @@ -139,32 +142,24 @@ class NonAutotaggedImportTest(AsIsImporterMixin, ImportTestCase): def test_import_link_arrives(self): self.run_asis_importer(link=True) for mediafile in self.import_media: - filename = os.path.join( - self.libdir, - b"Tag Artist", - b"Tag Album", - util.bytestring_path(f"{mediafile.title}.mp3"), - ) - self.assertExists(filename) - assert os.path.islink(syspath(filename)) - self.assert_equal_path( - util.bytestring_path(os.readlink(syspath(filename))), - mediafile.path, + path = ( + self.lib_path / "Tag Artist" / "Tag Album" / "Tag Track 1.mp3" ) + assert path.exists() + assert path.is_symlink() + self.assert_equal_path(path.resolve(), mediafile.path) @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks") def test_import_hardlink_arrives(self): self.run_asis_importer(hardlink=True) for mediafile in self.import_media: - filename = os.path.join( - self.libdir, - b"Tag Artist", - b"Tag Album", - util.bytestring_path(f"{mediafile.title}.mp3"), + path = ( + self.lib_path / "Tag Artist" / "Tag Album" / "Tag Track 1.mp3" ) - self.assertExists(filename) + assert path.exists() + s1 = os.stat(syspath(mediafile.path)) - s2 = os.stat(syspath(filename)) + s2 = path.stat() assert (s1[stat.ST_INO], s1[stat.ST_DEV]) == ( s2[stat.ST_INO], s2[stat.ST_DEV], @@ -219,10 +214,10 @@ class RmTempTest(BeetsTestCase): zip_path = create_archive(self) archive_task = importer.ArchiveImportTask(zip_path) archive_task.extract() - tmp_path = archive_task.toppath - self.assertExists(tmp_path) + tmp_path = Path(os.fsdecode(archive_task.toppath)) + assert tmp_path.exists() archive_task.finalize(self) - self.assertNotExists(tmp_path) + assert not tmp_path.exists() class ImportZipTest(AsIsImporterMixin, ImportTestCase): @@ -467,22 +462,22 @@ class ImportTest(AutotagImportTestCase): def test_apply_with_move_deletes_import(self): config["import"]["move"] = True - import_file = os.path.join(self.import_dir, b"album", b"track_1.mp3") - self.assertExists(import_file) + track_path = Path(self.import_media[0].path) + assert track_path.exists() self.importer.add_choice(importer.Action.APPLY) self.importer.run() - self.assertNotExists(import_file) + assert not track_path.exists() def test_apply_with_delete_deletes_import(self): config["import"]["delete"] = True - import_file = os.path.join(self.import_dir, b"album", b"track_1.mp3") - self.assertExists(import_file) + track_path = Path(self.import_media[0].path) + assert 
track_path.exists() self.importer.add_choice(importer.Action.APPLY) self.importer.run() - self.assertNotExists(import_file) + assert not track_path.exists() def test_skip_does_not_add_track(self): self.importer.add_choice(importer.Action.SKIP) @@ -835,7 +830,7 @@ class ImportExistingTest(AutotagImportTestCase): self.reimporter = self.setup_importer(move=True) self.reimporter.add_choice(importer.Action.APPLY) self.reimporter.run() - self.assertNotExists(self.import_media[0].path) + assert not Path(self.import_media[0].path).exists() class GroupAlbumsImportTest(AutotagImportTestCase): @@ -1051,12 +1046,12 @@ class ImportDuplicateAlbumTest(PluginMixin, ImportTestCase): def test_remove_duplicate_album(self): item = self.lib.items().get() assert item.title == "t\xeftle 0" - self.assertExists(item.path) + assert item.filepath.exists() self.importer.default_resolution = self.importer.Resolution.REMOVE self.importer.run() - self.assertNotExists(item.path) + assert not item.filepath.exists() assert len(self.lib.albums()) == 1 assert len(self.lib.items()) == 1 item = self.lib.items().get() @@ -1066,7 +1061,7 @@ class ImportDuplicateAlbumTest(PluginMixin, ImportTestCase): config["import"]["autotag"] = False item = self.lib.items().get() assert item.title == "t\xeftle 0" - self.assertExists(item.path) + assert item.filepath.exists() # Imported item has the same artist and album as the one in the # library. @@ -1082,7 +1077,7 @@ class ImportDuplicateAlbumTest(PluginMixin, ImportTestCase): self.importer.default_resolution = self.importer.Resolution.REMOVE self.importer.run() - self.assertExists(item.path) + assert item.filepath.exists() assert len(self.lib.albums()) == 2 assert len(self.lib.items()) == 2 @@ -1169,12 +1164,12 @@ class ImportDuplicateSingletonTest(ImportTestCase): def test_remove_duplicate(self): item = self.lib.items().get() assert item.mb_trackid == "old trackid" - self.assertExists(item.path) + assert item.filepath.exists() self.importer.default_resolution = self.importer.Resolution.REMOVE self.importer.run() - self.assertNotExists(item.path) + assert not item.filepath.exists() assert len(self.lib.items()) == 1 item = self.lib.items().get() assert item.mb_trackid == "new trackid" diff --git a/test/test_ui.py b/test/test_ui.py index 7a394a3d9..fd3686ec2 100644 --- a/test/test_ui.py +++ b/test/test_ui.py @@ -113,8 +113,7 @@ class RemoveTest(IOMixin, BeetsTestCase): super().setUp() # Copy a file into the library. 
- self.item_path = os.path.join(_common.RSRC, b"full.mp3") - self.i = library.Item.from_path(self.item_path) + self.i = library.Item.from_path(self.resource_path) self.lib.add(self.i) self.i.move(operation=MoveOperation.COPY) @@ -123,29 +122,29 @@ class RemoveTest(IOMixin, BeetsTestCase): commands.remove_items(self.lib, "", False, False, False) items = self.lib.items() assert len(list(items)) == 0 - self.assertExists(self.i.path) + assert self.i.filepath.exists() def test_remove_items_with_delete(self): self.io.addinput("y") commands.remove_items(self.lib, "", False, True, False) items = self.lib.items() assert len(list(items)) == 0 - self.assertNotExists(self.i.path) + assert not self.i.filepath.exists() def test_remove_items_with_force_no_delete(self): commands.remove_items(self.lib, "", False, False, True) items = self.lib.items() assert len(list(items)) == 0 - self.assertExists(self.i.path) + assert self.i.filepath.exists() def test_remove_items_with_force_delete(self): commands.remove_items(self.lib, "", False, True, True) items = self.lib.items() assert len(list(items)) == 0 - self.assertNotExists(self.i.path) + assert not self.i.filepath.exists() def test_remove_items_select_with_delete(self): - i2 = library.Item.from_path(self.item_path) + i2 = library.Item.from_path(self.resource_path) self.lib.add(i2) i2.move(operation=MoveOperation.COPY) @@ -443,19 +442,16 @@ class MoveTest(BeetsTestCase): def setUp(self): super().setUp() - self.itempath = os.path.join(self.libdir, b"srcfile") - shutil.copy( - syspath(os.path.join(_common.RSRC, b"full.mp3")), - syspath(self.itempath), - ) + self.initial_item_path = self.lib_path / "srcfile" + shutil.copy(self.resource_path, self.initial_item_path) # Add a file to the library but don't copy it in yet. - self.i = library.Item.from_path(self.itempath) + self.i = library.Item.from_path(self.initial_item_path) self.lib.add(self.i) self.album = self.lib.add_album([self.i]) # Alternate destination directory. 
- self.otherdir = os.path.join(self.temp_dir, b"testotherdir") + self.otherdir = self.temp_dir_path / "testotherdir" def _move( self, @@ -474,71 +470,71 @@ class MoveTest(BeetsTestCase): self._move() self.i.load() assert b"libdir" in self.i.path - self.assertExists(self.i.path) - self.assertNotExists(self.itempath) + assert self.i.filepath.exists() + assert not self.initial_item_path.exists() def test_copy_item(self): self._move(copy=True) self.i.load() assert b"libdir" in self.i.path - self.assertExists(self.i.path) - self.assertExists(self.itempath) + assert self.i.filepath.exists() + assert self.initial_item_path.exists() def test_move_album(self): self._move(album=True) self.i.load() assert b"libdir" in self.i.path - self.assertExists(self.i.path) - self.assertNotExists(self.itempath) + assert self.i.filepath.exists() + assert not self.initial_item_path.exists() def test_copy_album(self): self._move(copy=True, album=True) self.i.load() assert b"libdir" in self.i.path - self.assertExists(self.i.path) - self.assertExists(self.itempath) + assert self.i.filepath.exists() + assert self.initial_item_path.exists() def test_move_item_custom_dir(self): self._move(dest=self.otherdir) self.i.load() assert b"testotherdir" in self.i.path - self.assertExists(self.i.path) - self.assertNotExists(self.itempath) + assert self.i.filepath.exists() + assert not self.initial_item_path.exists() def test_move_album_custom_dir(self): self._move(dest=self.otherdir, album=True) self.i.load() assert b"testotherdir" in self.i.path - self.assertExists(self.i.path) - self.assertNotExists(self.itempath) + assert self.i.filepath.exists() + assert not self.initial_item_path.exists() def test_pretend_move_item(self): self._move(dest=self.otherdir, pretend=True) self.i.load() - assert b"srcfile" in self.i.path + assert self.i.filepath == self.initial_item_path def test_pretend_move_album(self): self._move(album=True, pretend=True) self.i.load() - assert b"srcfile" in self.i.path + assert self.i.filepath == self.initial_item_path def test_export_item_custom_dir(self): self._move(dest=self.otherdir, export=True) self.i.load() - assert self.i.path == self.itempath - self.assertExists(self.otherdir) + assert self.i.filepath == self.initial_item_path + assert self.otherdir.exists() def test_export_album_custom_dir(self): self._move(dest=self.otherdir, album=True, export=True) self.i.load() - assert self.i.path == self.itempath - self.assertExists(self.otherdir) + assert self.i.filepath == self.initial_item_path + assert self.otherdir.exists() def test_pretend_export_item(self): self._move(dest=self.otherdir, pretend=True, export=True) self.i.load() - assert b"srcfile" in self.i.path - self.assertNotExists(self.otherdir) + assert self.i.filepath == self.initial_item_path + assert not self.otherdir.exists() class UpdateTest(IOMixin, BeetsTestCase): From e6d5f7396482300e995f3464cafcb12844b3635d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 1 Jun 2025 10:05:41 +0100 Subject: [PATCH 43/95] Use pathlib.Path in test_convert --- test/plugins/test_convert.py | 68 +++++++++++++----------------------- 1 file changed, 25 insertions(+), 43 deletions(-) diff --git a/test/plugins/test_convert.py b/test/plugins/test_convert.py index c57f0c935..e889ce7bb 100644 --- a/test/plugins/test_convert.py +++ b/test/plugins/test_convert.py @@ -18,7 +18,6 @@ import os.path import re import sys import unittest -from pathlib import Path import pytest from mediafile import MediaFile @@ -33,7 +32,7 @@ from beets.test.helper 
import ( capture_log, control_stdin, ) -from beets.util import bytestring_path, displayable_path +from beets.util import displayable_path from beetsplug import convert @@ -107,7 +106,7 @@ class ImportConvertTest(AsIsImporterMixin, ImportHelper, ConvertTestCase): def test_import_converted(self): self.run_asis_importer() item = self.lib.items().get() - self.assertFileTag(item.path, "convert") + self.assertFileTag(item.filepath, "convert") # FIXME: fails on windows @unittest.skipIf(sys.platform == "win32", "win32") @@ -160,11 +159,10 @@ class ConvertCliTest(ConvertTestCase, ConvertCommand): self.album = self.add_album_fixture(ext="ogg") self.item = self.album.items()[0] - self.convert_dest = bytestring_path( - os.path.join(self.temp_dir, b"convert_dest") - ) + self.convert_dest = self.temp_dir_path / "convert_dest" + self.converted_mp3 = self.convert_dest / "converted.mp3" self.config["convert"] = { - "dest": self.convert_dest, + "dest": str(self.convert_dest), "paths": {"default": "converted"}, "format": "mp3", "formats": { @@ -180,20 +178,16 @@ class ConvertCliTest(ConvertTestCase, ConvertCommand): def test_convert(self): with control_stdin("y"): self.run_convert() - converted = os.path.join(self.convert_dest, b"converted.mp3") - self.assertFileTag(converted, "mp3") + self.assertFileTag(self.converted_mp3, "mp3") def test_convert_with_auto_confirmation(self): self.run_convert("--yes") - converted = os.path.join(self.convert_dest, b"converted.mp3") - self.assertFileTag(converted, "mp3") + self.assertFileTag(self.converted_mp3, "mp3") def test_reject_confirmation(self): with control_stdin("n"): self.run_convert() - assert not ( - Path(os.fsdecode(self.convert_dest)) / "converted.mp3" - ).exists() + assert not self.converted_mp3.exists() def test_convert_keep_new(self): assert os.path.splitext(self.item.path)[1] == b".ogg" @@ -207,8 +201,7 @@ class ConvertCliTest(ConvertTestCase, ConvertCommand): def test_format_option(self): with control_stdin("y"): self.run_convert("--format", "opus") - converted = os.path.join(self.convert_dest, b"converted.ops") - self.assertFileTag(converted, "opus") + self.assertFileTag(self.convert_dest / "converted.ops", "opus") def test_embed_album_art(self): self.config["convert"]["embed"] = True @@ -220,12 +213,11 @@ class ConvertCliTest(ConvertTestCase, ConvertCommand): with control_stdin("y"): self.run_convert() - converted = os.path.join(self.convert_dest, b"converted.mp3") - mediafile = MediaFile(converted) + mediafile = MediaFile(self.converted_mp3) assert mediafile.images[0].data == image_data def test_skip_existing(self): - converted = os.path.join(self.convert_dest, b"converted.mp3") + converted = self.converted_mp3 self.touch(converted, content="XXX") self.run_convert("--yes") with open(converted) as f: @@ -233,9 +225,7 @@ class ConvertCliTest(ConvertTestCase, ConvertCommand): def test_pretend(self): self.run_convert("--pretend") - assert not ( - Path(os.fsdecode(self.convert_dest)) / "converted.mp3" - ).exists() + assert not self.converted_mp3.exists() def test_empty_query(self): with capture_log("beets.convert") as logs: @@ -246,55 +236,47 @@ class ConvertCliTest(ConvertTestCase, ConvertCommand): self.config["convert"]["max_bitrate"] = 5000 with control_stdin("y"): self.run_convert() - converted = os.path.join(self.convert_dest, b"converted.mp3") - self.assertFileTag(converted, "mp3") + self.assertFileTag(self.converted_mp3, "mp3") def test_transcode_when_maxbr_set_low_and_different_formats(self): self.config["convert"]["max_bitrate"] = 5 with 
control_stdin("y"): self.run_convert() - converted = os.path.join(self.convert_dest, b"converted.mp3") - self.assertFileTag(converted, "mp3") + self.assertFileTag(self.converted_mp3, "mp3") def test_transcode_when_maxbr_set_to_none_and_different_formats(self): with control_stdin("y"): self.run_convert() - converted = os.path.join(self.convert_dest, b"converted.mp3") - self.assertFileTag(converted, "mp3") + self.assertFileTag(self.converted_mp3, "mp3") def test_no_transcode_when_maxbr_set_high_and_same_formats(self): self.config["convert"]["max_bitrate"] = 5000 self.config["convert"]["format"] = "ogg" with control_stdin("y"): self.run_convert() - converted = os.path.join(self.convert_dest, b"converted.ogg") - self.assertNoFileTag(converted, "ogg") + self.assertNoFileTag(self.convert_dest / "converted.ogg", "ogg") def test_transcode_when_maxbr_set_low_and_same_formats(self): self.config["convert"]["max_bitrate"] = 5 self.config["convert"]["format"] = "ogg" with control_stdin("y"): self.run_convert() - converted = os.path.join(self.convert_dest, b"converted.ogg") - self.assertFileTag(converted, "ogg") + self.assertFileTag(self.convert_dest / "converted.ogg", "ogg") def test_transcode_when_maxbr_set_to_none_and_same_formats(self): self.config["convert"]["format"] = "ogg" with control_stdin("y"): self.run_convert() - converted = os.path.join(self.convert_dest, b"converted.ogg") - self.assertNoFileTag(converted, "ogg") + self.assertNoFileTag(self.convert_dest / "converted.ogg", "ogg") def test_playlist(self): with control_stdin("y"): self.run_convert("--playlist", "playlist.m3u8") - m3u_created = os.path.join(self.convert_dest, b"playlist.m3u8") - assert os.path.exists(m3u_created) + assert (self.convert_dest / "playlist.m3u8").exists() def test_playlist_pretend(self): self.run_convert("--playlist", "playlist.m3u8", "--pretend") - m3u_created = os.path.join(self.convert_dest, b"playlist.m3u8") - assert not os.path.exists(m3u_created) + assert not (self.convert_dest / "playlist.m3u8").exists() @_common.slow_test() @@ -304,9 +286,9 @@ class NeverConvertLossyFilesTest(ConvertTestCase, ConvertCommand): def setUp(self): super().setUp() - self.convert_dest = os.path.join(self.temp_dir, b"convert_dest") + self.convert_dest = self.temp_dir_path / "convert_dest" self.config["convert"] = { - "dest": self.convert_dest, + "dest": str(self.convert_dest), "paths": {"default": "converted"}, "never_convert_lossy_files": True, "format": "mp3", @@ -319,7 +301,7 @@ class NeverConvertLossyFilesTest(ConvertTestCase, ConvertCommand): [item] = self.add_item_fixtures(ext="flac") with control_stdin("y"): self.run_convert_path(item) - converted = os.path.join(self.convert_dest, b"converted.mp3") + converted = self.convert_dest / "converted.mp3" self.assertFileTag(converted, "mp3") def test_transcode_from_lossy(self): @@ -327,14 +309,14 @@ class NeverConvertLossyFilesTest(ConvertTestCase, ConvertCommand): [item] = self.add_item_fixtures(ext="ogg") with control_stdin("y"): self.run_convert_path(item) - converted = os.path.join(self.convert_dest, b"converted.mp3") + converted = self.convert_dest / "converted.mp3" self.assertFileTag(converted, "mp3") def test_transcode_from_lossy_prevented(self): [item] = self.add_item_fixtures(ext="ogg") with control_stdin("y"): self.run_convert_path(item) - converted = os.path.join(self.convert_dest, b"converted.ogg") + converted = self.convert_dest / "converted.ogg" self.assertNoFileTag(converted, "mp3") From 452644bbf3f9f771f8f70adfd5dfb3af8ec0c862 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Fri, 9 Aug 2024 02:04:41 +0100 Subject: [PATCH 44/95] Remove assert_lib_dir_empty --- beets/test/helper.py | 3 --- test/test_importer.py | 10 ---------- 2 files changed, 13 deletions(-) diff --git a/beets/test/helper.py b/beets/test/helper.py index 6027aaede..7fc25d46b 100644 --- a/beets/test/helper.py +++ b/beets/test/helper.py @@ -657,9 +657,6 @@ class ImportHelper(TestHelper): """ assert not self.lib_path.joinpath(*segments).exists() - def assert_lib_dir_empty(self): - assert not os.listdir(syspath(self.libdir)) - class AsIsImporterMixin: def setUp(self): diff --git a/test/test_importer.py b/test/test_importer.py index 4fbcbf9dd..e0314a496 100644 --- a/test/test_importer.py +++ b/test/test_importer.py @@ -286,8 +286,6 @@ class ImportSingletonTest(AutotagImportTestCase): assert self.lib.albums().get() is None def test_apply_asis_adds_singleton_path(self): - self.assert_lib_dir_empty() - self.importer.add_choice(importer.Action.ASIS) self.importer.run() self.assert_file_in_lib(b"singletons", b"Tag Track 1.mp3") @@ -305,8 +303,6 @@ class ImportSingletonTest(AutotagImportTestCase): assert self.lib.albums().get() is None def test_apply_candidate_adds_singleton_path(self): - self.assert_lib_dir_empty() - self.importer.add_choice(importer.Action.APPLY) self.importer.run() self.assert_file_in_lib(b"singletons", b"Applied Track 1.mp3") @@ -404,8 +400,6 @@ class ImportTest(AutotagImportTestCase): assert self.lib.items().get().title == "Tag Track 1" def test_apply_asis_adds_album_path(self): - self.assert_lib_dir_empty() - self.importer.add_choice(importer.Action.ASIS) self.importer.run() self.assert_file_in_lib(b"Tag Artist", b"Tag Album", b"Tag Track 1.mp3") @@ -425,8 +419,6 @@ class ImportTest(AutotagImportTestCase): assert self.lib.items().get().title == "Applied Track 1" def test_apply_candidate_adds_album_path(self): - self.assert_lib_dir_empty() - self.importer.add_choice(importer.Action.APPLY) self.importer.run() self.assert_file_in_lib( @@ -603,8 +595,6 @@ class ImportTracksTest(AutotagImportTestCase): assert self.lib.albums().get() is None def test_apply_tracks_adds_singleton_path(self): - self.assert_lib_dir_empty() - self.importer.add_choice(importer.Action.TRACKS) self.importer.add_choice(importer.Action.APPLY) self.importer.add_choice(importer.Action.APPLY) From e36e8f1f516614d24b9822770b5333d73cd84be6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 1 Jun 2025 10:06:24 +0100 Subject: [PATCH 45/95] Remove assert_file_in_lib --- beets/test/helper.py | 12 -- test/test_importer.py | 328 +++++++++++++++--------------------------- 2 files changed, 114 insertions(+), 226 deletions(-) diff --git a/beets/test/helper.py b/beets/test/helper.py index 7fc25d46b..98ce411bd 100644 --- a/beets/test/helper.py +++ b/beets/test/helper.py @@ -645,18 +645,6 @@ class ImportHelper(TestHelper): def setup_singleton_importer(self, **kwargs) -> ImportSession: return self.setup_importer(singletons=True, **kwargs) - def assert_file_in_lib(self, *segments): - """Join the ``segments`` and assert that this path exists in the - library directory. - """ - assert self.lib_path.joinpath(*segments).exists() - - def assert_file_not_in_lib(self, *segments): - """Join the ``segments`` and assert that this path does not - exist in the library directory. 
- """ - assert not self.lib_path.joinpath(*segments).exists() - class AsIsImporterMixin: def setUp(self): diff --git a/test/test_importer.py b/test/test_importer.py index e0314a496..521f98f99 100644 --- a/test/test_importer.py +++ b/test/test_importer.py @@ -15,6 +15,8 @@ """Tests for the general importer functionality.""" +from __future__ import annotations + import os import re import shutil @@ -22,6 +24,7 @@ import stat import sys import unicodedata import unittest +from functools import cached_property from io import StringIO from pathlib import Path from tarfile import TarFile @@ -51,87 +54,71 @@ from beets.test.helper import ( from beets.util import bytestring_path, displayable_path, syspath +class PathsMixin: + import_media: list[MediaFile] + + @cached_property + def track_import_path(self) -> Path: + return Path(self.import_media[0].path) + + @cached_property + def album_path(self) -> Path: + return self.track_import_path.parent + + @cached_property + def track_lib_path(self): + return self.lib_path / "Tag Artist" / "Tag Album" / "Tag Track 1.mp3" + + @_common.slow_test() -class NonAutotaggedImportTest(AsIsImporterMixin, ImportTestCase): +class NonAutotaggedImportTest(PathsMixin, AsIsImporterMixin, ImportTestCase): db_on_disk = True def test_album_created_with_track_artist(self): self.run_asis_importer() + albums = self.lib.albums() assert len(albums) == 1 assert albums[0].albumartist == "Tag Artist" def test_import_copy_arrives(self): self.run_asis_importer() - for mediafile in self.import_media: - self.assert_file_in_lib( - b"Tag Artist", - b"Tag Album", - util.bytestring_path(f"{mediafile.title}.mp3"), - ) + + assert self.track_lib_path.exists() def test_threaded_import_copy_arrives(self): config["threaded"] = True self.run_asis_importer() - for mediafile in self.import_media: - self.assert_file_in_lib( - b"Tag Artist", - b"Tag Album", - util.bytestring_path(f"{mediafile.title}.mp3"), - ) + assert self.track_lib_path.exists() def test_import_with_move_deletes_import_files(self): - for mediafile in self.import_media: - assert Path(mediafile.path).exists() - self.run_asis_importer(move=True) - for mediafile in self.import_media: - assert not Path(mediafile.path).exists() - - def test_import_with_move_prunes_directory_empty(self): - album_path = self.import_path / "album" - assert album_path.exists() - self.run_asis_importer(move=True) - assert not album_path.exists() - - def test_import_with_move_prunes_with_extra_clutter(self): - self.touch(os.path.join(self.import_dir, b"album", b"alog.log")) + assert self.album_path.exists() + assert self.track_import_path.exists() + (self.album_path / "alog.log").touch() config["clutter"] = ["*.log"] - album_path = self.import_path / "album" - assert album_path.exists() self.run_asis_importer(move=True) - assert not album_path.exists() + + assert not self.track_import_path.exists() + assert not self.album_path.exists() def test_threaded_import_move_arrives(self): self.run_asis_importer(move=True, threaded=True) - for mediafile in self.import_media: - self.assert_file_in_lib( - b"Tag Artist", - b"Tag Album", - util.bytestring_path(f"{mediafile.title}.mp3"), - ) - def test_threaded_import_move_deletes_import(self): - self.run_asis_importer(move=True, threaded=True) - for mediafile in self.import_media: - assert not Path(mediafile.path).exists() + assert self.track_lib_path.exists() + assert not self.track_import_path.exists() def test_import_without_delete_retains_files(self): self.run_asis_importer(delete=False) - for mediafile in 
self.import_media: - assert Path(mediafile.path).exists() + + assert self.track_import_path.exists() def test_import_with_delete_removes_files(self): self.run_asis_importer(delete=True) - for mediafile in self.import_media: - assert not Path(mediafile.path).exists() - def test_import_with_delete_prunes_directory_empty(self): - album_path = self.import_path / "album" - assert album_path.exists() - self.run_asis_importer(delete=True) - assert not album_path.exists() + assert not self.album_path.exists() + assert not self.track_import_path.exists() def test_album_mb_albumartistids(self): self.run_asis_importer() @@ -141,51 +128,36 @@ class NonAutotaggedImportTest(AsIsImporterMixin, ImportTestCase): @unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks") def test_import_link_arrives(self): self.run_asis_importer(link=True) - for mediafile in self.import_media: - path = ( - self.lib_path / "Tag Artist" / "Tag Album" / "Tag Track 1.mp3" - ) - assert path.exists() - assert path.is_symlink() - self.assert_equal_path(path.resolve(), mediafile.path) + + assert self.track_lib_path.exists() + assert self.track_lib_path.is_symlink() + self.assert_equal_path( + self.track_lib_path.resolve(), self.track_import_path + ) @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks") def test_import_hardlink_arrives(self): self.run_asis_importer(hardlink=True) - for mediafile in self.import_media: - path = ( - self.lib_path / "Tag Artist" / "Tag Album" / "Tag Track 1.mp3" - ) - assert path.exists() - s1 = os.stat(syspath(mediafile.path)) - s2 = path.stat() - assert (s1[stat.ST_INO], s1[stat.ST_DEV]) == ( - s2[stat.ST_INO], - s2[stat.ST_DEV], - ) + assert self.track_lib_path.exists() + media_stat = self.track_import_path.stat() + lib_media_stat = self.track_lib_path.stat() + assert media_stat[stat.ST_INO] == lib_media_stat[stat.ST_INO] + assert media_stat[stat.ST_DEV] == lib_media_stat[stat.ST_DEV] @NEEDS_REFLINK def test_import_reflink_arrives(self): # Detecting reflinks is currently tricky due to various fs # implementations, we'll just check the file exists. self.run_asis_importer(reflink=True) - for mediafile in self.import_media: - self.assert_file_in_lib( - b"Tag Artist", - b"Tag Album", - util.bytestring_path(f"{mediafile.title}.mp3"), - ) + + assert self.track_lib_path.exists() def test_import_reflink_auto_arrives(self): # Should pass regardless of reflink support due to fallback. 
self.run_asis_importer(reflink="auto") - for mediafile in self.import_media: - self.assert_file_in_lib( - b"Tag Artist", - b"Tag Album", - util.bytestring_path(f"{mediafile.title}.mp3"), - ) + + assert self.track_lib_path.exists() def create_archive(session): @@ -271,52 +243,36 @@ class ImportSingletonTest(AutotagImportTestCase): self.prepare_album_for_import(1) self.importer = self.setup_singleton_importer() - def test_apply_asis_adds_track(self): - assert self.lib.items().get() is None - + def test_apply_asis_adds_only_singleton_track(self): self.importer.add_choice(importer.Action.ASIS) self.importer.run() + + # album not added + assert not self.lib.albums() assert self.lib.items().get().title == "Tag Track 1" - - def test_apply_asis_does_not_add_album(self): - assert self.lib.albums().get() is None - - self.importer.add_choice(importer.Action.ASIS) - self.importer.run() - assert self.lib.albums().get() is None - - def test_apply_asis_adds_singleton_path(self): - self.importer.add_choice(importer.Action.ASIS) - self.importer.run() - self.assert_file_in_lib(b"singletons", b"Tag Track 1.mp3") + assert (self.lib_path / "singletons" / "Tag Track 1.mp3").exists() def test_apply_candidate_adds_track(self): - assert self.lib.items().get() is None - self.importer.add_choice(importer.Action.APPLY) self.importer.run() + + assert not self.lib.albums() assert self.lib.items().get().title == "Applied Track 1" + assert (self.lib_path / "singletons" / "Applied Track 1.mp3").exists() - def test_apply_candidate_does_not_add_album(self): - self.importer.add_choice(importer.Action.APPLY) - self.importer.run() - assert self.lib.albums().get() is None - - def test_apply_candidate_adds_singleton_path(self): - self.importer.add_choice(importer.Action.APPLY) - self.importer.run() - self.assert_file_in_lib(b"singletons", b"Applied Track 1.mp3") - - def test_skip_does_not_add_first_track(self): + def test_skip_does_not_add_track(self): self.importer.add_choice(importer.Action.SKIP) self.importer.run() - assert self.lib.items().get() is None - def test_skip_adds_other_tracks(self): + assert not self.lib.items() + + def test_skip_first_add_second_asis(self): self.prepare_album_for_import(2) + self.importer.add_choice(importer.Action.SKIP) self.importer.add_choice(importer.Action.ASIS) self.importer.run() + assert len(self.lib.items()) == 1 def test_import_single_files(self): @@ -365,7 +321,7 @@ class ImportSingletonTest(AutotagImportTestCase): item.remove() # Autotagged. 
- assert self.lib.albums().get() is None + assert not self.lib.albums() self.importer.clear_choices() self.importer.add_choice(importer.Action.APPLY) self.importer.run() @@ -378,7 +334,7 @@ class ImportSingletonTest(AutotagImportTestCase): assert item.disc == disc -class ImportTest(AutotagImportTestCase): +class ImportTest(PathsMixin, AutotagImportTestCase): """Test APPLY, ASIS and SKIP choices.""" def setUp(self): @@ -386,44 +342,23 @@ class ImportTest(AutotagImportTestCase): self.prepare_album_for_import(1) self.setup_importer() - def test_apply_asis_adds_album(self): - assert self.lib.albums().get() is None - + def test_asis_moves_album_and_track(self): self.importer.add_choice(importer.Action.ASIS) self.importer.run() + assert self.lib.albums().get().album == "Tag Album" + item = self.lib.items().get() + assert item.title == "Tag Track 1" + assert item.filepath.exists() - def test_apply_asis_adds_tracks(self): - assert self.lib.items().get() is None - self.importer.add_choice(importer.Action.ASIS) - self.importer.run() - assert self.lib.items().get().title == "Tag Track 1" - - def test_apply_asis_adds_album_path(self): - self.importer.add_choice(importer.Action.ASIS) - self.importer.run() - self.assert_file_in_lib(b"Tag Artist", b"Tag Album", b"Tag Track 1.mp3") - - def test_apply_candidate_adds_album(self): - assert self.lib.albums().get() is None - + def test_apply_moves_album_and_track(self): self.importer.add_choice(importer.Action.APPLY) self.importer.run() + assert self.lib.albums().get().album == "Applied Album" - - def test_apply_candidate_adds_tracks(self): - assert self.lib.items().get() is None - - self.importer.add_choice(importer.Action.APPLY) - self.importer.run() - assert self.lib.items().get().title == "Applied Track 1" - - def test_apply_candidate_adds_album_path(self): - self.importer.add_choice(importer.Action.APPLY) - self.importer.run() - self.assert_file_in_lib( - b"Applied Artist", b"Applied Album", b"Applied Track 1.mp3" - ) + item = self.lib.items().get() + assert item.title == "Applied Track 1" + assert item.filepath.exists() def test_apply_from_scratch_removes_other_metadata(self): config["import"]["from_scratch"] = True @@ -452,35 +387,35 @@ class ImportTest(AutotagImportTestCase): assert self.lib.items().get().bitrate == bitrate def test_apply_with_move_deletes_import(self): + assert self.track_import_path.exists() + config["import"]["move"] = True - - track_path = Path(self.import_media[0].path) - assert track_path.exists() - self.importer.add_choice(importer.Action.APPLY) self.importer.run() - assert not track_path.exists() + + assert not self.track_import_path.exists() def test_apply_with_delete_deletes_import(self): + assert self.track_import_path.exists() + config["import"]["delete"] = True - - track_path = Path(self.import_media[0].path) - assert track_path.exists() - self.importer.add_choice(importer.Action.APPLY) self.importer.run() - assert not track_path.exists() + + assert not self.track_import_path.exists() def test_skip_does_not_add_track(self): self.importer.add_choice(importer.Action.SKIP) self.importer.run() - assert self.lib.items().get() is None + + assert not self.lib.items() def test_skip_non_album_dirs(self): self.assertIsDir(os.path.join(self.import_dir, b"album")) self.touch(b"cruft", dir=self.import_dir) self.importer.add_choice(importer.Action.APPLY) self.importer.run() + assert len(self.lib.albums()) == 1 def test_unmatched_tracks_not_added(self): @@ -584,22 +519,21 @@ class ImportTracksTest(AutotagImportTestCase): 
self.setup_importer() def test_apply_tracks_adds_singleton_track(self): - assert self.lib.items().get() is None - assert self.lib.albums().get() is None - self.importer.add_choice(importer.Action.TRACKS) self.importer.add_choice(importer.Action.APPLY) self.importer.add_choice(importer.Action.APPLY) self.importer.run() + assert self.lib.items().get().title == "Applied Track 1" - assert self.lib.albums().get() is None + assert not self.lib.albums() def test_apply_tracks_adds_singleton_path(self): self.importer.add_choice(importer.Action.TRACKS) self.importer.add_choice(importer.Action.APPLY) self.importer.add_choice(importer.Action.APPLY) self.importer.run() - self.assert_file_in_lib(b"singletons", b"Applied Track 1.mp3") + + assert (self.lib_path / "singletons" / "Applied Track 1.mp3").exists() class ImportCompilationTest(AutotagImportTestCase): @@ -707,7 +641,7 @@ class ImportCompilationTest(AutotagImportTestCase): assert asserted_multi_artists_1 -class ImportExistingTest(AutotagImportTestCase): +class ImportExistingTest(PathsMixin, AutotagImportTestCase): """Test importing files that are already in the library directory.""" def setUp(self): @@ -717,20 +651,23 @@ class ImportExistingTest(AutotagImportTestCase): self.reimporter = self.setup_importer(import_dir=self.libdir) self.importer = self.setup_importer() - def test_does_not_duplicate_item(self): + def tearDown(self): + super().tearDown() + self.matcher.restore() + + @cached_property + def applied_track_path(self) -> Path: + return Path(str(self.track_lib_path).replace("Tag", "Applied")) + + def test_does_not_duplicate_item_nor_album(self): self.importer.run() assert len(self.lib.items()) == 1 - - self.reimporter.add_choice(importer.Action.APPLY) - self.reimporter.run() - assert len(self.lib.items()) == 1 - - def test_does_not_duplicate_album(self): - self.importer.run() assert len(self.lib.albums()) == 1 self.reimporter.add_choice(importer.Action.APPLY) self.reimporter.run() + + assert len(self.lib.items()) == 1 assert len(self.lib.albums()) == 1 def test_does_not_duplicate_singleton_track(self): @@ -744,33 +681,19 @@ class ImportExistingTest(AutotagImportTestCase): self.reimporter.run() assert len(self.lib.items()) == 1 - def test_asis_updates_metadata(self): + def test_asis_updates_metadata_and_moves_file(self): self.importer.run() + medium = MediaFile(self.lib.items().get().path) medium.title = "New Title" medium.save() self.reimporter.add_choice(importer.Action.ASIS) self.reimporter.run() + assert self.lib.items().get().title == "New Title" - - def test_asis_updated_moves_file(self): - self.importer.run() - medium = MediaFile(self.lib.items().get().path) - medium.title = "New Title" - medium.save() - - old_path = os.path.join( - b"Applied Artist", b"Applied Album", b"Applied Track 1.mp3" - ) - self.assert_file_in_lib(old_path) - - self.reimporter.add_choice(importer.Action.ASIS) - self.reimporter.run() - self.assert_file_in_lib( - b"Applied Artist", b"Applied Album", b"New Title.mp3" - ) - self.assert_file_not_in_lib(old_path) + assert not self.applied_track_path.exists() + assert self.applied_track_path.with_name("New Title.mp3").exists() def test_asis_updated_without_copy_does_not_move_file(self): self.importer.run() @@ -778,18 +701,12 @@ class ImportExistingTest(AutotagImportTestCase): medium.title = "New Title" medium.save() - old_path = os.path.join( - b"Applied Artist", b"Applied Album", b"Applied Track 1.mp3" - ) - self.assert_file_in_lib(old_path) - config["import"]["copy"] = False 
self.reimporter.add_choice(importer.Action.ASIS) self.reimporter.run() - self.assert_file_not_in_lib( - b"Applied Artist", b"Applied Album", b"New Title.mp3" - ) - self.assert_file_in_lib(old_path) + + assert self.applied_track_path.exists() + assert not self.applied_track_path.with_name("New Title.mp3").exists() def test_outside_file_is_copied(self): config["import"]["copy"] = False @@ -801,26 +718,9 @@ class ImportExistingTest(AutotagImportTestCase): self.reimporter = self.setup_importer() self.reimporter.add_choice(importer.Action.APPLY) self.reimporter.run() - new_path = os.path.join( - b"Applied Artist", b"Applied Album", b"Applied Track 1.mp3" - ) - self.assert_file_in_lib(new_path) - self.assert_equal_path( - self.lib.items().get().path, os.path.join(self.libdir, new_path) - ) - - def test_outside_file_is_moved(self): - config["import"]["copy"] = False - self.importer.run() - self.assert_equal_path( - self.lib.items().get().path, self.import_media[0].path - ) - - self.reimporter = self.setup_importer(move=True) - self.reimporter.add_choice(importer.Action.APPLY) - self.reimporter.run() - assert not Path(self.import_media[0].path).exists() + assert self.applied_track_path.exists() + assert self.lib.items().get().filepath == self.applied_track_path class GroupAlbumsImportTest(AutotagImportTestCase): From 038843cdb267d23f1cf3b92a2dc17fb1d50d04e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Mon, 5 Aug 2024 18:59:38 +0100 Subject: [PATCH 46/95] Replace assertAlmostEqual and assertEqualTimes --- test/plugins/test_importadded.py | 44 +++++++++++++++----------------- 1 file changed, 20 insertions(+), 24 deletions(-) diff --git a/test/plugins/test_importadded.py b/test/plugins/test_importadded.py index d54c04b0e..2ed1d0f4e 100644 --- a/test/plugins/test_importadded.py +++ b/test/plugins/test_importadded.py @@ -68,10 +68,6 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase): "No MediaFile found for Item " + displayable_path(item.path) ) - def assertEqualTimes(self, first, second, msg=None): - """For comparing file modification times at a sufficient precision""" - assert first == pytest.approx(second, rel=1e-4), msg - def assertAlbumImport(self): self.importer.run() album = self.lib.albums().get() @@ -95,10 +91,12 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase): album = self.lib.albums().get() assert album.added == self.min_mtime for item in album.items(): - self.assertEqualTimes(item.added, self.min_mtime) + assert item.added == pytest.approx(self.min_mtime, rel=1e-4) mediafile_mtime = os.path.getmtime(self.find_media_file(item).path) - self.assertEqualTimes(item.mtime, mediafile_mtime) - self.assertEqualTimes(os.path.getmtime(item.path), mediafile_mtime) + assert item.mtime == pytest.approx(mediafile_mtime, rel=1e-4) + assert os.path.getmtime(item.path) == pytest.approx( + mediafile_mtime, rel=1e-4 + ) def test_reimported_album_skipped(self): # Import and record the original added dates @@ -113,22 +111,21 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase): self.importer.run() # Verify the reimported items album = self.lib.albums().get() - self.assertEqualTimes(album.added, album_added_before) + assert album.added == pytest.approx(album_added_before, rel=1e-4) items_added_after = {item.path: item.added for item in album.items()} for item_path, added_after in items_added_after.items(): - self.assertEqualTimes( - items_added_before[item_path], - added_after, - "reimport modified Item.added for " - + displayable_path(item_path), - ) + 
assert items_added_before[item_path] == pytest.approx( + added_after, rel=1e-4 + ), "reimport modified Item.added for " + displayable_path(item_path) def test_import_singletons_with_added_dates(self): self.config["import"]["singletons"] = True self.importer.run() for item in self.lib.items(): mfile = self.find_media_file(item) - self.assertEqualTimes(item.added, os.path.getmtime(mfile.path)) + assert item.added == pytest.approx( + os.path.getmtime(mfile.path), rel=1e-4 + ) def test_import_singletons_with_preserved_mtimes(self): self.config["import"]["singletons"] = True @@ -136,9 +133,11 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase): self.importer.run() for item in self.lib.items(): mediafile_mtime = os.path.getmtime(self.find_media_file(item).path) - self.assertEqualTimes(item.added, mediafile_mtime) - self.assertEqualTimes(item.mtime, mediafile_mtime) - self.assertEqualTimes(os.path.getmtime(item.path), mediafile_mtime) + assert item.added == pytest.approx(mediafile_mtime, rel=1e-4) + assert item.mtime == pytest.approx(mediafile_mtime, rel=1e-4) + assert os.path.getmtime(item.path) == pytest.approx( + mediafile_mtime, rel=1e-4 + ) def test_reimported_singletons_skipped(self): self.config["import"]["singletons"] = True @@ -155,9 +154,6 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase): # Verify the reimported items items_added_after = {item.path: item.added for item in self.lib.items()} for item_path, added_after in items_added_after.items(): - self.assertEqualTimes( - items_added_before[item_path], - added_after, - "reimport modified Item.added for " - + displayable_path(item_path), - ) + assert items_added_before[item_path] == pytest.approx( + added_after, rel=1e-4 + ), "reimport modified Item.added for " + displayable_path(item_path) From ca4fa6ba10807f4a48a428d23e45c023c15dfa7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 6 Aug 2024 22:40:36 +0100 Subject: [PATCH 47/95] Replace assertIsFile --- beets/test/_common.py | 5 ----- test/plugins/test_convert.py | 11 ++++++++--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/beets/test/_common.py b/beets/test/_common.py index 7c7defb02..0d2d51f1e 100644 --- a/beets/test/_common.py +++ b/beets/test/_common.py @@ -115,11 +115,6 @@ def import_session(lib=None, loghandler=None, paths=[], query=[], cli=False): class Assertions: """A mixin with additional unit test assertions.""" - def assertIsFile(self, path): - path = Path(os.fsdecode(path)) - assert path.exists() - assert path.is_file() - def assertIsDir(self, path): path = Path(os.fsdecode(path)) assert path.exists() diff --git a/test/plugins/test_convert.py b/test/plugins/test_convert.py index e889ce7bb..25a000284 100644 --- a/test/plugins/test_convert.py +++ b/test/plugins/test_convert.py @@ -58,13 +58,18 @@ class ConvertMixin: shell_quote(sys.executable), shell_quote(stub), tag ) + def assert_is_file(self, path): + path = Path(os.fsdecode(path)) + assert path.exists() + assert path.is_file() + def assertFileTag(self, path, tag): """Assert that the path is a file and the files content ends with `tag`. 
""" display_tag = tag tag = tag.encode("utf-8") - self.assertIsFile(path) + self.assert_is_file(path) with open(path, "rb") as f: f.seek(-len(display_tag), os.SEEK_END) assert f.read() == tag, ( @@ -77,7 +82,7 @@ class ConvertMixin: """ display_tag = tag tag = tag.encode("utf-8") - self.assertIsFile(path) + self.assert_is_file(path) with open(path, "rb") as f: f.seek(-len(tag), os.SEEK_END) assert f.read() != tag, ( @@ -117,7 +122,7 @@ class ImportConvertTest(AsIsImporterMixin, ImportHelper, ConvertTestCase): item = self.lib.items().get() assert item is not None - self.assertIsFile(item.path) + self.assert_is_file(item.path) def test_delete_originals(self): self.config["convert"]["delete_originals"] = True From 43b8cce063b1a1ef079266f362272307fb328d73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 6 Aug 2024 22:49:57 +0100 Subject: [PATCH 48/95] Replace assertIsDir --- beets/test/_common.py | 6 ------ test/test_files.py | 15 ++++++--------- test/test_importer.py | 2 +- 3 files changed, 7 insertions(+), 16 deletions(-) diff --git a/beets/test/_common.py b/beets/test/_common.py index 0d2d51f1e..ca1671cb7 100644 --- a/beets/test/_common.py +++ b/beets/test/_common.py @@ -18,7 +18,6 @@ import os import sys import unittest from contextlib import contextmanager -from pathlib import Path import beets import beets.library @@ -115,11 +114,6 @@ def import_session(lib=None, loghandler=None, paths=[], query=[], cli=False): class Assertions: """A mixin with additional unit test assertions.""" - def assertIsDir(self, path): - path = Path(os.fsdecode(path)) - assert path.exists() - assert path.is_dir() - def assert_equal_path(self, a, b): """Check that two paths are equal.""" a_bytes, b_bytes = util.normpath(a), util.normpath(b) diff --git a/test/test_files.py b/test/test_files.py index 266e4f4d0..8b08a3fab 100644 --- a/test/test_files.py +++ b/test/test_files.py @@ -671,12 +671,9 @@ class UniquePathTest(BeetsTestCase): class MkDirAllTest(BeetsTestCase): - def test_parent_exists(self): - path = self.temp_dir_path / "foo" / "bar" / "baz" / "qux.mp3" - util.mkdirall(path) - self.assertIsDir(self.temp_dir_path / "foo" / "bar" / "baz") - - def test_child_does_not_exist(self): - path = self.temp_dir_path / "foo" / "bar" / "baz" / "qux.mp3" - util.mkdirall(path) - assert not path.exists() + def test_mkdirall(self): + child = self.temp_dir_path / "foo" / "bar" / "baz" / "quz.mp3" + util.mkdirall(child) + assert not child.exists() + assert child.parent.exists() + assert child.parent.is_dir() diff --git a/test/test_importer.py b/test/test_importer.py index 521f98f99..a072d43d4 100644 --- a/test/test_importer.py +++ b/test/test_importer.py @@ -411,7 +411,7 @@ class ImportTest(PathsMixin, AutotagImportTestCase): assert not self.lib.items() def test_skip_non_album_dirs(self): - self.assertIsDir(os.path.join(self.import_dir, b"album")) + assert (self.import_path / "album").exists() self.touch(b"cruft", dir=self.import_dir) self.importer.add_choice(importer.Action.APPLY) self.importer.run() From c6b5b3bed31704f7fe8632a6aef1a2348028348f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 6 Aug 2024 23:07:02 +0100 Subject: [PATCH 49/95] Replace assertFileTag and assertNoFileTag --- test/plugins/test_convert.py | 65 ++++++++++++------------------------ 1 file changed, 22 insertions(+), 43 deletions(-) diff --git a/test/plugins/test_convert.py b/test/plugins/test_convert.py index 25a000284..dcf684ccc 100644 --- a/test/plugins/test_convert.py +++ 
b/test/plugins/test_convert.py @@ -18,6 +18,7 @@ import os.path import re import sys import unittest +from pathlib import Path import pytest from mediafile import MediaFile @@ -32,7 +33,6 @@ from beets.test.helper import ( capture_log, control_stdin, ) -from beets.util import displayable_path from beetsplug import convert @@ -58,36 +58,11 @@ class ConvertMixin: shell_quote(sys.executable), shell_quote(stub), tag ) - def assert_is_file(self, path): - path = Path(os.fsdecode(path)) + def file_endswith(self, path: Path, tag: str): + """Check the path is a file and if its content ends with `tag`.""" assert path.exists() assert path.is_file() - - def assertFileTag(self, path, tag): - """Assert that the path is a file and the files content ends - with `tag`. - """ - display_tag = tag - tag = tag.encode("utf-8") - self.assert_is_file(path) - with open(path, "rb") as f: - f.seek(-len(display_tag), os.SEEK_END) - assert f.read() == tag, ( - f"{displayable_path(path)} is not tagged with {display_tag}" - ) - - def assertNoFileTag(self, path, tag): - """Assert that the path is a file and the files content does not - end with `tag`. - """ - display_tag = tag - tag = tag.encode("utf-8") - self.assert_is_file(path) - with open(path, "rb") as f: - f.seek(-len(tag), os.SEEK_END) - assert f.read() != tag, ( - f"{displayable_path(path)} is unexpectedly tagged with {display_tag}" - ) + return path.read_bytes().endswith(tag.encode("utf-8")) class ConvertTestCase(ConvertMixin, PluginTestCase): @@ -111,7 +86,7 @@ class ImportConvertTest(AsIsImporterMixin, ImportHelper, ConvertTestCase): def test_import_converted(self): self.run_asis_importer() item = self.lib.items().get() - self.assertFileTag(item.filepath, "convert") + assert self.file_endswith(item.filepath, "convert") # FIXME: fails on windows @unittest.skipIf(sys.platform == "win32", "win32") @@ -122,7 +97,7 @@ class ImportConvertTest(AsIsImporterMixin, ImportHelper, ConvertTestCase): item = self.lib.items().get() assert item is not None - self.assert_is_file(item.path) + assert item.filepath.is_file() def test_delete_originals(self): self.config["convert"]["delete_originals"] = True @@ -183,11 +158,11 @@ class ConvertCliTest(ConvertTestCase, ConvertCommand): def test_convert(self): with control_stdin("y"): self.run_convert() - self.assertFileTag(self.converted_mp3, "mp3") + assert self.file_endswith(self.converted_mp3, "mp3") def test_convert_with_auto_confirmation(self): self.run_convert("--yes") - self.assertFileTag(self.converted_mp3, "mp3") + assert self.file_endswith(self.converted_mp3, "mp3") def test_reject_confirmation(self): with control_stdin("n"): @@ -206,7 +181,7 @@ class ConvertCliTest(ConvertTestCase, ConvertCommand): def test_format_option(self): with control_stdin("y"): self.run_convert("--format", "opus") - self.assertFileTag(self.convert_dest / "converted.ops", "opus") + assert self.file_endswith(self.convert_dest / "converted.ops", "opus") def test_embed_album_art(self): self.config["convert"]["embed"] = True @@ -241,38 +216,42 @@ class ConvertCliTest(ConvertTestCase, ConvertCommand): self.config["convert"]["max_bitrate"] = 5000 with control_stdin("y"): self.run_convert() - self.assertFileTag(self.converted_mp3, "mp3") + assert self.file_endswith(self.converted_mp3, "mp3") def test_transcode_when_maxbr_set_low_and_different_formats(self): self.config["convert"]["max_bitrate"] = 5 with control_stdin("y"): self.run_convert() - self.assertFileTag(self.converted_mp3, "mp3") + assert self.file_endswith(self.converted_mp3, "mp3") def 
test_transcode_when_maxbr_set_to_none_and_different_formats(self): with control_stdin("y"): self.run_convert() - self.assertFileTag(self.converted_mp3, "mp3") + assert self.file_endswith(self.converted_mp3, "mp3") def test_no_transcode_when_maxbr_set_high_and_same_formats(self): self.config["convert"]["max_bitrate"] = 5000 self.config["convert"]["format"] = "ogg" with control_stdin("y"): self.run_convert() - self.assertNoFileTag(self.convert_dest / "converted.ogg", "ogg") + assert not self.file_endswith( + self.convert_dest / "converted.ogg", "ogg" + ) def test_transcode_when_maxbr_set_low_and_same_formats(self): self.config["convert"]["max_bitrate"] = 5 self.config["convert"]["format"] = "ogg" with control_stdin("y"): self.run_convert() - self.assertFileTag(self.convert_dest / "converted.ogg", "ogg") + assert self.file_endswith(self.convert_dest / "converted.ogg", "ogg") def test_transcode_when_maxbr_set_to_none_and_same_formats(self): self.config["convert"]["format"] = "ogg" with control_stdin("y"): self.run_convert() - self.assertNoFileTag(self.convert_dest / "converted.ogg", "ogg") + assert not self.file_endswith( + self.convert_dest / "converted.ogg", "ogg" + ) def test_playlist(self): with control_stdin("y"): @@ -307,7 +286,7 @@ class NeverConvertLossyFilesTest(ConvertTestCase, ConvertCommand): with control_stdin("y"): self.run_convert_path(item) converted = self.convert_dest / "converted.mp3" - self.assertFileTag(converted, "mp3") + assert self.file_endswith(converted, "mp3") def test_transcode_from_lossy(self): self.config["convert"]["never_convert_lossy_files"] = False @@ -315,14 +294,14 @@ class NeverConvertLossyFilesTest(ConvertTestCase, ConvertCommand): with control_stdin("y"): self.run_convert_path(item) converted = self.convert_dest / "converted.mp3" - self.assertFileTag(converted, "mp3") + assert self.file_endswith(converted, "mp3") def test_transcode_from_lossy_prevented(self): [item] = self.add_item_fixtures(ext="ogg") with control_stdin("y"): self.run_convert_path(item) converted = self.convert_dest / "converted.ogg" - self.assertNoFileTag(converted, "mp3") + assert not self.file_endswith(converted, "mp3") class TestNoConvert: From 3c8179a762c4387f9c40a12e3b9e560ff1c194ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Wed, 7 Aug 2024 11:03:29 +0100 Subject: [PATCH 50/95] Replace assertAlbumImport --- test/plugins/test_importadded.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/test/plugins/test_importadded.py b/test/plugins/test_importadded.py index 2ed1d0f4e..1b198b31d 100644 --- a/test/plugins/test_importadded.py +++ b/test/plugins/test_importadded.py @@ -68,22 +68,23 @@ class ImportAddedTest(PluginMixin, AutotagImportTestCase): "No MediaFile found for Item " + displayable_path(item.path) ) - def assertAlbumImport(self): + def test_import_album_with_added_dates(self): self.importer.run() + album = self.lib.albums().get() assert album.added == self.min_mtime for item in album.items(): assert item.added == self.min_mtime - def test_import_album_with_added_dates(self): - self.assertAlbumImport() - def test_import_album_inplace_with_added_dates(self): self.config["import"]["copy"] = False - self.config["import"]["move"] = False - self.config["import"]["link"] = False - self.config["import"]["hardlink"] = False - self.assertAlbumImport() + + self.importer.run() + + album = self.lib.albums().get() + assert album.added == self.min_mtime + for item in album.items(): + assert item.added == self.min_mtime def 
test_import_album_with_preserved_mtimes(self): self.config["importadded"]["preserve_mtimes"] = True From 111cb562ea311adc0d70d3ef26205297b72603d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Wed, 7 Aug 2024 12:10:25 +0100 Subject: [PATCH 51/95] Replace assertContains and assertExcludes --- test/test_datequery.py | 174 ++++++++++++++--------------------------- 1 file changed, 60 insertions(+), 114 deletions(-) diff --git a/test/test_datequery.py b/test/test_datequery.py index 9c968e998..1063a62c1 100644 --- a/test/test_datequery.py +++ b/test/test_datequery.py @@ -29,122 +29,68 @@ from beets.dbcore.query import ( from beets.test.helper import ItemInDBTestCase -def _date(string): - return datetime.strptime(string, "%Y-%m-%dT%H:%M:%S") +class TestDateInterval: + now = datetime.now().replace(microsecond=0, second=0).isoformat() - -def _datepattern(datetimedate): - return datetimedate.strftime("%Y-%m-%dT%H:%M:%S") - - -class DateIntervalTest(unittest.TestCase): - def test_year_precision_intervals(self): - self.assertContains("2000..2001", "2000-01-01T00:00:00") - self.assertContains("2000..2001", "2001-06-20T14:15:16") - self.assertContains("2000..2001", "2001-12-31T23:59:59") - self.assertExcludes("2000..2001", "1999-12-31T23:59:59") - self.assertExcludes("2000..2001", "2002-01-01T00:00:00") - - self.assertContains("2000..", "2000-01-01T00:00:00") - self.assertContains("2000..", "2099-10-11T00:00:00") - self.assertExcludes("2000..", "1999-12-31T23:59:59") - - self.assertContains("..2001", "2001-12-31T23:59:59") - self.assertExcludes("..2001", "2002-01-01T00:00:00") - - self.assertContains("-1d..1d", _datepattern(datetime.now())) - self.assertExcludes("-2d..-1d", _datepattern(datetime.now())) - - def test_day_precision_intervals(self): - self.assertContains("2000-06-20..2000-06-20", "2000-06-20T00:00:00") - self.assertContains("2000-06-20..2000-06-20", "2000-06-20T10:20:30") - self.assertContains("2000-06-20..2000-06-20", "2000-06-20T23:59:59") - self.assertExcludes("2000-06-20..2000-06-20", "2000-06-19T23:59:59") - self.assertExcludes("2000-06-20..2000-06-20", "2000-06-21T00:00:00") - - def test_month_precision_intervals(self): - self.assertContains("1999-12..2000-02", "1999-12-01T00:00:00") - self.assertContains("1999-12..2000-02", "2000-02-15T05:06:07") - self.assertContains("1999-12..2000-02", "2000-02-29T23:59:59") - self.assertExcludes("1999-12..2000-02", "1999-11-30T23:59:59") - self.assertExcludes("1999-12..2000-02", "2000-03-01T00:00:00") - - def test_hour_precision_intervals(self): - # test with 'T' separator - self.assertExcludes( - "2000-01-01T12..2000-01-01T13", "2000-01-01T11:59:59" - ) - self.assertContains( - "2000-01-01T12..2000-01-01T13", "2000-01-01T12:00:00" - ) - self.assertContains( - "2000-01-01T12..2000-01-01T13", "2000-01-01T12:30:00" - ) - self.assertContains( - "2000-01-01T12..2000-01-01T13", "2000-01-01T13:30:00" - ) - self.assertContains( - "2000-01-01T12..2000-01-01T13", "2000-01-01T13:59:59" - ) - self.assertExcludes( - "2000-01-01T12..2000-01-01T13", "2000-01-01T14:00:00" - ) - self.assertExcludes( - "2000-01-01T12..2000-01-01T13", "2000-01-01T14:30:00" - ) - - # test non-range query - self.assertContains("2008-12-01T22", "2008-12-01T22:30:00") - self.assertExcludes("2008-12-01T22", "2008-12-01T23:30:00") - - def test_minute_precision_intervals(self): - self.assertExcludes( - "2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:29:59" - ) - self.assertContains( - "2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:30:00" - ) - 
self.assertContains( - "2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:30:30" - ) - self.assertContains( - "2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:31:59" - ) - self.assertExcludes( - "2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:32:00" - ) - - def test_second_precision_intervals(self): - self.assertExcludes( - "2000-01-01T12:30:50..2000-01-01T12:30:55", "2000-01-01T12:30:49" - ) - self.assertContains( - "2000-01-01T12:30:50..2000-01-01T12:30:55", "2000-01-01T12:30:50" - ) - self.assertContains( - "2000-01-01T12:30:50..2000-01-01T12:30:55", "2000-01-01T12:30:55" - ) - self.assertExcludes( - "2000-01-01T12:30:50..2000-01-01T12:30:55", "2000-01-01T12:30:56" - ) - - def test_unbounded_endpoints(self): - self.assertContains("..", date=datetime.max) - self.assertContains("..", date=datetime.min) - self.assertContains("..", "1000-01-01T00:00:00") - - def assertContains(self, interval_pattern, date_pattern=None, date=None): - if date is None: - date = _date(date_pattern) - (start, end) = _parse_periods(interval_pattern) + @pytest.mark.parametrize( + "pattern, datestr, include", + [ + # year precision + ("2000..2001", "2000-01-01T00:00:00", True), + ("2000..2001", "2001-06-20T14:15:16", True), + ("2000..2001", "2001-12-31T23:59:59", True), + ("2000..2001", "1999-12-31T23:59:59", False), + ("2000..2001", "2002-01-01T00:00:00", False), + ("2000..", "2000-01-01T00:00:00", True), + ("2000..", "2099-10-11T00:00:00", True), + ("2000..", "1999-12-31T23:59:59", False), + ("..2001", "2001-12-31T23:59:59", True), + ("..2001", "2002-01-01T00:00:00", False), + ("-1d..1d", now, True), + ("-2d..-1d", now, False), + # month precision + ("2000-06-20..2000-06-20", "2000-06-20T00:00:00", True), + ("2000-06-20..2000-06-20", "2000-06-20T10:20:30", True), + ("2000-06-20..2000-06-20", "2000-06-20T23:59:59", True), + ("2000-06-20..2000-06-20", "2000-06-19T23:59:59", False), + ("2000-06-20..2000-06-20", "2000-06-21T00:00:00", False), + # day precision + ("1999-12..2000-02", "1999-12-01T00:00:00", True), + ("1999-12..2000-02", "2000-02-15T05:06:07", True), + ("1999-12..2000-02", "2000-02-29T23:59:59", True), + ("1999-12..2000-02", "1999-11-30T23:59:59", False), + ("1999-12..2000-02", "2000-03-01T00:00:00", False), + # hour precision with 'T' separator + ("2000-01-01T12..2000-01-01T13", "2000-01-01T11:59:59", False), + ("2000-01-01T12..2000-01-01T13", "2000-01-01T12:00:00", True), + ("2000-01-01T12..2000-01-01T13", "2000-01-01T12:30:00", True), + ("2000-01-01T12..2000-01-01T13", "2000-01-01T13:30:00", True), + ("2000-01-01T12..2000-01-01T13", "2000-01-01T13:59:59", True), + ("2000-01-01T12..2000-01-01T13", "2000-01-01T14:00:00", False), + ("2000-01-01T12..2000-01-01T13", "2000-01-01T14:30:00", False), + # hour precision non-range query + ("2008-12-01T22", "2008-12-01T22:30:00", True), + ("2008-12-01T22", "2008-12-01T23:30:00", False), + # minute precision + ("2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:29:59", False), + ("2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:30:00", True), + ("2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:30:30", True), + ("2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:31:59", True), + ("2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:32:00", False), + # second precision + ("2000-01-01T12:30:50..2000-01-01T12:30:55", "2000-01-01T12:30:49", False), + ("2000-01-01T12:30:50..2000-01-01T12:30:55", "2000-01-01T12:30:50", True), + ("2000-01-01T12:30:50..2000-01-01T12:30:55", "2000-01-01T12:30:55", True), + ("2000-01-01T12:30:50..2000-01-01T12:30:55", 
"2000-01-01T12:30:56", False), # unbounded # noqa: E501 + ("..", datetime.max.isoformat(), True), + ("..", datetime.min.isoformat(), True), + ("..", "1000-01-01T00:00:00", True), + ], + ) # fmt: skip + def test_intervals(self, pattern, datestr, include): + (start, end) = _parse_periods(pattern) interval = DateInterval.from_periods(start, end) - assert interval.contains(date) - - def assertExcludes(self, interval_pattern, date_pattern): - date = _date(date_pattern) - (start, end) = _parse_periods(interval_pattern) - interval = DateInterval.from_periods(start, end) - assert not interval.contains(date) + assert interval.contains(datetime.fromisoformat(datestr)) == include def _parsetime(s): From 72caf0d2cdc8fcefe1c252bdb0ac9b11b90cc649 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Wed, 7 Aug 2024 22:05:42 +0100 Subject: [PATCH 52/95] Replace assertCount --- test/plugins/test_edit.py | 53 ++++++++------------------------------- 1 file changed, 10 insertions(+), 43 deletions(-) diff --git a/test/plugins/test_edit.py b/test/plugins/test_edit.py index 278e04b9e..4e6c97ab2 100644 --- a/test/plugins/test_edit.py +++ b/test/plugins/test_edit.py @@ -134,22 +134,6 @@ class EditCommandTest(EditMixin, BeetsTestCase): {f: item[f] for f in item._fields} for item in self.album.items() ] - def assertCounts( - self, - mock_write, - album_count=ALBUM_COUNT, - track_count=TRACK_COUNT, - write_call_count=TRACK_COUNT, - title_starts_with="", - ): - """Several common assertions on Album, Track and call counts.""" - assert len(self.lib.albums()) == album_count - assert len(self.lib.items()) == track_count - assert mock_write.call_count == write_call_count - assert all( - i.title.startswith(title_starts_with) for i in self.lib.items() - ) - def test_title_edit_discard(self, mock_write): """Edit title for all items in the library, then discard changes.""" # Edit track titles. @@ -159,9 +143,7 @@ class EditCommandTest(EditMixin, BeetsTestCase): ["c"], ) - self.assertCounts( - mock_write, write_call_count=0, title_starts_with="t\u00eftle" - ) + assert mock_write.call_count == 0 self.assertItemFieldsModified(self.album.items(), self.items_orig, []) def test_title_edit_apply(self, mock_write): @@ -173,11 +155,7 @@ class EditCommandTest(EditMixin, BeetsTestCase): ["a"], ) - self.assertCounts( - mock_write, - write_call_count=self.TRACK_COUNT, - title_starts_with="modified t\u00eftle", - ) + assert mock_write.call_count == self.TRACK_COUNT self.assertItemFieldsModified( self.album.items(), self.items_orig, ["title", "mtime"] ) @@ -191,10 +169,7 @@ class EditCommandTest(EditMixin, BeetsTestCase): ["a"], ) - self.assertCounts( - mock_write, - write_call_count=1, - ) + assert mock_write.call_count == 1 # No changes except on last item. 
self.assertItemFieldsModified( list(self.album.items())[:-1], self.items_orig[:-1], [] @@ -210,9 +185,7 @@ class EditCommandTest(EditMixin, BeetsTestCase): [], ) - self.assertCounts( - mock_write, write_call_count=0, title_starts_with="t\u00eftle" - ) + assert mock_write.call_count == 0 self.assertItemFieldsModified(self.album.items(), self.items_orig, []) def test_album_edit_apply(self, mock_write): @@ -226,7 +199,7 @@ class EditCommandTest(EditMixin, BeetsTestCase): ["a"], ) - self.assertCounts(mock_write, write_call_count=self.TRACK_COUNT) + assert mock_write.call_count == self.TRACK_COUNT self.assertItemFieldsModified( self.album.items(), self.items_orig, ["album", "mtime"] ) @@ -249,9 +222,7 @@ class EditCommandTest(EditMixin, BeetsTestCase): # Even though a flexible attribute was written (which is not directly # written to the tags), write should still be called since templates # might use it. - self.assertCounts( - mock_write, write_call_count=1, title_starts_with="t\u00eftle" - ) + assert mock_write.call_count == 1 def test_a_album_edit_apply(self, mock_write): """Album query (-a), edit album field, apply changes.""" @@ -263,7 +234,7 @@ class EditCommandTest(EditMixin, BeetsTestCase): ) self.album.load() - self.assertCounts(mock_write, write_call_count=self.TRACK_COUNT) + assert mock_write.call_count == self.TRACK_COUNT assert self.album.album == "modified \u00e4lbum" self.assertItemFieldsModified( self.album.items(), self.items_orig, ["album", "mtime"] @@ -279,7 +250,7 @@ class EditCommandTest(EditMixin, BeetsTestCase): ) self.album.load() - self.assertCounts(mock_write, write_call_count=self.TRACK_COUNT) + assert mock_write.call_count == self.TRACK_COUNT assert self.album.albumartist == "the modified album artist" self.assertItemFieldsModified( self.album.items(), self.items_orig, ["albumartist", "mtime"] @@ -295,9 +266,7 @@ class EditCommandTest(EditMixin, BeetsTestCase): ["n"], ) - self.assertCounts( - mock_write, write_call_count=0, title_starts_with="t\u00eftle" - ) + assert mock_write.call_count == 0 def test_invalid_yaml(self, mock_write): """Edit the yaml file incorrectly (resulting in a well-formed but @@ -309,9 +278,7 @@ class EditCommandTest(EditMixin, BeetsTestCase): [], ) - self.assertCounts( - mock_write, write_call_count=0, title_starts_with="t\u00eftle" - ) + assert mock_write.call_count == 0 @_common.slow_test() From 9f6d5063d183b36b56a97f13fefa9808cfd1ae19 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Thu, 8 Aug 2024 07:56:34 +0100 Subject: [PATCH 53/95] Replace _assertImageIsValidArt --- test/plugins/test_art.py | 78 ++++++++++++++++------------------------ 1 file changed, 31 insertions(+), 47 deletions(-) diff --git a/test/plugins/test_art.py b/test/plugins/test_art.py index 45effa9b9..152b86dbb 100644 --- a/test/plugins/test_art.py +++ b/test/plugins/test_art.py @@ -863,6 +863,16 @@ class ArtImporterTest(UseThePlugin): assert self.album.art_filepath.exists() +IMAGE_PATH = os.path.join(_common.RSRC, b"abbey-similar.jpg") +IMAGE_SIZE = os.stat(util.syspath(IMAGE_PATH)).st_size + + +def fs_source_get(_self, album, settings, paths): + if paths: + yield fetchart.Candidate(logger, source_name=_self.ID, path=IMAGE_PATH) + + +@patch("beetsplug.fetchart.FileSystem.get", fs_source_get) class ArtForAlbumTest(UseThePlugin): """Tests that fetchart.art_for_album respects the scale & filesize configurations (e.g., minwidth, enforce_ratio, max_filesize) @@ -870,53 +880,24 @@ class ArtForAlbumTest(UseThePlugin): IMG_225x225 = 
os.path.join(_common.RSRC, b"abbey.jpg") IMG_348x348 = os.path.join(_common.RSRC, b"abbey-different.jpg") - IMG_500x490 = os.path.join(_common.RSRC, b"abbey-similar.jpg") IMG_225x225_SIZE = os.stat(util.syspath(IMG_225x225)).st_size - IMG_348x348_SIZE = os.stat(util.syspath(IMG_348x348)).st_size RESIZE_OP = "resize" DEINTERLACE_OP = "deinterlace" REFORMAT_OP = "reformat" - def setUp(self): - super().setUp() + album = _common.Bag() - self.old_fs_source_get = fetchart.FileSystem.get - - def fs_source_get(_self, album, settings, paths): - if paths: - yield fetchart.Candidate( - logger, source_name=_self.ID, path=self.image_file - ) - - fetchart.FileSystem.get = fs_source_get - - self.album = _common.Bag() - - def tearDown(self): - fetchart.FileSystem.get = self.old_fs_source_get - super().tearDown() - - def assertImageIsValidArt(self, image_file, should_exist): - assert Path(os.fsdecode(image_file)).exists() - self.image_file = image_file - - candidate = self.plugin.art_for_album(self.album, [""], True) - - if should_exist: - assert candidate is not None - assert candidate.path == self.image_file - assert Path(os.fsdecode(candidate.path)).exists() - else: - assert candidate is None + def get_album_art(self): + return self.plugin.art_for_album(self.album, [""], True) def _assert_image_operated(self, image_file, operation, should_operate): self.image_file = image_file with patch.object( ArtResizer.shared, operation, return_value=self.image_file ) as mock_operation: - self.plugin.art_for_album(self.album, [""], True) + self.get_album_art() assert mock_operation.called == should_operate def _require_backend(self): @@ -929,48 +910,51 @@ class ArtForAlbumTest(UseThePlugin): def test_respect_minwidth(self): self._require_backend() self.plugin.minwidth = 300 - self.assertImageIsValidArt(self.IMG_225x225, False) - self.assertImageIsValidArt(self.IMG_348x348, True) + assert self.get_album_art() + + def test_respect_minwidth_no(self): + self._require_backend() + self.plugin.minwidth = 600 + assert not self.get_album_art() def test_respect_enforce_ratio_yes(self): self._require_backend() self.plugin.enforce_ratio = True - self.assertImageIsValidArt(self.IMG_500x490, False) - self.assertImageIsValidArt(self.IMG_225x225, True) + assert not self.get_album_art() def test_respect_enforce_ratio_no(self): self.plugin.enforce_ratio = False - self.assertImageIsValidArt(self.IMG_500x490, True) + assert self.get_album_art() def test_respect_enforce_ratio_px_above(self): self._require_backend() self.plugin.enforce_ratio = True self.plugin.margin_px = 5 - self.assertImageIsValidArt(self.IMG_500x490, False) + assert not self.get_album_art() def test_respect_enforce_ratio_px_below(self): self._require_backend() self.plugin.enforce_ratio = True self.plugin.margin_px = 15 - self.assertImageIsValidArt(self.IMG_500x490, True) + assert self.get_album_art() def test_respect_enforce_ratio_percent_above(self): self._require_backend() self.plugin.enforce_ratio = True self.plugin.margin_percent = (500 - 490) / 500 * 0.5 - self.assertImageIsValidArt(self.IMG_500x490, False) + assert not self.get_album_art() def test_respect_enforce_ratio_percent_below(self): self._require_backend() self.plugin.enforce_ratio = True self.plugin.margin_percent = (500 - 490) / 500 * 1.5 - self.assertImageIsValidArt(self.IMG_500x490, True) + assert self.get_album_art() def test_resize_if_necessary(self): self._require_backend() self.plugin.maxwidth = 300 - self._assert_image_operated(self.IMG_225x225, self.RESIZE_OP, False) - 
self._assert_image_operated(self.IMG_348x348, self.RESIZE_OP, True) + assert self.get_album_art() + self._assert_image_operated(IMAGE_PATH, self.RESIZE_OP, True) def test_fileresize(self): self._require_backend() @@ -979,9 +963,9 @@ class ArtForAlbumTest(UseThePlugin): def test_fileresize_if_necessary(self): self._require_backend() - self.plugin.max_filesize = self.IMG_225x225_SIZE - self._assert_image_operated(self.IMG_225x225, self.RESIZE_OP, False) - self.assertImageIsValidArt(self.IMG_225x225, True) + self.plugin.max_filesize = IMAGE_SIZE + self._assert_image_operated(IMAGE_PATH, self.RESIZE_OP, False) + assert self.get_album_art() def test_fileresize_no_scale(self): self._require_backend() From 8dcc69e6d8624c08d234759f2d782ea8d260aec2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Thu, 8 Aug 2024 17:41:33 +0100 Subject: [PATCH 54/95] Replace _assert_image_operated --- .github/workflows/ci.yaml | 2 +- test/plugins/test_art.py | 175 ++++++++++++++++++-------------------- 2 files changed, 86 insertions(+), 91 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ac3263bcd..390878372 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -52,7 +52,7 @@ jobs: - if: ${{ env.IS_MAIN_PYTHON != 'true' }} name: Test without coverage run: | - poetry install --extras=autobpm --extras=lyrics + poetry install --extras=autobpm --extras=lyrics --extras=embedart poe test - if: ${{ env.IS_MAIN_PYTHON == 'true' }} diff --git a/test/plugins/test_art.py b/test/plugins/test_art.py index 152b86dbb..38f8c7559 100644 --- a/test/plugins/test_art.py +++ b/test/plugins/test_art.py @@ -863,137 +863,132 @@ class ArtImporterTest(UseThePlugin): assert self.album.art_filepath.exists() -IMAGE_PATH = os.path.join(_common.RSRC, b"abbey-similar.jpg") -IMAGE_SIZE = os.stat(util.syspath(IMAGE_PATH)).st_size +class AlbumArtOperationTestCase(UseThePlugin): + """Base test case for album art operations. - -def fs_source_get(_self, album, settings, paths): - if paths: - yield fetchart.Candidate(logger, source_name=_self.ID, path=IMAGE_PATH) - - -@patch("beetsplug.fetchart.FileSystem.get", fs_source_get) -class ArtForAlbumTest(UseThePlugin): - """Tests that fetchart.art_for_album respects the scale & filesize - configurations (e.g., minwidth, enforce_ratio, max_filesize) + Provides common setup for testing album art processing operations by setting + up a mock filesystem source that returns a predefined test image. 
""" - IMG_225x225 = os.path.join(_common.RSRC, b"abbey.jpg") - IMG_348x348 = os.path.join(_common.RSRC, b"abbey-different.jpg") + IMAGE_PATH = os.path.join(_common.RSRC, b"abbey-similar.jpg") + IMAGE_FILESIZE = os.stat(util.syspath(IMAGE_PATH)).st_size + IMAGE_WIDTH = 500 + IMAGE_HEIGHT = 490 + IMAGE_WIDTH_HEIGHT_DIFF = IMAGE_WIDTH - IMAGE_HEIGHT - IMG_225x225_SIZE = os.stat(util.syspath(IMG_225x225)).st_size + @classmethod + def setUpClass(cls): + super().setUpClass() - RESIZE_OP = "resize" - DEINTERLACE_OP = "deinterlace" - REFORMAT_OP = "reformat" + def fs_source_get(_self, album, settings, paths): + if paths: + yield fetchart.Candidate( + logger, source_name=_self.ID, path=cls.IMAGE_PATH + ) - album = _common.Bag() + patch("beetsplug.fetchart.FileSystem.get", fs_source_get).start() + cls.addClassCleanup(patch.stopall) def get_album_art(self): - return self.plugin.art_for_album(self.album, [""], True) + return self.plugin.art_for_album(_common.Bag(), [""], True) - def _assert_image_operated(self, image_file, operation, should_operate): - self.image_file = image_file - with patch.object( - ArtResizer.shared, operation, return_value=self.image_file - ) as mock_operation: - self.get_album_art() - assert mock_operation.called == should_operate - def _require_backend(self): - """Skip the test if the art resizer doesn't have ImageMagick or - PIL (so comparisons and measurements are unavailable). - """ - if not ArtResizer.shared.local: - self.skipTest("ArtResizer has no local imaging backend available") +class AlbumArtOperationConfigurationTest(AlbumArtOperationTestCase): + """Check that scale & filesize configuration is respected. - def test_respect_minwidth(self): - self._require_backend() - self.plugin.minwidth = 300 + Depending on `minwidth`, `enforce_ratio`, `margin_px`, and `margin_percent` + configuration the plugin should or should not return an art candidate. 
+ """ + + def test_minwidth(self): + self.plugin.minwidth = self.IMAGE_WIDTH / 2 assert self.get_album_art() - def test_respect_minwidth_no(self): - self._require_backend() - self.plugin.minwidth = 600 + self.plugin.minwidth = self.IMAGE_WIDTH * 2 assert not self.get_album_art() - def test_respect_enforce_ratio_yes(self): - self._require_backend() + def test_enforce_ratio(self): self.plugin.enforce_ratio = True assert not self.get_album_art() - def test_respect_enforce_ratio_no(self): self.plugin.enforce_ratio = False assert self.get_album_art() - def test_respect_enforce_ratio_px_above(self): - self._require_backend() + def test_enforce_ratio_with_px_margin(self): self.plugin.enforce_ratio = True - self.plugin.margin_px = 5 + + self.plugin.margin_px = self.IMAGE_WIDTH_HEIGHT_DIFF * 0.5 assert not self.get_album_art() - def test_respect_enforce_ratio_px_below(self): - self._require_backend() - self.plugin.enforce_ratio = True - self.plugin.margin_px = 15 + self.plugin.margin_px = self.IMAGE_WIDTH_HEIGHT_DIFF * 1.5 assert self.get_album_art() - def test_respect_enforce_ratio_percent_above(self): - self._require_backend() + def test_enforce_ratio_with_percent_margin(self): self.plugin.enforce_ratio = True - self.plugin.margin_percent = (500 - 490) / 500 * 0.5 + diff_by_width = self.IMAGE_WIDTH_HEIGHT_DIFF / self.IMAGE_WIDTH + + self.plugin.margin_percent = diff_by_width * 0.5 assert not self.get_album_art() - def test_respect_enforce_ratio_percent_below(self): - self._require_backend() - self.plugin.enforce_ratio = True - self.plugin.margin_percent = (500 - 490) / 500 * 1.5 + self.plugin.margin_percent = diff_by_width * 1.5 assert self.get_album_art() - def test_resize_if_necessary(self): - self._require_backend() - self.plugin.maxwidth = 300 + +class AlbumArtPerformOperationTest(AlbumArtOperationTestCase): + """Test that the art is resized and deinterlaced if necessary.""" + + def setUp(self): + super().setUp() + self.resizer_mock = patch.object( + ArtResizer.shared, "resize", return_value=self.IMAGE_PATH + ).start() + self.deinterlacer_mock = patch.object( + ArtResizer.shared, "deinterlace", return_value=self.IMAGE_PATH + ).start() + + def test_resize(self): + self.plugin.maxwidth = self.IMAGE_WIDTH / 2 assert self.get_album_art() - self._assert_image_operated(IMAGE_PATH, self.RESIZE_OP, True) + assert self.resizer_mock.called - def test_fileresize(self): - self._require_backend() - self.plugin.max_filesize = self.IMG_225x225_SIZE // 2 - self._assert_image_operated(self.IMG_225x225, self.RESIZE_OP, True) - - def test_fileresize_if_necessary(self): - self._require_backend() - self.plugin.max_filesize = IMAGE_SIZE - self._assert_image_operated(IMAGE_PATH, self.RESIZE_OP, False) + def test_file_resized(self): + self.plugin.max_filesize = self.IMAGE_FILESIZE // 2 assert self.get_album_art() + assert self.resizer_mock.called - def test_fileresize_no_scale(self): - self._require_backend() - self.plugin.maxwidth = 300 - self.plugin.max_filesize = self.IMG_225x225_SIZE // 2 - self._assert_image_operated(self.IMG_225x225, self.RESIZE_OP, True) + def test_file_not_resized(self): + self.plugin.max_filesize = self.IMAGE_FILESIZE + assert self.get_album_art() + assert not self.resizer_mock.called - def test_fileresize_and_scale(self): - self._require_backend() - self.plugin.maxwidth = 200 - self.plugin.max_filesize = self.IMG_225x225_SIZE // 2 - self._assert_image_operated(self.IMG_225x225, self.RESIZE_OP, True) + def test_file_resized_but_not_scaled(self): + self.plugin.maxwidth = self.IMAGE_WIDTH * 2 
+ self.plugin.max_filesize = self.IMAGE_FILESIZE // 2 + assert self.get_album_art() + assert self.resizer_mock.called - def test_deinterlace(self): - self._require_backend() + def test_file_resized_and_scaled(self): + self.plugin.maxwidth = self.IMAGE_WIDTH / 2 + self.plugin.max_filesize = self.IMAGE_FILESIZE // 2 + assert self.get_album_art() + assert self.resizer_mock.called + + def test_deinterlaced(self): self.plugin.deinterlace = True - self._assert_image_operated(self.IMG_225x225, self.DEINTERLACE_OP, True) + assert self.get_album_art() + assert self.deinterlacer_mock.called + + def test_not_deinterlaced(self): self.plugin.deinterlace = False - self._assert_image_operated( - self.IMG_225x225, self.DEINTERLACE_OP, False - ) + assert self.get_album_art() + assert not self.deinterlacer_mock.called - def test_deinterlace_and_resize(self): - self._require_backend() - self.plugin.maxwidth = 300 + def test_deinterlaced_and_resized(self): + self.plugin.maxwidth = self.IMAGE_WIDTH / 2 self.plugin.deinterlace = True - self._assert_image_operated(self.IMG_348x348, self.DEINTERLACE_OP, True) - self._assert_image_operated(self.IMG_348x348, self.RESIZE_OP, True) + assert self.get_album_art() + assert self.deinterlacer_mock.called + assert self.resizer_mock.called class DeprecatedConfigTest(unittest.TestCase): From ca3adfe525773d16130221afaa1522e99a5c5b08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Fri, 9 Aug 2024 01:50:55 +0100 Subject: [PATCH 55/95] Replace assertPrem --- test/plugins/test_permissions.py | 49 ++++++-------------------------- 1 file changed, 8 insertions(+), 41 deletions(-) diff --git a/test/plugins/test_permissions.py b/test/plugins/test_permissions.py index 274cd92ac..475e98194 100644 --- a/test/plugins/test_permissions.py +++ b/test/plugins/test_permissions.py @@ -6,7 +6,6 @@ from unittest.mock import Mock, patch from beets.test._common import touch from beets.test.helper import AsIsImporterMixin, ImportTestCase, PluginMixin -from beets.util import displayable_path from beetsplug.permissions import ( check_permissions, convert_perm, @@ -23,57 +22,25 @@ class PermissionsPluginTest(AsIsImporterMixin, PluginMixin, ImportTestCase): self.config["permissions"] = {"file": "777", "dir": "777"} def test_permissions_on_album_imported(self): - self.do_thing(True) + self.import_and_check_permissions() def test_permissions_on_item_imported(self): self.config["import"]["singletons"] = True - self.do_thing(True) + self.import_and_check_permissions() - @patch("os.chmod", Mock()) - def test_failing_to_set_permissions(self): - self.do_thing(False) - - def do_thing(self, expect_success): + def import_and_check_permissions(self): if platform.system() == "Windows": self.skipTest("permissions not available on Windows") - def get_stat(v): - return ( - os.stat(os.path.join(self.temp_dir, b"import", *v)).st_mode - & 0o777 - ) - - typs = ["file", "dir"] - - track_file = (b"album", b"track_1.mp3") - self.exp_perms = { - True: { - k: convert_perm(self.config["permissions"][k].get()) - for k in typs - }, - False: {k: get_stat(v) for (k, v) in zip(typs, (track_file, ()))}, - } + track_file = os.path.join(self.import_dir, b"album", b"track_1.mp3") + assert os.stat(track_file).st_mode & 0o777 != 511 self.run_asis_importer() item = self.lib.items().get() - self.assertPerms(item.path, "file", expect_success) - - for path in dirs_in_library(self.lib.directory, item.path): - self.assertPerms(path, "dir", expect_success) - - def assertPerms(self, path, typ, expect_success): - for x 
in [ - (True, self.exp_perms[expect_success][typ], "!="), - (False, self.exp_perms[not expect_success][typ], "=="), - ]: - msg = "{} : {} {} {}".format( - displayable_path(path), - oct(os.stat(path).st_mode), - x[2], - oct(x[1]), - ) - assert x[0] == check_permissions(path, x[1]), msg + paths = (item.path, *dirs_in_library(self.lib.directory, item.path)) + for path in paths: + assert os.stat(path).st_mode & 0o777 == 511 def test_convert_perm_from_string(self): assert convert_perm("10") == 8 From 0dd6cb3b6d9fb01ccce686d93f09569a2184f23d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sat, 31 May 2025 23:20:28 +0100 Subject: [PATCH 56/95] Replace assert_equal_path --- beets/test/_common.py | 10 ----- beets/test/helper.py | 2 +- test/test_importer.py | 8 +--- test/test_ui.py | 100 ++++++++++++++---------------------------- 4 files changed, 35 insertions(+), 85 deletions(-) diff --git a/beets/test/_common.py b/beets/test/_common.py index ca1671cb7..d70f9ec80 100644 --- a/beets/test/_common.py +++ b/beets/test/_common.py @@ -111,16 +111,6 @@ def import_session(lib=None, loghandler=None, paths=[], query=[], cli=False): return cls(lib, loghandler, paths, query) -class Assertions: - """A mixin with additional unit test assertions.""" - - def assert_equal_path(self, a, b): - """Check that two paths are equal.""" - a_bytes, b_bytes = util.normpath(a), util.normpath(b) - - assert a_bytes == b_bytes, f"{a_bytes=} != {b_bytes=}" - - # Mock I/O. diff --git a/beets/test/helper.py b/beets/test/helper.py index 98ce411bd..db753a760 100644 --- a/beets/test/helper.py +++ b/beets/test/helper.py @@ -177,7 +177,7 @@ class IOMixin: self.io.restore() -class TestHelper(_common.Assertions, ConfigMixin): +class TestHelper(ConfigMixin): """Helper mixin for high-level cli and plugin tests. This mixin provides methods to isolate beets' global state provide diff --git a/test/test_importer.py b/test/test_importer.py index a072d43d4..9ec160568 100644 --- a/test/test_importer.py +++ b/test/test_importer.py @@ -131,9 +131,7 @@ class NonAutotaggedImportTest(PathsMixin, AsIsImporterMixin, ImportTestCase): assert self.track_lib_path.exists() assert self.track_lib_path.is_symlink() - self.assert_equal_path( - self.track_lib_path.resolve(), self.track_import_path - ) + assert self.track_lib_path.resolve() == self.track_import_path @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks") def test_import_hardlink_arrives(self): @@ -711,9 +709,7 @@ class ImportExistingTest(PathsMixin, AutotagImportTestCase): def test_outside_file_is_copied(self): config["import"]["copy"] = False self.importer.run() - self.assert_equal_path( - self.lib.items().get().path, self.import_media[0].path - ) + assert self.lib.items().get().filepath == self.track_import_path self.reimporter = self.setup_importer() self.reimporter.add_choice(importer.Action.APPLY) diff --git a/test/test_ui.py b/test/test_ui.py index fd3686ec2..713e69891 100644 --- a/test/test_ui.py +++ b/test/test_ui.py @@ -21,6 +21,7 @@ import shutil import subprocess import sys import unittest +from pathlib import Path from unittest.mock import Mock, patch import pytest @@ -828,9 +829,7 @@ class ConfigTest(TestPluginTestCase): del os.environ["BEETSDIR"] # Also set APPDATA, the Windows equivalent of setting $HOME. 
- appdata_dir = os.fsdecode( - os.path.join(self.temp_dir, b"AppData", b"Roaming") - ) + appdata_dir = self.temp_dir_path / "AppData" / "Roaming" self._orig_cwd = os.getcwd() self.test_cmd = self._make_test_cmd() @@ -838,27 +837,21 @@ class ConfigTest(TestPluginTestCase): # Default user configuration if platform.system() == "Windows": - self.user_config_dir = os.fsencode( - os.path.join(appdata_dir, "beets") - ) + self.user_config_dir = appdata_dir / "beets" else: - self.user_config_dir = os.path.join( - self.temp_dir, b".config", b"beets" - ) - os.makedirs(syspath(self.user_config_dir)) - self.user_config_path = os.path.join( - self.user_config_dir, b"config.yaml" - ) + self.user_config_dir = self.temp_dir_path / ".config" / "beets" + self.user_config_dir.mkdir(parents=True, exist_ok=True) + self.user_config_path = self.user_config_dir / "config.yaml" # Custom BEETSDIR - self.beetsdir = os.path.join(self.temp_dir, b"beetsdir") - self.cli_config_path = os.path.join( - os.fsdecode(self.temp_dir), "config.yaml" - ) - os.makedirs(syspath(self.beetsdir)) + self.beetsdir = self.temp_dir_path / "beetsdir" + self.beetsdir.mkdir(parents=True, exist_ok=True) + + self.env_config_path = str(self.beetsdir / "config.yaml") + self.cli_config_path = str(self.temp_dir_path / "config.yaml") self.env_patcher = patch( "os.environ", - {"HOME": os.fsdecode(self.temp_dir), "APPDATA": appdata_dir}, + {"HOME": str(self.temp_dir_path), "APPDATA": str(appdata_dir)}, ) self.env_patcher.start() @@ -957,9 +950,8 @@ class ConfigTest(TestPluginTestCase): assert config["anoption"].get() == "cli overwrite" def test_cli_config_file_overwrites_beetsdir_defaults(self): - os.environ["BEETSDIR"] = os.fsdecode(self.beetsdir) - env_config_path = os.path.join(self.beetsdir, b"config.yaml") - with open(env_config_path, "w") as file: + os.environ["BEETSDIR"] = str(self.beetsdir) + with open(self.env_config_path, "w") as file: file.write("anoption: value") with open(self.cli_config_path, "w") as file: @@ -1006,39 +998,25 @@ class ConfigTest(TestPluginTestCase): file.write("statefile: state") self.run_command("--config", self.cli_config_path, "test", lib=None) - self.assert_equal_path( - util.bytestring_path(config["library"].as_filename()), - os.path.join(self.user_config_dir, b"beets.db"), - ) - self.assert_equal_path( - util.bytestring_path(config["statefile"].as_filename()), - os.path.join(self.user_config_dir, b"state"), - ) + assert config["library"].as_path() == self.user_config_dir / "beets.db" + assert config["statefile"].as_path() == self.user_config_dir / "state" def test_cli_config_paths_resolve_relative_to_beetsdir(self): - os.environ["BEETSDIR"] = os.fsdecode(self.beetsdir) + os.environ["BEETSDIR"] = str(self.beetsdir) with open(self.cli_config_path, "w") as file: file.write("library: beets.db\n") file.write("statefile: state") self.run_command("--config", self.cli_config_path, "test", lib=None) - self.assert_equal_path( - util.bytestring_path(config["library"].as_filename()), - os.path.join(self.beetsdir, b"beets.db"), - ) - self.assert_equal_path( - util.bytestring_path(config["statefile"].as_filename()), - os.path.join(self.beetsdir, b"state"), - ) + assert config["library"].as_path() == self.beetsdir / "beets.db" + assert config["statefile"].as_path() == self.beetsdir / "state" def test_command_line_option_relative_to_working_dir(self): config.read() os.chdir(syspath(self.temp_dir)) self.run_command("--library", "foo.db", "test", lib=None) - self.assert_equal_path( - config["library"].as_filename(), 
os.path.join(os.getcwd(), "foo.db") - ) + assert config["library"].as_path() == Path.cwd() / "foo.db" def test_cli_config_file_loads_plugin_commands(self): with open(self.cli_config_path, "w") as file: @@ -1050,24 +1028,23 @@ class ConfigTest(TestPluginTestCase): self.unload_plugins() def test_beetsdir_config(self): - os.environ["BEETSDIR"] = os.fsdecode(self.beetsdir) + os.environ["BEETSDIR"] = str(self.beetsdir) - env_config_path = os.path.join(self.beetsdir, b"config.yaml") - with open(env_config_path, "w") as file: + with open(self.env_config_path, "w") as file: file.write("anoption: overwrite") config.read() assert config["anoption"].get() == "overwrite" def test_beetsdir_points_to_file_error(self): - beetsdir = os.path.join(self.temp_dir, b"beetsfile") + beetsdir = str(self.temp_dir_path / "beetsfile") open(beetsdir, "a").close() - os.environ["BEETSDIR"] = os.fsdecode(beetsdir) + os.environ["BEETSDIR"] = beetsdir with pytest.raises(ConfigError): self.run_command("test") def test_beetsdir_config_does_not_load_default_user_config(self): - os.environ["BEETSDIR"] = os.fsdecode(self.beetsdir) + os.environ["BEETSDIR"] = str(self.beetsdir) with open(self.user_config_path, "w") as file: file.write("anoption: value") @@ -1076,35 +1053,22 @@ class ConfigTest(TestPluginTestCase): assert not config["anoption"].exists() def test_default_config_paths_resolve_relative_to_beetsdir(self): - os.environ["BEETSDIR"] = os.fsdecode(self.beetsdir) + os.environ["BEETSDIR"] = str(self.beetsdir) config.read() - self.assert_equal_path( - util.bytestring_path(config["library"].as_filename()), - os.path.join(self.beetsdir, b"library.db"), - ) - self.assert_equal_path( - util.bytestring_path(config["statefile"].as_filename()), - os.path.join(self.beetsdir, b"state.pickle"), - ) + assert config["library"].as_path() == self.beetsdir / "library.db" + assert config["statefile"].as_path() == self.beetsdir / "state.pickle" def test_beetsdir_config_paths_resolve_relative_to_beetsdir(self): - os.environ["BEETSDIR"] = os.fsdecode(self.beetsdir) + os.environ["BEETSDIR"] = str(self.beetsdir) - env_config_path = os.path.join(self.beetsdir, b"config.yaml") - with open(env_config_path, "w") as file: + with open(self.env_config_path, "w") as file: file.write("library: beets.db\n") file.write("statefile: state") config.read() - self.assert_equal_path( - util.bytestring_path(config["library"].as_filename()), - os.path.join(self.beetsdir, b"beets.db"), - ) - self.assert_equal_path( - util.bytestring_path(config["statefile"].as_filename()), - os.path.join(self.beetsdir, b"state"), - ) + assert config["library"].as_path() == self.beetsdir / "beets.db" + assert config["statefile"].as_path() == self.beetsdir / "state" class ShowModelChangeTest(IOMixin, unittest.TestCase): From 48d45b4df7d69c3c8d70a8fd5715a7ff0da44c40 Mon Sep 17 00:00:00 2001 From: 54562474 <54562474+5061726b6572@users.noreply.github.com> Date: Tue, 4 Mar 2025 18:02:04 -0700 Subject: [PATCH 57/95] feat: mpdstats: add config option for played ratio threshold to determine if a track was played or skipped. --- beetsplug/mpdstats.py | 20 ++++++++++---------- docs/changelog.rst | 3 +++ docs/plugins/mpdstats.rst | 3 +++ 3 files changed, 16 insertions(+), 10 deletions(-) diff --git a/beetsplug/mpdstats.py b/beetsplug/mpdstats.py index 20faf225f..52ae88e1f 100644 --- a/beetsplug/mpdstats.py +++ b/beetsplug/mpdstats.py @@ -27,6 +27,7 @@ from beets.util import displayable_path # much time should we wait between retries? 
RETRIES = 10 RETRY_INTERVAL = 5 +DUPLICATE_PLAY_THRESHOLD = 10.0 mpd_config = config["mpd"] @@ -143,7 +144,9 @@ class MPDStats: self.do_rating = mpd_config["rating"].get(bool) self.rating_mix = mpd_config["rating_mix"].get(float) - self.time_threshold = 10.0 # TODO: maybe add config option? + self.played_ratio_threshold = mpd_config["played_ratio_threshold"].get( + float + ) self.now_playing = None self.mpd = MPDClientWrapper(log) @@ -216,10 +219,8 @@ class MPDStats: Returns whether the change was manual (skipped previous song or not) """ - diff = abs(song["remaining"] - (time.time() - song["started"])) - - skipped = diff >= self.time_threshold - + elapsed = song["elapsed_at_start"] + (time.time() - song["started"]) + skipped = elapsed / song["duration"] < self.played_ratio_threshold if skipped: self.handle_skipped(song) else: @@ -256,13 +257,10 @@ class MPDStats: def on_play(self, status): path, songid = self.mpd.currentsong() - if not path: return played, duration = map(int, status["time"].split(":", 1)) - remaining = duration - played - if self.now_playing: if self.now_playing["path"] != path: self.handle_song_change(self.now_playing) @@ -273,7 +271,7 @@ class MPDStats: # after natural song start. diff = abs(time.time() - self.now_playing["started"]) - if diff <= self.time_threshold: + if diff <= DUPLICATE_PLAY_THRESHOLD: return if self.now_playing["path"] == path and played == 0: @@ -288,7 +286,8 @@ class MPDStats: self.now_playing = { "started": time.time(), - "remaining": remaining, + "elapsed_at_start": played, + "duration": duration, "path": path, "id": songid, "beets_item": self.get_item(path), @@ -337,6 +336,7 @@ class MPDStatsPlugin(plugins.BeetsPlugin): "host": os.environ.get("MPD_HOST", "localhost"), "port": int(os.environ.get("MPD_PORT", 6600)), "password": "", + "played_ratio_threshold": 0.85, } ) mpd_config["password"].redact = True diff --git a/docs/changelog.rst b/docs/changelog.rst index 09f2fcbb0..f9dafa00c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -30,6 +30,9 @@ New features: :bug:`5829` * :doc:`plugins/mbcollection`: When getting the user collections, only consider collections of releases, and ignore collections of other entity types. +* :doc:`plugins/mpdstats`: Add new configuration option, + ``played_ratio_threshold``, to allow configuring the percentage the song must + be played for it to be counted as played instead of skipped. Bug fixes: diff --git a/docs/plugins/mpdstats.rst b/docs/plugins/mpdstats.rst index cb2cf1606..865b615a7 100644 --- a/docs/plugins/mpdstats.rst +++ b/docs/plugins/mpdstats.rst @@ -58,6 +58,9 @@ configuration file. The available options are: Default: ``yes``. - **rating_mix**: Tune the way rating is calculated (see below). Default: 0.75. +- **played_ratio_threshold**: If a song was played for less than this percentage + of its duration it will be considered a skip. + Default: 0.85 A Word on Ratings ----------------- From 06b6f4f27b6d06f763ff0add1019d39c4c5ed9de Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:27:37 +0200 Subject: [PATCH 58/95] Isolated autottag related plugin functions into an new metadata_plugins file. 
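
This moves the metadata-source lookup helpers out of beets.plugins and into a
new beets/metadata_plugins.py module: candidates(), item_candidates(),
album_for_id(), track_for_id(), the track/album distance helpers, and a
MetadataSourcePluginNext base class for source plugins to derive from. Callers
such as beets/autotag/match.py now import beets.metadata_plugins instead of
going through beets.plugins.

A rough sketch of the new call sites (variable names as used in
autotag/match.py; the ID string is only a placeholder):

    from beets import metadata_plugins

    # Ask every loaded metadata source plugin to resolve an album ID; the
    # first plugin that recognises it returns an AlbumInfo, otherwise None.
    info = metadata_plugins.album_for_id("some-album-id")

    # Free-text album search across all loaded metadata source plugins.
    for candidate in metadata_plugins.candidates(
        items, search_artist, search_album, va_likely
    ):
        ...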
--- beets/autotag/match.py | 12 +- beets/metadata_plugins.py | 367 ++++++++++++++++++++++++++++++++++++++ beets/plugins.py | 300 +------------------------------ 3 files changed, 376 insertions(+), 303 deletions(-) create mode 100644 beets/metadata_plugins.py diff --git a/beets/autotag/match.py b/beets/autotag/match.py index 64572cf3b..e74d21755 100644 --- a/beets/autotag/match.py +++ b/beets/autotag/match.py @@ -24,7 +24,7 @@ from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar import lap import numpy as np -from beets import config, logging, plugins +from beets import config, logging, metadata_plugins from beets.autotag import AlbumInfo, AlbumMatch, TrackInfo, TrackMatch, hooks from beets.util import get_most_common_tags @@ -119,7 +119,7 @@ def match_by_id(items: Iterable[Item]) -> AlbumInfo | None: return None # If all album IDs are equal, look up the album. log.debug("Searching for discovered album ID: {0}", first) - return plugins.album_for_id(first) + return metadata_plugins.album_for_id(first) def _recommendation( @@ -274,7 +274,7 @@ def tag_album( if search_ids: for search_id in search_ids: log.debug("Searching for album ID: {0}", search_id) - if info := plugins.album_for_id(search_id): + if info := metadata_plugins.album_for_id(search_id): _add_candidate(items, candidates, info) # Use existing metadata or text search. @@ -311,7 +311,7 @@ def tag_album( log.debug("Album might be VA: {0}", va_likely) # Get the results from the data sources. - for matched_candidate in plugins.candidates( + for matched_candidate in metadata_plugins.candidates( items, search_artist, search_album, va_likely ): _add_candidate(items, candidates, matched_candidate) @@ -346,7 +346,7 @@ def tag_item( if trackids: for trackid in trackids: log.debug("Searching for track ID: {0}", trackid) - if info := plugins.track_for_id(trackid): + if info := metadata_plugins.track_for_id(trackid): dist = track_distance(item, info, incl_artist=True) candidates[info.track_id] = hooks.TrackMatch(dist, info) # If this is a good match, then don't keep searching. @@ -372,7 +372,7 @@ def tag_item( log.debug("Item search terms: {0} - {1}", search_artist, search_title) # Get and evaluate candidate metadata. - for track_info in plugins.item_candidates( + for track_info in metadata_plugins.item_candidates( item, search_artist, search_title ): dist = track_distance(item, track_info, incl_artist=True) diff --git a/beets/metadata_plugins.py b/beets/metadata_plugins.py new file mode 100644 index 000000000..63893ad18 --- /dev/null +++ b/beets/metadata_plugins.py @@ -0,0 +1,367 @@ +"""Metadata source plugin interface. + +This allows beets to lookup metadata from various sources. We define +a common interface for all metadata sources which need to be +implemented as plugins. +""" + +from __future__ import annotations + +import abc +import re +from typing import ( + TYPE_CHECKING, + Any, + Generic, + Iterator, + Literal, + Sequence, + TypedDict, + TypeVar, +) + +from typing_extensions import NotRequired + +from .plugins import BeetsPlugin, find_plugins, notify_info_yielded, send + +if TYPE_CHECKING: + from confuse import ConfigView + + from .autotag import Distance + from .autotag.hooks import AlbumInfo, Item, TrackInfo + + +def find_metadata_source_plugins() -> list[MetadataSourcePluginNext]: + """Returns a list of MetadataSourcePluginNew subclass instances from all + currently loaded beets plugins. 
+ """ + return [ + plugin + for plugin in find_plugins() + if isinstance(plugin, MetadataSourcePluginNext) + ] + + +@notify_info_yielded("albuminfo_received") +def candidates(*args, **kwargs) -> Iterator[AlbumInfo]: + """Return matching album candidates by using all metadata source + plugins.""" + for plugin in find_metadata_source_plugins(): + yield from plugin.candidates(*args, **kwargs) + + +@notify_info_yielded("trackinfo_received") +def item_candidates(*args, **kwargs) -> Iterator[TrackInfo]: + """Return matching track candidates by using all metadata source + plugins.""" + for plugin in find_metadata_source_plugins(): + yield from plugin.item_candidates(*args, **kwargs) + + +def album_for_id(_id: str) -> AlbumInfo | None: + """Get AlbumInfo object for the given ID string. + + A single ID can yield just a single album, so we return the first match. + """ + for plugin in find_metadata_source_plugins(): + if info := plugin.album_for_id(album_id=_id): + send("albuminfo_received", info=info) + return info + + return None + + +def track_for_id(_id: str) -> TrackInfo | None: + """Get TrackInfo object for the given ID string. + + A single ID can yield just a single track, so we return the first match. + """ + for plugin in find_metadata_source_plugins(): + if info := plugin.track_for_id(_id): + send("trackinfo_received", info=info) + return info + + return None + + +def track_distance(item: Item, info: TrackInfo) -> Distance: + """Gets the track distance calculated by all loaded plugins. + Returns a Distance object. + """ + from beets.autotag.hooks import Distance + + dist = Distance() + for plugin in find_metadata_source_plugins(): + dist.update(plugin.track_distance(item, info)) + return dist + + +def album_distance( + items: Sequence[Item], + album_info: AlbumInfo, + mapping: dict[Item, TrackInfo], +) -> Distance: + """Returns the album distance calculated by plugins.""" + from beets.autotag.hooks import Distance + + dist = Distance() + for plugin in find_metadata_source_plugins(): + dist.update(plugin.album_distance(items, album_info, mapping)) + return dist + + +def _get_distance( + config: ConfigView, data_source: str, info: AlbumInfo | TrackInfo +) -> Distance: + """Returns the ``data_source`` weight and the maximum source weight + for albums or individual tracks. + """ + from beets.autotag.hooks import Distance + + dist = Distance() + if info.data_source == data_source: + dist.add("source", config["source_weight"].as_number()) + return dist + + +class MetadataSourcePluginNext(BeetsPlugin, metaclass=abc.ABCMeta): + """A plugin that provides metadata from a specific source. + + This base class implements a contract for plugins that provide metadata + from a specific source. The plugin must implement the methods to search for albums + and tracks, and to retrieve album and track information by ID. + + TODO: Rename once all plugins are migrated to this interface. + """ + + data_source: str + + def __init__(self, data_source: str, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self.data_source = data_source or self.__class__.__name__ + self.config.add({"source_weight": 0.5}) + + # --------------------------------- id lookup -------------------------------- # + + def albums_for_ids(self, ids: Sequence[str]) -> Iterator[AlbumInfo | None]: + """Batch lookup of album metadata for a list of album IDs. + + Given a list of album identifiers, yields corresponding AlbumInfo + objects. Missing albums result in None values in the output iterator. 
+ Plugins may implement this for optimized batched lookups instead of + single calls to album_for_id. + """ + + return iter(self.album_for_id(id) for id in ids) + + @abc.abstractmethod + def album_for_id(self, album_id: str) -> AlbumInfo | None: + """Return :py:class:`AlbumInfo` object or None if no matching release was + found.""" + raise NotImplementedError + + def tracks_for_ids(self, ids: Sequence[str]) -> Iterator[TrackInfo | None]: + """Batch lookup of track metadata for a list of track IDs. + + Given a list of track identifiers, yields corresponding TrackInfo objects. + Missing tracks result in None values in the output iterator. Plugins may + implement this for optimized batched lookups instead of single calls to + track_for_id. + """ + + return iter(self.track_for_id(id) for id in ids) + + @abc.abstractmethod + def track_for_id(self, track_id: str) -> TrackInfo | None: + """Return a :py:class:`AlbumInfo` object or None if no matching release was + found. + """ + raise NotImplementedError + + # ---------------------------------- search ---------------------------------- # + + @abc.abstractmethod + def candidates( + self, + items: Sequence[Item], + artist: str, + album: str, + va_likely: bool, + extra_tags: dict[str, Any] | None = None, + ) -> Iterator[AlbumInfo]: + """Return :py:class:`AlbumInfo` candidates that match the given album. + + Used in the autotag functionality to search for albums. + + :param items: List of items in the album + :param artist: Album artist + :param album: Album name + :param va_likely: Whether the album is likely to be by various artists + :param extra_tags: is a an optional dictionary of extra tags to search. + TODO: remove: + Currently relevant to :py:class:`MusicBrainzPlugin` autotagger and can be + ignored by other plugins + """ + raise NotImplementedError + + @abc.abstractmethod + def item_candidates( + self, item: Item, artist: str, title: str + ) -> Iterator[TrackInfo]: + """Return :py:class:`TrackInfo` candidates that match the given track. + + Used in the autotag functionality to search for tracks. + + :param item: Track item + :param artist: Track artist + :param title: Track title + """ + raise NotImplementedError + + # --------------------------------- distances -------------------------------- # + + def album_distance( + self, + items: Sequence[Item], + album_info: AlbumInfo, + mapping: dict[Item, TrackInfo], + ) -> Distance: + return _get_distance( + data_source=self.data_source, info=album_info, config=self.config + ) + + def track_distance( + self, + item: Item, + info: TrackInfo, + ) -> Distance: + return _get_distance( + data_source=self.data_source, info=info, config=self.config + ) + + +class IDResponse(TypedDict): + """Response from the API containing an ID.""" + + id: str + + +class SearchFilter(TypedDict): + artist: NotRequired[str] + album: NotRequired[str] + + +R = TypeVar("R", bound=IDResponse) + + +class SearchApiMetadataSourcePluginNext( + Generic[R], MetadataSourcePluginNext, metaclass=abc.ABCMeta +): + """Helper class to implement a metadata source plugin with an API. + + Plugins using this ABC must implement an API search method to + retrieve album and track information by ID, + i.e. `album_for_id` and `track_for_id`, and a search method to + perform a search on the API. The search method should return a list + of identifiers for the requested type (album or track). 
+ """ + + @abc.abstractmethod + def _search_api( + self, + query_type: Literal["album", "track"], + filters: SearchFilter | None = None, + keywords: str = "", + ) -> Sequence[R] | None: + """Perform a search on the API. + + :param query_type: The type of query to perform. + :param filters: A dictionary of filters to apply to the search. + :param keywords: Additional keywords to include in the search. + + Should return a list of identifiers for the requested type (album or track). + """ + raise NotImplementedError + + def candidates( + self, + items: Sequence[Item], + artist: str, + album: str, + va_likely: bool, + extra_tags: dict[str, Any] | None = None, + ) -> Iterator[AlbumInfo]: + query_filters: SearchFilter = {"album": album} + if not va_likely: + query_filters["artist"] = artist + + results = self._search_api("album", query_filters) + if not results: + return + + yield from filter( + None, self.albums_for_ids([result["id"] for result in results]) + ) + + def item_candidates( + self, item: Item, artist: str, title: str + ) -> Iterator[TrackInfo]: + results = self._search_api("track", {"artist": artist}, keywords=title) + if not results: + return + + yield from filter( + None, self.tracks_for_ids([result["id"] for result in results]) + ) + + +def artists_to_artist_str( + artists, + id_key: str | int = "id", + name_key: str | int = "name", + join_key: str | int | None = None, +) -> tuple[str, str | None]: + """Returns an artist string (all artists) and an artist_id (the main + artist) for a list of artist object dicts. + + For each artist, this function moves articles (such as 'a', 'an', + and 'the') to the front and strips trailing disambiguation numbers. It + returns a tuple containing the comma-separated string of all + normalized artists and the ``id`` of the main/first artist. + Alternatively a keyword can be used to combine artists together into a + single string by passing the join_key argument. + + :param artists: Iterable of artist dicts or lists returned by API. + :type artists: list[dict] or list[list] + :param id_key: Key or index corresponding to the value of ``id`` for + the main/first artist. Defaults to 'id'. + :param name_key: Key or index corresponding to values of names + to concatenate for the artist string (containing all artists). + Defaults to 'name'. + :param join_key: Key or index corresponding to a field containing a + keyword to use for combining artists into a single string, for + example "Feat.", "Vs.", "And" or similar. The default is None + which keeps the default behaviour (comma-separated). + :return: Normalized artist string. + """ + artist_id = None + artist_string = "" + artists = list(artists) # In case a generator was passed. + total = len(artists) + for idx, artist in enumerate(artists): + if not artist_id: + artist_id = artist[id_key] + name = artist[name_key] + # Strip disambiguation number. + name = re.sub(r" \(\d+\)$", "", name) + # Move articles to the front. + name = re.sub(r"^(.*?), (a|an|the)$", r"\2 \1", name, flags=re.I) + # Use a join keyword if requested and available. + if idx < (total - 1): # Skip joining on last. 
+ if join_key and artist.get(join_key, None): + name += f" {artist[join_key]} " + else: + name += ", " + artist_string += name + + return artist_string, artist_id diff --git a/beets/plugins.py b/beets/plugins.py index 983d15402..668f8585d 100644 --- a/beets/plugins.py +++ b/beets/plugins.py @@ -23,22 +23,12 @@ import sys import traceback from collections import defaultdict from functools import wraps -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Generic, - Literal, - Sequence, - TypedDict, - TypeVar, -) +from typing import TYPE_CHECKING, Any, Callable, Sequence, TypeVar import mediafile import beets from beets import logging -from beets.util.id_extractors import extract_release_id if TYPE_CHECKING: from beets.event_types import EventType @@ -115,7 +105,7 @@ class PluginLogFilter(logging.Filter): # Managing the plugins themselves. -class BeetsPlugin: +class BeetsPlugin(metaclass=abc.ABCMeta): """The base class for all beets plugins. Plugins provide functionality by defining a subclass of BeetsPlugin and overriding the abstract methods defined here. @@ -218,66 +208,6 @@ class BeetsPlugin: """Return a dict mapping prefixes to Query subclasses.""" return {} - def track_distance( - self, - item: Item, - info: TrackInfo, - ) -> Distance: - """Should return a Distance object to be added to the - distance for every track comparison. - """ - from beets.autotag.distance import Distance - - return Distance() - - def album_distance( - self, - items: Sequence[Item], - album_info: AlbumInfo, - mapping: dict[Item, TrackInfo], - ) -> Distance: - """Should return a Distance object to be added to the - distance for every album-level comparison. - """ - from beets.autotag.distance import Distance - - return Distance() - - def candidates( - self, items: list[Item], artist: str, album: str, va_likely: bool - ) -> Iterable[AlbumInfo]: - """Return :py:class:`AlbumInfo` candidates that match the given album. - - :param items: List of items in the album - :param artist: Album artist - :param album: Album name - :param va_likely: Whether the album is likely to be by various artists - """ - yield from () - - def item_candidates( - self, item: Item, artist: str, title: str - ) -> Iterable[TrackInfo]: - """Return :py:class:`TrackInfo` candidates that match the given track. - - :param item: Track item - :param artist: Track artist - :param title: Track title - """ - yield from () - - def album_for_id(self, album_id: str) -> AlbumInfo | None: - """Return an AlbumInfo object or None if no matching release was - found. - """ - return None - - def track_for_id(self, track_id: str) -> TrackInfo | None: - """Return a TrackInfo object or None if no matching release was - found. - """ - return None - def add_media_field( self, name: str, descriptor: mediafile.MediaField ) -> None: @@ -372,7 +302,7 @@ def load_plugins(names: Sequence[str] = ()) -> None: isinstance(obj, type) and issubclass(obj, BeetsPlugin) and obj != BeetsPlugin - and obj != MetadataSourcePlugin + and not inspect.isabstract(obj) and obj not in _classes ): _classes.add(obj) @@ -456,32 +386,6 @@ def named_queries(model_cls: type[AnyModel]) -> dict[str, FieldQueryType]: return queries -def track_distance(item: Item, info: TrackInfo) -> Distance: - """Gets the track distance calculated by all loaded plugins. - Returns a Distance object. 
- """ - from beets.autotag.distance import Distance - - dist = Distance() - for plugin in find_plugins(): - dist.update(plugin.track_distance(item, info)) - return dist - - -def album_distance( - items: Sequence[Item], - album_info: AlbumInfo, - mapping: dict[Item, TrackInfo], -) -> Distance: - """Returns the album distance calculated by plugins.""" - from beets.autotag.distance import Distance - - dist = Distance() - for plugin in find_plugins(): - dist.update(plugin.album_distance(items, album_info, mapping)) - return dist - - def notify_info_yielded(event: str) -> Callable[[IterF[P, Ret]], IterF[P, Ret]]: """Makes a generator send the event 'event' every time it yields. This decorator is supposed to decorate a generator, but any function @@ -502,46 +406,6 @@ def notify_info_yielded(event: str) -> Callable[[IterF[P, Ret]], IterF[P, Ret]]: return decorator -@notify_info_yielded("albuminfo_received") -def candidates(*args, **kwargs) -> Iterable[AlbumInfo]: - """Return matching album candidates from all plugins.""" - for plugin in find_plugins(): - yield from plugin.candidates(*args, **kwargs) - - -@notify_info_yielded("trackinfo_received") -def item_candidates(*args, **kwargs) -> Iterable[TrackInfo]: - """Return matching track candidates from all plugins.""" - for plugin in find_plugins(): - yield from plugin.item_candidates(*args, **kwargs) - - -def album_for_id(_id: str) -> AlbumInfo | None: - """Get AlbumInfo object for the given ID string. - - A single ID can yield just a single album, so we return the first match. - """ - for plugin in find_plugins(): - if info := plugin.album_for_id(_id): - send("albuminfo_received", info=info) - return info - - return None - - -def track_for_id(_id: str) -> TrackInfo | None: - """Get TrackInfo object for the given ID string. - - A single ID can yield just a single track, so we return the first match. - """ - for plugin in find_plugins(): - if info := plugin.track_for_id(_id): - send("trackinfo_received", info=info) - return info - - return None - - def template_funcs() -> TFuncMap[str]: """Get all the template functions declared by plugins as a dictionary. @@ -656,18 +520,6 @@ def feat_tokens(for_artist: bool = True) -> str: ) -def get_distance( - config: ConfigView, data_source: str, info: AlbumInfo | TrackInfo -) -> Distance: - """Returns the ``data_source`` weight and the maximum source weight - for albums or individual tracks. - """ - from beets.autotag.distance import Distance - - dist = Distance() - if info.data_source == data_source: - dist.add("source", config["source_weight"].as_number()) - return dist def apply_item_changes( @@ -695,149 +547,3 @@ def apply_item_changes( item.try_write() item.store() - - -class Response(TypedDict): - """A dictionary with the response of a plugin API call. - - May be extended by plugins to include additional information, but `id` - is required. 
- """ - - id: str - - -R = TypeVar("R", bound=Response) - - -class MetadataSourcePlugin(Generic[R], BeetsPlugin, metaclass=abc.ABCMeta): - def __init__(self): - super().__init__() - self.config.add({"source_weight": 0.5}) - - @property - @abc.abstractmethod - def data_source(self) -> str: - raise NotImplementedError - - @property - @abc.abstractmethod - def search_url(self) -> str: - raise NotImplementedError - - @property - @abc.abstractmethod - def album_url(self) -> str: - raise NotImplementedError - - @property - @abc.abstractmethod - def track_url(self) -> str: - raise NotImplementedError - - @abc.abstractmethod - def _search_api( - self, - query_type: Literal["album", "track"], - filters: dict[str, str], - keywords: str = "", - ) -> Sequence[R]: - raise NotImplementedError - - @abc.abstractmethod - def album_for_id(self, album_id: str) -> AlbumInfo | None: - raise NotImplementedError - - @abc.abstractmethod - def track_for_id(self, track_id: str) -> TrackInfo | None: - raise NotImplementedError - - @staticmethod - def get_artist( - artists, - id_key: str | int = "id", - name_key: str | int = "name", - join_key: str | int | None = None, - ) -> tuple[str, str | None]: - """Returns an artist string (all artists) and an artist_id (the main - artist) for a list of artist object dicts. - - For each artist, this function moves articles (such as 'a', 'an', - and 'the') to the front and strips trailing disambiguation numbers. It - returns a tuple containing the comma-separated string of all - normalized artists and the ``id`` of the main/first artist. - Alternatively a keyword can be used to combine artists together into a - single string by passing the join_key argument. - - :param artists: Iterable of artist dicts or lists returned by API. - :type artists: list[dict] or list[list] - :param id_key: Key or index corresponding to the value of ``id`` for - the main/first artist. Defaults to 'id'. - :param name_key: Key or index corresponding to values of names - to concatenate for the artist string (containing all artists). - Defaults to 'name'. - :param join_key: Key or index corresponding to a field containing a - keyword to use for combining artists into a single string, for - example "Feat.", "Vs.", "And" or similar. The default is None - which keeps the default behaviour (comma-separated). - :return: Normalized artist string. - """ - artist_id = None - artist_string = "" - artists = list(artists) # In case a generator was passed. - total = len(artists) - for idx, artist in enumerate(artists): - if not artist_id: - artist_id = artist[id_key] - name = artist[name_key] - # Strip disambiguation number. - name = re.sub(r" \(\d+\)$", "", name) - # Move articles to the front. - name = re.sub(r"^(.*?), (a|an|the)$", r"\2 \1", name, flags=re.I) - # Use a join keyword if requested and available. - if idx < (total - 1): # Skip joining on last. 
- if join_key and artist.get(join_key, None): - name += f" {artist[join_key]} " - else: - name += ", " - artist_string += name - - return artist_string, artist_id - - def _get_id(self, id_string: str) -> str | None: - """Parse release ID from the given ID string.""" - return extract_release_id(self.data_source.lower(), id_string) - - def candidates( - self, items: list[Item], artist: str, album: str, va_likely: bool - ) -> Iterable[AlbumInfo]: - query_filters = {"album": album} - if not va_likely: - query_filters["artist"] = artist - for result in self._search_api("album", query_filters): - if info := self.album_for_id(result["id"]): - yield info - - def item_candidates( - self, item: Item, artist: str, title: str - ) -> Iterable[TrackInfo]: - for result in self._search_api( - "track", {"artist": artist}, keywords=title - ): - if info := self.track_for_id(result["id"]): - yield info - - def album_distance( - self, - items: Sequence[Item], - album_info: AlbumInfo, - mapping: dict[Item, TrackInfo], - ) -> Distance: - return get_distance( - data_source=self.data_source, info=album_info, config=self.config - ) - - def track_distance(self, item: Item, info: TrackInfo) -> Distance: - return get_distance( - data_source=self.data_source, info=info, config=self.config - ) From 24295d68447eb0af106ab27c26694330e661216b Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:33:20 +0200 Subject: [PATCH 59/95] Renamed plugin, adjusted some docstrings and moved artists_to_artist_str back into get_artist method. --- beets/metadata_plugins.py | 258 +++++++++++++++++++------------------- 1 file changed, 129 insertions(+), 129 deletions(-) diff --git a/beets/metadata_plugins.py b/beets/metadata_plugins.py index 63893ad18..87ff9eb98 100644 --- a/beets/metadata_plugins.py +++ b/beets/metadata_plugins.py @@ -9,51 +9,46 @@ from __future__ import annotations import abc import re -from typing import ( - TYPE_CHECKING, - Any, - Generic, - Iterator, - Literal, - Sequence, - TypedDict, - TypeVar, -) +from typing import TYPE_CHECKING, Generic, Literal, Sequence, TypedDict, TypeVar from typing_extensions import NotRequired +from beets.util import cached_classproperty +from beets.util.id_extractors import extract_release_id + from .plugins import BeetsPlugin, find_plugins, notify_info_yielded, send if TYPE_CHECKING: + from collections.abc import Iterable + from confuse import ConfigView from .autotag import Distance from .autotag.hooks import AlbumInfo, Item, TrackInfo -def find_metadata_source_plugins() -> list[MetadataSourcePluginNext]: - """Returns a list of MetadataSourcePluginNew subclass instances from all - currently loaded beets plugins. +def find_metadata_source_plugins() -> list[MetadataSourcePlugin]: + """Returns a list of MetadataSourcePlugin subclass instances + + Resolved from all currently loaded beets plugins. 
""" return [ plugin for plugin in find_plugins() - if isinstance(plugin, MetadataSourcePluginNext) + if isinstance(plugin, MetadataSourcePlugin) ] @notify_info_yielded("albuminfo_received") -def candidates(*args, **kwargs) -> Iterator[AlbumInfo]: - """Return matching album candidates by using all metadata source - plugins.""" +def candidates(*args, **kwargs) -> Iterable[AlbumInfo]: + """Return matching album candidates from all metadata source plugins.""" for plugin in find_metadata_source_plugins(): yield from plugin.candidates(*args, **kwargs) @notify_info_yielded("trackinfo_received") -def item_candidates(*args, **kwargs) -> Iterator[TrackInfo]: - """Return matching track candidates by using all metadata source - plugins.""" +def item_candidates(*args, **kwargs) -> Iterable[TrackInfo]: + """Return matching track candidates fromm all metadata source plugins.""" for plugin in find_metadata_source_plugins(): yield from plugin.item_candidates(*args, **kwargs) @@ -85,10 +80,12 @@ def track_for_id(_id: str) -> TrackInfo | None: def track_distance(item: Item, info: TrackInfo) -> Distance: - """Gets the track distance calculated by all loaded plugins. - Returns a Distance object. + """Returns the track distance for an item and trackinfo. + + Returns a Distance object is populated by all metadata source plugins + that implement the :py:meth:`MetadataSourcePlugin.track_distance` method. """ - from beets.autotag.hooks import Distance + from beets.autotag.distance import Distance dist = Distance() for plugin in find_metadata_source_plugins(): @@ -102,7 +99,7 @@ def album_distance( mapping: dict[Item, TrackInfo], ) -> Distance: """Returns the album distance calculated by plugins.""" - from beets.autotag.hooks import Distance + from beets.autotag.distance import Distance dist = Distance() for plugin in find_metadata_source_plugins(): @@ -116,7 +113,7 @@ def _get_distance( """Returns the ``data_source`` weight and the maximum source weight for albums or individual tracks. """ - from beets.autotag.hooks import Distance + from beets.autotag.distance import Distance dist = Distance() if info.data_source == data_source: @@ -124,56 +121,27 @@ def _get_distance( return dist -class MetadataSourcePluginNext(BeetsPlugin, metaclass=abc.ABCMeta): +class MetadataSourcePlugin(BeetsPlugin, metaclass=abc.ABCMeta): """A plugin that provides metadata from a specific source. This base class implements a contract for plugins that provide metadata from a specific source. The plugin must implement the methods to search for albums and tracks, and to retrieve album and track information by ID. - - TODO: Rename once all plugins are migrated to this interface. """ - data_source: str - - def __init__(self, data_source: str, *args, **kwargs) -> None: + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) - self.data_source = data_source or self.__class__.__name__ self.config.add({"source_weight": 0.5}) - # --------------------------------- id lookup -------------------------------- # - - def albums_for_ids(self, ids: Sequence[str]) -> Iterator[AlbumInfo | None]: - """Batch lookup of album metadata for a list of album IDs. - - Given a list of album identifiers, yields corresponding AlbumInfo - objects. Missing albums result in None values in the output iterator. - Plugins may implement this for optimized batched lookups instead of - single calls to album_for_id. 
- """ - - return iter(self.album_for_id(id) for id in ids) - @abc.abstractmethod def album_for_id(self, album_id: str) -> AlbumInfo | None: """Return :py:class:`AlbumInfo` object or None if no matching release was found.""" raise NotImplementedError - def tracks_for_ids(self, ids: Sequence[str]) -> Iterator[TrackInfo | None]: - """Batch lookup of track metadata for a list of track IDs. - - Given a list of track identifiers, yields corresponding TrackInfo objects. - Missing tracks result in None values in the output iterator. Plugins may - implement this for optimized batched lookups instead of single calls to - track_for_id. - """ - - return iter(self.track_for_id(id) for id in ids) - @abc.abstractmethod def track_for_id(self, track_id: str) -> TrackInfo | None: - """Return a :py:class:`AlbumInfo` object or None if no matching release was + """Return a :py:class:`TrackInfo` object or None if no matching release was found. """ raise NotImplementedError @@ -187,8 +155,7 @@ class MetadataSourcePluginNext(BeetsPlugin, metaclass=abc.ABCMeta): artist: str, album: str, va_likely: bool, - extra_tags: dict[str, Any] | None = None, - ) -> Iterator[AlbumInfo]: + ) -> Iterable[AlbumInfo]: """Return :py:class:`AlbumInfo` candidates that match the given album. Used in the autotag functionality to search for albums. @@ -197,17 +164,13 @@ class MetadataSourcePluginNext(BeetsPlugin, metaclass=abc.ABCMeta): :param artist: Album artist :param album: Album name :param va_likely: Whether the album is likely to be by various artists - :param extra_tags: is a an optional dictionary of extra tags to search. - TODO: remove: - Currently relevant to :py:class:`MusicBrainzPlugin` autotagger and can be - ignored by other plugins """ raise NotImplementedError @abc.abstractmethod def item_candidates( self, item: Item, artist: str, title: str - ) -> Iterator[TrackInfo]: + ) -> Iterable[TrackInfo]: """Return :py:class:`TrackInfo` candidates that match the given track. Used in the autotag functionality to search for tracks. @@ -218,7 +181,27 @@ class MetadataSourcePluginNext(BeetsPlugin, metaclass=abc.ABCMeta): """ raise NotImplementedError - # --------------------------------- distances -------------------------------- # + def albums_for_ids(self, ids: Sequence[str]) -> Iterable[AlbumInfo | None]: + """Batch lookup of album metadata for a list of album IDs. + + Given a list of album identifiers, yields corresponding AlbumInfo objects. + Missing albums result in None values in the output iterator. + Plugins may implement this for optimized batched lookups instead of + single calls to album_for_id. + """ + + return (self.album_for_id(id) for id in ids) + + def tracks_for_ids(self, ids: Sequence[str]) -> Iterable[TrackInfo | None]: + """Batch lookup of track metadata for a list of track IDs. + + Given a list of track identifiers, yields corresponding TrackInfo objects. + Missing tracks result in None values in the output iterator. + Plugins may implement this for optimized batched lookups instead of + single calls to track_for_id. 
+ """ + + return (self.track_for_id(id) for id in ids) def album_distance( self, @@ -226,6 +209,7 @@ class MetadataSourcePluginNext(BeetsPlugin, metaclass=abc.ABCMeta): album_info: AlbumInfo, mapping: dict[Item, TrackInfo], ) -> Distance: + """Calculate the distance for an album based on its items and album info.""" return _get_distance( data_source=self.data_source, info=album_info, config=self.config ) @@ -235,10 +219,78 @@ class MetadataSourcePluginNext(BeetsPlugin, metaclass=abc.ABCMeta): item: Item, info: TrackInfo, ) -> Distance: + """Calculate the distance for a track based on its item and track info.""" return _get_distance( data_source=self.data_source, info=info, config=self.config ) + @cached_classproperty + def data_source(cls) -> str: + """The data source name for this plugin. + + This is inferred from the plugin name. + """ + return cls.__name__.replace("Plugin", "") # type: ignore[attr-defined] + + def extract_release_id(self, url: str) -> str | None: + """Extract an ID from a URL for this metadata source plugin. + + Uses the plugin's data source name to determine the ID format and + extracts the ID from a given URL. + """ + return extract_release_id(self.data_source, url) + + @staticmethod + def get_artist( + artists: Iterable[dict[str | int, str]], + id_key: str | int = "id", + name_key: str | int = "name", + join_key: str | int | None = None, + ) -> tuple[str, str | None]: + """Returns an artist string (all artists) and an artist_id (the main + artist) for a list of artist object dicts. + + For each artist, this function moves articles (such as 'a', 'an', + and 'the') to the front and strips trailing disambiguation numbers. It + returns a tuple containing the comma-separated string of all + normalized artists and the ``id`` of the main/first artist. + Alternatively a keyword can be used to combine artists together into a + single string by passing the join_key argument. + + :param artists: Iterable of artist dicts or lists returned by API. + :param id_key: Key or index corresponding to the value of ``id`` for + the main/first artist. Defaults to 'id'. + :param name_key: Key or index corresponding to values of names + to concatenate for the artist string (containing all artists). + Defaults to 'name'. + :param join_key: Key or index corresponding to a field containing a + keyword to use for combining artists into a single string, for + example "Feat.", "Vs.", "And" or similar. The default is None + which keeps the default behaviour (comma-separated). + :return: Normalized artist string. + """ + artist_id = None + artist_string = "" + artists = list(artists) # In case a generator was passed. + total = len(artists) + for idx, artist in enumerate(artists): + if not artist_id: + artist_id = artist[id_key] + name = artist[name_key] + # Strip disambiguation number. + name = re.sub(r" \(\d+\)$", "", name) + # Move articles to the front. + name = re.sub(r"^(.*?), (a|an|the)$", r"\2 \1", name, flags=re.I) + # Use a join keyword if requested and available. + if idx < (total - 1): # Skip joining on last. 
+ if join_key and artist.get(join_key, None): + name += f" {artist[join_key]} " + else: + name += ", " + artist_string += name + + return artist_string, artist_id + class IDResponse(TypedDict): """Response from the API containing an ID.""" @@ -254,8 +306,8 @@ class SearchFilter(TypedDict): R = TypeVar("R", bound=IDResponse) -class SearchApiMetadataSourcePluginNext( - Generic[R], MetadataSourcePluginNext, metaclass=abc.ABCMeta +class SearchApiMetadataSourcePlugin( + Generic[R], MetadataSourcePlugin, metaclass=abc.ABCMeta ): """Helper class to implement a metadata source plugin with an API. @@ -270,9 +322,9 @@ class SearchApiMetadataSourcePluginNext( def _search_api( self, query_type: Literal["album", "track"], - filters: SearchFilter | None = None, + filters: SearchFilter, keywords: str = "", - ) -> Sequence[R] | None: + ) -> Sequence[R]: """Perform a search on the API. :param query_type: The type of query to perform. @@ -289,79 +341,27 @@ class SearchApiMetadataSourcePluginNext( artist: str, album: str, va_likely: bool, - extra_tags: dict[str, Any] | None = None, - ) -> Iterator[AlbumInfo]: + ) -> Iterable[AlbumInfo]: query_filters: SearchFilter = {"album": album} if not va_likely: query_filters["artist"] = artist results = self._search_api("album", query_filters) if not results: - return + return [] - yield from filter( + return filter( None, self.albums_for_ids([result["id"] for result in results]) ) def item_candidates( self, item: Item, artist: str, title: str - ) -> Iterator[TrackInfo]: + ) -> Iterable[TrackInfo]: results = self._search_api("track", {"artist": artist}, keywords=title) if not results: - return + return [] - yield from filter( - None, self.tracks_for_ids([result["id"] for result in results]) + return filter( + None, + self.tracks_for_ids([result["id"] for result in results if result]), ) - - -def artists_to_artist_str( - artists, - id_key: str | int = "id", - name_key: str | int = "name", - join_key: str | int | None = None, -) -> tuple[str, str | None]: - """Returns an artist string (all artists) and an artist_id (the main - artist) for a list of artist object dicts. - - For each artist, this function moves articles (such as 'a', 'an', - and 'the') to the front and strips trailing disambiguation numbers. It - returns a tuple containing the comma-separated string of all - normalized artists and the ``id`` of the main/first artist. - Alternatively a keyword can be used to combine artists together into a - single string by passing the join_key argument. - - :param artists: Iterable of artist dicts or lists returned by API. - :type artists: list[dict] or list[list] - :param id_key: Key or index corresponding to the value of ``id`` for - the main/first artist. Defaults to 'id'. - :param name_key: Key or index corresponding to values of names - to concatenate for the artist string (containing all artists). - Defaults to 'name'. - :param join_key: Key or index corresponding to a field containing a - keyword to use for combining artists into a single string, for - example "Feat.", "Vs.", "And" or similar. The default is None - which keeps the default behaviour (comma-separated). - :return: Normalized artist string. - """ - artist_id = None - artist_string = "" - artists = list(artists) # In case a generator was passed. - total = len(artists) - for idx, artist in enumerate(artists): - if not artist_id: - artist_id = artist[id_key] - name = artist[name_key] - # Strip disambiguation number. - name = re.sub(r" \(\d+\)$", "", name) - # Move articles to the front. 
- name = re.sub(r"^(.*?), (a|an|the)$", r"\2 \1", name, flags=re.I) - # Use a join keyword if requested and available. - if idx < (total - 1): # Skip joining on last. - if join_key and artist.get(join_key, None): - name += f" {artist[join_key]} " - else: - name += ", " - artist_string += name - - return artist_string, artist_id From 5fe8431a6577b775fe7e2c9d70d07ffb04232eed Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:34:06 +0200 Subject: [PATCH 60/95] Use inspect instead of isclass in load_plugin function to fix import issues. --- beets/plugins.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/beets/plugins.py b/beets/plugins.py index 668f8585d..c0d71bec1 100644 --- a/beets/plugins.py +++ b/beets/plugins.py @@ -44,8 +44,6 @@ if TYPE_CHECKING: from confuse import ConfigView - from beets.autotag import AlbumInfo, TrackInfo - from beets.autotag.distance import Distance from beets.dbcore import Query from beets.dbcore.db import FieldQueryType from beets.dbcore.types import Type @@ -299,7 +297,7 @@ def load_plugins(names: Sequence[str] = ()) -> None: else: for obj in getattr(namespace, name).__dict__.values(): if ( - isinstance(obj, type) + inspect.isclass(obj) and issubclass(obj, BeetsPlugin) and obj != BeetsPlugin and not inspect.isabstract(obj) @@ -520,8 +518,6 @@ def feat_tokens(for_artist: bool = True) -> str: ) - - def apply_item_changes( lib: Library, item: Item, move: bool, pretend: bool, write: bool ) -> None: From 178e27f11fb3bd8ecabb88d62b5bfe476c8e0a48 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:35:18 +0200 Subject: [PATCH 61/95] Fixed imports for all tests and added a bit of stricter type checking. --- beets/autotag/distance.py | 6 +++--- beets/test/helper.py | 10 ++++++---- setup.cfg | 5 +++++ test/plugins/test_mbsync.py | 4 ++-- test/test_importer.py | 19 +++++++++++++++---- test/test_ui.py | 4 +++- 6 files changed, 34 insertions(+), 14 deletions(-) diff --git a/beets/autotag/distance.py b/beets/autotag/distance.py index d146c27f0..39d16858f 100644 --- a/beets/autotag/distance.py +++ b/beets/autotag/distance.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING, Any from jellyfish import levenshtein_distance from unidecode import unidecode -from beets import config, plugins +from beets import config, metadata_plugins from beets.util import as_string, cached_classproperty, get_most_common_tags if TYPE_CHECKING: @@ -409,7 +409,7 @@ def track_distance( dist.add_expr("medium", item.disc != track_info.medium) # Plugins. - dist.update(plugins.track_distance(item, track_info)) + dist.update(metadata_plugins.track_distance(item, track_info)) return dist @@ -526,6 +526,6 @@ def distance( dist.add("unmatched_tracks", 1.0) # Plugins. 
- dist.update(plugins.album_distance(items, album_info, mapping)) + dist.update(metadata_plugins.album_distance(items, album_info, mapping)) return dist diff --git a/beets/test/helper.py b/beets/test/helper.py index db753a760..4f26e8448 100644 --- a/beets/test/helper.py +++ b/beets/test/helper.py @@ -799,10 +799,12 @@ class AutotagStub: def install(self): self.patchers = [ - patch("beets.plugins.album_for_id", lambda *_: None), - patch("beets.plugins.track_for_id", lambda *_: None), - patch("beets.plugins.candidates", self.candidates), - patch("beets.plugins.item_candidates", self.item_candidates), + patch("beets.metadata_plugins.album_for_id", lambda *_: None), + patch("beets.metadata_plugins.track_for_id", lambda *_: None), + patch("beets.metadata_plugins.candidates", self.candidates), + patch( + "beets.metadata_plugins.item_candidates", self.item_candidates + ), ] for p in self.patchers: p.start() diff --git a/setup.cfg b/setup.cfg index e3472b04c..e999b55d3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -49,3 +49,8 @@ disallow_untyped_decorators = true disallow_any_generics = true check_untyped_defs = true allow_redefinition = true + +[[mypy-beets.metadata_plugins]] +disallow_untyped_decorators = true +disallow_any_generics = true +check_untyped_defs = true diff --git a/test/plugins/test_mbsync.py b/test/plugins/test_mbsync.py index 088165ef5..bb88e5e63 100644 --- a/test/plugins/test_mbsync.py +++ b/test/plugins/test_mbsync.py @@ -23,7 +23,7 @@ class MbsyncCliTest(PluginTestCase): plugin = "mbsync" @patch( - "beets.plugins.album_for_id", + "beets.metadata_plugins.album_for_id", Mock( side_effect=lambda *_: AlbumInfo( album_id="album id", @@ -33,7 +33,7 @@ class MbsyncCliTest(PluginTestCase): ), ) @patch( - "beets.plugins.track_for_id", + "beets.metadata_plugins.track_for_id", Mock( side_effect=lambda *_: TrackInfo( track_id="singleton id", title="new title" diff --git a/test/test_importer.py b/test/test_importer.py index 9ec160568..14b163f73 100644 --- a/test/test_importer.py +++ b/test/test_importer.py @@ -913,7 +913,9 @@ def album_candidates_mock(*args, **kwargs): ) -@patch("beets.plugins.candidates", Mock(side_effect=album_candidates_mock)) +@patch( + "beets.metadata_plugins.candidates", Mock(side_effect=album_candidates_mock) +) class ImportDuplicateAlbumTest(PluginMixin, ImportTestCase): plugin = "musicbrainz" @@ -1031,7 +1033,10 @@ def item_candidates_mock(*args, **kwargs): ) -@patch("beets.plugins.item_candidates", Mock(side_effect=item_candidates_mock)) +@patch( + "beets.metadata_plugins.item_candidates", + Mock(side_effect=item_candidates_mock), +) class ImportDuplicateSingletonTest(ImportTestCase): def setUp(self): super().setUp() @@ -1567,8 +1572,14 @@ def mocked_get_track_by_id(id_): ) -@patch("beets.plugins.track_for_id", Mock(side_effect=mocked_get_track_by_id)) -@patch("beets.plugins.album_for_id", Mock(side_effect=mocked_get_album_by_id)) +@patch( + "beets.metadata_plugins.track_for_id", + Mock(side_effect=mocked_get_track_by_id), +) +@patch( + "beets.metadata_plugins.album_for_id", + Mock(side_effect=mocked_get_album_by_id), +) class ImportIdTest(ImportTestCase): ID_RELEASE_0 = "00000000-0000-0000-0000-000000000000" ID_RELEASE_1 = "11111111-1111-1111-1111-111111111111" diff --git a/test/test_ui.py b/test/test_ui.py index 713e69891..664323e2a 100644 --- a/test/test_ui.py +++ b/test/test_ui.py @@ -1024,7 +1024,9 @@ class ConfigTest(TestPluginTestCase): file.write("plugins: test") self.run_command("--config", self.cli_config_path, "plugin", lib=None) - assert 
plugins.find_plugins()[0].is_test_plugin + plugs = plugins.find_plugins() + assert len(plugs) == 1 + assert plugs[0].is_test_plugin self.unload_plugins() def test_beetsdir_config(self): From 8552eb9a37ee71748a215f87caa0a5153053b24c Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:35:55 +0200 Subject: [PATCH 62/95] Added debug message when id extractor pattern is not found. --- beets/util/id_extractors.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/beets/util/id_extractors.py b/beets/util/id_extractors.py index bbe2c32a4..6cdb787d1 100644 --- a/beets/util/id_extractors.py +++ b/beets/util/id_extractors.py @@ -18,6 +18,11 @@ from __future__ import annotations import re +from beets import logging + +log = logging.getLogger("beets") + + PATTERN_BY_SOURCE = { "spotify": re.compile(r"(?:^|open\.spotify\.com/[^/]+/)([0-9A-Za-z]{22})"), "deezer": re.compile(r"(?:^|deezer\.com/)(?:[a-z]*/)?(?:[^/]+/)?(\d+)"), @@ -43,6 +48,21 @@ PATTERN_BY_SOURCE = { def extract_release_id(source: str, id_: str) -> str | None: - if m := PATTERN_BY_SOURCE[source].search(str(id_)): + """Extract the release ID from a given source and ID. + + Normally, the `id_` is a url string which contains the ID of the + release. This function extracts the ID from the URL based on the + `source` provided. + """ + try: + source_pattern = PATTERN_BY_SOURCE[source.lower()] + except KeyError: + log.debug( + f"Unknown source '{source}' for ID extraction. Returning id/url as-is." + ) + return id_ + + if m := source_pattern.search(str(id_)): return m[1] + return None From fd800dce7c9e2488928056c66bb1b958c419c870 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:37:32 +0200 Subject: [PATCH 63/95] Opt in spotify plugin and enhanced typing for the search responses. --- beetsplug/spotify.py | 104 ++++++++++++++++++++++++++++++------------- 1 file changed, 72 insertions(+), 32 deletions(-) diff --git a/beetsplug/spotify.py b/beetsplug/spotify.py index 36790b56b..27fd2e3b5 100644 --- a/beetsplug/spotify.py +++ b/beetsplug/spotify.py @@ -25,7 +25,7 @@ import json import re import time import webbrowser -from typing import TYPE_CHECKING, Any, Literal, Sequence +from typing import TYPE_CHECKING, Any, Literal, Sequence, Union import confuse import requests @@ -34,7 +34,12 @@ import unidecode from beets import ui from beets.autotag.hooks import AlbumInfo, TrackInfo from beets.dbcore import types -from beets.plugins import BeetsPlugin, MetadataSourcePlugin, Response +from beets.library import Library +from beets.metadata_plugins import ( + IDResponse, + SearchApiMetadataSourcePlugin, + SearchFilter, +) if TYPE_CHECKING: from beets.library import Library @@ -43,13 +48,41 @@ if TYPE_CHECKING: DEFAULT_WAITING_TIME = 5 -class SpotifyAPIError(Exception): +class SearchResponseAlbums(IDResponse): + """A response returned by the Spotify API. + + We only use items and disregard the pagination information. + i.e. res["albums"]["items"][0]. + + There are more fields in the response, but we only type + the ones we currently use. 
+ + see https://developer.spotify.com/documentation/web-api/reference/search + """ + + album_type: str + available_markets: Sequence[str] + name: str + + +class SearchResponseTracks(IDResponse): + """A track response returned by the Spotify API.""" + + album: SearchResponseAlbums + available_markets: Sequence[str] + popularity: int + name: str + + +class APIError(Exception): pass -class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): - data_source = "Spotify" - +class SpotifyPlugin( + SearchApiMetadataSourcePlugin[ + Union[SearchResponseAlbums, SearchResponseTracks] + ] +): item_types = { "spotify_track_popularity": types.INTEGER, "spotify_acousticness": types.FLOAT, @@ -180,7 +213,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): """ if retry_count > max_retries: - raise SpotifyAPIError("Maximum retries reached.") + raise APIError("Maximum retries reached.") try: response = requests.request( @@ -194,14 +227,14 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): return response.json() except requests.exceptions.ReadTimeout: self._log.error("ReadTimeout.") - raise SpotifyAPIError("Request timed out.") + raise APIError("Request timed out.") except requests.exceptions.ConnectionError as e: self._log.error(f"Network error: {e}") - raise SpotifyAPIError("Network error.") + raise APIError("Network error.") except requests.exceptions.RequestException as e: if e.response is None: self._log.error(f"Request failed: {e}") - raise SpotifyAPIError("Request failed.") + raise APIError("Request failed.") if e.response.status_code == 401: self._log.debug( f"{self.data_source} access token has expired. " @@ -215,7 +248,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): retry_count=retry_count + 1, ) elif e.response.status_code == 404: - raise SpotifyAPIError( + raise APIError( f"API Error: {e.response.status_code}\n" f"URL: {url}\nparams: {params}" ) @@ -235,18 +268,18 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): ) elif e.response.status_code == 503: self._log.error("Service Unavailable.") - raise SpotifyAPIError("Service Unavailable.") + raise APIError("Service Unavailable.") elif e.response.status_code == 502: self._log.error("Bad Gateway.") - raise SpotifyAPIError("Bad Gateway.") + raise APIError("Bad Gateway.") elif e.response is not None: - raise SpotifyAPIError( + raise APIError( f"{self.data_source} API error:\n{e.response.text}\n" f"URL:\n{url}\nparams:\n{params}" ) else: self._log.error(f"Request failed. Error: {e}") - raise SpotifyAPIError("Request failed.") + raise APIError("Request failed.") def album_for_id(self, album_id: str) -> AlbumInfo | None: """Fetch an album by its Spotify ID or URL and return an @@ -257,7 +290,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): :return: AlbumInfo object for album :rtype: beets.autotag.hooks.AlbumInfo or None """ - if not (spotify_id := self._get_id(album_id)): + if not (spotify_id := self.extract_release_id(album_id)): return None album_data = self._handle_response("get", self.album_url + spotify_id) @@ -360,7 +393,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): Returns a TrackInfo object or None if the track is not found. 
""" - if not (spotify_id := self._get_id(track_id)): + if not (spotify_id := self.extract_release_id(track_id)): self._log.debug("Invalid Spotify ID: {}", track_id) return None @@ -390,7 +423,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): return track def _construct_search_query( - self, filters: dict[str, str], keywords: str = "" + self, filters: SearchFilter, keywords: str = "" ) -> str: """Construct a query string with the specified filters and keywords to be provided to the Spotify Search API @@ -400,9 +433,10 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): :param keywords: (Optional) Query keywords to use. :return: Query string to be provided to the Search API. """ + query_components = [ keywords, - " ".join(":".join((k, v)) for k, v in filters.items()), + " ".join(f"{k}:{v}" for k, v in filters.items()), ] query = " ".join([q for q in query_components if q]) if not isinstance(query, str): @@ -416,9 +450,9 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): def _search_api( self, query_type: Literal["album", "track"], - filters: dict[str, str], + filters: SearchFilter, keywords: str = "", - ) -> Sequence[Response]: + ) -> Sequence[SearchResponseAlbums | SearchResponseTracks]: """Query the Spotify Search API for the specified ``keywords``, applying the provided ``filters``. @@ -436,7 +470,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): self.search_url, params={"q": query, "type": query_type}, ) - except SpotifyAPIError as e: + except APIError as e: self._log.debug("Spotify API error: {}", e) return () response_data = response.get(query_type + "s", {}).get("items", []) @@ -557,7 +591,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): keywords = item[self.config["track_field"].get()] # Query the Web API for each track, look for the items' JSON data - query_filters = {"artist": artist, "album": album} + query_filters: SearchFilter = {"artist": artist, "album": album} response_data_tracks = self._search_api( query_type="track", keywords=keywords, filters=query_filters ) @@ -570,7 +604,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): continue # Apply market filter if requested - region_filter = self.config["region_filter"].get() + region_filter: str = self.config["region_filter"].get() if region_filter: response_data_tracks = [ track_data @@ -595,7 +629,11 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): len(response_data_tracks), ) chosen_result = max( - response_data_tracks, key=lambda x: x["popularity"] + response_data_tracks, + key=lambda x: x[ + # We are sure this is a track response! 
+ "popularity" # type: ignore[typeddict-item] + ], ) results.append(chosen_result) @@ -691,16 +729,18 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): def track_info(self, track_id: str): """Fetch a track's popularity and external IDs using its Spotify ID.""" track_data = self._handle_response("get", self.track_url + track_id) + external_ids = track_data.get("external_ids", {}) + popularity = track_data.get("popularity") self._log.debug( "track_popularity: {} and track_isrc: {}", - track_data.get("popularity"), - track_data.get("external_ids").get("isrc"), + popularity, + external_ids.get("isrc"), ) return ( - track_data.get("popularity"), - track_data.get("external_ids").get("isrc"), - track_data.get("external_ids").get("ean"), - track_data.get("external_ids").get("upc"), + popularity, + external_ids.get("isrc"), + external_ids.get("ean"), + external_ids.get("upc"), ) def track_audio_features(self, track_id: str): @@ -709,6 +749,6 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin): return self._handle_response( "get", self.audio_features_url + track_id ) - except SpotifyAPIError as e: + except APIError as e: self._log.debug("Spotify API error: {}", e) return None From b62fb10da8e052c1c7ee241c4890c6e0b91d7762 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:51:33 +0200 Subject: [PATCH 64/95] Opt in musicbrainz plugin. --- beetsplug/musicbrainz.py | 26 +++++++++++--------------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/beetsplug/musicbrainz.py b/beetsplug/musicbrainz.py index e33cc4fce..3b250c071 100644 --- a/beetsplug/musicbrainz.py +++ b/beetsplug/musicbrainz.py @@ -20,7 +20,7 @@ import traceback from collections import Counter from functools import cached_property from itertools import product -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Iterable, Sequence from urllib.parse import urljoin import musicbrainzngs @@ -28,11 +28,10 @@ import musicbrainzngs import beets import beets.autotag.hooks from beets import config, plugins, util -from beets.plugins import BeetsPlugin +from beets.metadata_plugins import MetadataSourcePlugin from beets.util.id_extractors import extract_release_id if TYPE_CHECKING: - from collections.abc import Iterator, Sequence from typing import Literal from beets.library import Item @@ -362,9 +361,7 @@ def _merge_pseudo_and_actual_album( return merged -class MusicBrainzPlugin(BeetsPlugin): - data_source = "Musicbrainz" - +class MusicBrainzPlugin(MetadataSourcePlugin): def __init__(self): """Set up the python-musicbrainz-ngs module according to settings from the beets configuration. This should be called at startup. 
@@ -421,7 +418,7 @@ class MusicBrainzPlugin(BeetsPlugin): medium=medium, medium_index=medium_index, medium_total=medium_total, - data_source="MusicBrainz", + data_source=self.data_source, data_url=track_url(recording["id"]), ) @@ -632,7 +629,7 @@ class MusicBrainzPlugin(BeetsPlugin): artists_sort=artists_sort_names, artist_credit=artist_credit_name, artists_credit=artists_credit_names, - data_source="MusicBrainz", + data_source=self.data_source, data_url=album_url(release["id"]), barcode=release.get("barcode"), ) @@ -767,7 +764,7 @@ class MusicBrainzPlugin(BeetsPlugin): return mb_field_by_tag def get_album_criteria( - self, items: list[Item], artist: str, album: str, va_likely: bool + self, items: Sequence[Item], artist: str, album: str, va_likely: bool ) -> dict[str, str]: criteria = { "release": album, @@ -813,12 +810,11 @@ class MusicBrainzPlugin(BeetsPlugin): def candidates( self, - items: list[Item], + items: Sequence[Item], artist: str, album: str, va_likely: bool, - extra_tags: dict[str, Any] | None = None, - ) -> Iterator[beets.autotag.hooks.AlbumInfo]: + ) -> Iterable[beets.autotag.hooks.AlbumInfo]: criteria = self.get_album_criteria(items, artist, album, va_likely) release_ids = (r["id"] for r in self._search_api("release", criteria)) @@ -826,7 +822,7 @@ class MusicBrainzPlugin(BeetsPlugin): def item_candidates( self, item: Item, artist: str, title: str - ) -> Iterator[beets.autotag.hooks.TrackInfo]: + ) -> Iterable[beets.autotag.hooks.TrackInfo]: criteria = {"artist": artist, "recording": title, "alias": title} yield from filter( @@ -841,7 +837,7 @@ class MusicBrainzPlugin(BeetsPlugin): MusicBrainzAPIError. """ self._log.debug("Requesting MusicBrainz release {}", album_id) - if not (albumid := extract_release_id("musicbrainz", album_id)): + if not (albumid := self.extract_release_id(album_id)): self._log.debug("Invalid MBID ({0}).", album_id) return None @@ -878,7 +874,7 @@ class MusicBrainzPlugin(BeetsPlugin): """Fetches a track by its MusicBrainz ID. Returns a TrackInfo object or None if no track is found. May raise a MusicBrainzAPIError. """ - if not (trackid := extract_release_id("musicbrainz", track_id)): + if not (trackid := self.extract_release_id(track_id)): self._log.debug("Invalid MBID ({0}).", track_id) return None From a770cfb6696af9121c5f45d47c1d1c67247d51b7 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:53:18 +0200 Subject: [PATCH 65/95] Opt in chroma plugin. --- beetsplug/chroma.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/beetsplug/chroma.py b/beetsplug/chroma.py index de3ac525a..21098ea81 100644 --- a/beetsplug/chroma.py +++ b/beetsplug/chroma.py @@ -19,12 +19,14 @@ autotagger. Requires the pyacoustid library. 
import re from collections import defaultdict from functools import cached_property, partial +from typing import Iterable import acoustid import confuse -from beets import config, plugins, ui, util +from beets import config, ui, util from beets.autotag.distance import Distance +from beets.metadata_plugins import MetadataSourcePlugin, TrackInfo from beetsplug.musicbrainz import MusicBrainzPlugin API_KEY = "1vOwZtEn" @@ -168,10 +170,8 @@ def _all_releases(items): yield release_id -class AcoustidPlugin(plugins.BeetsPlugin): +class AcoustidPlugin(MetadataSourcePlugin): def __init__(self): - super().__init__() - self.config.add( { "auto": True, @@ -210,7 +210,7 @@ class AcoustidPlugin(plugins.BeetsPlugin): self._log.debug("acoustid album candidates: {0}", len(albums)) return albums - def item_candidates(self, item, artist, title): + def item_candidates(self, item, artist, title) -> Iterable[TrackInfo]: if item.path not in _matches: return [] @@ -223,6 +223,14 @@ class AcoustidPlugin(plugins.BeetsPlugin): self._log.debug("acoustid item candidates: {0}", len(tracks)) return tracks + def album_for_id(self, *args, **kwargs): + # Lookup by fingerprint ID does not make too much sense. + return None + + def track_for_id(self, *args, **kwargs): + # Lookup by fingerprint ID does not make too much sense. + return None + def commands(self): submit_cmd = ui.Subcommand( "submit", help="submit Acoustid fingerprints" From 6f623ee7b0356e18f87fe1bccd0dfcc99b81faf2 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:56:00 +0200 Subject: [PATCH 66/95] Opt in deezer plugin. --- beetsplug/deezer.py | 47 ++++++++++++++++++++++++++------------------- 1 file changed, 27 insertions(+), 20 deletions(-) diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py index 7e4896437..bf6f83980 100644 --- a/beetsplug/deezer.py +++ b/beetsplug/deezer.py @@ -26,16 +26,19 @@ import unidecode from beets import ui from beets.autotag import AlbumInfo, TrackInfo from beets.dbcore import types -from beets.plugins import BeetsPlugin, MetadataSourcePlugin, Response +from beets.metadata_plugins import ( + IDResponse, + SearchApiMetadataSourcePlugin, + SearchFilter, +) if TYPE_CHECKING: from beets.library import Item, Library - from beetsplug._typing import JSONDict + + from ._typing import JSONDict -class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): - data_source = "Deezer" - +class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]): item_types = { "deezer_track_rank": types.INTEGER, "deezer_track_id": types.INTEGER, @@ -63,7 +66,7 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): def album_for_id(self, album_id: str) -> AlbumInfo | None: """Fetch an album by its Deezer ID or URL.""" - if not (deezer_id := self._get_id(album_id)): + if not (deezer_id := self.extract_release_id(album_id)): return None album_url = f"{self.album_url}{deezer_id}" @@ -145,11 +148,14 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): ) def track_for_id(self, track_id: str) -> None | TrackInfo: - """Fetch a track by its Deezer ID or URL. + """Fetch a track by its Deezer ID or URL and return a + TrackInfo object or None if the track is not found. + + :param track_id: (Optional) Deezer ID or URL for the track. Either + ``track_id`` or ``track_data`` must be provided. - Returns a TrackInfo object or None if the track is not found. 
""" - if not (deezer_id := self._get_id(track_id)): + if not (deezer_id := self.extract_release_id(track_id)): self._log.debug("Invalid Deezer track_id: {}", track_id) return None @@ -162,11 +168,13 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): # Get album's tracks to set `track.index` (position on the entire # release) and `track.medium_total` (total number of tracks on # the track's disc). - album_tracks_obj = self.fetch_data( - self.album_url + str(track_data["album"]["id"]) + "/tracks" - ) - if album_tracks_obj is None: + if not ( + album_tracks_obj := self.fetch_data( + self.album_url + str(track_data["album"]["id"]) + "/tracks" + ) + ): return None + try: album_tracks_data = album_tracks_obj["data"] except KeyError: @@ -187,7 +195,6 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): """Convert a Deezer track object dict to a TrackInfo object. :param track_data: Deezer Track object dict - :return: TrackInfo object for track """ artist, artist_id = self.get_artist( track_data.get("contributors", [track_data["artist"]]) @@ -211,7 +218,7 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): @staticmethod def _construct_search_query( - filters: dict[str, str], keywords: str = "" + filters: SearchFilter, keywords: str = "" ) -> str: """Construct a query string with the specified filters and keywords to be provided to the Deezer Search API @@ -242,14 +249,14 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): "radio", "user", ], - filters: dict[str, str], + filters: SearchFilter, keywords="", - ) -> Sequence[Response]: + ) -> Sequence[IDResponse]: """Query the Deezer Search API for the specified ``keywords``, applying the provided ``filters``. - :param query_type: The Deezer Search API method to use. - :param keywords: (Optional) Query keywords to use. + :param filters: Field filters to apply. + :param keywords: Query keywords to use. :return: JSON data for the class:`Response ` object or None if no search results are returned. """ @@ -269,7 +276,7 @@ class DeezerPlugin(MetadataSourcePlugin[Response], BeetsPlugin): e, ) return () - response_data = response.json().get("data", []) + response_data: Sequence[IDResponse] = response.json().get("data", []) self._log.debug( "Found {} result(s) from {} for '{}'", len(response_data), From a97633dbf624008947f8fa41242288f6d820b2be Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:57:46 +0200 Subject: [PATCH 67/95] Opt in dicogs plugin. 
--- beetsplug/discogs.py | 33 ++++++++++----------------------- 1 file changed, 10 insertions(+), 23 deletions(-) diff --git a/beetsplug/discogs.py b/beetsplug/discogs.py index 2408f3498..713dfbcae 100644 --- a/beetsplug/discogs.py +++ b/beetsplug/discogs.py @@ -27,7 +27,7 @@ import time import traceback from functools import cache from string import ascii_lowercase -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Sequence import confuse from discogs_client import Client, Master, Release @@ -40,8 +40,7 @@ import beets.ui from beets import config from beets.autotag.distance import string_dist from beets.autotag.hooks import AlbumInfo, TrackInfo -from beets.plugins import BeetsPlugin, MetadataSourcePlugin, get_distance -from beets.util.id_extractors import extract_release_id +from beets.metadata_plugins import MetadataSourcePlugin if TYPE_CHECKING: from collections.abc import Callable, Iterable @@ -84,7 +83,7 @@ class ReleaseFormat(TypedDict): descriptions: list[str] | None -class DiscogsPlugin(BeetsPlugin): +class DiscogsPlugin(MetadataSourcePlugin): def __init__(self): super().__init__() self.config.add( @@ -169,20 +168,8 @@ class DiscogsPlugin(BeetsPlugin): return token, secret - def album_distance(self, items, album_info, mapping): - """Returns the album distance.""" - return get_distance( - data_source="Discogs", info=album_info, config=self.config - ) - - def track_distance(self, item, track_info): - """Returns the track distance.""" - return get_distance( - data_source="Discogs", info=track_info, config=self.config - ) - def candidates( - self, items: list[Item], artist: str, album: str, va_likely: bool + self, items: Sequence[Item], artist: str, album: str, va_likely: bool ) -> Iterable[AlbumInfo]: return self.get_albums(f"{artist} {album}" if va_likely else album) @@ -217,7 +204,7 @@ class DiscogsPlugin(BeetsPlugin): """ self._log.debug("Searching for release {0}", album_id) - discogs_id = extract_release_id("discogs", album_id) + discogs_id = self.extract_release_id(album_id) if not discogs_id: return None @@ -272,7 +259,7 @@ class DiscogsPlugin(BeetsPlugin): exc_info=True, ) return [] - return map(self.get_album_info, releases) + return filter(None, map(self.get_album_info, releases)) @cache def get_master_year(self, master_id: str) -> int | None: @@ -334,7 +321,7 @@ class DiscogsPlugin(BeetsPlugin): self._log.warning("Release does not contain the required fields") return None - artist, artist_id = MetadataSourcePlugin.get_artist( + artist, artist_id = self.get_artist( [a.data for a in result.artists], join_key="join" ) album = re.sub(r" +", " ", result.title) @@ -359,7 +346,7 @@ class DiscogsPlugin(BeetsPlugin): else: genre = base_genre - discogs_albumid = extract_release_id("discogs", result.data.get("uri")) + discogs_albumid = self.extract_release_id(result.data.get("uri")) # Extract information for the optional AlbumInfo fields that are # contained on nested discogs fields. 
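Both hunks around here lean on the shared ID extractor, so a hedged illustration of what it accepts may help. The authoritative patterns live in beets.util.id_extractors, so treat the exact formats and return values below as assumptions rather than documented behaviour:

    from beets.util.id_extractors import extract_release_id

    extract_release_id("discogs", "https://www.discogs.com/release/12345")  # expected: "12345"
    extract_release_id("discogs", "no discogs reference here")              # expected: None

The instance method used in the plugin, self.extract_release_id(...), wraps this function and fills in the plugin's own data source name, which is why the explicit "discogs" argument disappears from the plugin code.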
@@ -419,7 +406,7 @@ class DiscogsPlugin(BeetsPlugin): genre=genre, media=media, original_year=original_year, - data_source="Discogs", + data_source=self.data_source, data_url=data_url, discogs_albumid=discogs_albumid, discogs_labelid=labelid, @@ -638,7 +625,7 @@ class DiscogsPlugin(BeetsPlugin): title = f"{prefix}: {title}" track_id = None medium, medium_index, _ = self.get_track_index(track["position"]) - artist, artist_id = MetadataSourcePlugin.get_artist( + artist, artist_id = self.get_artist( track.get("artists", []), join_key="join" ) length = self.get_track_length(track["duration"]) From 3eadf17e8f07fbb0b97c8ca9419d589c54981573 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:58:40 +0200 Subject: [PATCH 68/95] Opt in beatport plugin. Also enhanced type hints and minor cleanup for the beatport plugin. --- beetsplug/beatport.py | 288 ++++++++++++++++++++++++------------------ 1 file changed, 166 insertions(+), 122 deletions(-) diff --git a/beetsplug/beatport.py b/beetsplug/beatport.py index 20147b5cc..72828a96a 100644 --- a/beetsplug/beatport.py +++ b/beetsplug/beatport.py @@ -14,9 +14,19 @@ """Adds Beatport release and track search support to the autotagger""" +from __future__ import annotations + import json import re from datetime import datetime, timedelta +from typing import ( + TYPE_CHECKING, + Iterable, + Iterator, + Literal, + Sequence, + overload, +) import confuse from requests_oauthlib import OAuth1Session @@ -29,7 +39,13 @@ from requests_oauthlib.oauth1_session import ( import beets import beets.ui from beets.autotag.hooks import AlbumInfo, TrackInfo -from beets.plugins import BeetsPlugin, MetadataSourcePlugin, get_distance +from beets.metadata_plugins import MetadataSourcePlugin + +if TYPE_CHECKING: + from beets.importer import ImportSession + from beets.library import Item + + from ._typing import JSONDict AUTH_ERRORS = (TokenRequestDenied, TokenMissing, VerifierMissing) USER_AGENT = f"beets/{beets.__version__} +https://beets.io/" @@ -39,20 +55,6 @@ class BeatportAPIError(Exception): pass -class BeatportObject: - def __init__(self, data): - self.beatport_id = data["id"] - self.name = str(data["name"]) - if "releaseDate" in data: - self.release_date = datetime.strptime( - data["releaseDate"], "%Y-%m-%d" - ) - if "artists" in data: - self.artists = [(x["id"], str(x["name"])) for x in data["artists"]] - if "genres" in data: - self.genres = [str(x["name"]) for x in data["genres"]] - - class BeatportClient: _api_base = "https://oauth-api.beatport.com" @@ -77,7 +79,7 @@ class BeatportClient: ) self.api.headers = {"User-Agent": USER_AGENT} - def get_authorize_url(self): + def get_authorize_url(self) -> str: """Generate the URL for the user to authorize the application. Retrieves a request token from the Beatport API and returns the @@ -99,15 +101,13 @@ class BeatportClient: self._make_url("/identity/1/oauth/authorize") ) - def get_access_token(self, auth_data): + def get_access_token(self, auth_data: str) -> tuple[str, str]: """Obtain the final access token and secret for the API. :param auth_data: URL-encoded authorization data as displayed at the authorization url (obtained via :py:meth:`get_authorize_url`) after signing in - :type auth_data: unicode - :returns: OAuth resource owner key and secret - :rtype: (unicode, unicode) tuple + :returns: OAuth resource owner key and secret as unicode """ self.api.parse_authorization_response( "https://beets.io/auth?" 
+ auth_data @@ -117,20 +117,37 @@ class BeatportClient: ) return access_data["oauth_token"], access_data["oauth_token_secret"] - def search(self, query, release_type="release", details=True): + @overload + def search( + self, + query: str, + release_type: Literal["release"], + details: bool = True, + ) -> Iterator[BeatportRelease]: ... + + @overload + def search( + self, + query: str, + release_type: Literal["track"], + details: bool = True, + ) -> Iterator[BeatportTrack]: ... + + def search( + self, + query: str, + release_type: Literal["release", "track"], + details=True, + ) -> Iterator[BeatportRelease | BeatportTrack]: """Perform a search of the Beatport catalogue. :param query: Query string - :param release_type: Type of releases to search for, can be - 'release' or 'track' + :param release_type: Type of releases to search for. :param details: Retrieve additional information about the search results. Currently this will fetch the tracklist for releases and do nothing for tracks :returns: Search results - :rtype: generator that yields - py:class:`BeatportRelease` or - :py:class:`BeatportTrack` """ response = self._get( "catalog/3/search", @@ -140,20 +157,18 @@ class BeatportClient: ) for item in response: if release_type == "release": + release = BeatportRelease(item) if details: - release = self.get_release(item["id"]) - else: - release = BeatportRelease(item) + release.tracks = self.get_release_tracks(item["id"]) yield release elif release_type == "track": yield BeatportTrack(item) - def get_release(self, beatport_id): + def get_release(self, beatport_id: str) -> BeatportRelease | None: """Get information about a single release. :param beatport_id: Beatport ID of the release :returns: The matching release - :rtype: :py:class:`BeatportRelease` """ response = self._get("/catalog/3/releases", id=beatport_id) if response: @@ -162,35 +177,33 @@ class BeatportClient: return release return None - def get_release_tracks(self, beatport_id): + def get_release_tracks(self, beatport_id: str) -> list[BeatportTrack]: """Get all tracks for a given release. :param beatport_id: Beatport ID of the release :returns: Tracks in the matching release - :rtype: list of :py:class:`BeatportTrack` """ response = self._get( "/catalog/3/tracks", releaseId=beatport_id, perPage=100 ) return [BeatportTrack(t) for t in response] - def get_track(self, beatport_id): + def get_track(self, beatport_id: str) -> BeatportTrack: """Get information about a single track. :param beatport_id: Beatport ID of the track :returns: The matching track - :rtype: :py:class:`BeatportTrack` """ response = self._get("/catalog/3/tracks", id=beatport_id) return BeatportTrack(response[0]) - def _make_url(self, endpoint): + def _make_url(self, endpoint: str) -> str: """Get complete URL for a given API endpoint.""" if not endpoint.startswith("/"): endpoint = "/" + endpoint return self._api_base + endpoint - def _get(self, endpoint, **kwargs): + def _get(self, endpoint: str, **kwargs) -> list[JSONDict]: """Perform a GET request on a given API endpoint. 
Automatically extracts result data from the response and converts HTTP @@ -211,48 +224,81 @@ class BeatportClient: return response.json()["results"] -class BeatportRelease(BeatportObject): - def __str__(self): - if len(self.artists) < 4: - artist_str = ", ".join(x[1] for x in self.artists) +class BeatportObject: + beatport_id: str + name: str + + release_date: datetime | None = None + + artists: list[tuple[str, str]] | None = None + # tuple of artist id and artist name + + def __init__(self, data: JSONDict): + self.beatport_id = str(data["id"]) # given as int in the response + self.name = str(data["name"]) + if "releaseDate" in data: + self.release_date = datetime.strptime( + data["releaseDate"], "%Y-%m-%d" + ) + if "artists" in data: + self.artists = [(x["id"], str(x["name"])) for x in data["artists"]] + if "genres" in data: + self.genres = [str(x["name"]) for x in data["genres"]] + + def artists_str(self) -> str | None: + if self.artists is not None: + if len(self.artists) < 4: + artist_str = ", ".join(x[1] for x in self.artists) + else: + artist_str = "Various Artists" else: - artist_str = "Various Artists" - return "".format( - artist_str, - self.name, - self.catalog_number, - ) + artist_str = None - def __repr__(self): - return str(self).encode("utf-8") + return artist_str + + +class BeatportRelease(BeatportObject): + catalog_number: str | None + label_name: str | None + category: str | None + url: str | None + genre: str | None + + tracks: list[BeatportTrack] | None = None + + def __init__(self, data: JSONDict): + super().__init__(data) + + self.catalog_number = data.get("catalogNumber") + self.label_name = data.get("label", {}).get("name") + self.category = data.get("category") + self.genre = data.get("genre") - def __init__(self, data): - BeatportObject.__init__(self, data) - if "catalogNumber" in data: - self.catalog_number = data["catalogNumber"] - if "label" in data: - self.label_name = data["label"]["name"] - if "category" in data: - self.category = data["category"] if "slug" in data: self.url = "https://beatport.com/release/{}/{}".format( data["slug"], data["id"] ) - self.genre = data.get("genre") + + def __str__(self) -> str: + return "".format( + self.artists_str(), + self.name, + self.catalog_number, + ) class BeatportTrack(BeatportObject): - def __str__(self): - artist_str = ", ".join(x[1] for x in self.artists) - return "".format( - artist_str, self.name, self.mix_name - ) + title: str | None + mix_name: str | None + length: timedelta + url: str | None + track_number: int | None + bpm: str | None + initial_key: str | None + genre: str | None - def __repr__(self): - return str(self).encode("utf-8") - - def __init__(self, data): - BeatportObject.__init__(self, data) + def __init__(self, data: JSONDict): + super().__init__(data) if "title" in data: self.title = str(data["title"]) if "mixName" in data: @@ -279,8 +325,8 @@ class BeatportTrack(BeatportObject): self.genre = str(data["genres"][0].get("name")) -class BeatportPlugin(BeetsPlugin): - data_source = "Beatport" +class BeatportPlugin(MetadataSourcePlugin): + _client: BeatportClient | None = None def __init__(self): super().__init__() @@ -294,12 +340,19 @@ class BeatportPlugin(BeetsPlugin): ) self.config["apikey"].redact = True self.config["apisecret"].redact = True - self.client = None self.register_listener("import_begin", self.setup) - def setup(self, session=None): - c_key = self.config["apikey"].as_str() - c_secret = self.config["apisecret"].as_str() + @property + def client(self) -> BeatportClient: + if self._client 
is None: + raise ValueError( + "Beatport client not initialized. Call setup() first." + ) + return self._client + + def setup(self, session: ImportSession): + c_key: str = self.config["apikey"].as_str() + c_secret: str = self.config["apisecret"].as_str() # Get the OAuth token from a file or log in. try: @@ -312,9 +365,9 @@ class BeatportPlugin(BeetsPlugin): token = tokendata["token"] secret = tokendata["secret"] - self.client = BeatportClient(c_key, c_secret, token, secret) + self._client = BeatportClient(c_key, c_secret, token, secret) - def authenticate(self, c_key, c_secret): + def authenticate(self, c_key: str, c_secret: str) -> tuple[str, str]: # Get the link for the OAuth page. auth_client = BeatportClient(c_key, c_secret) try: @@ -341,44 +394,30 @@ class BeatportPlugin(BeetsPlugin): return token, secret - def _tokenfile(self): + def _tokenfile(self) -> str: """Get the path to the JSON file for storing the OAuth token.""" return self.config["tokenfile"].get(confuse.Filename(in_app_dir=True)) - def album_distance(self, items, album_info, mapping): - """Returns the Beatport source weight and the maximum source weight - for albums. - """ - return get_distance( - data_source=self.data_source, info=album_info, config=self.config - ) - - def track_distance(self, item, track_info): - """Returns the Beatport source weight and the maximum source weight - for individual tracks. - """ - return get_distance( - data_source=self.data_source, info=track_info, config=self.config - ) - - def candidates(self, items, artist, release, va_likely): - """Returns a list of AlbumInfo objects for beatport search results - matching release and artist (if not various). - """ + def candidates( + self, + items: Sequence[Item], + artist: str, + album: str, + va_likely: bool, + ) -> Iterator[AlbumInfo]: if va_likely: - query = release + query = album else: - query = f"{artist} {release}" + query = f"{artist} {album}" try: - return self._get_releases(query) + yield from self._get_releases(query) except BeatportAPIError as e: self._log.debug("API Error: {0} (query: {1})", e, query) - return [] + return - def item_candidates(self, item, artist, title): - """Returns a list of TrackInfo objects for beatport search results - matching title and artist. - """ + def item_candidates( + self, item: Item, artist: str, title: str + ) -> Iterable[TrackInfo]: query = f"{artist} {title}" try: return self._get_tracks(query) @@ -386,13 +425,13 @@ class BeatportPlugin(BeetsPlugin): self._log.debug("API Error: {0} (query: {1})", e, query) return [] - def album_for_id(self, release_id): + def album_for_id(self, album_id: str): """Fetches a release by its Beatport ID and returns an AlbumInfo object or None if the query is not a valid ID or release is not found. """ - self._log.debug("Searching for release {0}", release_id) + self._log.debug("Searching for release {0}", album_id) - if not (release_id := self._get_id(release_id)): + if not (release_id := self.extract_release_id(album_id)): self._log.debug("Not a valid Beatport release ID.") return None @@ -401,11 +440,12 @@ class BeatportPlugin(BeetsPlugin): return self._get_album_info(release) return None - def track_for_id(self, track_id): + def track_for_id(self, track_id: str): """Fetches a track by its Beatport ID and returns a TrackInfo object or None if the track is not a valid Beatport ID or track is not found. 
""" self._log.debug("Searching for track {0}", track_id) + # TODO: move to extractor match = re.search(r"(^|beatport\.com/track/.+/)(\d+)$", track_id) if not match: self._log.debug("Not a valid Beatport track ID.") @@ -415,7 +455,7 @@ class BeatportPlugin(BeetsPlugin): return self._get_track_info(bp_track) return None - def _get_releases(self, query): + def _get_releases(self, query: str) -> Iterator[AlbumInfo]: """Returns a list of AlbumInfo objects for a beatport search query.""" # Strip non-word characters from query. Things like "!" and "-" can # cause a query to return no results, even if they match the artist or @@ -425,16 +465,22 @@ class BeatportPlugin(BeetsPlugin): # Strip medium information from query, Things like "CD1" and "disk 1" # can also negate an otherwise positive result. query = re.sub(r"\b(CD|disc)\s*\d+", "", query, flags=re.I) - albums = [self._get_album_info(x) for x in self.client.search(query)] - return albums + for beatport_release in self.client.search(query, "release"): + if beatport_release is None: + continue + yield self._get_album_info(beatport_release) - def _get_album_info(self, release): + def _get_album_info(self, release: BeatportRelease) -> AlbumInfo: """Returns an AlbumInfo object for a Beatport Release object.""" - va = len(release.artists) > 3 + va = release.artists is not None and len(release.artists) > 3 artist, artist_id = self._get_artist(release.artists) if va: artist = "Various Artists" - tracks = [self._get_track_info(x) for x in release.tracks] + tracks: list[TrackInfo] = [] + if release.tracks is not None: + tracks = [self._get_track_info(x) for x in release.tracks] + + release_date = release.release_date return AlbumInfo( album=release.name, @@ -445,18 +491,18 @@ class BeatportPlugin(BeetsPlugin): tracks=tracks, albumtype=release.category, va=va, - year=release.release_date.year, - month=release.release_date.month, - day=release.release_date.day, label=release.label_name, catalognum=release.catalog_number, media="Digital", data_source=self.data_source, data_url=release.url, genre=release.genre, + year=release_date.year if release_date else None, + month=release_date.month if release_date else None, + day=release_date.day if release_date else None, ) - def _get_track_info(self, track): + def _get_track_info(self, track: BeatportTrack) -> TrackInfo: """Returns a TrackInfo object for a Beatport Track object.""" title = track.name if track.mix_name != "Original Mix": @@ -482,9 +528,7 @@ class BeatportPlugin(BeetsPlugin): """Returns an artist string (all artists) and an artist_id (the main artist) for a list of Beatport release or track artists. """ - return MetadataSourcePlugin.get_artist( - artists=artists, id_key=0, name_key=1 - ) + return self.get_artist(artists=artists, id_key=0, name_key=1) def _get_tracks(self, query): """Returns a list of TrackInfo objects for a Beatport query.""" From 3ce33631a68a3d9703b61e88b8a22b77d935440a Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:59:04 +0200 Subject: [PATCH 69/95] Renamed import in mbsync and missing plugins. 
--- beetsplug/mbsync.py | 10 +++++++--- beetsplug/missing.py | 4 ++-- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/beetsplug/mbsync.py b/beetsplug/mbsync.py index d38b25e9f..3f7daec6c 100644 --- a/beetsplug/mbsync.py +++ b/beetsplug/mbsync.py @@ -16,7 +16,7 @@ from collections import defaultdict -from beets import autotag, library, plugins, ui, util +from beets import autotag, library, metadata_plugins, ui, util from beets.plugins import BeetsPlugin, apply_item_changes @@ -78,7 +78,9 @@ class MBSyncPlugin(BeetsPlugin): ) continue - if not (track_info := plugins.track_for_id(item.mb_trackid)): + if not ( + track_info := metadata_plugins.track_for_id(item.mb_trackid) + ): self._log.info( "Recording ID not found: {0.mb_trackid} for track {0}", item ) @@ -99,7 +101,9 @@ class MBSyncPlugin(BeetsPlugin): self._log.info("Skipping album with no mb_albumid: {}", album) continue - if not (album_info := plugins.album_for_id(album.mb_albumid)): + if not ( + album_info := metadata_plugins.album_for_id(album.mb_albumid) + ): self._log.info( "Release ID {0.mb_albumid} not found for album {0}", album ) diff --git a/beetsplug/missing.py b/beetsplug/missing.py index 8c328e647..d0e956930 100644 --- a/beetsplug/missing.py +++ b/beetsplug/missing.py @@ -21,7 +21,7 @@ from collections.abc import Iterator import musicbrainzngs from musicbrainzngs.musicbrainz import MusicBrainzError -from beets import config, plugins +from beets import config, metadata_plugins from beets.dbcore import types from beets.library import Album, Item, Library from beets.plugins import BeetsPlugin @@ -222,7 +222,7 @@ class MissingPlugin(BeetsPlugin): item_mbids = {x.mb_trackid for x in album.items()} # fetch missing items # TODO: Implement caching that without breaking other stuff - if album_info := plugins.album_for_id(album.mb_albumid): + if album_info := metadata_plugins.album_for_id(album.mb_albumid): for track_info in album_info.tracks: if track_info.track_id not in item_mbids: self._log.debug( From 29b77cfbd4f146dee9ac19b0a9bf8737e0090102 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 13:59:37 +0200 Subject: [PATCH 70/95] Added changelog entry. --- docs/changelog.rst | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index f9dafa00c..dadd6b73d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -66,9 +66,20 @@ For plugin developers: * The `fetchart` plugins has seen a few changes to function signatures and source registration in the process of introducing typings to the code. Custom art sources might need to be adapted. - +* We split the responsibilities of plugins into two base classes + #. :class:`beets.plugins.BeetsPlugin` + is the base class for all plugins, any plugin needs to inherit from this class. + #. :class:`beets.metadata_plugin.MetadataSourcePlugin` + allows plugins to act like metadata sources. E.g. used by the MusicBrainz plugin. All plugins + in the beets repo are opted into this class where applicable. If you are maintaining a plugin + that acts like a metadata source, i.e. you expose any of `track_for_id, + album_for_id, candidates, item_candidates, album_distance, track_distance` methods, + please update your plugin to inherit from the new baseclass, as otherwise it will + not be registered as a metadata source and wont be usable going forward. + Other changes: +* Refactor: Split responsibilities of Plugins into MetaDataPlugins and general Plugins. 
* Documentation structure for auto generated API references changed slightly. Autogenerated API references are now located in the `docs/api` subdirectory. * :doc:`/plugins/substitute`: Fix rST formatting for example cases so that each From 04f033917266936a460544b3e25e948e39e81ff1 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 7 Jul 2025 14:02:05 +0200 Subject: [PATCH 71/95] Added python version check for typing_Extensions --- beets/metadata_plugins.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/beets/metadata_plugins.py b/beets/metadata_plugins.py index 87ff9eb98..d0cf4e3b1 100644 --- a/beets/metadata_plugins.py +++ b/beets/metadata_plugins.py @@ -9,15 +9,19 @@ from __future__ import annotations import abc import re +import sys from typing import TYPE_CHECKING, Generic, Literal, Sequence, TypedDict, TypeVar -from typing_extensions import NotRequired - from beets.util import cached_classproperty from beets.util.id_extractors import extract_release_id from .plugins import BeetsPlugin, find_plugins, notify_info_yielded, send +if sys.version_info >= (3, 11): + from typing import NotRequired +else: + from typing_extensions import NotRequired + if TYPE_CHECKING: from collections.abc import Iterable From 5cd6a39a01167361c3fef4c1670cdef1e573ebc7 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Wed, 9 Jul 2025 10:33:52 +0200 Subject: [PATCH 72/95] Edited changelog and fixed an issue with plugin loading. --- beets/plugins.py | 2 ++ docs/changelog.rst | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/beets/plugins.py b/beets/plugins.py index c0d71bec1..8d1f5e93a 100644 --- a/beets/plugins.py +++ b/beets/plugins.py @@ -23,6 +23,7 @@ import sys import traceback from collections import defaultdict from functools import wraps +from types import GenericAlias from typing import TYPE_CHECKING, Any, Callable, Sequence, TypeVar import mediafile @@ -298,6 +299,7 @@ def load_plugins(names: Sequence[str] = ()) -> None: for obj in getattr(namespace, name).__dict__.values(): if ( inspect.isclass(obj) + and not isinstance(obj, GenericAlias) and issubclass(obj, BeetsPlugin) and obj != BeetsPlugin and not inspect.isabstract(obj) diff --git a/docs/changelog.rst b/docs/changelog.rst index dadd6b73d..d7d8f6efb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -72,8 +72,8 @@ For plugin developers: #. :class:`beets.metadata_plugin.MetadataSourcePlugin` allows plugins to act like metadata sources. E.g. used by the MusicBrainz plugin. All plugins in the beets repo are opted into this class where applicable. If you are maintaining a plugin - that acts like a metadata source, i.e. you expose any of `track_for_id, - album_for_id, candidates, item_candidates, album_distance, track_distance` methods, + that acts like a metadata source, i.e. you expose any of ``track_for_id``, + ``album_for_id``, ``candidates``, ``item_candidates``, ``album_distance``, ``track_distance`` methods, please update your plugin to inherit from the new baseclass, as otherwise it will not be registered as a metadata source and wont be usable going forward. From 648a9be1728173d2cd46c1e5e1fa375b70c00a7b Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Wed, 9 Jul 2025 10:41:32 +0200 Subject: [PATCH 73/95] Added deprecation warning for legacy type metadata plugins. 
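Plugins that still subclass BeetsPlugin but expose a data_source attribute keep working for the time being: find_metadata_source_plugins() still collects them but emits a DeprecationWarning, and the MetadataSourcePlugin helper methods are copied onto BeetsPlugin so the legacy classes keep functioning until v3.0.0. What the maintainer of such a plugin sees, and the intended fix (FooPlugin is a made-up example):

    DeprecationWarning: FooPlugin is used as a legacy metadata source. It should
    extend MetadataSourcePlugin instead of BeetsPlugin. Support for this will be
    removed in the v3.0.0 release!

    # Fix: switch the base class (the abstract lookup methods still need implementing).
    from beets.metadata_plugins import MetadataSourcePlugin

    class FooPlugin(MetadataSourcePlugin):  # was: class FooPlugin(BeetsPlugin)
        ...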
--- beets/metadata_plugins.py | 35 ++++++++++++++++++++++++++++++----- beets/plugins.py | 4 +++- 2 files changed, 33 insertions(+), 6 deletions(-) diff --git a/beets/metadata_plugins.py b/beets/metadata_plugins.py index d0cf4e3b1..8da1f0333 100644 --- a/beets/metadata_plugins.py +++ b/beets/metadata_plugins.py @@ -8,8 +8,10 @@ implemented as plugins. from __future__ import annotations import abc +import inspect import re import sys +import warnings from typing import TYPE_CHECKING, Generic, Literal, Sequence, TypedDict, TypeVar from beets.util import cached_classproperty @@ -36,11 +38,24 @@ def find_metadata_source_plugins() -> list[MetadataSourcePlugin]: Resolved from all currently loaded beets plugins. """ - return [ - plugin - for plugin in find_plugins() - if isinstance(plugin, MetadataSourcePlugin) - ] + + all_plugins = find_plugins() + metadata_plugins = [] + for plugin in all_plugins: + if isinstance(plugin, MetadataSourcePlugin): + metadata_plugins.append(plugin) + elif hasattr(plugin, "data_source"): + # TODO: Remove this in the future major release, v3.0.0 + warnings.warn( + f"{plugin.__class__.__name__} is used as a legacy metadata source. " + "It should extend MetadataSourcePlugin instead of BeetsPlugin. " + "Support for this will be removed in the v3.0.0 release!", + DeprecationWarning, + stacklevel=2, + ) + metadata_plugins.append(plugin) + + return metadata_plugins @notify_info_yielded("albuminfo_received") @@ -369,3 +384,13 @@ class SearchApiMetadataSourcePlugin( None, self.tracks_for_ids([result["id"] for result in results if result]), ) + + +# Dynamically copy methods to BeetsPlugin for legacy support +# TODO: Remove this in the future major release, v3.0.0 + +for name, method in inspect.getmembers( + MetadataSourcePlugin, predicate=inspect.isfunction +): + if not hasattr(BeetsPlugin, name): + setattr(BeetsPlugin, name, method) diff --git a/beets/plugins.py b/beets/plugins.py index 8d1f5e93a..821a96152 100644 --- a/beets/plugins.py +++ b/beets/plugins.py @@ -299,7 +299,9 @@ def load_plugins(names: Sequence[str] = ()) -> None: for obj in getattr(namespace, name).__dict__.values(): if ( inspect.isclass(obj) - and not isinstance(obj, GenericAlias) + and not isinstance( + obj, GenericAlias + ) # seems to be needed for python <= 3.9 only and issubclass(obj, BeetsPlugin) and obj != BeetsPlugin and not inspect.isabstract(obj) From 1d33580b6880ef4ca970b8c271672c4ca354491a Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Fri, 11 Jul 2025 11:45:52 +0200 Subject: [PATCH 74/95] Renamed class method to _extract_id. --- beets/metadata_plugins.py | 2 +- beetsplug/beatport.py | 2 +- beetsplug/deezer.py | 4 ++-- beetsplug/discogs.py | 4 ++-- beetsplug/musicbrainz.py | 4 ++-- beetsplug/spotify.py | 4 ++-- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/beets/metadata_plugins.py b/beets/metadata_plugins.py index 8da1f0333..1d16c0cc8 100644 --- a/beets/metadata_plugins.py +++ b/beets/metadata_plugins.py @@ -251,7 +251,7 @@ class MetadataSourcePlugin(BeetsPlugin, metaclass=abc.ABCMeta): """ return cls.__name__.replace("Plugin", "") # type: ignore[attr-defined] - def extract_release_id(self, url: str) -> str | None: + def _extract_id(self, url: str) -> str | None: """Extract an ID from a URL for this metadata source plugin. 
Uses the plugin's data source name to determine the ID format and diff --git a/beetsplug/beatport.py b/beetsplug/beatport.py index 72828a96a..16e0dc896 100644 --- a/beetsplug/beatport.py +++ b/beetsplug/beatport.py @@ -431,7 +431,7 @@ class BeatportPlugin(MetadataSourcePlugin): """ self._log.debug("Searching for release {0}", album_id) - if not (release_id := self.extract_release_id(album_id)): + if not (release_id := self._extract_id(album_id)): self._log.debug("Not a valid Beatport release ID.") return None diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py index bf6f83980..8815e3d59 100644 --- a/beetsplug/deezer.py +++ b/beetsplug/deezer.py @@ -66,7 +66,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]): def album_for_id(self, album_id: str) -> AlbumInfo | None: """Fetch an album by its Deezer ID or URL.""" - if not (deezer_id := self.extract_release_id(album_id)): + if not (deezer_id := self._extract_id(album_id)): return None album_url = f"{self.album_url}{deezer_id}" @@ -155,7 +155,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]): ``track_id`` or ``track_data`` must be provided. """ - if not (deezer_id := self.extract_release_id(track_id)): + if not (deezer_id := self._extract_id(track_id)): self._log.debug("Invalid Deezer track_id: {}", track_id) return None diff --git a/beetsplug/discogs.py b/beetsplug/discogs.py index 713dfbcae..9765f317f 100644 --- a/beetsplug/discogs.py +++ b/beetsplug/discogs.py @@ -204,7 +204,7 @@ class DiscogsPlugin(MetadataSourcePlugin): """ self._log.debug("Searching for release {0}", album_id) - discogs_id = self.extract_release_id(album_id) + discogs_id = self._extract_id(album_id) if not discogs_id: return None @@ -346,7 +346,7 @@ class DiscogsPlugin(MetadataSourcePlugin): else: genre = base_genre - discogs_albumid = self.extract_release_id(result.data.get("uri")) + discogs_albumid = self._extract_id(result.data.get("uri")) # Extract information for the optional AlbumInfo fields that are # contained on nested discogs fields. diff --git a/beetsplug/musicbrainz.py b/beetsplug/musicbrainz.py index 3b250c071..b52e44b23 100644 --- a/beetsplug/musicbrainz.py +++ b/beetsplug/musicbrainz.py @@ -837,7 +837,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin): MusicBrainzAPIError. """ self._log.debug("Requesting MusicBrainz release {}", album_id) - if not (albumid := self.extract_release_id(album_id)): + if not (albumid := self._extract_id(album_id)): self._log.debug("Invalid MBID ({0}).", album_id) return None @@ -874,7 +874,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin): """Fetches a track by its MusicBrainz ID. Returns a TrackInfo object or None if no track is found. May raise a MusicBrainzAPIError. """ - if not (trackid := self.extract_release_id(track_id)): + if not (trackid := self._extract_id(track_id)): self._log.debug("Invalid MBID ({0}).", track_id) return None diff --git a/beetsplug/spotify.py b/beetsplug/spotify.py index 27fd2e3b5..7a4f4ec52 100644 --- a/beetsplug/spotify.py +++ b/beetsplug/spotify.py @@ -290,7 +290,7 @@ class SpotifyPlugin( :return: AlbumInfo object for album :rtype: beets.autotag.hooks.AlbumInfo or None """ - if not (spotify_id := self.extract_release_id(album_id)): + if not (spotify_id := self._extract_id(album_id)): return None album_data = self._handle_response("get", self.album_url + spotify_id) @@ -393,7 +393,7 @@ class SpotifyPlugin( Returns a TrackInfo object or None if the track is not found. 
""" - if not (spotify_id := self.extract_release_id(track_id)): + if not (spotify_id := self._extract_id(track_id)): self._log.debug("Invalid Spotify ID: {}", track_id) return None From 0f085fb91bd36d5e580f7ed99bb27b2557b65bea Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Fri, 11 Jul 2025 11:47:39 +0200 Subject: [PATCH 75/95] Updated changelog --- docs/changelog.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index d7d8f6efb..7fb81237e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -74,8 +74,8 @@ For plugin developers: in the beets repo are opted into this class where applicable. If you are maintaining a plugin that acts like a metadata source, i.e. you expose any of ``track_for_id``, ``album_for_id``, ``candidates``, ``item_candidates``, ``album_distance``, ``track_distance`` methods, - please update your plugin to inherit from the new baseclass, as otherwise it will - not be registered as a metadata source and wont be usable going forward. + please update your plugin to inherit from the new baseclass, as otherwise your plugin will + stop working with the next major release. Other changes: From a0ae9db0c62d29f7bf7f9c11384d75c23d9e9839 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Mon, 14 Jul 2025 14:34:44 +0200 Subject: [PATCH 76/95] Added tpyehint to fix mypy issue. --- beets/metadata_plugins.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/beets/metadata_plugins.py b/beets/metadata_plugins.py index 1d16c0cc8..5b11dc4ec 100644 --- a/beets/metadata_plugins.py +++ b/beets/metadata_plugins.py @@ -40,7 +40,7 @@ def find_metadata_source_plugins() -> list[MetadataSourcePlugin]: """ all_plugins = find_plugins() - metadata_plugins = [] + metadata_plugins: list[MetadataSourcePlugin | BeetsPlugin] = [] for plugin in all_plugins: if isinstance(plugin, MetadataSourcePlugin): metadata_plugins.append(plugin) @@ -55,7 +55,8 @@ def find_metadata_source_plugins() -> list[MetadataSourcePlugin]: ) metadata_plugins.append(plugin) - return metadata_plugins + # typeignore: BeetsPlugin is not a MetadataSourcePlugin (legacy support) + return metadata_plugins # type: ignore[return-value] @notify_info_yielded("albuminfo_received") From 0c6b383b06f302a25de027efd6d8c679719f434a Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Wed, 16 Jul 2025 11:43:17 +0200 Subject: [PATCH 77/95] Track info should not be imported from metadata_plugin. --- beetsplug/chroma.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/beetsplug/chroma.py b/beetsplug/chroma.py index 21098ea81..eb98e7926 100644 --- a/beetsplug/chroma.py +++ b/beetsplug/chroma.py @@ -25,8 +25,8 @@ import acoustid import confuse from beets import config, ui, util -from beets.autotag.distance import Distance -from beets.metadata_plugins import MetadataSourcePlugin, TrackInfo +from beets.autotag import Distance, TrackInfo +from beets.metadata_plugins import MetadataSourcePlugin from beetsplug.musicbrainz import MusicBrainzPlugin API_KEY = "1vOwZtEn" From 47f8fbe629f24b1c1e936426b6329ca538e2bef6 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Wed, 16 Jul 2025 11:48:34 +0200 Subject: [PATCH 78/95] Plugin should call super init. 
--- beetsplug/chroma.py | 1 + 1 file changed, 1 insertion(+) diff --git a/beetsplug/chroma.py b/beetsplug/chroma.py index eb98e7926..8259934fd 100644 --- a/beetsplug/chroma.py +++ b/beetsplug/chroma.py @@ -172,6 +172,7 @@ def _all_releases(items): class AcoustidPlugin(MetadataSourcePlugin): def __init__(self): + super().__init__() self.config.add( { "auto": True, From f70e5ec758be6e7c9cdfb7cbd601f1e75efb2e66 Mon Sep 17 00:00:00 2001 From: Sebastian Mohr Date: Wed, 16 Jul 2025 12:07:49 +0200 Subject: [PATCH 79/95] split imports --- beetsplug/chroma.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/beetsplug/chroma.py b/beetsplug/chroma.py index 8259934fd..f90877113 100644 --- a/beetsplug/chroma.py +++ b/beetsplug/chroma.py @@ -25,7 +25,8 @@ import acoustid import confuse from beets import config, ui, util -from beets.autotag import Distance, TrackInfo +from beets.autotag.distance import Distance +from beets.autotag.hooks import TrackInfo from beets.metadata_plugins import MetadataSourcePlugin from beetsplug.musicbrainz import MusicBrainzPlugin From 5677f9beee189791a7d2eef91a07a9bc3ef635a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 13 Jul 2025 20:06:34 +0100 Subject: [PATCH 80/95] Fix breaking issues --- beets/dbcore/query.py | 3 ++- beets/ui/commands.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/beets/dbcore/query.py b/beets/dbcore/query.py index ae8e0ddf6..49d7f6428 100644 --- a/beets/dbcore/query.py +++ b/beets/dbcore/query.py @@ -28,6 +28,7 @@ from re import Pattern from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union from beets import util +from beets.util.units import raw_seconds_short if TYPE_CHECKING: from beets.dbcore.db import AnyModel, Model @@ -892,7 +893,7 @@ class DurationQuery(NumericQuery): if not s: return None try: - return util.raw_seconds_short(s) + return raw_seconds_short(s) except ValueError: try: return float(s) diff --git a/beets/ui/commands.py b/beets/ui/commands.py index 7b22c2462..12a8d6875 100755 --- a/beets/ui/commands.py +++ b/beets/ui/commands.py @@ -1343,7 +1343,7 @@ def import_func(lib, opts, args: list[str]): if opts.library: query = args - paths = [] + byte_paths = [] else: query = None paths = args From 21459c70ee0c303d8ad51a8fb62df65e52a81e14 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 13 Jul 2025 20:35:46 +0100 Subject: [PATCH 81/95] importer: provides search_ids into lookup_candidates explicitly --- beets/importer/stages.py | 4 +--- beets/importer/tasks.py | 30 ++++++++++++++---------------- test/test_importer.py | 8 ++++---- 3 files changed, 19 insertions(+), 23 deletions(-) diff --git a/beets/importer/stages.py b/beets/importer/stages.py index 5b3540db4..137bfcbc4 100644 --- a/beets/importer/stages.py +++ b/beets/importer/stages.py @@ -143,9 +143,7 @@ def lookup_candidates(session: ImportSession, task: ImportTask): # Restrict the initial lookup to IDs specified by the user via the -m # option. Currently all the IDs are passed onto the tasks directly. 
- task.search_ids = session.config["search_ids"].as_str_seq() - - task.lookup_candidates() + task.lookup_candidates(session.config["search_ids"].as_str_seq()) @pipeline.stage diff --git a/beets/importer/tasks.py b/beets/importer/tasks.py index 441224b6b..e4b94bf95 100644 --- a/beets/importer/tasks.py +++ b/beets/importer/tasks.py @@ -32,6 +32,8 @@ from beets.dbcore.query import PathQuery from .state import ImportState if TYPE_CHECKING: + from beets.autotag.match import Recommendation + from .session import ImportSession # Global logger. @@ -159,6 +161,7 @@ class ImportTask(BaseImportTask): cur_album: str | None = None cur_artist: str | None = None candidates: Sequence[autotag.AlbumMatch | autotag.TrackMatch] = [] + rec: Recommendation | None = None def __init__( self, @@ -167,11 +170,9 @@ class ImportTask(BaseImportTask): items: Iterable[library.Item] | None, ): super().__init__(toppath, paths, items) - self.rec = None self.should_remove_duplicates = False self.should_merge_duplicates = False self.is_album = True - self.search_ids = [] # user-supplied candidate IDs. def set_choice( self, choice: Action | autotag.AlbumMatch | autotag.TrackMatch @@ -356,18 +357,15 @@ class ImportTask(BaseImportTask): tasks = [t for inner in tasks for t in inner] return tasks - def lookup_candidates(self): - """Retrieve and store candidates for this album. User-specified - candidate IDs are stored in self.search_ids: if present, the - initial lookup is restricted to only those IDs. + def lookup_candidates(self, search_ids: list[str]) -> None: + """Retrieve and store candidates for this album. + + If User-specified ``search_ids`` list is not empty, the lookup is + restricted to only those IDs. """ - artist, album, prop = autotag.tag_album( - self.items, search_ids=self.search_ids + self.cur_artist, self.cur_album, (self.candidates, self.rec) = ( + autotag.tag_album(self.items, search_ids=search_ids) ) - self.cur_artist = artist - self.cur_album = album - self.candidates = prop.candidates - self.rec = prop.recommendation def find_duplicates(self, lib: library.Library): """Return a list of albums from `lib` with the same artist and @@ -695,10 +693,10 @@ class SingletonImportTask(ImportTask): for item in self.imported_items(): plugins.send("item_imported", lib=lib, item=item) - def lookup_candidates(self): - prop = autotag.tag_item(self.item, search_ids=self.search_ids) - self.candidates = prop.candidates - self.rec = prop.recommendation + def lookup_candidates(self, search_ids: list[str]) -> None: + self.candidates, self.rec = autotag.tag_item( + self.item, search_ids=search_ids + ) def find_duplicates(self, lib): """Return a list of items from `lib` that have the same artist diff --git a/test/test_importer.py b/test/test_importer.py index 14b163f73..c1768df3e 100644 --- a/test/test_importer.py +++ b/test/test_importer.py @@ -1627,9 +1627,9 @@ class ImportIdTest(ImportTestCase): task = importer.ImportTask( paths=self.import_dir, toppath="top path", items=[_common.item()] ) - task.search_ids = [self.ID_RELEASE_0, self.ID_RELEASE_1] - task.lookup_candidates() + task.lookup_candidates([self.ID_RELEASE_0, self.ID_RELEASE_1]) + assert {"VALID_RELEASE_0", "VALID_RELEASE_1"} == { c.info.album for c in task.candidates } @@ -1639,9 +1639,9 @@ class ImportIdTest(ImportTestCase): task = importer.SingletonImportTask( toppath="top path", item=_common.item() ) - task.search_ids = [self.ID_RECORDING_0, self.ID_RECORDING_1] - task.lookup_candidates() + task.lookup_candidates([self.ID_RECORDING_0, self.ID_RECORDING_1]) + assert 
{"VALID_RECORDING_0", "VALID_RECORDING_1"} == { c.info.title for c in task.candidates } From d3c64d8506370d0fcdfd42d679caa520fe43fb94 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 13 Jul 2025 22:58:48 +0100 Subject: [PATCH 82/95] Add missing types to importer and pipeline --- beets/importer/stages.py | 1 + beets/importer/tasks.py | 65 ++++++++++++++++++++++------------------ beets/util/__init__.py | 1 + beets/util/pipeline.py | 4 ++- 4 files changed, 41 insertions(+), 30 deletions(-) diff --git a/beets/importer/stages.py b/beets/importer/stages.py index 137bfcbc4..24ff815f3 100644 --- a/beets/importer/stages.py +++ b/beets/importer/stages.py @@ -70,6 +70,7 @@ def query_tasks(session: ImportSession): Instead of finding files from the filesystem, a query is used to match items from the library. """ + task: ImportTask if session.config["singletons"]: # Search for items. for item in session.lib.items(session.query): diff --git a/beets/importer/tasks.py b/beets/importer/tasks.py index e4b94bf95..abe2ca8a9 100644 --- a/beets/importer/tasks.py +++ b/beets/importer/tasks.py @@ -22,7 +22,7 @@ import time from collections import defaultdict from enum import Enum from tempfile import mkdtemp -from typing import TYPE_CHECKING, Callable, Iterable, Sequence +from typing import TYPE_CHECKING, Any, Callable, Iterable, Sequence import mediafile @@ -367,7 +367,7 @@ class ImportTask(BaseImportTask): autotag.tag_album(self.items, search_ids=search_ids) ) - def find_duplicates(self, lib: library.Library): + def find_duplicates(self, lib: library.Library) -> list[library.Album]: """Return a list of albums from `lib` with the same artist and album name as the task. """ @@ -698,7 +698,7 @@ class SingletonImportTask(ImportTask): self.item, search_ids=search_ids ) - def find_duplicates(self, lib): + def find_duplicates(self, lib: library.Library) -> list[library.Item]: # type: ignore[override] # Need splitting Singleton and Album tasks into separate classes """Return a list of items from `lib` that have the same artist and title as the task. """ @@ -800,6 +800,11 @@ class SentinelImportTask(ImportTask): pass +ArchiveHandler = tuple[ + Callable[[util.StrPath], bool], Callable[[util.StrPath], Any] +] + + class ArchiveImportTask(SentinelImportTask): """An import task that represents the processing of an archive. @@ -825,13 +830,13 @@ class ArchiveImportTask(SentinelImportTask): if not os.path.isfile(path): return False - for path_test, _ in cls.handlers(): + for path_test, _ in cls.handlers: if path_test(os.fsdecode(path)): return True return False - @classmethod - def handlers(cls): + @util.cached_classproperty + def handlers(cls) -> list[ArchiveHandler]: """Returns a list of archive handlers. Each handler is a `(path_test, ArchiveClass)` tuple. `path_test` @@ -839,28 +844,27 @@ class ArchiveImportTask(SentinelImportTask): handled by `ArchiveClass`. `ArchiveClass` is a class that implements the same interface as `tarfile.TarFile`. 
""" - if not hasattr(cls, "_handlers"): - cls._handlers: list[tuple[Callable, ...]] = [] - from zipfile import ZipFile, is_zipfile + _handlers: list[ArchiveHandler] = [] + from zipfile import ZipFile, is_zipfile - cls._handlers.append((is_zipfile, ZipFile)) - import tarfile + _handlers.append((is_zipfile, ZipFile)) + import tarfile - cls._handlers.append((tarfile.is_tarfile, tarfile.open)) - try: - from rarfile import RarFile, is_rarfile - except ImportError: - pass - else: - cls._handlers.append((is_rarfile, RarFile)) - try: - from py7zr import SevenZipFile, is_7zfile - except ImportError: - pass - else: - cls._handlers.append((is_7zfile, SevenZipFile)) + _handlers.append((tarfile.is_tarfile, tarfile.open)) + try: + from rarfile import RarFile, is_rarfile + except ImportError: + pass + else: + _handlers.append((is_rarfile, RarFile)) + try: + from py7zr import SevenZipFile, is_7zfile + except ImportError: + pass + else: + _handlers.append((is_7zfile, SevenZipFile)) - return cls._handlers + return _handlers def cleanup(self, copy=False, delete=False, move=False): """Removes the temporary directory the archive was extracted to.""" @@ -877,7 +881,7 @@ class ArchiveImportTask(SentinelImportTask): """ assert self.toppath is not None, "toppath must be set" - for path_test, handler_class in self.handlers(): + for path_test, handler_class in self.handlers: if path_test(os.fsdecode(self.toppath)): break else: @@ -923,7 +927,7 @@ class ImportTaskFactory: self.imported = 0 # "Real" tasks created. self.is_archive = ArchiveImportTask.is_archive(util.syspath(toppath)) - def tasks(self): + def tasks(self) -> Iterable[ImportTask]: """Yield all import tasks for music found in the user-specified path `self.toppath`. Any necessary sentinel tasks are also produced. @@ -1112,7 +1116,10 @@ def albums_in_dir(path: util.PathBytes): a list of Items that is probably an album. Specifically, any folder containing any media files is an album. """ - collapse_pat = collapse_paths = collapse_items = None + collapse_paths: list[util.PathBytes] = [] + collapse_items: list[util.PathBytes] = [] + collapse_pat = None + ignore: list[str] = config["ignore"].as_str_seq() ignore_hidden: bool = config["ignore_hidden"].get(bool) @@ -1137,7 +1144,7 @@ def albums_in_dir(path: util.PathBytes): # proceed to process the current one. if collapse_items: yield collapse_paths, collapse_items - collapse_pat = collapse_paths = collapse_items = None + collapse_pat, collapse_paths, collapse_items = None, [], [] # Check whether this directory looks like the *first* directory # in a multi-disc sequence. 
There are two indicators: the file diff --git a/beets/util/__init__.py b/beets/util/__init__.py index 00c9ce05d..a4b6ef3d6 100644 --- a/beets/util/__init__.py +++ b/beets/util/__init__.py @@ -63,6 +63,7 @@ MAX_FILENAME_LENGTH = 200 WINDOWS_MAGIC_PREFIX = "\\\\?\\" T = TypeVar("T") PathLike = Union[str, bytes, Path] +StrPath = Union[str, Path] Replacements = Sequence[tuple[Pattern[str], str]] # Here for now to allow for a easy replace later on diff --git a/beets/util/pipeline.py b/beets/util/pipeline.py index cebde0f23..140407f04 100644 --- a/beets/util/pipeline.py +++ b/beets/util/pipeline.py @@ -48,6 +48,8 @@ POISON = "__PIPELINE_POISON__" DEFAULT_QUEUE_SIZE = 16 +Tq = TypeVar("Tq") + def _invalidate_queue(q, val=None, sync=True): """Breaks a Queue such that it never blocks, always has size 1, @@ -91,7 +93,7 @@ def _invalidate_queue(q, val=None, sync=True): q.mutex.release() -class CountedQueue(queue.Queue): +class CountedQueue(queue.Queue[Tq]): """A queue that keeps track of the number of threads that are still feeding into it. The queue is poisoned when all threads are finished with the queue. From 816d06f160e3057690b98139f02eedced6debd70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 13 Jul 2025 23:18:32 +0100 Subject: [PATCH 83/95] Fix plugin types --- beetsplug/autobpm.py | 7 +++++-- beetsplug/lastgenre/__init__.py | 2 +- beetsplug/replaygain.py | 13 +++++++------ 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/beetsplug/autobpm.py b/beetsplug/autobpm.py index 9c953f711..46d7e672a 100644 --- a/beetsplug/autobpm.py +++ b/beetsplug/autobpm.py @@ -15,10 +15,10 @@ from __future__ import annotations -from collections.abc import Iterable from typing import TYPE_CHECKING import librosa +import numpy as np from beets.plugins import BeetsPlugin from beets.ui import Subcommand, should_write @@ -76,7 +76,10 @@ class AutoBPMPlugin(BeetsPlugin): self._log.error("Failed to measure BPM for {}: {}", path, exc) continue - bpm = round(tempo[0] if isinstance(tempo, Iterable) else tempo) + bpm = round( + float(tempo[0] if isinstance(tempo, np.ndarray) else tempo) + ) + item["bpm"] = bpm self._log.info("Computed BPM for {}: {}", path, bpm) diff --git a/beetsplug/lastgenre/__init__.py b/beetsplug/lastgenre/__init__.py index b67f1fae2..dbab96cf8 100644 --- a/beetsplug/lastgenre/__init__.py +++ b/beetsplug/lastgenre/__init__.py @@ -401,7 +401,7 @@ class LastGenrePlugin(plugins.BeetsPlugin): label = "album" if not new_genres and "artist" in self.sources: - new_genres = None + new_genres = [] if isinstance(obj, library.Item): new_genres = self.fetch_artist_genre(obj) label = "artist" diff --git a/beetsplug/replaygain.py b/beetsplug/replaygain.py index 00b651d99..96c854314 100644 --- a/beetsplug/replaygain.py +++ b/beetsplug/replaygain.py @@ -1161,7 +1161,9 @@ class ExceptionWatcher(Thread): Once an exception occurs, raise it and execute a callback. """ - def __init__(self, queue: queue.Queue, callback: Callable[[], None]): + def __init__( + self, queue: queue.Queue[Exception], callback: Callable[[], None] + ): self._queue = queue self._callback = callback self._stopevent = Event() @@ -1197,7 +1199,9 @@ BACKENDS: dict[str, type[Backend]] = {b.NAME: b for b in BACKEND_CLASSES} class ReplayGainPlugin(BeetsPlugin): """Provides ReplayGain analysis.""" - def __init__(self): + pool: ThreadPool | None = None + + def __init__(self) -> None: super().__init__() # default backend is 'command' for backward-compatibility. 
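Since the ExceptionWatcher and ReplayGainPlugin changes in this file are mostly annotations, a short usage sketch may help show how the typed pieces fit together. It is based only on the code visible in this diff; the lambda callback is a placeholder, not what the plugin actually passes:

    import queue

    from beetsplug.replaygain import ExceptionWatcher

    exc_queue: queue.Queue[Exception] = queue.Queue()
    watcher = ExceptionWatcher(exc_queue, lambda: None)  # a Thread subclass
    watcher.start()
    # Worker threads put() any exception they hit onto exc_queue; per its
    # docstring, the watcher then re-raises it and executes the callback.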
@@ -1261,9 +1265,6 @@ class ReplayGainPlugin(BeetsPlugin): except (ReplayGainError, FatalReplayGainError) as e: raise ui.UserError(f"replaygain initialization failed: {e}") - # Start threadpool lazily. - self.pool = None - def should_use_r128(self, item: Item) -> bool: """Checks the plugin setting to decide whether the calculation should be done using the EBU R128 standard and use R128_ tags instead. @@ -1420,7 +1421,7 @@ class ReplayGainPlugin(BeetsPlugin): """Open a `ThreadPool` instance in `self.pool`""" if self.pool is None and self.backend_instance.do_parallel: self.pool = ThreadPool(threads) - self.exc_queue: queue.Queue = queue.Queue() + self.exc_queue: queue.Queue[Exception] = queue.Queue() signal.signal(signal.SIGINT, self._interrupt) From 72003ba1929ae697f833add53dd169494cb1a38e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Sun, 13 Jul 2025 23:19:51 +0100 Subject: [PATCH 84/95] Let mypy failure block CI --- .github/workflows/lint.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index c9b66f402..7900d247d 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -105,7 +105,6 @@ jobs: - name: Type check code uses: liskin/gh-problem-matcher-wrap@v3 - continue-on-error: true with: linters: mypy run: poe check-types --show-column-numbers --no-error-summary ${{ needs.changed-files.outputs.changed_python_files }} From a5bbe574908438b722f0bd0a9853f3cd7e84bf72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 15 Jul 2025 09:12:17 +0100 Subject: [PATCH 85/95] Fix types in test_player --- beetsplug/bpd/__init__.py | 39 +++++++++++---------------- test/plugins/test_player.py | 53 +++++++------------------------------ 2 files changed, 24 insertions(+), 68 deletions(-) diff --git a/beetsplug/bpd/__init__.py b/beetsplug/bpd/__init__.py index 435368e35..a2ad2835c 100644 --- a/beetsplug/bpd/__init__.py +++ b/beetsplug/bpd/__init__.py @@ -30,7 +30,7 @@ from typing import TYPE_CHECKING import beets import beets.ui -from beets import dbcore, vfs +from beets import dbcore, logging, vfs from beets.library import Item from beets.plugins import BeetsPlugin from beets.util import as_string, bluelet @@ -38,6 +38,17 @@ from beets.util import as_string, bluelet if TYPE_CHECKING: from beets.dbcore.query import Query +log = logging.getLogger(__name__) + + +try: + from . import gstplayer +except ImportError as e: + raise ImportError( + "Gstreamer Python bindings not found." + ' Install "gstreamer1.0" and "python-gi" or similar package to use BPD.' + ) from e + PROTOCOL_VERSION = "0.16.0" BUFSIZE = 1024 @@ -94,11 +105,6 @@ SUBSYSTEMS = [ ] -# Gstreamer import error. -class NoGstreamerError(Exception): - pass - - # Error-handling, exceptions, parameter parsing. @@ -1099,14 +1105,6 @@ class Server(BaseServer): """ def __init__(self, library, host, port, password, ctrl_port, log): - try: - from beetsplug.bpd import gstplayer - except ImportError as e: - # This is a little hacky, but it's the best I know for now. 
- if e.args[0].endswith(" gst"): - raise NoGstreamerError() - else: - raise log.info("Starting server...") super().__init__(host, port, password, ctrl_port, log) self.lib = library @@ -1616,16 +1614,9 @@ class BPDPlugin(BeetsPlugin): def start_bpd(self, lib, host, port, password, volume, ctrl_port): """Starts a BPD server.""" - try: - server = Server(lib, host, port, password, ctrl_port, self._log) - server.cmd_setvol(None, volume) - server.run() - except NoGstreamerError: - self._log.error("Gstreamer Python bindings not found.") - self._log.error( - 'Install "gstreamer1.0" and "python-gi"' - "or similar package to use BPD." - ) + server = Server(lib, host, port, password, ctrl_port, self._log) + server.cmd_setvol(None, volume) + server.run() def commands(self): cmd = beets.ui.Subcommand( diff --git a/test/plugins/test_player.py b/test/plugins/test_player.py index a7c613d8f..16e424d7e 100644 --- a/test/plugins/test_player.py +++ b/test/plugins/test_player.py @@ -14,19 +14,15 @@ """Tests for BPD's implementation of the MPD protocol.""" -import importlib.util import multiprocessing as mp import os import socket -import sys import tempfile import threading import time import unittest from contextlib import contextmanager - -# Mock GstPlayer so that the forked process doesn't attempt to import gi: -from unittest import mock +from unittest.mock import MagicMock, patch import confuse import pytest @@ -34,43 +30,8 @@ import yaml from beets.test.helper import PluginTestCase from beets.util import bluelet -from beetsplug import bpd -gstplayer = importlib.util.module_from_spec( - importlib.util.find_spec("beetsplug.bpd.gstplayer") -) - - -def _gstplayer_play(*_): - bpd.gstplayer._GstPlayer.playing = True - return mock.DEFAULT - - -gstplayer._GstPlayer = mock.MagicMock( - spec_set=[ - "time", - "volume", - "playing", - "run", - "play_file", - "pause", - "stop", - "seek", - "play", - "get_decoders", - ], - **{ - "playing": False, - "volume": 0, - "time.return_value": (0, 0), - "play_file.side_effect": _gstplayer_play, - "play.side_effect": _gstplayer_play, - "get_decoders.return_value": {"default": ({"audio/mpeg"}, {"mp3"})}, - }, -) -gstplayer.GstPlayer = lambda _: gstplayer._GstPlayer -sys.modules["beetsplug.bpd.gstplayer"] = gstplayer -bpd.gstplayer = gstplayer +bpd = pytest.importorskip("beetsplug.bpd") class CommandParseTest(unittest.TestCase): @@ -256,7 +217,7 @@ def implements(commands, fail=False): bluelet_listener = bluelet.Listener -@mock.patch("beets.util.bluelet.Listener") +@patch("beets.util.bluelet.Listener") def start_server(args, assigned_port, listener_patch): """Start the bpd server, writing the port to `assigned_port`.""" @@ -938,7 +899,7 @@ class BPDPlaylistsTest(BPDTestHelper): response = client.send_command("load", "anything") self._assert_failed(response, bpd.ERROR_NO_EXIST) - @unittest.skip + @unittest.expectedFailure def test_cmd_playlistadd(self): with self.run_bpd() as client: self._bpd_add(client, self.item1, playlist="anything") @@ -1128,7 +1089,7 @@ class BPDConnectionTest(BPDTestHelper): self._assert_ok(response) assert self.TAGTYPES == set(response.data["tagtype"]) - @unittest.skip + @unittest.expectedFailure def test_tagtypes_mask(self): with self.run_bpd() as client: response = client.send_command("tagtypes", "clear") @@ -1169,6 +1130,10 @@ class BPDReflectionTest(BPDTestHelper): fail=True, ) + @patch( + "beetsplug.bpd.gstplayer.GstPlayer.get_decoders", + MagicMock(return_value={"default": ({"audio/mpeg"}, {"mp3"})}), + ) def test_cmd_decoders(self): with 
self.run_bpd() as client: response = client.send_command("decoders") From e1162b7e0f00e46e6743f15adae738a0dc490360 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 15 Jul 2025 09:12:57 +0100 Subject: [PATCH 86/95] Rename test_player -> test_bpd --- test/plugins/{test_player.py => test_bpd.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename test/plugins/{test_player.py => test_bpd.py} (100%) diff --git a/test/plugins/test_player.py b/test/plugins/test_bpd.py similarity index 100% rename from test/plugins/test_player.py rename to test/plugins/test_bpd.py From 46318a113c6b2ac0452cfc0e54c28bfba61065af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 15 Jul 2025 09:36:44 +0100 Subject: [PATCH 87/95] Install gstreamer1.0-plugins-base, python3-gst-1.0 for bpd --- .github/workflows/ci.yaml | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 390878372..f86cc103e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -33,7 +33,7 @@ jobs: if: matrix.platform == 'ubuntu-latest' run: | sudo apt update - sudo apt install ffmpeg gobject-introspection libcairo2-dev libgirepository-2.0-dev pandoc imagemagick + sudo apt install ffmpeg gobject-introspection gstreamer1.0-plugins-base python3-gst-1.0 libcairo2-dev libgirepository-2.0-dev pandoc imagemagick - name: Get changed lyrics files id: lyrics-update diff --git a/pyproject.toml b/pyproject.toml index ea69240d5..39c543307 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -124,7 +124,7 @@ aura = ["flask", "flask-cors", "Pillow"] autobpm = ["librosa", "resampy"] # badfiles # mp3val and flac beatport = ["requests-oauthlib"] -bpd = ["PyGObject"] # python-gi and GStreamer 1.0+ +bpd = ["PyGObject"] # gobject-introspection, gstreamer1.0-plugins-base, python3-gst-1.0 chroma = ["pyacoustid"] # chromaprint or fpcalc # convert # ffmpeg docs = ["pydata-sphinx-theme", "sphinx"] From 2ac612b696949faf4b57740cc8acd715a727e83d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 15 Jul 2025 10:06:55 +0100 Subject: [PATCH 88/95] Post coverage status comment --- codecov.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/codecov.yml b/codecov.yml index c4b333ad3..c899db06a 100644 --- a/codecov.yml +++ b/codecov.yml @@ -1,5 +1,6 @@ -# Don't post a comment on pull requests. -comment: off +comment: + layout: "condensed_header, condensed_files" + require_changes: true # Sets non-blocking status checks # https://docs.codecov.com/docs/commit-status#informational @@ -11,7 +12,7 @@ coverage: patch: default: informational: true - changes: no + changes: false github_checks: annotations: false From 3495a7ef364fbb4d5ba36d75eb39003fa03dfcb2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Wed, 16 Jul 2025 14:00:29 +0100 Subject: [PATCH 89/95] Update mypy config Remove 'files' configuration from the config to make sure that mypy runs quicker when invoked from the editor (where ideally it should only check the file being edited). Remove 'disallow_any_generics' from the config since we already have this configured centrally. And finally, do not allow variable redefinitions. 
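As a quick illustration of that last point (a sketch, not code from this patch): with `allow_redefinition` left at mypy's default of false, re-binding a name to a value of an incompatible type is now reported, e.g.

    def track_count(paths: list[str]) -> int:
        # mypy now flags this re-binding:
        # "Incompatible types in assignment (expression has type "int",
        #  variable has type "list[str]")"
        paths = len(paths)
        return paths
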
--- setup.cfg | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/setup.cfg b/setup.cfg index e999b55d3..0b50485ea 100644 --- a/setup.cfg +++ b/setup.cfg @@ -15,7 +15,7 @@ markers = data_file = .reports/coverage/data branch = true relative_files = true -omit = +omit = beets/test/* beetsplug/_typing.py @@ -34,7 +34,6 @@ exclude_also = show_contexts = true [mypy] -files = beets,beetsplug,test,extra,docs allow_any_generics = false # FIXME: Would be better to actually type the libraries (if under our control), # or write our own stubs. For now, silence errors @@ -46,11 +45,8 @@ explicit_package_bases = true # config for all files. [[mypy-beets.plugins]] disallow_untyped_decorators = true -disallow_any_generics = true check_untyped_defs = true -allow_redefinition = true [[mypy-beets.metadata_plugins]] disallow_untyped_decorators = true -disallow_any_generics = true check_untyped_defs = true From 7509843517e70810fce1b16d80451ba8561fba38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Wed, 16 Jul 2025 14:06:06 +0100 Subject: [PATCH 90/95] Fix a generic type error and warnings in spotify --- beets/dbcore/types.py | 2 +- beetsplug/spotify.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/beets/dbcore/types.py b/beets/dbcore/types.py index 30cabf42f..1b8434a0b 100644 --- a/beets/dbcore/types.py +++ b/beets/dbcore/types.py @@ -292,7 +292,7 @@ class DelimitedString(BaseString[list[str], list[str]]): containing delimiter-separated values. """ - model_type = list + model_type = list[str] def __init__(self, delimiter: str): self.delimiter = delimiter diff --git a/beetsplug/spotify.py b/beetsplug/spotify.py index 7a4f4ec52..fa5dc5c52 100644 --- a/beetsplug/spotify.py +++ b/beetsplug/spotify.py @@ -162,7 +162,7 @@ class SpotifyPlugin( """Get the path to the JSON file for storing the OAuth token.""" return self.config["tokenfile"].get(confuse.Filename(in_app_dir=True)) - def _authenticate(self): + def _authenticate(self) -> None: """Request an access token via the Client Credentials Flow: https://developer.spotify.com/documentation/general/guides/authorization-guide/#client-credentials-flow """ From ada672b759ee2356ac991c46398a7167e33efea3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Wed, 16 Jul 2025 14:30:03 +0100 Subject: [PATCH 91/95] Do not install not required apt deps in ci --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f86cc103e..baeb52f18 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -33,7 +33,7 @@ jobs: if: matrix.platform == 'ubuntu-latest' run: | sudo apt update - sudo apt install ffmpeg gobject-introspection gstreamer1.0-plugins-base python3-gst-1.0 libcairo2-dev libgirepository-2.0-dev pandoc imagemagick + sudo apt install --yes --no-install-recommends ffmpeg gobject-introspection gstreamer1.0-plugins-base python3-gst-1.0 libcairo2-dev libgirepository-2.0-dev pandoc imagemagick - name: Get changed lyrics files id: lyrics-update From 3be4a89aeeaf965c4f9ced05cf062ba12f125078 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 15 Jul 2025 11:58:25 +0100 Subject: [PATCH 92/95] refactor: convert _types from class attributes to cached properties Convert static _types dictionaries to dynamic cached class properties to enable proper plugin type inheritance and avoid mutating shared state. 
Key changes: - Replace static _types dicts with @cached_classproperty decorators - Update cached_classproperty to support proper caching with class names - Remove manual _types mutation in plugin loading/unloading - Add pluginload event and cache clearing for proper plugin integration - Fix test to trigger type checking during item creation This ensures plugin types are properly inherited through the class hierarchy and eliminates issues with shared mutable state between test runs. --- beets/dbcore/db.py | 7 ++--- beets/library/models.py | 19 +++++++------ beets/plugins.py | 2 +- beets/test/helper.py | 12 +++------ beets/ui/__init__.py | 9 +------ beets/util/__init__.py | 45 ++++++++++++++++++++++++------- beetsplug/advancedrewrite.py | 2 ++ test/plugins/test_types_plugin.py | 2 +- 8 files changed, 60 insertions(+), 38 deletions(-) diff --git a/beets/dbcore/db.py b/beets/dbcore/db.py index 16ca54995..b1c9e18d4 100755 --- a/beets/dbcore/db.py +++ b/beets/dbcore/db.py @@ -289,9 +289,10 @@ class Model(ABC, Generic[D]): terms. """ - _types: dict[str, types.Type] = {} - """Optional Types for non-fixed (i.e., flexible and computed) fields. - """ + @cached_classproperty + def _types(cls) -> dict[str, types.Type]: + """Optional types for non-fixed (flexible and computed) fields.""" + return {} _sorts: dict[str, type[FieldSort]] = {} """Optional named sort criteria. The keys are strings and the values diff --git a/beets/library/models.py b/beets/library/models.py index 68c80b934..8de1c2982 100644 --- a/beets/library/models.py +++ b/beets/library/models.py @@ -41,6 +41,14 @@ class LibModel(dbcore.Model["Library"]): _format_config_key: str path: bytes + @cached_classproperty + def _types(cls) -> dict[str, types.Type]: + """Return the types of the fields in this model.""" + return { + **plugins.types(cls), # type: ignore[arg-type] + "data_source": types.STRING, + } + @cached_classproperty def writable_media_fields(cls) -> set[str]: return set(MediaFile.fields()) & cls._fields.keys() @@ -265,10 +273,9 @@ class Album(LibModel): _search_fields = ("album", "albumartist", "genre") - _types = { - "path": types.PathType(), - "data_source": types.STRING, - } + @cached_classproperty + def _types(cls) -> dict[str, types.Type]: + return {**super()._types, "path": types.PathType()} _sorts = { "albumartist": dbcore.query.SmartArtistSort, @@ -715,10 +722,6 @@ class Item(LibModel): "genre", ) - _types = { - "data_source": types.STRING, - } - # Set of item fields that are backed by `MediaFile` fields. # Any kind of field (fixed, flexible, and computed) may be a media # field. 
Only these fields are read from disk in `read` and written in diff --git a/beets/plugins.py b/beets/plugins.py index 821a96152..9893633fb 100644 --- a/beets/plugins.py +++ b/beets/plugins.py @@ -362,7 +362,7 @@ def queries() -> dict[str, type[Query]]: def types(model_cls: type[AnyModel]) -> dict[str, Type]: - # Gives us `item_types` and `album_types` + """Return mapping between flex field names and types for the given model.""" attr_name = f"{model_cls.__name__.lower()}_types" types: dict[str, Type] = {} for plugin in find_plugins(): diff --git a/beets/test/helper.py b/beets/test/helper.py index 4f26e8448..a1d741b16 100644 --- a/beets/test/helper.py +++ b/beets/test/helper.py @@ -58,6 +58,7 @@ from beets.ui.commands import TerminalImportSession from beets.util import ( MoveOperation, bytestring_path, + cached_classproperty, clean_module_tempdir, syspath, ) @@ -471,8 +472,6 @@ class PluginMixin(ConfigMixin): plugin: ClassVar[str] preload_plugin: ClassVar[bool] = True - original_item_types = dict(Item._types) - original_album_types = dict(Album._types) original_item_queries = dict(Item._queries) original_album_queries = dict(Album._queries) @@ -494,13 +493,12 @@ class PluginMixin(ConfigMixin): # FIXME this should eventually be handled by a plugin manager plugins = (self.plugin,) if hasattr(self, "plugin") else plugins self.config["plugins"] = plugins + cached_classproperty.cache.clear() beets.plugins.load_plugins(plugins) + beets.plugins.send("pluginload") beets.plugins.find_plugins() - # Take a backup of the original _types and _queries to restore - # when unloading. - Item._types.update(beets.plugins.types(Item)) - Album._types.update(beets.plugins.types(Album)) + # Take a backup of the original _queries to restore when unloading. Item._queries.update(beets.plugins.named_queries(Item)) Album._queries.update(beets.plugins.named_queries(Album)) @@ -512,8 +510,6 @@ class PluginMixin(ConfigMixin): self.config["plugins"] = [] beets.plugins._classes = set() beets.plugins._instances = {} - Item._types = self.original_item_types - Album._types = self.original_album_types Item._queries = self.original_item_queries Album._queries = self.original_album_queries diff --git a/beets/ui/__init__.py b/beets/ui/__init__.py index 74dee550c..85fdda254 100644 --- a/beets/ui/__init__.py +++ b/beets/ui/__init__.py @@ -1609,14 +1609,7 @@ def _setup(options, lib=None): plugins = _load_plugins(options, config) - # Add types and queries defined by plugins. - plugin_types_album = plugins.types(library.Album) - library.Album._types.update(plugin_types_album) - item_types = plugin_types_album.copy() - item_types.update(library.Item._types) - item_types.update(plugins.types(library.Item)) - library.Item._types = item_types - + # Add queries defined by plugins. library.Item._queries.update(plugins.named_queries(library.Item)) library.Album._queries.update(plugins.named_queries(library.Album)) diff --git a/beets/util/__init__.py b/beets/util/__init__.py index a4b6ef3d6..58b08c844 100644 --- a/beets/util/__init__.py +++ b/beets/util/__init__.py @@ -41,6 +41,7 @@ from typing import ( Any, AnyStr, Callable, + ClassVar, Generic, NamedTuple, TypeVar, @@ -1052,20 +1053,46 @@ def par_map(transform: Callable[[T], Any], items: Sequence[T]) -> None: class cached_classproperty: - """A decorator implementing a read-only property that is *lazy* in - the sense that the getter is only invoked once. Subsequent accesses - through *any* instance use the cached result. + """Descriptor implementing cached class properties. 
+ + Provides class-level dynamic property behavior where the getter function is + called once per class and the result is cached for subsequent access. Unlike + instance properties, this operates on the class rather than instances. """ - def __init__(self, getter): + cache: ClassVar[dict[tuple[Any, str], Any]] = {} + + name: str + + # Ideally, we would like to use `Callable[[type[T]], Any]` here, + # however, `mypy` is unable to see this as a **class** property, and thinks + # that this callable receives an **instance** of the object, failing the + # type check, for example: + # >>> class Album: + # >>> @cached_classproperty + # >>> def foo(cls): + # >>> reveal_type(cls) # mypy: revealed type is "Album" + # >>> return cls.bar + # + # Argument 1 to "cached_classproperty" has incompatible type + # "Callable[[Album], ...]"; expected "Callable[[type[Album]], ...]" + # + # Therefore, we just use `Any` here, which is not ideal, but works. + def __init__(self, getter: Callable[[Any], Any]) -> None: + """Initialize the descriptor with the property getter function.""" self.getter = getter - self.cache = {} - def __get__(self, instance, owner): - if owner not in self.cache: - self.cache[owner] = self.getter(owner) + def __set_name__(self, owner: Any, name: str) -> None: + """Capture the attribute name this descriptor is assigned to.""" + self.name = name - return self.cache[owner] + def __get__(self, instance: Any, owner: type[Any]) -> Any: + """Compute and cache if needed, and return the property value.""" + key = owner, self.name + if key not in self.cache: + self.cache[key] = self.getter(owner) + + return self.cache[key] class LazySharedInstance(Generic[T]): diff --git a/beetsplug/advancedrewrite.py b/beetsplug/advancedrewrite.py index 9a5feaaff..8bc63c0cb 100644 --- a/beetsplug/advancedrewrite.py +++ b/beetsplug/advancedrewrite.py @@ -58,7 +58,9 @@ class AdvancedRewritePlugin(BeetsPlugin): def __init__(self): """Parse configuration and register template fields for rewriting.""" super().__init__() + self.register_listener("pluginload", self.loaded) + def loaded(self): template = confuse.Sequence( confuse.OneOf( [ diff --git a/test/plugins/test_types_plugin.py b/test/plugins/test_types_plugin.py index b41e9bb18..41807b80d 100644 --- a/test/plugins/test_types_plugin.py +++ b/test/plugins/test_types_plugin.py @@ -134,7 +134,7 @@ class TypesPluginTest(PluginTestCase): def test_unknown_type_error(self): self.config["types"] = {"flex": "unkown type"} with pytest.raises(ConfigValueError): - self.run_command("ls") + self.add_item(flex="test") def test_template_if_def(self): # Tests for a subtle bug when using %ifdef in templates along with From 98bb7f12be06338877525e02f6d19391ef25a503 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 15 Jul 2025 13:03:58 +0100 Subject: [PATCH 93/95] refactor: convert _queries from class attributes to cached properties Convert _queries from mutable class attributes to cached class properties that dynamically fetch plugin queries. This eliminates the need for manual query registration and cleanup in plugin loading/unloading logic. 
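For illustration only (a sketch, not part of this diff): a plugin now simply exposes an `item_queries` / `album_queries` mapping, and the cached `_queries` class property gathers it on first access; nothing is registered or torn down by hand.

    from beets.dbcore.query import NumericQuery
    from beets.plugins import BeetsPlugin

    class ExampleQueriesPlugin(BeetsPlugin):
        # Hypothetical named query: lets users write e.g. `myplays:10..20`.
        item_queries = {"myplays": NumericQuery}
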
--- beets/dbcore/db.py | 10 ++++++---- beets/library/models.py | 8 +++++++- beets/plugins.py | 12 ++++++------ beets/test/helper.py | 11 +---------- beets/ui/__init__.py | 4 ---- 5 files changed, 20 insertions(+), 25 deletions(-) diff --git a/beets/dbcore/db.py b/beets/dbcore/db.py index b1c9e18d4..b780c5756 100755 --- a/beets/dbcore/db.py +++ b/beets/dbcore/db.py @@ -299,10 +299,12 @@ class Model(ABC, Generic[D]): are subclasses of `Sort`. """ - _queries: dict[str, FieldQueryType] = {} - """Named queries that use a field-like `name:value` syntax but which - do not relate to any specific field. - """ + @cached_classproperty + def _queries(cls) -> dict[str, FieldQueryType]: + """Named queries that use a field-like `name:value` syntax but which + do not relate to any specific field. + """ + return {} _always_dirty = False """By default, fields only become "dirty" when their value actually diff --git a/beets/library/models.py b/beets/library/models.py index 8de1c2982..7501513a1 100644 --- a/beets/library/models.py +++ b/beets/library/models.py @@ -49,6 +49,10 @@ class LibModel(dbcore.Model["Library"]): "data_source": types.STRING, } + @cached_classproperty + def _queries(cls) -> dict[str, FieldQueryType]: + return plugins.named_queries(cls) # type: ignore[arg-type] + @cached_classproperty def writable_media_fields(cls) -> set[str]: return set(MediaFile.fields()) & cls._fields.keys() @@ -740,7 +744,9 @@ class Item(LibModel): _sorts = {"artist": dbcore.query.SmartArtistSort} - _queries = {"singleton": dbcore.query.SingletonQuery} + @cached_classproperty + def _queries(cls) -> dict[str, FieldQueryType]: + return {**super()._queries, "singleton": dbcore.query.SingletonQuery} _format_config_key = "format_item" diff --git a/beets/plugins.py b/beets/plugins.py index 9893633fb..81f423431 100644 --- a/beets/plugins.py +++ b/beets/plugins.py @@ -379,13 +379,13 @@ def types(model_cls: type[AnyModel]) -> dict[str, Type]: def named_queries(model_cls: type[AnyModel]) -> dict[str, FieldQueryType]: - # Gather `item_queries` and `album_queries` from the plugins. 
+ """Return mapping between field names and queries for the given model.""" attr_name = f"{model_cls.__name__.lower()}_queries" - queries: dict[str, FieldQueryType] = {} - for plugin in find_plugins(): - plugin_queries = getattr(plugin, attr_name, {}) - queries.update(plugin_queries) - return queries + return { + field: query + for plugin in find_plugins() + for field, query in getattr(plugin, attr_name, {}).items() + } def notify_info_yielded(event: str) -> Callable[[IterF[P, Ret]], IterF[P, Ret]]: diff --git a/beets/test/helper.py b/beets/test/helper.py index a1d741b16..eb024a7aa 100644 --- a/beets/test/helper.py +++ b/beets/test/helper.py @@ -52,7 +52,7 @@ import beets.plugins from beets import importer, logging, util from beets.autotag.hooks import AlbumInfo, TrackInfo from beets.importer import ImportSession -from beets.library import Album, Item, Library +from beets.library import Item, Library from beets.test import _common from beets.ui.commands import TerminalImportSession from beets.util import ( @@ -472,9 +472,6 @@ class PluginMixin(ConfigMixin): plugin: ClassVar[str] preload_plugin: ClassVar[bool] = True - original_item_queries = dict(Item._queries) - original_album_queries = dict(Album._queries) - def setup_beets(self): super().setup_beets() if self.preload_plugin: @@ -498,10 +495,6 @@ class PluginMixin(ConfigMixin): beets.plugins.send("pluginload") beets.plugins.find_plugins() - # Take a backup of the original _queries to restore when unloading. - Item._queries.update(beets.plugins.named_queries(Item)) - Album._queries.update(beets.plugins.named_queries(Album)) - def unload_plugins(self) -> None: """Unload all plugins and remove them from the configuration.""" # FIXME this should eventually be handled by a plugin manager @@ -510,8 +503,6 @@ class PluginMixin(ConfigMixin): self.config["plugins"] = [] beets.plugins._classes = set() beets.plugins._instances = {} - Item._queries = self.original_item_queries - Album._queries = self.original_album_queries @contextmanager def configure_plugin(self, config: Any): diff --git a/beets/ui/__init__.py b/beets/ui/__init__.py index 85fdda254..8b2419a07 100644 --- a/beets/ui/__init__.py +++ b/beets/ui/__init__.py @@ -1609,10 +1609,6 @@ def _setup(options, lib=None): plugins = _load_plugins(options, config) - # Add queries defined by plugins. - library.Item._queries.update(plugins.named_queries(library.Item)) - library.Album._queries.update(plugins.named_queries(library.Album)) - plugins.send("pluginload") # Get the default subcommands. From 7a79adb1e6deb702e2881ebdd4a3fcdf6ca6068d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?= Date: Tue, 15 Jul 2025 13:46:22 +0100 Subject: [PATCH 94/95] Update Python deps --- poetry.lock | 1147 ++++++++++++++++++++++++++------------------------- 1 file changed, 580 insertions(+), 567 deletions(-) diff --git a/poetry.lock b/poetry.lock index 752953e1d..a7a0ee9f7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -298,13 +298,13 @@ cffi = ">=1.0.0" [[package]] name = "certifi" -version = "2025.4.26" +version = "2025.7.14" description = "Python package for providing Mozilla's CA Bundle." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, - {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, + {file = "certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2"}, + {file = "certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995"}, ] [[package]] @@ -543,74 +543,78 @@ pyyaml = "*" [[package]] name = "coverage" -version = "7.8.0" +version = "7.9.2" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, - {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, - {file = "coverage-7.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3"}, - {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676"}, - {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d"}, - {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a"}, - {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c"}, - {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f"}, - {file = "coverage-7.8.0-cp310-cp310-win32.whl", hash = "sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f"}, - {file = "coverage-7.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23"}, - {file = "coverage-7.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27"}, - {file = "coverage-7.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea"}, - {file = "coverage-7.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7"}, - {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040"}, - {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543"}, - {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2"}, - {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318"}, - {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9"}, - {file = "coverage-7.8.0-cp311-cp311-win32.whl", hash = "sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c"}, - {file = "coverage-7.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78"}, - {file = "coverage-7.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc"}, - {file = "coverage-7.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6"}, - {file = "coverage-7.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d"}, - {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05"}, - {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a"}, - {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6"}, - {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47"}, - {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe"}, - {file = "coverage-7.8.0-cp312-cp312-win32.whl", hash = "sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545"}, - {file = "coverage-7.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b"}, - {file = "coverage-7.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd"}, - {file = "coverage-7.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00"}, - {file = "coverage-7.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64"}, - {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067"}, - {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008"}, - {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733"}, - {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323"}, - {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3"}, - {file = "coverage-7.8.0-cp313-cp313-win32.whl", hash = "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d"}, - {file = 
"coverage-7.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487"}, - {file = "coverage-7.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25"}, - {file = "coverage-7.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42"}, - {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502"}, - {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1"}, - {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4"}, - {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73"}, - {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a"}, - {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883"}, - {file = "coverage-7.8.0-cp313-cp313t-win32.whl", hash = "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada"}, - {file = "coverage-7.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257"}, - {file = "coverage-7.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f"}, - {file = "coverage-7.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a"}, - {file = "coverage-7.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82"}, - {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814"}, - {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c"}, - {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd"}, - {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4"}, - {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899"}, - {file = "coverage-7.8.0-cp39-cp39-win32.whl", hash = "sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f"}, - {file = "coverage-7.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3"}, - {file = "coverage-7.8.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd"}, - {file = "coverage-7.8.0-py3-none-any.whl", hash = 
"sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7"}, - {file = "coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501"}, + {file = "coverage-7.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:66283a192a14a3854b2e7f3418d7db05cdf411012ab7ff5db98ff3b181e1f912"}, + {file = "coverage-7.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4e01d138540ef34fcf35c1aa24d06c3de2a4cffa349e29a10056544f35cca15f"}, + {file = "coverage-7.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f22627c1fe2745ee98d3ab87679ca73a97e75ca75eb5faee48660d060875465f"}, + {file = "coverage-7.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b1c2d8363247b46bd51f393f86c94096e64a1cf6906803fa8d5a9d03784bdbf"}, + {file = "coverage-7.9.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c10c882b114faf82dbd33e876d0cbd5e1d1ebc0d2a74ceef642c6152f3f4d547"}, + {file = "coverage-7.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:de3c0378bdf7066c3988d66cd5232d161e933b87103b014ab1b0b4676098fa45"}, + {file = "coverage-7.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1e2f097eae0e5991e7623958a24ced3282676c93c013dde41399ff63e230fcf2"}, + {file = "coverage-7.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28dc1f67e83a14e7079b6cea4d314bc8b24d1aed42d3582ff89c0295f09b181e"}, + {file = "coverage-7.9.2-cp310-cp310-win32.whl", hash = "sha256:bf7d773da6af9e10dbddacbf4e5cab13d06d0ed93561d44dae0188a42c65be7e"}, + {file = "coverage-7.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:0c0378ba787681ab1897f7c89b415bd56b0b2d9a47e5a3d8dc0ea55aac118d6c"}, + {file = "coverage-7.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a7a56a2964a9687b6aba5b5ced6971af308ef6f79a91043c05dd4ee3ebc3e9ba"}, + {file = "coverage-7.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123d589f32c11d9be7fe2e66d823a236fe759b0096f5db3fb1b75b2fa414a4fa"}, + {file = "coverage-7.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:333b2e0ca576a7dbd66e85ab402e35c03b0b22f525eed82681c4b866e2e2653a"}, + {file = "coverage-7.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:326802760da234baf9f2f85a39e4a4b5861b94f6c8d95251f699e4f73b1835dc"}, + {file = "coverage-7.9.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e7be4cfec248df38ce40968c95d3952fbffd57b400d4b9bb580f28179556d2"}, + {file = "coverage-7.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0b4a4cb73b9f2b891c1788711408ef9707666501ba23684387277ededab1097c"}, + {file = "coverage-7.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2c8937fa16c8c9fbbd9f118588756e7bcdc7e16a470766a9aef912dd3f117dbd"}, + {file = "coverage-7.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42da2280c4d30c57a9b578bafd1d4494fa6c056d4c419d9689e66d775539be74"}, + {file = "coverage-7.9.2-cp311-cp311-win32.whl", hash = "sha256:14fa8d3da147f5fdf9d298cacc18791818f3f1a9f542c8958b80c228320e90c6"}, + {file = "coverage-7.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:549cab4892fc82004f9739963163fd3aac7a7b0df430669b75b86d293d2df2a7"}, + {file = "coverage-7.9.2-cp311-cp311-win_arm64.whl", hash = "sha256:c2667a2b913e307f06aa4e5677f01a9746cd08e4b35e14ebcde6420a9ebb4c62"}, + {file = 
"coverage-7.9.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae9eb07f1cfacd9cfe8eaee6f4ff4b8a289a668c39c165cd0c8548484920ffc0"}, + {file = "coverage-7.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9ce85551f9a1119f02adc46d3014b5ee3f765deac166acf20dbb851ceb79b6f3"}, + {file = "coverage-7.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8f6389ac977c5fb322e0e38885fbbf901743f79d47f50db706e7644dcdcb6e1"}, + {file = "coverage-7.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d9eae8cdfcd58fe7893b88993723583a6ce4dfbfd9f29e001922544f95615"}, + {file = "coverage-7.9.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fae939811e14e53ed8a9818dad51d434a41ee09df9305663735f2e2d2d7d959b"}, + {file = "coverage-7.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:31991156251ec202c798501e0a42bbdf2169dcb0f137b1f5c0f4267f3fc68ef9"}, + {file = "coverage-7.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d0d67963f9cbfc7c7f96d4ac74ed60ecbebd2ea6eeb51887af0f8dce205e545f"}, + {file = "coverage-7.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:49b752a2858b10580969ec6af6f090a9a440a64a301ac1528d7ca5f7ed497f4d"}, + {file = "coverage-7.9.2-cp312-cp312-win32.whl", hash = "sha256:88d7598b8ee130f32f8a43198ee02edd16d7f77692fa056cb779616bbea1b355"}, + {file = "coverage-7.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:9dfb070f830739ee49d7c83e4941cc767e503e4394fdecb3b54bfdac1d7662c0"}, + {file = "coverage-7.9.2-cp312-cp312-win_arm64.whl", hash = "sha256:4e2c058aef613e79df00e86b6d42a641c877211384ce5bd07585ed7ba71ab31b"}, + {file = "coverage-7.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:985abe7f242e0d7bba228ab01070fde1d6c8fa12f142e43debe9ed1dde686038"}, + {file = "coverage-7.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c3939264a76d44fde7f213924021ed31f55ef28111a19649fec90c0f109e6d"}, + {file = "coverage-7.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae5d563e970dbe04382f736ec214ef48103d1b875967c89d83c6e3f21706d5b3"}, + {file = "coverage-7.9.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdd612e59baed2a93c8843c9a7cb902260f181370f1d772f4842987535071d14"}, + {file = "coverage-7.9.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:256ea87cb2a1ed992bcdfc349d8042dcea1b80436f4ddf6e246d6bee4b5d73b6"}, + {file = "coverage-7.9.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f44ae036b63c8ea432f610534a2668b0c3aee810e7037ab9d8ff6883de480f5b"}, + {file = "coverage-7.9.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82d76ad87c932935417a19b10cfe7abb15fd3f923cfe47dbdaa74ef4e503752d"}, + {file = "coverage-7.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:619317bb86de4193debc712b9e59d5cffd91dc1d178627ab2a77b9870deb2868"}, + {file = "coverage-7.9.2-cp313-cp313-win32.whl", hash = "sha256:0a07757de9feb1dfafd16ab651e0f628fd7ce551604d1bf23e47e1ddca93f08a"}, + {file = "coverage-7.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:115db3d1f4d3f35f5bb021e270edd85011934ff97c8797216b62f461dd69374b"}, + {file = "coverage-7.9.2-cp313-cp313-win_arm64.whl", hash = "sha256:48f82f889c80af8b2a7bb6e158d95a3fbec6a3453a1004d04e4f3b5945a02694"}, + {file = "coverage-7.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:55a28954545f9d2f96870b40f6c3386a59ba8ed50caf2d949676dac3ecab99f5"}, + {file = "coverage-7.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cdef6504637731a63c133bb2e6f0f0214e2748495ec15fe42d1e219d1b133f0b"}, + {file = "coverage-7.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd5ebe66c7a97273d5d2ddd4ad0ed2e706b39630ed4b53e713d360626c3dbb3"}, + {file = "coverage-7.9.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9303aed20872d7a3c9cb39c5d2b9bdbe44e3a9a1aecb52920f7e7495410dfab8"}, + {file = "coverage-7.9.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc18ea9e417a04d1920a9a76fe9ebd2f43ca505b81994598482f938d5c315f46"}, + {file = "coverage-7.9.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6406cff19880aaaadc932152242523e892faff224da29e241ce2fca329866584"}, + {file = "coverage-7.9.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d0d4f6ecdf37fcc19c88fec3e2277d5dee740fb51ffdd69b9579b8c31e4232e"}, + {file = "coverage-7.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c33624f50cf8de418ab2b4d6ca9eda96dc45b2c4231336bac91454520e8d1fac"}, + {file = "coverage-7.9.2-cp313-cp313t-win32.whl", hash = "sha256:1df6b76e737c6a92210eebcb2390af59a141f9e9430210595251fbaf02d46926"}, + {file = "coverage-7.9.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f5fd54310b92741ebe00d9c0d1d7b2b27463952c022da6d47c175d246a98d1bd"}, + {file = "coverage-7.9.2-cp313-cp313t-win_arm64.whl", hash = "sha256:c48c2375287108c887ee87d13b4070a381c6537d30e8487b24ec721bf2a781cb"}, + {file = "coverage-7.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddc39510ac922a5c4c27849b739f875d3e1d9e590d1e7b64c98dadf037a16cce"}, + {file = "coverage-7.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a535c0c7364acd55229749c2b3e5eebf141865de3a8f697076a3291985f02d30"}, + {file = "coverage-7.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df0f9ef28e0f20c767ccdccfc5ae5f83a6f4a2fbdfbcbcc8487a8a78771168c8"}, + {file = "coverage-7.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f3da12e0ccbcb348969221d29441ac714bbddc4d74e13923d3d5a7a0bebef7a"}, + {file = "coverage-7.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a17eaf46f56ae0f870f14a3cbc2e4632fe3771eab7f687eda1ee59b73d09fe4"}, + {file = "coverage-7.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:669135a9d25df55d1ed56a11bf555f37c922cf08d80799d4f65d77d7d6123fcf"}, + {file = "coverage-7.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9d3a700304d01a627df9db4322dc082a0ce1e8fc74ac238e2af39ced4c083193"}, + {file = "coverage-7.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:71ae8b53855644a0b1579d4041304ddc9995c7b21c8a1f16753c4d8903b4dfed"}, + {file = "coverage-7.9.2-cp39-cp39-win32.whl", hash = "sha256:dd7a57b33b5cf27acb491e890720af45db05589a80c1ffc798462a765be6d4d7"}, + {file = "coverage-7.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f65bb452e579d5540c8b37ec105dd54d8b9307b07bcaa186818c104ffda22441"}, + {file = "coverage-7.9.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:8a1166db2fb62473285bcb092f586e081e92656c7dfa8e9f62b4d39d7e6b5050"}, + {file = "coverage-7.9.2-py3-none-any.whl", hash = "sha256:e425cd5b00f6fc0ed7cdbd766c70be8baab4b7839e4d4fe5fac48581dd968ea4"}, + {file = "coverage-7.9.2.tar.gz", hash = 
"sha256:997024fa51e3290264ffd7492ec97d0690293ccd2b45a6cd7d82d945a4a80c8b"}, ] [package.dependencies] @@ -709,13 +713,13 @@ dotenv = ["python-dotenv"] [[package]] name = "flask-cors" -version = "5.0.1" +version = "6.0.1" description = "A Flask extension simplifying CORS support" optional = true python-versions = "<4.0,>=3.9" files = [ - {file = "flask_cors-5.0.1-py3-none-any.whl", hash = "sha256:fa5cb364ead54bbf401a26dbf03030c6b18fb2fcaf70408096a572b409586b0c"}, - {file = "flask_cors-5.0.1.tar.gz", hash = "sha256:6ccb38d16d6b72bbc156c1c3f192bc435bfcc3c2bc864b2df1eb9b2d97b2403c"}, + {file = "flask_cors-6.0.1-py3-none-any.whl", hash = "sha256:c7b2cbfb1a31aa0d2e5341eea03a6805349f7a61647daee1a15c46bbe981494c"}, + {file = "flask_cors-6.0.1.tar.gz", hash = "sha256:d81bcb31f07b0985be7f48406247e9243aced229b7747219160a0559edd678db"}, ] [package.dependencies] @@ -1017,13 +1021,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "joblib" -version = "1.5.0" +version = "1.5.1" description = "Lightweight pipelining with Python functions" optional = true python-versions = ">=3.9" files = [ - {file = "joblib-1.5.0-py3-none-any.whl", hash = "sha256:206144b320246485b712fc8cc51f017de58225fa8b414a1fe1764a7231aca491"}, - {file = "joblib-1.5.0.tar.gz", hash = "sha256:d8757f955389a3dd7a23152e43bc297c2e0c2d3060056dad0feefc88a06939b5"}, + {file = "joblib-1.5.1-py3-none-any.whl", hash = "sha256:4719a31f054c7d766948dcd83e9613686b27114f190f717cec7eaa2084f8a74a"}, + {file = "joblib-1.5.1.tar.gz", hash = "sha256:f4f86e351f39fe3d0d32a9f2c3d8af1ee4cec285aafcb27003dda5205576b444"}, ] [[package]] @@ -1188,143 +1192,105 @@ files = [ [[package]] name = "lxml" -version = "5.4.0" +version = "6.0.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = true -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c"}, - {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:696ea9e87442467819ac22394ca36cb3d01848dad1be6fac3fb612d3bd5a12cf"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef80aeac414f33c24b3815ecd560cee272786c3adfa5f31316d8b349bfade28"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b9c2754cef6963f3408ab381ea55f47dabc6f78f4b8ebb0f0b25cf1ac1f7609"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a62cc23d754bb449d63ff35334acc9f5c02e6dae830d78dab4dd12b78a524f4"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f82125bc7203c5ae8633a7d5d20bcfdff0ba33e436e4ab0abc026a53a8960b7"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b67319b4aef1a6c56576ff544b67a2a6fbd7eaee485b241cabf53115e8908b8f"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:a8ef956fce64c8551221f395ba21d0724fed6b9b6242ca4f2f7beb4ce2f41997"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:0a01ce7d8479dce84fc03324e3b0c9c90b1ece9a9bb6a1b6c9025e7e4520e78c"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91505d3ddebf268bb1588eb0f63821f738d20e1e7f05d3c647a5ca900288760b"}, - {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3bcdde35d82ff385f4ede021df801b5c4a5bcdfb61ea87caabcebfc4945dc1b"}, - {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aea7c06667b987787c7d1f5e1dfcd70419b711cdb47d6b4bb4ad4b76777a0563"}, - {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7fb111eef4d05909b82152721a59c1b14d0f365e2be4c742a473c5d7372f4f5"}, - {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43d549b876ce64aa18b2328faff70f5877f8c6dede415f80a2f799d31644d776"}, - {file = "lxml-5.4.0-cp310-cp310-win32.whl", hash = "sha256:75133890e40d229d6c5837b0312abbe5bac1c342452cf0e12523477cd3aa21e7"}, - {file = "lxml-5.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:de5b4e1088523e2b6f730d0509a9a813355b7f5659d70eb4f319c76beea2e250"}, - {file = "lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9"}, - {file = "lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e"}, - {file = 
"lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86"}, - {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056"}, - {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7"}, - {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd"}, - {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751"}, - {file = "lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4"}, - {file = "lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539"}, - {file = "lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4"}, - {file = "lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079"}, - {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20"}, - {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8"}, - {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f"}, - {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc"}, - {file = "lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f"}, - {file = "lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2"}, - {file = "lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0"}, - {file = "lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982"}, - {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61"}, - {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54"}, - {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b"}, - {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a"}, - {file = "lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82"}, - {file = "lxml-5.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f"}, - {file = "lxml-5.4.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7be701c24e7f843e6788353c055d806e8bd8466b52907bafe5d13ec6a6dbaecd"}, - {file = 
"lxml-5.4.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb54f7c6bafaa808f27166569b1511fc42701a7713858dddc08afdde9746849e"}, - {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97dac543661e84a284502e0cf8a67b5c711b0ad5fb661d1bd505c02f8cf716d7"}, - {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:c70e93fba207106cb16bf852e421c37bbded92acd5964390aad07cb50d60f5cf"}, - {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9c886b481aefdf818ad44846145f6eaf373a20d200b5ce1a5c8e1bc2d8745410"}, - {file = "lxml-5.4.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:fa0e294046de09acd6146be0ed6727d1f42ded4ce3ea1e9a19c11b6774eea27c"}, - {file = "lxml-5.4.0-cp36-cp36m-win32.whl", hash = "sha256:61c7bbf432f09ee44b1ccaa24896d21075e533cd01477966a5ff5a71d88b2f56"}, - {file = "lxml-5.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7ce1a171ec325192c6a636b64c94418e71a1964f56d002cc28122fceff0b6121"}, - {file = "lxml-5.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:795f61bcaf8770e1b37eec24edf9771b307df3af74d1d6f27d812e15a9ff3872"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29f451a4b614a7b5b6c2e043d7b64a15bd8304d7e767055e8ab68387a8cacf4e"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:891f7f991a68d20c75cb13c5c9142b2a3f9eb161f1f12a9489c82172d1f133c0"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4aa412a82e460571fad592d0f93ce9935a20090029ba08eca05c614f99b0cc92"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:ac7ba71f9561cd7d7b55e1ea5511543c0282e2b6450f122672a2694621d63b7e"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:c5d32f5284012deaccd37da1e2cd42f081feaa76981f0eaa474351b68df813c5"}, - {file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:ce31158630a6ac85bddd6b830cffd46085ff90498b397bd0a259f59d27a12188"}, - {file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:31e63621e073e04697c1b2d23fcb89991790eef370ec37ce4d5d469f40924ed6"}, - {file = "lxml-5.4.0-cp37-cp37m-win32.whl", hash = "sha256:be2ba4c3c5b7900246a8f866580700ef0d538f2ca32535e991027bdaba944063"}, - {file = "lxml-5.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:09846782b1ef650b321484ad429217f5154da4d6e786636c38e434fa32e94e49"}, - {file = "lxml-5.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eaf24066ad0b30917186420d51e2e3edf4b0e2ea68d8cd885b14dc8afdcf6556"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b31a3a77501d86d8ade128abb01082724c0dfd9524f542f2f07d693c9f1175f"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e108352e203c7afd0eb91d782582f00a0b16a948d204d4dec8565024fafeea5"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11a96c3b3f7551c8a8109aa65e8594e551d5a84c76bf950da33d0fb6dfafab7"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ca755eebf0d9e62d6cb013f1261e510317a41bf4650f22963474a663fdfe02aa"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4cd915c0fb1bed47b5e6d6edd424ac25856252f09120e3e8ba5154b6b921860e"}, - {file = 
"lxml-5.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:226046e386556a45ebc787871d6d2467b32c37ce76c2680f5c608e25823ffc84"}, - {file = "lxml-5.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b108134b9667bcd71236c5a02aad5ddd073e372fb5d48ea74853e009fe38acb6"}, - {file = "lxml-5.4.0-cp38-cp38-win32.whl", hash = "sha256:1320091caa89805df7dcb9e908add28166113dcd062590668514dbd510798c88"}, - {file = "lxml-5.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:073eb6dcdf1f587d9b88c8c93528b57eccda40209cf9be549d469b942b41d70b"}, - {file = "lxml-5.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bda3ea44c39eb74e2488297bb39d47186ed01342f0022c8ff407c250ac3f498e"}, - {file = "lxml-5.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9ceaf423b50ecfc23ca00b7f50b64baba85fb3fb91c53e2c9d00bc86150c7e40"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:664cdc733bc87449fe781dbb1f309090966c11cc0c0cd7b84af956a02a8a4729"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67ed8a40665b84d161bae3181aa2763beea3747f748bca5874b4af4d75998f87"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b4a3bd174cc9cdaa1afbc4620c049038b441d6ba07629d89a83b408e54c35cd"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:b0989737a3ba6cf2a16efb857fb0dfa20bc5c542737fddb6d893fde48be45433"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:dc0af80267edc68adf85f2a5d9be1cdf062f973db6790c1d065e45025fa26140"}, - {file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:639978bccb04c42677db43c79bdaa23785dc7f9b83bfd87570da8207872f1ce5"}, - {file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a99d86351f9c15e4a901fc56404b485b1462039db59288b203f8c629260a142"}, - {file = "lxml-5.4.0-cp39-cp39-win32.whl", hash = "sha256:3e6d5557989cdc3ebb5302bbdc42b439733a841891762ded9514e74f60319ad6"}, - {file = "lxml-5.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:a8c9b7f16b63e65bbba889acb436a1034a82d34fa09752d754f88d708eca80e1"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1b717b00a71b901b4667226bba282dd462c42ccf618ade12f9ba3674e1fabc55"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27a9ded0f0b52098ff89dd4c418325b987feed2ea5cc86e8860b0f844285d740"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7ce10634113651d6f383aa712a194179dcd496bd8c41e191cec2099fa09de5"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53370c26500d22b45182f98847243efb518d268374a9570409d2e2276232fd37"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6364038c519dffdbe07e3cf42e6a7f8b90c275d4d1617a69bb59734c1a2d571"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b12cb6527599808ada9eb2cd6e0e7d3d8f13fe7bbb01c6311255a15ded4c7ab4"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5f11a1526ebd0dee85e7b1e39e39a0cc0d9d03fb527f56d8457f6df48a10dc0c"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b4afaf38bf79109bb060d9016fad014a9a48fb244e11b94f74ae366a64d252"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:de6f6bb8a7840c7bf216fb83eec4e2f79f7325eca8858167b68708b929ab2172"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5cca36a194a4eb4e2ed6be36923d3cffd03dcdf477515dea687185506583d4c9"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b7c86884ad23d61b025989d99bfdd92a7351de956e01c61307cb87035960bcb1"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:53d9469ab5460402c19553b56c3648746774ecd0681b1b27ea74d5d8a3ef5590"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:56dbdbab0551532bb26c19c914848d7251d73edb507c3079d6805fa8bba5b706"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14479c2ad1cb08b62bb941ba8e0e05938524ee3c3114644df905d2331c76cd57"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32697d2ea994e0db19c1df9e40275ffe84973e4232b5c274f47e7c1ec9763cdd"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:24f6df5f24fc3385f622c0c9d63fe34604893bc1a5bdbb2dbf5870f85f9a404a"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:151d6c40bc9db11e960619d2bf2ec5829f0aaffb10b41dcf6ad2ce0f3c0b2325"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4025bf2884ac4370a3243c5aa8d66d3cb9e15d3ddd0af2d796eccc5f0244390e"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9459e6892f59ecea2e2584ee1058f5d8f629446eab52ba2305ae13a32a059530"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47fb24cc0f052f0576ea382872b3fc7e1f7e3028e53299ea751839418ade92a6"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50441c9de951a153c698b9b99992e806b71c1f36d14b154592580ff4a9d0d877"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ab339536aa798b1e17750733663d272038bf28069761d5be57cb4a9b0137b4f8"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9776af1aad5a4b4a1317242ee2bea51da54b2a7b7b48674be736d463c999f37d"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:63e7968ff83da2eb6fdda967483a7a023aa497d85ad8f05c3ad9b1f2e8c84987"}, - {file = "lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd"}, + {file = "lxml-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:35bc626eec405f745199200ccb5c6b36f202675d204aa29bb52e27ba2b71dea8"}, + {file = "lxml-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:246b40f8a4aec341cbbf52617cad8ab7c888d944bfe12a6abd2b1f6cfb6f6082"}, + {file = "lxml-6.0.0-cp310-cp310-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2793a627e95d119e9f1e19720730472f5543a6d84c50ea33313ce328d870f2dd"}, + {file = "lxml-6.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:46b9ed911f36bfeb6338e0b482e7fe7c27d362c52fde29f221fddbc9ee2227e7"}, + {file = "lxml-6.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2b4790b558bee331a933e08883c423f65bbcd07e278f91b2272489e31ab1e2b4"}, + {file = "lxml-6.0.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2030956cf4886b10be9a0285c6802e078ec2391e1dd7ff3eb509c2c95a69b76"}, + {file = 
"lxml-6.0.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d23854ecf381ab1facc8f353dcd9adeddef3652268ee75297c1164c987c11dc"}, + {file = "lxml-6.0.0-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:43fe5af2d590bf4691531b1d9a2495d7aab2090547eaacd224a3afec95706d76"}, + {file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74e748012f8c19b47f7d6321ac929a9a94ee92ef12bc4298c47e8b7219b26541"}, + {file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:43cfbb7db02b30ad3926e8fceaef260ba2fb7df787e38fa2df890c1ca7966c3b"}, + {file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:34190a1ec4f1e84af256495436b2d196529c3f2094f0af80202947567fdbf2e7"}, + {file = "lxml-6.0.0-cp310-cp310-win32.whl", hash = "sha256:5967fe415b1920a3877a4195e9a2b779249630ee49ece22021c690320ff07452"}, + {file = "lxml-6.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:f3389924581d9a770c6caa4df4e74b606180869043b9073e2cec324bad6e306e"}, + {file = "lxml-6.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:522fe7abb41309e9543b0d9b8b434f2b630c5fdaf6482bee642b34c8c70079c8"}, + {file = "lxml-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ee56288d0df919e4aac43b539dd0e34bb55d6a12a6562038e8d6f3ed07f9e36"}, + {file = "lxml-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8dd6dd0e9c1992613ccda2bcb74fc9d49159dbe0f0ca4753f37527749885c25"}, + {file = "lxml-6.0.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d7ae472f74afcc47320238b5dbfd363aba111a525943c8a34a1b657c6be934c3"}, + {file = "lxml-6.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5592401cdf3dc682194727c1ddaa8aa0f3ddc57ca64fd03226a430b955eab6f6"}, + {file = "lxml-6.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58ffd35bd5425c3c3b9692d078bf7ab851441434531a7e517c4984d5634cd65b"}, + {file = "lxml-6.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f720a14aa102a38907c6d5030e3d66b3b680c3e6f6bc95473931ea3c00c59967"}, + {file = "lxml-6.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2a5e8d207311a0170aca0eb6b160af91adc29ec121832e4ac151a57743a1e1e"}, + {file = "lxml-6.0.0-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:2dd1cc3ea7e60bfb31ff32cafe07e24839df573a5e7c2d33304082a5019bcd58"}, + {file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cfcf84f1defed7e5798ef4f88aa25fcc52d279be731ce904789aa7ccfb7e8d2"}, + {file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a52a4704811e2623b0324a18d41ad4b9fabf43ce5ff99b14e40a520e2190c851"}, + {file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c16304bba98f48a28ae10e32a8e75c349dd742c45156f297e16eeb1ba9287a1f"}, + {file = "lxml-6.0.0-cp311-cp311-win32.whl", hash = "sha256:f8d19565ae3eb956d84da3ef367aa7def14a2735d05bd275cd54c0301f0d0d6c"}, + {file = "lxml-6.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b2d71cdefda9424adff9a3607ba5bbfc60ee972d73c21c7e3c19e71037574816"}, + {file = "lxml-6.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:8a2e76efbf8772add72d002d67a4c3d0958638696f541734304c7f28217a9cab"}, + {file = "lxml-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78718d8454a6e928470d511bf8ac93f469283a45c354995f7d19e77292f26108"}, + {file = "lxml-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:84ef591495ffd3f9dcabffd6391db7bb70d7230b5c35ef5148354a134f56f2be"}, + 
{file = "lxml-6.0.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2930aa001a3776c3e2601cb8e0a15d21b8270528d89cc308be4843ade546b9ab"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e"}, + {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741"}, + {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3"}, + {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16"}, + {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0"}, + {file = "lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a"}, + {file = "lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3"}, + {file = "lxml-6.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:8cb26f51c82d77483cdcd2b4a53cda55bbee29b3c2f3ddeb47182a2a9064e4eb"}, + {file = "lxml-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6da7cd4f405fd7db56e51e96bff0865b9853ae70df0e6720624049da76bde2da"}, + {file = "lxml-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b34339898bb556a2351a1830f88f751679f343eabf9cf05841c95b165152c9e7"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:51a5e4c61a4541bd1cd3ba74766d0c9b6c12d6a1a4964ef60026832aac8e79b3"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d18a25b19ca7307045581b18b3ec9ead2b1db5ccd8719c291f0cd0a5cec6cb81"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d4f0c66df4386b75d2ab1e20a489f30dc7fd9a06a896d64980541506086be1f1"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f4b481b6cc3a897adb4279216695150bbe7a44c03daba3c894f49d2037e0a24"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a78d6c9168f5bcb20971bf3329c2b83078611fbe1f807baadc64afc70523b3a"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae06fbab4f1bb7db4f7c8ca9897dc8db4447d1a2b9bee78474ad403437bcc29"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux_2_31_armv7l.whl", hash = 
"sha256:1fa377b827ca2023244a06554c6e7dc6828a10aaf74ca41965c5d8a4925aebb4"}, + {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1676b56d48048a62ef77a250428d1f31f610763636e0784ba67a9740823988ca"}, + {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:0e32698462aacc5c1cf6bdfebc9c781821b7e74c79f13e5ffc8bfe27c42b1abf"}, + {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4d6036c3a296707357efb375cfc24bb64cd955b9ec731abf11ebb1e40063949f"}, + {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7488a43033c958637b1a08cddc9188eb06d3ad36582cebc7d4815980b47e27ef"}, + {file = "lxml-6.0.0-cp313-cp313-win32.whl", hash = "sha256:5fcd7d3b1d8ecb91445bd71b9c88bdbeae528fefee4f379895becfc72298d181"}, + {file = "lxml-6.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:2f34687222b78fff795feeb799a7d44eca2477c3d9d3a46ce17d51a4f383e32e"}, + {file = "lxml-6.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:21db1ec5525780fd07251636eb5f7acb84003e9382c72c18c542a87c416ade03"}, + {file = "lxml-6.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4eb114a0754fd00075c12648d991ec7a4357f9cb873042cc9a77bf3a7e30c9db"}, + {file = "lxml-6.0.0-cp38-cp38-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:7da298e1659e45d151b4028ad5c7974917e108afb48731f4ed785d02b6818994"}, + {file = "lxml-6.0.0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7bf61bc4345c1895221357af8f3e89f8c103d93156ef326532d35c707e2fb19d"}, + {file = "lxml-6.0.0-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63b634facdfbad421d4b61c90735688465d4ab3a8853ac22c76ccac2baf98d97"}, + {file = "lxml-6.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e380e85b93f148ad28ac15f8117e2fd8e5437aa7732d65e260134f83ce67911b"}, + {file = "lxml-6.0.0-cp38-cp38-win32.whl", hash = "sha256:185efc2fed89cdd97552585c624d3c908f0464090f4b91f7d92f8ed2f3b18f54"}, + {file = "lxml-6.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:f97487996a39cb18278ca33f7be98198f278d0bc3c5d0fd4d7b3d63646ca3c8a"}, + {file = "lxml-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85b14a4689d5cff426c12eefe750738648706ea2753b20c2f973b2a000d3d261"}, + {file = "lxml-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f64ccf593916e93b8d36ed55401bb7fe9c7d5de3180ce2e10b08f82a8f397316"}, + {file = "lxml-6.0.0-cp39-cp39-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:b372d10d17a701b0945f67be58fae4664fd056b85e0ff0fbc1e6c951cdbc0512"}, + {file = "lxml-6.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a674c0948789e9136d69065cc28009c1b1874c6ea340253db58be7622ce6398f"}, + {file = "lxml-6.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:edf6e4c8fe14dfe316939711e3ece3f9a20760aabf686051b537a7562f4da91a"}, + {file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:048a930eb4572829604982e39a0c7289ab5dc8abc7fc9f5aabd6fbc08c154e93"}, + {file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0b5fa5eda84057a4f1bbb4bb77a8c28ff20ae7ce211588d698ae453e13c6281"}, + {file = "lxml-6.0.0-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:c352fc8f36f7e9727db17adbf93f82499457b3d7e5511368569b4c5bd155a922"}, + {file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8db5dc617cb937ae17ff3403c3a70a7de9df4852a046f93e71edaec678f721d0"}, + 
{file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:2181e4b1d07dde53986023482673c0f1fba5178ef800f9ab95ad791e8bdded6a"}, + {file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b3c98d5b24c6095e89e03d65d5c574705be3d49c0d8ca10c17a8a4b5201b72f5"}, + {file = "lxml-6.0.0-cp39-cp39-win32.whl", hash = "sha256:04d67ceee6db4bcb92987ccb16e53bef6b42ced872509f333c04fb58a3315256"}, + {file = "lxml-6.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:e0b1520ef900e9ef62e392dd3d7ae4f5fa224d1dd62897a792cf353eb20b6cae"}, + {file = "lxml-6.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:e35e8aaaf3981489f42884b59726693de32dabfc438ac10ef4eb3409961fd402"}, + {file = "lxml-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:dbdd7679a6f4f08152818043dbb39491d1af3332128b3752c3ec5cebc0011a72"}, + {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:40442e2a4456e9910875ac12951476d36c0870dcb38a68719f8c4686609897c4"}, + {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:db0efd6bae1c4730b9c863fc4f5f3c0fa3e8f05cae2c44ae141cb9dfc7d091dc"}, + {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ab542c91f5a47aaa58abdd8ea84b498e8e49fe4b883d67800017757a3eb78e8"}, + {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:013090383863b72c62a702d07678b658fa2567aa58d373d963cca245b017e065"}, + {file = "lxml-6.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c86df1c9af35d903d2b52d22ea3e66db8058d21dc0f59842ca5deb0595921141"}, + {file = "lxml-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4337e4aec93b7c011f7ee2e357b0d30562edd1955620fdd4aeab6aacd90d43c5"}, + {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ae74f7c762270196d2dda56f8dd7309411f08a4084ff2dfcc0b095a218df2e06"}, + {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:059c4cbf3973a621b62ea3132934ae737da2c132a788e6cfb9b08d63a0ef73f9"}, + {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f090a9bc0ce8da51a5632092f98a7e7f84bca26f33d161a98b57f7fb0004ca"}, + {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9da022c14baeec36edfcc8daf0e281e2f55b950249a455776f0d1adeeada4734"}, + {file = "lxml-6.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a55da151d0b0c6ab176b4e761670ac0e2667817a1e0dadd04a01d0561a219349"}, + {file = "lxml-6.0.0.tar.gz", hash = "sha256:032e65120339d44cdc3efc326c9f660f5f7205f3a535c1fdbf898b29ea01fb72"}, ] [package.extras] @@ -1332,7 +1298,6 @@ cssselect = ["cssselect (>=0.7)"] html-clean = ["lxml_html_clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.11,<3.1.0)"] [[package]] name = "markupsafe" @@ -1440,75 +1405,70 @@ test = ["pytest", "pytest-cov"] [[package]] name = "msgpack" -version = "1.1.0" +version = "1.1.1" description = "MessagePack serializer" optional = true python-versions = ">=3.8" files = [ - {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, - {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, - {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, - {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, - {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, - {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, - {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, - {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, - {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, - {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, - {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, - {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, - {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, - {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, - {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, - {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, - {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, - {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, - {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, - {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, - {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, - {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, - {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, - {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, - {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, - {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, - {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, - {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, - {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, - {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, - {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, - {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, - {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, - {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, - {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, - {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, - {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, - {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, - {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, - {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, - {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, - {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, - {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, - {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, - {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, - {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, - {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, - {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, - {file = 
"msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, - {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, - {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, - {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, - {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, - {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, - {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, - {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, - {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, - {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, - {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, - {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, - {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, - {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, - {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, - {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, + {file = "msgpack-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:353b6fc0c36fde68b661a12949d7d49f8f51ff5fa019c1e47c87c4ff34b080ed"}, + {file = "msgpack-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:79c408fcf76a958491b4e3b103d1c417044544b68e96d06432a189b43d1215c8"}, + {file = "msgpack-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78426096939c2c7482bf31ef15ca219a9e24460289c00dd0b94411040bb73ad2"}, + {file = "msgpack-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b17ba27727a36cb73aabacaa44b13090feb88a01d012c0f4be70c00f75048b4"}, + {file = "msgpack-1.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a17ac1ea6ec3c7687d70201cfda3b1e8061466f28f686c24f627cae4ea8efd0"}, + {file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:88d1e966c9235c1d4e2afac21ca83933ba59537e2e2727a999bf3f515ca2af26"}, + {file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f6d58656842e1b2ddbe07f43f56b10a60f2ba5826164910968f5933e5178af75"}, + {file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:96decdfc4adcbc087f5ea7ebdcfd3dee9a13358cae6e81d54be962efc38f6338"}, + {file = "msgpack-1.1.1-cp310-cp310-win32.whl", hash = "sha256:6640fd979ca9a212e4bcdf6eb74051ade2c690b862b679bfcb60ae46e6dc4bfd"}, + {file = "msgpack-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:8b65b53204fe1bd037c40c4148d00ef918eb2108d24c9aaa20bc31f9810ce0a8"}, + {file = "msgpack-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:71ef05c1726884e44f8b1d1773604ab5d4d17729d8491403a705e649116c9558"}, + {file = "msgpack-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:36043272c6aede309d29d56851f8841ba907a1a3d04435e43e8a19928e243c1d"}, + {file = "msgpack-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a32747b1b39c3ac27d0670122b57e6e57f28eefb725e0b625618d1b59bf9d1e0"}, + {file = "msgpack-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a8b10fdb84a43e50d38057b06901ec9da52baac6983d3f709d8507f3889d43f"}, + {file = "msgpack-1.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0c325c3f485dc54ec298d8b024e134acf07c10d494ffa24373bea729acf704"}, + {file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:88daaf7d146e48ec71212ce21109b66e06a98e5e44dca47d853cbfe171d6c8d2"}, + {file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8b55ea20dc59b181d3f47103f113e6f28a5e1c89fd5b67b9140edb442ab67f2"}, + {file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a28e8072ae9779f20427af07f53bbb8b4aa81151054e882aee333b158da8752"}, + {file = "msgpack-1.1.1-cp311-cp311-win32.whl", hash = "sha256:7da8831f9a0fdb526621ba09a281fadc58ea12701bc709e7b8cbc362feabc295"}, + {file = "msgpack-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fd1b58e1431008a57247d6e7cc4faa41c3607e8e7d4aaf81f7c29ea013cb458"}, + {file = "msgpack-1.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae497b11f4c21558d95de9f64fff7053544f4d1a17731c866143ed6bb4591238"}, + {file = "msgpack-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33be9ab121df9b6b461ff91baac6f2731f83d9b27ed948c5b9d1978ae28bf157"}, + {file = "msgpack-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f64ae8fe7ffba251fecb8408540c34ee9df1c26674c50c4544d72dbf792e5ce"}, + {file = "msgpack-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a494554874691720ba5891c9b0b39474ba43ffb1aaf32a5dac874effb1619e1a"}, + {file = "msgpack-1.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb643284ab0ed26f6957d969fe0dd8bb17beb567beb8998140b5e38a90974f6c"}, + {file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d275a9e3c81b1093c060c3837e580c37f47c51eca031f7b5fb76f7b8470f5f9b"}, + {file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fd6b577e4541676e0cc9ddc1709d25014d3ad9a66caa19962c4f5de30fc09ef"}, + {file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb29aaa613c0a1c40d1af111abf025f1732cab333f96f285d6a93b934738a68a"}, + {file = "msgpack-1.1.1-cp312-cp312-win32.whl", hash = "sha256:870b9a626280c86cff9c576ec0d9cbcc54a1e5ebda9cd26dab12baf41fee218c"}, + {file = "msgpack-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:5692095123007180dca3e788bb4c399cc26626da51629a31d40207cb262e67f4"}, + {file = "msgpack-1.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:3765afa6bd4832fc11c3749be4ba4b69a0e8d7b728f78e68120a157a4c5d41f0"}, + {file = "msgpack-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8ddb2bcfd1a8b9e431c8d6f4f7db0773084e107730ecf3472f1dfe9ad583f3d9"}, + {file = "msgpack-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:196a736f0526a03653d829d7d4c5500a97eea3648aebfd4b6743875f28aa2af8"}, + {file = "msgpack-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d592d06e3cc2f537ceeeb23d38799c6ad83255289bb84c2e5792e5a8dea268a"}, + {file = "msgpack-1.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4df2311b0ce24f06ba253fda361f938dfecd7b961576f9be3f3fbd60e87130ac"}, + {file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e4141c5a32b5e37905b5940aacbc59739f036930367d7acce7a64e4dec1f5e0b"}, + {file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b1ce7f41670c5a69e1389420436f41385b1aa2504c3b0c30620764b15dded2e7"}, + {file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4147151acabb9caed4e474c3344181e91ff7a388b888f1e19ea04f7e73dc7ad5"}, + {file = "msgpack-1.1.1-cp313-cp313-win32.whl", hash = "sha256:500e85823a27d6d9bba1d057c871b4210c1dd6fb01fbb764e37e4e8847376323"}, + {file = "msgpack-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:6d489fba546295983abd142812bda76b57e33d0b9f5d5b71c09a583285506f69"}, + {file = "msgpack-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bba1be28247e68994355e028dcd668316db30c1f758d3241a7b903ac78dcd285"}, + {file = "msgpack-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8f93dcddb243159c9e4109c9750ba5b335ab8d48d9522c5308cd05d7e3ce600"}, + {file = "msgpack-1.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fbbc0b906a24038c9958a1ba7ae0918ad35b06cb449d398b76a7d08470b0ed9"}, + {file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:61e35a55a546a1690d9d09effaa436c25ae6130573b6ee9829c37ef0f18d5e78"}, + {file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:1abfc6e949b352dadf4bce0eb78023212ec5ac42f6abfd469ce91d783c149c2a"}, + {file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:996f2609ddf0142daba4cefd767d6db26958aac8439ee41db9cc0db9f4c4c3a6"}, + {file = "msgpack-1.1.1-cp38-cp38-win32.whl", hash = "sha256:4d3237b224b930d58e9d83c81c0dba7aacc20fcc2f89c1e5423aa0529a4cd142"}, + {file = "msgpack-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:da8f41e602574ece93dbbda1fab24650d6bf2a24089f9e9dbb4f5730ec1e58ad"}, + {file = "msgpack-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5be6b6bc52fad84d010cb45433720327ce886009d862f46b26d4d154001994b"}, + {file = "msgpack-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a89cd8c087ea67e64844287ea52888239cbd2940884eafd2dcd25754fb72232"}, + {file = "msgpack-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d75f3807a9900a7d575d8d6674a3a47e9f227e8716256f35bc6f03fc597ffbf"}, + {file = "msgpack-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d182dac0221eb8faef2e6f44701812b467c02674a322c739355c39e94730cdbf"}, + {file = "msgpack-1.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b13fe0fb4aac1aa5320cd693b297fe6fdef0e7bea5518cbc2dd5299f873ae90"}, + {file = 
"msgpack-1.1.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:435807eeb1bc791ceb3247d13c79868deb22184e1fc4224808750f0d7d1affc1"}, + {file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4835d17af722609a45e16037bb1d4d78b7bdf19d6c0128116d178956618c4e88"}, + {file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8ef6e342c137888ebbfb233e02b8fbd689bb5b5fcc59b34711ac47ebd504478"}, + {file = "msgpack-1.1.1-cp39-cp39-win32.whl", hash = "sha256:61abccf9de335d9efd149e2fff97ed5974f2481b3353772e8e2dd3402ba2bd57"}, + {file = "msgpack-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:40eae974c873b2992fd36424a5d9407f93e97656d999f43fca9d29f820899084"}, + {file = "msgpack-1.1.1.tar.gz", hash = "sha256:77b79ce34a2bdab2594f490c8e80dd62a02d650b91a75159a63ec413b8d104cd"}, ] [[package]] @@ -1551,47 +1511,48 @@ files = [ [[package]] name = "mypy" -version = "1.15.0" +version = "1.17.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" files = [ - {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, - {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, - {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, - {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, - {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, - {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, - {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, - {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, - {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, - {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, - {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, - {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, - {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, - {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, + {file = "mypy-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8e08de6138043108b3b18f09d3f817a4783912e48828ab397ecf183135d84d6"}, + {file = "mypy-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce4a17920ec144647d448fc43725b5873548b1aae6c603225626747ededf582d"}, + {file = "mypy-1.17.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ff25d151cc057fdddb1cb1881ef36e9c41fa2a5e78d8dd71bee6e4dcd2bc05b"}, + {file = "mypy-1.17.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93468cf29aa9a132bceb103bd8475f78cacde2b1b9a94fd978d50d4bdf616c9a"}, + {file = "mypy-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:98189382b310f16343151f65dd7e6867386d3e35f7878c45cfa11383d175d91f"}, + {file = "mypy-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:c004135a300ab06a045c1c0d8e3f10215e71d7b4f5bb9a42ab80236364429937"}, + {file = "mypy-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:9d4fe5c72fd262d9c2c91c1117d16aac555e05f5beb2bae6a755274c6eec42be"}, + {file = "mypy-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d96b196e5c16f41b4f7736840e8455958e832871990c7ba26bf58175e357ed61"}, + {file = "mypy-1.17.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:73a0ff2dd10337ceb521c080d4147755ee302dcde6e1a913babd59473904615f"}, + {file = "mypy-1.17.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24cfcc1179c4447854e9e406d3af0f77736d631ec87d31c6281ecd5025df625d"}, + {file = "mypy-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c56f180ff6430e6373db7a1d569317675b0a451caf5fef6ce4ab365f5f2f6c3"}, + {file = "mypy-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:eafaf8b9252734400f9b77df98b4eee3d2eecab16104680d51341c75702cad70"}, + {file = "mypy-1.17.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f986f1cab8dbec39ba6e0eaa42d4d3ac6686516a5d3dccd64be095db05ebc6bb"}, + {file = "mypy-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:51e455a54d199dd6e931cd7ea987d061c2afbaf0960f7f66deef47c90d1b304d"}, + {file = "mypy-1.17.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3204d773bab5ff4ebbd1f8efa11b498027cd57017c003ae970f310e5b96be8d8"}, + {file = "mypy-1.17.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1051df7ec0886fa246a530ae917c473491e9a0ba6938cfd0ec2abc1076495c3e"}, + {file = "mypy-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f773c6d14dcc108a5b141b4456b0871df638eb411a89cd1c0c001fc4a9d08fc8"}, + {file = "mypy-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:1619a485fd0e9c959b943c7b519ed26b712de3002d7de43154a489a2d0fd817d"}, + {file = "mypy-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c41aa59211e49d717d92b3bb1238c06d387c9325d3122085113c79118bebb06"}, + {file = "mypy-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e69db1fb65b3114f98c753e3930a00514f5b68794ba80590eb02090d54a5d4a"}, + {file = "mypy-1.17.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:03ba330b76710f83d6ac500053f7727270b6b8553b0423348ffb3af6f2f7b889"}, + {file = "mypy-1.17.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:037bc0f0b124ce46bfde955c647f3e395c6174476a968c0f22c95a8d2f589bba"}, + {file = "mypy-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c38876106cb6132259683632b287238858bd58de267d80defb6f418e9ee50658"}, + {file = "mypy-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:d30ba01c0f151998f367506fab31c2ac4527e6a7b2690107c7a7f9e3cb419a9c"}, + {file = "mypy-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:63e751f1b5ab51d6f3d219fe3a2fe4523eaa387d854ad06906c63883fde5b1ab"}, + {file = "mypy-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f7fb09d05e0f1c329a36dcd30e27564a3555717cde87301fae4fb542402ddfad"}, + {file = "mypy-1.17.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b72c34ce05ac3a1361ae2ebb50757fb6e3624032d91488d93544e9f82db0ed6c"}, + {file = "mypy-1.17.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:434ad499ad8dde8b2f6391ddfa982f41cb07ccda8e3c67781b1bfd4e5f9450a8"}, + {file = "mypy-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:f105f61a5eff52e137fd73bee32958b2add9d9f0a856f17314018646af838e97"}, + {file = "mypy-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:ba06254a5a22729853209550d80f94e28690d5530c661f9416a68ac097b13fc4"}, + {file = "mypy-1.17.0-py3-none-any.whl", hash = "sha256:15d9d0018237ab058e5de3d8fce61b6fa72cc59cc78fd91f1b474bce12abf496"}, + {file = "mypy-1.17.0.tar.gz", hash = "sha256:e5d7ccc08ba089c06e2f5629c660388ef1fee708444f1dee0b9203fa031dee03"}, ] [package.dependencies] mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing_extensions = ">=4.6.0" @@ -1703,13 +1664,13 @@ files = [ [[package]] name = "oauthlib" -version = "3.2.2" +version = "3.3.1" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, + {file = "oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1"}, + {file = "oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9"}, ] [package.extras] @@ -1728,102 +1689,138 @@ files = [ {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + [[package]] name = "pillow" -version = "11.2.1" +version = "11.3.0" description = "Python Imaging Library (Fork)" optional = true python-versions = ">=3.9" files = [ - {file = "pillow-11.2.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:d57a75d53922fc20c165016a20d9c44f73305e67c351bbc60d1adaf662e74047"}, - {file = "pillow-11.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:127bf6ac4a5b58b3d32fc8289656f77f80567d65660bc46f72c0d77e6600cc95"}, - {file = "pillow-11.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ba4be812c7a40280629e55ae0b14a0aafa150dd6451297562e1764808bbe61"}, - {file = "pillow-11.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8bd62331e5032bc396a93609982a9ab6b411c05078a52f5fe3cc59234a3abd1"}, - {file = "pillow-11.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:562d11134c97a62fe3af29581f083033179f7ff435f78392565a1ad2d1c2c45c"}, - {file = "pillow-11.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c97209e85b5be259994eb5b69ff50c5d20cca0f458ef9abd835e262d9d88b39d"}, - {file = "pillow-11.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0c3e6d0f59171dfa2e25d7116217543310908dfa2770aa64b8f87605f8cacc97"}, - {file = "pillow-11.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc1c3bc53befb6096b84165956e886b1729634a799e9d6329a0c512ab651e579"}, - {file = "pillow-11.2.1-cp310-cp310-win32.whl", hash = "sha256:312c77b7f07ab2139924d2639860e084ec2a13e72af54d4f08ac843a5fc9c79d"}, - {file = 
"pillow-11.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9bc7ae48b8057a611e5fe9f853baa88093b9a76303937449397899385da06fad"}, - {file = "pillow-11.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:2728567e249cdd939f6cc3d1f049595c66e4187f3c34078cbc0a7d21c47482d2"}, - {file = "pillow-11.2.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35ca289f712ccfc699508c4658a1d14652e8033e9b69839edf83cbdd0ba39e70"}, - {file = "pillow-11.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0409af9f829f87a2dfb7e259f78f317a5351f2045158be321fd135973fff7bf"}, - {file = "pillow-11.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e5c5edee874dce4f653dbe59db7c73a600119fbea8d31f53423586ee2aafd7"}, - {file = "pillow-11.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93a07e76d13bff9444f1a029e0af2964e654bfc2e2c2d46bfd080df5ad5f3d8"}, - {file = "pillow-11.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e6def7eed9e7fa90fde255afaf08060dc4b343bbe524a8f69bdd2a2f0018f600"}, - {file = "pillow-11.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8f4f3724c068be008c08257207210c138d5f3731af6c155a81c2b09a9eb3a788"}, - {file = "pillow-11.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a0a6709b47019dff32e678bc12c63008311b82b9327613f534e496dacaefb71e"}, - {file = "pillow-11.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f6b0c664ccb879109ee3ca702a9272d877f4fcd21e5eb63c26422fd6e415365e"}, - {file = "pillow-11.2.1-cp311-cp311-win32.whl", hash = "sha256:cc5d875d56e49f112b6def6813c4e3d3036d269c008bf8aef72cd08d20ca6df6"}, - {file = "pillow-11.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:0f5c7eda47bf8e3c8a283762cab94e496ba977a420868cb819159980b6709193"}, - {file = "pillow-11.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:4d375eb838755f2528ac8cbc926c3e31cc49ca4ad0cf79cff48b20e30634a4a7"}, - {file = "pillow-11.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:78afba22027b4accef10dbd5eed84425930ba41b3ea0a86fa8d20baaf19d807f"}, - {file = "pillow-11.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78092232a4ab376a35d68c4e6d5e00dfd73454bd12b230420025fbe178ee3b0b"}, - {file = "pillow-11.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a5f306095c6780c52e6bbb6109624b95c5b18e40aab1c3041da3e9e0cd3e2d"}, - {file = "pillow-11.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c7b29dbd4281923a2bfe562acb734cee96bbb129e96e6972d315ed9f232bef4"}, - {file = "pillow-11.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e645b020f3209a0181a418bffe7b4a93171eef6c4ef6cc20980b30bebf17b7d"}, - {file = "pillow-11.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2dbea1012ccb784a65349f57bbc93730b96e85b42e9bf7b01ef40443db720b4"}, - {file = "pillow-11.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3104c57bbd72948d75f6a9389e6727d2ab6333c3617f0a89d72d4940aa0443"}, - {file = "pillow-11.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:598174aef4589af795f66f9caab87ba4ff860ce08cd5bb447c6fc553ffee603c"}, - {file = "pillow-11.2.1-cp312-cp312-win32.whl", hash = "sha256:1d535df14716e7f8776b9e7fee118576d65572b4aad3ed639be9e4fa88a1cad3"}, - {file = "pillow-11.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:14e33b28bf17c7a38eede290f77db7c664e4eb01f7869e37fa98a5aa95978941"}, - {file = "pillow-11.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:21e1470ac9e5739ff880c211fc3af01e3ae505859392bf65458c224d0bf283eb"}, - {file = 
"pillow-11.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fdec757fea0b793056419bca3e9932eb2b0ceec90ef4813ea4c1e072c389eb28"}, - {file = "pillow-11.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0e130705d568e2f43a17bcbe74d90958e8a16263868a12c3e0d9c8162690830"}, - {file = "pillow-11.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bdb5e09068332578214cadd9c05e3d64d99e0e87591be22a324bdbc18925be0"}, - {file = "pillow-11.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d189ba1bebfbc0c0e529159631ec72bb9e9bc041f01ec6d3233d6d82eb823bc1"}, - {file = "pillow-11.2.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:191955c55d8a712fab8934a42bfefbf99dd0b5875078240943f913bb66d46d9f"}, - {file = "pillow-11.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:ad275964d52e2243430472fc5d2c2334b4fc3ff9c16cb0a19254e25efa03a155"}, - {file = "pillow-11.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:750f96efe0597382660d8b53e90dd1dd44568a8edb51cb7f9d5d918b80d4de14"}, - {file = "pillow-11.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fe15238d3798788d00716637b3d4e7bb6bde18b26e5d08335a96e88564a36b6b"}, - {file = "pillow-11.2.1-cp313-cp313-win32.whl", hash = "sha256:3fe735ced9a607fee4f481423a9c36701a39719252a9bb251679635f99d0f7d2"}, - {file = "pillow-11.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:74ee3d7ecb3f3c05459ba95eed5efa28d6092d751ce9bf20e3e253a4e497e691"}, - {file = "pillow-11.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:5119225c622403afb4b44bad4c1ca6c1f98eed79db8d3bc6e4e160fc6339d66c"}, - {file = "pillow-11.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8ce2e8411c7aaef53e6bb29fe98f28cd4fbd9a1d9be2eeea434331aac0536b22"}, - {file = "pillow-11.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9ee66787e095127116d91dea2143db65c7bb1e232f617aa5957c0d9d2a3f23a7"}, - {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9622e3b6c1d8b551b6e6f21873bdcc55762b4b2126633014cea1803368a9aa16"}, - {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63b5dff3a68f371ea06025a1a6966c9a1e1ee452fc8020c2cd0ea41b83e9037b"}, - {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:31df6e2d3d8fc99f993fd253e97fae451a8db2e7207acf97859732273e108406"}, - {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:062b7a42d672c45a70fa1f8b43d1d38ff76b63421cbbe7f88146b39e8a558d91"}, - {file = "pillow-11.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4eb92eca2711ef8be42fd3f67533765d9fd043b8c80db204f16c8ea62ee1a751"}, - {file = "pillow-11.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f91ebf30830a48c825590aede79376cb40f110b387c17ee9bd59932c961044f9"}, - {file = "pillow-11.2.1-cp313-cp313t-win32.whl", hash = "sha256:e0b55f27f584ed623221cfe995c912c61606be8513bfa0e07d2c674b4516d9dd"}, - {file = "pillow-11.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:36d6b82164c39ce5482f649b437382c0fb2395eabc1e2b1702a6deb8ad647d6e"}, - {file = "pillow-11.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:225c832a13326e34f212d2072982bb1adb210e0cc0b153e688743018c94a2681"}, - {file = "pillow-11.2.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:7491cf8a79b8eb867d419648fff2f83cb0b3891c8b36da92cc7f1931d46108c8"}, - {file = "pillow-11.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b02d8f9cb83c52578a0b4beadba92e37d83a4ef11570a8688bbf43f4ca50909"}, - {file = 
"pillow-11.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:014ca0050c85003620526b0ac1ac53f56fc93af128f7546623cc8e31875ab928"}, - {file = "pillow-11.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3692b68c87096ac6308296d96354eddd25f98740c9d2ab54e1549d6c8aea9d79"}, - {file = "pillow-11.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:f781dcb0bc9929adc77bad571b8621ecb1e4cdef86e940fe2e5b5ee24fd33b35"}, - {file = "pillow-11.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2b490402c96f907a166615e9a5afacf2519e28295f157ec3a2bb9bd57de638cb"}, - {file = "pillow-11.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dd6b20b93b3ccc9c1b597999209e4bc5cf2853f9ee66e3fc9a400a78733ffc9a"}, - {file = "pillow-11.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4b835d89c08a6c2ee7781b8dd0a30209a8012b5f09c0a665b65b0eb3560b6f36"}, - {file = "pillow-11.2.1-cp39-cp39-win32.whl", hash = "sha256:b10428b3416d4f9c61f94b494681280be7686bda15898a3a9e08eb66a6d92d67"}, - {file = "pillow-11.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:6ebce70c3f486acf7591a3d73431fa504a4e18a9b97ff27f5f47b7368e4b9dd1"}, - {file = "pillow-11.2.1-cp39-cp39-win_arm64.whl", hash = "sha256:c27476257b2fdcd7872d54cfd119b3a9ce4610fb85c8e32b70b42e3680a29a1e"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9b7b0d4fd2635f54ad82785d56bc0d94f147096493a79985d0ab57aedd563156"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:aa442755e31c64037aa7c1cb186e0b369f8416c567381852c63444dd666fb772"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0d3348c95b766f54b76116d53d4cb171b52992a1027e7ca50c81b43b9d9e363"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85d27ea4c889342f7e35f6d56e7e1cb345632ad592e8c51b693d7b7556043ce0"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bf2c33d6791c598142f00c9c4c7d47f6476731c31081331664eb26d6ab583e01"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e616e7154c37669fc1dfc14584f11e284e05d1c650e1c0f972f281c4ccc53193"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39ad2e0f424394e3aebc40168845fee52df1394a4673a6ee512d840d14ab3013"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80f1df8dbe9572b4b7abdfa17eb5d78dd620b1d55d9e25f834efdbee872d3aed"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ea926cfbc3957090becbcbbb65ad177161a2ff2ad578b5a6ec9bb1e1cd78753c"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:738db0e0941ca0376804d4de6a782c005245264edaa253ffce24e5a15cbdc7bd"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db98ab6565c69082ec9b0d4e40dd9f6181dab0dd236d26f7a50b8b9bfbd5076"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:036e53f4170e270ddb8797d4c590e6dd14d28e15c7da375c18978045f7e6c37b"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14f73f7c291279bd65fda51ee87affd7c1e097709f7fdd0188957a16c264601f"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044"}, - {file = 
"pillow-11.2.1.tar.gz", hash = "sha256:a64dd61998416367b7ef979b73d3a85853ba9bec4c2925f74e588879a58716b6"}, + {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"}, + {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"}, + {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"}, + {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"}, + {file = "pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"}, + {file = "pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"}, + {file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"}, + {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"}, + {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"}, + {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"}, + {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"}, + {file = "pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"}, + {file = "pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"}, + {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"}, + {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"}, + {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"}, + {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"}, + {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d"}, + {file = "pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"}, + {file = "pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"}, + {file = "pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"}, + {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd"}, + {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8"}, + {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"}, + {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"}, + {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"}, + {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"}, + {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b"}, + {file = "pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3"}, + {file = "pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51"}, + {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"}, + {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"}, + {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"}, + {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"}, + {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c"}, + {file = "pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788"}, + {file = "pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31"}, + {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"}, + {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"}, + {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"}, + {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"}, + {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a"}, + {file = "pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214"}, + {file = "pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635"}, + {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = 
"sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"}, + {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"}, + {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"}, + {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"}, + {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b"}, + {file = "pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12"}, + {file = "pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db"}, + {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"}, + {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"}, + {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"}, + {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"}, + {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d"}, + {file = "pillow-11.3.0-cp39-cp39-win32.whl", hash = "sha256:ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71"}, + {file = "pillow-11.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada"}, + {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"}, + {file = "pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] test-arrow = ["pyarrow"] -tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"] +tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "trove-classifiers (>=2024.10.12)"] typing = ["typing-extensions"] xmp = ["defusedxml"] @@ -1845,18 +1842,18 @@ type = ["mypy (>=1.14.1)"] [[package]] name = "pluggy" -version = "1.5.0" +version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = 
"sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "pooch" @@ -1904,13 +1901,13 @@ test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "py7zr" -version = "0.22.0" +version = "1.0.0" description = "Pure python 7-zip library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "py7zr-0.22.0-py3-none-any.whl", hash = "sha256:993b951b313500697d71113da2681386589b7b74f12e48ba13cc12beca79d078"}, - {file = "py7zr-0.22.0.tar.gz", hash = "sha256:c6c7aea5913535184003b73938490f9a4d8418598e533f9ca991d3b8e45a139e"}, + {file = "py7zr-1.0.0-py3-none-any.whl", hash = "sha256:6f42d2ff34c808e9026ad11b721c13b41b0673cf2b4e8f8fb34f9d65ae143dd1"}, + {file = "py7zr-1.0.0.tar.gz", hash = "sha256:f6bfee81637c9032f6a9f0eb045a4bfc7a7ff4138becfc42d7cb89b54ffbfef1"}, ] [package.dependencies] @@ -1920,16 +1917,16 @@ inflate64 = ">=1.0.0,<1.1.0" multivolumefile = ">=0.2.3" psutil = {version = "*", markers = "sys_platform != \"cygwin\""} pybcj = ">=1.0.0,<1.1.0" -pycryptodomex = ">=3.16.0" -pyppmd = ">=1.1.0,<1.2.0" -pyzstd = ">=0.15.9" +pycryptodomex = ">=3.20.0" +pyppmd = ">=1.1.0,<1.3.0" +pyzstd = ">=0.16.1" texttable = "*" [package.extras] -check = ["black (>=23.1.0)", "check-manifest", "flake8 (<8)", "flake8-black (>=0.3.6)", "flake8-deprecated", "flake8-isort", "isort (>=5.0.3)", "lxml", "mypy (>=0.940)", "mypy-extensions (>=0.4.1)", "pygments", "readme-renderer", "twine", "types-psutil"] +check = ["black (>=24.8.0)", "check-manifest", "flake8 (<8)", "flake8-black (>=0.3.6)", "flake8-deprecated", "flake8-isort", "isort (>=5.13.2)", "lxml", "mypy (>=1.10.0)", "mypy_extensions (>=1.0.0)", "pygments", "pylint", "readme-renderer", "twine", "types-psutil"] debug = ["pytest", "pytest-leaks", "pytest-profiling"] -docs = ["docutils", "sphinx (>=5.0)", "sphinx-a4doc", "sphinx-py3doc-enhanced-theme"] -test = ["coverage[toml] (>=5.2)", "coveralls (>=2.1.1)", "py-cpuinfo", "pytest", "pytest-benchmark", "pytest-cov", "pytest-remotedata", "pytest-timeout"] +docs = ["docutils", "sphinx (>=7.0.0)", "sphinx-a4doc", "sphinx-py3doc-enhanced-theme"] +test = ["coverage[toml] (>=5.2)", "coveralls (>=2.1.1)", "py-cpuinfo", "pytest", "pytest-benchmark", "pytest-cov", "pytest-httpserver", "pytest-remotedata", "pytest-timeout", "requests"] test-compat = ["libarchive-c"] [[package]] @@ -2043,40 +2040,52 @@ files = [ [[package]] name = "pycryptodomex" -version = "3.22.0" +version = "3.23.0" description = "Cryptographic library for Python" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "pycryptodomex-3.22.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:41673e5cc39a8524557a0472077635d981172182c9fe39ce0b5f5c19381ffaff"}, - {file = "pycryptodomex-3.22.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:276be1ed006e8fd01bba00d9bd9b60a0151e478033e86ea1cb37447bbc057edc"}, - {file = "pycryptodomex-3.22.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:813e57da5ceb4b549bab96fa548781d9a63f49f1d68fdb148eeac846238056b7"}, - {file = "pycryptodomex-3.22.0-cp27-cp27m-win32.whl", hash = 
"sha256:d7beeacb5394765aa8dabed135389a11ee322d3ee16160d178adc7f8ee3e1f65"}, - {file = "pycryptodomex-3.22.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:b3746dedf74787da43e4a2f85bd78f5ec14d2469eb299ddce22518b3891f16ea"}, - {file = "pycryptodomex-3.22.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:5ebc09b7d8964654aaf8a4f5ac325f2b0cc038af9bea12efff0cd4a5bb19aa42"}, - {file = "pycryptodomex-3.22.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:aef4590263b9f2f6283469e998574d0bd45c14fb262241c27055b82727426157"}, - {file = "pycryptodomex-3.22.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:5ac608a6dce9418d4f300fab7ba2f7d499a96b462f2b9b5c90d8d994cd36dcad"}, - {file = "pycryptodomex-3.22.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a24f681365ec9757ccd69b85868bbd7216ba451d0f86f6ea0eed75eeb6975db"}, - {file = "pycryptodomex-3.22.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:259664c4803a1fa260d5afb322972813c5fe30ea8b43e54b03b7e3a27b30856b"}, - {file = "pycryptodomex-3.22.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7127d9de3c7ce20339e06bcd4f16f1a1a77f1471bcf04e3b704306dde101b719"}, - {file = "pycryptodomex-3.22.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ee75067b35c93cc18b38af47b7c0664998d8815174cfc66dd00ea1e244eb27e6"}, - {file = "pycryptodomex-3.22.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:1a8b0c5ba061ace4bcd03496d42702c3927003db805b8ec619ea6506080b381d"}, - {file = "pycryptodomex-3.22.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:bfe4fe3233ef3e58028a3ad8f28473653b78c6d56e088ea04fe7550c63d4d16b"}, - {file = "pycryptodomex-3.22.0-cp37-abi3-win32.whl", hash = "sha256:2cac9ed5c343bb3d0075db6e797e6112514764d08d667c74cb89b931aac9dddd"}, - {file = "pycryptodomex-3.22.0-cp37-abi3-win_amd64.whl", hash = "sha256:ff46212fda7ee86ec2f4a64016c994e8ad80f11ef748131753adb67e9b722ebd"}, - {file = "pycryptodomex-3.22.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:5bf3ce9211d2a9877b00b8e524593e2209e370a287b3d5e61a8c45f5198487e2"}, - {file = "pycryptodomex-3.22.0-pp27-pypy_73-win32.whl", hash = "sha256:684cb57812cd243217c3d1e01a720c5844b30f0b7b64bb1a49679f7e1e8a54ac"}, - {file = "pycryptodomex-3.22.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c8cffb03f5dee1026e3f892f7cffd79926a538c67c34f8b07c90c0bd5c834e27"}, - {file = "pycryptodomex-3.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:140b27caa68a36d0501b05eb247bd33afa5f854c1ee04140e38af63c750d4e39"}, - {file = "pycryptodomex-3.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:644834b1836bb8e1d304afaf794d5ae98a1d637bd6e140c9be7dd192b5374811"}, - {file = "pycryptodomex-3.22.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c506aba3318505dbeecf821ed7b9a9f86f422ed085e2d79c4fba0ae669920a"}, - {file = "pycryptodomex-3.22.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7cd39f7a110c1ab97ce9ee3459b8bc615920344dc00e56d1b709628965fba3f2"}, - {file = "pycryptodomex-3.22.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e4eaaf6163ff13788c1f8f615ad60cdc69efac6d3bf7b310b21e8cfe5f46c801"}, - {file = "pycryptodomex-3.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac39e237d65981554c2d4c6668192dc7051ad61ab5fc383ed0ba049e4007ca2"}, - {file = 
"pycryptodomex-3.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ab0d89d1761959b608952c7b347b0e76a32d1a5bb278afbaa10a7f3eaef9a0a"}, - {file = "pycryptodomex-3.22.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e64164f816f5e43fd69f8ed98eb28f98157faf68208cd19c44ed9d8e72d33e8"}, - {file = "pycryptodomex-3.22.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f005de31efad6f9acefc417296c641f13b720be7dbfec90edeaca601c0fab048"}, - {file = "pycryptodomex-3.22.0.tar.gz", hash = "sha256:a1da61bacc22f93a91cbe690e3eb2022a03ab4123690ab16c46abb693a9df63d"}, + {file = "pycryptodomex-3.23.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:add243d204e125f189819db65eed55e6b4713f70a7e9576c043178656529cec7"}, + {file = "pycryptodomex-3.23.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1c6d919fc8429e5cb228ba8c0d4d03d202a560b421c14867a65f6042990adc8e"}, + {file = "pycryptodomex-3.23.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:1c3a65ad441746b250d781910d26b7ed0a396733c6f2dbc3327bd7051ec8a541"}, + {file = "pycryptodomex-3.23.0-cp27-cp27m-win32.whl", hash = "sha256:47f6d318fe864d02d5e59a20a18834819596c4ed1d3c917801b22b92b3ffa648"}, + {file = "pycryptodomex-3.23.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:d9825410197a97685d6a1fa2a86196430b01877d64458a20e95d4fd00d739a08"}, + {file = "pycryptodomex-3.23.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:267a3038f87a8565bd834317dbf053a02055915acf353bf42ededb9edaf72010"}, + {file = "pycryptodomex-3.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:7b37e08e3871efe2187bc1fd9320cc81d87caf19816c648f24443483005ff886"}, + {file = "pycryptodomex-3.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:91979028227543010d7b2ba2471cf1d1e398b3f183cb105ac584df0c36dac28d"}, + {file = "pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8962204c47464d5c1c4038abeadd4514a133b28748bcd9fa5b6d62e3cec6fa"}, + {file = "pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a33986a0066860f7fcf7c7bd2bc804fa90e434183645595ae7b33d01f3c91ed8"}, + {file = "pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7947ab8d589e3178da3d7cdeabe14f841b391e17046954f2fbcd941705762b5"}, + {file = "pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c25e30a20e1b426e1f0fa00131c516f16e474204eee1139d1603e132acffc314"}, + {file = "pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:da4fa650cef02db88c2b98acc5434461e027dce0ae8c22dd5a69013eaf510006"}, + {file = "pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58b851b9effd0d072d4ca2e4542bf2a4abcf13c82a29fd2c93ce27ee2a2e9462"}, + {file = "pycryptodomex-3.23.0-cp313-cp313t-win32.whl", hash = "sha256:a9d446e844f08299236780f2efa9898c818fe7e02f17263866b8550c7d5fb328"}, + {file = "pycryptodomex-3.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bc65bdd9fc8de7a35a74cab1c898cab391a4add33a8fe740bda00f5976ca4708"}, + {file = "pycryptodomex-3.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c885da45e70139464f082018ac527fdaad26f1657a99ee13eecdce0f0ca24ab4"}, + {file = "pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:06698f957fe1ab229a99ba2defeeae1c09af185baa909a31a5d1f9d42b1aaed6"}, + {file = "pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:b2c2537863eccef2d41061e82a881dcabb04944c5c06c5aa7110b577cc487545"}, + {file = "pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43c446e2ba8df8889e0e16f02211c25b4934898384c1ec1ec04d7889c0333587"}, + {file = "pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f489c4765093fb60e2edafdf223397bc716491b2b69fe74367b70d6999257a5c"}, + {file = "pycryptodomex-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdc69d0d3d989a1029df0eed67cc5e8e5d968f3724f4519bd03e0ec68df7543c"}, + {file = "pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bbcb1dd0f646484939e142462d9e532482bc74475cecf9c4903d4e1cd21f003"}, + {file = "pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:8a4fcd42ccb04c31268d1efeecfccfd1249612b4de6374205376b8f280321744"}, + {file = "pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:55ccbe27f049743a4caf4f4221b166560d3438d0b1e5ab929e07ae1702a4d6fd"}, + {file = "pycryptodomex-3.23.0-cp37-abi3-win32.whl", hash = "sha256:189afbc87f0b9f158386bf051f720e20fa6145975f1e76369303d0f31d1a8d7c"}, + {file = "pycryptodomex-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:52e5ca58c3a0b0bd5e100a9fbc8015059b05cffc6c66ce9d98b4b45e023443b9"}, + {file = "pycryptodomex-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:02d87b80778c171445d67e23d1caef279bf4b25c3597050ccd2e13970b57fd51"}, + {file = "pycryptodomex-3.23.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:febec69c0291efd056c65691b6d9a339f8b4bc43c6635b8699471248fe897fea"}, + {file = "pycryptodomex-3.23.0-pp27-pypy_73-win32.whl", hash = "sha256:c84b239a1f4ec62e9c789aafe0543f0594f0acd90c8d9e15bcece3efe55eca66"}, + {file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ebfff755c360d674306e5891c564a274a47953562b42fb74a5c25b8fc1fb1cb5"}, + {file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eca54f4bb349d45afc17e3011ed4264ef1cc9e266699874cdd1349c504e64798"}, + {file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2596e643d4365e14d0879dc5aafe6355616c61c2176009270f3048f6d9a61f"}, + {file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fdfac7cda115bca3a5abb2f9e43bc2fb66c2b65ab074913643803ca7083a79ea"}, + {file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:14c37aaece158d0ace436f76a7bb19093db3b4deade9797abfc39ec6cd6cc2fe"}, + {file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7de1e40a41a5d7f1ac42b6569b10bcdded34339950945948529067d8426d2785"}, + {file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bffc92138d75664b6d543984db7893a628559b9e78658563b0395e2a5fb47ed9"}, + {file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df027262368334552db2c0ce39706b3fb32022d1dce34673d0f9422df004b96a"}, + {file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e79f1aaff5a3a374e92eb462fa9e598585452135012e2945f96874ca6eeb1ff"}, + {file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:27e13c80ac9a0a1d050ef0a7e0a18cc04c8850101ec891815b6c5a0375e8a245"}, + {file = "pycryptodomex-3.23.0.tar.gz", hash = "sha256:71909758f010c82bc99b0abf4ea12012c98962fbf0583c2164f8b84533c2e4da"}, ] [[package]] @@ -2108,13 +2117,13 @@ test = ["pytest", "pytest-cov", "pytest-regressions", "sphinx[test]"] [[package]] name = "pygments" -version = "2.19.1" +version = "2.19.2" description = "Pygments is a syntax highlighting package written in Python." -optional = true +optional = false python-versions = ">=3.8" files = [ - {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, - {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, ] [package.extras] @@ -2152,66 +2161,68 @@ tests = ["flaky", "pytest", "pytest-cov", "pytest-random-order", "pyyaml"] [[package]] name = "pyppmd" -version = "1.1.1" +version = "1.2.0" description = "PPMd compression/decompression library" optional = false python-versions = ">=3.9" files = [ - {file = "pyppmd-1.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:406b184132c69e3f60ea9621b69eaa0c5494e83f82c307b3acce7b86a4f8f888"}, - {file = "pyppmd-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2cf003bb184adf306e1ac1828107307927737dde63474715ba16462e266cbef"}, - {file = "pyppmd-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71c8fd0ecc8d4760e852dd6df19d1a827427cb9e6c9e568cbf5edba7d860c514"}, - {file = "pyppmd-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6b5edee08b66ad6c39fd4d34a7ef4cfeb4b69fd6d68957e59cd2db674611a9e"}, - {file = "pyppmd-1.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e95bd23eb1543ab3149f24fe02f6dd2695023326027a4b989fb2c6dba256e75e"}, - {file = "pyppmd-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e633ee4cc19d0c71b3898092c3c4cc20a10bd5e6197229fffac29d68ad5d83b8"}, - {file = "pyppmd-1.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ecaafe2807ef557f0c49b8476a4fa04091b43866072fbcf31b3ceb01a96c9168"}, - {file = "pyppmd-1.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c182fccff60ae8f24f28f5145c36a60708b5b041a25d36b67f23c44923552fa4"}, - {file = "pyppmd-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:70c93d19efe67cdac3e7fa2d4e171650a2c4f90127a9781b25e496a43f12fbbc"}, - {file = "pyppmd-1.1.1-cp310-cp310-win32.whl", hash = "sha256:57c75856920a210ed72b553885af7bc06eddfd30ff26b62a3a63cb8f86f3d217"}, - {file = "pyppmd-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:d5293f10dc8c1d571b780e0d54426d3d858c19bbd8cb0fe972dcea3906acd05c"}, - {file = "pyppmd-1.1.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:753c5297c91c059443caef33bccbffb10764221739d218046981638aeb9bc5f2"}, - {file = "pyppmd-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b5a73da09de480a94793c9064876af14a01be117de872737935ac447b7cde3c"}, - {file = "pyppmd-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89c6febb7114dea02a061143d78d04751a945dfcadff77560e9a3d3c7583c24b"}, - {file = "pyppmd-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0001e467c35e35e6076a8c32ed9074aa45833615ee16115de9282d5c0985a1d8"}, - {file = "pyppmd-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c76820db25596afc859336ba06c01c9be0ff326480beec9c699fd378a546a77f"}, - {file = "pyppmd-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b67f0a228f8c58750a21ba667c170ae957283e08fd580857f13cb686334e5b3e"}, - {file = "pyppmd-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b18f24c14f0b0f1757a42c458ae7b6fd7aa0bce8147ac1016a9c134068c1ccc2"}, - {file = "pyppmd-1.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c9e43729161cc3b6ad5b04b16bae7665d3c0cc803de047d8a979aa9232a4f94a"}, - {file = "pyppmd-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fe057d254528b4eeebe2800baefde47d6af679bae184d3793c13a06f794df442"}, - {file = "pyppmd-1.1.1-cp311-cp311-win32.whl", hash = "sha256:faa51240493a5c53c9b544c99722f70303eea702742bf90f3c3064144342da4a"}, - {file = "pyppmd-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:62486f544d6957e1381147e3961eee647b7f4421795be4fb4f1e29d52aee6cb5"}, - {file = "pyppmd-1.1.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9877ef273e2c0efdec740855e28004a708ada9012e0db6673df4bb6eba3b05e0"}, - {file = "pyppmd-1.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f816a5cbccceced80e15335389eeeaf1b56a605fb7eebe135b1c85bd161e288c"}, - {file = "pyppmd-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6bddabf8f2c6b991d15d6785e603d9d414ae4a791f131b1a729bb8a5d31133d1"}, - {file = "pyppmd-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:855bc2b0d19c3fead5815d72dbe350b4f765334336cbf8bcb504d46edc9e9dd2"}, - {file = "pyppmd-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a95b11b3717c083b912f0879678ba72f301bbdb9b69efed46dbc5df682aa3ce7"}, - {file = "pyppmd-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38b645347b6ea217b0c58e8edac27473802868f152db520344ac8c7490981849"}, - {file = "pyppmd-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f8f94b6222262def5b532f2b9716554ef249ad8411fd4da303596cc8c2e8eda1"}, - {file = "pyppmd-1.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1c0306f69ceddf385ef689ebd0218325b7e523c48333d87157b37393466cfa1e"}, - {file = "pyppmd-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4ba510457a56535522a660098399e3fa8722e4de55808d089c9d13435d87069"}, - {file = "pyppmd-1.1.1-cp312-cp312-win32.whl", hash = "sha256:032f040a89fd8348109e8638f94311bd4c3c693fb4cad213ad06a37c203690b1"}, - {file = "pyppmd-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:2be8cbd13dd59fad1a0ad38062809e28596f3673b77a799dfe82b287986265ed"}, - {file = "pyppmd-1.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9458f972f090f3846fc5bea0a6f7363da773d3c4b2d4654f1d4ca3c11f6ecbfa"}, - {file = "pyppmd-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:44811a9d958873d857ca81cebf7ba646a0952f8a7bbf8a60cf6ec5d002faa040"}, - {file = "pyppmd-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a1b12460958885ca44e433986644009d0599b87a444f668ce3724a46ce588924"}, - {file = "pyppmd-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:200c74f05b97b00f047cf60607914a0b50f80991f1fb3677f624a85aa79d9458"}, - {file = "pyppmd-1.1.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ebe0d98a341b32f164e860059243e125398865cc0363b32ffc31f953460fe87"}, - {file = 
"pyppmd-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf93e1e047a82f1e7e194fcf49da166d2b9d8dc98d7c0b5cd844dc4360d9c1f5"}, - {file = "pyppmd-1.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f5b0b8c746bde378ae3b4df42a11fd8599ba3e5808dfea36e16d722b74bd0506"}, - {file = "pyppmd-1.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bcdd5207b6c79887f25639632ca2623a399d8c54f567973e9ba474b5ebae2b1c"}, - {file = "pyppmd-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7bfcca94e5452b6d54ac24a11c2402f6a193c331e5dc221c1f1df71773624374"}, - {file = "pyppmd-1.1.1-cp39-cp39-win32.whl", hash = "sha256:18e99c074664f996f511bc6e87aab46bc4c75f5bd0157d3210292919be35e22c"}, - {file = "pyppmd-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:b29788d5a0f8f39ea46a1255cd886daddf9c64ba9d4cb64677bc93bd3859ac0e"}, - {file = "pyppmd-1.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:28648ef56793bf1ed0ff24728642f56fa39cb96ea161dec6ee2d26f97c0cdd28"}, - {file = "pyppmd-1.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:427d6f9b9c011e032db9529b2a15773f2e2944ca490b67d5757f4af33bbda406"}, - {file = "pyppmd-1.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34c7a07197a03656c1920fd88e05049c155a955c4de4b8b8a8e5fec19a97b45b"}, - {file = "pyppmd-1.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1fea2eee28beca61165c4714dcd032de76af318553791107d308b4b08575ecc"}, - {file = "pyppmd-1.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:04391e4f82c8c2c316ba60e480300ad1af37ec12bdb5c20f06b502030ff35975"}, - {file = "pyppmd-1.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:cf08a354864c352a94e6e53733009baeab1e7c570010c4f5be226923ecfa09d1"}, - {file = "pyppmd-1.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:334e5fe5d75764b87c591a16d2b2df6f9939e2ad114dacf98bb4b0e7c90911e9"}, - {file = "pyppmd-1.1.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15d5928b25f04f5431585d17c835cd509a34e1c9f1416653db8d2815e97d4e20"}, - {file = "pyppmd-1.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af06329796a4965788910ac40f1b012d2e173ede08456ceea0ec7fc4d2e69d62"}, - {file = "pyppmd-1.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4ccdd3751e432e71e02de96f16fc8824e4f4bfc47a8b470f0c7aae88dae4c666"}, - {file = "pyppmd-1.1.1.tar.gz", hash = "sha256:f1a812f1e7628f4c26d05de340b91b72165d7b62778c27d322b82ce2e8ff00cb"}, + {file = "pyppmd-1.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4a25d8b2a71e0cc6f34475c36450e905586b13d0c88fb28471655c215f370908"}, + {file = "pyppmd-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc9dd8a6261152591a352d91e5e52c16b81fa760f64c361a7afb24a1f3b5e048"}, + {file = "pyppmd-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2cd2694f43720fa1304c1fa31b8a1e7d80162f045e91569fb93473277c2747b8"}, + {file = "pyppmd-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0354919ab0f4065d76c64ad0dc21f14116651a2124cf4915b96c4e76d9caf470"}, + {file = "pyppmd-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:416c15576924ff9d2852fbe53d162c946e0466ce79d8a03a058e6f09a54934f0"}, + {file = "pyppmd-1.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dcdd5bf53f562af2a9be76739be69c9de080dfa59a4c4a8bcc4a163f9c5cb53e"}, + {file = 
"pyppmd-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c67196af6cfcc68e72a8fffbc332d743327bb9323cb7f3709e27185e743c7272"}, + {file = "pyppmd-1.2.0-cp310-cp310-win32.whl", hash = "sha256:d529c78382a2315db22c93e1c831231ee3fd2ad5a352f61496d72474558c6b00"}, + {file = "pyppmd-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:1f19285ae4dd20bb917c4fd177f0911847feb3abada91cec5fd5d9d5f1b9f3e0"}, + {file = "pyppmd-1.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:30068ed6da301f6ba25219f96d828f3c3a80ca227647571d21c7704301e095e6"}, + {file = "pyppmd-1.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1a5f0b78d68620ffb51c46c34c9e0ec02c40bb68e903e6c3ce02870c528164af"}, + {file = "pyppmd-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5f1ee49b88fd2e58a144b1ae0da9c2fe0dabc1962531da9475badbed6fba61fc"}, + {file = "pyppmd-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c98697fea3f3baf5ffc759fd41c766d708ff3fba7379776031077527873ce4ac"}, + {file = "pyppmd-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a3087d7ee6fc35db0bfecabd1df4615f2a9d58a56af61f5fc18b9ce2b379cbf"}, + {file = "pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69fe10feb24a92e673b68aca5d945564232d09e25a4e185899e0c657096ae695"}, + {file = "pyppmd-1.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:aa40c982d1df515cd4cb366d3e1ae95ce22f3c20e6b8b2d31aa492679f7ad78c"}, + {file = "pyppmd-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a5c03dd85da64a237c601dd690d8eb95951b7c2eef91b89e110eb208010c6035"}, + {file = "pyppmd-1.2.0-cp311-cp311-win32.whl", hash = "sha256:c577f3dadd514979255e9df6eb89a63409d0e91855bb8c0969ffcd67d5d4f124"}, + {file = "pyppmd-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:f29dfb7aaf4b49ebc09d726fcdeabbce1cb21e9cf3a055244bb1384b8b51dd3b"}, + {file = "pyppmd-1.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:bf26c2def22322135fbaaa3de3c0963062c1835bd43d595478e3a2a674466a1a"}, + {file = "pyppmd-1.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d28cc9febcf37f2ff08b9e25d472de529e8973119c0a3279603b1915c809dd45"}, + {file = "pyppmd-1.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0f07d5376e1f699d09fbb9139562e5b72a347100aecaa73b688fa08461b3c118"}, + {file = "pyppmd-1.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:874f52eae03647b653aa34476f4e23c4c30458245c0eb7aa7fb290940abbd5b9"}, + {file = "pyppmd-1.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abafffb3d5b292924eafd8214ad80487400cf358c4e9dc2ac6c21d2c651c5ee2"}, + {file = "pyppmd-1.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e955de43991346d4ccb28a74fb4c80cadecf72a6724705301fe1adb569689fe"}, + {file = "pyppmd-1.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14ed0846c3bcee506555cd943db950d5787a6ffa1089e05deced010759ef1fe5"}, + {file = "pyppmd-1.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3caef2fb93a63d696b21e5ff72cb2955687b5dfcbed1938936334f9f7737fcd3"}, + {file = "pyppmd-1.2.0-cp312-cp312-win32.whl", hash = "sha256:011c813389476e84387599ad4aa834100e888c6608a6e7d6f07ea7cbac8a8e65"}, + {file = "pyppmd-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:42c7c9050b44b713676d255f0c212b8ff5c0463821053960c89292cf6b6221cc"}, + {file = "pyppmd-1.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:5768bff11936047613bcb91ee466f21779efc24360bd7953bd338b32da88577a"}, + {file = "pyppmd-1.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:4aa8ffca1727872923d2673045975bca571eb810cf14a21d048648da5237369b"}, + {file = "pyppmd-1.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6dc00f0ce9f79e1c1c87d9998220a714ab8216873b6c996776b88ab23efe05ac"}, + {file = "pyppmd-1.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d437881763ffd0d19079402f50e7f4aad5895e3cd5312d095edef0b32dac3aef"}, + {file = "pyppmd-1.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c763f2e3a011d5e96dfa0195f38accce9a14d489725a3d3641a74addbb5b72"}, + {file = "pyppmd-1.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38e3835a1951d18dd273000c870a4eb2804c20c400aa3c72231449f300cedf19"}, + {file = "pyppmd-1.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c76b8881fc087e70338b1cccd452bd12566206587a0d0d8266ba2833be902194"}, + {file = "pyppmd-1.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8b43e299310e27af5a4bc505bcc87d10cfc38ae28e5ed1f6a779d811705e5ad6"}, + {file = "pyppmd-1.2.0-cp313-cp313-win32.whl", hash = "sha256:4b3249256f8a7ecdd36477f277b232a46ee2c8ca280b23faaeacb7f50cab949a"}, + {file = "pyppmd-1.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:625896f5da7038fe7145907b68b0b58f7c13b88ad6bbfdc1c20c05654c17fa6c"}, + {file = "pyppmd-1.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:bec8abbf1edb0300c0a2e4f1bbad6a96154de3e507a2b054a0b1187f1c2e4982"}, + {file = "pyppmd-1.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b5c3284be4dccebb87d81c14b148c81e035356cd01a29889736c75672f6187d"}, + {file = "pyppmd-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:40bfa26fdb3332a6a8d90fe1f6e0d9f489505a014911b470d66f2f79caea6d61"}, + {file = "pyppmd-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75b173bbc9164cdc6fb257d3480269cc26b1eb102ad72281a98cf90e0f7dc860"}, + {file = "pyppmd-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91534eb8c9c0bff9d6c6ec5eb5119a583d31bb9f8cf208d5a925b4e2293c9a7b"}, + {file = "pyppmd-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edc4fcd928bf6219bcddb8230a5830e33a35b684b16ca3e8d1357b17029a9ef7"}, + {file = "pyppmd-1.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5ff515c2c3544096fe524f341c244787d6449b36692d27131bf74d5075e5c83b"}, + {file = "pyppmd-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:af9be87228cba6b543531260f44675a23b4a1527158a44162dce186157cb13d9"}, + {file = "pyppmd-1.2.0-cp39-cp39-win32.whl", hash = "sha256:3674b5eba0e312b9af987ec7e6af59248f54db9a7f5ca63add5365d6c6639e9e"}, + {file = "pyppmd-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:cff27496fd164b587f150abba9524cae81629adbd2e9472f09e7b2b24b2d4939"}, + {file = "pyppmd-1.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:c9d0f5a903045ee6b399f5fb308e192e39f8f1f551b61441a595676d95dc76ad"}, + {file = "pyppmd-1.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86e252979fc5ae2492ebb46ed0eed0625a46a2cce519c4616b870eab58d77fb7"}, + {file = "pyppmd-1.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9095d8b098ce8cb5c1e404843a16e5167fb5bdebb4d6aed259d43dd2d73cfca3"}, + {file = "pyppmd-1.2.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:064307c7fec7bdf3da63f5e28c0f1c5cb5c9bf888c1b268c6df3c131391ab345"}, + {file = "pyppmd-1.2.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c012c17a53b6d9744e0514b17b0c4169c5f21fb54b4db7a0119bc2d7b3fcc609"}, + {file 
= "pyppmd-1.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0877758ffa73b2e9d2f93b698e17336a4d8acab8d9a3d17cd7960aec08347387"}, + {file = "pyppmd-1.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac0960d2d0a1738af3ca3f27c6ed6eead38518d77875a47b2b4aae90ae933f4"}, + {file = "pyppmd-1.2.0.tar.gz", hash = "sha256:cc04af92f1d26831ec96963439dfb27c96467b5452b94436a6af696649a121fd"}, ] [package.extras] @@ -2222,40 +2233,42 @@ test = ["coverage[toml] (>=5.2)", "hypothesis", "pytest (>=6.0)", "pytest-benchm [[package]] name = "pytest" -version = "8.3.5" +version = "8.4.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, - {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, + {file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"}, + {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"}, ] [package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1", markers = "python_version < \"3.11\""} +iniconfig = ">=1" +packaging = ">=20" pluggy = ">=1.5,<2" +pygments = ">=2.7.2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" -version = "6.1.1" +version = "6.2.1" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.9" files = [ - {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"}, - {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"}, + {file = "pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5"}, + {file = "pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2"}, ] [package.dependencies] coverage = {version = ">=7.5", extras = ["toml"]} -pytest = ">=4.6" +pluggy = ">=1.2" +pytest = ">=6.2.5" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] @@ -2533,18 +2546,18 @@ cffi = "*" [[package]] name = "requests" -version = "2.32.3" +version = "2.32.4" description = "Python HTTP for Humans." 
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
-    {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
+    {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"},
+    {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"},
 ]
 
 [package.dependencies]
 certifi = ">=2017.4.17"
-charset-normalizer = ">=2,<4"
+charset_normalizer = ">=2,<4"
 idna = ">=2.5,<4"
 urllib3 = ">=1.21.1,<3"
 
@@ -2628,29 +2641,29 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy
 
 [[package]]
 name = "ruff"
-version = "0.11.9"
+version = "0.12.3"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "ruff-0.11.9-py3-none-linux_armv6l.whl", hash = "sha256:a31a1d143a5e6f499d1fb480f8e1e780b4dfdd580f86e05e87b835d22c5c6f8c"},
-    {file = "ruff-0.11.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:66bc18ca783b97186a1f3100e91e492615767ae0a3be584e1266aa9051990722"},
-    {file = "ruff-0.11.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:bd576cd06962825de8aece49f28707662ada6a1ff2db848d1348e12c580acbf1"},
-    {file = "ruff-0.11.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b1d18b4be8182cc6fddf859ce432cc9631556e9f371ada52f3eaefc10d878de"},
-    {file = "ruff-0.11.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0f3f46f759ac623e94824b1e5a687a0df5cd7f5b00718ff9c24f0a894a683be7"},
-    {file = "ruff-0.11.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f34847eea11932d97b521450cf3e1d17863cfa5a94f21a056b93fb86f3f3dba2"},
-    {file = "ruff-0.11.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f33b15e00435773df97cddcd263578aa83af996b913721d86f47f4e0ee0ff271"},
-    {file = "ruff-0.11.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b27613a683b086f2aca8996f63cb3dd7bc49e6eccf590563221f7b43ded3f65"},
-    {file = "ruff-0.11.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e0d88756e63e8302e630cee3ce2ffb77859797cc84a830a24473939e6da3ca6"},
-    {file = "ruff-0.11.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:537c82c9829d7811e3aa680205f94c81a2958a122ac391c0eb60336ace741a70"},
-    {file = "ruff-0.11.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:440ac6a7029f3dee7d46ab7de6f54b19e34c2b090bb4f2480d0a2d635228f381"},
-    {file = "ruff-0.11.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:71c539bac63d0788a30227ed4d43b81353c89437d355fdc52e0cda4ce5651787"},
-    {file = "ruff-0.11.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c67117bc82457e4501473c5f5217d49d9222a360794bfb63968e09e70f340abd"},
-    {file = "ruff-0.11.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e4b78454f97aa454586e8a5557facb40d683e74246c97372af3c2d76901d697b"},
-    {file = "ruff-0.11.9-py3-none-win32.whl", hash = "sha256:7fe1bc950e7d7b42caaee2a8a3bc27410547cc032c9558ee2e0f6d3b209e845a"},
-    {file = "ruff-0.11.9-py3-none-win_amd64.whl", hash = "sha256:52edaa4a6d70f8180343a5b7f030c7edd36ad180c9f4d224959c2d689962d964"},
-    {file = "ruff-0.11.9-py3-none-win_arm64.whl", hash = "sha256:bcf42689c22f2e240f496d0c183ef2c6f7b35e809f12c1db58f75d9aa8d630ca"},
-    {file = "ruff-0.11.9.tar.gz", hash = "sha256:ebd58d4f67a00afb3a30bf7d383e52d0e036e6195143c6db7019604a05335517"},
+    {file = "ruff-0.12.3-py3-none-linux_armv6l.whl", hash = "sha256:47552138f7206454eaf0c4fe827e546e9ddac62c2a3d2585ca54d29a890137a2"},
+    {file = "ruff-0.12.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:0a9153b000c6fe169bb307f5bd1b691221c4286c133407b8827c406a55282041"},
+    {file = "ruff-0.12.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fa6b24600cf3b750e48ddb6057e901dd5b9aa426e316addb2a1af185a7509882"},
+    {file = "ruff-0.12.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2506961bf6ead54887ba3562604d69cb430f59b42133d36976421bc8bd45901"},
+    {file = "ruff-0.12.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4faaff1f90cea9d3033cbbcdf1acf5d7fb11d8180758feb31337391691f3df0"},
+    {file = "ruff-0.12.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40dced4a79d7c264389de1c59467d5d5cefd79e7e06d1dfa2c75497b5269a5a6"},
+    {file = "ruff-0.12.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0262d50ba2767ed0fe212aa7e62112a1dcbfd46b858c5bf7bbd11f326998bafc"},
+    {file = "ruff-0.12.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12371aec33e1a3758597c5c631bae9a5286f3c963bdfb4d17acdd2d395406687"},
+    {file = "ruff-0.12.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:560f13b6baa49785665276c963edc363f8ad4b4fc910a883e2625bdb14a83a9e"},
+    {file = "ruff-0.12.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023040a3499f6f974ae9091bcdd0385dd9e9eb4942f231c23c57708147b06311"},
+    {file = "ruff-0.12.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:883d844967bffff5ab28bba1a4d246c1a1b2933f48cb9840f3fdc5111c603b07"},
+    {file = "ruff-0.12.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2120d3aa855ff385e0e562fdee14d564c9675edbe41625c87eeab744a7830d12"},
+    {file = "ruff-0.12.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6b16647cbb470eaf4750d27dddc6ebf7758b918887b56d39e9c22cce2049082b"},
+    {file = "ruff-0.12.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e1417051edb436230023575b149e8ff843a324557fe0a265863b7602df86722f"},
+    {file = "ruff-0.12.3-py3-none-win32.whl", hash = "sha256:dfd45e6e926deb6409d0616078a666ebce93e55e07f0fb0228d4b2608b2c248d"},
+    {file = "ruff-0.12.3-py3-none-win_amd64.whl", hash = "sha256:a946cf1e7ba3209bdef039eb97647f1c77f6f540e5845ec9c114d3af8df873e7"},
+    {file = "ruff-0.12.3-py3-none-win_arm64.whl", hash = "sha256:5f9c7c9c8f84c2d7f27e93674d27136fbf489720251544c4da7fb3d742e011b1"},
+    {file = "ruff-0.12.3.tar.gz", hash = "sha256:f1b5a4b6668fd7b7ea3697d8d98857390b40c1320a63a178eee6be0899ea2d77"},
 ]
 
 [[package]]
@@ -3068,13 +3081,13 @@ files = [
 
 [[package]]
 name = "types-beautifulsoup4"
-version = "4.12.0.20250204"
+version = "4.12.0.20250516"
 description = "Typing stubs for beautifulsoup4"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "types_beautifulsoup4-4.12.0.20250204-py3-none-any.whl", hash = "sha256:57ce9e75717b63c390fd789c787d267a67eb01fa6d800a03b9bdde2e877ed1eb"},
-    {file = "types_beautifulsoup4-4.12.0.20250204.tar.gz", hash = "sha256:f083d8edcbd01279f8c3995b56cfff2d01f1bb894c3b502ba118d36fbbc495bf"},
+    {file = "types_beautifulsoup4-4.12.0.20250516-py3-none-any.whl", hash = "sha256:5923399d4a1ba9cc8f0096fe334cc732e130269541d66261bb42ab039c0376ee"},
+    {file = "types_beautifulsoup4-4.12.0.20250516.tar.gz", hash = "sha256:aa19dd73b33b70d6296adf92da8ab8a0c945c507e6fb7d5db553415cc77b417e"},
 ]
 
 [package.dependencies]
@@ -3082,13 +3095,13 @@ types-html5lib = "*"
 
 [[package]]
 name = "types-flask-cors"
-version = "5.0.0.20250413"
+version = "6.0.0.20250520"
 description = "Typing stubs for Flask-Cors"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "types_flask_cors-5.0.0.20250413-py3-none-any.whl", hash = "sha256:8183fdba764d45a5b40214468a1d5daa0e86c4ee6042d13f38cc428308f27a64"},
-    {file = "types_flask_cors-5.0.0.20250413.tar.gz", hash = "sha256:b346d052f4ef3b606b73faf13e868e458f1efdbfedcbe1aba739eb2f54a6cf5f"},
+    {file = "types_flask_cors-6.0.0.20250520-py3-none-any.whl", hash = "sha256:8898ed43a6b68d0b3b499e1d2f7aa696a99a001610de44e09fc6f404d16eb704"},
+    {file = "types_flask_cors-6.0.0.20250520.tar.gz", hash = "sha256:9357c21be733f65e568ff27e816426832f3e3fd906eedbb896bcc6b1cfa026e6"},
 ]
 
 [package.dependencies]
@@ -3096,24 +3109,24 @@ Flask = ">=2.0.0"
 
 [[package]]
 name = "types-html5lib"
-version = "1.1.11.20241018"
+version = "1.1.11.20250708"
 description = "Typing stubs for html5lib"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 files = [
-    {file = "types-html5lib-1.1.11.20241018.tar.gz", hash = "sha256:98042555ff78d9e3a51c77c918b1041acbb7eb6c405408d8a9e150ff5beccafa"},
-    {file = "types_html5lib-1.1.11.20241018-py3-none-any.whl", hash = "sha256:3f1e064d9ed2c289001ae6392c84c93833abb0816165c6ff0abfc304a779f403"},
+    {file = "types_html5lib-1.1.11.20250708-py3-none-any.whl", hash = "sha256:bb898066b155de7081cb182179e2ded31b9e0e234605e2cb46536894e68a6954"},
+    {file = "types_html5lib-1.1.11.20250708.tar.gz", hash = "sha256:24321720fdbac71cee50d5a4bec9b7448495b7217974cffe3fcf1ede4eef7afe"},
 ]
 
 [[package]]
 name = "types-mock"
-version = "5.2.0.20250306"
+version = "5.2.0.20250516"
 description = "Typing stubs for mock"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "types_mock-5.2.0.20250306-py3-none-any.whl", hash = "sha256:eb69fec98b8de26be1d7121623d05a2f117d1ea2e01dd30c123d07d204a15c95"},
-    {file = "types_mock-5.2.0.20250306.tar.gz", hash = "sha256:15882cb5cf9980587a7607e31890801223801d7997f559686805ce09b6536087"},
+    {file = "types_mock-5.2.0.20250516-py3-none-any.whl", hash = "sha256:e50fbd0c3be8bcea25c30a47fac0b7a6ca22f630ef2f53416a73b319b39dfde1"},
+    {file = "types_mock-5.2.0.20250516.tar.gz", hash = "sha256:aab7d3d9ad3814f2f8da12cc8e42d9be7d38200c5f214e3c0278c38fa01299d7"},
 ]
 
 [[package]]
@@ -3129,24 +3142,24 @@ files = [
 
 [[package]]
 name = "types-pyyaml"
-version = "6.0.12.20250402"
+version = "6.0.12.20250516"
 description = "Typing stubs for PyYAML"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "types_pyyaml-6.0.12.20250402-py3-none-any.whl", hash = "sha256:652348fa9e7a203d4b0d21066dfb00760d3cbd5a15ebb7cf8d33c88a49546681"},
-    {file = "types_pyyaml-6.0.12.20250402.tar.gz", hash = "sha256:d7c13c3e6d335b6af4b0122a01ff1d270aba84ab96d1a1a1063ecba3e13ec075"},
+    {file = "types_pyyaml-6.0.12.20250516-py3-none-any.whl", hash = "sha256:8478208feaeb53a34cb5d970c56a7cd76b72659442e733e268a94dc72b2d0530"},
+    {file = "types_pyyaml-6.0.12.20250516.tar.gz", hash = "sha256:9f21a70216fc0fa1b216a8176db5f9e0af6eb35d2f2932acb87689d03a5bf6ba"},
 ]
 
 [[package]]
 name = "types-requests"
-version = "2.32.0.20250328"
+version = "2.32.4.20250611"
 description = "Typing stubs for requests"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "types_requests-2.32.0.20250328-py3-none-any.whl", hash = "sha256:72ff80f84b15eb3aa7a8e2625fffb6a93f2ad5a0c20215fc1dcfa61117bcb2a2"},
-    {file = "types_requests-2.32.0.20250328.tar.gz", hash = "sha256:c9e67228ea103bd811c96984fac36ed2ae8da87a36a633964a21f199d60baf32"},
+    {file = "types_requests-2.32.4.20250611-py3-none-any.whl", hash = "sha256:ad2fe5d3b0cb3c2c902c8815a70e7fb2302c4b8c1f77bdcd738192cdb3878072"},
+    {file = "types_requests-2.32.4.20250611.tar.gz", hash = "sha256:741c8777ed6425830bf51e54d6abe245f79b4dcb9019f1622b773463946bf826"},
 ]
 
 [package.dependencies]
@@ -3165,13 +3178,13 @@ files = [
 
 [[package]]
 name = "typing-extensions"
-version = "4.13.2"
-description = "Backported and Experimental Type Hints for Python 3.8+"
+version = "4.14.1"
+description = "Backported and Experimental Type Hints for Python 3.9+"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 files = [
-    {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"},
-    {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"},
+    {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"},
+    {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"},
 ]
 
 [[package]]
@@ -3187,13 +3200,13 @@ files = [
 
 [[package]]
 name = "urllib3"
-version = "2.4.0"
+version = "2.5.0"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"},
-    {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"},
+    {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"},
+    {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"},
 ]
 
 [package.extras]
@@ -3232,13 +3245,13 @@ files = [
 
 [[package]]
 name = "zipp"
-version = "3.21.0"
+version = "3.23.0"
 description = "Backport of pathlib-compatible object wrapper for zip files"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"},
-    {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"},
+    {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"},
+    {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"},
 ]
 
 [package.extras]
@@ -3246,7 +3259,7 @@ check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
 cover = ["pytest-cov"]
 doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
 enabler = ["pytest-enabler (>=2.2)"]
-test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
+test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
 type = ["pytest-mypy"]
 
 [extras]

From 62bbd187a99b820a913d8b4e9a07a29780cd0bae Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=A0ar=C5=ABnas=20Nejus?=
Date: Tue, 15 Jul 2025 13:48:04 +0100
Subject: [PATCH 95/95] Bump tj-actions/changed-files

---
 .github/workflows/changelog_reminder.yaml | 12 ++++++------
 .github/workflows/ci.yaml                 |  2 +-
 .github/workflows/lint.yml                |  4 ++--
 3 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/.github/workflows/changelog_reminder.yaml b/.github/workflows/changelog_reminder.yaml
index da0f670a0..a9c26c1f5 100644
--- a/.github/workflows/changelog_reminder.yaml
+++ b/.github/workflows/changelog_reminder.yaml
@@ -1,6 +1,6 @@
 name: Verify changelog updated
 
-on: 
+on:
   pull_request_target:
     types:
       - opened
@@ -14,20 +14,20 @@ jobs:
 
       - name: Get all updated Python files
         id: changed-python-files
-        uses: tj-actions/changed-files@v44
+        uses: tj-actions/changed-files@v46
         with:
           files: |
            **.py
 
      - name: Check for the changelog update
        id: changelog-update
-        uses: tj-actions/changed-files@v44
+        uses: tj-actions/changed-files@v46
        with:
          files: docs/changelog.rst
-
+
      - name: Comment under the PR with a reminder
        if: steps.changed-python-files.outputs.any_changed == 'true' && steps.changelog-update.outputs.any_changed == 'false'
        uses: thollander/actions-comment-pull-request@v2
        with:
-          message: 'Thank you for the PR! The changelog has not been updated, so here is a friendly reminder to check if you need to add an entry.'
-          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
+          message: 'Thank you for the PR! The changelog has not been updated, so here is a friendly reminder to check if you need to add an entry.'
+          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index baeb52f18..2c429a897 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -37,7 +37,7 @@ jobs:
 
       - name: Get changed lyrics files
         id: lyrics-update
-        uses: tj-actions/changed-files@v45
+        uses: tj-actions/changed-files@v46
         with:
           files: |
            beetsplug/lyrics.py
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 7900d247d..696a4f826 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -22,13 +22,13 @@ jobs:
      - uses: actions/checkout@v4
      - name: Get changed docs files
        id: changed-doc-files
-        uses: tj-actions/changed-files@v44
+        uses: tj-actions/changed-files@v46
        with:
          files: |
            docs/**
      - name: Get changed python files
        id: raw-changed-python-files
-        uses: tj-actions/changed-files@v44
+        uses: tj-actions/changed-files@v46
        with:
          files: |
            **.py