diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..12d5ffd94 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,45 @@ +# 2014 +# flake8-cleanliness in missing +e21c04e9125a28ae0452374acf03d93315eb4381 + +# 2016 +# Removed unicode_literals from library, logging and mediafile +43572f50b0eb3522239d94149d91223e67d9a009 +# Removed unicode_literals from plugins +53d2c8d9db87be4d4750ad879bf46176537be73f +# reformat flake8 errors +1db46dfeb6607c164afb247d8da82443677795c1 + +# 2021 +# pyupgrade root +e26276658052947e9464d9726b703335304c7c13 +# pyupgrade beets dir +6d1316f463cb7c9390f85bf35b220e250a35004a +# pyupgrade autotag dir +f8b8938fd8bbe91898d0982552bc75d35703d3ef +# pyupgrade dbcore dir +d288f872903c79a7ee7c5a7c9cc690809441196e +# pyupgrade ui directory +432fa557258d9ff01e23ed750f9a86a96239599e +# pyupgrade util dir +af102c3e2f1c7a49e99839e2825906fe01780eec +# fix unused import and flake8 +910354a6c617ed5aa643cff666205b43e1557373 +# pyupgrade beetsplug and tests +1ec87a3bdd737abe46c6e614051bf9e314db4619 + +# 2022 +# Reformat flake8 config comments +abc3dfbf429b179fac25bd1dff72d577cd4d04c7 + +# 2023 +# Apply formatting tools to all files +a6e5201ff3fad4c69bf24d17bace2ef744b9f51b + +# 2024 +# Reformat the codebase +85a17ee5039628a6f3cdcb7a03d7d1bd530fbe89 +# Fix lint issues +f36bc497c8c8f89004f3f6879908d3f0b25123e1 +# Remove some lint exclusions and fix the issues +5f78d1b82b2292d5ce0c99623ba0ec444b80d24c diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8d0befeee..2ed4548ce 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -14,10 +14,10 @@ jobs: fail-fast: false matrix: platform: [ubuntu-latest, windows-latest] - python-version: ["3.8", "3.9"] + python-version: ["3.9"] runs-on: ${{ matrix.platform }} env: - IS_MAIN_PYTHON: ${{ matrix.python-version == '3.8' && matrix.platform == 'ubuntu-latest' }} + IS_MAIN_PYTHON: ${{ matrix.python-version == '3.9' && matrix.platform == 'ubuntu-latest' }} steps: - uses: actions/checkout@v4 - name: Install Python tools @@ -29,15 +29,16 @@ jobs: python-version: ${{ matrix.python-version }} cache: poetry - - name: Install PyGobject dependencies on Ubuntu + - name: Install PyGobject and release script dependencies on Ubuntu if: matrix.platform == 'ubuntu-latest' run: | sudo apt update - sudo apt install ffmpeg gobject-introspection libgirepository1.0-dev - poetry install --extras replaygain + sudo apt install ffmpeg gobject-introspection libgirepository1.0-dev pandoc + poetry install --with=release --extras=docs --extras=replaygain --extras=reflink + poe docs - name: Install Python dependencies - run: poetry install --only=main,test + run: poetry install --only=main,test --extras=autobpm - if: ${{ env.IS_MAIN_PYTHON != 'true' }} name: Test without coverage @@ -75,4 +76,4 @@ jobs: uses: codecov/codecov-action@v4 with: files: ./coverage.xml - use_oidc: true + use_oidc: ${{ !(github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork) }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 4e2a2db26..9e2552ab1 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -7,7 +7,7 @@ on: - master env: - PYTHON_VERSION: 3.8 + PYTHON_VERSION: 3.9 jobs: changed-files: @@ -60,7 +60,7 @@ jobs: cache: poetry - name: Install dependencies - run: poetry install --only=format + run: poetry install --only=lint - name: Check code formatting # the job output will contain colored diffs with what needs adjusting @@ -84,10 +84,7 
@@ jobs: run: poetry install --only=lint - name: Lint code - uses: liskin/gh-problem-matcher-wrap@v3 - with: - linters: flake8 - run: poe lint ${{ needs.changed-files.outputs.changed_python_files }} + run: poe lint --output-format=github ${{ needs.changed-files.outputs.changed_python_files }} mypy: if: needs.changed-files.outputs.any_python_changed == 'true' @@ -128,13 +125,13 @@ jobs: cache: poetry - name: Install dependencies - run: poetry install --only=docs + run: poetry install --extras=docs - name: Add Sphinx problem matcher run: echo "::add-matcher::.github/sphinx-problem-matcher.json" - name: Build docs - run: | + run: |- poe docs |& tee /tmp/output # fail the job if there are issues grep -q " WARNING:" /tmp/output && exit 1 || exit 0 diff --git a/.github/workflows/make_release.yaml b/.github/workflows/make_release.yaml index 24218b6f9..248755703 100644 --- a/.github/workflows/make_release.yaml +++ b/.github/workflows/make_release.yaml @@ -10,10 +10,11 @@ on: env: PYTHON_VERSION: 3.8 NEW_VERSION: ${{ inputs.version }} + NEW_TAG: v${{ inputs.version }} jobs: increment-version: - name: Bump project version and commit it + name: Bump version, commit and create tag runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -28,20 +29,26 @@ jobs: run: poetry install --only=release - name: Bump project version - id: script run: poe bump "${{ env.NEW_VERSION }}" - uses: EndBug/add-and-commit@v9 - name: Commit the changes + id: commit_and_tag + name: Commit the changes and create tag with: message: "Increment version to ${{ env.NEW_VERSION }}" + tag: "${{ env.NEW_TAG }} --force" build: name: Get changelog and build the distribution package runs-on: ubuntu-latest needs: increment-version + outputs: + changelog: ${{ steps.generate_changelog.outputs.changelog }} steps: - uses: actions/checkout@v4 + with: + ref: ${{ env.NEW_TAG }} + - name: Install Python tools uses: BrandonLWhite/pipx-install-action@v0.1.1 - uses: actions/setup-python@v5 @@ -50,16 +57,23 @@ jobs: cache: poetry - name: Install dependencies - run: poetry install --only=release + run: poetry install --with=release --extras=docs - name: Install pandoc run: sudo apt update && sudo apt install pandoc -y - name: Obtain the changelog - run: echo "changelog=$(poe changelog)" >> $GITHUB_OUTPUT + id: generate_changelog + run: | + poe docs + { + echo 'changelog<> "$GITHUB_OUTPUT" - name: Build a binary wheel and a source tarball - run: poetry build + run: poe build - name: Store the distribution packages uses: actions/upload-artifact@v4 @@ -88,19 +102,12 @@ jobs: make-github-release: name: Create GitHub release runs-on: ubuntu-latest - needs: publish-to-pypi + needs: [build, publish-to-pypi] env: CHANGELOG: ${{ needs.build.outputs.changelog }} steps: - - name: Tag the commit - id: tag_version - uses: mathieudutour/github-tag-action@v6 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - custom_tag: ${{ env.NEW_VERSION }} - - name: Download all the dists - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: python-package-distributions path: dist/ @@ -108,8 +115,6 @@ jobs: - name: Create a GitHub release id: make_release uses: ncipollo/release-action@v1 - env: - NEW_TAG: ${{ steps.tag_version.outputs.new_tag }} with: tag: ${{ env.NEW_TAG }} name: Release ${{ env.NEW_TAG }} @@ -117,7 +122,8 @@ jobs: artifacts: dist/* - name: Send release toot to Fosstodon uses: cbrgm/mastodon-github-action@v2 + continue-on-error: true with: access-token: ${{ secrets.MASTODON_ACCESS_TOKEN }} url: ${{ secrets.MASTODON_URL 
}} - message: "Version ${{ steps.tag_version.outputs.new_tag }} of beets has been released! Check out all of the new changes at ${{ steps.create_release.outputs.html_url }}" + message: "Version ${{ env.NEW_TAG }} of beets has been released! Check out all of the new changes at ${{ steps.make_release.outputs.html_url }}" diff --git a/.gitignore b/.gitignore index 15f11a433..90ef7387d 100644 --- a/.gitignore +++ b/.gitignore @@ -91,3 +91,6 @@ ENV/ /.pydevproject /.settings .vscode + +# pyright +pyrightconfig.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a41758897..d773af3e1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,13 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - - repo: https://github.com/psf/black - rev: 24.2.0 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.8.1 hooks: - - id: black - - - repo: https://github.com/pycqa/isort - rev: 5.13.2 - hooks: - - id: isort - name: isort (python) + - id: ruff-format diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index e54076ba0..f44e89b6e 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -118,10 +118,10 @@ command. Instead, you can activate the virtual environment in your shell with:: $ poetry shell -You should see ``(beets-py38)`` prefix in your shell prompt. Now you can run +You should see ``(beets-py3.9)`` prefix in your shell prompt. Now you can run commands directly, for example:: - $ (beets-py38) pytest + $ (beets-py3.9) pytest Additionally, `poethepoet`_ task runner assists us with the most common operations. Formatting, linting, testing are defined as ``poe`` tasks in @@ -237,7 +237,7 @@ There are a few coding conventions we use in beets: .. code-block:: python with g.lib.transaction() as tx: - rows = tx.query('SELECT DISTINCT "{0}" FROM "{1}" ORDER BY "{2}"' + rows = tx.query("SELECT DISTINCT '{0}' FROM '{1}' ORDER BY '{2}'" .format(field, model._table, sort_field)) To fetch Item objects from the database, use lib.items(…) and supply @@ -248,7 +248,7 @@ There are a few coding conventions we use in beets: .. code-block:: python with lib.transaction() as tx: - rows = tx.query('SELECT …') + rows = tx.query("SELECT …") Transaction objects help control concurrent access to the database and assist in debugging conflicting accesses. @@ -274,14 +274,13 @@ There are a few coding conventions we use in beets: Style ----- -We follow `black`_ formatting and `google's docstring format`_. +We use `ruff`_ to format and lint the codebase. -Use ``poe check-format`` and ``poe lint`` to check your code for style and +Run ``poe check-format`` and ``poe lint`` to check your code for style and linting errors. Running ``poe format`` will automatically format your code according to the specifications required by the project. -.. _black: https://black.readthedocs.io/en/stable/ -.. _google's docstring format: https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings +.. _ruff: https://docs.astral.sh/ruff/ Handling Paths -------------- @@ -345,10 +344,10 @@ environment variable ``SKIP_SLOW_TESTS``, for example:: Coverage ^^^^^^^^ -Coverage is measured automatically when running the tests. If you find it takes -a while to calculate, disable it:: +The ``test`` command does not include coverage as it slows down testing. In +order to measure it, use the ``test-with-coverage`` task - $ poe test --no-cov + $ poe test-with-coverage [pytest options] You are welcome to explore coverage by opening the HTML report in ``.reports/html/index.html``. 
@@ -379,28 +378,24 @@ Writing Tests Writing tests is done by adding or modifying files in folder `test`_. Take a look at `https://github.com/beetbox/beets/blob/master/test/test_template.py#L224`_ -to get a basic view on how tests are written. We currently allow writing -tests with either `unittest`_ or `pytest`_. +to get a basic view on how tests are written. Since we are currently migrating +the tests from `unittest`_ to `pytest`_, new tests should be written using +`pytest`_. Contributions migrating existing tests are welcome! -Any tests that involve sending out network traffic e.g. an external API -call, should be skipped normally and run under our weekly `integration -test`_ suite. These tests can be useful in detecting external changes -that would affect ``beets``. In order to do this, simply add the -following snippet before the applicable test case: +External API requests under test should be mocked with `requests_mock`_, +However, we still want to know whether external APIs are up and that they +return expected responses, therefore we test them weekly with our `integration +test`_ suite. + +In order to add such a test, mark your test with the ``integration_test`` marker .. code-block:: python - @unittest.skipUnless( - os.environ.get('INTEGRATION_TEST', '0') == '1', - 'integration testing not enabled') + @pytest.mark.integration_test + def test_external_api_call(): + ... -If you do this, it is also advised to create a similar test that 'mocks' -the network call and can be run under normal circumstances by our CI and -others. See `unittest.mock`_ for more info. - -- **AVOID** using the ``start()`` and ``stop()`` methods of - ``mock.patch``, as they require manual cleanup. Use the annotation or - context manager forms instead. +This way, the test will be run only in the integration test suite. .. _Codecov: https://codecov.io/github/beetbox/beets .. _pytest-random: https://github.com/klrmn/pytest-random @@ -410,6 +405,6 @@ others. See `unittest.mock`_ for more info. .. _`https://github.com/beetbox/beets/blob/master/test/test_template.py#L224`: https://github.com/beetbox/beets/blob/master/test/test_template.py#L224 .. _unittest: https://docs.python.org/3/library/unittest.html .. _integration test: https://github.com/beetbox/beets/actions?query=workflow%3A%22integration+tests%22 -.. _unittest.mock: https://docs.python.org/3/library/unittest.mock.html +.. _requests-mock: https://requests-mock.readthedocs.io/en/latest/response.html .. _documentation: https://beets.readthedocs.io/en/stable/ .. _vim: https://www.vim.org/ diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index f5459a443..000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,36 +0,0 @@ -# Include tests (but avoid including *.pyc, etc.) -prune test -recursive-include test/rsrc * -recursive-exclude test/rsrc *.pyc -recursive-exclude test/rsrc *.pyo -include test/*.py - -# Include relevant text files. -include LICENSE README.rst -# And generated manpages. -include man/beet.1 -include man/beetsconfig.5 - -# Include the Sphinx documentation. -recursive-include docs *.rst *.py Makefile *.png -prune docs/_build - -# Resources for web plugin. -recursive-include beetsplug/web/templates * -recursive-include beetsplug/web/static * - -# And for the lastgenre plugin. -include beetsplug/lastgenre/genres.txt -include beetsplug/lastgenre/genres-tree.yaml - -# Exclude junk. 
-global-exclude .DS_Store - -# Include default config -include beets/config_default.yaml - -# Shell completion template -include beets/ui/completion_base.sh - -# Include extra bits -recursive-include extra * diff --git a/beets/__init__.py b/beets/__init__.py index 16f51f85d..845d251ae 100644 --- a/beets/__init__.py +++ b/beets/__init__.py @@ -17,7 +17,7 @@ from sys import stderr import confuse -__version__ = "2.0.0" +__version__ = "2.2.0" __author__ = "Adrian Sampson " diff --git a/beets/__main__.py b/beets/__main__.py index 81995f7af..3473c6319 100644 --- a/beets/__main__.py +++ b/beets/__main__.py @@ -16,7 +16,6 @@ `python -m beets`. """ - import sys from .ui import main diff --git a/beets/art.py b/beets/art.py index 466d40005..2ff58c309 100644 --- a/beets/art.py +++ b/beets/art.py @@ -16,7 +16,6 @@ music and items' embedded album art. """ - import os from tempfile import NamedTemporaryFile diff --git a/beets/autotag/__init__.py b/beets/autotag/__init__.py index 54a9d5546..42f957b0d 100644 --- a/beets/autotag/__init__.py +++ b/beets/autotag/__init__.py @@ -12,23 +12,41 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Facilities for automatically determining files' correct metadata. -""" -from typing import Mapping +"""Facilities for automatically determining files' correct metadata.""" + +from collections.abc import Mapping, Sequence +from typing import Union from beets import config, logging -from beets.library import Item +from beets.library import Album, Item, LibModel # Parts of external interface. -from .hooks import ( # noqa - AlbumInfo, - AlbumMatch, - Distance, - TrackInfo, - TrackMatch, +from beets.util import unique_list + +from .hooks import AlbumInfo, AlbumMatch, Distance, TrackInfo, TrackMatch +from .match import ( + Proposal, + Recommendation, + current_metadata, + tag_album, + tag_item, ) -from .match import Recommendation # noqa -from .match import Proposal, current_metadata, tag_album, tag_item # noqa + +__all__ = [ + "AlbumInfo", + "AlbumMatch", + "Distance", + "TrackInfo", + "TrackMatch", + "Proposal", + "Recommendation", + "apply_album_metadata", + "apply_item_metadata", + "apply_metadata", + "current_metadata", + "tag_album", + "tag_item", +] # Global logger. log = logging.getLogger("beets") @@ -80,6 +98,71 @@ SPECIAL_FIELDS = { # Additional utilities for the main interface. +def _apply_metadata( + info: Union[AlbumInfo, TrackInfo], + db_obj: Union[Album, Item], + nullable_fields: Sequence[str] = [], +): + """Set the db_obj's metadata to match the info.""" + special_fields = SPECIAL_FIELDS[ + "album" if isinstance(info, AlbumInfo) else "track" + ] + + for field, value in info.items(): + # We only overwrite fields that are not already hardcoded. + if field in special_fields: + continue + + # Don't overwrite fields with empty values unless the + # field is explicitly allowed to be overwritten. + if value is None and field not in nullable_fields: + continue + + db_obj[field] = value + + +def correct_list_fields(m: LibModel) -> None: + """Synchronise single and list values for the list fields that we use. + + That is, ensure the same value in the single field and the first element + in the list. 
+ + For context, the value we set as, say, ``mb_artistid`` is simply ignored: + Under the current :class:`MediaFile` implementation, fields ``albumtype``, + ``mb_artistid`` and ``mb_albumartistid`` are mapped to the first element of + ``albumtypes``, ``mb_artistids`` and ``mb_albumartistids`` respectively. + + This means setting ``mb_artistid`` has no effect. However, beets + functionality still assumes that ``mb_artistid`` is independent and stores + its value in the database. If ``mb_artistid`` != ``mb_artistids[0]``, + ``beet write`` command thinks that ``mb_artistid`` is modified and tries to + update the field in the file. Of course nothing happens, so the same diff + is shown every time the command is run. + + We can avoid this issue by ensuring that ``mb_artistid`` has the same value + as ``mb_artistids[0]``, and that's what this function does. + + Note: :class:`Album` model does not have ``mb_artistids`` and + ``mb_albumartistids`` fields therefore we need to check for their presence. + """ + + def ensure_first_value(single_field: str, list_field: str) -> None: + """Ensure the first ``list_field`` item is equal to ``single_field``.""" + single_val, list_val = getattr(m, single_field), getattr(m, list_field) + if single_val: + setattr(m, list_field, unique_list([single_val, *list_val])) + elif list_val: + setattr(m, single_field, list_val[0]) + + ensure_first_value("albumtype", "albumtypes") + + if hasattr(m, "mb_artistids"): + ensure_first_value("mb_artistid", "mb_artistids") + + if hasattr(m, "mb_albumartistids"): + ensure_first_value("mb_albumartistid", "mb_albumartistids") + + def apply_item_metadata(item: Item, track_info: TrackInfo): """Set an item's metadata from its matched TrackInfo object.""" item.artist = track_info.artist @@ -96,18 +179,19 @@ def apply_item_metadata(item: Item, track_info: TrackInfo): if track_info.artists_ids: item.mb_artistids = track_info.artists_ids - for field, value in track_info.items(): - # We only overwrite fields that are not already hardcoded. - if field in SPECIAL_FIELDS["track"]: - continue - if value is None: - continue - item[field] = value + _apply_metadata(track_info, item) + correct_list_fields(item) # At the moment, the other metadata is left intact (including album # and track number). Perhaps these should be emptied? +def apply_album_metadata(album_info: AlbumInfo, album: Album): + """Set the album's metadata to match the AlbumInfo object.""" + _apply_metadata(album_info, album) + correct_list_fields(album) + + def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]): """Set the items' metadata to match an AlbumInfo object using a mapping from Items to TrackInfo objects. @@ -218,21 +302,16 @@ def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]): # Track alt. 
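As an illustration of the synchronisation rule that the new `correct_list_fields` helper above enforces, here is a minimal standalone sketch. The `FakeModel` class is a made-up stand-in for a beets model; only the single/list pairing logic mirrors the code above:

```python
# Illustration only: a stand-in object with the same single/list field pair
# that correct_list_fields() synchronises on real beets models.
class FakeModel:
    def __init__(self):
        self.mb_artistid = "abc-123"      # single-valued field
        self.mb_artistids = ["xyz-789"]   # list-valued field, out of sync


def ensure_first_value(obj, single_field: str, list_field: str) -> None:
    """Keep the single field equal to the first element of the list field."""
    single_val = getattr(obj, single_field)
    list_val = getattr(obj, list_field)
    if single_val:
        # Put the single value first, dropping duplicates but keeping order,
        # the same effect as beets.util.unique_list([single_val, *list_val]).
        setattr(obj, list_field, list(dict.fromkeys([single_val, *list_val])))
    elif list_val:
        setattr(obj, single_field, list_val[0])


m = FakeModel()
ensure_first_value(m, "mb_artistid", "mb_artistids")
print(m.mb_artistid, m.mb_artistids)  # abc-123 ['abc-123', 'xyz-789']
```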
item.track_alt = track_info.track_alt - # Don't overwrite fields with empty values unless the - # field is explicitly allowed to be overwritten - for field, value in album_info.items(): - if field in SPECIAL_FIELDS["album"]: - continue - clobber = field in config["overwrite_null"]["album"].as_str_seq() - if value is None and not clobber: - continue - item[field] = value + _apply_metadata( + album_info, + item, + nullable_fields=config["overwrite_null"]["album"].as_str_seq(), + ) - for field, value in track_info.items(): - if field in SPECIAL_FIELDS["track"]: - continue - clobber = field in config["overwrite_null"]["track"].as_str_seq() - value = getattr(track_info, field) - if value is None and not clobber: - continue - item[field] = value + _apply_metadata( + track_info, + item, + nullable_fields=config["overwrite_null"]["track"].as_str_seq(), + ) + + correct_list_fields(item) diff --git a/beets/autotag/hooks.py b/beets/autotag/hooks.py index efd71da9b..3fa80c6f3 100644 --- a/beets/autotag/hooks.py +++ b/beets/autotag/hooks.py @@ -17,37 +17,28 @@ from __future__ import annotations import re -from collections import namedtuple from functools import total_ordering -from typing import ( - Any, - Callable, - Dict, - Iterable, - Iterator, - List, - Optional, - Tuple, - TypeVar, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Callable, NamedTuple, TypeVar, cast from jellyfish import levenshtein_distance from unidecode import unidecode from beets import config, logging, plugins from beets.autotag import mb -from beets.library import Item from beets.util import as_string, cached_classproperty +if TYPE_CHECKING: + from collections.abc import Iterable, Iterator + + from beets.library import Item + log = logging.getLogger("beets") V = TypeVar("V") # Classes used to represent candidate options. -class AttrDict(Dict[str, V]): +class AttrDict(dict[str, V]): """A dictionary that supports attribute ("dot") access, so `d.field` is equivalent to `d['field']`. """ @@ -82,47 +73,47 @@ class AlbumInfo(AttrDict): # TYPING: are all of these correct? 
I've assumed optional strings def __init__( self, - tracks: List[TrackInfo], - album: Optional[str] = None, - album_id: Optional[str] = None, - artist: Optional[str] = None, - artist_id: Optional[str] = None, - artists: Optional[List[str]] = None, - artists_ids: Optional[List[str]] = None, - asin: Optional[str] = None, - albumtype: Optional[str] = None, - albumtypes: Optional[List[str]] = None, + tracks: list[TrackInfo], + album: str | None = None, + album_id: str | None = None, + artist: str | None = None, + artist_id: str | None = None, + artists: list[str] | None = None, + artists_ids: list[str] | None = None, + asin: str | None = None, + albumtype: str | None = None, + albumtypes: list[str] | None = None, va: bool = False, - year: Optional[int] = None, - month: Optional[int] = None, - day: Optional[int] = None, - label: Optional[str] = None, - barcode: Optional[str] = None, - mediums: Optional[int] = None, - artist_sort: Optional[str] = None, - artists_sort: Optional[List[str]] = None, - releasegroup_id: Optional[str] = None, - release_group_title: Optional[str] = None, - catalognum: Optional[str] = None, - script: Optional[str] = None, - language: Optional[str] = None, - country: Optional[str] = None, - style: Optional[str] = None, - genre: Optional[str] = None, - albumstatus: Optional[str] = None, - media: Optional[str] = None, - albumdisambig: Optional[str] = None, - releasegroupdisambig: Optional[str] = None, - artist_credit: Optional[str] = None, - artists_credit: Optional[List[str]] = None, - original_year: Optional[int] = None, - original_month: Optional[int] = None, - original_day: Optional[int] = None, - data_source: Optional[str] = None, - data_url: Optional[str] = None, - discogs_albumid: Optional[str] = None, - discogs_labelid: Optional[str] = None, - discogs_artistid: Optional[str] = None, + year: int | None = None, + month: int | None = None, + day: int | None = None, + label: str | None = None, + barcode: str | None = None, + mediums: int | None = None, + artist_sort: str | None = None, + artists_sort: list[str] | None = None, + releasegroup_id: str | None = None, + release_group_title: str | None = None, + catalognum: str | None = None, + script: str | None = None, + language: str | None = None, + country: str | None = None, + style: str | None = None, + genre: str | None = None, + albumstatus: str | None = None, + media: str | None = None, + albumdisambig: str | None = None, + releasegroupdisambig: str | None = None, + artist_credit: str | None = None, + artists_credit: list[str] | None = None, + original_year: int | None = None, + original_month: int | None = None, + original_day: int | None = None, + data_source: str | None = None, + data_url: str | None = None, + discogs_albumid: str | None = None, + discogs_labelid: str | None = None, + discogs_artistid: str | None = None, **kwargs, ): self.album = album @@ -168,42 +159,6 @@ class AlbumInfo(AttrDict): self.discogs_artistid = discogs_artistid self.update(kwargs) - # Work around a bug in python-musicbrainz-ngs that causes some - # strings to be bytes rather than Unicode. - # https://github.com/alastair/python-musicbrainz-ngs/issues/85 - def decode(self, codec: str = "utf-8"): - """Ensure that all string attributes on this object, and the - constituent `TrackInfo` objects, are decoded to Unicode. 
- """ - for fld in [ - "album", - "artist", - "albumtype", - "label", - "barcode", - "artist_sort", - "catalognum", - "script", - "language", - "country", - "style", - "genre", - "albumstatus", - "albumdisambig", - "releasegroupdisambig", - "artist_credit", - "media", - "discogs_albumid", - "discogs_labelid", - "discogs_artistid", - ]: - value = getattr(self, fld) - if isinstance(value, bytes): - setattr(self, fld, value.decode(codec, "ignore")) - - for track in self.tracks: - track.decode(codec) - def copy(self) -> AlbumInfo: dupe = AlbumInfo([]) dupe.update(self) @@ -226,38 +181,38 @@ class TrackInfo(AttrDict): # TYPING: are all of these correct? I've assumed optional strings def __init__( self, - title: Optional[str] = None, - track_id: Optional[str] = None, - release_track_id: Optional[str] = None, - artist: Optional[str] = None, - artist_id: Optional[str] = None, - artists: Optional[List[str]] = None, - artists_ids: Optional[List[str]] = None, - length: Optional[float] = None, - index: Optional[int] = None, - medium: Optional[int] = None, - medium_index: Optional[int] = None, - medium_total: Optional[int] = None, - artist_sort: Optional[str] = None, - artists_sort: Optional[List[str]] = None, - disctitle: Optional[str] = None, - artist_credit: Optional[str] = None, - artists_credit: Optional[List[str]] = None, - data_source: Optional[str] = None, - data_url: Optional[str] = None, - media: Optional[str] = None, - lyricist: Optional[str] = None, - composer: Optional[str] = None, - composer_sort: Optional[str] = None, - arranger: Optional[str] = None, - track_alt: Optional[str] = None, - work: Optional[str] = None, - mb_workid: Optional[str] = None, - work_disambig: Optional[str] = None, - bpm: Optional[str] = None, - initial_key: Optional[str] = None, - genre: Optional[str] = None, - album: Optional[str] = None, + title: str | None = None, + track_id: str | None = None, + release_track_id: str | None = None, + artist: str | None = None, + artist_id: str | None = None, + artists: list[str] | None = None, + artists_ids: list[str] | None = None, + length: float | None = None, + index: int | None = None, + medium: int | None = None, + medium_index: int | None = None, + medium_total: int | None = None, + artist_sort: str | None = None, + artists_sort: list[str] | None = None, + disctitle: str | None = None, + artist_credit: str | None = None, + artists_credit: list[str] | None = None, + data_source: str | None = None, + data_url: str | None = None, + media: str | None = None, + lyricist: str | None = None, + composer: str | None = None, + composer_sort: str | None = None, + arranger: str | None = None, + track_alt: str | None = None, + work: str | None = None, + mb_workid: str | None = None, + work_disambig: str | None = None, + bpm: str | None = None, + initial_key: str | None = None, + genre: str | None = None, + album: str | None = None, **kwargs, ): self.title = title @@ -294,24 +249,6 @@ class TrackInfo(AttrDict): self.album = album self.update(kwargs) - # As above, work around a bug in python-musicbrainz-ngs. - def decode(self, codec="utf-8"): - """Ensure that all string attributes on this object are decoded - to Unicode. 
- """ - for fld in [ - "title", - "artist", - "medium", - "artist_sort", - "disctitle", - "artist_credit", - "media", - ]: - value = getattr(self, fld) - if isinstance(value, bytes): - setattr(self, fld, value.decode(codec, "ignore")) - def copy(self) -> TrackInfo: dupe = TrackInfo() dupe.update(self) @@ -355,7 +292,7 @@ def _string_dist_basic(str1: str, str2: str) -> float: return levenshtein_distance(str1, str2) / float(max(len(str1), len(str2))) -def string_dist(str1: Optional[str], str2: Optional[str]) -> float: +def string_dist(str1: str | None, str2: str | None) -> float: """Gives an "intuitive" edit distance between two strings. This is an edit distance, normalized by the string length, with a number of tweaks that reflect intuition about text. @@ -422,10 +359,10 @@ class Distance: def __init__(self): self._penalties = {} - self.tracks: Dict[TrackInfo, Distance] = {} + self.tracks: dict[TrackInfo, Distance] = {} @cached_classproperty - def _weights(cls) -> Dict[str, float]: # noqa: N805 + def _weights(cls) -> dict[str, float]: """A dictionary from keys to floating-point weights.""" weights_view = config["match"]["distance_weights"] weights = {} @@ -461,7 +398,7 @@ class Distance: dist_raw += sum(penalty) * self._weights[key] return dist_raw - def items(self) -> List[Tuple[str, float]]: + def items(self) -> list[tuple[str, float]]: """Return a list of (key, dist) pairs, with `dist` being the weighted distance, sorted from highest to lowest. Does not include penalties with a zero value. @@ -511,16 +448,16 @@ class Distance: return dist / dist_max return 0.0 - def __iter__(self) -> Iterator[Tuple[str, float]]: + def __iter__(self) -> Iterator[tuple[str, float]]: return iter(self.items()) def __len__(self) -> int: return len(self.items()) - def keys(self) -> List[str]: + def keys(self) -> list[str]: return [key for key, _ in self.items()] - def update(self, dist: "Distance"): + def update(self, dist: Distance): """Adds all the distance penalties from `dist`.""" if not isinstance(dist, Distance): raise ValueError( @@ -531,7 +468,7 @@ class Distance: # Adding components. - def _eq(self, value1: Union[re.Pattern[str], Any], value2: Any) -> bool: + def _eq(self, value1: re.Pattern[str] | Any, value2: Any) -> bool: """Returns True if `value1` is equal to `value2`. `value1` may be a compiled regular expression, in which case it will be matched against `value2`. @@ -555,7 +492,7 @@ class Distance: self, key: str, value: Any, - options: Union[List[Any], Tuple[Any, ...], Any], + options: list[Any] | tuple[Any, ...] | Any, ): """Adds a distance penalty of 1.0 if `value` doesn't match any of the values in `options`. If an option is a compiled regular @@ -598,7 +535,7 @@ class Distance: self, key: str, value: Any, - options: Union[List[Any], Tuple[Any, ...], Any], + options: list[Any] | tuple[Any, ...] | Any, ): """Adds a distance penalty that corresponds to the position at which `value` appears in `options`. A distance penalty of 0.0 @@ -620,8 +557,8 @@ class Distance: def add_ratio( self, key: str, - number1: Union[int, float], - number2: Union[int, float], + number1: int | float, + number2: int | float, ): """Adds a distance penalty for `number1` as a ratio of `number2`. `number1` is bound at 0 and `number2`. 
@@ -633,7 +570,7 @@ class Distance: dist = 0.0 self.add(key, dist) - def add_string(self, key: str, str1: Optional[str], str2: Optional[str]): + def add_string(self, key: str, str1: str | None, str2: str | None): """Adds a distance penalty based on the edit distance between `str1` and `str2`. """ @@ -643,17 +580,24 @@ class Distance: # Structures that compose all the information for a candidate match. -AlbumMatch = namedtuple( - "AlbumMatch", ["distance", "info", "mapping", "extra_items", "extra_tracks"] -) -TrackMatch = namedtuple("TrackMatch", ["distance", "info"]) +class AlbumMatch(NamedTuple): + distance: Distance + info: AlbumInfo + mapping: dict[Item, TrackInfo] + extra_items: list[Item] + extra_tracks: list[TrackInfo] + + +class TrackMatch(NamedTuple): + distance: Distance + info: TrackInfo # Aggregation of sources. -def album_for_mbid(release_id: str) -> Optional[AlbumInfo]: +def album_for_mbid(release_id: str) -> AlbumInfo | None: """Get an AlbumInfo object for a MusicBrainz release ID. Return None if the ID is not found. """ @@ -667,7 +611,7 @@ def album_for_mbid(release_id: str) -> Optional[AlbumInfo]: return None -def track_for_mbid(recording_id: str) -> Optional[TrackInfo]: +def track_for_mbid(recording_id: str) -> TrackInfo | None: """Get a TrackInfo object for a MusicBrainz recording ID. Return None if the ID is not found. """ @@ -713,12 +657,12 @@ def invoke_mb(call_func: Callable, *args): @plugins.notify_info_yielded("albuminfo_received") def album_candidates( - items: List[Item], + items: list[Item], artist: str, album: str, va_likely: bool, - extra_tags: Dict, -) -> Iterable[Tuple]: + extra_tags: dict, +) -> Iterable[tuple]: """Search for album matches. ``items`` is a list of Item objects that make up the album. ``artist`` and ``album`` are the respective names (strings), which may be derived from the item list or may be @@ -746,7 +690,7 @@ def album_candidates( @plugins.notify_info_yielded("trackinfo_received") -def item_candidates(item: Item, artist: str, title: str) -> Iterable[Tuple]: +def item_candidates(item: Item, artist: str, title: str) -> Iterable[tuple]: """Search for item matches. ``item`` is the Item to be matched. ``artist`` and ``title`` are strings and either reflect the item or are specified by the user. diff --git a/beets/autotag/match.py b/beets/autotag/match.py index 63db9e33c..a7121fd34 100644 --- a/beets/autotag/match.py +++ b/beets/autotag/match.py @@ -16,24 +16,17 @@ releases and tracks. """ +from __future__ import annotations import datetime import re -from collections import namedtuple -from typing import ( - Any, - Dict, - Iterable, - List, - Optional, - Sequence, - Tuple, - TypeVar, - Union, - cast, -) +from collections.abc import Iterable, Sequence +from enum import IntEnum +from functools import cache +from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar, cast -from munkres import Munkres +import lap +import numpy as np from beets import config, logging, plugins from beets.autotag import ( @@ -44,9 +37,10 @@ from beets.autotag import ( TrackMatch, hooks, ) -from beets.library import Item from beets.util import plurality -from beets.util.enumeration import OrderedEnum + +if TYPE_CHECKING: + from beets.library import Item # Artist signals that indicate "various artists". These are used at the # album level to determine whether a given release is likely a VA @@ -61,7 +55,7 @@ log = logging.getLogger("beets") # Recommendation enumeration. 
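The `Recommendation` class in the hunk that follows switches its base from `OrderedEnum` to `IntEnum`, which works because `IntEnum` members compare by their integer value. A small self-contained check of that property; the member names and values here mirror beets' own none/low/medium/strong levels:

```python
from enum import IntEnum


class Recommendation(IntEnum):
    none = 0
    low = 1
    medium = 2
    strong = 3


# IntEnum members order by value, so threshold checks keep working.
assert Recommendation.strong > Recommendation.medium
assert max(Recommendation.low, Recommendation.none) is Recommendation.low
```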
-class Recommendation(OrderedEnum): +class Recommendation(IntEnum): """Indicates a qualitative suggestion to the user about what should be done with a given match. """ @@ -76,7 +70,10 @@ class Recommendation(OrderedEnum): # consists of a list of possible candidates (i.e., AlbumInfo or TrackInfo # objects) and a recommendation value. -Proposal = namedtuple("Proposal", ("candidates", "recommendation")) + +class Proposal(NamedTuple): + candidates: Sequence[AlbumMatch | TrackMatch] + recommendation: Recommendation # Primary matching functionality. @@ -84,7 +81,7 @@ Proposal = namedtuple("Proposal", ("candidates", "recommendation")) def current_metadata( items: Iterable[Item], -) -> Tuple[Dict[str, Any], Dict[str, Any]]: +) -> tuple[dict[str, Any], dict[str, Any]]: """Extract the likely current metadata for an album given a list of its items. Return two dictionaries: - The most common value for each field. @@ -123,29 +120,29 @@ def current_metadata( def assign_items( items: Sequence[Item], tracks: Sequence[TrackInfo], -) -> Tuple[Dict[Item, TrackInfo], List[Item], List[TrackInfo]]: +) -> tuple[dict[Item, TrackInfo], list[Item], list[TrackInfo]]: """Given a list of Items and a list of TrackInfo objects, find the best mapping between them. Returns a mapping from Items to TrackInfo objects, a set of extra Items, and a set of extra TrackInfo objects. These "extra" objects occur when there is an unequal number of objects of the two types. """ - # Construct the cost matrix. - costs: List[List[Distance]] = [] - for item in items: - row = [] - for track in tracks: - row.append(track_distance(item, track)) - costs.append(row) - - # Find a minimum-cost bipartite matching. log.debug("Computing track assignment...") - matching = Munkres().compute(costs) + # Construct the cost matrix. + costs = [[float(track_distance(i, t)) for t in tracks] for i in items] + # Assign items to tracks + _, _, assigned_item_idxs = lap.lapjv(np.array(costs), extend_cost=True) log.debug("...done.") - # Produce the output matching. - mapping = {items[i]: tracks[j] for (i, j) in matching} - extra_items = list(set(items) - set(mapping.keys())) + # Each item in `assigned_item_idxs` list corresponds to a track in the + # `tracks` list. Each value is either an index into the assigned item in + # `items` list, or -1 if that track has no match. + mapping = { + items[iidx]: t + for iidx, t in zip(assigned_item_idxs, tracks) + if iidx != -1 + } + extra_items = list(set(items) - mapping.keys()) extra_items.sort(key=lambda i: (i.disc, i.track, i.title)) extra_tracks = list(set(tracks) - set(mapping.values())) extra_tracks.sort(key=lambda t: (t.index, t.title)) @@ -159,6 +156,18 @@ def track_index_changed(item: Item, track_info: TrackInfo) -> bool: return item.track not in (track_info.medium_index, track_info.index) +@cache +def get_track_length_grace() -> float: + """Get cached grace period for track length matching.""" + return config["match"]["track_length_grace"].as_number() + + +@cache +def get_track_length_max() -> float: + """Get cached maximum track length for track length matching.""" + return config["match"]["track_length_max"].as_number() + + def track_distance( item: Item, track_info: TrackInfo, @@ -167,23 +176,17 @@ def track_distance( """Determines the significance of a track metadata change. Returns a Distance object. `incl_artist` indicates that a distance component should be included for the track artist (i.e., for various-artist releases). 
+ + ``track_length_grace`` and ``track_length_max`` configuration options are + cached because this function is called many times during the matching + process and their access comes with a performance overhead. """ dist = hooks.Distance() # Length. - if track_info.length: - item_length = cast(float, item.length) - track_length_grace = cast( - Union[float, int], - config["match"]["track_length_grace"].as_number(), - ) - track_length_max = cast( - Union[float, int], - config["match"]["track_length_max"].as_number(), - ) - - diff = abs(item_length - track_info.length) - track_length_grace - dist.add_ratio("track_length", diff, track_length_max) + if info_length := track_info.length: + diff = abs(item.length - info_length) - get_track_length_grace() + dist.add_ratio("track_length", diff, get_track_length_max()) # Title. dist.add_string("track_title", item.title, track_info.title) @@ -204,6 +207,10 @@ def track_distance( if item.mb_trackid: dist.add_expr("track_id", item.mb_trackid != track_info.track_id) + # Penalize mismatching disc numbers. + if track_info.medium and item.disc: + dist.add_expr("medium", item.disc != track_info.medium) + # Plugins. dist.update(plugins.track_distance(item, track_info)) @@ -213,7 +220,7 @@ def track_distance( def distance( items: Sequence[Item], album_info: AlbumInfo, - mapping: Dict[Item, TrackInfo], + mapping: dict[Item, TrackInfo], ) -> Distance: """Determines how "significant" an album metadata change would be. Returns a Distance object. `album_info` is an AlbumInfo object @@ -351,7 +358,7 @@ def match_by_id(items: Iterable[Item]): def _recommendation( - results: Sequence[Union[AlbumMatch, TrackMatch]], + results: Sequence[AlbumMatch | TrackMatch], ) -> Recommendation: """Given a sorted list of AlbumMatch or TrackMatch objects, return a recommendation based on the results' distances. @@ -417,7 +424,7 @@ def _sort_candidates(candidates: Iterable[AnyMatch]) -> Sequence[AnyMatch]: def _add_candidate( items: Sequence[Item], - results: Dict[Any, AlbumMatch], + results: dict[Any, AlbumMatch], info: AlbumInfo, ): """Given a candidate AlbumInfo object, attempt to add the candidate @@ -469,10 +476,10 @@ def _add_candidate( def tag_album( items, - search_artist: Optional[str] = None, - search_album: Optional[str] = None, - search_ids: List[str] = [], -) -> Tuple[str, str, Proposal]: + search_artist: str | None = None, + search_album: str | None = None, + search_ids: list[str] = [], +) -> tuple[str, str, Proposal]: """Return a tuple of the current artist name, the current album name, and a `Proposal` containing `AlbumMatch` candidates. @@ -497,7 +504,7 @@ def tag_album( log.debug("Tagging {0} - {1}", cur_artist, cur_album) # The output result, keys are the MB album ID. - candidates: Dict[Any, AlbumMatch] = {} + candidates: dict[Any, AlbumMatch] = {} # Search by explicit ID. if search_ids: @@ -561,9 +568,9 @@ def tag_album( def tag_item( item, - search_artist: Optional[str] = None, - search_title: Optional[str] = None, - search_ids: Optional[List[str]] = None, + search_artist: str | None = None, + search_title: str | None = None, + search_ids: list[str] | None = None, ) -> Proposal: """Find metadata for a single track. Return a `Proposal` consisting of `TrackMatch` objects. @@ -576,7 +583,7 @@ def tag_item( # Holds candidates found so far: keys are MBIDs; values are # (distance, TrackInfo) pairs. candidates = {} - rec: Optional[Recommendation] = None + rec: Recommendation | None = None # First, try matching by MusicBrainz ID. 
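For reference, a small self-contained sketch of the rectangular assignment step that the `assign_items` rewrite above performs with `lap.lapjv`. The cost values and matrix shape are invented for illustration, and the snippet assumes the `lap` and `numpy` packages are installed:

```python
import lap
import numpy as np

# Hypothetical distances: rows are library items, columns are candidate
# tracks.  Lower is a better match; the matrix may be rectangular.
costs = np.array(
    [
        [0.1, 0.9, 0.8],  # item 0 clearly matches track 0
        [0.7, 0.2, 0.9],  # item 1 clearly matches track 1
    ]
)

# extend_cost=True allows unequal numbers of items and tracks.  The third
# return value has one entry per column (track): the index of the assigned
# row (item), or -1 if that track has no match.
_, _, assigned_item_idxs = lap.lapjv(costs, extend_cost=True)

mapping = {
    int(item_idx): track_idx
    for track_idx, item_idx in enumerate(assigned_item_idxs)
    if item_idx != -1
}
print(mapping)  # -> {0: 0, 1: 1}; track 2 stays unmatched
```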
trackids = search_ids or [t for t in [item.mb_trackid] if t] diff --git a/beets/autotag/mb.py b/beets/autotag/mb.py index 9d3fe6d43..90c2013d8 100644 --- a/beets/autotag/mb.py +++ b/beets/autotag/mb.py @@ -12,14 +12,16 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Searches for albums in the MusicBrainz database. -""" +"""Searches for albums in the MusicBrainz database.""" + from __future__ import annotations import re import traceback from collections import Counter -from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, cast +from collections.abc import Iterator, Sequence +from itertools import product +from typing import Any, cast from urllib.parse import urljoin import musicbrainzngs @@ -53,7 +55,7 @@ FIELDS_TO_MB_KEYS = { musicbrainzngs.set_useragent("beets", beets.__version__, "https://beets.io/") -class MusicBrainzAPIError(util.HumanReadableException): +class MusicBrainzAPIError(util.HumanReadableError): """An error while talking to MusicBrainz. The `query` field is the parameter to the action and may have any type. """ @@ -130,7 +132,7 @@ def configure(): ) -def _preferred_alias(aliases: List): +def _preferred_alias(aliases: list): """Given an list of alias structures for an artist credit, select and return the user's preferred alias alias or None if no matching alias is found. @@ -165,7 +167,7 @@ def _preferred_alias(aliases: List): return matches[0] -def _preferred_release_event(release: Dict[str, Any]) -> Tuple[str, str]: +def _preferred_release_event(release: dict[str, Any]) -> tuple[str, str]: """Given a release, select and return the user's preferred release event as a tuple of (country, release_date). Fall back to the default release event if a preferred event is not found. @@ -185,8 +187,8 @@ def _preferred_release_event(release: Dict[str, Any]) -> Tuple[str, str]: def _multi_artist_credit( - credit: List[Dict], include_join_phrase: bool -) -> Tuple[List[str], List[str], List[str]]: + credit: list[dict], include_join_phrase: bool +) -> tuple[list[str], list[str], list[str]]: """Given a list representing an ``artist-credit`` block, accumulate data into a triple of joined artist name lists: canonical, sort, and credit. @@ -233,7 +235,7 @@ def _multi_artist_credit( ) -def _flatten_artist_credit(credit: List[Dict]) -> Tuple[str, str, str]: +def _flatten_artist_credit(credit: list[dict]) -> tuple[str, str, str]: """Given a list representing an ``artist-credit`` block, flatten the data into a triple of joined artist name strings: canonical, sort, and credit. 
@@ -248,12 +250,12 @@ def _flatten_artist_credit(credit: List[Dict]) -> Tuple[str, str, str]: ) -def _artist_ids(credit: List[Dict]) -> List[str]: +def _artist_ids(credit: list[dict]) -> list[str]: """ Given a list representing an ``artist-credit``, return a list of artist IDs """ - artist_ids: List[str] = [] + artist_ids: list[str] = [] for el in credit: if isinstance(el, dict): artist_ids.append(el["artist"]["id"]) @@ -275,11 +277,11 @@ def _get_related_artist_names(relations, relation_type): def track_info( - recording: Dict, - index: Optional[int] = None, - medium: Optional[int] = None, - medium_index: Optional[int] = None, - medium_total: Optional[int] = None, + recording: dict, + index: int | None = None, + medium: int | None = None, + medium_index: int | None = None, + medium_total: int | None = None, ) -> beets.autotag.hooks.TrackInfo: """Translates a MusicBrainz recording result dictionary into a beets ``TrackInfo`` object. Three parameters are optional and are used @@ -372,7 +374,6 @@ def track_info( for extra_trackdata in extra_trackdatas: info.update(extra_trackdata) - info.decode() return info @@ -400,7 +401,7 @@ def _set_date_str( setattr(info, key, date_num) -def album_info(release: Dict) -> beets.autotag.hooks.AlbumInfo: +def album_info(release: dict) -> beets.autotag.hooks.AlbumInfo: """Takes a MusicBrainz release result dictionary and returns a beets AlbumInfo object containing the interesting data about that release. """ @@ -619,79 +620,50 @@ def album_info(release: Dict) -> beets.autotag.hooks.AlbumInfo: ) # We might find links to external sources (Discogs, Bandcamp, ...) - if any( - config["musicbrainz"]["external_ids"].get().values() - ) and release.get("url-relation-list"): - discogs_url, bandcamp_url, spotify_url = None, None, None - deezer_url, beatport_url, tidal_url = None, None, None - fetch_discogs, fetch_bandcamp, fetch_spotify = False, False, False - fetch_deezer, fetch_beatport, fetch_tidal = False, False, False + external_ids = config["musicbrainz"]["external_ids"].get() + wanted_sources = {site for site, wanted in external_ids.items() if wanted} + if wanted_sources and (url_rels := release.get("url-relation-list")): + urls = {} - if config["musicbrainz"]["external_ids"]["discogs"].get(): - fetch_discogs = True - if config["musicbrainz"]["external_ids"]["bandcamp"].get(): - fetch_bandcamp = True - if config["musicbrainz"]["external_ids"]["spotify"].get(): - fetch_spotify = True - if config["musicbrainz"]["external_ids"]["deezer"].get(): - fetch_deezer = True - if config["musicbrainz"]["external_ids"]["beatport"].get(): - fetch_beatport = True - if config["musicbrainz"]["external_ids"]["tidal"].get(): - fetch_tidal = True + for source, url in product(wanted_sources, url_rels): + if f"{source}.com" in (target := url["target"]): + urls[source] = target + log.debug( + "Found link to {} release via MusicBrainz", + source.capitalize(), + ) - for url in release["url-relation-list"]: - if fetch_discogs and url["type"] == "discogs": - log.debug("Found link to Discogs release via MusicBrainz") - discogs_url = url["target"] - if fetch_bandcamp and "bandcamp.com" in url["target"]: - log.debug("Found link to Bandcamp release via MusicBrainz") - bandcamp_url = url["target"] - if fetch_spotify and "spotify.com" in url["target"]: - log.debug("Found link to Spotify album via MusicBrainz") - spotify_url = url["target"] - if fetch_deezer and "deezer.com" in url["target"]: - log.debug("Found link to Deezer album via MusicBrainz") - deezer_url = url["target"] - if 
fetch_beatport and "beatport.com" in url["target"]: - log.debug("Found link to Beatport release via MusicBrainz") - beatport_url = url["target"] - if fetch_tidal and "tidal.com" in url["target"]: - log.debug("Found link to Tidal release via MusicBrainz") - tidal_url = url["target"] - - if discogs_url: - info.discogs_albumid = extract_discogs_id_regex(discogs_url) - if bandcamp_url: - info.bandcamp_album_id = bandcamp_url - if spotify_url: + if "discogs" in urls: + info.discogs_albumid = extract_discogs_id_regex(urls["discogs"]) + if "bandcamp" in urls: + info.bandcamp_album_id = urls["bandcamp"] + if "spotify" in urls: info.spotify_album_id = MetadataSourcePlugin._get_id( - "album", spotify_url, spotify_id_regex + "album", urls["spotify"], spotify_id_regex ) - if deezer_url: + if "deezer" in urls: info.deezer_album_id = MetadataSourcePlugin._get_id( - "album", deezer_url, deezer_id_regex + "album", urls["deezer"], deezer_id_regex ) - if beatport_url: + if "beatport" in urls: info.beatport_album_id = MetadataSourcePlugin._get_id( - "album", beatport_url, beatport_id_regex + "album", urls["beatport"], beatport_id_regex ) - if tidal_url: - info.tidal_album_id = tidal_url.split("/")[-1] + if "tidal" in urls: + info.tidal_album_id = urls["tidal"].split("/")[-1] extra_albumdatas = plugins.send("mb_album_extract", data=release) for extra_albumdata in extra_albumdatas: info.update(extra_albumdata) - info.decode() return info def match_album( artist: str, album: str, - tracks: Optional[int] = None, - extra_tags: Optional[Dict[str, Any]] = None, + tracks: int | None = None, + extra_tags: dict[str, Any] | None = None, ) -> Iterator[beets.autotag.hooks.AlbumInfo]: """Searches for a single album ("release" in MusicBrainz parlance) and returns an iterator over AlbumInfo objects. May raise a @@ -768,7 +740,7 @@ def match_track( yield track_info(recording) -def _parse_id(s: str) -> Optional[str]: +def _parse_id(s: str) -> str | None: """Search for a MusicBrainz ID in the given string and return it. If no ID can be found, return None. """ @@ -785,8 +757,8 @@ def _is_translation(r): def _find_actual_release_from_pseudo_release( - pseudo_rel: Dict, -) -> Optional[Dict]: + pseudo_rel: dict, +) -> dict | None: try: relations = pseudo_rel["release"]["release-relation-list"] except KeyError: @@ -805,7 +777,7 @@ def _find_actual_release_from_pseudo_release( def _merge_pseudo_and_actual_album( pseudo: beets.autotag.hooks.AlbumInfo, actual: beets.autotag.hooks.AlbumInfo -) -> Optional[beets.autotag.hooks.AlbumInfo]: +) -> beets.autotag.hooks.AlbumInfo | None: """ Merges a pseudo release with its actual release. @@ -843,7 +815,7 @@ def _merge_pseudo_and_actual_album( return merged -def album_for_id(releaseid: str) -> Optional[beets.autotag.hooks.AlbumInfo]: +def album_for_id(releaseid: str) -> beets.autotag.hooks.AlbumInfo | None: """Fetches an album by its MusicBrainz ID and returns an AlbumInfo object or None if the album is not found. May raise a MusicBrainzAPIError. @@ -881,7 +853,7 @@ def album_for_id(releaseid: str) -> Optional[beets.autotag.hooks.AlbumInfo]: return release -def track_for_id(releaseid: str) -> Optional[beets.autotag.hooks.TrackInfo]: +def track_for_id(releaseid: str) -> beets.autotag.hooks.TrackInfo | None: """Fetches a track by its MusicBrainz ID. Returns a TrackInfo object or None if no track is found. May raise a MusicBrainzAPIError. 
""" diff --git a/beets/config_default.yaml b/beets/config_default.yaml index b28165c2c..c5cebd441 100644 --- a/beets/config_default.yaml +++ b/beets/config_default.yaml @@ -207,6 +207,7 @@ match: track_index: 1.0 track_length: 2.0 track_id: 5.0 + medium: 1.0 preferred: countries: [] media: [] diff --git a/beets/dbcore/__init__.py b/beets/dbcore/__init__.py index 06d0b3dc9..fa20eb00d 100644 --- a/beets/dbcore/__init__.py +++ b/beets/dbcore/__init__.py @@ -32,4 +32,18 @@ from .queryparse import ( ) from .types import Type -# flake8: noqa +__all__ = [ + "AndQuery", + "Database", + "FieldQuery", + "InvalidQueryError", + "MatchQuery", + "Model", + "OrQuery", + "Query", + "Results", + "Type", + "parse_sorted_query", + "query_from_strings", + "sort_from_strings", +] diff --git a/beets/dbcore/db.py b/beets/dbcore/db.py index 566c11631..64e77f814 100755 --- a/beets/dbcore/db.py +++ b/beets/dbcore/db.py @@ -24,28 +24,9 @@ import threading import time from abc import ABC from collections import defaultdict +from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence from sqlite3 import Connection -from types import TracebackType -from typing import ( - Any, - AnyStr, - Callable, - DefaultDict, - Dict, - Generator, - Generic, - Iterable, - Iterator, - List, - Mapping, - Optional, - Sequence, - Tuple, - Type, - TypeVar, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, AnyStr, Callable, Generic, TypeVar, cast from unidecode import unidecode @@ -56,6 +37,8 @@ from . import types from .query import ( AndQuery, FieldQuery, + FieldQueryType, + FieldSort, MatchQuery, NullSort, Query, @@ -63,6 +46,18 @@ from .query import ( TrueQuery, ) +if TYPE_CHECKING: + from types import TracebackType + + from .query import SQLiteType + + D = TypeVar("D", bound="Database", default=Any) +else: + D = TypeVar("D", bound="Database") + + +FlexAttrs = dict[str, str] + class DBAccessError(Exception): """The SQLite database became inaccessible. @@ -120,7 +115,7 @@ class FormattedMapping(Mapping[str, str]): def get( # type: ignore self, key: str, - default: Optional[str] = None, + default: str | None = None, ) -> str: """Similar to Mapping.get(key, default), but always formats to str.""" if default is None: @@ -158,14 +153,14 @@ class FormattedMapping(Mapping[str, str]): class LazyConvertDict: """Lazily convert types for attributes fetched from the database""" - def __init__(self, model_cls: "Model"): + def __init__(self, model_cls: Model): """Initialize the object empty""" # FIXME: Dict[str, SQLiteType] - self._data: Dict[str, Any] = {} + self._data: dict[str, Any] = {} self.model_cls = model_cls - self._converted: Dict[str, Any] = {} + self._converted: dict[str, Any] = {} - def init(self, data: Dict[str, Any]): + def init(self, data: dict[str, Any]): """Set the base data that should be lazily converted""" self._data = data @@ -195,7 +190,7 @@ class LazyConvertDict: if key in self._data: del self._data[key] - def keys(self) -> List[str]: + def keys(self) -> list[str]: """Get a list of available field names for this object.""" return list(self._converted.keys()) + list(self._data.keys()) @@ -213,14 +208,14 @@ class LazyConvertDict: for key, value in values.items(): self[key] = value - def items(self) -> Iterable[Tuple[str, Any]]: + def items(self) -> Iterable[tuple[str, Any]]: """Iterate over (key, value) pairs that this object contains. Computed fields are not included. 
""" for key in self: yield key, self[key] - def get(self, key: str, default: Optional[Any] = None): + def get(self, key: str, default: Any | None = None): """Get the value for a given key or `default` if it does not exist. """ @@ -252,7 +247,7 @@ class LazyConvertDict: # Abstract base for model classes. -class Model(ABC): +class Model(ABC, Generic[D]): """An abstract object representing an object in the database. Model objects act like dictionaries (i.e., they allow subscript access like ``obj['field']``). The same field set is available via attribute @@ -286,7 +281,7 @@ class Model(ABC): """The flex field SQLite table name. """ - _fields: Dict[str, types.Type] = {} + _fields: dict[str, types.Type] = {} """A mapping indicating available "fixed" fields on this type. The keys are field names and the values are `Type` objects. """ @@ -296,16 +291,16 @@ class Model(ABC): terms. """ - _types: Dict[str, types.Type] = {} + _types: dict[str, types.Type] = {} """Optional Types for non-fixed (i.e., flexible and computed) fields. """ - _sorts: Dict[str, Type[Sort]] = {} + _sorts: dict[str, type[FieldSort]] = {} """Optional named sort criteria. The keys are strings and the values are subclasses of `Sort`. """ - _queries: Dict[str, Type[FieldQuery]] = {} + _queries: dict[str, FieldQueryType] = {} """Named queries that use a field-like `name:value` syntax but which do not relate to any specific field. """ @@ -322,7 +317,7 @@ class Model(ABC): """ @cached_classproperty - def _relation(cls) -> type[Model]: + def _relation(cls): """The model that this model is closely related to.""" return cls @@ -348,7 +343,7 @@ class Model(ABC): return cls._relation._fields.keys() - cls.shared_db_fields @classmethod - def _getters(cls: Type["Model"]): + def _getters(cls: type[Model]): """Return a mapping from field names to getter functions.""" # We could cache this if it becomes a performance problem to # gather the getter mapping every time. @@ -363,7 +358,7 @@ class Model(ABC): # Basic operation. - def __init__(self, db: Optional[Database] = None, **values): + def __init__(self, db: D | None = None, **values): """Create a new object with an optional Database association and initial field values. """ @@ -378,10 +373,10 @@ class Model(ABC): @classmethod def _awaken( - cls: Type[AnyModel], - db: Optional[Database] = None, - fixed_values: Dict[str, Any] = {}, - flex_values: Dict[str, Any] = {}, + cls: type[AnyModel], + db: D | None = None, + fixed_values: dict[str, Any] = {}, + flex_values: dict[str, Any] = {}, ) -> AnyModel: """Create an object with values drawn from the database. @@ -409,7 +404,7 @@ class Model(ABC): if self._db: self._revision = self._db.revision - def _check_db(self, need_id: bool = True) -> Database: + def _check_db(self, need_id: bool = True) -> D: """Ensure that this object is associated with a database row: it has a reference to a database (`_db`) and an id. A ValueError exception is raised otherwise. @@ -421,7 +416,7 @@ class Model(ABC): return self._db - def copy(self) -> "Model": + def copy(self) -> Model: """Create a copy of the model object. The field values and other state is duplicated, but the new copy @@ -537,7 +532,7 @@ class Model(ABC): for key, value in values.items(): self[key] = value - def items(self) -> Iterator[Tuple[str, Any]]: + def items(self) -> Iterator[tuple[str, Any]]: """Iterate over (key, value) pairs that this object contains. Computed fields are not included. """ @@ -579,7 +574,7 @@ class Model(ABC): # Database interaction (CRUD methods). 
- def store(self, fields: Optional[Iterable[str]] = None): + def store(self, fields: Iterable[str] | None = None): """Save the object's metadata into the library database. :param fields: the fields to be stored. If not specified, all fields will be. @@ -590,7 +585,7 @@ class Model(ABC): # Build assignments for query. assignments = [] - subvars = [] + subvars: list[SQLiteType] = [] for key in fields: if key != "id" and key in self._dirty: self._dirty.remove(key) @@ -653,7 +648,7 @@ class Model(ABC): f"DELETE FROM {self._flex_table} WHERE entity_id=?", (self.id,) ) - def add(self, db: Optional["Database"] = None): + def add(self, db: D | None = None): """Add the object to the library database. This object must be associated with a database; you can provide one via the `db` parameter or use the currently associated database. @@ -692,7 +687,7 @@ class Model(ABC): def evaluate_template( self, - template: Union[str, functemplate.Template], + template: str | functemplate.Template, for_path: bool = False, ) -> str: """Evaluate a template (a string or a `Template` object) using @@ -730,16 +725,16 @@ class Model(ABC): cls, field, pattern, - query_cls: Type[FieldQuery] = MatchQuery, + query_cls: FieldQueryType = MatchQuery, ) -> FieldQuery: """Get a `FieldQuery` for this model.""" return query_cls(field, pattern, field in cls._fields) @classmethod def all_fields_query( - cls: Type["Model"], - pats: Mapping, - query_cls: Type[FieldQuery] = MatchQuery, + cls: type[Model], + pats: Mapping[str, str], + query_cls: FieldQueryType = MatchQuery, ): """Get a query that matches many fields with different patterns. @@ -764,11 +759,11 @@ class Results(Generic[AnyModel]): def __init__( self, - model_class: Type[AnyModel], - rows: List[Mapping], - db: "Database", + model_class: type[AnyModel], + rows: list[sqlite3.Row], + db: D, flex_rows, - query: Optional[Query] = None, + query: Query | None = None, sort=None, ): """Create a result set that will construct objects of type @@ -800,7 +795,7 @@ class Results(Generic[AnyModel]): # The materialized objects corresponding to rows that have been # consumed. - self._objects: List[AnyModel] = [] + self._objects: list[AnyModel] = [] def _get_objects(self) -> Iterator[AnyModel]: """Construct and generate Model objects for they query. The @@ -850,9 +845,9 @@ class Results(Generic[AnyModel]): # Objects are pre-sorted (i.e., by the database). return self._get_objects() - def _get_indexed_flex_attrs(self) -> Mapping: + def _get_indexed_flex_attrs(self) -> dict[int, FlexAttrs]: """Index flexible attributes by the entity id they belong to""" - flex_values: Dict[int, Dict[str, Any]] = {} + flex_values: dict[int, FlexAttrs] = {} for row in self.flex_rows: if row["entity_id"] not in flex_values: flex_values[row["entity_id"]] = {} @@ -861,7 +856,9 @@ class Results(Generic[AnyModel]): return flex_values - def _make_model(self, row, flex_values: Dict = {}) -> AnyModel: + def _make_model( + self, row: sqlite3.Row, flex_values: FlexAttrs = {} + ) -> AnyModel: """Create a Model object for the given row""" cols = dict(row) values = {k: v for (k, v) in cols.items() if not k[:4] == "flex"} @@ -912,7 +909,7 @@ class Results(Generic[AnyModel]): except StopIteration: raise IndexError(f"result index {n} out of range") - def get(self) -> Optional[AnyModel]: + def get(self) -> AnyModel | None: """Return the first matching object, or None if no objects match. """ @@ -933,10 +930,10 @@ class Transaction: current transaction. 
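Results above is annotated to carry sqlite3.Row rows and to materialize Model objects lazily, with get() returning the first match or None. A small sketch against an empty in-memory library; the query string is illustrative:

from beets.library import Library

lib = Library(":memory:")
results = lib.items("artist:nobody")
# get() returns the first matching Item, or None when nothing matches.
print(len(results), results.get())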
""" - def __init__(self, db: "Database"): + def __init__(self, db: Database): self.db = db - def __enter__(self) -> "Transaction": + def __enter__(self) -> Transaction: """Begin a transaction. This transaction may be created while another is active in a different thread. """ @@ -951,7 +948,7 @@ class Transaction: def __exit__( self, - exc_type: Type[Exception], + exc_type: type[Exception], exc_value: Exception, traceback: TracebackType, ): @@ -970,14 +967,16 @@ class Transaction: self._mutated = False self.db._db_lock.release() - def query(self, statement: str, subvals: Sequence = ()) -> List: + def query( + self, statement: str, subvals: Sequence[SQLiteType] = () + ) -> list[sqlite3.Row]: """Execute an SQL statement with substitution values and return a list of rows from the database. """ cursor = self.db._connection().execute(statement, subvals) return cursor.fetchall() - def mutate(self, statement: str, subvals: Sequence = ()) -> Any: + def mutate(self, statement: str, subvals: Sequence[SQLiteType] = ()) -> Any: """Execute an SQL statement with substitution values and return the row ID of the last affected row. """ @@ -1010,7 +1009,7 @@ class Database: the backend. """ - _models: Sequence[Type[Model]] = () + _models: Sequence[type[Model]] = () """The Model subclasses representing tables in this database. """ @@ -1031,9 +1030,9 @@ class Database: self.path = path self.timeout = timeout - self._connections: Dict[int, sqlite3.Connection] = {} - self._tx_stacks: DefaultDict[int, List[Transaction]] = defaultdict(list) - self._extensions: List[str] = [] + self._connections: dict[int, sqlite3.Connection] = {} + self._tx_stacks: defaultdict[int, list[Transaction]] = defaultdict(list) + self._extensions: list[str] = [] # A lock to protect the _connections and _tx_stacks maps, which # both map thread IDs to private resources. @@ -1110,7 +1109,7 @@ class Database: value = value.decode() return re.search(pattern, str(value)) is not None - def bytelower(bytestring: Optional[AnyStr]) -> Optional[AnyStr]: + def bytelower(bytestring: AnyStr | None) -> AnyStr | None: """A custom ``bytelower`` sqlite function so we can compare bytestrings in a semi case insensitive fashion. @@ -1138,7 +1137,7 @@ class Database: conn.close() @contextlib.contextmanager - def _tx_stack(self) -> Generator[List, None, None]: + def _tx_stack(self) -> Generator[list[Transaction]]: """A context manager providing access to the current thread's transaction stack. The context manager synchronizes access to the stack map. Transactions should never migrate across threads. @@ -1224,18 +1223,16 @@ class Database: UNIQUE(entity_id, key) ON CONFLICT REPLACE); CREATE INDEX IF NOT EXISTS {0}_by_entity ON {0} (entity_id); - """.format( - flex_table - ) + """.format(flex_table) ) # Querying. def _fetch( self, - model_cls: Type[AnyModel], - query: Optional[Query] = None, - sort: Optional[Sort] = None, + model_cls: type[AnyModel], + query: Query | None = None, + sort: Sort | None = None, ) -> Results[AnyModel]: """Fetch the objects of type `model_cls` matching the given query. The query may be given as a string, string sequence, a @@ -1291,9 +1288,9 @@ class Database: def _get( self, - model_cls: Type[AnyModel], + model_cls: type[AnyModel], id, - ) -> Optional[AnyModel]: + ) -> AnyModel | None: """Get a Model object by its id or None if the id does not exist. 
""" diff --git a/beets/dbcore/query.py b/beets/dbcore/query.py index f8cf7fe4c..866162c4a 100644 --- a/beets/dbcore/query.py +++ b/beets/dbcore/query.py @@ -19,31 +19,22 @@ from __future__ import annotations import re import unicodedata from abc import ABC, abstractmethod +from collections.abc import Iterator, MutableSequence, Sequence from datetime import datetime, timedelta from functools import reduce from operator import mul, or_ -from typing import ( - TYPE_CHECKING, - Any, - Collection, - Generic, - Iterator, - List, - MutableSequence, - Optional, - Pattern, - Sequence, - Set, - Tuple, - Type, - TypeVar, - Union, -) +from re import Pattern +from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union from beets import util if TYPE_CHECKING: from beets.dbcore import Model + from beets.dbcore.db import AnyModel + + P = TypeVar("P", default=Any) +else: + P = TypeVar("P") class ParsingError(ValueError): @@ -83,11 +74,11 @@ class Query(ABC): """An abstract class representing a query into the database.""" @property - def field_names(self) -> Set[str]: + def field_names(self) -> set[str]: """Return a set with field names that this query operates on.""" return set() - def clause(self) -> Tuple[Optional[str], Sequence[Any]]: + def clause(self) -> tuple[str | None, Sequence[Any]]: """Generate an SQLite expression implementing the query. Return (clause, subvals) where clause is a valid sqlite @@ -121,9 +112,9 @@ class Query(ABC): return hash(type(self)) -P = TypeVar("P") -SQLiteType = Union[str, bytes, float, int, memoryview] +SQLiteType = Union[str, bytes, float, int, memoryview, None] AnySQLiteType = TypeVar("AnySQLiteType", bound=SQLiteType) +FieldQueryType = type["FieldQuery"] class FieldQuery(Query, Generic[P]): @@ -141,7 +132,7 @@ class FieldQuery(Query, Generic[P]): ) @property - def field_names(self) -> Set[str]: + def field_names(self) -> set[str]: """Return a set with field names that this query operates on.""" return {self.field_name} @@ -150,10 +141,10 @@ class FieldQuery(Query, Generic[P]): self.pattern = pattern self.fast = fast - def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]: + def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: return self.field, () - def clause(self) -> Tuple[Optional[str], Sequence[SQLiteType]]: + def clause(self) -> tuple[str | None, Sequence[SQLiteType]]: if self.fast: return self.col_clause() else: @@ -188,7 +179,7 @@ class FieldQuery(Query, Generic[P]): class MatchQuery(FieldQuery[AnySQLiteType]): """A query that looks for exact matches in an Model field.""" - def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]: + def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: return self.field + " = ?", [self.pattern] @classmethod @@ -202,7 +193,7 @@ class NoneQuery(FieldQuery[None]): def __init__(self, field, fast: bool = True): super().__init__(field, None, fast) - def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]: + def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: return self.field + " IS NULL", () def match(self, obj: Model) -> bool: @@ -239,7 +230,7 @@ class StringFieldQuery(FieldQuery[P]): class StringQuery(StringFieldQuery[str]): """A query that matches a whole string in a specific Model field.""" - def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]: + def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: search = ( self.pattern.replace("\\", "\\\\") .replace("%", "\\%") @@ -257,7 +248,7 @@ class StringQuery(StringFieldQuery[str]): class SubstringQuery(StringFieldQuery[str]): """A query that matches a 
substring in a specific Model field.""" - def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]: + def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: pattern = ( self.pattern.replace("\\", "\\\\") .replace("%", "\\%") @@ -292,7 +283,7 @@ class RegexpQuery(StringFieldQuery[Pattern[str]]): super().__init__(field_name, pattern_re, fast) - def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]: + def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: return f" regexp({self.field}, ?)", [self.pattern.pattern] @staticmethod @@ -303,7 +294,7 @@ class RegexpQuery(StringFieldQuery[Pattern[str]]): return unicodedata.normalize("NFC", s) @classmethod - def string_match(cls, pattern: Pattern, value: str) -> bool: + def string_match(cls, pattern: Pattern[str], value: str) -> bool: return pattern.search(cls._normalize(value)) is not None @@ -333,7 +324,7 @@ class BytesQuery(FieldQuery[bytes]): `MatchQuery` when matching on BLOB values. """ - def __init__(self, field_name: str, pattern: Union[bytes, str, memoryview]): + def __init__(self, field_name: str, pattern: bytes | str | memoryview): # Use a buffer/memoryview representation of the pattern for SQLite # matching. This instructs SQLite to treat the blob as binary # rather than encoded Unicode. @@ -351,7 +342,7 @@ class BytesQuery(FieldQuery[bytes]): super().__init__(field_name, bytes_pattern) - def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]: + def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: return self.field + " = ?", [self.buf_pattern] @classmethod @@ -368,7 +359,7 @@ class NumericQuery(FieldQuery[str]): a float. """ - def _convert(self, s: str) -> Union[float, int, None]: + def _convert(self, s: str) -> float | int | None: """Convert a string to a numeric type (float or int). Return None if `s` is empty. @@ -416,7 +407,7 @@ class NumericQuery(FieldQuery[str]): return False return True - def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]: + def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: if self.point is not None: return self.field + "=?", (self.point,) else: @@ -444,7 +435,7 @@ class InQuery(Generic[AnySQLiteType], FieldQuery[Sequence[AnySQLiteType]]): def subvals(self) -> Sequence[SQLiteType]: return self.pattern - def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]: + def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: placeholders = ", ".join(["?"] * len(self.subvals)) return f"{self.field_name} IN ({placeholders})", self.subvals @@ -461,11 +452,11 @@ class CollectionQuery(Query): """ @property - def field_names(self) -> Set[str]: + def field_names(self) -> set[str]: """Return a set with field names that this query operates on.""" return reduce(or_, (sq.field_names for sq in self.subqueries)) - def __init__(self, subqueries: Sequence = ()): + def __init__(self, subqueries: Sequence[Query] = ()): self.subqueries = subqueries # Act like a sequence. @@ -476,7 +467,7 @@ class CollectionQuery(Query): def __getitem__(self, key): return self.subqueries[key] - def __iter__(self) -> Iterator: + def __iter__(self) -> Iterator[Query]: return iter(self.subqueries) def __contains__(self, subq) -> bool: @@ -485,12 +476,12 @@ class CollectionQuery(Query): def clause_with_joiner( self, joiner: str, - ) -> Tuple[Optional[str], Sequence[SQLiteType]]: + ) -> tuple[str | None, Sequence[SQLiteType]]: """Return a clause created by joining together the clauses of all subqueries with the string joiner (padded by spaces). 
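NumericQuery above keeps its point and range forms while the annotations move to the built-in generics. A quick sketch of both forms; field and values are illustrative:

from beets.dbcore.query import NumericQuery

# A bare number matches a point value; "lo..hi" produces a range clause.
print(NumericQuery("year", "1994").clause())
print(NumericQuery("year", "1990..1999").clause())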
""" clause_parts = [] - subvals = [] + subvals: list[SQLiteType] = [] for subq in self.subqueries: subq_clause, subq_subvals = subq.clause() if not subq_clause: @@ -521,11 +512,11 @@ class AnyFieldQuery(CollectionQuery): """ @property - def field_names(self) -> Set[str]: + def field_names(self) -> set[str]: """Return a set with field names that this query operates on.""" return set(self.fields) - def __init__(self, pattern, fields, cls: Type[FieldQuery]): + def __init__(self, pattern, fields, cls: FieldQueryType): self.pattern = pattern self.fields = fields self.query_class = cls @@ -536,7 +527,7 @@ class AnyFieldQuery(CollectionQuery): # TYPING ERROR super().__init__(subqueries) - def clause(self) -> Tuple[Optional[str], Sequence[SQLiteType]]: + def clause(self) -> tuple[str | None, Sequence[SQLiteType]]: return self.clause_with_joiner("or") def match(self, obj: Model) -> bool: @@ -563,7 +554,7 @@ class MutableCollectionQuery(CollectionQuery): query is initialized. """ - subqueries: MutableSequence + subqueries: MutableSequence[Query] def __setitem__(self, key, value): self.subqueries[key] = value @@ -575,7 +566,7 @@ class MutableCollectionQuery(CollectionQuery): class AndQuery(MutableCollectionQuery): """A conjunction of a list of other queries.""" - def clause(self) -> Tuple[Optional[str], Sequence[SQLiteType]]: + def clause(self) -> tuple[str | None, Sequence[SQLiteType]]: return self.clause_with_joiner("and") def match(self, obj: Model) -> bool: @@ -585,7 +576,7 @@ class AndQuery(MutableCollectionQuery): class OrQuery(MutableCollectionQuery): """A conjunction of a list of other queries.""" - def clause(self) -> Tuple[Optional[str], Sequence[SQLiteType]]: + def clause(self) -> tuple[str | None, Sequence[SQLiteType]]: return self.clause_with_joiner("or") def match(self, obj: Model) -> bool: @@ -598,14 +589,14 @@ class NotQuery(Query): """ @property - def field_names(self) -> Set[str]: + def field_names(self) -> set[str]: """Return a set with field names that this query operates on.""" return self.subquery.field_names def __init__(self, subquery): self.subquery = subquery - def clause(self) -> Tuple[Optional[str], Sequence[SQLiteType]]: + def clause(self) -> tuple[str | None, Sequence[SQLiteType]]: clause, subvals = self.subquery.clause() if clause: return f"not ({clause})", subvals @@ -630,7 +621,7 @@ class NotQuery(Query): class TrueQuery(Query): """A query that always matches.""" - def clause(self) -> Tuple[str, Sequence[SQLiteType]]: + def clause(self) -> tuple[str, Sequence[SQLiteType]]: return "1", () def match(self, obj: Model) -> bool: @@ -640,7 +631,7 @@ class TrueQuery(Query): class FalseQuery(Query): """A query that never matches.""" - def clause(self) -> Tuple[str, Sequence[SQLiteType]]: + def clause(self) -> tuple[str, Sequence[SQLiteType]]: return "0", () def match(self, obj: Model) -> bool: @@ -650,7 +641,7 @@ class FalseQuery(Query): # Time/date queries. -def _parse_periods(pattern: str) -> Tuple[Optional[Period], Optional[Period]]: +def _parse_periods(pattern: str) -> tuple[Period | None, Period | None]: """Parse a string containing two dates separated by two dots (..). Return a pair of `Period` objects. """ @@ -696,7 +687,7 @@ class Period: self.precision = precision @classmethod - def parse(cls: Type["Period"], string: str) -> Optional["Period"]: + def parse(cls: type[Period], string: str) -> Period | None: """Parse a date and return a `Period` object or `None` if the string is empty, or raise an InvalidQueryArgumentValueError if the string cannot be parsed to a date. 
@@ -715,7 +706,7 @@ class Period: def find_date_and_format( string: str, - ) -> Union[Tuple[None, None], Tuple[datetime, int]]: + ) -> tuple[None, None] | tuple[datetime, int]: for ord, format in enumerate(cls.date_formats): for format_option in format: try: @@ -729,7 +720,7 @@ class Period: if not string: return None - date: Optional[datetime] + date: datetime | None # Check for a relative date. match_dq = re.match(cls.relative_re, string) @@ -789,7 +780,7 @@ class DateInterval: A right endpoint of None means towards infinity. """ - def __init__(self, start: Optional[datetime], end: Optional[datetime]): + def __init__(self, start: datetime | None, end: datetime | None): if start is not None and end is not None and not start < end: raise ValueError( "start date {} is not before end date {}".format(start, end) @@ -800,8 +791,8 @@ class DateInterval: @classmethod def from_periods( cls, - start: Optional[Period], - end: Optional[Period], + start: Period | None, + end: Period | None, ) -> DateInterval: """Create an interval with two Periods as the endpoints.""" end_date = end.open_right_endpoint() if end is not None else None @@ -843,7 +834,7 @@ class DateQuery(FieldQuery[str]): _clause_tmpl = "{0} {1} ?" - def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]: + def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: clause_parts = [] subvals = [] @@ -875,7 +866,7 @@ class DurationQuery(NumericQuery): or M:SS time interval. """ - def _convert(self, s: str) -> Optional[float]: + def _convert(self, s: str) -> float | None: """Convert a M:SS or numeric string to a float. Return None if `s` is empty. @@ -902,13 +893,13 @@ class Sort: the database. """ - def order_clause(self) -> Optional[str]: + def order_clause(self) -> str | None: """Generates a SQL fragment to be used in a ORDER BY clause, or None if no fragment is used (i.e., this is a slow sort). """ return None - def sort(self, items: List) -> List: + def sort(self, items: list[AnyModel]) -> list[AnyModel]: """Sort the list of objects and return a list.""" return sorted(items) @@ -931,7 +922,7 @@ class Sort: class MultipleSort(Sort): """Sort that encapsulates multiple sub-sorts.""" - def __init__(self, sorts: Optional[List[Sort]] = None): + def __init__(self, sorts: list[Sort] | None = None): self.sorts = sorts or [] def add_sort(self, sort: Sort): @@ -994,7 +985,7 @@ class FieldSort(Sort): def __init__( self, - field, + field: str, ascending: bool = True, case_insensitive: bool = True, ): @@ -1002,13 +993,20 @@ class FieldSort(Sort): self.ascending = ascending self.case_insensitive = case_insensitive - def sort(self, objs: Collection): + def sort(self, objs: list[AnyModel]) -> list[AnyModel]: # TODO: Conversion and null-detection here. In Python 3, # comparisons with None fail. We should also support flexible # attributes with different types without falling over. def key(obj: Model) -> Any: - field_val = obj.get(self.field, "") + field_val = obj.get(self.field, None) + if field_val is None: + if _type := obj._types.get(self.field): + # If the field is typed, use its null value. + field_val = obj._types[self.field].null + else: + # If not, fall back to using an empty string. 
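FieldSort.sort() above now substitutes a field's typed null, or an empty string for untyped flexible fields, when an object lacks the field, instead of comparing against None. A sketch with a purely illustrative flexible attribute:

from beets.dbcore.query import SlowFieldSort
from beets.library import Item

a = Item(title="a")
a.mood = "calm"          # flexible string attribute, illustrative
b = Item(title="b")      # 'mood' is not set at all
# The missing value sorts as "" rather than raising on a None comparison.
print([i.title for i in SlowFieldSort("mood").sort([a, b])])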
+ field_val = "" if self.case_insensitive and isinstance(field_val, str): field_val = field_val.lower() return field_val @@ -1040,8 +1038,8 @@ class FixedFieldSort(FieldSort): if self.case_insensitive: field = ( "(CASE " - 'WHEN TYPEOF({0})="text" THEN LOWER({0}) ' - 'WHEN TYPEOF({0})="blob" THEN LOWER({0}) ' + "WHEN TYPEOF({0})='text' THEN LOWER({0}) " + "WHEN TYPEOF({0})='blob' THEN LOWER({0}) " "ELSE {0} END)".format(self.field) ) else: @@ -1061,7 +1059,7 @@ class SlowFieldSort(FieldSort): class NullSort(Sort): """No sorting. Leave results unsorted.""" - def sort(self, items: List) -> List: + def sort(self, items: list[AnyModel]) -> list[AnyModel]: return items def __nonzero__(self) -> bool: @@ -1075,3 +1073,23 @@ class NullSort(Sort): def __hash__(self) -> int: return 0 + + +class SmartArtistSort(FieldSort): + """Sort by artist (either album artist or track artist), + prioritizing the sort field over the raw field. + """ + + def order_clause(self): + order = "ASC" if self.ascending else "DESC" + collate = "COLLATE NOCASE" if self.case_insensitive else "" + field = self.field + + return f"COALESCE(NULLIF({field}_sort, ''), {field}) {collate} {order}" + + def sort(self, objs: list[AnyModel]) -> list[AnyModel]: + def key(o): + val = o[f"{self.field}_sort"] or o[self.field] + return val.lower() if self.case_insensitive else val + + return sorted(objs, key=key, reverse=not self.ascending) diff --git a/beets/dbcore/queryparse.py b/beets/dbcore/queryparse.py index b7558038f..289632668 100644 --- a/beets/dbcore/queryparse.py +++ b/beets/dbcore/queryparse.py @@ -14,12 +14,20 @@ """Parsing of strings into DBCore queries.""" +from __future__ import annotations + import itertools import re -from typing import Collection, Dict, List, Optional, Sequence, Tuple, Type +from typing import TYPE_CHECKING from . import Model, query -from .query import Sort + +if TYPE_CHECKING: + from collections.abc import Collection, Sequence + + from .query import FieldQueryType, Sort + + Prefixes = dict[str, FieldQueryType] PARSE_QUERY_PART_REGEX = re.compile( # Non-capturing optional segment for the keyword. @@ -35,10 +43,10 @@ PARSE_QUERY_PART_REGEX = re.compile( def parse_query_part( part: str, - query_classes: Dict[str, Type[query.FieldQuery]] = {}, - prefixes: Dict = {}, - default_class: Type[query.SubstringQuery] = query.SubstringQuery, -) -> Tuple[Optional[str], str, Type[query.FieldQuery], bool]: + query_classes: dict[str, FieldQueryType] = {}, + prefixes: Prefixes = {}, + default_class: type[query.SubstringQuery] = query.SubstringQuery, +) -> tuple[str | None, str, FieldQueryType, bool]: """Parse a single *query part*, which is a chunk of a complete query string representing a single criterion. @@ -104,8 +112,8 @@ def parse_query_part( def construct_query_part( - model_cls: Type[Model], - prefixes: Dict, + model_cls: type[Model], + prefixes: Prefixes, query_part: str, ) -> query.Query: """Parse a *query part* string and return a :class:`Query` object. @@ -127,7 +135,7 @@ def construct_query_part( # Use `model_cls` to build up a map from field (or query) names to # `Query` classes. - query_classes: Dict[str, Type[query.FieldQuery]] = {} + query_classes: dict[str, FieldQueryType] = {} for k, t in itertools.chain( model_cls._fields.items(), model_cls._types.items() ): @@ -152,14 +160,15 @@ def construct_query_part( # Field queries get constructed according to the name of the field # they are querying. 
else: - key = key.lower() - if key in model_cls.shared_db_fields: + field = table = key.lower() + if field in model_cls.shared_db_fields: # This field exists in both tables, so SQLite will encounter # an OperationalError if we try to query it in a join. # Using an explicit table name resolves this. - key = f"{model_cls._table}.{key}" + table = f"{model_cls._table}.{field}" - out_query = query_class(key, pattern, key in model_cls.all_db_fields) + field_in_db = field in model_cls.all_db_fields + out_query = query_class(table, pattern, field_in_db) # Apply negation. if negate: @@ -170,9 +179,9 @@ def construct_query_part( # TYPING ERROR def query_from_strings( - query_cls: Type[query.CollectionQuery], - model_cls: Type[Model], - prefixes: Dict, + query_cls: type[query.CollectionQuery], + model_cls: type[Model], + prefixes: Prefixes, query_parts: Collection[str], ) -> query.Query: """Creates a collection query of type `query_cls` from a list of @@ -188,7 +197,7 @@ def query_from_strings( def construct_sort_part( - model_cls: Type[Model], + model_cls: type[Model], part: str, case_insensitive: bool = True, ) -> Sort: @@ -206,20 +215,20 @@ def construct_sort_part( assert direction in ("+", "-"), "part must end with + or -" is_ascending = direction == "+" - if field in model_cls._sorts: - sort = model_cls._sorts[field]( - model_cls, is_ascending, case_insensitive - ) + if sort_cls := model_cls._sorts.get(field): + if isinstance(sort_cls, query.SmartArtistSort): + field = "albumartist" if model_cls.__name__ == "Album" else "artist" elif field in model_cls._fields: - sort = query.FixedFieldSort(field, is_ascending, case_insensitive) + sort_cls = query.FixedFieldSort else: # Flexible or computed. - sort = query.SlowFieldSort(field, is_ascending, case_insensitive) - return sort + sort_cls = query.SlowFieldSort + + return sort_cls(field, is_ascending, case_insensitive) def sort_from_strings( - model_cls: Type[Model], + model_cls: type[Model], sort_parts: Sequence[str], case_insensitive: bool = True, ) -> Sort: @@ -238,11 +247,11 @@ def sort_from_strings( def parse_sorted_query( - model_cls: Type[Model], - parts: List[str], - prefixes: Dict = {}, + model_cls: type[Model], + parts: list[str], + prefixes: Prefixes = {}, case_insensitive: bool = True, -) -> Tuple[query.Query, Sort]: +) -> tuple[query.Query, Sort]: """Given a list of strings, create the `Query` and `Sort` that they represent. """ diff --git a/beets/dbcore/types.py b/beets/dbcore/types.py index 432db2b72..2a64b2ed9 100644 --- a/beets/dbcore/types.py +++ b/beets/dbcore/types.py @@ -12,15 +12,23 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Representation of type information for DBCore model fields. -""" +"""Representation of type information for DBCore model fields.""" + +from __future__ import annotations + import typing from abc import ABC -from typing import Any, Generic, List, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast from beets.util import str2bool -from .query import BooleanQuery, FieldQuery, NumericQuery, SubstringQuery +from .query import ( + BooleanQuery, + FieldQueryType, + NumericQuery, + SQLiteType, + SubstringQuery, +) class ModelType(typing.Protocol): @@ -35,8 +43,12 @@ class ModelType(typing.Protocol): # Generic type variables, used for the value type T and null type N (if # nullable, else T and N are set to the same type for the concrete subclasses # of Type). 
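construct_sort_part above now resolves a sort class (SmartArtistSort, FixedFieldSort or SlowFieldSort) and instantiates it uniformly, and parse_sorted_query splits a CLI-style token list into a Query plus a Sort. A sketch of that entry point; the tokens are illustrative:

from beets.dbcore.queryparse import parse_sorted_query
from beets.library import Item

# "album:ok" stays a query part; trailing "+"/"-" tokens become sort parts.
q, s = parse_sorted_query(Item, ["album:ok", "artist+", "year-"])
print(type(q).__name__, type(s).__name__)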
-N = TypeVar("N") -T = TypeVar("T", bound=ModelType) +if TYPE_CHECKING: + N = TypeVar("N", default=Any) + T = TypeVar("T", bound=ModelType, default=Any) +else: + N = TypeVar("N") + T = TypeVar("T", bound=ModelType) class Type(ABC, Generic[T, N]): @@ -49,11 +61,11 @@ class Type(ABC, Generic[T, N]): """The SQLite column type for the value. """ - query: typing.Type[FieldQuery] = SubstringQuery + query: FieldQueryType = SubstringQuery """The `Query` subclass to be used when querying the field. """ - model_type: typing.Type[T] + model_type: type[T] """The Python type that is used to represent the value in the model. The model is guaranteed to return a value of this type if the field @@ -69,7 +81,7 @@ class Type(ABC, Generic[T, N]): # have a field null_type similar to `model_type` and use that here. return cast(N, self.model_type()) - def format(self, value: Union[N, T]) -> str: + def format(self, value: N | T) -> str: """Given a value of this type, produce a Unicode string representing the value. This is used in template evaluation. """ @@ -83,7 +95,7 @@ class Type(ABC, Generic[T, N]): else: return str(value) - def parse(self, string: str) -> Union[T, N]: + def parse(self, string: str) -> T | N: """Parse a (possibly human-written) string and return the indicated value of this type. """ @@ -92,7 +104,7 @@ class Type(ABC, Generic[T, N]): except ValueError: return self.null - def normalize(self, value: Any) -> Union[T, N]: + def normalize(self, value: Any) -> T | N: """Given a value that will be assigned into a field of this type, normalize the value to have the appropriate type. This base implementation only reinterprets `None`. @@ -105,10 +117,7 @@ class Type(ABC, Generic[T, N]): # `self.model_type(value)` return cast(T, value) - def from_sql( - self, - sql_value: Union[None, int, float, str, bytes], - ) -> Union[T, N]: + def from_sql(self, sql_value: SQLiteType) -> T | N: """Receives the value stored in the SQL backend and return the value to be stored in the model. @@ -129,7 +138,7 @@ class Type(ABC, Generic[T, N]): else: return self.normalize(sql_value) - def to_sql(self, model_value: Any) -> Union[None, int, float, str, bytes]: + def to_sql(self, model_value: Any) -> SQLiteType: """Convert a value as stored in the model object to a value used by the database adapter. 
""" @@ -154,7 +163,7 @@ class BaseInteger(Type[int, N]): query = NumericQuery model_type = int - def normalize(self, value: Any) -> Union[int, N]: + def normalize(self, value: Any) -> int | N: try: return self.model_type(round(float(value))) except ValueError: @@ -183,7 +192,7 @@ class BasePaddedInt(BaseInteger[N]): def __init__(self, digits: int): self.digits = digits - def format(self, value: Union[int, N]) -> str: + def format(self, value: int | N) -> str: return "{0:0{1}d}".format(value or 0, self.digits) @@ -232,13 +241,13 @@ class BaseFloat(Type[float, N]): """ sql = "REAL" - query: typing.Type[FieldQuery[Any]] = NumericQuery + query: FieldQueryType = NumericQuery model_type = float def __init__(self, digits: int = 1): self.digits = digits - def format(self, value: Union[float, N]) -> str: + def format(self, value: float | N) -> str: return "{0:.{1}f}".format(value or 0, self.digits) @@ -264,7 +273,7 @@ class BaseString(Type[T, N]): sql = "TEXT" query = SubstringQuery - def normalize(self, value: Any) -> Union[T, N]: + def normalize(self, value: Any) -> T | N: if value is None: return self.null else: @@ -277,7 +286,7 @@ class String(BaseString[str, Any]): model_type = str -class DelimitedString(BaseString[List[str], List[str]]): +class DelimitedString(BaseString[list[str], list[str]]): """A list of Unicode strings, represented in-database by a single string containing delimiter-separated values. """ @@ -287,7 +296,7 @@ class DelimitedString(BaseString[List[str], List[str]]): def __init__(self, delimiter: str): self.delimiter = delimiter - def format(self, value: List[str]): + def format(self, value: list[str]): return self.delimiter.join(value) def parse(self, string: str): @@ -295,7 +304,7 @@ class DelimitedString(BaseString[List[str], List[str]]): return [] return string.split(self.delimiter) - def to_sql(self, model_value: List[str]): + def to_sql(self, model_value: list[str]): return self.delimiter.join(model_value) diff --git a/beets/importer.py b/beets/importer.py index f6517b515..ab2382c9f 100644 --- a/beets/importer.py +++ b/beets/importer.py @@ -60,8 +60,7 @@ HISTORY_KEY = "taghistory" # def extend_reimport_fresh_fields_item(): # importer.REIMPORT_FRESH_FIELDS_ITEM.extend(['tidal_track_popularity'] # ) -REIMPORT_FRESH_FIELDS_ALBUM = ["data_source"] -REIMPORT_FRESH_FIELDS_ITEM = [ +REIMPORT_FRESH_FIELDS_ALBUM = [ "data_source", "bandcamp_album_id", "spotify_album_id", @@ -69,12 +68,13 @@ REIMPORT_FRESH_FIELDS_ITEM = [ "beatport_album_id", "tidal_album_id", ] +REIMPORT_FRESH_FIELDS_ITEM = list(REIMPORT_FRESH_FIELDS_ALBUM) # Global logger. log = logging.getLogger("beets") -class ImportAbort(Exception): +class ImportAbortError(Exception): """Raised when the user aborts the tagging operation.""" pass @@ -360,7 +360,7 @@ class ImportSession: pl.run_parallel(QUEUE_SIZE) else: pl.run_sequential() - except ImportAbort: + except ImportAbortError: # User aborted operation. Silently stop. pass @@ -605,7 +605,7 @@ class ImportTask(BaseImportTask): """ items = self.imported_items() for field, view in config["import"]["set_fields"].items(): - value = view.get() + value = str(view.get()) log.debug( "Set field {1}={2} for {0}", displayable_path(self.paths), @@ -627,8 +627,7 @@ class ImportTask(BaseImportTask): self.save_progress() if session.config["incremental"] and not ( # Should we skip recording to incremental list? 
- self.skip - and session.config["incremental_skip_later"] + self.skip and session.config["incremental_skip_later"] ): self.save_history() @@ -815,9 +814,16 @@ class ImportTask(BaseImportTask): with lib.transaction(): self.record_replaced(lib) self.remove_replaced(lib) + self.album = lib.add_album(self.imported_items()) - if "data_source" in self.imported_items()[0]: - self.album.data_source = self.imported_items()[0].data_source + if self.choice_flag == action.APPLY: + # Copy album flexible fields to the DB + # TODO: change the flow so we create the `Album` object earlier, + # and we can move this into `self.apply_metadata`, just like + # is done for tracks. + autotag.apply_album_metadata(self.match.info, self.album) + self.album.store() + self.reimport_metadata(lib) def record_replaced(self, lib): @@ -940,7 +946,7 @@ class ImportTask(BaseImportTask): dup_item.remove() log.debug( "{0} of {1} items replaced", - sum(bool(l) for l in self.replaced_items.values()), + sum(bool(v) for v in self.replaced_items.values()), len(self.imported_items()), ) @@ -1056,7 +1062,7 @@ class SingletonImportTask(ImportTask): values, for the singleton item. """ for field, view in config["import"]["set_fields"].items(): - value = view.get() + value = str(view.get()) log.debug( "Set field {1}={2} for {0}", displayable_path(self.paths), diff --git a/beets/library.py b/beets/library.py index 6d0ee613b..2430f7125 100644 --- a/beets/library.py +++ b/beets/library.py @@ -12,8 +12,8 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""The core data store and collection logic for beets. -""" +"""The core data store and collection logic for beets.""" + from __future__ import annotations import os @@ -24,7 +24,9 @@ import sys import time import unicodedata from functools import cached_property +from pathlib import Path +import platformdirs from mediafile import MediaFile, UnreadableFileError import beets @@ -293,50 +295,6 @@ class DurationType(types.Float): return self.null -# Library-specific sort types. - - -class SmartArtistSort(dbcore.query.Sort): - """Sort by artist (either album artist or track artist), - prioritizing the sort field over the raw field. - """ - - def __init__(self, model_cls, ascending=True, case_insensitive=True): - self.album = model_cls is Album - self.ascending = ascending - self.case_insensitive = case_insensitive - - def order_clause(self): - order = "ASC" if self.ascending else "DESC" - field = "albumartist" if self.album else "artist" - collate = "COLLATE NOCASE" if self.case_insensitive else "" - return ( - "(CASE {0}_sort WHEN NULL THEN {0} " - 'WHEN "" THEN {0} ' - "ELSE {0}_sort END) {1} {2}" - ).format(field, collate, order) - - def sort(self, objs): - if self.album: - - def field(a): - return a.albumartist_sort or a.albumartist - - else: - - def field(i): - return i.artist_sort or i.artist - - if self.case_insensitive: - - def key(x): - return field(x).lower() - - else: - key = field - return sorted(objs, key=key, reverse=not self.ascending) - - # Special path format key. PF_KEY_DEFAULT = "default" @@ -382,7 +340,7 @@ class WriteError(FileOperationError): # Item and Album model classes. -class LibModel(dbcore.Model): +class LibModel(dbcore.Model["Library"]): """Shared concrete functionality for Items and Albums.""" # Config key that specifies how an instance should be formatted. 
@@ -633,7 +591,7 @@ class Item(LibModel): _formatter = FormattedItemMapping - _sorts = {"artist": SmartArtistSort} + _sorts = {"artist": dbcore.query.SmartArtistSort} _queries = {"singleton": SingletonQuery} @@ -658,6 +616,11 @@ class Item(LibModel): f"ON {cls._table}.album_id = {cls._relation._table}.id" ) + @property + def filepath(self) -> Path | None: + """The path to the item's file as pathlib.Path.""" + return Path(os.fsdecode(self.path)) if self.path else self.path + @property def _cached_album(self): """The Album object that this item belongs to, if any, or @@ -1074,10 +1037,10 @@ class Item(LibModel): instead of encoded as a bytestring. basedir can override the library's base directory for the destination. """ - self._check_db() + db = self._check_db() platform = platform or sys.platform - basedir = basedir or self._db.directory - path_formats = path_formats or self._db.path_formats + basedir = basedir or db.directory + path_formats = path_formats or db.path_formats if replacements is None: replacements = self._db.replacements @@ -1120,7 +1083,7 @@ class Item(LibModel): maxlen = beets.config["max_filename_length"].get(int) if not maxlen: # When zero, try to determine from filesystem. - maxlen = util.max_filename_length(self._db.directory) + maxlen = util.max_filename_length(db.directory) subpath, fellback = util.legalize_path( subpath, @@ -1208,8 +1171,8 @@ class Album(LibModel): } _sorts = { - "albumartist": SmartArtistSort, - "artist": SmartArtistSort, + "albumartist": dbcore.query.SmartArtistSort, + "artist": dbcore.query.SmartArtistSort, } # List of keys that are set on an album's items. @@ -1595,18 +1558,20 @@ class Library(dbcore.Database): def __init__( self, path="library.blb", - directory="~/Music", + directory: str | None = None, path_formats=((PF_KEY_DEFAULT, "$artist/$album/$track $title"),), replacements=None, ): timeout = beets.config["timeout"].as_number() super().__init__(path, timeout=timeout) - self.directory = bytestring_path(normpath(directory)) + self.directory = normpath(directory or platformdirs.user_music_path()) + self.path_formats = path_formats self.replacements = replacements - self._memotable = {} # Used for template substitution performance. + # Used for template substitution performance. + self._memotable: dict[tuple[str, ...], str] = {} # Adding objects to the database. @@ -1741,6 +1706,11 @@ class DefaultTemplateFunctions: _prefix = "tmpl_" + @cached_classproperty + def _func_names(cls) -> list[str]: + """Names of tmpl_* functions in this class.""" + return [s for s in dir(cls) if s.startswith(cls._prefix)] + def __init__(self, item=None, lib=None): """Parametrize the functions. @@ -1772,6 +1742,11 @@ class DefaultTemplateFunctions: """Convert a string to upper case.""" return s.upper() + @staticmethod + def tmpl_capitalize(s): + """Converts to a capitalized string.""" + return s.capitalize() + @staticmethod def tmpl_title(s): """Convert a string to title case.""" @@ -2038,11 +2013,3 @@ class DefaultTemplateFunctions: return trueval if trueval else self.item.formatted().get(field) else: return falseval - - -# Get the name of tmpl_* functions in the above class. 
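The new Item.filepath property above decodes the raw bytestring path into a pathlib.Path and leaves a falsy path untouched. A small sketch with an illustrative path:

from pathlib import Path

from beets.library import Item

i = Item(path=b"/music/radiohead/ok computer/01 airbag.flac")
assert isinstance(i.filepath, Path)
print(i.filepath.name)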
-DefaultTemplateFunctions._func_names = [ - s - for s in dir(DefaultTemplateFunctions) - if s.startswith(DefaultTemplateFunctions._prefix) -] diff --git a/beets/logging.py b/beets/logging.py index faa93d59d..fd8b1962f 100644 --- a/beets/logging.py +++ b/beets/logging.py @@ -20,10 +20,34 @@ use {}-style formatting and can interpolate keywords arguments to the logging calls (`debug`, `info`, etc). """ - -import logging import threading from copy import copy +from logging import ( + DEBUG, + INFO, + NOTSET, + WARNING, + FileHandler, + Filter, + Handler, + Logger, + NullHandler, + StreamHandler, +) + +__all__ = [ + "DEBUG", + "INFO", + "NOTSET", + "WARNING", + "FileHandler", + "Filter", + "Handler", + "Logger", + "NullHandler", + "StreamHandler", + "getLogger", +] def logsafe(val): @@ -46,7 +70,7 @@ def logsafe(val): return val -class StrFormatLogger(logging.Logger): +class StrFormatLogger(Logger): """A version of `Logger` that uses `str.format`-style formatting instead of %-style formatting and supports keyword arguments. @@ -96,12 +120,12 @@ class StrFormatLogger(logging.Logger): ) -class ThreadLocalLevelLogger(logging.Logger): +class ThreadLocalLevelLogger(Logger): """A version of `Logger` whose level is thread-local instead of shared.""" - def __init__(self, name, level=logging.NOTSET): + def __init__(self, name, level=NOTSET): self._thread_level = threading.local() - self.default_level = logging.NOTSET + self.default_level = NOTSET super().__init__(name, level) @property @@ -128,17 +152,13 @@ class BeetsLogger(ThreadLocalLevelLogger, StrFormatLogger): pass -my_manager = copy(logging.Logger.manager) +my_manager = copy(Logger.manager) my_manager.loggerClass = BeetsLogger -# Act like the stdlib logging module by re-exporting its namespace. -from logging import * # noqa - - # Override the `getLogger` to use our machinery. def getLogger(name=None): # noqa if name: return my_manager.getLogger(name) else: - return logging.Logger.root + return Logger.root diff --git a/beets/plugins.py b/beets/plugins.py index 35995c341..299c41815 100644 --- a/beets/plugins.py +++ b/beets/plugins.py @@ -14,7 +14,6 @@ """Support for beets plugins.""" - import abc import inspect import re @@ -36,7 +35,7 @@ LASTFM_KEY = "2dc3914abf35f0d9c92d97d8f8e42b43" log = logging.getLogger("beets") -class PluginConflictException(Exception): +class PluginConflictError(Exception): """Indicates that the services provided by one plugin conflict with those of another. @@ -343,7 +342,7 @@ def types(model_cls): plugin_types = getattr(plugin, attr_name, {}) for field in plugin_types: if field in types and plugin_types[field] != types[field]: - raise PluginConflictException( + raise PluginConflictError( "Plugin {} defines flexible field {} " "which has already been defined with " "another type.".format(plugin.name, field) @@ -447,13 +446,13 @@ def import_stages(): def _check_conflicts_and_merge(plugin, plugin_funcs, funcs): """Check the provided template functions for conflicts and merge into funcs. - Raises a `PluginConflictException` if a plugin defines template functions + Raises a `PluginConflictError` if a plugin defines template functions for fields that another plugin has already defined template functions for. """ if plugin_funcs: if not plugin_funcs.keys().isdisjoint(funcs.keys()): conflicted_fields = ", ".join(plugin_funcs.keys() & funcs.keys()) - raise PluginConflictException( + raise PluginConflictError( f"Plugin {plugin.name} defines template functions for " f"{conflicted_fields} that conflict with another plugin." 
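beets.logging above drops the star re-export in favour of an explicit name list while keeping its {}-style, keyword-aware loggers. A usage sketch:

from beets import logging

log = logging.getLogger("beets.example")    # a BeetsLogger instance
log.setLevel(logging.DEBUG)
log.addHandler(logging.StreamHandler())
# StrFormatLogger interpolates str.format placeholders and keyword args.
log.debug("processed {path} ({n} items)", path="/tmp/x", n=3)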
) @@ -519,7 +518,7 @@ def feat_tokens(for_artist=True): feat_words = ["ft", "featuring", "feat", "feat.", "ft."] if for_artist: feat_words += ["with", "vs", "and", "con", "&"] - return r"(?<=\s)(?:{})(?=\s)".format( + return r"(?<=[\s(\[])(?:{})(?=\s)".format( "|".join(re.escape(x) for x in feat_words) ) diff --git a/beets/test/_common.py b/beets/test/_common.py index 746fa1a56..757d461bd 100644 --- a/beets/test/_common.py +++ b/beets/test/_common.py @@ -15,22 +15,18 @@ """Some common functionality for beets' test cases.""" import os -import shutil import sys -import tempfile -import time import unittest from contextlib import contextmanager -import beets # noqa: E402 -import beets.library # noqa: E402 +import beets +import beets.library # Make sure the development versions of the plugins are used -import beetsplug # noqa: E402 -from beets import util # noqa: E402 -from beets import importer, logging # noqa: E402 -from beets.ui import commands # noqa: E402 -from beets.util import bytestring_path, syspath # noqa: E402 +import beetsplug +from beets import importer, logging, util +from beets.ui import commands +from beets.util import syspath beetsplug.__path__ = [ os.path.abspath( @@ -62,24 +58,12 @@ log = logging.getLogger("beets") log.propagate = True log.setLevel(logging.DEBUG) -# Dummy item creation. -_item_ident = 0 - # OS feature test. HAVE_SYMLINK = sys.platform != "win32" HAVE_HARDLINK = sys.platform != "win32" -try: - import reflink - - HAVE_REFLINK = reflink.supported_at(tempfile.gettempdir()) -except ImportError: - HAVE_REFLINK = False - def item(lib=None): - global _item_ident - _item_ident += 1 i = beets.library.Item( title="the title", artist="the artist", @@ -104,7 +88,6 @@ def item(lib=None): comments="the comments", bpm=8, comp=True, - path=f"somepath{_item_ident}", length=60.0, bitrate=128000, format="FLAC", @@ -121,33 +104,6 @@ def item(lib=None): return i -_album_ident = 0 - - -def album(lib=None): - global _item_ident - _item_ident += 1 - i = beets.library.Album( - artpath=None, - albumartist="some album artist", - albumartist_sort="some sort album artist", - albumartist_credit="some album artist credit", - album="the album", - genre="the genre", - year=2014, - month=2, - day=5, - tracktotal=0, - disctotal=1, - comp=False, - mb_albumid="someID-1", - mb_albumartistid="someID-1", - ) - if lib: - lib.add(i) - return i - - # Dummy import session. 
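feat_tokens() above widens its lookbehind from whitespace only to whitespace, '(' or '[', so a featuring marker directly after an opening bracket is now matched. A quick check with illustrative titles:

import re

from beets.plugins import feat_tokens

pattern = feat_tokens(for_artist=False)
for title in ["Song (feat. Someone)", "Song feat. Someone"]:
    # Both forms now match; the parenthesised one previously did not.
    print(title, "->", bool(re.search(pattern, title)))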
def import_session(lib=None, loghandler=None, paths=[], query=[], cli=False): cls = commands.TerminalImportSession if cli else importer.ImportSession @@ -157,137 +113,35 @@ def import_session(lib=None, loghandler=None, paths=[], query=[], cli=False): class Assertions: """A mixin with additional unit test assertions.""" - def assertExists(self, path): # noqa - self.assertTrue( - os.path.exists(syspath(path)), f"file does not exist: {path!r}" - ) + def assertExists(self, path): + assert os.path.exists(syspath(path)), f"file does not exist: {path!r}" - def assertNotExists(self, path): # noqa - self.assertFalse( - os.path.exists(syspath(path)), f"file exists: {path!r}" - ) + def assertNotExists(self, path): + assert not os.path.exists(syspath(path)), f"file exists: {path!r}" - def assertIsFile(self, path): # noqa + def assertIsFile(self, path): self.assertExists(path) - self.assertTrue( - os.path.isfile(syspath(path)), - "path exists, but is not a regular file: {!r}".format(path), - ) + assert os.path.isfile( + syspath(path) + ), "path exists, but is not a regular file: {!r}".format(path) - def assertIsDir(self, path): # noqa + def assertIsDir(self, path): self.assertExists(path) - self.assertTrue( - os.path.isdir(syspath(path)), - "path exists, but is not a directory: {!r}".format(path), - ) + assert os.path.isdir( + syspath(path) + ), "path exists, but is not a directory: {!r}".format(path) def assert_equal_path(self, a, b): """Check that two paths are equal.""" - self.assertEqual( - util.normpath(a), - util.normpath(b), - f"paths are not equal: {a!r} and {b!r}", - ) + a_bytes, b_bytes = util.normpath(a), util.normpath(b) - -# A test harness for all beets tests. -# Provides temporary, isolated configuration. -class TestCase(unittest.TestCase, Assertions): - """A unittest.TestCase subclass that saves and restores beets' - global configuration. This allows tests to make temporary - modifications that will then be automatically removed when the test - completes. Also provides some additional assertion methods, a - temporary directory, and a DummyIO. - """ - - def setUp(self): - # A "clean" source list including only the defaults. - beets.config.sources = [] - beets.config.read(user=False, defaults=True) - - # Direct paths to a temporary directory. Tests can also use this - # temporary directory. - self.temp_dir = util.bytestring_path(tempfile.mkdtemp()) - - beets.config["statefile"] = os.fsdecode( - os.path.join(self.temp_dir, b"state.pickle") - ) - beets.config["library"] = os.fsdecode( - os.path.join(self.temp_dir, b"library.db") - ) - beets.config["directory"] = os.fsdecode( - os.path.join(self.temp_dir, b"libdir") - ) - - # Set $HOME, which is used by Confuse to create directories. - self._old_home = os.environ.get("HOME") - os.environ["HOME"] = os.fsdecode(self.temp_dir) - - # Initialize, but don't install, a DummyIO. - self.io = DummyIO() - - def tearDown(self): - if os.path.isdir(syspath(self.temp_dir)): - shutil.rmtree(syspath(self.temp_dir)) - if self._old_home is None: - del os.environ["HOME"] - else: - os.environ["HOME"] = self._old_home - self.io.restore() - - beets.config.clear() - beets.config._materialized = False - - -class LibTestCase(TestCase): - """A test case that includes an in-memory library object (`lib`) and - an item added to the library (`i`). - """ - - def setUp(self): - super().setUp() - self.lib = beets.library.Library(":memory:") - self.i = item(self.lib) - - def tearDown(self): - self.lib._connection().close() - super().tearDown() - - -# Mock timing. 
- - -class Timecop: - """Mocks the timing system (namely time() and sleep()) for testing. - Inspired by the Ruby timecop library. - """ - - def __init__(self): - self.now = time.time() - - def time(self): - return self.now - - def sleep(self, amount): - self.now += amount - - def install(self): - self.orig = { - "time": time.time, - "sleep": time.sleep, - } - time.time = self.time - time.sleep = self.sleep - - def restore(self): - time.time = self.orig["time"] - time.sleep = self.orig["sleep"] + assert a_bytes == b_bytes, f"{a_bytes=} != {b_bytes=}" # Mock I/O. -class InputException(Exception): +class InputError(Exception): def __init__(self, output=None): self.output = output @@ -334,9 +188,9 @@ class DummyIn: def readline(self): if not self.buf: if self.out: - raise InputException(self.out.get()) + raise InputError(self.out.get()) else: - raise InputException() + raise InputError() self.reads += 1 return self.buf.pop(0) @@ -388,25 +242,6 @@ class Bag: return self.fields.get(key) -# Convenience methods for setting up a temporary sandbox directory for tests -# that need to interact with the filesystem. - - -class TempDirMixin: - """Text mixin for creating and deleting a temporary directory.""" - - def create_temp_dir(self): - """Create a temporary directory and assign it into `self.temp_dir`. - Call `remove_temp_dir` later to delete it. - """ - self.temp_dir = bytestring_path(tempfile.mkdtemp()) - - def remove_temp_dir(self): - """Delete the temporary directory created by `create_temp_dir`.""" - if os.path.isdir(syspath(self.temp_dir)): - shutil.rmtree(syspath(self.temp_dir)) - - # Platform mocking. diff --git a/beets/test/helper.py b/beets/test/helper.py index c9b30f619..4effa47f8 100644 --- a/beets/test/helper.py +++ b/beets/test/helper.py @@ -20,9 +20,6 @@ information or mock the environment. - `has_program` checks the presence of a command on the system. -- The `generate_album_info` and `generate_track_info` functions return - fixtures to be used when mocking the autotagger. - - The `ImportSessionFixture` allows one to run importer code while controlling the interactions through code. 
@@ -36,19 +33,24 @@ import os.path import shutil import subprocess import sys +import unittest from contextlib import contextmanager from enum import Enum +from functools import cached_property from io import StringIO -from tempfile import mkdtemp, mkstemp -from typing import ClassVar +from pathlib import Path +from tempfile import gettempdir, mkdtemp, mkstemp +from typing import Any, ClassVar +from unittest.mock import patch import responses from mediafile import Image, MediaFile import beets import beets.plugins -from beets import autotag, config, importer, logging, util +from beets import autotag, importer, logging, util from beets.autotag.hooks import AlbumInfo, TrackInfo +from beets.importer import ImportSession from beets.library import Album, Item, Library from beets.test import _common from beets.ui.commands import TerminalImportSession @@ -142,16 +144,47 @@ def has_program(cmd, args=["--version"]): return True -class TestHelper: +def check_reflink_support(path: str) -> bool: + try: + import reflink + except ImportError: + return False + + return reflink.supported_at(path) + + +class ConfigMixin: + @cached_property + def config(self) -> beets.IncludeLazyConfig: + """Base beets configuration for tests.""" + config = beets.config + config.sources = [] + config.read(user=False, defaults=True) + + config["plugins"] = [] + config["verbose"] = 1 + config["ui"]["color"] = False + config["threaded"] = False + return config + + +NEEDS_REFLINK = unittest.skipUnless( + check_reflink_support(gettempdir()), "no reflink support for libdir" +) + + +class TestHelper(_common.Assertions, ConfigMixin): """Helper mixin for high-level cli and plugin tests. This mixin provides methods to isolate beets' global state provide fixtures. """ + db_on_disk: ClassVar[bool] = False + # TODO automate teardown through hook registration - def setup_beets(self, disk=False): + def setup_beets(self): """Setup pristine global configuration and library for testing. Sets ``beets.config`` so we can safely use any functionality @@ -166,129 +199,40 @@ class TestHelper: - ``libdir`` Path to a subfolder of ``temp_dir``, containing the library's media files. Same as ``config['directory']``. - - ``config`` The global configuration used by beets. - - ``lib`` Library instance created with the settings from ``config``. Make sure you call ``teardown_beets()`` afterwards. """ self.create_temp_dir() - os.environ["BEETSDIR"] = os.fsdecode(self.temp_dir) - - self.config = beets.config - self.config.clear() - self.config.read() - - self.config["plugins"] = [] - self.config["verbose"] = 1 - self.config["ui"]["color"] = False - self.config["threaded"] = False + temp_dir_str = os.fsdecode(self.temp_dir) + self.env_patcher = patch.dict( + "os.environ", + { + "BEETSDIR": temp_dir_str, + "HOME": temp_dir_str, # used by Confuse to create directories. + }, + ) + self.env_patcher.start() self.libdir = os.path.join(self.temp_dir, b"libdir") os.mkdir(syspath(self.libdir)) self.config["directory"] = os.fsdecode(self.libdir) - if disk: + if self.db_on_disk: dbpath = util.bytestring_path(self.config["library"].as_filename()) else: dbpath = ":memory:" self.lib = Library(dbpath, self.libdir) + # Initialize, but don't install, a DummyIO. 
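check_reflink_support() and the NEEDS_REFLINK skip decorator above replace the old module-level reflink probe in _common. A sketch of how a test that clones files would opt in; the test class is hypothetical:

import unittest

from beets.test.helper import NEEDS_REFLINK


@NEEDS_REFLINK
class ReflinkImportTest(unittest.TestCase):
    def test_clone_into_library(self):
        # Runs only when the temp directory supports copy-on-write clones.
        ...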
+ self.io = _common.DummyIO() + def teardown_beets(self): + self.env_patcher.stop() + self.io.restore() self.lib._close() - if "BEETSDIR" in os.environ: - del os.environ["BEETSDIR"] self.remove_temp_dir() - self.config.clear() - beets.config.read(user=False, defaults=True) - - def load_plugins(self, *plugins): - """Load and initialize plugins by names. - - Similar setting a list of plugins in the configuration. Make - sure you call ``unload_plugins()`` afterwards. - """ - # FIXME this should eventually be handled by a plugin manager - beets.config["plugins"] = plugins - beets.plugins.load_plugins(plugins) - beets.plugins.find_plugins() - - # Take a backup of the original _types and _queries to restore - # when unloading. - Item._original_types = dict(Item._types) - Album._original_types = dict(Album._types) - Item._types.update(beets.plugins.types(Item)) - Album._types.update(beets.plugins.types(Album)) - - Item._original_queries = dict(Item._queries) - Album._original_queries = dict(Album._queries) - Item._queries.update(beets.plugins.named_queries(Item)) - Album._queries.update(beets.plugins.named_queries(Album)) - - def unload_plugins(self): - """Unload all plugins and remove the from the configuration.""" - # FIXME this should eventually be handled by a plugin manager - beets.config["plugins"] = [] - beets.plugins._classes = set() - beets.plugins._instances = {} - Item._types = Item._original_types - Album._types = Album._original_types - Item._queries = Item._original_queries - Album._queries = Album._original_queries - - def create_importer(self, item_count=1, album_count=1): - """Create files to import and return corresponding session. - - Copies the specified number of files to a subdirectory of - `self.temp_dir` and creates a `ImportSessionFixture` for this path. - """ - import_dir = os.path.join(self.temp_dir, b"import") - if not os.path.isdir(syspath(import_dir)): - os.mkdir(syspath(import_dir)) - - album_no = 0 - while album_count: - album = util.bytestring_path(f"album {album_no}") - album_dir = os.path.join(import_dir, album) - if os.path.exists(syspath(album_dir)): - album_no += 1 - continue - os.mkdir(syspath(album_dir)) - album_count -= 1 - - track_no = 0 - album_item_count = item_count - while album_item_count: - title = f"track {track_no}" - src = os.path.join(_common.RSRC, b"full.mp3") - title_file = util.bytestring_path(f"{title}.mp3") - dest = os.path.join(album_dir, title_file) - if os.path.exists(syspath(dest)): - track_no += 1 - continue - album_item_count -= 1 - shutil.copy(syspath(src), syspath(dest)) - mediafile = MediaFile(dest) - mediafile.update( - { - "artist": "artist", - "albumartist": "album artist", - "title": title, - "album": album, - "mb_albumid": None, - "mb_trackid": None, - } - ) - mediafile.save() - - config["import"]["quiet"] = True - config["import"]["autotag"] = False - config["import"]["resume"] = False - - return ImportSessionFixture( - self.lib, loghandler=None, query=None, paths=[import_dir] - ) # Library fixtures methods @@ -304,16 +248,15 @@ class TestHelper: The item is attached to the database from `self.lib`. 
""" - item_count = self._get_item_count() values_ = { "title": "t\u00eftle {0}", "artist": "the \u00e4rtist", "album": "the \u00e4lbum", - "track": item_count, + "track": 1, "format": "MP3", } values_.update(values) - values_["title"] = values_["title"].format(item_count) + values_["title"] = values_["title"].format(1) values_["db"] = self.lib item = Item(**values_) if "path" not in values: @@ -430,12 +373,6 @@ class TestHelper: return path - def _get_item_count(self): - if not hasattr(self, "__item_count"): - count = 0 - self.__item_count = count + 1 - return count - # Running beets commands def run_command(self, *args, **kwargs): @@ -457,11 +394,11 @@ class TestHelper: # Safe file operations - def create_temp_dir(self): + def create_temp_dir(self, **kwargs): """Create a temporary directory and assign it into `self.temp_dir`. Call `remove_temp_dir` later to delete it. """ - temp_dir = mkdtemp() + temp_dir = mkdtemp(**kwargs) self.temp_dir = util.bytestring_path(temp_dir) def remove_temp_dir(self): @@ -490,99 +427,212 @@ class TestHelper: return path +# A test harness for all beets tests. +# Provides temporary, isolated configuration. +class BeetsTestCase(unittest.TestCase, TestHelper): + """A unittest.TestCase subclass that saves and restores beets' + global configuration. This allows tests to make temporary + modifications that will then be automatically removed when the test + completes. Also provides some additional assertion methods, a + temporary directory, and a DummyIO. + """ + + def setUp(self): + self.setup_beets() + + def tearDown(self): + self.teardown_beets() + + +class ItemInDBTestCase(BeetsTestCase): + """A test case that includes an in-memory library object (`lib`) and + an item added to the library (`i`). + """ + + def setUp(self): + super().setUp() + self.i = _common.item(self.lib) + + +class PluginMixin(ConfigMixin): + plugin: ClassVar[str] + preload_plugin: ClassVar[bool] = True + + def setup_beets(self): + super().setup_beets() + if self.preload_plugin: + self.load_plugins() + + def teardown_beets(self): + super().teardown_beets() + self.unload_plugins() + + def load_plugins(self, *plugins: str) -> None: + """Load and initialize plugins by names. + + Similar setting a list of plugins in the configuration. Make + sure you call ``unload_plugins()`` afterwards. + """ + # FIXME this should eventually be handled by a plugin manager + plugins = (self.plugin,) if hasattr(self, "plugin") else plugins + self.config["plugins"] = plugins + beets.plugins.load_plugins(plugins) + beets.plugins.find_plugins() + + # Take a backup of the original _types and _queries to restore + # when unloading. 
+ Item._original_types = dict(Item._types) + Album._original_types = dict(Album._types) + Item._types.update(beets.plugins.types(Item)) + Album._types.update(beets.plugins.types(Album)) + + Item._original_queries = dict(Item._queries) + Album._original_queries = dict(Album._queries) + Item._queries.update(beets.plugins.named_queries(Item)) + Album._queries.update(beets.plugins.named_queries(Album)) + + def unload_plugins(self) -> None: + """Unload all plugins and remove them from the configuration.""" + # FIXME this should eventually be handled by a plugin manager + for plugin_class in beets.plugins._instances: + plugin_class.listeners = None + self.config["plugins"] = [] + beets.plugins._classes = set() + beets.plugins._instances = {} + Item._types = getattr(Item, "_original_types", {}) + Album._types = getattr(Album, "_original_types", {}) + Item._queries = getattr(Item, "_original_queries", {}) + Album._queries = getattr(Album, "_original_queries", {}) + + @contextmanager + def configure_plugin(self, config: Any): + self.config[self.plugin].set(config) + self.load_plugins(self.plugin) + + yield + + self.unload_plugins() + + +class PluginTestCase(PluginMixin, BeetsTestCase): + pass + + class ImportHelper(TestHelper): """Provides tools to setup a library, a directory containing files that are to be imported and an import session. The class also provides stubs for the autotagging library and several assertions for the library. """ - def setup_beets(self, disk=False): - super().setup_beets(disk) + resource_path = syspath(os.path.join(_common.RSRC, b"full.mp3")) + default_import_config = { + "autotag": True, + "copy": True, + "hardlink": False, + "link": False, + "move": False, + "resume": False, + "singletons": False, + "timid": True, + } + + lib: Library + importer: ImportSession + + @cached_property + def import_path(self) -> Path: + import_path = Path(os.fsdecode(self.temp_dir)) / "import" + import_path.mkdir(exist_ok=True) + return import_path + + @cached_property + def import_dir(self) -> bytes: + return bytestring_path(self.import_path) + + def setUp(self): + super().setUp() + self.import_media = [] self.lib.path_formats = [ ("default", os.path.join("$artist", "$album", "$title")), ("singleton:true", os.path.join("singletons", "$title")), ("comp:true", os.path.join("compilations", "$album", "$title")), ] - def _create_import_dir(self, count=3): - """Creates a directory with media files to import. - Sets ``self.import_dir`` to the path of the directory. Also sets - ``self.import_media`` to a list :class:`MediaFile` for all the files in - the directory. + def prepare_track_for_import( + self, + track_id: int, + album_path: Path, + album_id: int | None = None, + ) -> Path: + track_path = album_path / f"track_{track_id}.mp3" + shutil.copy(self.resource_path, track_path) + medium = MediaFile(track_path) + medium.update( + { + "album": "Tag Album" + (f" {album_id}" if album_id else ""), + "albumartist": None, + "mb_albumid": None, + "comp": None, + "artist": "Tag Artist", + "title": f"Tag Track {track_id}", + "track": track_id, + "mb_trackid": None, + } + ) + medium.save() + self.import_media.append(medium) + return track_path + + def prepare_album_for_import( + self, + item_count: int, + album_id: int | None = None, + album_path: Path | None = None, + ) -> list[Path]: + """Create an album directory with media files to import. 
The directory has following layout - the_album/ + album/ track_1.mp3 track_2.mp3 track_3.mp3 - - :param count: Number of files to create """ - self.import_dir = os.path.join(self.temp_dir, b"testsrcdir") - if os.path.isdir(syspath(self.import_dir)): - shutil.rmtree(syspath(self.import_dir)) + if not album_path: + album_dir = f"album_{album_id}" if album_id else "album" + album_path = self.import_path / album_dir - album_path = os.path.join(self.import_dir, b"the_album") - os.makedirs(syspath(album_path)) + album_path.mkdir(exist_ok=True) - resource_path = os.path.join(_common.RSRC, b"full.mp3") + return [ + self.prepare_track_for_import(tid, album_path, album_id=album_id) + for tid in range(1, item_count + 1) + ] - metadata = { - "artist": "Tag Artist", - "album": "Tag Album", - "albumartist": None, - "mb_trackid": None, - "mb_albumid": None, - "comp": None, - } - self.media_files = [] - for i in range(count): - # Copy files - medium_path = os.path.join( - album_path, bytestring_path("track_%d.mp3" % (i + 1)) - ) - shutil.copy(syspath(resource_path), syspath(medium_path)) - medium = MediaFile(medium_path) + def prepare_albums_for_import(self, count: int = 1) -> None: + album_dirs = Path(os.fsdecode(self.import_dir)).glob("album_*") + base_idx = int(str(max(album_dirs, default="0")).split("_")[-1]) + 1 - # Set metadata - metadata["track"] = i + 1 - metadata["title"] = "Tag Title %d" % (i + 1) - for attr in metadata: - setattr(medium, attr, metadata[attr]) - medium.save() - self.media_files.append(medium) - self.import_media = self.media_files + for album_id in range(base_idx, count + base_idx): + self.prepare_album_for_import(1, album_id=album_id) - def _setup_import_session( - self, - import_dir=None, - delete=False, - threaded=False, - copy=True, - singletons=False, - move=False, - autotag=True, - link=False, - hardlink=False, - ): - config["import"]["copy"] = copy - config["import"]["delete"] = delete - config["import"]["timid"] = True - config["threaded"] = False - config["import"]["singletons"] = singletons - config["import"]["move"] = move - config["import"]["autotag"] = autotag - config["import"]["resume"] = False - config["import"]["link"] = link - config["import"]["hardlink"] = hardlink - - self.importer = ImportSessionFixture( + def _get_import_session(self, import_dir: bytes) -> ImportSession: + return ImportSessionFixture( self.lib, loghandler=None, query=None, - paths=[import_dir or self.import_dir], + paths=[import_dir], ) + def setup_importer( + self, import_dir: bytes | None = None, **kwargs + ) -> ImportSession: + self.config["import"].set_args({**self.default_import_config, **kwargs}) + self.importer = self._get_import_session(import_dir or self.import_dir) + return self.importer + + def setup_singleton_importer(self, **kwargs) -> ImportSession: + return self.setup_importer(singletons=True, **kwargs) + def assert_file_in_lib(self, *segments): """Join the ``segments`` and assert that this path exists in the library directory. 
@@ -596,10 +646,25 @@ class ImportHelper(TestHelper): self.assertNotExists(os.path.join(self.libdir, *segments)) def assert_lib_dir_empty(self): - self.assertEqual(len(os.listdir(syspath(self.libdir))), 0) + assert not os.listdir(syspath(self.libdir)) -class ImportSessionFixture(importer.ImportSession): +class AsIsImporterMixin: + def setUp(self): + super().setUp() + self.prepare_album_for_import(1) + + def run_asis_importer(self, **kwargs): + importer = self.setup_importer(autotag=False, **kwargs) + importer.run() + return importer + + +class ImportTestCase(ImportHelper, BeetsTestCase): + pass + + +class ImportSessionFixture(ImportSession): """ImportSession that can be controlled programaticaly. >>> lib = Library(':memory:') @@ -646,10 +711,6 @@ class ImportSessionFixture(importer.ImportSession): default_resolution = "REMOVE" - def add_resolution(self, resolution): - assert isinstance(resolution, self.Resolution) - self._resolutions.append(resolution) - def resolve_duplicate(self, task, found_duplicates): try: res = self._resolutions.pop(0) @@ -702,124 +763,28 @@ class TerminalImportSessionFixture(TerminalImportSession): self.io.addinput("T") elif choice == importer.action.SKIP: self.io.addinput("S") - elif isinstance(choice, int): + else: self.io.addinput("M") self.io.addinput(str(choice)) self._add_choice_input() - else: - raise Exception("Unknown choice %s" % choice) -class TerminalImportSessionSetup: - """Overwrites ImportHelper._setup_import_session to provide a terminal importer""" +class TerminalImportMixin(ImportHelper): + """Provides_a terminal importer for the import session.""" - def _setup_import_session( - self, - import_dir=None, - delete=False, - threaded=False, - copy=True, - singletons=False, - move=False, - autotag=True, - ): - config["import"]["copy"] = copy - config["import"]["delete"] = delete - config["import"]["timid"] = True - config["threaded"] = False - config["import"]["singletons"] = singletons - config["import"]["move"] = move - config["import"]["autotag"] = autotag - config["import"]["resume"] = False + io: _common.DummyIO - if not hasattr(self, "io"): - self.io = _common.DummyIO() + def _get_import_session(self, import_dir: bytes) -> importer.ImportSession: self.io.install() - self.importer = TerminalImportSessionFixture( + return TerminalImportSessionFixture( self.lib, loghandler=None, query=None, io=self.io, - paths=[import_dir or self.import_dir], + paths=[import_dir], ) -def generate_album_info(album_id, track_values): - """Return `AlbumInfo` populated with mock data. - - Sets the album info's `album_id` field is set to the corresponding - argument. For each pair (`id`, `values`) in `track_values` the `TrackInfo` - from `generate_track_info` is added to the album info's `tracks` field. - Most other fields of the album and track info are set to "album - info" and "track info", respectively. 
- """ - tracks = [generate_track_info(id, values) for id, values in track_values] - album = AlbumInfo( - album_id="album info", - album="album info", - artist="album info", - artist_id="album info", - tracks=tracks, - ) - for field in ALBUM_INFO_FIELDS: - setattr(album, field, "album info") - - return album - - -ALBUM_INFO_FIELDS = [ - "album", - "album_id", - "artist", - "artist_id", - "asin", - "albumtype", - "va", - "label", - "barcode", - "artist_sort", - "releasegroup_id", - "catalognum", - "language", - "country", - "albumstatus", - "media", - "albumdisambig", - "releasegroupdisambig", - "artist_credit", - "data_source", - "data_url", -] - - -def generate_track_info(track_id="track info", values={}): - """Return `TrackInfo` populated with mock data. - - The `track_id` field is set to the corresponding argument. All other - string fields are set to "track info". - """ - track = TrackInfo( - title="track info", - track_id=track_id, - ) - for field in TRACK_INFO_FIELDS: - setattr(track, field, "track info") - for field, value in values.items(): - setattr(track, field, value) - return track - - -TRACK_INFO_FIELDS = [ - "artist", - "artist_id", - "artist_sort", - "disctitle", - "artist_credit", - "data_source", - "data_url", -] - - class AutotagStub: """Stub out MusicBrainz album and track matcher and control what the autotagger returns. @@ -888,7 +853,7 @@ class AutotagStub: def _make_track_match(self, artist, album, number): return TrackInfo( - title="Applied Title %d" % number, + title="Applied Track %d" % number, track_id="match %d" % number, artist=artist, length=1, @@ -919,6 +884,7 @@ class AutotagStub: artist_id="artistid" + id, albumtype="soundtrack", data_source="match_source", + bandcamp_album_id="bc_url", ) @@ -932,7 +898,7 @@ class FetchImageHelper: super().run(*args, **kwargs) IMAGEHEADER = { - "image/jpeg": b"\x00" * 6 + b"JFIF", + "image/jpeg": b"\xff\xd8\xff" + b"\x00" * 3 + b"JFIF", "image/png": b"\211PNG\r\n\032\n", } diff --git a/beets/ui/__init__.py b/beets/ui/__init__.py index 8580bd1e8..386410a09 100644 --- a/beets/ui/__init__.py +++ b/beets/ui/__init__.py @@ -17,7 +17,6 @@ interface. To invoke the CLI, just call beets.ui.main(). The actual CLI commands are implemented in the ui.commands module. """ - import errno import optparse import os.path @@ -28,7 +27,7 @@ import sys import textwrap import traceback from difflib import SequenceMatcher -from typing import Any, Callable, List +from typing import Any, Callable import confuse @@ -318,7 +317,7 @@ def input_options( # Wrap the query text. # Start prompt with U+279C: Heavy Round-Tipped Rightwards Arrow - prompt = colorize("action", "\u279C ") + prompt = colorize("action", "\u279c ") line_length = 0 for i, (part, length) in enumerate( zip(prompt_parts, prompt_part_lengths) @@ -387,7 +386,7 @@ def input_yn(prompt, require=False): "yes" unless `require` is `True`, in which case there is no default. """ # Start prompt with U+279C: Heavy Round-Tipped Rightwards Arrow - yesno = colorize("action", "\u279C ") + colorize( + yesno = colorize("action", "\u279c ") + colorize( "action_description", "Enter Y or N:" ) sel = input_options(("y", "n"), require, prompt, yesno) @@ -1451,7 +1450,7 @@ class Subcommand: invoked by a SubcommandOptionParser. """ - func: Callable[[library.Library, optparse.Values, List[str]], Any] + func: Callable[[library.Library, optparse.Values, list[str]], Any] def __init__(self, name, parser=None, help="", aliases=(), hide=False): """Creates a new subcommand. 
name is the primary way to invoke @@ -1497,9 +1496,7 @@ class SubcommandsOptionParser(CommonOptionsParser): """ # A more helpful default usage. if "usage" not in kwargs: - kwargs[ - "usage" - ] = """ + kwargs["usage"] = """ %prog COMMAND [ARGS...] %prog help COMMAND""" kwargs["add_help_option"] = False @@ -1861,13 +1858,21 @@ def main(args=None): """Run the main command-line interface for beets. Includes top-level exception handlers that print friendly error messages. """ + if "AppData\\Local\\Microsoft\\WindowsApps" in sys.exec_prefix: + log.error( + "error: beets is unable to use the Microsoft Store version of " + "Python. Please install Python from https://python.org.\n" + "error: More details can be found here " + "https://beets.readthedocs.io/en/stable/guides/main.html" + ) + sys.exit(1) try: _raw_main(args) except UserError as exc: message = exc.args[0] if exc.args else None log.error("error: {0}", message) sys.exit(1) - except util.HumanReadableException as exc: + except util.HumanReadableError as exc: exc.log(log) sys.exit(1) except library.FileOperationError as exc: diff --git a/beets/ui/commands.py b/beets/ui/commands.py index 826dc07a3..99aa04f0a 100755 --- a/beets/ui/commands.py +++ b/beets/ui/commands.py @@ -16,13 +16,13 @@ interface. """ - import os import re -from collections import Counter, namedtuple +from collections import Counter +from collections.abc import Sequence from itertools import chain from platform import python_version -from typing import Sequence +from typing import Any, NamedTuple import beets from beets import autotag, config, importer, library, logging, plugins, ui, util @@ -47,7 +47,6 @@ from beets.util import ( from . import _store_dict VARIOUS_ARTISTS = "Various Artists" -PromptChoice = namedtuple("PromptChoice", ["short", "long", "callback"]) # Global logger. log = logging.getLogger("beets") @@ -664,8 +663,8 @@ class AlbumChange(ChangeRepresentation): suggests for them. """ # Tracks. - # match is an AlbumMatch named tuple, mapping is a dict - # Sort the pairs by the track_info index (at index 1 of the namedtuple) + # match is an AlbumMatch NamedTuple, mapping is a dict + # Sort the pairs by the track_info index (at index 1 of the NamedTuple) pairs = list(self.match.mapping.items()) pairs.sort(key=lambda item_and_track_info: item_and_track_info[1].index) # Build up LHS and RHS for track difference display. The `lines` list @@ -840,6 +839,12 @@ def _summary_judgment(rec): return action +class PromptChoice(NamedTuple): + short: str + long: str + callback: Any + + def choose_candidate( candidates, singleton, @@ -1022,7 +1027,7 @@ def manual_id(session, task): def abort_action(session, task): """A prompt choice callback that aborts the importer.""" - raise importer.ImportAbort() + raise importer.ImportAbortError() class TerminalImportSession(importer.ImportSession): @@ -1052,7 +1057,7 @@ class TerminalImportSession(importer.ImportSession): if len(actions) == 1: return actions[0] elif len(actions) > 1: - raise plugins.PluginConflictException( + raise plugins.PluginConflictError( "Only one handler for `import_task_before_choice` may return " "an action." 
) @@ -1312,8 +1317,7 @@ def import_files(lib, paths, query): loghandler = logging.FileHandler(logpath, encoding="utf-8") except OSError: raise ui.UserError( - "could not open log file for writing: " - "{}".format(displayable_path(logpath)) + f"Could not open log file for writing: {displayable_path(logpath)}" ) else: loghandler = None diff --git a/beets/util/__init__.py b/beets/util/__init__.py index 9076bea30..32a63b216 100644 --- a/beets/util/__init__.py +++ b/beets/util/__init__.py @@ -13,6 +13,7 @@ # included in all copies or substantial portions of the Software. """Miscellaneous utility functions.""" + from __future__ import annotations import errno @@ -26,45 +27,47 @@ import subprocess import sys import tempfile import traceback -from collections import Counter, namedtuple +from collections import Counter from contextlib import suppress from enum import Enum -from logging import Logger +from importlib import import_module from multiprocessing.pool import ThreadPool from pathlib import Path +from re import Pattern from typing import ( + TYPE_CHECKING, Any, AnyStr, Callable, - Generator, Iterable, - List, - MutableSequence, - Optional, - Pattern, - Sequence, - Tuple, + NamedTuple, TypeVar, Union, ) +from unidecode import unidecode + +from beets.util import hidden + +if TYPE_CHECKING: + from collections.abc import Iterator, Sequence + from logging import Logger + if sys.version_info >= (3, 10): from typing import TypeAlias else: from typing_extensions import TypeAlias -from unidecode import unidecode - -from beets.util import hidden MAX_FILENAME_LENGTH = 200 WINDOWS_MAGIC_PREFIX = "\\\\?\\" T = TypeVar("T") -Bytes_or_String: TypeAlias = Union[str, bytes] -PathLike = Union[str, bytes, Path] +BytesOrStr = Union[str, bytes] +PathLike = Union[BytesOrStr, Path] +Replacements: TypeAlias = "Sequence[tuple[Pattern[str], str]]" -class HumanReadableException(Exception): +class HumanReadableError(Exception): """An Exception that can include a human-readable error message to be logged without a traceback. Can preserve a traceback for debugging purposes as well. @@ -120,7 +123,7 @@ class HumanReadableException(Exception): logger.error("{0}: {1}", self.error_kind, self.args[0]) -class FilesystemError(HumanReadableException): +class FilesystemError(HumanReadableError): """An error that occurred while performing a filesystem manipulation via a function in this module. The `paths` field is a sequence of pathnames involved in the operation. @@ -161,16 +164,16 @@ class MoveOperation(Enum): REFLINK_AUTO = 5 -def normpath(path: bytes) -> bytes: +def normpath(path: PathLike) -> bytes: """Provide the canonical form of the path suitable for storing in the database. """ - path = syspath(path, prefix=False) - path = os.path.normpath(os.path.abspath(os.path.expanduser(path))) - return bytestring_path(path) + str_path = syspath(path, prefix=False) + str_path = os.path.normpath(os.path.abspath(os.path.expanduser(str_path))) + return bytestring_path(str_path) -def ancestry(path: bytes) -> List[str]: +def ancestry(path: AnyStr) -> list[AnyStr]: """Return a list consisting of path's parent directory, its grandparent, and so on. For instance: @@ -179,7 +182,7 @@ def ancestry(path: bytes) -> List[str]: The argument should *not* be the result of a call to `syspath`. 
""" - out = [] + out: list[AnyStr] = [] last_path = None while path: path = os.path.dirname(path) @@ -196,34 +199,34 @@ def ancestry(path: bytes) -> List[str]: def sorted_walk( path: AnyStr, - ignore: Sequence = (), + ignore: Sequence[bytes] = (), ignore_hidden: bool = False, - logger: Optional[Logger] = None, -) -> Generator[Tuple, None, None]: + logger: Logger | None = None, +) -> Iterator[tuple[bytes, Sequence[bytes], Sequence[bytes]]]: """Like `os.walk`, but yields things in case-insensitive sorted, breadth-first order. Directory and file names matching any glob pattern in `ignore` are skipped. If `logger` is provided, then warning messages are logged there when a directory cannot be listed. """ # Make sure the paths aren't Unicode strings. - path = bytestring_path(path) + bytes_path = bytestring_path(path) ignore = [bytestring_path(i) for i in ignore] # Get all the directories and files at this level. try: - contents = os.listdir(syspath(path)) + contents = os.listdir(syspath(bytes_path)) except OSError as exc: if logger: logger.warning( "could not list directory {}: {}".format( - displayable_path(path), exc.strerror + displayable_path(bytes_path), exc.strerror ) ) return dirs = [] files = [] - for base in contents: - base = bytestring_path(base) + for str_base in contents: + base = bytestring_path(str_base) # Skip ignored filenames. skip = False @@ -231,7 +234,7 @@ def sorted_walk( if fnmatch.fnmatch(base, pat): if logger: logger.debug( - "ignoring {} due to ignore rule {}".format(base, pat) + "ignoring '{}' due to ignore rule '{}'", base, pat ) skip = True break @@ -239,7 +242,7 @@ def sorted_walk( continue # Add to output as either a file or a directory. - cur = os.path.join(path, base) + cur = os.path.join(bytes_path, base) if (ignore_hidden and not hidden.is_hidden(cur)) or not ignore_hidden: if os.path.isdir(syspath(cur)): dirs.append(base) @@ -249,12 +252,11 @@ def sorted_walk( # Sort lists (case-insensitive) and yield the current level. dirs.sort(key=bytes.lower) files.sort(key=bytes.lower) - yield (path, dirs, files) + yield (bytes_path, dirs, files) # Recurse into directories. for base in dirs: - cur = os.path.join(path, base) - # yield from sorted_walk(...) + cur = os.path.join(bytes_path, base) yield from sorted_walk(cur, ignore, ignore_hidden, logger) @@ -295,8 +297,8 @@ def fnmatch_all(names: Sequence[bytes], patterns: Sequence[bytes]) -> bool: def prune_dirs( - path: str, - root: Optional[Bytes_or_String] = None, + path: bytes, + root: bytes | None = None, clutter: Sequence[str] = (".DS_Store", "Thumbs.db"), ): """If path is an empty directory, then remove it. Recursively remove @@ -307,41 +309,41 @@ def prune_dirs( (i.e., no recursive removal). """ path = normpath(path) - if root is not None: - root = normpath(root) - + root = normpath(root) if root else None ancestors = ancestry(path) + if root is None: # Only remove the top directory. ancestors = [] elif root in ancestors: - # Only remove directories below the root. + # Only remove directories below the root_bytes. ancestors = ancestors[ancestors.index(root) + 1 :] else: # Remove nothing. return + bytes_clutter = [bytestring_path(c) for c in clutter] + # Traverse upward from path. ancestors.append(path) ancestors.reverse() for directory in ancestors: - directory = syspath(directory) + str_directory = syspath(directory) if not os.path.exists(directory): # Directory gone already. 
continue - clutter: List[bytes] = [bytestring_path(c) for c in clutter] - match_paths = [bytestring_path(d) for d in os.listdir(directory)] + match_paths = [bytestring_path(d) for d in os.listdir(str_directory)] try: - if fnmatch_all(match_paths, clutter): + if fnmatch_all(match_paths, bytes_clutter): # Directory contains only clutter (or nothing). - shutil.rmtree(directory) + shutil.rmtree(str_directory) else: break except OSError: break -def components(path: AnyStr) -> MutableSequence[AnyStr]: +def components(path: AnyStr) -> list[AnyStr]: """Return a list of the path components in path. For instance: >>> components(b'/a/b/c') @@ -387,7 +389,7 @@ def _fsencoding() -> str: return encoding -def bytestring_path(path: Bytes_or_String) -> bytes: +def bytestring_path(path: PathLike) -> bytes: """Given a path, which is either a bytes or a unicode, returns a str path (ensuring that we never deal with Unicode pathnames). Path should be bytes but has safeguards for strings to be converted. @@ -396,25 +398,28 @@ def bytestring_path(path: Bytes_or_String) -> bytes: if isinstance(path, bytes): return path + str_path = str(path) + # On Windows, remove the magic prefix added by `syspath`. This makes # ``bytestring_path(syspath(X)) == X``, i.e., we can safely # round-trip through `syspath`. - if os.path.__name__ == "ntpath" and path.startswith(WINDOWS_MAGIC_PREFIX): - path = path[len(WINDOWS_MAGIC_PREFIX) :] + if os.path.__name__ == "ntpath" and str_path.startswith( + WINDOWS_MAGIC_PREFIX + ): + str_path = str_path[len(WINDOWS_MAGIC_PREFIX) :] # Try to encode with default encodings, but fall back to utf-8. try: - return path.encode(_fsencoding()) + return str_path.encode(_fsencoding()) except (UnicodeError, LookupError): - return path.encode("utf-8") + return str_path.encode("utf-8") PATH_SEP: bytes = bytestring_path(os.sep) def displayable_path( - path: Union[bytes, str, Tuple[Union[bytes, str], ...]], - separator: str = "; ", + path: BytesOrStr | tuple[BytesOrStr, ...], separator: str = "; " ) -> str: """Attempts to decode a bytestring path to a unicode object for the purpose of displaying it to the user. If the `path` argument is a @@ -434,59 +439,52 @@ def displayable_path( return path.decode("utf-8", "ignore") -def syspath(path: Bytes_or_String, prefix: bool = True) -> Bytes_or_String: +def syspath(path: PathLike, prefix: bool = True) -> str: """Convert a path for use by the operating system. In particular, paths on Windows must receive a magic prefix and must be converted to Unicode before they are sent to the OS. To disable the magic prefix on Windows, set `prefix` to False---but only do this if you *really* know what you're doing. """ + str_path = os.fsdecode(path) # Don't do anything if we're not on windows if os.path.__name__ != "ntpath": - return path - - if not isinstance(path, str): - # Beets currently represents Windows paths internally with UTF-8 - # arbitrarily. But earlier versions used MBCS because it is - # reported as the FS encoding by Windows. Try both. - try: - path = path.decode("utf-8") - except UnicodeError: - # The encoding should always be MBCS, Windows' broken - # Unicode representation. - assert isinstance(path, bytes) - encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() - path = path.decode(encoding, "replace") + return str_path # Add the magic prefix if it isn't already there. 
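# Illustrative sketch, not part of the diff, of what the prune_dirs() helper
# earlier in this hunk does: directories from `path` up to (but not including)
# `root` are removed as long as they hold nothing but the named clutter files.
# The temporary layout below is made up for the example.
import os
import tempfile

from beets import util

root = tempfile.mkdtemp()
album = os.path.join(root, "artist", "album")
os.makedirs(album)
open(os.path.join(album, ".DS_Store"), "w").close()  # clutter only

util.prune_dirs(util.bytestring_path(album), util.bytestring_path(root))

assert not os.path.exists(os.path.join(root, "artist"))  # pruned away
assert os.path.exists(root)  # the root itself is kept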
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx - if prefix and not path.startswith(WINDOWS_MAGIC_PREFIX): - if path.startswith("\\\\"): + if prefix and not str_path.startswith(WINDOWS_MAGIC_PREFIX): + if str_path.startswith("\\\\"): # UNC path. Final path should look like \\?\UNC\... - path = "UNC" + path[1:] - path = WINDOWS_MAGIC_PREFIX + path + str_path = "UNC" + str_path[1:] + str_path = WINDOWS_MAGIC_PREFIX + str_path - return path + return str_path def samefile(p1: bytes, p2: bytes) -> bool: """Safer equality for paths.""" if p1 == p2: return True - return shutil._samefile(syspath(p1), syspath(p2)) + with suppress(OSError): + return os.path.samefile(syspath(p1), syspath(p2)) + + return False -def remove(path: Optional[bytes], soft: bool = True): +def remove(path: bytes, soft: bool = True): """Remove the file. If `soft`, then no error will be raised if the file does not exist. """ - path = syspath(path) - if not path or (soft and not os.path.exists(path)): + str_path = syspath(path) + if not str_path or (soft and not os.path.exists(str_path)): return try: - os.remove(path) + os.remove(str_path) except OSError as exc: - raise FilesystemError(exc, "delete", (path,), traceback.format_exc()) + raise FilesystemError( + exc, "delete", (str_path,), traceback.format_exc() + ) def copy(path: bytes, dest: bytes, replace: bool = False): @@ -497,23 +495,22 @@ def copy(path: bytes, dest: bytes, replace: bool = False): """ if samefile(path, dest): return - path = syspath(path) - dest = syspath(dest) - if not replace and os.path.exists(dest): - raise FilesystemError("file exists", "copy", (path, dest)) + str_path = syspath(path) + str_dest = syspath(dest) + if not replace and os.path.exists(str_dest): + raise FilesystemError("file exists", "copy", (str_path, str_dest)) try: - shutil.copyfile(path, dest) + shutil.copyfile(str_path, str_dest) except OSError as exc: - raise FilesystemError(exc, "copy", (path, dest), traceback.format_exc()) + raise FilesystemError( + exc, "copy", (str_path, str_dest), traceback.format_exc() + ) def move(path: bytes, dest: bytes, replace: bool = False): """Rename a file. `dest` may not be a directory. If `dest` already exists, raises an OSError unless `replace` is True. Has no effect if - `path` is the same as `dest`. If the paths are on different - filesystems (or the rename otherwise fails), a copy is attempted - instead, in which case metadata will *not* be preserved. Paths are - translated to system paths. + `path` is the same as `dest`. Paths are translated to system paths. """ if os.path.isdir(syspath(path)): raise FilesystemError("source is directory", "move", (path, dest)) @@ -539,22 +536,36 @@ def move(path: bytes, dest: bytes, replace: bool = False): ) try: with open(syspath(path), "rb") as f: - shutil.copyfileobj(f, tmp) + # mypy bug: + # - https://github.com/python/mypy/issues/15031 + # - https://github.com/python/mypy/issues/14943 + # Fix not yet released: + # - https://github.com/python/mypy/pull/14975 + shutil.copyfileobj(f, tmp) # type: ignore[misc] finally: tmp.close() - # Move the copied file into place. try: - os.replace(tmp.name, syspath(dest)) - tmp = None + # Copy file metadata + shutil.copystat(syspath(path), tmp.name) + except OSError: + # Ignore errors because it doesn't matter too much. We may be on a + # filesystem that doesn't support this. + pass + + # Move the copied file into place. 
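# Illustrative check, not part of the diff, of the round-trip property noted in
# bytestring_path() earlier in this hunk: on POSIX, syspath() now simply returns
# os.fsdecode(path), and re-encoding gives back the original bytes (assuming a
# UTF-8 filesystem encoding).
from beets import util

p = util.bytestring_path("música/01 track.mp3")
assert isinstance(util.syspath(p), str)
assert util.bytestring_path(util.syspath(p)) == p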
+ tmp_filename = tmp.name + try: + os.replace(tmp_filename, syspath(dest)) + tmp_filename = "" os.remove(syspath(path)) except OSError as exc: raise FilesystemError( exc, "move", (path, dest), traceback.format_exc() ) finally: - if tmp is not None: - os.remove(tmp) + if tmp_filename: + os.remove(tmp_filename) def link(path: bytes, dest: bytes, replace: bool = False): @@ -622,31 +633,33 @@ def reflink( Raise an `OSError` if `dest` already exists, unless `replace` is True. If `path` == `dest`, then do nothing. - If reflinking fails and `fallback` is enabled, try copying the file - instead. Otherwise, raise an error without trying a plain copy. - - May raise an `ImportError` if the `reflink` module is not available. + If `fallback` is enabled, ignore errors and copy the file instead. + Otherwise, errors are re-raised as FilesystemError with an explanation. """ - import reflink as pyreflink - if samefile(path, dest): return if os.path.exists(syspath(dest)) and not replace: - raise FilesystemError("file exists", "rename", (path, dest)) + raise FilesystemError("target exists", "rename", (path, dest)) + + if fallback: + with suppress(Exception): + return import_module("reflink").reflink(path, dest) + return copy(path, dest, replace) try: - pyreflink.reflink(path, dest) - except (NotImplementedError, pyreflink.ReflinkImpossibleError): - if fallback: - copy(path, dest, replace) - else: - raise FilesystemError( - "OS/filesystem does not support reflinks.", - "link", - (path, dest), - traceback.format_exc(), - ) + import_module("reflink").reflink(path, dest) + except (ImportError, OSError): + raise + except Exception as exc: + msg = { + "EXDEV": "Cannot reflink across devices", + "EOPNOTSUPP": "Device does not support reflinks", + }.get(str(exc), "OS does not support reflinks") + + raise FilesystemError( + msg, "reflink", (path, dest), traceback.format_exc() + ) from exc def unique_path(path: bytes) -> bytes: @@ -676,7 +689,7 @@ def unique_path(path: bytes) -> bytes: # Unix. They are forbidden here because they cause problems on Samba # shares, which are sufficiently common as to cause frequent problems. # https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx -CHAR_REPLACE: List[Tuple[Pattern, str]] = [ +CHAR_REPLACE = [ (re.compile(r"[\\/]"), "_"), # / and \ -- forbidden everywhere. (re.compile(r"^\."), "_"), # Leading dot (hidden files on Unix). (re.compile(r"[\x00-\x1f]"), ""), # Control characters. @@ -686,10 +699,7 @@ CHAR_REPLACE: List[Tuple[Pattern, str]] = [ ] -def sanitize_path( - path: str, - replacements: Optional[Sequence[Sequence[Union[Pattern, str]]]] = None, -) -> str: +def sanitize_path(path: str, replacements: Replacements | None = None) -> str: """Takes a path (as a Unicode string) and makes sure that it is legal. Returns a new path. Only works with fragments; won't work reliably on Windows when a path begins with a drive letter. Path @@ -729,11 +739,11 @@ def truncate_path(path: AnyStr, length: int = MAX_FILENAME_LENGTH) -> AnyStr: def _legalize_stage( path: str, - replacements: Optional[Sequence[Sequence[Union[Pattern, str]]]], + replacements: Replacements | None, length: int, extension: str, fragment: bool, -) -> Tuple[Bytes_or_String, bool]: +) -> tuple[BytesOrStr, bool]: """Perform a single round of path legalization steps (sanitation/replacement, encoding from Unicode to bytes, extension-appending, and truncation). 
Return the path (Unicode if @@ -759,11 +769,11 @@ def _legalize_stage( def legalize_path( path: str, - replacements: Optional[Sequence[Sequence[Union[Pattern, str]]]], + replacements: Replacements | None, length: int, extension: bytes, fragment: bool, -) -> Tuple[Union[Bytes_or_String, bool]]: +) -> tuple[BytesOrStr, bool]: """Given a path-like Unicode string, produce a legal path. Return the path and a flag indicating whether some replacements had to be ignored (see below). @@ -830,7 +840,7 @@ def as_string(value: Any) -> str: return str(value) -def plurality(objs: Sequence[T]) -> T: +def plurality(objs: Sequence[T]) -> tuple[T, int]: """Given a sequence of hashble objects, returns the object that is most common in the set and the its number of appearance. The sequence must contain at least one object. @@ -841,7 +851,7 @@ def plurality(objs: Sequence[T]) -> T: return c.most_common(1)[0] -def convert_command_args(args: List[bytes]) -> List[str]: +def convert_command_args(args: list[BytesOrStr]) -> list[str]: """Convert command arguments, which may either be `bytes` or `str` objects, to uniformly surrogate-escaped strings.""" assert isinstance(args, list) @@ -855,13 +865,12 @@ def convert_command_args(args: List[bytes]) -> List[str]: # stdout and stderr as bytes -CommandOutput = namedtuple("CommandOutput", ("stdout", "stderr")) +class CommandOutput(NamedTuple): + stdout: bytes + stderr: bytes -def command_output( - cmd: List[Bytes_or_String], - shell: bool = False, -) -> CommandOutput: +def command_output(cmd: list[BytesOrStr], shell: bool = False) -> CommandOutput: """Runs the command and returns its output after it has exited. Returns a CommandOutput. The attributes ``stdout`` and ``stderr`` contain @@ -879,7 +888,7 @@ def command_output( This replaces `subprocess.check_output` which can have problems if lots of output is sent to stderr. """ - cmd = convert_command_args(cmd) + converted_cmd = convert_command_args(cmd) devnull = subprocess.DEVNULL @@ -895,13 +904,13 @@ def command_output( if proc.returncode: raise subprocess.CalledProcessError( returncode=proc.returncode, - cmd=" ".join(map(str, cmd)), + cmd=" ".join(converted_cmd), output=stdout + stderr, ) return CommandOutput(stdout, stderr) -def max_filename_length(path: AnyStr, limit=MAX_FILENAME_LENGTH) -> int: +def max_filename_length(path: BytesOrStr, limit=MAX_FILENAME_LENGTH) -> int: """Attempt to determine the maximum filename length for the filesystem containing `path`. If the value is greater than `limit`, then `limit` is used instead (to prevent errors when a filesystem @@ -1041,7 +1050,7 @@ def asciify_path(path: str, sep_replace: str) -> str: # if this platform has an os.altsep, change it to os.sep. if os.altsep: path = path.replace(os.altsep, os.sep) - path_components: List[Bytes_or_String] = path.split(os.sep) + path_components: list[str] = path.split(os.sep) for index, item in enumerate(path_components): path_components[index] = unidecode(item).replace(os.sep, sep_replace) if os.altsep: @@ -1051,7 +1060,7 @@ def asciify_path(path: str, sep_replace: str) -> str: return os.sep.join(path_components) -def par_map(transform: Callable, items: Iterable): +def par_map(transform: Callable[[T], Any], items: Sequence[T]) -> None: """Apply the function `transform` to all the elements in the iterable `items`, like `map(transform, items)` but with no return value. 
@@ -1065,7 +1074,7 @@ def par_map(transform: Callable, items: Iterable): pool.join() -class cached_classproperty: # noqa: N801 +class cached_classproperty: """A decorator implementing a read-only property that is *lazy* in the sense that the getter is only invoked once. Subsequent accesses through *any* instance use the cached result. @@ -1123,3 +1132,8 @@ def get_temp_filename( _, filename = tempfile.mkstemp(dir=tempdir, prefix=prefix, suffix=suffix) return bytestring_path(filename) + + +def unique_list(elements: Iterable[T]) -> list[T]: + """Return a list with unique elements in the original order.""" + return list(dict.fromkeys(elements)) diff --git a/beets/util/bluelet.py b/beets/util/bluelet.py index db34486b5..b81b389e0 100644 --- a/beets/util/bluelet.py +++ b/beets/util/bluelet.py @@ -203,7 +203,7 @@ def _event_select(events): return ready_events -class ThreadException(Exception): +class ThreadError(Exception): def __init__(self, coro, exc_info): self.coro = coro self.exc_info = exc_info @@ -266,7 +266,7 @@ def run(root_coro): """After an event is fired, run a given coroutine associated with it in the threads dict until it yields again. If the coroutine exits, then the thread is removed from the pool. If the coroutine - raises an exception, it is reraised in a ThreadException. If + raises an exception, it is reraised in a ThreadError. If is_exc is True, then the value must be an exc_info tuple and the exception is thrown into the coroutine. """ @@ -281,7 +281,7 @@ def run(root_coro): except BaseException: # Thread raised some other exception. del threads[coro] - raise ThreadException(coro, sys.exc_info()) + raise ThreadError(coro, sys.exc_info()) else: if isinstance(next_event, types.GeneratorType): # Automatically invoke sub-coroutines. (Shorthand for @@ -369,7 +369,7 @@ def run(root_coro): else: advance_thread(event2coro[event], value) - except ThreadException as te: + except ThreadError as te: # Exception raised from inside a thread. event = ExceptionEvent(te.exc_info) if te.coro in delegators: diff --git a/beets/util/confit.py b/beets/util/confit.py deleted file mode 100644 index db72d8b8c..000000000 --- a/beets/util/confit.py +++ /dev/null @@ -1,35 +0,0 @@ -# This file is part of beets. -# Copyright 2016-2019, Adrian Sampson. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. - - -import warnings - -import confuse - -warnings.warn( - "beets.util.confit is deprecated; use confuse instead", - # Show the location of the `import confit` statement as the warning's - # source, rather than this file, such that the offending module can be - # identified easily. - stacklevel=2, -) - -# Import everything from the confuse module into this module. -for key, value in confuse.__dict__.items(): - if key not in ["__name__"]: - globals()[key] = value - - -# Cleanup namespace. 
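# Worked example, not part of the diff, for the new util.unique_list() helper
# added in the beets/util/__init__.py hunk above: dict.fromkeys() keeps the
# first occurrence of each element, so order is preserved while duplicates are
# dropped.
from beets.util import unique_list

assert unique_list(["a", "b", "a", "c", "b"]) == ["a", "b", "c"]
assert unique_list([3, 1, 3, 2, 1]) == [3, 1, 2]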
-del key, value, warnings, confuse diff --git a/beets/util/enumeration.py b/beets/util/enumeration.py deleted file mode 100644 index 33a6be58f..000000000 --- a/beets/util/enumeration.py +++ /dev/null @@ -1,42 +0,0 @@ -# This file is part of beets. -# Copyright 2016, Adrian Sampson. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. - - -from enum import Enum - - -class OrderedEnum(Enum): - """ - An Enum subclass that allows comparison of members. - """ - - def __ge__(self, other): - if self.__class__ is other.__class__: - return self.value >= other.value - return NotImplemented - - def __gt__(self, other): - if self.__class__ is other.__class__: - return self.value > other.value - return NotImplemented - - def __le__(self, other): - if self.__class__ is other.__class__: - return self.value <= other.value - return NotImplemented - - def __lt__(self, other): - if self.__class__ is other.__class__: - return self.value < other.value - return NotImplemented diff --git a/beets/util/functemplate.py b/beets/util/functemplate.py index 7d7e8f01f..b0daefac2 100644 --- a/beets/util/functemplate.py +++ b/beets/util/functemplate.py @@ -26,7 +26,6 @@ This is sort of like a tiny, horrible degeneration of a real templating engine like Jinja2 or Mustache. """ - import ast import dis import functools diff --git a/beets/util/pipeline.py b/beets/util/pipeline.py index c4933ff00..d23b1bd10 100644 --- a/beets/util/pipeline.py +++ b/beets/util/pipeline.py @@ -31,7 +31,6 @@ To do so, pass an iterable of coroutines to the Pipeline constructor in place of any single coroutine. """ - import queue import sys from threading import Lock, Thread diff --git a/beets/vfs.py b/beets/vfs.py index 4a9681a92..cdbf197a6 100644 --- a/beets/vfs.py +++ b/beets/vfs.py @@ -16,11 +16,14 @@ libraries. """ -from collections import namedtuple +from typing import Any, NamedTuple from beets import util -Node = namedtuple("Node", ["files", "dirs"]) + +class Node(NamedTuple): + files: dict[str, Any] + dirs: dict[str, Any] def _insert(node, path, itemid): diff --git a/beetsplug/__init__.py b/beetsplug/__init__.py index 763ff3a05..ad573cdb3 100644 --- a/beetsplug/__init__.py +++ b/beetsplug/__init__.py @@ -14,7 +14,6 @@ """A namespace package for beets plugins.""" - # Make this a namespace package. from pkgutil import extend_path diff --git a/beetsplug/absubmit.py b/beetsplug/absubmit.py index fc40b85e7..bbbc14edf 100644 --- a/beetsplug/absubmit.py +++ b/beetsplug/absubmit.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Calculate acoustic information and submit to AcousticBrainz. -""" - +"""Calculate acoustic information and submit to AcousticBrainz.""" import errno import hashlib @@ -187,9 +185,9 @@ only files which would be processed", with open(filename) as tmp_file: analysis = json.load(tmp_file) # Add the hash to the output. 
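# Illustrative sketch, not part of the diff: beets.vfs.Node, converted to a
# typing.NamedTuple in the hunk above, keeps the same field access as the old
# namedtuple, so the nesting used by the virtual filesystem still looks like
# this (the names and item id are made up).
from beets.vfs import Node

root = Node(files={}, dirs={})
root.dirs["Artist"] = Node(files={"01 Song.mp3": 123}, dirs={})
assert root.dirs["Artist"].files["01 Song.mp3"] == 123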
- analysis["metadata"]["version"][ - "essentia_build_sha" - ] = self.extractor_sha + analysis["metadata"]["version"]["essentia_build_sha"] = ( + self.extractor_sha + ) return analysis finally: try: diff --git a/beetsplug/acousticbrainz.py b/beetsplug/acousticbrainz.py index a4b153fc9..899288260 100644 --- a/beetsplug/acousticbrainz.py +++ b/beetsplug/acousticbrainz.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Fetch various AcousticBrainz metadata using MBID. -""" +"""Fetch various AcousticBrainz metadata using MBID.""" from collections import defaultdict diff --git a/beetsplug/albumtypes.py b/beetsplug/albumtypes.py index 5200b5c6d..b1e143a88 100644 --- a/beetsplug/albumtypes.py +++ b/beetsplug/albumtypes.py @@ -14,7 +14,6 @@ """Adds an album template field for formatted album types.""" - from beets.autotag.mb import VARIOUS_ARTISTS_ID from beets.library import Album from beets.plugins import BeetsPlugin diff --git a/beetsplug/aura.py b/beetsplug/aura.py index 09d859200..a9b270657 100644 --- a/beetsplug/aura.py +++ b/beetsplug/aura.py @@ -14,13 +14,13 @@ """An AURA server using Flask.""" - import os import re import sys +from collections.abc import Mapping from dataclasses import dataclass from mimetypes import guess_type -from typing import ClassVar, Mapping, Type +from typing import ClassVar from flask import ( Blueprint, @@ -128,7 +128,7 @@ ARTIST_ATTR_MAP = { class AURADocument: """Base class for building AURA documents.""" - model_cls: ClassVar[Type[LibModel]] + model_cls: ClassVar[type[LibModel]] lib: Library args: Mapping[str, str] @@ -154,7 +154,7 @@ class AURADocument: return make_response(document, status) @classmethod - def get_attribute_converter(cls, beets_attr: str) -> Type[SQLiteType]: + def get_attribute_converter(cls, beets_attr: str) -> type[SQLiteType]: """Work out what data type an attribute should be for beets. Args: @@ -375,7 +375,7 @@ class TrackDocument(AURADocument): return self.lib.items(query, sort) @classmethod - def get_attribute_converter(cls, beets_attr: str) -> Type[SQLiteType]: + def get_attribute_converter(cls, beets_attr: str) -> type[SQLiteType]: """Work out what data type an attribute should be for beets. Args: diff --git a/beetsplug/autobpm.py b/beetsplug/autobpm.py index aace0c593..9c953f711 100644 --- a/beetsplug/autobpm.py +++ b/beetsplug/autobpm.py @@ -11,81 +11,74 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Uses Librosa to calculate the `bpm` field. 
-""" +"""Uses Librosa to calculate the `bpm` field.""" +from __future__ import annotations -from librosa import beat, load -from soundfile import LibsndfileError +from collections.abc import Iterable +from typing import TYPE_CHECKING + +import librosa -from beets import ui, util from beets.plugins import BeetsPlugin +from beets.ui import Subcommand, should_write + +if TYPE_CHECKING: + from beets.importer import ImportTask + from beets.library import Item, Library class AutoBPMPlugin(BeetsPlugin): - def __init__(self): + def __init__(self) -> None: super().__init__() self.config.add( { "auto": True, "overwrite": False, + "beat_track_kwargs": {}, } ) - if self.config["auto"].get(bool): + if self.config["auto"]: self.import_stages = [self.imported] - def commands(self): - cmd = ui.Subcommand( + def commands(self) -> list[Subcommand]: + cmd = Subcommand( "autobpm", help="detect and add bpm from audio using Librosa" ) cmd.func = self.command return [cmd] - def command(self, lib, opts, args): - self.calculate_bpm(lib.items(ui.decargs(args)), write=ui.should_write()) + def command(self, lib: Library, _, args: list[str]) -> None: + self.calculate_bpm(list(lib.items(args)), write=should_write()) - def imported(self, session, task): + def imported(self, _, task: ImportTask) -> None: self.calculate_bpm(task.imported_items()) - def calculate_bpm(self, items, write=False): - overwrite = self.config["overwrite"].get(bool) - + def calculate_bpm(self, items: list[Item], write: bool = False) -> None: for item in items: - if item["bpm"]: - self._log.info( - "found bpm {0} for {1}", - item["bpm"], - util.displayable_path(item.path), - ) - if not overwrite: + path = item.filepath + if bpm := item.bpm: + self._log.info("BPM for {} already exists: {}", path, bpm) + if not self.config["overwrite"]: continue try: - y, sr = load(util.syspath(item.path), res_type="kaiser_fast") - except LibsndfileError as exc: - self._log.error( - "LibsndfileError: failed to load {0} {1}", - util.displayable_path(item.path), - exc, - ) - continue - except ValueError as exc: - self._log.error( - "ValueError: failed to load {0} {1}", - util.displayable_path(item.path), - exc, - ) + y, sr = librosa.load(item.filepath, res_type="kaiser_fast") + except Exception as exc: + self._log.error("Failed to load {}: {}", path, exc) continue - tempo, _ = beat.beat_track(y=y, sr=sr) - bpm = round(tempo) + kwargs = self.config["beat_track_kwargs"].flatten() + try: + tempo, _ = librosa.beat.beat_track(y=y, sr=sr, **kwargs) + except Exception as exc: + self._log.error("Failed to measure BPM for {}: {}", path, exc) + continue + + bpm = round(tempo[0] if isinstance(tempo, Iterable) else tempo) item["bpm"] = bpm - self._log.info( - "added computed bpm {0} for {1}", - bpm, - util.displayable_path(item.path), - ) + self._log.info("Computed BPM for {}: {}", path, bpm) if write: item.try_write() diff --git a/beetsplug/badfiles.py b/beetsplug/badfiles.py index 056b65346..f93f03d5e 100644 --- a/beetsplug/badfiles.py +++ b/beetsplug/badfiles.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Use command-line tools to check for audio file corruption. 
-""" - +"""Use command-line tools to check for audio file corruption.""" import errno import os @@ -30,7 +28,7 @@ from beets.ui import Subcommand from beets.util import displayable_path, par_map -class CheckerCommandException(Exception): +class CheckerCommandError(Exception): """Raised when running a checker failed. Attributes: @@ -70,7 +68,7 @@ class BadFiles(BeetsPlugin): errors = 1 status = e.returncode except OSError as e: - raise CheckerCommandException(cmd, e) + raise CheckerCommandError(cmd, e) output = output.decode(sys.getdefaultencoding(), "replace") return status, errors, [line for line in output.split("\n") if line] @@ -128,7 +126,7 @@ class BadFiles(BeetsPlugin): path = item.path.decode(sys.getfilesystemencoding()) try: status, errors, output = checker(path) - except CheckerCommandException as e: + except CheckerCommandError as e: if e.errno == errno.ENOENT: self._log.error( "command not found: {} when validating file: {}", @@ -200,7 +198,7 @@ class BadFiles(BeetsPlugin): elif sel == "c": return None elif sel == "b": - raise importer.ImportAbort() + raise importer.ImportAbortError() else: raise Exception(f"Unexpected selection: {sel}") diff --git a/beetsplug/bareasc.py b/beetsplug/bareasc.py index 8cdcbb113..0a867dfe1 100644 --- a/beetsplug/bareasc.py +++ b/beetsplug/bareasc.py @@ -18,7 +18,6 @@ """Provides a bare-ASCII matching query.""" - from unidecode import unidecode from beets import ui diff --git a/beetsplug/beatport.py b/beetsplug/beatport.py index 6108b0399..fab720c2b 100644 --- a/beetsplug/beatport.py +++ b/beetsplug/beatport.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Adds Beatport release and track search support to the autotagger -""" +"""Adds Beatport release and track search support to the autotagger""" import json import re diff --git a/beetsplug/bench.py b/beetsplug/bench.py index 673b9b7c6..62d512ce7 100644 --- a/beetsplug/bench.py +++ b/beetsplug/bench.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Some simple performance benchmarks for beets. -""" - +"""Some simple performance benchmarks for beets.""" import cProfile import timeit diff --git a/beetsplug/bpd/__init__.py b/beetsplug/bpd/__init__.py index a4cb4d291..da6c2eb46 100644 --- a/beetsplug/bpd/__init__.py +++ b/beetsplug/bpd/__init__.py @@ -17,7 +17,6 @@ Beets library. Attempts to implement a compatible protocol to allow use of the wide range of MPD clients. """ - import inspect import math import random @@ -27,7 +26,6 @@ import sys import time import traceback from string import Template -from typing import List from mediafile import MediaFile @@ -168,13 +166,13 @@ def cast_arg(t, val): raise ArgumentTypeError() -class BPDClose(Exception): +class BPDCloseError(Exception): """Raised by a command invocation to indicate that the connection should be closed. """ -class BPDIdle(Exception): +class BPDIdleError(Exception): """Raised by a command to indicate the client wants to enter the idle state and should be notified when a relevant event happens. 
""" @@ -349,7 +347,7 @@ class BaseServer: for system in subsystems: if system not in SUBSYSTEMS: raise BPDError(ERROR_ARG, f"Unrecognised idle event: {system}") - raise BPDIdle(subsystems) # put the connection into idle mode + raise BPDIdleError(subsystems) # put the connection into idle mode def cmd_kill(self, conn): """Exits the server process.""" @@ -357,7 +355,7 @@ class BaseServer: def cmd_close(self, conn): """Closes the connection.""" - raise BPDClose() + raise BPDCloseError() def cmd_password(self, conn, password): """Attempts password authentication.""" @@ -739,13 +737,13 @@ class BaseServer: # Additions to the MPD protocol. - def cmd_crash_TypeError(self, conn): # noqa: N802 + def cmd_crash(self, conn): """Deliberately trigger a TypeError for testing purposes. We want to test that the server properly responds with ERROR_SYSTEM without crashing, and that this is not treated as ERROR_ARG (since it is caused by a programming error, not a protocol error). """ - "a" + 2 + raise TypeError class Connection: @@ -773,8 +771,8 @@ class Connection: if isinstance(lines, str): lines = [lines] out = NEWLINE.join(lines) + NEWLINE - for l in out.split(NEWLINE)[:-1]: - self.debug(l, kind=">") + for line in out.split(NEWLINE)[:-1]: + self.debug(line, kind=">") if isinstance(out, str): out = out.encode("utf-8") return self.sock.sendall(out) @@ -853,8 +851,8 @@ class MPDConnection(Connection): self.disconnect() # Client sent a blank line. break line = line.decode("utf8") # MPD protocol uses UTF-8. - for l in line.split(NEWLINE): - self.debug(l, kind="<") + for line in line.split(NEWLINE): + self.debug(line, kind="<") if self.idle_subscriptions: # The connection is in idle mode. @@ -888,12 +886,12 @@ class MPDConnection(Connection): # Ordinary command. try: yield bluelet.call(self.do_command(Command(line))) - except BPDClose: + except BPDCloseError: # Command indicates that the conn should close. self.sock.close() self.disconnect() # Client explicitly closed. return - except BPDIdle as e: + except BPDIdleError as e: self.idle_subscriptions = e.subsystems self.debug( "awaiting: {}".format(" ".join(e.subsystems)), kind="z" @@ -922,8 +920,8 @@ class ControlConnection(Connection): if not line: break # Client sent a blank line. line = line.decode("utf8") # Protocol uses UTF-8. - for l in line.split(NEWLINE): - self.debug(l, kind="<") + for line in line.split(NEWLINE): + self.debug(line, kind="<") command = Command(line) try: func = command.delegate("ctrl_", self) @@ -1046,12 +1044,12 @@ class Command: e.cmd_name = self.name raise e - except BPDClose: + except BPDCloseError: # An indication that the connection should close. Send # it on the Connection. raise - except BPDIdle: + except BPDIdleError: raise except Exception: @@ -1060,7 +1058,7 @@ class Command: raise BPDError(ERROR_SYSTEM, "server error", self.name) -class CommandList(List[Command]): +class CommandList(list[Command]): """A list of commands issued by the client for processing by the server. May be verbose, in which case the response is delimited, or not. Should be a list of `Command` objects. diff --git a/beetsplug/bpd/gstplayer.py b/beetsplug/bpd/gstplayer.py index 77ddc1983..03fb179aa 100644 --- a/beetsplug/bpd/gstplayer.py +++ b/beetsplug/bpd/gstplayer.py @@ -16,7 +16,6 @@ music player. 
""" - import _thread import copy import os diff --git a/beetsplug/bpm.py b/beetsplug/bpm.py index 3edcbef82..10edfbfd7 100644 --- a/beetsplug/bpm.py +++ b/beetsplug/bpm.py @@ -14,7 +14,6 @@ """Determine BPM by pressing a key to the rhythm.""" - import time from beets import ui diff --git a/beetsplug/bpsync.py b/beetsplug/bpsync.py index 4f3e0e907..05be94c99 100644 --- a/beetsplug/bpsync.py +++ b/beetsplug/bpsync.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Update library's tags using Beatport. -""" +"""Update library's tags using Beatport.""" from beets import autotag, library, ui, util from beets.plugins import BeetsPlugin, apply_item_changes diff --git a/beetsplug/bucket.py b/beetsplug/bucket.py index 59ee080bb..9246539fc 100644 --- a/beetsplug/bucket.py +++ b/beetsplug/bucket.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Provides the %bucket{} function for path formatting. -""" - +"""Provides the %bucket{} function for path formatting.""" import re import string diff --git a/beetsplug/convert.py b/beetsplug/convert.py index f150b7c36..536acf16e 100644 --- a/beetsplug/convert.py +++ b/beetsplug/convert.py @@ -12,8 +12,8 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Converts tracks or albums to external directory -""" +"""Converts tracks or albums to external directory""" + import logging import os import shlex @@ -85,18 +85,23 @@ def get_format(fmt=None): return (command.encode("utf-8"), extension.encode("utf-8")) +def in_no_convert(item: Item) -> bool: + no_convert_query = config["convert"]["no_convert"].as_str() + + if no_convert_query: + query, _ = parse_query_string(no_convert_query, Item) + return query.match(item) + else: + return False + + def should_transcode(item, fmt): """Determine whether the item should be transcoded as part of conversion (i.e., its bitrate is high or it has the wrong format). """ - no_convert_queries = config["convert"]["no_convert"].as_str_seq() - if no_convert_queries: - for query_string in no_convert_queries: - query, _ = parse_query_string(query_string, Item) - if query.match(item): - return False - if config["convert"]["never_convert_lossy_files"] and not ( - item.format.lower() in LOSSLESS_FORMATS + if in_no_convert(item) or ( + config["convert"]["never_convert_lossy_files"] + and item.format.lower() not in LOSSLESS_FORMATS ): return False maxbr = config["convert"]["max_bitrate"].get(Optional(int)) diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py index a861ea0e4..6f05474b9 100644 --- a/beetsplug/deezer.py +++ b/beetsplug/deezer.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. 
-"""Adds Deezer release and track search support to the autotagger -""" +"""Adds Deezer release and track search support to the autotagger""" import collections import time @@ -112,8 +111,8 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): day = None else: raise ui.UserError( - "Invalid `release_date` returned " - "by {} API: '{}'".format(self.data_source, release_date) + f"Invalid `release_date` returned by {self.data_source} API: " + f"{release_date!r}" ) tracks_obj = self.fetch_data(self.album_url + deezer_id + "/tracks") if tracks_obj is None: @@ -279,12 +278,20 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin): if not query: return None self._log.debug(f"Searching {self.data_source} for '{query}'") - response = requests.get( - self.search_url + query_type, - params={"q": query}, - timeout=10, - ) - response.raise_for_status() + try: + response = requests.get( + self.search_url + query_type, + params={"q": query}, + timeout=10, + ) + response.raise_for_status() + except requests.exceptions.RequestException as e: + self._log.error( + "Error fetching data from {} API\n Error: {}", + self.data_source, + e, + ) + return None response_data = response.json().get("data", []) self._log.debug( "Found {} result(s) from {} for '{}'", diff --git a/beetsplug/discogs.py b/beetsplug/discogs.py index 344d67a24..0dc8e8a17 100644 --- a/beetsplug/discogs.py +++ b/beetsplug/discogs.py @@ -16,6 +16,8 @@ python3-discogs-client library. """ +from __future__ import annotations + import http.client import json import os @@ -30,6 +32,7 @@ from discogs_client import Client, Master, Release from discogs_client import __version__ as dc_string from discogs_client.exceptions import DiscogsAPIError from requests.exceptions import ConnectionError +from typing_extensions import TypedDict import beets import beets.ui @@ -52,6 +55,12 @@ CONNECTION_ERRORS = ( ) +class ReleaseFormat(TypedDict): + name: str + qty: int + descriptions: list[str] | None + + class DiscogsPlugin(BeetsPlugin): def __init__(self): super().__init__() @@ -363,6 +372,18 @@ class DiscogsPlugin(BeetsPlugin): ) return None + @staticmethod + def get_media_and_albumtype( + formats: list[ReleaseFormat] | None, + ) -> tuple[str | None, str | None]: + media = albumtype = None + if formats and (first_format := formats[0]): + if descriptions := first_format["descriptions"]: + albumtype = ", ".join(descriptions) + media = first_format["name"] + + return media, albumtype + def get_album_info(self, result): """Returns an AlbumInfo object for a discogs Release object.""" # Explicitly reload the `Release` fields, as they might not be yet @@ -413,13 +434,11 @@ class DiscogsPlugin(BeetsPlugin): # Extract information for the optional AlbumInfo fields that are # contained on nested discogs fields. 
- albumtype = media = label = catalogno = labelid = None - if result.data.get("formats"): - albumtype = ( - ", ".join(result.data["formats"][0].get("descriptions", [])) - or None - ) - media = result.data["formats"][0]["name"] + media, albumtype = self.get_media_and_albumtype( + result.data.get("formats") + ) + + label = catalogno = labelid = None if result.data.get("labels"): label = result.data["labels"][0].get("name") catalogno = result.data["labels"][0].get("catno") diff --git a/beetsplug/duplicates.py b/beetsplug/duplicates.py index ced96e403..1e30a60a5 100644 --- a/beetsplug/duplicates.py +++ b/beetsplug/duplicates.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""List duplicate tracks or albums. -""" +"""List duplicate tracks or albums.""" import os import shlex @@ -304,7 +303,9 @@ class DuplicatesPlugin(BeetsPlugin): kind = "items" if all(isinstance(o, Item) for o in objs) else "albums" if tiebreak and kind in tiebreak.keys(): - key = lambda x: tuple(getattr(x, k) for k in tiebreak[kind]) + + def key(x): + return tuple(getattr(x, k) for k in tiebreak[kind]) else: if kind == "items": @@ -317,9 +318,13 @@ class DuplicatesPlugin(BeetsPlugin): ) fields = Item.all_keys() - key = lambda x: sum(1 for f in fields if truthy(getattr(x, f))) + + def key(x): + return sum(1 for f in fields if truthy(getattr(x, f))) else: - key = lambda x: len(x.items()) + + def key(x): + return len(x.items()) return sorted(objs, key=key, reverse=True) diff --git a/beetsplug/edit.py b/beetsplug/edit.py index 323dd9e41..51b36bdab 100644 --- a/beetsplug/edit.py +++ b/beetsplug/edit.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Open metadata information in a text editor to let the user edit it. -""" +"""Open metadata information in a text editor to let the user edit it.""" import codecs import os diff --git a/beetsplug/embyupdate.py b/beetsplug/embyupdate.py index 22c889473..2cda6af5e 100644 --- a/beetsplug/embyupdate.py +++ b/beetsplug/embyupdate.py @@ -1,11 +1,11 @@ """Updates the Emby Library whenever the beets library is changed. - emby: - host: localhost - port: 8096 - username: user - apikey: apikey - password: password +emby: + host: localhost + port: 8096 + username: user + apikey: apikey + password: password """ import hashlib diff --git a/beetsplug/export.py b/beetsplug/export.py index ef3ba94aa..9b8ad3580 100644 --- a/beetsplug/export.py +++ b/beetsplug/export.py @@ -11,9 +11,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Exports data from beets -""" - +"""Exports data from beets""" import codecs import csv diff --git a/beetsplug/fetchart.py b/beetsplug/fetchart.py index 72aa3aa29..0da884278 100644 --- a/beetsplug/fetchart.py +++ b/beetsplug/fetchart.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Fetches album art. 
-""" +"""Fetches album art.""" import os import re @@ -1252,10 +1251,6 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin): self.cautious = self.config["cautious"].get(bool) self.store_source = self.config["store_source"].get(bool) - self.src_removed = config["import"]["delete"].get(bool) or config[ - "import" - ]["move"].get(bool) - self.cover_format = self.config["cover_format"].get( confuse.Optional(str) ) @@ -1297,6 +1292,10 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin): for s, c in sources ] + @staticmethod + def _is_source_file_removal_enabled(): + return config["import"]["delete"] or config["import"]["move"] + # Asynchronous; after music is added to the library. def fetch_art(self, session, task): """Find art for the album being imported.""" @@ -1339,10 +1338,11 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin): """Place the discovered art in the filesystem.""" if task in self.art_candidates: candidate = self.art_candidates.pop(task) + removal_enabled = FetchArtPlugin._is_source_file_removal_enabled() - self._set_art(task.album, candidate, not self.src_removed) + self._set_art(task.album, candidate, not removal_enabled) - if self.src_removed: + if removal_enabled: task.prune(candidate.path) # Manual album art fetching. diff --git a/beetsplug/filefilter.py b/beetsplug/filefilter.py index 5618c1bd1..b78a3750e 100644 --- a/beetsplug/filefilter.py +++ b/beetsplug/filefilter.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Filter imported files using a regular expression. -""" - +"""Filter imported files using a regular expression.""" import re diff --git a/beetsplug/fish.py b/beetsplug/fish.py index 71ac85743..4cf9b60a1 100644 --- a/beetsplug/fish.py +++ b/beetsplug/fish.py @@ -22,7 +22,6 @@ by default but can be added via the `-e` / `--extravalues` flag. For example: `beet fish -e genre -e albumartist` """ - import os from operator import attrgetter diff --git a/beetsplug/freedesktop.py b/beetsplug/freedesktop.py index a9a25279c..50bbf18e5 100644 --- a/beetsplug/freedesktop.py +++ b/beetsplug/freedesktop.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Creates freedesktop.org-compliant .directory files on an album level. -""" - +"""Creates freedesktop.org-compliant .directory files on an album level.""" from beets import ui from beets.plugins import BeetsPlugin diff --git a/beetsplug/ftintitle.py b/beetsplug/ftintitle.py index 2ba94c333..e4f51fd10 100644 --- a/beetsplug/ftintitle.py +++ b/beetsplug/ftintitle.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Moves "featured" artists to the title from the artist field. -""" +"""Moves "featured" artists to the title from the artist field.""" import re @@ -38,7 +37,13 @@ def split_on_feat(artist): def contains_feat(title): """Determine whether the title contains a "featured" marker.""" - return bool(re.search(plugins.feat_tokens(), title, flags=re.IGNORECASE)) + return bool( + re.search( + plugins.feat_tokens(for_artist=False), + title, + flags=re.IGNORECASE, + ) + ) def find_feat_part(artist, albumartist): @@ -78,6 +83,7 @@ class FtInTitlePlugin(plugins.BeetsPlugin): "auto": True, "drop": False, "format": "feat. 
{0}", + "keep_in_artist": False, } ) @@ -101,10 +107,11 @@ class FtInTitlePlugin(plugins.BeetsPlugin): def func(lib, opts, args): self.config.set_args(opts) drop_feat = self.config["drop"].get(bool) + keep_in_artist_field = self.config["keep_in_artist"].get(bool) write = ui.should_write() for item in lib.items(ui.decargs(args)): - self.ft_in_title(item, drop_feat) + self.ft_in_title(item, drop_feat, keep_in_artist_field) item.store() if write: item.try_write() @@ -115,20 +122,27 @@ class FtInTitlePlugin(plugins.BeetsPlugin): def imported(self, session, task): """Import hook for moving featuring artist automatically.""" drop_feat = self.config["drop"].get(bool) + keep_in_artist_field = self.config["keep_in_artist"].get(bool) for item in task.imported_items(): - self.ft_in_title(item, drop_feat) + self.ft_in_title(item, drop_feat, keep_in_artist_field) item.store() - def update_metadata(self, item, feat_part, drop_feat): + def update_metadata(self, item, feat_part, drop_feat, keep_in_artist_field): """Choose how to add new artists to the title and set the new metadata. Also, print out messages about any changes that are made. If `drop_feat` is set, then do not add the artist to the title; just remove it from the artist field. """ - # In all cases, update the artist fields. - self._log.info("artist: {0} -> {1}", item.artist, item.albumartist) - item.artist = item.albumartist + # In case the artist is kept, do not update the artist fields. + if keep_in_artist_field: + self._log.info( + "artist: {0} (Not changing due to keep_in_artist)", item.artist + ) + else: + self._log.info("artist: {0} -> {1}", item.artist, item.albumartist) + item.artist = item.albumartist + if item.artist_sort: # Just strip the featured artist from the sort name. item.artist_sort, _ = split_on_feat(item.artist_sort) @@ -142,7 +156,7 @@ class FtInTitlePlugin(plugins.BeetsPlugin): self._log.info("title: {0} -> {1}", item.title, new_title) item.title = new_title - def ft_in_title(self, item, drop_feat): + def ft_in_title(self, item, drop_feat, keep_in_artist_field): """Look for featured artists in the item's artist fields and move them to the title. """ @@ -163,6 +177,8 @@ class FtInTitlePlugin(plugins.BeetsPlugin): # If we have a featuring artist, move it to the title. if feat_part: - self.update_metadata(item, feat_part, drop_feat) + self.update_metadata( + item, feat_part, drop_feat, keep_in_artist_field + ) else: self._log.info("no featuring artists found") diff --git a/beetsplug/fuzzy.py b/beetsplug/fuzzy.py index 45ada8b0b..959544ed3 100644 --- a/beetsplug/fuzzy.py +++ b/beetsplug/fuzzy.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Provides a fuzzy matching query. -""" - +"""Provides a fuzzy matching query.""" import difflib diff --git a/beetsplug/importfeeds.py b/beetsplug/importfeeds.py index 316c1c72b..0a5a6afe4 100644 --- a/beetsplug/importfeeds.py +++ b/beetsplug/importfeeds.py @@ -17,6 +17,7 @@ music player. Also allow printing the new file locations to stdout in case one wants to manually add music to a player by its path. """ + import datetime import os import re diff --git a/beetsplug/info.py b/beetsplug/info.py index 1c3b6f542..d759d6066 100644 --- a/beetsplug/info.py +++ b/beetsplug/info.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Shows file metadata. 
-""" - +"""Shows file metadata.""" import os diff --git a/beetsplug/inline.py b/beetsplug/inline.py index 4ca676e5f..4092c46d0 100644 --- a/beetsplug/inline.py +++ b/beetsplug/inline.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Allows inline path template customization code in the config file. -""" +"""Allows inline path template customization code in the config file.""" import itertools import traceback diff --git a/beetsplug/ipfs.py b/beetsplug/ipfs.py index 06835ba94..29d65ab78 100644 --- a/beetsplug/ipfs.py +++ b/beetsplug/ipfs.py @@ -11,9 +11,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Adds support for ipfs. Requires go-ipfs and a running ipfs daemon -""" - +"""Adds support for ipfs. Requires go-ipfs and a running ipfs daemon""" import os import shutil diff --git a/beetsplug/keyfinder.py b/beetsplug/keyfinder.py index d6605486d..87f0cc427 100644 --- a/beetsplug/keyfinder.py +++ b/beetsplug/keyfinder.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Uses the `KeyFinder` program to add the `initial_key` field. -""" - +"""Uses the `KeyFinder` program to add the `initial_key` field.""" import os.path import subprocess diff --git a/beetsplug/lastgenre/__init__.py b/beetsplug/lastgenre/__init__.py index 50be9d1b9..0498c4c52 100644 --- a/beetsplug/lastgenre/__init__.py +++ b/beetsplug/lastgenre/__init__.py @@ -21,6 +21,7 @@ and has been edited to remove some questionable entries. The scraper script used is available here: https://gist.github.com/1241307 """ + import codecs import os import traceback @@ -408,9 +409,14 @@ class LastGenrePlugin(plugins.BeetsPlugin): for album in lib.albums(ui.decargs(args)): album.genre, src = self._get_genre(album) self._log.info( - "genre for album {0} ({1}): {0.genre}", album, src + 'genre for album "{0.album}" ({1}): {0.genre}', + album, + src, ) - album.store() + if "track" in self.sources: + album.store(inherit=False) + else: + album.store() for item in album.items(): # If we're using track-level sources, also look up each @@ -419,7 +425,7 @@ class LastGenrePlugin(plugins.BeetsPlugin): item.genre, src = self._get_genre(item) item.store() self._log.info( - "genre for track {0} ({1}): {0.genre}", + 'genre for track "{0.title}" ({1}): {0.genre}', item, src, ) @@ -431,10 +437,10 @@ class LastGenrePlugin(plugins.BeetsPlugin): # an album for item in lib.items(ui.decargs(args)): item.genre, src = self._get_genre(item) - self._log.debug( - "added last.fm item genre ({0}): {1}", src, item.genre - ) item.store() + self._log.info( + "genre for track {0.title} ({1}): {0.genre}", item, src + ) lastgenre_cmd.func = lastgenre_func return [lastgenre_cmd] @@ -445,23 +451,32 @@ class LastGenrePlugin(plugins.BeetsPlugin): album = task.album album.genre, src = self._get_genre(album) self._log.debug( - "added last.fm album genre ({0}): {1}", src, album.genre + 'genre for album "{0.album}" ({1}): {0.genre}', album, src ) - album.store() + # If we're using track-level sources, store the album genre only, + # then also look up individual track genres. 
if "track" in self.sources: + album.store(inherit=False) for item in album.items(): item.genre, src = self._get_genre(item) self._log.debug( - "added last.fm item genre ({0}): {1}", src, item.genre + 'genre for track "{0.title}" ({1}): {0.genre}', + item, + src, ) item.store() + # Store the album genre and inherit to tracks. + else: + album.store() else: item = task.item item.genre, src = self._get_genre(item) self._log.debug( - "added last.fm item genre ({0}): {1}", src, item.genre + 'genre for track "{0.title}" ({1}): {0.genre}', + item, + src, ) item.store() diff --git a/beetsplug/listenbrainz.py b/beetsplug/listenbrainz.py index 4855481f8..1e2912793 100644 --- a/beetsplug/listenbrainz.py +++ b/beetsplug/listenbrainz.py @@ -148,9 +148,6 @@ class ListenBrainzPlugin(BeetsPlugin): return self._make_request(url) def get_listenbrainz_playlists(self): - """Returns a list of playlists created by ListenBrainz.""" - import re - resp = self.get_playlists_createdfor(self.username) playlists = resp.get("playlists") listenbrainz_playlists = [] @@ -159,35 +156,32 @@ class ListenBrainzPlugin(BeetsPlugin): playlist_info = playlist.get("playlist") if playlist_info.get("creator") == "listenbrainz": title = playlist_info.get("title") - match = re.search( - r"(Missed Recordings of \d{4}|Discoveries of \d{4})", title + self._log.debug(f"Playlist title: {title}") + playlist_type = ( + "Exploration" if "Exploration" in title else "Jams" ) - if "Exploration" in title: - playlist_type = "Exploration" - elif "Jams" in title: - playlist_type = "Jams" - elif match: - playlist_type = match.group(1) - else: - playlist_type = None - if "week of " in title: + if "week of" in title: date_str = title.split("week of ")[1].split(" ")[0] date = datetime.datetime.strptime( date_str, "%Y-%m-%d" ).date() else: - date = None + continue identifier = playlist_info.get("identifier") id = identifier.split("/")[-1] - if playlist_type in ["Jams", "Exploration"]: - listenbrainz_playlists.append( - { - "type": playlist_type, - "date": date, - "identifier": id, - "title": title, - } - ) + listenbrainz_playlists.append( + {"type": playlist_type, "date": date, "identifier": id} + ) + listenbrainz_playlists = sorted( + listenbrainz_playlists, key=lambda x: x["type"] + ) + listenbrainz_playlists = sorted( + listenbrainz_playlists, key=lambda x: x["date"], reverse=True + ) + for playlist in listenbrainz_playlists: + self._log.debug( + f'Playlist: {playlist["type"]} - {playlist["date"]}' + ) return listenbrainz_playlists def get_playlist(self, identifier): @@ -199,17 +193,20 @@ class ListenBrainzPlugin(BeetsPlugin): """This function returns a list of tracks in the playlist.""" tracks = [] for track in playlist.get("playlist").get("track"): + identifier = track.get("identifier") + if isinstance(identifier, list): + identifier = identifier[0] + tracks.append( { - "artist": track.get("creator"), - "identifier": track.get("identifier").split("/")[-1], + "artist": track.get("creator", "Unknown artist"), + "identifier": identifier.split("/")[-1], "title": track.get("title"), } ) return self.get_track_info(tracks) def get_track_info(self, tracks): - """Returns a list of track info.""" track_info = [] for track in tracks: identifier = track.get("identifier") @@ -242,25 +239,37 @@ class ListenBrainzPlugin(BeetsPlugin): ) return track_info - def get_weekly_playlist(self, index): - """Returns a list of weekly playlists based on the index.""" + def get_weekly_playlist(self, playlist_type, most_recent=True): + # Fetch all playlists playlists = 
self.get_listenbrainz_playlists()
- playlist = self.get_playlist(playlists[index].get("identifier"))
- self._log.info(f"Getting {playlist.get('playlist').get('title')}")
+ # Filter playlists by type
+ filtered_playlists = [
+ p for p in playlists if p["type"] == playlist_type
+ ]
+ # Sort playlists by date in descending order
+ sorted_playlists = sorted(
+ filtered_playlists, key=lambda x: x["date"], reverse=True
+ )
+ # Select the most recent or older playlist based on the most_recent flag
+ selected_playlist = (
+ sorted_playlists[0] if most_recent else sorted_playlists[1]
+ )
+ self._log.debug(
+ f"Selected playlist: {selected_playlist['type']} "
+ f"- {selected_playlist['date']}"
+ )
+ # Fetch and return tracks from the selected playlist
+ playlist = self.get_playlist(selected_playlist.get("identifier"))
 return self.get_tracks_from_playlist(playlist)
 def get_weekly_exploration(self):
- """Returns a list of weekly exploration."""
- return self.get_weekly_playlist(0)
+ return self.get_weekly_playlist("Exploration", most_recent=True)
 def get_weekly_jams(self):
- """Returns a list of weekly jams."""
- return self.get_weekly_playlist(1)
+ return self.get_weekly_playlist("Jams", most_recent=True)
 def get_last_weekly_exploration(self):
- """Returns a list of weekly exploration."""
- return self.get_weekly_playlist(3)
+ return self.get_weekly_playlist("Exploration", most_recent=False)
 def get_last_weekly_jams(self):
- """Returns a list of weekly jams."""
- return self.get_weekly_playlist(3)
+ return self.get_weekly_playlist("Jams", most_recent=False)
diff --git a/beetsplug/loadext.py b/beetsplug/loadext.py
index 5e8b59a8f..cc673dab2 100644
--- a/beetsplug/loadext.py
+++ b/beetsplug/loadext.py
@@ -12,9 +12,7 @@
 # The above copyright notice and this permission notice shall be
 # included in all copies or substantial portions of the Software.
-"""Load SQLite extensions.
-"""
-
+"""Load SQLite extensions."""
 import sqlite3
diff --git a/beetsplug/lyrics.py b/beetsplug/lyrics.py
index db29c9c6c..047550386 100644
--- a/beetsplug/lyrics.py
+++ b/beetsplug/lyrics.py
@@ -12,9 +12,9 @@
 # The above copyright notice and this permission notice shall be
 # included in all copies or substantial portions of the Software.
-"""Fetches, embeds, and displays lyrics.
-"""
+"""Fetches, embeds, and displays lyrics."""
+from __future__ import annotations
 import difflib
 import errno
@@ -24,8 +24,10 @@ import os.path
 import re
 import struct
 import unicodedata
-import urllib
 import warnings
+from functools import partial
+from typing import ClassVar
+from urllib.parse import quote, urlencode
 import requests
 from unidecode import unidecode
@@ -48,26 +50,11 @@ except ImportError:
 import beets
 from beets import plugins, ui
-from beets.autotag.hooks import string_dist
 DIV_RE = re.compile(r"<(/?)div>?", re.I)
 COMMENT_RE = re.compile(r"<!--.*-->", re.S)
 TAG_RE = re.compile(r"<[^>]*>")
 BREAK_RE = re.compile(r"\n?\s*<br([\s|/][^>]*)*>\s*\n?", re.I)
-URL_CHARACTERS = {
- "\u2018": "'",
- "\u2019": "'",
- "\u201c": '"',
- "\u201d": '"',
- "\u2010": "-",
- "\u2011": "-",
- "\u2012": "-",
- "\u2013": "-",
- "\u2014": "-",
- "\u2015": "-",
- "\u2016": "-",
- "\u2026": "...",
-}
 USER_AGENT = f"beets/{beets.__version__}"
 # The content for the base index.rst generated in ReST mode.
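The removal of ``URL_CHARACTERS`` above goes hand in hand with the ``DirectBackend`` class introduced below: instead of one shared escaping table, each backend that builds its lyrics page URL directly now declares a ``URL_TEMPLATE`` and its own ``encode`` classmethod, while ``build_url`` simply formats the encoded arguments into the template. A minimal sketch of that shape, using a made-up lyrics site rather than the real ``MusiXmatch``/``Tekstowo`` subclasses:

.. code-block:: python

    import re
    from urllib.parse import quote


    class DirectBackend:
        """Base for backends that build the lyrics page URL directly."""

        URL_TEMPLATE: str  # may include {} placeholders

        @classmethod
        def encode(cls, text: str) -> str:
            raise NotImplementedError

        @classmethod
        def build_url(cls, *args: str) -> str:
            # Encode every argument, then drop it into the template.
            return cls.URL_TEMPLATE.format(*map(cls.encode, args))


    class ExampleLyricsSite(DirectBackend):
        # Hypothetical site, for illustration only.
        URL_TEMPLATE = "https://lyrics.example.com/{}/{}"

        @classmethod
        def encode(cls, text: str) -> str:
            # Collapse runs of non-word characters to "-" before quoting.
            return quote(re.sub(r"\W+", "-", text.strip().lower()))


    print(ExampleLyricsSite.build_url("Bob Dylan", "Like a Rolling Stone"))
    # https://lyrics.example.com/bob-dylan/like-a-rolling-stone

The real subclasses differ only in their ``encode`` rules: MusiXmatch applies its ``REPLACEMENTS`` regexes before quoting, while Tekstowo lower-cases, transliterates with ``unidecode``, and replaces non-word characters with underscores.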
@@ -235,21 +222,6 @@ class Backend: self._log = log self.config = config - @staticmethod - def _encode(s): - """Encode the string for inclusion in a URL""" - if isinstance(s, str): - for char, repl in URL_CHARACTERS.items(): - s = s.replace(char, repl) - s = s.encode("utf-8", "ignore") - return urllib.parse.quote(s) - - def build_url(self, artist, title): - return self.URL_PATTERN % ( - self._encode(artist.title()), - self._encode(title.title()), - ) - def fetch_url(self, url): """Retrieve the content at a given URL, or return None if the source is unreachable. @@ -305,12 +277,29 @@ class LRCLib(Backend): return None if self.config["synced"]: - return data.get("syncedLyrics") + return data.get("syncedLyrics") or data.get("plainLyrics") return data.get("plainLyrics") -class MusiXmatch(Backend): +class DirectBackend(Backend): + """A backend for fetching lyrics directly.""" + + URL_TEMPLATE: ClassVar[str] #: May include formatting placeholders + + @classmethod + def encode(cls, text: str) -> str: + """Encode the string for inclusion in a URL.""" + raise NotImplementedError + + @classmethod + def build_url(cls, *args: str) -> str: + return cls.URL_TEMPLATE.format(*map(cls.encode, args)) + + +class MusiXmatch(DirectBackend): + URL_TEMPLATE = "https://www.musixmatch.com/lyrics/{}/{}" + REPLACEMENTS = { r"\s+": "-", "<": "Less_Than", @@ -320,14 +309,12 @@ class MusiXmatch(Backend): r"[\]\}]": ")", } - URL_PATTERN = "https://www.musixmatch.com/lyrics/%s/%s" - @classmethod - def _encode(cls, s): + def encode(cls, text: str) -> str: for old, new in cls.REPLACEMENTS.items(): - s = re.sub(old, new, s) + text = re.sub(old, new, text) - return super()._encode(s) + return quote(unidecode(text)) def fetch(self, artist, title, album=None, length=None): url = self.build_url(artist, title) @@ -496,90 +483,34 @@ class Genius(Backend): return lyrics_div.get_text() -class Tekstowo(Backend): - # Fetch lyrics from Tekstowo.pl. 
- REQUIRES_BS = True +class Tekstowo(DirectBackend): + """Fetch lyrics from Tekstowo.pl.""" - BASE_URL = "http://www.tekstowo.pl" - URL_PATTERN = BASE_URL + "/wyszukaj.html?search-title=%s&search-artist=%s" + REQUIRES_BS = True + URL_TEMPLATE = "https://www.tekstowo.pl/piosenka,{},{}.html" + + non_alpha_to_underscore = partial(re.compile(r"\W").sub, "_") + + @classmethod + def encode(cls, text: str) -> str: + return cls.non_alpha_to_underscore(unidecode(text.lower())) def fetch(self, artist, title, album=None, length=None): - url = self.build_url(title, artist) - search_results = self.fetch_url(url) - if not search_results: - return None + if html := self.fetch_url(self.build_url(artist, title)): + return self.extract_lyrics(html) - song_page_url = self.parse_search_results(search_results) - if not song_page_url: - return None + return None - song_page_html = self.fetch_url(song_page_url) - if not song_page_html: - return None - - return self.extract_lyrics(song_page_html, artist, title) - - def parse_search_results(self, html): + def extract_lyrics(self, html: str) -> str | None: html = _scrape_strip_cruft(html) html = _scrape_merge_paragraphs(html) soup = try_parse_html(html) - if not soup: - return None - content_div = soup.find("div", class_="content") - if not content_div: - return None + if lyrics_div := soup.select_one("div.song-text > div.inner-text"): + return lyrics_div.get_text() - card_div = content_div.find("div", class_="card") - if not card_div: - return None - - song_rows = card_div.find_all("div", class_="box-przeboje") - if not song_rows: - return None - - song_row = song_rows[0] - if not song_row: - return None - - link = song_row.find("a") - if not link: - return None - - return self.BASE_URL + link.get("href") - - def extract_lyrics(self, html, artist, title): - html = _scrape_strip_cruft(html) - html = _scrape_merge_paragraphs(html) - - soup = try_parse_html(html) - if not soup: - return None - - info_div = soup.find("div", class_="col-auto") - if not info_div: - return None - - info_elements = info_div.find_all("a") - if not info_elements: - return None - - html_title = info_elements[-1].get_text() - html_artist = info_elements[-2].get_text() - - title_dist = string_dist(html_title, title) - artist_dist = string_dist(html_artist, artist) - - thresh = self.config["dist_thresh"].get(float) - if title_dist > thresh or artist_dist > thresh: - return None - - lyrics_div = soup.select("div.song-text > div.inner-text") - if not lyrics_div: - return None - - return lyrics_div[0].get_text() + return None def remove_credits(text): @@ -741,7 +672,7 @@ class Google(Backend): url = "https://www.googleapis.com/customsearch/v1?key=%s&cx=%s&q=%s" % ( self.api_key, self.engine_id, - urllib.parse.quote(query.encode("utf-8")), + quote(query.encode("utf-8")), ) data = self.fetch_url(url) @@ -888,7 +819,7 @@ class LyricsPlugin(plugins.BeetsPlugin): oauth_token = json.loads( requests.post( oauth_url, - data=urllib.parse.urlencode(params), + data=urlencode(params), timeout=10, ).content ) @@ -1063,7 +994,7 @@ class LyricsPlugin(plugins.BeetsPlugin): if any(lyrics): break - lyrics = "\n\n---\n\n".join([l for l in lyrics if l]) + lyrics = "\n\n---\n\n".join(filter(None, lyrics)) if lyrics: self._log.info("fetched lyrics: {0}", item) diff --git a/beetsplug/mbsync.py b/beetsplug/mbsync.py index 0e63a6f22..283c40186 100644 --- a/beetsplug/mbsync.py +++ b/beetsplug/mbsync.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial 
portions of the Software. -"""Update library's tags using MusicBrainz. -""" +"""Update library's tags using MusicBrainz.""" import re from collections import defaultdict diff --git a/beetsplug/metasync/__init__.py b/beetsplug/metasync/__init__.py index d17071b5b..2466efe54 100644 --- a/beetsplug/metasync/__init__.py +++ b/beetsplug/metasync/__init__.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Synchronize information from music player libraries -""" - +"""Synchronize information from music player libraries""" from abc import ABCMeta, abstractmethod from importlib import import_module @@ -126,8 +124,7 @@ class MetaSyncPlugin(BeetsPlugin): meta_source_instances[player] = cls(self.config, self._log) except (ImportError, ConfigValueError) as e: self._log.error( - "Failed to instantiate metadata source " - "'{}': {}".format(player, e) + f"Failed to instantiate metadata source {player!r}: {e}" ) # Avoid needlessly iterating over items diff --git a/beetsplug/metasync/amarok.py b/beetsplug/metasync/amarok.py index 195cd8787..f8dcbe3f3 100644 --- a/beetsplug/metasync/amarok.py +++ b/beetsplug/metasync/amarok.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Synchronize information from amarok's library via dbus -""" - +"""Synchronize information from amarok's library via dbus""" from datetime import datetime from os.path import basename diff --git a/beetsplug/metasync/itunes.py b/beetsplug/metasync/itunes.py index 15cbd7bb3..02f592fdc 100644 --- a/beetsplug/metasync/itunes.py +++ b/beetsplug/metasync/itunes.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Synchronize information from iTunes's library -""" - +"""Synchronize information from iTunes's library""" import os import plistlib diff --git a/beetsplug/missing.py b/beetsplug/missing.py index 2e37fde78..d5e4deda1 100644 --- a/beetsplug/missing.py +++ b/beetsplug/missing.py @@ -13,8 +13,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""List missing tracks. -""" +"""List missing tracks.""" from collections import defaultdict diff --git a/beetsplug/parentwork.py b/beetsplug/parentwork.py index 4ddef1c14..26f8f224f 100644 --- a/beetsplug/parentwork.py +++ b/beetsplug/parentwork.py @@ -16,7 +16,6 @@ and work composition date """ - import musicbrainzngs from beets import ui diff --git a/beetsplug/play.py b/beetsplug/play.py index 3476e5824..ddebd7d41 100644 --- a/beetsplug/play.py +++ b/beetsplug/play.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Send the results of a query to the configured music player as a playlist. 
-""" +"""Send the results of a query to the configured music player as a playlist.""" import shlex import subprocess @@ -197,7 +196,7 @@ class PlayPlugin(BeetsPlugin): filename = get_temp_filename(__name__, suffix=".m3u") with open(filename, "wb") as m3u: if utf8_bom: - m3u.write(b"\xEF\xBB\xBF") + m3u.write(b"\xef\xbb\xbf") for item in paths_list: m3u.write(item + b"\n") diff --git a/beetsplug/playlist.py b/beetsplug/playlist.py index 83f95796e..cf1d500e8 100644 --- a/beetsplug/playlist.py +++ b/beetsplug/playlist.py @@ -15,7 +15,7 @@ import fnmatch import os import tempfile -from typing import Sequence +from collections.abc import Sequence import beets from beets.dbcore.query import InQuery diff --git a/beetsplug/random.py b/beetsplug/random.py index dc94a0e3a..05f2cdf77 100644 --- a/beetsplug/random.py +++ b/beetsplug/random.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Get a random song or album from the library. -""" +"""Get a random song or album from the library.""" from beets.plugins import BeetsPlugin from beets.random import random_objs diff --git a/beetsplug/replaygain.py b/beetsplug/replaygain.py index a2753f960..5ee9aa486 100644 --- a/beetsplug/replaygain.py +++ b/beetsplug/replaygain.py @@ -13,10 +13,11 @@ # included in all copies or substantial portions of the Software. +from __future__ import annotations + import collections import enum import math -import optparse import os import queue import signal @@ -25,32 +26,24 @@ import sys import warnings from abc import ABC, abstractmethod from dataclasses import dataclass -from logging import Logger from multiprocessing.pool import ThreadPool from threading import Event, Thread -from typing import ( - Any, - Callable, - DefaultDict, - Dict, - List, - Optional, - Sequence, - Tuple, - Type, - TypeVar, - Union, - cast, -) - -from confuse import ConfigView +from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast from beets import ui -from beets.importer import ImportSession, ImportTask -from beets.library import Album, Item, Library from beets.plugins import BeetsPlugin from beets.util import command_output, displayable_path, syspath +if TYPE_CHECKING: + import optparse + from collections.abc import Sequence + from logging import Logger + + from confuse import ConfigView + + from beets.importer import ImportSession, ImportTask + from beets.library import Album, Item, Library + # Utilities. @@ -69,7 +62,7 @@ class FatalGstreamerPluginReplayGainError(FatalReplayGainError): loading the required plugins.""" -def call(args: List[Any], log: Logger, **kwargs: Any): +def call(args: list[Any], log: Logger, **kwargs: Any): """Execute the command and return its output or raise a ReplayGainError on failure. 
""" @@ -134,9 +127,9 @@ class RgTask: def __init__( self, items: Sequence[Item], - album: Optional[Album], + album: Album | None, target_level: float, - peak_method: Optional[PeakMethod], + peak_method: PeakMethod | None, backend_name: str, log: Logger, ): @@ -146,8 +139,8 @@ class RgTask: self.peak_method = peak_method self.backend_name = backend_name self._log = log - self.album_gain: Optional[Gain] = None - self.track_gains: Optional[List[Gain]] = None + self.album_gain: Gain | None = None + self.track_gains: list[Gain] | None = None def _store_track_gain(self, item: Item, track_gain: Gain): """Store track gain for a single item in the database.""" @@ -236,7 +229,7 @@ class R128Task(RgTask): def __init__( self, items: Sequence[Item], - album: Optional[Album], + album: Album | None, target_level: float, backend_name: str, log: Logger, @@ -334,9 +327,7 @@ class FfmpegBackend(Backend): task.target_level, task.peak_method, count_blocks=False, - )[ - 0 - ] # take only the gain, discarding number of gating blocks + )[0] # take only the gain, discarding number of gating blocks for item in task.items ] @@ -350,7 +341,7 @@ class FfmpegBackend(Backend): # analyse tracks # Gives a list of tuples (track_gain, track_n_blocks) - track_results: List[Tuple[Gain, int]] = [ + track_results: list[tuple[Gain, int]] = [ self._analyse_item( item, task.target_level, @@ -360,7 +351,7 @@ class FfmpegBackend(Backend): for item in task.items ] - track_gains: List[Gain] = [tg for tg, _nb in track_results] + track_gains: list[Gain] = [tg for tg, _nb in track_results] # Album peak is maximum track peak album_peak = max(tg.peak for tg in track_gains) @@ -411,8 +402,8 @@ class FfmpegBackend(Backend): return task def _construct_cmd( - self, item: Item, peak_method: Optional[PeakMethod] - ) -> List[Union[str, bytes]]: + self, item: Item, peak_method: PeakMethod | None + ) -> list[str | bytes]: """Construct the shell command to analyse items.""" return [ self._ffmpeg_path, @@ -435,9 +426,9 @@ class FfmpegBackend(Backend): self, item: Item, target_level: float, - peak_method: Optional[PeakMethod], + peak_method: PeakMethod | None, count_blocks: bool = True, - ) -> Tuple[Gain, int]: + ) -> tuple[Gain, int]: """Analyse item. Return a pair of a Gain object and the number of gating blocks above the threshold. @@ -649,7 +640,7 @@ class CommandBackend(Backend): items: Sequence[Item], target_level: float, is_album: bool, - ) -> List[Gain]: + ) -> list[Gain]: """Computes the track or album gain of a list of items, returns a list of TrackGain objects. @@ -669,7 +660,7 @@ class CommandBackend(Backend): # tag-writing; this turns the mp3gain/aacgain tool into a gain # calculator rather than a tag manipulator because we take care # of changing tags ourselves. - cmd: List[Union[bytes, str]] = [self.command, "-o", "-s", "s"] + cmd: list[bytes | str] = [self.command, "-o", "-s", "s"] if self.noclip: # Adjust to avoid clipping. cmd = cmd + ["-k"] @@ -687,7 +678,7 @@ class CommandBackend(Backend): output, len(items) + (1 if is_album else 0) ) - def parse_tool_output(self, text: bytes, num_lines: int) -> List[Gain]: + def parse_tool_output(self, text: bytes, num_lines: int) -> list[Gain]: """Given the tab-delimited output from an invocation of mp3gain or aacgain, parse the text and return a list of dictionaries containing information about each analyzed file. 
@@ -773,7 +764,7 @@ class GStreamerBackend(Backend): self._main_loop = self.GLib.MainLoop() - self._files: List[bytes] = [] + self._files: list[bytes] = [] def _import_gst(self): """Import the necessary GObject-related modules and assign `Gst` @@ -813,7 +804,7 @@ class GStreamerBackend(Backend): self._files = [i.path for i in items] # FIXME: Turn this into DefaultDict[bytes, Gain] - self._file_tags: DefaultDict[bytes, Dict[str, float]] = ( + self._file_tags: collections.defaultdict[bytes, dict[str, float]] = ( collections.defaultdict(dict) ) @@ -1194,20 +1185,20 @@ class ExceptionWatcher(Thread): # whether `_stopevent` is set pass - def join(self, timeout: Optional[float] = None): + def join(self, timeout: float | None = None): self._stopevent.set() Thread.join(self, timeout) # Main plugin logic. -BACKEND_CLASSES: List[Type[Backend]] = [ +BACKEND_CLASSES: list[type[Backend]] = [ CommandBackend, GStreamerBackend, AudioToolsBackend, FfmpegBackend, ] -BACKENDS: Dict[str, Type[Backend]] = {b.NAME: b for b in BACKEND_CLASSES} +BACKENDS: dict[str, type[Backend]] = {b.NAME: b for b in BACKEND_CLASSES} class ReplayGainPlugin(BeetsPlugin): @@ -1334,7 +1325,7 @@ class ReplayGainPlugin(BeetsPlugin): self, items: Sequence[Item], use_r128: bool, - album: Optional[Album] = None, + album: Album | None = None, ) -> RgTask: if use_r128: return R128Task( @@ -1377,7 +1368,7 @@ class ReplayGainPlugin(BeetsPlugin): self._log.info("analyzing {0}", album) - discs: Dict[int, List[Item]] = {} + discs: dict[int, list[Item]] = {} if self.config["per_disc"].get(bool): for item in album.items(): if discs.get(item.disc) is None: @@ -1449,8 +1440,8 @@ class ReplayGainPlugin(BeetsPlugin): def _apply( self, func: Callable[..., AnyRgTask], - args: List[Any], - kwds: Dict[str, Any], + args: list[Any], + kwds: dict[str, Any], callback: Callable[[AnyRgTask], Any], ): if self.pool is not None: @@ -1527,7 +1518,7 @@ class ReplayGainPlugin(BeetsPlugin): self, lib: Library, opts: optparse.Values, - args: List[str], + args: list[str], ): try: write = ui.should_write(opts.write) @@ -1564,7 +1555,7 @@ class ReplayGainPlugin(BeetsPlugin): # Silence interrupt exceptions pass - def commands(self) -> List[ui.Subcommand]: + def commands(self) -> list[ui.Subcommand]: """Return the "replaygain" ui subcommand.""" cmd = ui.Subcommand("replaygain", help="analyze for ReplayGain") cmd.parser.add_album_option() diff --git a/beetsplug/scrub.py b/beetsplug/scrub.py index d1e63ee31..630a4e6e6 100644 --- a/beetsplug/scrub.py +++ b/beetsplug/scrub.py @@ -16,7 +16,6 @@ automatically whenever tags are written. """ - import mediafile import mutagen diff --git a/beetsplug/smartplaylist.py b/beetsplug/smartplaylist.py index 6124c7b40..6bd47b15d 100644 --- a/beetsplug/smartplaylist.py +++ b/beetsplug/smartplaylist.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Generates smart playlists based on beets queries. 
-""" - +"""Generates smart playlists based on beets queries.""" import json import os diff --git a/beetsplug/substitute.py b/beetsplug/substitute.py index 94b790075..a89d0af16 100644 --- a/beetsplug/substitute.py +++ b/beetsplug/substitute.py @@ -34,8 +34,7 @@ class Substitute(BeetsPlugin): """Do the actual replacing.""" if text: for pattern, replacement in self.substitute_rules: - if pattern.match(text.lower()): - return replacement + text = pattern.sub(replacement, text) return text else: return "" @@ -47,10 +46,8 @@ class Substitute(BeetsPlugin): substitute rules. """ super().__init__() - self.substitute_rules = [] self.template_funcs["substitute"] = self.tmpl_substitute - - for key, view in self.config.items(): - value = view.as_str() - pattern = re.compile(key.lower()) - self.substitute_rules.append((pattern, value)) + self.substitute_rules = [ + (re.compile(key, flags=re.IGNORECASE), value) + for key, value in self.config.flatten().items() + ] diff --git a/beetsplug/the.py b/beetsplug/the.py index c6fb46ddf..42da708a3 100644 --- a/beetsplug/the.py +++ b/beetsplug/the.py @@ -14,9 +14,7 @@ """Moves patterns in path formats (suitable for moving articles).""" - import re -from typing import List from beets.plugins import BeetsPlugin @@ -29,7 +27,7 @@ FORMAT = "{0}, {1}" class ThePlugin(BeetsPlugin): - patterns: List[str] = [] + patterns: list[str] = [] def __init__(self): super().__init__() diff --git a/beetsplug/thumbnails.py b/beetsplug/thumbnails.py index 19c19f06c..3f88248e0 100644 --- a/beetsplug/thumbnails.py +++ b/beetsplug/thumbnails.py @@ -18,7 +18,6 @@ This plugin is POSIX-only. Spec: standards.freedesktop.org/thumbnail-spec/latest/index.html """ - import ctypes import ctypes.util import os @@ -280,8 +279,7 @@ class GioURI(URIGetter): if not uri_ptr: self.libgio.g_free(uri_ptr) raise RuntimeError( - "No URI received from the gfile pointer for " - "{}".format(displayable_path(path)) + f"No URI received from the gfile pointer for {displayable_path(path)}" ) try: diff --git a/beetsplug/web/__init__.py b/beetsplug/web/__init__.py index dcd0ba38c..55864f503 100644 --- a/beetsplug/web/__init__.py +++ b/beetsplug/web/__init__.py @@ -231,7 +231,7 @@ def _get_unique_table_field_values(model, field, sort_field): raise KeyError with g.lib.transaction() as tx: rows = tx.query( - 'SELECT DISTINCT "{}" FROM "{}" ORDER BY "{}"'.format( + "SELECT DISTINCT '{}' FROM '{}' ORDER BY '{}'".format( field, model._table, sort_field ) ) diff --git a/beetsplug/zero.py b/beetsplug/zero.py index 14c157ce8..bda4052ab 100644 --- a/beetsplug/zero.py +++ b/beetsplug/zero.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -""" Clears tag fields in media files.""" - +"""Clears tag fields in media files.""" import re diff --git a/docs/changelog.rst b/docs/changelog.rst index e063586a3..fe6c7ed88 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -1,44 +1,152 @@ Changelog ========= +Changelog goes here! Please add your entry to the bottom of one of the lists below! + Unreleased ---------- -Changelog goes here! Please add your entry to the bottom of one of the lists below! +Beets now requires Python 3.9 or later since support for EOL Python 3.8 has +been dropped. New features: -* Ability to query albums with track db fields and vice-versa, for example - `beet list -a title:something` or `beet list artpath:cover`. 
Consequently - album queries involving `path` field have been sped up, like `beet list -a - path:/path/`. +Bug fixes: + +* :doc:`plugins/lyrics`: LRCLib will fallback to plain lyrics if synced lyrics + are not found and `synced` flag is set to `yes`. +* Synchronise files included in the source distribution with what we used to + have before the introduction of Poetry. + :bug:`5531` + :bug:`5526` +* :ref:`write-cmd`: Fix the issue where for certain files differences in + ``mb_artistid``, ``mb_albumartistid`` and ``albumtype`` fields are shown on + every attempt to write tags. Note: your music needs to be reimported with + ``beet import -LI`` or synchronised with ``beet mbsync`` in order to fix + this! + :bug:`5265` + :bug:`5371` + :bug:`4715` +* :ref:`import-cmd`: Fix ``MemoryError`` and improve performance tagging large + albums by replacing ``munkres`` library with ``lap.lapjv``. + :bug:`5207` +* :ref:`query-sort`: Fix a bug that would raise an exception when sorting on + a non-string field that is not populated in all items. + :bug:`5512` +* :doc:`plugins/lastgenre`: Fix track-level genre handling. Now when an album-level + genre is set already, single tracks don't fall back to the album's genre and + request their own last.fm genre. Also log messages regarding what's been + tagged are now more polished. + :bug:`5582` + +For packagers: + +* The minimum supported Python version is now 3.9. + +Other changes: + +* Release workflow: fix the issue where the new release tag is created for the + wrong (outdated) commit. Now the tag is created in the same workflow step + right after committing the version update. + :bug:`5539` + +2.2.0 (December 02, 2024) +------------------------- + +New features: + +* :doc:`/plugins/substitute`: Allow the replacement string to use capture groups + from the match. It is thus possible to create more general rules, applying to + many different artists at once. * Ability to use relative_to as "m3u" to set playlist files as relative to where each playlist is at, including subdirectories. Bug fixes: -* Improved naming of temporary files by separating the random part with the file extension. -* Fixed the ``auto`` value for the :ref:`reflink` config option. -* Fixed lyrics plugin only getting part of the lyrics from ``Genius.com`` :bug:`4815` +* Check if running python from the Microsoft Store and provide feedback to install + from python.org. + :bug:`5467` +* Fix bug where matcher doesn't consider medium number when importing. This makes + it difficult to import hybrid SACDs and other releases with duplicate tracks. + :bug:`5148` +* Bring back test files and the manual to the source distribution tarball. + :bug:`5513` + +Other changes: + +* Changed `bitesize` label to `good first issue`. Our `contribute`_ page is now + automatically populated with these issues. :bug:`4855` + +.. _contribute: https://github.com/beetbox/beets/contribute + +2.1.0 (November 22, 2024) +------------------------- + +New features: + +* New template function added: ``%capitalize``. Converts the first letter of + the text to uppercase and the rest to lowercase. +* Ability to query albums with track db fields and vice-versa, for example + ``beet list -a title:something`` or ``beet list artpath:cover``. Consequently + album queries involving ``path`` field have been sped up, like ``beet list -a + path:/path/``. +* :doc:`plugins/ftintitle`: New ``keep_in_artist`` option for the plugin, which + allows keeping the "feat." part in the artist metadata while still changing + the title. 
+* :doc:`plugins/autobpm`: Add new configuration option ``beat_track_kwargs`` + which enables adjusting keyword arguments supplied to librosa's + ``beat_track`` function call. +* Beets now uses ``platformdirs`` to determine the default music directory. + This location varies between systems -- for example, users can configure it + on Unix systems via ``user-dirs.dirs(5)``. + +Bug fixes: + +* :doc:`plugins/ftintitle`: The detection of a "feat. X" part in a song title does not produce any false + positives caused by words like "and" or "with" anymore. :bug:`5441` +* :doc:`plugins/ftintitle`: The detection of a "feat. X" part now also matches such parts if they are in + parentheses or brackets. :bug:`5436` +* Improve naming of temporary files by separating the random part with the file extension. +* Fix the ``auto`` value for the :ref:`reflink` config option. +* Fix lyrics plugin only getting part of the lyrics from ``Genius.com`` :bug:`4815` +* Album flexible fields are now correctly saved. For instance MusicBrainz external links + such as `bandcamp_album_id` will be available on albums in addition to tracks. + For albums already in your library, a re-import is required for the fields to be added. + Such a re-import can be done with, in this case, `beet import -L data_source:=MusicBrainz`. +* :doc:`plugins/autobpm`: Fix the ``TypeError`` where tempo was being returned + as a numpy array. Update ``librosa`` dependency constraint to prevent similar + issues in the future. + :bug:`5289` +* :doc:`plugins/discogs`: Fix the ``TypeError`` when there is no description. +* Use single quotes in all SQL queries + :bug:`4709` +* :doc:`plugins/lyrics`: Update ``tekstowo`` backend to fetch lyrics directly + since recent updates to their website made it unsearchable. + :bug:`5456` +* :doc:`plugins/convert`: Fixed the convert plugin ``no_convert`` option so + that it no longer treats "and" and "or" queries the same. To maintain + previous behaviour add commas between your query keywords. For help see + :ref:`combiningqueries`. +* Fix the ``TypeError`` when :ref:`set_fields` is provided non-string values. :bug:`4840` For packagers: * The minimum supported Python version is now 3.8. -* The `beet` script has been removed from the repository. -* The `typing_extensions` is required for Python 3.10 and below. +* The ``beet`` script has been removed from the repository. +* The ``typing_extensions`` is required for Python 3.10 and below. Other changes: -* :doc:`contributing`: The project now uses `poetry` for packaging and +* :doc:`contributing`: The project now uses ``poetry`` for packaging and dependency management. This change affects project management and mostly affects beets developers. Please see updates in :ref:`getting-the-source` and :ref:`testing` for more information. -* :doc:`contributing`: Since `poetry` now manages local virtual environments, - `tox` has been replaced by a task runner `poethepoet`. This change affects +* :doc:`contributing`: Since ``poetry`` now manages local virtual environments, + `tox` has been replaced by a task runner ``poethepoet``. This change affects beets developers and contributors. Please see updates in the :ref:`development-tools` section for more details. Type ``poe`` while in the project directory to see the available commands. * Installation instructions have been made consistent across plugins - documentation. Users should simply install `beets` with an `extra` of the + documentation. 
Users should simply install ``beets`` with an ``extra`` of the corresponding plugin name in order to install extra dependencies for that plugin. * GitHub workflows have been reorganised for clarity: style, linting, type and @@ -49,6 +157,16 @@ Other changes: documentation is changed, and they only check the changed files. When dependencies are updated (``poetry.lock``), then the entire code base is checked. +* The long-deprecated ``beets.util.confit`` module has been removed. This may + cause extremely outdated external plugins to fail to load. +* :doc:`plugins/autobpm`: Add plugin dependencies to ``pyproject.toml`` under + the ``autobpm`` extra and update the plugin installation instructions in the + docs. + Since importing the bpm calculation functionality from ``librosa`` takes + around 4 seconds, update the plugin to only do so when it actually needs to + calculate the bpm. Previously this import was being done immediately, so + every ``beet`` invocation was being delayed by a couple of seconds. + :bug:`5185` 2.0.0 (May 30, 2024) -------------------- @@ -210,9 +328,14 @@ New features: * Add support for `barcode` field. :bug:`3172` * :doc:`/plugins/smartplaylist`: Add new config option `smartplaylist.fields`. +* :doc:`/plugins/fetchart`: Defer source removal config option evaluation to + the point where they are used really, supporting temporary config changes. Bug fixes: +* Improve ListenBrainz error handling. + :bug:`5459` +* :doc:`/plugins/deezer`: Improve requests error handling. * :doc:`/plugins/lastimport`: Improve error handling in the `process_tracks` function and enable it to be used with other plugins. * :doc:`/plugins/spotify`: Improve handling of ConnectionError. * :doc:`/plugins/deezer`: Improve Deezer plugin error handling and set requests timeout to 10 seconds. @@ -349,7 +472,7 @@ Bug fixes: :bug:`4947` * Fix bug where unimported plugin would not ignore children directories of ignored directories. - :bug:`5130` + :bug:`5130` * Fix bug where some plugin commands hang indefinitely due to a missing `requests` timeout. * Fix cover art resizing logic to support multiple steps of resizing diff --git a/docs/conf.py b/docs/conf.py index 1404b2bb2..904aacdc0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -11,8 +11,8 @@ master_doc = "index" project = "beets" copyright = "2016, Adrian Sampson" -version = "2.0" -release = "2.0.0" +version = "2.2" +release = "2.2.0" pygments_style = "sphinx" diff --git a/docs/dev/plugins.rst b/docs/dev/plugins.rst index 92a7f870e..fa7fa645e 100644 --- a/docs/dev/plugins.rst +++ b/docs/dev/plugins.rst @@ -542,6 +542,9 @@ Specifying types has several advantages: * User input for flexible fields may be validated and converted. +* Items missing the given field can use an appropriate null value for + querying and sorting purposes. + .. _plugin-logging: diff --git a/docs/guides/main.rst b/docs/guides/main.rst index 9468a8c50..665ad12d8 100644 --- a/docs/guides/main.rst +++ b/docs/guides/main.rst @@ -109,7 +109,7 @@ get it right: should open the "System Properties" screen, then select the "Advanced" tab, then hit the "Environmental Variables..." button, and then look for the PATH variable in the table. Add the following to the end of the variable's value: - ``;C:\Python37;C:\Python37\Scripts``. You may need to adjust these paths to + ``;C:\Python38;C:\Python38\Scripts``. You may need to adjust these paths to point to your Python installation. 3. 
Now install beets by running: ``pip install beets`` @@ -132,6 +132,19 @@ trouble or you have more detail to contribute here, please direct it to .. _install pip: https://pip.pypa.io/en/stable/installing/ .. _get-pip.py: https://bootstrap.pypa.io/get-pip.py +Installing on ARM (Raspberry Pi and similar) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Running beets on ARM devices is not recommended for Linux novices. If you are +comfortable with light troubleshooting in tools like ``pip``, ``make``, +and beets' command-line binary dependencies (e.g. ``ffmpeg`` and +``ImageMagick``), you will probably be okay on ARM devices like the +Raspberry Pi. We have `notes for ARM`_ and an `older ARM reference`_. +Beets is generally developed on x86-64 based devices, and most plugins +target that platform as well. + +.. _notes for ARM: https://github.com/beetbox/beets/discussions/4910 +.. _older ARM reference: https://discourse.beets.io/t/diary-of-beets-on-arm-odroid-hc4-armbian/1993 Configuring ----------- diff --git a/docs/plugins/autobpm.rst b/docs/plugins/autobpm.rst index caf128b8c..53908c517 100644 --- a/docs/plugins/autobpm.rst +++ b/docs/plugins/autobpm.rst @@ -6,8 +6,15 @@ of a track from its audio data and store it in the `bpm` field of your database. It does so automatically when importing music or through the ``beet autobpm [QUERY]`` command. -To use the ``autobpm`` plugin, enable it in your configuration (see -:ref:`using-plugins`). +Install +------- + +To use the ``autobpm`` plugin, first enable it in your configuration (see +:ref:`using-plugins`). Then, install ``beets`` with the ``autobpm`` extra: + +.. code-block:: bash + + pip install "beets[autobpm]" Configuration ------------- @@ -21,5 +28,14 @@ configuration file. The available options are: - **overwrite**: Calculate a BPM even for files that already have a `bpm` value. Default: ``no``. +- **beat_track_kwargs**: Any extra keyword arguments that you would like to + provide to librosa's `beat_track`_ function call, for example: +.. code-block:: yaml + + autobpm: + beat_track_kwargs: + start_bpm: 160 .. _Librosa: https://github.com/librosa/librosa/ +.. _beat_track: https://librosa.org/doc/latest/generated/librosa.beat.beat_track.html diff --git a/docs/plugins/ftintitle.rst b/docs/plugins/ftintitle.rst index 66c9ecd69..63d023dc9 100644 --- a/docs/plugins/ftintitle.rst +++ b/docs/plugins/ftintitle.rst @@ -27,6 +27,10 @@ file. The available options are: - **format**: Defines the format for the featuring X part of the new title field. In this format the ``{0}`` is used to define where the featured artists are placed. Default: ``feat. {0}`` +- **keep_in_artist**: Keep the featuring X part in the artist field. This can + be useful if you still want to be able to search for features in the artist + field. + Default: ``no``. Running Manually ---------------- diff --git a/docs/plugins/substitute.rst b/docs/plugins/substitute.rst index b443f27ac..87ee2ad45 100644 --- a/docs/plugins/substitute.rst +++ b/docs/plugins/substitute.rst @@ -11,13 +11,34 @@ the ``rewrite`` plugin modifies the metadata, this plugin does not. Enable the ``substitute`` plugin (see :ref:`using-plugins`), then make a ``substitute:`` section in your config file to contain your rules. Each rule consists of a case-insensitive regular expression pattern, and a -replacement value. For example, you might use: +replacement string. For example, you might use: + +.. 
code-block:: yaml substitute: - .*jimi hendrix.*: Jimi Hendrix + .*jimi hendrix.*: Jimi Hendrix + +The replacement can be an expression utilising the matched regex, allowing us +to create more general rules. Say for example, we want to sort all albums by +multiple artists into the directory of the first artist. We can thus capture +everything before the first ``,``, `` &`` or `` and``, and use this capture +group in the output, discarding the rest of the string. + +.. code-block:: yaml + + substitute: + ^(.*?)(,| &| and).*: \1 + +This would handle all the below cases in a single rule: + + Bob Dylan and The Band -> Bob Dylan + Neil Young & Crazy Horse -> Neil Young + James Yorkston, Nina Persson & The Second Hand Orchestra -> James Yorkston To apply the substitution, you have to call the function ``%substitute{}`` in the paths section. For example: - + +.. code-block:: yaml + paths: - default: %substitute{$albumartist}/$year - $album%aunique{}/$track - $title \ No newline at end of file + default: \%substitute{$albumartist}/$year - $album\%aunique{}/$track - $title diff --git a/docs/plugins/zero.rst b/docs/plugins/zero.rst index 1ed915891..e3d717dfd 100644 --- a/docs/plugins/zero.rst +++ b/docs/plugins/zero.rst @@ -36,9 +36,10 @@ fields to nullify and the conditions for nullifying them: For example:: zero: - fields: month day genre comments + fields: month day genre genres comments comments: [EAC, LAME, from.+collection, 'ripped by'] genre: [rnb, 'power metal'] + genres: [rnb, 'power metal'] update_database: true If a custom pattern is not defined for a given field, the field will be nulled @@ -60,4 +61,4 @@ art from files' tags unless you tell it not to. To keep the album art, include the special field ``images`` in the list. For example:: zero: - keep_fields: title artist album year track genre images + keep_fields: title artist album year track genre genres images diff --git a/docs/reference/config.rst b/docs/reference/config.rst index e3a931885..234185e79 100644 --- a/docs/reference/config.rst +++ b/docs/reference/config.rst @@ -276,6 +276,21 @@ Either ``yes`` or ``no``, indicating whether matched albums should have their That is, if this option is turned on, then ``year`` will always equal ``original_year`` and so on. Default: ``no``. +.. _overwrite_null: + +overwrite_null +~~~~~~~~~~~~~~ + +This confusingly-named option indicates which fields have meaningful `null` values. If +an album or track field is in the corresponding list, then an existing value for this +field in an item in the database can be overwritten with `null`. By default, however, +`null` is interpreted as information about the field being unavailable, so it would not +overwrite existing values. For example:: + + overwrite_null: + album: ["albumid"] + track: ["title", "date"] + .. _artist_credit: artist_credit @@ -600,13 +615,13 @@ Defaults to ``no``. This kind of clone is only available on certain filesystems: for example, btrfs and APFS. For more details on filesystem support, see the `pyreflink`_ -documentation. Note that you need to install ``pyreflink``, either through +documentation. Note that you need to install ``pyreflink``, either through ``python -m pip install beets[reflink]`` or ``python -m pip install reflink``. The option is ignored if ``move`` is enabled (i.e., beets can move or copy files but it doesn't make sense to do both). -.. _file clones: https://blogs.oracle.com/otn/save-disk-space-on-linux-by-cloning-files-on-btrfs-and-ocfs2 +.. _file clones: https://en.wikipedia.org/wiki/Copy-on-write .. 
_pyreflink: https://reflink.readthedocs.io/en/latest/ resume diff --git a/docs/reference/pathformat.rst b/docs/reference/pathformat.rst index 7c52a92eb..d80bdec34 100644 --- a/docs/reference/pathformat.rst +++ b/docs/reference/pathformat.rst @@ -60,6 +60,7 @@ These functions are built in to beets: * ``%lower{text}``: Convert ``text`` to lowercase. * ``%upper{text}``: Convert ``text`` to UPPERCASE. +* ``%capitalize{text}``: Make the first letter of ``text`` UPPERCASE and the rest lowercase. * ``%title{text}``: Convert ``text`` to Title Case. * ``%left{text,n}``: Return the first ``n`` characters of ``text``. * ``%right{text,n}``: Return the last ``n`` characters of ``text``. diff --git a/docs/team.rst b/docs/team.rst index dbaaa563e..eae3ef532 100644 --- a/docs/team.rst +++ b/docs/team.rst @@ -15,6 +15,13 @@ give you an idea of what you can expect from these *knowledge owners*. * Testing out new contributions * beets as a music discovery tool +@bal-e +====== + +* Documentation +* The Fish plugin +* Type annotations + @govynnus ========= diff --git a/extra/beets.reg b/extra/beets.reg index c02303d3d..84eb090ad 100644 Binary files a/extra/beets.reg and b/extra/beets.reg differ diff --git a/extra/release.py b/extra/release.py index 2c3ffd1bf..e1c036b28 100755 --- a/extra/release.py +++ b/extra/release.py @@ -1,27 +1,146 @@ #!/usr/bin/env python3 -"""A utility script for automating the beets release process. -""" +"""A utility script for automating the beets release process.""" + from __future__ import annotations import re import subprocess +from contextlib import redirect_stdout from datetime import datetime, timezone +from functools import partial +from io import StringIO from pathlib import Path -from typing import Callable +from typing import Callable, NamedTuple import click import tomli from packaging.version import Version, parse +from sphinx.ext import intersphinx +from typing_extensions import TypeAlias BASE = Path(__file__).parent.parent.absolute() PYPROJECT = BASE / "pyproject.toml" CHANGELOG = BASE / "docs" / "changelog.rst" +DOCS = "https://beets.readthedocs.io/en/stable" -MD_CHANGELOG_SECTION_LIST = re.compile(r"- .+?(?=\n\n###|$)", re.DOTALL) -version_header = r"\d+\.\d+\.\d+ \([^)]+\)" +VERSION_HEADER = r"\d+\.\d+\.\d+ \([^)]+\)" RST_LATEST_CHANGES = re.compile( - rf"{version_header}\n--+\s+(.+?)\n\n+{version_header}", re.DOTALL + rf"{VERSION_HEADER}\n--+\s+(.+?)\n\n+{VERSION_HEADER}", re.DOTALL +) + +Replacement: TypeAlias = "tuple[str, str | Callable[[re.Match[str]], str]]" + + +class Ref(NamedTuple): + """A reference to documentation with ID, path, and optional title.""" + + id: str + path: str | None + title: str | None + + @classmethod + def from_line(cls, line: str) -> Ref: + """Create Ref from a Sphinx objects.inv line. 
+ + Each line has the following structure: + [optional title : ] + + """ + if len(line_parts := line.split(" ", 1)) == 1: + return cls(line, None, None) + + id, path_with_name = line_parts + parts = [p.strip() for p in path_with_name.split(":", 1)] + + if len(parts) == 1: + path, name = parts[0], None + else: + name, path = parts + + return cls(id, path, name) + + @property + def url(self) -> str: + """Full documentation URL.""" + return f"{DOCS}/{self.path}" + + @property + def name(self) -> str: + """Display name (title if available, otherwise ID).""" + return self.title or self.id + + +def get_refs() -> dict[str, Ref]: + """Parse Sphinx objects.inv and return dict of documentation references.""" + objects_filepath = Path("docs/_build/html/objects.inv") + if not objects_filepath.exists(): + raise ValueError("Documentation does not exist. Run 'poe docs' first.") + + captured_output = StringIO() + + with redirect_stdout(captured_output): + intersphinx.inspect_main([str(objects_filepath)]) + + return { + r.id: r + for ln in captured_output.getvalue().split("\n") + if ln.startswith("\t") and (r := Ref.from_line(ln.strip())) + } + + +def create_rst_replacements() -> list[Replacement]: + """Generate list of pattern replacements for RST changelog.""" + refs = get_refs() + + def make_ref_link(ref_id: str, name: str | None = None) -> str: + ref = refs[ref_id] + return rf"`{name or ref.name} <{ref.url}>`_" + + commands = "|".join(r.split("-")[0] for r in refs if r.endswith("-cmd")) + plugins = "|".join( + r.split("/")[-1] for r in refs if r.startswith("plugins/") + ) + return [ + # Fix nested bullet points indent: use 2 spaces consistently + (r"(?<=\n) {3,4}(?=\*)", " "), + # Fix nested text indent: use 4 spaces consistently + (r"(?<=\n) {5,6}(?=[\w:`])", " "), + # Replace Sphinx :ref: and :doc: directives by documentation URLs + # :ref:`/plugins/autobpm` -> [AutoBPM Plugin](DOCS/plugins/autobpm.html) + ( + r":(?:ref|doc):`+(?:([^`<]+)<)?/?([\w./_-]+)>?`+", + lambda m: make_ref_link(m[2], m[1]), + ), + # Convert command references to documentation URLs + # `beet move` or `move` command -> [import](DOCS/reference/cli.html#import) + ( + rf"`+beet ({commands})`+|`+({commands})`+(?= command)", + lambda m: make_ref_link(f"{m[1] or m[2]}-cmd"), + ), + # Convert plugin references to documentation URLs + # `fetchart` plugin -> [fetchart](DOCS/plugins/fetchart.html) + (rf"`+({plugins})`+", lambda m: make_ref_link(f"plugins/{m[1]}")), + # Add additional backticks around existing backticked text to ensure it + # is rendered as inline code in Markdown + (r"(?<=[\s])(`[^`]+`)(?!_)", r"`\1`"), + # Convert bug references to GitHub issue links + (r":bug:`(\d+)`", r":bug: (#\1)"), + # Convert user references to GitHub @mentions + (r":user:`(\w+)`", r"\@\1"), + ] + + +MD_REPLACEMENTS: list[Replacement] = [ + (r"^ (- )", r"\1"), # remove indent from top-level bullet points + (r"^ +( - )", r"\1"), # adjust nested bullet points indent + (r"^(\w[^\n]{,80}):(?=\n\n[^ ])", r"### \1"), # format section headers + (r"^(\w[^\n]{81,}):(?=\n\n[^ ])", r"**\1**"), # and bolden too long ones + (r"### [^\n]+\n+(?=### )", ""), # remove empty sections +] +order_bullet_points = partial( + re.compile("(\n- .*?(?=\n(?! *- )|$))", flags=re.DOTALL).sub, + lambda m: "\n- ".join(sorted(m.group().split("\n- "))), ) @@ -40,7 +159,13 @@ def update_changelog(text: str, new: Version) -> str: Unreleased ---------- -Changelog goes here! Please add your entry to the bottom of one of the lists below! 
+New features: + +Bug fixes: + +For packagers: + +Other changes: {new_header} {'-' * len(new_header)} @@ -92,50 +217,36 @@ def bump_version(new: Version) -> None: def rst2md(text: str) -> str: """Use Pandoc to convert text from ReST to Markdown.""" - # Other backslashes with verbatim ranges. - rst = re.sub(r"(?<=[\s(])`([^`]+)`(?=[^_])", r"``\1``", text) - - # Bug numbers. - rst = re.sub(r":bug:`(\d+)`", r":bug: (#\1)", rst) - - # Users. - rst = re.sub(r":user:`(\w+)`", r"@\1", rst) return ( subprocess.check_output( - ["/usr/bin/pandoc", "--from=rst", "--to=gfm", "--wrap=none"], - input=rst.encode(), + ["pandoc", "--from=rst", "--to=gfm+hard_line_breaks"], + input=text.encode(), ) .decode() .strip() ) -def changelog_as_markdown() -> str: +def get_changelog_contents() -> str | None: + if m := RST_LATEST_CHANGES.search(CHANGELOG.read_text()): + return m.group(1) + + return None + + +def changelog_as_markdown(rst: str) -> str: """Get the latest changelog entry as hacked up Markdown.""" - with CHANGELOG.open() as f: - contents = f.read() + for pattern, repl in create_rst_replacements(): + rst = re.sub(pattern, repl, rst, flags=re.M | re.DOTALL) - m = RST_LATEST_CHANGES.search(contents) - rst = m.group(1) if m else "" - - # Convert with Pandoc. md = rst2md(rst) - # Make sections stand out - md = re.sub(r"^(\w.+?):$", r"### \1", md, flags=re.M) + for pattern, repl in MD_REPLACEMENTS: + md = re.sub(pattern, repl, md, flags=re.M | re.DOTALL) - # Highlight plugin names - md = re.sub( - r"^- `/?plugins/(\w+)`:?", r"- Plugin **`\1`**:", md, flags=re.M - ) - - # Highlights command names. - md = re.sub(r"^- `(\w+)-cmd`:?", r"- Command **`\1`**:", md, flags=re.M) - - # sort list items alphabetically for each of the sections - return MD_CHANGELOG_SECTION_LIST.sub( - lambda m: "\n".join(sorted(m.group().splitlines())), md - ) + # order bullet points in each of the lists alphabetically to + # improve readability + return order_bullet_points(md) @click.group() @@ -153,7 +264,11 @@ def bump(version: Version) -> None: @cli.command() def changelog(): """Get the most recent version's changelog as Markdown.""" - print(changelog_as_markdown()) + if changelog := get_changelog_contents(): + try: + print(changelog_as_markdown(changelog)) + except ValueError as e: + raise click.exceptions.UsageError(str(e)) if __name__ == "__main__": diff --git a/poetry.lock b/poetry.lock index f7d53578a..61413bf4e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,38 +2,42 @@ [[package]] name = "accessible-pygments" -version = "0.0.4" +version = "0.0.5" description = "A collection of accessible pygments styles" -optional = false -python-versions = "*" +optional = true +python-versions = ">=3.9" files = [ - {file = "accessible-pygments-0.0.4.tar.gz", hash = "sha256:e7b57a9b15958e9601c7e9eb07a440c813283545a20973f2574a5f453d0e953e"}, - {file = "accessible_pygments-0.0.4-py2.py3-none-any.whl", hash = "sha256:416c6d8c1ea1c5ad8701903a20fcedf953c6e720d64f33dc47bfb2d3f2fa4e8d"}, + {file = "accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7"}, + {file = "accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872"}, ] [package.dependencies] pygments = ">=1.5" +[package.extras] +dev = ["pillow", "pkginfo (>=1.10)", "playwright", "pre-commit", "setuptools", "twine (>=5.0)"] +tests = ["hypothesis", "pytest"] + [[package]] name = "alabaster" -version = "0.7.13" -description = "A configurable sidebar-enabled Sphinx theme" 
-optional = false -python-versions = ">=3.6" +version = "0.7.16" +description = "A light, configurable Sphinx theme" +optional = true +python-versions = ">=3.9" files = [ - {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, - {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] [[package]] name = "anyio" -version = "4.4.0" +version = "4.6.2.post1" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, ] [package.dependencies] @@ -43,9 +47,9 @@ sniffio = ">=1.1" typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "appdirs" @@ -74,18 +78,15 @@ test = ["tox"] [[package]] name = "babel" -version = "2.15.0" +version = "2.16.0" description = "Internationalization utilities" -optional = false +optional = true python-versions = ">=3.8" files = [ - {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, - {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] -[package.dependencies] -pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} - [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] @@ -110,61 +111,15 @@ charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] -[[package]] -name = "black" -version = "24.4.2" -description = "The uncompromising code formatter." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] 
-colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - [[package]] name = "blinker" -version = "1.8.2" +version = "1.9.0" description = "Fast, simple object-to-object and broadcast signaling" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, - {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, + {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}, + {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}, ] [[package]] @@ -184,6 +139,10 @@ files = [ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"}, {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, @@ -196,8 +155,14 @@ files = [ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"}, {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, {file = 
"Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, + {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"}, + {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, @@ -208,8 +173,24 @@ files = [ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"}, {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, + {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"}, + {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"}, + {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"}, + {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"}, + {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"}, + {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"}, + {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"}, + {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"}, + {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"}, + {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"}, + {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"}, + {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"}, {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, @@ -219,6 +200,10 @@ files = [ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"}, {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, @@ -230,6 +215,10 @@ files = [ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"}, {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = 
"sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, @@ -242,6 +231,10 @@ files = [ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"}, {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, @@ -254,6 +247,10 @@ files = [ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"}, {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, @@ -300,74 +297,89 @@ cffi = ">=1.0.0" [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = 
"cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -375,101 +387,116 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = 
"charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -528,63 +555,73 @@ pyyaml = "*" [[package]] name = "coverage" -version = "7.5.3" +version = "7.6.8" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, - {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, - {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, - {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, - {file 
= "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, - {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, - {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, - {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, - {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, - {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, - {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, - {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, - {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, - {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, - {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, + {file = "coverage-7.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b39e6011cd06822eb964d038d5dff5da5d98652b81f5ecd439277b32361a3a50"}, + {file = "coverage-7.6.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63c19702db10ad79151a059d2d6336fe0c470f2e18d0d4d1a57f7f9713875dcf"}, + {file = "coverage-7.6.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3985b9be361d8fb6b2d1adc9924d01dec575a1d7453a14cccd73225cb79243ee"}, + {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644ec81edec0f4ad17d51c838a7d01e42811054543b76d4ba2c5d6af741ce2a6"}, + {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f188a2402f8359cf0c4b1fe89eea40dc13b52e7b4fd4812450da9fcd210181d"}, + {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19122296822deafce89a0c5e8685704c067ae65d45e79718c92df7b3ec3d331"}, + {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13618bed0c38acc418896005732e565b317aa9e98d855a0e9f211a7ffc2d6638"}, + {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:193e3bffca48ad74b8c764fb4492dd875038a2f9925530cb094db92bb5e47bed"}, + {file = "coverage-7.6.8-cp310-cp310-win32.whl", hash = "sha256:3988665ee376abce49613701336544041f2117de7b7fbfe91b93d8ff8b151c8e"}, + {file = "coverage-7.6.8-cp310-cp310-win_amd64.whl", hash = "sha256:f56f49b2553d7dd85fd86e029515a221e5c1f8cb3d9c38b470bc38bde7b8445a"}, + {file = "coverage-7.6.8-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:86cffe9c6dfcfe22e28027069725c7f57f4b868a3f86e81d1c62462764dc46d4"}, + {file = "coverage-7.6.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d82ab6816c3277dc962cfcdc85b1efa0e5f50fb2c449432deaf2398a2928ab94"}, + {file = "coverage-7.6.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13690e923a3932e4fad4c0ebfb9cb5988e03d9dcb4c5150b5fcbf58fd8bddfc4"}, + {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be32da0c3827ac9132bb488d331cb32e8d9638dd41a0557c5569d57cf22c9c1"}, + {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44e6c85bbdc809383b509d732b06419fb4544dca29ebe18480379633623baafb"}, + {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:768939f7c4353c0fac2f7c37897e10b1414b571fd85dd9fc49e6a87e37a2e0d8"}, + {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e44961e36cb13c495806d4cac67640ac2866cb99044e210895b506c26ee63d3a"}, + {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ea8bb1ab9558374c0ab591783808511d135a833c3ca64a18ec927f20c4030f0"}, + {file = "coverage-7.6.8-cp311-cp311-win32.whl", hash = "sha256:629a1ba2115dce8bf75a5cce9f2486ae483cb89c0145795603d6554bdc83e801"}, + {file = "coverage-7.6.8-cp311-cp311-win_amd64.whl", hash = "sha256:fb9fc32399dca861584d96eccd6c980b69bbcd7c228d06fb74fe53e007aa8ef9"}, + {file = "coverage-7.6.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e683e6ecc587643f8cde8f5da6768e9d165cd31edf39ee90ed7034f9ca0eefee"}, + {file = "coverage-7.6.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1defe91d41ce1bd44b40fabf071e6a01a5aa14de4a31b986aa9dfd1b3e3e414a"}, + {file = "coverage-7.6.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7ad66e8e50225ebf4236368cc43c37f59d5e6728f15f6e258c8639fa0dd8e6d"}, + {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fe47da3e4fda5f1abb5709c156eca207eacf8007304ce3019eb001e7a7204cb"}, + {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:202a2d645c5a46b84992f55b0a3affe4f0ba6b4c611abec32ee88358db4bb649"}, + {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4674f0daa1823c295845b6a740d98a840d7a1c11df00d1fd62614545c1583787"}, + {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:74610105ebd6f33d7c10f8907afed696e79c59e3043c5f20eaa3a46fddf33b4c"}, + {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37cda8712145917105e07aab96388ae76e787270ec04bcb9d5cc786d7cbb8443"}, + {file = "coverage-7.6.8-cp312-cp312-win32.whl", hash = "sha256:9e89d5c8509fbd6c03d0dd1972925b22f50db0792ce06324ba069f10787429ad"}, + {file = "coverage-7.6.8-cp312-cp312-win_amd64.whl", hash = "sha256:379c111d3558272a2cae3d8e57e6b6e6f4fe652905692d54bad5ea0ca37c5ad4"}, + {file = "coverage-7.6.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b0c69f4f724c64dfbfe79f5dfb503b42fe6127b8d479b2677f2b227478db2eb"}, + {file = "coverage-7.6.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c15b32a7aca8038ed7644f854bf17b663bc38e1671b5d6f43f9a2b2bd0c46f63"}, + {file = "coverage-7.6.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:63068a11171e4276f6ece913bde059e77c713b48c3a848814a6537f35afb8365"}, + {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4548c5ead23ad13fb7a2c8ea541357474ec13c2b736feb02e19a3085fac002"}, + {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4b4299dd0d2c67caaaf286d58aef5e75b125b95615dda4542561a5a566a1e3"}, + {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9ebfb2507751f7196995142f057d1324afdab56db1d9743aab7f50289abd022"}, + {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c1b4474beee02ede1eef86c25ad4600a424fe36cff01a6103cb4533c6bf0169e"}, + {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d9fd2547e6decdbf985d579cf3fc78e4c1d662b9b0ff7cc7862baaab71c9cc5b"}, + {file = "coverage-7.6.8-cp313-cp313-win32.whl", hash = "sha256:8aae5aea53cbfe024919715eca696b1a3201886ce83790537d1c3668459c7146"}, + {file = "coverage-7.6.8-cp313-cp313-win_amd64.whl", hash = "sha256:ae270e79f7e169ccfe23284ff5ea2d52a6f401dc01b337efb54b3783e2ce3f28"}, + {file = "coverage-7.6.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:de38add67a0af869b0d79c525d3e4588ac1ffa92f39116dbe0ed9753f26eba7d"}, + {file = "coverage-7.6.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b07c25d52b1c16ce5de088046cd2432b30f9ad5e224ff17c8f496d9cb7d1d451"}, + {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a66ff235e4c2e37ed3b6104d8b478d767ff73838d1222132a7a026aa548764"}, + {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09b9f848b28081e7b975a3626e9081574a7b9196cde26604540582da60235fdf"}, + {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:093896e530c38c8e9c996901858ac63f3d4171268db2c9c8b373a228f459bbc5"}, + {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a7b8ac36fd688c8361cbc7bf1cb5866977ece6e0b17c34aa0df58bda4fa18a4"}, + {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:38c51297b35b3ed91670e1e4efb702b790002e3245a28c76e627478aa3c10d83"}, + {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2e4e0f60cb4bd7396108823548e82fdab72d4d8a65e58e2c19bbbc2f1e2bfa4b"}, + {file = "coverage-7.6.8-cp313-cp313t-win32.whl", hash = "sha256:6535d996f6537ecb298b4e287a855f37deaf64ff007162ec0afb9ab8ba3b8b71"}, + {file = "coverage-7.6.8-cp313-cp313t-win_amd64.whl", hash = "sha256:c79c0685f142ca53256722a384540832420dff4ab15fec1863d7e5bc8691bdcc"}, + {file = "coverage-7.6.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ac47fa29d8d41059ea3df65bd3ade92f97ee4910ed638e87075b8e8ce69599e"}, + {file = "coverage-7.6.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:24eda3a24a38157eee639ca9afe45eefa8d2420d49468819ac5f88b10de84f4c"}, + {file = "coverage-7.6.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4c81ed2820b9023a9a90717020315e63b17b18c274a332e3b6437d7ff70abe0"}, + {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd55f8fc8fa494958772a2a7302b0354ab16e0b9272b3c3d83cdb5bec5bd1779"}, + {file = 
"coverage-7.6.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f39e2f3530ed1626c66e7493be7a8423b023ca852aacdc91fb30162c350d2a92"}, + {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:716a78a342679cd1177bc8c2fe957e0ab91405bd43a17094324845200b2fddf4"}, + {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:177f01eeaa3aee4a5ffb0d1439c5952b53d5010f86e9d2667963e632e30082cc"}, + {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:912e95017ff51dc3d7b6e2be158dedc889d9a5cc3382445589ce554f1a34c0ea"}, + {file = "coverage-7.6.8-cp39-cp39-win32.whl", hash = "sha256:4db3ed6a907b555e57cc2e6f14dc3a4c2458cdad8919e40b5357ab9b6db6c43e"}, + {file = "coverage-7.6.8-cp39-cp39-win_amd64.whl", hash = "sha256:428ac484592f780e8cd7b6b14eb568f7c85460c92e2a37cb0c0e5186e1a0d076"}, + {file = "coverage-7.6.8-pp39.pp310-none-any.whl", hash = "sha256:5c52a036535d12590c32c49209e79cabaad9f9ad8aa4cbd875b68c4d67a9cbce"}, + {file = "coverage-7.6.8.tar.gz", hash = "sha256:8b2b8503edb06822c86d82fa64a4a5cb0760bb8f31f26e138ec743f422f37cfc"}, ] [package.dependencies] @@ -608,64 +645,70 @@ doc = ["sphinx", "sphinx_rtd_theme"] test = ["tap.py"] [[package]] -name = "docutils" -version = "0.20.1" -description = "Docutils -- Python Documentation Utilities" -optional = false -python-versions = ">=3.7" +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = true +python-versions = ">=3.5" files = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "docutils" +version = "0.21.2" +description = "Docutils -- Python Documentation Utilities" +optional = true +python-versions = ">=3.9" +files = [ + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] test = ["pytest (>=6)"] [[package]] -name = "flake8" -version = "5.0.4" -description = "the modular source code checker: pep8 pyflakes and co" +name = "filetype" +version = "1.2.0" +description = "Infer file type and MIME type of any file/buffer. No external dependencies." 
optional = false -python-versions = ">=3.6.1" +python-versions = "*" files = [ - {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, - {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, + {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, + {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, ] -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.9.0,<2.10.0" -pyflakes = ">=2.5.0,<2.6.0" - [[package]] name = "flask" -version = "3.0.3" +version = "3.1.0" description = "A simple framework for building complex web applications." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, - {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, + {file = "flask-3.1.0-py3-none-any.whl", hash = "sha256:d667207822eb83f1c4b50949b1623c8fc8d51f2341d65f72e1a1815397551136"}, + {file = "flask-3.1.0.tar.gz", hash = "sha256:5f873c5184c897c8d9d1b05df1e3d01b14910ce69607a117bd3277098a5836ac"}, ] [package.dependencies] -blinker = ">=1.6.2" +blinker = ">=1.9" click = ">=8.1.3" -importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} -itsdangerous = ">=2.1.2" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +itsdangerous = ">=2.2" Jinja2 = ">=3.1.2" -Werkzeug = ">=3.0.0" +Werkzeug = ">=3.1" [package.extras] async = ["asgiref (>=3.2)"] @@ -673,13 +716,13 @@ dotenv = ["python-dotenv"] [[package]] name = "flask-cors" -version = "4.0.1" +version = "5.0.0" description = "A Flask extension adding a decorator for CORS support" optional = true python-versions = "*" files = [ - {file = "Flask_Cors-4.0.1-py2.py3-none-any.whl", hash = "sha256:f2a704e4458665580c074b714c4627dd5a306b333deb9074d0b1794dfa2fb677"}, - {file = "flask_cors-4.0.1.tar.gz", hash = "sha256:eeb69b342142fdbf4766ad99357a7f3876a2ceb77689dc10ff912aac06c389e4"}, + {file = "Flask_Cors-5.0.0-py2.py3-none-any.whl", hash = "sha256:b9e307d082a9261c100d8fb0ba909eec6a228ed1b60a8315fd85f783d61910bc"}, + {file = "flask_cors-5.0.0.tar.gz", hash = "sha256:5aadb4b950c4e93745034594d9f3ea6591f734bb3662e16e255ffbf5e89c88ef"}, ] [package.dependencies] @@ -698,13 +741,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.7" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [package.dependencies] @@ -715,17 +758,17 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" -version = "0.27.0" +version = "0.28.0" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.28.0-py3-none-any.whl", hash = "sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc"}, + {file = "httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0"}, ] [package.dependencies] @@ -733,25 +776,28 @@ anyio = "*" certifi = "*" httpcore = "==1.*" idna = "*" -sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "ifaddr" version = "0.2.0" @@ -767,7 +813,7 @@ files = [ name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" -optional = false +optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, @@ -776,22 +822,26 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = 
"sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "inflate64" @@ -871,23 +921,6 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.dependencies] -colorama = {version = ">=0.4.6", optional = true, markers = "extra == \"colors\""} - -[package.extras] -colors = ["colorama (>=0.4.6)"] - [[package]] name = "itsdangerous" version = "2.2.0" @@ -901,84 +934,92 @@ files = [ [[package]] name = "jellyfish" -version = "1.0.4" +version = "1.1.0" description = "Approximate and phonetic matching of strings." 
optional = false python-versions = ">=3.7" files = [ - {file = "jellyfish-1.0.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f2cfa648575d1e8197cd61a1936929913916173aee8ee66210eb6a08b1a2fa11"}, - {file = "jellyfish-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c915ce23a518d2289c63a502064bf970b9748d9e4164e21e36ebba40c07ad37"}, - {file = "jellyfish-1.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a001b0794aa84bcab311f6019289904ddf417b47ffe2b4796b41a8314bae3c1c"}, - {file = "jellyfish-1.0.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ffe094f4d5b1de72ed1e8bb3e729ada8915f096bc04c344aabb4327a669f272e"}, - {file = "jellyfish-1.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:481689f6e1a695ddf44b7fe9250d0f86839d98ab5418115d8e52886d488fd259"}, - {file = "jellyfish-1.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:30ccf4b1e6a6f9a54f60250f5d65898746186b93aceebfd0dc7159cbe5554db3"}, - {file = "jellyfish-1.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0459f8dc1d63a165f3848ed9f756107cff0d4990e3dffbaed839228073b628b7"}, - {file = "jellyfish-1.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1830f125dc2f41babd0101b2d67a325972f79e991af1e0a135d9efe2c890bbbc"}, - {file = "jellyfish-1.0.4-cp310-none-win32.whl", hash = "sha256:169634dc62f7bb9b93c03d3d7b5da327426023d47d58bd8b424c61aaaa33085c"}, - {file = "jellyfish-1.0.4-cp310-none-win_amd64.whl", hash = "sha256:5b87fca57f6240fe4658810587a0ff49f261a6a49943ad003bbc109c358cec2e"}, - {file = "jellyfish-1.0.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:1561cd0d1687113a5b1e4c4f0e1ab373fbc851af0c9c769a486f94f9ede27cd5"}, - {file = "jellyfish-1.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d60ab87c542659bd71ed952fbc9c4186293e49016dd92ca79156fee6574a17d"}, - {file = "jellyfish-1.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ceba547f96de1f58d29f87c816aab4ec02cbeb6606a48fcad1dcf35c1f06042"}, - {file = "jellyfish-1.0.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1457d3de969f652d6396cb0329cae3f8d75e0851b335f106624bc991c63c80b"}, - {file = "jellyfish-1.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90384f8b4446a84682bb18051cdc17a069963fff1d0af03ccd2b044b62af6d44"}, - {file = "jellyfish-1.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:896552560dcba125db074b980ccc17e123e9444593b272edb82254e4b30f0bd1"}, - {file = "jellyfish-1.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:47b0914f375be24976f26ff1436f22dc9fa1ecf9513dbe7ebbee6af5f85409a2"}, - {file = "jellyfish-1.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1529a9f1627acffda79ab490ca3f67d35ee1e15c2d30b3f9f93be895b212c4c5"}, - {file = "jellyfish-1.0.4-cp311-none-win32.whl", hash = "sha256:4a47daa243798db689f8b1f3e6c811032075fd434e2af9dfea35a1553152e34e"}, - {file = "jellyfish-1.0.4-cp311-none-win_amd64.whl", hash = "sha256:1d42fdf67a34a346d906b0ab8d56db9cc68bf6497382650d4581ec0fc8eef342"}, - {file = "jellyfish-1.0.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:81f68a8ba074ef3630fab6e635d542708c6e7b5c504a546257b796835b28a5d5"}, - {file = "jellyfish-1.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:535a7e99c2b6d456b06d160c950379f18cb72effeb856069eae5d024e09b4afd"}, - {file = "jellyfish-1.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:76b7936b69687d788aabb86875c0780d6a77dbac9d1738503b0091af744ff79b"}, - {file = "jellyfish-1.0.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:015997043c3eecbf686a71838028180d885f0dc4f7e99daf7194e1787ecd5909"}, - {file = "jellyfish-1.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66c7c6113565bcd3771882ff1c6a31573ef3ce755f882e1bf27b233c44a24f35"}, - {file = "jellyfish-1.0.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:069d406850c68a43513a4ddbbf4191e83a2b8fb08119d708086a21c2cf6c406e"}, - {file = "jellyfish-1.0.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:642dde8453ae45235c377ad3ce6cc990bf90fe9c4222896ab3f0f6c5609089a4"}, - {file = "jellyfish-1.0.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:967723d8a1321327b3d6e9eada2db95659ff27ab6de8bb8dc82eefe1ce993333"}, - {file = "jellyfish-1.0.4-cp312-none-win32.whl", hash = "sha256:bd33becfa61956c8ebd12bcb7227d48156d7e4c289780bcccf06e55acde12bf6"}, - {file = "jellyfish-1.0.4-cp312-none-win_amd64.whl", hash = "sha256:c3addb4eebadd7cd4f6cdbff55a4a28caf2448333131b20661d4ff342f53e8a4"}, - {file = "jellyfish-1.0.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b365ef28bed2673fffec38775aae36fd9d692ff3e2c6f331ed1898c00cd1f388"}, - {file = "jellyfish-1.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44329a7fa570a167144ef20d8d57dff7c59028ecc392cccd9385d23c321aa784"}, - {file = "jellyfish-1.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59f4d263d15d868372c2dcfe12007707d3448a9253a1d912230f213b456ca697"}, - {file = "jellyfish-1.0.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5dca33ca59163445c8afdedff7f47d7f27f0a88fabedfde4ea7197698adc6335"}, - {file = "jellyfish-1.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42056f50f5c604132ca50088253122c8a0bdbf8f37959b60f4fd75026396be3c"}, - {file = "jellyfish-1.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2c001b7cfb14216108dcc851ac39fb428ed82138dabcd6a20e051a2ec5cd6f9e"}, - {file = "jellyfish-1.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c9c713fb0012db4468c2aff8e3a6b6dbd63908563bec0f0ed508d280f0fc4937"}, - {file = "jellyfish-1.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d78874364780b2a6ec248c3d92fb5343cdc5dadaa552cc564eebe7941678303c"}, - {file = "jellyfish-1.0.4-cp38-none-win32.whl", hash = "sha256:363562fa5d99bc688a187ede4c05a9e220baf296a450fe1a24e2c02a65d0077c"}, - {file = "jellyfish-1.0.4-cp38-none-win_amd64.whl", hash = "sha256:27f51df4818fc3eea3ed32c9f50883c011c9e1f972cc2b04cef614d24179f427"}, - {file = "jellyfish-1.0.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:651f8c75bf4352427f1160c2b1d9e994862cc86a9ce2106e9c0c2d87e973ca88"}, - {file = "jellyfish-1.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a9fd3409238012c3d153b5d3ec5f3fe32648ceb6a86f67d42434f0f5f2447a28"}, - {file = "jellyfish-1.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e003e5db6607b46d7f1e321628a0a04e8f56adcbdd8aadfb6b61ec6764bc028a"}, - {file = "jellyfish-1.0.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d443734829b7e38a4e4525cc572e6ff6e0588254ae346a11c976b935efdbc54"}, - {file = "jellyfish-1.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984e6307ed255224dc38370b953b48e6f13950209d76489ade582dedc66a001a"}, - {file = "jellyfish-1.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:00e23fb1e87c8b99008fe0a9c00f509754cf0e1a74d9a975fc3737790e756679"}, - {file = "jellyfish-1.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f6f5aaa4f4326feb6754878f3875ee39c5df2e650abe04f9da28c80c3e341728"}, - {file = "jellyfish-1.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d40c3ebd4a00a55845b5653e4a5a8784a032a68e91ca3713163e446b48168887"}, - {file = "jellyfish-1.0.4-cp39-none-win32.whl", hash = "sha256:ce7a7c6ab717d7b8961d234a60c0e12f80a24b4b0ec213a2272f4cdba013b5f8"}, - {file = "jellyfish-1.0.4-cp39-none-win_amd64.whl", hash = "sha256:73e0789d20eda3923a6531693aca1ca6231bec12b9b7c6d7c2ed37b1889f40c1"}, - {file = "jellyfish-1.0.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89f6db50049492c49b622e8e8c04c5494d4c96c92f0ae573288eefb809d60d1f"}, - {file = "jellyfish-1.0.4-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f5af807b15db3c7d08c30911fbe82266ff1089f28ba5a682e87e3145943936cd"}, - {file = "jellyfish-1.0.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17c7822dcb9e70b682604941852f6bba78b047b60d41d2df0e6a75a5d2b1bb78"}, - {file = "jellyfish-1.0.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f581b0184ce7a000f38941b2c81a4480b5fd52cdeb5672f9f682d9e3adb8db84"}, - {file = "jellyfish-1.0.4-pp310-pypy310_pp73-musllinux_1_1_i686.whl", hash = "sha256:f5bc5e05616155306756abe2afda23caefb766b59c849d88285f67bcdcf5a5bb"}, - {file = "jellyfish-1.0.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4de7c4e7245a2fa9d24bb75dd66aaf77b3521b2377883af0b6008740024ce598"}, - {file = "jellyfish-1.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e94d594b4b4902a5278346852227f9be281a4aa61907ecd00091451bce8279be"}, - {file = "jellyfish-1.0.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:91dc251d02dee6686a1dc31a30a0ea7d8f49781556272b1044cb1a942db75a40"}, - {file = "jellyfish-1.0.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:359aa8c0a55560bd0e6d17e2098b96428d91f066120a708ad889a729801a78b9"}, - {file = "jellyfish-1.0.4-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02525584cb1d09f3b5b784685a63ba6e889bce240acef12161d7a6f428ea6c2e"}, - {file = "jellyfish-1.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f9d82531af35123632bd02a4394c8783a6f81d89b5c1711bd4c1a6c800a8dc"}, - {file = "jellyfish-1.0.4-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c54255f79a5d00a8261c48e75b483bf1ba2aa87655bd69d4cff805eab758c0f4"}, - {file = "jellyfish-1.0.4-pp38-pypy38_pp73-musllinux_1_1_i686.whl", hash = "sha256:209ccfbc135d150f6af2db9d3c13764f690aa6196fb74ecffd5394c367ed4938"}, - {file = "jellyfish-1.0.4-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3ee641c1c68e0458cac4b9c8c4432d535de1bdd66e8514e541939094622b55cc"}, - {file = "jellyfish-1.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d9552ac0478d3015f93161e7d7e1a388b3204fac2a1a22aaf30d67c3670eb6f2"}, - {file = "jellyfish-1.0.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:2cc0a11166cb1cea0700585de63fa46252a5efa46bc31bc4d869b71e3c666ded"}, - {file = "jellyfish-1.0.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6285981622d31c11994f2abcd0d35ec7661cba842538e59bfb735fbedf7b6531"}, - {file = "jellyfish-1.0.4-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:07e10f5ab014a626048ff7cd78a6a52c81845f89d94902371278c4be66d91909"}, - {file = "jellyfish-1.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8eaa4f2cf78f34cfd41270968954b3bc1eed433b6e7d124786b8064021c110a5"}, - {file = "jellyfish-1.0.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83e3c4b446da02926b382f8eb8b931e266927e82162adf4c57f0c55c2bf95b35"}, - {file = "jellyfish-1.0.4-pp39-pypy39_pp73-musllinux_1_1_i686.whl", hash = "sha256:d88f47c5c3d97f40b4aa42b83d7ca03707bd6bebd945c9532da6e25515bbeea4"}, - {file = "jellyfish-1.0.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:a4b717a510e64773c4f882b373d4aeda7d2b2b8ffae87c16a906426b7cd02d55"}, - {file = "jellyfish-1.0.4.tar.gz", hash = "sha256:72aabb3bedd513cdd20712242fd51173b59972c0b146b7a0b9c6f32f1656293f"}, + {file = "jellyfish-1.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:feb1fa5838f2bb6dbc9f6d07dabf4b9d91e130b289d72bd70dc33b651667688f"}, + {file = "jellyfish-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:623fa58cca9b8e594a46e7b9cf3af629588a202439d97580a153d6af24736a1b"}, + {file = "jellyfish-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a87e4a17006f7cdd7027a053aeeaacfb0b3366955e242cd5b74bbf882bafe022"}, + {file = "jellyfish-1.1.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f10fa36491840bda29f2164cc49e61244ea27c5db5a66aaa437724f5626f5610"}, + {file = "jellyfish-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24f91daaa515284cdb691b1e01b0f91f9c9e51e685420725a1ded4f54d5376ff"}, + {file = "jellyfish-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:65e58350618ebb1488246998a7356a8c9a7c839ec3ecfe936df55be6776fc173"}, + {file = "jellyfish-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5c5ed62b23093b11de130c3fe1b381a2d3bfaf086757fa21341ac6f30a353e92"}, + {file = "jellyfish-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c42aa02e791d3e5a8fc6a96bec9f64ebbb2afef27b01eca201b56132e3d0c64e"}, + {file = "jellyfish-1.1.0-cp310-none-win32.whl", hash = "sha256:84680353261161c627cbdd622ea4243e3d3da75894bfacc2f3fcbbe56e8e59d4"}, + {file = "jellyfish-1.1.0-cp310-none-win_amd64.whl", hash = "sha256:017c794b89d827d0306cb056fc5fbd040ff558a90ff0e68a6b60d6e6ba661fe3"}, + {file = "jellyfish-1.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:fed2e4ecf9b4995d2aa771453d0a0fdf47a5e1b13dbd74b98a30cb0070ede30c"}, + {file = "jellyfish-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61a382ba8a3d3cd0bd50029062d54d3a0726679be248789fef6a3901eee47a60"}, + {file = "jellyfish-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a4b526ed2080b97431454075c46c19baddc944e95cc605248e32a2a07be231e"}, + {file = "jellyfish-1.1.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0fa7450c3217724b73099cb18ee594926fcbc1cc4d9964350f31a4c1dc267b35"}, + {file = "jellyfish-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33ebb6e9647d5d52f4d461a163449f6d1c73f1a80ccbe98bb17efac0062a6423"}, + {file = "jellyfish-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:759172602343115f910d7c63b39239051e32425115bc31ab4dafdaf6177f880c"}, + {file = "jellyfish-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:273fdc362ccdb09259eec9bc4abdc2467d9a54bd94d05ae22e71423dd1357255"}, + {file = "jellyfish-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:bd5c335f8d762447691dc0572f4eaf0cfdfbfffb6dce740341425ab1b32134ff"}, + {file = "jellyfish-1.1.0-cp311-none-win32.whl", hash = "sha256:cc16a60a42f1541ad9c13c72c797107388227f01189aa3c0ec7ee9b939e57ea8"}, + {file = "jellyfish-1.1.0-cp311-none-win_amd64.whl", hash = "sha256:95dfe61eabf360a92e6d76d1c4dbafa29bcb3f70e2ad7354de2661141fcce038"}, + {file = "jellyfish-1.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:828a7000d369cbd4d812b88510c01fdab20b73dc54c63cdbe03bdff67ab362d0"}, + {file = "jellyfish-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e250dc1074d730a03c96ac9dfce44716cf45e0e2825cbddaf32a015cdf9cf594"}, + {file = "jellyfish-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87dc2a82c45b773a579fb695a5956a54106c1187f27c9ccee8508726d2e59cfc"}, + {file = "jellyfish-1.1.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e41677ec860454da5977c698fc64fed73b4054a92c5c62ba7d1af535f8082ac7"}, + {file = "jellyfish-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d4002d01252f18eb26f28b66f6c9ce0696221804d8769553c5912b2f221a18"}, + {file = "jellyfish-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:936df26c10ca6cd6b4f0fb97753087354c568e2129c197cbb4e0f0172db7511f"}, + {file = "jellyfish-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:684c2093fa0d68a91146e15a1e9ca859259b19d3bc36ec4d60948d86751f744e"}, + {file = "jellyfish-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2fcaefebe9d67f282d89d3a66646b77184a42b3eca2771636789b2dc1288c003"}, + {file = "jellyfish-1.1.0-cp312-none-win32.whl", hash = "sha256:e512c99941a257541ffd9f75c7a5c4689de0206841b72f1eb015599d17fed2c3"}, + {file = "jellyfish-1.1.0-cp312-none-win_amd64.whl", hash = "sha256:2b928bad2887c662783a4d9b5828ed1fa0e943f680589f7fc002c456fc02e184"}, + {file = "jellyfish-1.1.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5d510b04e2a39f27aef391ca18bf527ec5d9a2438a63731b87faada83996cb92"}, + {file = "jellyfish-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:57d005cc5daa4d0a8d88341d86b1dce24e3f1d7721da75326c0b7af598a4f58c"}, + {file = "jellyfish-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:889edab0fb2a29d29c148c9327752df525c9bdaef03eef01d1bd9c1f90b47ebf"}, + {file = "jellyfish-1.1.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:937b657aacba8fe8482ebc5fea5ba1aee987ecb9da0f037bfb8a1a9045d05746"}, + {file = "jellyfish-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cb5088436ce1fdabcb46aed3a3cc215f0432313596f4e5abe5300ed833b697c"}, + {file = "jellyfish-1.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:af74156301a0ff05a22e8cf46250678e23fa447279ba6dffbf9feff01128f51d"}, + {file = "jellyfish-1.1.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3f978bc430bbed4df3c10b2a66be7b5bddd09e6c2856c7a17fa2298fb193d4d4"}, + {file = "jellyfish-1.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b460f0bbde533f6f8624c1d7439e7f511b227ca18a58781e7f38f21961bd3f09"}, + {file = "jellyfish-1.1.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:7cd4b706cb6c4739846d78a398c67996cb451b09a732a625793cfe8d4f37af1b"}, + {file = "jellyfish-1.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61cded25b47fe6b4c2ea9478c0a5a7531845218525a1b2627c67907ee9fe9b15"}, + {file = "jellyfish-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:04bf33577059afba33227977e4a2c08ccb954eb77c849fde564af3e31ee509d9"}, + {file = "jellyfish-1.1.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:327496501a44fbdfe0602fdc6a7d4317a7598202f1f652c9c4f0a49529a385cd"}, + {file = "jellyfish-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0d1e6bac549cc2919b83d0ebe26566404ae3dfef5ef86229d1d826e3aeaba4b"}, + {file = "jellyfish-1.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b5fec525f15b39687dbfd75589333df4e6f6d15d3b1e0ada02bf206363dfd2af"}, + {file = "jellyfish-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8b2faf015e86a9efd5679b3abde83cbd8f3104b9e89445aa76b8481b206b3e67"}, + {file = "jellyfish-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b73efda07d52a1583afb8915a5f9feb017d0b60ae6d03071b21cc4f0a8a08ec1"}, + {file = "jellyfish-1.1.0-cp38-none-win32.whl", hash = "sha256:4a5199583a956d313be825972d7c14a0d9e455884acd12c03d05e4272c6c3bb8"}, + {file = "jellyfish-1.1.0-cp38-none-win_amd64.whl", hash = "sha256:755b68920a839f9e2b4813f0990a8dadcc9a24980bb29839f636ab5e36aaa256"}, + {file = "jellyfish-1.1.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e965241e54f9cb9be6fe8f7a1376b6cc61ff831de017bde9150156771820f669"}, + {file = "jellyfish-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e59a4c3bf0847dfff44195a4c250bc9e281b1c403f6212534ee36fc7c913dc1"}, + {file = "jellyfish-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84fa4e72b7754060d352604e07ea89af98403b0436caad443276ae46135b7fd7"}, + {file = "jellyfish-1.1.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:125e9bfd1cc2c053eae3afa04fa142bbc8b3c1290a40a3416271b221f7e6bc87"}, + {file = "jellyfish-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4a8fff36462bf1bdaa339d58fadd7e79a63690902e6d7ddd65a84efc3a4cc6d"}, + {file = "jellyfish-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6b438b3d7f970cfd8f77b30b05694537a54c08f3775b35debae45ff5a469f1a5"}, + {file = "jellyfish-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:cf8d26c3735b5c2764cc53482dec14bb9b794ba829db3cd4c9a29d194a61cada"}, + {file = "jellyfish-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f341d0582ecac0aa73f380056dc8d25d8a60104f94debe8bf3f924a32a11588d"}, + {file = "jellyfish-1.1.0-cp39-none-win32.whl", hash = "sha256:49f2be59573b22d0adb615585ff66ca050198ec1f9f6784eec168bcd8137caf5"}, + {file = "jellyfish-1.1.0-cp39-none-win_amd64.whl", hash = "sha256:c58988138666b1cd860004c1afc7a09bb402e71e16e1f324be5c5d2b85fdfa3e"}, + {file = "jellyfish-1.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54effec80c7a5013bea8e2ea6cd87fdd35a2c5b35f86ccf69ec33f4212245f25"}, + {file = "jellyfish-1.1.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12ae67e9016c9a173453023fd7b400ec002bbc106c12722d914c53951acfa190"}, + {file = "jellyfish-1.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd342f9d4fb0ead8a3c30fe26e442308fb665ca37f4aa97baf448d814469bf1"}, + {file = "jellyfish-1.1.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b0dc9f1bb335b6caa412c3d27028e25d315ef2bc993d425db93e451d7bc28056"}, + {file = "jellyfish-1.1.0-pp310-pypy310_pp73-musllinux_1_1_i686.whl", hash = "sha256:3f12cb59b3266e37ec47bd7c2c37faadc74ae8ccdc0190444daeafda3bd93da2"}, + {file = "jellyfish-1.1.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash 
= "sha256:c7ea99734b7767243b5b98eca953f0d719b48b0d630af3965638699728ef7523"}, + {file = "jellyfish-1.1.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1a90889fdb96ca27fc176e19a472c736e044d7190c924d9b7cfb0444881f921c"}, + {file = "jellyfish-1.1.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:c01cdf0d52d07e07fb0dfa2b3c03ca3b5a07088f08b38b06376ed228d842e501"}, + {file = "jellyfish-1.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a4678a2623cc83fde7ff683ba78d308edf7e54a1c81dd295cdf525761b9fcc1"}, + {file = "jellyfish-1.1.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b557b8e1fdad4a36f467ee44f5532a4a13e5300b93b2b5e70ff75d0d16458132"}, + {file = "jellyfish-1.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5c34d12730d912bafab9f6daaa7fb2c6fa6afc0a8fc2c4cdc017df485d8d843"}, + {file = "jellyfish-1.1.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d977a1e0fa3814d517b16d58a39a16e449bbd900b966dd921e770d0fd67bfa45"}, + {file = "jellyfish-1.1.0-pp38-pypy38_pp73-musllinux_1_1_i686.whl", hash = "sha256:6662152bf510cc7daef18965dd80cfa98710b479bda87a3170c86c4e0a6dc1ab"}, + {file = "jellyfish-1.1.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e447e3807c73aeda7b592919c105bf98ce0297a228aff68aafe4fe70a39b9a78"}, + {file = "jellyfish-1.1.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ca252e6088c6afe5f8138ce9f557157ad0329f0610914ba50729c641d57cd662"}, + {file = "jellyfish-1.1.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b2512ab6a1625a168796faaa159e1d1b8847cb3d0cc2b1b09ae77ff0623e7d10"}, + {file = "jellyfish-1.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b868da3186306efb48fbd8a8dee0a742a5c8bc9c4c74aa5003914a8600435ba8"}, + {file = "jellyfish-1.1.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bcc2cb1f007ddfad2f9175a8c1f934a8a0a6cc73187e2339fe1a4b3fd90b263e"}, + {file = "jellyfish-1.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e17885647f3a0faf1518cf6b319865b2e84439cfc16a3ea14468513c0fba227"}, + {file = "jellyfish-1.1.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:84ea543d05e6b7a7a704d45ebd9c753e2425da01fc5000ddc149031be541c4d5"}, + {file = "jellyfish-1.1.0-pp39-pypy39_pp73-musllinux_1_1_i686.whl", hash = "sha256:065a59ab0d02969d45e5ab4b0315ed6f5977a4eb8eaef24f2589e25b85822d18"}, + {file = "jellyfish-1.1.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f747f34071e1558151b342a2bf96b813e04b5384024ba7c50f3c907fbaab484f"}, + {file = "jellyfish-1.1.0.tar.gz", hash = "sha256:2a2eec494c81dc1eb23dfef543110dad1873538eccaffabea8520bdac8aecbc1"}, ] [[package]] @@ -998,6 +1039,17 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "joblib" +version = "1.4.2" +description = "Lightweight pipelining with Python functions" +optional = true +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, + {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, +] + [[package]] name = "langdetect" version = "1.0.9" @@ -1012,155 +1064,297 @@ files = [ [package.dependencies] six = "*" +[[package]] +name = "lap" +version = "0.5.12" +description = "Linear Assignment Problem solver (LAPJV/LAPMOD)." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "lap-0.5.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c3a38070b24531949e30d7ebc83ca533fcbef6b1d6562f035cae3b44dfbd5ec"}, + {file = "lap-0.5.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a301dc9b8a30e41e4121635a0e3d0f6374a08bb9509f618d900e18d209b815c4"}, + {file = "lap-0.5.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f0c1b9ab32c9ba9a94e3f139a0c30141a15fb9e71d69570a6851bbae254c299"}, + {file = "lap-0.5.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f702e9fbbe3aa265708817ba9d4efb44d52f7013b792c9795f7501ecf269311a"}, + {file = "lap-0.5.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9836f034c25b1dfeabd812b7359816911ed05fe55f53e70c30ef849adf07df02"}, + {file = "lap-0.5.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0416780dbdca2769231a53fb5491bce52775299b014041296a8b5be2d00689df"}, + {file = "lap-0.5.12-cp310-cp310-win_amd64.whl", hash = "sha256:2d6e137e1beb779fcd6a42968feb6a122fdddf72e5b58d865191c31a01ba6804"}, + {file = "lap-0.5.12-cp310-cp310-win_arm64.whl", hash = "sha256:a40d52c5511421497ae3f82a5ca85a5442d8776ba2991c6fca146afceea7608f"}, + {file = "lap-0.5.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d928652e77bec5a71dc4eb4fb8e15d455253b2a391ca8478ceab7d171cbaec2e"}, + {file = "lap-0.5.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4a0ea039fcb2fd388b5e7c1be3402c483d32d3ef8c70261c69ab969ec25cd83"}, + {file = "lap-0.5.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87c0e736c31af0a827dc642132d09c5d4f77d30f5b3f0743b9cd31ef12adb96c"}, + {file = "lap-0.5.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5270141f97027776ced4b6540d51899ff151d8833b5f93f2428de36c2270a9ed"}, + {file = "lap-0.5.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:04dc4b44c633051a9942ad60c9ad3da28d7c5f09de93d6054b763c57cbc4ac90"}, + {file = "lap-0.5.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:560ec8b9100f78d6111b0acd9ff8805e4315372f23c2dcad2f5f9f8d9c681261"}, + {file = "lap-0.5.12-cp311-cp311-win_amd64.whl", hash = "sha256:851b9bcc898fa763d6e7c307d681dde199ca969ab00e8292fc13cff34107ea38"}, + {file = "lap-0.5.12-cp311-cp311-win_arm64.whl", hash = "sha256:49e14fdbf4d55e7eda6dfd3aba433a91b00d87c7be4dd25059952b871b1e3399"}, + {file = "lap-0.5.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1211fca9d16c0b1383c7a93be2045096ca5e4c306e794fcf777ac52b30f98829"}, + {file = "lap-0.5.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8dcafbf8363308fb289d7cd3ae9df375ad090dbc2b70f5d7d038832e87d2b1a1"}, + {file = "lap-0.5.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f721ed3fd2b4f6f614870d12aec48bc44c089587930512c3187c51583c811b1c"}, + {file = "lap-0.5.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:797d9e14e517ac06337b6dca875bdf9f0d88ec4c3214ebb6d0676fed197dc13f"}, + {file = "lap-0.5.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a2424daf7c7afec9b93ed02af921813ab4330826948ce780a25d94ca42df605"}, + {file = "lap-0.5.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1c34c3d8aefbf7d0cb709801ccf78c6ac31f4b1dc26c169ed1496ed3cb6f4556"}, + {file = "lap-0.5.12-cp312-cp312-win_amd64.whl", hash = 
"sha256:753ef9bd12805adbf0d09d916e6f0d271aebe3d2284a1f639bd3401329e436e5"}, + {file = "lap-0.5.12-cp312-cp312-win_arm64.whl", hash = "sha256:83e507f6def40244da3e03c71f1b1f54ceab3978cde72a84b84caadd8728977e"}, + {file = "lap-0.5.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0c4fdbd8d94ad5da913ade49635bad3fc4352ee5621a9f785494c11df5412d6d"}, + {file = "lap-0.5.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e2d01113eec42174e051ee5cebb5d33ec95d37bd2c422b7a3c09bbebaf30b635"}, + {file = "lap-0.5.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a6e8ed53cb4d85fa0875092bc17436d7eeab2c7fb3574e551c611c352fea8c8"}, + {file = "lap-0.5.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dd54bf8bb48c87f6276555e8014d4ea27742d84ddbb0e7b68be575f4ca438d7"}, + {file = "lap-0.5.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9db0e048cfb561f21671a3603dc2761f108b3111da66a7b7d2f035974dcf966e"}, + {file = "lap-0.5.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:517b8bd02e56b8466244fc4c0988aece04e6f8b11f43406ae195b4ce308733fb"}, + {file = "lap-0.5.12-cp313-cp313-win_amd64.whl", hash = "sha256:59dba008db14f640a20f4385916def4b343fa59efb4e82066df81db5a9444d5e"}, + {file = "lap-0.5.12-cp313-cp313-win_arm64.whl", hash = "sha256:30309f6aff8e4d616856ec8c6eec7ad5b48d2687887b931302b5c8e6dfac347a"}, + {file = "lap-0.5.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ec648065b28d6cdda1c754ab578c989e228094dc2ee74a16ff4e2ba27b53444e"}, + {file = "lap-0.5.12-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e736814170a8f3483cf6fa9c99136ee58afb071113712291a759dea03701598c"}, + {file = "lap-0.5.12-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6ccb09cf0dbae0daf4129cf13de3518eea8fd4959067bf0fe1c2b97d128039"}, + {file = "lap-0.5.12-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:59d8afdc349a9dc178742b138c1d703f49a38d7a63df6f048de0122ce1584fb9"}, + {file = "lap-0.5.12-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:026191d639a317366c884bcf7576caa1d88c21f439af153d3d74861fe39d6d68"}, + {file = "lap-0.5.12-cp37-cp37m-win_amd64.whl", hash = "sha256:b5dd6fa3f7a00746573a345865b35296b3b718ba706be9b58a197b11a5717b70"}, + {file = "lap-0.5.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64282f9c8c3ecba0400e3beb587441c294f592e404375336172173382205f1d7"}, + {file = "lap-0.5.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:29e72adc2da0ec5eedea151b8dd6e75ea01803fdcbd67d1b4b80f4146cb5de2d"}, + {file = "lap-0.5.12-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ffc274987108e53d2da6c3a013d2a99c07ebd8ef6e7609951675dcd13642c17"}, + {file = "lap-0.5.12-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72237a267e0245512a58a23a604f1a2590a52cfe43695e1ad84d69d1f51b1a0e"}, + {file = "lap-0.5.12-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:bde8fcd01ac29a9d734e659916cada9a7992e8a9b585cd21062aafa0cef66cbe"}, + {file = "lap-0.5.12-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ee454ab4b9fa7f600b8ea2f53952e4b60826d45c2ef72eb5694e7dda70e6d525"}, + {file = "lap-0.5.12-cp38-cp38-win_amd64.whl", hash = "sha256:c40d24d52a7fd70eff15f18626a69a1b0fd014e41fb899a9a9b6984f6753e94b"}, + {file = "lap-0.5.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:f3b6fb7837f57fba552621ce63d2fe23f17ccf77899bcb04d1909a7362ff9692"}, + {file = "lap-0.5.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b097b065ec14a91619914dbd6ec311273963d37d77cb1cf873906a28661d974"}, + {file = "lap-0.5.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7eac1ba8ffd3a2cd892f03ab7507d294b5f24ea6511ce6dd28b3edc2fc4f4da9"}, + {file = "lap-0.5.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2f20aca4f5546b07ef71112b76a0f6e2d07399b84c791bb91e7700a6f799dc7"}, + {file = "lap-0.5.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4182642094cb10377551372c4994505b2b7c82113b210448b87f7f4652cc208"}, + {file = "lap-0.5.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3f80e7d5b2d7b9b2e799978b2febca6b2f25044496ff94e9c043123eb495bd1a"}, + {file = "lap-0.5.12-cp39-cp39-win_amd64.whl", hash = "sha256:2e2b7015bd1bab150688c950738fda76b70388793bd539e0e63888ece57af1e7"}, + {file = "lap-0.5.12-cp39-cp39-win_arm64.whl", hash = "sha256:4019cce8c9e10b6c0aab8d23fddeb01efd251010605ea9d4e69f93111380b06e"}, + {file = "lap-0.5.12.tar.gz", hash = "sha256:570b414ea7ae6c04bd49d0ec8cdac1dc5634737755784d44e37f9f668bab44fd"}, +] + +[package.dependencies] +numpy = ">=1.21.6" + +[[package]] +name = "lazy-loader" +version = "0.4" +description = "Makes it easy to load subpackages and functions on demand." +optional = true +python-versions = ">=3.7" +files = [ + {file = "lazy_loader-0.4-py3-none-any.whl", hash = "sha256:342aa8e14d543a154047afb4ba8ef17f5563baad3fc610d7b15b213b0f119efc"}, + {file = "lazy_loader-0.4.tar.gz", hash = "sha256:47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +dev = ["changelist (==0.5)"] +lint = ["pre-commit (==3.7.0)"] +test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"] + +[[package]] +name = "librosa" +version = "0.10.2.post1" +description = "Python module for audio and music processing" +optional = true +python-versions = ">=3.7" +files = [ + {file = "librosa-0.10.2.post1-py3-none-any.whl", hash = "sha256:dc882750e8b577a63039f25661b7e39ec4cfbacc99c1cffba666cd664fb0a7a0"}, + {file = "librosa-0.10.2.post1.tar.gz", hash = "sha256:cd99f16717cbcd1e0983e37308d1db46a6f7dfc2e396e5a9e61e6821e44bd2e7"}, +] + +[package.dependencies] +audioread = ">=2.1.9" +decorator = ">=4.3.0" +joblib = ">=0.14" +lazy-loader = ">=0.1" +msgpack = ">=1.0" +numba = ">=0.51.0" +numpy = ">=1.20.3,<1.22.0 || >1.22.0,<1.22.1 || >1.22.1,<1.22.2 || >1.22.2" +pooch = ">=1.1" +scikit-learn = ">=0.20.0" +scipy = ">=1.2.0" +soundfile = ">=0.12.1" +soxr = ">=0.3.2" +typing-extensions = ">=4.1.1" + +[package.extras] +display = ["matplotlib (>=3.5.0)"] +docs = ["ipython (>=7.0)", "matplotlib (>=3.5.0)", "mir-eval (>=0.5)", "numba (>=0.51)", "numpydoc", "presets", "sphinx (!=1.3.1)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.7)", "sphinx-multiversion (>=0.2.3)", "sphinx-rtd-theme (>=1.2.0)", "sphinxcontrib-svg2pdfconverter"] +tests = ["matplotlib (>=3.5.0)", "packaging (>=20.0)", "pytest", "pytest-cov", "pytest-mpl", "resampy (>=0.2.2)", "samplerate", "types-decorator"] + +[[package]] +name = "llvmlite" +version = "0.43.0" +description = "lightweight wrapper around basic LLVM functionality" +optional = true +python-versions = ">=3.9" +files = [ + {file = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"}, + {file = 
"llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"}, + {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead"}, + {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a"}, + {file = "llvmlite-0.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed"}, + {file = "llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98"}, + {file = "llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57"}, + {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2"}, + {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749"}, + {file = "llvmlite-0.43.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91"}, + {file = "llvmlite-0.43.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f99b600aa7f65235a5a05d0b9a9f31150c390f31261f2a0ba678e26823ec38f7"}, + {file = "llvmlite-0.43.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:35d80d61d0cda2d767f72de99450766250560399edc309da16937b93d3b676e7"}, + {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eccce86bba940bae0d8d48ed925f21dbb813519169246e2ab292b5092aba121f"}, + {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6509e1507ca0760787a199d19439cc887bfd82226f5af746d6977bd9f66844"}, + {file = "llvmlite-0.43.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a2872ee80dcf6b5dbdc838763d26554c2a18aa833d31a2635bff16aafefb9c9"}, + {file = "llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c"}, + {file = "llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8"}, + {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a"}, + {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867"}, + {file = "llvmlite-0.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4"}, + {file = "llvmlite-0.43.0.tar.gz", hash = "sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5"}, +] + [[package]] name = "lxml" -version = "5.2.2" +version = "5.3.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = true python-versions = ">=3.6" files = [ - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, - {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, - {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, - {file = 
"lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, - {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, - {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, - {file = 
"lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, - {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, - {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, - {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = 
"sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, - {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, - {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, - {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, - {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, - {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, - {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, - {file = 
"lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, - {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, - {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, - {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, - {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, - {file = 
"lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, - {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = 
"sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = 
"sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = 
"sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, ] [package.extras] @@ -1168,102 +1362,92 @@ cssselect = ["cssselect (>=0.7)"] html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.10)"] +source = ["Cython (>=3.0.11)"] [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = 
"MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] name = "mediafile" -version = "0.12.0" +version = "0.13.0" description = "Handles low-level interfacing for files' tags. 
Wraps Mutagen to" optional = false python-versions = ">=3.7" files = [ - {file = "mediafile-0.12.0-py3-none-any.whl", hash = "sha256:6b6fdb61bb151cd9d6a8a8821ce28adee604ede8a9a992f0d9dd3e835ef4899b"}, - {file = "mediafile-0.12.0.tar.gz", hash = "sha256:d75d805a06ed56150dbcea76505e700f9809abd9e98f98117ae46f5df2ccf1d7"}, + {file = "mediafile-0.13.0-py3-none-any.whl", hash = "sha256:cd8d183d0e0671b5203a86e92cf4e3338ecc892a1ec9dcd7ec0ed87779e514cb"}, + {file = "mediafile-0.13.0.tar.gz", hash = "sha256:de71063e1bffe9733d6ccad526ea7dac8a9ce760105827f81ab0cb034c729a6d"}, ] [package.dependencies] +filetype = ">=1.2.0" mutagen = ">=1.46" -six = ">=1.9" [package.extras] test = ["tox"] @@ -1284,6 +1468,79 @@ build = ["blurb", "twine", "wheel"] docs = ["sphinx"] test = ["pytest", "pytest-cov"] +[[package]] +name = "msgpack" +version = "1.1.0" +description = "MessagePack serializer" +optional = true +python-versions = ">=3.8" +files = [ + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, + {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, + {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, + {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, + {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, + {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, + {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, + {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, + {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, + {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, + {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, + {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, + {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, + {file = 
"msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, +] + [[package]] name = "multivolumefile" version = "0.2.3" @@ -1300,17 +1557,6 @@ check = ["check-manifest", "flake8", "flake8-black", "isort (>=5.0.3)", "pygment test = ["coverage[toml] (>=5.2)", "coveralls (>=2.1.1)", "hypothesis", "pyannotate", "pytest", "pytest-cov"] type = ["mypy", "mypy-extensions"] -[[package]] -name = "munkres" -version = "1.1.4" -description = "Munkres (Hungarian) algorithm for the Assignment Problem" -optional = false -python-versions = "*" -files = [ - {file = "munkres-1.1.4-py2.py3-none-any.whl", hash = "sha256:6b01867d4a8480d865aea2326e4b8f7c46431e9e55b4a2e32d989307d7bced2a"}, - {file = "munkres-1.1.4.tar.gz", hash = "sha256:fc44bf3c3979dada4b6b633ddeeb8ffbe8388ee9409e4d4e8310c2da1792db03"}, -] - [[package]] name = "musicbrainzngs" version = "0.7.1" @@ -1335,47 +1581,53 @@ files = [ [[package]] name = "mypy" -version = "1.10.0" +version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, - {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, - {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, - {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, - {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, - {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, - {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, - {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, - {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, - {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, - {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, - {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, - {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, - {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, - {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, - {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, - {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, - {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, - {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] @@ -1391,6 +1643,94 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "numba" +version = "0.60.0" +description = "compiling Python code using LLVM" +optional = true +python-versions = ">=3.9" +files = [ + {file = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}, + {file = "numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}, + {file = "numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781"}, + {file = "numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e"}, + {file = "numba-0.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198"}, + {file = "numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8"}, + {file = "numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b"}, + {file = "numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703"}, + {file = "numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8"}, + {file = "numba-0.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2"}, + {file = "numba-0.60.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7da4098db31182fc5ffe4bc42c6f24cd7d1cb8a14b59fd755bfee32e34b8404"}, + {file = "numba-0.60.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38d6ea4c1f56417076ecf8fc327c831ae793282e0ff51080c5094cb726507b1c"}, + {file = "numba-0.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:62908d29fb6a3229c242e981ca27e32a6e606cc253fc9e8faeb0e48760de241e"}, + {file = "numba-0.60.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ebaa91538e996f708f1ab30ef4d3ddc344b64b5227b67a57aa74f401bb68b9d"}, + {file = "numba-0.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:f75262e8fe7fa96db1dca93d53a194a38c46da28b112b8a4aca168f0df860347"}, + {file = "numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74"}, + {file = "numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449"}, + {file = "numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b"}, + {file = "numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25"}, + {file = "numba-0.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab"}, + {file = "numba-0.60.0.tar.gz", hash = "sha256:5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16"}, +] + +[package.dependencies] +llvmlite = "==0.43.*" +numpy = ">=1.22,<2.1" + +[[package]] +name = "numpy" +version = "2.0.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = 
"sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, + {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, + {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, + {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, + {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, + {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, + {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, + {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, + {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, + {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, +] + [[package]] name = "oauthlib" version = "3.2.2" @@ -1409,120 +1749,101 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore 
style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "pep8-naming" -version = "0.14.1" -description = "Check PEP-8 naming conventions, plugin for flake8" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pep8-naming-0.14.1.tar.gz", hash = "sha256:1ef228ae80875557eb6c1549deafed4dabbf3261cfcafa12f773fe0db9be8a36"}, - {file = "pep8_naming-0.14.1-py3-none-any.whl", hash = "sha256:63f514fc777d715f935faf185dedd679ab99526a7f2f503abb61587877f7b1c5"}, -] - -[package.dependencies] -flake8 = ">=5.0.0" - [[package]] name = "pillow" -version = "10.3.0" +version = "11.0.0" description = "Python Imaging Library (Fork)" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, - {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, - {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, - {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, - {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, - {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, - {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, - {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, - {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = 
"sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, - {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, - {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, - {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, - {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, - {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, - {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, - {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, - {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, - {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, - {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, - {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, - {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, - {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, - {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, - {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, - {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, - {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, - {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, - {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, - {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, - {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, - {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, - {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, - {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, - {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, - {file = 
"pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, - {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97"}, + {file = "pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50"}, + {file = "pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c"}, + {file = "pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1c1d72714f429a521d8d2d018badc42414c3077eb187a59579f28e4270b4b0fc"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:499c3a1b0d6fc8213519e193796eb1a86a1be4b1877d678b30f83fd979811d1a"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8b2351c85d855293a299038e1f89db92a2f35e8d2f783489c6f0b2b5f3fe8a3"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f4dba50cfa56f910241eb7f883c20f1e7b1d8f7d91c750cd0b318bad443f4d5"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5ddbfd761ee00c12ee1be86c9c0683ecf5bb14c9772ddbd782085779a63dd55b"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:45c566eb10b8967d71bf1ab8e4a525e5a93519e29ea071459ce517f6b903d7fa"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b4fd7bd29610a83a8c9b564d457cf5bd92b4e11e79a4ee4716a63c959699b306"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:cb929ca942d0ec4fac404cbf520ee6cac37bf35be479b970c4ffadf2b6a1cad9"}, + {file = "pillow-11.0.0-cp311-cp311-win32.whl", hash = "sha256:006bcdd307cc47ba43e924099a038cbf9591062e6c50e570819743f5607404f5"}, + {file = "pillow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:52a2d8323a465f84faaba5236567d212c3668f2ab53e1c74c15583cf507a0291"}, + {file = "pillow-11.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:16095692a253047fe3ec028e951fa4221a1f3ed3d80c397e83541a3037ff67c9"}, + {file = "pillow-11.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2c0a187a92a1cb5ef2c8ed5412dd8d4334272617f532d4ad4de31e0495bd923"}, + {file = "pillow-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:084a07ef0821cfe4858fe86652fffac8e187b6ae677e9906e192aafcc1b69903"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8069c5179902dcdce0be9bfc8235347fdbac249d23bd90514b7a47a72d9fecf4"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f02541ef64077f22bf4924f225c0fd1248c168f86e4b7abdedd87d6ebaceab0f"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fcb4621042ac4b7865c179bb972ed0da0218a076dc1820ffc48b1d74c1e37fe9"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:00177a63030d612148e659b55ba99527803288cea7c75fb05766ab7981a8c1b7"}, + {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8853a3bf12afddfdf15f57c4b02d7ded92c7a75a5d7331d19f4f9572a89c17e6"}, + {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3107c66e43bda25359d5ef446f59c497de2b5ed4c7fdba0894f8d6cf3822dafc"}, + {file = "pillow-11.0.0-cp312-cp312-win32.whl", hash = "sha256:86510e3f5eca0ab87429dd77fafc04693195eec7fd6a137c389c3eeb4cfb77c6"}, + {file = "pillow-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8ec4a89295cd6cd4d1058a5e6aec6bf51e0eaaf9714774e1bfac7cfc9051db47"}, + {file = "pillow-11.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:27a7860107500d813fcd203b4ea19b04babe79448268403172782754870dac25"}, + {file = "pillow-11.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcd1fb5bb7b07f64c15618c89efcc2cfa3e95f0e3bcdbaf4642509de1942a699"}, + {file = "pillow-11.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e038b0745997c7dcaae350d35859c9715c71e92ffb7e0f4a8e8a16732150f38"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ae08bd8ffc41aebf578c2af2f9d8749d91f448b3bfd41d7d9ff573d74f2a6b2"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d69bfd8ec3219ae71bcde1f942b728903cad25fafe3100ba2258b973bd2bc1b2"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:61b887f9ddba63ddf62fd02a3ba7add935d053b6dd7d58998c630e6dbade8527"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:c6a660307ca9d4867caa8d9ca2c2658ab685de83792d1876274991adec7b93fa"}, + {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:73e3a0200cdda995c7e43dd47436c1548f87a30bb27fb871f352a22ab8dcf45f"}, + {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fba162b8872d30fea8c52b258a542c5dfd7b235fb5cb352240c8d63b414013eb"}, + {file = "pillow-11.0.0-cp313-cp313-win32.whl", hash = "sha256:f1b82c27e89fffc6da125d5eb0ca6e68017faf5efc078128cfaa42cf5cb38798"}, + {file = "pillow-11.0.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:8ba470552b48e5835f1d23ecb936bb7f71d206f9dfeee64245f30c3270b994de"}, + {file = "pillow-11.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:846e193e103b41e984ac921b335df59195356ce3f71dcfd155aa79c603873b84"}, + {file = "pillow-11.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4ad70c4214f67d7466bea6a08061eba35c01b1b89eaa098040a35272a8efb22b"}, + {file = "pillow-11.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6ec0d5af64f2e3d64a165f490d96368bb5dea8b8f9ad04487f9ab60dc4bb6003"}, + {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c809a70e43c7977c4a42aefd62f0131823ebf7dd73556fa5d5950f5b354087e2"}, + {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:4b60c9520f7207aaf2e1d94de026682fc227806c6e1f55bba7606d1c94dd623a"}, + {file = "pillow-11.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1e2688958a840c822279fda0086fec1fdab2f95bf2b717b66871c4ad9859d7e8"}, + {file = "pillow-11.0.0-cp313-cp313t-win32.whl", hash = "sha256:607bbe123c74e272e381a8d1957083a9463401f7bd01287f50521ecb05a313f8"}, + {file = "pillow-11.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c39ed17edea3bc69c743a8dd3e9853b7509625c2462532e62baa0732163a904"}, + {file = "pillow-11.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:75acbbeb05b86bc53cbe7b7e6fe00fbcf82ad7c684b3ad82e3d711da9ba287d3"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2e46773dc9f35a1dd28bd6981332fd7f27bec001a918a72a79b4133cf5291dba"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2679d2258b7f1192b378e2893a8a0a0ca472234d4c2c0e6bdd3380e8dfa21b6a"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda2616eb2313cbb3eebbe51f19362eb434b18e3bb599466a1ffa76a033fb916"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ec184af98a121fb2da42642dea8a29ec80fc3efbaefb86d8fdd2606619045d"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:8594f42df584e5b4bb9281799698403f7af489fba84c34d53d1c4bfb71b7c4e7"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:c12b5ae868897c7338519c03049a806af85b9b8c237b7d675b8c5e089e4a618e"}, + {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:70fbbdacd1d271b77b7721fe3cdd2d537bbbd75d29e6300c672ec6bb38d9672f"}, + {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5178952973e588b3f1360868847334e9e3bf49d19e169bbbdfaf8398002419ae"}, + {file = "pillow-11.0.0-cp39-cp39-win32.whl", hash = "sha256:8c676b587da5673d3c75bd67dd2a8cdfeb282ca38a30f37950511766b26858c4"}, + {file = "pillow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:94f3e1780abb45062287b4614a5bc0874519c86a777d4a7ad34978e86428b8dd"}, + {file = "pillow-11.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:290f2cc809f9da7d6d622550bbf4c1e57518212da51b6a30fe8e0a270a5b78bd"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b"}, + {file = 
"pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bd2d3bdb846d757055910f0a59792d33b555800813c3b39ada1829c372ccb06"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:375b8dd15a1f5d2feafff536d47e22f69625c1aa92f12b339ec0b2ca40263273"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:daffdf51ee5db69a82dd127eabecce20729e21f7a3680cf7cbb23f0829189790"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7326a1787e3c7b0429659e0a944725e1b03eeaa10edd945a86dead1913383944"}, + {file = "pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] @@ -1531,19 +1852,19 @@ xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -1560,43 +1881,66 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pooch" +version = "1.8.2" +description = "A friend to fetch your data files" +optional = true +python-versions = ">=3.7" +files = [ + {file = "pooch-1.8.2-py3-none-any.whl", hash = "sha256:3529a57096f7198778a5ceefd5ac3ef0e4d06a6ddaf9fc2d609b806f25302c47"}, + {file = "pooch-1.8.2.tar.gz", hash = "sha256:76561f0de68a01da4df6af38e9955c4c9d1a5c90da73f7e40276a5728ec83d10"}, +] + +[package.dependencies] +packaging = ">=20.0" +platformdirs = ">=2.5.0" +requests = ">=2.19.0" + +[package.extras] +progress = ["tqdm (>=4.41.0,<5.0.0)"] +sftp = ["paramiko (>=2.7.0)"] +xxhash = ["xxhash (>=1.4.3)"] + [[package]] name = "psutil" -version = "5.9.8" +version = "6.1.0" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, - {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, - {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, - {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, - {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, - {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, - {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, - {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, - {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, - {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, + {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"}, + {file = "psutil-6.1.0-cp27-none-win32.whl", hash = "sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"}, + {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, + {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = "sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"}, + {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"}, + {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, + {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, + {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, ] [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "py7zr" -version = "0.21.0" +version = "0.22.0" description = "Pure python 7-zip library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "py7zr-0.21.0-py3-none-any.whl", hash = "sha256:ea6ded2e5c6d8539e3406cb3b0317192b32af59cff13eaf87702acc36a274da6"}, - {file = "py7zr-0.21.0.tar.gz", hash = "sha256:213a9cc46940fb8f63b4163643a8f5b36bbc798134746c3992d3bc6b14edab87"}, + {file = "py7zr-0.22.0-py3-none-any.whl", hash = "sha256:993b951b313500697d71113da2681386589b7b74f12e48ba13cc12beca79d078"}, + {file = "py7zr-0.22.0.tar.gz", hash = "sha256:c6c7aea5913535184003b73938490f9a4d8418598e533f9ca991d3b8e45a139e"}, ] [package.dependencies] @@ -1615,7 +1959,7 @@ texttable = "*" check = ["black (>=23.1.0)", "check-manifest", "flake8 (<8)", "flake8-black (>=0.3.6)", "flake8-deprecated", "flake8-isort", "isort (>=5.0.3)", "lxml", "mypy (>=0.940)", "mypy-extensions (>=0.4.1)", "pygments", "readme-renderer", "twine", "types-psutil"] debug = ["pytest", "pytest-leaks", "pytest-profiling"] docs = ["docutils", "sphinx (>=5.0)", "sphinx-a4doc", "sphinx-py3doc-enhanced-theme"] -test = ["coverage[toml] (>=5.2)", "coveralls (>=2.1.1)", "py-cpuinfo", "pyannotate", "pytest", "pytest-benchmark", "pytest-cov", "pytest-remotedata", "pytest-timeout"] +test = ["coverage[toml] (>=5.2)", "coveralls (>=2.1.1)", "py-cpuinfo", "pytest", "pytest-benchmark", "pytest-cov", "pytest-remotedata", "pytest-timeout"] test-compat = ["libarchive-c"] [[package]] @@ -1688,37 +2032,22 @@ test = ["coverage[toml] (>=5.2)", "hypothesis", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "pycairo" -version = "1.26.0" +version = "1.27.0" description = "Python interface for cairo" optional = true -python-versions = ">=3.8" +python-versions = 
">=3.9" files = [ - {file = "pycairo-1.26.0-cp310-cp310-win32.whl", hash = "sha256:696ba8024d2827e66e088a6e05a3b0aea30d289476bcb2ca47c9670d40900a50"}, - {file = "pycairo-1.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:b6690a00fb225c19f42d76660e676aba7ae7cb18f3632cb02bce7f0d9b9c3800"}, - {file = "pycairo-1.26.0-cp310-cp310-win_arm64.whl", hash = "sha256:1d54e28170a5e790269d9db4c195cca5152ff018ba7e330d0ed05d86ccc2ea7d"}, - {file = "pycairo-1.26.0-cp311-cp311-win32.whl", hash = "sha256:5986b8da3e7de7ab931d7ad527938df38f75d3a3bdea2b515c786c5ca2c5093c"}, - {file = "pycairo-1.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:d374d9ec6d2f791bf57105d87a9028db1ef2b687848f64a524e447033eae7229"}, - {file = "pycairo-1.26.0-cp311-cp311-win_arm64.whl", hash = "sha256:20a31af89d92ffd5fc60c08e65ff649f16e18621a14a40dbdb049fc74942d7a9"}, - {file = "pycairo-1.26.0-cp312-cp312-win32.whl", hash = "sha256:d63929ab5a2f890a333f2f2f51de9f1c9fe20d1bddc982c2ca577b737448d72f"}, - {file = "pycairo-1.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:8616408ae93de4824a3777ec532ea75643e4bf74e49d601062c0b1788180c962"}, - {file = "pycairo-1.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:a611e4d82ad8470138bb46d465d47e8db826d9d80b6a520ccd83ee007f2073e4"}, - {file = "pycairo-1.26.0-cp38-cp38-win32.whl", hash = "sha256:675578bc6d62d15ff8669f264783efc9c8c73e3a6f564b294a70fb45a2f78667"}, - {file = "pycairo-1.26.0-cp38-cp38-win_amd64.whl", hash = "sha256:aac447b423b33b64119ecdd1ffebf9163b07f5401c5da50c707197efdd1c918a"}, - {file = "pycairo-1.26.0-cp39-cp39-win32.whl", hash = "sha256:9fa51168010e2dfb45499df071fca2d921893f724646f3454951000a7ad0cabb"}, - {file = "pycairo-1.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:3e4e18ea03122e60abe3eb611e2849859cc950083ff85d8369328eadf3df63f5"}, - {file = "pycairo-1.26.0-cp39-cp39-win_arm64.whl", hash = "sha256:a8f3b567ba2ad55624a809823ccf75aff8d768c20216cb5888365f6fc695c1d2"}, - {file = "pycairo-1.26.0.tar.gz", hash = "sha256:2dddd0a874fbddb21e14acd9b955881ee1dc6e63b9c549a192d613a907f9cbeb"}, -] - -[[package]] -name = "pycodestyle" -version = "2.9.1" -description = "Python style guide checker" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, + {file = "pycairo-1.27.0-cp310-cp310-win32.whl", hash = "sha256:e20f431244634cf244ab6b4c3a2e540e65746eed1324573cf291981c3e65fc05"}, + {file = "pycairo-1.27.0-cp310-cp310-win_amd64.whl", hash = "sha256:03bf570e3919901572987bc69237b648fe0de242439980be3e606b396e3318c9"}, + {file = "pycairo-1.27.0-cp311-cp311-win32.whl", hash = "sha256:9a9b79f92a434dae65c34c830bb9abdbd92654195e73d52663cbe45af1ad14b2"}, + {file = "pycairo-1.27.0-cp311-cp311-win_amd64.whl", hash = "sha256:d40a6d80b15dacb3672dc454df4bc4ab3988c6b3f36353b24a255dc59a1c8aea"}, + {file = "pycairo-1.27.0-cp312-cp312-win32.whl", hash = "sha256:e2239b9bb6c05edae5f3be97128e85147a155465e644f4d98ea0ceac7afc04ee"}, + {file = "pycairo-1.27.0-cp312-cp312-win_amd64.whl", hash = "sha256:27cb4d3a80e3b9990af552818515a8e466e0317063a6e61585533f1a86f1b7d5"}, + {file = "pycairo-1.27.0-cp313-cp313-win32.whl", hash = "sha256:01505c138a313df2469f812405963532fc2511fb9bca9bdc8e0ab94c55d1ced8"}, + {file = "pycairo-1.27.0-cp313-cp313-win_amd64.whl", hash = "sha256:b0349d744c068b6644ae23da6ada111c8a8a7e323b56cbce3707cba5bdb474cc"}, + {file = 
"pycairo-1.27.0-cp39-cp39-win32.whl", hash = "sha256:f9ca8430751f1fdcd3f072377560c9e15608b9a42d61375469db853566993c9b"}, + {file = "pycairo-1.27.0-cp39-cp39-win_amd64.whl", hash = "sha256:1b1321652a6e27c4de3069709b1cae22aed2707fd8c5e889c04a95669228af2a"}, + {file = "pycairo-1.27.0.tar.gz", hash = "sha256:5cb21e7a00a2afcafea7f14390235be33497a2cce53a98a19389492a60628430"}, ] [[package]] @@ -1734,54 +2063,54 @@ files = [ [[package]] name = "pycryptodomex" -version = "3.20.0" +version = "3.21.0" description = "Cryptographic library for Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pycryptodomex-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:645bd4ca6f543685d643dadf6a856cc382b654cc923460e3a10a49c1b3832aeb"}, - {file = "pycryptodomex-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ff5c9a67f8a4fba4aed887216e32cbc48f2a6fb2673bb10a99e43be463e15913"}, - {file = "pycryptodomex-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8ee606964553c1a0bc74057dd8782a37d1c2bc0f01b83193b6f8bb14523b877b"}, - {file = "pycryptodomex-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7805830e0c56d88f4d491fa5ac640dfc894c5ec570d1ece6ed1546e9df2e98d6"}, - {file = "pycryptodomex-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:bc3ee1b4d97081260d92ae813a83de4d2653206967c4a0a017580f8b9548ddbc"}, - {file = "pycryptodomex-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:8af1a451ff9e123d0d8bd5d5e60f8e3315c3a64f3cdd6bc853e26090e195cdc8"}, - {file = "pycryptodomex-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:cbe71b6712429650e3883dc81286edb94c328ffcd24849accac0a4dbcc76958a"}, - {file = "pycryptodomex-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:76bd15bb65c14900d98835fcd10f59e5e0435077431d3a394b60b15864fddd64"}, - {file = "pycryptodomex-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:653b29b0819605fe0898829c8ad6400a6ccde096146730c2da54eede9b7b8baa"}, - {file = "pycryptodomex-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a5ec91388984909bb5398ea49ee61b68ecb579123694bffa172c3b0a107079"}, - {file = "pycryptodomex-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:108e5f1c1cd70ffce0b68739c75734437c919d2eaec8e85bffc2c8b4d2794305"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:59af01efb011b0e8b686ba7758d59cf4a8263f9ad35911bfe3f416cee4f5c08c"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:82ee7696ed8eb9a82c7037f32ba9b7c59e51dda6f105b39f043b6ef293989cb3"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91852d4480a4537d169c29a9d104dda44094c78f1f5b67bca76c29a91042b623"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca649483d5ed251d06daf25957f802e44e6bb6df2e8f218ae71968ff8f8edc4"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e186342cfcc3aafaad565cbd496060e5a614b441cacc3995ef0091115c1f6c5"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:25cd61e846aaab76d5791d006497134602a9e451e954833018161befc3b5b9ed"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = 
"sha256:9c682436c359b5ada67e882fec34689726a09c461efd75b6ea77b2403d5665b7"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7a7a8f33a1f1fb762ede6cc9cbab8f2a9ba13b196bfaf7bc6f0b39d2ba315a43"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-win32.whl", hash = "sha256:c39778fd0548d78917b61f03c1fa8bfda6cfcf98c767decf360945fe6f97461e"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:2a47bcc478741b71273b917232f521fd5704ab4b25d301669879e7273d3586cc"}, - {file = "pycryptodomex-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:1be97461c439a6af4fe1cf8bf6ca5936d3db252737d2f379cc6b2e394e12a458"}, - {file = "pycryptodomex-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:19764605feea0df966445d46533729b645033f134baeb3ea26ad518c9fdf212c"}, - {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e497413560e03421484189a6b65e33fe800d3bd75590e6d78d4dfdb7accf3b"}, - {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48217c7901edd95f9f097feaa0388da215ed14ce2ece803d3f300b4e694abea"}, - {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d00fe8596e1cc46b44bf3907354e9377aa030ec4cd04afbbf6e899fc1e2a7781"}, - {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:88afd7a3af7ddddd42c2deda43d53d3dfc016c11327d0915f90ca34ebda91499"}, - {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d3584623e68a5064a04748fb6d76117a21a7cb5eaba20608a41c7d0c61721794"}, - {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0daad007b685db36d977f9de73f61f8da2a7104e20aca3effd30752fd56f73e1"}, - {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dcac11031a71348faaed1f403a0debd56bf5404232284cf8c761ff918886ebc"}, - {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:69138068268127cd605e03438312d8f271135a33140e2742b417d027a0539427"}, - {file = "pycryptodomex-3.20.0.tar.gz", hash = "sha256:7a710b79baddd65b806402e14766c721aee8fb83381769c27920f26476276c1e"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:dbeb84a399373df84a69e0919c1d733b89e049752426041deeb30d68e9867822"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a192fb46c95489beba9c3f002ed7d93979423d1b2a53eab8771dbb1339eb3ddd"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:1233443f19d278c72c4daae749872a4af3787a813e05c3561c73ab0c153c7b0f"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbb07f88e277162b8bfca7134b34f18b400d84eac7375ce73117f865e3c80d4c"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:e859e53d983b7fe18cb8f1b0e29d991a5c93be2c8dd25db7db1fe3bd3617f6f9"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-win32.whl", hash = "sha256:ef046b2e6c425647971b51424f0f88d8a2e0a2a63d3531817968c42078895c00"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:da76ebf6650323eae7236b54b1b1f0e57c16483be6e3c1ebf901d4ada47563b6"}, + {file = "pycryptodomex-3.21.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:c07e64867a54f7e93186a55bec08a18b7302e7bee1b02fd84c6089ec215e723a"}, + {file 
= "pycryptodomex-3.21.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:56435c7124dd0ce0c8bdd99c52e5d183a0ca7fdcd06c5d5509423843f487dd0b"}, + {file = "pycryptodomex-3.21.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65d275e3f866cf6fe891411be9c1454fb58809ccc5de6d3770654c47197acd65"}, + {file = "pycryptodomex-3.21.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:5241bdb53bcf32a9568770a6584774b1b8109342bd033398e4ff2da052123832"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-macosx_10_9_universal2.whl", hash = "sha256:34325b84c8b380675fd2320d0649cdcbc9cf1e0d1526edbe8fce43ed858cdc7e"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:103c133d6cd832ae7266feb0a65b69e3a5e4dbbd6f3a3ae3211a557fd653f516"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77ac2ea80bcb4b4e1c6a596734c775a1615d23e31794967416afc14852a639d3"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aa0cf13a1a1128b3e964dc667e5fe5c6235f7d7cfb0277213f0e2a783837cc2"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46eb1f0c8d309da63a2064c28de54e5e614ad17b7e2f88df0faef58ce192fc7b"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:cc7e111e66c274b0df5f4efa679eb31e23c7545d702333dfd2df10ab02c2a2ce"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-musllinux_1_2_i686.whl", hash = "sha256:770d630a5c46605ec83393feaa73a9635a60e55b112e1fb0c3cea84c2897aa0a"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:52e23a0a6e61691134aa8c8beba89de420602541afaae70f66e16060fdcd677e"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-win32.whl", hash = "sha256:a3d77919e6ff56d89aada1bd009b727b874d464cb0e2e3f00a49f7d2e709d76e"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b0e9765f93fe4890f39875e6c90c96cb341767833cfa767f41b490b506fa9ec0"}, + {file = "pycryptodomex-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:feaecdce4e5c0045e7a287de0c4351284391fe170729aa9182f6bd967631b3a8"}, + {file = "pycryptodomex-3.21.0-pp27-pypy_73-win32.whl", hash = "sha256:365aa5a66d52fd1f9e0530ea97f392c48c409c2f01ff8b9a39c73ed6f527d36c"}, + {file = "pycryptodomex-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3efddfc50ac0ca143364042324046800c126a1d63816d532f2e19e6f2d8c0c31"}, + {file = "pycryptodomex-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df2608682db8279a9ebbaf05a72f62a321433522ed0e499bc486a6889b96bf3"}, + {file = "pycryptodomex-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5823d03e904ea3e53aebd6799d6b8ec63b7675b5d2f4a4bd5e3adcb512d03b37"}, + {file = "pycryptodomex-3.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:27e84eeff24250ffec32722334749ac2a57a5fd60332cd6a0680090e7c42877e"}, + {file = "pycryptodomex-3.21.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8ef436cdeea794015263853311f84c1ff0341b98fc7908e8a70595a68cefd971"}, + {file = "pycryptodomex-3.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a1058e6dfe827f4209c5cae466e67610bcd0d66f2f037465daa2a29d92d952b"}, + {file = "pycryptodomex-3.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9ba09a5b407cbb3bcb325221e346a140605714b5e880741dc9a1e9ecf1688d42"}, + {file = "pycryptodomex-3.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8a9d8342cf22b74a746e3c6c9453cb0cfbb55943410e3a2619bd9164b48dc9d9"}, + {file = "pycryptodomex-3.21.0.tar.gz", hash = "sha256:222d0bd05381dd25c32dd6065c071ebf084212ab79bab4599ba9e6a3e0009e6c"}, ] [[package]] name = "pydata-sphinx-theme" -version = "0.14.4" +version = "0.16.0" description = "Bootstrap-based Sphinx theme from the PyData community" -optional = false -python-versions = ">=3.8" +optional = true +python-versions = ">=3.9" files = [ - {file = "pydata_sphinx_theme-0.14.4-py3-none-any.whl", hash = "sha256:ac15201f4c2e2e7042b0cad8b30251433c1f92be762ddcefdb4ae68811d918d9"}, - {file = "pydata_sphinx_theme-0.14.4.tar.gz", hash = "sha256:f5d7a2cb7a98e35b9b49d3b02cec373ad28958c2ed5c9b1ffe6aff6c56e9de5b"}, + {file = "pydata_sphinx_theme-0.16.0-py3-none-any.whl", hash = "sha256:18c810ee4e67e05281e371e156c1fb5bb0fa1f2747240461b225272f7d8d57d8"}, + {file = "pydata_sphinx_theme-0.16.0.tar.gz", hash = "sha256:721dd26e05fa8b992d66ef545536e6cbe0110afb9865820a08894af1ad6f7707"}, ] [package.dependencies] @@ -1789,33 +2118,22 @@ accessible-pygments = "*" Babel = "*" beautifulsoup4 = "*" docutils = "!=0.17.0" -packaging = "*" pygments = ">=2.7" -sphinx = ">=5.0" +sphinx = ">=6.1" typing-extensions = "*" [package.extras] a11y = ["pytest-playwright"] -dev = ["nox", "pre-commit", "pydata-sphinx-theme[doc,test]", "pyyaml"] -doc = ["ablog (>=0.11.0rc2)", "colorama", "ipykernel", "ipyleaflet", "jupyter_sphinx", "jupyterlite-sphinx", "linkify-it-py", "matplotlib", "myst-parser", "nbsphinx", "numpy", "numpydoc", "pandas", "plotly", "rich", "sphinx-autoapi (>=3.0.0)", "sphinx-copybutton", "sphinx-design", "sphinx-favicon (>=1.0.1)", "sphinx-sitemap", "sphinx-togglebutton", "sphinxcontrib-youtube (<1.4)", "sphinxext-rediraffe", "xarray"] -test = ["pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "pyflakes" -version = "2.5.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, - {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, -] +dev = ["pandoc", "pre-commit", "pydata-sphinx-theme[doc,test]", "pyyaml", "sphinx-theme-builder[cli]", "tox"] +doc = ["ablog (>=0.11.8)", "colorama", "graphviz", "ipykernel", "ipyleaflet", "ipywidgets", "jupyter_sphinx", "jupyterlite-sphinx", "linkify-it-py", "matplotlib", "myst-parser", "nbsphinx", "numpy", "numpydoc", "pandas", "plotly", "rich", "sphinx-autoapi (>=3.0.0)", "sphinx-copybutton", "sphinx-design", "sphinx-favicon (>=1.0.1)", "sphinx-sitemap", "sphinx-togglebutton", "sphinxcontrib-youtube (>=1.4.1)", "sphinxext-rediraffe", "xarray"] +i18n = ["Babel", "jinja2"] +test = ["pytest", "pytest-cov", "pytest-regressions", "sphinx[test]"] [[package]] name = "pygments" version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." 
-optional = false +optional = true python-versions = ">=3.8" files = [ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, @@ -1827,21 +2145,17 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pygobject" -version = "3.48.2" +version = "3.50.0" description = "Python bindings for GObject Introspection" optional = true -python-versions = "<4,>=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "pygobject-3.48.2.tar.gz", hash = "sha256:c3c0a7afbe5b2c1c64dc0530109b4dd571085153dbedfbccb8ec7c5ad233f977"}, + {file = "pygobject-3.50.0.tar.gz", hash = "sha256:4500ad3dbf331773d8dedf7212544c999a76fc96b63a91b3dcac1e5925a1d103"}, ] [package.dependencies] pycairo = ">=1.16" -[package.extras] -dev = ["flake8", "pytest", "pytest-cov"] -docs = ["sphinx (>=4.0,<5.0)", "sphinx-rtd-theme (>=0.5,<2.0)"] - [[package]] name = "pylast" version = "5.3.0" @@ -1947,13 +2261,13 @@ test = ["coverage[toml] (>=5.2)", "hypothesis", "pytest (>=6.0)", "pytest-benchm [[package]] name = "pytest" -version = "8.2.2" +version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, - {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, + {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, + {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, ] [package.dependencies] @@ -1961,7 +2275,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.5,<2.0" +pluggy = ">=1.5,<2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] @@ -1969,17 +2283,17 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments [[package]] name = "pytest-cov" -version = "5.0.0" +version = "6.0.0" description = "Pytest plugin for measuring coverage." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, - {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, + {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, + {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, ] [package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} +coverage = {version = ">=7.5", extras = ["toml"]} pytest = ">=4.6" [package.extras] @@ -2034,13 +2348,13 @@ twisted = ["Twisted"] [[package]] name = "python3-discogs-client" -version = "2.7" +version = "2.7.1" description = "Python API client for Discogs" optional = false python-versions = "*" files = [ - {file = "python3-discogs-client-2.7.tar.gz", hash = "sha256:25949b9dc6130985d8e0199e4c950351e364e273f9476546bd9e171802e007a1"}, - {file = "python3_discogs_client-2.7-py3-none-any.whl", hash = "sha256:a510c07033640676a29204fe5793acbb7fb60b568d53ef8fc857ae15348ddd49"}, + {file = "python3_discogs_client-2.7.1-py3-none-any.whl", hash = "sha256:5fb5f3d2f288a8ce2c8c152444258bacedb35b7d61bc466bddae332b6c737444"}, + {file = "python3_discogs_client-2.7.1.tar.gz", hash = "sha256:f2453582f5d044ea5847d27cfe56473179e51c9a836913b46db803c20ae598f9"}, ] [package.dependencies] @@ -2048,16 +2362,8 @@ oauthlib = "*" python-dateutil = "*" requests = "*" -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] [[package]] name = "pyxdg" @@ -2072,160 +2378,156 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = 
"PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = 
"sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "pyzstd" -version = "0.16.0" +version = "0.16.2" description = "Python bindings to Zstandard (zstd) compression library." optional = false python-versions = ">=3.5" files = [ - {file = "pyzstd-0.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:78f5e65eb15d93f687715be9241c8b55d838fba9b7045d83530f8831544f1413"}, - {file = "pyzstd-0.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:35962bc10480aebd5b32fa344430bddd19ef384286501c1c8092b6a9a1ee6a99"}, - {file = "pyzstd-0.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48037009be790fca505a62705a7997eef0cd384c3ef6c10a769734660245ee73"}, - {file = "pyzstd-0.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a57f2a0531ad2cd33bb78d8555e85a250877e555a68c0add6308ceeca8d84f1"}, - {file = "pyzstd-0.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa219d5d6124f1623b39f296a1fcc4cac1d8c82f137516bd362a38c16adcd92b"}, - {file = "pyzstd-0.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f560d24557bbc54eb1aa01ee6e587d4d199b785593462567ddf752de3c1c4974"}, - {file = "pyzstd-0.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d14862ce066da0494e0f9466afc3b8fcd6c03f7250323cf8ef62c67158c77e57"}, - {file = "pyzstd-0.16.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5d0db66651ed5a866a1452e7a450e41a5ec743abbeea1f1bc85ef7c64f5f6b8f"}, - {file = "pyzstd-0.16.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f47aada7fdc6bcad8ec4ee4ff00a8d2d9a0e05b5516df3f304afbf527b026221"}, - {file = "pyzstd-0.16.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5c43e2222bbbe660dea8fe335f5c633b3c9ed10628a4b53a160ddd54d15cffc2"}, - {file = "pyzstd-0.16.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:d897ec18822e348f8ef9a17e421716ed224a3726fde806aae04469fec8f0ac9d"}, - {file = "pyzstd-0.16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4d5c98986d774e9321fb1d4fe0362658560e14c1d7afbe2d298b89a24c2f7b4f"}, - {file = "pyzstd-0.16.0-cp310-cp310-win32.whl", hash = "sha256:84135917c99476c6abeee420ffd005a856d8fde0e5f585b0c484d5923392035b"}, - {file = "pyzstd-0.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:06b9dfd615fb5635c05153431e520954a0e81683c5a6e3ed1134f60cc45b80f1"}, - {file = "pyzstd-0.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c9c1ede5c4e35b059e8734dfa8d23a59b8fcfe3e0ece4f7d226bc5e1816512c9"}, - {file = "pyzstd-0.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75f4363157777cbcbbd14ff823388fddfca597d44c77c27473c4c4000d7a5c99"}, - {file = "pyzstd-0.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48ff680078aec3b9515f149010981c7feeef6c2706987ac7bdc7cc1ea05f8f7d"}, - {file = "pyzstd-0.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbeaa0af865427405a1c0e8c65841a23de66af8ca5d796522f7b105386cd8522"}, - {file = "pyzstd-0.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f27e083a63b9463fd2640065af1b924f05831839f23d936a97c4f510a54f6b"}, - {file = 
"pyzstd-0.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dd4592c2fca923041c57aa2bfe428de14cc45f3a00ab825b353160994bc15e7"}, - {file = "pyzstd-0.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f22fb00bfcca4b2e0b36afd4f3a3194c1bc93b2a76e51932ccfd3b6aa62501"}, - {file = "pyzstd-0.16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:586538aa2a992a55c10d88c58166e6023968a9825719bce5a09397b73eea658f"}, - {file = "pyzstd-0.16.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8e51d69446d96f5767e0f1b0676341d5d576c151dfe3dd14aff7a163db1b4d7c"}, - {file = "pyzstd-0.16.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8c675edd26cd2531163e51dcb3c7c73145e2fa3b77a1ff59ce9ed963ff56017"}, - {file = "pyzstd-0.16.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a765c5fc05fe1c843863cc3723e39e8207c28d9a7152ee6d621fa3908ef4880"}, - {file = "pyzstd-0.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79f4c9f1d7906eb890dafae4820f69bd24658297e9ebcdd74867330e8e7bf9b0"}, - {file = "pyzstd-0.16.0-cp311-cp311-win32.whl", hash = "sha256:6aa796663db6d1d01ebdcd80022de840005ae173e01a7b03b3934811b7ae39bc"}, - {file = "pyzstd-0.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:7a82cd4e772e5d1400502d68da7ecd71a6f1ff37243017f284bee3d2106a2496"}, - {file = "pyzstd-0.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e0f5a1865a00798a74d50fcc9956a3d7fa7413cbc1c6d6d04833d89f36e35226"}, - {file = "pyzstd-0.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:00954290d6d46ab13535becbbc1327c56f0a9c5d7b7cf967e6587c1395cade42"}, - {file = "pyzstd-0.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:796a29cbb6414b6cb84d8e7448262ba286847b946de9a149dec97469a4789552"}, - {file = "pyzstd-0.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c68761529a43358151ac507aeb9c6b7c1a990235ce7b7d41f8ea62c62d4679e"}, - {file = "pyzstd-0.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8436ce4fa7e7ddaa8d29717fd73e0699883ef6e78ef4d785c244779a7ad1942b"}, - {file = "pyzstd-0.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:349d643aeb8d7d9e0a407cef29d6210afbe646cc19b4e237456e585591eda223"}, - {file = "pyzstd-0.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4cf0fed2d5c9de3da211dceff3ed9a09b8f998f7df57da847145863a786454b"}, - {file = "pyzstd-0.16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:691cadd48f225097a2588e7db492ac88c669c061208749bc0200ee39e4425e32"}, - {file = "pyzstd-0.16.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:33efaf2cc4efd2b100699d953cd70b5a54c3ca912297211fda01875f4636f655"}, - {file = "pyzstd-0.16.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b3cc09eecd318310cfd6e7f245248cf16ca014ea5903580d72231d93330952de"}, - {file = "pyzstd-0.16.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:89187af1ca5a9b65c477817e0fe7e411f4edd99e5575aaaef6a9e5ff62028854"}, - {file = "pyzstd-0.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7d5888e206190d36fbffed6d7e9cacd79e64fd34e9a07359e16862973d90b33"}, - {file = "pyzstd-0.16.0-cp312-cp312-win32.whl", hash = "sha256:3c5f28a145677431347772b43a9604b67691b16e233ec7a92fc77fc5fb670608"}, - {file = "pyzstd-0.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:8a2d5a8b74db3df772bb4f230319241e73629b04cb777b22f9dcd2084d92977a"}, - {file = 
"pyzstd-0.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:94fe8c5f1f11397b5db8b1850168e5bed13b3f3e1bc36e4292819d85be51a63c"}, - {file = "pyzstd-0.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d1e6ae36c717abd32b55a275d7fbf9041b6de3a103639739ec3e8c8283773fb3"}, - {file = "pyzstd-0.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33bc6f6048f7f7fc506e6ad03fb822a78c2b8209e73b2eddc69d3d6767d0385c"}, - {file = "pyzstd-0.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c4cdb0e407bec2f3ece10275449822575f6634727ee1a18e87c5e5a7b565bb1"}, - {file = "pyzstd-0.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e4cf6d11427d43734e8cb246ecfb7af169983ef796b415379602ea0605f5116"}, - {file = "pyzstd-0.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c0bbdb3ae1c300941c1f89219a8d09d142ddb7bfc78e61da80c8bdc03c05be8"}, - {file = "pyzstd-0.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c34c06a6496b4aacdab03133671dd5638417bda09a1f186ba1a39c1dbd1add24"}, - {file = "pyzstd-0.16.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:29ca6db3fb72d17bcec091b9ba485c715f63ca00bfcd993f92cb20037ae98b25"}, - {file = "pyzstd-0.16.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:26e42ccb76a53c1b943021eeb0eb4d78f46093c16e4e658a7204c838d5b36df0"}, - {file = "pyzstd-0.16.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:76697baa4d9fd621bd5b99719d3b55fadeb665af9a49523debfc9ae5fbefef13"}, - {file = "pyzstd-0.16.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:708c442f8f6540ffad24a894bdea3c019250e02dcdbd0fbd27fc977b1a88b4f2"}, - {file = "pyzstd-0.16.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:994a21a75d7b2602a78c2f88f809427ce1051e43af7aad6cda524ccdc859354e"}, - {file = "pyzstd-0.16.0-cp38-cp38-win32.whl", hash = "sha256:80962ff81a3389b5579d1206bea1bb48da38991407442d2a9287f6da1ccb2c80"}, - {file = "pyzstd-0.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:363c11a4d60fa0e2e7437f7494291c24eaf2752c8d8e3adf8f92cb0168073464"}, - {file = "pyzstd-0.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:094cec5425097ae1f9a40bb02de917d2274bfa872665fe2e5b4101ee94d8b31d"}, - {file = "pyzstd-0.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ca9f1f6bd487c9b990e509c17e0a701f554db9e77bd5121c27f1db4594ac4c0a"}, - {file = "pyzstd-0.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff99a11dd76aec5a5234c1158d6b8dacb61b208f3f30a2bf7ae3b23243190581"}, - {file = "pyzstd-0.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2820b607be0346b3e24b097d759393bd4bcccc0620e8e825591061a2c3a0add5"}, - {file = "pyzstd-0.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef883837c16c076f11da37323f589779806073eeacaef3912f2da0359cb8c2cf"}, - {file = "pyzstd-0.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c3181a462cdb55df5ddeffe3cf5223cda36c81feceeb231688af08d30f11022"}, - {file = "pyzstd-0.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80741b9f18149264acb639287347cfc6eecff109b5c6d95dbf7222756b107b57"}, - {file = "pyzstd-0.16.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fb70083bf00426194a85d69939c52b1759462873bf6e4d62f481e2bc3e642ea1"}, - {file = "pyzstd-0.16.0-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:44f818ea8c191285365a0add6fc03f88225f1fdcff570dc78e9f548444042441"}, - {file = "pyzstd-0.16.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:983ea93ed937d329c88ef15d5e3b09e32372590c1a80586b2013f17aed436cb8"}, - {file = "pyzstd-0.16.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:0eadba403ec861fa4c600ad43dbd8ac17b7c22a796d3bd9d92918f4e8a15a6e8"}, - {file = "pyzstd-0.16.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a4e12b6702481ace7071357c1b81b9faf6f660da55ff9ccd6383fed474348cc6"}, - {file = "pyzstd-0.16.0-cp39-cp39-win32.whl", hash = "sha256:bc5e630db572362aef4d8a78f82a40e2b9756de7622feb07031bd400a696ad78"}, - {file = "pyzstd-0.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:8ef9fa7fe28dd6b7d09b8be89aea4e8f2d18b23a89294f51aa48dbc6c306a039"}, - {file = "pyzstd-0.16.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1b8db95f23d928ba87297afe6d4fff21bbb1af343147ff50c174674312afc29d"}, - {file = "pyzstd-0.16.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3f661848fa1984f3b17da676c88ccd08d8c3fab5501a1d1c8ac5abece48566f2"}, - {file = "pyzstd-0.16.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acfe529ff44d379ee889f03c2d353f94b1f16c83a92852061f9672982a3ef32d"}, - {file = "pyzstd-0.16.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:493edd702bc16dae1f4d76461688714c488af1b33f5b3a77c1a86d5c81240f9e"}, - {file = "pyzstd-0.16.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10143cad228ebeb9eda7793995b2d0b3fef0685258d9b794f6320824302c47d7"}, - {file = "pyzstd-0.16.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:784f7f87ae2e25459ef78282fbe9f0d2fec9ced84e4acb5d28621a0db274a13b"}, - {file = "pyzstd-0.16.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:35ba0ee9d6d502da2bc01d78d22f51a1812ff8d55fb444447f7782f5ce8c1e35"}, - {file = "pyzstd-0.16.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:e8eae552db2aa587c986f460915786bf9058a88d831d562cadba01f3069736a9"}, - {file = "pyzstd-0.16.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e31e0d2023b693ca530d95df7cff8d736f66b755018398bc518160f91e80bd0a"}, - {file = "pyzstd-0.16.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0fa1ef68839d99b0c0d66fe060303f7f2916f021289a7e04a818ef9461bbbe1"}, - {file = "pyzstd-0.16.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a55aac43a685b7d2b9e7c4f9f3768ad6e0d5f9ad7698b8bf9124fbeb814d43"}, - {file = "pyzstd-0.16.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:20259fa302f1050bd02d78d93db78870bed385c6d3d299990fe806095426869f"}, - {file = "pyzstd-0.16.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bd27ab78269148c65d988a6b26471d621d4cc6eed6b92462b7f8850162e5c4f2"}, - {file = "pyzstd-0.16.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5d8a3263b7e23a3593eb4fcc5cc77e053c7d15c874db16ce6ee8b4d94f8d825"}, - {file = "pyzstd-0.16.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75f5e862e1646f1688e97f4aa69988d6589a1e036f081e98a3f202fa4647e69b"}, - {file = "pyzstd-0.16.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19deddb2975af861320fd7b68196fbb2a4a8500897354919baf693702786e349"}, - {file = "pyzstd-0.16.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c48b4368b832233205a74e9f1dfe2647d9bc49ea8357b09963fd5f15062bdd0a"}, - {file = "pyzstd-0.16.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:74521d819ceea90794aded974cc3024c65c094050e6c4a6f4b7478af3461e3ad"}, - {file = "pyzstd-0.16.0.tar.gz", hash = "sha256:fd43a0ae38ae15223fb1057729001829c3336e90f4acf04cf12ebdec33346658"}, + {file = "pyzstd-0.16.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:637376c8f8cbd0afe1cab613f8c75fd502bd1016bf79d10760a2d5a00905fe62"}, + {file = "pyzstd-0.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3e7a7118cbcfa90ca2ddbf9890c7cb582052a9a8cf2b7e2c1bbaf544bee0f16a"}, + {file = "pyzstd-0.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a74cb1ba05876179525144511eed3bd5a509b0ab2b10632c1215a85db0834dfd"}, + {file = "pyzstd-0.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c084dde218ffbf112e507e72cbf626b8f58ce9eb23eec129809e31037984662"}, + {file = "pyzstd-0.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4646459ebd3d7a59ddbe9312f020bcf7cdd1f059a2ea07051258f7af87a0b31"}, + {file = "pyzstd-0.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14bfc2833cc16d7657fc93259edeeaa793286e5031b86ca5dc861ba49b435fce"}, + {file = "pyzstd-0.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f27d488f19e5bf27d1e8aa1ae72c6c0a910f1e1ffbdf3c763d02ab781295dd27"}, + {file = "pyzstd-0.16.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e134ca968ff7dcfa8b7d433318f01d309b74ee87e0d2bcadc117c08e1c80db"}, + {file = "pyzstd-0.16.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6b5f64cd3963c58b8f886eb6139bb8d164b42a74f8a1bb95d49b4804f4592d61"}, + {file = "pyzstd-0.16.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0b4a8266871b9e0407f9fd8e8d077c3558cf124d174e6357b523d14f76971009"}, + {file = "pyzstd-0.16.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1bb19f7acac30727354c25125922aa59f44d82e0e6a751df17d0d93ff6a73853"}, + {file = "pyzstd-0.16.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3008325b7368e794d66d4d98f2ee1d867ef5afd09fd388646ae02b25343c420d"}, + {file = "pyzstd-0.16.2-cp310-cp310-win32.whl", hash = "sha256:66f2d5c0bbf5bf32c577aa006197b3525b80b59804450e2c32fbcc2d16e850fd"}, + {file = "pyzstd-0.16.2-cp310-cp310-win_amd64.whl", hash = "sha256:5fe5f5459ebe1161095baa7a86d04ab625b35148f6c425df0347ed6c90a2fd58"}, + {file = "pyzstd-0.16.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c1bdbe7f01c7f37d5cd07be70e32a84010d7dfd6677920c0de04cf7d245b60d"}, + {file = "pyzstd-0.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1882a3ceaaf9adc12212d587d150ec5e58cfa9a765463d803d739abbd3ac0f7a"}, + {file = "pyzstd-0.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea46a8b9d60f6a6eba29facba54c0f0d70328586f7ef0da6f57edf7e43db0303"}, + {file = "pyzstd-0.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d7865bc06589cdcecdede0deefe3da07809d5b7ad9044c224d7b2a0867256957"}, + {file = "pyzstd-0.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52f938a65b409c02eb825e8c77fc5ea54508b8fc44b5ce226db03011691ae8cc"}, + {file = "pyzstd-0.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e97620d3f53a0282947304189deef7ca7f7d0d6dfe15033469dc1c33e779d5e5"}, + {file = 
"pyzstd-0.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c40e9983d017108670dc8df68ceef14c7c1cf2d19239213274783041d0e64c"}, + {file = "pyzstd-0.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7cd4b3b2c6161066e4bde6af1cf78ed3acf5d731884dd13fdf31f1db10830080"}, + {file = "pyzstd-0.16.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:454f31fd84175bb203c8c424f2255a343fa9bd103461a38d1bf50487c3b89508"}, + {file = "pyzstd-0.16.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5ef754a93743f08fb0386ce3596780bfba829311b49c8f4107af1a4bcc16935d"}, + {file = "pyzstd-0.16.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:be81081db9166e10846934f0e3576a263cbe18d81eca06e6a5c23533f8ce0dc6"}, + {file = "pyzstd-0.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:738bcb2fa1e5f1868986f5030955e64de53157fa1141d01f3a4daf07a1aaf644"}, + {file = "pyzstd-0.16.2-cp311-cp311-win32.whl", hash = "sha256:0ea214c9b97046867d1657d55979021028d583704b30c481a9c165191b08d707"}, + {file = "pyzstd-0.16.2-cp311-cp311-win_amd64.whl", hash = "sha256:c17c0fc02f0e75b0c7cd21f8eaf4c6ce4112333b447d93da1773a5f705b2c178"}, + {file = "pyzstd-0.16.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d4081fd841a9efe9ded7290ee7502dbf042c4158b90edfadea3b8a072c8ec4e1"}, + {file = "pyzstd-0.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fd3fa45d2aeb65367dd702806b2e779d13f1a3fa2d13d5ec777cfd09de6822de"}, + {file = "pyzstd-0.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8b5f0d2c07994a5180d8259d51df6227a57098774bb0618423d7eb4a7303467"}, + {file = "pyzstd-0.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60c9d25b15c7ae06ed5d516d096a0d8254f9bed4368b370a09cccf191eaab5cb"}, + {file = "pyzstd-0.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29acf31ce37254f6cad08deb24b9d9ba954f426fa08f8fae4ab4fdc51a03f4ae"}, + {file = "pyzstd-0.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec77612a17697a9f7cf6634ffcee616eba9b997712fdd896e77fd19ab3a0618"}, + {file = "pyzstd-0.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:313ea4974be93be12c9a640ab40f0fc50a023178aae004a8901507b74f190173"}, + {file = "pyzstd-0.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e91acdefc8c2c6c3b8d5b1b5fe837dce4e591ecb7c0a2a50186f552e57d11203"}, + {file = "pyzstd-0.16.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:929bd91a403539e72b5b5cb97f725ac4acafe692ccf52f075e20cd9bf6e5493d"}, + {file = "pyzstd-0.16.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:740837a379aa32d110911ebcbbc524f9a9b145355737527543a884bd8777ca4f"}, + {file = "pyzstd-0.16.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:adfc0e80dd157e6d1e0b0112c8ecc4b58a7a23760bd9623d74122ef637cfbdb6"}, + {file = "pyzstd-0.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:79b183beae1c080ad3dca39019e49b7785391947f9aab68893ad85d27828c6e7"}, + {file = "pyzstd-0.16.2-cp312-cp312-win32.whl", hash = "sha256:b8d00631a3c466bc313847fab2a01f6b73b3165de0886fb03210e08567ae3a89"}, + {file = "pyzstd-0.16.2-cp312-cp312-win_amd64.whl", hash = "sha256:c0d43764e9a60607f35d8cb3e60df772a678935ab0e02e2804d4147377f4942c"}, + {file = "pyzstd-0.16.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3ae9ae7ad730562810912d7ecaf1fff5eaf4c726f4b4dfe04784ed5f06d7b91f"}, + {file = 
"pyzstd-0.16.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2ce8d3c213f76a564420f3d0137066ac007ce9fb4e156b989835caef12b367a7"}, + {file = "pyzstd-0.16.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2c14dac23c865e2d78cebd9087e148674b7154f633afd4709b4cd1520b99a61"}, + {file = "pyzstd-0.16.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4527969d66a943e36ef374eda847e918077de032d58b5df84d98ffd717b6fa77"}, + {file = "pyzstd-0.16.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd8256149b88e657e99f31e6d4b114c8ff2935951f1d8bb8e1fe501b224999c0"}, + {file = "pyzstd-0.16.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bd1f1822d65c9054bf36d35307bf8ed4aa2d2d6827431761a813628ff671b1d"}, + {file = "pyzstd-0.16.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6733f4d373ec9ad2c1976cf06f973a3324c1f9abe236d114d6bb91165a397d"}, + {file = "pyzstd-0.16.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7bec165ab6524663f00b69bfefd13a46a69fed3015754abaf81b103ec73d92c6"}, + {file = "pyzstd-0.16.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e4460fa6949aac6528a1ad0de8871079600b12b3ef4db49316306786a3598321"}, + {file = "pyzstd-0.16.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75df79ea0315c97d88337953a17daa44023dbf6389f8151903d371513f503e3c"}, + {file = "pyzstd-0.16.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:93e1d45f4a196afb6f18682c79bdd5399277ead105b67f30b35c04c207966071"}, + {file = "pyzstd-0.16.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:075e18b871f38a503b5d23e40a661adfc750bd4bd0bb8b208c1e290f3ceb8fa2"}, + {file = "pyzstd-0.16.2-cp313-cp313-win32.whl", hash = "sha256:9e4295eb299f8d87e3487852bca033d30332033272a801ca8130e934475e07a9"}, + {file = "pyzstd-0.16.2-cp313-cp313-win_amd64.whl", hash = "sha256:18deedc70f858f4cf574e59f305d2a0678e54db2751a33dba9f481f91bc71c28"}, + {file = "pyzstd-0.16.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a9892b707ef52f599098b1e9528df0e7849c5ec01d3e8035fb0e67de4b464839"}, + {file = "pyzstd-0.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4fbd647864341f3c174c4a6d7f20e6ea6b4be9d840fb900dc0faf0849561badc"}, + {file = "pyzstd-0.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ac2c15656cc6194c4fed1cb0e8159f9394d4ea1d58be755448743d2ec6c9c4"}, + {file = "pyzstd-0.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b239fb9a20c1be3374b9a2bd183ba624fd22ad7a3f67738c0d80cda68b4ae1d3"}, + {file = "pyzstd-0.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc52400412cdae2635e0978b8d6bcc0028cc638fdab2fd301f6d157675d26896"}, + {file = "pyzstd-0.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b766a6aeb8dbb6c46e622e7a1aebfa9ab03838528273796941005a5ce7257b1"}, + {file = "pyzstd-0.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd4b8676052f9d59579242bf3cfe5fd02532b6a9a93ab7737c118ae3b8509dc"}, + {file = "pyzstd-0.16.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1c6c0a677aac7c0e3d2d2605d4d68ffa9893fdeeb2e071040eb7c8750969d463"}, + {file = "pyzstd-0.16.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:15f9c2d612e7e2023d68d321d1b479846751f792af89141931d44e82ae391394"}, + {file = "pyzstd-0.16.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:11740bff847aad23beef4085a1bb767d101895881fe891f0a911aa27d43c372c"}, + {file = "pyzstd-0.16.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:b9067483ebe860e4130a03ee665b3d7be4ec1608b208e645d5e7eb3492379464"}, + {file = "pyzstd-0.16.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:988f0ba19b14c2fe0afefc444ac1edfb2f497b7d7c3212b2f587504cc2ec804e"}, + {file = "pyzstd-0.16.2-cp39-cp39-win32.whl", hash = "sha256:8855acb1c3e3829030b9e9e9973b19e2d70f33efb14ad5c474b4d086864c959c"}, + {file = "pyzstd-0.16.2-cp39-cp39-win_amd64.whl", hash = "sha256:018e88378df5e76f5e1d8cf4416576603b6bc4a103cbc66bb593eaac54c758de"}, + {file = "pyzstd-0.16.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4b631117b97a42ff6dfd0ffc885a92fff462d7c34766b28383c57b996f863338"}, + {file = "pyzstd-0.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:56493a3fbe1b651a02102dd0902b0aa2377a732ff3544fb6fb3f114ca18db52f"}, + {file = "pyzstd-0.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1eae9bdba4a1e5d3181331f403114ff5b8ce0f4b569f48eba2b9beb2deef1e4"}, + {file = "pyzstd-0.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1be6972391c8aeecc7e61feb96ffc8e77a401bcba6ed994e7171330c45a1948"}, + {file = "pyzstd-0.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:761439d687e3a5687c2ff5c6a1190e1601362a4a3e8c6c82ff89719d51d73e19"}, + {file = "pyzstd-0.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f5fbdb8cf31b60b2dc586fecb9b73e2f172c21a0b320ed275f7b8d8a866d9003"}, + {file = "pyzstd-0.16.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:183f26e34f9becf0f2db38be9c0bfb136753d228bcb47c06c69175901bea7776"}, + {file = "pyzstd-0.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:88318b64b5205a67748148d6d244097fa6cf61fcea02ad3435511b9e7155ae16"}, + {file = "pyzstd-0.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73142aa2571b6480136a1865ebda8257e09eabbc8bcd54b222202f6fa4febe1e"}, + {file = "pyzstd-0.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d3f8877c29a97f1b1bba16f3d3ab01ad10ad3da7bad317aecf36aaf8848b37c"}, + {file = "pyzstd-0.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f25754562473ac7de856b8331ebd5964f5d85601045627a5f0bb0e4e899990"}, + {file = "pyzstd-0.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6ce17e84310080c55c02827ad9bb17893c00a845c8386a328b346f814aabd2c1"}, + {file = "pyzstd-0.16.2.tar.gz", hash = "sha256:179c1a2ea1565abf09c5f2fd72f9ce7c54b2764cf7369e05c0bfd8f1f67f63d2"}, ] [[package]] @@ -2243,7 +2545,7 @@ files = [ name = "reflink" version = "0.2.2" description = "Python reflink wraps around platform specific reflink implementations" -optional = false +optional = true python-versions = "*" files = [ {file = "reflink-0.2.2-cp36-cp36m-win32.whl", hash = "sha256:8435c7153af4d6e66dc8acb48a9372c8ec6f978a09cdf7b57cd6656d969e343a"}, @@ -2293,15 +2595,35 @@ requests = ">=2.0.0" [package.extras] rsa = ["oauthlib[signedtoken] (>=3.0.0)"] +[[package]] +name = "resampy" +version = "0.4.3" +description = "Efficient signal resampling" +optional = true +python-versions = "*" +files = [ + {file = "resampy-0.4.3-py3-none-any.whl", hash = "sha256:ad2ed64516b140a122d96704e32bc0f92b23f45419e8b8f478e5a05f83edcebd"}, + {file = "resampy-0.4.3.tar.gz", hash = 
"sha256:a0d1c28398f0e55994b739650afef4e3974115edbe96cd4bb81968425e916e47"}, +] + +[package.dependencies] +numba = ">=0.53" +numpy = ">=1.17" + +[package.extras] +design = ["optuna (>=2.10.0)"] +docs = ["numpydoc", "sphinx (!=1.3.1)"] +tests = ["pytest (<8)", "pytest-cov", "scipy (>=1.1)"] + [[package]] name = "responses" -version = "0.25.2" +version = "0.25.3" description = "A utility library for mocking out the `requests` Python library." optional = false python-versions = ">=3.8" files = [ - {file = "responses-0.25.2-py3-none-any.whl", hash = "sha256:b59707ea25de536d324670791ab073fafd41f3a351cec9c51cb6147089a9a30a"}, - {file = "responses-0.25.2.tar.gz", hash = "sha256:77a61ad7e6016ed2ac00739b7efa5f35c42351d5b9b5d26bb1be87f197632487"}, + {file = "responses-0.25.3-py3-none-any.whl", hash = "sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb"}, + {file = "responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba"}, ] [package.dependencies] @@ -2312,6 +2634,125 @@ urllib3 = ">=1.25.10,<3.0" [package.extras] tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] +[[package]] +name = "ruff" +version = "0.8.1" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.8.1-py3-none-linux_armv6l.whl", hash = "sha256:fae0805bd514066f20309f6742f6ee7904a773eb9e6c17c45d6b1600ca65c9b5"}, + {file = "ruff-0.8.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8a4f7385c2285c30f34b200ca5511fcc865f17578383db154e098150ce0a087"}, + {file = "ruff-0.8.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:cd054486da0c53e41e0086e1730eb77d1f698154f910e0cd9e0d64274979a209"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2029b8c22da147c50ae577e621a5bfbc5d1fed75d86af53643d7a7aee1d23871"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2666520828dee7dfc7e47ee4ea0d928f40de72056d929a7c5292d95071d881d1"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:333c57013ef8c97a53892aa56042831c372e0bb1785ab7026187b7abd0135ad5"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:288326162804f34088ac007139488dcb43de590a5ccfec3166396530b58fb89d"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b12c39b9448632284561cbf4191aa1b005882acbc81900ffa9f9f471c8ff7e26"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:364e6674450cbac8e998f7b30639040c99d81dfb5bbc6dfad69bc7a8f916b3d1"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b22346f845fec132aa39cd29acb94451d030c10874408dbf776af3aaeb53284c"}, + {file = "ruff-0.8.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b2f2f7a7e7648a2bfe6ead4e0a16745db956da0e3a231ad443d2a66a105c04fa"}, + {file = "ruff-0.8.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:adf314fc458374c25c5c4a4a9270c3e8a6a807b1bec018cfa2813d6546215540"}, + {file = "ruff-0.8.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a885d68342a231b5ba4d30b8c6e1b1ee3a65cf37e3d29b3c74069cdf1ee1e3c9"}, + {file = "ruff-0.8.1-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:d2c16e3508c8cc73e96aa5127d0df8913d2290098f776416a4b157657bee44c5"}, + {file = "ruff-0.8.1-py3-none-win32.whl", hash = "sha256:93335cd7c0eaedb44882d75a7acb7df4b77cd7cd0d2255c93b28791716e81790"}, + {file = "ruff-0.8.1-py3-none-win_amd64.whl", hash = "sha256:2954cdbe8dfd8ab359d4a30cd971b589d335a44d444b6ca2cb3d1da21b75e4b6"}, + {file = "ruff-0.8.1-py3-none-win_arm64.whl", hash = "sha256:55873cc1a473e5ac129d15eccb3c008c096b94809d693fc7053f588b67822737"}, + {file = "ruff-0.8.1.tar.gz", hash = "sha256:3583db9a6450364ed5ca3f3b4225958b24f78178908d5c4bc0f46251ccca898f"}, +] + +[[package]] +name = "scikit-learn" +version = "1.5.2" +description = "A set of python modules for machine learning and data mining" +optional = true +python-versions = ">=3.9" +files = [ + {file = "scikit_learn-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:299406827fb9a4f862626d0fe6c122f5f87f8910b86fe5daa4c32dcd742139b6"}, + {file = "scikit_learn-1.5.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:2d4cad1119c77930b235579ad0dc25e65c917e756fe80cab96aa3b9428bd3fb0"}, + {file = "scikit_learn-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c412ccc2ad9bf3755915e3908e677b367ebc8d010acbb3f182814524f2e5540"}, + {file = "scikit_learn-1.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a686885a4b3818d9e62904d91b57fa757fc2bed3e465c8b177be652f4dd37c8"}, + {file = "scikit_learn-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:c15b1ca23d7c5f33cc2cb0a0d6aaacf893792271cddff0edbd6a40e8319bc113"}, + {file = "scikit_learn-1.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03b6158efa3faaf1feea3faa884c840ebd61b6484167c711548fce208ea09445"}, + {file = "scikit_learn-1.5.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1ff45e26928d3b4eb767a8f14a9a6efbf1cbff7c05d1fb0f95f211a89fd4f5de"}, + {file = "scikit_learn-1.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f763897fe92d0e903aa4847b0aec0e68cadfff77e8a0687cabd946c89d17e675"}, + {file = "scikit_learn-1.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8b0ccd4a902836493e026c03256e8b206656f91fbcc4fde28c57a5b752561f1"}, + {file = "scikit_learn-1.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:6c16d84a0d45e4894832b3c4d0bf73050939e21b99b01b6fd59cbb0cf39163b6"}, + {file = "scikit_learn-1.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f932a02c3f4956dfb981391ab24bda1dbd90fe3d628e4b42caef3e041c67707a"}, + {file = "scikit_learn-1.5.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:3b923d119d65b7bd555c73be5423bf06c0105678ce7e1f558cb4b40b0a5502b1"}, + {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd"}, + {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6"}, + {file = "scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1"}, + {file = "scikit_learn-1.5.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5"}, + {file = "scikit_learn-1.5.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908"}, + {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3"}, + {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12"}, + {file = "scikit_learn-1.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f"}, + {file = "scikit_learn-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9"}, + {file = "scikit_learn-1.5.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1"}, + {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8"}, + {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca64b3089a6d9b9363cd3546f8978229dcbb737aceb2c12144ee3f70f95684b7"}, + {file = "scikit_learn-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:3bed4909ba187aca80580fe2ef370d9180dcf18e621a27c4cf2ef10d279a7efe"}, + {file = "scikit_learn-1.5.2.tar.gz", hash = "sha256:b4237ed7b3fdd0a4882792e68ef2545d5baa50aca3bb45aa7df468138ad8f94d"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=3.1.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] +build = ["cython (>=3.0.10)", "meson-python (>=0.16.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-design (>=0.6.0)", "sphinx-gallery (>=0.16.0)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] +maintenance = ["conda-lock (==2.5.6)"] +tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.13.1" +description = "Fundamental algorithms for scientific computing in Python" +optional = true +python-versions = ">=3.9" +files = [ + {file = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"}, + {file = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"}, + {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989"}, + {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f"}, + {file = 
"scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94"}, + {file = "scipy-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54"}, + {file = "scipy-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9"}, + {file = "scipy-1.13.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326"}, + {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299"}, + {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa"}, + {file = "scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59"}, + {file = "scipy-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b"}, + {file = "scipy-1.13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d72782f39716b2b3509cd7c33cdc08c96f2f4d2b06d51e52fb45a19ca0c86a1"}, + {file = "scipy-1.13.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:017367484ce5498445aade74b1d5ab377acdc65e27095155e448c88497755a5d"}, + {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627"}, + {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884"}, + {file = "scipy-1.13.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16"}, + {file = "scipy-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:cdd7dacfb95fea358916410ec61bbc20440f7860333aee6d882bb8046264e949"}, + {file = "scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5"}, + {file = "scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24"}, + {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004"}, + {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d"}, + {file = "scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c"}, + {file = "scipy-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2"}, + {file = "scipy-1.13.1.tar.gz", hash = "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c"}, +] + +[package.dependencies] +numpy = ">=1.22.4,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] +test = 
["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + [[package]] name = "six" version = "1.16.0" @@ -2338,7 +2779,7 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -optional = false +optional = true python-versions = "*" files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, @@ -2347,13 +2788,13 @@ files = [ [[package]] name = "soco" -version = "0.30.4" +version = "0.30.6" description = "SoCo (Sonos Controller) is a simple library to control Sonos speakers." optional = true python-versions = ">=3.6" files = [ - {file = "soco-0.30.4-py2.py3-none-any.whl", hash = "sha256:b1406cbfd7d42bceeb5f46a32d272dbf240029495b7aad41200a6bb77fc4bd99"}, - {file = "soco-0.30.4.tar.gz", hash = "sha256:97c77ad353f8233117659250c71113419d288bc5447148c6bd4a2486e9cfd3be"}, + {file = "soco-0.30.6-py2.py3-none-any.whl", hash = "sha256:06c486218d0558a89276ed573ae2264d8e9bfd95a46a7dc253e03d19a3e6f423"}, + {file = "soco-0.30.6.tar.gz", hash = "sha256:7ae48e865dbf1d9fae8023e1b69465c2c4c17048992a05e9c017b35c43d4f4f2"}, ] [package.dependencies] @@ -2367,102 +2808,166 @@ xmltodict = "*" events-asyncio = ["aiohttp"] testing = ["black (>=22.12.0)", "coveralls", "flake8", "graphviz", "importlib-metadata (<5)", "pylint", "pytest (>=2.5)", "pytest-cov (<2.6.0)", "requests-mock", "sphinx (==4.5.0)", "sphinx-rtd-theme", "twine", "wheel"] +[[package]] +name = "soundfile" +version = "0.12.1" +description = "An audio library based on libsndfile, CFFI and NumPy" +optional = true +python-versions = "*" +files = [ + {file = "soundfile-0.12.1-py2.py3-none-any.whl", hash = "sha256:828a79c2e75abab5359f780c81dccd4953c45a2c4cd4f05ba3e233ddf984b882"}, + {file = "soundfile-0.12.1-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:d922be1563ce17a69582a352a86f28ed8c9f6a8bc951df63476ffc310c064bfa"}, + {file = "soundfile-0.12.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:bceaab5c4febb11ea0554566784bcf4bc2e3977b53946dda2b12804b4fe524a8"}, + {file = "soundfile-0.12.1-py2.py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:2dc3685bed7187c072a46ab4ffddd38cef7de9ae5eb05c03df2ad569cf4dacbc"}, + {file = "soundfile-0.12.1-py2.py3-none-manylinux_2_31_x86_64.whl", hash = "sha256:074247b771a181859d2bc1f98b5ebf6d5153d2c397b86ee9e29ba602a8dfe2a6"}, + {file = "soundfile-0.12.1-py2.py3-none-win32.whl", hash = "sha256:59dfd88c79b48f441bbf6994142a19ab1de3b9bb7c12863402c2bc621e49091a"}, + {file = "soundfile-0.12.1-py2.py3-none-win_amd64.whl", hash = "sha256:0d86924c00b62552b650ddd28af426e3ff2d4dc2e9047dae5b3d8452e0a49a77"}, + {file = "soundfile-0.12.1.tar.gz", hash = "sha256:e8e1017b2cf1dda767aef19d2fd9ee5ebe07e050d430f77a0a7c66ba08b8cdae"}, +] + +[package.dependencies] +cffi = ">=1.0" + +[package.extras] +numpy = ["numpy"] + [[package]] name = "soupsieve" -version = "2.5" +version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = false python-versions = ">=3.8" files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] -name = "sphinx" -version = "7.1.2" -description = "Python documentation generator" -optional = false -python-versions = ">=3.8" +name = "soxr" +version = "0.5.0.post1" +description = "High quality, one-dimensional sample-rate conversion library" +optional = true +python-versions = ">=3.9" files = [ - {file = "sphinx-7.1.2-py3-none-any.whl", hash = "sha256:d170a81825b2fcacb6dfd5a0d7f578a053e45d3f2b153fecc948c37344eb4cbe"}, - {file = "sphinx-7.1.2.tar.gz", hash = "sha256:780f4d32f1d7d1126576e0e5ecc19dc32ab76cd24e950228dcf7b1f6d3d9e22f"}, + {file = "soxr-0.5.0.post1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:7406d782d85f8cf64e66b65e6b7721973de8a1dc50b9e88bc2288c343a987484"}, + {file = "soxr-0.5.0.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa0a382fb8d8e2afed2c1642723b2d2d1b9a6728ff89f77f3524034c8885b8c9"}, + {file = "soxr-0.5.0.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b01d3efb95a2851f78414bcd00738b0253eec3f5a1e5482838e965ffef84969"}, + {file = "soxr-0.5.0.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcc049b0a151a65aa75b92f0ac64bb2dba785d16b78c31c2b94e68c141751d6d"}, + {file = "soxr-0.5.0.post1-cp310-cp310-win_amd64.whl", hash = "sha256:97f269bc26937c267a2ace43a77167d0c5c8bba5a2b45863bb6042b5b50c474e"}, + {file = "soxr-0.5.0.post1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:6fb77b626773a966e3d8f6cb24f6f74b5327fa5dc90f1ff492450e9cdc03a378"}, + {file = "soxr-0.5.0.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:39e0f791ba178d69cd676485dbee37e75a34f20daa478d90341ecb7f6d9d690f"}, + {file = "soxr-0.5.0.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f0b558f445ba4b64dbcb37b5f803052eee7d93b1dbbbb97b3ec1787cb5a28eb"}, + {file = "soxr-0.5.0.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca6903671808e0a6078b0d146bb7a2952b118dfba44008b2aa60f221938ba829"}, + {file = "soxr-0.5.0.post1-cp311-cp311-win_amd64.whl", hash = "sha256:c4d8d5283ed6f5efead0df2c05ae82c169cfdfcf5a82999c2d629c78b33775e8"}, + {file = "soxr-0.5.0.post1-cp312-abi3-macosx_10_14_x86_64.whl", hash = "sha256:fef509466c9c25f65eae0ce1e4b9ac9705d22c6038c914160ddaf459589c6e31"}, + {file = "soxr-0.5.0.post1-cp312-abi3-macosx_11_0_arm64.whl", hash = "sha256:4704ba6b13a3f1e41d12acf192878384c1c31f71ce606829c64abdf64a8d7d32"}, + {file = "soxr-0.5.0.post1-cp312-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd052a66471a7335b22a6208601a9d0df7b46b8d087dce4ff6e13eed6a33a2a1"}, + {file = "soxr-0.5.0.post1-cp312-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3f16810dd649ab1f433991d2a9661e9e6a116c2b4101039b53b3c3e90a094fc"}, + {file = "soxr-0.5.0.post1-cp312-abi3-win_amd64.whl", hash = "sha256:b1be9fee90afb38546bdbd7bde714d1d9a8c5a45137f97478a83b65e7f3146f6"}, + {file = "soxr-0.5.0.post1-cp39-cp39-macosx_10_14_x86_64.whl", hash = 
"sha256:c5af7b355959061beb90a1d73c4834ece4549f07b708f8c73c088153cec29935"}, + {file = "soxr-0.5.0.post1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e1dda616fc797b1507b65486f3116ed2c929f13c722922963dd419d64ada6c07"}, + {file = "soxr-0.5.0.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94de2812368e98cb42b4eaeddf8ee1657ecc19bd053f8e67b9b5aa12a3592012"}, + {file = "soxr-0.5.0.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8e9c980637e03d3f345a4fd81d56477a58c294fb26205fa121bc4eb23d9d01"}, + {file = "soxr-0.5.0.post1-cp39-cp39-win_amd64.whl", hash = "sha256:7e71b0b0db450f36de70f1047505231db77a713f8c47df9342582ae8a4b828f2"}, + {file = "soxr-0.5.0.post1.tar.gz", hash = "sha256:7092b9f3e8a416044e1fa138c8172520757179763b85dc53aa9504f4813cff73"}, ] [package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=2.9" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.18.1,<0.21" +numpy = "*" + +[package.extras] +docs = ["linkify-it-py", "myst-parser", "sphinx", "sphinx-book-theme"] +test = ["pytest"] + +[[package]] +name = "sphinx" +version = "7.4.7" +description = "Python documentation generator" +optional = true +python-versions = ">=3.9" +files = [ + {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, + {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, +] + +[package.dependencies] +alabaster = ">=0.7.14,<0.8.0" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" imagesize = ">=1.3" -importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} -Jinja2 = ">=3.0" -packaging = ">=21.0" -Pygments = ">=2.13" -requests = ">=2.25.0" -snowballstemmer = ">=2.0" +importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +snowballstemmer = ">=2.2" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.5" +sphinxcontrib-serializinghtml = ">=1.1.9" +tomli = {version = ">=2", markers = "python_version < \"3.11\""} [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] -test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] +lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.4" +version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -optional = false -python-versions = ">=3.8" +optional = true +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, - {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, + {file = 
"sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." -optional = false -python-versions = ">=3.5" +version = "2.0.0" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +optional = true +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.1" +version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -optional = false -python-versions = ">=3.8" +optional = true +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, - {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -optional = false +optional = true python-versions = ">=3.5" files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, @@ -2474,32 +2979,34 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
-optional = false -python-versions = ">=3.5" +version = "2.0.0" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +optional = true +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.5" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -optional = false -python-versions = ">=3.5" +version = "2.0.0" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +optional = true +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] @@ -2513,26 +3020,67 @@ files = [ {file = "texttable-1.7.0.tar.gz", hash = "sha256:2d2068fb55115807d3ac77a4ca68fa48803e84ebb0ee2340f858107a36522638"}, ] +[[package]] +name = "threadpoolctl" +version = "3.5.0" +description = "threadpoolctl" +optional = true +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, + {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, +] + [[package]] name = "tomli" -version = "2.0.1" +version = "2.2.1" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "types-beautifulsoup4" -version = "4.12.0.20240511" +version = "4.12.0.20241020" description = "Typing stubs for beautifulsoup4" optional = false python-versions = ">=3.8" files = [ - {file = "types-beautifulsoup4-4.12.0.20240511.tar.gz", hash = "sha256:004f6096fdd83b19cdbf6cb10e4eae57b10205eccc365d0a69d77da836012e28"}, - {file = "types_beautifulsoup4-4.12.0.20240511-py3-none-any.whl", hash = "sha256:7ceda66a93ba28d759d5046d7fec9f4cad2f563a77b3a789efc90bcadafeefd1"}, + {file = "types-beautifulsoup4-4.12.0.20241020.tar.gz", hash = "sha256:158370d08d0cd448bd11b132a50ff5279237a5d4b5837beba074de152a513059"}, + {file = "types_beautifulsoup4-4.12.0.20241020-py3-none-any.whl", hash = "sha256:c95e66ce15a4f5f0835f7fbc5cd886321ae8294f977c495424eaf4225307fd30"}, ] [package.dependencies] @@ -2540,13 +3088,13 @@ types-html5lib = "*" [[package]] name = "types-flask-cors" -version = "4.0.0.20240523" +version = "5.0.0.20240902" description = "Typing stubs for Flask-Cors" optional = false python-versions = ">=3.8" files = [ - {file = "types-Flask-Cors-4.0.0.20240523.tar.gz", hash = "sha256:be57da07b6fd398eef2a79d4d43436fb72cfc3dd23a29a141627cd12576b8b1b"}, - {file = "types_Flask_Cors-4.0.0.20240523-py3-none-any.whl", hash = "sha256:010c03c4f4f17ee1fb4eea6072f86c90bac03692b8ec09d2f7078bc72a6b1694"}, + {file = "types-Flask-Cors-5.0.0.20240902.tar.gz", hash = "sha256:8921b273bf7cd9636df136b66408efcfa6338a935e5c8f53f5eff1cee03f3394"}, + {file = "types_Flask_Cors-5.0.0.20240902-py3-none-any.whl", hash = "sha256:595e5f36056cd128ab905832e055f2e5d116fbdc685356eea4490bc77df82137"}, ] [package.dependencies] @@ -2554,46 +3102,57 @@ Flask = ">=2.0.0" [[package]] name = "types-html5lib" -version = "1.1.11.20240228" +version = "1.1.11.20241018" description = "Typing stubs for html5lib" optional = false python-versions = ">=3.8" files = [ - {file = "types-html5lib-1.1.11.20240228.tar.gz", hash = "sha256:22736b7299e605ec4ba539d48691e905fd0c61c3ea610acc59922232dc84cede"}, - {file = "types_html5lib-1.1.11.20240228-py3-none-any.whl", hash = "sha256:af5de0125cb0fe5667543b158db83849b22e25c0e36c9149836b095548bf1020"}, + {file = "types-html5lib-1.1.11.20241018.tar.gz", hash = "sha256:98042555ff78d9e3a51c77c918b1041acbb7eb6c405408d8a9e150ff5beccafa"}, + {file = "types_html5lib-1.1.11.20241018-py3-none-any.whl", hash = "sha256:3f1e064d9ed2c289001ae6392c84c93833abb0816165c6ff0abfc304a779f403"}, +] + +[[package]] +name = "types-mock" +version = "5.1.0.20240425" +description = "Typing stubs for mock" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-mock-5.1.0.20240425.tar.gz", hash = 
"sha256:5281a645d72e827d70043e3cc144fe33b1c003db084f789dc203aa90e812a5a4"}, + {file = "types_mock-5.1.0.20240425-py3-none-any.whl", hash = "sha256:d586a01d39ad919d3ddcd73de6cde73ca7f3c69707219f722d1b8d7733641ad7"}, ] [[package]] name = "types-pillow" -version = "10.2.0.20240520" +version = "10.2.0.20240822" description = "Typing stubs for Pillow" optional = false python-versions = ">=3.8" files = [ - {file = "types-Pillow-10.2.0.20240520.tar.gz", hash = "sha256:130b979195465fa1e1676d8e81c9c7c30319e8e95b12fae945e8f0d525213107"}, - {file = "types_Pillow-10.2.0.20240520-py3-none-any.whl", hash = "sha256:33c36494b380e2a269bb742181bea5d9b00820367822dbd3760f07210a1da23d"}, + {file = "types-Pillow-10.2.0.20240822.tar.gz", hash = "sha256:559fb52a2ef991c326e4a0d20accb3bb63a7ba8d40eb493e0ecb0310ba52f0d3"}, + {file = "types_Pillow-10.2.0.20240822-py3-none-any.whl", hash = "sha256:d9dab025aba07aeb12fd50a6799d4eac52a9603488eca09d7662543983f16c5d"}, ] [[package]] name = "types-pyyaml" -version = "6.0.12.20240311" +version = "6.0.12.20240917" description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" files = [ - {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"}, - {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"}, + {file = "types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587"}, + {file = "types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570"}, ] [[package]] name = "types-requests" -version = "2.32.0.20240602" +version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.32.0.20240602.tar.gz", hash = "sha256:3f98d7bbd0dd94ebd10ff43a7fbe20c3b8528acace6d8efafef0b6a184793f06"}, - {file = "types_requests-2.32.0.20240602-py3-none-any.whl", hash = "sha256:ed3946063ea9fbc6b5fc0c44fa279188bae42d582cb63760be6cb4b9d06c3de8"}, + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, ] [package.dependencies] @@ -2634,13 +3193,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -2651,13 +3210,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "werkzeug" -version = "3.0.3" +version = "3.1.3" description = "The comprehensive WSGI web application library." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, - {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, ] [package.dependencies] @@ -2668,37 +3227,43 @@ watchdog = ["watchdog (>=2.3)"] [[package]] name = "xmltodict" -version = "0.13.0" +version = "0.14.2" description = "Makes working with XML feel like you are working with JSON" optional = true -python-versions = ">=3.4" +python-versions = ">=3.6" files = [ - {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, - {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, + {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, + {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, ] [[package]] name = "zipp" -version = "3.19.2" +version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [extras] absubmit = ["requests"] aura = ["Pillow", "flask", "flask-cors"] +autobpm = ["librosa", "resampy"] beatport = ["requests-oauthlib"] bpd = ["PyGObject"] chroma = ["pyacoustid"] discogs = ["python3-discogs-client"] +docs = ["pydata-sphinx-theme", "sphinx"] embedart = ["Pillow"] embyupdate = ["requests"] fetchart = ["Pillow", "beautifulsoup4", "langdetect", "requests"] @@ -2719,5 +3284,5 @@ web = ["flask", "flask-cors"] [metadata] lock-version = "2.0" -python-versions = ">=3.8,<4" -content-hash = "48fba7c7149c8cb7824f96329bd469a9e9c84b13d233f957889b8b1d7076392f" +python-versions = ">=3.9,<4" +content-hash = "2edbbe1f3488fb9d3a05e2d60c23d3fd1afaa8154d2a71ffad83b34476ceac78" diff --git a/pyproject.toml b/pyproject.toml index 
45e7afdc1..3d91a59b4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "beets" -version = "2.0.0" +version = "2.2.0" description = "music tagger and library organizer" authors = ["Adrian Sampson "] maintainers = ["Serene-Arc"] @@ -17,7 +17,6 @@ classifiers = [ "Environment :: Web Environment", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: Implementation :: CPython", @@ -26,28 +25,40 @@ packages = [ { include = "beets" }, { include = "beetsplug" }, ] +include = [ # extra files to include in the sdist + { path = "docs", format = "sdist" }, + { path = "extra", format = "sdist" }, + { path = "man/**/*", format = "sdist" }, + { path = "test/*.py", format = "sdist" }, + { path = "test/rsrc/**/*", format = "sdist" }, +] +exclude = ["docs/_build", "docs/modd.conf", "docs/**/*.css"] [tool.poetry.urls] Changelog = "https://github.com/beetbox/beets/blob/master/docs/changelog.rst" "Bug Tracker" = "https://github.com/beetbox/beets/issues" [tool.poetry.dependencies] -python = ">=3.8,<4" +python = ">=3.9,<4" colorama = { version = "*", markers = "sys_platform == 'win32'" } confuse = ">=1.5.0" jellyfish = "*" +lap = ">=0.5.12" mediafile = ">=0.12.0" -munkres = ">=1.0.0" musicbrainzngs = ">=0.4" +numpy = ">=1.24.4" +platformdirs = ">=3.5.0" pyyaml = "*" typing_extensions = { version = "*", python = "<=3.10" } unidecode = ">=1.3.6" + beautifulsoup4 = { version = "*", optional = true } dbus-python = { version = "*", optional = true } flask = { version = "*", optional = true } flask-cors = { version = "*", optional = true } langdetect = { version = "*", optional = true } +librosa = { version = "^0.10.2.post1", optional = true } mutagen = { version = ">=1.33", optional = true } Pillow = { version = "*", optional = true } py7zr = { version = "*", optional = true } @@ -60,9 +71,13 @@ pyxdg = { version = "*", optional = true } rarfile = { version = "*", optional = true } reflink = { version = "*", optional = true } requests = { version = "*", optional = true } +resampy = { version = ">=0.4.3", optional = true } requests-oauthlib = { version = ">=0.6.1", optional = true } soco = { version = "*", optional = true } +pydata-sphinx-theme = { version = "*", optional = true } +sphinx = { version = "*", optional = true } + [tool.poetry.group.test.dependencies] beautifulsoup4 = "*" codecov = ">=2.1.13" @@ -77,31 +92,22 @@ python3-discogs-client = ">=2.3.15" py7zr = "*" pyxdg = "*" rarfile = "*" -reflink = "*" requests_oauthlib = "*" responses = ">=0.3.0" -[tool.poetry.group.format.dependencies] -isort = { version = "<5.14", extras = ["colors"] } -black = ">=24.3,<25" - [tool.poetry.group.lint.dependencies] -flake8 = "*" -pep8-naming = "*" +ruff = ">=0.6.4" [tool.poetry.group.typing.dependencies] mypy = "*" types-beautifulsoup4 = "*" +types-mock = "*" types-Flask-Cors = "*" types-Pillow = "*" types-PyYAML = "*" types-requests = "*" types-urllib3 = "*" -[tool.poetry.group.docs.dependencies] -pydata-sphinx-theme = "*" -sphinx = "*" - [tool.poetry.group.release.dependencies] click = ">=8.1.7" packaging = ">=24.0" @@ -111,11 +117,13 @@ tomli = ">=2.0.1" # inline comments note required external / non-python dependencies absubmit = ["requests"] # extractor binary from https://acousticbrainz.org/download aura = ["flask", "flask-cors", "Pillow"] +autobpm = ["librosa", "resampy"] # badfiles # mp3val and flac 
beatport = ["requests-oauthlib"] bpd = ["PyGObject"] # python-gi and GStreamer 1.0+ chroma = ["pyacoustid"] # chromaprint or fpcalc # convert # ffmpeg +docs = ["pydata-sphinx-theme", "sphinx"] discogs = ["python3-discogs-client"] embedart = ["Pillow"] # ImageMagick embyupdate = ["requests"] @@ -150,23 +158,14 @@ build-backend = "poetry.core.masonry.api" poethepoet = ">=0.26" poetry = ">=1.8" -# We use a default path '.' to make black and isort behave like flake8 and -# mypy do: they act on the entire codebase (flake8 does it by default, and -# mypy follows our configuration) by default. Positional command-line arguments -# override this. Therefore, locally you can run `poe check-format ` -# to quickly check a specific path. -# -# Note: both tools respect .gitignore, therefore if we see them format -# something unwanted locally, we should add these paths to .gitignore. -[tool.poe.tasks._black] -help = "Run black" -cmd = "black $OPTS $path" -args = { path = { help = "Path to blacken", positional = true, multiple = true, default = "." } } - -[tool.poe.tasks._isort] -help = "Run isort" -cmd = "isort $OPTS $path" -args = { path = { help = "Path to isort", positional = true, multiple = true, default = "." } } +[tool.poe.tasks.build] +help = "Build the package" +shell = """ +make -C docs man +rm -rf man +mv docs/_build/man . +poetry build +""" [tool.poe.tasks.bump] help = "Bump project version and update relevant files" @@ -183,8 +182,7 @@ cmd = "make -C docs linkcheck" [tool.poe.tasks.check-format] help = "Check the code for style issues" -ref = "format" -env.OPTS = "--check --diff --color" +cmd = "ruff format --check --diff" [tool.poe.tasks.check-types] help = "Check the code for typing issues. Accepts mypy options." @@ -196,13 +194,11 @@ cmd = "make -C docs html" [tool.poe.tasks.format] help = "Format the codebase" -ignore_fail = "return_non_zero" -sequence = ["_black $path", "_isort $path"] -args = { path = { help = "Path to format", positional = true, multiple = true, default = "." } } +cmd = "ruff format" [tool.poe.tasks.lint] -help = "Check the code for linting issues. Accepts flake8 options." -cmd = "flake8" +help = "Check the code for linting issues. Accepts ruff options." +cmd = "ruff check" [tool.poe.tasks.update-dependencies] help = "Update dependencies to their latest versions." 
@@ -245,13 +241,42 @@ done """ interpreter = "zsh" -[tool.black] +[tool.ruff] +target-version = "py39" line-length = 80 -target-version = ["py38", "py39", "py310", "py311"] -[tool.isort] -profile = "black" -py_version = 38 -multi_line_output = 3 -line_length = 80 -indent = 4 +[tool.ruff.lint] +select = [ + # "ARG", # flake8-unused-arguments + # "C4", # flake8-comprehensions + "E", # pycodestyle + "F", # pyflakes + # "B", # flake8-bugbear + "I", # isort + "N", # pep8-naming + "PT", # flake8-pytest-style + # "RUF", # ruff + # "UP", # pyupgrade + "TCH", # flake8-type-checking + "W", # pycodestyle +] +[tool.ruff.lint.per-file-ignores] +"beets/**" = ["PT"] + +[tool.ruff.lint.isort] +split-on-trailing-comma = false + +[tool.ruff.lint.pycodestyle] +max-line-length = 88 + +[tool.ruff.lint.flake8-pytest-style] +fixture-parentheses = false +mark-parentheses = false +parametrize-names-type = "csv" + +[tool.ruff.lint.flake8-unused-arguments] +ignore-variadic-names = true + +[tool.ruff.lint.pep8-naming] +classmethod-decorators = ["cached_classproperty"] +extend-ignore-names = ["assert*", "cached_classproperty"] diff --git a/setup.cfg b/setup.cfg index b918bdb1d..8cf0dc3d0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -7,11 +7,14 @@ addopts = # show all skipped/failed/xfailed tests in the summary except passed -ra --strict-config +markers = + integration_test: mark a test as an integration test [coverage:run] data_file = .reports/coverage/data branch = true relative_files = true +omit = beets/test/* [coverage:report] precision = 2 @@ -27,41 +30,6 @@ exclude_lines = [coverage:html] show_contexts = true -[flake8] -min-version = 3.8 -accept-encodings = utf-8 -max-line-length = 88 -classmethod-decorators = - classmethod - cached_classproperty -# errors we ignore; see https://www.flake8rules.com/ for more info -ignore = - # pycodestyle errors - # continuation line under-indented for hanging indent - E121, - # closing bracket does not match indentation of opening bracket's line - E123, - # continuation line over-indented for hanging indent - E126, - # multiple spaces after non-arithmetic operators (for vertical alignment) - E241, - # expected 2 blank lines after end of function or class - E305, - # do not assign a lambda expression, use a def - E731, - # do not use variables name 'I', 'O', or 'l' - E741, - # pycodestyle warnings: line breaks around binary operators - W503, - W504, - # mccabe errors: function is too complex - C901, - # Exception subclasses should be named with an Error suffix - N818, - # Exclude rules for black compatibility - E203, - E704, - [mypy] files = beets,beetsplug,test,extra,docs allow_any_generics = false diff --git a/test/conftest.py b/test/conftest.py new file mode 100644 index 000000000..8b29946ae --- /dev/null +++ b/test/conftest.py @@ -0,0 +1,12 @@ +import os + +import pytest + + +def pytest_runtest_setup(item: pytest.Item): + """Skip integration tests if INTEGRATION_TEST environment variable is not set.""" + if os.environ.get("INTEGRATION_TEST"): + return + + if next(item.iter_markers(name="integration_test"), None): + pytest.skip(f"INTEGRATION_TEST=1 required: {item.nodeid}") diff --git a/test/plugins/lyrics_download_samples.py b/test/plugins/lyrics_download_samples.py index 9a1c86591..4d68e7d50 100644 --- a/test/plugins/lyrics_download_samples.py +++ b/test/plugins/lyrics_download_samples.py @@ -15,10 +15,11 @@ import os import sys -from test.plugins import test_lyrics import requests +from test.plugins import test_lyrics + def mkdir_p(path): try: diff --git 
a/test/plugins/test_acousticbrainz.py b/test/plugins/test_acousticbrainz.py index fbf83def0..2c4f0d9d6 100644 --- a/test/plugins/test_acousticbrainz.py +++ b/test/plugins/test_acousticbrainz.py @@ -12,9 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Tests for the 'acousticbrainz' plugin. -""" - +"""Tests for the 'acousticbrainz' plugin.""" import json import os.path @@ -30,9 +28,10 @@ class MapDataToSchemeTest(unittest.TestCase): data = {"key 1": "value 1", "key 2": "value 2"} scheme = {"key 1": "attribute 1", "key 2": "attribute 2"} mapping = set(ab._map_data_to_scheme(data, scheme)) - self.assertEqual( - mapping, {("attribute 1", "value 1"), ("attribute 2", "value 2")} - ) + assert mapping == { + ("attribute 1", "value 1"), + ("attribute 2", "value 2"), + } def test_recurse(self): ab = AcousticPlugin() @@ -51,21 +50,18 @@ class MapDataToSchemeTest(unittest.TestCase): }, } mapping = set(ab._map_data_to_scheme(data, scheme)) - self.assertEqual( - mapping, - { - ("attribute 1", "value"), - ("attribute 2", "subvalue"), - ("attribute 3", "subsubvalue"), - }, - ) + assert mapping == { + ("attribute 1", "value"), + ("attribute 2", "subvalue"), + ("attribute 3", "subsubvalue"), + } def test_composite(self): ab = AcousticPlugin() data = {"key 1": "part 1", "key 2": "part 2"} scheme = {"key 1": ("attribute", 0), "key 2": ("attribute", 1)} mapping = set(ab._map_data_to_scheme(data, scheme)) - self.assertEqual(mapping, {("attribute", "part 1 part 2")}) + assert mapping == {("attribute", "part 1 part 2")} def test_realistic(self): ab = AcousticPlugin() @@ -98,12 +94,4 @@ class MapDataToSchemeTest(unittest.TestCase): ("moods_mirex", "Cluster3"), ("timbre", "bright"), } - self.assertEqual(mapping, expected) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert mapping == expected diff --git a/test/plugins/test_advancedrewrite.py b/test/plugins/test_advancedrewrite.py index 71f92c4dd..d2be1fa6c 100644 --- a/test/plugins/test_advancedrewrite.py +++ b/test/plugins/test_advancedrewrite.py @@ -12,156 +12,116 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Test the advancedrewrite plugin for various configurations. 
-""" +"""Test the advancedrewrite plugin for various configurations.""" -import unittest +import pytest -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase from beets.ui import UserError PLUGIN_NAME = "advancedrewrite" -class AdvancedRewritePluginTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() +class AdvancedRewritePluginTest(PluginTestCase): + plugin = "advancedrewrite" + preload_plugin = False def test_simple_rewrite_example(self): - self.config[PLUGIN_NAME] = [ - {"artist ODD EYE CIRCLE": "이달의 소녀 오드아이써클"}, - ] - self.load_plugins(PLUGIN_NAME) + with self.configure_plugin( + [{"artist ODD EYE CIRCLE": "이달의 소녀 오드아이써클"}] + ): + item = self.add_item( + artist="ODD EYE CIRCLE", + albumartist="ODD EYE CIRCLE", + ) - item = self.add_item( - title="Uncover", - artist="ODD EYE CIRCLE", - albumartist="ODD EYE CIRCLE", - album="Mix & Match", - ) - - self.assertEqual(item.artist, "이달의 소녀 오드아이써클") + assert item.artist == "이달의 소녀 오드아이써클" def test_advanced_rewrite_example(self): - self.config[PLUGIN_NAME] = [ - { - "match": "mb_artistid:dec0f331-cb08-4c8e-9c9f-aeb1f0f6d88c year:..2022", - "replacements": { - "artist": "이달의 소녀 오드아이써클", - "artist_sort": "LOONA / ODD EYE CIRCLE", + with self.configure_plugin( + [ + { + "match": "mb_artistid:dec0f331-cb08-4c8e-9c9f-aeb1f0f6d88c year:..2022", # noqa: E501 + "replacements": { + "artist": "이달의 소녀 오드아이써클", + "artist_sort": "LOONA / ODD EYE CIRCLE", + }, }, - }, - ] - self.load_plugins(PLUGIN_NAME) + ] + ): + item_a = self.add_item( + artist="ODD EYE CIRCLE", + artist_sort="ODD EYE CIRCLE", + mb_artistid="dec0f331-cb08-4c8e-9c9f-aeb1f0f6d88c", + year=2017, + ) + item_b = self.add_item( + artist="ODD EYE CIRCLE", + artist_sort="ODD EYE CIRCLE", + mb_artistid="dec0f331-cb08-4c8e-9c9f-aeb1f0f6d88c", + year=2023, + ) - item_a = self.add_item( - title="Uncover", - artist="ODD EYE CIRCLE", - albumartist="ODD EYE CIRCLE", - artist_sort="ODD EYE CIRCLE", - albumartist_sort="ODD EYE CIRCLE", - album="Mix & Match", - mb_artistid="dec0f331-cb08-4c8e-9c9f-aeb1f0f6d88c", - year=2017, - ) - item_b = self.add_item( - title="Air Force One", - artist="ODD EYE CIRCLE", - albumartist="ODD EYE CIRCLE", - artist_sort="ODD EYE CIRCLE", - albumartist_sort="ODD EYE CIRCLE", - album="ODD EYE CIRCLE ", - mb_artistid="dec0f331-cb08-4c8e-9c9f-aeb1f0f6d88c", - year=2023, - ) + # Assert that all replacements were applied to item_a + assert "이달의 소녀 오드아이써클" == item_a.artist + assert "LOONA / ODD EYE CIRCLE" == item_a.artist_sort + assert "LOONA / ODD EYE CIRCLE" == item_a.albumartist_sort - # Assert that all replacements were applied to item_a - self.assertEqual("이달의 소녀 오드아이써클", item_a.artist) - self.assertEqual("LOONA / ODD EYE CIRCLE", item_a.artist_sort) - self.assertEqual("LOONA / ODD EYE CIRCLE", item_a.albumartist_sort) - - # Assert that no replacements were applied to item_b - self.assertEqual("ODD EYE CIRCLE", item_b.artist) + # Assert that no replacements were applied to item_b + assert "ODD EYE CIRCLE" == item_b.artist def test_advanced_rewrite_example_with_multi_valued_field(self): - self.config[PLUGIN_NAME] = [ - { - "match": "artist:배유빈 feat. 김미현", - "replacements": { - "artists": ["유빈", "미미"], + with self.configure_plugin( + [ + { + "match": "artist:배유빈 feat. 김미현", + "replacements": {"artists": ["유빈", "미미"]}, }, - }, - ] - self.load_plugins(PLUGIN_NAME) + ] + ): + item = self.add_item( + artist="배유빈 feat. 
김미현", + artists=["배유빈", "김미현"], + ) - item = self.add_item( - artist="배유빈 feat. 김미현", - artists=["배유빈", "김미현"], - ) - - self.assertEqual(item.artists, ["유빈", "미미"]) + assert item.artists == ["유빈", "미미"] def test_fail_when_replacements_empty(self): - self.config[PLUGIN_NAME] = [ - { - "match": "artist:A", - "replacements": {}, - }, - ] - with self.assertRaises( - UserError, - msg="Advanced rewrites must have at least one replacement", + with ( + pytest.raises( + UserError, + match="Advanced rewrites must have at least one replacement", + ), + self.configure_plugin([{"match": "artist:A", "replacements": {}}]), ): - self.load_plugins(PLUGIN_NAME) + pass def test_fail_when_rewriting_single_valued_field_with_list(self): - self.config[PLUGIN_NAME] = [ - { - "match": "artist:'A & B'", - "replacements": { - "artist": ["C", "D"], - }, - }, - ] - with self.assertRaises( - UserError, - msg="Field artist is not a multi-valued field but a list was given: C, D", + with ( + pytest.raises( + UserError, + match="Field artist is not a multi-valued field but a list was given: C, D", # noqa: E501 + ), + self.configure_plugin( + [ + { + "match": "artist:'A & B'", + "replacements": {"artist": ["C", "D"]}, + }, + ] + ), ): - self.load_plugins(PLUGIN_NAME) + pass def test_combined_rewrite_example(self): - self.config[PLUGIN_NAME] = [ - {"artist A": "B"}, - { - "match": "album:'C'", - "replacements": { - "artist": "D", - }, - }, - ] - self.load_plugins(PLUGIN_NAME) + with self.configure_plugin( + [ + {"artist A": "B"}, + {"match": "album:'C'", "replacements": {"artist": "D"}}, + ] + ): + item = self.add_item(artist="A", albumartist="A") + assert item.artist == "B" - item = self.add_item( - artist="A", - albumartist="A", - ) - self.assertEqual(item.artist, "B") - - item = self.add_item( - artist="C", - albumartist="C", - album="C", - ) - self.assertEqual(item.artist, "D") - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + item = self.add_item(artist="C", albumartist="C", album="C") + assert item.artist == "D" diff --git a/test/plugins/test_albumtypes.py b/test/plugins/test_albumtypes.py index 6b3b48d10..8be1ff011 100644 --- a/test/plugins/test_albumtypes.py +++ b/test/plugins/test_albumtypes.py @@ -14,27 +14,17 @@ """Tests for the 'albumtypes' plugin.""" - -import unittest -from typing import Sequence, Tuple +from collections.abc import Sequence from beets.autotag.mb import VARIOUS_ARTISTS_ID -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase from beetsplug.albumtypes import AlbumTypesPlugin -class AlbumTypesPluginTest(unittest.TestCase, TestHelper): +class AlbumTypesPluginTest(PluginTestCase): """Tests for albumtypes plugin.""" - def setUp(self): - """Set up tests.""" - self.setup_beets() - self.load_plugins("albumtypes") - - def tearDown(self): - """Tear down tests.""" - self.unload_plugins() - self.teardown_beets() + plugin = "albumtypes" def test_renames_types(self): """Tests if the plugin correctly renames the specified types.""" @@ -44,7 +34,7 @@ class AlbumTypesPluginTest(unittest.TestCase, TestHelper): album = self._create_album(album_types=["ep", "remix"]) subject = AlbumTypesPlugin() result = subject._atypes(album) - self.assertEqual("(EP)(Remix)", result) + assert "(EP)(Remix)" == result return def test_returns_only_specified_types(self): @@ -55,7 +45,7 @@ class AlbumTypesPluginTest(unittest.TestCase, TestHelper): album = self._create_album(album_types=["ep", "remix", 
"soundtrack"]) subject = AlbumTypesPlugin() result = subject._atypes(album) - self.assertEqual("(EP)", result) + assert "(EP)" == result def test_respects_type_order(self): """Tests if the types are returned in the same order as config.""" @@ -65,7 +55,7 @@ class AlbumTypesPluginTest(unittest.TestCase, TestHelper): album = self._create_album(album_types=["ep", "remix"]) subject = AlbumTypesPlugin() result = subject._atypes(album) - self.assertEqual("(Remix)(EP)", result) + assert "(Remix)(EP)" == result return def test_ignores_va(self): @@ -80,7 +70,7 @@ class AlbumTypesPluginTest(unittest.TestCase, TestHelper): ) subject = AlbumTypesPlugin() result = subject._atypes(album) - self.assertEqual("(OST)", result) + assert "(OST)" == result def test_respects_defaults(self): """Tests if the plugin uses the default values if config not given.""" @@ -97,11 +87,11 @@ class AlbumTypesPluginTest(unittest.TestCase, TestHelper): ) subject = AlbumTypesPlugin() result = subject._atypes(album) - self.assertEqual("[EP][Single][OST][Live][Remix]", result) + assert "[EP][Single][OST][Live][Remix]" == result def _set_config( self, - types: Sequence[Tuple[str, str]], + types: Sequence[tuple[str, str]], ignore_va: Sequence[str], bracket: str, ): diff --git a/test/plugins/test_art.py b/test/plugins/test_art.py index 8a2aa5870..acb712354 100644 --- a/test/plugins/test_art.py +++ b/test/plugins/test_art.py @@ -14,19 +14,23 @@ """Tests for the album art fetchers.""" - import os import shutil -import unittest from unittest.mock import patch import confuse +import pytest import responses -from beets import config, importer, library, logging, util +from beets import config, importer, logging, util from beets.autotag import AlbumInfo, AlbumMatch from beets.test import _common -from beets.test.helper import CleanupModulesMixin, FetchImageHelper, capture_log +from beets.test.helper import ( + BeetsTestCase, + CleanupModulesMixin, + FetchImageHelper, + capture_log, +) from beets.util import syspath from beets.util.artresizer import ArtResizer from beetsplug import fetchart @@ -44,7 +48,7 @@ class Settings: setattr(self, k, v) -class UseThePlugin(CleanupModulesMixin, _common.TestCase): +class UseThePlugin(CleanupModulesMixin, BeetsTestCase): modules = (fetchart.__name__, ArtResizer.__module__) def setUp(self): @@ -205,23 +209,23 @@ class FetchImageTest(FetchImageTestCase): def test_invalid_type_returns_none(self): self.mock_response(self.URL, "image/watercolour") self.source.fetch_image(self.candidate, self.settings) - self.assertIsNone(self.candidate.path) + assert self.candidate.path is None def test_jpeg_type_returns_path(self): self.mock_response(self.URL, "image/jpeg") self.source.fetch_image(self.candidate, self.settings) - self.assertIsNotNone(self.candidate.path) + assert self.candidate.path is not None def test_extension_set_by_content_type(self): self.mock_response(self.URL, "image/png") self.source.fetch_image(self.candidate, self.settings) - self.assertEqual(os.path.splitext(self.candidate.path)[1], b".png") + assert os.path.splitext(self.candidate.path)[1] == b".png" self.assertExists(self.candidate.path) def test_does_not_rely_on_server_content_type(self): self.mock_response(self.URL, "image/jpeg", "image/png") self.source.fetch_image(self.candidate, self.settings) - self.assertEqual(os.path.splitext(self.candidate.path)[1], b".png") + assert os.path.splitext(self.candidate.path)[1] == b".png" self.assertExists(self.candidate.path) @@ -237,27 +241,27 @@ class FSArtTest(UseThePlugin): def 
test_finds_jpg_in_directory(self): _common.touch(os.path.join(self.dpath, b"a.jpg")) candidate = next(self.source.get(None, self.settings, [self.dpath])) - self.assertEqual(candidate.path, os.path.join(self.dpath, b"a.jpg")) + assert candidate.path == os.path.join(self.dpath, b"a.jpg") def test_appropriately_named_file_takes_precedence(self): _common.touch(os.path.join(self.dpath, b"a.jpg")) _common.touch(os.path.join(self.dpath, b"art.jpg")) candidate = next(self.source.get(None, self.settings, [self.dpath])) - self.assertEqual(candidate.path, os.path.join(self.dpath, b"art.jpg")) + assert candidate.path == os.path.join(self.dpath, b"art.jpg") def test_non_image_file_not_identified(self): _common.touch(os.path.join(self.dpath, b"a.txt")) - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(None, self.settings, [self.dpath])) def test_cautious_skips_fallback(self): _common.touch(os.path.join(self.dpath, b"a.jpg")) self.settings.cautious = True - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(None, self.settings, [self.dpath])) def test_empty_dir(self): - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(None, self.settings, [self.dpath])) def test_precedence_amongst_correct_files(self): @@ -270,7 +274,7 @@ class FSArtTest(UseThePlugin): candidate.path for candidate in self.source.get(None, self.settings, [self.dpath]) ] - self.assertEqual(candidates, paths) + assert candidates == paths class CombinedTest(FetchImageTestCase, CAAHelper): @@ -290,40 +294,40 @@ class CombinedTest(FetchImageTestCase, CAAHelper): self.mock_response(self.AMAZON_URL) album = _common.Bag(asin=self.ASIN) candidate = self.plugin.art_for_album(album, None) - self.assertIsNotNone(candidate) + assert candidate is not None def test_main_interface_returns_none_for_missing_asin_and_path(self): album = _common.Bag() candidate = self.plugin.art_for_album(album, None) - self.assertIsNone(candidate) + assert candidate is None def test_main_interface_gives_precedence_to_fs_art(self): _common.touch(os.path.join(self.dpath, b"art.jpg")) self.mock_response(self.AMAZON_URL) album = _common.Bag(asin=self.ASIN) candidate = self.plugin.art_for_album(album, [self.dpath]) - self.assertIsNotNone(candidate) - self.assertEqual(candidate.path, os.path.join(self.dpath, b"art.jpg")) + assert candidate is not None + assert candidate.path == os.path.join(self.dpath, b"art.jpg") def test_main_interface_falls_back_to_amazon(self): self.mock_response(self.AMAZON_URL) album = _common.Bag(asin=self.ASIN) candidate = self.plugin.art_for_album(album, [self.dpath]) - self.assertIsNotNone(candidate) - self.assertFalse(candidate.path.startswith(self.dpath)) + assert candidate is not None + assert not candidate.path.startswith(self.dpath) def test_main_interface_tries_amazon_before_aao(self): self.mock_response(self.AMAZON_URL) album = _common.Bag(asin=self.ASIN) self.plugin.art_for_album(album, [self.dpath]) - self.assertEqual(len(responses.calls), 1) - self.assertEqual(responses.calls[0].request.url, self.AMAZON_URL) + assert len(responses.calls) == 1 + assert responses.calls[0].request.url == self.AMAZON_URL def test_main_interface_falls_back_to_aao(self): self.mock_response(self.AMAZON_URL, content_type="text/html") album = _common.Bag(asin=self.ASIN) self.plugin.art_for_album(album, [self.dpath]) - self.assertEqual(responses.calls[-1].request.url, self.AAO_URL) + assert responses.calls[-1].request.url == 
self.AAO_URL def test_main_interface_uses_caa_when_mbid_available(self): self.mock_caa_response(self.RELEASE_URL, self.RESPONSE_RELEASE) @@ -342,14 +346,14 @@ class CombinedTest(FetchImageTestCase, CAAHelper): asin=self.ASIN, ) candidate = self.plugin.art_for_album(album, None) - self.assertIsNotNone(candidate) - self.assertEqual(len(responses.calls), 3) - self.assertEqual(responses.calls[0].request.url, self.RELEASE_URL) + assert candidate is not None + assert len(responses.calls) == 3 + assert responses.calls[0].request.url == self.RELEASE_URL def test_local_only_does_not_access_network(self): album = _common.Bag(mb_albumid=self.MBID, asin=self.ASIN) self.plugin.art_for_album(album, None, local_only=True) - self.assertEqual(len(responses.calls), 0) + assert len(responses.calls) == 0 def test_local_only_gets_fs_image(self): _common.touch(os.path.join(self.dpath, b"art.jpg")) @@ -357,9 +361,9 @@ class CombinedTest(FetchImageTestCase, CAAHelper): candidate = self.plugin.art_for_album( album, [self.dpath], local_only=True ) - self.assertIsNotNone(candidate) - self.assertEqual(candidate.path, os.path.join(self.dpath, b"art.jpg")) - self.assertEqual(len(responses.calls), 0) + assert candidate is not None + assert candidate.path == os.path.join(self.dpath, b"art.jpg") + assert len(responses.calls) == 0 class AAOTest(UseThePlugin): @@ -389,12 +393,12 @@ class AAOTest(UseThePlugin): self.mock_response(self.AAO_URL, body) album = _common.Bag(asin=self.ASIN) candidate = next(self.source.get(album, self.settings, [])) - self.assertEqual(candidate.url, "TARGET_URL") + assert candidate.url == "TARGET_URL" def test_aao_scraper_returns_no_result_when_no_image_present(self): self.mock_response(self.AAO_URL, "blah blah") album = _common.Bag(asin=self.ASIN) - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(album, self.settings, [])) @@ -427,8 +431,8 @@ class ITunesStoreTest(UseThePlugin): }""" self.mock_response(fetchart.ITunesStore.API_URL, json) candidate = next(self.source.get(self.album, self.settings, [])) - self.assertEqual(candidate.url, "url_to_the_image") - self.assertEqual(candidate.match, fetchart.Candidate.MATCH_EXACT) + assert candidate.url == "url_to_the_image" + assert candidate.match == fetchart.Candidate.MATCH_EXACT def test_itunesstore_no_result(self): json = '{"results": []}' @@ -436,9 +440,9 @@ class ITunesStoreTest(UseThePlugin): expected = "got no results" with capture_log("beets.test_art") as logs: - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(self.album, self.settings, [])) - self.assertIn(expected, logs[1]) + assert expected in logs[1] def test_itunesstore_requestexception(self): responses.add( @@ -450,9 +454,9 @@ class ITunesStoreTest(UseThePlugin): expected = "iTunes search failed: 404 Client Error" with capture_log("beets.test_art") as logs: - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(self.album, self.settings, [])) - self.assertIn(expected, logs[1]) + assert expected in logs[1] def test_itunesstore_fallback_match(self): json = """{ @@ -466,8 +470,8 @@ class ITunesStoreTest(UseThePlugin): }""" self.mock_response(fetchart.ITunesStore.API_URL, json) candidate = next(self.source.get(self.album, self.settings, [])) - self.assertEqual(candidate.url, "url_to_the_image") - self.assertEqual(candidate.match, fetchart.Candidate.MATCH_FALLBACK) + assert candidate.url == "url_to_the_image" + assert candidate.match == 
fetchart.Candidate.MATCH_FALLBACK def test_itunesstore_returns_result_without_artwork(self): json = """{ @@ -483,9 +487,9 @@ class ITunesStoreTest(UseThePlugin): expected = "Malformed itunes candidate" with capture_log("beets.test_art") as logs: - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(self.album, self.settings, [])) - self.assertIn(expected, logs[1]) + assert expected in logs[1] def test_itunesstore_returns_no_result_when_error_received(self): json = '{"error": {"errors": [{"reason": "some reason"}]}}' @@ -493,9 +497,9 @@ class ITunesStoreTest(UseThePlugin): expected = "not found in json. Fields are" with capture_log("beets.test_art") as logs: - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(self.album, self.settings, [])) - self.assertIn(expected, logs[1]) + assert expected in logs[1] def test_itunesstore_returns_no_result_with_malformed_response(self): json = """bla blup""" @@ -503,9 +507,9 @@ class ITunesStoreTest(UseThePlugin): expected = "Could not decode json response:" with capture_log("beets.test_art") as logs: - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(self.album, self.settings, [])) - self.assertIn(expected, logs[1]) + assert expected in logs[1] class GoogleImageTest(UseThePlugin): @@ -528,20 +532,20 @@ class GoogleImageTest(UseThePlugin): json = '{"items": [{"link": "url_to_the_image"}]}' self.mock_response(fetchart.GoogleImages.URL, json) candidate = next(self.source.get(album, self.settings, [])) - self.assertEqual(candidate.url, "url_to_the_image") + assert candidate.url == "url_to_the_image" def test_google_art_returns_no_result_when_error_received(self): album = _common.Bag(albumartist="some artist", album="some album") json = '{"error": {"errors": [{"reason": "some reason"}]}}' self.mock_response(fetchart.GoogleImages.URL, json) - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(album, self.settings, [])) def test_google_art_returns_no_result_with_malformed_response(self): album = _common.Bag(albumartist="some artist", album="some album") json = """bla blup""" self.mock_response(fetchart.GoogleImages.URL, json) - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(album, self.settings, [])) @@ -562,9 +566,9 @@ class CoverArtArchiveTest(UseThePlugin, CAAHelper): self.mock_caa_response(self.RELEASE_URL, self.RESPONSE_RELEASE) self.mock_caa_response(self.GROUP_URL, self.RESPONSE_GROUP) candidates = list(self.source.get(album, self.settings, [])) - self.assertEqual(len(candidates), 3) - self.assertEqual(len(responses.calls), 2) - self.assertEqual(responses.calls[0].request.url, self.RELEASE_URL) + assert len(candidates) == 3 + assert len(responses.calls) == 2 + assert responses.calls[0].request.url == self.RELEASE_URL def test_fetchart_uses_caa_pre_sized_maxwidth_thumbs(self): # CAA provides pre-sized thumbnails of width 250px, 500px, and 1200px @@ -578,9 +582,9 @@ class CoverArtArchiveTest(UseThePlugin, CAAHelper): self.mock_caa_response(self.RELEASE_URL, self.RESPONSE_RELEASE) self.mock_caa_response(self.GROUP_URL, self.RESPONSE_GROUP) candidates = list(self.source.get(album, self.settings, [])) - self.assertEqual(len(candidates), 3) + assert len(candidates) == 3 for candidate in candidates: - self.assertIn(f"-{maxwidth}.jpg", candidate.url) + assert f"-{maxwidth}.jpg" in candidate.url def 
test_caa_finds_image_if_maxwidth_is_set_and_thumbnails_is_empty(self): # CAA provides pre-sized thumbnails of width 250px, 500px, and 1200px @@ -599,9 +603,9 @@ class CoverArtArchiveTest(UseThePlugin, CAAHelper): self.RESPONSE_GROUP_WITHOUT_THUMBNAILS, ) candidates = list(self.source.get(album, self.settings, [])) - self.assertEqual(len(candidates), 3) + assert len(candidates) == 3 for candidate in candidates: - self.assertNotIn(f"-{maxwidth}.jpg", candidate.url) + assert f"-{maxwidth}.jpg" not in candidate.url class FanartTVTest(UseThePlugin): @@ -683,7 +687,7 @@ class FanartTVTest(UseThePlugin): self.RESPONSE_MULTIPLE, ) candidate = next(self.source.get(album, self.settings, [])) - self.assertEqual(candidate.url, "http://example.com/1.jpg") + assert candidate.url == "http://example.com/1.jpg" def test_fanarttv_returns_no_result_when_error_received(self): album = _common.Bag(mb_releasegroupid="thereleasegroupid") @@ -691,7 +695,7 @@ class FanartTVTest(UseThePlugin): fetchart.FanartTV.API_ALBUMS + "thereleasegroupid", self.RESPONSE_ERROR, ) - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(album, self.settings, [])) def test_fanarttv_returns_no_result_with_malformed_response(self): @@ -700,7 +704,7 @@ class FanartTVTest(UseThePlugin): fetchart.FanartTV.API_ALBUMS + "thereleasegroupid", self.RESPONSE_MALFORMED, ) - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(album, self.settings, [])) def test_fanarttv_only_other_images(self): @@ -710,7 +714,7 @@ class FanartTVTest(UseThePlugin): fetchart.FanartTV.API_ALBUMS + "thereleasegroupid", self.RESPONSE_NO_ART, ) - with self.assertRaises(StopIteration): + with pytest.raises(StopIteration): next(self.source.get(album, self.settings, [])) @@ -731,16 +735,12 @@ class ArtImporterTest(UseThePlugin): self.plugin.art_for_album = art_for_album # Test library. 
- self.libpath = os.path.join(self.temp_dir, b"tmplib.blb") - self.libdir = os.path.join(self.temp_dir, b"tmplib") - os.mkdir(syspath(self.libdir)) os.mkdir(syspath(os.path.join(self.libdir, b"album"))) itempath = os.path.join(self.libdir, b"album", b"test.mp3") shutil.copyfile( syspath(os.path.join(_common.RSRC, b"full.mp3")), syspath(itempath), ) - self.lib = library.Library(self.libpath) self.i = _common.item() self.i.path = itempath self.album = self.lib.add_album([self.i]) @@ -763,7 +763,6 @@ class ArtImporterTest(UseThePlugin): self.task.set_choice(AlbumMatch(0, info, {}, set(), set())) def tearDown(self): - self.lib._connection().close() super().tearDown() self.plugin.art_for_album = self.old_afa @@ -779,13 +778,12 @@ class ArtImporterTest(UseThePlugin): artpath = self.lib.albums()[0].artpath if should_exist: - self.assertEqual( - artpath, - os.path.join(os.path.dirname(self.i.path), b"cover.jpg"), + assert artpath == os.path.join( + os.path.dirname(self.i.path), b"cover.jpg" ) self.assertExists(artpath) else: - self.assertEqual(artpath, None) + assert artpath is None return artpath def test_fetch_art(self): @@ -805,9 +803,13 @@ class ArtImporterTest(UseThePlugin): self.assertExists(self.art_file) def test_delete_original_file(self): - self.plugin.src_removed = True - self._fetch_art(True) - self.assertNotExists(self.art_file) + prev_move = config["import"]["move"].get() + try: + config["import"]["move"] = True + self._fetch_art(True) + self.assertNotExists(self.art_file) + finally: + config["import"]["move"] = prev_move def test_do_not_delete_original_if_already_in_place(self): artdest = os.path.join(os.path.dirname(self.i.path), b"cover.jpg") @@ -863,18 +865,18 @@ class ArtForAlbumTest(UseThePlugin): fetchart.FileSystem.get = self.old_fs_source_get super().tearDown() - def _assertImageIsValidArt(self, image_file, should_exist): # noqa + def assertImageIsValidArt(self, image_file, should_exist): self.assertExists(image_file) self.image_file = image_file candidate = self.plugin.art_for_album(self.album, [""], True) if should_exist: - self.assertNotEqual(candidate, None) - self.assertEqual(candidate.path, self.image_file) + assert candidate is not None + assert candidate.path == self.image_file self.assertExists(candidate.path) else: - self.assertIsNone(candidate) + assert candidate is None def _assert_image_operated(self, image_file, operation, should_operate): self.image_file = image_file @@ -882,7 +884,7 @@ class ArtForAlbumTest(UseThePlugin): ArtResizer.shared, operation, return_value=self.image_file ) as mock_operation: self.plugin.art_for_album(self.album, [""], True) - self.assertEqual(mock_operation.called, should_operate) + assert mock_operation.called == should_operate def _require_backend(self): """Skip the test if the art resizer doesn't have ImageMagick or @@ -894,42 +896,42 @@ class ArtForAlbumTest(UseThePlugin): def test_respect_minwidth(self): self._require_backend() self.plugin.minwidth = 300 - self._assertImageIsValidArt(self.IMG_225x225, False) - self._assertImageIsValidArt(self.IMG_348x348, True) + self.assertImageIsValidArt(self.IMG_225x225, False) + self.assertImageIsValidArt(self.IMG_348x348, True) def test_respect_enforce_ratio_yes(self): self._require_backend() self.plugin.enforce_ratio = True - self._assertImageIsValidArt(self.IMG_500x490, False) - self._assertImageIsValidArt(self.IMG_225x225, True) + self.assertImageIsValidArt(self.IMG_500x490, False) + self.assertImageIsValidArt(self.IMG_225x225, True) def test_respect_enforce_ratio_no(self): 
self.plugin.enforce_ratio = False - self._assertImageIsValidArt(self.IMG_500x490, True) + self.assertImageIsValidArt(self.IMG_500x490, True) def test_respect_enforce_ratio_px_above(self): self._require_backend() self.plugin.enforce_ratio = True self.plugin.margin_px = 5 - self._assertImageIsValidArt(self.IMG_500x490, False) + self.assertImageIsValidArt(self.IMG_500x490, False) def test_respect_enforce_ratio_px_below(self): self._require_backend() self.plugin.enforce_ratio = True self.plugin.margin_px = 15 - self._assertImageIsValidArt(self.IMG_500x490, True) + self.assertImageIsValidArt(self.IMG_500x490, True) def test_respect_enforce_ratio_percent_above(self): self._require_backend() self.plugin.enforce_ratio = True self.plugin.margin_percent = (500 - 490) / 500 * 0.5 - self._assertImageIsValidArt(self.IMG_500x490, False) + self.assertImageIsValidArt(self.IMG_500x490, False) def test_respect_enforce_ratio_percent_below(self): self._require_backend() self.plugin.enforce_ratio = True self.plugin.margin_percent = (500 - 490) / 500 * 1.5 - self._assertImageIsValidArt(self.IMG_500x490, True) + self.assertImageIsValidArt(self.IMG_500x490, True) def test_resize_if_necessary(self): self._require_backend() @@ -946,7 +948,7 @@ class ArtForAlbumTest(UseThePlugin): self._require_backend() self.plugin.max_filesize = self.IMG_225x225_SIZE self._assert_image_operated(self.IMG_225x225, self.RESIZE_OP, False) - self._assertImageIsValidArt(self.IMG_225x225, True) + self.assertImageIsValidArt(self.IMG_225x225, True) def test_fileresize_no_scale(self): self._require_backend() @@ -977,14 +979,14 @@ class ArtForAlbumTest(UseThePlugin): self._assert_image_operated(self.IMG_348x348, self.RESIZE_OP, True) -class DeprecatedConfigTest(_common.TestCase): +class DeprecatedConfigTest(BeetsTestCase): """While refactoring the plugin, the remote_priority option was deprecated, and a new codepath should translate its effect. Check that it actually does so. 
""" # If we subclassed UseThePlugin, the configuration change would either be - # overwritten by _common.TestCase or be set after constructing the + # overwritten by BeetsTestCase or be set after constructing the # plugin object def setUp(self): super().setUp() @@ -992,17 +994,17 @@ class DeprecatedConfigTest(_common.TestCase): self.plugin = fetchart.FetchArtPlugin() def test_moves_filesystem_to_end(self): - self.assertEqual(type(self.plugin.sources[-1]), fetchart.FileSystem) + assert isinstance(self.plugin.sources[-1], fetchart.FileSystem) -class EnforceRatioConfigTest(_common.TestCase): +class EnforceRatioConfigTest(BeetsTestCase): """Throw some data at the regexes.""" def _load_with_config(self, values, should_raise): if should_raise: for v in values: config["fetchart"]["enforce_ratio"] = v - with self.assertRaises(confuse.ConfigValueError): + with pytest.raises(confuse.ConfigValueError): fetchart.FetchArtPlugin() else: for v in values: @@ -1016,11 +1018,3 @@ class EnforceRatioConfigTest(_common.TestCase): def test_percent(self): self._load_with_config("0% 0.00% 5.1% 5% 100%".split(), False) self._load_with_config("00% 1.234% foo5% 100.1%".split(), True) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_aura.py b/test/plugins/test_aura.py index c0a76b1c5..f4535c738 100644 --- a/test/plugins/test_aura.py +++ b/test/plugins/test_aura.py @@ -1,7 +1,7 @@ import os from http import HTTPStatus from pathlib import Path -from typing import Any, Dict, Optional +from typing import Any, Optional import pytest from flask.testing import Client @@ -59,8 +59,8 @@ class TestAuraResponse: """Return a callback accepting `endpoint` and `params` parameters.""" def get( - endpoint: str, params: Dict[str, str] - ) -> Optional[Dict[str, Any]]: + endpoint: str, params: dict[str, str] + ) -> Optional[dict[str, Any]]: """Add additional `params` and GET the given endpoint. 
`include` parameter is added to every call to check that the @@ -91,6 +91,7 @@ class TestAuraResponse: "artist": item.artist, "size": Path(os.fsdecode(item.path)).stat().st_size, "title": item.title, + "track": 1, }, "relationships": { "albums": {"data": [{"id": str(album.id), "type": "album"}]}, diff --git a/test/plugins/test_autobpm.py b/test/plugins/test_autobpm.py new file mode 100644 index 000000000..6093d999f --- /dev/null +++ b/test/plugins/test_autobpm.py @@ -0,0 +1,46 @@ +import importlib.util +import os + +import pytest + +from beets.test.helper import ImportHelper, PluginMixin + +github_ci = os.environ.get("GITHUB_ACTIONS") == "true" +if not github_ci and not importlib.util.find_spec("librosa"): + pytest.skip("librosa isn't available", allow_module_level=True) + + +class TestAutoBPMPlugin(PluginMixin, ImportHelper): + plugin = "autobpm" + + @pytest.fixture(scope="class", name="lib") + def fixture_lib(self): + self.setup_beets() + + yield self.lib + + self.teardown_beets() + + @pytest.fixture(scope="class") + def item(self): + return self.add_item_fixture() + + @pytest.fixture(scope="class") + def importer(self, lib): + self.import_media = [] + self.prepare_album_for_import(1) + track = self.import_media[0] + track.bpm = None + track.save() + return self.setup_importer(autotag=False) + + def test_command(self, lib, item): + self.run_command("autobpm", lib=lib) + + item.load() + assert item.bpm == 117 + + def test_import(self, lib, importer): + importer.run() + + assert lib.items().get().bpm == 117 diff --git a/test/plugins/test_bareasc.py b/test/plugins/test_bareasc.py index feb99953c..e699a3dcf 100644 --- a/test/plugins/test_bareasc.py +++ b/test/plugins/test_bareasc.py @@ -3,21 +3,20 @@ """Tests for the 'bareasc' plugin.""" -import unittest - from beets import logging -from beets.test.helper import TestHelper, capture_stdout +from beets.test.helper import PluginTestCase, capture_stdout -class BareascPluginTest(unittest.TestCase, TestHelper): +class BareascPluginTest(PluginTestCase): """Test bare ASCII query matching.""" + plugin = "bareasc" + def setUp(self): """Set up test environment for bare ASCII query matching.""" - self.setup_beets() + super().setUp() self.log = logging.getLogger("beets.web") self.config["bareasc"]["prefix"] = "#" - self.load_plugins("bareasc") # Add library elements. Note that self.lib.add overrides any "id=" # and assigns the next free id number. 
@@ -27,9 +26,6 @@ class BareascPluginTest(unittest.TestCase, TestHelper): self.add_item(title="without umlaut or e", artist="Bruggen") self.add_item(title="without umlaut with e", artist="Brueggen") - def tearDown(self): - self.teardown_beets() - def test_bareasc_search(self): test_cases = [ ( @@ -65,16 +61,14 @@ class BareascPluginTest(unittest.TestCase, TestHelper): for query, expected_titles in test_cases: with self.subTest(query=query, expected_titles=expected_titles): items = self.lib.items(query) - self.assertListEqual( - [item.title for item in items], expected_titles - ) + assert [item.title for item in items] == expected_titles def test_bareasc_list_output(self): """Bare-ASCII version of list command - check output.""" with capture_stdout() as output: self.run_command("bareasc", "with accents") - self.assertIn("Antonin Dvorak", output.getvalue()) + assert "Antonin Dvorak" in output.getvalue() def test_bareasc_format_output(self): """Bare-ASCII version of list -f command - check output.""" @@ -83,13 +77,4 @@ class BareascPluginTest(unittest.TestCase, TestHelper): "bareasc", "with accents", "-f", "$artist:: $title" ) - self.assertEqual("Antonin Dvorak:: with accents\n", output.getvalue()) - - -def suite(): - """loader.""" - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert "Antonin Dvorak:: with accents\n" == output.getvalue() diff --git a/test/plugins/test_beatport.py b/test/plugins/test_beatport.py index fd75f2154..d072340b5 100644 --- a/test/plugins/test_beatport.py +++ b/test/plugins/test_beatport.py @@ -12,19 +12,16 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Tests for the 'beatport' plugin. -""" +"""Tests for the 'beatport' plugin.""" -import unittest from datetime import timedelta -from beets import library from beets.test import _common -from beets.test.helper import TestHelper +from beets.test.helper import BeetsTestCase from beetsplug import beatport -class BeatportTest(_common.TestCase, TestHelper): +class BeatportTest(BeetsTestCase): def _make_release_response(self): """Returns a dict that mimics a response from the beatport API. @@ -450,9 +447,7 @@ class BeatportTest(_common.TestCase, TestHelper): return results def setUp(self): - self.setup_beets() - self.load_plugins("beatport") - self.lib = library.Library(":memory:") + super().setUp() # Set up 'album'. response_release = self._make_release_response() @@ -468,10 +463,6 @@ class BeatportTest(_common.TestCase, TestHelper): # Set up 'test_tracks' self.test_tracks = self.test_album.items() - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - def mk_test_album(self): items = [_common.item() for _ in range(6)] for item in items: @@ -538,38 +529,32 @@ class BeatportTest(_common.TestCase, TestHelper): # Test BeatportRelease. 
def test_album_name_applied(self): - self.assertEqual(self.album.name, self.test_album["album"]) + assert self.album.name == self.test_album["album"] def test_catalog_number_applied(self): - self.assertEqual( - self.album.catalog_number, self.test_album["catalognum"] - ) + assert self.album.catalog_number == self.test_album["catalognum"] def test_label_applied(self): - self.assertEqual(self.album.label_name, self.test_album["label"]) + assert self.album.label_name == self.test_album["label"] def test_category_applied(self): - self.assertEqual(self.album.category, "Release") + assert self.album.category == "Release" def test_album_url_applied(self): - self.assertEqual( - self.album.url, "https://beatport.com/release/charade/1742984" - ) + assert self.album.url == "https://beatport.com/release/charade/1742984" # Test BeatportTrack. def test_title_applied(self): for track, test_track in zip(self.tracks, self.test_tracks): - self.assertEqual(track.name, test_track.title) + assert track.name == test_track.title def test_mix_name_applied(self): for track, test_track in zip(self.tracks, self.test_tracks): - self.assertEqual(track.mix_name, test_track.mix_name) + assert track.mix_name == test_track.mix_name def test_length_applied(self): for track, test_track in zip(self.tracks, self.test_tracks): - self.assertEqual( - int(track.length.total_seconds()), int(test_track.length) - ) + assert int(track.length.total_seconds()) == int(test_track.length) def test_track_url_applied(self): # Specify beatport ids here because an 'item.id' is beets-internal. @@ -583,25 +568,24 @@ class BeatportTest(_common.TestCase, TestHelper): ] # Concatenate with 'id' to pass strict equality test. for track, test_track, id in zip(self.tracks, self.test_tracks, ids): - self.assertEqual( - track.url, - "https://beatport.com/track/" + test_track.url + "/" + str(id), + assert ( + track.url == f"https://beatport.com/track/{test_track.url}/{id}" ) def test_bpm_applied(self): for track, test_track in zip(self.tracks, self.test_tracks): - self.assertEqual(track.bpm, test_track.bpm) + assert track.bpm == test_track.bpm def test_initial_key_applied(self): for track, test_track in zip(self.tracks, self.test_tracks): - self.assertEqual(track.initial_key, test_track.initial_key) + assert track.initial_key == test_track.initial_key def test_genre_applied(self): for track, test_track in zip(self.tracks, self.test_tracks): - self.assertEqual(track.genre, test_track.genre) + assert track.genre == test_track.genre -class BeatportResponseEmptyTest(_common.TestCase, TestHelper): +class BeatportResponseEmptyTest(BeetsTestCase): def _make_tracks_response(self): results = [ { @@ -628,9 +612,7 @@ class BeatportResponseEmptyTest(_common.TestCase, TestHelper): return results def setUp(self): - self.setup_beets() - self.load_plugins("beatport") - self.lib = library.Library(":memory:") + super().setUp() # Set up 'tracks'. self.response_tracks = self._make_tracks_response() @@ -639,14 +621,10 @@ class BeatportResponseEmptyTest(_common.TestCase, TestHelper): # Make alias to be congruent with class `BeatportTest`. self.test_tracks = self.response_tracks - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - def test_response_tracks_empty(self): response_tracks = [] tracks = [beatport.BeatportTrack(t) for t in response_tracks] - self.assertEqual(tracks, []) + assert tracks == [] def test_sub_genre_empty_fallback(self): """No 'sub_genre' is provided. 
Test if fallback to 'genre' works.""" @@ -655,9 +633,7 @@ class BeatportResponseEmptyTest(_common.TestCase, TestHelper): self.test_tracks[0]["subGenres"] = [] - self.assertEqual( - tracks[0].genre, self.test_tracks[0]["genres"][0]["name"] - ) + assert tracks[0].genre == self.test_tracks[0]["genres"][0]["name"] def test_genre_empty(self): """No 'genre' is provided. Test if 'sub_genre' is applied.""" @@ -666,14 +642,4 @@ class BeatportResponseEmptyTest(_common.TestCase, TestHelper): self.test_tracks[0]["genres"] = [] - self.assertEqual( - tracks[0].genre, self.test_tracks[0]["subGenres"][0]["name"] - ) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert tracks[0].genre == self.test_tracks[0]["subGenres"][0]["name"] diff --git a/test/plugins/test_bucket.py b/test/plugins/test_bucket.py index 4f43f5ef5..e3611912a 100644 --- a/test/plugins/test_bucket.py +++ b/test/plugins/test_bucket.py @@ -14,22 +14,20 @@ """Tests for the 'bucket' plugin.""" +from datetime import datetime -import unittest +import pytest from beets import config, ui -from beets.test.helper import TestHelper +from beets.test.helper import BeetsTestCase from beetsplug import bucket -class BucketPluginTest(unittest.TestCase, TestHelper): +class BucketPluginTest(BeetsTestCase): def setUp(self): - self.setup_beets() + super().setUp() self.plugin = bucket.BucketPlugin() - def tearDown(self): - self.teardown_beets() - def _setup_config( self, bucket_year=[], @@ -47,74 +45,75 @@ class BucketPluginTest(unittest.TestCase, TestHelper): """If a single year is given, range starts from this year and stops at the year preceding the one of next bucket.""" self._setup_config(bucket_year=["1950s", "1970s"]) - self.assertEqual(self.plugin._tmpl_bucket("1959"), "1950s") - self.assertEqual(self.plugin._tmpl_bucket("1969"), "1950s") + assert self.plugin._tmpl_bucket("1959") == "1950s" + assert self.plugin._tmpl_bucket("1969") == "1950s" def test_year_single_year_last_folder(self): """If a single year is given for the last bucket, extend it to current year.""" self._setup_config(bucket_year=["1950", "1970"]) - self.assertEqual(self.plugin._tmpl_bucket("2014"), "1970") - self.assertEqual(self.plugin._tmpl_bucket("2025"), "2025") + assert self.plugin._tmpl_bucket("2014") == "1970" + next_year = datetime.now().year + 1 + assert self.plugin._tmpl_bucket(str(next_year)) == str(next_year) def test_year_two_years(self): """Buckets can be named with the 'from-to' syntax.""" self._setup_config(bucket_year=["1950-59", "1960-1969"]) - self.assertEqual(self.plugin._tmpl_bucket("1959"), "1950-59") - self.assertEqual(self.plugin._tmpl_bucket("1969"), "1960-1969") + assert self.plugin._tmpl_bucket("1959") == "1950-59" + assert self.plugin._tmpl_bucket("1969") == "1960-1969" def test_year_multiple_years(self): """Buckets can be named by listing all the years""" self._setup_config(bucket_year=["1950,51,52,53"]) - self.assertEqual(self.plugin._tmpl_bucket("1953"), "1950,51,52,53") - self.assertEqual(self.plugin._tmpl_bucket("1974"), "1974") + assert self.plugin._tmpl_bucket("1953") == "1950,51,52,53" + assert self.plugin._tmpl_bucket("1974") == "1974" def test_year_out_of_range(self): """If no range match, return the year""" self._setup_config(bucket_year=["1950-59", "1960-69"]) - self.assertEqual(self.plugin._tmpl_bucket("1974"), "1974") + assert self.plugin._tmpl_bucket("1974") == "1974" self._setup_config(bucket_year=[]) - 
self.assertEqual(self.plugin._tmpl_bucket("1974"), "1974") + assert self.plugin._tmpl_bucket("1974") == "1974" def test_year_out_of_range_extrapolate(self): """If no defined range match, extrapolate all ranges using the most common syntax amongst existing buckets and return the matching one.""" self._setup_config(bucket_year=["1950-59", "1960-69"], extrapolate=True) - self.assertEqual(self.plugin._tmpl_bucket("1914"), "1910-19") + assert self.plugin._tmpl_bucket("1914") == "1910-19" # pick single year format self._setup_config( bucket_year=["1962-81", "2002", "2012"], extrapolate=True ) - self.assertEqual(self.plugin._tmpl_bucket("1983"), "1982") + assert self.plugin._tmpl_bucket("1983") == "1982" # pick from-end format self._setup_config( bucket_year=["1962-81", "2002", "2012-14"], extrapolate=True ) - self.assertEqual(self.plugin._tmpl_bucket("1983"), "1982-01") + assert self.plugin._tmpl_bucket("1983") == "1982-01" # extrapolate add ranges, but never modifies existing ones self._setup_config( bucket_year=["1932", "1942", "1952", "1962-81", "2002"], extrapolate=True, ) - self.assertEqual(self.plugin._tmpl_bucket("1975"), "1962-81") + assert self.plugin._tmpl_bucket("1975") == "1962-81" def test_alpha_all_chars(self): """Alphabet buckets can be named by listing all their chars""" self._setup_config(bucket_alpha=["ABCD", "FGH", "IJKL"]) - self.assertEqual(self.plugin._tmpl_bucket("garry"), "FGH") + assert self.plugin._tmpl_bucket("garry") == "FGH" def test_alpha_first_last_chars(self): """Alphabet buckets can be named by listing the 'from-to' syntax""" self._setup_config(bucket_alpha=["0->9", "A->D", "F-H", "I->Z"]) - self.assertEqual(self.plugin._tmpl_bucket("garry"), "F-H") - self.assertEqual(self.plugin._tmpl_bucket("2pac"), "0->9") + assert self.plugin._tmpl_bucket("garry") == "F-H" + assert self.plugin._tmpl_bucket("2pac") == "0->9" def test_alpha_out_of_range(self): """If no range match, return the initial""" self._setup_config(bucket_alpha=["ABCD", "FGH", "IJKL"]) - self.assertEqual(self.plugin._tmpl_bucket("errol"), "E") + assert self.plugin._tmpl_bucket("errol") == "E" self._setup_config(bucket_alpha=[]) - self.assertEqual(self.plugin._tmpl_bucket("errol"), "E") + assert self.plugin._tmpl_bucket("errol") == "E" def test_alpha_regex(self): """Check regex is used""" @@ -122,10 +121,10 @@ class BucketPluginTest(unittest.TestCase, TestHelper): bucket_alpha=["foo", "bar"], bucket_alpha_regex={"foo": "^[a-d]", "bar": "^[e-z]"}, ) - self.assertEqual(self.plugin._tmpl_bucket("alpha"), "foo") - self.assertEqual(self.plugin._tmpl_bucket("delta"), "foo") - self.assertEqual(self.plugin._tmpl_bucket("zeta"), "bar") - self.assertEqual(self.plugin._tmpl_bucket("Alpha"), "A") + assert self.plugin._tmpl_bucket("alpha") == "foo" + assert self.plugin._tmpl_bucket("delta") == "foo" + assert self.plugin._tmpl_bucket("zeta") == "bar" + assert self.plugin._tmpl_bucket("Alpha") == "A" def test_alpha_regex_mix(self): """Check mixing regex and non-regex is possible""" @@ -133,46 +132,38 @@ class BucketPluginTest(unittest.TestCase, TestHelper): bucket_alpha=["A - D", "E - L"], bucket_alpha_regex={"A - D": "^[0-9a-dA-D…äÄ]"}, ) - self.assertEqual(self.plugin._tmpl_bucket("alpha"), "A - D") - self.assertEqual(self.plugin._tmpl_bucket("Ärzte"), "A - D") - self.assertEqual(self.plugin._tmpl_bucket("112"), "A - D") - self.assertEqual(self.plugin._tmpl_bucket("…and Oceans"), "A - D") - self.assertEqual(self.plugin._tmpl_bucket("Eagles"), "E - L") + assert self.plugin._tmpl_bucket("alpha") == "A - D" + assert 
self.plugin._tmpl_bucket("Ärzte") == "A - D" + assert self.plugin._tmpl_bucket("112") == "A - D" + assert self.plugin._tmpl_bucket("…and Oceans") == "A - D" + assert self.plugin._tmpl_bucket("Eagles") == "E - L" def test_bad_alpha_range_def(self): """If bad alpha range definition, a UserError is raised.""" - with self.assertRaises(ui.UserError): + with pytest.raises(ui.UserError): self._setup_config(bucket_alpha=["$%"]) def test_bad_year_range_def_no4digits(self): """If bad year range definition, a UserError is raised. Range origin must be expressed on 4 digits. """ - with self.assertRaises(ui.UserError): + with pytest.raises(ui.UserError): self._setup_config(bucket_year=["62-64"]) def test_bad_year_range_def_nodigits(self): """If bad year range definition, a UserError is raised. At least the range origin must be declared. """ - with self.assertRaises(ui.UserError): + with pytest.raises(ui.UserError): self._setup_config(bucket_year=["nodigits"]) def check_span_from_str(self, sstr, dfrom, dto): d = bucket.span_from_str(sstr) - self.assertEqual(dfrom, d["from"]) - self.assertEqual(dto, d["to"]) + assert dfrom == d["from"] + assert dto == d["to"] def test_span_from_str(self): self.check_span_from_str("1980 2000", 1980, 2000) self.check_span_from_str("1980 00", 1980, 2000) self.check_span_from_str("1930 00", 1930, 2000) self.check_span_from_str("1930 50", 1930, 1950) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_convert.py b/test/plugins/test_convert.py index bc13b6fec..e43970dda 100644 --- a/test/plugins/test_convert.py +++ b/test/plugins/test_convert.py @@ -19,12 +19,21 @@ import re import sys import unittest +import pytest from mediafile import MediaFile from beets import util -from beets.test import _common, helper -from beets.test.helper import capture_log, control_stdin +from beets.library import Item +from beets.test import _common +from beets.test.helper import ( + AsIsImporterMixin, + ImportHelper, + PluginTestCase, + capture_log, + control_stdin, +) from beets.util import bytestring_path, displayable_path +from beetsplug import convert def shell_quote(text): @@ -33,7 +42,7 @@ def shell_quote(text): return shlex.quote(text) -class TestHelper(helper.TestHelper): +class ConvertMixin: def tagged_copy_cmd(self, tag): """Return a conversion command that copies files and appends `tag` to the copy. @@ -49,7 +58,7 @@ class TestHelper(helper.TestHelper): shell_quote(sys.executable), shell_quote(stub), tag ) - def assertFileTag(self, path, tag): # noqa + def assertFileTag(self, path, tag): """Assert that the path is a file and the files content ends with `tag`. """ @@ -58,15 +67,11 @@ class TestHelper(helper.TestHelper): self.assertIsFile(path) with open(path, "rb") as f: f.seek(-len(display_tag), os.SEEK_END) - self.assertEqual( - f.read(), - tag, - "{} is not tagged with {}".format( - displayable_path(path), display_tag - ), - ) + assert ( + f.read() == tag + ), f"{displayable_path(path)} is not tagged with {display_tag}" - def assertNoFileTag(self, path, tag): # noqa + def assertNoFileTag(self, path, tag): """Assert that the path is a file and the files content does not end with `tag`. 
""" @@ -75,22 +80,20 @@ class TestHelper(helper.TestHelper): self.assertIsFile(path) with open(path, "rb") as f: f.seek(-len(tag), os.SEEK_END) - self.assertNotEqual( - f.read(), - tag, - "{} is unexpectedly tagged with {}".format( - displayable_path(path), display_tag - ), - ) + assert ( + f.read() != tag + ), f"{displayable_path(path)} is unexpectedly tagged with {display_tag}" + + +class ConvertTestCase(ConvertMixin, PluginTestCase): + db_on_disk = True + plugin = "convert" @_common.slow_test() -class ImportConvertTest(_common.TestCase, TestHelper): +class ImportConvertTest(AsIsImporterMixin, ImportHelper, ConvertTestCase): def setUp(self): - self.setup_beets(disk=True) # Converter is threaded - self.importer = self.create_importer() - self.load_plugins("convert") - + super().setUp() self.config["convert"] = { "dest": os.path.join(self.temp_dir, b"convert"), "command": self.tagged_copy_cmd("convert"), @@ -100,12 +103,8 @@ class ImportConvertTest(_common.TestCase, TestHelper): "quiet": False, } - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - def test_import_converted(self): - self.importer.run() + self.run_asis_importer() item = self.lib.items().get() self.assertFileTag(item.path, "convert") @@ -114,24 +113,20 @@ class ImportConvertTest(_common.TestCase, TestHelper): def test_import_original_on_convert_error(self): # `false` exits with non-zero code self.config["convert"]["command"] = "false" - self.importer.run() + self.run_asis_importer() item = self.lib.items().get() - self.assertIsNotNone(item) + assert item is not None self.assertIsFile(item.path) def test_delete_originals(self): self.config["convert"]["delete_originals"] = True - self.importer.run() + self.run_asis_importer() for path in self.importer.paths: for root, dirnames, filenames in os.walk(path): - self.assertEqual( - len(fnmatch.filter(filenames, "*.mp3")), - 0, - "Non-empty import directory {}".format( - util.displayable_path(path) - ), - ) + assert ( + len(fnmatch.filter(filenames, "*.mp3")) == 0 + ), f"Non-empty import directory {util.displayable_path(path)}" def get_count_of_import_files(self): import_file_count = 0 @@ -163,12 +158,11 @@ class ConvertCommand: @_common.slow_test() -class ConvertCliTest(_common.TestCase, TestHelper, ConvertCommand): +class ConvertCliTest(ConvertTestCase, ConvertCommand): def setUp(self): - self.setup_beets(disk=True) # Converter is threaded + super().setUp() self.album = self.add_album_fixture(ext="ogg") self.item = self.album.items()[0] - self.load_plugins("convert") self.convert_dest = bytestring_path( os.path.join(self.temp_dir, b"convert_dest") @@ -187,10 +181,6 @@ class ConvertCliTest(_common.TestCase, TestHelper, ConvertCommand): }, } - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - def test_convert(self): with control_stdin("y"): self.run_convert() @@ -209,13 +199,13 @@ class ConvertCliTest(_common.TestCase, TestHelper, ConvertCommand): self.assertNotExists(converted) def test_convert_keep_new(self): - self.assertEqual(os.path.splitext(self.item.path)[1], b".ogg") + assert os.path.splitext(self.item.path)[1] == b".ogg" with control_stdin("y"): self.run_convert("--keep-new") self.item.load() - self.assertEqual(os.path.splitext(self.item.path)[1], b".mp3") + assert os.path.splitext(self.item.path)[1] == b".mp3" def test_format_option(self): with control_stdin("y"): @@ -235,14 +225,14 @@ class ConvertCliTest(_common.TestCase, TestHelper, ConvertCommand): self.run_convert() converted = os.path.join(self.convert_dest, b"converted.mp3") 
mediafile = MediaFile(converted) - self.assertEqual(mediafile.images[0].data, image_data) + assert mediafile.images[0].data == image_data def test_skip_existing(self): converted = os.path.join(self.convert_dest, b"converted.mp3") self.touch(converted, content="XXX") self.run_convert("--yes") with open(converted) as f: - self.assertEqual(f.read(), "XXX") + assert f.read() == "XXX" def test_pretend(self): self.run_convert("--pretend") @@ -252,7 +242,7 @@ class ConvertCliTest(_common.TestCase, TestHelper, ConvertCommand): def test_empty_query(self): with capture_log("beets.convert") as logs: self.run_convert("An impossible query") - self.assertEqual(logs[0], "convert: Empty query result.") + assert logs[0] == "convert: Empty query result." def test_no_transcode_when_maxbr_set_high_and_different_formats(self): self.config["convert"]["max_bitrate"] = 5000 @@ -301,21 +291,20 @@ class ConvertCliTest(_common.TestCase, TestHelper, ConvertCommand): with control_stdin("y"): self.run_convert("--playlist", "playlist.m3u8") m3u_created = os.path.join(self.convert_dest, b"playlist.m3u8") - self.assertTrue(os.path.exists(m3u_created)) + assert os.path.exists(m3u_created) def test_playlist_pretend(self): self.run_convert("--playlist", "playlist.m3u8", "--pretend") m3u_created = os.path.join(self.convert_dest, b"playlist.m3u8") - self.assertFalse(os.path.exists(m3u_created)) + assert not os.path.exists(m3u_created) @_common.slow_test() -class NeverConvertLossyFilesTest(_common.TestCase, TestHelper, ConvertCommand): +class NeverConvertLossyFilesTest(ConvertTestCase, ConvertCommand): """Test the effect of the `never_convert_lossy_files` option.""" def setUp(self): - self.setup_beets(disk=True) # Converter is threaded - self.load_plugins("convert") + super().setUp() self.convert_dest = os.path.join(self.temp_dir, b"convert_dest") self.config["convert"] = { @@ -328,10 +317,6 @@ class NeverConvertLossyFilesTest(_common.TestCase, TestHelper, ConvertCommand): }, } - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - def test_transcode_from_lossless(self): [item] = self.add_item_fixtures(ext="flac") with control_stdin("y"): @@ -355,9 +340,19 @@ class NeverConvertLossyFilesTest(_common.TestCase, TestHelper, ConvertCommand): self.assertNoFileTag(converted, "mp3") -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) +class TestNoConvert: + """Test the effect of the `no_convert` option.""" - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + @pytest.mark.parametrize( + "config_value, should_skip", + [ + ("", False), + ("bitrate:320", False), + ("bitrate:320 format:ogg", False), + ("bitrate:320 , format:ogg", True), + ], + ) + def test_no_convert_skip(self, config_value, should_skip): + item = Item(format="ogg", bitrate=256) + convert.config["convert"]["no_convert"] = config_value + assert convert.in_no_convert(item) == should_skip diff --git a/test/plugins/test_discogs.py b/test/plugins/test_discogs.py index 6ee57dcd9..8a4609e25 100644 --- a/test/plugins/test_discogs.py +++ b/test/plugins/test_discogs.py @@ -12,20 +12,18 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Tests for discogs plugin. 
-""" +"""Tests for discogs plugin.""" -import unittest +import pytest from beets import config -from beets.test import _common from beets.test._common import Bag -from beets.test.helper import capture_log +from beets.test.helper import BeetsTestCase, capture_log from beets.util.id_extractors import extract_discogs_id_regex from beetsplug.discogs import DiscogsPlugin -class DGAlbumInfoTest(_common.TestCase): +class DGAlbumInfoTest(BeetsTestCase): def _make_release(self, tracks=None): """Returns a Bag that mimics a discogs_client.Release. The list of elements on the returned Bag is incomplete, including just @@ -96,59 +94,59 @@ class DGAlbumInfoTest(_common.TestCase): d = DiscogsPlugin().get_album_info(release) t = d.tracks - self.assertEqual(d.media, "FORMAT") - self.assertEqual(t[0].media, d.media) - self.assertEqual(t[1].media, d.media) + assert d.media == "FORMAT" + assert t[0].media == d.media + assert t[1].media == d.media def test_parse_medium_numbers_single_medium(self): release = self._make_release_from_positions(["1", "2"]) d = DiscogsPlugin().get_album_info(release) t = d.tracks - self.assertEqual(d.mediums, 1) - self.assertEqual(t[0].medium, 1) - self.assertEqual(t[0].medium_total, 2) - self.assertEqual(t[1].medium, 1) - self.assertEqual(t[0].medium_total, 2) + assert d.mediums == 1 + assert t[0].medium == 1 + assert t[0].medium_total == 2 + assert t[1].medium == 1 + assert t[0].medium_total == 2 def test_parse_medium_numbers_two_mediums(self): release = self._make_release_from_positions(["1-1", "2-1"]) d = DiscogsPlugin().get_album_info(release) t = d.tracks - self.assertEqual(d.mediums, 2) - self.assertEqual(t[0].medium, 1) - self.assertEqual(t[0].medium_total, 1) - self.assertEqual(t[1].medium, 2) - self.assertEqual(t[1].medium_total, 1) + assert d.mediums == 2 + assert t[0].medium == 1 + assert t[0].medium_total == 1 + assert t[1].medium == 2 + assert t[1].medium_total == 1 def test_parse_medium_numbers_two_mediums_two_sided(self): release = self._make_release_from_positions(["A1", "B1", "C1"]) d = DiscogsPlugin().get_album_info(release) t = d.tracks - self.assertEqual(d.mediums, 2) - self.assertEqual(t[0].medium, 1) - self.assertEqual(t[0].medium_total, 2) - self.assertEqual(t[0].medium_index, 1) - self.assertEqual(t[1].medium, 1) - self.assertEqual(t[1].medium_total, 2) - self.assertEqual(t[1].medium_index, 2) - self.assertEqual(t[2].medium, 2) - self.assertEqual(t[2].medium_total, 1) - self.assertEqual(t[2].medium_index, 1) + assert d.mediums == 2 + assert t[0].medium == 1 + assert t[0].medium_total == 2 + assert t[0].medium_index == 1 + assert t[1].medium == 1 + assert t[1].medium_total == 2 + assert t[1].medium_index == 2 + assert t[2].medium == 2 + assert t[2].medium_total == 1 + assert t[2].medium_index == 1 def test_parse_track_indices(self): release = self._make_release_from_positions(["1", "2"]) d = DiscogsPlugin().get_album_info(release) t = d.tracks - self.assertEqual(t[0].medium_index, 1) - self.assertEqual(t[0].index, 1) - self.assertEqual(t[0].medium_total, 2) - self.assertEqual(t[1].medium_index, 2) - self.assertEqual(t[1].index, 2) - self.assertEqual(t[1].medium_total, 2) + assert t[0].medium_index == 1 + assert t[0].index == 1 + assert t[0].medium_total == 2 + assert t[1].medium_index == 2 + assert t[1].index == 2 + assert t[1].medium_total == 2 def test_parse_track_indices_several_media(self): release = self._make_release_from_positions( @@ -157,19 +155,19 @@ class DGAlbumInfoTest(_common.TestCase): d = DiscogsPlugin().get_album_info(release) t = d.tracks - 
self.assertEqual(d.mediums, 3) - self.assertEqual(t[0].medium_index, 1) - self.assertEqual(t[0].index, 1) - self.assertEqual(t[0].medium_total, 2) - self.assertEqual(t[1].medium_index, 2) - self.assertEqual(t[1].index, 2) - self.assertEqual(t[1].medium_total, 2) - self.assertEqual(t[2].medium_index, 1) - self.assertEqual(t[2].index, 3) - self.assertEqual(t[2].medium_total, 1) - self.assertEqual(t[3].medium_index, 1) - self.assertEqual(t[3].index, 4) - self.assertEqual(t[3].medium_total, 1) + assert d.mediums == 3 + assert t[0].medium_index == 1 + assert t[0].index == 1 + assert t[0].medium_total == 2 + assert t[1].medium_index == 2 + assert t[1].index == 2 + assert t[1].medium_total == 2 + assert t[2].medium_index == 1 + assert t[2].index == 3 + assert t[2].medium_total == 1 + assert t[3].medium_index == 1 + assert t[3].index == 4 + assert t[3].medium_total == 1 def test_parse_position(self): """Test the conversion of discogs `position` to medium, medium_index @@ -190,31 +188,31 @@ class DGAlbumInfoTest(_common.TestCase): d = DiscogsPlugin() for position, expected in positions: - self.assertEqual(d.get_track_index(position), expected) + assert d.get_track_index(position) == expected def test_parse_tracklist_without_sides(self): """Test standard Discogs position 12.2.9#1: "without sides".""" release = self._make_release_from_positions(["1", "2", "3"]) d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 1) - self.assertEqual(len(d.tracks), 3) + assert d.mediums == 1 + assert len(d.tracks) == 3 def test_parse_tracklist_with_sides(self): """Test standard Discogs position 12.2.9#2: "with sides".""" release = self._make_release_from_positions(["A1", "A2", "B1", "B2"]) d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 1) # 2 sides = 1 LP - self.assertEqual(len(d.tracks), 4) + assert d.mediums == 1 # 2 sides = 1 LP + assert len(d.tracks) == 4 def test_parse_tracklist_multiple_lp(self): """Test standard Discogs position 12.2.9#3: "multiple LP".""" release = self._make_release_from_positions(["A1", "A2", "B1", "C1"]) d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 2) # 3 sides = 1 LP + 1 LP - self.assertEqual(len(d.tracks), 4) + assert d.mediums == 2 # 3 sides = 1 LP + 1 LP + assert len(d.tracks) == 4 def test_parse_tracklist_multiple_cd(self): """Test standard Discogs position 12.2.9#4: "multiple CDs".""" @@ -223,56 +221,56 @@ class DGAlbumInfoTest(_common.TestCase): ) d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 3) - self.assertEqual(len(d.tracks), 4) + assert d.mediums == 3 + assert len(d.tracks) == 4 def test_parse_tracklist_non_standard(self): """Test non standard Discogs position.""" release = self._make_release_from_positions(["I", "II", "III", "IV"]) d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 1) - self.assertEqual(len(d.tracks), 4) + assert d.mediums == 1 + assert len(d.tracks) == 4 def test_parse_tracklist_subtracks_dot(self): """Test standard Discogs position 12.2.9#5: "sub tracks, dots".""" release = self._make_release_from_positions(["1", "2.1", "2.2", "3"]) d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 1) - self.assertEqual(len(d.tracks), 3) + assert d.mediums == 1 + assert len(d.tracks) == 3 release = self._make_release_from_positions( ["A1", "A2.1", "A2.2", "A3"] ) d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 1) - self.assertEqual(len(d.tracks), 3) + assert d.mediums == 1 + assert len(d.tracks) == 3 
def test_parse_tracklist_subtracks_letter(self): """Test standard Discogs position 12.2.9#5: "sub tracks, letter".""" release = self._make_release_from_positions(["A1", "A2a", "A2b", "A3"]) d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 1) - self.assertEqual(len(d.tracks), 3) + assert d.mediums == 1 + assert len(d.tracks) == 3 release = self._make_release_from_positions( ["A1", "A2.a", "A2.b", "A3"] ) d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 1) - self.assertEqual(len(d.tracks), 3) + assert d.mediums == 1 + assert len(d.tracks) == 3 def test_parse_tracklist_subtracks_extra_material(self): """Test standard Discogs position 12.2.9#6: "extra material".""" release = self._make_release_from_positions(["1", "2", "Video 1"]) d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 2) - self.assertEqual(len(d.tracks), 3) + assert d.mediums == 2 + assert len(d.tracks) == 3 def test_parse_tracklist_subtracks_indices(self): """Test parsing of subtracks that include index tracks.""" @@ -283,10 +281,10 @@ class DGAlbumInfoTest(_common.TestCase): release.data["tracklist"][1]["title"] = "TRACK GROUP TITLE" d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 1) - self.assertEqual(d.tracks[0].disctitle, "MEDIUM TITLE") - self.assertEqual(len(d.tracks), 1) - self.assertEqual(d.tracks[0].title, "TRACK GROUP TITLE") + assert d.mediums == 1 + assert d.tracks[0].disctitle == "MEDIUM TITLE" + assert len(d.tracks) == 1 + assert d.tracks[0].title == "TRACK GROUP TITLE" def test_parse_tracklist_subtracks_nested_logical(self): """Test parsing of subtracks defined inside a index track that are @@ -301,9 +299,9 @@ class DGAlbumInfoTest(_common.TestCase): ] d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 1) - self.assertEqual(len(d.tracks), 3) - self.assertEqual(d.tracks[1].title, "TRACK GROUP TITLE") + assert d.mediums == 1 + assert len(d.tracks) == 3 + assert d.tracks[1].title == "TRACK GROUP TITLE" def test_parse_tracklist_subtracks_nested_physical(self): """Test parsing of subtracks defined inside a index track that are @@ -318,10 +316,10 @@ class DGAlbumInfoTest(_common.TestCase): ] d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 1) - self.assertEqual(len(d.tracks), 4) - self.assertEqual(d.tracks[1].title, "TITLE ONE") - self.assertEqual(d.tracks[2].title, "TITLE TWO") + assert d.mediums == 1 + assert len(d.tracks) == 4 + assert d.tracks[1].title == "TITLE ONE" + assert d.tracks[2].title == "TITLE TWO" def test_parse_tracklist_disctitles(self): """Test parsing of index tracks that act as disc titles.""" @@ -334,11 +332,11 @@ class DGAlbumInfoTest(_common.TestCase): release.data["tracklist"][3]["title"] = "MEDIUM TITLE CD2" d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.mediums, 2) - self.assertEqual(d.tracks[0].disctitle, "MEDIUM TITLE CD1") - self.assertEqual(d.tracks[1].disctitle, "MEDIUM TITLE CD1") - self.assertEqual(d.tracks[2].disctitle, "MEDIUM TITLE CD2") - self.assertEqual(len(d.tracks), 3) + assert d.mediums == 2 + assert d.tracks[0].disctitle == "MEDIUM TITLE CD1" + assert d.tracks[1].disctitle == "MEDIUM TITLE CD1" + assert d.tracks[2].disctitle == "MEDIUM TITLE CD2" + assert len(d.tracks) == 3 def test_parse_minimal_release(self): """Test parsing of a release with the minimal amount of information.""" @@ -355,9 +353,9 @@ class DGAlbumInfoTest(_common.TestCase): artists=[Bag(data=d) for d in data["artists"]], ) d = 
DiscogsPlugin().get_album_info(release) - self.assertEqual(d.artist, "ARTIST NAME") - self.assertEqual(d.album, "TITLE") - self.assertEqual(len(d.tracks), 1) + assert d.artist == "ARTIST NAME" + assert d.album == "TITLE" + assert len(d.tracks) == 1 def test_parse_release_without_required_fields(self): """Test parsing of a release that does not have the required fields.""" @@ -365,8 +363,8 @@ class DGAlbumInfoTest(_common.TestCase): with capture_log() as logs: d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d, None) - self.assertIn("Release does not contain the required fields", logs[0]) + assert d is None + assert "Release does not contain the required fields" in logs[0] def test_album_for_id(self): """Test parsing for a valid Discogs release_id""" @@ -397,15 +395,15 @@ class DGAlbumInfoTest(_common.TestCase): match = extract_discogs_id_regex(test_pattern) if not match: match = "" - self.assertEqual(match, expected) + assert match == expected def test_default_genre_style_settings(self): """Test genre default settings, genres to genre, styles to style""" release = self._make_release_from_positions(["1", "2"]) d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.genre, "GENRE1, GENRE2") - self.assertEqual(d.style, "STYLE1, STYLE2") + assert d.genre == "GENRE1, GENRE2" + assert d.style == "STYLE1, STYLE2" def test_append_style_to_genre(self): """Test appending style to genre if config enabled""" @@ -413,8 +411,8 @@ class DGAlbumInfoTest(_common.TestCase): release = self._make_release_from_positions(["1", "2"]) d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.genre, "GENRE1, GENRE2, STYLE1, STYLE2") - self.assertEqual(d.style, "STYLE1, STYLE2") + assert d.genre == "GENRE1, GENRE2, STYLE1, STYLE2" + assert d.style == "STYLE1, STYLE2" def test_append_style_to_genre_no_style(self): """Test nothing appended to genre if style is empty""" @@ -423,13 +421,28 @@ class DGAlbumInfoTest(_common.TestCase): release.data["styles"] = [] d = DiscogsPlugin().get_album_info(release) - self.assertEqual(d.genre, "GENRE1, GENRE2") - self.assertEqual(d.style, None) + assert d.genre == "GENRE1, GENRE2" + assert d.style is None -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) +@pytest.mark.parametrize( + "formats, expected_media, expected_albumtype", + [ + (None, None, None), + ( + [ + { + "descriptions": ['7"', "Single", "45 RPM"], + "name": "Vinyl", + "qty": 1, + } + ], + "Vinyl", + '7", Single, 45 RPM', + ), + ], +) +def test_get_media_and_albumtype(formats, expected_media, expected_albumtype): + result = DiscogsPlugin.get_media_and_albumtype(formats) - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert result == (expected_media, expected_albumtype) diff --git a/test/plugins/test_edit.py b/test/plugins/test_edit.py index 7c1fcf0b3..2d557d623 100644 --- a/test/plugins/test_edit.py +++ b/test/plugins/test_edit.py @@ -13,7 +13,6 @@ # included in all copies or substantial portions of the Software. 
import codecs -import unittest from unittest.mock import patch from beets.dbcore.query import TrueQuery @@ -21,12 +20,12 @@ from beets.library import Item from beets.test import _common from beets.test.helper import ( AutotagStub, - ImportHelper, - TerminalImportSessionSetup, - TestHelper, + BeetsTestCase, + ImportTestCase, + PluginMixin, + TerminalImportMixin, control_stdin, ) -from beetsplug.edit import EditPlugin class ModifyFileMocker: @@ -73,10 +72,12 @@ class ModifyFileMocker: f.write(contents) -class EditMixin: +class EditMixin(PluginMixin): """Helper containing some common functionality used for the Edit tests.""" - def assertItemFieldsModified( # noqa + plugin = "edit" + + def assertItemFieldsModified( self, library_items, items, fields=[], allowed=["path"] ): """Assert that items in the library (`lib_items`) have different values @@ -93,7 +94,7 @@ class EditMixin: for field in lib_item._fields if lib_item[field] != item[field] ] - self.assertEqual(set(diff_fields).difference(allowed), set(fields)) + assert set(diff_fields).difference(allowed) == set(fields) def run_mocked_interpreter(self, modify_file_args={}, stdin=[]): """Run the edit command during an import session, with mocked stdin and @@ -115,7 +116,7 @@ class EditMixin: @_common.slow_test() @patch("beets.library.Item.write") -class EditCommandTest(unittest.TestCase, TestHelper, EditMixin): +class EditCommandTest(EditMixin, BeetsTestCase): """Black box tests for `beetsplug.edit`. Command line interaction is simulated using `test.helper.control_stdin()`, and yaml editing via an external editor is simulated using `ModifyFileMocker`. @@ -125,8 +126,7 @@ class EditCommandTest(unittest.TestCase, TestHelper, EditMixin): TRACK_COUNT = 10 def setUp(self): - self.setup_beets() - self.load_plugins("edit") + super().setUp() # Add an album, storing the original fields for comparison. 
self.album = self.add_album_fixture(track_count=self.TRACK_COUNT) self.album_orig = {f: self.album[f] for f in self.album._fields} @@ -134,12 +134,7 @@ class EditCommandTest(unittest.TestCase, TestHelper, EditMixin): {f: item[f] for f in item._fields} for item in self.album.items() ] - def tearDown(self): - EditPlugin.listeners = None - self.teardown_beets() - self.unload_plugins() - - def assertCounts( # noqa + def assertCounts( self, mock_write, album_count=ALBUM_COUNT, @@ -148,11 +143,11 @@ class EditCommandTest(unittest.TestCase, TestHelper, EditMixin): title_starts_with="", ): """Several common assertions on Album, Track and call counts.""" - self.assertEqual(len(self.lib.albums()), album_count) - self.assertEqual(len(self.lib.items()), track_count) - self.assertEqual(mock_write.call_count, write_call_count) - self.assertTrue( - all(i.title.startswith(title_starts_with) for i in self.lib.items()) + assert len(self.lib.albums()) == album_count + assert len(self.lib.items()) == track_count + assert mock_write.call_count == write_call_count + assert all( + i.title.startswith(title_starts_with) for i in self.lib.items() ) def test_title_edit_discard(self, mock_write): @@ -204,9 +199,7 @@ class EditCommandTest(unittest.TestCase, TestHelper, EditMixin): self.assertItemFieldsModified( list(self.album.items())[:-1], self.items_orig[:-1], [] ) - self.assertEqual( - list(self.album.items())[-1].title, "modified t\u00eftle 9" - ) + assert list(self.album.items())[-1].title == "modified t\u00eftle 9" def test_noedit(self, mock_write): """Do not edit anything.""" @@ -239,7 +232,7 @@ class EditCommandTest(unittest.TestCase, TestHelper, EditMixin): ) # Ensure album is *not* modified. self.album.load() - self.assertEqual(self.album.album, "\u00e4lbum") + assert self.album.album == "\u00e4lbum" def test_single_edit_add_field(self, mock_write): """Edit the yaml file appending an extra field to the first item, then @@ -252,7 +245,7 @@ class EditCommandTest(unittest.TestCase, TestHelper, EditMixin): ["a"], ) - self.assertEqual(self.lib.items("id:2")[0].foo, "bar") + assert self.lib.items("id:2")[0].foo == "bar" # Even though a flexible attribute was written (which is not directly # written to the tags), write should still be called since templates # might use it. 
@@ -271,7 +264,7 @@ class EditCommandTest(unittest.TestCase, TestHelper, EditMixin): self.album.load() self.assertCounts(mock_write, write_call_count=self.TRACK_COUNT) - self.assertEqual(self.album.album, "modified \u00e4lbum") + assert self.album.album == "modified \u00e4lbum" self.assertItemFieldsModified( self.album.items(), self.items_orig, ["album", "mtime"] ) @@ -287,7 +280,7 @@ class EditCommandTest(unittest.TestCase, TestHelper, EditMixin): self.album.load() self.assertCounts(mock_write, write_call_count=self.TRACK_COUNT) - self.assertEqual(self.album.albumartist, "the modified album artist") + assert self.album.albumartist == "the modified album artist" self.assertItemFieldsModified( self.album.items(), self.items_orig, ["albumartist", "mtime"] ) @@ -322,41 +315,39 @@ class EditCommandTest(unittest.TestCase, TestHelper, EditMixin): @_common.slow_test() -class EditDuringImporterTest( - TerminalImportSessionSetup, - unittest.TestCase, - ImportHelper, - TestHelper, - EditMixin, +class EditDuringImporterTestCase( + EditMixin, TerminalImportMixin, ImportTestCase ): """TODO""" IGNORED = ["added", "album_id", "id", "mtime", "path"] def setUp(self): - self.setup_beets() - self.load_plugins("edit") + super().setUp() # Create some mediafiles, and store them for comparison. - self._create_import_dir(3) - self.items_orig = [Item.from_path(f.path) for f in self.media_files] + self.prepare_album_for_import(1) + self.items_orig = [Item.from_path(f.path) for f in self.import_media] self.matcher = AutotagStub().install() self.matcher.matching = AutotagStub.GOOD - self.config["import"]["timid"] = True def tearDown(self): - EditPlugin.listeners = None - self.unload_plugins() - self.teardown_beets() + super().tearDown() self.matcher.restore() + +@_common.slow_test() +class EditDuringImporterNonSingletonTest(EditDuringImporterTestCase): + def setUp(self): + super().setUp() + self.importer = self.setup_importer() + def test_edit_apply_asis(self): """Edit the album field for all items in the library, apply changes, using the original item tags. """ - self._setup_import_session() # Edit track titles. self.run_mocked_interpreter( - {"replacements": {"Tag Title": "Edited Title"}}, + {"replacements": {"Tag Track": "Edited Track"}}, # eDit, Apply changes. ["d", "a"], ) @@ -373,21 +364,18 @@ class EditDuringImporterTest( "mb_albumartistids", ], ) - self.assertTrue( - all("Edited Title" in i.title for i in self.lib.items()) - ) + assert all("Edited Track" in i.title for i in self.lib.items()) # Ensure album is *not* fetched from a candidate. - self.assertEqual(self.lib.albums()[0].mb_albumid, "") + assert self.lib.albums()[0].mb_albumid == "" def test_edit_discard_asis(self): """Edit the album field for all items in the library, discard changes, using the original item tags. """ - self._setup_import_session() # Edit track titles. self.run_mocked_interpreter( - {"replacements": {"Tag Title": "Edited Title"}}, + {"replacements": {"Tag Track": "Edited Track"}}, # eDit, Cancel, Use as-is. ["d", "c", "u"], ) @@ -399,38 +387,34 @@ class EditDuringImporterTest( [], self.IGNORED + ["albumartist", "mb_albumartistid"], ) - self.assertTrue(all("Tag Title" in i.title for i in self.lib.items())) + assert all("Tag Track" in i.title for i in self.lib.items()) # Ensure album is *not* fetched from a candidate. 
- self.assertEqual(self.lib.albums()[0].mb_albumid, "") + assert self.lib.albums()[0].mb_albumid == "" def test_edit_apply_candidate(self): """Edit the album field for all items in the library, apply changes, using a candidate. """ - self._setup_import_session() # Edit track titles. self.run_mocked_interpreter( - {"replacements": {"Applied Title": "Edited Title"}}, + {"replacements": {"Applied Track": "Edited Track"}}, # edit Candidates, 1, Apply changes. ["c", "1", "a"], ) # Check that 'title' field is modified, and other fields come from # the candidate. - self.assertTrue( - all("Edited Title " in i.title for i in self.lib.items()) - ) - self.assertTrue(all("match " in i.mb_trackid for i in self.lib.items())) + assert all("Edited Track " in i.title for i in self.lib.items()) + assert all("match " in i.mb_trackid for i in self.lib.items()) # Ensure album is fetched from a candidate. - self.assertIn("albumid", self.lib.albums()[0].mb_albumid) + assert "albumid" in self.lib.albums()[0].mb_albumid def test_edit_retag_apply(self): """Import the album using a candidate, then retag and edit and apply changes. """ - self._setup_import_session() self.run_mocked_interpreter( {}, # 1, Apply changes. @@ -442,51 +426,68 @@ class EditDuringImporterTest( self.importer.paths = [] self.importer.query = TrueQuery() self.run_mocked_interpreter( - {"replacements": {"Applied Title": "Edited Title"}}, + {"replacements": {"Applied Track": "Edited Track"}}, # eDit, Apply changes. ["d", "a"], ) # Check that 'title' field is modified, and other fields come from # the candidate. - self.assertTrue( - all("Edited Title " in i.title for i in self.lib.items()) - ) - self.assertTrue(all("match " in i.mb_trackid for i in self.lib.items())) + assert all("Edited Track " in i.title for i in self.lib.items()) + assert all("match " in i.mb_trackid for i in self.lib.items()) # Ensure album is fetched from a candidate. - self.assertIn("albumid", self.lib.albums()[0].mb_albumid) + assert "albumid" in self.lib.albums()[0].mb_albumid def test_edit_discard_candidate(self): """Edit the album field for all items in the library, discard changes, using a candidate. """ - self._setup_import_session() # Edit track titles. self.run_mocked_interpreter( - {"replacements": {"Applied Title": "Edited Title"}}, + {"replacements": {"Applied Track": "Edited Track"}}, # edit Candidates, 1, Apply changes. ["c", "1", "a"], ) # Check that 'title' field is modified, and other fields come from # the candidate. - self.assertTrue( - all("Edited Title " in i.title for i in self.lib.items()) - ) - self.assertTrue(all("match " in i.mb_trackid for i in self.lib.items())) + assert all("Edited Track " in i.title for i in self.lib.items()) + assert all("match " in i.mb_trackid for i in self.lib.items()) # Ensure album is fetched from a candidate. - self.assertIn("albumid", self.lib.albums()[0].mb_albumid) + assert "albumid" in self.lib.albums()[0].mb_albumid + + def test_edit_apply_candidate_singleton(self): + """Edit the album field for all items in the library, apply changes, + using a candidate and singleton mode. + """ + # Edit track titles. + self.run_mocked_interpreter( + {"replacements": {"Applied Track": "Edited Track"}}, + # edit Candidates, 1, Apply changes, aBort. + ["c", "1", "a", "b"], + ) + + # Check that 'title' field is modified, and other fields come from + # the candidate. 
+ assert all("Edited Track " in i.title for i in self.lib.items()) + assert all("match " in i.mb_trackid for i in self.lib.items()) + + +@_common.slow_test() +class EditDuringImporterSingletonTest(EditDuringImporterTestCase): + def setUp(self): + super().setUp() + self.importer = self.setup_singleton_importer() def test_edit_apply_asis_singleton(self): """Edit the album field for all items in the library, apply changes, using the original item tags and singleton mode. """ - self._setup_import_session(singletons=True) # Edit track titles. self.run_mocked_interpreter( - {"replacements": {"Tag Title": "Edited Title"}}, + {"replacements": {"Tag Track": "Edited Track"}}, # eDit, Apply changes, aBort. ["d", "a", "b"], ) @@ -498,33 +499,4 @@ class EditDuringImporterTest( ["title"], self.IGNORED + ["albumartist", "mb_albumartistid"], ) - self.assertTrue( - all("Edited Title" in i.title for i in self.lib.items()) - ) - - def test_edit_apply_candidate_singleton(self): - """Edit the album field for all items in the library, apply changes, - using a candidate and singleton mode. - """ - self._setup_import_session() - # Edit track titles. - self.run_mocked_interpreter( - {"replacements": {"Applied Title": "Edited Title"}}, - # edit Candidates, 1, Apply changes, aBort. - ["c", "1", "a", "b"], - ) - - # Check that 'title' field is modified, and other fields come from - # the candidate. - self.assertTrue( - all("Edited Title " in i.title for i in self.lib.items()) - ) - self.assertTrue(all("match " in i.mb_trackid for i in self.lib.items())) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert all("Edited Track" in i.title for i in self.lib.items()) diff --git a/test/plugins/test_embedart.py b/test/plugins/test_embedart.py index 48a110295..14bfdf522 100644 --- a/test/plugins/test_embedart.py +++ b/test/plugins/test_embedart.py @@ -17,16 +17,17 @@ import os.path import shutil import tempfile import unittest -from test.test_art_resize import DummyIMBackend from unittest.mock import MagicMock, patch +import pytest from mediafile import MediaFile from beets import art, config, logging, ui from beets.test import _common -from beets.test.helper import FetchImageHelper, TestHelper +from beets.test.helper import BeetsTestCase, FetchImageHelper, PluginMixin from beets.util import bytestring_path, displayable_path, syspath from beets.util.artresizer import ArtResizer +from test.test_art_resize import DummyIMBackend def require_artresizer_compare(test): @@ -40,17 +41,16 @@ def require_artresizer_compare(test): return wrapper -class EmbedartCliTest(TestHelper, FetchImageHelper): +class EmbedartCliTest(PluginMixin, FetchImageHelper, BeetsTestCase): + plugin = "embedart" small_artpath = os.path.join(_common.RSRC, b"image-2x3.jpg") abbey_artpath = os.path.join(_common.RSRC, b"abbey.jpg") abbey_similarpath = os.path.join(_common.RSRC, b"abbey-similar.jpg") abbey_differentpath = os.path.join(_common.RSRC, b"abbey-different.jpg") def setUp(self): - self.io = _common.DummyIO() + super().setUp() # Converter is threaded self.io.install() - self.setup_beets() # Converter is threaded - self.load_plugins("embedart") def _setup_data(self, artpath=None): if not artpath: @@ -58,10 +58,6 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): with open(syspath(artpath), "rb") as f: self.image_data = f.read() - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - def test_embed_art_from_file_with_yes_input(self): 
self._setup_data() album = self.add_album_fixture() @@ -69,7 +65,7 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): self.io.addinput("y") self.run_command("embedart", "-f", self.small_artpath) mediafile = MediaFile(syspath(item.path)) - self.assertEqual(mediafile.images[0].data, self.image_data) + assert mediafile.images[0].data == self.image_data def test_embed_art_from_file_with_no_input(self): self._setup_data() @@ -79,7 +75,7 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): self.run_command("embedart", "-f", self.small_artpath) mediafile = MediaFile(syspath(item.path)) # make sure that images array is empty (nothing embedded) - self.assertFalse(mediafile.images) + assert not mediafile.images def test_embed_art_from_file(self): self._setup_data() @@ -87,7 +83,7 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): item = album.items()[0] self.run_command("embedart", "-y", "-f", self.small_artpath) mediafile = MediaFile(syspath(item.path)) - self.assertEqual(mediafile.images[0].data, self.image_data) + assert mediafile.images[0].data == self.image_data def test_embed_art_from_album(self): self._setup_data() @@ -97,7 +93,7 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): album.store() self.run_command("embedart", "-y") mediafile = MediaFile(syspath(item.path)) - self.assertEqual(mediafile.images[0].data, self.image_data) + assert mediafile.images[0].data == self.image_data def test_embed_art_remove_art_file(self): self._setup_data() @@ -127,7 +123,7 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): def test_art_file_missing(self): self.add_album_fixture() logging.getLogger("beets.embedart").setLevel(logging.DEBUG) - with self.assertRaises(ui.UserError): + with pytest.raises(ui.UserError): self.run_command("embedart", "-y", "-f", "/doesnotexist") def test_embed_non_image_file(self): @@ -145,7 +141,7 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): os.remove(syspath(tmp_path)) mediafile = MediaFile(syspath(album.items()[0].path)) - self.assertFalse(mediafile.images) # No image added. + assert not mediafile.images # No image added. 
@require_artresizer_compare def test_reject_different_art(self): @@ -157,13 +153,9 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): self.run_command("embedart", "-y", "-f", self.abbey_differentpath) mediafile = MediaFile(syspath(item.path)) - self.assertEqual( - mediafile.images[0].data, - self.image_data, - "Image written is not {}".format( - displayable_path(self.abbey_artpath) - ), - ) + assert ( + mediafile.images[0].data == self.image_data + ), f"Image written is not {displayable_path(self.abbey_artpath)}" @require_artresizer_compare def test_accept_similar_art(self): @@ -175,13 +167,9 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): self.run_command("embedart", "-y", "-f", self.abbey_similarpath) mediafile = MediaFile(syspath(item.path)) - self.assertEqual( - mediafile.images[0].data, - self.image_data, - "Image written is not {}".format( - displayable_path(self.abbey_similarpath) - ), - ) + assert ( + mediafile.images[0].data == self.image_data + ), f"Image written is not {displayable_path(self.abbey_similarpath)}" def test_non_ascii_album_path(self): resource_path = os.path.join(_common.RSRC, b"image.mp3") @@ -214,7 +202,7 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): self.io.addinput("y") self.run_command("clearart") mediafile = MediaFile(syspath(item.path)) - self.assertFalse(mediafile.images) + assert not mediafile.images def test_clear_art_with_no_input(self): self._setup_data() @@ -225,7 +213,7 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): self.io.addinput("n") self.run_command("clearart") mediafile = MediaFile(syspath(item.path)) - self.assertEqual(mediafile.images[0].data, self.image_data) + assert mediafile.images[0].data == self.image_data def test_embed_art_from_url_with_yes_input(self): self._setup_data() @@ -235,10 +223,9 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): self.io.addinput("y") self.run_command("embedart", "-u", "http://example.com/test.jpg") mediafile = MediaFile(syspath(item.path)) - self.assertEqual( - mediafile.images[0].data, - self.IMAGEHEADER.get("image/jpeg").ljust(32, b"\x00"), - ) + assert mediafile.images[0].data == self.IMAGEHEADER.get( + "image/jpeg" + ).ljust(32, b"\x00") def test_embed_art_from_url_png(self): self._setup_data() @@ -247,10 +234,9 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): self.mock_response("http://example.com/test.png", "image/png") self.run_command("embedart", "-y", "-u", "http://example.com/test.png") mediafile = MediaFile(syspath(item.path)) - self.assertEqual( - mediafile.images[0].data, - self.IMAGEHEADER.get("image/png").ljust(32, b"\x00"), - ) + assert mediafile.images[0].data == self.IMAGEHEADER.get( + "image/png" + ).ljust(32, b"\x00") def test_embed_art_from_url_not_image(self): self._setup_data() @@ -259,7 +245,7 @@ class EmbedartCliTest(TestHelper, FetchImageHelper): self.mock_response("http://example.com/test.html", "text/html") self.run_command("embedart", "-y", "-u", "http://example.com/test.html") mediafile = MediaFile(syspath(item.path)) - self.assertFalse(mediafile.images) + assert not mediafile.images class DummyArtResizer(ArtResizer): @@ -313,42 +299,34 @@ class ArtSimilarityTest(unittest.TestCase): def test_compare_success_similar(self, mock_extract, mock_subprocess): self._mock_popens(mock_extract, mock_subprocess, 0, "10", "err") - self.assertTrue(self._similarity(20)) + assert self._similarity(20) def test_compare_success_different(self, mock_extract, mock_subprocess): self._mock_popens(mock_extract, mock_subprocess, 0, "10", "err") - 
self.assertFalse(self._similarity(5)) + assert not self._similarity(5) def test_compare_status1_similar(self, mock_extract, mock_subprocess): self._mock_popens(mock_extract, mock_subprocess, 1, "out", "10") - self.assertTrue(self._similarity(20)) + assert self._similarity(20) def test_compare_status1_different(self, mock_extract, mock_subprocess): self._mock_popens(mock_extract, mock_subprocess, 1, "out", "10") - self.assertFalse(self._similarity(5)) + assert not self._similarity(5) def test_compare_failed(self, mock_extract, mock_subprocess): self._mock_popens(mock_extract, mock_subprocess, 2, "out", "10") - self.assertIsNone(self._similarity(20)) + assert self._similarity(20) is None def test_compare_parsing_error(self, mock_extract, mock_subprocess): self._mock_popens(mock_extract, mock_subprocess, 0, "foo", "bar") - self.assertIsNone(self._similarity(20)) + assert self._similarity(20) is None def test_compare_parsing_error_and_failure( self, mock_extract, mock_subprocess ): self._mock_popens(mock_extract, mock_subprocess, 1, "foo", "bar") - self.assertIsNone(self._similarity(20)) + assert self._similarity(20) is None def test_convert_failure(self, mock_extract, mock_subprocess): self._mock_popens(mock_extract, mock_subprocess, convert_status=1) - self.assertIsNone(self._similarity(20)) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert self._similarity(20) is None diff --git a/test/plugins/test_embyupdate.py b/test/plugins/test_embyupdate.py index 57859f5ac..8def5dca5 100644 --- a/test/plugins/test_embyupdate.py +++ b/test/plugins/test_embyupdate.py @@ -1,15 +1,14 @@ -import unittest - import responses -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase from beetsplug import embyupdate -class EmbyUpdateTest(unittest.TestCase, TestHelper): +class EmbyUpdateTest(PluginTestCase): + plugin = "embyupdate" + def setUp(self): - self.setup_beets() - self.load_plugins("embyupdate") + super().setUp() self.config["emby"] = { "host": "localhost", @@ -18,85 +17,74 @@ class EmbyUpdateTest(unittest.TestCase, TestHelper): "password": "password", } - def tearDown(self): - self.teardown_beets() - self.unload_plugins() - def test_api_url_only_name(self): - self.assertEqual( + assert ( embyupdate.api_url( self.config["emby"]["host"].get(), self.config["emby"]["port"].get(), "/Library/Refresh", - ), - "http://localhost:8096/Library/Refresh?format=json", + ) + == "http://localhost:8096/Library/Refresh?format=json" ) def test_api_url_http(self): - self.assertEqual( + assert ( embyupdate.api_url( "http://localhost", self.config["emby"]["port"].get(), "/Library/Refresh", - ), - "http://localhost:8096/Library/Refresh?format=json", + ) + == "http://localhost:8096/Library/Refresh?format=json" ) def test_api_url_https(self): - self.assertEqual( + assert ( embyupdate.api_url( "https://localhost", self.config["emby"]["port"].get(), "/Library/Refresh", - ), - "https://localhost:8096/Library/Refresh?format=json", + ) + == "https://localhost:8096/Library/Refresh?format=json" ) def test_password_data(self): - self.assertEqual( - embyupdate.password_data( - self.config["emby"]["username"].get(), - self.config["emby"]["password"].get(), - ), - { - "username": "username", - "password": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", - "passwordMd5": "5f4dcc3b5aa765d61d8327deb882cf99", - }, - ) + assert embyupdate.password_data( + self.config["emby"]["username"].get(), + 
self.config["emby"]["password"].get(), + ) == { + "username": "username", + "password": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "passwordMd5": "5f4dcc3b5aa765d61d8327deb882cf99", + } def test_create_header_no_token(self): - self.assertEqual( - embyupdate.create_headers("e8837bc1-ad67-520e-8cd2-f629e3155721"), - { - "x-emby-authorization": ( - "MediaBrowser " - 'UserId="e8837bc1-ad67-520e-8cd2-f629e3155721", ' - 'Client="other", ' - 'Device="beets", ' - 'DeviceId="beets", ' - 'Version="0.0.0"' - ) - }, - ) + assert embyupdate.create_headers( + "e8837bc1-ad67-520e-8cd2-f629e3155721" + ) == { + "x-emby-authorization": ( + "MediaBrowser " + 'UserId="e8837bc1-ad67-520e-8cd2-f629e3155721", ' + 'Client="other", ' + 'Device="beets", ' + 'DeviceId="beets", ' + 'Version="0.0.0"' + ) + } def test_create_header_with_token(self): - self.assertEqual( - embyupdate.create_headers( - "e8837bc1-ad67-520e-8cd2-f629e3155721", token="abc123" + assert embyupdate.create_headers( + "e8837bc1-ad67-520e-8cd2-f629e3155721", token="abc123" + ) == { + "x-emby-authorization": ( + "MediaBrowser " + 'UserId="e8837bc1-ad67-520e-8cd2-f629e3155721", ' + 'Client="other", ' + 'Device="beets", ' + 'DeviceId="beets", ' + 'Version="0.0.0"' ), - { - "x-emby-authorization": ( - "MediaBrowser " - 'UserId="e8837bc1-ad67-520e-8cd2-f629e3155721", ' - 'Client="other", ' - 'Device="beets", ' - 'DeviceId="beets", ' - 'Version="0.0.0"' - ), - "x-mediabrowser-token": "abc123", - }, - ) + "x-mediabrowser-token": "abc123", + } @responses.activate def test_get_token(self): @@ -178,9 +166,9 @@ class EmbyUpdateTest(unittest.TestCase, TestHelper): "passwordMd5": "5f4dcc3b5aa765d61d8327deb882cf99", } - self.assertEqual( - embyupdate.get_token("http://localhost", 8096, headers, auth_data), - "4b19180cf02748f7b95c7e8e76562fc8", + assert ( + embyupdate.get_token("http://localhost", 8096, headers, auth_data) + == "4b19180cf02748f7b95c7e8e76562fc8" ) @responses.activate @@ -235,14 +223,6 @@ class EmbyUpdateTest(unittest.TestCase, TestHelper): response = embyupdate.get_user("http://localhost", 8096, "username") - self.assertEqual(response[0]["Id"], "2ec276a2642e54a19b612b9418a8bd3b") + assert response[0]["Id"] == "2ec276a2642e54a19b612b9418a8bd3b" - self.assertEqual(response[0]["Name"], "username") - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert response[0]["Name"] == "username" diff --git a/test/plugins/test_export.py b/test/plugins/test_export.py index b949fe4f8..f37a0d2a7 100644 --- a/test/plugins/test_export.py +++ b/test/plugins/test_export.py @@ -12,29 +12,23 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Test the beets.export utilities associated with the export plugin. 
-""" - +"""Test the beets.export utilities associated with the export plugin.""" import json import re # used to test csv format -import unittest from xml.etree import ElementTree from xml.etree.ElementTree import Element -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase -class ExportPluginTest(unittest.TestCase, TestHelper): +class ExportPluginTest(PluginTestCase): + plugin = "export" + def setUp(self): - self.setup_beets() - self.load_plugins("export") + super().setUp() self.test_values = {"title": "xtitle", "album": "xalbum"} - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - def execute_command(self, format_type, artist): query = ",".join(self.test_values.keys()) out = self.run_with_output( @@ -56,16 +50,16 @@ class ExportPluginTest(unittest.TestCase, TestHelper): out = self.execute_command(format_type="json", artist=item1.artist) json_data = json.loads(out)[0] for key, val in self.test_values.items(): - self.assertIn(key, json_data) - self.assertEqual(val, json_data[key]) + assert key in json_data + assert val == json_data[key] def test_jsonlines_output(self): item1 = self.create_item() out = self.execute_command(format_type="jsonlines", artist=item1.artist) json_data = json.loads(out) for key, val in self.test_values.items(): - self.assertIn(key, json_data) - self.assertEqual(val, json_data[key]) + assert key in json_data + assert val == json_data[key] def test_csv_output(self): item1 = self.create_item() @@ -74,25 +68,17 @@ class ExportPluginTest(unittest.TestCase, TestHelper): head = re.split(",", csv_list[0]) vals = re.split(",|\r", csv_list[1]) for index, column in enumerate(head): - self.assertIsNotNone(self.test_values.get(column, None)) - self.assertEqual(vals[index], self.test_values[column]) + assert self.test_values.get(column, None) is not None + assert vals[index] == self.test_values[column] def test_xml_output(self): item1 = self.create_item() out = self.execute_command(format_type="xml", artist=item1.artist) library = ElementTree.fromstring(out) - self.assertIsInstance(library, Element) + assert isinstance(library, Element) for track in library[0]: for details in track: tag = details.tag txt = details.text - self.assertIn(tag, self.test_values, msg=tag) - self.assertEqual(self.test_values[tag], txt, msg=txt) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert tag in self.test_values, tag + assert self.test_values[tag] == txt, txt diff --git a/test/plugins/test_fetchart.py b/test/plugins/test_fetchart.py index b3307472a..853820d92 100644 --- a/test/plugins/test_fetchart.py +++ b/test/plugins/test_fetchart.py @@ -16,29 +16,25 @@ import ctypes import os import sys -import unittest from beets import util -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase -class FetchartCliTest(unittest.TestCase, TestHelper): +class FetchartCliTest(PluginTestCase): + plugin = "fetchart" + def setUp(self): - self.setup_beets() - self.load_plugins("fetchart") + super().setUp() self.config["fetchart"]["cover_names"] = "c\xc3\xb6ver.jpg" self.config["art_filename"] = "mycover" self.album = self.add_album() self.cover_path = os.path.join(self.album.path, b"mycover.jpg") - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - def check_cover_is_stored(self): - self.assertEqual(self.album["artpath"], self.cover_path) + assert self.album["artpath"] == self.cover_path with 
open(util.syspath(self.cover_path)) as f: - self.assertEqual(f.read(), "IMAGE") + assert f.read() == "IMAGE" def hide_file_windows(self): hidden_mask = 2 @@ -60,14 +56,14 @@ class FetchartCliTest(unittest.TestCase, TestHelper): os.makedirs(os.path.join(self.album.path, b"mycover.jpg")) self.run_command("fetchart") self.album.load() - self.assertIsNone(self.album["artpath"]) + assert self.album["artpath"] is None def test_filesystem_does_not_pick_up_ignored_file(self): self.touch(b"co_ver.jpg", dir=self.album.path, content="IMAGE") self.config["ignore"] = ["*_*"] self.run_command("fetchart") self.album.load() - self.assertIsNone(self.album["artpath"]) + assert self.album["artpath"] is None def test_filesystem_picks_up_non_ignored_file(self): self.touch(b"cover.jpg", dir=self.album.path, content="IMAGE") @@ -84,7 +80,7 @@ class FetchartCliTest(unittest.TestCase, TestHelper): self.config["ignore_hidden"] = True self.run_command("fetchart") self.album.load() - self.assertIsNone(self.album["artpath"]) + assert self.album["artpath"] is None def test_filesystem_picks_up_non_hidden_file(self): self.touch(b"cover.jpg", dir=self.album.path, content="IMAGE") @@ -102,11 +98,3 @@ class FetchartCliTest(unittest.TestCase, TestHelper): self.run_command("fetchart") self.album.load() self.check_cover_is_stored() - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_filefilter.py b/test/plugins/test_filefilter.py index 10eed77c4..7f9fa2d18 100644 --- a/test/plugins/test_filefilter.py +++ b/test/plugins/test_filefilter.py @@ -12,219 +12,89 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Tests for the `filefilter` plugin. 
-""" +"""Tests for the `filefilter` plugin.""" + +from beets.test.helper import ImportTestCase, PluginMixin +from beets.util import bytestring_path -import os -import shutil -import unittest +class FileFilterPluginMixin(PluginMixin, ImportTestCase): + plugin = "filefilter" + preload_plugin = False -from mediafile import MediaFile - -from beets import config -from beets.test import _common -from beets.test.helper import ImportHelper, capture_log -from beets.util import bytestring_path, displayable_path, syspath -from beetsplug.filefilter import FileFilterPlugin - - -class FileFilterPluginTest(unittest.TestCase, ImportHelper): def setUp(self): - self.setup_beets() - self.__create_import_dir(2) - self._setup_import_session() - config["import"]["pretend"] = True + super().setUp() + self.prepare_tracks_for_import() - def tearDown(self): - self.teardown_beets() - - def __copy_file(self, dest_path, metadata): - # Copy files - resource_path = os.path.join(_common.RSRC, b"full.mp3") - shutil.copy(syspath(resource_path), syspath(dest_path)) - medium = MediaFile(dest_path) - # Set metadata - for attr in metadata: - setattr(medium, attr, metadata[attr]) - medium.save() - - def __create_import_dir(self, count): - self.import_dir = os.path.join(self.temp_dir, b"testsrcdir") - if os.path.isdir(syspath(self.import_dir)): - shutil.rmtree(syspath(self.import_dir)) - - self.artist_path = os.path.join(self.import_dir, b"artist") - self.album_path = os.path.join(self.artist_path, b"album") - self.misc_path = os.path.join(self.import_dir, b"misc") - os.makedirs(syspath(self.album_path)) - os.makedirs(syspath(self.misc_path)) - - metadata = { - "artist": "Tag Artist", - "album": "Tag Album", - "albumartist": None, - "mb_trackid": None, - "mb_albumid": None, - "comp": None, + def prepare_tracks_for_import(self): + self.album_track, self.other_album_track, self.single_track = ( + bytestring_path(self.prepare_album_for_import(1, album_path=p)[0]) + for p in [ + self.import_path / "album", + self.import_path / "other_album", + self.import_path, + ] + ) + self.all_tracks = { + self.album_track, + self.other_album_track, + self.single_track, } - self.album_paths = [] - for i in range(count): - metadata["track"] = i + 1 - metadata["title"] = "Tag Title Album %d" % (i + 1) - track_file = bytestring_path("%02d - track.mp3" % (i + 1)) - dest_path = os.path.join(self.album_path, track_file) - self.__copy_file(dest_path, metadata) - self.album_paths.append(dest_path) - self.artist_paths = [] - metadata["album"] = None - for i in range(count): - metadata["track"] = i + 10 - metadata["title"] = "Tag Title Artist %d" % (i + 1) - track_file = bytestring_path("track_%d.mp3" % (i + 1)) - dest_path = os.path.join(self.artist_path, track_file) - self.__copy_file(dest_path, metadata) - self.artist_paths.append(dest_path) - - self.misc_paths = [] - for i in range(count): - metadata["artist"] = "Artist %d" % (i + 42) - metadata["track"] = i + 5 - metadata["title"] = "Tag Title Misc %d" % (i + 1) - track_file = bytestring_path("track_%d.mp3" % (i + 1)) - dest_path = os.path.join(self.misc_path, track_file) - self.__copy_file(dest_path, metadata) - self.misc_paths.append(dest_path) - - def __run(self, expected_lines, singletons=False): - self.load_plugins("filefilter") - - import_files = [self.import_dir] - self._setup_import_session(singletons=singletons) - self.importer.paths = import_files - - with capture_log() as logs: + def _run(self, config, expected_album_count, expected_paths): + with self.configure_plugin(config): 
self.importer.run() - self.unload_plugins() - FileFilterPlugin.listeners = None - logs = [line for line in logs if not line.startswith("Sending event:")] + assert len(self.lib.albums()) == expected_album_count + assert {i.path for i in self.lib.items()} == expected_paths - self.assertEqual(logs, expected_lines) + +class FileFilterPluginNonSingletonTest(FileFilterPluginMixin): + def setUp(self): + super().setUp() + self.importer = self.setup_importer(autotag=False, copy=False) def test_import_default(self): """The default configuration should import everything.""" - self.__run( - [ - "Album: %s" % displayable_path(self.artist_path), - " %s" % displayable_path(self.artist_paths[0]), - " %s" % displayable_path(self.artist_paths[1]), - "Album: %s" % displayable_path(self.album_path), - " %s" % displayable_path(self.album_paths[0]), - " %s" % displayable_path(self.album_paths[1]), - "Album: %s" % displayable_path(self.misc_path), - " %s" % displayable_path(self.misc_paths[0]), - " %s" % displayable_path(self.misc_paths[1]), - ] - ) + self._run({}, 3, self.all_tracks) def test_import_nothing(self): - config["filefilter"]["path"] = "not_there" - self.__run( - ["No files imported from %s" % displayable_path(self.import_dir)] + self._run({"path": "not_there"}, 0, set()) + + def test_global_config(self): + self._run( + {"path": ".*album.*"}, + 2, + {self.album_track, self.other_album_track}, ) - # Global options - def test_import_global(self): - config["filefilter"]["path"] = ".*track_1.*\\.mp3" - self.__run( - [ - "Album: %s" % displayable_path(self.artist_path), - " %s" % displayable_path(self.artist_paths[0]), - "Album: %s" % displayable_path(self.misc_path), - " %s" % displayable_path(self.misc_paths[0]), - ] - ) - self.__run( - [ - "Singleton: %s" % displayable_path(self.artist_paths[0]), - "Singleton: %s" % displayable_path(self.misc_paths[0]), - ], - singletons=True, + def test_album_config(self): + self._run( + {"album_path": ".*other_album.*"}, + 1, + {self.other_album_track}, ) - # Album options - def test_import_album(self): - config["filefilter"]["album_path"] = ".*track_1.*\\.mp3" - self.__run( - [ - "Album: %s" % displayable_path(self.artist_path), - " %s" % displayable_path(self.artist_paths[0]), - "Album: %s" % displayable_path(self.misc_path), - " %s" % displayable_path(self.misc_paths[0]), - ] - ) - self.__run( - [ - "Singleton: %s" % displayable_path(self.artist_paths[0]), - "Singleton: %s" % displayable_path(self.artist_paths[1]), - "Singleton: %s" % displayable_path(self.album_paths[0]), - "Singleton: %s" % displayable_path(self.album_paths[1]), - "Singleton: %s" % displayable_path(self.misc_paths[0]), - "Singleton: %s" % displayable_path(self.misc_paths[1]), - ], - singletons=True, - ) - - # Singleton options - def test_import_singleton(self): - config["filefilter"]["singleton_path"] = ".*track_1.*\\.mp3" - self.__run( - [ - "Singleton: %s" % displayable_path(self.artist_paths[0]), - "Singleton: %s" % displayable_path(self.misc_paths[0]), - ], - singletons=True, - ) - self.__run( - [ - "Album: %s" % displayable_path(self.artist_path), - " %s" % displayable_path(self.artist_paths[0]), - " %s" % displayable_path(self.artist_paths[1]), - "Album: %s" % displayable_path(self.album_path), - " %s" % displayable_path(self.album_paths[0]), - " %s" % displayable_path(self.album_paths[1]), - "Album: %s" % displayable_path(self.misc_path), - " %s" % displayable_path(self.misc_paths[0]), - " %s" % displayable_path(self.misc_paths[1]), - ] - ) - - # Album and singleton options - def 
test_import_both(self): - config["filefilter"]["album_path"] = ".*track_1.*\\.mp3" - config["filefilter"]["singleton_path"] = ".*track_2.*\\.mp3" - self.__run( - [ - "Album: %s" % displayable_path(self.artist_path), - " %s" % displayable_path(self.artist_paths[0]), - "Album: %s" % displayable_path(self.misc_path), - " %s" % displayable_path(self.misc_paths[0]), - ] - ) - self.__run( - [ - "Singleton: %s" % displayable_path(self.artist_paths[1]), - "Singleton: %s" % displayable_path(self.misc_paths[1]), - ], - singletons=True, - ) + def test_singleton_config(self): + """Check that singleton configuration is ignored for album import.""" + self._run({"singleton_path": ".*other_album.*"}, 3, self.all_tracks) -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) +class FileFilterPluginSingletonTest(FileFilterPluginMixin): + def setUp(self): + super().setUp() + self.importer = self.setup_singleton_importer(autotag=False, copy=False) + def test_global_config(self): + self._run( + {"path": ".*album.*"}, 0, {self.album_track, self.other_album_track} + ) -if __name__ == "__main__": - unittest.main(defaultTest="suite") + def test_album_config(self): + """Check that album configuration is ignored for singleton import.""" + self._run({"album_path": ".*other_album.*"}, 0, self.all_tracks) + + def test_singleton_config(self): + self._run( + {"singleton_path": ".*other_album.*"}, 0, {self.other_album_track} + ) diff --git a/test/plugins/test_ftintitle.py b/test/plugins/test_ftintitle.py index 60dd3668e..1dbe4a727 100644 --- a/test/plugins/test_ftintitle.py +++ b/test/plugins/test_ftintitle.py @@ -14,22 +14,14 @@ """Tests for the 'ftintitle' plugin.""" - import unittest -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase from beetsplug import ftintitle -class FtInTitlePluginFunctional(unittest.TestCase, TestHelper): - def setUp(self): - """Set up configuration""" - self.setup_beets() - self.load_plugins("ftintitle") - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() +class FtInTitlePluginFunctional(PluginTestCase): + plugin = "ftintitle" def _ft_add_item(self, path, artist, title, aartist): return self.add_item( @@ -40,47 +32,64 @@ class FtInTitlePluginFunctional(unittest.TestCase, TestHelper): albumartist=aartist, ) - def _ft_set_config(self, ftformat, drop=False, auto=True): + def _ft_set_config( + self, ftformat, drop=False, auto=True, keep_in_artist=False + ): self.config["ftintitle"]["format"] = ftformat self.config["ftintitle"]["drop"] = drop self.config["ftintitle"]["auto"] = auto + self.config["ftintitle"]["keep_in_artist"] = keep_in_artist def test_functional_drop(self): item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "Alice") self.run_command("ftintitle", "-d") item.load() - self.assertEqual(item["artist"], "Alice") - self.assertEqual(item["title"], "Song 1") + assert item["artist"] == "Alice" + assert item["title"] == "Song 1" def test_functional_not_found(self): item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "George") self.run_command("ftintitle", "-d") item.load() # item should be unchanged - self.assertEqual(item["artist"], "Alice ft Bob") - self.assertEqual(item["title"], "Song 1") + assert item["artist"] == "Alice ft Bob" + assert item["title"] == "Song 1" def test_functional_custom_format(self): self._ft_set_config("feat. 
{0}") item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "Alice") self.run_command("ftintitle") item.load() - self.assertEqual(item["artist"], "Alice") - self.assertEqual(item["title"], "Song 1 feat. Bob") + assert item["artist"] == "Alice" + assert item["title"] == "Song 1 feat. Bob" self._ft_set_config("featuring {0}") item = self._ft_add_item("/", "Alice feat. Bob", "Song 1", "Alice") self.run_command("ftintitle") item.load() - self.assertEqual(item["artist"], "Alice") - self.assertEqual(item["title"], "Song 1 featuring Bob") + assert item["artist"] == "Alice" + assert item["title"] == "Song 1 featuring Bob" self._ft_set_config("with {0}") item = self._ft_add_item("/", "Alice feat Bob", "Song 1", "Alice") self.run_command("ftintitle") item.load() - self.assertEqual(item["artist"], "Alice") - self.assertEqual(item["title"], "Song 1 with Bob") + assert item["artist"] == "Alice" + assert item["title"] == "Song 1 with Bob" + + def test_functional_keep_in_artist(self): + self._ft_set_config("feat. {0}", keep_in_artist=True) + item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "Alice") + self.run_command("ftintitle") + item.load() + assert item["artist"] == "Alice ft Bob" + assert item["title"] == "Song 1 feat. Bob" + + item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "Alice") + self.run_command("ftintitle", "-d") + item.load() + assert item["artist"] == "Alice ft Bob" + assert item["title"] == "Song 1" class FtInTitlePluginTest(unittest.TestCase): @@ -146,41 +155,37 @@ class FtInTitlePluginTest(unittest.TestCase): feat_part = ftintitle.find_feat_part( test_case["artist"], test_case["album_artist"] ) - self.assertEqual(feat_part, test_case["feat_part"]) + assert feat_part == test_case["feat_part"] def test_split_on_feat(self): parts = ftintitle.split_on_feat("Alice ft. Bob") - self.assertEqual(parts, ("Alice", "Bob")) + assert parts == ("Alice", "Bob") parts = ftintitle.split_on_feat("Alice feat Bob") - self.assertEqual(parts, ("Alice", "Bob")) + assert parts == ("Alice", "Bob") parts = ftintitle.split_on_feat("Alice feat. Bob") - self.assertEqual(parts, ("Alice", "Bob")) + assert parts == ("Alice", "Bob") parts = ftintitle.split_on_feat("Alice featuring Bob") - self.assertEqual(parts, ("Alice", "Bob")) + assert parts == ("Alice", "Bob") parts = ftintitle.split_on_feat("Alice & Bob") - self.assertEqual(parts, ("Alice", "Bob")) + assert parts == ("Alice", "Bob") parts = ftintitle.split_on_feat("Alice and Bob") - self.assertEqual(parts, ("Alice", "Bob")) + assert parts == ("Alice", "Bob") parts = ftintitle.split_on_feat("Alice With Bob") - self.assertEqual(parts, ("Alice", "Bob")) + assert parts == ("Alice", "Bob") parts = ftintitle.split_on_feat("Alice defeat Bob") - self.assertEqual(parts, ("Alice defeat Bob", None)) + assert parts == ("Alice defeat Bob", None) def test_contains_feat(self): - self.assertTrue(ftintitle.contains_feat("Alice ft. Bob")) - self.assertTrue(ftintitle.contains_feat("Alice feat. 
Bob")) - self.assertTrue(ftintitle.contains_feat("Alice feat Bob")) - self.assertTrue(ftintitle.contains_feat("Alice featuring Bob")) - self.assertTrue(ftintitle.contains_feat("Alice & Bob")) - self.assertTrue(ftintitle.contains_feat("Alice and Bob")) - self.assertTrue(ftintitle.contains_feat("Alice With Bob")) - self.assertFalse(ftintitle.contains_feat("Alice defeat Bob")) - self.assertFalse(ftintitle.contains_feat("Aliceft.Bob")) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert ftintitle.contains_feat("Alice ft. Bob") + assert ftintitle.contains_feat("Alice feat. Bob") + assert ftintitle.contains_feat("Alice feat Bob") + assert ftintitle.contains_feat("Alice featuring Bob") + assert ftintitle.contains_feat("Alice (ft. Bob)") + assert ftintitle.contains_feat("Alice (feat. Bob)") + assert ftintitle.contains_feat("Alice [ft. Bob]") + assert ftintitle.contains_feat("Alice [feat. Bob]") + assert not ftintitle.contains_feat("Alice defeat Bob") + assert not ftintitle.contains_feat("Aliceft.Bob") + assert not ftintitle.contains_feat("Alice (defeat Bob)") + assert not ftintitle.contains_feat("Live and Let Go") + assert not ftintitle.contains_feat("Come With Me") diff --git a/test/plugins/test_hook.py b/test/plugins/test_hook.py index 1364028f6..993b95911 100644 --- a/test/plugins/test_hook.py +++ b/test/plugins/test_hook.py @@ -13,162 +13,113 @@ # included in all copies or substantial portions of the Software. +from __future__ import annotations + import os.path import sys -import tempfile import unittest +from contextlib import contextmanager +from typing import TYPE_CHECKING, Callable -from beets import config, plugins -from beets.test import _common -from beets.test.helper import TestHelper, capture_log +from beets import plugins +from beets.test.helper import PluginTestCase, capture_log + +if TYPE_CHECKING: + from collections.abc import Iterator -def get_temporary_path(): - temporary_directory = tempfile._get_default_tempdir() - temporary_name = next(tempfile._get_candidate_names()) +class HookTestCase(PluginTestCase): + plugin = "hook" + preload_plugin = False - return os.path.join(temporary_directory, temporary_name) + def _get_hook(self, event: str, command: str) -> dict[str, str]: + return {"event": event, "command": command} -class HookTest(_common.TestCase, TestHelper): - TEST_HOOK_COUNT = 5 +class HookLogsTest(HookTestCase): + @contextmanager + def _configure_logs(self, command: str) -> Iterator[list[str]]: + config = {"hooks": [self._get_hook("test_event", command)]} - def setUp(self): - self.setup_beets() - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - - def _add_hook(self, event, command): - hook = {"event": event, "command": command} - - hooks = config["hook"]["hooks"].get(list) if "hook" in config else [] - hooks.append(hook) - - config["hook"]["hooks"] = hooks + with self.configure_plugin(config), capture_log("beets.hook") as logs: + plugins.send("test_event") + yield logs def test_hook_empty_command(self): - self._add_hook("test_event", "") - - self.load_plugins("hook") - - with capture_log("beets.hook") as logs: - plugins.send("test_event") - - self.assertIn('hook: invalid command ""', logs) + with self._configure_logs("") as logs: + assert 'hook: invalid command ""' in logs # FIXME: fails on windows @unittest.skipIf(sys.platform == "win32", "win32") def test_hook_non_zero_exit(self): - self._add_hook("test_event", 'sh -c "exit 1"') - - 
self.load_plugins("hook") - - with capture_log("beets.hook") as logs: - plugins.send("test_event") - - self.assertIn("hook: hook for test_event exited with status 1", logs) + with self._configure_logs('sh -c "exit 1"') as logs: + assert "hook: hook for test_event exited with status 1" in logs def test_hook_non_existent_command(self): - self._add_hook("test_event", "non-existent-command") + with self._configure_logs("non-existent-command") as logs: + logs = "\n".join(logs) - self.load_plugins("hook") + assert "hook: hook for test_event failed: " in logs + # The error message is different for each OS. Unfortunately the text is + # different in each case, where the only shared text is the string + # 'file' and substring 'Err' + assert "Err" in logs + assert "file" in logs - with capture_log("beets.hook") as logs: - plugins.send("test_event") - self.assertTrue( - any( - message.startswith("hook: hook for test_event failed: ") - for message in logs - ) - ) +class HookCommandTest(HookTestCase): + TEST_HOOK_COUNT = 2 + + events = [f"test_event_{i}" for i in range(TEST_HOOK_COUNT)] + + def setUp(self): + super().setUp() + temp_dir = os.fsdecode(self.temp_dir) + self.paths = [os.path.join(temp_dir, e) for e in self.events] + + def _test_command( + self, + make_test_path: Callable[[str, str], str], + send_path_kwarg: bool = False, + ) -> None: + """Check that each of the configured hooks is executed. + + Configure hooks for each event: + 1. Use the given 'make_test_path' callable to create a test path from the event + and the original path. + 2. Configure a hook with a command to touch this path. + + For each of the original paths: + 1. Send a test event + 2. Assert that a file has been created under the original path, which proves + that the configured hook command has been executed. 
+ """ + hooks = [ + self._get_hook(e, f"touch {make_test_path(e, p)}") + for e, p in zip(self.events, self.paths) + ] + + with self.configure_plugin({"hooks": hooks}): + for event, path in zip(self.events, self.paths): + if send_path_kwarg: + plugins.send(event, path=path) + else: + plugins.send(event) + assert os.path.isfile(path) - # FIXME: fails on windows @unittest.skipIf(sys.platform == "win32", "win32") def test_hook_no_arguments(self): - temporary_paths = [ - get_temporary_path() for i in range(self.TEST_HOOK_COUNT) - ] + self._test_command(lambda _, p: p) - for index, path in enumerate(temporary_paths): - self._add_hook(f"test_no_argument_event_{index}", f'touch "{path}"') - - self.load_plugins("hook") - - for index in range(len(temporary_paths)): - plugins.send(f"test_no_argument_event_{index}") - - for path in temporary_paths: - self.assertTrue(os.path.isfile(path)) - os.remove(path) - - # FIXME: fails on windows @unittest.skipIf(sys.platform == "win32", "win32") def test_hook_event_substitution(self): - temporary_directory = tempfile._get_default_tempdir() - event_names = [ - f"test_event_event_{i}" for i in range(self.TEST_HOOK_COUNT) - ] + self._test_command(lambda e, p: p.replace(e, "{event}")) - for event in event_names: - self._add_hook(event, f'touch "{temporary_directory}/{{event}}"') - - self.load_plugins("hook") - - for event in event_names: - plugins.send(event) - - for event in event_names: - path = os.path.join(temporary_directory, event) - - self.assertTrue(os.path.isfile(path)) - os.remove(path) - - # FIXME: fails on windows @unittest.skipIf(sys.platform == "win32", "win32") def test_hook_argument_substitution(self): - temporary_paths = [ - get_temporary_path() for i in range(self.TEST_HOOK_COUNT) - ] + self._test_command(lambda *_: "{path}", send_path_kwarg=True) - for index, path in enumerate(temporary_paths): - self._add_hook(f"test_argument_event_{index}", 'touch "{path}"') - - self.load_plugins("hook") - - for index, path in enumerate(temporary_paths): - plugins.send(f"test_argument_event_{index}", path=path) - - for path in temporary_paths: - self.assertTrue(os.path.isfile(path)) - os.remove(path) - - # FIXME: fails on windows @unittest.skipIf(sys.platform == "win32", "win32") def test_hook_bytes_interpolation(self): - temporary_paths = [ - get_temporary_path().encode("utf-8") - for i in range(self.TEST_HOOK_COUNT) - ] - - for index, path in enumerate(temporary_paths): - self._add_hook(f"test_bytes_event_{index}", 'touch "{path}"') - - self.load_plugins("hook") - - for index, path in enumerate(temporary_paths): - plugins.send(f"test_bytes_event_{index}", path=path) - - for path in temporary_paths: - self.assertTrue(os.path.isfile(path)) - os.remove(path) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + self.paths = [p.encode() for p in self.paths] + self._test_command(lambda *_: "{path}", send_path_kwarg=True) diff --git a/test/plugins/test_ihate.py b/test/plugins/test_ihate.py index fdd656703..f941d566c 100644 --- a/test/plugins/test_ihate.py +++ b/test/plugins/test_ihate.py @@ -16,38 +16,30 @@ class IHatePluginTest(unittest.TestCase): task = importer.SingletonImportTask(None, test_item) # Empty query should let it pass. - self.assertFalse(IHatePlugin.do_i_hate_this(task, match_pattern)) + assert not IHatePlugin.do_i_hate_this(task, match_pattern) # 1 query match. 
match_pattern = ["artist:bad_artist", "artist:TestArtist"] - self.assertTrue(IHatePlugin.do_i_hate_this(task, match_pattern)) + assert IHatePlugin.do_i_hate_this(task, match_pattern) # 2 query matches, either should trigger. match_pattern = ["album:test", "artist:testartist"] - self.assertTrue(IHatePlugin.do_i_hate_this(task, match_pattern)) + assert IHatePlugin.do_i_hate_this(task, match_pattern) # Query is blocked by AND clause. match_pattern = ["album:notthis genre:testgenre"] - self.assertFalse(IHatePlugin.do_i_hate_this(task, match_pattern)) + assert not IHatePlugin.do_i_hate_this(task, match_pattern) # Both queries are blocked by AND clause with unmatched condition. match_pattern = [ "album:notthis genre:testgenre", "artist:testartist album:notthis", ] - self.assertFalse(IHatePlugin.do_i_hate_this(task, match_pattern)) + assert not IHatePlugin.do_i_hate_this(task, match_pattern) # Only one query should fire. match_pattern = [ "album:testalbum genre:testgenre", "artist:testartist album:notthis", ] - self.assertTrue(IHatePlugin.do_i_hate_this(task, match_pattern)) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert IHatePlugin.do_i_hate_this(task, match_pattern) diff --git a/test/plugins/test_importadded.py b/test/plugins/test_importadded.py index 647892e1d..d48ec6c46 100644 --- a/test/plugins/test_importadded.py +++ b/test/plugins/test_importadded.py @@ -16,10 +16,11 @@ """Tests for the `importadded` plugin.""" import os -import unittest + +import pytest from beets import importer -from beets.test.helper import AutotagStub, ImportHelper +from beets.test.helper import AutotagStub, ImportTestCase, PluginMixin from beets.util import displayable_path, syspath from beetsplug.importadded import ImportAddedPlugin @@ -40,50 +41,49 @@ def modify_mtimes(paths, offset=-60000): os.utime(syspath(path), (mstat.st_atime, mstat.st_mtime + offset * i)) -class ImportAddedTest(unittest.TestCase, ImportHelper): +class ImportAddedTest(PluginMixin, ImportTestCase): # The minimum mtime of the files to be imported + plugin = "importadded" min_mtime = None def setUp(self): preserve_plugin_listeners() - self.setup_beets() - self.load_plugins("importadded") - self._create_import_dir(2) + super().setUp() + self.prepare_album_for_import(2) # Different mtimes on the files to be imported in order to test the # plugin - modify_mtimes(mfile.path for mfile in self.media_files) + modify_mtimes(mfile.path for mfile in self.import_media) self.min_mtime = min( - os.path.getmtime(mfile.path) for mfile in self.media_files + os.path.getmtime(mfile.path) for mfile in self.import_media ) self.matcher = AutotagStub().install() self.matcher.macthin = AutotagStub.GOOD - self._setup_import_session() + self.importer = self.setup_importer() self.importer.add_choice(importer.action.APPLY) def tearDown(self): - self.unload_plugins() - self.teardown_beets() + super().tearDown() self.matcher.restore() def find_media_file(self, item): """Find the pre-import MediaFile for an Item""" - for m in self.media_files: + for m in self.import_media: if m.title.replace("Tag", "Applied") == item.title: return m raise AssertionError( "No MediaFile found for Item " + displayable_path(item.path) ) - def assertEqualTimes(self, first, second, msg=None): # noqa + def assertEqualTimes(self, first, second, msg=None): """For comparing file modification times at a sufficient precision""" - self.assertAlmostEqual(first, second, places=4, msg=msg) + assert 
first == pytest.approx(second, rel=1e-4), msg - def assertAlbumImport(self): # noqa + def assertAlbumImport(self): self.importer.run() album = self.lib.albums().get() - self.assertEqual(album.added, self.min_mtime) + assert album.added == self.min_mtime for item in album.items(): - self.assertEqual(item.added, self.min_mtime) + assert item.added == self.min_mtime def test_import_album_with_added_dates(self): self.assertAlbumImport() @@ -99,7 +99,7 @@ class ImportAddedTest(unittest.TestCase, ImportHelper): self.config["importadded"]["preserve_mtimes"] = True self.importer.run() album = self.lib.albums().get() - self.assertEqual(album.added, self.min_mtime) + assert album.added == self.min_mtime for item in album.items(): self.assertEqualTimes(item.added, self.min_mtime) mediafile_mtime = os.path.getmtime(self.find_media_file(item).path) @@ -115,7 +115,7 @@ class ImportAddedTest(unittest.TestCase, ImportHelper): # Newer Item path mtimes as if Beets had modified them modify_mtimes(items_added_before.keys(), offset=10000) # Reimport - self._setup_import_session(import_dir=album.path) + self.setup_importer(import_dir=self.libdir) self.importer.run() # Verify the reimported items album = self.lib.albums().get() @@ -156,8 +156,7 @@ class ImportAddedTest(unittest.TestCase, ImportHelper): # Newer Item path mtimes as if Beets had modified them modify_mtimes(items_added_before.keys(), offset=10000) # Reimport - import_dir = os.path.dirname(list(items_added_before.keys())[0]) - self._setup_import_session(import_dir=import_dir, singletons=True) + self.setup_importer(import_dir=self.libdir, singletons=True) self.importer.run() # Verify the reimported items items_added_after = {item.path: item.added for item in self.lib.items()} @@ -168,11 +167,3 @@ class ImportAddedTest(unittest.TestCase, ImportHelper): "reimport modified Item.added for " + displayable_path(item_path), ) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_importfeeds.py b/test/plugins/test_importfeeds.py index 7d95a150b..5f1f915ad 100644 --- a/test/plugins/test_importfeeds.py +++ b/test/plugins/test_importfeeds.py @@ -1,27 +1,20 @@ import datetime import os import os.path -import shutil -import tempfile -import unittest from beets import config -from beets.library import Album, Item, Library +from beets.library import Album, Item +from beets.test.helper import BeetsTestCase from beetsplug.importfeeds import ImportFeedsPlugin -class ImportfeedsTestTest(unittest.TestCase): +class ImportfeedsTestTest(BeetsTestCase): def setUp(self): - config.clear() - config.read(user=False) + super().setUp() self.importfeeds = ImportFeedsPlugin() - self.lib = Library(":memory:") - self.feeds_dir = tempfile.mkdtemp() + self.feeds_dir = os.path.join(os.fsdecode(self.temp_dir), "importfeeds") config["importfeeds"]["dir"] = self.feeds_dir - def tearDown(self): - shutil.rmtree(self.feeds_dir) - def test_multi_format_album_playlist(self): config["importfeeds"]["formats"] = "m3u_multi" album = Album(album="album/name", id=1) @@ -34,9 +27,9 @@ class ImportfeedsTestTest(unittest.TestCase): playlist_path = os.path.join( self.feeds_dir, os.listdir(self.feeds_dir)[0] ) - self.assertTrue(playlist_path.endswith("album_name.m3u")) + assert playlist_path.endswith("album_name.m3u") with open(playlist_path) as playlist: - self.assertIn(item_path, playlist.read()) + assert item_path in playlist.read() def test_playlist_in_subdir(self): 
config["importfeeds"]["formats"] = "m3u" @@ -54,8 +47,8 @@ class ImportfeedsTestTest(unittest.TestCase): self.feeds_dir, config["importfeeds"]["m3u_name"].get() ) playlist_subdir = os.path.dirname(playlist) - self.assertTrue(os.path.isdir(playlist_subdir)) - self.assertTrue(os.path.isfile(playlist)) + assert os.path.isdir(playlist_subdir) + assert os.path.isfile(playlist) def test_playlist_per_session(self): config["importfeeds"]["formats"] = "m3u_session" @@ -70,14 +63,6 @@ class ImportfeedsTestTest(unittest.TestCase): self.importfeeds.album_imported(self.lib, album) date = datetime.datetime.now().strftime("%Y%m%d_%Hh%M") playlist = os.path.join(self.feeds_dir, f"imports_{date}.m3u") - self.assertTrue(os.path.isfile(playlist)) + assert os.path.isfile(playlist) with open(playlist) as playlist_contents: - self.assertIn(item_path, playlist_contents.read()) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert item_path in playlist_contents.read() diff --git a/test/plugins/test_info.py b/test/plugins/test_info.py index bfba739c5..c1b3fc941 100644 --- a/test/plugins/test_info.py +++ b/test/plugins/test_info.py @@ -13,22 +13,14 @@ # included in all copies or substantial portions of the Software. -import unittest - from mediafile import MediaFile -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase from beets.util import displayable_path -class InfoTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - self.load_plugins("info") - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() +class InfoTest(PluginTestCase): + plugin = "info" def test_path(self): path = self.create_mediafile_fixture() @@ -41,11 +33,11 @@ class InfoTest(unittest.TestCase, TestHelper): mediafile.save() out = self.run_with_output("info", path) - self.assertIn(displayable_path(path), out) - self.assertIn("albumartist: AAA", out) - self.assertIn("disctitle: DDD", out) - self.assertIn("genres: a; b; c", out) - self.assertNotIn("composer:", out) + assert displayable_path(path) in out + assert "albumartist: AAA" in out + assert "disctitle: DDD" in out + assert "genres: a; b; c" in out + assert "composer:" not in out def test_item_query(self): item1, item2 = self.add_item_fixtures(count=2) @@ -55,10 +47,10 @@ class InfoTest(unittest.TestCase, TestHelper): item1.store() out = self.run_with_output("info", "album:yyyy") - self.assertIn(displayable_path(item1.path), out) - self.assertIn("album: xxxx", out) + assert displayable_path(item1.path) in out + assert "album: xxxx" in out - self.assertNotIn(displayable_path(item2.path), out) + assert displayable_path(item2.path) not in out def test_item_library_query(self): (item,) = self.add_item_fixtures() @@ -66,8 +58,8 @@ class InfoTest(unittest.TestCase, TestHelper): item.store() out = self.run_with_output("info", "--library", "album:xxxx") - self.assertIn(displayable_path(item.path), out) - self.assertIn("album: xxxx", out) + assert displayable_path(item.path) in out + assert "album: xxxx" in out def test_collect_item_and_path(self): path = self.create_mediafile_fixture() @@ -84,9 +76,9 @@ class InfoTest(unittest.TestCase, TestHelper): mediafile.save() out = self.run_with_output("info", "--summarize", "album:AAA", path) - self.assertIn("album: AAA", out) - self.assertIn("tracktotal: 5", out) - self.assertIn("title: [various]", out) + assert "album: AAA" in out + assert "tracktotal: 5" in out + assert "title: 
[various]" in out def test_collect_item_and_path_with_multi_values(self): path = self.create_mediafile_fixture() @@ -109,11 +101,11 @@ class InfoTest(unittest.TestCase, TestHelper): mediafile.save() out = self.run_with_output("info", "--summarize", "album:AAA", path) - self.assertIn("album: AAA", out) - self.assertIn("tracktotal: 5", out) - self.assertIn("title: [various]", out) - self.assertIn("albumartists: [various]", out) - self.assertIn("artists: Artist A; Artist Z", out) + assert "album: AAA" in out + assert "tracktotal: 5" in out + assert "title: [various]" in out + assert "albumartists: [various]" in out + assert "artists: Artist A; Artist Z" in out def test_custom_format(self): self.add_item_fixtures() @@ -123,12 +115,4 @@ class InfoTest(unittest.TestCase, TestHelper): "--format", "$track. $title - $artist ($length)", ) - self.assertEqual("02. tïtle 0 - the artist (0:01)\n", out) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert "02. tïtle 0 - the artist (0:01)\n" == out diff --git a/test/plugins/test_ipfs.py b/test/plugins/test_ipfs.py index 65b713101..34c31d777 100644 --- a/test/plugins/test_ipfs.py +++ b/test/plugins/test_ipfs.py @@ -13,33 +13,24 @@ import os -import unittest from unittest.mock import Mock, patch -from beets import library from beets.test import _common -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase from beets.util import _fsencoding, bytestring_path from beetsplug.ipfs import IPFSPlugin @patch("beets.util.command_output", Mock()) -class IPFSPluginTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - self.load_plugins("ipfs") - self.lib = library.Library(":memory:") - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() +class IPFSPluginTest(PluginTestCase): + plugin = "ipfs" def test_stored_hashes(self): test_album = self.mk_test_album() ipfs = IPFSPlugin() added_albums = ipfs.ipfs_added_albums(self.lib, self.lib.path) added_album = added_albums.get_album(1) - self.assertEqual(added_album.ipfs, test_album.ipfs) + assert added_album.ipfs == test_album.ipfs found = False want_item = test_album.items()[2] for check_item in added_album.items(): @@ -50,15 +41,16 @@ class IPFSPluginTest(unittest.TestCase, TestHelper): ) want_path = "/ipfs/{}/{}".format(test_album.ipfs, ipfs_item) want_path = bytestring_path(want_path) - self.assertEqual(check_item.path, want_path) - self.assertEqual( - check_item.get("ipfs", with_album=False), want_item.ipfs + assert check_item.path == want_path + assert ( + check_item.get("ipfs", with_album=False) + == want_item.ipfs ) - self.assertEqual(check_item.title, want_item.title) + assert check_item.title == want_item.title found = True except AttributeError: pass - self.assertTrue(found) + assert found def mk_test_album(self): items = [_common.item() for _ in range(3)] @@ -87,11 +79,3 @@ class IPFSPluginTest(unittest.TestCase, TestHelper): album.store(inherit=False) return album - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_keyfinder.py b/test/plugins/test_keyfinder.py index 8509fe357..15314e141 100644 --- a/test/plugins/test_keyfinder.py +++ b/test/plugins/test_keyfinder.py @@ -13,23 +13,16 @@ # included in all copies or substantial portions of the Software. 
-import unittest from unittest.mock import patch from beets import util from beets.library import Item -from beets.test.helper import TestHelper +from beets.test.helper import AsIsImporterMixin, ImportTestCase, PluginMixin @patch("beets.util.command_output") -class KeyFinderTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - self.load_plugins("keyfinder") - - def tearDown(self): - self.teardown_beets() - self.unload_plugins() +class KeyFinderTest(AsIsImporterMixin, PluginMixin, ImportTestCase): + plugin = "keyfinder" def test_add_key(self, command_output): item = Item(path="/file") @@ -39,18 +32,17 @@ class KeyFinderTest(unittest.TestCase, TestHelper): self.run_command("keyfinder") item.load() - self.assertEqual(item["initial_key"], "C#m") + assert item["initial_key"] == "C#m" command_output.assert_called_with( ["KeyFinder", "-f", util.syspath(item.path)] ) def test_add_key_on_import(self, command_output): command_output.return_value = util.CommandOutput(b"dbm", b"") - importer = self.create_importer() - importer.run() + self.run_asis_importer() item = self.lib.items().get() - self.assertEqual(item["initial_key"], "C#m") + assert item["initial_key"] == "C#m" def test_force_overwrite(self, command_output): self.config["keyfinder"]["overwrite"] = True @@ -62,7 +54,7 @@ class KeyFinderTest(unittest.TestCase, TestHelper): self.run_command("keyfinder") item.load() - self.assertEqual(item["initial_key"], "C#m") + assert item["initial_key"] == "C#m" def test_do_not_overwrite(self, command_output): item = Item(path="/file", initial_key="F") @@ -72,7 +64,7 @@ class KeyFinderTest(unittest.TestCase, TestHelper): self.run_command("keyfinder") item.load() - self.assertEqual(item["initial_key"], "F") + assert item["initial_key"] == "F" def test_no_key(self, command_output): item = Item(path="/file") @@ -82,12 +74,4 @@ class KeyFinderTest(unittest.TestCase, TestHelper): self.run_command("keyfinder") item.load() - self.assertIsNone(item["initial_key"]) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert item["initial_key"] is None diff --git a/test/plugins/test_lastgenre.py b/test/plugins/test_lastgenre.py index 6f250c3ba..17156453e 100644 --- a/test/plugins/test_lastgenre.py +++ b/test/plugins/test_lastgenre.py @@ -14,24 +14,19 @@ """Tests for the 'lastgenre' plugin.""" - -import unittest from unittest.mock import Mock from beets import config from beets.test import _common -from beets.test.helper import TestHelper +from beets.test.helper import BeetsTestCase from beetsplug import lastgenre -class LastGenrePluginTest(unittest.TestCase, TestHelper): +class LastGenrePluginTest(BeetsTestCase): def setUp(self): - self.setup_beets() + super().setUp() self.plugin = lastgenre.LastGenrePlugin() - def tearDown(self): - self.teardown_beets() - def _setup_config( self, whitelist=False, canonical=False, count=1, prefer_specific=False ): @@ -49,50 +44,46 @@ class LastGenrePluginTest(unittest.TestCase, TestHelper): def test_default(self): """Fetch genres with whitelist and c14n deactivated""" self._setup_config() - self.assertEqual( - self.plugin._resolve_genres(["delta blues"]), "Delta Blues" - ) + assert self.plugin._resolve_genres(["delta blues"]) == "Delta Blues" def test_c14n_only(self): """Default c14n tree funnels up to most common genre except for *wrong* genres that stay unchanged. 
""" self._setup_config(canonical=True, count=99) - self.assertEqual(self.plugin._resolve_genres(["delta blues"]), "Blues") - self.assertEqual( - self.plugin._resolve_genres(["iota blues"]), "Iota Blues" - ) + assert self.plugin._resolve_genres(["delta blues"]) == "Blues" + assert self.plugin._resolve_genres(["iota blues"]) == "Iota Blues" def test_whitelist_only(self): """Default whitelist rejects *wrong* (non existing) genres.""" self._setup_config(whitelist=True) - self.assertEqual(self.plugin._resolve_genres(["iota blues"]), "") + assert self.plugin._resolve_genres(["iota blues"]) == "" def test_whitelist_c14n(self): """Default whitelist and c14n both activated result in all parents genres being selected (from specific to common). """ self._setup_config(canonical=True, whitelist=True, count=99) - self.assertEqual( - self.plugin._resolve_genres(["delta blues"]), "Delta Blues, Blues" + assert ( + self.plugin._resolve_genres(["delta blues"]) == "Delta Blues, Blues" ) def test_whitelist_custom(self): """Keep only genres that are in the whitelist.""" self._setup_config(whitelist={"blues", "rock", "jazz"}, count=2) - self.assertEqual(self.plugin._resolve_genres(["pop", "blues"]), "Blues") + assert self.plugin._resolve_genres(["pop", "blues"]) == "Blues" self._setup_config(canonical="", whitelist={"rock"}) - self.assertEqual(self.plugin._resolve_genres(["delta blues"]), "") + assert self.plugin._resolve_genres(["delta blues"]) == "" def test_count(self): """Keep the n first genres, as we expect them to be sorted from more to less popular. """ self._setup_config(whitelist={"blues", "rock", "jazz"}, count=2) - self.assertEqual( - self.plugin._resolve_genres(["jazz", "pop", "rock", "blues"]), - "Jazz, Rock", + assert ( + self.plugin._resolve_genres(["jazz", "pop", "rock", "blues"]) + == "Jazz, Rock" ) def test_count_c14n(self): @@ -102,53 +93,51 @@ class LastGenrePluginTest(unittest.TestCase, TestHelper): ) # thanks to c14n, 'blues' superseeds 'country blues' and takes the # second slot - self.assertEqual( + assert ( self.plugin._resolve_genres( ["jazz", "pop", "country blues", "rock"] - ), - "Jazz, Blues", + ) + == "Jazz, Blues" ) def test_c14n_whitelist(self): """Genres first pass through c14n and are then filtered""" self._setup_config(canonical=True, whitelist={"rock"}) - self.assertEqual(self.plugin._resolve_genres(["delta blues"]), "") + assert self.plugin._resolve_genres(["delta blues"]) == "" def test_empty_string_enables_canonical(self): """For backwards compatibility, setting the `canonical` option to the empty string enables it using the default tree. """ self._setup_config(canonical="", count=99) - self.assertEqual(self.plugin._resolve_genres(["delta blues"]), "Blues") + assert self.plugin._resolve_genres(["delta blues"]) == "Blues" def test_empty_string_enables_whitelist(self): """Again for backwards compatibility, setting the `whitelist` option to the empty string enables the default set of genres. """ self._setup_config(whitelist="") - self.assertEqual(self.plugin._resolve_genres(["iota blues"]), "") + assert self.plugin._resolve_genres(["iota blues"]) == "" def test_prefer_specific_loads_tree(self): """When prefer_specific is enabled but canonical is not the tree still has to be loaded. 
""" self._setup_config(prefer_specific=True, canonical=False) - self.assertNotEqual(self.plugin.c14n_branches, []) + assert self.plugin.c14n_branches != [] def test_prefer_specific_without_canonical(self): """Prefer_specific works without canonical.""" self._setup_config(prefer_specific=True, canonical=False, count=4) - self.assertEqual( - self.plugin._resolve_genres(["math rock", "post-rock"]), - "Post-Rock, Math Rock", + assert ( + self.plugin._resolve_genres(["math rock", "post-rock"]) + == "Post-Rock, Math Rock" ) def test_no_duplicate(self): """Remove duplicated genres.""" self._setup_config(count=99) - self.assertEqual( - self.plugin._resolve_genres(["blues", "blues"]), "Blues" - ) + assert self.plugin._resolve_genres(["blues", "blues"]) == "Blues" def test_tags_for(self): class MockPylastElem: @@ -170,9 +159,9 @@ class LastGenrePluginTest(unittest.TestCase, TestHelper): plugin = lastgenre.LastGenrePlugin() res = plugin._tags_for(MockPylastObj()) - self.assertEqual(res, ["pop", "rap"]) + assert res == ["pop", "rap"] res = plugin._tags_for(MockPylastObj(), min_weight=50) - self.assertEqual(res, ["pop"]) + assert res == ["pop"] def test_get_genre(self): mock_genres = {"track": "1", "album": "2", "artist": "3"} @@ -196,48 +185,36 @@ class LastGenrePluginTest(unittest.TestCase, TestHelper): config["lastgenre"] = {"force": False} res = self.plugin._get_genre(item) - self.assertEqual(res, (item.genre, "keep")) + assert res == (item.genre, "keep") config["lastgenre"] = {"force": True, "source": "track"} res = self.plugin._get_genre(item) - self.assertEqual(res, (mock_genres["track"], "track")) + assert res == (mock_genres["track"], "track") config["lastgenre"] = {"source": "album"} res = self.plugin._get_genre(item) - self.assertEqual(res, (mock_genres["album"], "album")) + assert res == (mock_genres["album"], "album") config["lastgenre"] = {"source": "artist"} res = self.plugin._get_genre(item) - self.assertEqual(res, (mock_genres["artist"], "artist")) + assert res == (mock_genres["artist"], "artist") mock_genres["artist"] = None res = self.plugin._get_genre(item) - self.assertEqual(res, (item.genre, "original")) + assert res == (item.genre, "original") config["lastgenre"] = {"fallback": "rap"} item.genre = None res = self.plugin._get_genre(item) - self.assertEqual( - res, (config["lastgenre"]["fallback"].get(), "fallback") - ) + assert res == (config["lastgenre"]["fallback"].get(), "fallback") def test_sort_by_depth(self): self._setup_config(canonical=True) # Normal case. tags = ("electronic", "ambient", "post-rock", "downtempo") res = self.plugin._sort_by_depth(tags) - self.assertEqual( - res, ["post-rock", "downtempo", "ambient", "electronic"] - ) + assert res == ["post-rock", "downtempo", "ambient", "electronic"] # Non-canonical tag ('chillout') present. 
tags = ("electronic", "ambient", "chillout") res = self.plugin._sort_by_depth(tags) - self.assertEqual(res, ["ambient", "electronic"]) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert res == ["ambient", "electronic"] diff --git a/test/plugins/test_limit.py b/test/plugins/test_limit.py index 0ed6c9202..12700295e 100644 --- a/test/plugins/test_limit.py +++ b/test/plugins/test_limit.py @@ -13,20 +13,19 @@ """Tests for the 'limit' plugin.""" -import unittest - -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase -class LimitPluginTest(unittest.TestCase, TestHelper): +class LimitPluginTest(PluginTestCase): """Unit tests for LimitPlugin Note: query prefix tests do not work correctly with `run_with_output`. """ + plugin = "limit" + def setUp(self): - self.setup_beets() - self.load_plugins("limit") + super().setUp() # we'll create an even number of tracks in the library self.num_test_items = 10 @@ -46,62 +45,50 @@ class LimitPluginTest(unittest.TestCase, TestHelper): self.track_head_range = "track:.." + str(self.num_limit) self.track_tail_range = "track:" + str(self.num_limit + 1) + ".." - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - def test_no_limit(self): """Returns all when there is no limit or filter.""" result = self.run_with_output("lslimit") - self.assertEqual(result.count("\n"), self.num_test_items) + assert result.count("\n") == self.num_test_items def test_lslimit_head(self): """Returns the expected number with `lslimit --head`.""" result = self.run_with_output("lslimit", "--head", str(self.num_limit)) - self.assertEqual(result.count("\n"), self.num_limit) + assert result.count("\n") == self.num_limit def test_lslimit_tail(self): """Returns the expected number with `lslimit --tail`.""" result = self.run_with_output("lslimit", "--tail", str(self.num_limit)) - self.assertEqual(result.count("\n"), self.num_limit) + assert result.count("\n") == self.num_limit def test_lslimit_head_invariant(self): """Returns the expected number with `lslimit --head` and a filter.""" result = self.run_with_output( "lslimit", "--head", str(self.num_limit), self.track_tail_range ) - self.assertEqual(result.count("\n"), self.num_limit) + assert result.count("\n") == self.num_limit def test_lslimit_tail_invariant(self): """Returns the expected number with `lslimit --tail` and a filter.""" result = self.run_with_output( "lslimit", "--tail", str(self.num_limit), self.track_head_range ) - self.assertEqual(result.count("\n"), self.num_limit) + assert result.count("\n") == self.num_limit def test_prefix(self): """Returns the expected number with the query prefix.""" result = self.lib.items(self.num_limit_prefix) - self.assertEqual(len(result), self.num_limit) + assert len(result) == self.num_limit def test_prefix_when_correctly_ordered(self): """Returns the expected number with the query prefix and filter when the prefix portion (correctly) appears last.""" correct_order = self.track_tail_range + " " + self.num_limit_prefix result = self.lib.items(correct_order) - self.assertEqual(len(result), self.num_limit) + assert len(result) == self.num_limit def test_prefix_when_incorrectly_ordred(self): """Returns no results with the query prefix and filter when the prefix portion (incorrectly) appears first.""" incorrect_order = self.num_limit_prefix + " " + self.track_tail_range result = self.lib.items(incorrect_order) - self.assertEqual(len(result), 0) - - -def 
suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert len(result) == 0 diff --git a/test/plugins/test_lyrics.py b/test/plugins/test_lyrics.py index 39fb19a24..f9f9d9be7 100644 --- a/test/plugins/test_lyrics.py +++ b/test/plugins/test_lyrics.py @@ -14,7 +14,6 @@ """Tests for the 'lyrics' plugin.""" - import itertools import os import re @@ -22,6 +21,7 @@ import unittest from unittest.mock import MagicMock, patch import confuse +import pytest import requests from beets import logging @@ -45,116 +45,106 @@ class LyricsPluginTest(unittest.TestCase): def test_search_artist(self): item = Item(artist="Alice ft. Bob", title="song") - self.assertIn(("Alice ft. Bob", ["song"]), lyrics.search_pairs(item)) - self.assertIn(("Alice", ["song"]), lyrics.search_pairs(item)) + assert ("Alice ft. Bob", ["song"]) in lyrics.search_pairs(item) + assert ("Alice", ["song"]) in lyrics.search_pairs(item) item = Item(artist="Alice feat Bob", title="song") - self.assertIn(("Alice feat Bob", ["song"]), lyrics.search_pairs(item)) - self.assertIn(("Alice", ["song"]), lyrics.search_pairs(item)) + assert ("Alice feat Bob", ["song"]) in lyrics.search_pairs(item) + assert ("Alice", ["song"]) in lyrics.search_pairs(item) item = Item(artist="Alice feat. Bob", title="song") - self.assertIn(("Alice feat. Bob", ["song"]), lyrics.search_pairs(item)) - self.assertIn(("Alice", ["song"]), lyrics.search_pairs(item)) + assert ("Alice feat. Bob", ["song"]) in lyrics.search_pairs(item) + assert ("Alice", ["song"]) in lyrics.search_pairs(item) item = Item(artist="Alice feats Bob", title="song") - self.assertIn(("Alice feats Bob", ["song"]), lyrics.search_pairs(item)) - self.assertNotIn(("Alice", ["song"]), lyrics.search_pairs(item)) + assert ("Alice feats Bob", ["song"]) in lyrics.search_pairs(item) + assert ("Alice", ["song"]) not in lyrics.search_pairs(item) item = Item(artist="Alice featuring Bob", title="song") - self.assertIn( - ("Alice featuring Bob", ["song"]), lyrics.search_pairs(item) - ) - self.assertIn(("Alice", ["song"]), lyrics.search_pairs(item)) + assert ("Alice featuring Bob", ["song"]) in lyrics.search_pairs(item) + assert ("Alice", ["song"]) in lyrics.search_pairs(item) item = Item(artist="Alice & Bob", title="song") - self.assertIn(("Alice & Bob", ["song"]), lyrics.search_pairs(item)) - self.assertIn(("Alice", ["song"]), lyrics.search_pairs(item)) + assert ("Alice & Bob", ["song"]) in lyrics.search_pairs(item) + assert ("Alice", ["song"]) in lyrics.search_pairs(item) item = Item(artist="Alice and Bob", title="song") - self.assertIn(("Alice and Bob", ["song"]), lyrics.search_pairs(item)) - self.assertIn(("Alice", ["song"]), lyrics.search_pairs(item)) + assert ("Alice and Bob", ["song"]) in lyrics.search_pairs(item) + assert ("Alice", ["song"]) in lyrics.search_pairs(item) item = Item(artist="Alice and Bob", title="song") - self.assertEqual( - ("Alice and Bob", ["song"]), list(lyrics.search_pairs(item))[0] - ) + assert ("Alice and Bob", ["song"]) == list(lyrics.search_pairs(item))[0] def test_search_artist_sort(self): item = Item(artist="CHVRCHΞS", title="song", artist_sort="CHVRCHES") - self.assertIn(("CHVRCHΞS", ["song"]), lyrics.search_pairs(item)) - self.assertIn(("CHVRCHES", ["song"]), lyrics.search_pairs(item)) + assert ("CHVRCHΞS", ["song"]) in lyrics.search_pairs(item) + assert ("CHVRCHES", ["song"]) in lyrics.search_pairs(item) # Make sure that the original artist name is still the first entry - self.assertEqual( - 
("CHVRCHΞS", ["song"]), list(lyrics.search_pairs(item))[0] - ) + assert ("CHVRCHΞS", ["song"]) == list(lyrics.search_pairs(item))[0] item = Item( artist="横山克", title="song", artist_sort="Masaru Yokoyama" ) - self.assertIn(("横山克", ["song"]), lyrics.search_pairs(item)) - self.assertIn(("Masaru Yokoyama", ["song"]), lyrics.search_pairs(item)) + assert ("横山克", ["song"]) in lyrics.search_pairs(item) + assert ("Masaru Yokoyama", ["song"]) in lyrics.search_pairs(item) # Make sure that the original artist name is still the first entry - self.assertEqual( - ("横山克", ["song"]), list(lyrics.search_pairs(item))[0] - ) + assert ("横山克", ["song"]) == list(lyrics.search_pairs(item))[0] def test_search_pairs_multi_titles(self): item = Item(title="1 / 2", artist="A") - self.assertIn(("A", ["1 / 2"]), lyrics.search_pairs(item)) - self.assertIn(("A", ["1", "2"]), lyrics.search_pairs(item)) + assert ("A", ["1 / 2"]) in lyrics.search_pairs(item) + assert ("A", ["1", "2"]) in lyrics.search_pairs(item) item = Item(title="1/2", artist="A") - self.assertIn(("A", ["1/2"]), lyrics.search_pairs(item)) - self.assertIn(("A", ["1", "2"]), lyrics.search_pairs(item)) + assert ("A", ["1/2"]) in lyrics.search_pairs(item) + assert ("A", ["1", "2"]) in lyrics.search_pairs(item) def test_search_pairs_titles(self): item = Item(title="Song (live)", artist="A") - self.assertIn(("A", ["Song"]), lyrics.search_pairs(item)) - self.assertIn(("A", ["Song (live)"]), lyrics.search_pairs(item)) + assert ("A", ["Song"]) in lyrics.search_pairs(item) + assert ("A", ["Song (live)"]) in lyrics.search_pairs(item) item = Item(title="Song (live) (new)", artist="A") - self.assertIn(("A", ["Song"]), lyrics.search_pairs(item)) - self.assertIn(("A", ["Song (live) (new)"]), lyrics.search_pairs(item)) + assert ("A", ["Song"]) in lyrics.search_pairs(item) + assert ("A", ["Song (live) (new)"]) in lyrics.search_pairs(item) item = Item(title="Song (live (new))", artist="A") - self.assertIn(("A", ["Song"]), lyrics.search_pairs(item)) - self.assertIn(("A", ["Song (live (new))"]), lyrics.search_pairs(item)) + assert ("A", ["Song"]) in lyrics.search_pairs(item) + assert ("A", ["Song (live (new))"]) in lyrics.search_pairs(item) item = Item(title="Song ft. B", artist="A") - self.assertIn(("A", ["Song"]), lyrics.search_pairs(item)) - self.assertIn(("A", ["Song ft. B"]), lyrics.search_pairs(item)) + assert ("A", ["Song"]) in lyrics.search_pairs(item) + assert ("A", ["Song ft. 
B"]) in lyrics.search_pairs(item) item = Item(title="Song featuring B", artist="A") - self.assertIn(("A", ["Song"]), lyrics.search_pairs(item)) - self.assertIn(("A", ["Song featuring B"]), lyrics.search_pairs(item)) + assert ("A", ["Song"]) in lyrics.search_pairs(item) + assert ("A", ["Song featuring B"]) in lyrics.search_pairs(item) item = Item(title="Song and B", artist="A") - self.assertNotIn(("A", ["Song"]), lyrics.search_pairs(item)) - self.assertIn(("A", ["Song and B"]), lyrics.search_pairs(item)) + assert ("A", ["Song and B"]) in lyrics.search_pairs(item) + assert ("A", ["Song"]) not in lyrics.search_pairs(item) item = Item(title="Song: B", artist="A") - self.assertIn(("A", ["Song"]), lyrics.search_pairs(item)) - self.assertIn(("A", ["Song: B"]), lyrics.search_pairs(item)) + assert ("A", ["Song"]) in lyrics.search_pairs(item) + assert ("A", ["Song: B"]) in lyrics.search_pairs(item) def test_remove_credits(self): - self.assertEqual( + assert ( lyrics.remove_credits( """It's close to midnight Lyrics brought by example.com""" - ), - "It's close to midnight", - ) - self.assertEqual( - lyrics.remove_credits("""Lyrics brought by example.com"""), "" + ) + == "It's close to midnight" ) + assert lyrics.remove_credits("""Lyrics brought by example.com""") == "" # don't remove 2nd verse for the only reason it contains 'lyrics' word text = """Look at all the shit that i done bought her See lyrics ain't nothin if the beat aint crackin""" - self.assertEqual(lyrics.remove_credits(text), text) + assert lyrics.remove_credits(text) == text def test_is_lyrics(self): texts = ["LyricsMania.com - Copyright (c) 2013 - All Rights Reserved"] @@ -163,11 +153,11 @@ class LyricsPluginTest(unittest.TestCase): of mywickedsongtext brand""" ] for t in texts: - self.assertFalse(google.is_lyrics(t)) + assert not google.is_lyrics(t) def test_slugify(self): text = "http://site.com/\xe7afe-au_lait(boisson)" - self.assertEqual(google.slugify(text), "http://site.com/cafe_au_lait") + assert google.slugify(text) == "http://site.com/cafe_au_lait" def test_scrape_strip_cruft(self): text = """ @@ -176,26 +166,22 @@ class LyricsPluginTest(unittest.TestCase): two !

four""" - self.assertEqual( - lyrics._scrape_strip_cruft(text, True), "one\ntwo !\n\nfour" - ) + assert lyrics._scrape_strip_cruft(text, True) == "one\ntwo !\n\nfour" def test_scrape_strip_scripts(self): text = """foobaz""" - self.assertEqual(lyrics._scrape_strip_cruft(text, True), "foobaz") + assert lyrics._scrape_strip_cruft(text, True) == "foobaz" def test_scrape_strip_tag_in_comment(self): text = """fooqux""" - self.assertEqual(lyrics._scrape_strip_cruft(text, True), "fooqux") + assert lyrics._scrape_strip_cruft(text, True) == "fooqux" def test_scrape_merge_paragraphs(self): text = "one

two

three" - self.assertEqual( - lyrics._scrape_merge_paragraphs(text), "one\ntwo\nthree" - ) + assert lyrics._scrape_merge_paragraphs(text) == "one\ntwo\nthree" def test_missing_lyrics(self): - self.assertFalse(google.is_lyrics(LYRICS_TEXTS["missing_texts"])) + assert not google.is_lyrics(LYRICS_TEXTS["missing_texts"]) def url_to_filename(url): @@ -350,10 +336,7 @@ class LyricsPluginSourcesTest(LyricsGoogleBaseTest, LyricsAssertions): LyricsGoogleBaseTest.setUp(self) self.plugin = lyrics.LyricsPlugin() - @unittest.skipUnless( - os.environ.get("INTEGRATION_TEST", "0") == "1", - "integration testing not enabled", - ) + @pytest.mark.integration_test def test_backend_sources_ok(self): """Test default backends with songs known to exist in respective databases. @@ -366,10 +349,7 @@ class LyricsPluginSourcesTest(LyricsGoogleBaseTest, LyricsAssertions): res = backend.fetch(s["artist"], s["title"]) self.assertLyricsContentOk(s["title"], res) - @unittest.skipUnless( - os.environ.get("INTEGRATION_TEST", "0") == "1", - "integration testing not enabled", - ) + @pytest.mark.integration_test def test_google_sources_ok(self): """Test if lyrics present on websites registered in beets google custom search engine are correctly scraped. @@ -379,7 +359,7 @@ class LyricsPluginSourcesTest(LyricsGoogleBaseTest, LyricsAssertions): for s in sources: url = s["url"] + s["path"] res = lyrics.scrape_lyrics_from_html(raw_backend.fetch_url(url)) - self.assertTrue(google.is_lyrics(res), url) + assert google.is_lyrics(res), url self.assertLyricsContentOk(s["title"], res, url) @@ -403,7 +383,7 @@ class LyricsGooglePluginMachineryTest(LyricsGoogleBaseTest, LyricsAssertions): """Test that lyrics of the mocked page are correctly scraped""" url = self.source["url"] + self.source["path"] res = lyrics.scrape_lyrics_from_html(raw_backend.fetch_url(url)) - self.assertTrue(google.is_lyrics(res), url) + assert google.is_lyrics(res), url self.assertLyricsContentOk(self.source["title"], res, url) @patch.object(lyrics.Backend, "fetch_url", MockFetchUrl()) @@ -419,12 +399,9 @@ class LyricsGooglePluginMachineryTest(LyricsGoogleBaseTest, LyricsAssertions): soup = BeautifulSoup( html, "html.parser", parse_only=SoupStrainer("title") ) - self.assertTrue( - google.is_page_candidate( - url, soup.title.string, s["title"], s["artist"] - ), - url, - ) + assert google.is_page_candidate( + url, soup.title.string, s["title"], s["artist"] + ), url def test_is_page_candidate_fuzzy_match(self): """Test matching html page title with song infos -- when song infos are @@ -435,16 +412,14 @@ class LyricsGooglePluginMachineryTest(LyricsGoogleBaseTest, LyricsAssertions): url_title = "example.com | Beats song by John doe" # very small diffs (typo) are ok eg 'beats' vs 'beets' with same artist - self.assertTrue( - google.is_page_candidate(url, url_title, s["title"], s["artist"]), - url, - ) + assert google.is_page_candidate( + url, url_title, s["title"], s["artist"] + ), url # reject different title url_title = "example.com | seets bong lyrics by John doe" - self.assertFalse( - google.is_page_candidate(url, url_title, s["title"], s["artist"]), - url, - ) + assert not google.is_page_candidate( + url, url_title, s["title"], s["artist"] + ), url def test_is_page_candidate_special_chars(self): """Ensure that `is_page_candidate` doesn't crash when the artist @@ -486,23 +461,23 @@ class GeniusScrapeLyricsFromHtmlTest(GeniusBaseTest): # expected return value None url = "https://genius.com/sample" mock = MockFetchUrl() - 
self.assertIsNone(genius._scrape_lyrics_from_html(mock(url))) + assert genius._scrape_lyrics_from_html(mock(url)) is None def test_good_lyrics(self): """Ensure we are able to scrape a page with lyrics""" url = "https://genius.com/Ttng-chinchilla-lyrics" mock = MockFetchUrl() lyrics = genius._scrape_lyrics_from_html(mock(url)) - self.assertIsNotNone(lyrics) - self.assertEqual(lyrics.count("\n"), 28) + assert lyrics is not None + assert lyrics.count("\n") == 28 def test_good_lyrics_multiple_divs(self): """Ensure we are able to scrape a page with lyrics""" url = "https://genius.com/2pac-all-eyez-on-me-lyrics" mock = MockFetchUrl() lyrics = genius._scrape_lyrics_from_html(mock(url)) - self.assertIsNotNone(lyrics) - self.assertEqual(lyrics.count("\n"), 133) + assert lyrics is not None + assert lyrics.count("\n") == 133 # TODO: find an example of a lyrics page with multiple divs and test it @@ -528,14 +503,14 @@ class GeniusFetchTest(GeniusBaseTest): { "result": { "primary_artist": { - "name": "\u200Bblackbear", + "name": "\u200bblackbear", }, "url": "blackbear_url", } }, { "result": { - "primary_artist": {"name": "El\u002Dp"}, + "primary_artist": {"name": "El\u002dp"}, "url": "El-p_url", } }, @@ -545,21 +520,21 @@ class GeniusFetchTest(GeniusBaseTest): ) as mock_json: # genius uses zero-width-spaces (\u200B) for lowercase # artists so we make sure we can match those - self.assertIsNotNone(genius.fetch("blackbear", "Idfc")) + assert genius.fetch("blackbear", "Idfc") is not None mock_fetch_url.assert_called_once_with("blackbear_url") mock_scrape.assert_called_once_with(True) # genius uses the hyphen minus (\u002D) as their dash - self.assertIsNotNone(genius.fetch("El-p", "Idfc")) + assert genius.fetch("El-p", "Idfc") is not None mock_fetch_url.assert_called_with("El-p_url") mock_scrape.assert_called_with(True) # test no matching artist - self.assertIsNone(genius.fetch("doesntexist", "none")) + assert genius.fetch("doesntexist", "none") is None # test invalid json mock_json.return_value = None - self.assertIsNone(genius.fetch("blackbear", "Idfc")) + assert genius.fetch("blackbear", "Idfc") is None # TODO: add integration test hitting real api @@ -589,9 +564,7 @@ class TekstowoExtractLyricsTest(TekstowoBaseTest): """Ensure we are able to scrape a page with lyrics""" url = "https://www.tekstowo.pl/piosenka,24kgoldn,city_of_angels_1.html" mock = MockFetchUrl() - self.assertIsNotNone( - tekstowo.extract_lyrics(mock(url), "24kGoldn", "City of Angels") - ) + assert tekstowo.extract_lyrics(mock(url)) def test_no_lyrics(self): """Ensure we don't crash when the scraping the html for a Tekstowo page @@ -602,61 +575,7 @@ class TekstowoExtractLyricsTest(TekstowoBaseTest): "beethoven_piano_sonata_17_tempest_the_3rd_movement.html" ) mock = MockFetchUrl() - self.assertEqual( - tekstowo.extract_lyrics( - mock(url), - "Beethoven", - "Beethoven Piano Sonata 17" "Tempest The 3rd Movement", - ), - None, - ) - - def test_song_no_match(self): - """Ensure we return None when a song does not match the search query""" - # https://github.com/beetbox/beets/issues/4406 - # expected return value None - url = ( - "https://www.tekstowo.pl/piosenka,bailey_bigger" - ",black_eyed_susan.html" - ) - mock = MockFetchUrl() - self.assertEqual( - tekstowo.extract_lyrics( - mock(url), "Kelly Bailey", "Black Mesa Inbound" - ), - None, - ) - - -class TekstowoParseSearchResultsTest(TekstowoBaseTest): - """tests Tekstowo.parse_search_results()""" - - def setUp(self): - """Set up configuration""" - TekstowoBaseTest.setUp(self) - self.plugin = 
lyrics.LyricsPlugin() - - def test_multiple_results(self): - """Ensure we are able to scrape a page with multiple search results""" - url = ( - "https://www.tekstowo.pl/szukaj,wykonawca,juice+wrld" - ",tytul,lucid+dreams.html" - ) - mock = MockFetchUrl() - self.assertEqual( - tekstowo.parse_search_results(mock(url)), - "http://www.tekstowo.pl/piosenka,juice_wrld," - "lucid_dreams__remix__ft__lil_uzi_vert.html", - ) - - def test_no_results(self): - """Ensure we are able to scrape a page with no search results""" - url = ( - "https://www.tekstowo.pl/szukaj,wykonawca," - "agfdgja,tytul,agfdgafg.html" - ) - mock = MockFetchUrl() - self.assertEqual(tekstowo.parse_search_results(mock(url)), None) + assert not tekstowo.extract_lyrics(mock(url)) class TekstowoIntegrationTest(TekstowoBaseTest, LyricsAssertions): @@ -668,26 +587,20 @@ class TekstowoIntegrationTest(TekstowoBaseTest, LyricsAssertions): self.plugin = lyrics.LyricsPlugin() tekstowo.config = self.plugin.config - @unittest.skipUnless( - os.environ.get("INTEGRATION_TEST", "0") == "1", - "integration testing not enabled", - ) + @pytest.mark.integration_test def test_normal(self): """Ensure we can fetch a song's lyrics in the ordinary case""" lyrics = tekstowo.fetch("Boy in Space", "u n eye") self.assertLyricsContentOk("u n eye", lyrics) - @unittest.skipUnless( - os.environ.get("INTEGRATION_TEST", "0") == "1", - "integration testing not enabled", - ) + @pytest.mark.integration_test def test_no_matching_results(self): """Ensure we fetch nothing if there are search results returned but no matches""" # https://github.com/beetbox/beets/issues/4406 # expected return value None lyrics = tekstowo.fetch("Kelly Bailey", "Black Mesa Inbound") - self.assertEqual(lyrics, None) + assert lyrics is None # test LRCLib backend @@ -707,12 +620,26 @@ class LRCLibLyricsTest(unittest.TestCase): mock_get.return_value.json.return_value = mock_response mock_get.return_value.status_code = 200 + self.plugin.config["synced"] = False lyrics = lrclib.fetch("la", "la", "la", 999) - self.assertEqual(lyrics, mock_response["plainLyrics"]) + assert lyrics == mock_response["plainLyrics"] self.plugin.config["synced"] = True lyrics = lrclib.fetch("la", "la", "la", 999) - self.assertEqual(lyrics, mock_response["syncedLyrics"]) + assert lyrics == mock_response["syncedLyrics"] + + @patch("beetsplug.lyrics.requests.get") + def test_fetch_synced_lyrics_fallback(self, mock_get): + mock_response = { + "syncedLyrics": "", + "plainLyrics": "la la la", + } + mock_get.return_value.json.return_value = mock_response + mock_get.return_value.status_code = 200 + + self.plugin.config["synced"] = True + lyrics = lrclib.fetch("la", "la", "la", 999) + assert lyrics == mock_response["plainLyrics"] @patch("beetsplug.lyrics.requests.get") def test_fetch_plain_lyrics(self, mock_get): @@ -723,9 +650,10 @@ class LRCLibLyricsTest(unittest.TestCase): mock_get.return_value.json.return_value = mock_response mock_get.return_value.status_code = 200 + self.plugin.config["synced"] = False lyrics = lrclib.fetch("la", "la", "la", 999) - self.assertEqual(lyrics, mock_response["plainLyrics"]) + assert lyrics == mock_response["plainLyrics"] @patch("beetsplug.lyrics.requests.get") def test_fetch_not_found(self, mock_get): @@ -739,7 +667,7 @@ class LRCLibLyricsTest(unittest.TestCase): lyrics = lrclib.fetch("la", "la", "la", 999) - self.assertIsNone(lyrics) + assert lyrics is None @patch("beetsplug.lyrics.requests.get") def test_fetch_exception(self, mock_get): @@ -747,7 +675,7 @@ class 
LRCLibLyricsTest(unittest.TestCase): lyrics = lrclib.fetch("la", "la", "la", 999) - self.assertIsNone(lyrics) + assert lyrics is None class LRCLibIntegrationTest(LyricsAssertions): @@ -755,34 +683,22 @@ class LRCLibIntegrationTest(LyricsAssertions): self.plugin = lyrics.LyricsPlugin() lrclib.config = self.plugin.config - @unittest.skipUnless( - os.environ.get("INTEGRATION_TEST", "0") == "1", - "integration testing not enabled", - ) + @pytest.mark.integration_test def test_track_with_lyrics(self): lyrics = lrclib.fetch("Boy in Space", "u n eye", "Live EP", 160) self.assertLyricsContentOk("u n eye", lyrics) - @unittest.skipUnless( - os.environ.get("INTEGRATION_TEST", "0") == "1", - "integration testing not enabled", - ) + @pytest.mark.integration_test def test_instrumental_track(self): lyrics = lrclib.fetch( - "Kelly Bailey", - "Black Mesa Inbound", - "Half Life 2 Soundtrack", - 134, + "Kelly Bailey", "Black Mesa Inbound", "Half Life 2 Soundtrack", 134 ) - self.assertIsNone(lyrics) + assert lyrics is None - @unittest.skipUnless( - os.environ.get("INTEGRATION_TEST", "0") == "1", - "integration testing not enabled", - ) + @pytest.mark.integration_test def test_nonexistent_track(self): lyrics = lrclib.fetch("blah", "blah", "blah", 999) - self.assertIsNone(lyrics) + assert lyrics is None # test utilities @@ -792,35 +708,25 @@ class SlugTests(unittest.TestCase): def test_slug(self): # plain ascii passthrough text = "test" - self.assertEqual(lyrics.slug(text), "test") + assert lyrics.slug(text) == "test" # german unicode and capitals text = "Mørdag" - self.assertEqual(lyrics.slug(text), "mordag") + assert lyrics.slug(text) == "mordag" # more accents and quotes text = "l'été c'est fait pour jouer" - self.assertEqual(lyrics.slug(text), "l-ete-c-est-fait-pour-jouer") + assert lyrics.slug(text) == "l-ete-c-est-fait-pour-jouer" # accents, parens and spaces text = "\xe7afe au lait (boisson)" - self.assertEqual(lyrics.slug(text), "cafe-au-lait-boisson") + assert lyrics.slug(text) == "cafe-au-lait-boisson" text = "Multiple spaces -- and symbols! -- merged" - self.assertEqual( - lyrics.slug(text), "multiple-spaces-and-symbols-merged" - ) - text = "\u200Bno-width-space" - self.assertEqual(lyrics.slug(text), "no-width-space") + assert lyrics.slug(text) == "multiple-spaces-and-symbols-merged" + text = "\u200bno-width-space" + assert lyrics.slug(text) == "no-width-space" # variations of dashes should get standardized - dashes = ["\u200D", "\u2010"] + dashes = ["\u200d", "\u2010"] for dash1, dash2 in itertools.combinations(dashes, 2): - self.assertEqual(lyrics.slug(dash1), lyrics.slug(dash2)) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert lyrics.slug(dash1) == lyrics.slug(dash2) diff --git a/test/plugins/test_mbsubmit.py b/test/plugins/test_mbsubmit.py index 40024bc71..f92d85973 100644 --- a/test/plugins/test_mbsubmit.py +++ b/test/plugins/test_mbsubmit.py @@ -13,31 +13,27 @@ # included in all copies or substantial portions of the Software. 
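# NOTE (editorial sketch, not part of the diff): the @pytest.mark.integration_test
# decorators above replace the old skipUnless(os.environ.get("INTEGRATION_TEST") ...)
# guards. That only works if the marker is registered and gated somewhere outside this
# hunk; the conftest.py arrangement below is purely an assumption of how that gating
# might look, using the standard pytest collection hook.
import os

import pytest

def pytest_collection_modifyitems(config, items):
    if os.environ.get("INTEGRATION_TEST", "0") == "1":
        return  # integration testing enabled: run everything
    skip = pytest.mark.skip(reason="integration testing not enabled")
    for item in items:
        if "integration_test" in item.keywords:
            item.add_marker(skip)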
-import unittest - from beets.test.helper import ( AutotagStub, - ImportHelper, - TerminalImportSessionSetup, - TestHelper, + ImportTestCase, + PluginMixin, + TerminalImportMixin, capture_stdout, control_stdin, ) -class MBSubmitPluginTest( - TerminalImportSessionSetup, unittest.TestCase, ImportHelper, TestHelper -): +class MBSubmitPluginTest(PluginMixin, TerminalImportMixin, ImportTestCase): + plugin = "mbsubmit" + def setUp(self): - self.setup_beets() - self.load_plugins("mbsubmit") - self._create_import_dir(2) - self._setup_import_session() + super().setUp() + self.prepare_album_for_import(2) + self.setup_importer() self.matcher = AutotagStub().install() def tearDown(self): - self.unload_plugins() - self.teardown_beets() + super().tearDown() self.matcher.restore() def test_print_tracks_output(self): @@ -52,10 +48,10 @@ class MBSubmitPluginTest( # Manually build the string for comparing the output. tracklist = ( "Open files with Picard? " - "01. Tag Title 1 - Tag Artist (0:01)\n" - "02. Tag Title 2 - Tag Artist (0:01)" + "01. Tag Track 1 - Tag Artist (0:01)\n" + "02. Tag Track 2 - Tag Artist (0:01)" ) - self.assertIn(tracklist, output.getvalue()) + assert tracklist in output.getvalue() def test_print_tracks_output_as_tracks(self): """Test the output of the "print tracks" choice, as singletons.""" @@ -68,14 +64,6 @@ class MBSubmitPluginTest( # Manually build the string for comparing the output. tracklist = ( - "Open files with Picard? " "02. Tag Title 2 - Tag Artist (0:01)" + "Open files with Picard? " "02. Tag Track 2 - Tag Artist (0:01)" ) - self.assertIn(tracklist, output.getvalue()) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert tracklist in output.getvalue() diff --git a/test/plugins/test_mbsync.py b/test/plugins/test_mbsync.py index bc41b3464..f65df4256 100644 --- a/test/plugins/test_mbsync.py +++ b/test/plugins/test_mbsync.py @@ -12,193 +12,78 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. 
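# NOTE (editorial sketch, not part of the diff): the mbsubmit tests above only show the
# assertions on output.getvalue(); the surrounding context lines pair the two helpers
# imported at the top of that file. A sketch of that combination follows -- the concrete
# prompt answers ("p", "s") are an assumption, not taken from this patch.
from beets.test.helper import capture_stdout, control_stdin

def drive_importer(importer):
    # Feed canned answers to the interactive prompt while collecting everything the
    # importer prints, so a test can search the output for the track listing.
    with capture_stdout() as output, control_stdin("\n".join(["p", "s"])):
        importer.run()
    return output.getvalue()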
- -import unittest from unittest.mock import patch -from beets import config +from beets.autotag.hooks import AlbumInfo, TrackInfo from beets.library import Item -from beets.test.helper import ( - TestHelper, - capture_log, - generate_album_info, - generate_track_info, -) +from beets.test.helper import PluginTestCase, capture_log -class MbsyncCliTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - self.load_plugins("mbsync") - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() +class MbsyncCliTest(PluginTestCase): + plugin = "mbsync" @patch("beets.autotag.mb.album_for_id") @patch("beets.autotag.mb.track_for_id") def test_update_library(self, track_for_id, album_for_id): - album_for_id.return_value = generate_album_info( - "album id", [("track id", {"release_track_id": "release track id"})] - ) - track_for_id.return_value = generate_track_info( - "singleton track id", {"title": "singleton info"} - ) - album_item = Item( - album="old title", + album="old album", mb_albumid="81ae60d4-5b75-38df-903a-db2cfa51c2c6", - mb_trackid="old track id", - mb_releasetrackid="release track id", - path="", + mb_trackid="track id", ) - album = self.lib.add_album([album_item]) + self.lib.add_album([album_item]) - item = Item( - title="old title", - mb_trackid="b8c2cf90-83f9-3b5f-8ccd-31fb866fcf37", - path="", + singleton = Item( + title="old title", mb_trackid="b8c2cf90-83f9-3b5f-8ccd-31fb866fcf37" + ) + self.lib.add(singleton) + + album_for_id.return_value = AlbumInfo( + album_id="album id", + album="new album", + tracks=[ + TrackInfo(track_id=album_item.mb_trackid, title="new title") + ], + ) + track_for_id.return_value = TrackInfo( + track_id=singleton.mb_trackid, title="new title" ) - self.lib.add(item) with capture_log() as logs: self.run_command("mbsync") - self.assertIn("Sending event: albuminfo_received", logs) - self.assertIn("Sending event: trackinfo_received", logs) + assert "Sending event: albuminfo_received" in logs + assert "Sending event: trackinfo_received" in logs - item.load() - self.assertEqual(item.title, "singleton info") + singleton.load() + assert singleton.title == "new title" album_item.load() - self.assertEqual(album_item.title, "track info") - self.assertEqual(album_item.mb_trackid, "track id") + assert album_item.title == "new title" + assert album_item.mb_trackid == "track id" + assert album_item.get_album().album == "new album" - album.load() - self.assertEqual(album.album, "album info") + def test_custom_format(self): + for item in [ + Item(artist="albumartist", album="no id"), + Item( + artist="albumartist", + album="invalid id", + mb_albumid="a1b2c3d4", + ), + ]: + self.lib.add_album([item]) - def test_message_when_skipping(self): - config["format_item"] = "$artist - $album - $title" - config["format_album"] = "$albumartist - $album" + for item in [ + Item(artist="artist", title="no id"), + Item(artist="artist", title="invalid id", mb_trackid="a1b2c3d4"), + ]: + self.lib.add(item) - # Test album with no mb_albumid. - # The default format for an album include $albumartist so - # set that here, too. 
- album_invalid = Item( - albumartist="album info", album="album info", path="" - ) - self.lib.add_album([album_invalid]) - - # default format with capture_log("beets.mbsync") as logs: - self.run_command("mbsync") - e = ( - "mbsync: Skipping album with no mb_albumid: " - + "album info - album info" - ) - self.assertEqual(e, logs[0]) - - # custom format - with capture_log("beets.mbsync") as logs: - self.run_command("mbsync", "-f", "'$album'") - e = "mbsync: Skipping album with no mb_albumid: 'album info'" - self.assertEqual(e, logs[0]) - - # restore the config - config["format_item"] = "$artist - $album - $title" - config["format_album"] = "$albumartist - $album" - - # Test singleton with no mb_trackid. - # The default singleton format includes $artist and $album - # so we need to stub them here - item_invalid = Item( - artist="album info", - album="album info", - title="old title", - path="", - ) - self.lib.add(item_invalid) - - # default format - with capture_log("beets.mbsync") as logs: - self.run_command("mbsync") - e = ( - "mbsync: Skipping singleton with no mb_trackid: " - + "album info - album info - old title" - ) - self.assertEqual(e, logs[0]) - - # custom format - with capture_log("beets.mbsync") as logs: - self.run_command("mbsync", "-f", "'$title'") - e = "mbsync: Skipping singleton with no mb_trackid: 'old title'" - self.assertEqual(e, logs[0]) - - def test_message_when_invalid(self): - config["format_item"] = "$artist - $album - $title" - config["format_album"] = "$albumartist - $album" - - # Test album with invalid mb_albumid. - # The default format for an album include $albumartist so - # set that here, too. - album_invalid = Item( - albumartist="album info", - album="album info", - mb_albumid="a1b2c3d4", - path="", - ) - self.lib.add_album([album_invalid]) - - # default format - with capture_log("beets.mbsync") as logs: - self.run_command("mbsync") - e = ( - "mbsync: Skipping album with invalid mb_albumid: " - + "album info - album info" - ) - self.assertEqual(e, logs[0]) - - # custom format - with capture_log("beets.mbsync") as logs: - self.run_command("mbsync", "-f", "'$album'") - e = "mbsync: Skipping album with invalid mb_albumid: 'album info'" - self.assertEqual(e, logs[0]) - - # restore the config - config["format_item"] = "$artist - $album - $title" - config["format_album"] = "$albumartist - $album" - - # Test singleton with invalid mb_trackid. 
- # The default singleton format includes $artist and $album - # so we need to stub them here - item_invalid = Item( - artist="album info", - album="album info", - title="old title", - mb_trackid="a1b2c3d4", - path="", - ) - self.lib.add(item_invalid) - - # default format - with capture_log("beets.mbsync") as logs: - self.run_command("mbsync") - e = ( - "mbsync: Skipping singleton with invalid mb_trackid: " - + "album info - album info - old title" - ) - self.assertEqual(e, logs[0]) - - # custom format - with capture_log("beets.mbsync") as logs: - self.run_command("mbsync", "-f", "'$title'") - e = "mbsync: Skipping singleton with invalid mb_trackid: 'old title'" - self.assertEqual(e, logs[0]) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + self.run_command("mbsync", "-f", "'%if{$album,$album,$title}'") + assert set(logs) == { + "mbsync: Skipping album with no mb_albumid: 'no id'", + "mbsync: Skipping album with invalid mb_albumid: 'invalid id'", + "mbsync: Skipping singleton with no mb_trackid: 'no id'", + "mbsync: Skipping singleton with invalid mb_trackid: 'invalid id'", + } diff --git a/test/plugins/test_mpdstats.py b/test/plugins/test_mpdstats.py index 40804fabb..dcaf196ef 100644 --- a/test/plugins/test_mpdstats.py +++ b/test/plugins/test_mpdstats.py @@ -13,23 +13,16 @@ # included in all copies or substantial portions of the Software. -import unittest from unittest.mock import ANY, Mock, call, patch from beets import util from beets.library import Item -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase from beetsplug.mpdstats import MPDStats -class MPDStatsTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - self.load_plugins("mpdstats") - - def tearDown(self): - self.teardown_beets() - self.unload_plugins() +class MPDStatsTest(PluginTestCase): + plugin = "mpdstats" def test_update_rating(self): item = Item(title="title", path="", id=1) @@ -38,8 +31,8 @@ class MPDStatsTest(unittest.TestCase, TestHelper): log = Mock() mpdstats = MPDStats(self.lib, log) - self.assertFalse(mpdstats.update_rating(item, True)) - self.assertFalse(mpdstats.update_rating(None, True)) + assert not mpdstats.update_rating(item, True) + assert not mpdstats.update_rating(None, True) def test_get_item(self): item_path = util.normpath("/foo/bar.flac") @@ -49,9 +42,9 @@ class MPDStatsTest(unittest.TestCase, TestHelper): log = Mock() mpdstats = MPDStats(self.lib, log) - self.assertEqual(str(mpdstats.get_item(item_path)), str(item)) - self.assertIsNone(mpdstats.get_item("/some/non-existing/path")) - self.assertIn("item not found:", log.info.call_args[0][0]) + assert str(mpdstats.get_item(item_path)) == str(item) + assert mpdstats.get_item("/some/non-existing/path") is None + assert "item not found:" in log.info.call_args[0][0] FAKE_UNKNOWN_STATE = "some-unknown-one" STATUSES = [ @@ -88,11 +81,3 @@ class MPDStatsTest(unittest.TestCase, TestHelper): log.info.assert_has_calls( [call("pause"), call("playing {0}", ANY), call("stop")] ) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_parentwork.py b/test/plugins/test_parentwork.py index 377784983..99267f6ff 100644 --- a/test/plugins/test_parentwork.py +++ b/test/plugins/test_parentwork.py @@ -14,13 +14,12 @@ """Tests for the 'parentwork' plugin.""" - -import os -import unittest from 
unittest.mock import patch +import pytest + from beets.library import Item -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase from beetsplug import parentwork work = { @@ -85,21 +84,11 @@ def mock_workid_response(mbid, includes): return p_work -class ParentWorkIntegrationTest(unittest.TestCase, TestHelper): - def setUp(self): - """Set up configuration""" - self.setup_beets() - self.load_plugins("parentwork") - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() +@pytest.mark.integration_test +class ParentWorkIntegrationTest(PluginTestCase): + plugin = "parentwork" # test how it works with real musicbrainz data - @unittest.skipUnless( - os.environ.get("INTEGRATION_TEST", "0") == "1", - "integration testing not enabled", - ) def test_normal_case_real(self): item = Item( path="/file", @@ -112,14 +101,8 @@ class ParentWorkIntegrationTest(unittest.TestCase, TestHelper): self.run_command("parentwork") item.load() - self.assertEqual( - item["mb_parentworkid"], "32c8943f-1b27-3a23-8660-4567f4847c94" - ) + assert item["mb_parentworkid"] == "32c8943f-1b27-3a23-8660-4567f4847c94" - @unittest.skipUnless( - os.environ.get("INTEGRATION_TEST", "0") == "1", - "integration testing not enabled", - ) def test_force_real(self): self.config["parentwork"]["force"] = True item = Item( @@ -135,14 +118,8 @@ class ParentWorkIntegrationTest(unittest.TestCase, TestHelper): self.run_command("parentwork") item.load() - self.assertEqual( - item["mb_parentworkid"], "32c8943f-1b27-3a23-8660-4567f4847c94" - ) + assert item["mb_parentworkid"] == "32c8943f-1b27-3a23-8660-4567f4847c94" - @unittest.skipUnless( - os.environ.get("INTEGRATION_TEST", "0") == "1", - "integration testing not enabled", - ) def test_no_force_real(self): self.config["parentwork"]["force"] = False item = Item( @@ -159,40 +136,36 @@ class ParentWorkIntegrationTest(unittest.TestCase, TestHelper): self.run_command("parentwork") item.load() - self.assertEqual(item["mb_parentworkid"], "XXX") + assert item["mb_parentworkid"] == "XXX" # test different cases, still with Matthew Passion Ouverture or Mozart # requiem - @unittest.skipUnless( - os.environ.get("INTEGRATION_TEST", "0") == "1", - "integration testing not enabled", - ) def test_direct_parent_work_real(self): mb_workid = "2e4a3668-458d-3b2a-8be2-0b08e0d8243a" - self.assertEqual( - "f04b42df-7251-4d86-a5ee-67cfa49580d1", - parentwork.direct_parent_id(mb_workid)[0], + assert ( + "f04b42df-7251-4d86-a5ee-67cfa49580d1" + == parentwork.direct_parent_id(mb_workid)[0] ) - self.assertEqual( - "45afb3b2-18ac-4187-bc72-beb1b1c194ba", - parentwork.work_parent_id(mb_workid)[0], + assert ( + "45afb3b2-18ac-4187-bc72-beb1b1c194ba" + == parentwork.work_parent_id(mb_workid)[0] ) -class ParentWorkTest(unittest.TestCase, TestHelper): +class ParentWorkTest(PluginTestCase): + plugin = "parentwork" + def setUp(self): """Set up configuration""" - self.setup_beets() - self.load_plugins("parentwork") + super().setUp() self.patcher = patch( "musicbrainzngs.get_work_by_id", side_effect=mock_workid_response ) self.patcher.start() def tearDown(self): - self.unload_plugins() - self.teardown_beets() + super().tearDown() self.patcher.stop() def test_normal_case(self): @@ -202,7 +175,7 @@ class ParentWorkTest(unittest.TestCase, TestHelper): self.run_command("parentwork") item.load() - self.assertEqual(item["mb_parentworkid"], "3") + assert item["mb_parentworkid"] == "3" def test_force(self): self.config["parentwork"]["force"] = True @@ -218,7 +191,7 @@ class 
ParentWorkTest(unittest.TestCase, TestHelper): self.run_command("parentwork") item.load() - self.assertEqual(item["mb_parentworkid"], "3") + assert item["mb_parentworkid"] == "3" def test_no_force(self): self.config["parentwork"]["force"] = False @@ -234,16 +207,8 @@ class ParentWorkTest(unittest.TestCase, TestHelper): self.run_command("parentwork") item.load() - self.assertEqual(item["mb_parentworkid"], "XXX") + assert item["mb_parentworkid"] == "XXX" def test_direct_parent_work(self): - self.assertEqual("2", parentwork.direct_parent_id("1")[0]) - self.assertEqual("3", parentwork.work_parent_id("1")[0]) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert "2" == parentwork.direct_parent_id("1")[0] + assert "3" == parentwork.work_parent_id("1")[0] diff --git a/test/plugins/test_permissions.py b/test/plugins/test_permissions.py index d10a873cd..274cd92ac 100644 --- a/test/plugins/test_permissions.py +++ b/test/plugins/test_permissions.py @@ -1,13 +1,11 @@ -"""Tests for the 'permissions' plugin. -""" +"""Tests for the 'permissions' plugin.""" import os import platform -import unittest from unittest.mock import Mock, patch from beets.test._common import touch -from beets.test.helper import TestHelper +from beets.test.helper import AsIsImporterMixin, ImportTestCase, PluginMixin from beets.util import displayable_path from beetsplug.permissions import ( check_permissions, @@ -16,17 +14,14 @@ from beetsplug.permissions import ( ) -class PermissionsPluginTest(unittest.TestCase, TestHelper): +class PermissionsPluginTest(AsIsImporterMixin, PluginMixin, ImportTestCase): + plugin = "permissions" + def setUp(self): - self.setup_beets() - self.load_plugins("permissions") + super().setUp() self.config["permissions"] = {"file": "777", "dir": "777"} - def tearDown(self): - self.teardown_beets() - self.unload_plugins() - def test_permissions_on_album_imported(self): self.do_thing(True) @@ -48,10 +43,9 @@ class PermissionsPluginTest(unittest.TestCase, TestHelper): & 0o777 ) - self.importer = self.create_importer() typs = ["file", "dir"] - track_file = (b"album 0", b"track 0.mp3") + track_file = (b"album", b"track_1.mp3") self.exp_perms = { True: { k: convert_perm(self.config["permissions"][k].get()) @@ -60,7 +54,7 @@ class PermissionsPluginTest(unittest.TestCase, TestHelper): False: {k: get_stat(v) for (k, v) in zip(typs, (track_file, ()))}, } - self.importer.run() + self.run_asis_importer() item = self.lib.items().get() self.assertPerms(item.path, "file", expect_success) @@ -68,7 +62,7 @@ class PermissionsPluginTest(unittest.TestCase, TestHelper): for path in dirs_in_library(self.lib.directory, item.path): self.assertPerms(path, "dir", expect_success) - def assertPerms(self, path, typ, expect_success): # noqa + def assertPerms(self, path, typ, expect_success): for x in [ (True, self.exp_perms[expect_success][typ], "!="), (False, self.exp_perms[not expect_success][typ], "=="), @@ -79,13 +73,13 @@ class PermissionsPluginTest(unittest.TestCase, TestHelper): x[2], oct(x[1]), ) - self.assertEqual(x[0], check_permissions(path, x[1]), msg=msg) + assert x[0] == check_permissions(path, x[1]), msg def test_convert_perm_from_string(self): - self.assertEqual(convert_perm("10"), 8) + assert convert_perm("10") == 8 def test_convert_perm_from_int(self): - self.assertEqual(convert_perm(10), 8) + assert convert_perm(10) == 8 def test_permissions_on_set_art(self): self.do_set_art(True) @@ -97,20 +91,9 @@ class 
PermissionsPluginTest(unittest.TestCase, TestHelper): def do_set_art(self, expect_success): if platform.system() == "Windows": self.skipTest("permissions not available on Windows") - self.importer = self.create_importer() - self.importer.run() + self.run_asis_importer() album = self.lib.albums().get() artpath = os.path.join(self.temp_dir, b"cover.jpg") touch(artpath) album.set_art(artpath) - self.assertEqual( - expect_success, check_permissions(album.artpath, 0o777) - ) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert expect_success == check_permissions(album.artpath, 0o777) diff --git a/test/plugins/test_play.py b/test/plugins/test_play.py index ac60e8281..712739633 100644 --- a/test/plugins/test_play.py +++ b/test/plugins/test_play.py @@ -14,33 +14,30 @@ """Tests for the play plugin""" - import os import sys import unittest from unittest.mock import ANY, patch -from beets.test.helper import CleanupModulesMixin, TestHelper, control_stdin +import pytest + +from beets.test.helper import CleanupModulesMixin, PluginTestCase, control_stdin from beets.ui import UserError from beets.util import open_anything from beetsplug.play import PlayPlugin @patch("beetsplug.play.util.interactive_open") -class PlayPluginTest(CleanupModulesMixin, unittest.TestCase, TestHelper): +class PlayPluginTest(CleanupModulesMixin, PluginTestCase): modules = (PlayPlugin.__module__,) + plugin = "play" def setUp(self): - self.setup_beets() - self.load_plugins("play") + super().setUp() self.item = self.add_item(album="a nice älbum", title="aNiceTitle") self.lib.add_album([self.item]) self.config["play"]["command"] = "echo" - def tearDown(self): - self.teardown_beets() - self.unload_plugins() - def run_and_assert( self, open_mock, @@ -54,7 +51,7 @@ class PlayPluginTest(CleanupModulesMixin, unittest.TestCase, TestHelper): expected_playlist = expected_playlist or self.item.path.decode("utf-8") exp_playlist = expected_playlist + "\n" with open(open_mock.call_args[0][0][0], "rb") as playlist: - self.assertEqual(exp_playlist, playlist.read().decode("utf-8")) + assert exp_playlist == playlist.read().decode("utf-8") def test_basic(self, open_mock): self.run_and_assert(open_mock) @@ -99,9 +96,8 @@ class PlayPluginTest(CleanupModulesMixin, unittest.TestCase, TestHelper): open_mock.assert_called_once_with(ANY, open_anything()) with open(open_mock.call_args[0][0][0], "rb") as f: playlist = f.read().decode("utf-8") - self.assertEqual( - "{}\n".format(os.path.dirname(self.item.path.decode("utf-8"))), - playlist, + assert ( + f'{os.path.dirname(self.item.path.decode("utf-8"))}\n' == playlist ) def test_raw(self, open_mock): @@ -143,13 +139,5 @@ class PlayPluginTest(CleanupModulesMixin, unittest.TestCase, TestHelper): def test_command_failed(self, open_mock): open_mock.side_effect = OSError("some reason") - with self.assertRaises(UserError): + with pytest.raises(UserError): self.run_command("play", "title:aNiceTitle") - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_player.py b/test/plugins/test_player.py index f7c9c892d..b17a78c17 100644 --- a/test/plugins/test_player.py +++ b/test/plugins/test_player.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Tests for BPD's implementation of the MPD protocol. 
-""" +"""Tests for BPD's implementation of the MPD protocol.""" import importlib.util import multiprocessing as mp @@ -30,9 +29,10 @@ from contextlib import contextmanager from unittest import mock import confuse +import pytest import yaml -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase from beets.util import bluelet from beetsplug import bpd @@ -41,7 +41,7 @@ gstplayer = importlib.util.module_from_spec( ) -def _gstplayer_play(*_): # noqa: 42 +def _gstplayer_play(*_): bpd.gstplayer._GstPlayer.playing = True return mock.DEFAULT @@ -77,42 +77,42 @@ class CommandParseTest(unittest.TestCase): def test_no_args(self): s = r"command" c = bpd.Command(s) - self.assertEqual(c.name, "command") - self.assertEqual(c.args, []) + assert c.name == "command" + assert c.args == [] def test_one_unquoted_arg(self): s = r"command hello" c = bpd.Command(s) - self.assertEqual(c.name, "command") - self.assertEqual(c.args, ["hello"]) + assert c.name == "command" + assert c.args == ["hello"] def test_two_unquoted_args(self): s = r"command hello there" c = bpd.Command(s) - self.assertEqual(c.name, "command") - self.assertEqual(c.args, ["hello", "there"]) + assert c.name == "command" + assert c.args == ["hello", "there"] def test_one_quoted_arg(self): s = r'command "hello there"' c = bpd.Command(s) - self.assertEqual(c.name, "command") - self.assertEqual(c.args, ["hello there"]) + assert c.name == "command" + assert c.args == ["hello there"] def test_heterogenous_args(self): s = r'command "hello there" sir' c = bpd.Command(s) - self.assertEqual(c.name, "command") - self.assertEqual(c.args, ["hello there", "sir"]) + assert c.name == "command" + assert c.args == ["hello there", "sir"] def test_quote_in_arg(self): s = r'command "hello \" there"' c = bpd.Command(s) - self.assertEqual(c.args, ['hello " there']) + assert c.args == ['hello " there'] def test_backslash_in_arg(self): s = r'command "hello \\ there"' c = bpd.Command(s) - self.assertEqual(c.args, ["hello \\ there"]) + assert c.args == ["hello \\ there"] class MPCResponse: @@ -242,15 +242,15 @@ class MPCClient: return line -def implements(commands, expectedFailure=False): # noqa: N803 +def implements(commands, fail=False): def _test(self): with self.run_bpd() as client: response = client.send_command("commands") self._assert_ok(response) implemented = response.data["command"] - self.assertEqual(commands.intersection(implemented), commands) + assert commands.intersection(implemented) == commands - return unittest.expectedFailure(_test) if expectedFailure else _test + return unittest.expectedFailure(_test) if fail else _test bluelet_listener = bluelet.Listener @@ -277,10 +277,12 @@ def start_server(args, assigned_port, listener_patch): beets.ui.main(args) -class BPDTestHelper(unittest.TestCase, TestHelper): +class BPDTestHelper(PluginTestCase): + db_on_disk = True + plugin = "bpd" + def setUp(self): - self.setup_beets(disk=True) - self.load_plugins("bpd") + super().setUp() self.item1 = self.add_item( title="Track One Title", track=1, @@ -295,10 +297,6 @@ class BPDTestHelper(unittest.TestCase, TestHelper): ) self.lib.add_album([self.item1, self.item2]) - def tearDown(self): - self.teardown_beets() - self.unload_plugins() - @contextmanager def run_bpd( self, @@ -379,10 +377,8 @@ class BPDTestHelper(unittest.TestCase, TestHelper): def _assert_ok(self, *responses): for response in responses: - self.assertTrue(response is not None) - self.assertTrue( - response.ok, "Response failed: {}".format(response.err_data) - ) + assert 
response is not None + assert response.ok, f"Response failed: {response.err_data}" def _assert_failed(self, response, code, pos=None): """Check that a command failed with a specific error code. If this @@ -392,11 +388,11 @@ class BPDTestHelper(unittest.TestCase, TestHelper): previous_commands = response[0:pos] self._assert_ok(*previous_commands) response = response[pos] - self.assertFalse(response.ok) + assert not response.ok if pos is not None: - self.assertEqual(pos, response.err_data[1]) + assert pos == response.err_data[1] if code is not None: - self.assertEqual(code, response.err_data[0]) + assert code == response.err_data[0] def _bpd_add(self, client, *items, **kwargs): """Add the given item to the BPD playlist or queue.""" @@ -422,7 +418,7 @@ class BPDTestHelper(unittest.TestCase, TestHelper): class BPDTest(BPDTestHelper): def test_server_hello(self): with self.run_bpd(do_hello=False) as client: - self.assertEqual(client.readline(), b"OK MPD 0.16.0\n") + assert client.readline() == b"OK MPD 0.16.0\n" def test_unknown_cmd(self): with self.run_bpd() as client: @@ -441,7 +437,7 @@ class BPDTest(BPDTestHelper): def test_system_error(self): with self.run_bpd() as client: - response = client.send_command("crash_TypeError") + response = client.send_command("crash") self._assert_failed(response, bpd.ERROR_SYSTEM) def test_empty_request(self): @@ -464,17 +460,16 @@ class BPDQueryTest(BPDTestHelper): ("play",), ("currentsong",), ("stop",), ("currentsong",) ) self._assert_ok(*responses) - self.assertEqual("1", responses[1].data["Id"]) - self.assertNotIn("Id", responses[3].data) + assert "1" == responses[1].data["Id"] + assert "Id" not in responses[3].data def test_cmd_currentsong_tagtypes(self): with self.run_bpd() as client: self._bpd_add(client, self.item1) responses = client.send_commands(("play",), ("currentsong",)) self._assert_ok(*responses) - self.assertEqual( - BPDConnectionTest.TAGTYPES.union(BPDQueueTest.METADATA), - set(responses[1].data.keys()), + assert BPDConnectionTest.TAGTYPES.union(BPDQueueTest.METADATA) == set( + responses[1].data.keys() ) def test_cmd_status(self): @@ -495,7 +490,7 @@ class BPDQueryTest(BPDTestHelper): "state", "volume", } - self.assertEqual(fields_not_playing, set(responses[0].data.keys())) + assert fields_not_playing == set(responses[0].data.keys()) fields_playing = fields_not_playing | { "song", "songid", @@ -507,7 +502,7 @@ class BPDQueryTest(BPDTestHelper): "nextsong", "nextsongid", } - self.assertEqual(fields_playing, set(responses[2].data.keys())) + assert fields_playing == set(responses[2].data.keys()) def test_cmd_stats(self): with self.run_bpd() as client: @@ -522,7 +517,7 @@ class BPDQueryTest(BPDTestHelper): "db_update", "playtime", } - self.assertEqual(details, set(response.data.keys())) + assert details == set(response.data.keys()) def test_cmd_idle(self): def _toggle(c): @@ -549,7 +544,7 @@ class BPDQueryTest(BPDTestHelper): response1 = client.send_command("random", "1") response2 = client2.send_command("idle") self._assert_ok(response1, response2) - self.assertEqual("options", response2.data["changed"]) + assert "options" == response2.data["changed"] def test_cmd_noidle(self): with self.run_bpd() as client: @@ -592,11 +587,11 @@ class BPDPlaybackTest(BPDTestHelper): ("status",), ) self._assert_ok(*responses) - self.assertEqual(responses[1].data["Id"], responses[3].data["Id"]) - self.assertEqual(["1", "2"], responses[5].data["Id"]) - self.assertEqual("2", responses[8].data["Id"]) - self.assertEqual("1", responses[9].data["consume"]) - 
self.assertEqual("play", responses[9].data["state"]) + assert responses[1].data["Id"] == responses[3].data["Id"] + assert ["1", "2"] == responses[5].data["Id"] + assert "2" == responses[8].data["Id"] + assert "1" == responses[9].data["consume"] + assert "play" == responses[9].data["state"] def test_cmd_consume_in_reverse(self): with self.run_bpd() as client: @@ -610,9 +605,9 @@ class BPDPlaybackTest(BPDTestHelper): ("status",), ) self._assert_ok(*responses) - self.assertEqual(["1", "2"], responses[2].data["Id"]) - self.assertEqual("1", responses[4].data["Id"]) - self.assertEqual("play", responses[5].data["state"]) + assert ["1", "2"] == responses[2].data["Id"] + assert "1" == responses[4].data["Id"] + assert "play" == responses[5].data["state"] def test_cmd_single(self): with self.run_bpd() as client: @@ -626,10 +621,10 @@ class BPDPlaybackTest(BPDTestHelper): ("status",), ) self._assert_ok(*responses) - self.assertEqual("0", responses[0].data["single"]) - self.assertEqual("1", responses[3].data["single"]) - self.assertEqual("play", responses[3].data["state"]) - self.assertEqual("stop", responses[5].data["state"]) + assert "0" == responses[0].data["single"] + assert "1" == responses[3].data["single"] + assert "play" == responses[3].data["state"] + assert "stop" == responses[5].data["state"] def test_cmd_repeat(self): with self.run_bpd() as client: @@ -644,9 +639,9 @@ class BPDPlaybackTest(BPDTestHelper): ("currentsong",), ) self._assert_ok(*responses) - self.assertEqual("1", responses[2].data["Id"]) - self.assertEqual("2", responses[4].data["Id"]) - self.assertEqual("1", responses[6].data["Id"]) + assert "1" == responses[2].data["Id"] + assert "2" == responses[4].data["Id"] + assert "1" == responses[6].data["Id"] def test_cmd_repeat_with_single(self): with self.run_bpd() as client: @@ -661,9 +656,9 @@ class BPDPlaybackTest(BPDTestHelper): ("currentsong",), ) self._assert_ok(*responses) - self.assertEqual("1", responses[3].data["Id"]) - self.assertEqual("play", responses[5].data["state"]) - self.assertEqual("1", responses[6].data["Id"]) + assert "1" == responses[3].data["Id"] + assert "play" == responses[5].data["state"] + assert "1" == responses[6].data["Id"] def test_cmd_repeat_in_reverse(self): with self.run_bpd() as client: @@ -676,8 +671,8 @@ class BPDPlaybackTest(BPDTestHelper): ("currentsong",), ) self._assert_ok(*responses) - self.assertEqual("1", responses[2].data["Id"]) - self.assertEqual("2", responses[4].data["Id"]) + assert "1" == responses[2].data["Id"] + assert "2" == responses[4].data["Id"] def test_cmd_repeat_with_single_in_reverse(self): with self.run_bpd() as client: @@ -692,9 +687,9 @@ class BPDPlaybackTest(BPDTestHelper): ("currentsong",), ) self._assert_ok(*responses) - self.assertEqual("1", responses[3].data["Id"]) - self.assertEqual("play", responses[5].data["state"]) - self.assertEqual("1", responses[6].data["Id"]) + assert "1" == responses[3].data["Id"] + assert "play" == responses[5].data["state"] + assert "1" == responses[6].data["Id"] def test_cmd_crossfade(self): with self.run_bpd() as client: @@ -707,14 +702,14 @@ class BPDPlaybackTest(BPDTestHelper): response = client.send_command("crossfade", "0.5") self._assert_failed(responses, bpd.ERROR_ARG, pos=3) self._assert_failed(response, bpd.ERROR_ARG) - self.assertNotIn("xfade", responses[0].data) - self.assertAlmostEqual(123, int(responses[2].data["xfade"])) + assert "xfade" not in responses[0].data + assert 123 == pytest.approx(int(responses[2].data["xfade"])) def test_cmd_mixrampdb(self): with self.run_bpd() as 
client: responses = client.send_commands(("mixrampdb", "-17"), ("status",)) self._assert_ok(*responses) - self.assertAlmostEqual(-17, float(responses[1].data["mixrampdb"])) + assert -17 == pytest.approx(float(responses[1].data["mixrampdb"])) def test_cmd_mixrampdelay(self): with self.run_bpd() as client: @@ -726,8 +721,8 @@ class BPDPlaybackTest(BPDTestHelper): ("mixrampdelay", "-2"), ) self._assert_failed(responses, bpd.ERROR_ARG, pos=4) - self.assertAlmostEqual(2, float(responses[1].data["mixrampdelay"])) - self.assertNotIn("mixrampdelay", responses[3].data) + assert 2 == pytest.approx(float(responses[1].data["mixrampdelay"])) + assert "mixrampdelay" not in responses[3].data def test_cmd_setvol(self): with self.run_bpd() as client: @@ -739,8 +734,8 @@ class BPDPlaybackTest(BPDTestHelper): ("setvol", "101"), ) self._assert_failed(responses, bpd.ERROR_ARG, pos=4) - self.assertEqual("67", responses[1].data["volume"]) - self.assertEqual("32", responses[3].data["volume"]) + assert "67" == responses[1].data["volume"] + assert "32" == responses[3].data["volume"] def test_cmd_volume(self): with self.run_bpd() as client: @@ -748,7 +743,7 @@ class BPDPlaybackTest(BPDTestHelper): ("setvol", "10"), ("volume", "5"), ("volume", "-2"), ("status",) ) self._assert_ok(*responses) - self.assertEqual("13", responses[3].data["volume"]) + assert "13" == responses[3].data["volume"] def test_cmd_replay_gain(self): with self.run_bpd() as client: @@ -758,7 +753,7 @@ class BPDPlaybackTest(BPDTestHelper): ("replay_gain_mode", "notanoption"), ) self._assert_failed(responses, bpd.ERROR_ARG, pos=2) - self.assertAlmostEqual("track", responses[1].data["replay_gain_mode"]) + assert "track" == responses[1].data["replay_gain_mode"] class BPDControlTest(BPDTestHelper): @@ -768,7 +763,7 @@ class BPDControlTest(BPDTestHelper): "seekid", "seekcur", }, - expectedFailure=True, + fail=True, ) def test_cmd_play(self): @@ -782,9 +777,9 @@ class BPDControlTest(BPDTestHelper): ("currentsong",), ) self._assert_ok(*responses) - self.assertEqual("stop", responses[0].data["state"]) - self.assertEqual("play", responses[2].data["state"]) - self.assertEqual("2", responses[4].data["Id"]) + assert "stop" == responses[0].data["state"] + assert "play" == responses[2].data["state"] + assert "2" == responses[4].data["Id"] def test_cmd_playid(self): with self.run_bpd() as client: @@ -797,8 +792,8 @@ class BPDControlTest(BPDTestHelper): client.send_commands(("playid", "2"), ("currentsong",)) ) self._assert_ok(*responses) - self.assertEqual("2", responses[1].data["Id"]) - self.assertEqual("2", responses[4].data["Id"]) + assert "2" == responses[1].data["Id"] + assert "2" == responses[4].data["Id"] def test_cmd_pause(self): with self.run_bpd() as client: @@ -807,8 +802,8 @@ class BPDControlTest(BPDTestHelper): ("play",), ("pause",), ("status",), ("currentsong",) ) self._assert_ok(*responses) - self.assertEqual("pause", responses[2].data["state"]) - self.assertEqual("1", responses[3].data["Id"]) + assert "pause" == responses[2].data["state"] + assert "1" == responses[3].data["Id"] def test_cmd_stop(self): with self.run_bpd() as client: @@ -817,8 +812,8 @@ class BPDControlTest(BPDTestHelper): ("play",), ("stop",), ("status",), ("currentsong",) ) self._assert_ok(*responses) - self.assertEqual("stop", responses[2].data["state"]) - self.assertNotIn("Id", responses[3].data) + assert "stop" == responses[2].data["state"] + assert "Id" not in responses[3].data def test_cmd_next(self): with self.run_bpd() as client: @@ -832,9 +827,9 @@ class 
BPDControlTest(BPDTestHelper): ("status",), ) self._assert_ok(*responses) - self.assertEqual("1", responses[1].data["Id"]) - self.assertEqual("2", responses[3].data["Id"]) - self.assertEqual("stop", responses[5].data["state"]) + assert "1" == responses[1].data["Id"] + assert "2" == responses[3].data["Id"] + assert "stop" == responses[5].data["state"] def test_cmd_previous(self): with self.run_bpd() as client: @@ -849,10 +844,10 @@ class BPDControlTest(BPDTestHelper): ("currentsong",), ) self._assert_ok(*responses) - self.assertEqual("2", responses[1].data["Id"]) - self.assertEqual("1", responses[3].data["Id"]) - self.assertEqual("play", responses[5].data["state"]) - self.assertEqual("1", responses[6].data["Id"]) + assert "2" == responses[1].data["Id"] + assert "1" == responses[3].data["Id"] + assert "play" == responses[5].data["state"] + assert "1" == responses[6].data["Id"] class BPDQueueTest(BPDTestHelper): @@ -878,7 +873,7 @@ class BPDQueueTest(BPDTestHelper): "addtagid", "cleartagid", }, - expectedFailure=True, + fail=True, ) METADATA = {"Pos", "Time", "Id", "file", "duration"} @@ -897,17 +892,16 @@ class BPDQueueTest(BPDTestHelper): ("playlistinfo", "200"), ) self._assert_failed(responses, bpd.ERROR_ARG, pos=3) - self.assertEqual("1", responses[1].data["Id"]) - self.assertEqual(["1", "2"], responses[2].data["Id"]) + assert "1" == responses[1].data["Id"] + assert ["1", "2"] == responses[2].data["Id"] def test_cmd_playlistinfo_tagtypes(self): with self.run_bpd() as client: self._bpd_add(client, self.item1) response = client.send_command("playlistinfo", "0") self._assert_ok(response) - self.assertEqual( - BPDConnectionTest.TAGTYPES.union(BPDQueueTest.METADATA), - set(response.data.keys()), + assert BPDConnectionTest.TAGTYPES.union(BPDQueueTest.METADATA) == set( + response.data.keys() ) def test_cmd_playlistid(self): @@ -917,8 +911,8 @@ class BPDQueueTest(BPDTestHelper): ("playlistid", "2"), ("playlistid",) ) self._assert_ok(*responses) - self.assertEqual("Track Two Title", responses[0].data["Title"]) - self.assertEqual(["1", "2"], responses[1].data["Track"]) + assert "Track Two Title" == responses[0].data["Title"] + assert ["1", "2"] == responses[1].data["Track"] class BPDPlaylistsTest(BPDTestHelper): @@ -996,14 +990,14 @@ class BPDDatabaseTest(BPDTestHelper): "update", "rescan", }, - expectedFailure=True, + fail=True, ) def test_cmd_search(self): with self.run_bpd() as client: response = client.send_command("search", "track", "1") self._assert_ok(response) - self.assertEqual(self.item1.title, response.data["Title"]) + assert self.item1.title == response.data["Title"] def test_cmd_list(self): with self.run_bpd() as client: @@ -1013,8 +1007,8 @@ class BPDDatabaseTest(BPDTestHelper): ("list", "album", "artist", "Artist Name", "track"), ) self._assert_failed(responses, bpd.ERROR_ARG, pos=2) - self.assertEqual("Album Title", responses[0].data["Album"]) - self.assertEqual(["1", "2"], responses[1].data["Track"]) + assert "Album Title" == responses[0].data["Album"] + assert ["1", "2"] == responses[1].data["Track"] def test_cmd_list_three_arg_form(self): with self.run_bpd() as client: @@ -1024,7 +1018,7 @@ class BPDDatabaseTest(BPDTestHelper): ("list", "track", "Artist Name"), ) self._assert_failed(responses, bpd.ERROR_ARG, pos=2) - self.assertEqual(responses[0].data, responses[1].data) + assert responses[0].data == responses[1].data def test_cmd_lsinfo(self): with self.run_bpd() as client: @@ -1038,14 +1032,14 @@ class BPDDatabaseTest(BPDTestHelper): "lsinfo", response2.data["directory"] ) 
self._assert_ok(response3) - self.assertIn(self.item1.title, response3.data["Title"]) + assert self.item1.title in response3.data["Title"] def test_cmd_count(self): with self.run_bpd() as client: response = client.send_command("count", "track", "1") self._assert_ok(response) - self.assertEqual("1", response.data["songs"]) - self.assertEqual("0", response.data["playtime"]) + assert "1" == response.data["songs"] + assert "0" == response.data["playtime"] class BPDMountsTest(BPDTestHelper): @@ -1056,7 +1050,7 @@ class BPDMountsTest(BPDTestHelper): "listmounts", "listneighbors", }, - expectedFailure=True, + fail=True, ) @@ -1065,7 +1059,7 @@ class BPDStickerTest(BPDTestHelper): { "sticker", }, - expectedFailure=True, + fail=True, ) @@ -1132,7 +1126,7 @@ class BPDConnectionTest(BPDTestHelper): with self.run_bpd() as client: response = client.send_command("tagtypes") self._assert_ok(response) - self.assertEqual(self.TAGTYPES, set(response.data["tagtype"])) + assert self.TAGTYPES == set(response.data["tagtype"]) @unittest.skip def test_tagtypes_mask(self): @@ -1148,7 +1142,7 @@ class BPDPartitionTest(BPDTestHelper): "listpartitions", "newpartition", }, - expectedFailure=True, + fail=True, ) @@ -1160,7 +1154,7 @@ class BPDDeviceTest(BPDTestHelper): "toggleoutput", "outputs", }, - expectedFailure=True, + fail=True, ) @@ -1172,16 +1166,16 @@ class BPDReflectionTest(BPDTestHelper): "notcommands", "urlhandlers", }, - expectedFailure=True, + fail=True, ) def test_cmd_decoders(self): with self.run_bpd() as client: response = client.send_command("decoders") self._assert_ok(response) - self.assertEqual("default", response.data["plugin"]) - self.assertEqual("mp3", response.data["suffix"]) - self.assertEqual("audio/mpeg", response.data["mime_type"]) + assert "default" == response.data["plugin"] + assert "mp3" == response.data["suffix"] + assert "audio/mpeg" == response.data["mime_type"] class BPDPeersTest(BPDTestHelper): @@ -1193,13 +1187,5 @@ class BPDPeersTest(BPDTestHelper): "readmessages", "sendmessage", }, - expectedFailure=True, + fail=True, ) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_playlist.py b/test/plugins/test_playlist.py index a4e6a91f9..ee4059b70 100644 --- a/test/plugins/test_playlist.py +++ b/test/plugins/test_playlist.py @@ -14,17 +14,19 @@ import os -import unittest from shlex import quote import beets -from beets.test import _common, helper +from beets.test import _common +from beets.test.helper import PluginTestCase -class PlaylistTestHelper(helper.TestHelper): +class PlaylistTestCase(PluginTestCase): + plugin = "playlist" + preload_plugin = False + def setUp(self): - self.setup_beets() - self.lib = beets.library.Library(":memory:") + super().setUp() self.music_dir = os.path.expanduser(os.path.join("~", "Music")) @@ -78,27 +80,17 @@ class PlaylistTestHelper(helper.TestHelper): self.config["playlist"]["playlist_dir"] = self.playlist_dir self.setup_test() - self.load_plugins("playlist") + self.load_plugins() def setup_test(self): raise NotImplementedError - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - -class PlaylistQueryTestHelper(PlaylistTestHelper): +class PlaylistQueryTest: def test_name_query_with_absolute_paths_in_playlist(self): q = "playlist:absolute" results = self.lib.items(q) - self.assertEqual( - {i.title for i in results}, - { - "some item", - "another item", - }, - ) + assert {i.title for i in results} == {"some item", 
"another item"} def test_path_query_with_absolute_paths_in_playlist(self): q = "playlist:{}".format( @@ -110,24 +102,12 @@ class PlaylistQueryTestHelper(PlaylistTestHelper): ) ) results = self.lib.items(q) - self.assertEqual( - {i.title for i in results}, - { - "some item", - "another item", - }, - ) + assert {i.title for i in results} == {"some item", "another item"} def test_name_query_with_relative_paths_in_playlist(self): q = "playlist:relative" results = self.lib.items(q) - self.assertEqual( - {i.title for i in results}, - { - "some item", - "another item", - }, - ) + assert {i.title for i in results} == {"some item", "another item"} def test_path_query_with_relative_paths_in_playlist(self): q = "playlist:{}".format( @@ -139,18 +119,12 @@ class PlaylistQueryTestHelper(PlaylistTestHelper): ) ) results = self.lib.items(q) - self.assertEqual( - {i.title for i in results}, - { - "some item", - "another item", - }, - ) + assert {i.title for i in results} == {"some item", "another item"} def test_name_query_with_nonexisting_playlist(self): q = "playlist:nonexisting" results = self.lib.items(q) - self.assertEqual(set(results), set()) + assert set(results) == set() def test_path_query_with_nonexisting_playlist(self): q = "playlist:{}".format( @@ -163,10 +137,10 @@ class PlaylistQueryTestHelper(PlaylistTestHelper): ) ) results = self.lib.items(q) - self.assertEqual(set(results), set()) + assert set(results) == set() -class PlaylistTestRelativeToLib(PlaylistQueryTestHelper, unittest.TestCase): +class PlaylistTestRelativeToLib(PlaylistQueryTest, PlaylistTestCase): def setup_test(self): with open(os.path.join(self.playlist_dir, "absolute.m3u"), "w") as f: f.write( @@ -187,7 +161,7 @@ class PlaylistTestRelativeToLib(PlaylistQueryTestHelper, unittest.TestCase): self.config["playlist"]["relative_to"] = "library" -class PlaylistTestRelativeToDir(PlaylistQueryTestHelper, unittest.TestCase): +class PlaylistTestRelativeToDir(PlaylistQueryTest, PlaylistTestCase): def setup_test(self): with open(os.path.join(self.playlist_dir, "absolute.m3u"), "w") as f: f.write( @@ -208,7 +182,7 @@ class PlaylistTestRelativeToDir(PlaylistQueryTestHelper, unittest.TestCase): self.config["playlist"]["relative_to"] = self.music_dir -class PlaylistTestRelativeToPls(PlaylistQueryTestHelper, unittest.TestCase): +class PlaylistTestRelativeToPls(PlaylistQueryTest, PlaylistTestCase): def setup_test(self): with open(os.path.join(self.playlist_dir, "absolute.m3u"), "w") as f: f.write( @@ -251,7 +225,7 @@ class PlaylistTestRelativeToPls(PlaylistQueryTestHelper, unittest.TestCase): self.config["playlist"]["playlist_dir"] = self.playlist_dir -class PlaylistUpdateTestHelper(PlaylistTestHelper): +class PlaylistUpdateTest: def setup_test(self): with open(os.path.join(self.playlist_dir, "absolute.m3u"), "w") as f: f.write( @@ -273,7 +247,7 @@ class PlaylistUpdateTestHelper(PlaylistTestHelper): self.config["playlist"]["relative_to"] = "library" -class PlaylistTestItemMoved(PlaylistUpdateTestHelper, unittest.TestCase): +class PlaylistTestItemMoved(PlaylistUpdateTest, PlaylistTestCase): def test_item_moved(self): # Emit item_moved event for an item that is in a playlist results = self.lib.items( @@ -315,31 +289,25 @@ class PlaylistTestItemMoved(PlaylistUpdateTestHelper, unittest.TestCase): with open(playlist_path) as f: lines = [line.strip() for line in f.readlines()] - self.assertEqual( - lines, - [ - os.path.join(self.music_dir, "a", "b", "c.mp3"), - os.path.join(self.music_dir, "g", "h", "i.mp3"), - os.path.join(self.music_dir, 
"nonexisting.mp3"), - ], - ) + assert lines == [ + os.path.join(self.music_dir, "a", "b", "c.mp3"), + os.path.join(self.music_dir, "g", "h", "i.mp3"), + os.path.join(self.music_dir, "nonexisting.mp3"), + ] # Check playlist with relative paths playlist_path = os.path.join(self.playlist_dir, "relative.m3u") with open(playlist_path) as f: lines = [line.strip() for line in f.readlines()] - self.assertEqual( - lines, - [ - os.path.join("a", "b", "c.mp3"), - os.path.join("g", "h", "i.mp3"), - "nonexisting.mp3", - ], - ) + assert lines == [ + os.path.join("a", "b", "c.mp3"), + os.path.join("g", "h", "i.mp3"), + "nonexisting.mp3", + ] -class PlaylistTestItemRemoved(PlaylistUpdateTestHelper, unittest.TestCase): +class PlaylistTestItemRemoved(PlaylistUpdateTest, PlaylistTestCase): def test_item_removed(self): # Emit item_removed event for an item that is in a playlist results = self.lib.items( @@ -367,31 +335,14 @@ class PlaylistTestItemRemoved(PlaylistUpdateTestHelper, unittest.TestCase): with open(playlist_path) as f: lines = [line.strip() for line in f.readlines()] - self.assertEqual( - lines, - [ - os.path.join(self.music_dir, "a", "b", "c.mp3"), - os.path.join(self.music_dir, "nonexisting.mp3"), - ], - ) + assert lines == [ + os.path.join(self.music_dir, "a", "b", "c.mp3"), + os.path.join(self.music_dir, "nonexisting.mp3"), + ] # Check playlist with relative paths playlist_path = os.path.join(self.playlist_dir, "relative.m3u") with open(playlist_path) as f: lines = [line.strip() for line in f.readlines()] - self.assertEqual( - lines, - [ - os.path.join("a", "b", "c.mp3"), - "nonexisting.mp3", - ], - ) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert lines == [os.path.join("a", "b", "c.mp3"), "nonexisting.mp3"] diff --git a/test/plugins/test_plexupdate.py b/test/plugins/test_plexupdate.py index f45ea9d8f..f319db6ce 100644 --- a/test/plugins/test_plexupdate.py +++ b/test/plugins/test_plexupdate.py @@ -1,12 +1,12 @@ -import unittest - import responses -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase from beetsplug.plexupdate import get_music_section, update_plex -class PlexUpdateTest(unittest.TestCase, TestHelper): +class PlexUpdateTest(PluginTestCase): + plugin = "plexupdate" + def add_response_get_music_section(self, section_name="Music"): """Create response for mocking the get_music_section function.""" @@ -73,22 +73,17 @@ class PlexUpdateTest(unittest.TestCase, TestHelper): ) def setUp(self): - self.setup_beets() - self.load_plugins("plexupdate") + super().setUp() self.config["plex"] = {"host": "localhost", "port": 32400} - def tearDown(self): - self.teardown_beets() - self.unload_plugins() - @responses.activate def test_get_music_section(self): # Adding response. self.add_response_get_music_section() # Test if section key is "2" out of the mocking data. - self.assertEqual( + assert ( get_music_section( self.config["plex"]["host"], self.config["plex"]["port"], @@ -96,8 +91,8 @@ class PlexUpdateTest(unittest.TestCase, TestHelper): self.config["plex"]["library_name"].get(), self.config["plex"]["secure"], self.config["plex"]["ignore_cert_errors"], - ), - "2", + ) + == "2" ) @responses.activate @@ -105,7 +100,7 @@ class PlexUpdateTest(unittest.TestCase, TestHelper): # Adding response. 
self.add_response_get_music_section("My Music Library") - self.assertEqual( + assert ( get_music_section( self.config["plex"]["host"], self.config["plex"]["port"], @@ -113,8 +108,8 @@ class PlexUpdateTest(unittest.TestCase, TestHelper): "My Music Library", self.config["plex"]["secure"], self.config["plex"]["ignore_cert_errors"], - ), - "2", + ) + == "2" ) @responses.activate @@ -124,7 +119,7 @@ class PlexUpdateTest(unittest.TestCase, TestHelper): self.add_response_update_plex() # Testing status code of the mocking request. - self.assertEqual( + assert ( update_plex( self.config["plex"]["host"], self.config["plex"]["port"], @@ -132,14 +127,6 @@ class PlexUpdateTest(unittest.TestCase, TestHelper): self.config["plex"]["library_name"].get(), self.config["plex"]["secure"], self.config["plex"]["ignore_cert_errors"], - ).status_code, - 200, + ).status_code + == 200 ) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_plugin_mediafield.py b/test/plugins/test_plugin_mediafield.py index 0e03886cf..898e891ce 100644 --- a/test/plugins/test_plugin_mediafield.py +++ b/test/plugins/test_plugin_mediafield.py @@ -12,18 +12,18 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Tests the facility that lets plugins add custom field to MediaFile. -""" +"""Tests the facility that lets plugins add custom field to MediaFile.""" import os import shutil -import unittest import mediafile +import pytest from beets.library import Item from beets.plugins import BeetsPlugin from beets.test import _common +from beets.test.helper import BeetsTestCase from beets.util import bytestring_path, syspath field_extension = mediafile.MediaField( @@ -41,7 +41,7 @@ list_field_extension = mediafile.ListMediaField( ) -class ExtendedFieldTestMixin(_common.TestCase): +class ExtendedFieldTestMixin(BeetsTestCase): def _mediafile_fixture(self, name, extension="mp3"): name = bytestring_path(name + "." 
+ extension) src = os.path.join(_common.RSRC, name) @@ -59,7 +59,7 @@ class ExtendedFieldTestMixin(_common.TestCase): mf.save() mf = mediafile.MediaFile(mf.path) - self.assertEqual(mf.customtag, "F#") + assert mf.customtag == "F#" finally: delattr(mediafile.MediaFile, "customtag") @@ -75,7 +75,7 @@ class ExtendedFieldTestMixin(_common.TestCase): mf.save() mf = mediafile.MediaFile(mf.path) - self.assertEqual(mf.customlisttag, ["a", "b"]) + assert mf.customlisttag == ["a", "b"] finally: delattr(mediafile.MediaFile, "customlisttag") @@ -87,12 +87,12 @@ class ExtendedFieldTestMixin(_common.TestCase): try: mf = self._mediafile_fixture("empty") - self.assertIsNone(mf.customtag) + assert mf.customtag is None item = Item(path=mf.path, customtag="Gb") item.write() mf = mediafile.MediaFile(mf.path) - self.assertEqual(mf.customtag, "Gb") + assert mf.customtag == "Gb" finally: delattr(mediafile.MediaFile, "customtag") @@ -108,26 +108,20 @@ class ExtendedFieldTestMixin(_common.TestCase): mf.save() item = Item.from_path(mf.path) - self.assertEqual(item["customtag"], "F#") + assert item["customtag"] == "F#" finally: delattr(mediafile.MediaFile, "customtag") Item._media_fields.remove("customtag") def test_invalid_descriptor(self): - with self.assertRaises(ValueError) as cm: + with pytest.raises( + ValueError, match="must be an instance of MediaField" + ): mediafile.MediaFile.add_field("somekey", True) - self.assertIn("must be an instance of MediaField", str(cm.exception)) def test_overwrite_property(self): - with self.assertRaises(ValueError) as cm: + with pytest.raises( + ValueError, match='property "artist" already exists' + ): mediafile.MediaFile.add_field("artist", mediafile.MediaField()) - self.assertIn('property "artist" already exists', str(cm.exception)) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_random.py b/test/plugins/test_random.py index b371fa832..5bff1ee5e 100644 --- a/test/plugins/test_random.py +++ b/test/plugins/test_random.py @@ -12,19 +12,19 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Test the beets.random utilities associated with the random plugin. 
-""" - +"""Test the beets.random utilities associated with the random plugin.""" import math import unittest from random import Random +import pytest + from beets import random from beets.test.helper import TestHelper -class RandomTest(unittest.TestCase, TestHelper): +class RandomTest(TestHelper, unittest.TestCase): def setUp(self): self.lib = None self.artist1 = "Artist 1" @@ -37,9 +37,6 @@ class RandomTest(unittest.TestCase, TestHelper): self.random_gen = Random() self.random_gen.seed(12345) - def tearDown(self): - pass - def _stats(self, data): mean = sum(data) / len(data) stdev = math.sqrt(sum((p - mean) ** 2 for p in data) / (len(data) - 1)) @@ -77,14 +74,6 @@ class RandomTest(unittest.TestCase, TestHelper): mean1, stdev1, median1 = experiment("artist") mean2, stdev2, median2 = experiment("track") - self.assertAlmostEqual(0, median1, delta=1) - self.assertAlmostEqual(len(self.items) // 2, median2, delta=1) - self.assertGreater(stdev2, stdev1) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert 0 == pytest.approx(median1, abs=1) + assert len(self.items) // 2 == pytest.approx(median2, abs=1) + assert stdev2 > stdev1 diff --git a/test/plugins/test_replaygain.py b/test/plugins/test_replaygain.py index 92e3e5f65..091298766 100644 --- a/test/plugins/test_replaygain.py +++ b/test/plugins/test_replaygain.py @@ -14,11 +14,18 @@ import unittest +from typing import ClassVar +import pytest from mediafile import MediaFile from beets import config -from beets.test.helper import TestHelper, has_program +from beets.test.helper import ( + AsIsImporterMixin, + ImportTestCase, + PluginMixin, + has_program, +) from beetsplug.replaygain import ( FatalGstreamerPluginReplayGainError, GStreamerBackend, @@ -51,6 +58,29 @@ def reset_replaygain(item): item.store() +class ReplayGainTestCase(PluginMixin, ImportTestCase): + db_on_disk = True + plugin = "replaygain" + preload_plugin = False + + backend: ClassVar[str] + + def setUp(self): + # Implemented by Mixins, see above. This may decide to skip the test. + self.test_backend() + + super().setUp() + self.config["replaygain"]["backend"] = self.backend + + self.load_plugins() + + +class ThreadedImportMixin: + def setUp(self): + super().setUp() + self.config["threaded"] = True + + class GstBackendMixin: backend = "gstreamer" has_r128_support = True @@ -85,22 +115,9 @@ class FfmpegBackendMixin: pass -class ReplayGainCliTestBase(TestHelper): +class ReplayGainCliTest: FNAME: str - def setUp(self): - # Implemented by Mixins, see above. This may decide to skip the test. 
- self.test_backend() - - self.setup_beets(disk=True) - self.config["replaygain"]["backend"] = self.backend - - try: - self.load_plugins("replaygain") - except Exception: - self.teardown_beets() - self.unload_plugins() - def _add_album(self, *args, **kwargs): # Use a file with non-zero volume (most test assets are total silence) album = self.add_album_fixture(*args, fname=self.FNAME, **kwargs) @@ -109,19 +126,15 @@ class ReplayGainCliTestBase(TestHelper): return album - def tearDown(self): - self.teardown_beets() - self.unload_plugins() - def test_cli_saves_track_gain(self): self._add_album(2) for item in self.lib.items(): - self.assertIsNone(item.rg_track_peak) - self.assertIsNone(item.rg_track_gain) + assert item.rg_track_peak is None + assert item.rg_track_gain is None mediafile = MediaFile(item.path) - self.assertIsNone(mediafile.rg_track_peak) - self.assertIsNone(mediafile.rg_track_gain) + assert mediafile.rg_track_peak is None + assert mediafile.rg_track_gain is None self.run_command("replaygain") @@ -134,14 +147,14 @@ class ReplayGainCliTestBase(TestHelper): self.skipTest("decoder plugins could not be loaded.") for item in self.lib.items(): - self.assertIsNotNone(item.rg_track_peak) - self.assertIsNotNone(item.rg_track_gain) + assert item.rg_track_peak is not None + assert item.rg_track_gain is not None mediafile = MediaFile(item.path) - self.assertAlmostEqual( - mediafile.rg_track_peak, item.rg_track_peak, places=6 + assert mediafile.rg_track_peak == pytest.approx( + item.rg_track_peak, abs=1e-6 ) - self.assertAlmostEqual( - mediafile.rg_track_gain, item.rg_track_gain, places=2 + assert mediafile.rg_track_gain == pytest.approx( + item.rg_track_gain, abs=1e-2 ) def test_cli_skips_calculated_tracks(self): @@ -155,9 +168,9 @@ class ReplayGainCliTestBase(TestHelper): self.run_command("replaygain") item_rg.load() - self.assertIsNotNone(item_rg.rg_track_gain) - self.assertIsNotNone(item_rg.rg_track_peak) - self.assertIsNone(item_rg.r128_track_gain) + assert item_rg.rg_track_gain is not None + assert item_rg.rg_track_peak is not None + assert item_rg.r128_track_gain is None item_rg.rg_track_gain += 1.0 item_rg.rg_track_peak += 1.0 @@ -167,9 +180,9 @@ class ReplayGainCliTestBase(TestHelper): if self.has_r128_support: item_r128.load() - self.assertIsNotNone(item_r128.r128_track_gain) - self.assertIsNone(item_r128.rg_track_gain) - self.assertIsNone(item_r128.rg_track_peak) + assert item_r128.r128_track_gain is not None + assert item_r128.rg_track_gain is None + assert item_r128.rg_track_peak is None item_r128.r128_track_gain += 1.0 item_r128.store() @@ -178,12 +191,12 @@ class ReplayGainCliTestBase(TestHelper): self.run_command("replaygain") item_rg.load() - self.assertEqual(item_rg.rg_track_gain, rg_track_gain) - self.assertEqual(item_rg.rg_track_peak, rg_track_peak) + assert item_rg.rg_track_gain == rg_track_gain + assert item_rg.rg_track_peak == rg_track_peak if self.has_r128_support: item_r128.load() - self.assertEqual(item_r128.r128_track_gain, r128_track_gain) + assert item_r128.r128_track_gain == r128_track_gain def test_cli_does_not_skip_wrong_tag_type(self): """Check that items that have tags of the wrong type won't be skipped.""" @@ -213,23 +226,23 @@ class ReplayGainCliTestBase(TestHelper): item_rg.load() item_r128.load() - self.assertIsNotNone(item_rg.rg_track_gain) - self.assertIsNotNone(item_rg.rg_track_peak) + assert item_rg.rg_track_gain is not None + assert item_rg.rg_track_peak is not None # FIXME: Should the plugin null this field? 
- # self.assertIsNone(item_rg.r128_track_gain) + # assert item_rg.r128_track_gain is None - self.assertIsNotNone(item_r128.r128_track_gain) + assert item_r128.r128_track_gain is not None # FIXME: Should the plugin null these fields? - # self.assertIsNone(item_r128.rg_track_gain) - # self.assertIsNone(item_r128.rg_track_peak) + # assert item_r128.rg_track_gain is None + # assert item_r128.rg_track_peak is None def test_cli_saves_album_gain_to_file(self): self._add_album(2) for item in self.lib.items(): mediafile = MediaFile(item.path) - self.assertIsNone(mediafile.rg_album_peak) - self.assertIsNone(mediafile.rg_album_gain) + assert mediafile.rg_album_peak is None + assert mediafile.rg_album_gain is None self.run_command("replaygain", "-a") @@ -241,11 +254,11 @@ class ReplayGainCliTestBase(TestHelper): gains.append(mediafile.rg_album_gain) # Make sure they are all the same - self.assertEqual(max(peaks), min(peaks)) - self.assertEqual(max(gains), min(gains)) + assert max(peaks) == min(peaks) + assert max(gains) == min(gains) - self.assertNotEqual(max(gains), 0.0) - self.assertNotEqual(max(peaks), 0.0) + assert max(gains) != 0.0 + assert max(peaks) != 0.0 def test_cli_writes_only_r128_tags(self): if not self.has_r128_support: @@ -262,11 +275,11 @@ class ReplayGainCliTestBase(TestHelper): for item in album.items(): mediafile = MediaFile(item.path) # does not write REPLAYGAIN_* tags - self.assertIsNone(mediafile.rg_track_gain) - self.assertIsNone(mediafile.rg_album_gain) + assert mediafile.rg_track_gain is None + assert mediafile.rg_album_gain is None # writes R128_* tags - self.assertIsNotNone(mediafile.r128_track_gain) - self.assertIsNotNone(mediafile.r128_album_gain) + assert mediafile.r128_track_gain is not None + assert mediafile.r128_album_gain is not None def test_targetlevel_has_effect(self): album = self._add_album(1) @@ -281,7 +294,7 @@ class ReplayGainCliTestBase(TestHelper): gain_relative_to_84 = analyse(84) gain_relative_to_89 = analyse(89) - self.assertNotEqual(gain_relative_to_84, gain_relative_to_89) + assert gain_relative_to_84 != gain_relative_to_89 def test_r128_targetlevel_has_effect(self): if not self.has_r128_support: @@ -303,7 +316,7 @@ class ReplayGainCliTestBase(TestHelper): gain_relative_to_84 = analyse(84) gain_relative_to_89 = analyse(89) - self.assertNotEqual(gain_relative_to_84, gain_relative_to_89) + assert gain_relative_to_84 != gain_relative_to_89 def test_per_disc(self): # Use the per_disc option and add a little more concurrency. @@ -314,98 +327,68 @@ class ReplayGainCliTestBase(TestHelper): # FIXME: Add fixtures with known track/album gain (within a suitable # tolerance) so that we can actually check per-disc operation here. 
for item in album.items(): - self.assertIsNotNone(item.rg_track_gain) - self.assertIsNotNone(item.rg_album_gain) + assert item.rg_track_gain is not None + assert item.rg_album_gain is not None @unittest.skipIf(not GST_AVAILABLE, "gstreamer cannot be found") class ReplayGainGstCliTest( - ReplayGainCliTestBase, unittest.TestCase, GstBackendMixin + ReplayGainCliTest, ReplayGainTestCase, GstBackendMixin ): FNAME = "full" # file contains only silence @unittest.skipIf(not GAIN_PROG_AVAILABLE, "no *gain command found") class ReplayGainCmdCliTest( - ReplayGainCliTestBase, unittest.TestCase, CmdBackendMixin + ReplayGainCliTest, ReplayGainTestCase, CmdBackendMixin ): FNAME = "full" # file contains only silence @unittest.skipIf(not FFMPEG_AVAILABLE, "ffmpeg cannot be found") class ReplayGainFfmpegCliTest( - ReplayGainCliTestBase, unittest.TestCase, FfmpegBackendMixin + ReplayGainCliTest, ReplayGainTestCase, FfmpegBackendMixin ): FNAME = "full" # file contains only silence @unittest.skipIf(not FFMPEG_AVAILABLE, "ffmpeg cannot be found") class ReplayGainFfmpegNoiseCliTest( - ReplayGainCliTestBase, unittest.TestCase, FfmpegBackendMixin + ReplayGainCliTest, ReplayGainTestCase, FfmpegBackendMixin ): FNAME = "whitenoise" -class ImportTest(TestHelper): - threaded = False - - def setUp(self): - # Implemented by Mixins, see above. This may decide to skip the test. - self.test_backend() - - self.setup_beets(disk=True) - self.config["threaded"] = self.threaded - self.config["replaygain"]["backend"] = self.backend - - try: - self.load_plugins("replaygain") - except Exception: - self.teardown_beets() - self.unload_plugins() - - self.importer = self.create_importer() - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - +class ImportTest(AsIsImporterMixin): def test_import_converted(self): - self.importer.run() + self.run_asis_importer() for item in self.lib.items(): # FIXME: Add fixtures with known track/album gain (within a # suitable tolerance) so that we can actually check correct # operation here. 
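Note on the class restructuring in this hunk: the per-backend CLI and importer test classes no longer define their own setUp/tearDown or a threaded flag; each concrete class now composes a shared ReplayGainTestCase with a backend mixin, plus AsIsImporterMixin (run_asis_importer) or ThreadedImportMixin where importing is involved, so plugin loading and importer wiring live in one place. A rough, self-contained sketch of that composition pattern, using stand-in names rather than the real beets helpers:

import unittest

class FfmpegBackendStub:
    backend = "ffmpeg"  # each backend mixin contributes its own value

class ReplayGainCaseStub(unittest.TestCase):
    def setUp(self):
        super().setUp()
        # the real shared base class would load the replaygain plugin and
        # select self.backend here, instead of every subclass repeating it
        self.configured_backend = self.backend

class CliTestsStub:
    # not a TestCase itself, so these tests only run via a concrete subclass
    def test_backend_selected(self):
        assert self.configured_backend == self.backend

class FfmpegCliTestStub(CliTestsStub, ReplayGainCaseStub, FfmpegBackendStub):
    """One concrete class per backend; all behaviour comes from composition."""

if __name__ == "__main__":
    unittest.main()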
- self.assertIsNotNone(item.rg_track_gain) - self.assertIsNotNone(item.rg_album_gain) + assert item.rg_track_gain is not None + assert item.rg_album_gain is not None @unittest.skipIf(not GST_AVAILABLE, "gstreamer cannot be found") -class ReplayGainGstImportTest(ImportTest, unittest.TestCase, GstBackendMixin): +class ReplayGainGstImportTest(ImportTest, ReplayGainTestCase, GstBackendMixin): pass @unittest.skipIf(not GAIN_PROG_AVAILABLE, "no *gain command found") -class ReplayGainCmdImportTest(ImportTest, unittest.TestCase, CmdBackendMixin): +class ReplayGainCmdImportTest(ImportTest, ReplayGainTestCase, CmdBackendMixin): pass @unittest.skipIf(not FFMPEG_AVAILABLE, "ffmpeg cannot be found") class ReplayGainFfmpegImportTest( - ImportTest, unittest.TestCase, FfmpegBackendMixin + ImportTest, ReplayGainTestCase, FfmpegBackendMixin ): pass @unittest.skipIf(not FFMPEG_AVAILABLE, "ffmpeg cannot be found") class ReplayGainFfmpegThreadedImportTest( - ImportTest, unittest.TestCase, FfmpegBackendMixin + ThreadedImportMixin, ImportTest, ReplayGainTestCase, FfmpegBackendMixin ): - threaded = True - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + pass diff --git a/test/plugins/test_smartplaylist.py b/test/plugins/test_smartplaylist.py index 68c11c910..a50f3e622 100644 --- a/test/plugins/test_smartplaylist.py +++ b/test/plugins/test_smartplaylist.py @@ -13,33 +13,33 @@ # included in all copies or substantial portions of the Software. -import unittest from os import fsdecode, path, remove from shutil import rmtree from tempfile import mkdtemp from unittest.mock import MagicMock, Mock, PropertyMock +import pytest + from beets import config from beets.dbcore import OrQuery from beets.dbcore.query import FixedFieldSort, MultipleSort, NullSort from beets.library import Album, Item, parse_query_string -from beets.test import _common -from beets.test.helper import TestHelper +from beets.test.helper import BeetsTestCase, PluginTestCase from beets.ui import UserError from beets.util import CHAR_REPLACE, bytestring_path, syspath from beetsplug.smartplaylist import SmartPlaylistPlugin -class SmartPlaylistTest(_common.TestCase): +class SmartPlaylistTest(BeetsTestCase): def test_build_queries(self): spl = SmartPlaylistPlugin() - self.assertIsNone(spl._matched_playlists) - self.assertIsNone(spl._unmatched_playlists) + assert spl._matched_playlists is None + assert spl._unmatched_playlists is None config["smartplaylist"]["playlists"].set([]) spl.build_queries() - self.assertEqual(spl._matched_playlists, set()) - self.assertEqual(spl._unmatched_playlists, set()) + assert spl._matched_playlists == set() + assert spl._unmatched_playlists == set() config["smartplaylist"]["playlists"].set( [ @@ -49,7 +49,7 @@ class SmartPlaylistTest(_common.TestCase): ] ) spl.build_queries() - self.assertEqual(spl._matched_playlists, set()) + assert spl._matched_playlists == set() foo_foo = parse_query_string("FOO foo", Item) baz_baz = parse_query_string("BAZ baz", Item) baz_baz2 = parse_query_string("BAZ baz", Album) @@ -59,14 +59,11 @@ class SmartPlaylistTest(_common.TestCase): parse_query_string("BAR bar2", Album)[0], ) ) - self.assertEqual( - spl._unmatched_playlists, - { - ("foo", foo_foo, (None, None)), - ("baz", baz_baz, baz_baz2), - ("bar", (None, None), (bar_bar, None)), - }, - ) + assert spl._unmatched_playlists == { + ("foo", foo_foo, (None, None)), + ("baz", baz_baz, baz_baz2), + ("bar", (None, None), (bar_bar, None)), + } def 
test_build_queries_with_sorts(self): spl = SmartPlaylistPlugin() @@ -90,19 +87,16 @@ class SmartPlaylistTest(_common.TestCase): spl.build_queries() sorts = {name: sort for name, (_, sort), _ in spl._unmatched_playlists} - asseq = self.assertEqual # less cluttered code sort = FixedFieldSort # short cut since we're only dealing with this - asseq(sorts["no_sort"], NullSort()) - asseq(sorts["one_sort"], sort("year")) - asseq(sorts["only_empty_sorts"], None) - asseq(sorts["one_non_empty_sort"], sort("year")) - asseq( - sorts["multiple_sorts"], - MultipleSort([sort("year"), sort("genre", False)]), + assert sorts["no_sort"] == NullSort() + assert sorts["one_sort"] == sort("year") + assert sorts["only_empty_sorts"] is None + assert sorts["one_non_empty_sort"] == sort("year") + assert sorts["multiple_sorts"] == MultipleSort( + [sort("year"), sort("genre", False)] ) - asseq( - sorts["mixed"], - MultipleSort([sort("year"), sort("genre"), sort("id", False)]), + assert sorts["mixed"] == MultipleSort( + [sort("year"), sort("genre"), sort("id", False)] ) def test_matches(self): @@ -111,21 +105,21 @@ class SmartPlaylistTest(_common.TestCase): a = MagicMock(Album) i = MagicMock(Item) - self.assertFalse(spl.matches(i, None, None)) - self.assertFalse(spl.matches(a, None, None)) + assert not spl.matches(i, None, None) + assert not spl.matches(a, None, None) query = Mock() query.match.side_effect = {i: True}.__getitem__ - self.assertTrue(spl.matches(i, query, None)) - self.assertFalse(spl.matches(a, query, None)) + assert spl.matches(i, query, None) + assert not spl.matches(a, query, None) a_query = Mock() a_query.match.side_effect = {a: True}.__getitem__ - self.assertFalse(spl.matches(i, None, a_query)) - self.assertTrue(spl.matches(a, None, a_query)) + assert not spl.matches(i, None, a_query) + assert spl.matches(a, None, a_query) - self.assertTrue(spl.matches(i, query, a_query)) - self.assertTrue(spl.matches(a, query, a_query)) + assert spl.matches(i, query, a_query) + assert spl.matches(a, query, a_query) def test_db_changes(self): spl = SmartPlaylistPlugin() @@ -140,18 +134,18 @@ class SmartPlaylistTest(_common.TestCase): spl.matches = Mock(return_value=False) spl.db_change(None, "nothing") - self.assertEqual(spl._unmatched_playlists, {pl1, pl2, pl3}) - self.assertEqual(spl._matched_playlists, set()) + assert spl._unmatched_playlists == {pl1, pl2, pl3} + assert spl._matched_playlists == set() spl.matches.side_effect = lambda _, q, __: q == "q3" spl.db_change(None, "matches 3") - self.assertEqual(spl._unmatched_playlists, {pl1, pl2}) - self.assertEqual(spl._matched_playlists, {pl3}) + assert spl._unmatched_playlists == {pl1, pl2} + assert spl._matched_playlists == {pl3} spl.matches.side_effect = lambda _, q, __: q == "q1" spl.db_change(None, "matches 3") - self.assertEqual(spl._matched_playlists, {pl1, pl3}) - self.assertEqual(spl._unmatched_playlists, {pl2}) + assert spl._matched_playlists == {pl1, pl3} + assert spl._unmatched_playlists == {pl2} def test_playlist_update(self): spl = SmartPlaylistPlugin() @@ -189,7 +183,7 @@ class SmartPlaylistTest(_common.TestCase): content = f.read() rmtree(syspath(dir)) - self.assertEqual(content, b"/tagada.mp3\n") + assert content == b"/tagada.mp3\n" def test_playlist_update_output_extm3u(self): spl = SmartPlaylistPlugin() @@ -234,11 +228,11 @@ class SmartPlaylistTest(_common.TestCase): content = f.read() rmtree(syspath(dir)) - self.assertEqual( - content, - b"#EXTM3U\n" + assert ( + content + == b"#EXTM3U\n" + b"#EXTINF:300,fake artist - fake title\n" - + 
b"http://beets:8337/files/tagada.mp3\n", + + b"http://beets:8337/files/tagada.mp3\n" ) def test_playlist_update_output_extm3u_fields(self): @@ -286,11 +280,11 @@ class SmartPlaylistTest(_common.TestCase): content = f.read() rmtree(syspath(dir)) - self.assertEqual( - content, - b"#EXTM3U\n" + assert ( + content + == b"#EXTM3U\n" + b'#EXTINF:300 id="456" genre="Fake Genre",Fake Artist - fake Title\n' - + b"/tagada.mp3\n", + + b"/tagada.mp3\n" ) def test_playlist_update_uri_format(self): @@ -336,12 +330,14 @@ class SmartPlaylistTest(_common.TestCase): content = f.read() rmtree(syspath(dir)) - self.assertEqual(content, b"http://beets:8337/item/3/file\n") + assert content == b"http://beets:8337/item/3/file\n" -class SmartPlaylistCLITest(_common.TestCase, TestHelper): +class SmartPlaylistCLITest(PluginTestCase): + plugin = "smartplaylist" + def setUp(self): - self.setup_beets() + super().setUp() self.item = self.add_item() config["smartplaylist"]["playlists"].set( @@ -351,37 +347,24 @@ class SmartPlaylistCLITest(_common.TestCase, TestHelper): ] ) config["smartplaylist"]["playlist_dir"].set(fsdecode(self.temp_dir)) - self.load_plugins("smartplaylist") - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() def test_splupdate(self): - with self.assertRaises(UserError): + with pytest.raises(UserError): self.run_with_output("splupdate", "tagada") self.run_with_output("splupdate", "my_playlist") m3u_path = path.join(self.temp_dir, b"my_playlist.m3u") self.assertExists(m3u_path) with open(syspath(m3u_path), "rb") as f: - self.assertEqual(f.read(), self.item.path + b"\n") + assert f.read() == self.item.path + b"\n" remove(syspath(m3u_path)) self.run_with_output("splupdate", "my_playlist.m3u") with open(syspath(m3u_path), "rb") as f: - self.assertEqual(f.read(), self.item.path + b"\n") + assert f.read() == self.item.path + b"\n" remove(syspath(m3u_path)) self.run_with_output("splupdate") for name in (b"my_playlist.m3u", b"all.m3u"): with open(path.join(self.temp_dir, name), "rb") as f: - self.assertEqual(f.read(), self.item.path + b"\n") - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert f.read() == self.item.path + b"\n" diff --git a/test/plugins/test_spotify.py b/test/plugins/test_spotify.py index ae5ce5228..a2336df10 100644 --- a/test/plugins/test_spotify.py +++ b/test/plugins/test_spotify.py @@ -1,15 +1,13 @@ """Tests for the 'spotify' plugin""" import os -import unittest from urllib.parse import parse_qs, urlparse import responses -from beets import config from beets.library import Item from beets.test import _common -from beets.test.helper import TestHelper +from beets.test.helper import BeetsTestCase from beetsplug import spotify @@ -25,11 +23,10 @@ def _params(url): return parse_qs(urlparse(url).query) -class SpotifyPluginTest(_common.TestCase, TestHelper): +class SpotifyPluginTest(BeetsTestCase): @responses.activate def setUp(self): - config.clear() - self.setup_beets() + super().setUp() responses.add( responses.POST, spotify.SpotifyPlugin.oauth_token_url, @@ -46,17 +43,14 @@ class SpotifyPluginTest(_common.TestCase, TestHelper): opts = ArgumentsMock("list", False) self.spotify._parse_opts(opts) - def tearDown(self): - self.teardown_beets() - def test_args(self): opts = ArgumentsMock("fail", True) - self.assertFalse(self.spotify._parse_opts(opts)) + assert not self.spotify._parse_opts(opts) opts = ArgumentsMock("list", False) - self.assertTrue(self.spotify._parse_opts(opts)) + 
assert self.spotify._parse_opts(opts) def test_empty_query(self): - self.assertIsNone(self.spotify._match_library_tracks(self.lib, "1=2")) + assert self.spotify._match_library_tracks(self.lib, "1=2") is None @responses.activate def test_missing_request(self): @@ -81,14 +75,14 @@ class SpotifyPluginTest(_common.TestCase, TestHelper): length=10, ) item.add(self.lib) - self.assertEqual([], self.spotify._match_library_tracks(self.lib, "")) + assert [] == self.spotify._match_library_tracks(self.lib, "") params = _params(responses.calls[0].request.url) query = params["q"][0] - self.assertIn("duifhjslkef", query) - self.assertIn("artist:ujydfsuihse", query) - self.assertIn("album:lkajsdflakjsd", query) - self.assertEqual(params["type"], ["track"]) + assert "duifhjslkef" in query + assert "artist:ujydfsuihse" in query + assert "album:lkajsdflakjsd" in query + assert params["type"] == ["track"] @responses.activate def test_track_request(self): @@ -114,16 +108,16 @@ class SpotifyPluginTest(_common.TestCase, TestHelper): ) item.add(self.lib) results = self.spotify._match_library_tracks(self.lib, "Happy") - self.assertEqual(1, len(results)) - self.assertEqual("6NPVjNh8Jhru9xOmyQigds", results[0]["id"]) + assert 1 == len(results) + assert "6NPVjNh8Jhru9xOmyQigds" == results[0]["id"] self.spotify._output_match_results(results) params = _params(responses.calls[0].request.url) query = params["q"][0] - self.assertIn("Happy", query) - self.assertIn("artist:Pharrell Williams", query) - self.assertIn("album:Despicable Me 2", query) - self.assertEqual(params["type"], ["track"]) + assert "Happy" in query + assert "artist:Pharrell Williams" in query + assert "album:Despicable Me 2" in query + assert params["type"] == ["track"] @responses.activate def test_track_for_id(self): @@ -180,13 +174,5 @@ class SpotifyPluginTest(_common.TestCase, TestHelper): item.add(self.lib) results = self.spotify._match_library_tracks(self.lib, "Happy") - self.assertEqual(1, len(results)) - self.assertEqual("6NPVjNh8Jhru9xOmyQigds", results[0]["id"]) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert 1 == len(results) + assert "6NPVjNh8Jhru9xOmyQigds" == results[0]["id"] diff --git a/test/plugins/test_subsonicupdate.py b/test/plugins/test_subsonicupdate.py index 3f84d848f..891f75cb7 100644 --- a/test/plugins/test_subsonicupdate.py +++ b/test/plugins/test_subsonicupdate.py @@ -1,13 +1,11 @@ """Tests for the 'subsonic' plugin.""" -import unittest from urllib.parse import parse_qs, urlparse import responses from beets import config -from beets.test import _common -from beets.test.helper import TestHelper +from beets.test.helper import BeetsTestCase from beetsplug import subsonicupdate @@ -26,14 +24,13 @@ def _params(url): return parse_qs(urlparse(url).query) -class SubsonicPluginTest(_common.TestCase, TestHelper): +class SubsonicPluginTest(BeetsTestCase): """Test class for subsonicupdate.""" @responses.activate def setUp(self): """Sets up config and plugin for test.""" - config.clear() - self.setup_beets() + super().setUp() config["subsonic"]["user"] = "admin" config["subsonic"]["pass"] = "admin" @@ -90,10 +87,6 @@ class SubsonicPluginTest(_common.TestCase, TestHelper): } """ - def tearDown(self): - """Tears down tests.""" - self.teardown_beets() - @responses.activate def test_start_scan(self): """Tests success path based on best case scenario.""" @@ -189,12 +182,3 @@ class SubsonicPluginTest(_common.TestCase, TestHelper): ) 
self.subsonicupdate.start_scan() - - -def suite(): - """Default test suite.""" - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_substitute.py b/test/plugins/test_substitute.py new file mode 100644 index 000000000..48014e231 --- /dev/null +++ b/test/plugins/test_substitute.py @@ -0,0 +1,90 @@ +# This file is part of beets. +# Copyright 2024, Nicholas Boyd Isacsson. +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. + +"""Test the substitute plugin regex functionality.""" + +from beets.test.helper import PluginTestCase +from beetsplug.substitute import Substitute + + +class SubstitutePluginTest(PluginTestCase): + plugin = "substitute" + preload_plugin = False + + def run_substitute(self, config, cases): + with self.configure_plugin(config): + for input, expected in cases: + assert Substitute().tmpl_substitute(input) == expected + + def test_simple_substitute(self): + self.run_substitute( + { + "a": "x", + "b": "y", + "c": "z", + }, + [("a", "x"), ("b", "y"), ("c", "z")], + ) + + def test_case_insensitivity(self): + self.run_substitute({"a": "x"}, [("A", "x")]) + + def test_unmatched_input_preserved(self): + self.run_substitute({"a": "x"}, [("c", "c")]) + + def test_regex_to_static(self): + self.run_substitute( + {".*jimi hendrix.*": "Jimi Hendrix"}, + [("The Jimi Hendrix Experience", "Jimi Hendrix")], + ) + + def test_regex_capture_group(self): + self.run_substitute( + {"^(.*?)(,| &| and).*": r"\1"}, + [ + ("King Creosote & Jon Hopkins", "King Creosote"), + ( + "Michael Hurley, The Holy Modal Rounders, Jeffrey Frederick & " + + "The Clamtones", + "Michael Hurley", + ), + ("James Yorkston and the Athletes", "James Yorkston"), + ], + ) + + def test_partial_substitution(self): + self.run_substitute({r"\.": ""}, [("U.N.P.O.C.", "UNPOC")]) + + def test_rules_applied_in_definition_order(self): + self.run_substitute( + { + "a": "x", + "[ab]": "y", + "b": "z", + }, + [ + ("a", "x"), + ("b", "y"), + ], + ) + + def test_rules_applied_in_sequence(self): + self.run_substitute( + {"a": "b", "b": "c", "d": "a"}, + [ + ("a", "c"), + ("b", "c"), + ("d", "a"), + ], + ) diff --git a/test/plugins/test_the.py b/test/plugins/test_the.py index e6d510774..bf073301b 100644 --- a/test/plugins/test_the.py +++ b/test/plugins/test_the.py @@ -1,71 +1,49 @@ """Tests for the 'the' plugin""" -import unittest - from beets import config -from beets.test import _common +from beets.test.helper import BeetsTestCase from beetsplug.the import FORMAT, PATTERN_A, PATTERN_THE, ThePlugin -class ThePluginTest(_common.TestCase): +class ThePluginTest(BeetsTestCase): def test_unthe_with_default_patterns(self): - self.assertEqual(ThePlugin().unthe("", PATTERN_THE), "") - self.assertEqual( - ThePlugin().unthe("The Something", PATTERN_THE), "Something, The" + assert ThePlugin().unthe("", PATTERN_THE) == "" + assert ( + ThePlugin().unthe("The Something", PATTERN_THE) == "Something, The" ) - 
self.assertEqual(ThePlugin().unthe("The The", PATTERN_THE), "The, The") - self.assertEqual( - ThePlugin().unthe("The The", PATTERN_THE), "The, The" + assert ThePlugin().unthe("The The", PATTERN_THE) == "The, The" + assert ThePlugin().unthe("The The", PATTERN_THE) == "The, The" + assert ThePlugin().unthe("The The X", PATTERN_THE) == "The X, The" + assert ThePlugin().unthe("the The", PATTERN_THE) == "The, the" + assert ( + ThePlugin().unthe("Protected The", PATTERN_THE) == "Protected The" ) - self.assertEqual( - ThePlugin().unthe("The The X", PATTERN_THE), "The X, The" - ) - self.assertEqual(ThePlugin().unthe("the The", PATTERN_THE), "The, the") - self.assertEqual( - ThePlugin().unthe("Protected The", PATTERN_THE), "Protected The" - ) - self.assertEqual(ThePlugin().unthe("A Boy", PATTERN_A), "Boy, A") - self.assertEqual(ThePlugin().unthe("a girl", PATTERN_A), "girl, a") - self.assertEqual(ThePlugin().unthe("An Apple", PATTERN_A), "Apple, An") - self.assertEqual( - ThePlugin().unthe("An A Thing", PATTERN_A), "A Thing, An" - ) - self.assertEqual( - ThePlugin().unthe("the An Arse", PATTERN_A), "the An Arse" - ) - self.assertEqual( - ThePlugin().unthe("TET - Travailleur", PATTERN_THE), - "TET - Travailleur", + assert ThePlugin().unthe("A Boy", PATTERN_A) == "Boy, A" + assert ThePlugin().unthe("a girl", PATTERN_A) == "girl, a" + assert ThePlugin().unthe("An Apple", PATTERN_A) == "Apple, An" + assert ThePlugin().unthe("An A Thing", PATTERN_A) == "A Thing, An" + assert ThePlugin().unthe("the An Arse", PATTERN_A) == "the An Arse" + assert ( + ThePlugin().unthe("TET - Travailleur", PATTERN_THE) + == "TET - Travailleur" ) def test_unthe_with_strip(self): config["the"]["strip"] = True - self.assertEqual( - ThePlugin().unthe("The Something", PATTERN_THE), "Something" - ) - self.assertEqual(ThePlugin().unthe("An A", PATTERN_A), "A") + assert ThePlugin().unthe("The Something", PATTERN_THE) == "Something" + assert ThePlugin().unthe("An A", PATTERN_A) == "A" def test_template_function_with_defaults(self): ThePlugin().patterns = [PATTERN_THE, PATTERN_A] - self.assertEqual(ThePlugin().the_template_func("The The"), "The, The") - self.assertEqual(ThePlugin().the_template_func("An A"), "A, An") + assert ThePlugin().the_template_func("The The") == "The, The" + assert ThePlugin().the_template_func("An A") == "A, An" def test_custom_pattern(self): config["the"]["patterns"] = ["^test\\s"] config["the"]["format"] = FORMAT - self.assertEqual( - ThePlugin().the_template_func("test passed"), "passed, test" - ) + assert ThePlugin().the_template_func("test passed") == "passed, test" def test_custom_format(self): config["the"]["patterns"] = [PATTERN_THE, PATTERN_A] config["the"]["format"] = "{1} ({0})" - self.assertEqual(ThePlugin().the_template_func("The A"), "The (A)") - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert ThePlugin().the_template_func("The A") == "The (A)" diff --git a/test/plugins/test_thumbnails.py b/test/plugins/test_thumbnails.py index 951fc6e8c..3eb36cd25 100644 --- a/test/plugins/test_thumbnails.py +++ b/test/plugins/test_thumbnails.py @@ -14,12 +14,13 @@ import os.path -import unittest from shutil import rmtree from tempfile import mkdtemp from unittest.mock import Mock, call, patch -from beets.test.helper import TestHelper +import pytest + +from beets.test.helper import BeetsTestCase from beets.util import bytestring_path, syspath from beetsplug.thumbnails import ( LARGE_DIR, @@ -30,17 +31,11 @@ 
from beetsplug.thumbnails import ( ) -class ThumbnailsTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - - def tearDown(self): - self.teardown_beets() - +class ThumbnailsTest(BeetsTestCase): @patch("beetsplug.thumbnails.ArtResizer") - @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok") + @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok", Mock()) @patch("beetsplug.thumbnails.os.stat") - def test_add_tags(self, mock_stat, _, mock_artresizer): + def test_add_tags(self, mock_stat, mock_artresizer): plugin = ThumbnailsPlugin() plugin.get_uri = Mock( side_effect={b"/path/to/cover": "COVER_URI"}.__getitem__ @@ -65,7 +60,7 @@ class ThumbnailsTest(unittest.TestCase, TestHelper): mock_artresizer.shared.local = False mock_artresizer.shared.can_write_metadata = False plugin = ThumbnailsPlugin() - self.assertFalse(plugin._check_local_ok()) + assert not plugin._check_local_ok() # test dirs creation mock_artresizer.shared.local = True @@ -81,37 +76,35 @@ class ThumbnailsTest(unittest.TestCase, TestHelper): mock_os.path.exists = exists plugin = ThumbnailsPlugin() mock_os.makedirs.assert_called_once_with(syspath(NORMAL_DIR)) - self.assertTrue(plugin._check_local_ok()) + assert plugin._check_local_ok() # test metadata writer function mock_os.path.exists = lambda _: True mock_artresizer.shared.local = True mock_artresizer.shared.can_write_metadata = False - with self.assertRaises(RuntimeError): + with pytest.raises(RuntimeError): ThumbnailsPlugin() mock_artresizer.shared.local = True mock_artresizer.shared.can_write_metadata = True - self.assertTrue(ThumbnailsPlugin()._check_local_ok()) + assert ThumbnailsPlugin()._check_local_ok() # test URI getter function giouri_inst = mock_giouri.return_value giouri_inst.available = True - self.assertEqual(ThumbnailsPlugin().get_uri, giouri_inst.uri) + assert ThumbnailsPlugin().get_uri == giouri_inst.uri giouri_inst.available = False - self.assertEqual( - ThumbnailsPlugin().get_uri.__self__.__class__, PathlibURI - ) + assert ThumbnailsPlugin().get_uri.__self__.__class__ == PathlibURI - @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok") + @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok", Mock()) @patch("beetsplug.thumbnails.ArtResizer") @patch("beetsplug.thumbnails.util") @patch("beetsplug.thumbnails.os") @patch("beetsplug.thumbnails.shutil") def test_make_cover_thumbnail( - self, mock_shutils, mock_os, mock_util, mock_artresizer, _ + self, mock_shutils, mock_os, mock_util, mock_artresizer ): thumbnail_dir = os.path.normpath(b"/thumbnail/dir") md5_file = os.path.join(thumbnail_dir, b"md5") @@ -166,37 +159,39 @@ class ThumbnailsTest(unittest.TestCase, TestHelper): mock_os.stat.side_effect = os_stat plugin.make_cover_thumbnail(album, 12345, thumbnail_dir) - self.assertEqual(mock_resize.call_count, 0) + assert mock_resize.call_count == 0 # and with force plugin.config["force"] = True plugin.make_cover_thumbnail(album, 12345, thumbnail_dir) mock_resize.assert_called_once_with(12345, path_to_art, md5_file) - @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok") - def test_make_dolphin_cover_thumbnail(self, _): + @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok", Mock()) + def test_make_dolphin_cover_thumbnail(self): plugin = ThumbnailsPlugin() tmp = bytestring_path(mkdtemp()) album = Mock(path=tmp, artpath=os.path.join(tmp, b"cover.jpg")) plugin.make_dolphin_cover_thumbnail(album) with open(os.path.join(tmp, b".directory"), "rb") as f: - self.assertEqual( - 
f.read().splitlines(), [b"[Desktop Entry]", b"Icon=./cover.jpg"] - ) + assert f.read().splitlines() == [ + b"[Desktop Entry]", + b"Icon=./cover.jpg", + ] # not rewritten when it already exists (yup that's a big limitation) album.artpath = b"/my/awesome/art.tiff" plugin.make_dolphin_cover_thumbnail(album) with open(os.path.join(tmp, b".directory"), "rb") as f: - self.assertEqual( - f.read().splitlines(), [b"[Desktop Entry]", b"Icon=./cover.jpg"] - ) + assert f.read().splitlines() == [ + b"[Desktop Entry]", + b"Icon=./cover.jpg", + ] rmtree(syspath(tmp)) - @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok") + @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok", Mock()) @patch("beetsplug.thumbnails.ArtResizer") - def test_process_album(self, mock_artresizer, _): + def test_process_album(self, mock_artresizer): get_size = mock_artresizer.shared.get_size plugin = ThumbnailsPlugin() @@ -206,20 +201,20 @@ class ThumbnailsTest(unittest.TestCase, TestHelper): # no art album = Mock(artpath=None) plugin.process_album(album) - self.assertEqual(get_size.call_count, 0) - self.assertEqual(make_dolphin.call_count, 0) + assert get_size.call_count == 0 + assert make_dolphin.call_count == 0 # cannot get art size album.artpath = b"/path/to/art" get_size.return_value = None plugin.process_album(album) get_size.assert_called_once_with(b"/path/to/art") - self.assertEqual(make_cover.call_count, 0) + assert make_cover.call_count == 0 # dolphin tests plugin.config["dolphin"] = False plugin.process_album(album) - self.assertEqual(make_dolphin.call_count, 0) + assert make_dolphin.call_count == 0 plugin.config["dolphin"] = True plugin.process_album(album) @@ -239,9 +234,9 @@ class ThumbnailsTest(unittest.TestCase, TestHelper): any_order=True, ) - @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok") + @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok", Mock()) @patch("beetsplug.thumbnails.decargs") - def test_invokations(self, mock_decargs, _): + def test_invokations(self, mock_decargs): plugin = ThumbnailsPlugin() plugin.process_album = Mock() album = Mock() @@ -260,9 +255,9 @@ class ThumbnailsTest(unittest.TestCase, TestHelper): def test_thumbnail_file_name(self, mock_basedir): plug = ThumbnailsPlugin() plug.get_uri = Mock(return_value="file:///my/uri") - self.assertEqual( - plug.thumbnail_file_name(b"idontcare"), - b"9488f5797fbe12ffb316d607dfd93d04.png", + assert ( + plug.thumbnail_file_name(b"idontcare") + == b"9488f5797fbe12ffb316d607dfd93d04.png" ) def test_uri(self): @@ -270,12 +265,12 @@ class ThumbnailsTest(unittest.TestCase, TestHelper): if not gio.available: self.skipTest("GIO library not found") - self.assertEqual(gio.uri("/foo"), "file:///") # silent fail - self.assertEqual(gio.uri(b"/foo"), "file:///foo") - self.assertEqual(gio.uri(b"/foo!"), "file:///foo!") - self.assertEqual( - gio.uri(b"/music/\xec\x8b\xb8\xec\x9d\xb4"), - "file:///music/%EC%8B%B8%EC%9D%B4", + assert gio.uri("/foo") == "file:///" # silent fail + assert gio.uri(b"/foo") == "file:///foo" + assert gio.uri(b"/foo!") == "file:///foo!" 
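Note on the decorator changes throughout this file: passing Mock() as the second argument to patch (the replacement object) makes unittest.mock install that object directly and skip injecting an extra mock argument into the decorated test, which is why the unused "_" parameters disappear from the signatures above. A standalone illustration of that behaviour (Service and the function names are made up for the example):

from unittest.mock import Mock, patch

class Service:
    def check(self):
        return "real"

@patch.object(Service, "check")  # no replacement given: the created mock is passed in
def with_injected_mock(mock_check):
    mock_check.return_value = "stub"
    assert Service().check() == "stub"

@patch.object(Service, "check", Mock(return_value="stub"))  # explicit replacement: nothing injected
def without_injected_arg():
    assert Service().check() == "stub"

with_injected_mock()
without_injected_arg()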
+ assert ( + gio.uri(b"/music/\xec\x8b\xb8\xec\x9d\xb4") + == "file:///music/%EC%8B%B8%EC%9D%B4" ) @@ -287,11 +282,3 @@ class TestPathlibURI: # test it won't break if we pass it bytes for a path test_uri.uri(b"/") - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_types_plugin.py b/test/plugins/test_types_plugin.py index 8225c3302..b41e9bb18 100644 --- a/test/plugins/test_types_plugin.py +++ b/test/plugins/test_types_plugin.py @@ -14,22 +14,16 @@ import time -import unittest from datetime import datetime +import pytest from confuse import ConfigValueError -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase -class TypesPluginTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - self.load_plugins("types") - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() +class TypesPluginTest(PluginTestCase): + plugin = "types" def test_integer_modify_and_query(self): self.config["types"] = {"myint": "int"} @@ -37,15 +31,15 @@ class TypesPluginTest(unittest.TestCase, TestHelper): # Do not match unset values out = self.list("myint:1..3") - self.assertEqual("", out) + assert "" == out self.modify("myint=2") item.load() - self.assertEqual(item["myint"], 2) + assert item["myint"] == 2 # Match in range out = self.list("myint:1..3") - self.assertIn("aaa", out) + assert "aaa" in out def test_album_integer_modify_and_query(self): self.config["types"] = {"myint": "int"} @@ -53,15 +47,15 @@ class TypesPluginTest(unittest.TestCase, TestHelper): # Do not match unset values out = self.list_album("myint:1..3") - self.assertEqual("", out) + assert "" == out self.modify("-a", "myint=2") album.load() - self.assertEqual(album["myint"], 2) + assert album["myint"] == 2 # Match in range out = self.list_album("myint:1..3") - self.assertIn("aaa", out) + assert "aaa" in out def test_float_modify_and_query(self): self.config["types"] = {"myfloat": "float"} @@ -69,15 +63,15 @@ class TypesPluginTest(unittest.TestCase, TestHelper): # Do not match unset values out = self.list("myfloat:10..0") - self.assertEqual("", out) + assert "" == out self.modify("myfloat=-9.1") item.load() - self.assertEqual(item["myfloat"], -9.1) + assert item["myfloat"] == -9.1 # Match in range out = self.list("myfloat:-10..0") - self.assertIn("aaa", out) + assert "aaa" in out def test_bool_modify_and_query(self): self.config["types"] = {"mybool": "bool"} @@ -87,28 +81,28 @@ class TypesPluginTest(unittest.TestCase, TestHelper): # Do not match unset values out = self.list("mybool:true, mybool:false") - self.assertEqual("", out) + assert "" == out # Set true self.modify("mybool=1", "artist:true") true.load() - self.assertTrue(true["mybool"]) + assert true["mybool"] # Set false self.modify("mybool=false", "artist:false") false.load() - self.assertFalse(false["mybool"]) + assert not false["mybool"] # Query bools out = self.list("mybool:true", "$artist $mybool") - self.assertEqual("true True", out) + assert "true True" == out out = self.list("mybool:false", "$artist $mybool") # Dealing with unset fields? 
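Note on the PluginTestCase pattern this file switches to (test_substitute.py and test_zero.py in this patch use it as well): the plugin name becomes a class attribute, preload_plugin = False defers loading, and configure_plugin(...) scopes a plugin configuration to a with-block, replacing the old load_plugins/unload_plugins and config[...] boilerplate. A condensed usage sketch, inferred only from the hunks in this patch and assuming a beets source checkout (ZeroStyleTest is an illustrative name):

from mediafile import MediaFile

from beets.test.helper import PluginTestCase
from beets.util import syspath

class ZeroStyleTest(PluginTestCase):
    plugin = "zero"          # loaded/unloaded by the base class around each test
    preload_plugin = False   # defer loading until a per-test config is chosen

    def test_strips_matching_comments(self):
        item = self.add_item_fixture(comments="encoded by encoder")
        item.write()
        # configure_plugin applies this plugin config (and loads the plugin)
        # only for the duration of the with-block
        with self.configure_plugin(
            {"fields": ["comments"], "comments": ["encoded by"]}
        ):
            item.write()
        assert MediaFile(syspath(item.path)).comments is None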
- # self.assertEqual('false False', out) + # assert 'false False' == out # out = self.list('mybool:', '$artist $mybool') - # self.assertIn('unset $mybool', out) + # assert 'unset $mybool' in out def test_date_modify_and_query(self): self.config["types"] = {"mydate": "date"} @@ -119,27 +113,27 @@ class TypesPluginTest(unittest.TestCase, TestHelper): # Do not match unset values out = self.list("mydate:..2000") - self.assertEqual("", out) + assert "" == out self.modify("mydate=1999-01-01", "artist:prince") old.load() - self.assertEqual(old["mydate"], mktime(1999, 1, 1)) + assert old["mydate"] == mktime(1999, 1, 1) self.modify("mydate=1999-12-30", "artist:britney") new.load() - self.assertEqual(new["mydate"], mktime(1999, 12, 30)) + assert new["mydate"] == mktime(1999, 12, 30) # Match in range out = self.list("mydate:..1999-07", "$artist $mydate") - self.assertEqual("prince 1999-01-01", out) + assert "prince 1999-01-01" == out # FIXME some sort of timezone issue here # out = self.list('mydate:1999-12-30', '$artist $mydate') - # self.assertEqual('britney 1999-12-30', out) + # assert 'britney 1999-12-30' == out def test_unknown_type_error(self): self.config["types"] = {"flex": "unkown type"} - with self.assertRaises(ConfigValueError): + with pytest.raises(ConfigValueError): self.run_command("ls") def test_template_if_def(self): @@ -161,29 +155,20 @@ class TypesPluginTest(unittest.TestCase, TestHelper): without_fields = self.add_item(artist="britney") int_template = "%ifdef{playcount,Play count: $playcount,Not played}" - self.assertEqual( - with_fields.evaluate_template(int_template), "Play count: 10" - ) - self.assertEqual( - without_fields.evaluate_template(int_template), "Not played" - ) + assert with_fields.evaluate_template(int_template) == "Play count: 10" + assert without_fields.evaluate_template(int_template) == "Not played" float_template = "%ifdef{rating,Rating: $rating,Not rated}" - self.assertEqual( - with_fields.evaluate_template(float_template), "Rating: 5.0" - ) - self.assertEqual( - without_fields.evaluate_template(float_template), "Not rated" - ) + assert with_fields.evaluate_template(float_template) == "Rating: 5.0" + assert without_fields.evaluate_template(float_template) == "Not rated" bool_template = "%ifdef{starred,Starred: $starred,Not starred}" - self.assertIn( - with_fields.evaluate_template(bool_template).lower(), - ("starred: true", "starred: yes", "starred: y"), - ) - self.assertEqual( - without_fields.evaluate_template(bool_template), "Not starred" + assert with_fields.evaluate_template(bool_template).lower() in ( + "starred: true", + "starred: yes", + "starred: y", ) + assert without_fields.evaluate_template(bool_template) == "Not starred" def modify(self, *args): return self.run_with_output( @@ -199,11 +184,3 @@ class TypesPluginTest(unittest.TestCase, TestHelper): def mktime(*args): return time.mktime(datetime(*args).timetuple()) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/plugins/test_web.py b/test/plugins/test_web.py index afd1ed706..2ad07bbe5 100644 --- a/test/plugins/test_web.py +++ b/test/plugins/test_web.py @@ -4,15 +4,16 @@ import json import os.path import platform import shutil -import unittest +from collections import Counter from beets import logging from beets.library import Album, Item from beets.test import _common +from beets.test.helper import ItemInDBTestCase from beetsplug import web -class WebPluginTest(_common.LibTestCase): 
+class WebPluginTest(ItemInDBTestCase): def setUp(self): super().setUp() self.log = logging.getLogger("beets.web") @@ -74,8 +75,8 @@ class WebPluginTest(_common.LibTestCase): self.path_prefix + os.sep + os.path.join(b"path_1").decode("utf-8") ) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["path"], expected_path) + assert response.status_code == 200 + assert res_json["path"] == expected_path web.app.config["INCLUDE_PATHS"] = False @@ -89,8 +90,8 @@ class WebPluginTest(_common.LibTestCase): + os.path.join(b"somewhere2", b"art_path_2").decode("utf-8") ) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["artpath"], expected_path) + assert response.status_code == 200 + assert res_json["artpath"] == expected_path web.app.config["INCLUDE_PATHS"] = False @@ -99,44 +100,44 @@ class WebPluginTest(_common.LibTestCase): response = self.client.get("/item/1") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertNotIn("path", res_json) + assert response.status_code == 200 + assert "path" not in res_json def test_config_include_artpaths_false(self): web.app.config["INCLUDE_PATHS"] = False response = self.client.get("/album/2") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertNotIn("artpath", res_json) + assert response.status_code == 200 + assert "artpath" not in res_json def test_get_all_items(self): response = self.client.get("/item/") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["items"]), 3) + assert response.status_code == 200 + assert len(res_json["items"]) == 3 def test_get_single_item_by_id(self): response = self.client.get("/item/1") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], 1) - self.assertEqual(res_json["title"], "title") + assert response.status_code == 200 + assert res_json["id"] == 1 + assert res_json["title"] == "title" def test_get_multiple_items_by_id(self): response = self.client.get("/item/1,2") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["items"]), 2) + assert response.status_code == 200 + assert len(res_json["items"]) == 2 response_titles = {item["title"] for item in res_json["items"]} - self.assertEqual(response_titles, {"title", "another title"}) + assert response_titles == {"title", "another title"} def test_get_single_item_not_found(self): response = self.client.get("/item/4") - self.assertEqual(response.status_code, 404) + assert response.status_code == 404 def test_get_single_item_by_path(self): data_path = os.path.join(_common.RSRC, b"full.mp3") @@ -144,8 +145,8 @@ class WebPluginTest(_common.LibTestCase): response = self.client.get("/item/path/" + data_path.decode("utf-8")) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["title"], "full") + assert response.status_code == 200 + assert res_json["title"] == "full" def test_get_single_item_by_path_not_found_if_not_in_library(self): data_path = os.path.join(_common.RSRC, b"full.mp3") @@ -153,51 +154,51 @@ class WebPluginTest(_common.LibTestCase): # to the library. 
response = self.client.get("/item/path/" + data_path.decode("utf-8")) - self.assertEqual(response.status_code, 404) + assert response.status_code == 404 def test_get_item_empty_query(self): """testing item query: """ response = self.client.get("/item/query/") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["items"]), 3) + assert response.status_code == 200 + assert len(res_json["items"]) == 3 def test_get_simple_item_query(self): """testing item query: another""" response = self.client.get("/item/query/another") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) - self.assertEqual(res_json["results"][0]["title"], "another title") + assert response.status_code == 200 + assert len(res_json["results"]) == 1 + assert res_json["results"][0]["title"] == "another title" def test_query_item_string(self): """testing item query: testattr:ABC""" response = self.client.get("/item/query/testattr%3aABC") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) - self.assertEqual(res_json["results"][0]["title"], "and a third") + assert response.status_code == 200 + assert len(res_json["results"]) == 1 + assert res_json["results"][0]["title"] == "and a third" def test_query_item_regex(self): """testing item query: testattr::[A-C]+""" response = self.client.get("/item/query/testattr%3a%3a[A-C]%2b") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) - self.assertEqual(res_json["results"][0]["title"], "and a third") + assert response.status_code == 200 + assert len(res_json["results"]) == 1 + assert res_json["results"][0]["title"] == "and a third" def test_query_item_regex_backslash(self): # """ testing item query: testattr::\w+ """ response = self.client.get("/item/query/testattr%3a%3a%5cw%2b") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) - self.assertEqual(res_json["results"][0]["title"], "and a third") + assert response.status_code == 200 + assert len(res_json["results"]) == 1 + assert res_json["results"][0]["title"] == "and a third" def test_query_item_path(self): # """ testing item query: path:\somewhere\a """ @@ -210,95 +211,95 @@ class WebPluginTest(_common.LibTestCase): ) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) - self.assertEqual(res_json["results"][0]["title"], "another title") + assert response.status_code == 200 + assert len(res_json["results"]) == 1 + assert res_json["results"][0]["title"] == "another title" def test_get_all_albums(self): response = self.client.get("/album/") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) + assert response.status_code == 200 response_albums = [album["album"] for album in res_json["albums"]] - self.assertCountEqual(response_albums, ["album", "other album"]) + assert Counter(response_albums) == {"album": 1, "other album": 1} def test_get_single_album_by_id(self): response = self.client.get("/album/2") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], 2) - 
self.assertEqual(res_json["album"], "other album") + assert response.status_code == 200 + assert res_json["id"] == 2 + assert res_json["album"] == "other album" def test_get_multiple_albums_by_id(self): response = self.client.get("/album/1,2") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) + assert response.status_code == 200 response_albums = [album["album"] for album in res_json["albums"]] - self.assertCountEqual(response_albums, ["album", "other album"]) + assert Counter(response_albums) == {"album": 1, "other album": 1} def test_get_album_empty_query(self): response = self.client.get("/album/query/") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["albums"]), 2) + assert response.status_code == 200 + assert len(res_json["albums"]) == 2 def test_get_simple_album_query(self): response = self.client.get("/album/query/other") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) - self.assertEqual(res_json["results"][0]["album"], "other album") - self.assertEqual(res_json["results"][0]["id"], 2) + assert response.status_code == 200 + assert len(res_json["results"]) == 1 + assert res_json["results"][0]["album"] == "other album" + assert res_json["results"][0]["id"] == 2 def test_get_album_details(self): response = self.client.get("/album/2?expand") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["items"]), 2) - self.assertEqual(res_json["items"][0]["album"], "other album") - self.assertEqual(res_json["items"][1]["album"], "other album") + assert response.status_code == 200 + assert len(res_json["items"]) == 2 + assert res_json["items"][0]["album"] == "other album" + assert res_json["items"][1]["album"] == "other album" response_track_titles = {item["title"] for item in res_json["items"]} - self.assertEqual(response_track_titles, {"title", "and a third"}) + assert response_track_titles == {"title", "and a third"} def test_query_album_string(self): """testing query: albumtest:xy""" response = self.client.get("/album/query/albumtest%3axy") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) - self.assertEqual(res_json["results"][0]["album"], "album") + assert response.status_code == 200 + assert len(res_json["results"]) == 1 + assert res_json["results"][0]["album"] == "album" def test_query_album_artpath_regex(self): """testing query: artpath::art_""" response = self.client.get("/album/query/artpath%3a%3aart_") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) - self.assertEqual(res_json["results"][0]["album"], "other album") + assert response.status_code == 200 + assert len(res_json["results"]) == 1 + assert res_json["results"][0]["album"] == "other album" def test_query_album_regex_backslash(self): # """ testing query: albumtest::\w+ """ response = self.client.get("/album/query/albumtest%3a%3a%5cw%2b") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) - self.assertEqual(res_json["results"][0]["album"], "album") + assert response.status_code == 200 + assert len(res_json["results"]) == 1 + assert 
res_json["results"][0]["album"] == "album" def test_get_stats(self): response = self.client.get("/stats") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["items"], 3) - self.assertEqual(res_json["albums"], 2) + assert response.status_code == 200 + assert res_json["items"] == 3 + assert res_json["albums"] == 2 def test_delete_item_id(self): web.app.config["READONLY"] = False @@ -311,17 +312,17 @@ class WebPluginTest(_common.LibTestCase): # Check we can find the temporary item we just created response = self.client.get("/item/" + str(item_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], item_id) + assert response.status_code == 200 + assert res_json["id"] == item_id # Delete item by id response = self.client.delete("/item/" + str(item_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) + assert response.status_code == 200 # Check the item has gone response = self.client.get("/item/" + str(item_id)) - self.assertEqual(response.status_code, 404) + assert response.status_code == 404 # Note: if this fails, the item may still be around # and may cause other tests to fail @@ -331,26 +332,26 @@ class WebPluginTest(_common.LibTestCase): # Create an item with a file ipath = os.path.join(self.temp_dir, b"testfile1.mp3") shutil.copy(os.path.join(_common.RSRC, b"full.mp3"), ipath) - self.assertTrue(os.path.exists(ipath)) + assert os.path.exists(ipath) item_id = self.lib.add(Item.from_path(ipath)) # Check we can find the temporary item we just created response = self.client.get("/item/" + str(item_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], item_id) + assert response.status_code == 200 + assert res_json["id"] == item_id # Delete item by id, without deleting file response = self.client.delete("/item/" + str(item_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) + assert response.status_code == 200 # Check the item has gone response = self.client.get("/item/" + str(item_id)) - self.assertEqual(response.status_code, 404) + assert response.status_code == 404 # Check the file has not gone - self.assertTrue(os.path.exists(ipath)) + assert os.path.exists(ipath) os.remove(ipath) def test_delete_item_with_file(self): @@ -359,26 +360,26 @@ class WebPluginTest(_common.LibTestCase): # Create an item with a file ipath = os.path.join(self.temp_dir, b"testfile2.mp3") shutil.copy(os.path.join(_common.RSRC, b"full.mp3"), ipath) - self.assertTrue(os.path.exists(ipath)) + assert os.path.exists(ipath) item_id = self.lib.add(Item.from_path(ipath)) # Check we can find the temporary item we just created response = self.client.get("/item/" + str(item_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], item_id) + assert response.status_code == 200 + assert res_json["id"] == item_id # Delete item by id, with file response = self.client.delete("/item/" + str(item_id) + "?delete") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) + assert response.status_code == 200 # Check the item has gone response = self.client.get("/item/" + str(item_id)) - self.assertEqual(response.status_code, 404) + assert response.status_code == 404 # Check the 
file has gone - self.assertFalse(os.path.exists(ipath)) + assert not os.path.exists(ipath) def test_delete_item_query(self): web.app.config["READONLY"] = False @@ -391,19 +392,19 @@ class WebPluginTest(_common.LibTestCase): # Check we can find the temporary item we just created response = self.client.get("/item/query/test_delete_item_query") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) + assert response.status_code == 200 + assert len(res_json["results"]) == 1 # Delete item by query response = self.client.delete("/item/query/test_delete_item_query") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) + assert response.status_code == 200 # Check the item has gone response = self.client.get("/item/query/test_delete_item_query") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 0) + assert response.status_code == 200 + assert len(res_json["results"]) == 0 def test_delete_item_all_fails(self): """DELETE is not supported for list all""" @@ -412,7 +413,7 @@ class WebPluginTest(_common.LibTestCase): # Delete all items response = self.client.delete("/item/") - self.assertEqual(response.status_code, 405) + assert response.status_code == 405 # Note: if this fails, all items have gone and rest of # tests will fail! @@ -428,18 +429,18 @@ class WebPluginTest(_common.LibTestCase): # Check we can find the temporary item we just created response = self.client.get("/item/" + str(item_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], item_id) + assert response.status_code == 200 + assert res_json["id"] == item_id # Try to delete item by id response = self.client.delete("/item/" + str(item_id)) - self.assertEqual(response.status_code, 405) + assert response.status_code == 405 # Check the item has not gone response = self.client.get("/item/" + str(item_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], item_id) + assert response.status_code == 200 + assert res_json["id"] == item_id # Remove it self.lib.get_item(item_id).remove() @@ -455,18 +456,18 @@ class WebPluginTest(_common.LibTestCase): # Check we can find the temporary item we just created response = self.client.get("/item/query/test_delete_item_q_ro") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) + assert response.status_code == 200 + assert len(res_json["results"]) == 1 # Try to delete item by query response = self.client.delete("/item/query/test_delete_item_q_ro") - self.assertEqual(response.status_code, 405) + assert response.status_code == 405 # Check the item has not gone response = self.client.get("/item/query/test_delete_item_q_ro") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) + assert response.status_code == 200 + assert len(res_json["results"]) == 1 # Remove it self.lib.get_item(item_id).remove() @@ -482,17 +483,17 @@ class WebPluginTest(_common.LibTestCase): # Check we can find the temporary album we just created response = self.client.get("/album/" + str(album_id)) res_json = json.loads(response.data.decode("utf-8")) - 
self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], album_id) + assert response.status_code == 200 + assert res_json["id"] == album_id # Delete album by id response = self.client.delete("/album/" + str(album_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) + assert response.status_code == 200 # Check the album has gone response = self.client.get("/album/" + str(album_id)) - self.assertEqual(response.status_code, 404) + assert response.status_code == 404 # Note: if this fails, the album may still be around # and may cause other tests to fail @@ -507,19 +508,19 @@ class WebPluginTest(_common.LibTestCase): # Check we can find the temporary album we just created response = self.client.get("/album/query/test_delete_album_query") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) + assert response.status_code == 200 + assert len(res_json["results"]) == 1 # Delete album response = self.client.delete("/album/query/test_delete_album_query") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) + assert response.status_code == 200 # Check the album has gone response = self.client.get("/album/query/test_delete_album_query") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 0) + assert response.status_code == 200 + assert len(res_json["results"]) == 0 def test_delete_album_all_fails(self): """DELETE is not supported for list all""" @@ -528,7 +529,7 @@ class WebPluginTest(_common.LibTestCase): # Delete all albums response = self.client.delete("/album/") - self.assertEqual(response.status_code, 405) + assert response.status_code == 405 # Note: if this fails, all albums have gone and rest of # tests will fail! 
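Note on the album-listing assertions earlier in this file: assertCountEqual checks that two sequences contain the same elements with the same multiplicities regardless of order, and the plain-assert replacement compares a collections.Counter against a dict of expected counts (Counter is a dict subclass, so the equality is well defined). A minimal illustration:

from collections import Counter

response_albums = ["other album", "album"]  # response order is not guaranteed

# unittest style: self.assertCountEqual(response_albums, ["album", "other album"])
# plain-assert equivalent: compare element multiplicities, ignoring order
assert Counter(response_albums) == {"album": 1, "other album": 1}
assert Counter(["a", "a", "b"]) == Counter(["b", "a", "a"])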
@@ -544,18 +545,18 @@ class WebPluginTest(_common.LibTestCase): # Check we can find the temporary album we just created response = self.client.get("/album/" + str(album_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], album_id) + assert response.status_code == 200 + assert res_json["id"] == album_id # Try to delete album by id response = self.client.delete("/album/" + str(album_id)) - self.assertEqual(response.status_code, 405) + assert response.status_code == 405 # Check the item has not gone response = self.client.get("/album/" + str(album_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], album_id) + assert response.status_code == 200 + assert res_json["id"] == album_id # Remove it self.lib.get_album(album_id).remove() @@ -573,18 +574,18 @@ class WebPluginTest(_common.LibTestCase): # Check we can find the temporary album we just created response = self.client.get("/album/query/test_delete_album_query_ro") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) + assert response.status_code == 200 + assert len(res_json["results"]) == 1 # Try to delete album response = self.client.delete("/album/query/test_delete_album_query_ro") - self.assertEqual(response.status_code, 405) + assert response.status_code == 405 # Check the album has not gone response = self.client.get("/album/query/test_delete_album_query_ro") res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(res_json["results"]), 1) + assert response.status_code == 200 + assert len(res_json["results"]) == 1 # Remove it self.lib.get_album(album_id).remove() @@ -604,11 +605,10 @@ class WebPluginTest(_common.LibTestCase): # Check we can find the temporary item we just created response = self.client.get("/item/" + str(item_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], item_id) - self.assertEqual( - [res_json["test_patch_f1"], res_json["test_patch_f2"]], ["1", "Old"] - ) + assert response.status_code == 200 + assert res_json["id"] == item_id + assert res_json["test_patch_f1"] == "1" + assert res_json["test_patch_f2"] == "Old" # Patch item by id # patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"}]}) @@ -616,20 +616,18 @@ class WebPluginTest(_common.LibTestCase): "/item/" + str(item_id), json={"test_patch_f2": "New"} ) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], item_id) - self.assertEqual( - [res_json["test_patch_f1"], res_json["test_patch_f2"]], ["1", "New"] - ) + assert response.status_code == 200 + assert res_json["id"] == item_id + assert res_json["test_patch_f1"] == "1" + assert res_json["test_patch_f2"] == "New" # Check the update has really worked response = self.client.get("/item/" + str(item_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], item_id) - self.assertEqual( - [res_json["test_patch_f1"], res_json["test_patch_f2"]], ["1", "New"] - ) + assert response.status_code == 200 + assert res_json["id"] == item_id + assert res_json["test_patch_f1"] == "1" + assert res_json["test_patch_f2"] == "New" # Remove the item 
self.lib.get_item(item_id).remove() @@ -651,18 +649,17 @@ class WebPluginTest(_common.LibTestCase): # Check we can find the temporary item we just created response = self.client.get("/item/" + str(item_id)) res_json = json.loads(response.data.decode("utf-8")) - self.assertEqual(response.status_code, 200) - self.assertEqual(res_json["id"], item_id) - self.assertEqual( - [res_json["test_patch_f1"], res_json["test_patch_f2"]], ["2", "Old"] - ) + assert response.status_code == 200 + assert res_json["id"] == item_id + assert res_json["test_patch_f1"] == "2" + assert res_json["test_patch_f2"] == "Old" # Patch item by id # patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"}) response = self.client.patch( "/item/" + str(item_id), json={"test_patch_f2": "New"} ) - self.assertEqual(response.status_code, 405) + assert response.status_code == 405 # Remove the item self.lib.get_item(item_id).remove() @@ -670,17 +667,9 @@ class WebPluginTest(_common.LibTestCase): def test_get_item_file(self): ipath = os.path.join(self.temp_dir, b"testfile2.mp3") shutil.copy(os.path.join(_common.RSRC, b"full.mp3"), ipath) - self.assertTrue(os.path.exists(ipath)) + assert os.path.exists(ipath) item_id = self.lib.add(Item.from_path(ipath)) response = self.client.get("/item/" + str(item_id) + "/file") - self.assertEqual(response.status_code, 200) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert response.status_code == 200 diff --git a/test/plugins/test_zero.py b/test/plugins/test_zero.py index 378e419d5..51913c8e0 100644 --- a/test/plugins/test_zero.py +++ b/test/plugins/test_zero.py @@ -1,32 +1,18 @@ """Tests for the 'zero' plugin""" -import unittest - from mediafile import MediaFile from beets.library import Item -from beets.test.helper import TestHelper, control_stdin +from beets.test.helper import PluginTestCase, control_stdin from beets.util import syspath from beetsplug.zero import ZeroPlugin -class ZeroPluginTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - self.config["zero"] = { - "fields": [], - "keep_fields": [], - "update_database": False, - } - - def tearDown(self): - ZeroPlugin.listeners = None - self.teardown_beets() - self.unload_plugins() +class ZeroPluginTest(PluginTestCase): + plugin = "zero" + preload_plugin = False def test_no_patterns(self): - self.config["zero"]["fields"] = ["comments", "month"] - item = self.add_item_fixture( comments="test comment", title="Title", @@ -35,88 +21,79 @@ class ZeroPluginTest(unittest.TestCase, TestHelper): ) item.write() - self.load_plugins("zero") - item.write() + with self.configure_plugin({"fields": ["comments", "month"]}): + item.write() mf = MediaFile(syspath(item.path)) - self.assertIsNone(mf.comments) - self.assertIsNone(mf.month) - self.assertEqual(mf.title, "Title") - self.assertEqual(mf.year, 2000) + assert mf.comments is None + assert mf.month is None + assert mf.title == "Title" + assert mf.year == 2000 def test_pattern_match(self): - self.config["zero"]["fields"] = ["comments"] - self.config["zero"]["comments"] = ["encoded by"] - item = self.add_item_fixture(comments="encoded by encoder") item.write() - self.load_plugins("zero") - item.write() + with self.configure_plugin( + {"fields": ["comments"], "comments": ["encoded by"]} + ): + item.write() mf = MediaFile(syspath(item.path)) - self.assertIsNone(mf.comments) + assert mf.comments is None def test_pattern_nomatch(self): - self.config["zero"]["fields"] = 
["comments"] - self.config["zero"]["comments"] = ["encoded by"] - item = self.add_item_fixture(comments="recorded at place") item.write() - self.load_plugins("zero") - item.write() + with self.configure_plugin( + {"fields": ["comments"], "comments": ["encoded_by"]} + ): + item.write() mf = MediaFile(syspath(item.path)) - self.assertEqual(mf.comments, "recorded at place") + assert mf.comments == "recorded at place" def test_do_not_change_database(self): - self.config["zero"]["fields"] = ["year"] - item = self.add_item_fixture(year=2000) item.write() - self.load_plugins("zero") - item.write() + with self.configure_plugin({"fields": ["year"]}): + item.write() - self.assertEqual(item["year"], 2000) + assert item["year"] == 2000 def test_change_database(self): - self.config["zero"]["fields"] = ["year"] - self.config["zero"]["update_database"] = True - item = self.add_item_fixture(year=2000) item.write() - self.load_plugins("zero") - item.write() + with self.configure_plugin( + {"fields": ["year"], "update_database": True} + ): + item.write() - self.assertEqual(item["year"], 0) + assert item["year"] == 0 def test_album_art(self): - self.config["zero"]["fields"] = ["images"] - path = self.create_mediafile_fixture(images=["jpg"]) item = Item.from_path(path) - self.load_plugins("zero") - item.write() + with self.configure_plugin({"fields": ["images"]}): + item.write() mf = MediaFile(syspath(path)) - self.assertFalse(mf.images) + assert not mf.images def test_auto_false(self): - self.config["zero"]["fields"] = ["year"] - self.config["zero"]["update_database"] = True - self.config["zero"]["auto"] = False - item = self.add_item_fixture(year=2000) item.write() - self.load_plugins("zero") - item.write() + with self.configure_plugin( + {"fields": ["year"], "update_database": True, "auto": False} + ): + item.write() - self.assertEqual(item["year"], 2000) + assert item["year"] == 2000 def test_subcommand_update_database_true(self): item = self.add_item_fixture( @@ -124,21 +101,22 @@ class ZeroPluginTest(unittest.TestCase, TestHelper): ) item.write() item_id = item.id - self.config["zero"]["fields"] = ["comments"] - self.config["zero"]["update_database"] = True - self.config["zero"]["auto"] = False - self.load_plugins("zero") - with control_stdin("y"): + with ( + self.configure_plugin( + {"fields": ["comments"], "update_database": True, "auto": False} + ), + control_stdin("y"), + ): self.run_command("zero") mf = MediaFile(syspath(item.path)) item = self.lib.get_item(item_id) - self.assertEqual(item["year"], 2016) - self.assertEqual(mf.year, 2016) - self.assertIsNone(mf.comments) - self.assertEqual(item["comments"], "") + assert item["year"] == 2016 + assert mf.year == 2016 + assert mf.comments is None + assert item["comments"] == "" def test_subcommand_update_database_false(self): item = self.add_item_fixture( @@ -147,21 +125,25 @@ class ZeroPluginTest(unittest.TestCase, TestHelper): item.write() item_id = item.id - self.config["zero"]["fields"] = ["comments"] - self.config["zero"]["update_database"] = False - self.config["zero"]["auto"] = False - - self.load_plugins("zero") - with control_stdin("y"): + with ( + self.configure_plugin( + { + "fields": ["comments"], + "update_database": False, + "auto": False, + } + ), + control_stdin("y"), + ): self.run_command("zero") mf = MediaFile(syspath(item.path)) item = self.lib.get_item(item_id) - self.assertEqual(item["year"], 2016) - self.assertEqual(mf.year, 2016) - self.assertEqual(item["comments"], "test comment") - self.assertIsNone(mf.comments) + assert 
item["year"] == 2016 + assert mf.year == 2016 + assert item["comments"] == "test comment" + assert mf.comments is None def test_subcommand_query_include(self): item = self.add_item_fixture( @@ -170,17 +152,15 @@ class ZeroPluginTest(unittest.TestCase, TestHelper): item.write() - self.config["zero"]["fields"] = ["comments"] - self.config["zero"]["update_database"] = False - self.config["zero"]["auto"] = False - - self.load_plugins("zero") - self.run_command("zero", "year: 2016") + with self.configure_plugin( + {"fields": ["comments"], "update_database": False, "auto": False} + ): + self.run_command("zero", "year: 2016") mf = MediaFile(syspath(item.path)) - self.assertEqual(mf.year, 2016) - self.assertIsNone(mf.comments) + assert mf.year == 2016 + assert mf.comments is None def test_subcommand_query_exclude(self): item = self.add_item_fixture( @@ -189,70 +169,68 @@ class ZeroPluginTest(unittest.TestCase, TestHelper): item.write() - self.config["zero"]["fields"] = ["comments"] - self.config["zero"]["update_database"] = False - self.config["zero"]["auto"] = False - - self.load_plugins("zero") - self.run_command("zero", "year: 0000") + with self.configure_plugin( + {"fields": ["comments"], "update_database": False, "auto": False} + ): + self.run_command("zero", "year: 0000") mf = MediaFile(syspath(item.path)) - self.assertEqual(mf.year, 2016) - self.assertEqual(mf.comments, "test comment") + assert mf.year == 2016 + assert mf.comments == "test comment" def test_no_fields(self): item = self.add_item_fixture(year=2016) item.write() mediafile = MediaFile(syspath(item.path)) - self.assertEqual(mediafile.year, 2016) + assert mediafile.year == 2016 item_id = item.id - self.load_plugins("zero") - with control_stdin("y"): + with self.configure_plugin({"fields": []}), control_stdin("y"): self.run_command("zero") item = self.lib.get_item(item_id) - self.assertEqual(item["year"], 2016) - self.assertEqual(mediafile.year, 2016) + assert item["year"] == 2016 + assert mediafile.year == 2016 def test_whitelist_and_blacklist(self): item = self.add_item_fixture(year=2016) item.write() mf = MediaFile(syspath(item.path)) - self.assertEqual(mf.year, 2016) + assert mf.year == 2016 item_id = item.id - self.config["zero"]["fields"] = ["year"] - self.config["zero"]["keep_fields"] = ["comments"] - self.load_plugins("zero") - with control_stdin("y"): + with ( + self.configure_plugin( + {"fields": ["year"], "keep_fields": ["comments"]} + ), + control_stdin("y"), + ): self.run_command("zero") item = self.lib.get_item(item_id) - self.assertEqual(item["year"], 2016) - self.assertEqual(mf.year, 2016) + assert item["year"] == 2016 + assert mf.year == 2016 def test_keep_fields(self): item = self.add_item_fixture(year=2016, comments="test comment") - self.config["zero"]["keep_fields"] = ["year"] - self.config["zero"]["fields"] = None - self.config["zero"]["update_database"] = True - tags = { "comments": "test comment", "year": 2016, } - self.load_plugins("zero") - z = ZeroPlugin() - z.write_event(item, item.path, tags) - self.assertIsNone(tags["comments"]) - self.assertEqual(tags["year"], 2016) + with self.configure_plugin( + {"fields": None, "keep_fields": ["year"], "update_database": True} + ): + z = ZeroPlugin() + z.write_event(item, item.path, tags) + + assert tags["comments"] is None + assert tags["year"] == 2016 def test_keep_fields_removes_preserved_tags(self): self.config["zero"]["keep_fields"] = ["year"] @@ -261,7 +239,7 @@ class ZeroPluginTest(unittest.TestCase, TestHelper): z = ZeroPlugin() - self.assertNotIn("id", 
z.fields_to_progs) + assert "id" not in z.fields_to_progs def test_fields_removes_preserved_tags(self): self.config["zero"]["fields"] = ["year id"] @@ -269,7 +247,7 @@ class ZeroPluginTest(unittest.TestCase, TestHelper): z = ZeroPlugin() - self.assertNotIn("id", z.fields_to_progs) + assert "id" not in z.fields_to_progs def test_empty_query_n_response_no_changes(self): item = self.add_item_fixture( @@ -277,26 +255,18 @@ class ZeroPluginTest(unittest.TestCase, TestHelper): ) item.write() item_id = item.id - self.config["zero"]["fields"] = ["comments"] - self.config["zero"]["update_database"] = True - self.config["zero"]["auto"] = False - - self.load_plugins("zero") - with control_stdin("n"): + with ( + self.configure_plugin( + {"fields": ["comments"], "update_database": True, "auto": False} + ), + control_stdin("n"), + ): self.run_command("zero") mf = MediaFile(syspath(item.path)) item = self.lib.get_item(item_id) - self.assertEqual(item["year"], 2016) - self.assertEqual(mf.year, 2016) - self.assertEqual(mf.comments, "test comment") - self.assertEqual(item["comments"], "test comment") - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert item["year"] == 2016 + assert mf.year == 2016 + assert mf.comments == "test comment" + assert item["comments"] == "test comment" diff --git a/test/rsrc/lyrics/tekstowopl/szukajwykonawcaagfdgjatytulagfdgafg.txt b/test/rsrc/lyrics/tekstowopl/szukajwykonawcaagfdgjatytulagfdgafg.txt deleted file mode 100755 index a137059de..000000000 --- a/test/rsrc/lyrics/tekstowopl/szukajwykonawcaagfdgjatytulagfdgafg.txt +++ /dev/null @@ -1,537 +0,0 @@ - - - - - - - - - Wyszukiwarka - teksty piosenek, tłumaczenia piosenek, teledyski na Tekstowo.pl - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
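The test_zero.py changes above swap per-test config mutation plus an explicit load_plugins("zero") call for the PluginTestCase base class and its configure_plugin context manager. The following is a condensed restatement of that pattern as it appears in the diff (test_pattern_match), not an independent implementation — presumably configure_plugin loads the plugin with the given config and undoes it when the block exits:

from mediafile import MediaFile

from beets.test.helper import PluginTestCase
from beets.util import syspath


class ZeroPatternSketch(PluginTestCase):
    plugin = "zero"
    preload_plugin = False  # loaded per-test by configure_plugin instead

    def test_pattern_match(self):
        item = self.add_item_fixture(comments="encoded by encoder")
        item.write()

        # Scope the plugin's config (and its write listener) to this block.
        with self.configure_plugin(
            {"fields": ["comments"], "comments": ["encoded by"]}
        ):
            item.write()

        assert MediaFile(syspath(item.path)).comments is None

Scoping the configuration this way removes the need for the old setUp/tearDown pair that set the "zero" config by hand, reset ZeroPlugin.listeners, and unloaded plugins after every test.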
\ No newline at end of file diff --git a/test/rsrc/lyrics/tekstowopl/szukajwykonawcajuicewrldtytulluciddreams.txt b/test/rsrc/lyrics/tekstowopl/szukajwykonawcajuicewrldtytulluciddreams.txt deleted file mode 100755 index 40e8fa3cb..000000000 --- a/test/rsrc/lyrics/tekstowopl/szukajwykonawcajuicewrldtytulluciddreams.txt +++ /dev/null @@ -1,584 +0,0 @@
- - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/test/test_art_resize.py b/test/test_art_resize.py index ac9463cba..8dd4d0e89 100644 --- a/test/test_art_resize.py +++ b/test/test_art_resize.py @@ -14,13 +14,12 @@ """Tests for image resizing based on filesize.""" - import os import unittest from unittest.mock import patch from beets.test import _common -from beets.test.helper import CleanupModulesMixin, TestHelper +from beets.test.helper import BeetsTestCase, CleanupModulesMixin from beets.util import command_output, syspath from beets.util.artresizer import IMBackend, PILBackend @@ -48,7 +47,7 @@ class DummyPILBackend(PILBackend): pass -class ArtResizerFileSizeTest(CleanupModulesMixin, _common.TestCase, TestHelper): +class ArtResizerFileSizeTest(CleanupModulesMixin, BeetsTestCase): """Unittest test case for Art Resizer to a specific filesize.""" modules = (IMBackend.__module__,) @@ -56,14 +55,6 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, _common.TestCase, TestHelper): IMG_225x225 = os.path.join(_common.RSRC, b"abbey.jpg") IMG_225x225_SIZE = os.stat(syspath(IMG_225x225)).st_size - def setUp(self): - """Called before each test, setting up beets.""" - self.setup_beets() - - def tearDown(self): - """Called after each test, unloading all plugins.""" - self.teardown_beets() - def _test_img_resize(self, backend): """Test resizing based on file size, given a resize_func.""" # Check quality setting unaffected by new parameter @@ -85,8 +76,9 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, _common.TestCase, TestHelper): ) self.assertExists(im_a) # target size was achieved - self.assertLess( - os.stat(syspath(im_a)).st_size, os.stat(syspath(im_95_qual)).st_size + assert ( + os.stat(syspath(im_a)).st_size + < os.stat(syspath(im_95_qual)).st_size ) # Attempt with lower initial quality @@ -106,8 +98,9 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, _common.TestCase, TestHelper): ) self.assertExists(im_b) # Check high (initial) quality still gives a smaller filesize - self.assertLess( - os.stat(syspath(im_b)).st_size, os.stat(syspath(im_75_qual)).st_size + assert ( + os.stat(syspath(im_b)).st_size + < os.stat(syspath(im_75_qual)).st_size ) @unittest.skipUnless(PILBackend.available(), "PIL not available") @@ -131,7 +124,7 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, _common.TestCase, TestHelper): from PIL import Image with Image.open(path) as img: - self.assertNotIn("progression", img.info) + assert "progression" not in img.info @unittest.skipUnless(IMBackend.available(), "ImageMagick not available") def test_im_file_deinterlace(self): @@ -148,7 +141,7 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, _common.TestCase, TestHelper): syspath(path, prefix=False), ] out = command_output(cmd).stdout - self.assertEqual(out, b"None") + assert out == b"None" @patch("beets.util.artresizer.util") def test_write_metadata_im(self, mock_util): @@ -162,12 +155,3 @@ class ArtResizerFileSizeTest(CleanupModulesMixin, _common.TestCase, TestHelper): except AssertionError: command = im.convert_cmd + "foo -set b B -set a A foo".split() mock_util.command_output.assert_called_once_with(command) - - -def suite(): - """Run this suite of tests.""" - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/test_autotag.py b/test/test_autotag.py index e9b44458c..7f8ed3d2e 100644 --- a/test/test_autotag.py +++ b/test/test_autotag.py @@ -12,41 +12,42 @@ # The above copyright notice and 
this permission notice shall be # included in all copies or substantial portions of the Software. -"""Tests for autotagging functionality. -""" +"""Tests for autotagging functionality.""" import re import unittest +import pytest + from beets import autotag, config -from beets.autotag import AlbumInfo, TrackInfo, match +from beets.autotag import AlbumInfo, TrackInfo, correct_list_fields, match from beets.autotag.hooks import Distance, string_dist from beets.library import Item -from beets.test import _common +from beets.test.helper import BeetsTestCase, ConfigMixin from beets.util import plurality -class PluralityTest(_common.TestCase): +class PluralityTest(BeetsTestCase): def test_plurality_consensus(self): objs = [1, 1, 1, 1] obj, freq = plurality(objs) - self.assertEqual(obj, 1) - self.assertEqual(freq, 4) + assert obj == 1 + assert freq == 4 def test_plurality_near_consensus(self): objs = [1, 1, 2, 1] obj, freq = plurality(objs) - self.assertEqual(obj, 1) - self.assertEqual(freq, 3) + assert obj == 1 + assert freq == 3 def test_plurality_conflict(self): objs = [1, 1, 2, 2, 3] obj, freq = plurality(objs) - self.assertIn(obj, (1, 2)) - self.assertEqual(freq, 2) + assert obj in (1, 2) + assert freq == 2 def test_plurality_empty_sequence_raises_error(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="must be non-empty"): plurality([]) def test_current_metadata_finds_pluralities(self): @@ -56,9 +57,9 @@ class PluralityTest(_common.TestCase): Item(artist="The Beatles", album="Teh White Album"), ] likelies, consensus = match.current_metadata(items) - self.assertEqual(likelies["artist"], "The Beatles") - self.assertEqual(likelies["album"], "The White Album") - self.assertFalse(consensus["artist"]) + assert likelies["artist"] == "The Beatles" + assert likelies["album"] == "The White Album" + assert not consensus["artist"] def test_current_metadata_artist_consensus(self): items = [ @@ -67,9 +68,9 @@ class PluralityTest(_common.TestCase): Item(artist="The Beatles", album="Teh White Album"), ] likelies, consensus = match.current_metadata(items) - self.assertEqual(likelies["artist"], "The Beatles") - self.assertEqual(likelies["album"], "The White Album") - self.assertTrue(consensus["artist"]) + assert likelies["artist"] == "The Beatles" + assert likelies["album"] == "The White Album" + assert consensus["artist"] def test_albumartist_consensus(self): items = [ @@ -78,8 +79,8 @@ class PluralityTest(_common.TestCase): Item(artist="tartist3", album="album", albumartist="aartist"), ] likelies, consensus = match.current_metadata(items) - self.assertEqual(likelies["artist"], "aartist") - self.assertFalse(consensus["artist"]) + assert likelies["artist"] == "aartist" + assert not consensus["artist"] def test_current_metadata_likelies(self): fields = [ @@ -96,16 +97,13 @@ class PluralityTest(_common.TestCase): "media", "albumdisambig", ] - items = [ - Item(**{f: "{}_{}".format(f, i or 1) for f in fields}) - for i in range(5) - ] + items = [Item(**{f: f"{f}_{i or 1}" for f in fields}) for i in range(5)] likelies, _ = match.current_metadata(items) for f in fields: if isinstance(likelies[f], int): - self.assertEqual(likelies[f], 0) + assert likelies[f] == 0 else: - self.assertEqual(likelies[f], "%s_1" % f) + assert likelies[f] == f"{f}_1" def _make_item(title, track, artist="some artist"): @@ -146,7 +144,7 @@ def _clear_weights(): Distance.__dict__["_weights"].cache = {} -class DistanceTest(_common.TestCase): +class DistanceTest(BeetsTestCase): def tearDown(self): 
super().tearDown() _clear_weights() @@ -154,91 +152,89 @@ class DistanceTest(_common.TestCase): def test_add(self): dist = Distance() dist.add("add", 1.0) - self.assertEqual(dist._penalties, {"add": [1.0]}) + assert dist._penalties == {"add": [1.0]} def test_add_equality(self): dist = Distance() dist.add_equality("equality", "ghi", ["abc", "def", "ghi"]) - self.assertEqual(dist._penalties["equality"], [0.0]) + assert dist._penalties["equality"] == [0.0] dist.add_equality("equality", "xyz", ["abc", "def", "ghi"]) - self.assertEqual(dist._penalties["equality"], [0.0, 1.0]) + assert dist._penalties["equality"] == [0.0, 1.0] dist.add_equality("equality", "abc", re.compile(r"ABC", re.I)) - self.assertEqual(dist._penalties["equality"], [0.0, 1.0, 0.0]) + assert dist._penalties["equality"] == [0.0, 1.0, 0.0] def test_add_expr(self): dist = Distance() dist.add_expr("expr", True) - self.assertEqual(dist._penalties["expr"], [1.0]) + assert dist._penalties["expr"] == [1.0] dist.add_expr("expr", False) - self.assertEqual(dist._penalties["expr"], [1.0, 0.0]) + assert dist._penalties["expr"] == [1.0, 0.0] def test_add_number(self): dist = Distance() # Add a full penalty for each number of difference between two numbers. dist.add_number("number", 1, 1) - self.assertEqual(dist._penalties["number"], [0.0]) + assert dist._penalties["number"] == [0.0] dist.add_number("number", 1, 2) - self.assertEqual(dist._penalties["number"], [0.0, 1.0]) + assert dist._penalties["number"] == [0.0, 1.0] dist.add_number("number", 2, 1) - self.assertEqual(dist._penalties["number"], [0.0, 1.0, 1.0]) + assert dist._penalties["number"] == [0.0, 1.0, 1.0] dist.add_number("number", -1, 2) - self.assertEqual( - dist._penalties["number"], [0.0, 1.0, 1.0, 1.0, 1.0, 1.0] - ) + assert dist._penalties["number"] == [0.0, 1.0, 1.0, 1.0, 1.0, 1.0] def test_add_priority(self): dist = Distance() dist.add_priority("priority", "abc", "abc") - self.assertEqual(dist._penalties["priority"], [0.0]) + assert dist._penalties["priority"] == [0.0] dist.add_priority("priority", "def", ["abc", "def"]) - self.assertEqual(dist._penalties["priority"], [0.0, 0.5]) + assert dist._penalties["priority"] == [0.0, 0.5] dist.add_priority( "priority", "gh", ["ab", "cd", "ef", re.compile("GH", re.I)] ) - self.assertEqual(dist._penalties["priority"], [0.0, 0.5, 0.75]) + assert dist._penalties["priority"] == [0.0, 0.5, 0.75] dist.add_priority("priority", "xyz", ["abc", "def"]) - self.assertEqual(dist._penalties["priority"], [0.0, 0.5, 0.75, 1.0]) + assert dist._penalties["priority"] == [0.0, 0.5, 0.75, 1.0] def test_add_ratio(self): dist = Distance() dist.add_ratio("ratio", 25, 100) - self.assertEqual(dist._penalties["ratio"], [0.25]) + assert dist._penalties["ratio"] == [0.25] dist.add_ratio("ratio", 10, 5) - self.assertEqual(dist._penalties["ratio"], [0.25, 1.0]) + assert dist._penalties["ratio"] == [0.25, 1.0] dist.add_ratio("ratio", -5, 5) - self.assertEqual(dist._penalties["ratio"], [0.25, 1.0, 0.0]) + assert dist._penalties["ratio"] == [0.25, 1.0, 0.0] dist.add_ratio("ratio", 5, 0) - self.assertEqual(dist._penalties["ratio"], [0.25, 1.0, 0.0, 0.0]) + assert dist._penalties["ratio"] == [0.25, 1.0, 0.0, 0.0] def test_add_string(self): dist = Distance() sdist = string_dist("abc", "bcd") dist.add_string("string", "abc", "bcd") - self.assertEqual(dist._penalties["string"], [sdist]) - self.assertNotEqual(dist._penalties["string"], [0]) + assert dist._penalties["string"] == [sdist] + assert dist._penalties["string"] != [0] def test_add_string_none(self): dist = 
Distance() dist.add_string("string", None, "string") - self.assertEqual(dist._penalties["string"], [1]) + assert dist._penalties["string"] == [1] def test_add_string_both_none(self): dist = Distance() dist.add_string("string", None, None) - self.assertEqual(dist._penalties["string"], [0]) + assert dist._penalties["string"] == [0] def test_distance(self): config["match"]["distance_weights"]["album"] = 2.0 @@ -249,11 +245,11 @@ class DistanceTest(_common.TestCase): dist.add("album", 0.5) dist.add("media", 0.25) dist.add("media", 0.75) - self.assertEqual(dist.distance, 0.5) + assert dist.distance == 0.5 # __getitem__() - self.assertEqual(dist["album"], 0.25) - self.assertEqual(dist["media"], 0.25) + assert dist["album"] == 0.25 + assert dist["media"] == 0.25 def test_max_distance(self): config["match"]["distance_weights"]["album"] = 3.0 @@ -264,7 +260,7 @@ class DistanceTest(_common.TestCase): dist.add("album", 0.5) dist.add("medium", 0.0) dist.add("medium", 0.0) - self.assertEqual(dist.max_distance, 5.0) + assert dist.max_distance == 5.0 def test_operators(self): config["match"]["distance_weights"]["source"] = 1.0 @@ -277,14 +273,14 @@ class DistanceTest(_common.TestCase): dist.add("album", 0.5) dist.add("medium", 0.25) dist.add("medium", 0.75) - self.assertEqual(len(dist), 2) - self.assertEqual(list(dist), [("album", 0.2), ("medium", 0.2)]) - self.assertEqual(dist, 0.4) - self.assertLess(dist, 1.0) - self.assertGreater(dist, 0.0) - self.assertEqual(dist - 0.4, 0.0) - self.assertEqual(0.4 - dist, 0.0) - self.assertEqual(float(dist), 0.4) + assert len(dist) == 2 + assert list(dist) == [("album", 0.2), ("medium", 0.2)] + assert dist == 0.4 + assert dist < 1.0 + assert dist > 0.0 + assert dist - 0.4 == 0.0 + assert 0.4 - dist == 0.0 + assert float(dist) == 0.4 def test_raw_distance(self): config["match"]["distance_weights"]["album"] = 3.0 @@ -295,7 +291,7 @@ class DistanceTest(_common.TestCase): dist.add("album", 0.5) dist.add("medium", 0.25) dist.add("medium", 0.5) - self.assertEqual(dist.raw_distance, 2.25) + assert dist.raw_distance == 2.25 def test_items(self): config["match"]["distance_weights"]["album"] = 4.0 @@ -305,13 +301,13 @@ class DistanceTest(_common.TestCase): dist = Distance() dist.add("album", 0.1875) dist.add("medium", 0.75) - self.assertEqual(dist.items(), [("medium", 0.25), ("album", 0.125)]) + assert dist.items() == [("medium", 0.25), ("album", 0.125)] # Sort by key if distance is equal. 
dist = Distance() dist.add("album", 0.375) dist.add("medium", 0.75) - self.assertEqual(dist.items(), [("album", 0.25), ("medium", 0.25)]) + assert dist.items() == [("album", 0.25), ("medium", 0.25)] def test_update(self): dist1 = Distance() @@ -325,40 +321,41 @@ class DistanceTest(_common.TestCase): dist1.update(dist2) - self.assertEqual( - dist1._penalties, {"album": [0.5, 0.75, 0.25], "media": [1.0, 0.05]} - ) + assert dist1._penalties == { + "album": [0.5, 0.75, 0.25], + "media": [1.0, 0.05], + } -class TrackDistanceTest(_common.TestCase): +class TrackDistanceTest(BeetsTestCase): def test_identical_tracks(self): item = _make_item("one", 1) info = _make_trackinfo()[0] dist = match.track_distance(item, info, incl_artist=True) - self.assertEqual(dist, 0.0) + assert dist == 0.0 def test_different_title(self): item = _make_item("foo", 1) info = _make_trackinfo()[0] dist = match.track_distance(item, info, incl_artist=True) - self.assertNotEqual(dist, 0.0) + assert dist != 0.0 def test_different_artist(self): item = _make_item("one", 1) item.artist = "foo" info = _make_trackinfo()[0] dist = match.track_distance(item, info, incl_artist=True) - self.assertNotEqual(dist, 0.0) + assert dist != 0.0 def test_various_artists_tolerated(self): item = _make_item("one", 1) item.artist = "Various Artists" info = _make_trackinfo()[0] dist = match.track_distance(item, info, incl_artist=True) - self.assertEqual(dist, 0.0) + assert dist == 0.0 -class AlbumDistanceTest(_common.TestCase): +class AlbumDistanceTest(BeetsTestCase): def _mapping(self, items, info): out = {} for i, t in zip(items, info.tracks): @@ -379,7 +376,7 @@ class AlbumDistanceTest(_common.TestCase): tracks=_make_trackinfo(), va=False, ) - self.assertEqual(self._dist(items, info), 0) + assert self._dist(items, info) == 0 def test_incomplete_album(self): items = [] @@ -392,9 +389,9 @@ class AlbumDistanceTest(_common.TestCase): va=False, ) dist = self._dist(items, info) - self.assertNotEqual(dist, 0) + assert dist != 0 # Make sure the distance is not too great - self.assertLess(dist, 0.2) + assert dist < 0.2 def test_global_artists_differ(self): items = [] @@ -407,7 +404,7 @@ class AlbumDistanceTest(_common.TestCase): tracks=_make_trackinfo(), va=False, ) - self.assertNotEqual(self._dist(items, info), 0) + assert self._dist(items, info) != 0 def test_comp_track_artists_match(self): items = [] @@ -420,7 +417,7 @@ class AlbumDistanceTest(_common.TestCase): tracks=_make_trackinfo(), va=True, ) - self.assertEqual(self._dist(items, info), 0) + assert self._dist(items, info) == 0 def test_comp_no_track_artists(self): # Some VA releases don't have track artists (incomplete metadata). 
@@ -437,7 +434,7 @@ class AlbumDistanceTest(_common.TestCase): info.tracks[0].artist = None info.tracks[1].artist = None info.tracks[2].artist = None - self.assertEqual(self._dist(items, info), 0) + assert self._dist(items, info) == 0 def test_comp_track_artists_do_not_match(self): items = [] @@ -450,7 +447,7 @@ class AlbumDistanceTest(_common.TestCase): tracks=_make_trackinfo(), va=True, ) - self.assertNotEqual(self._dist(items, info), 0) + assert self._dist(items, info) != 0 def test_tracks_out_of_order(self): items = [] @@ -464,7 +461,7 @@ class AlbumDistanceTest(_common.TestCase): va=False, ) dist = self._dist(items, info) - self.assertTrue(0 < dist < 0.2) + assert 0 < dist < 0.2 def test_two_medium_release(self): items = [] @@ -481,7 +478,7 @@ class AlbumDistanceTest(_common.TestCase): info.tracks[1].medium_index = 2 info.tracks[2].medium_index = 1 dist = self._dist(items, info) - self.assertEqual(dist, 0) + assert dist == 0 def test_per_medium_track_numbers(self): items = [] @@ -498,106 +495,49 @@ class AlbumDistanceTest(_common.TestCase): info.tracks[1].medium_index = 2 info.tracks[2].medium_index = 1 dist = self._dist(items, info) - self.assertEqual(dist, 0) + assert dist == 0 -class AssignmentTest(unittest.TestCase): - def item(self, title, track): - return Item( - title=title, - track=track, - mb_trackid="", - mb_albumid="", - mb_artistid="", - ) +class TestAssignment(ConfigMixin): + A = "one" + B = "two" + C = "three" - def test_reorder_when_track_numbers_incorrect(self): - items = [] - items.append(self.item("one", 1)) - items.append(self.item("three", 2)) - items.append(self.item("two", 3)) - trackinfo = [] - trackinfo.append(TrackInfo(title="one")) - trackinfo.append(TrackInfo(title="two")) - trackinfo.append(TrackInfo(title="three")) - mapping, extra_items, extra_tracks = match.assign_items( - items, trackinfo - ) - self.assertEqual(extra_items, []) - self.assertEqual(extra_tracks, []) - self.assertEqual( - mapping, - { - items[0]: trackinfo[0], - items[1]: trackinfo[2], - items[2]: trackinfo[1], - }, - ) + @pytest.fixture(autouse=True) + def _setup_config(self): + self.config["match"]["track_length_grace"] = 10 + self.config["match"]["track_length_max"] = 30 - def test_order_works_with_invalid_track_numbers(self): - items = [] - items.append(self.item("one", 1)) - items.append(self.item("three", 1)) - items.append(self.item("two", 1)) - trackinfo = [] - trackinfo.append(TrackInfo(title="one")) - trackinfo.append(TrackInfo(title="two")) - trackinfo.append(TrackInfo(title="three")) - mapping, extra_items, extra_tracks = match.assign_items( - items, trackinfo - ) - self.assertEqual(extra_items, []) - self.assertEqual(extra_tracks, []) - self.assertEqual( - mapping, - { - items[0]: trackinfo[0], - items[1]: trackinfo[2], - items[2]: trackinfo[1], - }, - ) + @pytest.mark.parametrize( + # 'expected' is a tuple of expected (mapping, extra_items, extra_tracks) + "item_titles, track_titles, expected", + [ + # items ordering gets corrected + ([A, C, B], [A, B, C], ({A: A, B: B, C: C}, [], [])), + # unmatched tracks are returned as 'extra_tracks' + # the first track is unmatched + ([B, C], [A, B, C], ({B: B, C: C}, [], [A])), + # the middle track is unmatched + ([A, C], [A, B, C], ({A: A, C: C}, [], [B])), + # the last track is unmatched + ([A, B], [A, B, C], ({A: A, B: B}, [], [C])), + # unmatched items are returned as 'extra_items' + ([A, C, B], [A, C], ({A: A, C: C}, [B], [])), + ], + ) + def test_assign_tracks(self, item_titles, track_titles, expected): + expected_mapping, 
expected_extra_items, expected_extra_tracks = expected - def test_order_works_with_missing_tracks(self): - items = [] - items.append(self.item("one", 1)) - items.append(self.item("three", 3)) - trackinfo = [] - trackinfo.append(TrackInfo(title="one")) - trackinfo.append(TrackInfo(title="two")) - trackinfo.append(TrackInfo(title="three")) - mapping, extra_items, extra_tracks = match.assign_items( - items, trackinfo - ) - self.assertEqual(extra_items, []) - self.assertEqual(extra_tracks, [trackinfo[1]]) - self.assertEqual( - mapping, - { - items[0]: trackinfo[0], - items[1]: trackinfo[2], - }, - ) + items = [Item(title=title) for title in item_titles] + tracks = [TrackInfo(title=title) for title in track_titles] - def test_order_works_with_extra_tracks(self): - items = [] - items.append(self.item("one", 1)) - items.append(self.item("two", 2)) - items.append(self.item("three", 3)) - trackinfo = [] - trackinfo.append(TrackInfo(title="one")) - trackinfo.append(TrackInfo(title="three")) - mapping, extra_items, extra_tracks = match.assign_items( - items, trackinfo - ) - self.assertEqual(extra_items, [items[1]]) - self.assertEqual(extra_tracks, []) - self.assertEqual( - mapping, - { - items[0]: trackinfo[0], - items[2]: trackinfo[1], - }, - ) + mapping, extra_items, extra_tracks = match.assign_items(items, tracks) + + assert ( + {i.title: t.title for i, t in mapping.items()}, + [i.title for i in extra_items], + [t.title for t in extra_tracks], + ) == (expected_mapping, expected_extra_items, expected_extra_tracks) def test_order_works_when_track_names_are_entirely_wrong(self): # A real-world test case contributed by a user. @@ -608,9 +548,6 @@ class AssignmentTest(unittest.TestCase): title=f"ben harper - Burn to Shine {i}", track=i, length=length, - mb_trackid="", - mb_albumid="", - mb_artistid="", ) items = [] @@ -644,13 +581,9 @@ class AssignmentTest(unittest.TestCase): trackinfo.append(info(11, "Beloved One", 243.733)) trackinfo.append(info(12, "In the Lord's Arms", 186.13300000000001)) - mapping, extra_items, extra_tracks = match.assign_items( - items, trackinfo - ) - self.assertEqual(extra_items, []) - self.assertEqual(extra_tracks, []) - for item, info in mapping.items(): - self.assertEqual(items.index(item), trackinfo.index(info)) + expected = dict(zip(items, trackinfo)), [], [] + + assert match.assign_items(items, trackinfo) == expected class ApplyTestUtil: @@ -664,7 +597,7 @@ class ApplyTestUtil: autotag.apply_metadata(info, mapping) -class ApplyTest(_common.TestCase, ApplyTestUtil): +class ApplyTest(BeetsTestCase, ApplyTestUtil): def setUp(self): super().setUp() @@ -718,118 +651,107 @@ class ApplyTest(_common.TestCase, ApplyTestUtil): def test_titles_applied(self): self._apply() - self.assertEqual(self.items[0].title, "oneNew") - self.assertEqual(self.items[1].title, "twoNew") + assert self.items[0].title == "oneNew" + assert self.items[1].title == "twoNew" def test_album_and_artist_applied_to_all(self): self._apply() - self.assertEqual(self.items[0].album, "albumNew") - self.assertEqual(self.items[1].album, "albumNew") - self.assertEqual(self.items[0].artist, "artistNew") - self.assertEqual(self.items[1].artist, "artistNew") - self.assertEqual(self.items[0].artists, ["artistNew", "artistNew2"]) - self.assertEqual(self.items[1].artists, ["artistNew", "artistNew2"]) - self.assertEqual( - self.items[0].albumartists, ["artistNew", "artistNew2"] - ) - self.assertEqual( - self.items[1].albumartists, ["artistNew", "artistNew2"] - ) + assert self.items[0].album == "albumNew" + assert 
self.items[1].album == "albumNew" + assert self.items[0].artist == "artistNew" + assert self.items[1].artist == "artistNew" + assert self.items[0].artists == ["artistNew", "artistNew2"] + assert self.items[1].artists == ["artistNew", "artistNew2"] + assert self.items[0].albumartists == ["artistNew", "artistNew2"] + assert self.items[1].albumartists == ["artistNew", "artistNew2"] def test_track_index_applied(self): self._apply() - self.assertEqual(self.items[0].track, 1) - self.assertEqual(self.items[1].track, 2) + assert self.items[0].track == 1 + assert self.items[1].track == 2 def test_track_total_applied(self): self._apply() - self.assertEqual(self.items[0].tracktotal, 2) - self.assertEqual(self.items[1].tracktotal, 2) + assert self.items[0].tracktotal == 2 + assert self.items[1].tracktotal == 2 def test_disc_index_applied(self): self._apply() - self.assertEqual(self.items[0].disc, 1) - self.assertEqual(self.items[1].disc, 2) + assert self.items[0].disc == 1 + assert self.items[1].disc == 2 def test_disc_total_applied(self): self._apply() - self.assertEqual(self.items[0].disctotal, 2) - self.assertEqual(self.items[1].disctotal, 2) + assert self.items[0].disctotal == 2 + assert self.items[1].disctotal == 2 def test_per_disc_numbering(self): self._apply(per_disc_numbering=True) - self.assertEqual(self.items[0].track, 1) - self.assertEqual(self.items[1].track, 1) + assert self.items[0].track == 1 + assert self.items[1].track == 1 def test_per_disc_numbering_track_total(self): self._apply(per_disc_numbering=True) - self.assertEqual(self.items[0].tracktotal, 1) - self.assertEqual(self.items[1].tracktotal, 1) + assert self.items[0].tracktotal == 1 + assert self.items[1].tracktotal == 1 def test_artist_credit(self): self._apply(artist_credit=True) - self.assertEqual(self.items[0].artist, "trackArtistCredit") - self.assertEqual(self.items[1].artist, "albumArtistCredit") - self.assertEqual(self.items[0].albumartist, "albumArtistCredit") - self.assertEqual(self.items[1].albumartist, "albumArtistCredit") - self.assertEqual( - self.items[0].albumartists, - ["albumArtistCredit", "albumArtistCredit2"], - ) - self.assertEqual( - self.items[1].albumartists, - ["albumArtistCredit", "albumArtistCredit2"], - ) + assert self.items[0].artist == "trackArtistCredit" + assert self.items[1].artist == "albumArtistCredit" + assert self.items[0].albumartist == "albumArtistCredit" + assert self.items[1].albumartist == "albumArtistCredit" + assert self.items[0].albumartists == [ + "albumArtistCredit", + "albumArtistCredit2", + ] + assert self.items[1].albumartists == [ + "albumArtistCredit", + "albumArtistCredit2", + ] def test_artist_credit_prefers_artist_over_albumartist_credit(self): self.info.tracks[0].artist = "oldArtist" self.info.tracks[0].artist_credit = None self._apply(artist_credit=True) - self.assertEqual(self.items[0].artist, "oldArtist") + assert self.items[0].artist == "oldArtist" def test_artist_credit_falls_back_to_albumartist(self): self.info.artist_credit = None self._apply(artist_credit=True) - self.assertEqual(self.items[1].artist, "artistNew") + assert self.items[1].artist == "artistNew" def test_mb_trackid_applied(self): self._apply() - self.assertEqual( - self.items[0].mb_trackid, "dfa939ec-118c-4d0f-84a0-60f3d1e6522c" + assert ( + self.items[0].mb_trackid == "dfa939ec-118c-4d0f-84a0-60f3d1e6522c" ) - self.assertEqual( - self.items[1].mb_trackid, "40130ed1-a27c-42fd-a328-1ebefb6caef4" + assert ( + self.items[1].mb_trackid == "40130ed1-a27c-42fd-a328-1ebefb6caef4" ) def 
test_mb_albumid_and_artistid_applied(self): self._apply() for item in self.items: - self.assertEqual( - item.mb_albumid, "7edb51cb-77d6-4416-a23c-3a8c2994a2c7" - ) - self.assertEqual( - item.mb_artistid, "a6623d39-2d8e-4f70-8242-0a9553b91e50" - ) - self.assertEqual( - item.mb_artistids, - [ - "a6623d39-2d8e-4f70-8242-0a9553b91e50", - "a6623d39-2d8e-4f70-8242-0a9553b91e51", - ], - ) + assert item.mb_albumid == "7edb51cb-77d6-4416-a23c-3a8c2994a2c7" + assert item.mb_artistid == "a6623d39-2d8e-4f70-8242-0a9553b91e50" + assert item.mb_artistids == [ + "a6623d39-2d8e-4f70-8242-0a9553b91e50", + "a6623d39-2d8e-4f70-8242-0a9553b91e51", + ] def test_albumtype_applied(self): self._apply() - self.assertEqual(self.items[0].albumtype, "album") - self.assertEqual(self.items[1].albumtype, "album") + assert self.items[0].albumtype == "album" + assert self.items[1].albumtype == "album" def test_album_artist_overrides_empty_track_artist(self): my_info = self.info.copy() self._apply(info=my_info) - self.assertEqual(self.items[0].artist, "artistNew") - self.assertEqual(self.items[1].artist, "artistNew") - self.assertEqual(self.items[0].artists, ["artistNew", "artistNew2"]) - self.assertEqual(self.items[1].artists, ["artistNew", "artistNew2"]) + assert self.items[0].artist == "artistNew" + assert self.items[1].artist == "artistNew" + assert self.items[0].artists == ["artistNew", "artistNew2"] + assert self.items[1].artists == ["artistNew", "artistNew2"] def test_album_artist_overridden_by_nonempty_track_artist(self): my_info = self.info.copy() @@ -838,49 +760,50 @@ class ApplyTest(_common.TestCase, ApplyTestUtil): my_info.tracks[0].artists = ["artist1!", "artist1!!"] my_info.tracks[1].artists = ["artist2!", "artist2!!"] self._apply(info=my_info) - self.assertEqual(self.items[0].artist, "artist1!") - self.assertEqual(self.items[1].artist, "artist2!") - self.assertEqual(self.items[0].artists, ["artist1!", "artist1!!"]) - self.assertEqual(self.items[1].artists, ["artist2!", "artist2!!"]) + assert self.items[0].artist == "artist1!" + assert self.items[1].artist == "artist2!" 
+ assert self.items[0].artists == ["artist1!", "artist1!!"] + assert self.items[1].artists == ["artist2!", "artist2!!"] def test_artist_credit_applied(self): self._apply() - self.assertEqual(self.items[0].albumartist_credit, "albumArtistCredit") - self.assertEqual( - self.items[0].albumartists_credit, - ["albumArtistCredit", "albumArtistCredit2"], - ) - self.assertEqual(self.items[0].artist_credit, "trackArtistCredit") - self.assertEqual(self.items[0].artists_credit, ["trackArtistCredit"]) - self.assertEqual(self.items[1].albumartist_credit, "albumArtistCredit") - self.assertEqual( - self.items[1].albumartists_credit, - ["albumArtistCredit", "albumArtistCredit2"], - ) - self.assertEqual(self.items[1].artist_credit, "albumArtistCredit") - self.assertEqual( - self.items[1].artists_credit, - ["albumArtistCredit", "albumArtistCredit2"], - ) + assert self.items[0].albumartist_credit == "albumArtistCredit" + assert self.items[0].albumartists_credit == [ + "albumArtistCredit", + "albumArtistCredit2", + ] + assert self.items[0].artist_credit == "trackArtistCredit" + assert self.items[0].artists_credit == ["trackArtistCredit"] + assert self.items[1].albumartist_credit == "albumArtistCredit" + assert self.items[1].albumartists_credit == [ + "albumArtistCredit", + "albumArtistCredit2", + ] + assert self.items[1].artist_credit == "albumArtistCredit" + assert self.items[1].artists_credit == [ + "albumArtistCredit", + "albumArtistCredit2", + ] def test_artist_sort_applied(self): self._apply() - self.assertEqual(self.items[0].albumartist_sort, "albumArtistSort") - self.assertEqual( - self.items[0].albumartists_sort, - ["albumArtistSort", "albumArtistSort2"], - ) - self.assertEqual(self.items[0].artist_sort, "trackArtistSort") - self.assertEqual(self.items[0].artists_sort, ["trackArtistSort"]) - self.assertEqual(self.items[1].albumartist_sort, "albumArtistSort") - self.assertEqual( - self.items[1].albumartists_sort, - ["albumArtistSort", "albumArtistSort2"], - ) - self.assertEqual(self.items[1].artist_sort, "albumArtistSort") - self.assertEqual( - self.items[1].artists_sort, ["albumArtistSort", "albumArtistSort2"] - ) + assert self.items[0].albumartist_sort == "albumArtistSort" + assert self.items[0].albumartists_sort == [ + "albumArtistSort", + "albumArtistSort2", + ] + assert self.items[0].artist_sort == "trackArtistSort" + assert self.items[0].artists_sort == ["trackArtistSort"] + assert self.items[1].albumartist_sort == "albumArtistSort" + assert self.items[1].albumartists_sort == [ + "albumArtistSort", + "albumArtistSort2", + ] + assert self.items[1].artist_sort == "albumArtistSort" + assert self.items[1].artists_sort == [ + "albumArtistSort", + "albumArtistSort2", + ] def test_full_date_applied(self): my_info = self.info.copy() @@ -889,9 +812,9 @@ class ApplyTest(_common.TestCase, ApplyTestUtil): my_info.day = 18 self._apply(info=my_info) - self.assertEqual(self.items[0].year, 2013) - self.assertEqual(self.items[0].month, 12) - self.assertEqual(self.items[0].day, 18) + assert self.items[0].year == 2013 + assert self.items[0].month == 12 + assert self.items[0].day == 18 def test_date_only_zeros_month_and_day(self): self.items = [] @@ -902,9 +825,9 @@ class ApplyTest(_common.TestCase, ApplyTestUtil): my_info.year = 2013 self._apply(info=my_info) - self.assertEqual(self.items[0].year, 2013) - self.assertEqual(self.items[0].month, 0) - self.assertEqual(self.items[0].day, 0) + assert self.items[0].year == 2013 + assert self.items[0].month == 0 + assert self.items[0].day == 0 def 
test_missing_date_applies_nothing(self): self.items = [] @@ -913,19 +836,19 @@ class ApplyTest(_common.TestCase, ApplyTestUtil): self._apply() - self.assertEqual(self.items[0].year, 1) - self.assertEqual(self.items[0].month, 2) - self.assertEqual(self.items[0].day, 3) + assert self.items[0].year == 1 + assert self.items[0].month == 2 + assert self.items[0].day == 3 def test_data_source_applied(self): my_info = self.info.copy() my_info.data_source = "MusicBrainz" self._apply(info=my_info) - self.assertEqual(self.items[0].data_source, "MusicBrainz") + assert self.items[0].data_source == "MusicBrainz" -class ApplyCompilationTest(_common.TestCase, ApplyTestUtil): +class ApplyCompilationTest(BeetsTestCase, ApplyTestUtil): def setUp(self): super().setUp() @@ -962,97 +885,97 @@ class ApplyCompilationTest(_common.TestCase, ApplyTestUtil): def test_album_and_track_artists_separate(self): self._apply() - self.assertEqual(self.items[0].artist, "artistOneNew") - self.assertEqual(self.items[1].artist, "artistTwoNew") - self.assertEqual(self.items[0].albumartist, "variousNew") - self.assertEqual(self.items[1].albumartist, "variousNew") + assert self.items[0].artist == "artistOneNew" + assert self.items[1].artist == "artistTwoNew" + assert self.items[0].albumartist == "variousNew" + assert self.items[1].albumartist == "variousNew" def test_mb_albumartistid_applied(self): self._apply() - self.assertEqual( - self.items[0].mb_albumartistid, - "89ad4ac3-39f7-470e-963a-56509c546377", + assert ( + self.items[0].mb_albumartistid + == "89ad4ac3-39f7-470e-963a-56509c546377" ) - self.assertEqual( - self.items[1].mb_albumartistid, - "89ad4ac3-39f7-470e-963a-56509c546377", + assert ( + self.items[1].mb_albumartistid + == "89ad4ac3-39f7-470e-963a-56509c546377" ) - self.assertEqual( - self.items[0].mb_artistid, "a05686fc-9db2-4c23-b99e-77f5db3e5282" + assert ( + self.items[0].mb_artistid == "a05686fc-9db2-4c23-b99e-77f5db3e5282" ) - self.assertEqual( - self.items[1].mb_artistid, "80b3cf5e-18fe-4c59-98c7-e5bb87210710" + assert ( + self.items[1].mb_artistid == "80b3cf5e-18fe-4c59-98c7-e5bb87210710" ) def test_va_flag_cleared_does_not_set_comp(self): self._apply() - self.assertFalse(self.items[0].comp) - self.assertFalse(self.items[1].comp) + assert not self.items[0].comp + assert not self.items[1].comp def test_va_flag_sets_comp(self): va_info = self.info.copy() va_info.va = True self._apply(info=va_info) - self.assertTrue(self.items[0].comp) - self.assertTrue(self.items[1].comp) + assert self.items[0].comp + assert self.items[1].comp class StringDistanceTest(unittest.TestCase): def test_equal_strings(self): dist = string_dist("Some String", "Some String") - self.assertEqual(dist, 0.0) + assert dist == 0.0 def test_different_strings(self): dist = string_dist("Some String", "Totally Different") - self.assertNotEqual(dist, 0.0) + assert dist != 0.0 def test_punctuation_ignored(self): dist = string_dist("Some String", "Some.String!") - self.assertEqual(dist, 0.0) + assert dist == 0.0 def test_case_ignored(self): dist = string_dist("Some String", "sOME sTring") - self.assertEqual(dist, 0.0) + assert dist == 0.0 def test_leading_the_has_lower_weight(self): dist1 = string_dist("XXX Band Name", "Band Name") dist2 = string_dist("The Band Name", "Band Name") - self.assertLess(dist2, dist1) + assert dist2 < dist1 def test_parens_have_lower_weight(self): dist1 = string_dist("One .Two.", "One") dist2 = string_dist("One (Two)", "One") - self.assertLess(dist2, dist1) + assert dist2 < dist1 def test_brackets_have_lower_weight(self): 
dist1 = string_dist("One .Two.", "One") dist2 = string_dist("One [Two]", "One") - self.assertLess(dist2, dist1) + assert dist2 < dist1 def test_ep_label_has_zero_weight(self): dist = string_dist("My Song (EP)", "My Song") - self.assertEqual(dist, 0.0) + assert dist == 0.0 def test_featured_has_lower_weight(self): dist1 = string_dist("My Song blah Someone", "My Song") dist2 = string_dist("My Song feat Someone", "My Song") - self.assertLess(dist2, dist1) + assert dist2 < dist1 def test_postfix_the(self): dist = string_dist("The Song Title", "Song Title, The") - self.assertEqual(dist, 0.0) + assert dist == 0.0 def test_postfix_a(self): dist = string_dist("A Song Title", "Song Title, A") - self.assertEqual(dist, 0.0) + assert dist == 0.0 def test_postfix_an(self): dist = string_dist("An Album Title", "Album Title, An") - self.assertEqual(dist, 0.0) + assert dist == 0.0 def test_empty_strings(self): dist = string_dist("", "") - self.assertEqual(dist, 0.0) + assert dist == 0.0 def test_solo_pattern(self): # Just make sure these don't crash. @@ -1062,37 +985,45 @@ class StringDistanceTest(unittest.TestCase): def test_heuristic_does_not_harm_distance(self): dist = string_dist("Untitled", "[Untitled]") - self.assertEqual(dist, 0.0) + assert dist == 0.0 def test_ampersand_expansion(self): dist = string_dist("And", "&") - self.assertEqual(dist, 0.0) + assert dist == 0.0 def test_accented_characters(self): dist = string_dist("\xe9\xe1\xf1", "ean") - self.assertEqual(dist, 0.0) + assert dist == 0.0 -class EnumTest(_common.TestCase): - """ - Test Enum Subclasses defined in beets.util.enumeration - """ +@pytest.mark.parametrize( + "single_field,list_field", + [ + ("mb_artistid", "mb_artistids"), + ("mb_albumartistid", "mb_albumartistids"), + ("albumtype", "albumtypes"), + ], +) +@pytest.mark.parametrize( + "single_value,list_value", + [ + (None, []), + (None, ["1"]), + (None, ["1", "2"]), + ("1", []), + ("1", ["1"]), + ("1", ["1", "2"]), + ("1", ["2", "1"]), + ], +) +def test_correct_list_fields( + single_field, list_field, single_value, list_value +): + """Ensure that the first value in a list field matches the single field.""" + data = {single_field: single_value, list_field: list_value} + item = Item(**data) - def test_ordered_enum(self): - OrderedEnumClass = match.OrderedEnum( # noqa - "OrderedEnumTest", ["a", "b", "c"] - ) - self.assertLess(OrderedEnumClass.a, OrderedEnumClass.b) - self.assertLess(OrderedEnumClass.a, OrderedEnumClass.c) - self.assertLess(OrderedEnumClass.b, OrderedEnumClass.c) - self.assertGreater(OrderedEnumClass.b, OrderedEnumClass.a) - self.assertGreater(OrderedEnumClass.c, OrderedEnumClass.a) - self.assertGreater(OrderedEnumClass.c, OrderedEnumClass.b) + correct_list_fields(item) - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + single_val, list_val = item[single_field], item[list_field] + assert (not single_val and not list_val) or single_val == list_val[0] diff --git a/test/test_config_command.py b/test/test_config_command.py index 0b122cf1c..b68c4f042 100644 --- a/test/test_config_command.py +++ b/test/test_config_command.py @@ -1,16 +1,16 @@ import os -import unittest from unittest.mock import patch +import pytest import yaml from beets import config, ui -from beets.test.helper import TestHelper +from beets.test.helper import BeetsTestCase -class ConfigCommandTest(unittest.TestCase, TestHelper): +class ConfigCommandTest(BeetsTestCase): def setUp(self): - self.setup_beets() + 
super().setUp() for k in ("VISUAL", "EDITOR"): if k in os.environ: del os.environ[k] @@ -31,9 +31,6 @@ class ConfigCommandTest(unittest.TestCase, TestHelper): config["password"].redact = True config._materialized = False - def tearDown(self): - self.teardown_beets() - def _run_with_yaml_output(self, *args): output = self.run_with_output(*args) return yaml.safe_load(output) @@ -41,52 +38,52 @@ class ConfigCommandTest(unittest.TestCase, TestHelper): def test_show_user_config(self): output = self._run_with_yaml_output("config", "-c") - self.assertEqual(output["option"], "value") - self.assertEqual(output["password"], "password_value") + assert output["option"] == "value" + assert output["password"] == "password_value" def test_show_user_config_with_defaults(self): output = self._run_with_yaml_output("config", "-dc") - self.assertEqual(output["option"], "value") - self.assertEqual(output["password"], "password_value") - self.assertEqual(output["library"], "lib") - self.assertFalse(output["import"]["timid"]) + assert output["option"] == "value" + assert output["password"] == "password_value" + assert output["library"] == "lib" + assert not output["import"]["timid"] def test_show_user_config_with_cli(self): output = self._run_with_yaml_output( "--config", self.cli_config_path, "config" ) - self.assertEqual(output["library"], "lib") - self.assertEqual(output["option"], "cli overwrite") + assert output["library"] == "lib" + assert output["option"] == "cli overwrite" def test_show_redacted_user_config(self): output = self._run_with_yaml_output("config") - self.assertEqual(output["option"], "value") - self.assertEqual(output["password"], "REDACTED") + assert output["option"] == "value" + assert output["password"] == "REDACTED" def test_show_redacted_user_config_with_defaults(self): output = self._run_with_yaml_output("config", "-d") - self.assertEqual(output["option"], "value") - self.assertEqual(output["password"], "REDACTED") - self.assertFalse(output["import"]["timid"]) + assert output["option"] == "value" + assert output["password"] == "REDACTED" + assert not output["import"]["timid"] def test_config_paths(self): output = self.run_with_output("config", "-p") paths = output.split("\n") - self.assertEqual(len(paths), 2) - self.assertEqual(paths[0], self.config_path) + assert len(paths) == 2 + assert paths[0] == self.config_path def test_config_paths_with_cli(self): output = self.run_with_output( "--config", self.cli_config_path, "config", "-p" ) paths = output.split("\n") - self.assertEqual(len(paths), 3) - self.assertEqual(paths[0], self.cli_config_path) + assert len(paths) == 3 + assert paths[0] == self.cli_config_path def test_edit_config_with_visual_or_editor_env(self): os.environ["EDITOR"] = "myeditor" @@ -114,12 +111,12 @@ class ConfigCommandTest(unittest.TestCase, TestHelper): ) def test_config_editor_not_found(self): - with self.assertRaises(ui.UserError) as user_error: - with patch("os.execlp") as execlp: - execlp.side_effect = OSError("here is problem") - self.run_command("config", "-e") - self.assertIn("Could not edit configuration", str(user_error.exception)) - self.assertIn("here is problem", str(user_error.exception)) + msg_match = "Could not edit configuration.*here is problem" + with ( + patch("os.execlp", side_effect=OSError("here is problem")), + pytest.raises(ui.UserError, match=msg_match), + ): + self.run_command("config", "-e") def test_edit_invalid_config_file(self): with open(self.config_path, "w") as file: @@ -131,11 +128,3 @@ class ConfigCommandTest(unittest.TestCase, 
TestHelper): with patch("os.execlp") as execlp: self.run_command("config", "-e") execlp.assert_called_once_with("myeditor", "myeditor", self.config_path) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/test_datequery.py b/test/test_datequery.py index 2b666f0d1..9c968e998 100644 --- a/test/test_datequery.py +++ b/test/test_datequery.py @@ -12,20 +12,21 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Test for dbcore's date-based queries. -""" +"""Test for dbcore's date-based queries.""" import time import unittest from datetime import datetime, timedelta +import pytest + from beets.dbcore.query import ( DateInterval, DateQuery, InvalidQueryArgumentValueError, _parse_periods, ) -from beets.test import _common +from beets.test.helper import ItemInDBTestCase def _date(string): @@ -132,27 +133,25 @@ class DateIntervalTest(unittest.TestCase): self.assertContains("..", date=datetime.min) self.assertContains("..", "1000-01-01T00:00:00") - def assertContains( # noqa - self, interval_pattern, date_pattern=None, date=None - ): + def assertContains(self, interval_pattern, date_pattern=None, date=None): if date is None: date = _date(date_pattern) (start, end) = _parse_periods(interval_pattern) interval = DateInterval.from_periods(start, end) - self.assertTrue(interval.contains(date)) + assert interval.contains(date) - def assertExcludes(self, interval_pattern, date_pattern): # noqa + def assertExcludes(self, interval_pattern, date_pattern): date = _date(date_pattern) (start, end) = _parse_periods(interval_pattern) interval = DateInterval.from_periods(start, end) - self.assertFalse(interval.contains(date)) + assert not interval.contains(date) def _parsetime(s): return time.mktime(datetime.strptime(s, "%Y-%m-%d %H:%M").timetuple()) -class DateQueryTest(_common.LibTestCase): +class DateQueryTest(ItemInDBTestCase): def setUp(self): super().setUp() self.i.added = _parsetime("2013-03-30 22:21") @@ -161,33 +160,33 @@ class DateQueryTest(_common.LibTestCase): def test_single_month_match_fast(self): query = DateQuery("added", "2013-03") matched = self.lib.items(query) - self.assertEqual(len(matched), 1) + assert len(matched) == 1 def test_single_month_nonmatch_fast(self): query = DateQuery("added", "2013-04") matched = self.lib.items(query) - self.assertEqual(len(matched), 0) + assert len(matched) == 0 def test_single_month_match_slow(self): query = DateQuery("added", "2013-03") - self.assertTrue(query.match(self.i)) + assert query.match(self.i) def test_single_month_nonmatch_slow(self): query = DateQuery("added", "2013-04") - self.assertFalse(query.match(self.i)) + assert not query.match(self.i) def test_single_day_match_fast(self): query = DateQuery("added", "2013-03-30") matched = self.lib.items(query) - self.assertEqual(len(matched), 1) + assert len(matched) == 1 def test_single_day_nonmatch_fast(self): query = DateQuery("added", "2013-03-31") matched = self.lib.items(query) - self.assertEqual(len(matched), 0) + assert len(matched) == 0 -class DateQueryTestRelative(_common.LibTestCase): +class DateQueryTestRelative(ItemInDBTestCase): def setUp(self): super().setUp() @@ -201,39 +200,39 @@ class DateQueryTestRelative(_common.LibTestCase): def test_single_month_match_fast(self): query = DateQuery("added", self._now.strftime("%Y-%m")) matched = self.lib.items(query) - self.assertEqual(len(matched), 1) + assert 
len(matched) == 1 def test_single_month_nonmatch_fast(self): query = DateQuery( "added", (self._now + timedelta(days=30)).strftime("%Y-%m") ) matched = self.lib.items(query) - self.assertEqual(len(matched), 0) + assert len(matched) == 0 def test_single_month_match_slow(self): query = DateQuery("added", self._now.strftime("%Y-%m")) - self.assertTrue(query.match(self.i)) + assert query.match(self.i) def test_single_month_nonmatch_slow(self): query = DateQuery( "added", (self._now + timedelta(days=30)).strftime("%Y-%m") ) - self.assertFalse(query.match(self.i)) + assert not query.match(self.i) def test_single_day_match_fast(self): query = DateQuery("added", self._now.strftime("%Y-%m-%d")) matched = self.lib.items(query) - self.assertEqual(len(matched), 1) + assert len(matched) == 1 def test_single_day_nonmatch_fast(self): query = DateQuery( "added", (self._now + timedelta(days=1)).strftime("%Y-%m-%d") ) matched = self.lib.items(query) - self.assertEqual(len(matched), 0) + assert len(matched) == 0 -class DateQueryTestRelativeMore(_common.LibTestCase): +class DateQueryTestRelativeMore(ItemInDBTestCase): def setUp(self): super().setUp() self.i.added = _parsetime(datetime.now().strftime("%Y-%m-%d %H:%M")) @@ -243,46 +242,46 @@ class DateQueryTestRelativeMore(_common.LibTestCase): for timespan in ["d", "w", "m", "y"]: query = DateQuery("added", "-4" + timespan + "..+4" + timespan) matched = self.lib.items(query) - self.assertEqual(len(matched), 1) + assert len(matched) == 1 def test_relative_fail(self): for timespan in ["d", "w", "m", "y"]: query = DateQuery("added", "-2" + timespan + "..-1" + timespan) matched = self.lib.items(query) - self.assertEqual(len(matched), 0) + assert len(matched) == 0 def test_start_relative(self): for timespan in ["d", "w", "m", "y"]: query = DateQuery("added", "-4" + timespan + "..") matched = self.lib.items(query) - self.assertEqual(len(matched), 1) + assert len(matched) == 1 def test_start_relative_fail(self): for timespan in ["d", "w", "m", "y"]: query = DateQuery("added", "4" + timespan + "..") matched = self.lib.items(query) - self.assertEqual(len(matched), 0) + assert len(matched) == 0 def test_end_relative(self): for timespan in ["d", "w", "m", "y"]: query = DateQuery("added", "..+4" + timespan) matched = self.lib.items(query) - self.assertEqual(len(matched), 1) + assert len(matched) == 1 def test_end_relative_fail(self): for timespan in ["d", "w", "m", "y"]: query = DateQuery("added", "..-4" + timespan) matched = self.lib.items(query) - self.assertEqual(len(matched), 0) + assert len(matched) == 0 class DateQueryConstructTest(unittest.TestCase): def test_long_numbers(self): - with self.assertRaises(InvalidQueryArgumentValueError): + with pytest.raises(InvalidQueryArgumentValueError): DateQuery("added", "1409830085..1412422089") def test_too_many_components(self): - with self.assertRaises(InvalidQueryArgumentValueError): + with pytest.raises(InvalidQueryArgumentValueError): DateQuery("added", "12-34-56-78") def test_invalid_date_query(self): @@ -297,32 +296,24 @@ class DateQueryConstructTest(unittest.TestCase): "..2aa", ] for q in q_list: - with self.assertRaises(InvalidQueryArgumentValueError): + with pytest.raises(InvalidQueryArgumentValueError): DateQuery("added", q) def test_datetime_uppercase_t_separator(self): date_query = DateQuery("added", "2000-01-01T12") - self.assertEqual(date_query.interval.start, datetime(2000, 1, 1, 12)) - self.assertEqual(date_query.interval.end, datetime(2000, 1, 1, 13)) + assert date_query.interval.start == datetime(2000, 1, 1, 
12) + assert date_query.interval.end == datetime(2000, 1, 1, 13) def test_datetime_lowercase_t_separator(self): date_query = DateQuery("added", "2000-01-01t12") - self.assertEqual(date_query.interval.start, datetime(2000, 1, 1, 12)) - self.assertEqual(date_query.interval.end, datetime(2000, 1, 1, 13)) + assert date_query.interval.start == datetime(2000, 1, 1, 12) + assert date_query.interval.end == datetime(2000, 1, 1, 13) def test_datetime_space_separator(self): date_query = DateQuery("added", "2000-01-01 12") - self.assertEqual(date_query.interval.start, datetime(2000, 1, 1, 12)) - self.assertEqual(date_query.interval.end, datetime(2000, 1, 1, 13)) + assert date_query.interval.start == datetime(2000, 1, 1, 12) + assert date_query.interval.end == datetime(2000, 1, 1, 13) def test_datetime_invalid_separator(self): - with self.assertRaises(InvalidQueryArgumentValueError): + with pytest.raises(InvalidQueryArgumentValueError): DateQuery("added", "2000-01-01x12") - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/test_dbcore.py b/test/test_dbcore.py index 763601b7f..ba2b84ad2 100644 --- a/test/test_dbcore.py +++ b/test/test_dbcore.py @@ -20,6 +20,8 @@ import sqlite3 import unittest from tempfile import mkstemp +import pytest + from beets import dbcore from beets.test import _common @@ -190,7 +192,7 @@ class MigrationTest(unittest.TestCase): c.execute("select * from test") row = c.fetchone() c.connection.close() - self.assertEqual(len(row.keys()), len(ModelFixture2._fields)) + assert len(row.keys()) == len(ModelFixture2._fields) def test_open_with_new_field_adds_column(self): new_lib = DatabaseFixture3(self.libfile) @@ -198,7 +200,7 @@ class MigrationTest(unittest.TestCase): c.execute("select * from test") row = c.fetchone() c.connection.close() - self.assertEqual(len(row.keys()), len(ModelFixture3._fields)) + assert len(row.keys()) == len(ModelFixture3._fields) def test_open_with_fewer_fields_leaves_untouched(self): new_lib = DatabaseFixture1(self.libfile) @@ -206,7 +208,7 @@ class MigrationTest(unittest.TestCase): c.execute("select * from test") row = c.fetchone() c.connection.close() - self.assertEqual(len(row.keys()), len(ModelFixture2._fields)) + assert len(row.keys()) == len(ModelFixture2._fields) def test_open_with_multiple_new_fields(self): new_lib = DatabaseFixture4(self.libfile) @@ -214,7 +216,7 @@ class MigrationTest(unittest.TestCase): c.execute("select * from test") row = c.fetchone() c.connection.close() - self.assertEqual(len(row.keys()), len(ModelFixture4._fields)) + assert len(row.keys()) == len(ModelFixture4._fields) def test_extra_model_adds_table(self): new_lib = DatabaseFixtureTwoModels(self.libfile) @@ -237,18 +239,16 @@ class TransactionTest(unittest.TestCase): old_rev = self.db.revision with self.db.transaction() as tx: tx.mutate( - "INSERT INTO {} " - "(field_one) " - "VALUES (?);".format(ModelFixture1._table), + f"INSERT INTO {ModelFixture1._table} (field_one) VALUES (?);", (111,), ) - self.assertGreater(self.db.revision, old_rev) + assert self.db.revision > old_rev def test_query_no_increase_revision(self): old_rev = self.db.revision with self.db.transaction() as tx: tx.query("PRAGMA table_info(%s)" % ModelFixture1._table) - self.assertEqual(self.db.revision, old_rev) + assert self.db.revision == old_rev class ModelTest(unittest.TestCase): @@ -262,7 +262,7 @@ class ModelTest(unittest.TestCase): model = ModelFixture1() model.add(self.db) rows = 
self.db._connection().execute("select * from test").fetchall() - self.assertEqual(len(rows), 1) + assert len(rows) == 1 def test_store_fixed_field(self): model = ModelFixture1() @@ -270,37 +270,37 @@ class ModelTest(unittest.TestCase): model.field_one = 123 model.store() row = self.db._connection().execute("select * from test").fetchone() - self.assertEqual(row["field_one"], 123) + assert row["field_one"] == 123 def test_revision(self): old_rev = self.db.revision model = ModelFixture1() model.add(self.db) model.store() - self.assertEqual(model._revision, self.db.revision) - self.assertGreater(self.db.revision, old_rev) + assert model._revision == self.db.revision + assert self.db.revision > old_rev mid_rev = self.db.revision model2 = ModelFixture1() model2.add(self.db) model2.store() - self.assertGreater(model2._revision, mid_rev) - self.assertGreater(self.db.revision, model._revision) + assert model2._revision > mid_rev + assert self.db.revision > model._revision # revision changed, so the model should be re-loaded model.load() - self.assertEqual(model._revision, self.db.revision) + assert model._revision == self.db.revision # revision did not change, so no reload mod2_old_rev = model2._revision model2.load() - self.assertEqual(model2._revision, mod2_old_rev) + assert model2._revision == mod2_old_rev def test_retrieve_by_id(self): model = ModelFixture1() model.add(self.db) other_model = self.db._get(ModelFixture1, model.id) - self.assertEqual(model.id, other_model.id) + assert model.id == other_model.id def test_store_and_retrieve_flexattr(self): model = ModelFixture1() @@ -309,21 +309,21 @@ class ModelTest(unittest.TestCase): model.store() other_model = self.db._get(ModelFixture1, model.id) - self.assertEqual(other_model.foo, "bar") + assert other_model.foo == "bar" def test_delete_flexattr(self): model = ModelFixture1() model["foo"] = "bar" - self.assertIn("foo", model) + assert "foo" in model del model["foo"] - self.assertNotIn("foo", model) + assert "foo" not in model def test_delete_flexattr_via_dot(self): model = ModelFixture1() model["foo"] = "bar" - self.assertIn("foo", model) + assert "foo" in model del model.foo - self.assertNotIn("foo", model) + assert "foo" not in model def test_delete_flexattr_persists(self): model = ModelFixture1() @@ -336,11 +336,11 @@ class ModelTest(unittest.TestCase): model.store() model = self.db._get(ModelFixture1, model.id) - self.assertNotIn("foo", model) + assert "foo" not in model def test_delete_non_existent_attribute(self): model = ModelFixture1() - with self.assertRaises(KeyError): + with pytest.raises(KeyError): del model["foo"] def test_delete_fixed_attribute(self): @@ -350,26 +350,26 @@ class ModelTest(unittest.TestCase): model.some_boolean_field = True for field, type_ in model._fields.items(): - self.assertNotEqual(model[field], type_.null) + assert model[field] != type_.null for field, type_ in model._fields.items(): del model[field] - self.assertEqual(model[field], type_.null) + assert model[field] == type_.null def test_null_value_normalization_by_type(self): model = ModelFixture1() model.field_one = None - self.assertEqual(model.field_one, 0) + assert model.field_one == 0 def test_null_value_stays_none_for_untyped_field(self): model = ModelFixture1() model.foo = None - self.assertIsNone(model.foo) + assert model.foo is None def test_normalization_for_typed_flex_fields(self): model = ModelFixture1() model.some_float_field = None - self.assertEqual(model.some_float_field, 0.0) + assert model.some_float_field == 0.0 def 
test_load_deleted_flex_field(self): model1 = ModelFixture1() @@ -377,47 +377,47 @@ class ModelTest(unittest.TestCase): model1.add(self.db) model2 = self.db._get(ModelFixture1, model1.id) - self.assertIn("flex_field", model2) + assert "flex_field" in model2 del model1["flex_field"] model1.store() model2.load() - self.assertNotIn("flex_field", model2) + assert "flex_field" not in model2 def test_check_db_fails(self): - with self.assertRaisesRegex(ValueError, "no database"): + with pytest.raises(ValueError, match="no database"): dbcore.Model()._check_db() - with self.assertRaisesRegex(ValueError, "no id"): + with pytest.raises(ValueError, match="no id"): ModelFixture1(self.db)._check_db() dbcore.Model(self.db)._check_db(need_id=False) def test_missing_field(self): - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): ModelFixture1(self.db).nonExistingKey def test_computed_field(self): model = ModelFixtureWithGetters() - self.assertEqual(model.aComputedField, "thing") - with self.assertRaisesRegex(KeyError, "computed field .+ deleted"): + assert model.aComputedField == "thing" + with pytest.raises(KeyError, match="computed field .+ deleted"): del model.aComputedField def test_items(self): model = ModelFixture1(self.db) model.id = 5 - self.assertEqual( - {("id", 5), ("field_one", 0), ("field_two", "")}, set(model.items()) + assert {("id", 5), ("field_one", 0), ("field_two", "")} == set( + model.items() ) def test_delete_internal_field(self): model = dbcore.Model() del model._db - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): model._db def test_parse_nonstring(self): - with self.assertRaisesRegex(TypeError, "must be a string"): + with pytest.raises(TypeError, match="must be a string"): dbcore.Model._parse(None, 42) @@ -426,87 +426,87 @@ class FormatTest(unittest.TestCase): model = ModelFixture1() model.field_one = 155 value = model.formatted().get("field_one") - self.assertEqual(value, "155") + assert value == "155" def test_format_fixed_field_integer_normalized(self): """The normalize method of the Integer class rounds floats""" model = ModelFixture1() model.field_one = 142.432 value = model.formatted().get("field_one") - self.assertEqual(value, "142") + assert value == "142" model.field_one = 142.863 value = model.formatted().get("field_one") - self.assertEqual(value, "143") + assert value == "143" def test_format_fixed_field_string(self): model = ModelFixture1() model.field_two = "caf\xe9" value = model.formatted().get("field_two") - self.assertEqual(value, "caf\xe9") + assert value == "caf\xe9" def test_format_flex_field(self): model = ModelFixture1() model.other_field = "caf\xe9" value = model.formatted().get("other_field") - self.assertEqual(value, "caf\xe9") + assert value == "caf\xe9" def test_format_flex_field_bytes(self): model = ModelFixture1() model.other_field = "caf\xe9".encode() value = model.formatted().get("other_field") - self.assertTrue(isinstance(value, str)) - self.assertEqual(value, "caf\xe9") + assert isinstance(value, str) + assert value == "caf\xe9" def test_format_unset_field(self): model = ModelFixture1() value = model.formatted().get("other_field") - self.assertEqual(value, "") + assert value == "" def test_format_typed_flex_field(self): model = ModelFixture1() model.some_float_field = 3.14159265358979 value = model.formatted().get("some_float_field") - self.assertEqual(value, "3.1") + assert value == "3.1" class FormattedMappingTest(unittest.TestCase): def test_keys_equal_model_keys(self): model = 
ModelFixture1() formatted = model.formatted() - self.assertEqual(set(model.keys(True)), set(formatted.keys())) + assert set(model.keys(True)) == set(formatted.keys()) def test_get_unset_field(self): model = ModelFixture1() formatted = model.formatted() - with self.assertRaises(KeyError): + with pytest.raises(KeyError): formatted["other_field"] def test_get_method_with_default(self): model = ModelFixture1() formatted = model.formatted() - self.assertEqual(formatted.get("other_field"), "") + assert formatted.get("other_field") == "" def test_get_method_with_specified_default(self): model = ModelFixture1() formatted = model.formatted() - self.assertEqual(formatted.get("other_field", "default"), "default") + assert formatted.get("other_field", "default") == "default" class ParseTest(unittest.TestCase): def test_parse_fixed_field(self): value = ModelFixture1._parse("field_one", "2") - self.assertIsInstance(value, int) - self.assertEqual(value, 2) + assert isinstance(value, int) + assert value == 2 def test_parse_flex_field(self): value = ModelFixture1._parse("some_float_field", "2") - self.assertIsInstance(value, float) - self.assertEqual(value, 2.0) + assert isinstance(value, float) + assert value == 2.0 def test_parse_untyped_field(self): value = ModelFixture1._parse("field_nine", "2") - self.assertEqual(value, "2") + assert value == "2" class QueryParseTest(unittest.TestCase): @@ -515,59 +515,57 @@ class QueryParseTest(unittest.TestCase): part, {"year": dbcore.query.NumericQuery}, {":": dbcore.query.RegexpQuery}, - )[ - :-1 - ] # remove the negate flag + )[:-1] # remove the negate flag def test_one_basic_term(self): q = "test" r = (None, "test", dbcore.query.SubstringQuery) - self.assertEqual(self.pqp(q), r) + assert self.pqp(q) == r def test_one_keyed_term(self): q = "test:val" r = ("test", "val", dbcore.query.SubstringQuery) - self.assertEqual(self.pqp(q), r) + assert self.pqp(q) == r def test_colon_at_end(self): q = "test:" r = ("test", "", dbcore.query.SubstringQuery) - self.assertEqual(self.pqp(q), r) + assert self.pqp(q) == r def test_one_basic_regexp(self): q = r":regexp" r = (None, "regexp", dbcore.query.RegexpQuery) - self.assertEqual(self.pqp(q), r) + assert self.pqp(q) == r def test_keyed_regexp(self): q = r"test::regexp" r = ("test", "regexp", dbcore.query.RegexpQuery) - self.assertEqual(self.pqp(q), r) + assert self.pqp(q) == r def test_escaped_colon(self): q = r"test\:val" r = (None, "test:val", dbcore.query.SubstringQuery) - self.assertEqual(self.pqp(q), r) + assert self.pqp(q) == r def test_escaped_colon_in_regexp(self): q = r":test\:regexp" r = (None, "test:regexp", dbcore.query.RegexpQuery) - self.assertEqual(self.pqp(q), r) + assert self.pqp(q) == r def test_single_year(self): q = "year:1999" r = ("year", "1999", dbcore.query.NumericQuery) - self.assertEqual(self.pqp(q), r) + assert self.pqp(q) == r def test_multiple_years(self): q = "year:1999..2010" r = ("year", "1999..2010", dbcore.query.NumericQuery) - self.assertEqual(self.pqp(q), r) + assert self.pqp(q) == r def test_empty_query_part(self): q = "" r = (None, "", dbcore.query.SubstringQuery) - self.assertEqual(self.pqp(q), r) + assert self.pqp(q) == r class QueryFromStringsTest(unittest.TestCase): @@ -581,28 +579,28 @@ class QueryFromStringsTest(unittest.TestCase): def test_zero_parts(self): q = self.qfs([]) - self.assertIsInstance(q, dbcore.query.AndQuery) - self.assertEqual(len(q.subqueries), 1) - self.assertIsInstance(q.subqueries[0], dbcore.query.TrueQuery) + assert isinstance(q, dbcore.query.AndQuery) + assert 
len(q.subqueries) == 1 + assert isinstance(q.subqueries[0], dbcore.query.TrueQuery) def test_two_parts(self): q = self.qfs(["foo", "bar:baz"]) - self.assertIsInstance(q, dbcore.query.AndQuery) - self.assertEqual(len(q.subqueries), 2) - self.assertIsInstance(q.subqueries[0], dbcore.query.AnyFieldQuery) - self.assertIsInstance(q.subqueries[1], dbcore.query.SubstringQuery) + assert isinstance(q, dbcore.query.AndQuery) + assert len(q.subqueries) == 2 + assert isinstance(q.subqueries[0], dbcore.query.AnyFieldQuery) + assert isinstance(q.subqueries[1], dbcore.query.SubstringQuery) def test_parse_fixed_type_query(self): q = self.qfs(["field_one:2..3"]) - self.assertIsInstance(q.subqueries[0], dbcore.query.NumericQuery) + assert isinstance(q.subqueries[0], dbcore.query.NumericQuery) def test_parse_flex_type_query(self): q = self.qfs(["some_float_field:2..3"]) - self.assertIsInstance(q.subqueries[0], dbcore.query.NumericQuery) + assert isinstance(q.subqueries[0], dbcore.query.NumericQuery) def test_empty_query_part(self): q = self.qfs([""]) - self.assertIsInstance(q.subqueries[0], dbcore.query.TrueQuery) + assert isinstance(q.subqueries[0], dbcore.query.TrueQuery) class SortFromStringsTest(unittest.TestCase): @@ -614,31 +612,31 @@ class SortFromStringsTest(unittest.TestCase): def test_zero_parts(self): s = self.sfs([]) - self.assertIsInstance(s, dbcore.query.NullSort) - self.assertEqual(s, dbcore.query.NullSort()) + assert isinstance(s, dbcore.query.NullSort) + assert s == dbcore.query.NullSort() def test_one_parts(self): s = self.sfs(["field+"]) - self.assertIsInstance(s, dbcore.query.Sort) + assert isinstance(s, dbcore.query.Sort) def test_two_parts(self): s = self.sfs(["field+", "another_field-"]) - self.assertIsInstance(s, dbcore.query.MultipleSort) - self.assertEqual(len(s.sorts), 2) + assert isinstance(s, dbcore.query.MultipleSort) + assert len(s.sorts) == 2 def test_fixed_field_sort(self): s = self.sfs(["field_one+"]) - self.assertIsInstance(s, dbcore.query.FixedFieldSort) - self.assertEqual(s, dbcore.query.FixedFieldSort("field_one")) + assert isinstance(s, dbcore.query.FixedFieldSort) + assert s == dbcore.query.FixedFieldSort("field_one") def test_flex_field_sort(self): s = self.sfs(["flex_field+"]) - self.assertIsInstance(s, dbcore.query.SlowFieldSort) - self.assertEqual(s, dbcore.query.SlowFieldSort("flex_field")) + assert isinstance(s, dbcore.query.SlowFieldSort) + assert s == dbcore.query.SlowFieldSort("flex_field") def test_special_sort(self): s = self.sfs(["some_sort+"]) - self.assertIsInstance(s, SortFixture) + assert isinstance(s, SortFixture) class ParseSortedQueryTest(unittest.TestCase): @@ -650,45 +648,45 @@ class ParseSortedQueryTest(unittest.TestCase): def test_and_query(self): q, s = self.psq("foo bar") - self.assertIsInstance(q, dbcore.query.AndQuery) - self.assertIsInstance(s, dbcore.query.NullSort) - self.assertEqual(len(q.subqueries), 2) + assert isinstance(q, dbcore.query.AndQuery) + assert isinstance(s, dbcore.query.NullSort) + assert len(q.subqueries) == 2 def test_or_query(self): q, s = self.psq("foo , bar") - self.assertIsInstance(q, dbcore.query.OrQuery) - self.assertIsInstance(s, dbcore.query.NullSort) - self.assertEqual(len(q.subqueries), 2) + assert isinstance(q, dbcore.query.OrQuery) + assert isinstance(s, dbcore.query.NullSort) + assert len(q.subqueries) == 2 def test_no_space_before_comma_or_query(self): q, s = self.psq("foo, bar") - self.assertIsInstance(q, dbcore.query.OrQuery) - self.assertIsInstance(s, dbcore.query.NullSort) - 
self.assertEqual(len(q.subqueries), 2) + assert isinstance(q, dbcore.query.OrQuery) + assert isinstance(s, dbcore.query.NullSort) + assert len(q.subqueries) == 2 def test_no_spaces_or_query(self): q, s = self.psq("foo,bar") - self.assertIsInstance(q, dbcore.query.AndQuery) - self.assertIsInstance(s, dbcore.query.NullSort) - self.assertEqual(len(q.subqueries), 1) + assert isinstance(q, dbcore.query.AndQuery) + assert isinstance(s, dbcore.query.NullSort) + assert len(q.subqueries) == 1 def test_trailing_comma_or_query(self): q, s = self.psq("foo , bar ,") - self.assertIsInstance(q, dbcore.query.OrQuery) - self.assertIsInstance(s, dbcore.query.NullSort) - self.assertEqual(len(q.subqueries), 3) + assert isinstance(q, dbcore.query.OrQuery) + assert isinstance(s, dbcore.query.NullSort) + assert len(q.subqueries) == 3 def test_leading_comma_or_query(self): q, s = self.psq(", foo , bar") - self.assertIsInstance(q, dbcore.query.OrQuery) - self.assertIsInstance(s, dbcore.query.NullSort) - self.assertEqual(len(q.subqueries), 3) + assert isinstance(q, dbcore.query.OrQuery) + assert isinstance(s, dbcore.query.NullSort) + assert len(q.subqueries) == 3 def test_only_direction(self): q, s = self.psq("-") - self.assertIsInstance(q, dbcore.query.AndQuery) - self.assertIsInstance(s, dbcore.query.NullSort) - self.assertEqual(len(q.subqueries), 1) + assert isinstance(q, dbcore.query.AndQuery) + assert isinstance(s, dbcore.query.NullSort) + assert len(q.subqueries) == 1 class ResultsIteratorTest(unittest.TestCase): @@ -706,12 +704,12 @@ class ResultsIteratorTest(unittest.TestCase): def test_iterate_once(self): objs = self.db._fetch(ModelFixture1) - self.assertEqual(len(list(objs)), 2) + assert len(list(objs)) == 2 def test_iterate_twice(self): objs = self.db._fetch(ModelFixture1) list(objs) - self.assertEqual(len(list(objs)), 2) + assert len(list(objs)) == 2 def test_concurrent_iterators(self): results = self.db._fetch(ModelFixture1) @@ -719,54 +717,47 @@ class ResultsIteratorTest(unittest.TestCase): it2 = iter(results) next(it1) list(it2) - self.assertEqual(len(list(it1)), 1) + assert len(list(it1)) == 1 def test_slow_query(self): q = dbcore.query.SubstringQuery("foo", "ba", False) objs = self.db._fetch(ModelFixture1, q) - self.assertEqual(len(list(objs)), 2) + assert len(list(objs)) == 2 def test_slow_query_negative(self): q = dbcore.query.SubstringQuery("foo", "qux", False) objs = self.db._fetch(ModelFixture1, q) - self.assertEqual(len(list(objs)), 0) + assert len(list(objs)) == 0 def test_iterate_slow_sort(self): s = dbcore.query.SlowFieldSort("foo") res = self.db._fetch(ModelFixture1, sort=s) objs = list(res) - self.assertEqual(objs[0].foo, "bar") - self.assertEqual(objs[1].foo, "baz") + assert objs[0].foo == "bar" + assert objs[1].foo == "baz" def test_unsorted_subscript(self): objs = self.db._fetch(ModelFixture1) - self.assertEqual(objs[0].foo, "baz") - self.assertEqual(objs[1].foo, "bar") + assert objs[0].foo == "baz" + assert objs[1].foo == "bar" def test_slow_sort_subscript(self): s = dbcore.query.SlowFieldSort("foo") objs = self.db._fetch(ModelFixture1, sort=s) - self.assertEqual(objs[0].foo, "bar") - self.assertEqual(objs[1].foo, "baz") + assert objs[0].foo == "bar" + assert objs[1].foo == "baz" def test_length(self): objs = self.db._fetch(ModelFixture1) - self.assertEqual(len(objs), 2) + assert len(objs) == 2 def test_out_of_range(self): objs = self.db._fetch(ModelFixture1) - with self.assertRaises(IndexError): + with pytest.raises(IndexError): objs[100] def test_no_results(self): - 
self.assertIsNone( + assert ( self.db._fetch(ModelFixture1, dbcore.query.FalseQuery()).get() + is None ) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/test_files.py b/test/test_files.py index 46aebe54f..72b1610c0 100644 --- a/test/test_files.py +++ b/test/test_files.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Test file manipulation functionality of Item. -""" +"""Test file manipulation functionality of Item.""" import os import shutil @@ -21,14 +20,17 @@ import stat import unittest from os.path import join +import pytest + import beets.library from beets import util from beets.test import _common from beets.test._common import item, touch +from beets.test.helper import NEEDS_REFLINK, BeetsTestCase from beets.util import MoveOperation, bytestring_path, syspath -class MoveTest(_common.TestCase): +class MoveTest(BeetsTestCase): def setUp(self): super().setUp() @@ -40,14 +42,10 @@ class MoveTest(_common.TestCase): ) # add it to a temporary library - self.lib = beets.library.Library(":memory:") self.i = beets.library.Item.from_path(self.path) self.lib.add(self.i) # set up the destination - self.libdir = join(self.temp_dir, b"testlibdir") - os.mkdir(syspath(self.libdir)) - self.lib.directory = self.libdir self.lib.path_formats = [ ("default", join("$artist", "$album", "$title")) ] @@ -88,59 +86,57 @@ class MoveTest(_common.TestCase): self.i.move(operation=MoveOperation.COPY) self.assertExists(self.path) - @unittest.skipUnless(_common.HAVE_REFLINK, "need reflink") def test_reflink_arrives(self): self.i.move(operation=MoveOperation.REFLINK_AUTO) self.assertExists(self.dest) - @unittest.skipUnless(_common.HAVE_REFLINK, "need reflink") def test_reflink_does_not_depart(self): self.i.move(operation=MoveOperation.REFLINK_AUTO) self.assertExists(self.path) - @unittest.skipUnless(_common.HAVE_REFLINK, "need reflink") + @NEEDS_REFLINK def test_force_reflink_arrives(self): self.i.move(operation=MoveOperation.REFLINK) self.assertExists(self.dest) - @unittest.skipUnless(_common.HAVE_REFLINK, "need reflink") + @NEEDS_REFLINK def test_force_reflink_does_not_depart(self): self.i.move(operation=MoveOperation.REFLINK) self.assertExists(self.path) def test_move_changes_path(self): self.i.move() - self.assertEqual(self.i.path, util.normpath(self.dest)) + assert self.i.path == util.normpath(self.dest) def test_copy_already_at_destination(self): self.i.move() old_path = self.i.path self.i.move(operation=MoveOperation.COPY) - self.assertEqual(self.i.path, old_path) + assert self.i.path == old_path def test_move_already_at_destination(self): self.i.move() old_path = self.i.path self.i.move() - self.assertEqual(self.i.path, old_path) + assert self.i.path == old_path def test_move_file_with_colon(self): self.i.artist = "C:DOS" self.i.move() - self.assertIn("C_DOS", self.i.path.decode()) + assert "C_DOS" in self.i.path.decode() def test_move_file_with_multiple_colons(self): # print(beets.config["replace"]) self.i.artist = "COM:DOS" self.i.move() - self.assertIn("COM_DOS", self.i.path.decode()) + assert "COM_DOS" in self.i.path.decode() def test_move_file_with_colon_alt_separator(self): old = beets.config["drive_sep_replace"] beets.config["drive_sep_replace"] = "0" self.i.artist = "C:DOS" self.i.move() - self.assertIn("C0DOS", self.i.path.decode()) + assert "C0DOS" in self.i.path.decode() 
beets.config["drive_sep_replace"] = old def test_read_only_file_copied_writable(self): @@ -149,7 +145,7 @@ class MoveTest(_common.TestCase): try: self.i.move(operation=MoveOperation.COPY) - self.assertTrue(os.access(syspath(self.i.path), os.W_OK)) + assert os.access(syspath(self.i.path), os.W_OK) finally: # Make everything writable so it can be cleaned up. os.chmod(syspath(self.path), 0o777) @@ -162,18 +158,15 @@ class MoveTest(_common.TestCase): touch(dest) self.i.move() - self.assertNotEqual(self.i.path, dest) - self.assertEqual(os.path.dirname(self.i.path), os.path.dirname(dest)) + assert self.i.path != dest + assert os.path.dirname(self.i.path) == os.path.dirname(dest) @unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks") def test_link_arrives(self): self.i.move(operation=MoveOperation.LINK) self.assertExists(self.dest) - self.assertTrue(os.path.islink(syspath(self.dest))) - self.assertEqual( - bytestring_path(os.readlink(syspath(self.dest))), - self.path, - ) + assert os.path.islink(syspath(self.dest)) + assert bytestring_path(os.readlink(syspath(self.dest))) == self.path @unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks") def test_link_does_not_depart(self): @@ -183,7 +176,7 @@ class MoveTest(_common.TestCase): @unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks") def test_link_changes_path(self): self.i.move(operation=MoveOperation.LINK) - self.assertEqual(self.i.path, util.normpath(self.dest)) + assert self.i.path == util.normpath(self.dest) @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks") def test_hardlink_arrives(self): @@ -191,9 +184,9 @@ class MoveTest(_common.TestCase): self.assertExists(self.dest) s1 = os.stat(syspath(self.path)) s2 = os.stat(syspath(self.dest)) - self.assertTrue( - (s1[stat.ST_INO], s1[stat.ST_DEV]) - == (s2[stat.ST_INO], s2[stat.ST_DEV]) + assert (s1[stat.ST_INO], s1[stat.ST_DEV]) == ( + s2[stat.ST_INO], + s2[stat.ST_DEV], ) @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks") @@ -204,57 +197,54 @@ class MoveTest(_common.TestCase): @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks") def test_hardlink_changes_path(self): self.i.move(operation=MoveOperation.HARDLINK) - self.assertEqual(self.i.path, util.normpath(self.dest)) + assert self.i.path == util.normpath(self.dest) -class HelperTest(_common.TestCase): +class HelperTest(BeetsTestCase): def test_ancestry_works_on_file(self): p = "/a/b/c" a = ["/", "/a", "/a/b"] - self.assertEqual(util.ancestry(p), a) + assert util.ancestry(p) == a def test_ancestry_works_on_dir(self): p = "/a/b/c/" a = ["/", "/a", "/a/b", "/a/b/c"] - self.assertEqual(util.ancestry(p), a) + assert util.ancestry(p) == a def test_ancestry_works_on_relative(self): p = "a/b/c" a = ["a", "a/b"] - self.assertEqual(util.ancestry(p), a) + assert util.ancestry(p) == a def test_components_works_on_file(self): p = "/a/b/c" a = ["/", "a", "b", "c"] - self.assertEqual(util.components(p), a) + assert util.components(p) == a def test_components_works_on_dir(self): p = "/a/b/c/" a = ["/", "a", "b", "c"] - self.assertEqual(util.components(p), a) + assert util.components(p) == a def test_components_works_on_relative(self): p = "a/b/c" a = ["a", "b", "c"] - self.assertEqual(util.components(p), a) + assert util.components(p) == a def test_forward_slash(self): p = rb"C:\a\b\c" a = rb"C:/a/b/c" - self.assertEqual(util.path_as_posix(p), a) + assert util.path_as_posix(p) == a -class AlbumFileTest(_common.TestCase): +class AlbumFileTest(BeetsTestCase): def setUp(self): super().setUp() # Make library and item. 
- self.lib = beets.library.Library(":memory:") self.lib.path_formats = [ ("default", join("$albumartist", "$album", "$title")) ] - self.libdir = os.path.join(self.temp_dir, b"testlibdir") - self.lib.directory = self.libdir self.i = item(self.lib) # Make a file for the item. self.i.path = self.i.destination() @@ -271,7 +261,7 @@ class AlbumFileTest(_common.TestCase): self.ai.store() self.i.load() - self.assertTrue(b"newAlbumName" in self.i.path) + assert b"newAlbumName" in self.i.path def test_albuminfo_move_moves_file(self): oldpath = self.i.path @@ -293,7 +283,7 @@ class AlbumFileTest(_common.TestCase): self.assertExists(oldpath) self.assertExists(self.i.path) - @unittest.skipUnless(_common.HAVE_REFLINK, "need reflink") + @NEEDS_REFLINK def test_albuminfo_move_reflinks_file(self): oldpath = self.i.path self.ai.album = "newAlbumName" @@ -301,24 +291,21 @@ class AlbumFileTest(_common.TestCase): self.ai.store() self.i.load() - self.assertTrue(os.path.exists(oldpath)) - self.assertTrue(os.path.exists(self.i.path)) + assert os.path.exists(oldpath) + assert os.path.exists(self.i.path) def test_albuminfo_move_to_custom_dir(self): self.ai.move(basedir=self.otherdir) self.i.load() self.ai.store() - self.assertIn(b"testotherdir", self.i.path) + assert b"testotherdir" in self.i.path -class ArtFileTest(_common.TestCase): +class ArtFileTest(BeetsTestCase): def setUp(self): super().setUp() # Make library and item. - self.lib = beets.library.Library(":memory:") - self.libdir = os.path.join(self.temp_dir, b"testlibdir") - self.lib.directory = self.libdir self.i = item(self.lib) self.i.path = self.i.destination() # Make a music file. @@ -346,7 +333,7 @@ class ArtFileTest(_common.TestCase): self.ai.move() self.i.load() - self.assertNotEqual(self.i.path, oldpath) + assert self.i.path != oldpath self.assertNotExists(self.art) newart = self.lib.get_album(self.i).art_destination(self.art) self.assertExists(newart) @@ -361,7 +348,7 @@ class ArtFileTest(_common.TestCase): self.assertNotExists(self.art) newart = self.lib.get_album(self.i).artpath self.assertExists(newart) - self.assertIn(b"testotherdir", newart) + assert b"testotherdir" in newart def test_setart_copies_image(self): util.remove(self.art) @@ -374,7 +361,7 @@ class ArtFileTest(_common.TestCase): ai = self.lib.add_album((i2,)) i2.move(operation=MoveOperation.COPY) - self.assertIsNone(ai.artpath) + assert ai.artpath is None ai.set_art(newart) self.assertExists(ai.artpath) @@ -427,8 +414,8 @@ class ArtFileTest(_common.TestCase): # Set the art. ai.set_art(newart) - self.assertNotEqual(artdest, ai.artpath) - self.assertEqual(os.path.dirname(artdest), os.path.dirname(ai.artpath)) + assert artdest != ai.artpath + assert os.path.dirname(artdest) == os.path.dirname(ai.artpath) def test_setart_sets_permissions(self): util.remove(self.art) @@ -446,8 +433,8 @@ class ArtFileTest(_common.TestCase): ai.set_art(newart) mode = stat.S_IMODE(os.stat(syspath(ai.artpath)).st_mode) - self.assertTrue(mode & stat.S_IRGRP) - self.assertTrue(os.access(syspath(ai.artpath), os.W_OK)) + assert mode & stat.S_IRGRP + assert os.access(syspath(ai.artpath), os.W_OK) finally: # Make everything writable so it can be cleaned up. 
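A minimal, self-contained sketch of the pytest.raises(..., match=...) idiom these hunks switch to (the function name and error message below are illustrative, not taken from beets): pytest treats match as a regular expression and applies re.search to the string form of the raised exception, so a plain substring such as "target exists" matches anywhere in the message, and a pattern like "Could not edit configuration.*here is problem" can span several words of it.

import re

import pytest


def boom():
    # Stand-in for a call that is expected to fail, e.g. a reflink to a
    # destination that already exists. Illustrative only.
    raise OSError("reflink failed: target exists")


def test_match_uses_re_search():
    # The substring/regex check is handled by pytest itself.
    with pytest.raises(OSError, match="target exists"):
        boom()

    # The same check spelled out by hand.
    with pytest.raises(OSError) as excinfo:
        boom()
    assert re.search("target exists", str(excinfo.value))

The bare assert statements used throughout the rest of the patch rely on the same machinery: pytest's assertion rewriting reports the compared values on failure, which is why the assertEqual/assertTrue/assertIn helpers can be dropped without losing diagnostics.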
@@ -463,7 +450,7 @@ class ArtFileTest(_common.TestCase): self.ai.items()[0].move() artpath = self.lib.albums()[0].artpath - self.assertTrue(b"different_album" in artpath) + assert b"different_album" in artpath self.assertExists(artpath) self.assertNotExists(oldartpath) @@ -480,19 +467,16 @@ class ArtFileTest(_common.TestCase): self.i.move() artpath = self.lib.albums()[0].artpath - self.assertNotIn(b"different_album", artpath) - self.assertEqual(artpath, oldartpath) + assert b"different_album" not in artpath + assert artpath == oldartpath self.assertExists(oldartpath) -class RemoveTest(_common.TestCase): +class RemoveTest(BeetsTestCase): def setUp(self): super().setUp() # Make library and item. - self.lib = beets.library.Library(":memory:") - self.libdir = os.path.join(self.temp_dir, b"testlibdir") - self.lib.directory = self.libdir self.i = item(self.lib) self.i.path = self.i.destination() # Make a music file. @@ -546,7 +530,7 @@ class RemoveTest(_common.TestCase): # Tests that we can "delete" nonexistent files. -class SoftRemoveTest(_common.TestCase): +class SoftRemoveTest(BeetsTestCase): def setUp(self): super().setUp() @@ -564,7 +548,7 @@ class SoftRemoveTest(_common.TestCase): self.fail("OSError when removing path") -class SafeMoveCopyTest(_common.TestCase): +class SafeMoveCopyTest(BeetsTestCase): def setUp(self): super().setUp() @@ -584,23 +568,22 @@ class SafeMoveCopyTest(_common.TestCase): self.assertExists(self.dest) self.assertExists(self.path) - @unittest.skipUnless(_common.HAVE_REFLINK, "need reflink") + @NEEDS_REFLINK def test_successful_reflink(self): util.reflink(self.path, self.dest) self.assertExists(self.dest) self.assertExists(self.path) def test_unsuccessful_move(self): - with self.assertRaises(util.FilesystemError): + with pytest.raises(util.FilesystemError): util.move(self.path, self.otherpath) def test_unsuccessful_copy(self): - with self.assertRaises(util.FilesystemError): + with pytest.raises(util.FilesystemError): util.copy(self.path, self.otherpath) - @unittest.skipUnless(_common.HAVE_REFLINK, "need reflink") def test_unsuccessful_reflink(self): - with self.assertRaises(util.FilesystemError): + with pytest.raises(util.FilesystemError, match="target exists"): util.reflink(self.path, self.otherpath) def test_self_move(self): @@ -612,7 +595,7 @@ class SafeMoveCopyTest(_common.TestCase): self.assertExists(self.path) -class PruneTest(_common.TestCase): +class PruneTest(BeetsTestCase): def setUp(self): super().setUp() @@ -632,7 +615,7 @@ class PruneTest(_common.TestCase): self.assertNotExists(self.sub) -class WalkTest(_common.TestCase): +class WalkTest(BeetsTestCase): def setUp(self): super().setUp() @@ -645,28 +628,28 @@ class WalkTest(_common.TestCase): def test_sorted_files(self): res = list(util.sorted_walk(self.base)) - self.assertEqual(len(res), 2) - self.assertEqual(res[0], (self.base, [b"d"], [b"x", b"y"])) - self.assertEqual(res[1], (os.path.join(self.base, b"d"), [], [b"z"])) + assert len(res) == 2 + assert res[0] == (self.base, [b"d"], [b"x", b"y"]) + assert res[1] == (os.path.join(self.base, b"d"), [], [b"z"]) def test_ignore_file(self): res = list(util.sorted_walk(self.base, (b"x",))) - self.assertEqual(len(res), 2) - self.assertEqual(res[0], (self.base, [b"d"], [b"y"])) - self.assertEqual(res[1], (os.path.join(self.base, b"d"), [], [b"z"])) + assert len(res) == 2 + assert res[0] == (self.base, [b"d"], [b"y"]) + assert res[1] == (os.path.join(self.base, b"d"), [], [b"z"]) def test_ignore_directory(self): res = list(util.sorted_walk(self.base, (b"d",))) - 
self.assertEqual(len(res), 1) - self.assertEqual(res[0], (self.base, [], [b"x", b"y"])) + assert len(res) == 1 + assert res[0] == (self.base, [], [b"x", b"y"]) def test_ignore_everything(self): res = list(util.sorted_walk(self.base, (b"*",))) - self.assertEqual(len(res), 1) - self.assertEqual(res[0], (self.base, [], [])) + assert len(res) == 1 + assert res[0] == (self.base, [], []) -class UniquePathTest(_common.TestCase): +class UniquePathTest(BeetsTestCase): def setUp(self): super().setUp() @@ -679,22 +662,22 @@ class UniquePathTest(_common.TestCase): def test_new_file_unchanged(self): path = util.unique_path(os.path.join(self.base, b"z.mp3")) - self.assertEqual(path, os.path.join(self.base, b"z.mp3")) + assert path == os.path.join(self.base, b"z.mp3") def test_conflicting_file_appends_1(self): path = util.unique_path(os.path.join(self.base, b"y.mp3")) - self.assertEqual(path, os.path.join(self.base, b"y.1.mp3")) + assert path == os.path.join(self.base, b"y.1.mp3") def test_conflicting_file_appends_higher_number(self): path = util.unique_path(os.path.join(self.base, b"x.mp3")) - self.assertEqual(path, os.path.join(self.base, b"x.3.mp3")) + assert path == os.path.join(self.base, b"x.3.mp3") def test_conflicting_file_with_number_increases_number(self): path = util.unique_path(os.path.join(self.base, b"x.1.mp3")) - self.assertEqual(path, os.path.join(self.base, b"x.3.mp3")) + assert path == os.path.join(self.base, b"x.3.mp3") -class MkDirAllTest(_common.TestCase): +class MkDirAllTest(BeetsTestCase): def test_parent_exists(self): path = os.path.join(self.temp_dir, b"foo", b"bar", b"baz", b"qux.mp3") util.mkdirall(path) @@ -704,11 +687,3 @@ class MkDirAllTest(_common.TestCase): path = os.path.join(self.temp_dir, b"foo", b"bar", b"baz", b"qux.mp3") util.mkdirall(path) self.assertNotExists(path) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/test_hidden.py b/test/test_hidden.py index f60f1f6e9..a7e6a1a10 100644 --- a/test/test_hidden.py +++ b/test/test_hidden.py @@ -14,7 +14,6 @@ """Tests for the 'hidden' utility.""" - import ctypes import errno import subprocess @@ -45,7 +44,7 @@ class HiddenFileTest(unittest.TestCase): else: raise e - self.assertTrue(hidden.is_hidden(f.name)) + assert hidden.is_hidden(f.name) def test_windows_hidden(self): if not sys.platform == "win32": @@ -64,7 +63,7 @@ class HiddenFileTest(unittest.TestCase): if not success: self.skipTest("unable to set file attributes") - self.assertTrue(hidden.is_hidden(f.name)) + assert hidden.is_hidden(f.name) def test_other_hidden(self): if sys.platform == "darwin" or sys.platform == "win32": @@ -73,12 +72,4 @@ class HiddenFileTest(unittest.TestCase): with tempfile.NamedTemporaryFile(prefix=".tmp") as f: fn = util.bytestring_path(f.name) - self.assertTrue(hidden.is_hidden(fn)) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert hidden.is_hidden(fn) diff --git a/test/test_importer.py b/test/test_importer.py index fe41ad2f5..ad6b837f5 100644 --- a/test/test_importer.py +++ b/test/test_importer.py @@ -13,8 +13,8 @@ # included in all copies or substantial portions of the Software. -"""Tests for the general importer functionality. 
-""" +"""Tests for the general importer functionality.""" + import os import re import shutil @@ -23,11 +23,13 @@ import sys import unicodedata import unittest from io import StringIO +from pathlib import Path from tarfile import TarFile from tempfile import mkstemp from unittest.mock import Mock, patch from zipfile import ZipFile +import pytest from mediafile import MediaFile from beets import config, importer, logging, util @@ -35,87 +37,77 @@ from beets.autotag import AlbumInfo, AlbumMatch, TrackInfo from beets.importer import albums_in_dir from beets.test import _common from beets.test.helper import ( + NEEDS_REFLINK, + AsIsImporterMixin, AutotagStub, - ImportHelper, - TestHelper, + BeetsTestCase, + ImportTestCase, + PluginMixin, capture_log, has_program, ) from beets.util import bytestring_path, displayable_path, syspath -class ScrubbedImportTest(_common.TestCase, ImportHelper): - def setUp(self): - self.setup_beets(disk=True) - self.load_plugins("scrub") - self._create_import_dir(2) - self._setup_import_session(autotag=False) - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() +class ScrubbedImportTest(AsIsImporterMixin, PluginMixin, ImportTestCase): + db_on_disk = True + plugin = "scrub" def test_tags_not_scrubbed(self): config["plugins"] = ["scrub"] config["scrub"]["auto"] = False config["import"]["write"] = True for mediafile in self.import_media: - self.assertEqual(mediafile.artist, "Tag Artist") - self.assertEqual(mediafile.album, "Tag Album") - self.importer.run() + assert mediafile.artist == "Tag Artist" + assert mediafile.album == "Tag Album" + self.run_asis_importer() for item in self.lib.items(): imported_file = os.path.join(item.path) imported_file = MediaFile(imported_file) - self.assertEqual(imported_file.artist, "Tag Artist") - self.assertEqual(imported_file.album, "Tag Album") + assert imported_file.artist == "Tag Artist" + assert imported_file.album == "Tag Album" def test_tags_restored(self): config["plugins"] = ["scrub"] config["scrub"]["auto"] = True config["import"]["write"] = True for mediafile in self.import_media: - self.assertEqual(mediafile.artist, "Tag Artist") - self.assertEqual(mediafile.album, "Tag Album") - self.importer.run() + assert mediafile.artist == "Tag Artist" + assert mediafile.album == "Tag Album" + self.run_asis_importer() for item in self.lib.items(): imported_file = os.path.join(item.path) imported_file = MediaFile(imported_file) - self.assertEqual(imported_file.artist, "Tag Artist") - self.assertEqual(imported_file.album, "Tag Album") + assert imported_file.artist == "Tag Artist" + assert imported_file.album == "Tag Album" def test_tags_not_restored(self): config["plugins"] = ["scrub"] config["scrub"]["auto"] = True config["import"]["write"] = False for mediafile in self.import_media: - self.assertEqual(mediafile.artist, "Tag Artist") - self.assertEqual(mediafile.album, "Tag Album") - self.importer.run() + assert mediafile.artist == "Tag Artist" + assert mediafile.album == "Tag Album" + self.run_asis_importer() for item in self.lib.items(): imported_file = os.path.join(item.path) imported_file = MediaFile(imported_file) - self.assertIsNone(imported_file.artist) - self.assertIsNone(imported_file.album) + assert imported_file.artist is None + assert imported_file.album is None @_common.slow_test() -class NonAutotaggedImportTest(_common.TestCase, ImportHelper): - def setUp(self): - self.setup_beets(disk=True) - self._create_import_dir(2) - self._setup_import_session(autotag=False) - - def tearDown(self): - 
self.teardown_beets() +class NonAutotaggedImportTest(AsIsImporterMixin, ImportTestCase): + db_on_disk = True def test_album_created_with_track_artist(self): - self.importer.run() + self.run_asis_importer() albums = self.lib.albums() - self.assertEqual(len(albums), 1) - self.assertEqual(albums[0].albumartist, "Tag Artist") + assert len(albums) == 1 + assert albums[0].albumartist == "Tag Artist" def test_import_copy_arrives(self): - self.importer.run() + self.run_asis_importer() for mediafile in self.import_media: self.assert_file_in_lib( b"Tag Artist", @@ -126,7 +118,7 @@ class NonAutotaggedImportTest(_common.TestCase, ImportHelper): def test_threaded_import_copy_arrives(self): config["threaded"] = True - self.importer.run() + self.run_asis_importer() for mediafile in self.import_media: self.assert_file_in_lib( b"Tag Artist", @@ -135,35 +127,27 @@ class NonAutotaggedImportTest(_common.TestCase, ImportHelper): ) def test_import_with_move_deletes_import_files(self): - config["import"]["move"] = True - for mediafile in self.import_media: self.assertExists(mediafile.path) - self.importer.run() + self.run_asis_importer(move=True) for mediafile in self.import_media: self.assertNotExists(mediafile.path) def test_import_with_move_prunes_directory_empty(self): - config["import"]["move"] = True - - self.assertExists(os.path.join(self.import_dir, b"the_album")) - self.importer.run() - self.assertNotExists(os.path.join(self.import_dir, b"the_album")) + self.assertExists(os.path.join(self.import_dir, b"album")) + self.run_asis_importer(move=True) + self.assertNotExists(os.path.join(self.import_dir, b"album")) def test_import_with_move_prunes_with_extra_clutter(self): - self.touch(os.path.join(self.import_dir, b"the_album", b"alog.log")) + self.touch(os.path.join(self.import_dir, b"album", b"alog.log")) config["clutter"] = ["*.log"] - config["import"]["move"] = True - self.assertExists(os.path.join(self.import_dir, b"the_album")) - self.importer.run() - self.assertNotExists(os.path.join(self.import_dir, b"the_album")) + self.assertExists(os.path.join(self.import_dir, b"album")) + self.run_asis_importer(move=True) + self.assertNotExists(os.path.join(self.import_dir, b"album")) def test_threaded_import_move_arrives(self): - config["import"]["move"] = True - config["import"]["threaded"] = True - - self.importer.run() + self.run_asis_importer(move=True, threaded=True) for mediafile in self.import_media: self.assert_file_in_lib( b"Tag Artist", @@ -172,36 +156,28 @@ class NonAutotaggedImportTest(_common.TestCase, ImportHelper): ) def test_threaded_import_move_deletes_import(self): - config["import"]["move"] = True - config["threaded"] = True - - self.importer.run() + self.run_asis_importer(move=True, threaded=True) for mediafile in self.import_media: self.assertNotExists(mediafile.path) def test_import_without_delete_retains_files(self): - config["import"]["delete"] = False - self.importer.run() + self.run_asis_importer(delete=False) for mediafile in self.import_media: self.assertExists(mediafile.path) def test_import_with_delete_removes_files(self): - config["import"]["delete"] = True - - self.importer.run() + self.run_asis_importer(delete=True) for mediafile in self.import_media: self.assertNotExists(mediafile.path) def test_import_with_delete_prunes_directory_empty(self): - config["import"]["delete"] = True - self.assertExists(os.path.join(self.import_dir, b"the_album")) - self.importer.run() - self.assertNotExists(os.path.join(self.import_dir, b"the_album")) + 
self.assertExists(os.path.join(self.import_dir, b"album")) + self.run_asis_importer(delete=True) + self.assertNotExists(os.path.join(self.import_dir, b"album")) @unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks") def test_import_link_arrives(self): - config["import"]["link"] = True - self.importer.run() + self.run_asis_importer(link=True) for mediafile in self.import_media: filename = os.path.join( self.libdir, @@ -210,7 +186,7 @@ class NonAutotaggedImportTest(_common.TestCase, ImportHelper): util.bytestring_path(f"{mediafile.title}.mp3"), ) self.assertExists(filename) - self.assertTrue(os.path.islink(syspath(filename))) + assert os.path.islink(syspath(filename)) self.assert_equal_path( util.bytestring_path(os.readlink(syspath(filename))), mediafile.path, @@ -218,8 +194,7 @@ class NonAutotaggedImportTest(_common.TestCase, ImportHelper): @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks") def test_import_hardlink_arrives(self): - config["import"]["hardlink"] = True - self.importer.run() + self.run_asis_importer(hardlink=True) for mediafile in self.import_media: filename = os.path.join( self.libdir, @@ -230,17 +205,16 @@ class NonAutotaggedImportTest(_common.TestCase, ImportHelper): self.assertExists(filename) s1 = os.stat(syspath(mediafile.path)) s2 = os.stat(syspath(filename)) - self.assertTrue( - (s1[stat.ST_INO], s1[stat.ST_DEV]) - == (s2[stat.ST_INO], s2[stat.ST_DEV]) + assert (s1[stat.ST_INO], s1[stat.ST_DEV]) == ( + s2[stat.ST_INO], + s2[stat.ST_DEV], ) - @unittest.skipUnless(_common.HAVE_REFLINK, "need reflinks") + @NEEDS_REFLINK def test_import_reflink_arrives(self): # Detecting reflinks is currently tricky due to various fs # implementations, we'll just check the file exists. - config["import"]["reflink"] = True - self.importer.run() + self.run_asis_importer(reflink=True) for mediafile in self.import_media: self.assert_file_in_lib( b"Tag Artist", @@ -250,8 +224,7 @@ class NonAutotaggedImportTest(_common.TestCase, ImportHelper): def test_import_reflink_auto_arrives(self): # Should pass regardless of reflink support due to fallback. - config["import"]["reflink"] = "auto" - self.importer.run() + self.run_asis_importer(reflink="auto") for mediafile in self.import_media: self.assert_file_in_lib( b"Tag Artist", @@ -271,47 +244,36 @@ def create_archive(session): return path -class RmTempTest(unittest.TestCase, ImportHelper, _common.Assertions): +class RmTempTest(BeetsTestCase): """Tests that temporarily extracted archives are properly removed after usage. 
""" def setUp(self): - self.setup_beets() + super().setUp() self.want_resume = False self.config["incremental"] = False self._old_home = None - def tearDown(self): - self.teardown_beets() - def test_rm(self): zip_path = create_archive(self) archive_task = importer.ArchiveImportTask(zip_path) archive_task.extract() tmp_path = archive_task.toppath - self._setup_import_session(autotag=False, import_dir=tmp_path) self.assertExists(tmp_path) archive_task.finalize(self) self.assertNotExists(tmp_path) -class ImportZipTest(unittest.TestCase, ImportHelper): - def setUp(self): - self.setup_beets() - - def tearDown(self): - self.teardown_beets() - +class ImportZipTest(AsIsImporterMixin, ImportTestCase): def test_import_zip(self): zip_path = create_archive(self) - self.assertEqual(len(self.lib.items()), 0) - self.assertEqual(len(self.lib.albums()), 0) + assert len(self.lib.items()) == 0 + assert len(self.lib.albums()) == 0 - self._setup_import_session(autotag=False, import_dir=zip_path) - self.importer.run() - self.assertEqual(len(self.lib.items()), 1) - self.assertEqual(len(self.lib.albums()), 1) + self.run_asis_importer(import_dir=zip_path) + assert len(self.lib.items()) == 1 + assert len(self.lib.albums()) == 1 class ImportTarTest(ImportZipTest): @@ -344,73 +306,72 @@ class ImportPasswordRarTest(ImportZipTest): return os.path.join(_common.RSRC, b"password.rar") -class ImportSingletonTest(_common.TestCase, ImportHelper): +class ImportSingletonTest(ImportTestCase): """Test ``APPLY`` and ``ASIS`` choices for an import session with singletons config set to True. """ def setUp(self): - self.setup_beets() - self._create_import_dir(1) - self._setup_import_session() - config["import"]["singletons"] = True + super().setUp() + self.prepare_album_for_import(1) + self.importer = self.setup_singleton_importer() self.matcher = AutotagStub().install() def tearDown(self): - self.teardown_beets() + super().tearDown() self.matcher.restore() def test_apply_asis_adds_track(self): - self.assertIsNone(self.lib.items().get()) + assert self.lib.items().get() is None self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assertEqual(self.lib.items().get().title, "Tag Title 1") + assert self.lib.items().get().title == "Tag Track 1" def test_apply_asis_does_not_add_album(self): - self.assertIsNone(self.lib.albums().get()) + assert self.lib.albums().get() is None self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assertIsNone(self.lib.albums().get()) + assert self.lib.albums().get() is None def test_apply_asis_adds_singleton_path(self): self.assert_lib_dir_empty() self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assert_file_in_lib(b"singletons", b"Tag Title 1.mp3") + self.assert_file_in_lib(b"singletons", b"Tag Track 1.mp3") def test_apply_candidate_adds_track(self): - self.assertIsNone(self.lib.items().get()) + assert self.lib.items().get() is None self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(self.lib.items().get().title, "Applied Title 1") + assert self.lib.items().get().title == "Applied Track 1" def test_apply_candidate_does_not_add_album(self): self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertIsNone(self.lib.albums().get()) + assert self.lib.albums().get() is None def test_apply_candidate_adds_singleton_path(self): self.assert_lib_dir_empty() self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assert_file_in_lib(b"singletons", b"Applied Title 1.mp3") + 
self.assert_file_in_lib(b"singletons", b"Applied Track 1.mp3") def test_skip_does_not_add_first_track(self): self.importer.add_choice(importer.action.SKIP) self.importer.run() - self.assertIsNone(self.lib.items().get()) + assert self.lib.items().get() is None def test_skip_adds_other_tracks(self): - self._create_import_dir(2) + self.prepare_album_for_import(2) self.importer.add_choice(importer.action.SKIP) self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assertEqual(len(self.lib.items()), 1) + assert len(self.lib.items()) == 1 def test_import_single_files(self): resource_path = os.path.join(_common.RSRC, b"empty.mp3") @@ -418,102 +379,106 @@ class ImportSingletonTest(_common.TestCase, ImportHelper): util.copy(resource_path, single_path) import_files = [ - os.path.join(self.import_dir, b"the_album"), + os.path.join(self.import_dir, b"album"), single_path, ] - self._setup_import_session(singletons=False) + self.setup_importer() self.importer.paths = import_files self.importer.add_choice(importer.action.ASIS) self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assertEqual(len(self.lib.items()), 2) - self.assertEqual(len(self.lib.albums()), 2) + assert len(self.lib.items()) == 2 + assert len(self.lib.albums()) == 2 def test_set_fields(self): - genre = "\U0001F3B7 Jazz" + genre = "\U0001f3b7 Jazz" collection = "To Listen" + disc = 0 config["import"]["set_fields"] = { "collection": collection, "genre": genre, "title": "$title - formatted", + "disc": disc, } # As-is item import. - self.assertIsNone(self.lib.albums().get()) + assert self.lib.albums().get() is None self.importer.add_choice(importer.action.ASIS) self.importer.run() for item in self.lib.items(): item.load() # TODO: Not sure this is necessary. - self.assertEqual(item.genre, genre) - self.assertEqual(item.collection, collection) - self.assertEqual(item.title, "Tag Title 1 - formatted") + assert item.genre == genre + assert item.collection == collection + assert item.title == "Tag Track 1 - formatted" + assert item.disc == disc # Remove item from library to test again with APPLY choice. item.remove() # Autotagged. 
- self.assertIsNone(self.lib.albums().get()) + assert self.lib.albums().get() is None self.importer.clear_choices() self.importer.add_choice(importer.action.APPLY) self.importer.run() for item in self.lib.items(): item.load() - self.assertEqual(item.genre, genre) - self.assertEqual(item.collection, collection) - self.assertEqual(item.title, "Applied Title 1 - formatted") + assert item.genre == genre + assert item.collection == collection + assert item.title == "Applied Track 1 - formatted" + assert item.disc == disc -class ImportTest(_common.TestCase, ImportHelper): +class ImportTest(ImportTestCase): """Test APPLY, ASIS and SKIP choices.""" def setUp(self): - self.setup_beets() - self._create_import_dir(1) - self._setup_import_session() + super().setUp() + self.prepare_album_for_import(1) + self.setup_importer() self.matcher = AutotagStub().install() self.matcher.macthin = AutotagStub.GOOD def tearDown(self): - self.teardown_beets() + super().tearDown() self.matcher.restore() def test_apply_asis_adds_album(self): - self.assertIsNone(self.lib.albums().get()) + assert self.lib.albums().get() is None self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assertEqual(self.lib.albums().get().album, "Tag Album") + assert self.lib.albums().get().album == "Tag Album" def test_apply_asis_adds_tracks(self): - self.assertIsNone(self.lib.items().get()) + assert self.lib.items().get() is None self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assertEqual(self.lib.items().get().title, "Tag Title 1") + assert self.lib.items().get().title == "Tag Track 1" def test_apply_asis_adds_album_path(self): self.assert_lib_dir_empty() self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assert_file_in_lib(b"Tag Artist", b"Tag Album", b"Tag Title 1.mp3") + self.assert_file_in_lib(b"Tag Artist", b"Tag Album", b"Tag Track 1.mp3") def test_apply_candidate_adds_album(self): - self.assertIsNone(self.lib.albums().get()) + assert self.lib.albums().get() is None self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(self.lib.albums().get().album, "Applied Album") + assert self.lib.albums().get().album == "Applied Album" def test_apply_candidate_adds_tracks(self): - self.assertIsNone(self.lib.items().get()) + assert self.lib.items().get() is None self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(self.lib.items().get().title, "Applied Title 1") + assert self.lib.items().get().title == "Applied Track 1" def test_apply_candidate_adds_album_path(self): self.assert_lib_dir_empty() @@ -521,7 +486,7 @@ class ImportTest(_common.TestCase, ImportHelper): self.importer.add_choice(importer.action.APPLY) self.importer.run() self.assert_file_in_lib( - b"Applied Artist", b"Applied Album", b"Applied Title 1.mp3" + b"Applied Artist", b"Applied Album", b"Applied Track 1.mp3" ) def test_apply_from_scratch_removes_other_metadata(self): @@ -533,14 +498,14 @@ class ImportTest(_common.TestCase, ImportHelper): self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(self.lib.items().get().genre, "") + assert self.lib.items().get().genre == "" def test_apply_from_scratch_keeps_format(self): config["import"]["from_scratch"] = True self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(self.lib.items().get().format, "MP3") + assert self.lib.items().get().format == "MP3" def test_apply_from_scratch_keeps_bitrate(self): config["import"]["from_scratch"] = True @@ 
-548,14 +513,12 @@ class ImportTest(_common.TestCase, ImportHelper): self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(self.lib.items().get().bitrate, bitrate) + assert self.lib.items().get().bitrate == bitrate def test_apply_with_move_deletes_import(self): config["import"]["move"] = True - import_file = os.path.join( - self.import_dir, b"the_album", b"track_1.mp3" - ) + import_file = os.path.join(self.import_dir, b"album", b"track_1.mp3") self.assertExists(import_file) self.importer.add_choice(importer.action.APPLY) @@ -565,9 +528,7 @@ class ImportTest(_common.TestCase, ImportHelper): def test_apply_with_delete_deletes_import(self): config["import"]["delete"] = True - import_file = os.path.join( - self.import_dir, b"the_album", b"track_1.mp3" - ) + import_file = os.path.join(self.import_dir, b"album", b"track_1.mp3") self.assertExists(import_file) self.importer.add_choice(importer.action.APPLY) @@ -577,133 +538,130 @@ class ImportTest(_common.TestCase, ImportHelper): def test_skip_does_not_add_track(self): self.importer.add_choice(importer.action.SKIP) self.importer.run() - self.assertIsNone(self.lib.items().get()) + assert self.lib.items().get() is None def test_skip_non_album_dirs(self): - self.assertIsDir(os.path.join(self.import_dir, b"the_album")) + self.assertIsDir(os.path.join(self.import_dir, b"album")) self.touch(b"cruft", dir=self.import_dir) self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(len(self.lib.albums()), 1) + assert len(self.lib.albums()) == 1 def test_unmatched_tracks_not_added(self): - self._create_import_dir(2) + self.prepare_album_for_import(2) self.matcher.matching = self.matcher.MISSING self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(len(self.lib.items()), 1) + assert len(self.lib.items()) == 1 def test_empty_directory_warning(self): import_dir = os.path.join(self.temp_dir, b"empty") self.touch(b"non-audio", dir=import_dir) - self._setup_import_session(import_dir=import_dir) + self.setup_importer(import_dir=import_dir) with capture_log() as logs: self.importer.run() import_dir = displayable_path(import_dir) - self.assertIn(f"No files imported from {import_dir}", logs) + assert f"No files imported from {import_dir}" in logs def test_empty_directory_singleton_warning(self): import_dir = os.path.join(self.temp_dir, b"empty") self.touch(b"non-audio", dir=import_dir) - self._setup_import_session(import_dir=import_dir, singletons=True) + self.setup_singleton_importer(import_dir=import_dir) with capture_log() as logs: self.importer.run() import_dir = displayable_path(import_dir) - self.assertIn(f"No files imported from {import_dir}", logs) + assert f"No files imported from {import_dir}" in logs def test_asis_no_data_source(self): - self.assertIsNone(self.lib.items().get()) + assert self.lib.items().get() is None self.importer.add_choice(importer.action.ASIS) self.importer.run() - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): self.lib.items().get().data_source def test_set_fields(self): - genre = "\U0001F3B7 Jazz" + genre = "\U0001f3b7 Jazz" collection = "To Listen" comments = "managed by beets" + disc = 0 config["import"]["set_fields"] = { "genre": genre, "collection": collection, "comments": comments, "album": "$album - formatted", + "disc": disc, } # As-is album import. 
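Most of the mechanical churn in this hunk is the switch from unittest assertion methods (assertEqual, assertIsNone, assertRaises) to plain assert statements and pytest.raises, as in test_asis_no_data_source above. A minimal, self-contained illustration of the pattern (the example values are made up):

import pytest


def test_assertion_style():
    items = []
    # Previously: self.assertEqual(len(items), 0) and
    # self.assertRaises(IndexError, lambda: items[0]).
    # pytest rewrites bare asserts, so the failure output stays detailed.
    assert len(items) == 0
    with pytest.raises(IndexError):
        items[0]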
- self.assertIsNone(self.lib.albums().get()) + assert self.lib.albums().get() is None self.importer.add_choice(importer.action.ASIS) self.importer.run() for album in self.lib.albums(): album.load() # TODO: Not sure this is necessary. - self.assertEqual(album.genre, genre) - self.assertEqual(album.comments, comments) + assert album.genre == genre + assert album.comments == comments for item in album.items(): - self.assertEqual(item.get("genre", with_album=False), genre) - self.assertEqual( - item.get("collection", with_album=False), collection - ) - self.assertEqual( - item.get("comments", with_album=False), comments - ) - self.assertEqual( - item.get("album", with_album=False), "Tag Album - formatted" + assert item.get("genre", with_album=False) == genre + assert item.get("collection", with_album=False) == collection + assert item.get("comments", with_album=False) == comments + assert ( + item.get("album", with_album=False) + == "Tag Album - formatted" ) + assert item.disc == disc # Remove album from library to test again with APPLY choice. album.remove() # Autotagged. - self.assertIsNone(self.lib.albums().get()) + assert self.lib.albums().get() is None self.importer.clear_choices() self.importer.add_choice(importer.action.APPLY) self.importer.run() for album in self.lib.albums(): album.load() - self.assertEqual(album.genre, genre) - self.assertEqual(album.comments, comments) + assert album.genre == genre + assert album.comments == comments for item in album.items(): - self.assertEqual(item.get("genre", with_album=False), genre) - self.assertEqual( - item.get("collection", with_album=False), collection - ) - self.assertEqual( - item.get("comments", with_album=False), comments - ) - self.assertEqual( - item.get("album", with_album=False), - "Applied Album - formatted", + assert item.get("genre", with_album=False) == genre + assert item.get("collection", with_album=False) == collection + assert item.get("comments", with_album=False) == comments + assert ( + item.get("album", with_album=False) + == "Applied Album - formatted" ) + assert item.disc == disc -class ImportTracksTest(_common.TestCase, ImportHelper): +class ImportTracksTest(ImportTestCase): """Test TRACKS and APPLY choice.""" def setUp(self): - self.setup_beets() - self._create_import_dir(1) - self._setup_import_session() + super().setUp() + self.prepare_album_for_import(1) + self.setup_importer() self.matcher = AutotagStub().install() def tearDown(self): - self.teardown_beets() + super().tearDown() self.matcher.restore() def test_apply_tracks_adds_singleton_track(self): - self.assertIsNone(self.lib.items().get()) - self.assertIsNone(self.lib.albums().get()) + assert self.lib.items().get() is None + assert self.lib.albums().get() is None self.importer.add_choice(importer.action.TRACKS) self.importer.add_choice(importer.action.APPLY) self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(self.lib.items().get().title, "Applied Title 1") - self.assertIsNone(self.lib.albums().get()) + assert self.lib.items().get().title == "Applied Track 1" + assert self.lib.albums().get() is None def test_apply_tracks_adds_singleton_path(self): self.assert_lib_dir_empty() @@ -712,28 +670,28 @@ class ImportTracksTest(_common.TestCase, ImportHelper): self.importer.add_choice(importer.action.APPLY) self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assert_file_in_lib(b"singletons", b"Applied Title 1.mp3") + self.assert_file_in_lib(b"singletons", b"Applied Track 1.mp3") -class 
ImportCompilationTest(_common.TestCase, ImportHelper): +class ImportCompilationTest(ImportTestCase): """Test ASIS import of a folder containing tracks with different artists.""" def setUp(self): - self.setup_beets() - self._create_import_dir(3) - self._setup_import_session() + super().setUp() + self.prepare_album_for_import(3) + self.setup_importer() self.matcher = AutotagStub().install() def tearDown(self): - self.teardown_beets() + super().tearDown() self.matcher.restore() def test_asis_homogenous_sets_albumartist(self): self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assertEqual(self.lib.albums().get().albumartist, "Tag Artist") + assert self.lib.albums().get().albumartist == "Tag Artist" for item in self.lib.items(): - self.assertEqual(item.albumartist, "Tag Artist") + assert item.albumartist == "Tag Artist" def test_asis_heterogenous_sets_various_albumartist(self): self.import_media[0].artist = "Other Artist" @@ -743,9 +701,9 @@ class ImportCompilationTest(_common.TestCase, ImportHelper): self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assertEqual(self.lib.albums().get().albumartist, "Various Artists") + assert self.lib.albums().get().albumartist == "Various Artists" for item in self.lib.items(): - self.assertEqual(item.albumartist, "Various Artists") + assert item.albumartist == "Various Artists" def test_asis_heterogenous_sets_compilation(self): self.import_media[0].artist = "Other Artist" @@ -756,7 +714,7 @@ class ImportCompilationTest(_common.TestCase, ImportHelper): self.importer.add_choice(importer.action.ASIS) self.importer.run() for item in self.lib.items(): - self.assertTrue(item.comp) + assert item.comp def test_asis_sets_majority_albumartist(self): self.import_media[0].artist = "Other Artist" @@ -766,9 +724,9 @@ class ImportCompilationTest(_common.TestCase, ImportHelper): self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assertEqual(self.lib.albums().get().albumartist, "Other Artist") + assert self.lib.albums().get().albumartist == "Other Artist" for item in self.lib.items(): - self.assertEqual(item.albumartist, "Other Artist") + assert item.albumartist == "Other Artist" def test_asis_albumartist_tag_sets_albumartist(self): self.import_media[0].artist = "Other Artist" @@ -780,13 +738,11 @@ class ImportCompilationTest(_common.TestCase, ImportHelper): self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assertEqual(self.lib.albums().get().albumartist, "Album Artist") - self.assertEqual( - self.lib.albums().get().mb_albumartistid, "Album Artist ID" - ) + assert self.lib.albums().get().albumartist == "Album Artist" + assert self.lib.albums().get().mb_albumartistid == "Album Artist ID" for item in self.lib.items(): - self.assertEqual(item.albumartist, "Album Artist") - self.assertEqual(item.mb_albumartistid, "Album Artist ID") + assert item.albumartist == "Album Artist" + assert item.mb_albumartistid == "Album Artist ID" def test_asis_albumartists_tag_sets_multi_albumartists(self): self.import_media[0].artist = "Other Artist" @@ -801,126 +757,116 @@ class ImportCompilationTest(_common.TestCase, ImportHelper): self.importer.add_choice(importer.action.ASIS) self.importer.run() - self.assertEqual(self.lib.albums().get().albumartist, "Album Artist") - self.assertEqual( - self.lib.albums().get().albumartists, - ["Album Artist 1", "Album Artist 2"], - ) - self.assertEqual( - self.lib.albums().get().mb_albumartistid, "Album Artist ID" - ) + assert self.lib.albums().get().albumartist == 
"Album Artist" + assert self.lib.albums().get().albumartists == [ + "Album Artist 1", + "Album Artist 2", + ] + assert self.lib.albums().get().mb_albumartistid == "Album Artist ID" # Make sure both custom media items get tested asserted_multi_artists_0 = False asserted_multi_artists_1 = False for item in self.lib.items(): - self.assertEqual(item.albumartist, "Album Artist") - self.assertEqual( - item.albumartists, ["Album Artist 1", "Album Artist 2"] - ) - self.assertEqual(item.mb_albumartistid, "Album Artist ID") + assert item.albumartist == "Album Artist" + assert item.albumartists == ["Album Artist 1", "Album Artist 2"] + assert item.mb_albumartistid == "Album Artist ID" if item.artist == "Other Artist": asserted_multi_artists_0 = True - self.assertEqual( - item.artists, ["Other Artist", "Other Artist 2"] - ) + assert item.artists == ["Other Artist", "Other Artist 2"] if item.artist == "Another Artist": asserted_multi_artists_1 = True - self.assertEqual( - item.artists, ["Another Artist", "Another Artist 2"] - ) + assert item.artists == ["Another Artist", "Another Artist 2"] - self.assertTrue(asserted_multi_artists_0 and asserted_multi_artists_1) + assert asserted_multi_artists_0 + assert asserted_multi_artists_1 -class ImportExistingTest(_common.TestCase, ImportHelper): +class ImportExistingTest(ImportTestCase): """Test importing files that are already in the library directory.""" def setUp(self): - self.setup_beets() - self._create_import_dir(1) + super().setUp() + self.prepare_album_for_import(1) self.matcher = AutotagStub().install() - self._setup_import_session() - self.setup_importer = self.importer - self.setup_importer.default_choice = importer.action.APPLY - - self._setup_import_session(import_dir=self.libdir) + self.reimporter = self.setup_importer(import_dir=self.libdir) + self.importer = self.setup_importer() def tearDown(self): - self.teardown_beets() + super().tearDown() self.matcher.restore() def test_does_not_duplicate_item(self): - self.setup_importer.run() - self.assertEqual(len(self.lib.items()), 1) - - self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(len(self.lib.items()), 1) + assert len(self.lib.items()) == 1 + + self.reimporter.add_choice(importer.action.APPLY) + self.reimporter.run() + assert len(self.lib.items()) == 1 def test_does_not_duplicate_album(self): - self.setup_importer.run() - self.assertEqual(len(self.lib.albums()), 1) - - self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(len(self.lib.albums()), 1) + assert len(self.lib.albums()) == 1 + + self.reimporter.add_choice(importer.action.APPLY) + self.reimporter.run() + assert len(self.lib.albums()) == 1 def test_does_not_duplicate_singleton_track(self): - self.setup_importer.add_choice(importer.action.TRACKS) - self.setup_importer.add_choice(importer.action.APPLY) - self.setup_importer.run() - self.assertEqual(len(self.lib.items()), 1) - self.importer.add_choice(importer.action.TRACKS) self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(len(self.lib.items()), 1) + assert len(self.lib.items()) == 1 + + self.reimporter.add_choice(importer.action.TRACKS) + self.reimporter.add_choice(importer.action.APPLY) + self.reimporter.run() + assert len(self.lib.items()) == 1 def test_asis_updates_metadata(self): - self.setup_importer.run() + self.importer.run() medium = MediaFile(self.lib.items().get().path) medium.title = "New Title" medium.save() - self.importer.add_choice(importer.action.ASIS) - 
self.importer.run() - self.assertEqual(self.lib.items().get().title, "New Title") + self.reimporter.add_choice(importer.action.ASIS) + self.reimporter.run() + assert self.lib.items().get().title == "New Title" def test_asis_updated_moves_file(self): - self.setup_importer.run() + self.importer.run() medium = MediaFile(self.lib.items().get().path) medium.title = "New Title" medium.save() old_path = os.path.join( - b"Applied Artist", b"Applied Album", b"Applied Title 1.mp3" + b"Applied Artist", b"Applied Album", b"Applied Track 1.mp3" ) self.assert_file_in_lib(old_path) - self.importer.add_choice(importer.action.ASIS) - self.importer.run() + self.reimporter.add_choice(importer.action.ASIS) + self.reimporter.run() self.assert_file_in_lib( b"Applied Artist", b"Applied Album", b"New Title.mp3" ) self.assert_file_not_in_lib(old_path) def test_asis_updated_without_copy_does_not_move_file(self): - self.setup_importer.run() + self.importer.run() medium = MediaFile(self.lib.items().get().path) medium.title = "New Title" medium.save() old_path = os.path.join( - b"Applied Artist", b"Applied Album", b"Applied Title 1.mp3" + b"Applied Artist", b"Applied Album", b"Applied Track 1.mp3" ) self.assert_file_in_lib(old_path) config["import"]["copy"] = False - self.importer.add_choice(importer.action.ASIS) - self.importer.run() + self.reimporter.add_choice(importer.action.ASIS) + self.reimporter.run() self.assert_file_not_in_lib( b"Applied Artist", b"Applied Album", b"New Title.mp3" ) @@ -928,17 +874,16 @@ class ImportExistingTest(_common.TestCase, ImportHelper): def test_outside_file_is_copied(self): config["import"]["copy"] = False - self.setup_importer.run() + self.importer.run() self.assert_equal_path( self.lib.items().get().path, self.import_media[0].path ) - config["import"]["copy"] = True - self._setup_import_session() - self.importer.add_choice(importer.action.APPLY) - self.importer.run() + self.reimporter = self.setup_importer() + self.reimporter.add_choice(importer.action.APPLY) + self.reimporter.run() new_path = os.path.join( - b"Applied Artist", b"Applied Album", b"Applied Title 1.mp3" + b"Applied Artist", b"Applied Album", b"Applied Track 1.mp3" ) self.assert_file_in_lib(new_path) @@ -948,24 +893,24 @@ class ImportExistingTest(_common.TestCase, ImportHelper): def test_outside_file_is_moved(self): config["import"]["copy"] = False - self.setup_importer.run() + self.importer.run() self.assert_equal_path( self.lib.items().get().path, self.import_media[0].path ) - self._setup_import_session(move=True) - self.importer.add_choice(importer.action.APPLY) - self.importer.run() + self.reimporter = self.setup_importer(move=True) + self.reimporter.add_choice(importer.action.APPLY) + self.reimporter.run() self.assertNotExists(self.import_media[0].path) -class GroupAlbumsImportTest(_common.TestCase, ImportHelper): +class GroupAlbumsImportTest(ImportTestCase): def setUp(self): - self.setup_beets() - self._create_import_dir(3) + super().setUp() + self.prepare_album_for_import(3) self.matcher = AutotagStub().install() self.matcher.matching = AutotagStub.NONE - self._setup_import_session() + self.setup_importer() # Split tracks into two albums and use both as-is self.importer.add_choice(importer.action.ALBUMS) @@ -973,7 +918,7 @@ class GroupAlbumsImportTest(_common.TestCase, ImportHelper): self.importer.add_choice(importer.action.ASIS) def tearDown(self): - self.teardown_beets() + super().tearDown() self.matcher.restore() def test_add_album_for_different_artist_and_different_album(self): @@ -983,7 +928,7 @@ class 
GroupAlbumsImportTest(_common.TestCase, ImportHelper): self.importer.run() albums = {album.album for album in self.lib.albums()} - self.assertEqual(albums, {"Album B", "Tag Album"}) + assert albums == {"Album B", "Tag Album"} def test_add_album_for_different_artist_and_same_albumartist(self): self.import_media[0].artist = "Artist B" @@ -995,7 +940,7 @@ class GroupAlbumsImportTest(_common.TestCase, ImportHelper): self.importer.run() artists = {album.albumartist for album in self.lib.albums()} - self.assertEqual(artists, {"Album Artist", "Tag Artist"}) + assert artists == {"Album Artist", "Tag Artist"} def test_add_album_for_same_artist_and_different_album(self): self.import_media[0].album = "Album B" @@ -1003,7 +948,7 @@ class GroupAlbumsImportTest(_common.TestCase, ImportHelper): self.importer.run() albums = {album.album for album in self.lib.albums()} - self.assertEqual(albums, {"Album B", "Tag Album"}) + assert albums == {"Album B", "Tag Album"} def test_add_album_for_same_album_and_different_artist(self): self.import_media[0].artist = "Artist B" @@ -1011,7 +956,7 @@ class GroupAlbumsImportTest(_common.TestCase, ImportHelper): self.importer.run() artists = {album.albumartist for album in self.lib.albums()} - self.assertEqual(artists, {"Artist B", "Tag Artist"}) + assert artists == {"Artist B", "Tag Artist"} def test_incremental(self): config["import"]["incremental"] = True @@ -1020,7 +965,7 @@ class GroupAlbumsImportTest(_common.TestCase, ImportHelper): self.importer.run() albums = {album.album for album in self.lib.albums()} - self.assertEqual(albums, {"Album B", "Tag Album"}) + assert albums == {"Album B", "Tag Album"} class GlobalGroupAlbumsImportTest(GroupAlbumsImportTest): @@ -1031,30 +976,30 @@ class GlobalGroupAlbumsImportTest(GroupAlbumsImportTest): config["import"]["group_albums"] = True -class ChooseCandidateTest(_common.TestCase, ImportHelper): +class ChooseCandidateTest(ImportTestCase): def setUp(self): - self.setup_beets() - self._create_import_dir(1) - self._setup_import_session() + super().setUp() + self.prepare_album_for_import(1) + self.setup_importer() self.matcher = AutotagStub().install() self.matcher.matching = AutotagStub.BAD def tearDown(self): - self.teardown_beets() + super().tearDown() self.matcher.restore() def test_choose_first_candidate(self): self.importer.add_choice(1) self.importer.run() - self.assertEqual(self.lib.albums().get().album, "Applied Album M") + assert self.lib.albums().get().album == "Applied Album M" def test_choose_second_candidate(self): self.importer.add_choice(2) self.importer.run() - self.assertEqual(self.lib.albums().get().album, "Applied Album MM") + assert self.lib.albums().get().album == "Applied Album MM" -class InferAlbumDataTest(_common.TestCase): +class InferAlbumDataTest(BeetsTestCase): def setUp(self): super().setUp() @@ -1076,8 +1021,8 @@ class InferAlbumDataTest(_common.TestCase): def test_asis_homogenous_single_artist(self): self.task.set_choice(importer.action.ASIS) self.task.align_album_level_fields() - self.assertFalse(self.items[0].comp) - self.assertEqual(self.items[0].albumartist, self.items[2].artist) + assert not self.items[0].comp + assert self.items[0].albumartist == self.items[2].artist def test_asis_heterogenous_va(self): self.items[0].artist = "another artist" @@ -1086,8 +1031,8 @@ class InferAlbumDataTest(_common.TestCase): self.task.align_album_level_fields() - self.assertTrue(self.items[0].comp) - self.assertEqual(self.items[0].albumartist, "Various Artists") + assert self.items[0].comp + assert 
self.items[0].albumartist == "Various Artists" def test_asis_comp_applied_to_all_items(self): self.items[0].artist = "another artist" @@ -1097,8 +1042,8 @@ class InferAlbumDataTest(_common.TestCase): self.task.align_album_level_fields() for item in self.items: - self.assertTrue(item.comp) - self.assertEqual(item.albumartist, "Various Artists") + assert item.comp + assert item.albumartist == "Various Artists" def test_asis_majority_artist_single_artist(self): self.items[0].artist = "another artist" @@ -1106,8 +1051,8 @@ class InferAlbumDataTest(_common.TestCase): self.task.align_album_level_fields() - self.assertFalse(self.items[0].comp) - self.assertEqual(self.items[0].albumartist, self.items[2].artist) + assert not self.items[0].comp + assert self.items[0].albumartist == self.items[2].artist def test_asis_track_albumartist_override(self): self.items[0].artist = "another artist" @@ -1119,18 +1064,16 @@ class InferAlbumDataTest(_common.TestCase): self.task.align_album_level_fields() - self.assertEqual(self.items[0].albumartist, "some album artist") - self.assertEqual(self.items[0].mb_albumartistid, "some album artist id") + assert self.items[0].albumartist == "some album artist" + assert self.items[0].mb_albumartistid == "some album artist id" def test_apply_gets_artist_and_id(self): self.task.set_choice(AlbumMatch(0, None, {}, set(), set())) # APPLY self.task.align_album_level_fields() - self.assertEqual(self.items[0].albumartist, self.items[0].artist) - self.assertEqual( - self.items[0].mb_albumartistid, self.items[0].mb_artistid - ) + assert self.items[0].albumartist == self.items[0].artist + assert self.items[0].mb_albumartistid == self.items[0].mb_artistid def test_apply_lets_album_values_override(self): for item in self.items: @@ -1140,15 +1083,15 @@ class InferAlbumDataTest(_common.TestCase): self.task.align_album_level_fields() - self.assertEqual(self.items[0].albumartist, "some album artist") - self.assertEqual(self.items[0].mb_albumartistid, "some album artist id") + assert self.items[0].albumartist == "some album artist" + assert self.items[0].mb_albumartistid == "some album artist id" def test_small_single_artist_album(self): self.items = [self.items[0]] self.task.items = self.items self.task.set_choice(importer.action.ASIS) self.task.align_album_level_fields() - self.assertFalse(self.items[0].comp) + assert not self.items[0].comp def match_album_mock(*args, **kwargs): @@ -1170,47 +1113,43 @@ def match_album_mock(*args, **kwargs): @patch("beets.autotag.mb.match_album", Mock(side_effect=match_album_mock)) -class ImportDuplicateAlbumTest( - unittest.TestCase, TestHelper, _common.Assertions -): +class ImportDuplicateAlbumTest(ImportTestCase): def setUp(self): - self.setup_beets() + super().setUp() # Original album self.add_album_fixture(albumartist="artist", album="album") # Create import session - self.importer = self.create_importer() - config["import"]["autotag"] = True - config["import"]["duplicate_keys"]["album"] = "albumartist album" - - def tearDown(self): - self.teardown_beets() + self.prepare_album_for_import(1) + self.importer = self.setup_importer( + duplicate_keys={"album": "albumartist album"} + ) def test_remove_duplicate_album(self): item = self.lib.items().get() - self.assertEqual(item.title, "t\xeftle 0") + assert item.title == "t\xeftle 0" self.assertExists(item.path) self.importer.default_resolution = self.importer.Resolution.REMOVE self.importer.run() self.assertNotExists(item.path) - self.assertEqual(len(self.lib.albums()), 1) - 
self.assertEqual(len(self.lib.items()), 1) + assert len(self.lib.albums()) == 1 + assert len(self.lib.items()) == 1 item = self.lib.items().get() - self.assertEqual(item.title, "new title") + assert item.title == "new title" def test_no_autotag_keeps_duplicate_album(self): config["import"]["autotag"] = False item = self.lib.items().get() - self.assertEqual(item.title, "t\xeftle 0") + assert item.title == "t\xeftle 0" self.assertExists(item.path) # Imported item has the same artist and album as the one in the # library. import_file = os.path.join( - self.importer.paths[0], b"album 0", b"track 0.mp3" + self.importer.paths[0], b"album", b"track_1.mp3" ) import_file = MediaFile(import_file) import_file.artist = item["artist"] @@ -1222,33 +1161,33 @@ class ImportDuplicateAlbumTest( self.importer.run() self.assertExists(item.path) - self.assertEqual(len(self.lib.albums()), 2) - self.assertEqual(len(self.lib.items()), 2) + assert len(self.lib.albums()) == 2 + assert len(self.lib.items()) == 2 def test_keep_duplicate_album(self): self.importer.default_resolution = self.importer.Resolution.KEEPBOTH self.importer.run() - self.assertEqual(len(self.lib.albums()), 2) - self.assertEqual(len(self.lib.items()), 2) + assert len(self.lib.albums()) == 2 + assert len(self.lib.items()) == 2 def test_skip_duplicate_album(self): item = self.lib.items().get() - self.assertEqual(item.title, "t\xeftle 0") + assert item.title == "t\xeftle 0" self.importer.default_resolution = self.importer.Resolution.SKIP self.importer.run() - self.assertEqual(len(self.lib.albums()), 1) - self.assertEqual(len(self.lib.items()), 1) + assert len(self.lib.albums()) == 1 + assert len(self.lib.items()) == 1 item = self.lib.items().get() - self.assertEqual(item.title, "t\xeftle 0") + assert item.title == "t\xeftle 0" def test_merge_duplicate_album(self): self.importer.default_resolution = self.importer.Resolution.MERGE self.importer.run() - self.assertEqual(len(self.lib.albums()), 1) + assert len(self.lib.albums()) == 1 def test_twice_in_import_dir(self): self.skipTest("write me") @@ -1258,7 +1197,7 @@ class ImportDuplicateAlbumTest( item = self.lib.items().get() import_file = MediaFile( - os.path.join(self.importer.paths[0], b"album 0", b"track 0.mp3") + os.path.join(self.importer.paths[0], b"album", b"track_1.mp3") ) import_file.artist = item["artist"] import_file.albumartist = item["artist"] @@ -1269,8 +1208,8 @@ class ImportDuplicateAlbumTest( self.importer.default_resolution = self.importer.Resolution.SKIP self.importer.run() - self.assertEqual(len(self.lib.albums()), 2) - self.assertEqual(len(self.lib.items()), 2) + assert len(self.lib.albums()) == 2 + assert len(self.lib.items()) == 2 def add_album_fixture(self, **kwargs): # TODO move this into upstream @@ -1294,11 +1233,9 @@ def match_track_mock(*args, **kwargs): @patch("beets.autotag.mb.match_track", Mock(side_effect=match_track_mock)) -class ImportDuplicateSingletonTest( - unittest.TestCase, TestHelper, _common.Assertions -): +class ImportDuplicateSingletonTest(ImportTestCase): def setUp(self): - self.setup_beets() + super().setUp() # Original file in library self.add_item_fixture( @@ -1306,57 +1243,54 @@ class ImportDuplicateSingletonTest( ) # Import session - self.importer = self.create_importer() - config["import"]["autotag"] = True - config["import"]["singletons"] = True - config["import"]["duplicate_keys"]["item"] = "artist title" - - def tearDown(self): - self.teardown_beets() + self.prepare_album_for_import(1) + self.importer = self.setup_singleton_importer( + 
duplicate_keys={"album": "artist title"} + ) def test_remove_duplicate(self): item = self.lib.items().get() - self.assertEqual(item.mb_trackid, "old trackid") + assert item.mb_trackid == "old trackid" self.assertExists(item.path) self.importer.default_resolution = self.importer.Resolution.REMOVE self.importer.run() self.assertNotExists(item.path) - self.assertEqual(len(self.lib.items()), 1) + assert len(self.lib.items()) == 1 item = self.lib.items().get() - self.assertEqual(item.mb_trackid, "new trackid") + assert item.mb_trackid == "new trackid" def test_keep_duplicate(self): - self.assertEqual(len(self.lib.items()), 1) + assert len(self.lib.items()) == 1 self.importer.default_resolution = self.importer.Resolution.KEEPBOTH self.importer.run() - self.assertEqual(len(self.lib.items()), 2) + assert len(self.lib.items()) == 2 def test_skip_duplicate(self): item = self.lib.items().get() - self.assertEqual(item.mb_trackid, "old trackid") + assert item.mb_trackid == "old trackid" self.importer.default_resolution = self.importer.Resolution.SKIP self.importer.run() - self.assertEqual(len(self.lib.items()), 1) + assert len(self.lib.items()) == 1 item = self.lib.items().get() - self.assertEqual(item.mb_trackid, "old trackid") + assert item.mb_trackid == "old trackid" def test_keep_when_extra_key_is_different(self): config["import"]["duplicate_keys"]["item"] = "artist title flex" item = self.lib.items().get() item.flex = "different" item.store() - self.assertEqual(len(self.lib.items()), 1) + assert len(self.lib.items()) == 1 self.importer.default_resolution = self.importer.Resolution.SKIP self.importer.run() - self.assertEqual(len(self.lib.items()), 2) + assert len(self.lib.items()) == 2 def test_twice_in_import_dir(self): self.skipTest("write me") @@ -1369,84 +1303,71 @@ class ImportDuplicateSingletonTest( return item -class TagLogTest(_common.TestCase): +class TagLogTest(BeetsTestCase): def test_tag_log_line(self): sio = StringIO() handler = logging.StreamHandler(sio) session = _common.import_session(loghandler=handler) session.tag_log("status", "path") - self.assertIn("status path", sio.getvalue()) + assert "status path" in sio.getvalue() def test_tag_log_unicode(self): sio = StringIO() handler = logging.StreamHandler(sio) session = _common.import_session(loghandler=handler) session.tag_log("status", "caf\xe9") # send unicode - self.assertIn("status caf\xe9", sio.getvalue()) + assert "status caf\xe9" in sio.getvalue() -class ResumeImportTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - - def tearDown(self): - self.teardown_beets() - +class ResumeImportTest(ImportTestCase): @patch("beets.plugins.send") def test_resume_album(self, plugins_send): - self.importer = self.create_importer(album_count=2) - self.config["import"]["resume"] = True + self.prepare_albums_for_import(2) + self.importer = self.setup_importer(autotag=False, resume=True) # Aborts import after one album. This also ensures that we skip # the first album in the second try. 
def raise_exception(event, **kwargs): if event == "album_imported": - raise importer.ImportAbort + raise importer.ImportAbortError plugins_send.side_effect = raise_exception self.importer.run() - self.assertEqual(len(self.lib.albums()), 1) - self.assertIsNotNone(self.lib.albums("album:album 0").get()) + assert len(self.lib.albums()) == 1 + assert self.lib.albums("album:'Album 1'").get() is not None self.importer.run() - self.assertEqual(len(self.lib.albums()), 2) - self.assertIsNotNone(self.lib.albums("album:album 1").get()) + assert len(self.lib.albums()) == 2 + assert self.lib.albums("album:'Album 2'").get() is not None @patch("beets.plugins.send") def test_resume_singleton(self, plugins_send): - self.importer = self.create_importer(item_count=2) - self.config["import"]["resume"] = True - self.config["import"]["singletons"] = True + self.prepare_album_for_import(2) + self.importer = self.setup_singleton_importer( + autotag=False, resume=True + ) # Aborts import after one track. This also ensures that we skip # the first album in the second try. def raise_exception(event, **kwargs): if event == "item_imported": - raise importer.ImportAbort + raise importer.ImportAbortError plugins_send.side_effect = raise_exception self.importer.run() - self.assertEqual(len(self.lib.items()), 1) - self.assertIsNotNone(self.lib.items("title:track 0").get()) + assert len(self.lib.items()) == 1 + assert self.lib.items("title:'Track 1'").get() is not None self.importer.run() - self.assertEqual(len(self.lib.items()), 2) - self.assertIsNotNone(self.lib.items("title:track 1").get()) + assert len(self.lib.items()) == 2 + assert self.lib.items("title:'Track 1'").get() is not None -class IncrementalImportTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - self.config["import"]["incremental"] = True - - def tearDown(self): - self.teardown_beets() - +class IncrementalImportTest(AsIsImporterMixin, ImportTestCase): def test_incremental_album(self): - importer = self.create_importer(album_count=1) - importer.run() + importer = self.run_asis_importer(incremental=True) # Change album name so the original file would be imported again # if incremental was off. @@ -1454,14 +1375,11 @@ class IncrementalImportTest(unittest.TestCase, TestHelper): album["album"] = "edited album" album.store() - importer = self.create_importer(album_count=1) importer.run() - self.assertEqual(len(self.lib.albums()), 2) + assert len(self.lib.albums()) == 2 def test_incremental_item(self): - self.config["import"]["singletons"] = True - importer = self.create_importer(item_count=1) - importer.run() + importer = self.run_asis_importer(incremental=True, singletons=True) # Change track name so the original file would be imported again # if incremental was off. 
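The resume tests above abort the pipeline part-way through by patching beets.plugins.send and raising the (newly renamed) importer.ImportAbortError from its side effect. A standalone sketch of that pattern, using only unittest.mock and the names visible in the diff:

from unittest.mock import patch

from beets import importer


def abort_after_first_album(event, **kwargs):
    # Abort once the first album has been imported, so that a second run
    # can verify that resume/incremental state was recorded for it.
    if event == "album_imported":
        raise importer.ImportAbortError


# Applied roughly as in the tests above, e.g.:
#     with patch("beets.plugins.send", side_effect=abort_after_first_album):
#         session.run()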
@@ -1469,16 +1387,14 @@ class IncrementalImportTest(unittest.TestCase, TestHelper): item["artist"] = "edited artist" item.store() - importer = self.create_importer(item_count=1) importer.run() - self.assertEqual(len(self.lib.items()), 2) + assert len(self.lib.items()) == 2 def test_invalid_state_file(self): - importer = self.create_importer() with open(self.config["statefile"].as_filename(), "wb") as f: f.write(b"000") - importer.run() - self.assertEqual(len(self.lib.albums()), 1) + self.run_asis_importer(incremental=True) + assert len(self.lib.albums()) == 1 def _mkmp3(path): @@ -1488,7 +1404,7 @@ def _mkmp3(path): ) -class AlbumsInDirTest(_common.TestCase): +class AlbumsInDirTest(BeetsTestCase): def setUp(self): super().setUp() @@ -1510,27 +1426,27 @@ class AlbumsInDirTest(_common.TestCase): def test_finds_all_albums(self): albums = list(albums_in_dir(self.base)) - self.assertEqual(len(albums), 4) + assert len(albums) == 4 def test_separates_contents(self): found = [] for _, album in albums_in_dir(self.base): found.append(re.search(rb"album(.)song", album[0]).group(1)) - self.assertIn(b"1", found) - self.assertIn(b"2", found) - self.assertIn(b"3", found) - self.assertIn(b"4", found) + assert b"1" in found + assert b"2" in found + assert b"3" in found + assert b"4" in found def test_finds_multiple_songs(self): for _, album in albums_in_dir(self.base): n = re.search(rb"album(.)song", album[0]).group(1) if n == b"1": - self.assertEqual(len(album), 2) + assert len(album) == 2 else: - self.assertEqual(len(album), 1) + assert len(album) == 1 -class MultiDiscAlbumsInDirTest(_common.TestCase): +class MultiDiscAlbumsInDirTest(BeetsTestCase): def create_music(self, files=True, ascii=True): """Create some music in multiple album directories. @@ -1603,54 +1519,54 @@ class MultiDiscAlbumsInDirTest(_common.TestCase): def test_coalesce_nested_album_multiple_subdirs(self): self.create_music() albums = list(albums_in_dir(self.base)) - self.assertEqual(len(albums), 4) + assert len(albums) == 4 root, items = albums[0] - self.assertEqual(root, self.dirs[0:3]) - self.assertEqual(len(items), 3) + assert root == self.dirs[0:3] + assert len(items) == 3 def test_coalesce_nested_album_single_subdir(self): self.create_music() albums = list(albums_in_dir(self.base)) root, items = albums[1] - self.assertEqual(root, self.dirs[3:5]) - self.assertEqual(len(items), 1) + assert root == self.dirs[3:5] + assert len(items) == 1 def test_coalesce_flattened_album_case_typo(self): self.create_music() albums = list(albums_in_dir(self.base)) root, items = albums[2] - self.assertEqual(root, self.dirs[6:8]) - self.assertEqual(len(items), 2) + assert root == self.dirs[6:8] + assert len(items) == 2 def test_single_disc_album(self): self.create_music() albums = list(albums_in_dir(self.base)) root, items = albums[3] - self.assertEqual(root, self.dirs[8:]) - self.assertEqual(len(items), 1) + assert root == self.dirs[8:] + assert len(items) == 1 def test_do_not_yield_empty_album(self): self.create_music(files=False) albums = list(albums_in_dir(self.base)) - self.assertEqual(len(albums), 0) + assert len(albums) == 0 def test_single_disc_unicode(self): self.create_music(ascii=False) albums = list(albums_in_dir(self.base)) root, items = albums[3] - self.assertEqual(root, self.dirs[8:]) - self.assertEqual(len(items), 1) + assert root == self.dirs[8:] + assert len(items) == 1 def test_coalesce_multiple_unicode(self): self.create_music(ascii=False) albums = list(albums_in_dir(self.base)) - self.assertEqual(len(albums), 4) + assert len(albums) 
== 4 root, items = albums[0] - self.assertEqual(root, self.dirs[0:3]) - self.assertEqual(len(items), 3) + assert root == self.dirs[0:3] + assert len(items) == 3 -class ReimportTest(unittest.TestCase, ImportHelper, _common.Assertions): +class ReimportTest(ImportTestCase): """Test "re-imports", in which the autotagging machinery is used for music that's already in the library. @@ -1660,7 +1576,7 @@ class ReimportTest(unittest.TestCase, ImportHelper, _common.Assertions): """ def setUp(self): - self.setup_beets() + super().setUp() # The existing album. album = self.add_album_fixture() @@ -1678,11 +1594,11 @@ class ReimportTest(unittest.TestCase, ImportHelper, _common.Assertions): self.matcher.matching = AutotagStub.GOOD def tearDown(self): - self.teardown_beets() + super().tearDown() self.matcher.restore() def _setup_session(self, singletons=False): - self._setup_import_session(self._album().path, singletons=singletons) + self.setup_importer(import_dir=self.libdir, singletons=singletons) self.importer.add_choice(importer.action.APPLY) def _album(self): @@ -1693,45 +1609,45 @@ class ReimportTest(unittest.TestCase, ImportHelper, _common.Assertions): def test_reimported_album_gets_new_metadata(self): self._setup_session() - self.assertEqual(self._album().album, "\xe4lbum") + assert self._album().album == "\xe4lbum" self.importer.run() - self.assertEqual(self._album().album, "the album") + assert self._album().album == "the album" def test_reimported_album_preserves_flexattr(self): self._setup_session() self.importer.run() - self.assertEqual(self._album().foo, "bar") + assert self._album().foo == "bar" def test_reimported_album_preserves_added(self): self._setup_session() self.importer.run() - self.assertEqual(self._album().added, 4242.0) + assert self._album().added == 4242.0 def test_reimported_album_preserves_item_flexattr(self): self._setup_session() self.importer.run() - self.assertEqual(self._item().baz, "qux") + assert self._item().baz == "qux" def test_reimported_album_preserves_item_added(self): self._setup_session() self.importer.run() - self.assertEqual(self._item().added, 4747.0) + assert self._item().added == 4747.0 def test_reimported_item_gets_new_metadata(self): self._setup_session(True) - self.assertEqual(self._item().title, "t\xeftle 0") + assert self._item().title == "t\xeftle 0" self.importer.run() - self.assertEqual(self._item().title, "full") + assert self._item().title == "full" def test_reimported_item_preserves_flexattr(self): self._setup_session(True) self.importer.run() - self.assertEqual(self._item().baz, "qux") + assert self._item().baz == "qux" def test_reimported_item_preserves_added(self): self._setup_session(True) self.importer.run() - self.assertEqual(self._item().added, 4747.0) + assert self._item().added == 4747.0 def test_reimported_item_preserves_art(self): self._setup_session() @@ -1743,109 +1659,70 @@ class ReimportTest(unittest.TestCase, ImportHelper, _common.Assertions): self.importer.run() new_album = self._album() new_artpath = new_album.art_destination(art_source) - self.assertEqual(new_album.artpath, new_artpath) + assert new_album.artpath == new_artpath self.assertExists(new_artpath) if new_artpath != old_artpath: self.assertNotExists(old_artpath) + def test_reimported_album_has_new_flexattr(self): + self._setup_session() + assert self._album().get("bandcamp_album_id") is None + self.importer.run() + assert self._album().bandcamp_album_id == "bc_url" + def test_reimported_album_not_preserves_flexattr(self): self._setup_session() - 
self.assertEqual(self._album().data_source, "original_source") + assert self._album().data_source == "original_source" self.importer.run() - self.assertEqual(self._album().data_source, "match_source") + assert self._album().data_source == "match_source" -class ImportPretendTest(_common.TestCase, ImportHelper): +class ImportPretendTest(ImportTestCase): """Test the pretend commandline option""" - def __init__(self, method_name="runTest"): - super().__init__(method_name) - self.matcher = None - def setUp(self): - self.io = _common.DummyIO() - self.setup_beets() - self.__create_import_dir() - self.__create_empty_import_dir() - self._setup_import_session() - config["import"]["pretend"] = True + super().setUp() self.matcher = AutotagStub().install() self.io.install() + self.album_track_path = self.prepare_album_for_import(1)[0] + self.single_path = self.prepare_track_for_import(2, self.import_path) + self.album_path = self.album_track_path.parent + def tearDown(self): - self.teardown_beets() + super().tearDown() self.matcher.restore() - def __create_import_dir(self): - self._create_import_dir(1) - resource_path = os.path.join(_common.RSRC, b"empty.mp3") - single_path = os.path.join(self.import_dir, b"track_2.mp3") - shutil.copy(syspath(resource_path), syspath(single_path)) - self.import_paths = [ - os.path.join(self.import_dir, b"the_album"), - single_path, - ] - self.import_files = [ - displayable_path( - os.path.join(self.import_paths[0], b"track_1.mp3") - ), - displayable_path(single_path), - ] - - def __create_empty_import_dir(self): - path = os.path.join(self.temp_dir, b"empty") - os.makedirs(syspath(path)) - self.empty_path = path - - def __run(self, import_paths, singletons=True): - self._setup_import_session(singletons=singletons) - self.importer.paths = import_paths - + def __run(self, importer): with capture_log() as logs: - self.importer.run() + importer.run() - logs = [line for line in logs if not line.startswith("Sending event:")] + assert len(self.lib.items()) == 0 + assert len(self.lib.albums()) == 0 - self.assertEqual(len(self.lib.items()), 0) - self.assertEqual(len(self.lib.albums()), 0) - - return logs + return [line for line in logs if not line.startswith("Sending event:")] def test_import_singletons_pretend(self): - logs = self.__run(self.import_paths) - - self.assertEqual( - logs, - [ - "Singleton: %s" % displayable_path(self.import_files[0]), - "Singleton: %s" % displayable_path(self.import_paths[1]), - ], - ) + assert self.__run(self.setup_singleton_importer(pretend=True)) == [ + f"Singleton: {self.single_path}", + f"Singleton: {self.album_track_path}", + ] def test_import_album_pretend(self): - logs = self.__run(self.import_paths, singletons=False) - - self.assertEqual( - logs, - [ - "Album: %s" % displayable_path(self.import_paths[0]), - " %s" % displayable_path(self.import_files[0]), - "Album: %s" % displayable_path(self.import_paths[1]), - " %s" % displayable_path(self.import_paths[1]), - ], - ) + assert self.__run(self.setup_importer(pretend=True)) == [ + f"Album: {self.import_path}", + f" {self.single_path}", + f"Album: {self.album_path}", + f" {self.album_track_path}", + ] def test_import_pretend_empty(self): - logs = self.__run([self.empty_path]) + empty_path = Path(os.fsdecode(self.temp_dir)) / "empty" + empty_path.mkdir() - self.assertEqual( - logs, - [ - "No files imported from {}".format( - displayable_path(self.empty_path) - ) - ], - ) + importer = self.setup_importer(pretend=True, import_dir=empty_path) + + assert self.__run(importer) == [f"No files imported 
from {empty_path}"] # Helpers for ImportMusicBrainzIdTest. @@ -1857,7 +1734,7 @@ def mocked_get_release_by_id( """Mimic musicbrainzngs.get_release_by_id, accepting only a restricted list of MB ids (ID_RELEASE_0, ID_RELEASE_1). The returned dict differs only in the release title and artist name, so that ID_RELEASE_0 is a closer match - to the items created by ImportHelper._create_import_dir().""" + to the items created by ImportHelper.prepare_album_for_import().""" # Map IDs to (release title, artist), so the distances are different. releases = { ImportMusicBrainzIdTest.ID_RELEASE_0: ("VALID_RELEASE_0", "TAG ARTIST"), @@ -1910,7 +1787,8 @@ def mocked_get_recording_by_id( """Mimic musicbrainzngs.get_recording_by_id, accepting only a restricted list of MB ids (ID_RECORDING_0, ID_RECORDING_1). The returned dict differs only in the recording title and artist name, so that ID_RECORDING_0 is a - closer match to the items created by ImportHelper._create_import_dir().""" + closer match to the items created by ImportHelper.prepare_album_for_import(). + """ # Map IDs to (recording title, artist), so the distances are different. releases = { ImportMusicBrainzIdTest.ID_RECORDING_0: ( @@ -1948,7 +1826,7 @@ def mocked_get_recording_by_id( "musicbrainzngs.get_release_by_id", Mock(side_effect=mocked_get_release_by_id), ) -class ImportMusicBrainzIdTest(_common.TestCase, ImportHelper): +class ImportMusicBrainzIdTest(ImportTestCase): """Test the --musicbrainzid argument.""" MB_RELEASE_PREFIX = "https://musicbrainz.org/release/" @@ -1959,55 +1837,52 @@ class ImportMusicBrainzIdTest(_common.TestCase, ImportHelper): ID_RECORDING_1 = "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb" def setUp(self): - self.setup_beets() - self._create_import_dir(1) - - def tearDown(self): - self.teardown_beets() + super().setUp() + self.prepare_album_for_import(1) def test_one_mbid_one_album(self): - self.config["import"]["search_ids"] = [ - self.MB_RELEASE_PREFIX + self.ID_RELEASE_0 - ] - self._setup_import_session() + self.setup_importer( + search_ids=[self.MB_RELEASE_PREFIX + self.ID_RELEASE_0] + ) self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(self.lib.albums().get().album, "VALID_RELEASE_0") + assert self.lib.albums().get().album == "VALID_RELEASE_0" def test_several_mbid_one_album(self): - self.config["import"]["search_ids"] = [ - self.MB_RELEASE_PREFIX + self.ID_RELEASE_0, - self.MB_RELEASE_PREFIX + self.ID_RELEASE_1, - ] - self._setup_import_session() + self.setup_importer( + search_ids=[ + self.MB_RELEASE_PREFIX + self.ID_RELEASE_0, + self.MB_RELEASE_PREFIX + self.ID_RELEASE_1, + ] + ) self.importer.add_choice(2) # Pick the 2nd best match (release 1). 
self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(self.lib.albums().get().album, "VALID_RELEASE_1") + assert self.lib.albums().get().album == "VALID_RELEASE_1" def test_one_mbid_one_singleton(self): - self.config["import"]["search_ids"] = [ - self.MB_RECORDING_PREFIX + self.ID_RECORDING_0 - ] - self._setup_import_session(singletons=True) + self.setup_singleton_importer( + search_ids=[self.MB_RECORDING_PREFIX + self.ID_RECORDING_0] + ) self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(self.lib.items().get().title, "VALID_RECORDING_0") + assert self.lib.items().get().title == "VALID_RECORDING_0" def test_several_mbid_one_singleton(self): - self.config["import"]["search_ids"] = [ - self.MB_RECORDING_PREFIX + self.ID_RECORDING_0, - self.MB_RECORDING_PREFIX + self.ID_RECORDING_1, - ] - self._setup_import_session(singletons=True) + self.setup_singleton_importer( + search_ids=[ + self.MB_RECORDING_PREFIX + self.ID_RECORDING_0, + self.MB_RECORDING_PREFIX + self.ID_RECORDING_1, + ] + ) self.importer.add_choice(2) # Pick the 2nd best match (recording 1). self.importer.add_choice(importer.action.APPLY) self.importer.run() - self.assertEqual(self.lib.items().get().title, "VALID_RECORDING_1") + assert self.lib.items().get().title == "VALID_RECORDING_1" def test_candidates_album(self): """Test directly ImportTask.lookup_candidates().""" @@ -2021,10 +1896,9 @@ class ImportMusicBrainzIdTest(_common.TestCase, ImportHelper): ] task.lookup_candidates() - self.assertEqual( - {"VALID_RELEASE_0", "VALID_RELEASE_1"}, - {c.info.album for c in task.candidates}, - ) + assert {"VALID_RELEASE_0", "VALID_RELEASE_1"} == { + c.info.album for c in task.candidates + } def test_candidates_singleton(self): """Test directly SingletonImportTask.lookup_candidates().""" @@ -2038,15 +1912,6 @@ class ImportMusicBrainzIdTest(_common.TestCase, ImportHelper): ] task.lookup_candidates() - self.assertEqual( - {"VALID_RECORDING_0", "VALID_RECORDING_1"}, - {c.info.title for c in task.candidates}, - ) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert {"VALID_RECORDING_0", "VALID_RECORDING_1"} == { + c.info.title for c in task.candidates + } diff --git a/test/test_library.py b/test/test_library.py index c9d0440b5..b5e6d4eeb 100644 --- a/test/test_library.py +++ b/test/test_library.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Tests for non-query database functions of Item. -""" +"""Tests for non-query database functions of Item.""" import os import os.path @@ -25,44 +24,46 @@ import time import unicodedata import unittest +import pytest from mediafile import MediaFile, UnreadableFileError import beets.dbcore.query import beets.library from beets import config, plugins, util +from beets.library import Album from beets.test import _common from beets.test._common import item -from beets.test.helper import TestHelper +from beets.test.helper import BeetsTestCase, ItemInDBTestCase from beets.util import bytestring_path, syspath # Shortcut to path normalization. 
np = util.normpath -class LoadTest(_common.LibTestCase): +class LoadTest(ItemInDBTestCase): def test_load_restores_data_from_db(self): original_title = self.i.title self.i.title = "something" self.i.load() - self.assertEqual(original_title, self.i.title) + assert original_title == self.i.title def test_load_clears_dirty_flags(self): self.i.artist = "something" - self.assertIn("artist", self.i._dirty) + assert "artist" in self.i._dirty self.i.load() - self.assertNotIn("artist", self.i._dirty) + assert "artist" not in self.i._dirty -class StoreTest(_common.LibTestCase): +class StoreTest(ItemInDBTestCase): def test_store_changes_database_value(self): self.i.year = 1987 self.i.store() new_year = ( self.lib._connection() - .execute("select year from items where " 'title="the title"') + .execute("select year from items where title = ?", (self.i.title,)) .fetchone()["year"] ) - self.assertEqual(new_year, 1987) + assert new_year == 1987 def test_store_only_writes_dirty_fields(self): original_genre = self.i.genre @@ -70,19 +71,18 @@ class StoreTest(_common.LibTestCase): self.i.store() new_genre = ( self.lib._connection() - .execute("select genre from items where " 'title="the title"') + .execute("select genre from items where title = ?", (self.i.title,)) .fetchone()["genre"] ) - self.assertEqual(new_genre, original_genre) + assert new_genre == original_genre def test_store_clears_dirty_flags(self): self.i.composer = "tvp" self.i.store() - self.assertNotIn("composer", self.i._dirty) + assert "composer" not in self.i._dirty def test_store_album_cascades_flex_deletes(self): - album = _common.album() - album.flex1 = "Flex-1" + album = Album(flex1="Flex-1") self.lib.add(album) item = _common.item() item.album_id = album.id @@ -90,14 +90,13 @@ class StoreTest(_common.LibTestCase): self.lib.add(item) del album.flex1 album.store() - self.assertNotIn("flex1", album) - self.assertNotIn("flex1", album.items()[0]) + assert "flex1" not in album + assert "flex1" not in album.items()[0] -class AddTest(_common.TestCase): +class AddTest(BeetsTestCase): def setUp(self): super().setUp() - self.lib = beets.library.Library(":memory:") self.i = item() def test_item_add_inserts_row(self): @@ -105,11 +104,12 @@ class AddTest(_common.TestCase): new_grouping = ( self.lib._connection() .execute( - "select grouping from items " 'where composer="the composer"' + "select grouping from items where composer = ?", + (self.i.composer,), ) .fetchone()["grouping"] ) - self.assertEqual(new_grouping, self.i.grouping) + assert new_grouping == self.i.grouping def test_library_add_path_inserts_row(self): i = beets.library.Item.from_path( @@ -119,82 +119,77 @@ class AddTest(_common.TestCase): new_grouping = ( self.lib._connection() .execute( - "select grouping from items " 'where composer="the composer"' + "select grouping from items where composer = ?", + (self.i.composer,), ) .fetchone()["grouping"] ) - self.assertEqual(new_grouping, self.i.grouping) + assert new_grouping == self.i.grouping -class RemoveTest(_common.LibTestCase): +class RemoveTest(ItemInDBTestCase): def test_remove_deletes_from_db(self): self.i.remove() c = self.lib._connection().execute("select * from items") - self.assertIsNone(c.fetchone()) + assert c.fetchone() is None -class GetSetTest(_common.TestCase): +class GetSetTest(BeetsTestCase): def setUp(self): super().setUp() self.i = item() def test_set_changes_value(self): self.i.bpm = 4915 - self.assertEqual(self.i.bpm, 4915) + assert self.i.bpm == 4915 def test_set_sets_dirty_flag(self): self.i.comp = not self.i.comp 
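The StoreTest and AddTest hunks above also stop interpolating values such as the item title into the SQL text and use sqlite parameter binding instead. A small self-contained sketch of the same idiom with the standard sqlite3 module:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE items (title TEXT, year INTEGER)")
conn.execute("INSERT INTO items VALUES (?, ?)", ("the title", 1987))
# Each "?" is bound to the corresponding tuple element, so quoting and
# escaping of the value are handled by sqlite itself.
row = conn.execute(
    "SELECT year FROM items WHERE title = ?", ("the title",)
).fetchone()
assert row[0] == 1987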
- self.assertIn("comp", self.i._dirty) + assert "comp" in self.i._dirty def test_set_does_not_dirty_if_value_unchanged(self): self.i.title = self.i.title - self.assertNotIn("title", self.i._dirty) + assert "title" not in self.i._dirty def test_invalid_field_raises_attributeerror(self): - self.assertRaises(AttributeError, getattr, self.i, "xyzzy") + with pytest.raises(AttributeError): + self.i.xyzzy def test_album_fallback(self): # integration test of item-album fallback - lib = beets.library.Library(":memory:") - i = item(lib) - album = lib.add_album([i]) + i = item(self.lib) + album = self.lib.add_album([i]) album["flex"] = "foo" album.store() - self.assertIn("flex", i) - self.assertNotIn("flex", i.keys(with_album=False)) - self.assertEqual(i["flex"], "foo") - self.assertEqual(i.get("flex"), "foo") - self.assertIsNone(i.get("flex", with_album=False)) - self.assertIsNone(i.get("flexx")) + assert "flex" in i + assert "flex" not in i.keys(with_album=False) + assert i["flex"] == "foo" + assert i.get("flex") == "foo" + assert i.get("flex", with_album=False) is None + assert i.get("flexx") is None -class DestinationTest(_common.TestCase): +class DestinationTest(BeetsTestCase): + """Confirm tests handle temporary directory path containing '.'""" + + def create_temp_dir(self, **kwargs): + kwargs["prefix"] = "." + super().create_temp_dir(**kwargs) + def setUp(self): super().setUp() - # default directory is ~/Music and the only reason why it was switched - # to ~/.Music is to confirm that tests works well when path to - # temporary directory contains . - self.lib = beets.library.Library(":memory:", "~/.Music") self.i = item(self.lib) - def tearDown(self): - super().tearDown() - self.lib._connection().close() - - # Reset config if it was changed in test cases - config.clear() - config.read(user=False, defaults=True) - def test_directory_works_with_trailing_slash(self): self.lib.directory = b"one/" self.lib.path_formats = [("default", "two")] - self.assertEqual(self.i.destination(), np("one/two")) + assert self.i.destination() == np("one/two") def test_directory_works_without_trailing_slash(self): self.lib.directory = b"one" self.lib.path_formats = [("default", "two")] - self.assertEqual(self.i.destination(), np("one/two")) + assert self.i.destination() == np("one/two") def test_destination_substitutes_metadata_values(self): self.lib.directory = b"base" @@ -202,19 +197,19 @@ class DestinationTest(_common.TestCase): self.i.title = "three" self.i.artist = "two" self.i.album = "one" - self.assertEqual(self.i.destination(), np("base/one/two three")) + assert self.i.destination() == np("base/one/two three") def test_destination_preserves_extension(self): self.lib.directory = b"base" self.lib.path_formats = [("default", "$title")] self.i.path = "hey.audioformat" - self.assertEqual(self.i.destination(), np("base/the title.audioformat")) + assert self.i.destination() == np("base/the title.audioformat") def test_lower_case_extension(self): self.lib.directory = b"base" self.lib.path_formats = [("default", "$title")] self.i.path = "hey.MP3" - self.assertEqual(self.i.destination(), np("base/the title.mp3")) + assert self.i.destination() == np("base/the title.mp3") def test_destination_pads_some_indices(self): self.lib.directory = b"base" @@ -226,7 +221,7 @@ class DestinationTest(_common.TestCase): self.i.disc = 3 self.i.disctotal = 4 self.i.bpm = 5 - self.assertEqual(self.i.destination(), np("base/01 02 03 04 5")) + assert self.i.destination() == np("base/01 02 03 04 5") def 
test_destination_pads_date_values(self): self.lib.directory = b"base" @@ -234,52 +229,52 @@ class DestinationTest(_common.TestCase): self.i.year = 1 self.i.month = 2 self.i.day = 3 - self.assertEqual(self.i.destination(), np("base/0001-02-03")) + assert self.i.destination() == np("base/0001-02-03") def test_destination_escapes_slashes(self): self.i.album = "one/two" dest = self.i.destination() - self.assertIn(b"one", dest) - self.assertIn(b"two", dest) - self.assertNotIn(b"one/two", dest) + assert b"one" in dest + assert b"two" in dest + assert b"one/two" not in dest def test_destination_escapes_leading_dot(self): self.i.album = ".something" dest = self.i.destination() - self.assertIn(b"something", dest) - self.assertNotIn(b"/.something", dest) + assert b"something" in dest + assert b"/.something" not in dest def test_destination_preserves_legitimate_slashes(self): self.i.artist = "one" self.i.album = "two" dest = self.i.destination() - self.assertIn(os.path.join(b"one", b"two"), dest) + assert os.path.join(b"one", b"two") in dest def test_destination_long_names_truncated(self): self.i.title = "X" * 300 self.i.artist = "Y" * 300 for c in self.i.destination().split(util.PATH_SEP): - self.assertLessEqual(len(c), 255) + assert len(c) <= 255 def test_destination_long_names_keep_extension(self): self.i.title = "X" * 300 self.i.path = b"something.extn" dest = self.i.destination() - self.assertEqual(dest[-5:], b".extn") + assert dest[-5:] == b".extn" def test_distination_windows_removes_both_separators(self): self.i.title = "one \\ two / three.mp3" with _common.platform_windows(): p = self.i.destination() - self.assertNotIn(b"one \\ two", p) - self.assertNotIn(b"one / two", p) - self.assertNotIn(b"two \\ three", p) - self.assertNotIn(b"two / three", p) + assert b"one \\ two" not in p + assert b"one / two" not in p + assert b"two \\ three" not in p + assert b"two / three" not in p def test_path_with_format(self): self.lib.path_formats = [("default", "$artist/$album ($format)")] p = self.i.destination() - self.assertIn(b"(FLAC)", p) + assert b"(FLAC)" in p def test_heterogeneous_album_gets_single_directory(self): i1, i2 = item(), item() @@ -287,14 +282,14 @@ class DestinationTest(_common.TestCase): i1.year, i2.year = 2009, 2010 self.lib.path_formats = [("default", "$album ($year)/$track $title")] dest1, dest2 = i1.destination(), i2.destination() - self.assertEqual(os.path.dirname(dest1), os.path.dirname(dest2)) + assert os.path.dirname(dest1) == os.path.dirname(dest2) def test_default_path_for_non_compilations(self): self.i.comp = False self.lib.add_album([self.i]) self.lib.directory = b"one" self.lib.path_formats = [("default", "two"), ("comp:true", "three")] - self.assertEqual(self.i.destination(), np("one/two")) + assert self.i.destination() == np("one/two") def test_singleton_path(self): i = item(self.lib) @@ -304,7 +299,7 @@ class DestinationTest(_common.TestCase): ("singleton:true", "four"), ("comp:true", "three"), ] - self.assertEqual(i.destination(), np("one/four")) + assert i.destination() == np("one/four") def test_comp_before_singleton_path(self): i = item(self.lib) @@ -315,17 +310,14 @@ class DestinationTest(_common.TestCase): ("comp:true", "three"), ("singleton:true", "four"), ] - self.assertEqual(i.destination(), np("one/three")) + assert i.destination() == np("one/three") def test_comp_path(self): self.i.comp = True self.lib.add_album([self.i]) self.lib.directory = b"one" - self.lib.path_formats = [ - ("default", "two"), - ("comp:true", "three"), - ] - 
self.assertEqual(self.i.destination(), np("one/three")) + self.lib.path_formats = [("default", "two"), ("comp:true", "three")] + assert self.i.destination() == np("one/three") def test_albumtype_query_path(self): self.i.comp = True @@ -337,7 +329,7 @@ class DestinationTest(_common.TestCase): ("albumtype:sometype", "four"), ("comp:true", "three"), ] - self.assertEqual(self.i.destination(), np("one/four")) + assert self.i.destination() == np("one/four") def test_albumtype_path_fallback_to_comp(self): self.i.comp = True @@ -349,84 +341,84 @@ class DestinationTest(_common.TestCase): ("albumtype:anothertype", "four"), ("comp:true", "three"), ] - self.assertEqual(self.i.destination(), np("one/three")) + assert self.i.destination() == np("one/three") def test_get_formatted_does_not_replace_separators(self): with _common.platform_posix(): name = os.path.join("a", "b") self.i.title = name newname = self.i.formatted().get("title") - self.assertEqual(name, newname) + assert name == newname def test_get_formatted_pads_with_zero(self): with _common.platform_posix(): self.i.track = 1 name = self.i.formatted().get("track") - self.assertTrue(name.startswith("0")) + assert name.startswith("0") def test_get_formatted_uses_kbps_bitrate(self): with _common.platform_posix(): self.i.bitrate = 12345 val = self.i.formatted().get("bitrate") - self.assertEqual(val, "12kbps") + assert val == "12kbps" def test_get_formatted_uses_khz_samplerate(self): with _common.platform_posix(): self.i.samplerate = 12345 val = self.i.formatted().get("samplerate") - self.assertEqual(val, "12kHz") + assert val == "12kHz" def test_get_formatted_datetime(self): with _common.platform_posix(): self.i.added = 1368302461.210265 val = self.i.formatted().get("added") - self.assertTrue(val.startswith("2013")) + assert val.startswith("2013") def test_get_formatted_none(self): with _common.platform_posix(): self.i.some_other_field = None val = self.i.formatted().get("some_other_field") - self.assertEqual(val, "") + assert val == "" def test_artist_falls_back_to_albumartist(self): self.i.artist = "" self.i.albumartist = "something" self.lib.path_formats = [("default", "$artist")] p = self.i.destination() - self.assertEqual(p.rsplit(util.PATH_SEP, 1)[1], b"something") + assert p.rsplit(util.PATH_SEP, 1)[1] == b"something" def test_albumartist_falls_back_to_artist(self): self.i.artist = "trackartist" self.i.albumartist = "" self.lib.path_formats = [("default", "$albumartist")] p = self.i.destination() - self.assertEqual(p.rsplit(util.PATH_SEP, 1)[1], b"trackartist") + assert p.rsplit(util.PATH_SEP, 1)[1] == b"trackartist" def test_artist_overrides_albumartist(self): self.i.artist = "theartist" self.i.albumartist = "something" self.lib.path_formats = [("default", "$artist")] p = self.i.destination() - self.assertEqual(p.rsplit(util.PATH_SEP, 1)[1], b"theartist") + assert p.rsplit(util.PATH_SEP, 1)[1] == b"theartist" def test_albumartist_overrides_artist(self): self.i.artist = "theartist" self.i.albumartist = "something" self.lib.path_formats = [("default", "$albumartist")] p = self.i.destination() - self.assertEqual(p.rsplit(util.PATH_SEP, 1)[1], b"something") + assert p.rsplit(util.PATH_SEP, 1)[1] == b"something" def test_unicode_normalized_nfd_on_mac(self): instr = unicodedata.normalize("NFC", "caf\xe9") self.lib.path_formats = [("default", instr)] dest = self.i.destination(platform="darwin", fragment=True) - self.assertEqual(dest, unicodedata.normalize("NFD", instr)) + assert dest == unicodedata.normalize("NFD", instr) def 
test_unicode_normalized_nfc_on_linux(self): instr = unicodedata.normalize("NFD", "caf\xe9") self.lib.path_formats = [("default", instr)] dest = self.i.destination(platform="linux", fragment=True) - self.assertEqual(dest, unicodedata.normalize("NFC", instr)) + assert dest == unicodedata.normalize("NFC", instr) def test_non_mbcs_characters_on_windows(self): oldfunc = sys.getfilesystemencoding @@ -435,9 +427,9 @@ class DestinationTest(_common.TestCase): self.i.title = "h\u0259d" self.lib.path_formats = [("default", "$title")] p = self.i.destination() - self.assertNotIn(b"?", p) + assert b"?" not in p # We use UTF-8 to encode Windows paths now. - self.assertIn("h\u0259d".encode(), p) + assert "h\u0259d".encode() in p finally: sys.getfilesystemencoding = oldfunc @@ -445,7 +437,7 @@ class DestinationTest(_common.TestCase): self.lib.path_formats = [("default", "foo")] self.i.path = util.bytestring_path("bar.caf\xe9") dest = self.i.destination(platform="linux", fragment=True) - self.assertEqual(dest, "foo.caf\xe9") + assert dest == "foo.caf\xe9" def test_asciify_and_replace(self): config["asciify_paths"] = True @@ -453,14 +445,14 @@ class DestinationTest(_common.TestCase): self.lib.directory = b"lib" self.lib.path_formats = [("default", "$title")] self.i.title = "\u201c\u00f6\u2014\u00cf\u201d" - self.assertEqual(self.i.destination(), np("lib/qo--Iq")) + assert self.i.destination() == np("lib/qo--Iq") def test_asciify_character_expanding_to_slash(self): config["asciify_paths"] = True self.lib.directory = b"lib" self.lib.path_formats = [("default", "$title")] self.i.title = "ab\xa2\xbdd" - self.assertEqual(self.i.destination(), np("lib/abC_ 1_2d")) + assert self.i.destination() == np("lib/abC_ 1_2d") def test_destination_with_replacements(self): self.lib.directory = b"base" @@ -468,7 +460,7 @@ class DestinationTest(_common.TestCase): self.lib.path_formats = [("default", "$album/$title")] self.i.title = "foo" self.i.album = "bar" - self.assertEqual(self.i.destination(), np("base/ber/foo")) + assert self.i.destination() == np("base/ber/foo") def test_destination_with_replacements_argument(self): self.lib.directory = b"base" @@ -477,8 +469,8 @@ class DestinationTest(_common.TestCase): self.i.title = "foo" self.i.album = "bar" replacements = [(re.compile(r"a"), "e")] - self.assertEqual( - self.i.destination(replacements=replacements), np("base/ber/foo") + assert self.i.destination(replacements=replacements) == np( + "base/ber/foo" ) @unittest.skip("unimplemented: #359") @@ -490,7 +482,7 @@ class DestinationTest(_common.TestCase): self.i.artist = "" self.i.albumartist = "" self.i.album = "one" - self.assertEqual(self.i.destination(), np("base/one/_/three")) + assert self.i.destination() == np("base/one/_/three") @unittest.skip("unimplemented: #359") def test_destination_with_empty_final_component(self): @@ -500,7 +492,7 @@ class DestinationTest(_common.TestCase): self.i.title = "" self.i.album = "one" self.i.path = "foo.mp3" - self.assertEqual(self.i.destination(), np("base/one/_.mp3")) + assert self.i.destination() == np("base/one/_.mp3") def test_legalize_path_one_for_one_replacement(self): # Use a replacement that should always replace the last X in any @@ -515,7 +507,7 @@ class DestinationTest(_common.TestCase): # The final path should reflect the replacement. 
dest = self.i.destination() - self.assertEqual(dest[-2:], b"XZ") + assert dest[-2:] == b"XZ" def test_legalize_path_one_for_many_replacement(self): # Use a replacement that should always replace the last X in any @@ -531,16 +523,16 @@ class DestinationTest(_common.TestCase): # The final path should ignore the user replacement and create a path # of the correct length, containing Xs. dest = self.i.destination() - self.assertEqual(dest[-2:], b"XX") + assert dest[-2:] == b"XX" def test_album_field_query(self): self.lib.directory = b"one" self.lib.path_formats = [("default", "two"), ("flex:foo", "three")] album = self.lib.add_album([self.i]) - self.assertEqual(self.i.destination(), np("one/two")) + assert self.i.destination() == np("one/two") album["flex"] = "foo" album.store() - self.assertEqual(self.i.destination(), np("one/three")) + assert self.i.destination() == np("one/three") def test_album_field_in_template(self): self.lib.directory = b"one" @@ -548,38 +540,38 @@ class DestinationTest(_common.TestCase): album = self.lib.add_album([self.i]) album["flex"] = "foo" album.store() - self.assertEqual(self.i.destination(), np("one/foo/two")) + assert self.i.destination() == np("one/foo/two") -class ItemFormattedMappingTest(_common.LibTestCase): +class ItemFormattedMappingTest(ItemInDBTestCase): def test_formatted_item_value(self): formatted = self.i.formatted() - self.assertEqual(formatted["artist"], "the artist") + assert formatted["artist"] == "the artist" def test_get_unset_field(self): formatted = self.i.formatted() - with self.assertRaises(KeyError): + with pytest.raises(KeyError): formatted["other_field"] def test_get_method_with_default(self): formatted = self.i.formatted() - self.assertEqual(formatted.get("other_field"), "") + assert formatted.get("other_field") == "" def test_get_method_with_specified_default(self): formatted = self.i.formatted() - self.assertEqual(formatted.get("other_field", "default"), "default") + assert formatted.get("other_field", "default") == "default" def test_item_precedence(self): album = self.lib.add_album([self.i]) album["artist"] = "foo" album.store() - self.assertNotEqual("foo", self.i.formatted().get("artist")) + assert "foo" != self.i.formatted().get("artist") def test_album_flex_field(self): album = self.lib.add_album([self.i]) album["flex"] = "foo" album.store() - self.assertEqual("foo", self.i.formatted().get("flex")) + assert "foo" == self.i.formatted().get("flex") def test_album_field_overrides_item_field_for_path(self): # Make the album inconsistent with the item. @@ -591,23 +583,23 @@ class ItemFormattedMappingTest(_common.LibTestCase): # Ensure the album takes precedence. 
formatted = self.i.formatted(for_path=True) - self.assertEqual(formatted["album"], "foo") + assert formatted["album"] == "foo" def test_artist_falls_back_to_albumartist(self): self.i.artist = "" formatted = self.i.formatted() - self.assertEqual(formatted["artist"], "the album artist") + assert formatted["artist"] == "the album artist" def test_albumartist_falls_back_to_artist(self): self.i.albumartist = "" formatted = self.i.formatted() - self.assertEqual(formatted["albumartist"], "the artist") + assert formatted["albumartist"] == "the artist" def test_both_artist_and_albumartist_empty(self): self.i.artist = "" self.i.albumartist = "" formatted = self.i.formatted() - self.assertEqual(formatted["albumartist"], "") + assert formatted["albumartist"] == "" class PathFormattingMixin: @@ -621,21 +613,16 @@ class PathFormattingMixin: i = self.i with _common.platform_posix(): actual = i.destination() - self.assertEqual(actual, dest) + assert actual == dest -class DestinationFunctionTest(_common.TestCase, PathFormattingMixin): +class DestinationFunctionTest(BeetsTestCase, PathFormattingMixin): def setUp(self): super().setUp() - self.lib = beets.library.Library(":memory:") self.lib.directory = b"/base" self.lib.path_formats = [("default", "path")] self.i = item(self.lib) - def tearDown(self): - super().tearDown() - self.lib._connection().close() - def test_upper_case_literal(self): self._setf("%upper{foo}") self._assert_dest(b"/base/FOO") @@ -644,6 +631,10 @@ class DestinationFunctionTest(_common.TestCase, PathFormattingMixin): self._setf("%upper{$title}") self._assert_dest(b"/base/THE TITLE") + def test_capitalize_variable(self): + self._setf("%capitalize{$title}") + self._assert_dest(b"/base/The title") + def test_title_case_variable(self): self._setf("%title{$title}") self._assert_dest(b"/base/The Title") @@ -733,10 +724,9 @@ class DestinationFunctionTest(_common.TestCase, PathFormattingMixin): self._assert_dest(b"/base/Alice & Bob") -class DisambiguationTest(_common.TestCase, PathFormattingMixin): +class DisambiguationTest(BeetsTestCase, PathFormattingMixin): def setUp(self): super().setUp() - self.lib = beets.library.Library(":memory:") self.lib.directory = b"/base" self.lib.path_formats = [("default", "path")] @@ -750,10 +740,6 @@ class DisambiguationTest(_common.TestCase, PathFormattingMixin): self._setf("foo%aunique{albumartist album,year}/$title") - def tearDown(self): - super().tearDown() - self.lib._connection().close() - def test_unique_expands_to_disambiguating_year(self): self._assert_dest(b"/base/foo [2001]/the title", self.i1) @@ -822,10 +808,9 @@ class DisambiguationTest(_common.TestCase, PathFormattingMixin): self._assert_dest(b"/base/foo/the title", self.i1) -class SingletonDisambiguationTest(_common.TestCase, PathFormattingMixin): +class SingletonDisambiguationTest(BeetsTestCase, PathFormattingMixin): def setUp(self): super().setUp() - self.lib = beets.library.Library(":memory:") self.lib.directory = b"/base" self.lib.path_formats = [("default", "path")] @@ -839,10 +824,6 @@ class SingletonDisambiguationTest(_common.TestCase, PathFormattingMixin): self._setf("foo/$title%sunique{artist title,year}") - def tearDown(self): - super().tearDown() - self.lib._connection().close() - def test_sunique_expands_to_disambiguating_year(self): self._assert_dest(b"/base/foo/the title [2001]", self.i1) @@ -907,7 +888,7 @@ class SingletonDisambiguationTest(_common.TestCase, PathFormattingMixin): self._assert_dest(b"/base/foo/the title", self.i1) -class PluginDestinationTest(_common.TestCase): 
+class PluginDestinationTest(BeetsTestCase): def setUp(self): super().setUp() @@ -923,7 +904,6 @@ class PluginDestinationTest(_common.TestCase): self.old_field_getters = plugins.item_field_getters plugins.item_field_getters = field_getters - self.lib = beets.library.Library(":memory:") self.lib.directory = b"/base" self.lib.path_formats = [("default", "$artist $foo")] self.i = item(self.lib) @@ -935,7 +915,7 @@ class PluginDestinationTest(_common.TestCase): def _assert_dest(self, dest): with _common.platform_posix(): the_dest = self.i.destination() - self.assertEqual(the_dest, b"/base/" + dest) + assert the_dest == b"/base/" + dest def test_undefined_value_not_substituted(self): self._assert_dest(b"the artist $foo") @@ -959,26 +939,25 @@ class PluginDestinationTest(_common.TestCase): self._assert_dest(b"the artist bar_baz") -class AlbumInfoTest(_common.TestCase): +class AlbumInfoTest(BeetsTestCase): def setUp(self): super().setUp() - self.lib = beets.library.Library(":memory:") self.i = item() self.lib.add_album((self.i,)) def test_albuminfo_reflects_metadata(self): ai = self.lib.get_album(self.i) - self.assertEqual(ai.mb_albumartistid, self.i.mb_albumartistid) - self.assertEqual(ai.albumartist, self.i.albumartist) - self.assertEqual(ai.album, self.i.album) - self.assertEqual(ai.year, self.i.year) + assert ai.mb_albumartistid == self.i.mb_albumartistid + assert ai.albumartist == self.i.albumartist + assert ai.album == self.i.album + assert ai.year == self.i.year def test_albuminfo_stores_art(self): ai = self.lib.get_album(self.i) ai.artpath = "/my/great/art" ai.store() new_ai = self.lib.get_album(self.i) - self.assertEqual(new_ai.artpath, b"/my/great/art") + assert new_ai.artpath == b"/my/great/art" def test_albuminfo_for_two_items_doesnt_duplicate_row(self): i2 = item(self.lib) @@ -988,20 +967,20 @@ class AlbumInfoTest(_common.TestCase): c = self.lib._connection().cursor() c.execute("select * from albums where album=?", (self.i.album,)) # Cursor should only return one row. 
- self.assertIsNotNone(c.fetchone()) - self.assertIsNone(c.fetchone()) + assert c.fetchone() is not None + assert c.fetchone() is None def test_individual_tracks_have_no_albuminfo(self): i2 = item() i2.album = "aTotallyDifferentAlbum" self.lib.add(i2) ai = self.lib.get_album(i2) - self.assertIsNone(ai) + assert ai is None def test_get_album_by_id(self): ai = self.lib.get_album(self.i) ai = self.lib.get_album(self.i.id) - self.assertIsNotNone(ai) + assert ai is not None def test_album_items_consistent(self): ai = self.lib.get_album(self.i) @@ -1016,29 +995,29 @@ class AlbumInfoTest(_common.TestCase): ai.album = "myNewAlbum" ai.store() i = self.lib.items()[0] - self.assertEqual(i.album, "myNewAlbum") + assert i.album == "myNewAlbum" def test_albuminfo_change_albumartist_changes_items(self): ai = self.lib.get_album(self.i) ai.albumartist = "myNewArtist" ai.store() i = self.lib.items()[0] - self.assertEqual(i.albumartist, "myNewArtist") - self.assertNotEqual(i.artist, "myNewArtist") + assert i.albumartist == "myNewArtist" + assert i.artist != "myNewArtist" def test_albuminfo_change_artist_does_change_items(self): ai = self.lib.get_album(self.i) ai.artist = "myNewArtist" ai.store(inherit=True) i = self.lib.items()[0] - self.assertEqual(i.artist, "myNewArtist") + assert i.artist == "myNewArtist" def test_albuminfo_change_artist_does_not_change_items(self): ai = self.lib.get_album(self.i) ai.artist = "myNewArtist" ai.store(inherit=False) i = self.lib.items()[0] - self.assertNotEqual(i.artist, "myNewArtist") + assert i.artist != "myNewArtist" def test_albuminfo_remove_removes_items(self): item_id = self.i.id @@ -1046,12 +1025,12 @@ class AlbumInfoTest(_common.TestCase): c = self.lib._connection().execute( "SELECT id FROM items WHERE id=?", (item_id,) ) - self.assertEqual(c.fetchone(), None) + assert c.fetchone() is None def test_removing_last_item_removes_album(self): - self.assertEqual(len(self.lib.albums()), 1) + assert len(self.lib.albums()) == 1 self.i.remove() - self.assertEqual(len(self.lib.albums()), 0) + assert len(self.lib.albums()) == 0 def test_noop_albuminfo_changes_affect_items(self): i = self.lib.items()[0] @@ -1061,17 +1040,15 @@ class AlbumInfoTest(_common.TestCase): ai.album = ai.album ai.store() i = self.lib.items()[0] - self.assertEqual(i.album, ai.album) + assert i.album == ai.album -class ArtDestinationTest(_common.TestCase): +class ArtDestinationTest(BeetsTestCase): def setUp(self): super().setUp() config["art_filename"] = "artimage" config["replace"] = {"X": "Y"} - self.lib = beets.library.Library( - ":memory:", replacements=[(re.compile("X"), "Y")] - ) + self.lib.replacements = [(re.compile("X"), "Y")] self.i = item(self.lib) self.i.path = self.i.destination() self.ai = self.lib.add_album((self.i,)) @@ -1079,35 +1056,34 @@ class ArtDestinationTest(_common.TestCase): def test_art_filename_respects_setting(self): art = self.ai.art_destination("something.jpg") new_art = bytestring_path("%sartimage.jpg" % os.path.sep) - self.assertIn(new_art, art) + assert new_art in art def test_art_path_in_item_dir(self): art = self.ai.art_destination("something.jpg") track = self.i.destination() - self.assertEqual(os.path.dirname(art), os.path.dirname(track)) + assert os.path.dirname(art) == os.path.dirname(track) def test_art_path_sanitized(self): config["art_filename"] = "artXimage" art = self.ai.art_destination("something.jpg") - self.assertIn(b"artYimage", art) + assert b"artYimage" in art -class PathStringTest(_common.TestCase): +class PathStringTest(BeetsTestCase): def setUp(self): 
super().setUp() - self.lib = beets.library.Library(":memory:") self.i = item(self.lib) def test_item_path_is_bytestring(self): - self.assertTrue(isinstance(self.i.path, bytes)) + assert isinstance(self.i.path, bytes) def test_fetched_item_path_is_bytestring(self): i = list(self.lib.items())[0] - self.assertTrue(isinstance(i.path, bytes)) + assert isinstance(i.path, bytes) def test_unicode_path_becomes_bytestring(self): self.i.path = "unicodepath" - self.assertTrue(isinstance(self.i.path, bytes)) + assert isinstance(self.i.path, bytes) def test_unicode_in_database_becomes_bytestring(self): self.lib._connection().execute( @@ -1117,14 +1093,14 @@ class PathStringTest(_common.TestCase): (self.i.id, "somepath"), ) i = list(self.lib.items())[0] - self.assertTrue(isinstance(i.path, bytes)) + assert isinstance(i.path, bytes) def test_special_chars_preserved_in_database(self): path = "b\xe1r".encode() self.i.path = path self.i.store() i = list(self.lib.items())[0] - self.assertEqual(i.path, path) + assert i.path == path def test_special_char_path_added_to_database(self): self.i.remove() @@ -1133,18 +1109,18 @@ class PathStringTest(_common.TestCase): i.path = path self.lib.add(i) i = list(self.lib.items())[0] - self.assertEqual(i.path, path) + assert i.path == path def test_destination_returns_bytestring(self): self.i.artist = "b\xe1r" dest = self.i.destination() - self.assertTrue(isinstance(dest, bytes)) + assert isinstance(dest, bytes) def test_art_destination_returns_bytestring(self): self.i.artist = "b\xe1r" alb = self.lib.add_album([self.i]) dest = alb.art_destination("image.jpg") - self.assertTrue(isinstance(dest, bytes)) + assert isinstance(dest, bytes) def test_artpath_stores_special_chars(self): path = b"b\xe1r" @@ -1152,22 +1128,22 @@ class PathStringTest(_common.TestCase): alb.artpath = path alb.store() alb = self.lib.get_album(self.i) - self.assertEqual(path, alb.artpath) + assert path == alb.artpath def test_sanitize_path_with_special_chars(self): path = "b\xe1r?" new_path = util.sanitize_path(path) - self.assertTrue(new_path.startswith("b\xe1r")) + assert new_path.startswith("b\xe1r") def test_sanitize_path_returns_unicode(self): path = "b\xe1r?" new_path = util.sanitize_path(path) - self.assertTrue(isinstance(new_path, str)) + assert isinstance(new_path, str) def test_unicode_artpath_becomes_bytestring(self): alb = self.lib.add_album([self.i]) alb.artpath = "somep\xe1th" - self.assertTrue(isinstance(alb.artpath, bytes)) + assert isinstance(alb.artpath, bytes) def test_unicode_artpath_in_database_decoded(self): alb = self.lib.add_album([self.i]) @@ -1175,10 +1151,10 @@ class PathStringTest(_common.TestCase): "update albums set artpath=? 
where id=?", ("somep\xe1th", alb.id) ) alb = self.lib.get_album(alb.id) - self.assertTrue(isinstance(alb.artpath, bytes)) + assert isinstance(alb.artpath, bytes) -class MtimeTest(_common.TestCase): +class MtimeTest(BeetsTestCase): def setUp(self): super().setUp() self.ipath = os.path.join(self.temp_dir, b"testfile.mp3") @@ -1187,7 +1163,6 @@ class MtimeTest(_common.TestCase): syspath(self.ipath), ) self.i = beets.library.Item.from_path(self.ipath) - self.lib = beets.library.Library(":memory:") self.lib.add(self.i) def tearDown(self): @@ -1199,70 +1174,66 @@ class MtimeTest(_common.TestCase): return int(os.path.getmtime(self.ipath)) def test_mtime_initially_up_to_date(self): - self.assertGreaterEqual(self.i.mtime, self._mtime()) + assert self.i.mtime >= self._mtime() def test_mtime_reset_on_db_modify(self): self.i.title = "something else" - self.assertLess(self.i.mtime, self._mtime()) + assert self.i.mtime < self._mtime() def test_mtime_up_to_date_after_write(self): self.i.title = "something else" self.i.write() - self.assertGreaterEqual(self.i.mtime, self._mtime()) + assert self.i.mtime >= self._mtime() def test_mtime_up_to_date_after_read(self): self.i.title = "something else" self.i.read() - self.assertGreaterEqual(self.i.mtime, self._mtime()) + assert self.i.mtime >= self._mtime() -class ImportTimeTest(_common.TestCase): - def setUp(self): - super().setUp() - self.lib = beets.library.Library(":memory:") - +class ImportTimeTest(BeetsTestCase): def added(self): self.track = item() self.album = self.lib.add_album((self.track,)) - self.assertGreater(self.album.added, 0) - self.assertGreater(self.track.added, 0) + assert self.album.added > 0 + assert self.track.added > 0 def test_atime_for_singleton(self): self.singleton = item(self.lib) - self.assertGreater(self.singleton.added, 0) + assert self.singleton.added > 0 -class TemplateTest(_common.LibTestCase): +class TemplateTest(ItemInDBTestCase): def test_year_formatted_in_template(self): self.i.year = 123 self.i.store() - self.assertEqual(self.i.evaluate_template("$year"), "0123") + assert self.i.evaluate_template("$year") == "0123" def test_album_flexattr_appears_in_item_template(self): self.album = self.lib.add_album([self.i]) self.album.foo = "baz" self.album.store() - self.assertEqual(self.i.evaluate_template("$foo"), "baz") + assert self.i.evaluate_template("$foo") == "baz" def test_album_and_item_format(self): config["format_album"] = "foö $foo" album = beets.library.Album() album.foo = "bar" album.tagada = "togodo" - self.assertEqual(f"{album}", "foö bar") - self.assertEqual(f"{album:$tagada}", "togodo") - self.assertEqual(str(album), "foö bar") - self.assertEqual(bytes(album), b"fo\xc3\xb6 bar") + assert f"{album}" == "foö bar" + assert f"{album:$tagada}" == "togodo" + assert str(album) == "foö bar" + assert bytes(album) == b"fo\xc3\xb6 bar" config["format_item"] = "bar $foo" item = beets.library.Item() item.foo = "bar" item.tagada = "togodo" - self.assertEqual(f"{item}", "bar bar") - self.assertEqual(f"{item:$tagada}", "togodo") + assert f"{item}" == "bar bar" + assert f"{item:$tagada}" == "togodo" -class UnicodePathTest(_common.LibTestCase): +class UnicodePathTest(ItemInDBTestCase): def test_unicode_path(self): self.i.path = os.path.join(_common.RSRC, "unicode\u2019d.mp3".encode()) # If there are any problems with unicode paths, we will raise @@ -1271,17 +1242,11 @@ class UnicodePathTest(_common.LibTestCase): self.i.write() -class WriteTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - - def 
tearDown(self): - self.teardown_beets() - +class WriteTest(BeetsTestCase): def test_write_nonexistant(self): item = self.create_item() item.path = b"/path/does/not/exist" - with self.assertRaises(beets.library.ReadError): + with pytest.raises(beets.library.ReadError): item.write() def test_no_write_permission(self): @@ -1290,7 +1255,8 @@ class WriteTest(unittest.TestCase, TestHelper): os.chmod(path, stat.S_IRUSR) try: - self.assertRaises(beets.library.WriteError, item.write) + with pytest.raises(beets.library.WriteError): + item.write() finally: # Restore write permissions so the file can be cleaned up. @@ -1302,29 +1268,27 @@ class WriteTest(unittest.TestCase, TestHelper): shutil.copy(syspath(item.path), syspath(custom_path)) item["artist"] = "new artist" - self.assertNotEqual( - MediaFile(syspath(custom_path)).artist, "new artist" - ) - self.assertNotEqual(MediaFile(syspath(item.path)).artist, "new artist") + assert MediaFile(syspath(custom_path)).artist != "new artist" + assert MediaFile(syspath(item.path)).artist != "new artist" item.write(custom_path) - self.assertEqual(MediaFile(syspath(custom_path)).artist, "new artist") - self.assertNotEqual(MediaFile(syspath(item.path)).artist, "new artist") + assert MediaFile(syspath(custom_path)).artist == "new artist" + assert MediaFile(syspath(item.path)).artist != "new artist" def test_write_custom_tags(self): item = self.add_item_fixture(artist="old artist") item.write(tags={"artist": "new artist"}) - self.assertNotEqual(item.artist, "new artist") - self.assertEqual(MediaFile(syspath(item.path)).artist, "new artist") + assert item.artist != "new artist" + assert MediaFile(syspath(item.path)).artist == "new artist" def test_write_multi_tags(self): item = self.add_item_fixture(artist="old artist") item.write(tags={"artists": ["old artist", "another artist"]}) - self.assertEqual( - MediaFile(syspath(item.path)).artists, - ["old artist", "another artist"], - ) + assert MediaFile(syspath(item.path)).artists == [ + "old artist", + "another artist", + ] def test_write_multi_tags_id3v23(self): item = self.add_item_fixture(artist="old artist") @@ -1332,9 +1296,9 @@ class WriteTest(unittest.TestCase, TestHelper): tags={"artists": ["old artist", "another artist"]}, id3v23=True ) - self.assertEqual( - MediaFile(syspath(item.path)).artists, ["old artist/another artist"] - ) + assert MediaFile(syspath(item.path)).artists == [ + "old artist/another artist" + ] def test_write_date_field(self): # Since `date` is not a MediaField, this should do nothing. 
@@ -1342,47 +1306,40 @@ class WriteTest(unittest.TestCase, TestHelper): clean_year = item.year item.date = "foo" item.write() - self.assertEqual(MediaFile(syspath(item.path)).year, clean_year) + assert MediaFile(syspath(item.path)).year == clean_year class ItemReadTest(unittest.TestCase): def test_unreadable_raise_read_error(self): unreadable = os.path.join(_common.RSRC, b"image-2x3.png") item = beets.library.Item() - with self.assertRaises(beets.library.ReadError) as cm: + with pytest.raises(beets.library.ReadError) as exc_info: item.read(unreadable) - self.assertIsInstance(cm.exception.reason, UnreadableFileError) + assert isinstance(exc_info.value.reason, UnreadableFileError) def test_nonexistent_raise_read_error(self): item = beets.library.Item() - with self.assertRaises(beets.library.ReadError): + with pytest.raises(beets.library.ReadError): item.read("/thisfiledoesnotexist") -class FilesizeTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - - def tearDown(self): - self.teardown_beets() - +class FilesizeTest(BeetsTestCase): def test_filesize(self): item = self.add_item_fixture() - self.assertNotEqual(item.filesize, 0) + assert item.filesize != 0 def test_nonexistent_file(self): item = beets.library.Item() - self.assertEqual(item.filesize, 0) + assert item.filesize == 0 class ParseQueryTest(unittest.TestCase): def test_parse_invalid_query_string(self): - with self.assertRaises(beets.dbcore.InvalidQueryError) as raised: + with pytest.raises(beets.dbcore.query.ParsingError): beets.library.parse_query_string('foo"', None) - self.assertIsInstance(raised.exception, beets.dbcore.query.ParsingError) def test_parse_bytes(self): - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): beets.library.parse_query_string(b"query", None) @@ -1395,53 +1352,45 @@ class LibraryFieldTypesTest(unittest.TestCase): # format time_format = beets.config["time_format"].as_str() time_local = time.strftime(time_format, time.localtime(123456789)) - self.assertEqual(time_local, t.format(123456789)) + assert time_local == t.format(123456789) # parse - self.assertEqual(123456789.0, t.parse(time_local)) - self.assertEqual(123456789.0, t.parse("123456789.0")) - self.assertEqual(t.null, t.parse("not123456789.0")) - self.assertEqual(t.null, t.parse("1973-11-29")) + assert 123456789.0 == t.parse(time_local) + assert 123456789.0 == t.parse("123456789.0") + assert t.null == t.parse("not123456789.0") + assert t.null == t.parse("1973-11-29") def test_pathtype(self): t = beets.library.PathType() # format - self.assertEqual("/tmp", t.format("/tmp")) - self.assertEqual("/tmp/\xe4lbum", t.format("/tmp/\u00e4lbum")) + assert "/tmp" == t.format("/tmp") + assert "/tmp/\xe4lbum" == t.format("/tmp/\u00e4lbum") # parse - self.assertEqual(np(b"/tmp"), t.parse("/tmp")) - self.assertEqual(np(b"/tmp/\xc3\xa4lbum"), t.parse("/tmp/\u00e4lbum/")) + assert np(b"/tmp") == t.parse("/tmp") + assert np(b"/tmp/\xc3\xa4lbum") == t.parse("/tmp/\u00e4lbum/") def test_musicalkey(self): t = beets.library.MusicalKey() # parse - self.assertEqual("C#m", t.parse("c#m")) - self.assertEqual("Gm", t.parse("g minor")) - self.assertEqual("Not c#m", t.parse("not C#m")) + assert "C#m" == t.parse("c#m") + assert "Gm" == t.parse("g minor") + assert "Not c#m" == t.parse("not C#m") def test_durationtype(self): t = beets.library.DurationType() # format - self.assertEqual("1:01", t.format(61.23)) - self.assertEqual("60:01", t.format(3601.23)) - self.assertEqual("0:00", t.format(None)) + assert "1:01" == t.format(61.23) 
+ assert "60:01" == t.format(3601.23) + assert "0:00" == t.format(None) # parse - self.assertEqual(61.0, t.parse("1:01")) - self.assertEqual(61.23, t.parse("61.23")) - self.assertEqual(3601.0, t.parse("60:01")) - self.assertEqual(t.null, t.parse("1:00:01")) - self.assertEqual(t.null, t.parse("not61.23")) + assert 61.0 == t.parse("1:01") + assert 61.23 == t.parse("61.23") + assert 3601.0 == t.parse("60:01") + assert t.null == t.parse("1:00:01") + assert t.null == t.parse("not61.23") # config format_raw_length beets.config["format_raw_length"] = True - self.assertEqual(61.23, t.format(61.23)) - self.assertEqual(3601.23, t.format(3601.23)) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert 61.23 == t.format(61.23) + assert 3601.23 == t.format(3601.23) diff --git a/test/test_logging.py b/test/test_logging.py index 58be799e0..d95a54387 100644 --- a/test/test_logging.py +++ b/test/test_logging.py @@ -3,51 +3,57 @@ import logging as log import sys import threading -import unittest from io import StringIO import beets.logging as blog import beetsplug from beets import plugins, ui from beets.test import _common, helper -from beets.test._common import TestCase +from beets.test.helper import ( + AsIsImporterMixin, + BeetsTestCase, + ImportTestCase, + PluginMixin, +) -class LoggingTest(TestCase): +class LoggingTest(BeetsTestCase): def test_logging_management(self): l1 = log.getLogger("foo123") l2 = blog.getLogger("foo123") - self.assertEqual(l1, l2) - self.assertEqual(l1.__class__, log.Logger) + assert l1 == l2 + assert l1.__class__ == log.Logger l3 = blog.getLogger("bar123") l4 = log.getLogger("bar123") - self.assertEqual(l3, l4) - self.assertEqual(l3.__class__, blog.BeetsLogger) - self.assertIsInstance( + assert l3 == l4 + assert l3.__class__ == blog.BeetsLogger + assert isinstance( l3, (blog.StrFormatLogger, blog.ThreadLocalLevelLogger) ) l5 = l3.getChild("shalala") - self.assertEqual(l5.__class__, blog.BeetsLogger) + assert l5.__class__ == blog.BeetsLogger l6 = blog.getLogger() - self.assertNotEqual(l1, l6) + assert l1 != l6 def test_str_format_logging(self): - l = blog.getLogger("baz123") + logger = blog.getLogger("baz123") stream = StringIO() handler = log.StreamHandler(stream) - l.addHandler(handler) - l.propagate = False + logger.addHandler(handler) + logger.propagate = False - l.warning("foo {0} {bar}", "oof", bar="baz") + logger.warning("foo {0} {bar}", "oof", bar="baz") handler.flush() - self.assertTrue(stream.getvalue(), "foo oof baz") + assert stream.getvalue(), "foo oof baz" -class LoggingLevelTest(unittest.TestCase, helper.TestHelper): +class LoggingLevelTest(AsIsImporterMixin, PluginMixin, ImportTestCase): + plugin = "dummy" + class DummyModule: class DummyPlugin(plugins.BeetsPlugin): def __init__(self): @@ -74,100 +80,90 @@ class LoggingLevelTest(unittest.TestCase, helper.TestHelper): def setUp(self): sys.modules["beetsplug.dummy"] = self.DummyModule beetsplug.dummy = self.DummyModule - self.setup_beets() - self.load_plugins("dummy") - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - del beetsplug.dummy - sys.modules.pop("beetsplug.dummy") - self.DummyModule.DummyPlugin.listeners = None - self.DummyModule.DummyPlugin._raw_listeners = None + super().setUp() def test_command_level0(self): self.config["verbose"] = 0 with helper.capture_log() as logs: self.run_command("dummy") - self.assertIn("dummy: warning cmd", logs) - self.assertIn("dummy: info cmd", logs) - 
self.assertNotIn("dummy: debug cmd", logs) + assert "dummy: warning cmd" in logs + assert "dummy: info cmd" in logs + assert "dummy: debug cmd" not in logs def test_command_level1(self): self.config["verbose"] = 1 with helper.capture_log() as logs: self.run_command("dummy") - self.assertIn("dummy: warning cmd", logs) - self.assertIn("dummy: info cmd", logs) - self.assertIn("dummy: debug cmd", logs) + assert "dummy: warning cmd" in logs + assert "dummy: info cmd" in logs + assert "dummy: debug cmd" in logs def test_command_level2(self): self.config["verbose"] = 2 with helper.capture_log() as logs: self.run_command("dummy") - self.assertIn("dummy: warning cmd", logs) - self.assertIn("dummy: info cmd", logs) - self.assertIn("dummy: debug cmd", logs) + assert "dummy: warning cmd" in logs + assert "dummy: info cmd" in logs + assert "dummy: debug cmd" in logs def test_listener_level0(self): self.config["verbose"] = 0 with helper.capture_log() as logs: plugins.send("dummy_event") - self.assertIn("dummy: warning listener", logs) - self.assertNotIn("dummy: info listener", logs) - self.assertNotIn("dummy: debug listener", logs) + assert "dummy: warning listener" in logs + assert "dummy: info listener" not in logs + assert "dummy: debug listener" not in logs def test_listener_level1(self): self.config["verbose"] = 1 with helper.capture_log() as logs: plugins.send("dummy_event") - self.assertIn("dummy: warning listener", logs) - self.assertIn("dummy: info listener", logs) - self.assertNotIn("dummy: debug listener", logs) + assert "dummy: warning listener" in logs + assert "dummy: info listener" in logs + assert "dummy: debug listener" not in logs def test_listener_level2(self): self.config["verbose"] = 2 with helper.capture_log() as logs: plugins.send("dummy_event") - self.assertIn("dummy: warning listener", logs) - self.assertIn("dummy: info listener", logs) - self.assertIn("dummy: debug listener", logs) + assert "dummy: warning listener" in logs + assert "dummy: info listener" in logs + assert "dummy: debug listener" in logs def test_import_stage_level0(self): self.config["verbose"] = 0 with helper.capture_log() as logs: - importer = self.create_importer() - importer.run() - self.assertIn("dummy: warning import_stage", logs) - self.assertNotIn("dummy: info import_stage", logs) - self.assertNotIn("dummy: debug import_stage", logs) + self.run_asis_importer() + assert "dummy: warning import_stage" in logs + assert "dummy: info import_stage" not in logs + assert "dummy: debug import_stage" not in logs def test_import_stage_level1(self): self.config["verbose"] = 1 with helper.capture_log() as logs: - importer = self.create_importer() - importer.run() - self.assertIn("dummy: warning import_stage", logs) - self.assertIn("dummy: info import_stage", logs) - self.assertNotIn("dummy: debug import_stage", logs) + self.run_asis_importer() + assert "dummy: warning import_stage" in logs + assert "dummy: info import_stage" in logs + assert "dummy: debug import_stage" not in logs def test_import_stage_level2(self): self.config["verbose"] = 2 with helper.capture_log() as logs: - importer = self.create_importer() - importer.run() - self.assertIn("dummy: warning import_stage", logs) - self.assertIn("dummy: info import_stage", logs) - self.assertIn("dummy: debug import_stage", logs) + self.run_asis_importer() + assert "dummy: warning import_stage" in logs + assert "dummy: info import_stage" in logs + assert "dummy: debug import_stage" in logs @_common.slow_test() -class ConcurrentEventsTest(TestCase, helper.TestHelper): 
+class ConcurrentEventsTest(AsIsImporterMixin, ImportTestCase): """Similar to LoggingLevelTest but lower-level and focused on multiple events interaction. Since this is a bit heavy we don't do it in LoggingLevelTest. """ + db_on_disk = True + class DummyPlugin(plugins.BeetsPlugin): def __init__(self, test_case): plugins.BeetsPlugin.__init__(self, "dummy") @@ -186,30 +182,24 @@ class ConcurrentEventsTest(TestCase, helper.TestHelper): def listener1(self): try: - self.test_case.assertEqual(self._log.level, log.INFO) + assert self._log.level == log.INFO self.t1_step = 1 self.lock1.acquire() - self.test_case.assertEqual(self._log.level, log.INFO) + assert self._log.level == log.INFO self.t1_step = 2 except Exception as e: self.exc = e def listener2(self): try: - self.test_case.assertEqual(self._log.level, log.DEBUG) + assert self._log.level == log.DEBUG self.t2_step = 1 self.lock2.acquire() - self.test_case.assertEqual(self._log.level, log.DEBUG) + assert self._log.level == log.DEBUG self.t2_step = 2 except Exception as e: self.exc = e - def setUp(self): - self.setup_beets(disk=True) - - def tearDown(self): - self.teardown_beets() - def test_concurrent_events(self): dp = self.DummyPlugin(self) @@ -220,37 +210,37 @@ class ConcurrentEventsTest(TestCase, helper.TestHelper): try: dp.lock1.acquire() dp.lock2.acquire() - self.assertEqual(dp._log.level, log.NOTSET) + assert dp._log.level == log.NOTSET self.config["verbose"] = 1 t1 = threading.Thread(target=dp.listeners["dummy_event1"][0]) t1.start() # blocked. t1 tested its log level while dp.t1_step != 1: check_dp_exc() - self.assertTrue(t1.is_alive()) - self.assertEqual(dp._log.level, log.NOTSET) + assert t1.is_alive() + assert dp._log.level == log.NOTSET self.config["verbose"] = 2 t2 = threading.Thread(target=dp.listeners["dummy_event2"][0]) t2.start() # blocked. 
t2 tested its log level while dp.t2_step != 1: check_dp_exc() - self.assertTrue(t2.is_alive()) - self.assertEqual(dp._log.level, log.NOTSET) + assert t2.is_alive() + assert dp._log.level == log.NOTSET dp.lock1.release() # dummy_event1 tests its log level + finishes while dp.t1_step != 2: check_dp_exc() t1.join(0.1) - self.assertFalse(t1.is_alive()) - self.assertTrue(t2.is_alive()) - self.assertEqual(dp._log.level, log.NOTSET) + assert not t1.is_alive() + assert t2.is_alive() + assert dp._log.level == log.NOTSET dp.lock2.release() # dummy_event2 tests its log level + finishes while dp.t2_step != 2: check_dp_exc() t2.join(0.1) - self.assertFalse(t2.is_alive()) + assert not t2.is_alive() except Exception: print("Alive threads:", threading.enumerate()) @@ -269,28 +259,17 @@ class ConcurrentEventsTest(TestCase, helper.TestHelper): blog.getLogger("beets").set_global_level(blog.WARNING) with helper.capture_log() as logs: - importer = self.create_importer() - importer.run() - self.assertEqual(logs, []) + self.run_asis_importer() + assert logs == [] blog.getLogger("beets").set_global_level(blog.INFO) with helper.capture_log() as logs: - importer = self.create_importer() - importer.run() - for l in logs: - self.assertIn("import", l) - self.assertIn("album", l) + self.run_asis_importer() + for line in logs: + assert "import" in line + assert "album" in line blog.getLogger("beets").set_global_level(blog.DEBUG) with helper.capture_log() as logs: - importer = self.create_importer() - importer.run() - self.assertIn("Sending event: database_change", logs) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + self.run_asis_importer() + assert "Sending event: database_change" in logs diff --git a/test/test_m3ufile.py b/test/test_m3ufile.py index 2db5cac06..12686d824 100644 --- a/test/test_m3ufile.py +++ b/test/test_m3ufile.py @@ -13,13 +13,14 @@ # included in all copies or substantial portions of the Software. 
"""Testsuite for the M3UFile class.""" - import sys import unittest from os import path from shutil import rmtree from tempfile import mkdtemp +import pytest + from beets.test._common import RSRC from beets.util import bytestring_path from beets.util.m3u import EmptyPlaylistError, M3UFile @@ -33,7 +34,7 @@ class M3UFileTest(unittest.TestCase): tempdir = bytestring_path(mkdtemp()) the_playlist_file = path.join(tempdir, b"playlist.m3u8") m3ufile = M3UFile(the_playlist_file) - with self.assertRaises(EmptyPlaylistError): + with pytest.raises(EmptyPlaylistError): m3ufile.write() rmtree(tempdir) @@ -49,7 +50,7 @@ class M3UFileTest(unittest.TestCase): ] ) m3ufile.write() - self.assertTrue(path.exists(the_playlist_file)) + assert path.exists(the_playlist_file) rmtree(tempdir) def test_playlist_write_unicode(self): @@ -64,7 +65,7 @@ class M3UFileTest(unittest.TestCase): ] ) m3ufile.write() - self.assertTrue(path.exists(the_playlist_file)) + assert path.exists(the_playlist_file) rmtree(tempdir) @unittest.skipUnless(sys.platform == "win32", "win32") @@ -82,23 +83,16 @@ class M3UFileTest(unittest.TestCase): ] ) m3ufile.write() - self.assertTrue(path.exists(the_playlist_file)) + assert path.exists(the_playlist_file) m3ufile_read = M3UFile(the_playlist_file) m3ufile_read.load() - self.assertEqual( - m3ufile.media_list[0], - bytestring_path( - path.join("x:\\", "This", "is", "å", "path", "to_a_file.mp3") - ), + assert m3ufile.media_list[0] == bytestring_path( + path.join("x:\\", "This", "is", "å", "path", "to_a_file.mp3") ) - self.assertEqual( - m3ufile.media_list[1], - bytestring_path(r"x:\This\is\another\path\tö_a_file.mp3"), - bytestring_path( - path.join( - "x:\\", "This", "is", "another", "path", "tö_a_file.mp3" - ) - ), + assert m3ufile.media_list[1] == bytestring_path( + r"x:\This\is\another\path\tö_a_file.mp3" + ), bytestring_path( + path.join("x:\\", "This", "is", "another", "path", "tö_a_file.mp3") ) rmtree(tempdir) @@ -108,9 +102,8 @@ class M3UFileTest(unittest.TestCase): the_playlist_file = path.join(RSRC, b"playlist.m3u") m3ufile = M3UFile(the_playlist_file) m3ufile.load() - self.assertEqual( - m3ufile.media_list[0], - bytestring_path("/This/is/a/path/to_a_file.mp3"), + assert m3ufile.media_list[0] == bytestring_path( + "/This/is/a/path/to_a_file.mp3" ) @unittest.skipIf(sys.platform == "win32", "win32") @@ -119,9 +112,8 @@ class M3UFileTest(unittest.TestCase): the_playlist_file = path.join(RSRC, b"playlist.m3u8") m3ufile = M3UFile(the_playlist_file) m3ufile.load() - self.assertEqual( - m3ufile.media_list[0], - bytestring_path("/This/is/å/path/to_a_file.mp3"), + assert m3ufile.media_list[0] == bytestring_path( + "/This/is/å/path/to_a_file.mp3" ) @unittest.skipUnless(sys.platform == "win32", "win32") @@ -133,27 +125,18 @@ class M3UFileTest(unittest.TestCase): ) m3ufile = M3UFile(the_playlist_file) m3ufile.load() - self.assertEqual(m3ufile.media_list[0], winpath) + assert m3ufile.media_list[0] == winpath def test_playlist_load_extm3u(self): """Test loading a playlist with an #EXTM3U header.""" the_playlist_file = path.join(RSRC, b"playlist.m3u") m3ufile = M3UFile(the_playlist_file) m3ufile.load() - self.assertTrue(m3ufile.extm3u) + assert m3ufile.extm3u def test_playlist_load_non_extm3u(self): """Test loading a playlist without an #EXTM3U header.""" the_playlist_file = path.join(RSRC, b"playlist_non_ext.m3u") m3ufile = M3UFile(the_playlist_file) m3ufile.load() - self.assertFalse(m3ufile.extm3u) - - -def suite(): - """This testsuite's main function.""" - return 
unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert not m3ufile.extm3u diff --git a/test/test_mb.py b/test/test_mb.py index efd620522..37b5c0fff 100644 --- a/test/test_mb.py +++ b/test/test_mb.py @@ -12,18 +12,16 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Tests for MusicBrainz API wrapper. -""" +"""Tests for MusicBrainz API wrapper.""" -import unittest from unittest import mock from beets import config from beets.autotag import mb -from beets.test import _common +from beets.test.helper import BeetsTestCase -class MBAlbumInfoTest(_common.TestCase): +class MBAlbumInfoTest(BeetsTestCase): def _make_release( self, date_str="2009", @@ -213,25 +211,25 @@ class MBAlbumInfoTest(_common.TestCase): def test_parse_release_with_year(self): release = self._make_release("1984") d = mb.album_info(release) - self.assertEqual(d.album, "ALBUM TITLE") - self.assertEqual(d.album_id, "ALBUM ID") - self.assertEqual(d.artist, "ARTIST NAME") - self.assertEqual(d.artist_id, "ARTIST ID") - self.assertEqual(d.original_year, 1984) - self.assertEqual(d.year, 3001) - self.assertEqual(d.artist_credit, "ARTIST CREDIT") + assert d.album == "ALBUM TITLE" + assert d.album_id == "ALBUM ID" + assert d.artist == "ARTIST NAME" + assert d.artist_id == "ARTIST ID" + assert d.original_year == 1984 + assert d.year == 3001 + assert d.artist_credit == "ARTIST CREDIT" def test_parse_release_type(self): release = self._make_release("1984") d = mb.album_info(release) - self.assertEqual(d.albumtype, "album") + assert d.albumtype == "album" def test_parse_release_full_date(self): release = self._make_release("1987-03-31") d = mb.album_info(release) - self.assertEqual(d.original_year, 1987) - self.assertEqual(d.original_month, 3) - self.assertEqual(d.original_day, 31) + assert d.original_year == 1987 + assert d.original_month == 3 + assert d.original_day == 31 def test_parse_tracks(self): tracks = [ @@ -242,13 +240,13 @@ class MBAlbumInfoTest(_common.TestCase): d = mb.album_info(release) t = d.tracks - self.assertEqual(len(t), 2) - self.assertEqual(t[0].title, "TITLE ONE") - self.assertEqual(t[0].track_id, "ID ONE") - self.assertEqual(t[0].length, 100.0) - self.assertEqual(t[1].title, "TITLE TWO") - self.assertEqual(t[1].track_id, "ID TWO") - self.assertEqual(t[1].length, 200.0) + assert len(t) == 2 + assert t[0].title == "TITLE ONE" + assert t[0].track_id == "ID ONE" + assert t[0].length == 100.0 + assert t[1].title == "TITLE TWO" + assert t[1].track_id == "ID TWO" + assert t[1].length == 200.0 def test_parse_track_indices(self): tracks = [ @@ -259,10 +257,10 @@ class MBAlbumInfoTest(_common.TestCase): d = mb.album_info(release) t = d.tracks - self.assertEqual(t[0].medium_index, 1) - self.assertEqual(t[0].index, 1) - self.assertEqual(t[1].medium_index, 2) - self.assertEqual(t[1].index, 2) + assert t[0].medium_index == 1 + assert t[0].index == 1 + assert t[1].medium_index == 2 + assert t[1].index == 2 def test_parse_medium_numbers_single_medium(self): tracks = [ @@ -272,10 +270,10 @@ class MBAlbumInfoTest(_common.TestCase): release = self._make_release(tracks=tracks) d = mb.album_info(release) - self.assertEqual(d.mediums, 1) + assert d.mediums == 1 t = d.tracks - self.assertEqual(t[0].medium, 1) - self.assertEqual(t[1].medium, 1) + assert t[0].medium == 1 + assert t[1].medium == 1 def test_parse_medium_numbers_two_mediums(self): tracks = [ @@ -299,91 +297,91 @@ class 
MBAlbumInfoTest(_common.TestCase): ) d = mb.album_info(release) - self.assertEqual(d.mediums, 2) + assert d.mediums == 2 t = d.tracks - self.assertEqual(t[0].medium, 1) - self.assertEqual(t[0].medium_index, 1) - self.assertEqual(t[0].index, 1) - self.assertEqual(t[1].medium, 2) - self.assertEqual(t[1].medium_index, 1) - self.assertEqual(t[1].index, 2) + assert t[0].medium == 1 + assert t[0].medium_index == 1 + assert t[0].index == 1 + assert t[1].medium == 2 + assert t[1].medium_index == 1 + assert t[1].index == 2 def test_parse_release_year_month_only(self): release = self._make_release("1987-03") d = mb.album_info(release) - self.assertEqual(d.original_year, 1987) - self.assertEqual(d.original_month, 3) + assert d.original_year == 1987 + assert d.original_month == 3 def test_no_durations(self): tracks = [self._make_track("TITLE", "ID", None)] release = self._make_release(tracks=tracks) d = mb.album_info(release) - self.assertIsNone(d.tracks[0].length) + assert d.tracks[0].length is None def test_track_length_overrides_recording_length(self): tracks = [self._make_track("TITLE", "ID", 1.0 * 1000.0)] release = self._make_release(tracks=tracks, track_length=2.0 * 1000.0) d = mb.album_info(release) - self.assertEqual(d.tracks[0].length, 2.0) + assert d.tracks[0].length == 2.0 def test_no_release_date(self): release = self._make_release(None) d = mb.album_info(release) - self.assertFalse(d.original_year) - self.assertFalse(d.original_month) - self.assertFalse(d.original_day) + assert not d.original_year + assert not d.original_month + assert not d.original_day def test_various_artists_defaults_false(self): release = self._make_release(None) d = mb.album_info(release) - self.assertFalse(d.va) + assert not d.va def test_detect_various_artists(self): release = self._make_release(None) release["artist-credit"][0]["artist"]["id"] = mb.VARIOUS_ARTISTS_ID d = mb.album_info(release) - self.assertTrue(d.va) + assert d.va def test_parse_artist_sort_name(self): release = self._make_release(None) d = mb.album_info(release) - self.assertEqual(d.artist_sort, "ARTIST SORT NAME") + assert d.artist_sort == "ARTIST SORT NAME" def test_parse_releasegroupid(self): release = self._make_release(None) d = mb.album_info(release) - self.assertEqual(d.releasegroup_id, "RELEASE GROUP ID") + assert d.releasegroup_id == "RELEASE GROUP ID" def test_parse_asin(self): release = self._make_release(None) d = mb.album_info(release) - self.assertEqual(d.asin, "ALBUM ASIN") + assert d.asin == "ALBUM ASIN" def test_parse_catalognum(self): release = self._make_release(None) d = mb.album_info(release) - self.assertEqual(d.catalognum, "CATALOG NUMBER") + assert d.catalognum == "CATALOG NUMBER" def test_parse_textrepr(self): release = self._make_release(None) d = mb.album_info(release) - self.assertEqual(d.script, "SCRIPT") - self.assertEqual(d.language, "LANGUAGE") + assert d.script == "SCRIPT" + assert d.language == "LANGUAGE" def test_parse_country(self): release = self._make_release(None) d = mb.album_info(release) - self.assertEqual(d.country, "COUNTRY") + assert d.country == "COUNTRY" def test_parse_status(self): release = self._make_release(None) d = mb.album_info(release) - self.assertEqual(d.albumstatus, "STATUS") + assert d.albumstatus == "STATUS" def test_parse_barcode(self): release = self._make_release(None) d = mb.album_info(release) - self.assertEqual(d.barcode, "BARCODE") + assert d.barcode == "BARCODE" def test_parse_media(self): tracks = [ @@ -392,13 +390,13 @@ class MBAlbumInfoTest(_common.TestCase): ] release = 
self._make_release(None, tracks=tracks) d = mb.album_info(release) - self.assertEqual(d.media, "FORMAT") + assert d.media == "FORMAT" def test_parse_disambig(self): release = self._make_release(None) d = mb.album_info(release) - self.assertEqual(d.albumdisambig, "R_DISAMBIGUATION") - self.assertEqual(d.releasegroupdisambig, "RG_DISAMBIGUATION") + assert d.albumdisambig == "R_DISAMBIGUATION" + assert d.releasegroupdisambig == "RG_DISAMBIGUATION" def test_parse_disctitle(self): tracks = [ @@ -408,64 +406,64 @@ class MBAlbumInfoTest(_common.TestCase): release = self._make_release(None, tracks=tracks) d = mb.album_info(release) t = d.tracks - self.assertEqual(t[0].disctitle, "MEDIUM TITLE") - self.assertEqual(t[1].disctitle, "MEDIUM TITLE") + assert t[0].disctitle == "MEDIUM TITLE" + assert t[1].disctitle == "MEDIUM TITLE" def test_missing_language(self): release = self._make_release(None) del release["text-representation"]["language"] d = mb.album_info(release) - self.assertIsNone(d.language) + assert d.language is None def test_parse_recording_artist(self): tracks = [self._make_track("a", "b", 1, True)] release = self._make_release(None, tracks=tracks) track = mb.album_info(release).tracks[0] - self.assertEqual(track.artist, "RECORDING ARTIST NAME") - self.assertEqual(track.artist_id, "RECORDING ARTIST ID") - self.assertEqual(track.artist_sort, "RECORDING ARTIST SORT NAME") - self.assertEqual(track.artist_credit, "RECORDING ARTIST CREDIT") + assert track.artist == "RECORDING ARTIST NAME" + assert track.artist_id == "RECORDING ARTIST ID" + assert track.artist_sort == "RECORDING ARTIST SORT NAME" + assert track.artist_credit == "RECORDING ARTIST CREDIT" def test_parse_recording_artist_multi(self): tracks = [self._make_track("a", "b", 1, True, multi_artist_credit=True)] release = self._make_release(None, tracks=tracks) track = mb.album_info(release).tracks[0] - self.assertEqual( - track.artist, "RECORDING ARTIST NAME & RECORDING ARTIST 2 NAME" + assert track.artist == "RECORDING ARTIST NAME & RECORDING ARTIST 2 NAME" + assert track.artist_id == "RECORDING ARTIST ID" + assert ( + track.artist_sort + == "RECORDING ARTIST SORT NAME & RECORDING ARTIST 2 SORT NAME" ) - self.assertEqual(track.artist_id, "RECORDING ARTIST ID") - self.assertEqual( - track.artist_sort, - "RECORDING ARTIST SORT NAME & RECORDING ARTIST 2 SORT NAME", - ) - self.assertEqual( - track.artist_credit, - "RECORDING ARTIST CREDIT & RECORDING ARTIST 2 CREDIT", + assert ( + track.artist_credit + == "RECORDING ARTIST CREDIT & RECORDING ARTIST 2 CREDIT" ) - self.assertEqual( - track.artists, ["RECORDING ARTIST NAME", "RECORDING ARTIST 2 NAME"] - ) - self.assertEqual( - track.artists_ids, ["RECORDING ARTIST ID", "RECORDING ARTIST 2 ID"] - ) - self.assertEqual( - track.artists_sort, - ["RECORDING ARTIST SORT NAME", "RECORDING ARTIST 2 SORT NAME"], - ) - self.assertEqual( - track.artists_credit, - ["RECORDING ARTIST CREDIT", "RECORDING ARTIST 2 CREDIT"], - ) + assert track.artists == [ + "RECORDING ARTIST NAME", + "RECORDING ARTIST 2 NAME", + ] + assert track.artists_ids == [ + "RECORDING ARTIST ID", + "RECORDING ARTIST 2 ID", + ] + assert track.artists_sort == [ + "RECORDING ARTIST SORT NAME", + "RECORDING ARTIST 2 SORT NAME", + ] + assert track.artists_credit == [ + "RECORDING ARTIST CREDIT", + "RECORDING ARTIST 2 CREDIT", + ] def test_track_artist_overrides_recording_artist(self): tracks = [self._make_track("a", "b", 1, True)] release = self._make_release(None, tracks=tracks, track_artist=True) track = 
mb.album_info(release).tracks[0] - self.assertEqual(track.artist, "TRACK ARTIST NAME") - self.assertEqual(track.artist_id, "TRACK ARTIST ID") - self.assertEqual(track.artist_sort, "TRACK ARTIST SORT NAME") - self.assertEqual(track.artist_credit, "TRACK ARTIST CREDIT") + assert track.artist == "TRACK ARTIST NAME" + assert track.artist_id == "TRACK ARTIST ID" + assert track.artist_sort == "TRACK ARTIST SORT NAME" + assert track.artist_credit == "TRACK ARTIST CREDIT" def test_track_artist_overrides_recording_artist_multi(self): tracks = [self._make_track("a", "b", 1, True, multi_artist_credit=True)] @@ -473,43 +471,37 @@ class MBAlbumInfoTest(_common.TestCase): None, tracks=tracks, track_artist=True, multi_artist_credit=True ) track = mb.album_info(release).tracks[0] - self.assertEqual( - track.artist, "TRACK ARTIST NAME & TRACK ARTIST 2 NAME" + assert track.artist == "TRACK ARTIST NAME & TRACK ARTIST 2 NAME" + assert track.artist_id == "TRACK ARTIST ID" + assert ( + track.artist_sort + == "TRACK ARTIST SORT NAME & TRACK ARTIST 2 SORT NAME" ) - self.assertEqual(track.artist_id, "TRACK ARTIST ID") - self.assertEqual( - track.artist_sort, - "TRACK ARTIST SORT NAME & TRACK ARTIST 2 SORT NAME", - ) - self.assertEqual( - track.artist_credit, "TRACK ARTIST CREDIT & TRACK ARTIST 2 CREDIT" + assert ( + track.artist_credit == "TRACK ARTIST CREDIT & TRACK ARTIST 2 CREDIT" ) - self.assertEqual( - track.artists, ["TRACK ARTIST NAME", "TRACK ARTIST 2 NAME"] - ) - self.assertEqual( - track.artists_ids, ["TRACK ARTIST ID", "TRACK ARTIST 2 ID"] - ) - self.assertEqual( - track.artists_sort, - ["TRACK ARTIST SORT NAME", "TRACK ARTIST 2 SORT NAME"], - ) - self.assertEqual( - track.artists_credit, - ["TRACK ARTIST CREDIT", "TRACK ARTIST 2 CREDIT"], - ) + assert track.artists == ["TRACK ARTIST NAME", "TRACK ARTIST 2 NAME"] + assert track.artists_ids == ["TRACK ARTIST ID", "TRACK ARTIST 2 ID"] + assert track.artists_sort == [ + "TRACK ARTIST SORT NAME", + "TRACK ARTIST 2 SORT NAME", + ] + assert track.artists_credit == [ + "TRACK ARTIST CREDIT", + "TRACK ARTIST 2 CREDIT", + ] def test_parse_recording_remixer(self): tracks = [self._make_track("a", "b", 1, remixer=True)] release = self._make_release(None, tracks=tracks) track = mb.album_info(release).tracks[0] - self.assertEqual(track.remixer, "RECORDING REMIXER ARTIST NAME") + assert track.remixer == "RECORDING REMIXER ARTIST NAME" def test_data_source(self): release = self._make_release() d = mb.album_info(release) - self.assertEqual(d.data_source, "MusicBrainz") + assert d.data_source == "MusicBrainz" def test_ignored_media(self): config["match"]["ignored_media"] = ["IGNORED1", "IGNORED2"] @@ -519,7 +511,7 @@ class MBAlbumInfoTest(_common.TestCase): ] release = self._make_release(tracks=tracks, medium_format="IGNORED1") d = mb.album_info(release) - self.assertEqual(len(d.tracks), 0) + assert len(d.tracks) == 0 def test_no_ignored_media(self): config["match"]["ignored_media"] = ["IGNORED1", "IGNORED2"] @@ -529,7 +521,7 @@ class MBAlbumInfoTest(_common.TestCase): ] release = self._make_release(tracks=tracks, medium_format="NON-IGNORED") d = mb.album_info(release) - self.assertEqual(len(d.tracks), 2) + assert len(d.tracks) == 2 def test_skip_data_track(self): tracks = [ @@ -539,9 +531,9 @@ class MBAlbumInfoTest(_common.TestCase): ] release = self._make_release(tracks=tracks) d = mb.album_info(release) - self.assertEqual(len(d.tracks), 2) - self.assertEqual(d.tracks[0].title, "TITLE ONE") - self.assertEqual(d.tracks[1].title, "TITLE TWO") + assert len(d.tracks) == 2 
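For readers skimming these hunks: the reason `self.assertEqual(...)` can become a bare comparison is pytest's assertion rewriting, which still reports both operands when the test fails. A minimal, self-contained sketch of the idiom — the track titles below are made up for illustration and are not taken from the beets suite:

    # Run with pytest; a failing bare assert prints both sides of the
    # comparison, much like unittest's assertEqual used to.
    def test_track_titles_sketch():
        titles = ["TITLE ONE", "TITLE TWO"]  # hypothetical data
        assert len(titles) == 2
        assert titles[0] == "TITLE ONE"
        assert titles[1] == "TITLE TWO"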
+ assert d.tracks[0].title == "TITLE ONE" + assert d.tracks[1].title == "TITLE TWO" def test_skip_audio_data_tracks_by_default(self): tracks = [ @@ -555,9 +547,9 @@ class MBAlbumInfoTest(_common.TestCase): ] release = self._make_release(tracks=tracks, data_tracks=data_tracks) d = mb.album_info(release) - self.assertEqual(len(d.tracks), 2) - self.assertEqual(d.tracks[0].title, "TITLE ONE") - self.assertEqual(d.tracks[1].title, "TITLE TWO") + assert len(d.tracks) == 2 + assert d.tracks[0].title == "TITLE ONE" + assert d.tracks[1].title == "TITLE TWO" def test_no_skip_audio_data_tracks_if_configured(self): config["match"]["ignore_data_tracks"] = False @@ -572,10 +564,10 @@ class MBAlbumInfoTest(_common.TestCase): ] release = self._make_release(tracks=tracks, data_tracks=data_tracks) d = mb.album_info(release) - self.assertEqual(len(d.tracks), 3) - self.assertEqual(d.tracks[0].title, "TITLE ONE") - self.assertEqual(d.tracks[1].title, "TITLE TWO") - self.assertEqual(d.tracks[2].title, "TITLE AUDIO DATA") + assert len(d.tracks) == 3 + assert d.tracks[0].title == "TITLE ONE" + assert d.tracks[1].title == "TITLE TWO" + assert d.tracks[2].title == "TITLE AUDIO DATA" def test_skip_video_tracks_by_default(self): tracks = [ @@ -587,9 +579,9 @@ class MBAlbumInfoTest(_common.TestCase): ] release = self._make_release(tracks=tracks) d = mb.album_info(release) - self.assertEqual(len(d.tracks), 2) - self.assertEqual(d.tracks[0].title, "TITLE ONE") - self.assertEqual(d.tracks[1].title, "TITLE TWO") + assert len(d.tracks) == 2 + assert d.tracks[0].title == "TITLE ONE" + assert d.tracks[1].title == "TITLE TWO" def test_skip_video_data_tracks_by_default(self): tracks = [ @@ -603,9 +595,9 @@ class MBAlbumInfoTest(_common.TestCase): ] release = self._make_release(tracks=tracks, data_tracks=data_tracks) d = mb.album_info(release) - self.assertEqual(len(d.tracks), 2) - self.assertEqual(d.tracks[0].title, "TITLE ONE") - self.assertEqual(d.tracks[1].title, "TITLE TWO") + assert len(d.tracks) == 2 + assert d.tracks[0].title == "TITLE ONE" + assert d.tracks[1].title == "TITLE TWO" def test_no_skip_video_tracks_if_configured(self): config["match"]["ignore_data_tracks"] = False @@ -619,10 +611,10 @@ class MBAlbumInfoTest(_common.TestCase): ] release = self._make_release(tracks=tracks) d = mb.album_info(release) - self.assertEqual(len(d.tracks), 3) - self.assertEqual(d.tracks[0].title, "TITLE ONE") - self.assertEqual(d.tracks[1].title, "TITLE VIDEO") - self.assertEqual(d.tracks[2].title, "TITLE TWO") + assert len(d.tracks) == 3 + assert d.tracks[0].title == "TITLE ONE" + assert d.tracks[1].title == "TITLE VIDEO" + assert d.tracks[2].title == "TITLE TWO" def test_no_skip_video_data_tracks_if_configured(self): config["match"]["ignore_data_tracks"] = False @@ -638,10 +630,10 @@ class MBAlbumInfoTest(_common.TestCase): ] release = self._make_release(tracks=tracks, data_tracks=data_tracks) d = mb.album_info(release) - self.assertEqual(len(d.tracks), 3) - self.assertEqual(d.tracks[0].title, "TITLE ONE") - self.assertEqual(d.tracks[1].title, "TITLE TWO") - self.assertEqual(d.tracks[2].title, "TITLE VIDEO") + assert len(d.tracks) == 3 + assert d.tracks[0].title == "TITLE ONE" + assert d.tracks[1].title == "TITLE TWO" + assert d.tracks[2].title == "TITLE VIDEO" def test_track_disambiguation(self): tracks = [ @@ -657,30 +649,30 @@ class MBAlbumInfoTest(_common.TestCase): d = mb.album_info(release) t = d.tracks - self.assertEqual(len(t), 2) - self.assertIsNone(t[0].trackdisambig) - self.assertEqual(t[1].trackdisambig, "SECOND 
TRACK") + assert len(t) == 2 + assert t[0].trackdisambig is None + assert t[1].trackdisambig == "SECOND TRACK" -class ParseIDTest(_common.TestCase): +class ParseIDTest(BeetsTestCase): def test_parse_id_correct(self): id_string = "28e32c71-1450-463e-92bf-e0a46446fc11" out = mb._parse_id(id_string) - self.assertEqual(out, id_string) + assert out == id_string def test_parse_id_non_id_returns_none(self): id_string = "blah blah" out = mb._parse_id(id_string) - self.assertIsNone(out) + assert out is None def test_parse_id_url_finds_id(self): id_string = "28e32c71-1450-463e-92bf-e0a46446fc11" id_url = "https://musicbrainz.org/entity/%s" % id_string out = mb._parse_id(id_url) - self.assertEqual(out, id_string) + assert out == id_string -class ArtistFlatteningTest(_common.TestCase): +class ArtistFlatteningTest(BeetsTestCase): def _credit_dict(self, suffix=""): return { "artist": { @@ -705,26 +697,26 @@ class ArtistFlatteningTest(_common.TestCase): def test_single_artist(self): credit = [self._credit_dict()] a, s, c = mb._flatten_artist_credit(credit) - self.assertEqual(a, "NAME") - self.assertEqual(s, "SORT") - self.assertEqual(c, "CREDIT") + assert a == "NAME" + assert s == "SORT" + assert c == "CREDIT" a, s, c = mb._multi_artist_credit(credit, include_join_phrase=False) - self.assertEqual(a, ["NAME"]) - self.assertEqual(s, ["SORT"]) - self.assertEqual(c, ["CREDIT"]) + assert a == ["NAME"] + assert s == ["SORT"] + assert c == ["CREDIT"] def test_two_artists(self): credit = [self._credit_dict("a"), " AND ", self._credit_dict("b")] a, s, c = mb._flatten_artist_credit(credit) - self.assertEqual(a, "NAMEa AND NAMEb") - self.assertEqual(s, "SORTa AND SORTb") - self.assertEqual(c, "CREDITa AND CREDITb") + assert a == "NAMEa AND NAMEb" + assert s == "SORTa AND SORTb" + assert c == "CREDITa AND CREDITb" a, s, c = mb._multi_artist_credit(credit, include_join_phrase=False) - self.assertEqual(a, ["NAMEa", "NAMEb"]) - self.assertEqual(s, ["SORTa", "SORTb"]) - self.assertEqual(c, ["CREDITa", "CREDITb"]) + assert a == ["NAMEa", "NAMEb"] + assert s == ["SORTa", "SORTb"] + assert c == ["CREDITa", "CREDITb"] def test_alias(self): credit_dict = self._credit_dict() @@ -739,35 +731,35 @@ class ArtistFlatteningTest(_common.TestCase): # test no alias config["import"]["languages"] = [""] flat = mb._flatten_artist_credit([credit_dict]) - self.assertEqual(flat, ("NAME", "SORT", "CREDIT")) + assert flat == ("NAME", "SORT", "CREDIT") # test en primary config["import"]["languages"] = ["en"] flat = mb._flatten_artist_credit([credit_dict]) - self.assertEqual(flat, ("ALIASen", "ALIASSORTen", "CREDIT")) + assert flat == ("ALIASen", "ALIASSORTen", "CREDIT") # test en_GB en primary config["import"]["languages"] = ["en_GB", "en"] flat = mb._flatten_artist_credit([credit_dict]) - self.assertEqual(flat, ("ALIASen_GB", "ALIASSORTen_GB", "CREDIT")) + assert flat == ("ALIASen_GB", "ALIASSORTen_GB", "CREDIT") # test en en_GB primary config["import"]["languages"] = ["en", "en_GB"] flat = mb._flatten_artist_credit([credit_dict]) - self.assertEqual(flat, ("ALIASen", "ALIASSORTen", "CREDIT")) + assert flat == ("ALIASen", "ALIASSORTen", "CREDIT") # test fr primary config["import"]["languages"] = ["fr"] flat = mb._flatten_artist_credit([credit_dict]) - self.assertEqual(flat, ("ALIASfr_P", "ALIASSORTfr_P", "CREDIT")) + assert flat == ("ALIASfr_P", "ALIASSORTfr_P", "CREDIT") # test for not matching non-primary config["import"]["languages"] = ["pt_BR", "fr"] flat = mb._flatten_artist_credit([credit_dict]) - self.assertEqual(flat, ("ALIASfr_P", 
"ALIASSORTfr_P", "CREDIT")) + assert flat == ("ALIASfr_P", "ALIASSORTfr_P", "CREDIT") -class MBLibraryTest(unittest.TestCase): +class MBLibraryTest(BeetsTestCase): def test_match_track(self): with mock.patch("musicbrainzngs.search_recordings") as p: p.return_value = { @@ -782,8 +774,8 @@ class MBLibraryTest(unittest.TestCase): ti = list(mb.match_track("hello", "there"))[0] p.assert_called_with(artist="hello", recording="there", limit=5) - self.assertEqual(ti.title, "foo") - self.assertEqual(ti.track_id, "bar") + assert ti.title == "foo" + assert ti.track_id == "bar" def test_match_album(self): mbid = "d2a6f856-b553-40a0-ac54-a321e8e2da99" @@ -836,20 +828,20 @@ class MBLibraryTest(unittest.TestCase): sp.assert_called_with(artist="hello", release="there", limit=5) gp.assert_called_with(mbid, mock.ANY) - self.assertEqual(ai.tracks[0].title, "foo") - self.assertEqual(ai.album, "hi") + assert ai.tracks[0].title == "foo" + assert ai.album == "hi" def test_match_track_empty(self): with mock.patch("musicbrainzngs.search_recordings") as p: til = list(mb.match_track(" ", " ")) - self.assertFalse(p.called) - self.assertEqual(til, []) + assert not p.called + assert til == [] def test_match_album_empty(self): with mock.patch("musicbrainzngs.search_releases") as p: ail = list(mb.match_album(" ", " ")) - self.assertFalse(p.called) - self.assertEqual(ail, []) + assert not p.called + assert ail == [] def test_follow_pseudo_releases(self): side_effect = [ @@ -936,7 +928,7 @@ class MBLibraryTest(unittest.TestCase): with mock.patch("musicbrainzngs.get_release_by_id") as gp: gp.side_effect = side_effect album = mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02") - self.assertEqual(album.country, "COUNTRY") + assert album.country == "COUNTRY" def test_pseudo_releases_with_empty_links(self): side_effect = [ @@ -981,7 +973,7 @@ class MBLibraryTest(unittest.TestCase): with mock.patch("musicbrainzngs.get_release_by_id") as gp: gp.side_effect = side_effect album = mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02") - self.assertIsNone(album.country) + assert album.country is None def test_pseudo_releases_without_links(self): side_effect = [ @@ -1025,7 +1017,7 @@ class MBLibraryTest(unittest.TestCase): with mock.patch("musicbrainzngs.get_release_by_id") as gp: gp.side_effect = side_effect album = mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02") - self.assertIsNone(album.country) + assert album.country is None def test_pseudo_releases_with_unsupported_links(self): side_effect = [ @@ -1076,12 +1068,4 @@ class MBLibraryTest(unittest.TestCase): with mock.patch("musicbrainzngs.get_release_by_id") as gp: gp.side_effect = side_effect album = mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02") - self.assertIsNone(album.country) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert album.country is None diff --git a/test/test_metasync.py b/test/test_metasync.py index 9dae099af..13c003a1c 100644 --- a/test/test_metasync.py +++ b/test/test_metasync.py @@ -16,12 +16,11 @@ import os import platform import time -import unittest from datetime import datetime from beets.library import Item from beets.test import _common -from beets.test.helper import TestHelper +from beets.test.helper import PluginTestCase def _parsetime(s): @@ -32,15 +31,15 @@ def _is_windows(): return platform.system() == "Windows" -class MetaSyncTest(_common.TestCase, TestHelper): +class MetaSyncTest(PluginTestCase): + plugin = "metasync" 
itunes_library_unix = os.path.join(_common.RSRC, b"itunes_library_unix.xml") itunes_library_windows = os.path.join( _common.RSRC, b"itunes_library_windows.xml" ) def setUp(self): - self.setup_beets() - self.load_plugins("metasync") + super().setUp() self.config["metasync"]["source"] = "itunes" @@ -70,12 +69,12 @@ class MetaSyncTest(_common.TestCase, TestHelper): items[1].album = "An Awesome Wave" if _is_windows(): - items[0].path = ( - "G:\\Music\\Alt-J\\An Awesome Wave\\03 Tessellate.mp3" - ) - items[1].path = ( - "G:\\Music\\Alt-J\\An Awesome Wave\\04 Breezeblocks.mp3" - ) + items[ + 0 + ].path = "G:\\Music\\Alt-J\\An Awesome Wave\\03 Tessellate.mp3" + items[ + 1 + ].path = "G:\\Music\\Alt-J\\An Awesome Wave\\04 Breezeblocks.mp3" else: items[0].path = "/Music/Alt-J/An Awesome Wave/03 Tessellate.mp3" items[1].path = "/Music/Alt-J/An Awesome Wave/04 Breezeblocks.mp3" @@ -83,60 +82,44 @@ class MetaSyncTest(_common.TestCase, TestHelper): for item in items: self.lib.add(item) - def tearDown(self): - self.unload_plugins() - self.teardown_beets() - def test_load_item_types(self): # This test also verifies that the MetaSources have loaded correctly - self.assertIn("amarok_score", Item._types) - self.assertIn("itunes_rating", Item._types) + assert "amarok_score" in Item._types + assert "itunes_rating" in Item._types def test_pretend_sync_from_itunes(self): out = self.run_with_output("metasync", "-p") - self.assertIn("itunes_rating: 60 -> 80", out) - self.assertIn("itunes_rating: 100", out) - self.assertIn("itunes_playcount: 31", out) - self.assertIn("itunes_skipcount: 3", out) - self.assertIn("itunes_lastplayed: 2015-05-04 12:20:51", out) - self.assertIn("itunes_lastskipped: 2015-02-05 15:41:04", out) - self.assertIn("itunes_dateadded: 2014-04-24 09:28:38", out) - self.assertEqual(self.lib.items()[0].itunes_rating, 60) + assert "itunes_rating: 60 -> 80" in out + assert "itunes_rating: 100" in out + assert "itunes_playcount: 31" in out + assert "itunes_skipcount: 3" in out + assert "itunes_lastplayed: 2015-05-04 12:20:51" in out + assert "itunes_lastskipped: 2015-02-05 15:41:04" in out + assert "itunes_dateadded: 2014-04-24 09:28:38" in out + assert self.lib.items()[0].itunes_rating == 60 def test_sync_from_itunes(self): self.run_command("metasync") - self.assertEqual(self.lib.items()[0].itunes_rating, 80) - self.assertEqual(self.lib.items()[0].itunes_playcount, 0) - self.assertEqual(self.lib.items()[0].itunes_skipcount, 3) - self.assertFalse(hasattr(self.lib.items()[0], "itunes_lastplayed")) - self.assertEqual( - self.lib.items()[0].itunes_lastskipped, - _parsetime("2015-02-05 15:41:04"), + assert self.lib.items()[0].itunes_rating == 80 + assert self.lib.items()[0].itunes_playcount == 0 + assert self.lib.items()[0].itunes_skipcount == 3 + assert not hasattr(self.lib.items()[0], "itunes_lastplayed") + assert self.lib.items()[0].itunes_lastskipped == _parsetime( + "2015-02-05 15:41:04" ) - self.assertEqual( - self.lib.items()[0].itunes_dateadded, - _parsetime("2014-04-24 09:28:38"), + assert self.lib.items()[0].itunes_dateadded == _parsetime( + "2014-04-24 09:28:38" ) - self.assertEqual(self.lib.items()[1].itunes_rating, 100) - self.assertEqual(self.lib.items()[1].itunes_playcount, 31) - self.assertEqual(self.lib.items()[1].itunes_skipcount, 0) - self.assertEqual( - self.lib.items()[1].itunes_lastplayed, - _parsetime("2015-05-04 12:20:51"), + assert self.lib.items()[1].itunes_rating == 100 + assert self.lib.items()[1].itunes_playcount == 31 + assert self.lib.items()[1].itunes_skipcount == 0 + 
assert self.lib.items()[1].itunes_lastplayed == _parsetime( + "2015-05-04 12:20:51" ) - self.assertEqual( - self.lib.items()[1].itunes_dateadded, - _parsetime("2014-04-24 09:28:38"), + assert self.lib.items()[1].itunes_dateadded == _parsetime( + "2014-04-24 09:28:38" ) - self.assertFalse(hasattr(self.lib.items()[1], "itunes_lastskipped")) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert not hasattr(self.lib.items()[1], "itunes_lastskipped") diff --git a/test/test_pipeline.py b/test/test_pipeline.py index f5d4cebf3..83b8d744c 100644 --- a/test/test_pipeline.py +++ b/test/test_pipeline.py @@ -12,11 +12,12 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Test the "pipeline.py" restricted parallel programming library. -""" +"""Test the "pipeline.py" restricted parallel programming library.""" import unittest +import pytest + from beets.util import pipeline @@ -32,14 +33,14 @@ def _work(): i *= 2 -def _consume(l): +def _consume(result): while True: i = yield - l.append(i) + result.append(i) # A worker that raises an exception. -class ExceptionFixture(Exception): +class PipelineError(Exception): pass @@ -48,7 +49,7 @@ def _exc_work(num=3): while True: i = yield i if i == num: - raise ExceptionFixture() + raise PipelineError() i *= 2 @@ -73,138 +74,153 @@ def _multi_work(): class SimplePipelineTest(unittest.TestCase): def setUp(self): - self.l = [] - self.pl = pipeline.Pipeline((_produce(), _work(), _consume(self.l))) - - def test_run_sequential(self): - self.pl.run_sequential() - self.assertEqual(self.l, [0, 2, 4, 6, 8]) - - def test_run_parallel(self): - self.pl.run_parallel() - self.assertEqual(self.l, [0, 2, 4, 6, 8]) - - def test_pull(self): - pl = pipeline.Pipeline((_produce(), _work())) - self.assertEqual(list(pl.pull()), [0, 2, 4, 6, 8]) - - def test_pull_chain(self): - pl = pipeline.Pipeline((_produce(), _work())) - pl2 = pipeline.Pipeline((pl.pull(), _work())) - self.assertEqual(list(pl2.pull()), [0, 4, 8, 12, 16]) - - -class ParallelStageTest(unittest.TestCase): - def setUp(self): - self.l = [] + self.result = [] self.pl = pipeline.Pipeline( - (_produce(), (_work(), _work()), _consume(self.l)) + (_produce(), _work(), _consume(self.result)) ) def test_run_sequential(self): self.pl.run_sequential() - self.assertEqual(self.l, [0, 2, 4, 6, 8]) + assert self.result == [0, 2, 4, 6, 8] + + def test_run_parallel(self): + self.pl.run_parallel() + assert self.result == [0, 2, 4, 6, 8] + + def test_pull(self): + pl = pipeline.Pipeline((_produce(), _work())) + assert list(pl.pull()) == [0, 2, 4, 6, 8] + + def test_pull_chain(self): + pl = pipeline.Pipeline((_produce(), _work())) + pl2 = pipeline.Pipeline((pl.pull(), _work())) + assert list(pl2.pull()) == [0, 4, 8, 12, 16] + + +class ParallelStageTest(unittest.TestCase): + def setUp(self): + self.result = [] + self.pl = pipeline.Pipeline( + (_produce(), (_work(), _work()), _consume(self.result)) + ) + + def test_run_sequential(self): + self.pl.run_sequential() + assert self.result == [0, 2, 4, 6, 8] def test_run_parallel(self): self.pl.run_parallel() # Order possibly not preserved; use set equality. 
- self.assertEqual(set(self.l), {0, 2, 4, 6, 8}) + assert set(self.result) == {0, 2, 4, 6, 8} def test_pull(self): pl = pipeline.Pipeline((_produce(), (_work(), _work()))) - self.assertEqual(list(pl.pull()), [0, 2, 4, 6, 8]) + assert list(pl.pull()) == [0, 2, 4, 6, 8] class ExceptionTest(unittest.TestCase): def setUp(self): - self.l = [] - self.pl = pipeline.Pipeline((_produce(), _exc_work(), _consume(self.l))) + self.result = [] + self.pl = pipeline.Pipeline( + (_produce(), _exc_work(), _consume(self.result)) + ) def test_run_sequential(self): - self.assertRaises(ExceptionFixture, self.pl.run_sequential) + with pytest.raises(PipelineError): + self.pl.run_sequential() def test_run_parallel(self): - self.assertRaises(ExceptionFixture, self.pl.run_parallel) + with pytest.raises(PipelineError): + self.pl.run_parallel() def test_pull(self): pl = pipeline.Pipeline((_produce(), _exc_work())) pull = pl.pull() for i in range(3): next(pull) - self.assertRaises(ExceptionFixture, pull.__next__) + with pytest.raises(PipelineError): + next(pull) class ParallelExceptionTest(unittest.TestCase): def setUp(self): - self.l = [] + self.result = [] self.pl = pipeline.Pipeline( - (_produce(), (_exc_work(), _exc_work()), _consume(self.l)) + (_produce(), (_exc_work(), _exc_work()), _consume(self.result)) ) def test_run_parallel(self): - self.assertRaises(ExceptionFixture, self.pl.run_parallel) + with pytest.raises(PipelineError): + self.pl.run_parallel() class ConstrainedThreadedPipelineTest(unittest.TestCase): + def setUp(self): + self.result = [] + def test_constrained(self): - l = [] # Do a "significant" amount of work... - pl = pipeline.Pipeline((_produce(1000), _work(), _consume(l))) + self.pl = pipeline.Pipeline( + (_produce(1000), _work(), _consume(self.result)) + ) # ... with only a single queue slot. - pl.run_parallel(1) - self.assertEqual(l, [i * 2 for i in range(1000)]) + self.pl.run_parallel(1) + assert self.result == [i * 2 for i in range(1000)] def test_constrained_exception(self): # Raise an exception in a constrained pipeline. 
- l = [] - pl = pipeline.Pipeline((_produce(1000), _exc_work(), _consume(l))) - self.assertRaises(ExceptionFixture, pl.run_parallel, 1) + self.pl = pipeline.Pipeline( + (_produce(1000), _exc_work(), _consume(self.result)) + ) + with pytest.raises(PipelineError): + self.pl.run_parallel(1) def test_constrained_parallel(self): - l = [] - pl = pipeline.Pipeline( - (_produce(1000), (_work(), _work()), _consume(l)) + self.pl = pipeline.Pipeline( + (_produce(1000), (_work(), _work()), _consume(self.result)) ) - pl.run_parallel(1) - self.assertEqual(set(l), {i * 2 for i in range(1000)}) + self.pl.run_parallel(1) + assert set(self.result) == {i * 2 for i in range(1000)} class BubbleTest(unittest.TestCase): def setUp(self): - self.l = [] - self.pl = pipeline.Pipeline((_produce(), _bub_work(), _consume(self.l))) - - def test_run_sequential(self): - self.pl.run_sequential() - self.assertEqual(self.l, [0, 2, 4, 8]) - - def test_run_parallel(self): - self.pl.run_parallel() - self.assertEqual(self.l, [0, 2, 4, 8]) - - def test_pull(self): - pl = pipeline.Pipeline((_produce(), _bub_work())) - self.assertEqual(list(pl.pull()), [0, 2, 4, 8]) - - -class MultiMessageTest(unittest.TestCase): - def setUp(self): - self.l = [] + self.result = [] self.pl = pipeline.Pipeline( - (_produce(), _multi_work(), _consume(self.l)) + (_produce(), _bub_work(), _consume(self.result)) ) def test_run_sequential(self): self.pl.run_sequential() - self.assertEqual(self.l, [0, 0, 1, -1, 2, -2, 3, -3, 4, -4]) + assert self.result == [0, 2, 4, 8] def test_run_parallel(self): self.pl.run_parallel() - self.assertEqual(self.l, [0, 0, 1, -1, 2, -2, 3, -3, 4, -4]) + assert self.result == [0, 2, 4, 8] + + def test_pull(self): + pl = pipeline.Pipeline((_produce(), _bub_work())) + assert list(pl.pull()) == [0, 2, 4, 8] + + +class MultiMessageTest(unittest.TestCase): + def setUp(self): + self.result = [] + self.pl = pipeline.Pipeline( + (_produce(), _multi_work(), _consume(self.result)) + ) + + def test_run_sequential(self): + self.pl.run_sequential() + assert self.result == [0, 0, 1, -1, 2, -2, 3, -3, 4, -4] + + def test_run_parallel(self): + self.pl.run_parallel() + assert self.result == [0, 0, 1, -1, 2, -2, 3, -3, 4, -4] def test_pull(self): pl = pipeline.Pipeline((_produce(), _multi_work())) - self.assertEqual(list(pl.pull()), [0, 0, 1, -1, 2, -2, 3, -3, 4, -4]) + assert list(pl.pull()) == [0, 0, 1, -1, 2, -2, 3, -3, 4, -4] class StageDecoratorTest(unittest.TestCase): @@ -214,7 +230,7 @@ class StageDecoratorTest(unittest.TestCase): return i + n pl = pipeline.Pipeline([iter([1, 2, 3]), add(2)]) - self.assertEqual(list(pl.pull()), [3, 4, 5]) + assert list(pl.pull()) == [3, 4, 5] def test_mutator_stage_decorator(self): @pipeline.mutator_stage @@ -222,19 +238,6 @@ class StageDecoratorTest(unittest.TestCase): item[key] = True pl = pipeline.Pipeline( - [ - iter([{"x": False}, {"a": False}]), - setkey("x"), - ] + [iter([{"x": False}, {"a": False}]), setkey("x")] ) - self.assertEqual( - list(pl.pull()), [{"x": True}, {"a": False, "x": True}] - ) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert list(pl.pull()) == [{"x": True}, {"a": False, "x": True}] diff --git a/test/test_plugins.py b/test/test_plugins.py index 707c7db31..efa26d084 100644 --- a/test/test_plugins.py +++ b/test/test_plugins.py @@ -15,10 +15,10 @@ import itertools import os -import shutil import unittest from unittest.mock import ANY, Mock, patch +import pytest from mediafile 
import MediaFile from beets import config, plugins, ui @@ -32,14 +32,9 @@ from beets.importer import ( from beets.library import Item from beets.plugins import MetadataSourcePlugin from beets.test import helper -from beets.test._common import RSRC -from beets.test.helper import ( - AutotagStub, - ImportHelper, - TerminalImportSessionSetup, - TestHelper, -) -from beets.util import bytestring_path, displayable_path, syspath +from beets.test.helper import AutotagStub, ImportHelper, TerminalImportMixin +from beets.test.helper import PluginTestCase as BasePluginTestCase +from beets.util import displayable_path, syspath from beets.util.id_extractors import ( beatport_id_regex, deezer_id_regex, @@ -47,11 +42,10 @@ from beets.util.id_extractors import ( ) -class PluginLoaderTestCase(unittest.TestCase, TestHelper): +class PluginLoaderTestCase(BasePluginTestCase): def setup_plugin_loader(self): # FIXME the mocking code is horrific, but this is the lowest and # earliest level of the plugin mechanism we can hook into. - self.load_plugins() self._plugin_loader_patch = patch("beets.plugins.load_plugins") self._plugin_classes = set() load_plugins = self._plugin_loader_patch.start() @@ -60,21 +54,26 @@ class PluginLoaderTestCase(unittest.TestCase, TestHelper): plugins._classes.update(self._plugin_classes) load_plugins.side_effect = myload - self.setup_beets() def teardown_plugin_loader(self): self._plugin_loader_patch.stop() - self.unload_plugins() def register_plugin(self, plugin_class): self._plugin_classes.add(plugin_class) def setUp(self): self.setup_plugin_loader() + super().setUp() def tearDown(self): self.teardown_plugin_loader() - self.teardown_beets() + super().tearDown() + + +class PluginImportTestCase(ImportHelper, PluginLoaderTestCase): + def setUp(self): + super().setUp() + self.prepare_album_for_import(2) class ItemTypesTest(PluginLoaderTestCase): @@ -90,17 +89,17 @@ class ItemTypesTest(PluginLoaderTestCase): # Do not match unset values out = self.run_with_output("ls", "rating:1..3") - self.assertNotIn("aaa", out) + assert "aaa" not in out self.run_command("modify", "rating=2", "--yes") # Match in range out = self.run_with_output("ls", "rating:1..3") - self.assertIn("aaa", out) + assert "aaa" in out # Don't match out of range out = self.run_with_output("ls", "rating:3..5") - self.assertNotIn("aaa", out) + assert "aaa" not in out class ItemWriteTest(PluginLoaderTestCase): @@ -124,7 +123,7 @@ class ItemWriteTest(PluginLoaderTestCase): item.write() mediafile = MediaFile(syspath(item.path)) - self.assertEqual(mediafile.artist, "YYY") + assert mediafile.artist == "YYY" def register_listener(self, event, func): self.event_listener_plugin.register_listener(event, func) @@ -142,7 +141,8 @@ class ItemTypeConflictTest(PluginLoaderTestCase): self.advent_listener_plugin = AdventListenerPlugin self.register_plugin(EventListenerPlugin) self.register_plugin(AdventListenerPlugin) - self.assertRaises(plugins.PluginConflictException, plugins.types, Item) + with pytest.raises(plugins.PluginConflictError): + plugins.types(Item) def test_match(self): class EventListenerPlugin(plugins.BeetsPlugin): @@ -155,74 +155,29 @@ class ItemTypeConflictTest(PluginLoaderTestCase): self.advent_listener_plugin = AdventListenerPlugin self.register_plugin(EventListenerPlugin) self.register_plugin(AdventListenerPlugin) - self.assertIsNotNone(plugins.types(Item)) + assert plugins.types(Item) is not None -class EventsTest(ImportHelper, PluginLoaderTestCase): +class EventsTest(PluginImportTestCase): def setUp(self): super().setUp() 
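Several hunks in this file (and in test_pipeline.py above) replace `self.assertRaises(...)` with the `pytest.raises` context manager. A minimal sketch of the equivalence, assuming only pytest itself; the exception class and function below are illustrative, not beets APIs:

    import pytest

    class ConflictError(Exception):
        """Illustrative stand-in for an exception a library might raise."""

    def register_twice():
        raise ConflictError("duplicate registration")

    def test_raises_sketch():
        # Old style: self.assertRaises(ConflictError, register_twice)
        # New style: the context manager also exposes the caught exception.
        with pytest.raises(ConflictError) as exc_info:
            register_twice()
        assert "duplicate" in str(exc_info.value)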
- self.__create_import_dir(2) - config["import"]["pretend"] = True - - def __copy_file(self, dest_path, metadata): - # Copy files - resource_path = os.path.join(RSRC, b"full.mp3") - shutil.copy(syspath(resource_path), syspath(dest_path)) - medium = MediaFile(dest_path) - # Set metadata - for attr in metadata: - setattr(medium, attr, metadata[attr]) - medium.save() - - def __create_import_dir(self, count): - self.import_dir = os.path.join(self.temp_dir, b"testsrcdir") - if os.path.isdir(syspath(self.import_dir)): - shutil.rmtree(syspath(self.import_dir)) - - self.album_path = os.path.join(self.import_dir, b"album") - os.makedirs(self.album_path) - - metadata = { - "artist": "Tag Artist", - "album": "Tag Album", - "albumartist": None, - "mb_trackid": None, - "mb_albumid": None, - "comp": None, - } - self.file_paths = [] - for i in range(count): - metadata["track"] = i + 1 - metadata["title"] = "Tag Title Album %d" % (i + 1) - track_file = bytestring_path("%02d - track.mp3" % (i + 1)) - dest_path = os.path.join(self.album_path, track_file) - self.__copy_file(dest_path, metadata) - self.file_paths.append(dest_path) def test_import_task_created(self): - import_files = [self.import_dir] - self._setup_import_session(singletons=False) - self.importer.paths = import_files + self.importer = self.setup_importer(pretend=True) with helper.capture_log() as logs: self.importer.run() - self.unload_plugins() # Exactly one event should have been imported (for the album). # Sentinels do not get emitted. - self.assertEqual(logs.count("Sending event: import_task_created"), 1) + assert logs.count("Sending event: import_task_created") == 1 logs = [line for line in logs if not line.startswith("Sending event:")] - self.assertEqual( - logs, - [ - "Album: {}".format( - displayable_path(os.path.join(self.import_dir, b"album")) - ), - " {}".format(displayable_path(self.file_paths[0])), - " {}".format(displayable_path(self.file_paths[1])), - ], - ) + assert logs == [ + f'Album: {displayable_path(os.path.join(self.import_dir, b"album"))}', + f" {displayable_path(self.import_media[0].path)}", + f" {displayable_path(self.import_media[1].path)}", + ] def test_import_task_created_with_plugin(self): class ToSingletonPlugin(plugins.BeetsPlugin): @@ -250,38 +205,29 @@ class EventsTest(ImportHelper, PluginLoaderTestCase): to_singleton_plugin = ToSingletonPlugin self.register_plugin(to_singleton_plugin) - import_files = [self.import_dir] - self._setup_import_session(singletons=False) - self.importer.paths = import_files + self.importer = self.setup_importer(pretend=True) with helper.capture_log() as logs: self.importer.run() - self.unload_plugins() # Exactly one event should have been imported (for the album). # Sentinels do not get emitted. 
- self.assertEqual(logs.count("Sending event: import_task_created"), 1) + assert logs.count("Sending event: import_task_created") == 1 logs = [line for line in logs if not line.startswith("Sending event:")] - self.assertEqual( - logs, - [ - "Singleton: {}".format(displayable_path(self.file_paths[0])), - "Singleton: {}".format(displayable_path(self.file_paths[1])), - ], - ) + assert logs == [ + f"Singleton: {displayable_path(self.import_media[0].path)}", + f"Singleton: {displayable_path(self.import_media[1].path)}", + ] class HelpersTest(unittest.TestCase): def test_sanitize_choices(self): - self.assertEqual( - plugins.sanitize_choices(["A", "Z"], ("A", "B")), ["A"] - ) - self.assertEqual(plugins.sanitize_choices(["A", "A"], ("A")), ["A"]) - self.assertEqual( - plugins.sanitize_choices(["D", "*", "A"], ("A", "B", "C", "D")), - ["D", "B", "C", "A"], - ) + assert plugins.sanitize_choices(["A", "Z"], ("A", "B")) == ["A"] + assert plugins.sanitize_choices(["A", "A"], ("A")) == ["A"] + assert plugins.sanitize_choices( + ["D", "*", "A"], ("A", "B", "C", "D") + ) == ["D", "B", "C", "A"] class ListenersTest(PluginLoaderTestCase): @@ -296,17 +242,13 @@ class ListenersTest(PluginLoaderTestCase): pass d = DummyPlugin() - self.assertEqual(DummyPlugin._raw_listeners["cli_exit"], [d.dummy]) + assert DummyPlugin._raw_listeners["cli_exit"] == [d.dummy] d2 = DummyPlugin() - self.assertEqual( - DummyPlugin._raw_listeners["cli_exit"], [d.dummy, d2.dummy] - ) + assert DummyPlugin._raw_listeners["cli_exit"] == [d.dummy, d2.dummy] d.register_listener("cli_exit", d2.dummy) - self.assertEqual( - DummyPlugin._raw_listeners["cli_exit"], [d.dummy, d2.dummy] - ) + assert DummyPlugin._raw_listeners["cli_exit"] == [d.dummy, d2.dummy] @patch("beets.plugins.find_plugins") @patch("inspect.getfullargspec") @@ -338,8 +280,6 @@ class ListenersTest(PluginLoaderTestCase): @patch("beets.plugins.find_plugins") def test_listener_params(self, mock_find_plugins): - test = self - class DummyPlugin(plugins.BeetsPlugin): def __init__(self): super().__init__() @@ -351,10 +291,10 @@ class ListenersTest(PluginLoaderTestCase): self.register_listener(f"event{i}", meth) def dummy1(self, foo): - test.assertEqual(foo, 5) + assert foo == 5 def dummy2(self, foo=None): - test.assertEqual(foo, 5) + assert foo == 5 def dummy3(self): # argument cut off @@ -365,23 +305,23 @@ class ListenersTest(PluginLoaderTestCase): pass def dummy5(self, bar): - test.assertFalse(True) + assert not True # more complex examples def dummy6(self, foo, bar=None): - test.assertEqual(foo, 5) - test.assertEqual(bar, None) + assert foo == 5 + assert bar is None def dummy7(self, foo, **kwargs): - test.assertEqual(foo, 5) - test.assertEqual(kwargs, {}) + assert foo == 5 + assert kwargs == {} def dummy8(self, foo, bar, **kwargs): - test.assertFalse(True) + assert not True def dummy9(self, **kwargs): - test.assertEqual(kwargs, {"foo": 5}) + assert kwargs == {"foo": 5} d = DummyPlugin() mock_find_plugins.return_value = (d,) @@ -391,25 +331,22 @@ class ListenersTest(PluginLoaderTestCase): plugins.send("event3", foo=5) plugins.send("event4", foo=5) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): plugins.send("event5", foo=5) plugins.send("event6", foo=5) plugins.send("event7", foo=5) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): plugins.send("event8", foo=5) plugins.send("event9", foo=5) -class PromptChoicesTest( - TerminalImportSessionSetup, ImportHelper, PluginLoaderTestCase -): +class PromptChoicesTest(TerminalImportMixin, 
PluginImportTestCase): def setUp(self): super().setUp() - self._create_import_dir(3) - self._setup_import_session() + self.setup_importer() self.matcher = AutotagStub().install() # keep track of ui.input_option() calls self.input_options_patcher = patch( @@ -564,10 +501,10 @@ class PromptChoicesTest( with patch.object(DummyPlugin, "foo", autospec=True) as mock_foo: with helper.control_stdin("\n".join(["f", "s"])): self.importer.run() - self.assertEqual(mock_foo.call_count, 1) + assert mock_foo.call_count == 1 # input_options should be called twice, as foo() returns None - self.assertEqual(self.mock_input_options.call_count, 2) + assert self.mock_input_options.call_count == 2 self.mock_input_options.assert_called_with( opts, default="a", require=ANY ) @@ -616,36 +553,36 @@ class ParseSpotifyIDTest(unittest.TestCase): def test_parse_id_correct(self): id_string = "39WqpoPgZxygo6YQjehLJJ" out = MetadataSourcePlugin._get_id("album", id_string, spotify_id_regex) - self.assertEqual(out, id_string) + assert out == id_string def test_parse_id_non_id_returns_none(self): id_string = "blah blah" out = MetadataSourcePlugin._get_id("album", id_string, spotify_id_regex) - self.assertIsNone(out) + assert out is None def test_parse_id_url_finds_id(self): id_string = "39WqpoPgZxygo6YQjehLJJ" id_url = "https://open.spotify.com/album/%s" % id_string out = MetadataSourcePlugin._get_id("album", id_url, spotify_id_regex) - self.assertEqual(out, id_string) + assert out == id_string class ParseDeezerIDTest(unittest.TestCase): def test_parse_id_correct(self): id_string = "176356382" out = MetadataSourcePlugin._get_id("album", id_string, deezer_id_regex) - self.assertEqual(out, id_string) + assert out == id_string def test_parse_id_non_id_returns_none(self): id_string = "blah blah" out = MetadataSourcePlugin._get_id("album", id_string, deezer_id_regex) - self.assertIsNone(out) + assert out is None def test_parse_id_url_finds_id(self): id_string = "176356382" id_url = "https://www.deezer.com/album/%s" % id_string out = MetadataSourcePlugin._get_id("album", id_url, deezer_id_regex) - self.assertEqual(out, id_string) + assert out == id_string class ParseBeatportIDTest(unittest.TestCase): @@ -654,25 +591,17 @@ class ParseBeatportIDTest(unittest.TestCase): out = MetadataSourcePlugin._get_id( "album", id_string, beatport_id_regex ) - self.assertEqual(out, id_string) + assert out == id_string def test_parse_id_non_id_returns_none(self): id_string = "blah blah" out = MetadataSourcePlugin._get_id( "album", id_string, beatport_id_regex ) - self.assertIsNone(out) + assert out is None def test_parse_id_url_finds_id(self): id_string = "3089651" id_url = "https://www.beatport.com/release/album-name/%s" % id_string out = MetadataSourcePlugin._get_id("album", id_url, beatport_id_regex) - self.assertEqual(out, id_string) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert out == id_string diff --git a/test/test_query.py b/test/test_query.py index 69277cfcd..6f7fe4da7 100644 --- a/test/test_query.py +++ b/test/test_query.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Various tests for querying the library database. 
-""" +"""Various tests for querying the library database.""" import os import sys @@ -21,6 +20,9 @@ import unittest from contextlib import contextmanager from functools import partial +import pytest +from mock import patch + import beets.library from beets import dbcore, util from beets.dbcore import types @@ -29,8 +31,8 @@ from beets.dbcore.query import ( NoneQuery, ParsingError, ) -from beets.library import Item, Library -from beets.test import _common, helper +from beets.test import _common +from beets.test.helper import BeetsTestCase, ItemInDBTestCase from beets.util import syspath # Because the absolute path begins with something like C:, we @@ -38,36 +40,42 @@ from beets.util import syspath WIN32_NO_IMPLICIT_PATHS = "Implicit paths are not supported on Windows" -class TestHelper(helper.TestHelper): - def assertInResult(self, item, results): # noqa +class AssertsMixin: + def assert_items_matched(self, results, titles): + assert {i.title for i in results} == set(titles) + + def assert_albums_matched(self, results, albums): + assert {a.album for a in results} == set(albums) + + def assertInResult(self, item, results): result_ids = [i.id for i in results] - self.assertIn(item.id, result_ids) + assert item.id in result_ids - def assertNotInResult(self, item, results): # noqa + def assertNotInResult(self, item, results): result_ids = [i.id for i in results] - self.assertNotIn(item.id, result_ids) + assert item.id not in result_ids -class AnyFieldQueryTest(_common.LibTestCase): +class AnyFieldQueryTest(ItemInDBTestCase): def test_no_restriction(self): q = dbcore.query.AnyFieldQuery( "title", beets.library.Item._fields.keys(), dbcore.query.SubstringQuery, ) - self.assertEqual(self.lib.items(q).get().title, "the title") + assert self.lib.items(q).get().title == "the title" def test_restriction_completeness(self): q = dbcore.query.AnyFieldQuery( "title", ["title"], dbcore.query.SubstringQuery ) - self.assertEqual(self.lib.items(q).get().title, "the title") + assert self.lib.items(q).get().title == "the title" def test_restriction_soundness(self): q = dbcore.query.AnyFieldQuery( "title", ["artist"], dbcore.query.SubstringQuery ) - self.assertIsNone(self.lib.items(q).get()) + assert self.lib.items(q).get() is None def test_eq(self): q1 = dbcore.query.AnyFieldQuery( @@ -76,26 +84,17 @@ class AnyFieldQueryTest(_common.LibTestCase): q2 = dbcore.query.AnyFieldQuery( "foo", ["bar"], dbcore.query.SubstringQuery ) - self.assertEqual(q1, q2) + assert q1 == q2 q2.query_class = None - self.assertNotEqual(q1, q2) - - -class AssertsMixin: - def assert_items_matched(self, results, titles): - self.assertEqual({i.title for i in results}, set(titles)) - - def assert_albums_matched(self, results, albums): - self.assertEqual({a.album for a in results}, set(albums)) + assert q1 != q2 # A test case class providing a library with some dummy data and some # assertions involving that data. 
-class DummyDataTestCase(_common.TestCase, AssertsMixin): +class DummyDataTestCase(BeetsTestCase, AssertsMixin): def setUp(self): super().setUp() - self.lib = beets.library.Library(":memory:") items = [_common.item() for _ in range(3)] items[0].title = "foo bar" items[0].artist = "one" @@ -358,19 +357,19 @@ class GetTest(DummyDataTestCase): q = "xyzzy:nonsense" results = self.lib.items(q) titles = [i.title for i in results] - self.assertEqual(titles, []) + assert titles == [] def test_unknown_field_name_no_results_in_album_query(self): q = "xyzzy:nonsense" results = self.lib.albums(q) names = [a.album for a in results] - self.assertEqual(names, []) + assert names == [] def test_item_field_name_matches_nothing_in_album_query(self): q = "format:nonsense" results = self.lib.albums(q) names = [a.album for a in results] - self.assertEqual(names, []) + assert names == [] def test_unicode_query(self): item = self.lib.items().get() @@ -384,12 +383,12 @@ class GetTest(DummyDataTestCase): def test_numeric_search_positive(self): q = dbcore.query.NumericQuery("year", "2001") results = self.lib.items(q) - self.assertTrue(results) + assert results def test_numeric_search_negative(self): q = dbcore.query.NumericQuery("year", "1999") results = self.lib.items(q) - self.assertFalse(results) + assert not results def test_album_field_fallback(self): self.album["albumflex"] = "foo" @@ -397,81 +396,71 @@ class GetTest(DummyDataTestCase): q = "albumflex:foo" results = self.lib.items(q) - self.assert_items_matched( - results, - [ - "foo bar", - "baz qux", - ], - ) + self.assert_items_matched(results, ["foo bar", "baz qux"]) def test_invalid_query(self): - with self.assertRaises(InvalidQueryArgumentValueError) as raised: + with pytest.raises(InvalidQueryArgumentValueError, match="not an int"): dbcore.query.NumericQuery("year", "199a") - self.assertIn("not an int", str(raised.exception)) - with self.assertRaises(InvalidQueryArgumentValueError) as raised: + msg_match = r"not a regular expression.*unterminated subpattern" + with pytest.raises(ParsingError, match=msg_match): dbcore.query.RegexpQuery("year", "199(") - exception_text = str(raised.exception) - self.assertIn("not a regular expression", exception_text) - self.assertIn("unterminated subpattern", exception_text) - self.assertIsInstance(raised.exception, ParsingError) -class MatchTest(_common.TestCase): +class MatchTest(BeetsTestCase): def setUp(self): super().setUp() self.item = _common.item() def test_regex_match_positive(self): q = dbcore.query.RegexpQuery("album", "^the album$") - self.assertTrue(q.match(self.item)) + assert q.match(self.item) def test_regex_match_negative(self): q = dbcore.query.RegexpQuery("album", "^album$") - self.assertFalse(q.match(self.item)) + assert not q.match(self.item) def test_regex_match_non_string_value(self): q = dbcore.query.RegexpQuery("disc", "^6$") - self.assertTrue(q.match(self.item)) + assert q.match(self.item) def test_substring_match_positive(self): q = dbcore.query.SubstringQuery("album", "album") - self.assertTrue(q.match(self.item)) + assert q.match(self.item) def test_substring_match_negative(self): q = dbcore.query.SubstringQuery("album", "ablum") - self.assertFalse(q.match(self.item)) + assert not q.match(self.item) def test_substring_match_non_string_value(self): q = dbcore.query.SubstringQuery("disc", "6") - self.assertTrue(q.match(self.item)) + assert q.match(self.item) def test_exact_match_nocase_positive(self): q = dbcore.query.StringQuery("genre", "the genre") - self.assertTrue(q.match(self.item)) + 
assert q.match(self.item) q = dbcore.query.StringQuery("genre", "THE GENRE") - self.assertTrue(q.match(self.item)) + assert q.match(self.item) def test_exact_match_nocase_negative(self): q = dbcore.query.StringQuery("genre", "genre") - self.assertFalse(q.match(self.item)) + assert not q.match(self.item) def test_year_match_positive(self): q = dbcore.query.NumericQuery("year", "1") - self.assertTrue(q.match(self.item)) + assert q.match(self.item) def test_year_match_negative(self): q = dbcore.query.NumericQuery("year", "10") - self.assertFalse(q.match(self.item)) + assert not q.match(self.item) def test_bitrate_range_positive(self): q = dbcore.query.NumericQuery("bitrate", "100000..200000") - self.assertTrue(q.match(self.item)) + assert q.match(self.item) def test_bitrate_range_negative(self): q = dbcore.query.NumericQuery("bitrate", "200000..300000") - self.assertFalse(q.match(self.item)) + assert not q.match(self.item) def test_open_range(self): dbcore.query.NumericQuery("bitrate", "100000..") @@ -481,13 +470,13 @@ class MatchTest(_common.TestCase): q2 = dbcore.query.MatchQuery("foo", "bar") q3 = dbcore.query.MatchQuery("foo", "baz") q4 = dbcore.query.StringFieldQuery("foo", "bar") - self.assertEqual(q1, q2) - self.assertNotEqual(q1, q3) - self.assertNotEqual(q1, q4) - self.assertNotEqual(q3, q4) + assert q1 == q2 + assert q1 != q3 + assert q1 != q4 + assert q3 != q4 -class PathQueryTest(_common.LibTestCase, TestHelper, AssertsMixin): +class PathQueryTest(ItemInDBTestCase, AssertsMixin): def setUp(self): super().setUp() @@ -669,13 +658,13 @@ class PathQueryTest(_common.LibTestCase, TestHelper, AssertsMixin): is_path_query = beets.library.PathQuery.is_path_query with self.force_implicit_query_detection(): - self.assertTrue(is_path_query("/foo/bar")) - self.assertTrue(is_path_query("foo/bar")) - self.assertTrue(is_path_query("foo/")) - self.assertFalse(is_path_query("foo")) - self.assertTrue(is_path_query("foo/:bar")) - self.assertFalse(is_path_query("foo:bar/")) - self.assertFalse(is_path_query("foo:/bar")) + assert is_path_query("/foo/bar") + assert is_path_query("foo/bar") + assert is_path_query("foo/") + assert not is_path_query("foo") + assert is_path_query("foo/:bar") + assert not is_path_query("foo:bar/") + assert not is_path_query("foo:/bar") # FIXME: shouldn't this also work on windows? @unittest.skipIf(sys.platform == "win32", WIN32_NO_IMPLICIT_PATHS) @@ -689,18 +678,18 @@ class PathQueryTest(_common.LibTestCase, TestHelper, AssertsMixin): is_path_query = beets.library.PathQuery.is_path_query path = self.touch(os.path.join(b"foo", b"bar")) - self.assertTrue(os.path.isabs(util.syspath(path))) + assert os.path.isabs(util.syspath(path)) path_str = path.decode("utf-8") # The file itself. - self.assertTrue(is_path_query(path_str)) + assert is_path_query(path_str) # The parent directory. parent = os.path.dirname(path_str) - self.assertTrue(is_path_query(parent)) + assert is_path_query(parent) # Some non-existent path. 
- self.assertFalse(is_path_query(path_str + "baz")) + assert not is_path_query(f"{path_str}baz") def test_detect_relative_path(self): """Test detection of implicit path queries based on whether or @@ -717,60 +706,48 @@ class PathQueryTest(_common.LibTestCase, TestHelper, AssertsMixin): cur_dir = os.getcwd() try: os.chdir(syspath(self.temp_dir)) - self.assertTrue(is_path_query("foo/")) - self.assertTrue(is_path_query("foo/bar")) - self.assertTrue(is_path_query("foo/bar:tagada")) - self.assertFalse(is_path_query("bar")) + assert is_path_query("foo/") + assert is_path_query("foo/bar") + assert is_path_query("foo/bar:tagada") + assert not is_path_query("bar") finally: os.chdir(cur_dir) -class IntQueryTest(unittest.TestCase, TestHelper): - def setUp(self): - self.lib = Library(":memory:") - - def tearDown(self): - Item._types = {} - +class IntQueryTest(BeetsTestCase): def test_exact_value_match(self): item = self.add_item(bpm=120) matched = self.lib.items("bpm:120").get() - self.assertEqual(item.id, matched.id) + assert item.id == matched.id def test_range_match(self): item = self.add_item(bpm=120) self.add_item(bpm=130) matched = self.lib.items("bpm:110..125") - self.assertEqual(1, len(matched)) - self.assertEqual(item.id, matched.get().id) + assert 1 == len(matched) + assert item.id == matched.get().id + @patch("beets.library.Item._types", {"myint": types.Integer()}) def test_flex_range_match(self): - Item._types = {"myint": types.Integer()} item = self.add_item(myint=2) matched = self.lib.items("myint:2").get() - self.assertEqual(item.id, matched.id) + assert item.id == matched.id + @patch("beets.library.Item._types", {"myint": types.Integer()}) def test_flex_dont_match_missing(self): - Item._types = {"myint": types.Integer()} self.add_item() matched = self.lib.items("myint:2").get() - self.assertIsNone(matched) + assert matched is None def test_no_substring_match(self): self.add_item(bpm=120) matched = self.lib.items("bpm:12").get() - self.assertIsNone(matched) + assert matched is None -class BoolQueryTest(unittest.TestCase, TestHelper): - def setUp(self): - self.lib = Library(":memory:") - Item._types = {"flexbool": types.Boolean()} - - def tearDown(self): - Item._types = {} - +@patch("beets.library.Item._types", {"flexbool": types.Boolean()}) +class BoolQueryTest(BeetsTestCase, AssertsMixin): def test_parse_true(self): item_true = self.add_item(comp=True) item_false = self.add_item(comp=False) @@ -818,11 +795,11 @@ class BoolQueryTest(unittest.TestCase, TestHelper): class DefaultSearchFieldsTest(DummyDataTestCase): def test_albums_matches_album(self): albums = list(self.lib.albums("baz")) - self.assertEqual(len(albums), 1) + assert len(albums) == 1 def test_albums_matches_albumartist(self): albums = list(self.lib.albums(["album artist"])) - self.assertEqual(len(albums), 1) + assert len(albums) == 1 def test_items_matches_title(self): items = self.lib.items("beets") @@ -833,10 +810,7 @@ class DefaultSearchFieldsTest(DummyDataTestCase): self.assert_items_matched(items, []) -class NoneQueryTest(unittest.TestCase, TestHelper): - def setUp(self): - self.lib = Library(":memory:") - +class NoneQueryTest(BeetsTestCase, AssertsMixin): def test_match_singletons(self): singleton = self.add_item() album_item = self.add_album().items().get() @@ -871,10 +845,10 @@ class NoneQueryTest(unittest.TestCase, TestHelper): self.assertInResult(item, matched) -class NotQueryMatchTest(_common.TestCase): +class NotQueryMatchTest(BeetsTestCase): """Test `query.NotQuery` matching against a single item, using the 
same cases and assertions as on `MatchTest`, plus assertion on the negated - queries (ie. assertTrue(q) -> assertFalse(NotQuery(q))). + queries (ie. assert q -> assert not NotQuery(q)). """ def setUp(self): @@ -883,53 +857,53 @@ class NotQueryMatchTest(_common.TestCase): def test_regex_match_positive(self): q = dbcore.query.RegexpQuery("album", "^the album$") - self.assertTrue(q.match(self.item)) - self.assertFalse(dbcore.query.NotQuery(q).match(self.item)) + assert q.match(self.item) + assert not dbcore.query.NotQuery(q).match(self.item) def test_regex_match_negative(self): q = dbcore.query.RegexpQuery("album", "^album$") - self.assertFalse(q.match(self.item)) - self.assertTrue(dbcore.query.NotQuery(q).match(self.item)) + assert not q.match(self.item) + assert dbcore.query.NotQuery(q).match(self.item) def test_regex_match_non_string_value(self): q = dbcore.query.RegexpQuery("disc", "^6$") - self.assertTrue(q.match(self.item)) - self.assertFalse(dbcore.query.NotQuery(q).match(self.item)) + assert q.match(self.item) + assert not dbcore.query.NotQuery(q).match(self.item) def test_substring_match_positive(self): q = dbcore.query.SubstringQuery("album", "album") - self.assertTrue(q.match(self.item)) - self.assertFalse(dbcore.query.NotQuery(q).match(self.item)) + assert q.match(self.item) + assert not dbcore.query.NotQuery(q).match(self.item) def test_substring_match_negative(self): q = dbcore.query.SubstringQuery("album", "ablum") - self.assertFalse(q.match(self.item)) - self.assertTrue(dbcore.query.NotQuery(q).match(self.item)) + assert not q.match(self.item) + assert dbcore.query.NotQuery(q).match(self.item) def test_substring_match_non_string_value(self): q = dbcore.query.SubstringQuery("disc", "6") - self.assertTrue(q.match(self.item)) - self.assertFalse(dbcore.query.NotQuery(q).match(self.item)) + assert q.match(self.item) + assert not dbcore.query.NotQuery(q).match(self.item) def test_year_match_positive(self): q = dbcore.query.NumericQuery("year", "1") - self.assertTrue(q.match(self.item)) - self.assertFalse(dbcore.query.NotQuery(q).match(self.item)) + assert q.match(self.item) + assert not dbcore.query.NotQuery(q).match(self.item) def test_year_match_negative(self): q = dbcore.query.NumericQuery("year", "10") - self.assertFalse(q.match(self.item)) - self.assertTrue(dbcore.query.NotQuery(q).match(self.item)) + assert not q.match(self.item) + assert dbcore.query.NotQuery(q).match(self.item) def test_bitrate_range_positive(self): q = dbcore.query.NumericQuery("bitrate", "100000..200000") - self.assertTrue(q.match(self.item)) - self.assertFalse(dbcore.query.NotQuery(q).match(self.item)) + assert q.match(self.item) + assert not dbcore.query.NotQuery(q).match(self.item) def test_bitrate_range_negative(self): q = dbcore.query.NumericQuery("bitrate", "200000..300000") - self.assertFalse(q.match(self.item)) - self.assertTrue(dbcore.query.NotQuery(q).match(self.item)) + assert not q.match(self.item) + assert dbcore.query.NotQuery(q).match(self.item) def test_open_range(self): q = dbcore.query.NumericQuery("bitrate", "100000..") @@ -942,7 +916,7 @@ class NotQueryTest(DummyDataTestCase): - `test_get_yyy`: tests on query strings (similar to `GetTest`) """ - def assertNegationProperties(self, q): # noqa + def assertNegationProperties(self, q): """Given a Query `q`, assert that: - q OR not(q) == all items - q AND not(q) == 0 @@ -959,15 +933,14 @@ class NotQueryTest(DummyDataTestCase): all_titles = {i.title for i in self.lib.items()} q_results = {i.title for i in self.lib.items(q)} not_q_results = 
{i.title for i in self.lib.items(not_q)} - self.assertEqual(q_results.union(not_q_results), all_titles) - self.assertEqual(q_results.intersection(not_q_results), set()) + assert q_results.union(not_q_results) == all_titles + assert q_results.intersection(not_q_results) == set() # round trip not_not_q = dbcore.query.NotQuery(not_q) - self.assertEqual( - {i.title for i in self.lib.items(q)}, - {i.title for i in self.lib.items(not_not_q)}, - ) + assert {i.title for i in self.lib.items(q)} == { + i.title for i in self.lib.items(not_not_q) + } def test_type_and(self): # not(a and b) <-> not(a) or not(b) @@ -1120,21 +1093,19 @@ class NotQueryTest(DummyDataTestCase): q_slow = dbcore.query.NotQuery(klass(*(args + [False]))) try: - self.assertEqual( - [i.title for i in self.lib.items(q_fast)], - [i.title for i in self.lib.items(q_slow)], - ) + assert [i.title for i in self.lib.items(q_fast)] == [ + i.title for i in self.lib.items(q_slow) + ] except NotImplementedError: # ignore classes that do not provide `fast` implementation pass -class RelatedQueriesTest(_common.TestCase, AssertsMixin): +class RelatedQueriesTest(BeetsTestCase, AssertsMixin): """Test album-level queries with track-level filters and vice-versa.""" def setUp(self): super().setUp() - self.lib = beets.library.Library(":memory:") albums = [] for album_idx in range(1, 3): @@ -1168,11 +1139,3 @@ class RelatedQueriesTest(_common.TestCase, AssertsMixin): q = "catalognum:ABC Album1" results = self.lib.albums(q) self.assert_albums_matched(results, ["Album1"]) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/test_release.py b/test/test_release.py new file mode 100644 index 000000000..4b3f37113 --- /dev/null +++ b/test/test_release.py @@ -0,0 +1,108 @@ +"""Tests for the release utils.""" + +import os +import shutil +import sys + +import pytest + +release = pytest.importorskip("extra.release") + + +pytestmark = pytest.mark.skipif( + not ( + (os.environ.get("GITHUB_ACTIONS") == "true" and sys.platform != "win32") + or bool(shutil.which("pandoc")) + ), + reason="pandoc isn't available", +) + + +@pytest.fixture +def rst_changelog(): + return """New features: + +* :doc:`/plugins/substitute`: Some substitute + multi-line change. + :bug:`5467` +* :ref:`list-cmd` Update. + +You can do something with this command:: + + $ do-something + +Bug fixes: + +* Some fix that refers to an issue. + :bug:`5467` +* Some fix that mentions user :user:`username`. +* Some fix thanks to + :user:`username`. :bug:`5467` +* Some fix with its own bullet points using incorrect indentation: + * First nested bullet point + with some text that wraps to the next line + * Second nested bullet point +* Another fix with its own bullet points using correct indentation: + * First + * Second + +Section naaaaaaaaaaaaaaaaaaaaaaaammmmmmmmmmmmmmmmeeeeeeeeeeeeeee with over 80 +characters: + +Empty section: + +Other changes: + +* Changed `bitesize` label to `good first issue`. Our `contribute`_ page is now + automatically populated with these issues. :bug:`4855` + +.. _contribute: https://github.com/beetbox/beets/contribute + +2.1.0 (November 22, 2024) +------------------------- + +Bug fixes: + +* Fixed something.""" + + +@pytest.fixture +def md_changelog(): + return r"""### New features + +- [Substitute Plugin](https://beets.readthedocs.io/en/stable/plugins/substitute.html): Some substitute multi-line change. 
:bug: (\#5467) +- [list](https://beets.readthedocs.io/en/stable/reference/cli.html#list-cmd) Update. + +You can do something with this command: + + $ do-something + +### Bug fixes + +- Another fix with its own bullet points using correct indentation: + - First + - Second +- Some fix thanks to @username. :bug: (\#5467) +- Some fix that mentions user @username. +- Some fix that refers to an issue. :bug: (\#5467) +- Some fix with its own bullet points using incorrect indentation: + - First nested bullet point with some text that wraps to the next line + - Second nested bullet point + +**Section naaaaaaaaaaaaaaaaaaaaaaaammmmmmmmmmmmmmmmeeeeeeeeeeeeeee with over 80 characters** + +### Other changes + +- Changed `bitesize` label to `good first issue`. Our [contribute](https://github.com/beetbox/beets/contribute) page is now automatically populated with these issues. :bug: (\#4855) + +# 2.1.0 (November 22, 2024) + +### Bug fixes + +- Fixed something.""" # noqa: E501 + + +def test_convert_rst_to_md(rst_changelog, md_changelog): + actual = release.changelog_as_markdown(rst_changelog) + + assert actual == md_changelog diff --git a/test/test_sort.py b/test/test_sort.py index 52fa02600..d6aa5c518 100644 --- a/test/test_sort.py +++ b/test/test_sort.py @@ -12,45 +12,50 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Various tests for querying the library database. -""" +"""Various tests for querying the library database.""" -import unittest +from mock import patch import beets.library from beets import config, dbcore +from beets.dbcore import types +from beets.library import Album from beets.test import _common +from beets.test.helper import BeetsTestCase # A test case class providing a library with some dummy data and some # assertions involving that data. 
-class DummyDataTestCase(_common.TestCase): +class DummyDataTestCase(BeetsTestCase): def setUp(self): super().setUp() - self.lib = beets.library.Library(":memory:") - albums = [_common.album() for _ in range(3)] - albums[0].album = "Album A" - albums[0].genre = "Rock" - albums[0].year = 2001 - albums[0].flex1 = "Flex1-1" - albums[0].flex2 = "Flex2-A" - albums[0].albumartist = "Foo" - albums[0].albumartist_sort = None - albums[1].album = "Album B" - albums[1].genre = "Rock" - albums[1].year = 2001 - albums[1].flex1 = "Flex1-2" - albums[1].flex2 = "Flex2-A" - albums[1].albumartist = "Bar" - albums[1].albumartist_sort = None - albums[2].album = "Album C" - albums[2].genre = "Jazz" - albums[2].year = 2005 - albums[2].flex1 = "Flex1-1" - albums[2].flex2 = "Flex2-B" - albums[2].albumartist = "Baz" - albums[2].albumartist_sort = None + albums = [ + Album( + album="Album A", + genre="Rock", + year=2001, + flex1="Flex1-1", + flex2="Flex2-A", + albumartist="Foo", + ), + Album( + album="Album B", + genre="Rock", + year=2001, + flex1="Flex1-2", + flex2="Flex2-A", + albumartist="Bar", + ), + Album( + album="Album C", + genre="Jazz", + year=2005, + flex1="Flex1-1", + flex2="Flex2-B", + albumartist="Baz", + ), + ] for album in albums: self.lib.add(album) @@ -108,25 +113,25 @@ class SortFixedFieldTest(DummyDataTestCase): q = "" sort = dbcore.query.FixedFieldSort("year", True) results = self.lib.items(q, sort) - self.assertLessEqual(results[0]["year"], results[1]["year"]) - self.assertEqual(results[0]["year"], 2001) + assert results[0]["year"] <= results[1]["year"] + assert results[0]["year"] == 2001 # same thing with query string q = "year+" results2 = self.lib.items(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_sort_desc(self): q = "" sort = dbcore.query.FixedFieldSort("year", False) results = self.lib.items(q, sort) - self.assertGreaterEqual(results[0]["year"], results[1]["year"]) - self.assertEqual(results[0]["year"], 2004) + assert results[0]["year"] >= results[1]["year"] + assert results[0]["year"] == 2004 # same thing with query string q = "year-" results2 = self.lib.items(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_sort_two_field_asc(self): q = "" @@ -136,25 +141,25 @@ class SortFixedFieldTest(DummyDataTestCase): sort.add_sort(s1) sort.add_sort(s2) results = self.lib.items(q, sort) - self.assertLessEqual(results[0]["album"], results[1]["album"]) - self.assertLessEqual(results[1]["album"], results[2]["album"]) - self.assertEqual(results[0]["album"], "Baz") - self.assertEqual(results[1]["album"], "Baz") - self.assertLessEqual(results[0]["year"], results[1]["year"]) + assert results[0]["album"] <= results[1]["album"] + assert results[1]["album"] <= results[2]["album"] + assert results[0]["album"] == "Baz" + assert results[1]["album"] == "Baz" + assert results[0]["year"] <= results[1]["year"] # same thing with query string q = "album+ year+" results2 = self.lib.items(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_sort_path_field(self): q = "" sort = dbcore.query.FixedFieldSort("path", True) results = self.lib.items(q, sort) - self.assertEqual(results[0]["path"], b"/path0.mp3") - self.assertEqual(results[1]["path"], b"/patH1.mp3") - self.assertEqual(results[2]["path"], b"/paTH2.mp3") - self.assertEqual(results[3]["path"], b"/PATH3.mp3") + assert results[0]["path"] == b"/path0.mp3" + assert results[1]["path"] == 
b"/patH1.mp3" + assert results[2]["path"] == b"/paTH2.mp3" + assert results[3]["path"] == b"/PATH3.mp3" class SortFlexFieldTest(DummyDataTestCase): @@ -162,27 +167,27 @@ class SortFlexFieldTest(DummyDataTestCase): q = "" sort = dbcore.query.SlowFieldSort("flex1", True) results = self.lib.items(q, sort) - self.assertLessEqual(results[0]["flex1"], results[1]["flex1"]) - self.assertEqual(results[0]["flex1"], "Flex1-0") + assert results[0]["flex1"] <= results[1]["flex1"] + assert results[0]["flex1"] == "Flex1-0" # same thing with query string q = "flex1+" results2 = self.lib.items(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_sort_desc(self): q = "" sort = dbcore.query.SlowFieldSort("flex1", False) results = self.lib.items(q, sort) - self.assertGreaterEqual(results[0]["flex1"], results[1]["flex1"]) - self.assertGreaterEqual(results[1]["flex1"], results[2]["flex1"]) - self.assertGreaterEqual(results[2]["flex1"], results[3]["flex1"]) - self.assertEqual(results[0]["flex1"], "Flex1-2") + assert results[0]["flex1"] >= results[1]["flex1"] + assert results[1]["flex1"] >= results[2]["flex1"] + assert results[2]["flex1"] >= results[3]["flex1"] + assert results[0]["flex1"] == "Flex1-2" # same thing with query string q = "flex1-" results2 = self.lib.items(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_sort_two_field(self): q = "" @@ -192,16 +197,16 @@ class SortFlexFieldTest(DummyDataTestCase): sort.add_sort(s1) sort.add_sort(s2) results = self.lib.items(q, sort) - self.assertGreaterEqual(results[0]["flex2"], results[1]["flex2"]) - self.assertGreaterEqual(results[1]["flex2"], results[2]["flex2"]) - self.assertEqual(results[0]["flex2"], "Flex2-A") - self.assertEqual(results[1]["flex2"], "Flex2-A") - self.assertLessEqual(results[0]["flex1"], results[1]["flex1"]) + assert results[0]["flex2"] >= results[1]["flex2"] + assert results[1]["flex2"] >= results[2]["flex2"] + assert results[0]["flex2"] == "Flex2-A" + assert results[1]["flex2"] == "Flex2-A" + assert results[0]["flex1"] <= results[1]["flex1"] # same thing with query string q = "flex2- flex1+" results2 = self.lib.items(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id class SortAlbumFixedFieldTest(DummyDataTestCase): @@ -209,25 +214,25 @@ class SortAlbumFixedFieldTest(DummyDataTestCase): q = "" sort = dbcore.query.FixedFieldSort("year", True) results = self.lib.albums(q, sort) - self.assertLessEqual(results[0]["year"], results[1]["year"]) - self.assertEqual(results[0]["year"], 2001) + assert results[0]["year"] <= results[1]["year"] + assert results[0]["year"] == 2001 # same thing with query string q = "year+" results2 = self.lib.albums(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_sort_desc(self): q = "" sort = dbcore.query.FixedFieldSort("year", False) results = self.lib.albums(q, sort) - self.assertGreaterEqual(results[0]["year"], results[1]["year"]) - self.assertEqual(results[0]["year"], 2005) + assert results[0]["year"] >= results[1]["year"] + assert results[0]["year"] == 2005 # same thing with query string q = "year-" results2 = self.lib.albums(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_sort_two_field_asc(self): q = "" @@ -237,16 +242,16 @@ class SortAlbumFixedFieldTest(DummyDataTestCase): sort.add_sort(s1) sort.add_sort(s2) results = self.lib.albums(q, sort) 
- self.assertLessEqual(results[0]["genre"], results[1]["genre"]) - self.assertLessEqual(results[1]["genre"], results[2]["genre"]) - self.assertEqual(results[1]["genre"], "Rock") - self.assertEqual(results[2]["genre"], "Rock") - self.assertLessEqual(results[1]["album"], results[2]["album"]) + assert results[0]["genre"] <= results[1]["genre"] + assert results[1]["genre"] <= results[2]["genre"] + assert results[1]["genre"] == "Rock" + assert results[2]["genre"] == "Rock" + assert results[1]["album"] <= results[2]["album"] # same thing with query string q = "genre+ album+" results2 = self.lib.albums(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id class SortAlbumFlexFieldTest(DummyDataTestCase): @@ -254,25 +259,25 @@ class SortAlbumFlexFieldTest(DummyDataTestCase): q = "" sort = dbcore.query.SlowFieldSort("flex1", True) results = self.lib.albums(q, sort) - self.assertLessEqual(results[0]["flex1"], results[1]["flex1"]) - self.assertLessEqual(results[1]["flex1"], results[2]["flex1"]) + assert results[0]["flex1"] <= results[1]["flex1"] + assert results[1]["flex1"] <= results[2]["flex1"] # same thing with query string q = "flex1+" results2 = self.lib.albums(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_sort_desc(self): q = "" sort = dbcore.query.SlowFieldSort("flex1", False) results = self.lib.albums(q, sort) - self.assertGreaterEqual(results[0]["flex1"], results[1]["flex1"]) - self.assertGreaterEqual(results[1]["flex1"], results[2]["flex1"]) + assert results[0]["flex1"] >= results[1]["flex1"] + assert results[1]["flex1"] >= results[2]["flex1"] # same thing with query string q = "flex1-" results2 = self.lib.albums(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_sort_two_field_asc(self): q = "" @@ -282,16 +287,16 @@ class SortAlbumFlexFieldTest(DummyDataTestCase): sort.add_sort(s1) sort.add_sort(s2) results = self.lib.albums(q, sort) - self.assertLessEqual(results[0]["flex2"], results[1]["flex2"]) - self.assertLessEqual(results[1]["flex2"], results[2]["flex2"]) - self.assertEqual(results[0]["flex2"], "Flex2-A") - self.assertEqual(results[1]["flex2"], "Flex2-A") - self.assertLessEqual(results[0]["flex1"], results[1]["flex1"]) + assert results[0]["flex2"] <= results[1]["flex2"] + assert results[1]["flex2"] <= results[2]["flex2"] + assert results[0]["flex2"] == "Flex2-A" + assert results[1]["flex2"] == "Flex2-A" + assert results[0]["flex1"] <= results[1]["flex1"] # same thing with query string q = "flex2+ flex1+" results2 = self.lib.albums(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id class SortAlbumComputedFieldTest(DummyDataTestCase): @@ -299,25 +304,25 @@ class SortAlbumComputedFieldTest(DummyDataTestCase): q = "" sort = dbcore.query.SlowFieldSort("path", True) results = self.lib.albums(q, sort) - self.assertLessEqual(results[0]["path"], results[1]["path"]) - self.assertLessEqual(results[1]["path"], results[2]["path"]) + assert results[0]["path"] <= results[1]["path"] + assert results[1]["path"] <= results[2]["path"] # same thing with query string q = "path+" results2 = self.lib.albums(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_sort_desc(self): q = "" sort = dbcore.query.SlowFieldSort("path", False) results = self.lib.albums(q, sort) - self.assertGreaterEqual(results[0]["path"], results[1]["path"]) - 
self.assertGreaterEqual(results[1]["path"], results[2]["path"]) + assert results[0]["path"] >= results[1]["path"] + assert results[1]["path"] >= results[2]["path"] # same thing with query string q = "path-" results2 = self.lib.albums(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id class SortCombinedFieldTest(DummyDataTestCase): @@ -329,12 +334,12 @@ class SortCombinedFieldTest(DummyDataTestCase): sort.add_sort(s1) sort.add_sort(s2) results = self.lib.albums(q, sort) - self.assertLessEqual(results[0]["path"], results[1]["path"]) - self.assertLessEqual(results[1]["path"], results[2]["path"]) + assert results[0]["path"] <= results[1]["path"] + assert results[1]["path"] <= results[2]["path"] q = "path+ year+" results2 = self.lib.albums(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_computed_second(self): q = "" @@ -344,36 +349,36 @@ class SortCombinedFieldTest(DummyDataTestCase): sort.add_sort(s1) sort.add_sort(s2) results = self.lib.albums(q, sort) - self.assertLessEqual(results[0]["year"], results[1]["year"]) - self.assertLessEqual(results[1]["year"], results[2]["year"]) - self.assertLessEqual(results[0]["path"], results[1]["path"]) + assert results[0]["year"] <= results[1]["year"] + assert results[1]["year"] <= results[2]["year"] + assert results[0]["path"] <= results[1]["path"] q = "year+ path+" results2 = self.lib.albums(q) for r1, r2 in zip(results, results2): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id class ConfigSortTest(DummyDataTestCase): def test_default_sort_item(self): results = list(self.lib.items()) - self.assertLess(results[0].artist, results[1].artist) + assert results[0].artist < results[1].artist def test_config_opposite_sort_item(self): config["sort_item"] = "artist-" results = list(self.lib.items()) - self.assertGreater(results[0].artist, results[1].artist) + assert results[0].artist > results[1].artist def test_default_sort_album(self): results = list(self.lib.albums()) - self.assertLess(results[0].albumartist, results[1].albumartist) + assert results[0].albumartist < results[1].albumartist def test_config_opposite_sort_album(self): config["sort_album"] = "albumartist-" results = list(self.lib.albums()) - self.assertGreater(results[0].albumartist, results[1].albumartist) + assert results[0].albumartist > results[1].albumartist -class CaseSensitivityTest(DummyDataTestCase, _common.TestCase): +class CaseSensitivityTest(DummyDataTestCase, BeetsTestCase): """If case_insensitive is false, lower-case values should be placed after all upper-case values. 
E.g., `Foo Qux bar` """ @@ -381,14 +386,14 @@ class CaseSensitivityTest(DummyDataTestCase, _common.TestCase): def setUp(self): super().setUp() - album = _common.album() - album.album = "album" - album.genre = "alternative" - album.year = "2001" - album.flex1 = "flex1" - album.flex2 = "flex2-A" - album.albumartist = "bar" - album.albumartist_sort = None + album = Album( + album="album", + genre="alternative", + year="2001", + flex1="flex1", + flex2="flex2-A", + albumartist="bar", + ) self.lib.add(album) item = _common.item() @@ -416,43 +421,43 @@ class CaseSensitivityTest(DummyDataTestCase, _common.TestCase): config["sort_case_insensitive"] = True q = "artist+" results = list(self.lib.items(q)) - self.assertEqual(results[0].artist, "lowercase") - self.assertEqual(results[1].artist, "One") + assert results[0].artist == "lowercase" + assert results[1].artist == "One" def test_smart_artist_case_sensitive(self): config["sort_case_insensitive"] = False q = "artist+" results = list(self.lib.items(q)) - self.assertEqual(results[0].artist, "One") - self.assertEqual(results[-1].artist, "lowercase") + assert results[0].artist == "One" + assert results[-1].artist == "lowercase" def test_fixed_field_case_insensitive(self): config["sort_case_insensitive"] = True q = "album+" results = list(self.lib.albums(q)) - self.assertEqual(results[0].album, "album") - self.assertEqual(results[1].album, "Album A") + assert results[0].album == "album" + assert results[1].album == "Album A" def test_fixed_field_case_sensitive(self): config["sort_case_insensitive"] = False q = "album+" results = list(self.lib.albums(q)) - self.assertEqual(results[0].album, "Album A") - self.assertEqual(results[-1].album, "album") + assert results[0].album == "Album A" + assert results[-1].album == "album" def test_flex_field_case_insensitive(self): config["sort_case_insensitive"] = True q = "flex1+" results = list(self.lib.items(q)) - self.assertEqual(results[0].flex1, "flex1") - self.assertEqual(results[1].flex1, "Flex1-0") + assert results[0].flex1 == "flex1" + assert results[1].flex1 == "Flex1-0" def test_flex_field_case_sensitive(self): config["sort_case_insensitive"] = False q = "flex1+" results = list(self.lib.items(q)) - self.assertEqual(results[0].flex1, "Flex1-0") - self.assertEqual(results[-1].flex1, "flex1") + assert results[0].flex1 == "Flex1-0" + assert results[-1].flex1 == "flex1" def test_case_sensitive_only_affects_text(self): config["sort_case_insensitive"] = True @@ -461,9 +466,9 @@ class CaseSensitivityTest(DummyDataTestCase, _common.TestCase): # If the numerical values were sorted as strings, # then ['1', '10', '2'] would be valid. 
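The comment above makes two separate points: numeric fields keep numeric order even when sort_case_insensitive is enabled, and for text fields the case-insensitive order is effectively a lower-cased comparison. A plain-Python illustration of both expectations, independent of any beets API:

    # Track numbers: lexical (string) sort vs. the numeric sort the test expects.
    tracks = [1, 10, 2]
    assert sorted(str(t) for t in tracks) == ["1", "10", "2"]  # sorted as strings
    assert sorted(tracks) == [1, 2, 10]                        # sorted numerically

    # Text fields: case-sensitive vs. case-insensitive ordering.
    artists = ["One", "lowercase"]
    assert sorted(artists) == ["One", "lowercase"]                 # upper case first
    assert sorted(artists, key=str.lower) == ["lowercase", "One"]  # case folded
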
# print([r.track for r in results]) - self.assertEqual(results[0].track, 1) - self.assertEqual(results[1].track, 2) - self.assertEqual(results[-1].track, 10) + assert results[0].track == 1 + assert results[1].track == 2 + assert results[-1].track == 10 class NonExistingFieldTest(DummyDataTestCase): @@ -477,46 +482,76 @@ class NonExistingFieldTest(DummyDataTestCase): for q1 in qs: results1 = list(self.lib.items(q1)) for r1, r2 in zip(results0, results1): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_combined_non_existing_field_asc(self): all_results = list(self.lib.items("id+")) q = "foo+ id+" results = list(self.lib.items(q)) - self.assertEqual(len(all_results), len(results)) + assert len(all_results) == len(results) for r1, r2 in zip(all_results, results): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_combined_non_existing_field_desc(self): all_results = list(self.lib.items("id+")) q = "foo- id+" results = list(self.lib.items(q)) - self.assertEqual(len(all_results), len(results)) + assert len(all_results) == len(results) for r1, r2 in zip(all_results, results): - self.assertEqual(r1.id, r2.id) + assert r1.id == r2.id def test_field_present_in_some_items(self): - """Test ordering by a field not present on all items.""" - # append 'foo' to two to items (1,2) - items = self.lib.items("id+") - ids = [i.id for i in items] - items[1].foo = "bar1" - items[2].foo = "bar2" - items[1].store() - items[2].store() + """Test ordering by a (string) field not present on all items.""" + # append 'foo' to two items (1,2) + lower_foo_item, higher_foo_item, *items_without_foo = self.lib.items( + "id+" + ) + lower_foo_item.foo, higher_foo_item.foo = "bar1", "bar2" + lower_foo_item.store() + higher_foo_item.store() results_asc = list(self.lib.items("foo+ id+")) - self.assertEqual( - [i.id for i in results_asc], + assert [i.id for i in results_asc] == [ # items without field first - [ids[0], ids[3], ids[1], ids[2]], - ) + *[i.id for i in items_without_foo], + lower_foo_item.id, + higher_foo_item.id, + ] + results_desc = list(self.lib.items("foo- id+")) - self.assertEqual( - [i.id for i in results_desc], + assert [i.id for i in results_desc] == [ + higher_foo_item.id, + lower_foo_item.id, # items without field last - [ids[2], ids[1], ids[0], ids[3]], + *[i.id for i in items_without_foo], + ] + + @patch("beets.library.Item._types", {"myint": types.Integer()}) + def test_int_field_present_in_some_items(self): + """Test ordering by an int-type field not present on all items.""" + # append int-valued 'myint' to two items (1,2) + lower_myint_item, higher_myint_item, *items_without_myint = ( + self.lib.items("id+") ) + lower_myint_item.myint, higher_myint_item.myint = 1, 2 + lower_myint_item.store() + higher_myint_item.store() + + results_asc = list(self.lib.items("myint+ id+")) + assert [i.id for i in results_asc] == [ + # items without field first + *[i.id for i in items_without_myint], + lower_myint_item.id, + higher_myint_item.id, + ] + + results_desc = list(self.lib.items("myint- id+")) + assert [i.id for i in results_desc] == [ + higher_myint_item.id, + lower_myint_item.id, + # items without field last + *[i.id for i in items_without_myint], + ] def test_negation_interaction(self): """Test the handling of negation and sorting together. 
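The two field-present-in-some-items tests above pin down how items lacking the sort field are ordered: missing values come first on an ascending sort and last on a descending one, with the secondary "id+" sort breaking ties. A standalone sketch of that contract in plain Python (a stable sort stands in for the id tiebreaker):

    # Emulate "myint+ id+": records without the field sort before those with it.
    items = [{"id": 1, "myint": 2}, {"id": 2}, {"id": 3, "myint": 1}, {"id": 4}]
    ascending = sorted(items, key=lambda i: ("myint" in i, i.get("myint", 0)))
    assert [i["id"] for i in ascending] == [2, 4, 3, 1]

    # Emulate "myint- id+": records without the field sort after those with it.
    descending = sorted(items, key=lambda i: ("myint" not in i, -i.get("myint", 0)))
    assert [i["id"] for i in descending] == [1, 3, 2, 4]
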
@@ -527,15 +562,7 @@ class NonExistingFieldTest(DummyDataTestCase): query, sort = beets.library.parse_query_string( "-bar+", beets.library.Item ) - self.assertEqual(len(query.subqueries), 1) - self.assertTrue(isinstance(query.subqueries[0], dbcore.query.TrueQuery)) - self.assertTrue(isinstance(sort, dbcore.query.SlowFieldSort)) - self.assertEqual(sort.field, "-bar") - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert len(query.subqueries) == 1 + assert isinstance(query.subqueries[0], dbcore.query.TrueQuery) + assert isinstance(sort, dbcore.query.SlowFieldSort) + assert sort.field == "-bar" diff --git a/test/test_template.py b/test/test_template.py index 24a5351df..236bee5aa 100644 --- a/test/test_template.py +++ b/test/test_template.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Tests for template engine. -""" +"""Tests for template engine.""" import unittest @@ -49,194 +48,179 @@ def _normparse(text): class ParseTest(unittest.TestCase): def test_empty_string(self): - self.assertEqual(list(_normparse("")), []) + assert list(_normparse("")) == [] def _assert_symbol(self, obj, ident): """Assert that an object is a Symbol with the given identifier.""" - self.assertTrue( - isinstance(obj, functemplate.Symbol), "not a Symbol: %s" % repr(obj) - ) - self.assertEqual( - obj.ident, - ident, - "wrong identifier: %s vs. %s" % (repr(obj.ident), repr(ident)), - ) + assert isinstance(obj, functemplate.Symbol), f"not a Symbol: {obj}" + assert obj.ident == ident, f"wrong identifier: {obj.ident} vs. {ident}" def _assert_call(self, obj, ident, numargs): """Assert that an object is a Call with the given identifier and argument count. """ - self.assertTrue( - isinstance(obj, functemplate.Call), "not a Call: %s" % repr(obj) - ) - self.assertEqual( - obj.ident, - ident, - "wrong identifier: %s vs. %s" % (repr(obj.ident), repr(ident)), - ) - self.assertEqual( - len(obj.args), - numargs, - "wrong argument count in %s: %i vs. %i" - % (repr(obj.ident), len(obj.args), numargs), - ) + assert isinstance(obj, functemplate.Call), f"not a Call: {obj}" + assert obj.ident == ident, f"wrong identifier: {obj.ident} vs. {ident}" + assert ( + len(obj.args) == numargs + ), f"wrong argument count in {obj.ident}: {len(obj.args)} vs. 
{numargs}" def test_plain_text(self): - self.assertEqual(list(_normparse("hello world")), ["hello world"]) + assert list(_normparse("hello world")) == ["hello world"] def test_escaped_character_only(self): - self.assertEqual(list(_normparse("$$")), ["$"]) + assert list(_normparse("$$")) == ["$"] def test_escaped_character_in_text(self): - self.assertEqual(list(_normparse("a $$ b")), ["a $ b"]) + assert list(_normparse("a $$ b")) == ["a $ b"] def test_escaped_character_at_start(self): - self.assertEqual(list(_normparse("$$ hello")), ["$ hello"]) + assert list(_normparse("$$ hello")) == ["$ hello"] def test_escaped_character_at_end(self): - self.assertEqual(list(_normparse("hello $$")), ["hello $"]) + assert list(_normparse("hello $$")) == ["hello $"] def test_escaped_function_delim(self): - self.assertEqual(list(_normparse("a $% b")), ["a % b"]) + assert list(_normparse("a $% b")) == ["a % b"] def test_escaped_sep(self): - self.assertEqual(list(_normparse("a $, b")), ["a , b"]) + assert list(_normparse("a $, b")) == ["a , b"] def test_escaped_close_brace(self): - self.assertEqual(list(_normparse("a $} b")), ["a } b"]) + assert list(_normparse("a $} b")) == ["a } b"] def test_bare_value_delim_kept_intact(self): - self.assertEqual(list(_normparse("a $ b")), ["a $ b"]) + assert list(_normparse("a $ b")) == ["a $ b"] def test_bare_function_delim_kept_intact(self): - self.assertEqual(list(_normparse("a % b")), ["a % b"]) + assert list(_normparse("a % b")) == ["a % b"] def test_bare_opener_kept_intact(self): - self.assertEqual(list(_normparse("a { b")), ["a { b"]) + assert list(_normparse("a { b")) == ["a { b"] def test_bare_closer_kept_intact(self): - self.assertEqual(list(_normparse("a } b")), ["a } b"]) + assert list(_normparse("a } b")) == ["a } b"] def test_bare_sep_kept_intact(self): - self.assertEqual(list(_normparse("a , b")), ["a , b"]) + assert list(_normparse("a , b")) == ["a , b"] def test_symbol_alone(self): parts = list(_normparse("$foo")) - self.assertEqual(len(parts), 1) + assert len(parts) == 1 self._assert_symbol(parts[0], "foo") def test_symbol_in_text(self): parts = list(_normparse("hello $foo world")) - self.assertEqual(len(parts), 3) - self.assertEqual(parts[0], "hello ") + assert len(parts) == 3 + assert parts[0] == "hello " self._assert_symbol(parts[1], "foo") - self.assertEqual(parts[2], " world") + assert parts[2] == " world" def test_symbol_with_braces(self): parts = list(_normparse("hello${foo}world")) - self.assertEqual(len(parts), 3) - self.assertEqual(parts[0], "hello") + assert len(parts) == 3 + assert parts[0] == "hello" self._assert_symbol(parts[1], "foo") - self.assertEqual(parts[2], "world") + assert parts[2] == "world" def test_unclosed_braces_symbol(self): - self.assertEqual(list(_normparse("a ${ b")), ["a ${ b"]) + assert list(_normparse("a ${ b")) == ["a ${ b"] def test_empty_braces_symbol(self): - self.assertEqual(list(_normparse("a ${} b")), ["a ${} b"]) + assert list(_normparse("a ${} b")) == ["a ${} b"] def test_call_without_args_at_end(self): - self.assertEqual(list(_normparse("foo %bar")), ["foo %bar"]) + assert list(_normparse("foo %bar")) == ["foo %bar"] def test_call_without_args(self): - self.assertEqual(list(_normparse("foo %bar baz")), ["foo %bar baz"]) + assert list(_normparse("foo %bar baz")) == ["foo %bar baz"] def test_call_with_unclosed_args(self): - self.assertEqual(list(_normparse("foo %bar{ baz")), ["foo %bar{ baz"]) + assert list(_normparse("foo %bar{ baz")) == ["foo %bar{ baz"] def test_call_with_unclosed_multiple_args(self): - 
self.assertEqual( - list(_normparse("foo %bar{bar,bar baz")), ["foo %bar{bar,bar baz"] - ) + assert list(_normparse("foo %bar{bar,bar baz")) == [ + "foo %bar{bar,bar baz" + ] def test_call_empty_arg(self): parts = list(_normparse("%foo{}")) - self.assertEqual(len(parts), 1) + assert len(parts) == 1 self._assert_call(parts[0], "foo", 1) - self.assertEqual(list(_normexpr(parts[0].args[0])), []) + assert list(_normexpr(parts[0].args[0])) == [] def test_call_single_arg(self): parts = list(_normparse("%foo{bar}")) - self.assertEqual(len(parts), 1) + assert len(parts) == 1 self._assert_call(parts[0], "foo", 1) - self.assertEqual(list(_normexpr(parts[0].args[0])), ["bar"]) + assert list(_normexpr(parts[0].args[0])) == ["bar"] def test_call_two_args(self): parts = list(_normparse("%foo{bar,baz}")) - self.assertEqual(len(parts), 1) + assert len(parts) == 1 self._assert_call(parts[0], "foo", 2) - self.assertEqual(list(_normexpr(parts[0].args[0])), ["bar"]) - self.assertEqual(list(_normexpr(parts[0].args[1])), ["baz"]) + assert list(_normexpr(parts[0].args[0])) == ["bar"] + assert list(_normexpr(parts[0].args[1])) == ["baz"] def test_call_with_escaped_sep(self): parts = list(_normparse("%foo{bar$,baz}")) - self.assertEqual(len(parts), 1) + assert len(parts) == 1 self._assert_call(parts[0], "foo", 1) - self.assertEqual(list(_normexpr(parts[0].args[0])), ["bar,baz"]) + assert list(_normexpr(parts[0].args[0])) == ["bar,baz"] def test_call_with_escaped_close(self): parts = list(_normparse("%foo{bar$}baz}")) - self.assertEqual(len(parts), 1) + assert len(parts) == 1 self._assert_call(parts[0], "foo", 1) - self.assertEqual(list(_normexpr(parts[0].args[0])), ["bar}baz"]) + assert list(_normexpr(parts[0].args[0])) == ["bar}baz"] def test_call_with_symbol_argument(self): parts = list(_normparse("%foo{$bar,baz}")) - self.assertEqual(len(parts), 1) + assert len(parts) == 1 self._assert_call(parts[0], "foo", 2) arg_parts = list(_normexpr(parts[0].args[0])) - self.assertEqual(len(arg_parts), 1) + assert len(arg_parts) == 1 self._assert_symbol(arg_parts[0], "bar") - self.assertEqual(list(_normexpr(parts[0].args[1])), ["baz"]) + assert list(_normexpr(parts[0].args[1])) == ["baz"] def test_call_with_nested_call_argument(self): parts = list(_normparse("%foo{%bar{},baz}")) - self.assertEqual(len(parts), 1) + assert len(parts) == 1 self._assert_call(parts[0], "foo", 2) arg_parts = list(_normexpr(parts[0].args[0])) - self.assertEqual(len(arg_parts), 1) + assert len(arg_parts) == 1 self._assert_call(arg_parts[0], "bar", 1) - self.assertEqual(list(_normexpr(parts[0].args[1])), ["baz"]) + assert list(_normexpr(parts[0].args[1])) == ["baz"] def test_nested_call_with_argument(self): parts = list(_normparse("%foo{%bar{baz}}")) - self.assertEqual(len(parts), 1) + assert len(parts) == 1 self._assert_call(parts[0], "foo", 1) arg_parts = list(_normexpr(parts[0].args[0])) - self.assertEqual(len(arg_parts), 1) + assert len(arg_parts) == 1 self._assert_call(arg_parts[0], "bar", 1) - self.assertEqual(list(_normexpr(arg_parts[0].args[0])), ["baz"]) + assert list(_normexpr(arg_parts[0].args[0])) == ["baz"] def test_sep_before_call_two_args(self): parts = list(_normparse("hello, %foo{bar,baz}")) - self.assertEqual(len(parts), 2) - self.assertEqual(parts[0], "hello, ") + assert len(parts) == 2 + assert parts[0] == "hello, " self._assert_call(parts[1], "foo", 2) - self.assertEqual(list(_normexpr(parts[1].args[0])), ["bar"]) - self.assertEqual(list(_normexpr(parts[1].args[1])), ["baz"]) + assert list(_normexpr(parts[1].args[0])) == ["bar"] 
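For reference, the grammar these parse tests exercise: `$name` and `${name}` are symbols, `%func{arg1,arg2}` is a call, and `$$`, `$,` and `$}` escape the delimiter characters. A hedged usage sketch built on the same Template(...).substitute(values, functions) call that the EvalTest helper further down uses; the beets.util.functemplate import path and the example values and functions are assumptions for illustration only:

    from beets.util import functemplate  # assumed import location

    values = {"artist": "Beulah", "title": "Popular Mechanics for Lovers"}
    functions = {"upper": lambda s: s.upper()}

    # "$artist" is a symbol, "%upper{$title}" a call, "$$" an escaped dollar sign.
    template = functemplate.Template("$artist - %upper{$title} ($$5)")
    result = template.substitute(values, functions)
    assert result == "Beulah - POPULAR MECHANICS FOR LOVERS ($5)"
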
+ assert list(_normexpr(parts[1].args[1])) == ["baz"] def test_sep_with_symbols(self): parts = list(_normparse("hello,$foo,$bar")) - self.assertEqual(len(parts), 4) - self.assertEqual(parts[0], "hello,") + assert len(parts) == 4 + assert parts[0] == "hello," self._assert_symbol(parts[1], "foo") - self.assertEqual(parts[2], ",") + assert parts[2] == "," self._assert_symbol(parts[3], "bar") def test_newline_at_end(self): parts = list(_normparse("foo\n")) - self.assertEqual(len(parts), 1) - self.assertEqual(parts[0], "foo\n") + assert len(parts) == 1 + assert parts[0] == "foo\n" class EvalTest(unittest.TestCase): @@ -252,49 +236,41 @@ class EvalTest(unittest.TestCase): return functemplate.Template(template).substitute(values, functions) def test_plain_text(self): - self.assertEqual(self._eval("foo"), "foo") + assert self._eval("foo") == "foo" def test_subtitute_value(self): - self.assertEqual(self._eval("$foo"), "bar") + assert self._eval("$foo") == "bar" def test_subtitute_value_in_text(self): - self.assertEqual(self._eval("hello $foo world"), "hello bar world") + assert self._eval("hello $foo world") == "hello bar world" def test_not_subtitute_undefined_value(self): - self.assertEqual(self._eval("$bar"), "$bar") + assert self._eval("$bar") == "$bar" def test_function_call(self): - self.assertEqual(self._eval("%lower{FOO}"), "foo") + assert self._eval("%lower{FOO}") == "foo" def test_function_call_with_text(self): - self.assertEqual(self._eval("A %lower{FOO} B"), "A foo B") + assert self._eval("A %lower{FOO} B") == "A foo B" def test_nested_function_call(self): - self.assertEqual(self._eval("%lower{%lower{FOO}}"), "foo") + assert self._eval("%lower{%lower{FOO}}") == "foo" def test_symbol_in_argument(self): - self.assertEqual(self._eval("%lower{$baz}"), "bar") + assert self._eval("%lower{$baz}") == "bar" def test_function_call_exception(self): res = self._eval("%lower{a,b,c,d,e}") - self.assertTrue(isinstance(res, str)) + assert isinstance(res, str) def test_function_returning_integer(self): - self.assertEqual(self._eval("%len{foo}"), "3") + assert self._eval("%len{foo}") == "3" def test_not_subtitute_undefined_func(self): - self.assertEqual(self._eval("%bar{}"), "%bar{}") + assert self._eval("%bar{}") == "%bar{}" def test_not_subtitute_func_with_no_args(self): - self.assertEqual(self._eval("%lower"), "%lower") + assert self._eval("%lower") == "%lower" def test_function_call_with_empty_arg(self): - self.assertEqual(self._eval("%len{}"), "0") - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert self._eval("%len{}") == "0" diff --git a/test/test_ui.py b/test/test_ui.py index f7494bafb..041570067 100644 --- a/test/test_ui.py +++ b/test/test_ui.py @@ -12,8 +12,7 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Tests for the command-line interface. 
-""" +"""Tests for the command-line interface.""" import os import platform @@ -24,6 +23,7 @@ import sys import unittest from unittest.mock import Mock, patch +import pytest from confuse import ConfigError from mediafile import MediaFile @@ -31,7 +31,8 @@ from beets import autotag, config, library, plugins, ui, util from beets.autotag.match import distance from beets.test import _common from beets.test.helper import ( - TestHelper, + BeetsTestCase, + PluginTestCase, capture_stdout, control_stdin, has_program, @@ -40,9 +41,9 @@ from beets.ui import commands from beets.util import MoveOperation, syspath -class ListTest(unittest.TestCase): +class ListTest(BeetsTestCase): def setUp(self): - self.lib = library.Library(":memory:") + super().setUp() self.item = _common.item() self.item.path = "xxx/yyy" self.lib.add(self.item) @@ -55,7 +56,7 @@ class ListTest(unittest.TestCase): def test_list_outputs_item(self): stdout = self._run_list() - self.assertIn("the title", stdout.getvalue()) + assert "the title" in stdout.getvalue() def test_list_unicode_query(self): self.item.title = "na\xefve" @@ -64,61 +65,55 @@ class ListTest(unittest.TestCase): stdout = self._run_list(["na\xefve"]) out = stdout.getvalue() - self.assertIn("na\xefve", out) + assert "na\xefve" in out def test_list_item_path(self): stdout = self._run_list(fmt="$path") - self.assertEqual(stdout.getvalue().strip(), "xxx/yyy") + assert stdout.getvalue().strip() == "xxx/yyy" def test_list_album_outputs_something(self): stdout = self._run_list(album=True) - self.assertGreater(len(stdout.getvalue()), 0) + assert len(stdout.getvalue()) > 0 def test_list_album_path(self): stdout = self._run_list(album=True, fmt="$path") - self.assertEqual(stdout.getvalue().strip(), "xxx") + assert stdout.getvalue().strip() == "xxx" def test_list_album_omits_title(self): stdout = self._run_list(album=True) - self.assertNotIn("the title", stdout.getvalue()) + assert "the title" not in stdout.getvalue() def test_list_uses_track_artist(self): stdout = self._run_list() - self.assertIn("the artist", stdout.getvalue()) - self.assertNotIn("the album artist", stdout.getvalue()) + assert "the artist" in stdout.getvalue() + assert "the album artist" not in stdout.getvalue() def test_list_album_uses_album_artist(self): stdout = self._run_list(album=True) - self.assertNotIn("the artist", stdout.getvalue()) - self.assertIn("the album artist", stdout.getvalue()) + assert "the artist" not in stdout.getvalue() + assert "the album artist" in stdout.getvalue() def test_list_item_format_artist(self): stdout = self._run_list(fmt="$artist") - self.assertIn("the artist", stdout.getvalue()) + assert "the artist" in stdout.getvalue() def test_list_item_format_multiple(self): stdout = self._run_list(fmt="$artist - $album - $year") - self.assertEqual( - "the artist - the album - 0001", stdout.getvalue().strip() - ) + assert "the artist - the album - 0001" == stdout.getvalue().strip() def test_list_album_format(self): stdout = self._run_list(album=True, fmt="$genre") - self.assertIn("the genre", stdout.getvalue()) - self.assertNotIn("the album", stdout.getvalue()) + assert "the genre" in stdout.getvalue() + assert "the album" not in stdout.getvalue() -class RemoveTest(_common.TestCase, TestHelper): +class RemoveTest(BeetsTestCase): def setUp(self): super().setUp() self.io.install() - self.libdir = os.path.join(self.temp_dir, b"testlibdir") - os.mkdir(syspath(self.libdir)) - # Copy a file into the library. 
- self.lib = library.Library(":memory:", self.libdir) self.item_path = os.path.join(_common.RSRC, b"full.mp3") self.i = library.Item.from_path(self.item_path) self.lib.add(self.i) @@ -128,26 +123,26 @@ class RemoveTest(_common.TestCase, TestHelper): self.io.addinput("y") commands.remove_items(self.lib, "", False, False, False) items = self.lib.items() - self.assertEqual(len(list(items)), 0) + assert len(list(items)) == 0 self.assertExists(self.i.path) def test_remove_items_with_delete(self): self.io.addinput("y") commands.remove_items(self.lib, "", False, True, False) items = self.lib.items() - self.assertEqual(len(list(items)), 0) + assert len(list(items)) == 0 self.assertNotExists(self.i.path) def test_remove_items_with_force_no_delete(self): commands.remove_items(self.lib, "", False, False, True) items = self.lib.items() - self.assertEqual(len(list(items)), 0) + assert len(list(items)) == 0 self.assertExists(self.i.path) def test_remove_items_with_force_delete(self): commands.remove_items(self.lib, "", False, True, True) items = self.lib.items() - self.assertEqual(len(list(items)), 0) + assert len(list(items)) == 0 self.assertNotExists(self.i.path) def test_remove_items_select_with_delete(self): @@ -159,7 +154,7 @@ class RemoveTest(_common.TestCase, TestHelper): self.io.addinput(s) commands.remove_items(self.lib, "", False, True, False) items = self.lib.items() - self.assertEqual(len(list(items)), 1) + assert len(list(items)) == 1 # There is probably no guarantee that the items are queried in any # spcecific order, thus just ensure that exactly one was removed. # To improve upon this, self.io would need to have the capability to @@ -167,7 +162,7 @@ class RemoveTest(_common.TestCase, TestHelper): num_existing = 0 num_existing += 1 if os.path.exists(syspath(self.i.path)) else 0 num_existing += 1 if os.path.exists(syspath(i2.path)) else 0 - self.assertEqual(num_existing, 1) + assert num_existing == 1 def test_remove_albums_select_with_delete(self): a1 = self.add_album_fixture() @@ -175,29 +170,26 @@ class RemoveTest(_common.TestCase, TestHelper): path1 = a1.items()[0].path path2 = a2.items()[0].path items = self.lib.items() - self.assertEqual(len(list(items)), 3) + assert len(list(items)) == 3 for s in ("s", "y", "n"): self.io.addinput(s) commands.remove_items(self.lib, "", True, True, False) items = self.lib.items() - self.assertEqual(len(list(items)), 2) # incl. the item from setUp() + assert len(list(items)) == 2 # incl. 
the item from setUp() # See test_remove_items_select_with_delete() num_existing = 0 num_existing += 1 if os.path.exists(syspath(path1)) else 0 num_existing += 1 if os.path.exists(syspath(path2)) else 0 - self.assertEqual(num_existing, 1) + assert num_existing == 1 -class ModifyTest(unittest.TestCase, TestHelper): +class ModifyTest(BeetsTestCase): def setUp(self): - self.setup_beets() + super().setUp() self.album = self.add_album_fixture() [self.item] = self.album.items() - def tearDown(self): - self.teardown_beets() - def modify_inp(self, inp, *args): with control_stdin(inp): self.run_command("modify", *args) @@ -210,50 +202,50 @@ class ModifyTest(unittest.TestCase, TestHelper): def test_modify_item(self): self.modify("title=newTitle") item = self.lib.items().get() - self.assertEqual(item.title, "newTitle") + assert item.title == "newTitle" def test_modify_item_abort(self): item = self.lib.items().get() title = item.title self.modify_inp("n", "title=newTitle") item = self.lib.items().get() - self.assertEqual(item.title, title) + assert item.title == title def test_modify_item_no_change(self): title = "Tracktitle" item = self.add_item_fixture(title=title) self.modify_inp("y", "title", f"title={title}") item = self.lib.items(title).get() - self.assertEqual(item.title, title) + assert item.title == title def test_modify_write_tags(self): self.modify("title=newTitle") item = self.lib.items().get() item.read() - self.assertEqual(item.title, "newTitle") + assert item.title == "newTitle" def test_modify_dont_write_tags(self): self.modify("--nowrite", "title=newTitle") item = self.lib.items().get() item.read() - self.assertNotEqual(item.title, "newTitle") + assert item.title != "newTitle" def test_move(self): self.modify("title=newTitle") item = self.lib.items().get() - self.assertIn(b"newTitle", item.path) + assert b"newTitle" in item.path def test_not_move(self): self.modify("--nomove", "title=newTitle") item = self.lib.items().get() - self.assertNotIn(b"newTitle", item.path) + assert b"newTitle" not in item.path def test_no_write_no_move(self): self.modify("--nomove", "--nowrite", "title=newTitle") item = self.lib.items().get() item.read() - self.assertNotIn(b"newTitle", item.path) - self.assertNotEqual(item.title, "newTitle") + assert b"newTitle" not in item.path + assert item.title != "newTitle" def test_update_mtime(self): item = self.item @@ -261,15 +253,15 @@ class ModifyTest(unittest.TestCase, TestHelper): self.modify("title=newTitle") item.load() - self.assertNotEqual(old_mtime, item.mtime) - self.assertEqual(item.current_mtime(), item.mtime) + assert old_mtime != item.mtime + assert item.current_mtime() == item.mtime def test_reset_mtime_with_no_write(self): item = self.item self.modify("--nowrite", "title=newTitle") item.load() - self.assertEqual(0, item.mtime) + assert 0 == item.mtime def test_selective_modify(self): title = "Tracktitle" @@ -285,8 +277,8 @@ class ModifyTest(unittest.TestCase, TestHelper): ) original_items = self.lib.items(f"artist:{original_artist}") new_items = self.lib.items(f"artist:{new_artist}") - self.assertEqual(len(list(original_items)), 3) - self.assertEqual(len(list(new_items)), 7) + assert len(list(original_items)) == 3 + assert len(list(new_items)) == 7 def test_modify_formatted(self): for i in range(0, 3): @@ -298,45 +290,45 @@ class ModifyTest(unittest.TestCase, TestHelper): for item in items: orig_title = item.title item.load() - self.assertEqual(item.title, f"{orig_title} - append") + assert item.title == f"{orig_title} - append" # Album Tests def 
test_modify_album(self): self.modify("--album", "album=newAlbum") album = self.lib.albums().get() - self.assertEqual(album.album, "newAlbum") + assert album.album == "newAlbum" def test_modify_album_write_tags(self): self.modify("--album", "album=newAlbum") item = self.lib.items().get() item.read() - self.assertEqual(item.album, "newAlbum") + assert item.album == "newAlbum" def test_modify_album_dont_write_tags(self): self.modify("--album", "--nowrite", "album=newAlbum") item = self.lib.items().get() item.read() - self.assertEqual(item.album, "the album") + assert item.album == "the album" def test_album_move(self): self.modify("--album", "album=newAlbum") item = self.lib.items().get() item.read() - self.assertIn(b"newAlbum", item.path) + assert b"newAlbum" in item.path def test_album_not_move(self): self.modify("--nomove", "--album", "album=newAlbum") item = self.lib.items().get() item.read() - self.assertNotIn(b"newAlbum", item.path) + assert b"newAlbum" not in item.path def test_modify_album_formatted(self): item = self.lib.items().get() orig_album = item.album self.modify("--album", "album=${album} - append") item.load() - self.assertEqual(item.album, f"{orig_album} - append") + assert item.album == f"{orig_album} - append" # Misc @@ -344,12 +336,12 @@ class ModifyTest(unittest.TestCase, TestHelper): self.modify("initial_key=C#m") item = self.lib.items().get() mediafile = MediaFile(syspath(item.path)) - self.assertEqual(mediafile.initial_key, "C#m") + assert mediafile.initial_key == "C#m" def test_set_flexattr(self): self.modify("flexattr=testAttr") item = self.lib.items().get() - self.assertEqual(item.flexattr, "testAttr") + assert item.flexattr == "testAttr" def test_remove_flexattr(self): item = self.lib.items().get() @@ -358,7 +350,7 @@ class ModifyTest(unittest.TestCase, TestHelper): self.modify("flexattr!") item = self.lib.items().get() - self.assertNotIn("flexattr", item) + assert "flexattr" not in item @unittest.skip("not yet implemented") def test_delete_initial_key_tag(self): @@ -368,48 +360,42 @@ class ModifyTest(unittest.TestCase, TestHelper): item.store() mediafile = MediaFile(syspath(item.path)) - self.assertEqual(mediafile.initial_key, "C#m") + assert mediafile.initial_key == "C#m" self.modify("initial_key!") mediafile = MediaFile(syspath(item.path)) - self.assertIsNone(mediafile.initial_key) + assert mediafile.initial_key is None def test_arg_parsing_colon_query(self): (query, mods, dels) = commands.modify_parse_args( ["title:oldTitle", "title=newTitle"] ) - self.assertEqual(query, ["title:oldTitle"]) - self.assertEqual(mods, {"title": "newTitle"}) + assert query == ["title:oldTitle"] + assert mods == {"title": "newTitle"} def test_arg_parsing_delete(self): (query, mods, dels) = commands.modify_parse_args( ["title:oldTitle", "title!"] ) - self.assertEqual(query, ["title:oldTitle"]) - self.assertEqual(dels, ["title"]) + assert query == ["title:oldTitle"] + assert dels == ["title"] def test_arg_parsing_query_with_exclaimation(self): (query, mods, dels) = commands.modify_parse_args( ["title:oldTitle!", "title=newTitle!"] ) - self.assertEqual(query, ["title:oldTitle!"]) - self.assertEqual(mods, {"title": "newTitle!"}) + assert query == ["title:oldTitle!"] + assert mods == {"title": "newTitle!"} def test_arg_parsing_equals_in_value(self): (query, mods, dels) = commands.modify_parse_args( ["title:foo=bar", "title=newTitle"] ) - self.assertEqual(query, ["title:foo=bar"]) - self.assertEqual(mods, {"title": "newTitle"}) + assert query == ["title:foo=bar"] + assert mods == 
{"title": "newTitle"} -class WriteTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - - def tearDown(self): - self.teardown_beets() - +class WriteTest(BeetsTestCase): def write_cmd(self, *args): return self.run_with_output("write", *args) @@ -419,11 +405,11 @@ class WriteTest(unittest.TestCase, TestHelper): item.store() item = self.lib.items().get() - self.assertEqual(item.mtime, 0) + assert item.mtime == 0 self.write_cmd() item = self.lib.items().get() - self.assertEqual(item.mtime, item.current_mtime()) + assert item.mtime == item.current_mtime() def test_non_metadata_field_unchanged(self): """Changing a non-"tag" field like `bitrate` and writing should @@ -439,7 +425,7 @@ class WriteTest(unittest.TestCase, TestHelper): output = self.write_cmd() - self.assertEqual(output, "") + assert output == "" def test_write_metadata_field(self): item = self.add_item_fixture() @@ -451,18 +437,15 @@ class WriteTest(unittest.TestCase, TestHelper): output = self.write_cmd() - self.assertIn(f"{old_title} -> new title", output) + assert f"{old_title} -> new title" in output -class MoveTest(_common.TestCase): +class MoveTest(BeetsTestCase): def setUp(self): super().setUp() self.io.install() - self.libdir = os.path.join(self.temp_dir, b"testlibdir") - os.mkdir(syspath(self.libdir)) - self.itempath = os.path.join(self.libdir, b"srcfile") shutil.copy( syspath(os.path.join(_common.RSRC, b"full.mp3")), @@ -470,7 +453,6 @@ class MoveTest(_common.TestCase): ) # Add a file to the library but don't copy it in yet. - self.lib = library.Library(":memory:", self.libdir) self.i = library.Item.from_path(self.itempath) self.lib.add(self.i) self.album = self.lib.add_album([self.i]) @@ -494,84 +476,81 @@ class MoveTest(_common.TestCase): def test_move_item(self): self._move() self.i.load() - self.assertIn(b"testlibdir", self.i.path) + assert b"libdir" in self.i.path self.assertExists(self.i.path) self.assertNotExists(self.itempath) def test_copy_item(self): self._move(copy=True) self.i.load() - self.assertIn(b"testlibdir", self.i.path) + assert b"libdir" in self.i.path self.assertExists(self.i.path) self.assertExists(self.itempath) def test_move_album(self): self._move(album=True) self.i.load() - self.assertIn(b"testlibdir", self.i.path) + assert b"libdir" in self.i.path self.assertExists(self.i.path) self.assertNotExists(self.itempath) def test_copy_album(self): self._move(copy=True, album=True) self.i.load() - self.assertIn(b"testlibdir", self.i.path) + assert b"libdir" in self.i.path self.assertExists(self.i.path) self.assertExists(self.itempath) def test_move_item_custom_dir(self): self._move(dest=self.otherdir) self.i.load() - self.assertIn(b"testotherdir", self.i.path) + assert b"testotherdir" in self.i.path self.assertExists(self.i.path) self.assertNotExists(self.itempath) def test_move_album_custom_dir(self): self._move(dest=self.otherdir, album=True) self.i.load() - self.assertIn(b"testotherdir", self.i.path) + assert b"testotherdir" in self.i.path self.assertExists(self.i.path) self.assertNotExists(self.itempath) def test_pretend_move_item(self): self._move(dest=self.otherdir, pretend=True) self.i.load() - self.assertIn(b"srcfile", self.i.path) + assert b"srcfile" in self.i.path def test_pretend_move_album(self): self._move(album=True, pretend=True) self.i.load() - self.assertIn(b"srcfile", self.i.path) + assert b"srcfile" in self.i.path def test_export_item_custom_dir(self): self._move(dest=self.otherdir, export=True) self.i.load() - self.assertEqual(self.i.path, self.itempath) + 
assert self.i.path == self.itempath self.assertExists(self.otherdir) def test_export_album_custom_dir(self): self._move(dest=self.otherdir, album=True, export=True) self.i.load() - self.assertEqual(self.i.path, self.itempath) + assert self.i.path == self.itempath self.assertExists(self.otherdir) def test_pretend_export_item(self): self._move(dest=self.otherdir, pretend=True, export=True) self.i.load() - self.assertIn(b"srcfile", self.i.path) + assert b"srcfile" in self.i.path self.assertNotExists(self.otherdir) -class UpdateTest(_common.TestCase): +class UpdateTest(BeetsTestCase): def setUp(self): super().setUp() self.io.install() - self.libdir = os.path.join(self.temp_dir, b"testlibdir") - # Copy a file into the library. - self.lib = library.Library(":memory:", self.libdir) item_path = os.path.join(_common.RSRC, b"full.mp3") item_path_two = os.path.join(_common.RSRC, b"full.flac") self.i = library.Item.from_path(item_path) @@ -613,18 +592,18 @@ class UpdateTest(_common.TestCase): ) def test_delete_removes_item(self): - self.assertTrue(list(self.lib.items())) + assert list(self.lib.items()) util.remove(self.i.path) util.remove(self.i2.path) self._update() - self.assertFalse(list(self.lib.items())) + assert not list(self.lib.items()) def test_delete_removes_album(self): - self.assertTrue(self.lib.albums()) + assert self.lib.albums() util.remove(self.i.path) util.remove(self.i2.path) self._update() - self.assertFalse(self.lib.albums()) + assert not self.lib.albums() def test_delete_removes_album_art(self): artpath = self.album.artpath @@ -640,7 +619,7 @@ class UpdateTest(_common.TestCase): mf.save() self._update() item = self.lib.items().get() - self.assertEqual(item.title, "differentTitle") + assert item.title == "differentTitle" def test_modified_metadata_moved(self): mf = MediaFile(syspath(self.i.path)) @@ -648,7 +627,7 @@ class UpdateTest(_common.TestCase): mf.save() self._update(move=True) item = self.lib.items().get() - self.assertIn(b"differentTitle", item.path) + assert b"differentTitle" in item.path def test_modified_metadata_not_moved(self): mf = MediaFile(syspath(self.i.path)) @@ -656,7 +635,7 @@ class UpdateTest(_common.TestCase): mf.save() self._update(move=False) item = self.lib.items().get() - self.assertNotIn(b"differentTitle", item.path) + assert b"differentTitle" not in item.path def test_selective_modified_metadata_moved(self): mf = MediaFile(syspath(self.i.path)) @@ -665,8 +644,8 @@ class UpdateTest(_common.TestCase): mf.save() self._update(move=True, fields=["title"]) item = self.lib.items().get() - self.assertIn(b"differentTitle", item.path) - self.assertNotEqual(item.genre, "differentGenre") + assert b"differentTitle" in item.path + assert item.genre != "differentGenre" def test_selective_modified_metadata_not_moved(self): mf = MediaFile(syspath(self.i.path)) @@ -675,8 +654,8 @@ class UpdateTest(_common.TestCase): mf.save() self._update(move=False, fields=["title"]) item = self.lib.items().get() - self.assertNotIn(b"differentTitle", item.path) - self.assertNotEqual(item.genre, "differentGenre") + assert b"differentTitle" not in item.path + assert item.genre != "differentGenre" def test_modified_album_metadata_moved(self): mf = MediaFile(syspath(self.i.path)) @@ -684,7 +663,7 @@ class UpdateTest(_common.TestCase): mf.save() self._update(move=True) item = self.lib.items().get() - self.assertIn(b"differentAlbum", item.path) + assert b"differentAlbum" in item.path def test_modified_album_metadata_art_moved(self): artpath = self.album.artpath @@ -693,8 +672,8 @@ class 
UpdateTest(_common.TestCase): mf.save() self._update(move=True) album = self.lib.albums()[0] - self.assertNotEqual(artpath, album.artpath) - self.assertIsNotNone(album.artpath) + assert artpath != album.artpath + assert album.artpath is not None def test_selective_modified_album_metadata_moved(self): mf = MediaFile(syspath(self.i.path)) @@ -703,8 +682,8 @@ class UpdateTest(_common.TestCase): mf.save() self._update(move=True, fields=["album"]) item = self.lib.items().get() - self.assertIn(b"differentAlbum", item.path) - self.assertNotEqual(item.genre, "differentGenre") + assert b"differentAlbum" in item.path + assert item.genre != "differentGenre" def test_selective_modified_album_metadata_not_moved(self): mf = MediaFile(syspath(self.i.path)) @@ -713,8 +692,8 @@ class UpdateTest(_common.TestCase): mf.save() self._update(move=True, fields=["genre"]) item = self.lib.items().get() - self.assertNotIn(b"differentAlbum", item.path) - self.assertEqual(item.genre, "differentGenre") + assert b"differentAlbum" not in item.path + assert item.genre == "differentGenre" def test_mtime_match_skips_update(self): mf = MediaFile(syspath(self.i.path)) @@ -727,7 +706,7 @@ class UpdateTest(_common.TestCase): self._update(reset_mtime=False) item = self.lib.items().get() - self.assertEqual(item.title, "full") + assert item.title == "full" def test_multivalued_albumtype_roundtrip(self): # https://github.com/beetbox/beets/issues/4528 @@ -745,14 +724,14 @@ class UpdateTest(_common.TestCase): album.try_sync(write=True, move=False) album.load() - self.assertEqual(album.albumtype, correct_albumtype) - self.assertEqual(album.albumtypes, correct_albumtypes) + assert album.albumtype == correct_albumtype + assert album.albumtypes == correct_albumtypes self._update() album.load() - self.assertEqual(album.albumtype, correct_albumtype) - self.assertEqual(album.albumtypes, correct_albumtypes) + assert album.albumtype == correct_albumtype + assert album.albumtypes == correct_albumtypes def test_modified_metadata_excluded(self): mf = MediaFile(syspath(self.i.path)) @@ -760,10 +739,10 @@ class UpdateTest(_common.TestCase): mf.save() self._update(exclude_fields=["lyrics"]) item = self.lib.items().get() - self.assertNotEqual(item.lyrics, "new lyrics") + assert item.lyrics != "new lyrics" -class PrintTest(_common.TestCase): +class PrintTest(BeetsTestCase): def setUp(self): super().setUp() self.io.install() @@ -802,11 +781,12 @@ class PrintTest(_common.TestCase): del os.environ["LC_CTYPE"] -class ImportTest(_common.TestCase): +class ImportTest(BeetsTestCase): def test_quiet_timid_disallowed(self): config["import"]["quiet"] = True config["import"]["timid"] = True - self.assertRaises(ui.UserError, commands.import_files, None, [], None) + with pytest.raises(ui.UserError): + commands.import_files(None, [], None) def test_parse_paths_from_logfile(self): if os.path.__name__ == "ntpath": @@ -840,23 +820,28 @@ class ImportTest(_common.TestCase): with open(logfile, mode="w") as fp: fp.write(logfile_content) actual_paths = list(commands._paths_from_logfile(logfile)) - self.assertEqual(actual_paths, expected_paths) + assert actual_paths == expected_paths @_common.slow_test() -class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): +class TestPluginTestCase(PluginTestCase): + plugin = "test" + def setUp(self): - self.setup_beets() + super().setUp() + config["pluginpath"] = [_common.PLUGINPATH] + + +class ConfigTest(TestPluginTestCase): + def setUp(self): + super().setUp() # Don't use the BEETSDIR from `helper`. 
Instead, we point the home # directory there. Some tests will set `BEETSDIR` themselves. del os.environ["BEETSDIR"] - self._old_home = os.environ.get("HOME") - os.environ["HOME"] = os.fsdecode(self.temp_dir) # Also set APPDATA, the Windows equivalent of setting $HOME. - self._old_appdata = os.environ.get("APPDATA") - os.environ["APPDATA"] = os.fsdecode( + appdata_dir = os.fsdecode( os.path.join(self.temp_dir, b"AppData", b"Roaming") ) @@ -866,8 +851,8 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): # Default user configuration if platform.system() == "Windows": - self.user_config_dir = os.path.join( - self.temp_dir, b"AppData", b"Roaming", b"beets" + self.user_config_dir = os.fsencode( + os.path.join(appdata_dir, "beets") ) else: self.user_config_dir = os.path.join( @@ -881,21 +866,19 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): # Custom BEETSDIR self.beetsdir = os.path.join(self.temp_dir, b"beetsdir") os.makedirs(syspath(self.beetsdir)) + self.env_patcher = patch( + "os.environ", + {"HOME": os.fsdecode(self.temp_dir), "APPDATA": appdata_dir}, + ) + self.env_patcher.start() self._reset_config() - self.load_plugins() def tearDown(self): + self.env_patcher.stop() commands.default_commands.pop() os.chdir(syspath(self._orig_cwd)) - if self._old_home is not None: - os.environ["HOME"] = self._old_home - if self._old_appdata is None: - del os.environ["APPDATA"] - else: - os.environ["APPDATA"] = self._old_appdata - self.unload_plugins() - self.teardown_beets() + super().tearDown() def _make_test_cmd(self): test_cmd = ui.Subcommand("test", help="test") @@ -922,8 +905,8 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): self.run_command("test", lib=None) key, template = self.test_cmd.lib.path_formats[0] - self.assertEqual(key, "x") - self.assertEqual(template.original, "y") + assert key == "x" + assert template.original == "y" def test_default_paths_preserved(self): default_formats = ui.get_path_formats() @@ -933,15 +916,15 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): config.write("paths: {x: y}") self.run_command("test", lib=None) key, template = self.test_cmd.lib.path_formats[0] - self.assertEqual(key, "x") - self.assertEqual(template.original, "y") - self.assertEqual(self.test_cmd.lib.path_formats[1:], default_formats) + assert key == "x" + assert template.original == "y" + assert self.test_cmd.lib.path_formats[1:] == default_formats def test_nonexistant_db(self): with self.write_config_file() as config: config.write("library: /xxx/yyy/not/a/real/path") - with self.assertRaises(ui.UserError): + with pytest.raises(ui.UserError): self.run_command("test", lib=None) def test_user_config_file(self): @@ -949,7 +932,7 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): file.write("anoption: value") self.run_command("test", lib=None) - self.assertEqual(config["anoption"].get(), "value") + assert config["anoption"].get() == "value" def test_replacements_parsed(self): with self.write_config_file() as config: @@ -958,7 +941,7 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): self.run_command("test", lib=None) replacements = self.test_cmd.lib.replacements repls = [(p.pattern, s) for p, s in replacements] # Compare patterns. 
- self.assertEqual(repls, [("[xy]", "z")]) + assert repls == [("[xy]", "z")] def test_multiple_replacements_parsed(self): with self.write_config_file() as config: @@ -966,20 +949,14 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): self.run_command("test", lib=None) replacements = self.test_cmd.lib.replacements repls = [(p.pattern, s) for p, s in replacements] - self.assertEqual( - repls, - [ - ("[xy]", "z"), - ("foo", "bar"), - ], - ) + assert repls == [("[xy]", "z"), ("foo", "bar")] def test_cli_config_option(self): config_path = os.path.join(self.temp_dir, b"config.yaml") with open(config_path, "w") as file: file.write("anoption: value") self.run_command("--config", config_path, "test", lib=None) - self.assertEqual(config["anoption"].get(), "value") + assert config["anoption"].get() == "value" def test_cli_config_file_overwrites_user_defaults(self): with open(self.user_config_path, "w") as file: @@ -989,7 +966,7 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): with open(cli_config_path, "w") as file: file.write("anoption: cli overwrite") self.run_command("--config", cli_config_path, "test", lib=None) - self.assertEqual(config["anoption"].get(), "cli overwrite") + assert config["anoption"].get() == "cli overwrite" def test_cli_config_file_overwrites_beetsdir_defaults(self): os.environ["BEETSDIR"] = os.fsdecode(self.beetsdir) @@ -1001,7 +978,7 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): with open(cli_config_path, "w") as file: file.write("anoption: cli overwrite") self.run_command("--config", cli_config_path, "test", lib=None) - self.assertEqual(config["anoption"].get(), "cli overwrite") + assert config["anoption"].get() == "cli overwrite" # @unittest.skip('Difficult to implement with optparse') # def test_multiple_cli_config_files(self): @@ -1016,8 +993,8 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): # # self.run_command('--config', cli_config_path_1, # '--config', cli_config_path_2, 'test', lib=None) - # self.assertEqual(config['first'].get(), 'value') - # self.assertEqual(config['second'].get(), 'value') + # assert config['first'].get() == 'value' + # assert config['second'].get() == 'value' # # @unittest.skip('Difficult to implement with optparse') # def test_multiple_cli_config_overwrite(self): @@ -1033,7 +1010,7 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): # # self.run_command('--config', cli_config_path, # '--config', cli_overwrite_config_path, 'test') - # self.assertEqual(config['anoption'].get(), 'cli overwrite') + # assert config['anoption'].get() == 'cli overwrite' # FIXME: fails on windows @unittest.skipIf(sys.platform == "win32", "win32") @@ -1086,7 +1063,8 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): file.write("plugins: test") self.run_command("--config", cli_config_path, "plugin", lib=None) - self.assertTrue(plugins.find_plugins()[0].is_test_plugin) + assert plugins.find_plugins()[0].is_test_plugin + self.unload_plugins() def test_beetsdir_config(self): os.environ["BEETSDIR"] = os.fsdecode(self.beetsdir) @@ -1096,13 +1074,14 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): file.write("anoption: overwrite") config.read() - self.assertEqual(config["anoption"].get(), "overwrite") + assert config["anoption"].get() == "overwrite" def test_beetsdir_points_to_file_error(self): beetsdir = os.path.join(self.temp_dir, b"beetsfile") open(beetsdir, "a").close() os.environ["BEETSDIR"] = os.fsdecode(beetsdir) - 
self.assertRaises(ConfigError, self.run_command, "test") + with pytest.raises(ConfigError): + self.run_command("test") def test_beetsdir_config_does_not_load_default_user_config(self): os.environ["BEETSDIR"] = os.fsdecode(self.beetsdir) @@ -1111,7 +1090,7 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): file.write("anoption: value") config.read() - self.assertFalse(config["anoption"].exists()) + assert not config["anoption"].exists() def test_default_config_paths_resolve_relative_to_beetsdir(self): os.environ["BEETSDIR"] = os.fsdecode(self.beetsdir) @@ -1145,7 +1124,7 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions): ) -class ShowModelChangeTest(_common.TestCase): +class ShowModelChangeTest(BeetsTestCase): def setUp(self): super().setUp() self.io.install() @@ -1160,44 +1139,44 @@ class ShowModelChangeTest(_common.TestCase): def test_identical(self): change, out = self._show() - self.assertFalse(change) - self.assertEqual(out, "") + assert not change + assert out == "" def test_string_fixed_field_change(self): self.b.title = "x" change, out = self._show() - self.assertTrue(change) - self.assertIn("title", out) + assert change + assert "title" in out def test_int_fixed_field_change(self): self.b.track = 9 change, out = self._show() - self.assertTrue(change) - self.assertIn("track", out) + assert change + assert "track" in out def test_floats_close_to_identical(self): self.a.length = 1.00001 self.b.length = 1.00005 change, out = self._show() - self.assertFalse(change) - self.assertEqual(out, "") + assert not change + assert out == "" def test_floats_different(self): self.a.length = 1.00001 self.b.length = 2.00001 change, out = self._show() - self.assertTrue(change) - self.assertIn("length", out) + assert change + assert "length" in out def test_both_values_shown(self): self.a.title = "foo" self.b.title = "bar" change, out = self._show() - self.assertIn("foo", out) - self.assertIn("bar", out) + assert "foo" in out + assert "bar" in out -class ShowChangeTest(_common.TestCase): +class ShowChangeTest(BeetsTestCase): def setUp(self): super().setUp() self.io.install() @@ -1243,64 +1222,68 @@ class ShowChangeTest(_common.TestCase): def test_null_change(self): msg = self._show_change() - self.assertIn("match (90.0%)", msg) - self.assertIn("album, artist", msg) + assert "match (90.0%)" in msg + assert "album, artist" in msg def test_album_data_change(self): msg = self._show_change( cur_artist="another artist", cur_album="another album" ) - self.assertIn("another artist -> the artist", msg) - self.assertIn("another album -> the album", msg) + assert "another artist -> the artist" in msg + assert "another album -> the album" in msg def test_item_data_change(self): self.items[0].title = "different" msg = self._show_change() - self.assertTrue("different" in msg and "the title" in msg) + assert "different" in msg + assert "the title" in msg def test_item_data_change_with_unicode(self): self.items[0].title = "caf\xe9" msg = self._show_change() - self.assertTrue("caf\xe9" in msg and "the title" in msg) + assert "caf\xe9" in msg + assert "the title" in msg def test_album_data_change_with_unicode(self): msg = self._show_change(cur_artist="caf\xe9", cur_album="another album") - self.assertTrue("caf\xe9" in msg and "the artist" in msg) + assert "caf\xe9" in msg + assert "the artist" in msg def test_item_data_change_title_missing(self): self.items[0].title = "" msg = re.sub(r" +", " ", self._show_change()) - self.assertTrue("file.mp3" in msg and "the title" in msg) + 
assert "file.mp3" in msg + assert "the title" in msg def test_item_data_change_title_missing_with_unicode_filename(self): self.items[0].title = "" self.items[0].path = "/path/to/caf\xe9.mp3".encode() msg = re.sub(r" +", " ", self._show_change()) - self.assertTrue("caf\xe9.mp3" in msg or "caf.mp3" in msg) + assert "caf\xe9.mp3" in msg or "caf.mp3" in msg def test_colorize(self): - self.assertEqual("test", ui.uncolorize("test")) + assert "test" == ui.uncolorize("test") txt = ui.uncolorize("\x1b[31mtest\x1b[39;49;00m") - self.assertEqual("test", txt) + assert "test" == txt txt = ui.uncolorize("\x1b[31mtest\x1b[39;49;00m test") - self.assertEqual("test test", txt) + assert "test test" == txt txt = ui.uncolorize("\x1b[31mtest\x1b[39;49;00mtest") - self.assertEqual("testtest", txt) + assert "testtest" == txt txt = ui.uncolorize("test \x1b[31mtest\x1b[39;49;00m test") - self.assertEqual("test test test", txt) + assert "test test test" == txt def test_color_split(self): exp = ("test", "") res = ui.color_split("test", 5) - self.assertEqual(exp, res) + assert exp == res exp = ("\x1b[31mtes\x1b[39;49;00m", "\x1b[31mt\x1b[39;49;00m") res = ui.color_split("\x1b[31mtest\x1b[39;49;00m", 3) - self.assertEqual(exp, res) + assert exp == res def test_split_into_lines(self): # Test uncolored text txt = ui.split_into_lines("test test test", [5, 5, 5]) - self.assertEqual(txt, ["test", "test", "test"]) + assert txt == ["test", "test", "test"] # Test multiple colored texts colored_text = "\x1b[31mtest \x1b[39;49;00m" * 3 split_txt = [ @@ -1309,18 +1292,18 @@ class ShowChangeTest(_common.TestCase): "\x1b[31mtest\x1b[39;49;00m", ] txt = ui.split_into_lines(colored_text, [5, 5, 5]) - self.assertEqual(txt, split_txt) + assert txt == split_txt # Test single color, multi space text colored_text = "\x1b[31m test test test \x1b[39;49;00m" txt = ui.split_into_lines(colored_text, [5, 5, 5]) - self.assertEqual(txt, split_txt) + assert txt == split_txt # Test single color, different spacing colored_text = "\x1b[31mtest\x1b[39;49;00mtest test test" # ToDo: fix color_len to handle mid-text color escapes, and thus # split colored texts over newlines (potentially with dashes?) 
split_txt = ["\x1b[31mtest\x1b[39;49;00mt", "est", "test", "test"] txt = ui.split_into_lines(colored_text, [5, 5, 5]) - self.assertEqual(txt, split_txt) + assert txt == split_txt def test_album_data_change_wrap_newline(self): # Patch ui.term_width to force wrapping @@ -1332,9 +1315,9 @@ class ShowChangeTest(_common.TestCase): cur_artist=long_name, cur_album="another album" ) # _common.log.info("Message:{}".format(msg)) - self.assertIn("artist: another artist", msg) - self.assertIn(" -> the artist", msg) - self.assertNotIn("another album -> the album", msg) + assert "artist: another artist" in msg + assert " -> the artist" in msg + assert "another album -> the album" not in msg def test_item_data_change_wrap_column(self): # Patch ui.term_width to force wrapping @@ -1344,7 +1327,7 @@ class ShowChangeTest(_common.TestCase): long_title = "a track with a" + (" very" * 10) + " long name" self.items[0].title = long_title msg = self._show_change() - self.assertIn("(#1) a track (1:00) -> (#1) the title (0:00)", msg) + assert "(#1) a track (1:00) -> (#1) the title (0:00)" in msg def test_item_data_change_wrap_newline(self): # Patch ui.term_width to force wrapping @@ -1353,12 +1336,12 @@ class ShowChangeTest(_common.TestCase): long_title = "a track with a" + (" very" * 10) + " long name" self.items[0].title = long_title msg = self._show_change() - self.assertIn("(#1) a track with", msg) - self.assertIn(" -> (#1) the title (0:00)", msg) + assert "(#1) a track with" in msg + assert " -> (#1) the title (0:00)" in msg @patch("beets.library.Item.try_filesize", Mock(return_value=987)) -class SummarizeItemsTest(_common.TestCase): +class SummarizeItemsTest(BeetsTestCase): def setUp(self): super().setUp() item = library.Item() @@ -1369,59 +1352,53 @@ class SummarizeItemsTest(_common.TestCase): def test_summarize_item(self): summary = commands.summarize_items([], True) - self.assertEqual(summary, "") + assert summary == "" summary = commands.summarize_items([self.item], True) - self.assertEqual(summary, "F, 4kbps, 10:54, 987.0 B") + assert summary == "F, 4kbps, 10:54, 987.0 B" def test_summarize_items(self): summary = commands.summarize_items([], False) - self.assertEqual(summary, "0 items") + assert summary == "0 items" summary = commands.summarize_items([self.item], False) - self.assertEqual(summary, "1 items, F, 4kbps, 10:54, 987.0 B") + assert summary == "1 items, F, 4kbps, 10:54, 987.0 B" # make a copy of self.item i2 = self.item.copy() summary = commands.summarize_items([self.item, i2], False) - self.assertEqual(summary, "2 items, F, 4kbps, 21:48, 1.9 KiB") + assert summary == "2 items, F, 4kbps, 21:48, 1.9 KiB" i2.format = "G" summary = commands.summarize_items([self.item, i2], False) - self.assertEqual(summary, "2 items, F 1, G 1, 4kbps, 21:48, 1.9 KiB") + assert summary == "2 items, F 1, G 1, 4kbps, 21:48, 1.9 KiB" summary = commands.summarize_items([self.item, i2, i2], False) - self.assertEqual(summary, "3 items, G 2, F 1, 4kbps, 32:42, 2.9 KiB") + assert summary == "3 items, G 2, F 1, 4kbps, 32:42, 2.9 KiB" -class PathFormatTest(_common.TestCase): +class PathFormatTest(BeetsTestCase): def test_custom_paths_prepend(self): default_formats = ui.get_path_formats() config["paths"] = {"foo": "bar"} pf = ui.get_path_formats() key, tmpl = pf[0] - self.assertEqual(key, "foo") - self.assertEqual(tmpl.original, "bar") - self.assertEqual(pf[1:], default_formats) + assert key == "foo" + assert tmpl.original == "bar" + assert pf[1:] == default_formats @_common.slow_test() -class PluginTest(_common.TestCase, 
TestHelper): +class PluginTest(TestPluginTestCase): def test_plugin_command_from_pluginpath(self): - config["pluginpath"] = [_common.PLUGINPATH] - config["plugins"] = ["test"] self.run_command("test", lib=None) @_common.slow_test() -class CompletionTest(_common.TestCase, TestHelper): +class CompletionTest(TestPluginTestCase): def test_completion(self): - # Load plugin commands - config["pluginpath"] = [_common.PLUGINPATH] - config["plugins"] = ["test"] - # Do not load any other bash completion scripts on the system. env = dict(os.environ) env["BASH_COMPLETION_DIR"] = os.devnull @@ -1461,154 +1438,143 @@ class CompletionTest(_common.TestCase, TestHelper): with open(test_script_name, "rb") as test_script_file: tester.stdin.writelines(test_script_file) out, err = tester.communicate() - self.assertFalse( - tester.returncode != 0 or out != b"completion tests passed\n", - f"test/test_completion.sh did not execute properly. " - f'Output:{out.decode("utf-8")}', + assert tester.returncode == 0 + assert out == b"completion tests passed\n", ( + "test/test_completion.sh did not execute properly. " + f'Output:{out.decode("utf-8")}' ) -class CommonOptionsParserCliTest(unittest.TestCase, TestHelper): +class CommonOptionsParserCliTest(BeetsTestCase): """Test CommonOptionsParser and formatting LibModel formatting on 'list' command. """ def setUp(self): - self.setup_beets() + super().setUp() self.item = _common.item() self.item.path = b"xxx/yyy" self.lib.add(self.item) self.lib.add_album([self.item]) - self.load_plugins() - - def tearDown(self): - self.unload_plugins() - self.teardown_beets() def test_base(self): - l = self.run_with_output("ls") - self.assertEqual(l, "the artist - the album - the title\n") + output = self.run_with_output("ls") + assert output == "the artist - the album - the title\n" - l = self.run_with_output("ls", "-a") - self.assertEqual(l, "the album artist - the album\n") + output = self.run_with_output("ls", "-a") + assert output == "the album artist - the album\n" def test_path_option(self): - l = self.run_with_output("ls", "-p") - self.assertEqual(l, "xxx/yyy\n") + output = self.run_with_output("ls", "-p") + assert output == "xxx/yyy\n" - l = self.run_with_output("ls", "-a", "-p") - self.assertEqual(l, "xxx\n") + output = self.run_with_output("ls", "-a", "-p") + assert output == "xxx\n" def test_format_option(self): - l = self.run_with_output("ls", "-f", "$artist") - self.assertEqual(l, "the artist\n") + output = self.run_with_output("ls", "-f", "$artist") + assert output == "the artist\n" - l = self.run_with_output("ls", "-a", "-f", "$albumartist") - self.assertEqual(l, "the album artist\n") + output = self.run_with_output("ls", "-a", "-f", "$albumartist") + assert output == "the album artist\n" def test_format_option_unicode(self): - l = self.run_with_output( + output = self.run_with_output( b"ls", b"-f", "caf\xe9".encode(util.arg_encoding()) ) - self.assertEqual(l, "caf\xe9\n") + assert output == "caf\xe9\n" def test_root_format_option(self): - l = self.run_with_output( + output = self.run_with_output( "--format-item", "$artist", "--format-album", "foo", "ls" ) - self.assertEqual(l, "the artist\n") + assert output == "the artist\n" - l = self.run_with_output( + output = self.run_with_output( "--format-item", "foo", "--format-album", "$albumartist", "ls", "-a" ) - self.assertEqual(l, "the album artist\n") + assert output == "the album artist\n" def test_help(self): - l = self.run_with_output("help") - self.assertIn("Usage:", l) + output = self.run_with_output("help") + assert 
"Usage:" in output - l = self.run_with_output("help", "list") - self.assertIn("Usage:", l) + output = self.run_with_output("help", "list") + assert "Usage:" in output - with self.assertRaises(ui.UserError): + with pytest.raises(ui.UserError): self.run_command("help", "this.is.not.a.real.command") def test_stats(self): - l = self.run_with_output("stats") - self.assertIn("Approximate total size:", l) + output = self.run_with_output("stats") + assert "Approximate total size:" in output # # Need to have more realistic library setup for this to work - # l = self.run_with_output('stats', '-e') - # self.assertIn('Total size:', l) + # output = self.run_with_output('stats', '-e') + # assert 'Total size:' in output def test_version(self): - l = self.run_with_output("version") - self.assertIn("Python version", l) - self.assertIn("no plugins loaded", l) + output = self.run_with_output("version") + assert "Python version" in output + assert "no plugins loaded" in output # # Need to have plugin loaded - # l = self.run_with_output('version') - # self.assertIn('plugins: ', l) + # output = self.run_with_output('version') + # assert 'plugins: ' in output -class CommonOptionsParserTest(unittest.TestCase, TestHelper): - def setUp(self): - self.setup_beets() - - def tearDown(self): - self.teardown_beets() - +class CommonOptionsParserTest(BeetsTestCase): def test_album_option(self): parser = ui.CommonOptionsParser() - self.assertFalse(parser._album_flags) + assert not parser._album_flags parser.add_album_option() - self.assertTrue(bool(parser._album_flags)) + assert bool(parser._album_flags) - self.assertEqual(parser.parse_args([]), ({"album": None}, [])) - self.assertEqual(parser.parse_args(["-a"]), ({"album": True}, [])) - self.assertEqual(parser.parse_args(["--album"]), ({"album": True}, [])) + assert parser.parse_args([]) == ({"album": None}, []) + assert parser.parse_args(["-a"]) == ({"album": True}, []) + assert parser.parse_args(["--album"]) == ({"album": True}, []) def test_path_option(self): parser = ui.CommonOptionsParser() parser.add_path_option() - self.assertFalse(parser._album_flags) + assert not parser._album_flags config["format_item"].set("$foo") - self.assertEqual(parser.parse_args([]), ({"path": None}, [])) - self.assertEqual(config["format_item"].as_str(), "$foo") + assert parser.parse_args([]) == ({"path": None}, []) + assert config["format_item"].as_str() == "$foo" - self.assertEqual( - parser.parse_args(["-p"]), ({"path": True, "format": "$path"}, []) + assert parser.parse_args(["-p"]) == ( + {"path": True, "format": "$path"}, + [], ) - self.assertEqual( - parser.parse_args(["--path"]), - ({"path": True, "format": "$path"}, []), + assert parser.parse_args(["--path"]) == ( + {"path": True, "format": "$path"}, + [], ) - self.assertEqual(config["format_item"].as_str(), "$path") - self.assertEqual(config["format_album"].as_str(), "$path") + assert config["format_item"].as_str() == "$path" + assert config["format_album"].as_str() == "$path" def test_format_option(self): parser = ui.CommonOptionsParser() parser.add_format_option() - self.assertFalse(parser._album_flags) + assert not parser._album_flags config["format_item"].set("$foo") - self.assertEqual(parser.parse_args([]), ({"format": None}, [])) - self.assertEqual(config["format_item"].as_str(), "$foo") + assert parser.parse_args([]) == ({"format": None}, []) + assert config["format_item"].as_str() == "$foo" - self.assertEqual( - parser.parse_args(["-f", "$bar"]), ({"format": "$bar"}, []) - ) - self.assertEqual( - 
parser.parse_args(["--format", "$baz"]), ({"format": "$baz"}, []) + assert parser.parse_args(["-f", "$bar"]) == ({"format": "$bar"}, []) + assert parser.parse_args(["--format", "$baz"]) == ( + {"format": "$baz"}, + [], ) - self.assertEqual(config["format_item"].as_str(), "$baz") - self.assertEqual(config["format_album"].as_str(), "$baz") + assert config["format_item"].as_str() == "$baz" + assert config["format_album"].as_str() == "$baz" def test_format_option_with_target(self): - with self.assertRaises(KeyError): + with pytest.raises(KeyError): ui.CommonOptionsParser().add_format_option(target="thingy") parser = ui.CommonOptionsParser() @@ -1617,12 +1583,10 @@ class CommonOptionsParserTest(unittest.TestCase, TestHelper): config["format_item"].set("$item") config["format_album"].set("$album") - self.assertEqual( - parser.parse_args(["-f", "$bar"]), ({"format": "$bar"}, []) - ) + assert parser.parse_args(["-f", "$bar"]) == ({"format": "$bar"}, []) - self.assertEqual(config["format_item"].as_str(), "$bar") - self.assertEqual(config["format_album"].as_str(), "$album") + assert config["format_item"].as_str() == "$bar" + assert config["format_album"].as_str() == "$album" def test_format_option_with_album(self): parser = ui.CommonOptionsParser() @@ -1633,52 +1597,44 @@ class CommonOptionsParserTest(unittest.TestCase, TestHelper): config["format_album"].set("$album") parser.parse_args(["-f", "$bar"]) - self.assertEqual(config["format_item"].as_str(), "$bar") - self.assertEqual(config["format_album"].as_str(), "$album") + assert config["format_item"].as_str() == "$bar" + assert config["format_album"].as_str() == "$album" parser.parse_args(["-a", "-f", "$foo"]) - self.assertEqual(config["format_item"].as_str(), "$bar") - self.assertEqual(config["format_album"].as_str(), "$foo") + assert config["format_item"].as_str() == "$bar" + assert config["format_album"].as_str() == "$foo" parser.parse_args(["-f", "$foo2", "-a"]) - self.assertEqual(config["format_album"].as_str(), "$foo2") + assert config["format_album"].as_str() == "$foo2" def test_add_all_common_options(self): parser = ui.CommonOptionsParser() parser.add_all_common_options() - self.assertEqual( - parser.parse_args([]), - ({"album": None, "path": None, "format": None}, []), + assert parser.parse_args([]) == ( + {"album": None, "path": None, "format": None}, + [], ) -class EncodingTest(_common.TestCase): +class EncodingTest(BeetsTestCase): """Tests for the `terminal_encoding` config option and our `_in_encoding` and `_out_encoding` utility functions. 
""" def out_encoding_overridden(self): config["terminal_encoding"] = "fake_encoding" - self.assertEqual(ui._out_encoding(), "fake_encoding") + assert ui._out_encoding() == "fake_encoding" def in_encoding_overridden(self): config["terminal_encoding"] = "fake_encoding" - self.assertEqual(ui._in_encoding(), "fake_encoding") + assert ui._in_encoding() == "fake_encoding" def out_encoding_default_utf8(self): with patch("sys.stdout") as stdout: stdout.encoding = None - self.assertEqual(ui._out_encoding(), "utf-8") + assert ui._out_encoding() == "utf-8" def in_encoding_default_utf8(self): with patch("sys.stdin") as stdin: stdin.encoding = None - self.assertEqual(ui._in_encoding(), "utf-8") - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert ui._in_encoding() == "utf-8" diff --git a/test/test_ui_commands.py b/test/test_ui_commands.py index f371a1ab1..897cba8a1 100644 --- a/test/test_ui_commands.py +++ b/test/test_ui_commands.py @@ -12,33 +12,21 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Test module for file ui/commands.py -""" - +"""Test module for file ui/commands.py""" import os import shutil -import unittest + +import pytest from beets import library, ui from beets.test import _common +from beets.test.helper import BeetsTestCase, ItemInDBTestCase from beets.ui import commands from beets.util import syspath -class QueryTest(_common.TestCase): - def setUp(self): - super().setUp() - - self.libdir = os.path.join(self.temp_dir, b"testlibdir") - os.mkdir(syspath(self.libdir)) - - # Add a file to the library but don't copy it in yet. - self.lib = library.Library(":memory:", self.libdir) - - # Alternate destination directory. 
- # self.otherdir = os.path.join(self.temp_dir, b"testotherdir") - +class QueryTest(BeetsTestCase): def add_item(self, filename=b"srcfile", templatefile=b"full.mp3"): itempath = os.path.join(self.libdir, filename) shutil.copy( @@ -57,15 +45,15 @@ class QueryTest(_common.TestCase): self, num_items, num_albums, q=(), album=False, also_items=True ): items, albums = commands._do_query(self.lib, q, album, also_items) - self.assertEqual(len(items), num_items) - self.assertEqual(len(albums), num_albums) + assert len(items) == num_items + assert len(albums) == num_albums def test_query_empty(self): - with self.assertRaises(ui.UserError): + with pytest.raises(ui.UserError): commands._do_query(self.lib, (), False) def test_query_empty_album(self): - with self.assertRaises(ui.UserError): + with pytest.raises(ui.UserError): commands._do_query(self.lib, (), True) def test_query_item(self): @@ -87,7 +75,7 @@ class QueryTest(_common.TestCase): self.check_do_query(0, 2, album=True, also_items=False) -class FieldsTest(_common.LibTestCase): +class FieldsTest(ItemInDBTestCase): def setUp(self): super().setUp() @@ -97,10 +85,10 @@ class FieldsTest(_common.LibTestCase): super().tearDown() self.io.restore() - def remove_keys(self, l, text): + def remove_keys(self, keys, text): for i in text: try: - l.remove(i) + keys.remove(i) except ValueError: pass @@ -113,13 +101,5 @@ class FieldsTest(_common.LibTestCase): self.remove_keys(items, output) self.remove_keys(albums, output) - self.assertEqual(len(items), 0) - self.assertEqual(len(albums), 0) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert len(items) == 0 + assert len(albums) == 0 diff --git a/test/test_ui_importer.py b/test/test_ui_importer.py index 3991376ac..5a27a5b93 100644 --- a/test/test_ui_importer.py +++ b/test/test_ui_importer.py @@ -18,67 +18,53 @@ test_importer module. But here the test importer inherits from ``TerminalImportSession``. So we test this class, too. 
""" -import unittest +from beets.test.helper import TerminalImportMixin from test import test_importer -from beets.test.helper import TerminalImportSessionSetup - class NonAutotaggedImportTest( - TerminalImportSessionSetup, test_importer.NonAutotaggedImportTest + TerminalImportMixin, test_importer.NonAutotaggedImportTest ): pass -class ImportTest(TerminalImportSessionSetup, test_importer.ImportTest): +class ImportTest(TerminalImportMixin, test_importer.ImportTest): pass class ImportSingletonTest( - TerminalImportSessionSetup, test_importer.ImportSingletonTest + TerminalImportMixin, test_importer.ImportSingletonTest ): pass -class ImportTracksTest( - TerminalImportSessionSetup, test_importer.ImportTracksTest -): +class ImportTracksTest(TerminalImportMixin, test_importer.ImportTracksTest): pass class ImportCompilationTest( - TerminalImportSessionSetup, test_importer.ImportCompilationTest + TerminalImportMixin, test_importer.ImportCompilationTest ): pass -class ImportExistingTest( - TerminalImportSessionSetup, test_importer.ImportExistingTest -): +class ImportExistingTest(TerminalImportMixin, test_importer.ImportExistingTest): pass class ChooseCandidateTest( - TerminalImportSessionSetup, test_importer.ChooseCandidateTest + TerminalImportMixin, test_importer.ChooseCandidateTest ): pass class GroupAlbumsImportTest( - TerminalImportSessionSetup, test_importer.GroupAlbumsImportTest + TerminalImportMixin, test_importer.GroupAlbumsImportTest ): pass class GlobalGroupAlbumsImportTest( - TerminalImportSessionSetup, test_importer.GlobalGroupAlbumsImportTest + TerminalImportMixin, test_importer.GlobalGroupAlbumsImportTest ): pass - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/test_ui_init.py b/test/test_ui_init.py index 77c9c784b..a6f06c494 100644 --- a/test/test_ui_init.py +++ b/test/test_ui_init.py @@ -12,21 +12,19 @@ # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. 
-"""Test module for file ui/__init__.py -""" +"""Test module for file ui/__init__.py""" import os import shutil -import unittest from copy import deepcopy from random import random from beets import config, ui from beets.test import _common -from beets.test.helper import control_stdin +from beets.test.helper import BeetsTestCase, ItemInDBTestCase, control_stdin -class InputMethodsTest(_common.TestCase): +class InputMethodsTest(BeetsTestCase): def setUp(self): super().setUp() self.io.install() @@ -45,14 +43,14 @@ class InputMethodsTest(_common.TestCase): items = ui.input_select_objects( "Prompt", full_items, self._print_helper ) - self.assertEqual(items, []) + assert items == [] # Test yes self.io.addinput("y") items = ui.input_select_objects( "Prompt", full_items, self._print_helper ) - self.assertEqual(items, full_items) + assert items == full_items # Test selective 1 self.io.addinput("s") @@ -64,7 +62,7 @@ class InputMethodsTest(_common.TestCase): items = ui.input_select_objects( "Prompt", full_items, self._print_helper ) - self.assertEqual(items, ["2", "4"]) + assert items == ["2", "4"] # Test selective 2 self.io.addinput("s") @@ -76,7 +74,7 @@ class InputMethodsTest(_common.TestCase): items = ui.input_select_objects( "Prompt", full_items, lambda s: self._print_helper2(s, "Prefix") ) - self.assertEqual(items, ["1", "2", "4"]) + assert items == ["1", "2", "4"] # Test selective 3 self.io.addinput("s") @@ -87,13 +85,10 @@ class InputMethodsTest(_common.TestCase): items = ui.input_select_objects( "Prompt", full_items, self._print_helper ) - self.assertEqual(items, ["1", "3"]) + assert items == ["1", "3"] -class InitTest(_common.LibTestCase): - def setUp(self): - super().setUp() - +class InitTest(ItemInDBTestCase): def test_human_bytes(self): tests = [ (0, "0.0 B"), @@ -110,7 +105,7 @@ class InitTest(_common.LibTestCase): (pow(2, 100), "big"), ] for i, h in tests: - self.assertEqual(h, ui.human_bytes(i)) + assert h == ui.human_bytes(i) def test_human_seconds(self): tests = [ @@ -126,10 +121,10 @@ class InitTest(_common.LibTestCase): (314496000, "1.0 decades"), ] for i, h in tests: - self.assertEqual(h, ui.human_seconds(i)) + assert h == ui.human_seconds(i) -class ParentalDirCreation(_common.TestCase): +class ParentalDirCreation(BeetsTestCase): def test_create_yes(self): non_exist_path = _common.os.fsdecode( os.path.join(self.temp_dir, b"nonexist", str(random()).encode()) @@ -163,11 +158,3 @@ class ParentalDirCreation(_common.TestCase): if lib: lib._close() raise OSError("Parent directories should not be created.") - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/test_util.py b/test/test_util.py index bb5d1e691..85534949f 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -11,8 +11,7 @@ # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. -"""Tests for base utils from the beets.util package. 
-""" +"""Tests for base utils from the beets.util package.""" import os import platform @@ -22,20 +21,23 @@ import sys import unittest from unittest.mock import Mock, patch +import pytest + from beets import util from beets.test import _common +from beets.test.helper import BeetsTestCase class UtilTest(unittest.TestCase): def test_open_anything(self): with _common.system_mock("Windows"): - self.assertEqual(util.open_anything(), "start") + assert util.open_anything() == "start" with _common.system_mock("Darwin"): - self.assertEqual(util.open_anything(), "open") + assert util.open_anything() == "open" with _common.system_mock("Tagada"): - self.assertEqual(util.open_anything(), "xdg-open") + assert util.open_anything() == "xdg-open" @patch("os.execlp") @patch("beets.util.open_anything") @@ -51,73 +53,56 @@ class UtilTest(unittest.TestCase): def test_sanitize_unix_replaces_leading_dot(self): with _common.platform_posix(): p = util.sanitize_path("one/.two/three") - self.assertNotIn(".", p) + assert "." not in p def test_sanitize_windows_replaces_trailing_dot(self): with _common.platform_windows(): p = util.sanitize_path("one/two./three") - self.assertNotIn(".", p) + assert "." not in p def test_sanitize_windows_replaces_illegal_chars(self): with _common.platform_windows(): p = util.sanitize_path(':*?"<>|') - self.assertNotIn(":", p) - self.assertNotIn("*", p) - self.assertNotIn("?", p) - self.assertNotIn('"', p) - self.assertNotIn("<", p) - self.assertNotIn(">", p) - self.assertNotIn("|", p) + assert ":" not in p + assert "*" not in p + assert "?" not in p + assert '"' not in p + assert "<" not in p + assert ">" not in p + assert "|" not in p def test_sanitize_windows_replaces_trailing_space(self): with _common.platform_windows(): p = util.sanitize_path("one/two /three") - self.assertNotIn(" ", p) + assert " " not in p def test_sanitize_path_works_on_empty_string(self): with _common.platform_posix(): p = util.sanitize_path("") - self.assertEqual(p, "") + assert p == "" def test_sanitize_with_custom_replace_overrides_built_in_sub(self): with _common.platform_posix(): - p = util.sanitize_path( - "a/.?/b", - [ - (re.compile(r"foo"), "bar"), - ], - ) - self.assertEqual(p, "a/.?/b") + p = util.sanitize_path("a/.?/b", [(re.compile(r"foo"), "bar")]) + assert p == "a/.?/b" def test_sanitize_with_custom_replace_adds_replacements(self): with _common.platform_posix(): - p = util.sanitize_path( - "foo/bar", - [ - (re.compile(r"foo"), "bar"), - ], - ) - self.assertEqual(p, "bar/bar") + p = util.sanitize_path("foo/bar", [(re.compile(r"foo"), "bar")]) + assert p == "bar/bar" @unittest.skip("unimplemented: #359") def test_sanitize_empty_component(self): with _common.platform_posix(): - p = util.sanitize_path( - "foo//bar", - [ - (re.compile(r"^$"), "_"), - ], - ) - self.assertEqual(p, "foo/_/bar") + p = util.sanitize_path("foo//bar", [(re.compile(r"^$"), "_")]) + assert p == "foo/_/bar" @unittest.skipIf(sys.platform == "win32", "win32") def test_convert_command_args_keeps_undecodeable_bytes(self): arg = b"\x82" # non-ascii bytes cmd_args = util.convert_command_args([arg]) - self.assertEqual( - cmd_args[0], arg.decode(util.arg_encoding(), "surrogateescape") - ) + assert cmd_args[0] == arg.decode(util.arg_encoding(), "surrogateescape") @patch("beets.util.subprocess.Popen") def test_command_output(self, mock_popen): @@ -127,10 +112,10 @@ class UtilTest(unittest.TestCase): return m mock_popen.side_effect = popen_fail - with self.assertRaises(subprocess.CalledProcessError) as exc_context: + with 
pytest.raises(subprocess.CalledProcessError) as exc_info: util.command_output(["taga", "\xc3\xa9"]) - self.assertEqual(exc_context.exception.returncode, 1) - self.assertEqual(exc_context.exception.cmd, "taga \xc3\xa9") + assert exc_info.value.returncode == 1 + assert exc_info.value.cmd == "taga \xc3\xa9" def test_case_sensitive_default(self): path = util.bytestring_path( @@ -139,10 +124,7 @@ class UtilTest(unittest.TestCase): ) ) - self.assertEqual( - util.case_sensitive(path), - platform.system() != "Windows", - ) + assert util.case_sensitive(path) == (platform.system() != "Windows") @unittest.skipIf(sys.platform == "win32", "fs is not case sensitive") def test_case_sensitive_detects_sensitive(self): @@ -157,13 +139,13 @@ class UtilTest(unittest.TestCase): pass -class PathConversionTest(_common.TestCase): +class PathConversionTest(BeetsTestCase): def test_syspath_windows_format(self): with _common.platform_windows(): path = os.path.join("a", "b", "c") outpath = util.syspath(path) - self.assertTrue(isinstance(outpath, str)) - self.assertTrue(outpath.startswith("\\\\?\\")) + assert isinstance(outpath, str) + assert outpath.startswith("\\\\?\\") def test_syspath_windows_format_unc_path(self): # The \\?\ prefix on Windows behaves differently with UNC @@ -171,14 +153,14 @@ class PathConversionTest(_common.TestCase): path = "\\\\server\\share\\file.mp3" with _common.platform_windows(): outpath = util.syspath(path) - self.assertTrue(isinstance(outpath, str)) - self.assertEqual(outpath, "\\\\?\\UNC\\server\\share\\file.mp3") + assert isinstance(outpath, str) + assert outpath == "\\\\?\\UNC\\server\\share\\file.mp3" def test_syspath_posix_unchanged(self): with _common.platform_posix(): path = os.path.join("a", "b", "c") outpath = util.syspath(path) - self.assertEqual(path, outpath) + assert path == outpath def _windows_bytestring_path(self, path): old_gfse = sys.getfilesystemencoding @@ -192,49 +174,26 @@ class PathConversionTest(_common.TestCase): def test_bytestring_path_windows_encodes_utf8(self): path = "caf\xe9" outpath = self._windows_bytestring_path(path) - self.assertEqual(path, outpath.decode("utf-8")) + assert path == outpath.decode("utf-8") def test_bytesting_path_windows_removes_magic_prefix(self): path = "\\\\?\\C:\\caf\xe9" outpath = self._windows_bytestring_path(path) - self.assertEqual(outpath, "C:\\caf\xe9".encode()) + assert outpath == "C:\\caf\xe9".encode() -class PathTruncationTest(_common.TestCase): +class PathTruncationTest(BeetsTestCase): def test_truncate_bytestring(self): with _common.platform_posix(): p = util.truncate_path(b"abcde/fgh", 4) - self.assertEqual(p, b"abcd/fgh") + assert p == b"abcd/fgh" def test_truncate_unicode(self): with _common.platform_posix(): p = util.truncate_path("abcde/fgh", 4) - self.assertEqual(p, "abcd/fgh") + assert p == "abcd/fgh" def test_truncate_preserves_extension(self): with _common.platform_posix(): p = util.truncate_path("abcde/fgh.ext", 5) - self.assertEqual(p, "abcde/f.ext") - - -class ConfitDeprecationTest(_common.TestCase): - def test_confit_deprecattion_warning_origin(self): - """Test that importing `confit` raises a warning. - - In addition, ensure that the warning originates from the actual - import statement, not the `confit` module. 
- """ - # See https://github.com/beetbox/beets/discussions/4024 - with self.assertWarns(UserWarning) as w: - import beets.util.confit # noqa: F401 - - self.assertIn(__file__, w.filename) - self.assertNotIn("confit.py", w.filename) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") + assert p == "abcde/f.ext" diff --git a/test/test_vfs.py b/test/test_vfs.py index 789356157..7f75fbd83 100644 --- a/test/test_vfs.py +++ b/test/test_vfs.py @@ -14,40 +14,28 @@ """Tests for the virtual filesystem builder..""" -import unittest - -from beets import library, vfs +from beets import vfs from beets.test import _common +from beets.test.helper import BeetsTestCase -class VFSTest(_common.TestCase): +class VFSTest(BeetsTestCase): def setUp(self): super().setUp() - self.lib = library.Library( - ":memory:", - path_formats=[ - ("default", "albums/$album/$title"), - ("singleton:true", "tracks/$artist/$title"), - ], - ) + self.lib.path_formats = [ + ("default", "albums/$album/$title"), + ("singleton:true", "tracks/$artist/$title"), + ] self.lib.add(_common.item()) self.lib.add_album([_common.item()]) self.tree = vfs.libtree(self.lib) def test_singleton_item(self): - self.assertEqual( - self.tree.dirs["tracks"].dirs["the artist"].files["the title"], 1 + assert ( + self.tree.dirs["tracks"].dirs["the artist"].files["the title"] == 1 ) def test_album_item(self): - self.assertEqual( - self.tree.dirs["albums"].dirs["the album"].files["the title"], 2 + assert ( + self.tree.dirs["albums"].dirs["the album"].files["the title"] == 2 ) - - -def suite(): - return unittest.TestLoader().loadTestsFromName(__name__) - - -if __name__ == "__main__": - unittest.main(defaultTest="suite") diff --git a/test/testall.py b/test/testall.py index 27d852a77..af83dea88 100755 --- a/test/testall.py +++ b/test/testall.py @@ -16,25 +16,7 @@ import os -import re import sys -import unittest pkgpath = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) or ".." sys.path.insert(0, pkgpath) - - -def suite(): - s = unittest.TestSuite() - # Get the suite() of every module in this directory beginning with - # "test_". - for fname in os.listdir(os.path.join(pkgpath, "test")): - match = re.match(r"(test_\S+)\.py$", fname) - if match: - modname = match.group(1) - s.addTest(__import__(modname).suite()) - return s - - -if __name__ == "__main__": - unittest.main(defaultTest="suite")