Mirror of https://github.com/beetbox/beets.git
Synced 2026-02-08 16:34:12 +01:00

Merge branch 'master' into master
Commit c8bfa8a8cc
193 changed files with 6343 additions and 4090 deletions
@@ -57,3 +57,19 @@ c490ac5810b70f3cf5fd8649669838e8fdb19f4d
 769dcdc88a1263638ae25944ba6b2be3e8933666
+# Reformat all docs using docstrfmt
+ab5acaabb3cd24c482adb7fa4800c89fd6a2f08d
+# Replace format calls with f-strings
+4a361bd501e85de12c91c2474c423559ca672852
+# Replace percent formatting
+9352a79e4108bd67f7e40b1e944c01e0a7353272
+# Replace string concatenation (' + ')
+1c16b2b3087e9c3635d68d41c9541c4319d0bdbe
+# Do not use backslashes to deal with long strings
+2fccf64efe82851861e195b521b14680b480a42a
+# Do not use explicit indices for logging args when not needed
+d93ddf8dd43e4f9ed072a03829e287c78d2570a2
+# Moved dev docs
+07549ed896d9649562d40b75cd30702e6fa6e975
+# Moved plugin docs Further Reading chapter
+33f1a5d0bef8ca08be79ee7a0d02a018d502680d
+# Moved art.py utility module from beets into beetsplug
+28aee0fde463f1e18dfdba1994e2bdb80833722f
.github/CODEOWNERS (vendored, new file, 5 additions)

@@ -0,0 +1,5 @@
+# assign the entire repo to the maintainers team
+* @beetbox/maintainers
+
+# Specific ownerships:
+/beets/metadata_plugins.py @semohr
.github/workflows/changelog_reminder.yaml (vendored, 2 changes)

@@ -10,7 +10,7 @@ jobs:
   check_changes:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5

      - name: Get all updated Python files
        id: changed-python-files
.github/workflows/ci.yaml (vendored, 20 changes)

@@ -20,17 +20,17 @@ jobs:
       fail-fast: false
       matrix:
         platform: [ubuntu-latest, windows-latest]
-        python-version: ["3.9", "3.10", "3.11", "3.12"]
+        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
     runs-on: ${{ matrix.platform }}
     env:
       IS_MAIN_PYTHON: ${{ matrix.python-version == '3.9' && matrix.platform == 'ubuntu-latest' }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Install Python tools
         uses: BrandonLWhite/pipx-install-action@v1.0.3
       - name: Setup Python with poetry caching
         # poetry cache requires poetry to already be installed, weirdly
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: ${{ matrix.python-version }}
           cache: poetry

@@ -39,7 +39,15 @@ jobs:
         if: matrix.platform == 'ubuntu-latest'
         run: |
           sudo apt update
-          sudo apt install --yes --no-install-recommends ffmpeg gobject-introspection gstreamer1.0-plugins-base python3-gst-1.0 libcairo2-dev libgirepository-2.0-dev pandoc imagemagick
+          sudo apt install --yes --no-install-recommends \
+            ffmpeg \
+            gobject-introspection \
+            gstreamer1.0-plugins-base \
+            python3-gst-1.0 \
+            libcairo2-dev \
+            libgirepository-2.0-dev \
+            pandoc \
+            imagemagick

       - name: Get changed lyrics files
         id: lyrics-update

@@ -90,10 +98,10 @@ jobs:
     permissions:
       id-token: write
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5

       - name: Get the coverage report
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@v5
         with:
           name: coverage-report
.github/workflows/integration_test.yaml (vendored, 4 changes)

@@ -7,10 +7,10 @@ jobs:
   test_integration:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Install Python tools
         uses: BrandonLWhite/pipx-install-action@v1.0.3
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
         with:
           python-version: 3.9
           cache: poetry
.github/workflows/lint.yml (vendored, 20 changes)

@@ -24,7 +24,7 @@ jobs:
       changed_doc_files: ${{ steps.changed-doc-files.outputs.all_changed_files }}
       changed_python_files: ${{ steps.changed-python-files.outputs.all_changed_files }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Get changed docs files
         id: changed-doc-files
         uses: tj-actions/changed-files@v46

@@ -56,10 +56,10 @@ jobs:
     name: Check formatting
     needs: changed-files
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Install Python tools
         uses: BrandonLWhite/pipx-install-action@v1.0.3
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           cache: poetry

@@ -77,10 +77,10 @@ jobs:
     name: Check linting
     needs: changed-files
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Install Python tools
         uses: BrandonLWhite/pipx-install-action@v1.0.3
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           cache: poetry

@@ -97,10 +97,10 @@ jobs:
     name: Check types with mypy
     needs: changed-files
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Install Python tools
         uses: BrandonLWhite/pipx-install-action@v1.0.3
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           cache: poetry

@@ -120,10 +120,10 @@ jobs:
     name: Check docs
     needs: changed-files
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Install Python tools
         uses: BrandonLWhite/pipx-install-action@v1.0.3
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           cache: poetry

@@ -143,4 +143,4 @@ jobs:
         run: poe lint-docs

       - name: Build docs
-        run: poe docs -e 'SPHINXOPTS=--fail-on-warning --keep-going'
+        run: poe docs -- -e 'SPHINXOPTS=--fail-on-warning --keep-going'
.github/workflows/make_release.yaml (vendored, 12 changes)

@@ -17,10 +17,10 @@ jobs:
     name: Bump version, commit and create tag
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Install Python tools
         uses: BrandonLWhite/pipx-install-action@v1.0.3
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: poetry

@@ -45,13 +45,13 @@ jobs:
     outputs:
       changelog: ${{ steps.generate_changelog.outputs.changelog }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
        with:
          ref: ${{ env.NEW_TAG }}

       - name: Install Python tools
         uses: BrandonLWhite/pipx-install-action@v1.0.3
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: poetry

@@ -92,7 +92,7 @@ jobs:
       id-token: write
     steps:
       - name: Download all the dists
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@v5
         with:
           name: python-package-distributions
           path: dist/

@@ -107,7 +107,7 @@ jobs:
       CHANGELOG: ${{ needs.build.outputs.changelog }}
     steps:
       - name: Download all the dists
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@v5
         with:
           name: python-package-distributions
           path: dist/
.gitignore (vendored, 3 additions)

@@ -94,3 +94,6 @@ ENV/

 # pyright
 pyrightconfig.json
+
+# Pyrefly
+pyrefly.toml
@@ -180,8 +180,7 @@ Your First Contribution
 If this is your first time contributing to an open source project, welcome! If
 you are confused at all about how to contribute or what to contribute, take a
 look at `this great tutorial <http://makeapullrequest.com/>`__, or stop by our
-`discussion board <https://github.com/beetbox/beets/discussions/>`__ if you have
-any questions.
+`discussion board`_ if you have any questions.

 We maintain a list of issues we reserved for those new to open source labeled
 `first timers only`_. Since the goal of these issues is to get users comfortable

@@ -216,6 +215,15 @@ will ship in no time.
 Remember, code contributions have four parts: the code, the tests, the
 documentation, and the changelog entry. Thank you for contributing!

+.. admonition:: Ownership
+
+   If you are the owner of a plugin, please consider reviewing pull requests
+   that affect your plugin. If you are not the owner of a plugin, please
+   consider becoming one! You can do so by adding an entry to
+   ``.github/CODEOWNERS``. This way, you will automatically receive a review
+   request for pull requests that adjust the code that you own. If you have any
+   questions, please ask on our `discussion board`_.
+
 The Code
 --------

@@ -238,25 +246,22 @@ There are a few coding conventions we use in beets:
   .. code-block:: python

      with g.lib.transaction() as tx:
-         rows = tx.query(
-             "SELECT DISTINCT '{0}' FROM '{1}' ORDER BY '{2}'".format(
-                 field, model._table, sort_field
-             )
-         )
+         rows = tx.query("SELECT DISTINCT {field} FROM {model._table} ORDER BY {sort_field}")

   To fetch Item objects from the database, use lib.items(…) and supply a query
   as an argument. Resist the urge to write raw SQL for your query. If you must
-  use lower-level queries into the database, do this:
+  use lower-level queries into the database, do this, for example:

   .. code-block:: python

      with lib.transaction() as tx:
-        rows = tx.query("SELECT …")
+        rows = tx.query("SELECT path FROM items WHERE album_id = ?", (album_id,))

   Transaction objects help control concurrent access to the database and assist
   in debugging conflicting accesses.

-- ``str.format()`` should be used instead of the ``%`` operator
+- f-strings should be used instead of the ``%`` operator and ``str.format()``
+  calls.
 - Never ``print`` informational messages; use the `logging
   <http://docs.python.org/library/logging.html>`__ module instead. In
   particular, we have our own logging shim, so you’ll see ``from beets import

@@ -264,7 +269,7 @@ There are a few coding conventions we use in beets:

 - The loggers use `str.format
   <http://docs.python.org/library/stdtypes.html#str.format>`__-style logging
-  instead of ``%``-style, so you can type ``log.debug("{0}", obj)`` to do your
+  instead of ``%``-style, so you can type ``log.debug("{}", obj)`` to do your
   formatting.

 - Exception handlers must use ``except A as B:`` instead of ``except A, B:``.

@@ -281,31 +286,6 @@ according to the specifications required by the project.
 Similarly, run ``poe format-docs`` and ``poe lint-docs`` to ensure consistent
 documentation formatting and check for any issues.

-Handling Paths
-~~~~~~~~~~~~~~
-
-A great deal of convention deals with the handling of **paths**. Paths are
-stored internally—in the database, for instance—as byte strings (i.e., ``bytes``
-instead of ``str`` in Python 3). This is because POSIX operating systems’ path
-names are only reliably usable as byte strings—operating systems typically
-recommend but do not require that filenames use a given encoding, so violations
-of any reported encoding are inevitable. On Windows, the strings are always
-encoded with UTF-8; on Unix, the encoding is controlled by the filesystem. Here
-are some guidelines to follow:
-
-- If you have a Unicode path or you’re not sure whether something is Unicode or
-  not, pass it through ``bytestring_path`` function in the ``beets.util`` module
-  to convert it to bytes.
-- Pass every path name through the ``syspath`` function (also in ``beets.util``)
-  before sending it to any *operating system* file operation (``open``, for
-  example). This is necessary to use long filenames (which, maddeningly, must be
-  Unicode) on Windows. This allows us to consistently store bytes in the
-  database but use the native encoding rule on both POSIX and Windows.
-- Similarly, the ``displayable_path`` utility function converts bytestring paths
-  to a Unicode string for displaying to the user. Every time you want to print
-  out a string to the terminal or log it with the ``logging`` module, feed it
-  through this function.
-
 Editor Settings
 ~~~~~~~~~~~~~~~

@@ -397,6 +377,8 @@ This way, the test will be run only in the integration test suite.

 .. _codecov: https://codecov.io/github/beetbox/beets

+.. _discussion board: https://github.com/beetbox/beets/discussions
+
 .. _documentation: https://beets.readthedocs.io/en/stable/

 .. _https://github.com/beetbox/beets/blob/master/test/test_template.py#l224: https://github.com/beetbox/beets/blob/master/test/test_template.py#L224
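The revised CONTRIBUTING example above switches the low-level query to a parameterized statement. Here is a minimal, self-contained sketch of why ``?`` placeholders are preferred, using the stdlib ``sqlite3`` module directly rather than beets' ``Transaction`` wrapper; the table layout is invented for illustration:

.. code-block:: python

   import sqlite3

   conn = sqlite3.connect(":memory:")
   conn.execute("CREATE TABLE items (id INTEGER PRIMARY KEY, album_id INTEGER, path TEXT)")
   conn.execute("INSERT INTO items (album_id, path) VALUES (?, ?)", (1, "/music/a.mp3"))

   album_id = 1
   # The driver binds album_id itself, so quoting bugs and SQL injection
   # are impossible no matter what the value contains.
   rows = conn.execute(
       "SELECT path FROM items WHERE album_id = ?", (album_id,)
   ).fetchall()
   print(rows)  # [('/music/a.mp3',)]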
@@ -17,7 +17,7 @@ Beets is the media library management system for obsessive music geeks.

 The purpose of beets is to get your music collection right once and for all. It
 catalogs your collection, automatically improving its metadata as it goes. It
-then provides a bouquet of tools for manipulating and accessing your music.
+then provides a suite of tools for manipulating and accessing your music.

 Here's an example of beets' brainy tag corrector doing its thing:
@@ -17,10 +17,25 @@ from sys import stderr

 import confuse

-__version__ = "2.3.1"
+from .util import deprecate_imports
+
+__version__ = "2.5.1"
 __author__ = "Adrian Sampson <adrian@radbox.org>"


+def __getattr__(name: str):
+    """Handle deprecated imports."""
+    return deprecate_imports(
+        old_module=__name__,
+        new_module_by_name={
+            "art": "beetsplug._utils",
+            "vfs": "beetsplug._utils",
+        },
+        name=name,
+        version="3.0.0",
+    )
+
+
 class IncludeLazyConfig(confuse.LazyConfig):
     """A version of Confuse's LazyConfig that also merges in data from
     YAML files specified in an `include` setting.

@@ -35,7 +50,7 @@ class IncludeLazyConfig(confuse.LazyConfig):
         except confuse.NotFoundError:
             pass
         except confuse.ConfigReadError as err:
-            stderr.write("configuration `import` failed: {}".format(err.reason))
+            stderr.write(f"configuration `import` failed: {err.reason}")


 config = IncludeLazyConfig("beets", __name__)
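The new ``__getattr__`` hook relies on PEP 562 (module-level ``__getattr__``): Python calls it only when a name is not found in the module, which makes it a natural place for a deprecation shim. A rough sketch of the mechanism with a hypothetical stand-in for ``deprecate_imports`` (the real helper lives in ``beets.util`` and its exact behavior may differ):

.. code-block:: python

   import importlib
   import warnings

   _MOVED = {"art": "beetsplug._utils", "vfs": "beetsplug._utils"}


   def __getattr__(name: str):
       # Invoked only after normal module attribute lookup fails.
       if new_module := _MOVED.get(name):
           warnings.warn(
               f"'{__name__}.{name}' moved to '{new_module}' and the old "
               "import path is scheduled for removal in 3.0.0.",
               DeprecationWarning,
               stacklevel=2,
           )
           return importlib.import_module(new_module)
       raise AttributeError(f"module {__name__!r} has no attribute {name!r}")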
@@ -261,7 +261,7 @@ def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]):
             continue

         for suffix in "year", "month", "day":
-            key = prefix + suffix
+            key = f"{prefix}{suffix}"
             value = getattr(album_info, key) or 0

             # If we don't even have a year, apply nothing.
@@ -78,10 +78,10 @@ def string_dist(str1: str | None, str2: str | None) -> float:
     # example, "the something" should be considered equal to
     # "something, the".
     for word in SD_END_WORDS:
-        if str1.endswith(", %s" % word):
-            str1 = "{} {}".format(word, str1[: -len(word) - 2])
-        if str2.endswith(", %s" % word):
-            str2 = "{} {}".format(word, str2[: -len(word) - 2])
+        if str1.endswith(f", {word}"):
+            str1 = f"{word} {str1[: -len(word) - 2]}"
+        if str2.endswith(f", {word}"):
+            str2 = f"{word} {str2[: -len(word) - 2]}"

     # Perform a couple of basic normalizing substitutions.
     for pat, repl in SD_REPLACE:

@@ -230,7 +230,7 @@ class Distance:
         """Adds all the distance penalties from `dist`."""
         if not isinstance(dist, Distance):
             raise ValueError(
-                "`dist` must be a Distance object, not {}".format(type(dist))
+                f"`dist` must be a Distance object, not {type(dist)}"
             )
         for key, penalties in dist._penalties.items():
             self._penalties.setdefault(key, []).extend(penalties)

@@ -345,6 +345,12 @@ class Distance:
             dist = string_dist(str1, str2)
         self.add(key, dist)

+    def add_data_source(self, before: str | None, after: str | None) -> None:
+        if before != after and (
+            before or len(metadata_plugins.find_metadata_source_plugins()) > 1
+        ):
+            self.add("data_source", metadata_plugins.get_penalty(after))
+

 @cache
 def get_track_length_grace() -> float:

@@ -408,8 +414,7 @@ def track_distance(
     if track_info.medium and item.disc:
         dist.add_expr("medium", item.disc != track_info.medium)

-    # Plugins.
     dist.update(metadata_plugins.track_distance(item, track_info))
+    dist.add_data_source(item.get("data_source"), track_info.data_source)

     return dist

@@ -444,7 +449,7 @@ def distance(
     # Preferred media options.
     media_patterns: Sequence[str] = preferred_config["media"].as_str_seq()
     options = [
-        re.compile(r"(\d+x)?(%s)" % pat, re.I) for pat in media_patterns
+        re.compile(rf"(\d+x)?({pat})", re.I) for pat in media_patterns
     ]
     if options:
         dist.add_priority("media", album_info.media, options)

@@ -525,7 +530,6 @@ def distance(
     for _ in range(len(items) - len(mapping)):
         dist.add("unmatched_tracks", 1.0)

-    # Plugins.
     dist.update(metadata_plugins.album_distance(items, album_info, mapping))
+    dist.add_data_source(likelies["data_source"], album_info.data_source)

     return dist
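The ``string_dist`` hunk above rewrites the ", the" normalization with f-strings without changing its behavior: a trailing end word is rotated to the front so "beatles, the" and "the beatles" compare as equal. A standalone sketch of that rotation (``SD_END_WORDS`` here is a local stand-in for the module constant):

.. code-block:: python

   SD_END_WORDS = ["the", "a", "an"]


   def rotate_end_word(s: str) -> str:
       for word in SD_END_WORDS:
           if s.endswith(f", {word}"):
               # Drop ", <word>" (len(word) + 2 characters) from the tail
               # and prepend the word instead.
               return f"{word} {s[: -len(word) - 2]}"
       return s


   assert rotate_end_word("beatles, the") == "the beatles"
   assert rotate_end_word("something, the") == "the something"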
@@ -16,236 +16,201 @@

 from __future__ import annotations

+from copy import deepcopy
 from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar

 from beets import logging
+from typing_extensions import Self

 if TYPE_CHECKING:
     from beets.library import Item

     from .distance import Distance

 log = logging.getLogger("beets")

 V = TypeVar("V")


 # Classes used to represent candidate options.
 class AttrDict(dict[str, V]):
-    """A dictionary that supports attribute ("dot") access, so `d.field`
-    is equivalent to `d['field']`.
-    """
+    """Mapping enabling attribute-style access to stored metadata values."""
+
+    def copy(self) -> Self:
+        return deepcopy(self)

     def __getattr__(self, attr: str) -> V:
         if attr in self:
             return self[attr]
-        else:
-            raise AttributeError
+        raise AttributeError(
+            f"'{self.__class__.__name__}' object has no attribute '{attr}'"
+        )

-    def __setattr__(self, key: str, value: V):
+    def __setattr__(self, key: str, value: V) -> None:
         self.__setitem__(key, value)

-    def __hash__(self):
+    def __hash__(self) -> int:  # type: ignore[override]
         return id(self)


-class AlbumInfo(AttrDict[Any]):
-    """Describes a canonical release that may be used to match a release
-    in the library. Consists of these data members:
-
-    - ``album``: the release title
-    - ``album_id``: MusicBrainz ID; UUID fragment only
-    - ``artist``: name of the release's primary artist
-    - ``artist_id``
-    - ``tracks``: list of TrackInfo objects making up the release
-
-    ``mediums`` along with the fields up through ``tracks`` are required.
-    The others are optional and may be None.
-    """
+class Info(AttrDict[Any]):
+    """Container for metadata about a musical entity."""
+
+    def __init__(
+        self,
+        album: str | None = None,
+        artist_credit: str | None = None,
+        artist_id: str | None = None,
+        artist: str | None = None,
+        artists_credit: list[str] | None = None,
+        artists_ids: list[str] | None = None,
+        artists: list[str] | None = None,
+        artist_sort: str | None = None,
+        artists_sort: list[str] | None = None,
+        data_source: str | None = None,
+        data_url: str | None = None,
+        genre: str | None = None,
+        media: str | None = None,
+        **kwargs,
+    ) -> None:
+        self.album = album
+        self.artist = artist
+        self.artist_credit = artist_credit
+        self.artist_id = artist_id
+        self.artists = artists or []
+        self.artists_credit = artists_credit or []
+        self.artists_ids = artists_ids or []
+        self.artist_sort = artist_sort
+        self.artists_sort = artists_sort or []
+        self.data_source = data_source
+        self.data_url = data_url
+        self.genre = genre
+        self.media = media
+        self.update(kwargs)
+
+
+class AlbumInfo(Info):
+    """Metadata snapshot representing a single album candidate.
+
+    Aggregates track entries and album-wide context gathered from an external
+    provider. Used during matching to evaluate similarity against a group of
+    user items, and later to drive tagging decisions once selected.
+    """

     # TYPING: are all of these correct? I've assumed optional strings
     def __init__(
         self,
         tracks: list[TrackInfo],
-        album: str | None = None,
-        album_id: str | None = None,
-        artist: str | None = None,
-        artist_id: str | None = None,
-        artists: list[str] | None = None,
-        artists_ids: list[str] | None = None,
-        asin: str | None = None,
-        albumtype: str | None = None,
-        albumtypes: list[str] | None = None,
-        va: bool = False,
-        year: int | None = None,
-        month: int | None = None,
-        day: int | None = None,
-        label: str | None = None,
-        barcode: str | None = None,
-        mediums: int | None = None,
-        artist_sort: str | None = None,
-        artists_sort: list[str] | None = None,
-        releasegroup_id: str | None = None,
-        release_group_title: str | None = None,
-        catalognum: str | None = None,
-        script: str | None = None,
-        language: str | None = None,
-        country: str | None = None,
-        style: str | None = None,
-        genre: str | None = None,
-        albumstatus: str | None = None,
-        media: str | None = None,
-        albumdisambig: str | None = None,
-        releasegroupdisambig: str | None = None,
-        artist_credit: str | None = None,
-        artists_credit: list[str] | None = None,
-        original_year: int | None = None,
-        original_month: int | None = None,
-        original_day: int | None = None,
-        data_source: str | None = None,
-        data_url: str | None = None,
-        discogs_albumid: str | None = None,
-        discogs_labelid: str | None = None,
-        discogs_artistid: str | None = None,
+        *,
+        album_id: str | None = None,
+        albumdisambig: str | None = None,
+        albumstatus: str | None = None,
+        albumtype: str | None = None,
+        albumtypes: list[str] | None = None,
+        asin: str | None = None,
+        barcode: str | None = None,
+        catalognum: str | None = None,
+        country: str | None = None,
+        day: int | None = None,
+        discogs_albumid: str | None = None,
+        discogs_artistid: str | None = None,
+        discogs_labelid: str | None = None,
+        label: str | None = None,
+        language: str | None = None,
+        mediums: int | None = None,
+        month: int | None = None,
+        original_day: int | None = None,
+        original_month: int | None = None,
+        original_year: int | None = None,
+        release_group_title: str | None = None,
+        releasegroup_id: str | None = None,
+        releasegroupdisambig: str | None = None,
+        script: str | None = None,
+        style: str | None = None,
+        va: bool = False,
+        year: int | None = None,
         **kwargs,
-    ):
-        self.album = album
-        self.album_id = album_id
-        self.artist = artist
-        self.artist_id = artist_id
-        self.artists = artists or []
-        self.artists_ids = artists_ids or []
-        self.tracks = tracks
-        self.asin = asin
-        self.albumtype = albumtype
-        self.albumtypes = albumtypes or []
-        self.va = va
-        self.year = year
-        self.month = month
-        self.day = day
-        self.label = label
-        self.barcode = barcode
-        self.mediums = mediums
-        self.artist_sort = artist_sort
-        self.artists_sort = artists_sort or []
-        self.releasegroup_id = releasegroup_id
-        self.release_group_title = release_group_title
-        self.catalognum = catalognum
-        self.script = script
-        self.language = language
-        self.country = country
-        self.style = style
-        self.genre = genre
-        self.albumstatus = albumstatus
-        self.media = media
-        self.albumdisambig = albumdisambig
-        self.releasegroupdisambig = releasegroupdisambig
-        self.artist_credit = artist_credit
-        self.artists_credit = artists_credit or []
-        self.original_year = original_year
-        self.original_month = original_month
-        self.original_day = original_day
-        self.data_source = data_source
-        self.data_url = data_url
-        self.discogs_albumid = discogs_albumid
-        self.discogs_labelid = discogs_labelid
-        self.discogs_artistid = discogs_artistid
-        self.update(kwargs)
-
-    def copy(self) -> AlbumInfo:
-        dupe = AlbumInfo([])
-        dupe.update(self)
-        dupe.tracks = [track.copy() for track in self.tracks]
-        return dupe
+    ) -> None:
+        self.tracks = tracks
+        self.album_id = album_id
+        self.albumdisambig = albumdisambig
+        self.albumstatus = albumstatus
+        self.albumtype = albumtype
+        self.albumtypes = albumtypes or []
+        self.asin = asin
+        self.barcode = barcode
+        self.catalognum = catalognum
+        self.country = country
+        self.day = day
+        self.discogs_albumid = discogs_albumid
+        self.discogs_artistid = discogs_artistid
+        self.discogs_labelid = discogs_labelid
+        self.label = label
+        self.language = language
+        self.mediums = mediums
+        self.month = month
+        self.original_day = original_day
+        self.original_month = original_month
+        self.original_year = original_year
+        self.release_group_title = release_group_title
+        self.releasegroup_id = releasegroup_id
+        self.releasegroupdisambig = releasegroupdisambig
+        self.script = script
+        self.style = style
+        self.va = va
+        self.year = year
+        super().__init__(**kwargs)


-class TrackInfo(AttrDict[Any]):
-    """Describes a canonical track present on a release. Appears as part
-    of an AlbumInfo's ``tracks`` list. Consists of these data members:
-
-    - ``title``: name of the track
-    - ``track_id``: MusicBrainz ID; UUID fragment only
-
-    Only ``title`` and ``track_id`` are required. The rest of the fields
-    may be None. The indices ``index``, ``medium``, and ``medium_index``
-    are all 1-based.
-    """
+class TrackInfo(Info):
+    """Metadata snapshot for a single track candidate.
+
+    Captures identifying details and creative credits used to compare against
+    a user's item. Instances often originate within an AlbumInfo but may also
+    stand alone for singleton matching.
+    """

     # TYPING: are all of these correct? I've assumed optional strings
     def __init__(
         self,
-        title: str | None = None,
-        track_id: str | None = None,
-        release_track_id: str | None = None,
-        artist: str | None = None,
-        artist_id: str | None = None,
-        artists: list[str] | None = None,
-        artists_ids: list[str] | None = None,
-        length: float | None = None,
-        index: int | None = None,
-        medium: int | None = None,
-        medium_index: int | None = None,
-        medium_total: int | None = None,
-        artist_sort: str | None = None,
-        artists_sort: list[str] | None = None,
-        disctitle: str | None = None,
-        artist_credit: str | None = None,
-        artists_credit: list[str] | None = None,
-        data_source: str | None = None,
-        data_url: str | None = None,
-        media: str | None = None,
-        lyricist: str | None = None,
-        composer: str | None = None,
-        composer_sort: str | None = None,
-        arranger: str | None = None,
-        track_alt: str | None = None,
-        work: str | None = None,
-        mb_workid: str | None = None,
-        work_disambig: str | None = None,
-        bpm: str | None = None,
-        initial_key: str | None = None,
-        genre: str | None = None,
-        album: str | None = None,
+        *,
+        arranger: str | None = None,
+        bpm: str | None = None,
+        composer: str | None = None,
+        composer_sort: str | None = None,
+        disctitle: str | None = None,
+        index: int | None = None,
+        initial_key: str | None = None,
+        length: float | None = None,
+        lyricist: str | None = None,
+        mb_workid: str | None = None,
+        medium: int | None = None,
+        medium_index: int | None = None,
+        medium_total: int | None = None,
+        release_track_id: str | None = None,
+        title: str | None = None,
+        track_alt: str | None = None,
+        track_id: str | None = None,
+        work: str | None = None,
+        work_disambig: str | None = None,
         **kwargs,
-    ):
-        self.title = title
-        self.track_id = track_id
-        self.release_track_id = release_track_id
-        self.artist = artist
-        self.artist_id = artist_id
-        self.artists = artists or []
-        self.artists_ids = artists_ids or []
-        self.length = length
-        self.index = index
-        self.media = media
-        self.medium = medium
-        self.medium_index = medium_index
-        self.medium_total = medium_total
-        self.artist_sort = artist_sort
-        self.artists_sort = artists_sort or []
-        self.disctitle = disctitle
-        self.artist_credit = artist_credit
-        self.artists_credit = artists_credit or []
-        self.data_source = data_source
-        self.data_url = data_url
-        self.lyricist = lyricist
-        self.composer = composer
-        self.composer_sort = composer_sort
-        self.arranger = arranger
-        self.track_alt = track_alt
-        self.work = work
-        self.mb_workid = mb_workid
-        self.work_disambig = work_disambig
-        self.bpm = bpm
-        self.initial_key = initial_key
-        self.genre = genre
-        self.album = album
-        self.update(kwargs)
-
-    def copy(self) -> TrackInfo:
-        dupe = TrackInfo()
-        dupe.update(self)
-        return dupe
+    ) -> None:
+        self.arranger = arranger
+        self.bpm = bpm
+        self.composer = composer
+        self.composer_sort = composer_sort
+        self.disctitle = disctitle
+        self.index = index
+        self.initial_key = initial_key
+        self.length = length
+        self.lyricist = lyricist
+        self.mb_workid = mb_workid
+        self.medium = medium
+        self.medium_index = medium_index
+        self.medium_total = medium_total
+        self.release_track_id = release_track_id
+        self.title = title
+        self.track_alt = track_alt
+        self.track_id = track_id
+        self.work = work
+        self.work_disambig = work_disambig
+        super().__init__(**kwargs)


 # Structures that compose all the information for a candidate match.
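For readers unfamiliar with ``AttrDict``, the refactor above keeps its contract intact: dot reads resolve to dict keys, attribute writes become dict entries, and ``copy()`` is now a deep copy. A standalone re-implementation of just that contract (not an import from beets):

.. code-block:: python

   from copy import deepcopy


   class AttrDict(dict):
       def __getattr__(self, attr):
           # Called only when normal attribute lookup fails.
           if attr in self:
               return self[attr]
           raise AttributeError(
               f"'{self.__class__.__name__}' object has no attribute '{attr}'"
           )

       def __setattr__(self, key, value):
           self[key] = value

       def copy(self):
           return deepcopy(self)


   d = AttrDict(title="Paranoid Android")
   d.artist = "Radiohead"           # attribute write becomes a dict entry
   assert d["artist"] == "Radiohead" and d.title == "Paranoid Android"

   dupe = d.copy()
   dupe.artist = "Someone Else"     # deep copy: the original is untouched
   assert d.artist == "Radiohead"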
@@ -118,7 +118,7 @@ def match_by_id(items: Iterable[Item]) -> AlbumInfo | None:
         log.debug("No album ID consensus.")
         return None
     # If all album IDs are equal, look up the album.
-    log.debug("Searching for discovered album ID: {0}", first)
+    log.debug("Searching for discovered album ID: {}", first)
     return metadata_plugins.album_for_id(first)

@@ -197,9 +197,7 @@ def _add_candidate(
     checking the track count, ordering the items, checking for
     duplicates, and calculating the distance.
     """
-    log.debug(
-        "Candidate: {0} - {1} ({2})", info.artist, info.album, info.album_id
-    )
+    log.debug("Candidate: {0.artist} - {0.album} ({0.album_id})", info)

     # Discard albums with zero tracks.
     if not info.tracks:

@@ -215,7 +213,7 @@ def _add_candidate(
     required_tags: Sequence[str] = config["match"]["required"].as_str_seq()
     for req_tag in required_tags:
         if getattr(info, req_tag) is None:
-            log.debug("Ignored. Missing required tag: {0}", req_tag)
+            log.debug("Ignored. Missing required tag: {}", req_tag)
             return

     # Find mapping between the items and the track info.

@@ -229,10 +227,10 @@ def _add_candidate(
     ignored_tags: Sequence[str] = config["match"]["ignored"].as_str_seq()
     for penalty in ignored_tags:
         if penalty in penalties:
-            log.debug("Ignored. Penalty: {0}", penalty)
+            log.debug("Ignored. Penalty: {}", penalty)
             return

-    log.debug("Success. Distance: {0}", dist)
+    log.debug("Success. Distance: {}", dist)
     results[info.album_id] = hooks.AlbumMatch(
         dist, info, mapping, extra_items, extra_tracks
     )

@@ -265,7 +263,7 @@ def tag_album(
     likelies, consensus = get_most_common_tags(items)
     cur_artist: str = likelies["artist"]
     cur_album: str = likelies["album"]
-    log.debug("Tagging {0} - {1}", cur_artist, cur_album)
+    log.debug("Tagging {} - {}", cur_artist, cur_album)

     # The output result, keys are the MB album ID.
     candidates: dict[Any, AlbumMatch] = {}

@@ -273,7 +271,7 @@ def tag_album(
     # Search by explicit ID.
     if search_ids:
         for search_id in search_ids:
-            log.debug("Searching for album ID: {0}", search_id)
+            log.debug("Searching for album ID: {}", search_id)
             if info := metadata_plugins.album_for_id(search_id):
                 _add_candidate(items, candidates, info)

@@ -283,7 +281,7 @@ def tag_album(
         if info := match_by_id(items):
             _add_candidate(items, candidates, info)
             rec = _recommendation(list(candidates.values()))
-            log.debug("Album ID match recommendation is {0}", rec)
+            log.debug("Album ID match recommendation is {}", rec)
             if candidates and not config["import"]["timid"]:
                 # If we have a very good MBID match, return immediately.
                 # Otherwise, this match will compete against metadata-based

@@ -300,7 +298,7 @@ def tag_album(
     if not (search_artist and search_album):
         # No explicit search terms -- use current metadata.
         search_artist, search_album = cur_artist, cur_album
-    log.debug("Search terms: {0} - {1}", search_artist, search_album)
+    log.debug("Search terms: {} - {}", search_artist, search_album)

     # Is this album likely to be a "various artist" release?
     va_likely = (

@@ -308,7 +306,7 @@ def tag_album(
         or (search_artist.lower() in VA_ARTISTS)
         or any(item.comp for item in items)
     )
-    log.debug("Album might be VA: {0}", va_likely)
+    log.debug("Album might be VA: {}", va_likely)

     # Get the results from the data sources.
     for matched_candidate in metadata_plugins.candidates(

@@ -316,7 +314,7 @@ def tag_album(
     ):
         _add_candidate(items, candidates, matched_candidate)

-    log.debug("Evaluating {0} candidates.", len(candidates))
+    log.debug("Evaluating {} candidates.", len(candidates))
     # Sort and get the recommendation.
     candidates_sorted = _sort_candidates(candidates.values())
     rec = _recommendation(candidates_sorted)

@@ -345,7 +343,7 @@ def tag_item(
     trackids = search_ids or [t for t in [item.mb_trackid] if t]
     if trackids:
         for trackid in trackids:
-            log.debug("Searching for track ID: {0}", trackid)
+            log.debug("Searching for track ID: {}", trackid)
             if info := metadata_plugins.track_for_id(trackid):
                 dist = track_distance(item, info, incl_artist=True)
                 candidates[info.track_id] = hooks.TrackMatch(dist, info)

@@ -369,7 +367,7 @@ def tag_item(
     # Search terms.
     search_artist = search_artist or item.artist
     search_title = search_title or item.title
-    log.debug("Item search terms: {0} - {1}", search_artist, search_title)
+    log.debug("Item search terms: {} - {}", search_artist, search_title)

     # Get and evaluate candidate metadata.
     for track_info in metadata_plugins.item_candidates(

@@ -379,7 +377,7 @@ def tag_item(
         candidates[track_info.track_id] = hooks.TrackMatch(dist, track_info)

     # Sort by distance and return with recommendation.
-    log.debug("Found {0} candidates.", len(candidates))
+    log.debug("Found {} candidates.", len(candidates))
     candidates_sorted = _sort_candidates(candidates.values())
     rec = _recommendation(candidates_sorted)
     return Proposal(candidates_sorted, rec)
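Several hunks above collapse multi-argument calls like ``log.debug("Candidate: {0} - {1} ({2})", info.artist, info.album, info.album_id)`` into a single-argument template. This works because beets' logging shim defers to ``str.format``, whose replacement fields support attribute access. A plain ``str.format`` sketch with a throwaway stand-in object:

.. code-block:: python

   class FakeInfo:
       artist = "Nirvana"
       album = "Nevermind"
       album_id = "1b022e01"


   info = FakeInfo()
   # One argument referenced three times, equivalent to passing
   # info.artist, info.album and info.album_id separately.
   print("Candidate: {0.artist} - {0.album} ({0.album_id})".format(info))
   # -> Candidate: Nirvana - Nevermind (1b022e01)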
@@ -127,19 +127,12 @@ ui:
         action_default: ['bold', 'cyan']
         action: ['bold', 'cyan']
         # New Colors
         text: ['normal']
         text_faint: ['faint']
-        import_path: ['bold', 'blue']
-        import_path_items: ['bold', 'blue']
-        added: ['green']
-        removed: ['red']
-        changed: ['yellow']
-        added_highlight: ['bold', 'green']
-        removed_highlight: ['bold', 'red']
-        changed_highlight: ['bold', 'yellow']
-        text_diff_added: ['bold', 'red']
+        text_diff_added: ['bold', 'green']
         text_diff_removed: ['bold', 'red']
         text_diff_changed: ['bold', 'red']
         action_description: ['white']
     import:
         indentation:

@@ -173,7 +166,7 @@ match:
         missing_tracks: medium
         unmatched_tracks: medium
     distance_weights:
-        source: 2.0
+        data_source: 2.0
         artist: 3.0
         album: 3.0
         media: 1.0
@@ -17,15 +17,17 @@
 from __future__ import annotations

 import contextlib
+import functools
 import os
 import re
 import sqlite3
+import sys
 import threading
 import time
 from abc import ABC
 from collections import defaultdict
 from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence
-from sqlite3 import Connection
+from sqlite3 import Connection, sqlite_version_info
 from typing import TYPE_CHECKING, Any, AnyStr, Callable, Generic

 from typing_extensions import TypeVar  # default value support

@@ -64,6 +66,16 @@ class DBAccessError(Exception):
     """


+class DBCustomFunctionError(Exception):
+    """A sqlite function registered by beets failed."""
+
+    def __init__(self):
+        super().__init__(
+            "beets defined SQLite function failed; "
+            "see the other errors above for details"
+        )
+
+
 class FormattedMapping(Mapping[str, str]):
     """A `dict`-like formatted view of a model.

@@ -390,9 +402,9 @@ class Model(ABC, Generic[D]):
         return obj

     def __repr__(self) -> str:
-        return "{}({})".format(
-            type(self).__name__,
-            ", ".join(f"{k}={v!r}" for k, v in dict(self).items()),
+        return (
+            f"{type(self).__name__}"
+            f"({', '.join(f'{k}={v!r}' for k, v in dict(self).items())})"
         )

     def clear_dirty(self):

@@ -409,9 +421,9 @@ class Model(ABC, Generic[D]):
         exception is raised otherwise.
         """
         if not self._db:
-            raise ValueError("{} has no database".format(type(self).__name__))
+            raise ValueError(f"{type(self).__name__} has no database")
         if need_id and not self.id:
-            raise ValueError("{} has no id".format(type(self).__name__))
+            raise ValueError(f"{type(self).__name__} has no id")

         return self._db

@@ -588,16 +600,14 @@ class Model(ABC, Generic[D]):
         for key in fields:
             if key != "id" and key in self._dirty:
                 self._dirty.remove(key)
-                assignments.append(key + "=?")
+                assignments.append(f"{key}=?")
                 value = self._type(key).to_sql(self[key])
                 subvars.append(value)

         with db.transaction() as tx:
             # Main table update.
             if assignments:
-                query = "UPDATE {} SET {} WHERE id=?".format(
-                    self._table, ",".join(assignments)
-                )
+                query = f"UPDATE {self._table} SET {','.join(assignments)} WHERE id=?"
                 subvars.append(self.id)
                 tx.mutate(query, subvars)

@@ -607,9 +617,9 @@ class Model(ABC, Generic[D]):
                 self._dirty.remove(key)
                 value = self._type(key).to_sql(value)
                 tx.mutate(
-                    "INSERT INTO {} "
+                    f"INSERT INTO {self._flex_table} "
                     "(entity_id, key, value) "
-                    "VALUES (?, ?, ?);".format(self._flex_table),
+                    "VALUES (?, ?, ?);",
                     (self.id, key, value),
                 )

@@ -930,10 +940,10 @@ class Transaction:

     def __exit__(
         self,
-        exc_type: type[Exception],
-        exc_value: Exception,
-        traceback: TracebackType,
-    ):
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: TracebackType | None,
+    ) -> bool | None:
         """Complete a transaction. This must be the most recently
         entered but not yet exited transaction. If it is the last active
         transaction, the database updates are committed.

@@ -949,6 +959,14 @@ class Transaction:
         self._mutated = False
         self.db._db_lock.release()

+        if (
+            isinstance(exc_value, sqlite3.OperationalError)
+            and exc_value.args[0] == "user-defined function raised exception"
+        ):
+            raise DBCustomFunctionError()
+
+        return None
+
     def query(
         self, statement: str, subvals: Sequence[SQLiteType] = ()
     ) -> list[sqlite3.Row]:

@@ -1009,6 +1027,13 @@ class Database:
                 "sqlite3 must be compiled with multi-threading support"
             )

+        # Print tracebacks for exceptions in user defined functions
+        # See also `self.add_functions` and `DBCustomFunctionError`.
+        #
+        # `if`: use feature detection because PyPy doesn't support this.
+        if hasattr(sqlite3, "enable_callback_tracebacks"):
+            sqlite3.enable_callback_tracebacks(True)
+
         self.path = path
         self.timeout = timeout

@@ -1104,9 +1129,16 @@ class Database:

             return bytestring

-        conn.create_function("regexp", 2, regexp)
-        conn.create_function("unidecode", 1, unidecode)
-        conn.create_function("bytelower", 1, bytelower)
+        create_function = conn.create_function
+        if sys.version_info >= (3, 8) and sqlite_version_info >= (3, 8, 3):
+            # Let sqlite make extra optimizations
+            create_function = functools.partial(
+                conn.create_function, deterministic=True
+            )
+
+        create_function("regexp", 2, regexp)
+        create_function("unidecode", 1, unidecode)
+        create_function("bytelower", 1, bytelower)

     def _close(self):
         """Close the all connections to the underlying SQLite database

@@ -1160,7 +1192,7 @@ class Database:
         """
         # Get current schema.
         with self.transaction() as tx:
-            rows = tx.query("PRAGMA table_info(%s)" % table)
+            rows = tx.query(f"PRAGMA table_info({table})")
         current_fields = {row[1] for row in rows}

         field_names = set(fields.keys())

@@ -1173,9 +1205,7 @@ class Database:
             columns = []
             for name, typ in fields.items():
                 columns.append(f"{name} {typ.sql}")
-            setup_sql = "CREATE TABLE {} ({});\n".format(
-                table, ", ".join(columns)
-            )
+            setup_sql = f"CREATE TABLE {table} ({', '.join(columns)});\n"

         else:
             # Table exists does not match the field set.

@@ -1183,8 +1213,8 @@ class Database:
             for name, typ in fields.items():
                 if name in current_fields:
                     continue
-                setup_sql += "ALTER TABLE {} ADD COLUMN {} {};\n".format(
-                    table, name, typ.sql
+                setup_sql += (
+                    f"ALTER TABLE {table} ADD COLUMN {name} {typ.sql};\n"
                 )

         with self.transaction() as tx:

@@ -1195,18 +1225,16 @@ class Database:
         for the given entity (if they don't exist).
         """
         with self.transaction() as tx:
-            tx.script(
-                """
-                CREATE TABLE IF NOT EXISTS {0} (
+            tx.script(f"""
+                CREATE TABLE IF NOT EXISTS {flex_table} (
                     id INTEGER PRIMARY KEY,
                     entity_id INTEGER,
                     key TEXT,
                     value TEXT,
                     UNIQUE(entity_id, key) ON CONFLICT REPLACE);
-                CREATE INDEX IF NOT EXISTS {0}_by_entity
-                    ON {0} (entity_id);
-                """.format(flex_table)
-            )
+                CREATE INDEX IF NOT EXISTS {flex_table}_by_entity
+                    ON {flex_table} (entity_id);
+                """)

     # Querying.
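Two of the db.py hunks lean on less common ``sqlite3`` features: ``enable_callback_tracebacks`` surfaces exceptions raised inside user-defined SQL functions (otherwise SQLite reports only a generic ``OperationalError``), and ``deterministic=True`` lets SQLite cache and optimize calls. A self-contained sketch of both, using the same version gate as the diff:

.. code-block:: python

   import sqlite3
   import sys
   from sqlite3 import sqlite_version_info

   # Without this, a failing UDF is reported only as
   # "user-defined function raised exception".
   if hasattr(sqlite3, "enable_callback_tracebacks"):
       sqlite3.enable_callback_tracebacks(True)

   conn = sqlite3.connect(":memory:")


   def bytelower(value):
       return value.lower() if value is not None else None


   # deterministic=True promises "same input, same output", which needs
   # Python 3.8+ and SQLite 3.8.3+; older combinations reject the keyword.
   if sys.version_info >= (3, 8) and sqlite_version_info >= (3, 8, 3):
       conn.create_function("bytelower", 1, bytelower, deterministic=True)
   else:
       conn.create_function("bytelower", 1, bytelower)

   print(conn.execute("SELECT bytelower('ABC')").fetchone())  # ('abc',)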
@@ -190,7 +190,7 @@ class MatchQuery(FieldQuery[AnySQLiteType]):
     """A query that looks for exact matches in an Model field."""

     def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
-        return self.field + " = ?", [self.pattern]
+        return f"{self.field} = ?", [self.pattern]

     @classmethod
     def value_match(cls, pattern: AnySQLiteType, value: Any) -> bool:

@@ -204,7 +204,7 @@ class NoneQuery(FieldQuery[None]):
         super().__init__(field, None, fast)

     def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
-        return self.field + " IS NULL", ()
+        return f"{self.field} IS NULL", ()

     def match(self, obj: Model) -> bool:
         return obj.get(self.field_name) is None

@@ -246,7 +246,7 @@ class StringQuery(StringFieldQuery[str]):
             .replace("%", "\\%")
             .replace("_", "\\_")
         )
-        clause = self.field + " like ? escape '\\'"
+        clause = f"{self.field} like ? escape '\\'"
         subvals = [search]
         return clause, subvals

@@ -264,8 +264,8 @@ class SubstringQuery(StringFieldQuery[str]):
             .replace("%", "\\%")
             .replace("_", "\\_")
         )
-        search = "%" + pattern + "%"
-        clause = self.field + " like ? escape '\\'"
+        search = f"%{pattern}%"
+        clause = f"{self.field} like ? escape '\\'"
         subvals = [search]
         return clause, subvals

@@ -471,11 +471,11 @@ class NumericQuery(FieldQuery[str]):

     def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
         if self.point is not None:
-            return self.field + "=?", (self.point,)
+            return f"{self.field}=?", (self.point,)
         else:
             if self.rangemin is not None and self.rangemax is not None:
                 return (
-                    "{0} >= ? AND {0} <= ?".format(self.field),
+                    f"{self.field} >= ? AND {self.field} <= ?",
                     (self.rangemin, self.rangemax),
                 )
             elif self.rangemin is not None:

@@ -549,9 +549,9 @@ class CollectionQuery(Query):
             if not subq_clause:
                 # Fall back to slow query.
                 return None, ()
-            clause_parts.append("(" + subq_clause + ")")
+            clause_parts.append(f"({subq_clause})")
             subvals += subq_subvals
-        clause = (" " + joiner + " ").join(clause_parts)
+        clause = f" {joiner} ".join(clause_parts)
         return clause, subvals

     def __repr__(self) -> str:

@@ -690,9 +690,7 @@ class Period:
         ("%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"),  # second
     )
     relative_units = {"y": 365, "m": 30, "w": 7, "d": 1}
-    relative_re = (
-        "(?P<sign>[+|-]?)(?P<quantity>[0-9]+)" + "(?P<timespan>[y|m|w|d])"
-    )
+    relative_re = "(?P<sign>[+|-]?)(?P<quantity>[0-9]+)(?P<timespan>[y|m|w|d])"

     def __init__(self, date: datetime, precision: str):
         """Create a period with the given date (a `datetime` object) and

@@ -800,9 +798,7 @@ class DateInterval:

     def __init__(self, start: datetime | None, end: datetime | None):
         if start is not None and end is not None and not start < end:
-            raise ValueError(
-                "start date {} is not before end date {}".format(start, end)
-            )
+            raise ValueError(f"start date {start} is not before end date {end}")
         self.start = start
         self.end = end

@@ -850,8 +846,6 @@ class DateQuery(FieldQuery[str]):
         date = datetime.fromtimestamp(timestamp)
         return self.interval.contains(date)

-    _clause_tmpl = "{0} {1} ?"
-
     def col_clause(self) -> tuple[str, Sequence[SQLiteType]]:
         clause_parts = []
         subvals = []

@@ -859,11 +853,11 @@ class DateQuery(FieldQuery[str]):
         # Convert the `datetime` objects to an integer number of seconds since
         # the (local) Unix epoch using `datetime.timestamp()`.
         if self.interval.start:
-            clause_parts.append(self._clause_tmpl.format(self.field, ">="))
+            clause_parts.append(f"{self.field} >= ?")
             subvals.append(int(self.interval.start.timestamp()))

         if self.interval.end:
-            clause_parts.append(self._clause_tmpl.format(self.field, "<"))
+            clause_parts.append(f"{self.field} < ?")
             subvals.append(int(self.interval.end.timestamp()))

         if clause_parts:

@@ -1074,9 +1068,9 @@ class FixedFieldSort(FieldSort):
         if self.case_insensitive:
             field = (
                 "(CASE "
-                "WHEN TYPEOF({0})='text' THEN LOWER({0}) "
-                "WHEN TYPEOF({0})='blob' THEN LOWER({0}) "
-                "ELSE {0} END)".format(self.field)
+                f"WHEN TYPEOF({self.field})='text' THEN LOWER({self.field}) "
+                f"WHEN TYPEOF({self.field})='blob' THEN LOWER({self.field}) "
+                f"ELSE {self.field} END)"
             )
         else:
             field = self.field
|
|||
self.digits = digits
|
||||
|
||||
def format(self, value: int | N) -> str:
|
||||
return "{0:0{1}d}".format(value or 0, self.digits)
|
||||
return f"{value or 0:0{self.digits}d}"
|
||||
|
||||
|
||||
class PaddedInt(BasePaddedInt[int]):
|
||||
|
|
@ -219,7 +219,7 @@ class ScaledInt(Integer):
|
|||
self.suffix = suffix
|
||||
|
||||
def format(self, value: int) -> str:
|
||||
return "{}{}".format((value or 0) // self.unit, self.suffix)
|
||||
return f"{(value or 0) // self.unit}{self.suffix}"
|
||||
|
||||
|
||||
class Id(NullInteger):
|
||||
|
|
@ -249,7 +249,7 @@ class BaseFloat(Type[float, N]):
|
|||
self.digits = digits
|
||||
|
||||
def format(self, value: float | N) -> str:
|
||||
return "{0:.{1}f}".format(value or 0, self.digits)
|
||||
return f"{value or 0:.{self.digits}f}"
|
||||
|
||||
|
||||
class Float(BaseFloat[float]):
|
||||
|
|
|
|||
|
|
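The three ``format()`` rewrites above all use nested replacement fields inside an f-string's format spec: the inner expression (e.g. ``self.digits``) is evaluated first and becomes part of the outer spec. The equivalences, checkable in a REPL:

.. code-block:: python

   value, digits = 7, 4
   assert "{0:0{1}d}".format(value, digits) == f"{value:0{digits}d}" == "0007"

   unit, suffix = 1024, "kB"
   assert f"{(2048 or 0) // unit}{suffix}" == "2kB"

   assert "{0:.{1}f}".format(3.14159, 2) == f"{3.14159:.2f}" == "3.14"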
@@ -150,7 +150,7 @@ class ImportSession:
         """Log a message about a given album to the importer log. The status
         should reflect the reason the album couldn't be tagged.
         """
-        self.logger.info("{0} {1}", status, displayable_path(paths))
+        self.logger.info("{} {}", status, displayable_path(paths))

     def log_choice(self, task: ImportTask, duplicate=False):
         """Logs the task's current choice if it should be logged. If

@@ -187,7 +187,7 @@ class ImportSession:

     def run(self):
         """Run the import task."""
-        self.logger.info("import started {0}", time.asctime())
+        self.logger.info("import started {}", time.asctime())
         self.set_config(config["import"])

         # Set up the pipeline.

@@ -297,7 +297,7 @@ class ImportSession:
         # Either accept immediately or prompt for input to decide.
         if self.want_resume is True or self.should_resume(toppath):
             log.warning(
-                "Resuming interrupted import of {0}",
+                "Resuming interrupted import of {}",
                 util.displayable_path(toppath),
             )
             self._is_resuming[toppath] = True
|
|||
skipped += task_factory.skipped
|
||||
|
||||
if not task_factory.imported:
|
||||
log.warning("No files imported from {0}", displayable_path(toppath))
|
||||
log.warning("No files imported from {}", displayable_path(toppath))
|
||||
|
||||
# Show skipped directories (due to incremental/resume).
|
||||
if skipped:
|
||||
log.info("Skipped {0} paths.", skipped)
|
||||
log.info("Skipped {} paths.", skipped)
|
||||
|
||||
|
||||
def query_tasks(session: ImportSession):
|
||||
|
|
@ -82,10 +82,7 @@ def query_tasks(session: ImportSession):
|
|||
# Search for albums.
|
||||
for album in session.lib.albums(session.query):
|
||||
log.debug(
|
||||
"yielding album {0}: {1} - {2}",
|
||||
album.id,
|
||||
album.albumartist,
|
||||
album.album,
|
||||
"yielding album {0.id}: {0.albumartist} - {0.album}", album
|
||||
)
|
||||
items = list(album.items())
|
||||
_freshen_items(items)
|
||||
|
|
@ -140,7 +137,7 @@ def lookup_candidates(session: ImportSession, task: ImportTask):
|
|||
return
|
||||
|
||||
plugins.send("import_task_start", session=session, task=task)
|
||||
log.debug("Looking up: {0}", displayable_path(task.paths))
|
||||
log.debug("Looking up: {}", displayable_path(task.paths))
|
||||
|
||||
# Restrict the initial lookup to IDs specified by the user via the -m
|
||||
# option. Currently all the IDs are passed onto the tasks directly.
|
||||
|
|
@ -259,11 +256,11 @@ def plugin_stage(
|
|||
def log_files(session: ImportSession, task: ImportTask):
|
||||
"""A coroutine (pipeline stage) to log each file to be imported."""
|
||||
if isinstance(task, SingletonImportTask):
|
||||
log.info("Singleton: {0}", displayable_path(task.item["path"]))
|
||||
log.info("Singleton: {}", displayable_path(task.item["path"]))
|
||||
elif task.items:
|
||||
log.info("Album: {0}", displayable_path(task.paths[0]))
|
||||
log.info("Album: {}", displayable_path(task.paths[0]))
|
||||
for item in task.items:
|
||||
log.info(" {0}", displayable_path(item["path"]))
|
||||
log.info(" {}", displayable_path(item["path"]))
|
||||
|
||||
|
||||
# --------------------------------- Consumer --------------------------------- #
|
||||
|
|
@ -341,9 +338,7 @@ def _resolve_duplicates(session: ImportSession, task: ImportTask):
|
|||
if task.choice_flag in (Action.ASIS, Action.APPLY, Action.RETAG):
|
||||
found_duplicates = task.find_duplicates(session.lib)
|
||||
if found_duplicates:
|
||||
log.debug(
|
||||
"found duplicates: {}".format([o.id for o in found_duplicates])
|
||||
)
|
||||
log.debug("found duplicates: {}", [o.id for o in found_duplicates])
|
||||
|
||||
# Get the default action to follow from config.
|
||||
duplicate_action = config["import"]["duplicate_action"].as_choice(
|
||||
|
|
@ -355,7 +350,7 @@ def _resolve_duplicates(session: ImportSession, task: ImportTask):
|
|||
"ask": "a",
|
||||
}
|
||||
)
|
||||
log.debug("default action for duplicates: {0}", duplicate_action)
|
||||
log.debug("default action for duplicates: {}", duplicate_action)
|
||||
|
||||
if duplicate_action == "s":
|
||||
# Skip new.
|
||||
|
|
|
|||
|
|
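Functions in this file, such as ``log_files`` above, run as stages of a producer/consumer pipeline. A minimal generator-based sketch of that shape (beets' real implementation, ``beets.util.pipeline``, adds threading and multiplexing on top; the paths below are invented):

.. code-block:: python

   def read_tasks():
       # Producer: yields one task per album directory.
       for path in ["/music/album1", "/music/album2"]:
           yield {"paths": [path]}


   def log_files(tasks):
       # Intermediate stage: observe each task, then pass it downstream.
       for task in tasks:
           print("Album:", task["paths"][0])
           yield task


   def consume(tasks):
       # Terminal stage: drain the pipeline.
       for _ in tasks:
           pass


   consume(log_files(read_tasks()))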
@@ -87,7 +87,7 @@ class ImportState:
             # unpickling, including ImportError. We use a catch-all
             # exception to avoid enumerating them all (the docs don't even have a
             # full list!).
-            log.debug("state file could not be read: {0}", exc)
+            log.debug("state file could not be read: {}", exc)

     def _save(self):
         try:

@@ -100,7 +100,7 @@ class ImportState:
                 f,
             )
         except OSError as exc:
-            log.error("state file could not be written: {0}", exc)
+            log.error("state file could not be written: {}", exc)

     # -------------------------------- Tagprogress ------------------------------- #
|
|||
# def extend_reimport_fresh_fields_item():
|
||||
# importer.REIMPORT_FRESH_FIELDS_ITEM.extend(['tidal_track_popularity']
|
||||
# )
|
||||
REIMPORT_FRESH_FIELDS_ALBUM = [
|
||||
REIMPORT_FRESH_FIELDS_ITEM = [
|
||||
"data_source",
|
||||
"bandcamp_album_id",
|
||||
"spotify_album_id",
|
||||
"deezer_album_id",
|
||||
"beatport_album_id",
|
||||
"tidal_album_id",
|
||||
"data_url",
|
||||
]
|
||||
REIMPORT_FRESH_FIELDS_ITEM = list(REIMPORT_FRESH_FIELDS_ALBUM)
|
||||
REIMPORT_FRESH_FIELDS_ALBUM = [*REIMPORT_FRESH_FIELDS_ITEM, "media"]
|
||||
|
||||
# Global logger.
|
||||
log = logging.getLogger("beets")
|
||||
|
|
@ -267,13 +268,11 @@ class ImportTask(BaseImportTask):
|
|||
|
||||
def remove_duplicates(self, lib: library.Library):
|
||||
duplicate_items = self.duplicate_items(lib)
|
||||
log.debug("removing {0} old duplicated items", len(duplicate_items))
|
||||
log.debug("removing {} old duplicated items", len(duplicate_items))
|
||||
for item in duplicate_items:
|
||||
item.remove()
|
||||
if lib.directory in util.ancestry(item.path):
|
||||
log.debug(
|
||||
"deleting duplicate {0}", util.displayable_path(item.path)
|
||||
)
|
||||
log.debug("deleting duplicate {.filepath}", item)
|
||||
util.remove(item.path)
|
||||
util.prune_dirs(os.path.dirname(item.path), lib.directory)
|
||||
|
||||
|
|
@ -285,10 +284,10 @@ class ImportTask(BaseImportTask):
|
|||
for field, view in config["import"]["set_fields"].items():
|
||||
value = str(view.get())
|
||||
log.debug(
|
||||
"Set field {1}={2} for {0}",
|
||||
util.displayable_path(self.paths),
|
||||
"Set field {}={} for {}",
|
||||
field,
|
||||
value,
|
||||
util.displayable_path(self.paths),
|
||||
)
|
||||
self.album.set_parse(field, format(self.album, value))
|
||||
for item in items:
|
||||
|
|
@ -554,12 +553,11 @@ class ImportTask(BaseImportTask):
|
|||
]
|
||||
if overwritten_fields:
|
||||
log.debug(
|
||||
"Reimported {} {}. Not preserving flexible attributes {}. "
|
||||
"Path: {}",
|
||||
"Reimported {0} {1.id}. Not preserving flexible attributes {2}. "
|
||||
"Path: {1.filepath}",
|
||||
noun,
|
||||
new_obj.id,
|
||||
new_obj,
|
||||
overwritten_fields,
|
||||
util.displayable_path(new_obj.path),
|
||||
)
|
||||
for key in overwritten_fields:
|
||||
del existing_fields[key]
|
||||
|
|
@ -578,17 +576,15 @@ class ImportTask(BaseImportTask):
|
|||
self.album.artpath = replaced_album.artpath
|
||||
self.album.store()
|
||||
log.debug(
|
||||
"Reimported album {}. Preserving attribute ['added']. "
|
||||
"Path: {}",
|
||||
self.album.id,
|
||||
util.displayable_path(self.album.path),
|
||||
"Reimported album {0.album.id}. Preserving attribute ['added']. "
|
||||
"Path: {0.album.filepath}",
|
||||
self,
|
||||
)
|
||||
log.debug(
|
||||
"Reimported album {}. Preserving flexible attributes {}. "
|
||||
"Path: {}",
|
||||
self.album.id,
|
||||
"Reimported album {0.album.id}. Preserving flexible"
|
||||
" attributes {1}. Path: {0.album.filepath}",
|
||||
self,
|
||||
list(album_fields.keys()),
|
||||
util.displayable_path(self.album.path),
|
||||
)
|
||||
|
||||
for item in self.imported_items():
|
||||
|
|
@ -597,21 +593,19 @@ class ImportTask(BaseImportTask):
|
|||
if dup_item.added and dup_item.added != item.added:
|
||||
item.added = dup_item.added
|
||||
log.debug(
|
||||
"Reimported item {}. Preserving attribute ['added']. "
|
||||
"Path: {}",
|
||||
item.id,
|
||||
util.displayable_path(item.path),
|
||||
"Reimported item {0.id}. Preserving attribute ['added']. "
|
||||
"Path: {0.filepath}",
|
||||
item,
|
||||
)
|
||||
item_fields = _reduce_and_log(
|
||||
item, dup_item._values_flex, REIMPORT_FRESH_FIELDS_ITEM
|
||||
)
|
||||
item.update(item_fields)
|
||||
log.debug(
|
||||
"Reimported item {}. Preserving flexible attributes {}. "
|
||||
"Path: {}",
|
||||
item.id,
|
||||
"Reimported item {0.id}. Preserving flexible attributes {1}. "
|
||||
"Path: {0.filepath}",
|
||||
item,
|
||||
list(item_fields.keys()),
|
||||
util.displayable_path(item.path),
|
||||
)
|
||||
item.store()
|
||||
|
||||
|
|
@ -621,14 +615,10 @@ class ImportTask(BaseImportTask):
|
|||
"""
|
||||
for item in self.imported_items():
|
||||
for dup_item in self.replaced_items[item]:
|
||||
log.debug(
|
||||
"Replacing item {0}: {1}",
|
||||
dup_item.id,
|
||||
util.displayable_path(item.path),
|
||||
)
|
||||
log.debug("Replacing item {.id}: {.filepath}", dup_item, item)
|
||||
dup_item.remove()
|
||||
log.debug(
|
||||
"{0} of {1} items replaced",
|
||||
"{} of {} items replaced",
|
||||
sum(bool(v) for v in self.replaced_items.values()),
|
||||
len(self.imported_items()),
|
||||
)
|
||||
|
|
@ -747,10 +737,10 @@ class SingletonImportTask(ImportTask):
|
|||
for field, view in config["import"]["set_fields"].items():
|
||||
value = str(view.get())
|
||||
log.debug(
|
||||
"Set field {1}={2} for {0}",
|
||||
util.displayable_path(self.paths),
|
||||
"Set field {}={} for {}",
|
||||
field,
|
||||
value,
|
||||
util.displayable_path(self.paths),
|
||||
)
|
||||
self.item.set_parse(field, format(self.item, value))
|
||||
self.item.store()
|
||||
|
|
@ -870,7 +860,7 @@ class ArchiveImportTask(SentinelImportTask):
|
|||
"""Removes the temporary directory the archive was extracted to."""
|
||||
if self.extracted and self.toppath:
|
||||
log.debug(
|
||||
"Removing extracted directory: {0}",
|
||||
"Removing extracted directory: {}",
|
||||
util.displayable_path(self.toppath),
|
||||
)
|
||||
shutil.rmtree(util.syspath(self.toppath))
|
||||
|
|
@ -1002,7 +992,7 @@ class ImportTaskFactory:
|
|||
"""Return a `SingletonImportTask` for the music file."""
|
||||
if self.session.already_imported(self.toppath, [path]):
|
||||
log.debug(
|
||||
"Skipping previously-imported path: {0}",
|
||||
"Skipping previously-imported path: {}",
|
||||
util.displayable_path(path),
|
||||
)
|
||||
self.skipped += 1
|
||||
|
|
@ -1026,7 +1016,7 @@ class ImportTaskFactory:
|
|||
|
||||
if self.session.already_imported(self.toppath, dirs):
|
||||
log.debug(
|
||||
"Skipping previously-imported path: {0}",
|
||||
"Skipping previously-imported path: {}",
|
||||
util.displayable_path(dirs),
|
||||
)
|
||||
self.skipped += 1
|
||||
|
|
@ -1063,19 +1053,17 @@ class ImportTaskFactory:
|
|||
)
|
||||
return
|
||||
|
||||
log.debug(
|
||||
"Extracting archive: {0}", util.displayable_path(self.toppath)
|
||||
)
|
||||
log.debug("Extracting archive: {}", util.displayable_path(self.toppath))
|
||||
archive_task = ArchiveImportTask(self.toppath)
|
||||
try:
|
||||
archive_task.extract()
|
||||
except Exception as exc:
|
||||
log.error("extraction failed: {0}", exc)
|
||||
log.error("extraction failed: {}", exc)
|
||||
return
|
||||
|
||||
# Now read albums from the extracted directory.
|
||||
self.toppath = archive_task.toppath
|
||||
log.debug("Archive extracted to: {0}", self.toppath)
|
||||
log.debug("Archive extracted to: {.toppath}", self)
|
||||
return archive_task
|
||||
|
||||
def read_item(self, path: util.PathBytes):
|
||||
|
|
@ -1091,10 +1079,10 @@ class ImportTaskFactory:
|
|||
# Silently ignore non-music files.
|
||||
pass
|
||||
elif isinstance(exc.reason, mediafile.UnreadableFileError):
|
||||
log.warning("unreadable file: {0}", util.displayable_path(path))
|
||||
log.warning("unreadable file: {}", util.displayable_path(path))
|
||||
else:
|
||||
log.error(
|
||||
"error reading {0}: {1}", util.displayable_path(path), exc
|
||||
"error reading {}: {}", util.displayable_path(path), exc
|
||||
)
|
||||
|
||||
|
||||
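
Many of the rewritten log calls above lean on `str.format`'s attribute-access syntax: because beets' logger formats messages with `str.format`, a placeholder such as `{.filepath}` or `{0.album.id}` performs an attribute lookup on a positional argument at formatting time. A minimal standalone sketch — the `Obj` class here is illustrative, not part of the patch:

    class Obj:
        filepath = "/music/song.flac"
        id = 42

    # "{.filepath}" is shorthand for "{0.filepath}": attribute lookup on argument 0.
    print("deleting duplicate {.filepath}".format(Obj()))  # deleting duplicate /music/song.flac
    print("item {0.id}: {0.filepath}".format(Obj()))       # item 42: /music/song.flac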

@@ -28,11 +28,11 @@ class ReadError(FileOperationError):
     """An error while reading a file (i.e. in `Item.read`)."""

     def __str__(self):
-        return "error reading " + str(super())
+        return f"error reading {super()}"


 class WriteError(FileOperationError):
     """An error while writing a file (i.e. in `Item.write`)."""

     def __str__(self):
-        return "error writing " + str(super())
+        return f"error writing {super()}"

@@ -425,7 +425,7 @@ class Album(LibModel):

             new_art = util.unique_path(new_art)
             log.debug(
-                "moving album art {0} to {1}",
+                "moving album art {} to {}",
                 util.displayable_path(old_art),
                 util.displayable_path(new_art),
             )
@@ -482,7 +482,7 @@ class Album(LibModel):
         """
         item = self.items().get()
         if not item:
-            raise ValueError("empty album for album id %d" % self.id)
+            raise ValueError(f"empty album for album id {self.id}")
         return os.path.dirname(item.path)

     def _albumtotal(self):
@@ -844,12 +844,9 @@ class Item(LibModel):
         # This must not use `with_album=True`, because that might access
         # the database. When debugging, that is not guaranteed to succeed, and
         # can even deadlock due to the database lock.
-        return "{}({})".format(
-            type(self).__name__,
-            ", ".join(
-                "{}={!r}".format(k, self[k])
-                for k in self.keys(with_album=False)
-            ),
-        )
+        return (
+            f"{type(self).__name__}"
+            f"({', '.join(f'{k}={self[k]!r}' for k in self.keys(with_album=False))})"
+        )

     def keys(self, computed=False, with_album=True):
@@ -995,7 +992,7 @@ class Item(LibModel):
             self.write(*args, **kwargs)
             return True
         except FileOperationError as exc:
-            log.error("{0}", exc)
+            log.error("{}", exc)
             return False

     def try_sync(self, write, move, with_album=True):
@@ -1015,10 +1012,7 @@ class Item(LibModel):
         if move:
             # Check whether this file is inside the library directory.
             if self._db and self._db.directory in util.ancestry(self.path):
-                log.debug(
-                    "moving {0} to synchronize path",
-                    util.displayable_path(self.path),
-                )
+                log.debug("moving {.filepath} to synchronize path", self)
                 self.move(with_album=with_album)
         self.store()

@@ -1090,7 +1084,7 @@ class Item(LibModel):
         try:
             return os.path.getsize(syspath(self.path))
         except (OSError, Exception) as exc:
-            log.warning("could not get filesize: {0}", exc)
+            log.warning("could not get filesize: {}", exc)
             return 0

     # Model methods.

@@ -20,6 +20,8 @@ use {}-style formatting and can interpolate keywords arguments to the logging
 calls (`debug`, `info`, etc).
 """

+from __future__ import annotations
+
 import threading
 from copy import copy
 from logging import (
@@ -32,8 +34,10 @@ from logging import (
     Handler,
     Logger,
     NullHandler,
+    RootLogger,
     StreamHandler,
 )
+from typing import TYPE_CHECKING, Any, Mapping, TypeVar, Union, overload

 __all__ = [
     "DEBUG",
@@ -49,8 +53,20 @@ __all__ = [
     "getLogger",
 ]

+if TYPE_CHECKING:
+    T = TypeVar("T")
+    from types import TracebackType
+
+    # see https://github.com/python/typeshed/blob/main/stdlib/logging/__init__.pyi
+    _SysExcInfoType = Union[
+        tuple[type[BaseException], BaseException, Union[TracebackType, None]],
+        tuple[None, None, None],
+    ]
+    _ExcInfoType = Union[None, bool, _SysExcInfoType, BaseException]
+    _ArgsType = Union[tuple[object, ...], Mapping[str, object]]
+

-def logsafe(val):
+def _logsafe(val: T) -> str | T:
     """Coerce `bytes` to `str` to avoid crashes solely due to logging.

     This is particularly relevant for bytestring paths. Much of our code
@@ -83,40 +99,45 @@ class StrFormatLogger(Logger):
     """

     class _LogMessage:
-        def __init__(self, msg, args, kwargs):
+        def __init__(
+            self,
+            msg: str,
+            args: _ArgsType,
+            kwargs: dict[str, Any],
+        ):
             self.msg = msg
             self.args = args
             self.kwargs = kwargs

         def __str__(self):
-            args = [logsafe(a) for a in self.args]
-            kwargs = {k: logsafe(v) for (k, v) in self.kwargs.items()}
+            args = [_logsafe(a) for a in self.args]
+            kwargs = {k: _logsafe(v) for (k, v) in self.kwargs.items()}
             return self.msg.format(*args, **kwargs)

     def _log(
         self,
-        level,
-        msg,
-        args,
-        exc_info=None,
-        extra=None,
-        stack_info=False,
+        level: int,
+        msg: object,
+        args: _ArgsType,
+        exc_info: _ExcInfoType = None,
+        extra: Mapping[str, Any] | None = None,
+        stack_info: bool = False,
+        stacklevel: int = 1,
         **kwargs,
     ):
         """Log msg.format(*args, **kwargs)"""
-        m = self._LogMessage(msg, args, kwargs)
-
-        stacklevel = kwargs.pop("stacklevel", 1)
-        stacklevel = {"stacklevel": stacklevel}
+        if isinstance(msg, str):
+            msg = self._LogMessage(msg, args, kwargs)

         return super()._log(
             level,
-            m,
+            msg,
             (),
             exc_info=exc_info,
             extra=extra,
             stack_info=stack_info,
-            **stacklevel,
+            stacklevel=stacklevel,
         )
@@ -156,9 +177,12 @@ my_manager = copy(Logger.manager)
 my_manager.loggerClass = BeetsLogger


-# Override the `getLogger` to use our machinery.
-def getLogger(name=None):  # noqa
+@overload
+def getLogger(name: str) -> BeetsLogger: ...
+@overload
+def getLogger(name: None = ...) -> RootLogger: ...
+def getLogger(name=None) -> BeetsLogger | RootLogger:  # noqa: N802
     if name:
-        return my_manager.getLogger(name)
+        return my_manager.getLogger(name)  # type: ignore[return-value]
     else:
         return Logger.root
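
For context on these logging hunks: `StrFormatLogger` defers `str.format` interpolation until a handler actually emits the record, and every argument passes through `_logsafe` so stray `bytes` values (typically paths) cannot crash a log call. A minimal usage sketch, assuming the logger is obtained through this module's own `getLogger`:

    from beets import logging

    log = logging.getLogger("beets")
    # Positional and keyword placeholders are formatted with str.format,
    # and only if a handler actually emits the record.
    log.debug("moving {} to {dest}", b"/old/path", dest="/new/path")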

@@ -8,11 +8,12 @@ implemented as plugins.
 from __future__ import annotations

 import abc
-import inspect
 import re
-import warnings
 from functools import cache, cached_property
 from typing import TYPE_CHECKING, Generic, Literal, Sequence, TypedDict, TypeVar

+import unidecode
+from confuse import NotFoundError
 from typing_extensions import NotRequired

 from beets.util import cached_classproperty
@@ -23,36 +24,14 @@ from .plugins import BeetsPlugin, find_plugins, notify_info_yielded, send
 if TYPE_CHECKING:
     from collections.abc import Iterable

-    from confuse import ConfigView
-
     from .autotag import Distance
     from .autotag.hooks import AlbumInfo, Item, TrackInfo


 @cache
 def find_metadata_source_plugins() -> list[MetadataSourcePlugin]:
-    """Returns a list of MetadataSourcePlugin subclass instances
-
-    Resolved from all currently loaded beets plugins.
-    """
-    all_plugins = find_plugins()
-    metadata_plugins: list[MetadataSourcePlugin | BeetsPlugin] = []
-    for plugin in all_plugins:
-        if isinstance(plugin, MetadataSourcePlugin):
-            metadata_plugins.append(plugin)
-        elif hasattr(plugin, "data_source"):
-            # TODO: Remove this in the future major release, v3.0.0
-            warnings.warn(
-                f"{plugin.__class__.__name__} is used as a legacy metadata source. "
-                "It should extend MetadataSourcePlugin instead of BeetsPlugin. "
-                "Support for this will be removed in the v3.0.0 release!",
-                DeprecationWarning,
-                stacklevel=2,
-            )
-            metadata_plugins.append(plugin)
-
-    # typeignore: BeetsPlugin is not a MetadataSourcePlugin (legacy support)
-    return metadata_plugins  # type: ignore[return-value]
+    """Return a list of all loaded metadata source plugins."""
+    # TODO: Make this an isinstance(MetadataSourcePlugin, ...) check in v3.0.0
+    return [p for p in find_plugins() if hasattr(p, "data_source")]  # type: ignore[misc]


 @notify_info_yielded("albuminfo_received")
@@ -95,46 +74,17 @@ def track_for_id(_id: str) -> TrackInfo | None:
     return None


-def track_distance(item: Item, info: TrackInfo) -> Distance:
-    """Returns the track distance for an item and trackinfo.
-
-    Returns a Distance object is populated by all metadata source plugins
-    that implement the :py:meth:`MetadataSourcePlugin.track_distance` method.
-    """
-    from beets.autotag.distance import Distance
-
-    dist = Distance()
-    for plugin in find_metadata_source_plugins():
-        dist.update(plugin.track_distance(item, info))
-    return dist
-
-
-def album_distance(
-    items: Sequence[Item],
-    album_info: AlbumInfo,
-    mapping: dict[Item, TrackInfo],
-) -> Distance:
-    """Returns the album distance calculated by plugins."""
-    from beets.autotag.distance import Distance
-
-    dist = Distance()
-    for plugin in find_metadata_source_plugins():
-        dist.update(plugin.album_distance(items, album_info, mapping))
-    return dist
-
-
-def _get_distance(
-    config: ConfigView, data_source: str, info: AlbumInfo | TrackInfo
-) -> Distance:
-    """Returns the ``data_source`` weight and the maximum source weight
-    for albums or individual tracks.
-    """
-    from beets.autotag.distance import Distance
-
-    dist = Distance()
-    if info.data_source == data_source:
-        dist.add("source", config["source_weight"].as_number())
-    return dist
+@cache
+def get_penalty(data_source: str | None) -> float:
+    """Get the penalty value for the given data source."""
+    return next(
+        (
+            p.data_source_mismatch_penalty
+            for p in find_metadata_source_plugins()
+            if p.data_source == data_source
+        ),
+        MetadataSourcePlugin.DEFAULT_DATA_SOURCE_MISMATCH_PENALTY,
+    )


 class MetadataSourcePlugin(BeetsPlugin, metaclass=abc.ABCMeta):
@@ -145,9 +95,31 @@ class MetadataSourcePlugin(BeetsPlugin, metaclass=abc.ABCMeta):
     and tracks, and to retrieve album and track information by ID.
     """

+    DEFAULT_DATA_SOURCE_MISMATCH_PENALTY = 0.5
+
+    @cached_classproperty
+    def data_source(cls) -> str:
+        """The data source name for this plugin.
+
+        This is inferred from the plugin name.
+        """
+        return cls.__name__.replace("Plugin", "")  # type: ignore[attr-defined]
+
+    @cached_property
+    def data_source_mismatch_penalty(self) -> float:
+        try:
+            return self.config["source_weight"].as_number()
+        except NotFoundError:
+            return self.config["data_source_mismatch_penalty"].as_number()
+
     def __init__(self, *args, **kwargs) -> None:
         super().__init__(*args, **kwargs)
-        self.config.add({"source_weight": 0.5})
+        self.config.add(
+            {
+                "search_limit": 5,
+                "data_source_mismatch_penalty": self.DEFAULT_DATA_SOURCE_MISMATCH_PENALTY,  # noqa: E501
+            }
+        )

     @abc.abstractmethod
     def album_for_id(self, album_id: str) -> AlbumInfo | None:
@@ -219,35 +191,6 @@ class MetadataSourcePlugin(BeetsPlugin, metaclass=abc.ABCMeta):

         return (self.track_for_id(id) for id in ids)

-    def album_distance(
-        self,
-        items: Sequence[Item],
-        album_info: AlbumInfo,
-        mapping: dict[Item, TrackInfo],
-    ) -> Distance:
-        """Calculate the distance for an album based on its items and album info."""
-        return _get_distance(
-            data_source=self.data_source, info=album_info, config=self.config
-        )
-
-    def track_distance(
-        self,
-        item: Item,
-        info: TrackInfo,
-    ) -> Distance:
-        """Calculate the distance for a track based on its item and track info."""
-        return _get_distance(
-            data_source=self.data_source, info=info, config=self.config
-        )
-
-    @cached_classproperty
-    def data_source(cls) -> str:
-        """The data source name for this plugin.
-
-        This is inferred from the plugin name.
-        """
-        return cls.__name__.replace("Plugin", "")  # type: ignore[attr-defined]
-
     def _extract_id(self, url: str) -> str | None:
         """Extract an ID from a URL for this metadata source plugin.

@@ -266,10 +209,9 @@ class MetadataSourcePlugin(BeetsPlugin, metaclass=abc.ABCMeta):
         """Returns an artist string (all artists) and an artist_id (the main
         artist) for a list of artist object dicts.

-        For each artist, this function moves articles (such as 'a', 'an',
-        and 'the') to the front and strips trailing disambiguation numbers. It
-        returns a tuple containing the comma-separated string of all
-        normalized artists and the ``id`` of the main/first artist.
+        For each artist, this function moves articles (such as 'a', 'an', and 'the')
+        to the front. It returns a tuple containing the comma-separated string
+        of all normalized artists and the ``id`` of the main/first artist.
         Alternatively a keyword can be used to combine artists together into a
         single string by passing the join_key argument.

@@ -293,8 +235,6 @@ class MetadataSourcePlugin(BeetsPlugin, metaclass=abc.ABCMeta):
             if not artist_id:
                 artist_id = artist[id_key]
             name = artist[name_key]
-            # Strip disambiguation number.
-            name = re.sub(r" \(\d+\)$", "", name)
             # Move articles to the front.
             name = re.sub(r"^(.*?), (a|an|the)$", r"\2 \1", name, flags=re.I)
             # Use a join keyword if requested and available.
@@ -334,18 +274,26 @@ class SearchApiMetadataSourcePlugin(
     of identifiers for the requested type (album or track).
     """

+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+        self.config.add(
+            {
+                "search_query_ascii": False,
+            }
+        )
+
     @abc.abstractmethod
     def _search_api(
         self,
         query_type: Literal["album", "track"],
         filters: SearchFilter,
-        keywords: str = "",
+        query_string: str = "",
     ) -> Sequence[R]:
         """Perform a search on the API.

         :param query_type: The type of query to perform.
         :param filters: A dictionary of filters to apply to the search.
-        :param keywords: Additional keywords to include in the search.
+        :param query_string: Additional query to include in the search.

         Should return a list of identifiers for the requested type (album or track).
         """
@@ -358,7 +306,9 @@ class SearchApiMetadataSourcePlugin(
         album: str,
         va_likely: bool,
     ) -> Iterable[AlbumInfo]:
-        query_filters: SearchFilter = {"album": album}
+        query_filters: SearchFilter = {}
+        if album:
+            query_filters["album"] = album
         if not va_likely:
             query_filters["artist"] = artist

@@ -373,7 +323,9 @@ class SearchApiMetadataSourcePlugin(
     def item_candidates(
         self, item: Item, artist: str, title: str
     ) -> Iterable[TrackInfo]:
-        results = self._search_api("track", {"artist": artist}, keywords=title)
+        results = self._search_api(
+            "track", {"artist": artist}, query_string=title
+        )
         if not results:
             return []

@@ -382,12 +334,26 @@ class SearchApiMetadataSourcePlugin(
             self.tracks_for_ids([result["id"] for result in results if result]),
         )

-
-# Dynamically copy methods to BeetsPlugin for legacy support
-# TODO: Remove this in the future major release, v3.0.0
-for name, method in inspect.getmembers(
-    MetadataSourcePlugin, predicate=inspect.isfunction
-):
-    if not hasattr(BeetsPlugin, name):
-        setattr(BeetsPlugin, name, method)
+    def _construct_search_query(
+        self, filters: SearchFilter, query_string: str
+    ) -> str:
+        """Construct a query string with the specified filters and keywords to
+        be provided to the spotify (or similar) search API.
+
+        The returned format was initially designed for spotify's search API but
+        we found is also useful with other APIs that support similar query structures.
+        see `spotify <https://developer.spotify.com/documentation/web-api/reference/search>`_
+        and `deezer <https://developers.deezer.com/api/search>`_.
+
+        :param filters: Field filters to apply.
+        :param query_string: Query keywords to use.
+        :return: Query string to be provided to the search API.
+        """
+        components = [query_string, *(f"{k}:'{v}'" for k, v in filters.items())]
+        query = " ".join(filter(None, components))
+
+        if self.config["search_query_ascii"].get():
+            query = unidecode.unidecode(query)
+
+        return query
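
Taken together, these hunks mean a source plugin now only subclasses `MetadataSourcePlugin` and inherits the `data_source` name (derived from the class name) and the mismatch-penalty configuration. A rough sketch of a minimal subclass under the new interface — the `ExamplePlugin` name and its stub return values are illustrative, and the exact abstract-method set is inferred from the hunks above:

    from beets.metadata_plugins import MetadataSourcePlugin

    class ExamplePlugin(MetadataSourcePlugin):
        # data_source is inferred as "Example" from the class name.

        def album_for_id(self, album_id):
            return None  # fetch an AlbumInfo by ID from the backend here

        def track_for_id(self, track_id):
            return None  # fetch a TrackInfo by ID from the backend here

        def candidates(self, items, artist, album, va_likely):
            return []  # yield AlbumInfo candidates for a tagging query

        def item_candidates(self, item, artist, title):
            return []  # yield TrackInfo candidates for a singleton query

For search-backed sources, the `_construct_search_query` helper shown above would turn the filters `{"artist": "Miles Davis"}` plus the keywords `So What` into the query string `So What artist:'Miles Davis'`.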

126 beets/plugins.py

@@ -20,8 +20,10 @@ import abc
 import inspect
 import re
 import sys
+import warnings
 from collections import defaultdict
-from functools import wraps
+from functools import cached_property, wraps
+from importlib import import_module
 from pathlib import Path
 from types import GenericAlias
 from typing import TYPE_CHECKING, Any, ClassVar, Literal, TypeVar
@@ -130,9 +132,9 @@ class PluginLogFilter(logging.Filter):
     def filter(self, record):
         if hasattr(record.msg, "msg") and isinstance(record.msg.msg, str):
             # A _LogMessage from our hacked-up Logging replacement.
-            record.msg.msg = self.prefix + record.msg.msg
+            record.msg.msg = f"{self.prefix}{record.msg.msg}"
         elif isinstance(record.msg, str):
-            record.msg = self.prefix + record.msg
+            record.msg = f"{self.prefix}{record.msg}"
         return True


@@ -158,6 +160,59 @@ class BeetsPlugin(metaclass=abc.ABCMeta):
     early_import_stages: list[ImportStageFunc]
     import_stages: list[ImportStageFunc]

+    def __init_subclass__(cls) -> None:
+        """Enable legacy metadata-source plugins to work with the new interface.
+
+        When a plugin subclass of BeetsPlugin defines a `data_source` attribute
+        but does not inherit from MetadataSourcePlugin, this hook:
+
+        1. Skips abstract classes.
+        2. Warns that the class should extend MetadataSourcePlugin (deprecation).
+        3. Copies any nonabstract methods from MetadataSourcePlugin onto the
+           subclass to provide the full plugin API.
+
+        This compatibility layer will be removed in the v3.0.0 release.
+        """
+        # TODO: Remove in v3.0.0
+        if inspect.isabstract(cls):
+            return
+
+        from beets.metadata_plugins import MetadataSourcePlugin
+
+        if issubclass(cls, MetadataSourcePlugin) or not hasattr(
+            cls, "data_source"
+        ):
+            return
+
+        warnings.warn(
+            f"{cls.__name__} is used as a legacy metadata source. "
+            "It should extend MetadataSourcePlugin instead of BeetsPlugin. "
+            "Support for this will be removed in the v3.0.0 release!",
+            DeprecationWarning,
+            stacklevel=3,
+        )
+
+        method: property | cached_property[Any] | Callable[..., Any]
+        for name, method in inspect.getmembers(
+            MetadataSourcePlugin,
+            predicate=lambda f: (  # type: ignore[arg-type]
+                (
+                    isinstance(f, (property, cached_property))
+                    and not hasattr(
+                        BeetsPlugin,
+                        getattr(f, "attrname", None) or f.fget.__name__,  # type: ignore[union-attr]
+                    )
+                )
+                or (
+                    inspect.isfunction(f)
+                    and f.__name__
+                    and not getattr(f, "__isabstractmethod__", False)
+                    and not hasattr(BeetsPlugin, f.__name__)
+                )
+            ),
+        ):
+            setattr(cls, name, method)
+
     def __init__(self, name: str | None = None):
         """Perform one-time plugin setup."""

@@ -181,6 +236,37 @@ class BeetsPlugin(metaclass=abc.ABCMeta):
         if not any(isinstance(f, PluginLogFilter) for f in self._log.filters):
             self._log.addFilter(PluginLogFilter(self))

+        # In order to verify the config we need to make sure the plugin is fully
+        # configured (plugins usually add the default configuration *after*
+        # calling super().__init__()).
+        self.register_listener("pluginload", self._verify_config)
+
+    def _verify_config(self, *_, **__) -> None:
+        """Verify plugin configuration.
+
+        If deprecated 'source_weight' option is explicitly set by the user, they
+        will see a warning in the logs. Otherwise, this must be configured by
+        a third party plugin, thus we raise a deprecation warning which won't be
+        shown to user but will be visible to plugin developers.
+        """
+        # TODO: Remove in v3.0.0
+        if (
+            not hasattr(self, "data_source")
+            or "source_weight" not in self.config
+        ):
+            return
+
+        message = (
+            "'source_weight' configuration option is deprecated and will be"
+            " removed in v3.0.0. Use 'data_source_mismatch_penalty' instead"
+        )
+        for source in self.config.root().sources:
+            if "source_weight" in (source.get(self.name) or {}):
+                if source.filename:  # user config
+                    self._log.warning(message)
+                else:  # 3rd-party plugin config
+                    warnings.warn(message, DeprecationWarning, stacklevel=0)
+
     def commands(self) -> Sequence[Subcommand]:
         """Should return a list of beets.ui.Subcommand objects for
         commands that should be added to beets' CLI.
@@ -347,14 +433,20 @@ def _get_plugin(name: str) -> BeetsPlugin | None:
     Attempts to import the plugin module, locate the appropriate plugin class
     within it, and return an instance. Handles import failures gracefully and
     logs warnings for missing plugins or loading errors.

+    Note we load the *last* plugin class found in the plugin namespace. This
+    allows plugins to define helper classes that inherit from BeetsPlugin
+    without those being loaded as the main plugin class.
+
     Returns None if the plugin could not be loaded for any reason.
     """
     try:
         try:
-            namespace = __import__(f"{PLUGIN_NAMESPACE}.{name}", None, None)
+            namespace = import_module(f"{PLUGIN_NAMESPACE}.{name}")
         except Exception as exc:
             raise PluginImportError(name) from exc

-        for obj in getattr(namespace, name).__dict__.values():
+        for obj in reversed(namespace.__dict__.values()):
             if (
                 inspect.isclass(obj)
                 and not isinstance(
@@ -363,6 +455,12 @@ def _get_plugin(name: str) -> BeetsPlugin | None:
                 and issubclass(obj, BeetsPlugin)
                 and obj != BeetsPlugin
                 and not inspect.isabstract(obj)
+                # Only consider this plugin's module or submodules to avoid
+                # conflicts when plugins import other BeetsPlugin classes
+                and (
+                    obj.__module__ == namespace.__name__
+                    or obj.__module__.startswith(f"{namespace.__name__}.")
+                )
             ):
                 return obj()

@@ -384,7 +482,7 @@ def load_plugins() -> None:
     """
     if not _instances:
         names = get_plugin_names()
-        log.info("Loading plugins: {}", ", ".join(sorted(names)))
+        log.debug("Loading plugins: {}", ", ".join(sorted(names)))
         _instances.extend(filter(None, map(_get_plugin, names)))

     send("pluginload")
@@ -424,9 +522,9 @@ def types(model_cls: type[AnyModel]) -> dict[str, Type]:
     for field in plugin_types:
         if field in types and plugin_types[field] != types[field]:
             raise PluginConflictError(
-                "Plugin {} defines flexible field {} "
+                f"Plugin {plugin.name} defines flexible field {field} "
                 "which has already been defined with "
-                "another type.".format(plugin.name, field)
+                "another type."
             )
     types.update(plugin_types)
     return types
@@ -543,7 +641,7 @@ def send(event: EventType, **arguments: Any) -> list[Any]:

     Return a list of non-None values returned from the handlers.
     """
-    log.debug("Sending event: {0}", event)
+    log.debug("Sending event: {}", event)
     return [
         r
         for handler in BeetsPlugin.listeners[event]
@@ -551,17 +649,21 @@ def send(event: EventType, **arguments: Any) -> list[Any]:
     ]


-def feat_tokens(for_artist: bool = True) -> str:
+def feat_tokens(
+    for_artist: bool = True, custom_words: list[str] | None = None
+) -> str:
     """Return a regular expression that matches phrases like "featuring"
     that separate a main artist or a song title from secondary artists.
     The `for_artist` option determines whether the regex should be
     suitable for matching artist fields (the default) or title fields.
     """
     feat_words = ["ft", "featuring", "feat", "feat.", "ft."]
+    if isinstance(custom_words, list):
+        feat_words += custom_words
     if for_artist:
         feat_words += ["with", "vs", "and", "con", "&"]
-    return r"(?<=[\s(\[])(?:{})(?=\s)".format(
-        "|".join(re.escape(x) for x in feat_words)
+    return (
+        rf"(?<=[\s(\[])(?:{'|'.join(re.escape(x) for x in feat_words)})(?=\s)"
     )
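
The `custom_words` parameter added to `feat_tokens` lets callers extend the built-in separator words; the returned pattern still requires the token to be preceded by whitespace (or an opening bracket) and followed by whitespace. A small usage sketch:

    import re

    from beets.plugins import feat_tokens

    # Split an artist string on "feat."-style separators; the token itself
    # is consumed, the surrounding whitespace is kept.
    re.split(feat_tokens(), "Main Artist feat. Guest")
    # -> ['Main Artist ', ' Guest']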

@@ -153,7 +153,7 @@ class DummyIn:
         self.out = out

     def add(self, s):
-        self.buf.append(s + "\n")
+        self.buf.append(f"{s}\n")

     def close(self):
         pass

@@ -58,7 +58,6 @@ from beets.ui.commands import TerminalImportSession
 from beets.util import (
     MoveOperation,
     bytestring_path,
-    cached_classproperty,
     clean_module_tempdir,
     syspath,
 )
@@ -267,7 +266,7 @@ class TestHelper(ConfigMixin):
         The item is attached to the database from `self.lib`.
         """
         values_ = {
-            "title": "t\u00eftle {0}",
+            "title": "t\u00eftle {}",
            "artist": "the \u00e4rtist",
            "album": "the \u00e4lbum",
            "track": 1,
@@ -278,7 +277,7 @@ class TestHelper(ConfigMixin):
             values_["db"] = self.lib
         item = Item(**values_)
         if "path" not in values:
-            item["path"] = "audio." + item["format"].lower()
+            item["path"] = f"audio.{item['format'].lower()}"
         # mtime needs to be set last since other assignments reset it.
         item.mtime = 12345
         return item
@@ -310,7 +309,7 @@ class TestHelper(ConfigMixin):
         item = self.create_item(**values)
         extension = item["format"].lower()
         item["path"] = os.path.join(
-            _common.RSRC, util.bytestring_path("min." + extension)
+            _common.RSRC, util.bytestring_path(f"min.{extension}")
         )
         item.add(self.lib)
         item.move(operation=MoveOperation.COPY)
@@ -325,7 +324,7 @@ class TestHelper(ConfigMixin):
         """Add a number of items with files to the database."""
         # TODO base this on `add_item()`
         items = []
-        path = os.path.join(_common.RSRC, util.bytestring_path("full." + ext))
+        path = os.path.join(_common.RSRC, util.bytestring_path(f"full.{ext}"))
         for i in range(count):
             item = Item.from_path(path)
             item.album = f"\u00e4lbum {i}"  # Check unicode paths
@@ -372,7 +371,7 @@ class TestHelper(ConfigMixin):
         specified extension a cover art image is added to the media
         file.
         """
-        src = os.path.join(_common.RSRC, util.bytestring_path("full." + ext))
+        src = os.path.join(_common.RSRC, util.bytestring_path(f"full.{ext}"))
         handle, path = mkstemp(dir=self.temp_dir)
         path = bytestring_path(path)
         os.close(handle)
@@ -495,7 +494,6 @@ class PluginMixin(ConfigMixin):
         # FIXME this should eventually be handled by a plugin manager
         plugins = (self.plugin,) if hasattr(self, "plugin") else plugins
         self.config["plugins"] = plugins
-        cached_classproperty.cache.clear()
         beets.plugins.load_plugins()

     def unload_plugins(self) -> None:
@@ -570,7 +568,7 @@ class ImportHelper(TestHelper):
         medium = MediaFile(track_path)
         medium.update(
             {
-                "album": "Tag Album" + (f" {album_id}" if album_id else ""),
+                "album": f"Tag Album{f' {album_id}' if album_id else ''}",
                 "albumartist": None,
                 "mb_albumid": None,
                 "comp": None,
@@ -831,23 +829,21 @@ class AutotagStub:

     def _make_track_match(self, artist, album, number):
         return TrackInfo(
-            title="Applied Track %d" % number,
-            track_id="match %d" % number,
+            title=f"Applied Track {number}",
+            track_id=f"match {number}",
             artist=artist,
             length=1,
             index=0,
         )

     def _make_album_match(self, artist, album, tracks, distance=0, missing=0):
-        if distance:
-            id = " " + "M" * distance
-        else:
-            id = ""
+        id = f" {'M' * distance}" if distance else ""

         if artist is None:
             artist = "Various Artists"
         else:
-            artist = artist.replace("Tag", "Applied") + id
-        album = album.replace("Tag", "Applied") + id
+            artist = f"{artist.replace('Tag', 'Applied')}{id}"
+        album = f"{album.replace('Tag', 'Applied')}{id}"

         track_infos = []
         for i in range(tracks - missing):
@@ -858,8 +854,8 @@ class AutotagStub:
             album=album,
             tracks=track_infos,
             va=False,
-            album_id="albumid" + id,
-            artist_id="artistid" + id,
+            album_id=f"albumid{id}",
+            artist_id=f"artistid{id}",
             albumtype="soundtrack",
             data_source="match_source",
             bandcamp_album_id="bc_url",
@@ -885,7 +881,7 @@ class FetchImageHelper:
         super().run(*args, **kwargs)

     IMAGEHEADER: dict[str, bytes] = {
-        "image/jpeg": b"\xff\xd8\xff" + b"\x00" * 3 + b"JFIF",
+        "image/jpeg": b"\xff\xd8\xff\x00\x00\x00JFIF",
         "image/png": b"\211PNG\r\n\032\n",
         "image/gif": b"GIF89a",
         # dummy type that is definitely not a valid image content type

@@ -23,14 +23,16 @@ import errno
 import optparse
 import os.path
 import re
+import shutil
 import sqlite3
-import struct
 import sys
 import textwrap
 import traceback
 import warnings
 from difflib import SequenceMatcher
-from typing import Any, Callable
+from functools import cache
+from itertools import chain
+from typing import Any, Callable, Literal

 import confuse

@@ -125,7 +127,7 @@ def print_(*strings: str, end: str = "\n") -> None:
     The `end` keyword argument behaves similarly to the built-in `print`
     (it defaults to a newline).
     """
-    txt = " ".join(strings or ("",)) + end
+    txt = f"{' '.join(strings or ('',))}{end}"

     # Encode the string and write it to stdout.
     # On Python 3, sys.stdout expects text strings and uses the
@@ -269,7 +271,7 @@ def input_options(
         )
     ):
         # The first option is the default; mark it.
-        show_letter = "[%s]" % found_letter.upper()
+        show_letter = f"[{found_letter.upper()}]"
         is_default = True
     else:
         show_letter = found_letter.upper()
@@ -308,9 +310,9 @@ def input_options(
         if isinstance(default, int):
             default_name = str(default)
             default_name = colorize("action_default", default_name)
-            tmpl = "# selection (default %s)"
-            prompt_parts.append(tmpl % default_name)
-            prompt_part_lengths.append(len(tmpl % str(default)))
+            tmpl = "# selection (default {})"
+            prompt_parts.append(tmpl.format(default_name))
+            prompt_part_lengths.append(len(tmpl) - 2 + len(str(default)))
         else:
             prompt_parts.append("# selection")
             prompt_part_lengths.append(len(prompt_parts[-1]))
@@ -338,7 +340,7 @@ def input_options(

         if line_length != 0:
             # Not the beginning of the line; need a space.
-            part = " " + part
+            part = f" {part}"
             length += 1

         prompt += part
@@ -349,8 +351,8 @@ def input_options(
     if not fallback_prompt:
         fallback_prompt = "Enter one of "
         if numrange:
-            fallback_prompt += "%i-%i, " % numrange
-        fallback_prompt += ", ".join(display_letters) + ":"
+            fallback_prompt += "{}-{}, ".format(*numrange)
+        fallback_prompt += f"{', '.join(display_letters)}:"

     resp = input_(prompt)
     while True:
@@ -406,7 +408,7 @@ def input_select_objects(prompt, objs, rep, prompt_all=None):
     objects individually.
     """
     choice = input_options(
-        ("y", "n", "s"), False, "%s? (Yes/no/select)" % (prompt_all or prompt)
+        ("y", "n", "s"), False, f"{prompt_all or prompt}? (Yes/no/select)"
     )
     print()  # Blank line.

@@ -420,7 +422,7 @@ def input_select_objects(prompt, objs, rep, prompt_all=None):
             answer = input_options(
                 ("y", "n", "q"),
                 True,
-                "%s? (yes/no/quit)" % prompt,
+                f"{prompt}? (yes/no/quit)",
                 "Enter Y or N:",
             )
             if answer == "y":
@@ -438,7 +440,7 @@ def input_select_objects(prompt, objs, rep, prompt_all=None):
 # ANSI terminal colorization code heavily inspired by pygments:
 # https://bitbucket.org/birkenfeld/pygments-main/src/default/pygments/console.py
 # (pygments is by Tim Hatch, Armin Ronacher, et al.)
-COLOR_ESCAPE = "\x1b["
+COLOR_ESCAPE = "\x1b"
 LEGACY_COLORS = {
     "black": ["black"],
     "darkred": ["red"],
@@ -463,7 +465,7 @@ LEGACY_COLORS = {
     "white": ["bold", "white"],
 }
 # All ANSI Colors.
-ANSI_CODES = {
+CODE_BY_COLOR = {
     # Styles.
     "normal": 0,
     "bold": 1,
@@ -494,11 +496,17 @@
     "bg_cyan": 46,
     "bg_white": 47,
 }
-RESET_COLOR = COLOR_ESCAPE + "39;49;00m"
+RESET_COLOR = f"{COLOR_ESCAPE}[39;49;00m"

-# These abstract COLOR_NAMES are lazily mapped on to the actual color in COLORS
-# as they are defined in the configuration files, see function: colorize
-COLOR_NAMES = [
+# Precompile common ANSI-escape regex patterns
+ANSI_CODE_REGEX = re.compile(rf"({COLOR_ESCAPE}\[[;0-9]*m)")
+ESC_TEXT_REGEX = re.compile(
+    rf"""(?P<pretext>[^{COLOR_ESCAPE}]*)
+    (?P<esc>(?:{ANSI_CODE_REGEX.pattern})+)
+    (?P<text>[^{COLOR_ESCAPE}]+)(?P<reset>{re.escape(RESET_COLOR)})
+    (?P<posttext>[^{COLOR_ESCAPE}]*)""",
+    re.VERBOSE,
+)
+ColorName = Literal[
     "text_success",
     "text_warning",
     "text_error",
@@ -507,76 +515,54 @@ COLOR_NAMES = [
     "action_default",
     "action",
     # New Colors
     "text",
     "text_faint",
     "import_path",
     "import_path_items",
     "action_description",
     "added",
     "removed",
     "changed",
     "added_highlight",
     "removed_highlight",
     "changed_highlight",
     "text_diff_added",
     "text_diff_removed",
     "text_diff_changed",
 ]
-COLORS: dict[str, list[str]] | None = None


-def _colorize(color, text):
-    """Returns a string that prints the given text in the given color
-    in a terminal that is ANSI color-aware. The color must be a list of strings
-    from ANSI_CODES.
+@cache
+def get_color_config() -> dict[ColorName, str]:
+    """Parse and validate color configuration, converting names to ANSI codes.
+
+    Processes the UI color configuration, handling both new list format and
+    legacy single-color format. Validates all color names against known codes
+    and raises an error for any invalid entries.
     """
-    # Construct escape sequence to be put before the text by iterating
-    # over all "ANSI codes" in `color`.
-    escape = ""
-    for code in color:
-        escape = escape + COLOR_ESCAPE + "%im" % ANSI_CODES[code]
-    return escape + text + RESET_COLOR
+    colors_by_color_name: dict[ColorName, list[str]] = {
+        k: (v if isinstance(v, list) else LEGACY_COLORS.get(v, [v]))
+        for k, v in config["ui"]["colors"].flatten().items()
+    }
+
+    if invalid_colors := (
+        set(chain.from_iterable(colors_by_color_name.values()))
+        - CODE_BY_COLOR.keys()
+    ):
+        raise UserError(
+            f"Invalid color(s) in configuration: {', '.join(invalid_colors)}"
+        )
+
+    return {
+        n: ";".join(str(CODE_BY_COLOR[c]) for c in colors)
+        for n, colors in colors_by_color_name.items()
+    }


-def colorize(color_name, text):
-    """Colorize text if colored output is enabled. (Like _colorize but
-    conditional.)
+def colorize(color_name: ColorName, text: str) -> str:
+    """Apply ANSI color formatting to text based on configuration settings.
+
+    Returns colored text when color output is enabled and NO_COLOR environment
+    variable is not set, otherwise returns plain text unchanged.
     """
     if config["ui"]["color"] and "NO_COLOR" not in os.environ:
-        global COLORS
-        if not COLORS:
-            # Read all color configurations and set global variable COLORS.
-            COLORS = dict()
-            for name in COLOR_NAMES:
-                # Convert legacy color definitions (strings) into the new
-                # list-based color definitions. Do this by trying to read the
-                # color definition from the configuration as unicode - if this
-                # is successful, the color definition is a legacy definition
-                # and has to be converted.
-                try:
-                    color_def = config["ui"]["colors"][name].get(str)
-                except (confuse.ConfigTypeError, NameError):
-                    # Normal color definition (type: list of unicode).
-                    color_def = config["ui"]["colors"][name].get(list)
-                else:
-                    # Legacy color definition (type: unicode). Convert.
-                    if color_def in LEGACY_COLORS:
-                        color_def = LEGACY_COLORS[color_def]
-                    else:
-                        raise UserError("no such color %s", color_def)
-                for code in color_def:
-                    if code not in ANSI_CODES.keys():
-                        raise ValueError("no such ANSI code %s", code)
-                COLORS[name] = color_def
-        # In case a 3rd party plugin is still passing the actual color ('red')
-        # instead of the abstract color name ('text_error')
-        color = COLORS.get(color_name)
-        if not color:
-            log.debug("Invalid color_name: {0}", color_name)
-            color = color_name
-        return _colorize(color, text)
-    else:
-        return text
+        color_code = get_color_config()[color_name]
+        return f"{COLOR_ESCAPE}[{color_code}m{text}{RESET_COLOR}"
+
+    return text


 def uncolorize(colored_text):
@@ -589,26 +575,22 @@ def uncolorize(colored_text):
     # [;\d]*  - matches a sequence consisting of one or more digits or
     #           semicola
     # [A-Za-z] - matches a letter
-    ansi_code_regex = re.compile(r"\x1b\[[;\d]*[A-Za-z]", re.VERBOSE)
-    # Strip ANSI codes from `colored_text` using the regular expression.
-    text = ansi_code_regex.sub("", colored_text)
-    return text
+    return ANSI_CODE_REGEX.sub("", colored_text)


 def color_split(colored_text, index):
-    ansi_code_regex = re.compile(r"(\x1b\[[;\d]*[A-Za-z])", re.VERBOSE)
     length = 0
     pre_split = ""
     post_split = ""
     found_color_code = None
     found_split = False
-    for part in ansi_code_regex.split(colored_text):
+    for part in ANSI_CODE_REGEX.split(colored_text):
         # Count how many real letters we have passed
         length += color_len(part)
         if found_split:
             post_split += part
         else:
-            if ansi_code_regex.match(part):
+            if ANSI_CODE_REGEX.match(part):
                 # This is a color code
                 if part == RESET_COLOR:
                     found_color_code = None
@@ -621,8 +603,8 @@ def color_split(colored_text, index):
                 split_index = index - (length - color_len(part))
                 found_split = True
                 if found_color_code:
-                    pre_split += part[:split_index] + RESET_COLOR
-                    post_split += found_color_code + part[split_index:]
+                    pre_split += f"{part[:split_index]}{RESET_COLOR}"
+                    post_split += f"{found_color_code}{part[split_index:]}"
                 else:
                     pre_split += part[:split_index]
                     post_split += part[split_index:]
@@ -642,7 +624,7 @@ def color_len(colored_text):
     return len(uncolorize(colored_text))


-def _colordiff(a, b):
+def _colordiff(a: Any, b: Any) -> tuple[str, str]:
     """Given two values, return the same pair of strings except with
     their differences highlighted in the specified color. Strings are
     highlighted intelligently to show differences; other values are
@@ -664,35 +646,21 @@ def _colordiff(a, b):
         colorize("text_diff_added", str(b)),
     )

-    a_out = []
-    b_out = []
+    before = ""
+    after = ""

     matcher = SequenceMatcher(lambda x: False, a, b)
     for op, a_start, a_end, b_start, b_end in matcher.get_opcodes():
-        if op == "equal":
-            # In both strings.
-            a_out.append(a[a_start:a_end])
-            b_out.append(b[b_start:b_end])
-        elif op == "insert":
-            # Right only.
-            b_out.append(colorize("text_diff_added", b[b_start:b_end]))
-        elif op == "delete":
-            # Left only.
-            a_out.append(colorize("text_diff_removed", a[a_start:a_end]))
-        elif op == "replace":
-            # Right and left differ. Colorise with second highlight if
-            # it's just a case change.
-            if a[a_start:a_end].lower() != b[b_start:b_end].lower():
-                a_color = "text_diff_removed"
-                b_color = "text_diff_added"
-            else:
-                a_color = b_color = "text_highlight_minor"
-            a_out.append(colorize(a_color, a[a_start:a_end]))
-            b_out.append(colorize(b_color, b[b_start:b_end]))
-        else:
-            assert False
+        before_part, after_part = a[a_start:a_end], b[b_start:b_end]
+        if op in {"delete", "replace"}:
+            before_part = colorize("text_diff_removed", before_part)
+        if op in {"insert", "replace"}:
+            after_part = colorize("text_diff_added", after_part)

-    return "".join(a_out), "".join(b_out)
+        before += before_part
+        after += after_part
+
+    return before, after


 def colordiff(a, b):
@@ -726,32 +694,16 @@ def get_replacements():
             replacements.append((re.compile(pattern), repl))
         except re.error:
             raise UserError(
-                "malformed regular expression in replace: {}".format(pattern)
+                f"malformed regular expression in replace: {pattern}"
             )
     return replacements


-def term_width():
+@cache
+def term_width() -> int:
     """Get the width (columns) of the terminal."""
-    fallback = config["ui"]["terminal_width"].get(int)
-
-    # The fcntl and termios modules are not available on non-Unix
-    # platforms, so we fall back to a constant.
-    try:
-        import fcntl
-        import termios
-    except ImportError:
-        return fallback
-
-    try:
-        buf = fcntl.ioctl(0, termios.TIOCGWINSZ, " " * 4)
-    except OSError:
-        return fallback
-    try:
-        height, width = struct.unpack("hh", buf)
-    except struct.error:
-        return fallback
-    return width
+    columns, _ = shutil.get_terminal_size(fallback=(0, 0))
+    return columns if columns else config["ui"]["terminal_width"].get(int)


 def split_into_lines(string, width_tuple):
@@ -765,19 +717,13 @@ def split_into_lines(string, width_tuple):
     """
     first_width, middle_width, last_width = width_tuple
     words = []
-    esc_text = re.compile(
-        r"""(?P<pretext>[^\x1b]*)
-        (?P<esc>(?:\x1b\[[;\d]*[A-Za-z])+)
-        (?P<text>[^\x1b]+)(?P<reset>\x1b\[39;49;00m)
-        (?P<posttext>[^\x1b]*)""",
-        re.VERBOSE,
-    )
-
     if uncolorize(string) == string:
         # No colors in string
         words = string.split()
     else:
         # Use a regex to find escapes and the text within them.
-        for m in esc_text.finditer(string):
+        for m in ESC_TEXT_REGEX.finditer(string):
             # m contains four groups:
             #   pretext - any text before escape sequence
             #   esc - intitial escape sequence
@@ -806,17 +752,17 @@ def split_into_lines(string, width_tuple):
                     # Colorize each word with pre/post escapes
                     # Reconstruct colored words
                     words += [
-                        m.group("esc") + raw_word + RESET_COLOR
+                        f"{m['esc']}{raw_word}{RESET_COLOR}"
                         for raw_word in raw_words
                     ]
                 elif raw_words:
                     # Pretext stops mid-word
                     if m.group("esc") != RESET_COLOR:
                         # Add the rest of the current word, with a reset after it
-                        words[-1] += m.group("esc") + raw_words[0] + RESET_COLOR
+                        words[-1] += f"{m['esc']}{raw_words[0]}{RESET_COLOR}"
                         # Add the subsequent colored words:
                         words += [
-                            m.group("esc") + raw_word + RESET_COLOR
+                            f"{m['esc']}{raw_word}{RESET_COLOR}"
                             for raw_word in raw_words[1:]
                         ]
                 else:
@@ -907,18 +853,12 @@ def print_column_layout(
     With subsequent lines (i.e. {lhs1}, {rhs1} onwards) being the
     rest of contents, wrapped if the width would be otherwise exceeded.
     """
-    if right["prefix"] + right["contents"] + right["suffix"] == "":
+    if f"{right['prefix']}{right['contents']}{right['suffix']}" == "":
         # No right hand information, so we don't need a separator.
         separator = ""
     first_line_no_wrap = (
-        indent_str
-        + left["prefix"]
-        + left["contents"]
-        + left["suffix"]
-        + separator
-        + right["prefix"]
-        + right["contents"]
-        + right["suffix"]
+        f"{indent_str}{left['prefix']}{left['contents']}{left['suffix']}"
+        f"{separator}{right['prefix']}{right['contents']}{right['suffix']}"
     )
     if color_len(first_line_no_wrap) < max_width:
         # Everything fits, print out line.
@@ -1044,18 +984,12 @@ def print_newline_layout(
     If {lhs0} would go over the maximum width, the subsequent lines are
     indented a second time for ease of reading.
     """
-    if right["prefix"] + right["contents"] + right["suffix"] == "":
+    if f"{right['prefix']}{right['contents']}{right['suffix']}" == "":
         # No right hand information, so we don't need a separator.
         separator = ""
     first_line_no_wrap = (
-        indent_str
-        + left["prefix"]
-        + left["contents"]
-        + left["suffix"]
-        + separator
-        + right["prefix"]
-        + right["contents"]
-        + right["suffix"]
+        f"{indent_str}{left['prefix']}{left['contents']}{left['suffix']}"
+        f"{separator}{right['prefix']}{right['contents']}{right['suffix']}"
     )
     if color_len(first_line_no_wrap) < max_width:
         # Everything fits, print out line.
@@ -1069,7 +1003,7 @@ def print_newline_layout(
         empty_space - len(indent_str),
         empty_space - len(indent_str),
     )
-    left_str = left["prefix"] + left["contents"] + left["suffix"]
+    left_str = f"{left['prefix']}{left['contents']}{left['suffix']}"
     left_split = split_into_lines(left_str, left_width_tuple)
     # Repeat calculations for rhs, including separator on first line
     right_width_tuple = (
@@ -1077,19 +1011,19 @@ def print_newline_layout(
         empty_space - len(indent_str),
         empty_space - len(indent_str),
     )
-    right_str = right["prefix"] + right["contents"] + right["suffix"]
+    right_str = f"{right['prefix']}{right['contents']}{right['suffix']}"
     right_split = split_into_lines(right_str, right_width_tuple)
     for i, line in enumerate(left_split):
         if i == 0:
-            print_(indent_str + line)
+            print_(f"{indent_str}{line}")
         elif line != "":
             # Ignore empty lines
-            print_(indent_str * 2 + line)
+            print_(f"{indent_str * 2}{line}")
     for i, line in enumerate(right_split):
         if i == 0:
-            print_(indent_str + separator + line)
+            print_(f"{indent_str}{separator}{line}")
         elif line != "":
-            print_(indent_str * 2 + line)
+            print_(f"{indent_str * 2}{line}")


 FLOAT_EPSILON = 0.01
@@ -1122,13 +1056,15 @@ def _field_diff(field, old, old_fmt, new, new_fmt):
     if isinstance(oldval, str):
         oldstr, newstr = colordiff(oldval, newstr)
     else:
-        oldstr = colorize("text_error", oldstr)
-        newstr = colorize("text_error", newstr)
+        oldstr = colorize("text_diff_removed", oldstr)
+        newstr = colorize("text_diff_added", newstr)

     return f"{oldstr} -> {newstr}"


-def show_model_changes(new, old=None, fields=None, always=False):
+def show_model_changes(
+    new, old=None, fields=None, always=False, print_obj: bool = True
+):
     """Given a Model object, print a list of changes from its pristine
     version stored in the database. Return a boolean indicating whether
     any changes were found.
@@ -1163,11 +1099,11 @@ def show_model_changes(
         continue

         changes.append(
-            "  {}: {}".format(field, colorize("text_highlight", new_fmt[field]))
+            f"  {field}: {colorize('text_highlight', new_fmt[field])}"
         )

     # Print changes.
-    if changes or always:
+    if print_obj and (changes or always):
         print_(format(old))
     if changes:
         print_("\n".join(changes))
@@ -1204,22 +1140,16 @@ def show_path_changes(path_changes):
         # Print every change over two lines
         for source, dest in zip(sources, destinations):
             color_source, color_dest = colordiff(source, dest)
-            print_("{0} \n  -> {1}".format(color_source, color_dest))
+            print_(f"{color_source} \n  -> {color_dest}")
     else:
         # Print every change on a single line, and add a header
         title_pad = max_width - len("Source ") + len(" -> ")

-        print_("Source {0} Destination".format(" " * title_pad))
+        print_(f"Source {' ' * title_pad} Destination")
         for source, dest in zip(sources, destinations):
             pad = max_width - len(source)
             color_source, color_dest = colordiff(source, dest)
-            print_(
-                "{0} {1} -> {2}".format(
-                    color_source,
-                    " " * pad,
-                    color_dest,
-                )
-            )
+            print_(f"{color_source} {' ' * pad} -> {color_dest}")


 # Helper functions for option parsing.
@@ -1245,9 +1175,7 @@ def _store_dict(option, opt_str, value, parser):
             raise ValueError
     except ValueError:
         raise UserError(
-            "supplied argument `{}' is not of the form `key=value'".format(
-                value
-            )
+            f"supplied argument `{value}' is not of the form `key=value'"
         )

     option_values[key] = value
@@ -1426,8 +1354,8 @@ class Subcommand:
     @root_parser.setter
     def root_parser(self, root_parser):
         self._root_parser = root_parser
-        self.parser.prog = "{} {}".format(
-            as_string(root_parser.get_prog_name()), self.name
+        self.parser.prog = (
+            f"{as_string(root_parser.get_prog_name())} {self.name}"
         )
@@ -1483,7 +1411,7 @@ class SubcommandsOptionParser(CommonOptionsParser):
         for subcommand in subcommands:
             name = subcommand.name
             if subcommand.aliases:
-                name += " (%s)" % ", ".join(subcommand.aliases)
+                name += f" ({', '.join(subcommand.aliases)})"
             disp_names.append(name)

         # Set the help position based on the max width.
@@ -1496,32 +1424,24 @@ class SubcommandsOptionParser(CommonOptionsParser):
             # Lifted directly from optparse.py.
             name_width = help_position - formatter.current_indent - 2
             if len(name) > name_width:
-                name = "%*s%s\n" % (formatter.current_indent, "", name)
+                name = f"{' ' * formatter.current_indent}{name}\n"
                 indent_first = help_position
             else:
-                name = "%*s%-*s  " % (
-                    formatter.current_indent,
-                    "",
-                    name_width,
-                    name,
-                )
+                name = f"{' ' * formatter.current_indent}{name:<{name_width}}\n"
                 indent_first = 0
             result.append(name)
             help_width = formatter.width - help_position
             help_lines = textwrap.wrap(subcommand.help, help_width)
             help_line = help_lines[0] if help_lines else ""
-            result.append("%*s%s\n" % (indent_first, "", help_line))
+            result.append(f"{' ' * indent_first}{help_line}\n")
||||
result.extend(
|
||||
[
|
||||
"%*s%s\n" % (help_position, "", line)
|
||||
for line in help_lines[1:]
|
||||
]
|
||||
[f"{' ' * help_position}{line}\n" for line in help_lines[1:]]
|
||||
)
|
||||
formatter.dedent()
|
||||
|
||||
# Concatenate the original help message with the subcommand
|
||||
# list.
|
||||
return out + "".join(result)
|
||||
return f"{out}{''.join(result)}"
|
||||
|
||||
def _subcommand_for_name(self, name):
|
||||
"""Return the subcommand in self.subcommands matching the
|
||||
|
|
@ -1615,19 +1535,19 @@ def _configure(options):
|
|||
|
||||
if overlay_path:
|
||||
log.debug(
|
||||
"overlaying configuration: {0}", util.displayable_path(overlay_path)
|
||||
"overlaying configuration: {}", util.displayable_path(overlay_path)
|
||||
)
|
||||
|
||||
config_path = config.user_config_path()
|
||||
if os.path.isfile(config_path):
|
||||
log.debug("user configuration: {0}", util.displayable_path(config_path))
|
||||
log.debug("user configuration: {}", util.displayable_path(config_path))
|
||||
else:
|
||||
log.debug(
|
||||
"no user configuration found at {0}",
|
||||
"no user configuration found at {}",
|
||||
util.displayable_path(config_path),
|
||||
)
|
||||
|
||||
log.debug("data directory: {0}", util.displayable_path(config.config_dir()))
|
||||
log.debug("data directory: {}", util.displayable_path(config.config_dir()))
|
||||
return config
|
||||
|
||||
|
||||
|
|
@ -1637,10 +1557,8 @@ def _ensure_db_directory_exists(path):
|
|||
newpath = os.path.dirname(path)
|
||||
if not os.path.isdir(newpath):
|
||||
if input_yn(
|
||||
"The database directory {} does not \
|
||||
exist. Create it (Y/n)?".format(
|
||||
util.displayable_path(newpath)
|
||||
)
|
||||
f"The database directory {util.displayable_path(newpath)} does not"
|
||||
" exist. Create it (Y/n)?"
|
||||
):
|
||||
os.makedirs(newpath)
|
||||
|
||||
|
|
@ -1660,12 +1578,11 @@ def _open_library(config: confuse.LazyConfig) -> library.Library:
|
|||
except (sqlite3.OperationalError, sqlite3.DatabaseError) as db_error:
|
||||
log.debug("{}", traceback.format_exc())
|
||||
raise UserError(
|
||||
"database file {} cannot not be opened: {}".format(
|
||||
util.displayable_path(dbpath), db_error
|
||||
)
|
||||
f"database file {util.displayable_path(dbpath)} cannot not be"
|
||||
f" opened: {db_error}"
|
||||
)
|
||||
log.debug(
|
||||
"library database: {0}\nlibrary directory: {1}",
|
||||
"library database: {}\nlibrary directory: {}",
|
||||
util.displayable_path(lib.path),
|
||||
util.displayable_path(lib.directory),
|
||||
)
|
||||
|
|
@ -1782,7 +1699,7 @@ def main(args=None):
|
|||
_raw_main(args)
|
||||
except UserError as exc:
|
||||
message = exc.args[0] if exc.args else None
|
||||
log.error("error: {0}", message)
|
||||
log.error("error: {}", message)
|
||||
sys.exit(1)
|
||||
except util.HumanReadableError as exc:
|
||||
exc.log(log)
|
||||
|
|
@ -1794,10 +1711,10 @@ def main(args=None):
|
|||
log.error("{}", exc)
|
||||
sys.exit(1)
|
||||
except confuse.ConfigError as exc:
|
||||
log.error("configuration error: {0}", exc)
|
||||
log.error("configuration error: {}", exc)
|
||||
sys.exit(1)
|
||||
except db_query.InvalidQueryError as exc:
|
||||
log.error("invalid query: {0}", exc)
|
||||
log.error("invalid query: {}", exc)
|
||||
sys.exit(1)
|
||||
except OSError as exc:
|
||||
if exc.errno == errno.EPIPE:
|
||||
|
|
@ -1810,7 +1727,7 @@ def main(args=None):
|
|||
log.debug("{}", traceback.format_exc())
|
||||
except db.DBAccessError as exc:
|
||||
log.error(
|
||||
"database access error: {0}\n"
|
||||
"database access error: {}\n"
|
||||
"the library file might have a permissions problem",
|
||||
exc,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -18,8 +18,10 @@ interface.
|
|||
|
||||
import os
|
||||
import re
|
||||
import textwrap
|
||||
from collections import Counter
|
||||
from collections.abc import Sequence
|
||||
from functools import cached_property
|
||||
from itertools import chain
|
||||
from platform import python_version
|
||||
from typing import Any, NamedTuple
|
||||
|
|
@ -112,15 +114,11 @@ def _parse_logfiles(logfiles):
|
|||
yield from _paths_from_logfile(syspath(normpath(logfile)))
|
||||
except ValueError as err:
|
||||
raise ui.UserError(
|
||||
"malformed logfile {}: {}".format(
|
||||
util.displayable_path(logfile), str(err)
|
||||
)
|
||||
f"malformed logfile {util.displayable_path(logfile)}: {err}"
|
||||
) from err
|
||||
except OSError as err:
|
||||
raise ui.UserError(
|
||||
"unreadable logfile {}: {}".format(
|
||||
util.displayable_path(logfile), str(err)
|
||||
)
|
||||
f"unreadable logfile {util.displayable_path(logfile)}: {err}"
|
||||
) from err
|
||||
|
||||
|
||||
|
|
@ -132,13 +130,13 @@ def _print_keys(query):
|
|||
returned row, with indentation of 2 spaces.
|
||||
"""
|
||||
for row in query:
|
||||
print_(" " * 2 + row["key"])
|
||||
print_(f" {row['key']}")
|
||||
|
||||
|
||||
def fields_func(lib, opts, args):
|
||||
def _print_rows(names):
|
||||
names.sort()
|
||||
print_(" " + "\n ".join(names))
|
||||
print_(textwrap.indent("\n".join(names), " "))
|
||||
|
||||
print_("Item fields:")
|
||||
_print_rows(library.Item.all_keys())
|
||||
|
|
@ -148,13 +146,13 @@ def fields_func(lib, opts, args):
|
|||
|
||||
with lib.transaction() as tx:
|
||||
# The SQL uses the DISTINCT to get unique values from the query
|
||||
unique_fields = "SELECT DISTINCT key FROM (%s)"
|
||||
unique_fields = "SELECT DISTINCT key FROM ({})"
|
||||
|
||||
print_("Item flexible attributes:")
|
||||
_print_keys(tx.query(unique_fields % library.Item._flex_table))
|
||||
_print_keys(tx.query(unique_fields.format(library.Item._flex_table)))
|
||||
|
||||
print_("Album flexible attributes:")
|
||||
_print_keys(tx.query(unique_fields % library.Album._flex_table))
|
||||
_print_keys(tx.query(unique_fields.format(library.Album._flex_table)))
|
||||
|
||||
|
||||
fields_cmd = ui.Subcommand(
|
||||
|
|
@ -213,10 +211,10 @@ def get_singleton_disambig_fields(info: hooks.TrackInfo) -> Sequence[str]:
|
|||
out = []
|
||||
chosen_fields = config["match"]["singleton_disambig_fields"].as_str_seq()
|
||||
calculated_values = {
|
||||
"index": "Index {}".format(str(info.index)),
|
||||
"track_alt": "Track {}".format(info.track_alt),
|
||||
"index": f"Index {info.index}",
|
||||
"track_alt": f"Track {info.track_alt}",
|
||||
"album": (
|
||||
"[{}]".format(info.album)
|
||||
f"[{info.album}]"
|
||||
if (
|
||||
config["import"]["singleton_album_disambig"].get()
|
||||
and info.get("album")
|
||||
|
|
@ -242,7 +240,7 @@ def get_album_disambig_fields(info: hooks.AlbumInfo) -> Sequence[str]:
|
|||
chosen_fields = config["match"]["album_disambig_fields"].as_str_seq()
|
||||
calculated_values = {
|
||||
"media": (
|
||||
"{}x{}".format(info.mediums, info.media)
|
||||
f"{info.mediums}x{info.media}"
|
||||
if (info.mediums and info.mediums > 1)
|
||||
else info.media
|
||||
),
|
||||
|
|
@ -277,7 +275,7 @@ def dist_string(dist):
|
|||
"""Formats a distance (a float) as a colorized similarity percentage
|
||||
string.
|
||||
"""
|
||||
string = "{:.1f}%".format(((1 - dist) * 100))
|
||||
string = f"{(1 - dist) * 100:.1f}%"
|
||||
return dist_colorize(string, dist)
|
||||
|
||||
|
||||
|
|
@ -295,7 +293,7 @@ def penalty_string(distance, limit=None):
|
|||
if limit and len(penalties) > limit:
|
||||
penalties = penalties[:limit] + ["..."]
|
||||
# Prefix penalty string with U+2260: Not Equal To
|
||||
penalty_string = "\u2260 {}".format(", ".join(penalties))
|
||||
penalty_string = f"\u2260 {', '.join(penalties)}"
|
||||
return ui.colorize("changed", penalty_string)
|
||||
|
||||
|
||||
|
|
@ -306,6 +304,10 @@ class ChangeRepresentation:
|
|||
TrackMatch object, accordingly.
|
||||
"""
|
||||
|
||||
@cached_property
|
||||
def changed_prefix(self) -> str:
|
||||
return ui.colorize("changed", "\u2260")
|
||||
|
||||
cur_artist = None
|
||||
# cur_album set if album, cur_title set if singleton
|
||||
cur_album = None
|
||||
|
|
@ -360,18 +362,18 @@ class ChangeRepresentation:
|
|||
|
||||
# 'Match' line and similarity.
|
||||
print_(
|
||||
self.indent_header + f"Match ({dist_string(self.match.distance)}):"
|
||||
f"{self.indent_header}Match ({dist_string(self.match.distance)}):"
|
||||
)
|
||||
|
||||
if isinstance(self.match.info, autotag.hooks.AlbumInfo):
|
||||
# Matching an album - print that
|
||||
artist_album_str = (
|
||||
f"{self.match.info.artist}" + f" - {self.match.info.album}"
|
||||
f"{self.match.info.artist} - {self.match.info.album}"
|
||||
)
|
||||
else:
|
||||
# Matching a single track
|
||||
artist_album_str = (
|
||||
f"{self.match.info.artist}" + f" - {self.match.info.title}"
|
||||
f"{self.match.info.artist} - {self.match.info.title}"
|
||||
)
|
||||
print_(
|
||||
self.indent_header
|
||||
|
|
@ -381,17 +383,17 @@ class ChangeRepresentation:
|
|||
# Penalties.
|
||||
penalties = penalty_string(self.match.distance)
|
||||
if penalties:
|
||||
print_(self.indent_header + penalties)
|
||||
print_(f"{self.indent_header}{penalties}")
|
||||
|
||||
# Disambiguation.
|
||||
disambig = disambig_string(self.match.info)
|
||||
if disambig:
|
||||
print_(self.indent_header + disambig)
|
||||
print_(f"{self.indent_header}{disambig}")
|
||||
|
||||
# Data URL.
|
||||
if self.match.info.data_url:
|
||||
url = ui.colorize("text_faint", f"{self.match.info.data_url}")
|
||||
print_(self.indent_header + url)
|
||||
print_(f"{self.indent_header}{url}")
|
||||
|
||||
def show_match_details(self):
|
||||
"""Print out the details of the match, including changes in album name
|
||||
|
|
@ -404,9 +406,8 @@ class ChangeRepresentation:
|
|||
artist_l, artist_r = "", ""
|
||||
if artist_l != artist_r:
|
||||
artist_l, artist_r = ui.colordiff(artist_l, artist_r)
|
||||
# Prefix with U+2260: Not Equal To
|
||||
left = {
|
||||
"prefix": ui.colorize("changed", "\u2260") + " Artist: ",
|
||||
"prefix": f"{self.changed_prefix} Artist: ",
|
||||
"contents": artist_l,
|
||||
"suffix": "",
|
||||
}
|
||||
|
|
@ -414,7 +415,7 @@ class ChangeRepresentation:
|
|||
self.print_layout(self.indent_detail, left, right)
|
||||
|
||||
else:
|
||||
print_(self.indent_detail + "*", "Artist:", artist_r)
|
||||
print_(f"{self.indent_detail}*", "Artist:", artist_r)
|
||||
|
||||
if self.cur_album:
|
||||
# Album
|
||||
|
|
@ -424,31 +425,29 @@ class ChangeRepresentation:
|
|||
and self.match.info.album != VARIOUS_ARTISTS
|
||||
):
|
||||
album_l, album_r = ui.colordiff(album_l, album_r)
|
||||
# Prefix with U+2260: Not Equal To
|
||||
left = {
|
||||
"prefix": ui.colorize("changed", "\u2260") + " Album: ",
|
||||
"prefix": f"{self.changed_prefix} Album: ",
|
||||
"contents": album_l,
|
||||
"suffix": "",
|
||||
}
|
||||
right = {"prefix": "", "contents": album_r, "suffix": ""}
|
||||
self.print_layout(self.indent_detail, left, right)
|
||||
else:
|
||||
print_(self.indent_detail + "*", "Album:", album_r)
|
||||
print_(f"{self.indent_detail}*", "Album:", album_r)
|
||||
elif self.cur_title:
|
||||
# Title - for singletons
|
||||
title_l, title_r = self.cur_title or "", self.match.info.title
|
||||
if self.cur_title != self.match.info.title:
|
||||
title_l, title_r = ui.colordiff(title_l, title_r)
|
||||
# Prefix with U+2260: Not Equal To
|
||||
left = {
|
||||
"prefix": ui.colorize("changed", "\u2260") + " Title: ",
|
||||
"prefix": f"{self.changed_prefix} Title: ",
|
||||
"contents": title_l,
|
||||
"suffix": "",
|
||||
}
|
||||
right = {"prefix": "", "contents": title_r, "suffix": ""}
|
||||
self.print_layout(self.indent_detail, left, right)
|
||||
else:
|
||||
print_(self.indent_detail + "*", "Title:", title_r)
|
||||
print_(f"{self.indent_detail}*", "Title:", title_r)
|
||||
|
||||
def make_medium_info_line(self, track_info):
|
||||
"""Construct a line with the current medium's info."""
|
||||
|
|
@ -490,7 +489,6 @@ class ChangeRepresentation:
|
|||
"""Format colored track indices."""
|
||||
cur_track = self.format_index(item)
|
||||
new_track = self.format_index(track_info)
|
||||
templ = "(#{})"
|
||||
changed = False
|
||||
# Choose color based on change.
|
||||
if cur_track != new_track:
|
||||
|
|
@ -502,10 +500,8 @@ class ChangeRepresentation:
|
|||
else:
|
||||
highlight_color = "text_faint"
|
||||
|
||||
cur_track = templ.format(cur_track)
|
||||
new_track = templ.format(new_track)
|
||||
lhs_track = ui.colorize(highlight_color, cur_track)
|
||||
rhs_track = ui.colorize(highlight_color, new_track)
|
||||
lhs_track = ui.colorize(highlight_color, f"(#{cur_track})")
|
||||
rhs_track = ui.colorize(highlight_color, f"(#{new_track})")
|
||||
return lhs_track, rhs_track, changed
|
||||
|
||||
@staticmethod
|
||||
|
|
@ -573,11 +569,10 @@ class ChangeRepresentation:
|
|||
# the case, thus the 'info' dictionary is unneeded.
|
||||
# penalties = penalty_string(self.match.distance.tracks[track_info])
|
||||
|
||||
prefix = ui.colorize("changed", "\u2260 ") if changed else "* "
|
||||
lhs = {
|
||||
"prefix": prefix + lhs_track + " ",
|
||||
"prefix": f"{self.changed_prefix if changed else '*'} {lhs_track} ",
|
||||
"contents": lhs_title,
|
||||
"suffix": " " + lhs_length,
|
||||
"suffix": f" {lhs_length}",
|
||||
}
|
||||
rhs = {"prefix": "", "contents": "", "suffix": ""}
|
||||
if not changed:
|
||||
|
|
@ -586,9 +581,9 @@ class ChangeRepresentation:
|
|||
else:
|
||||
# Construct a dictionary for the "changed to" side
|
||||
rhs = {
|
||||
"prefix": rhs_track + " ",
|
||||
"prefix": f"{rhs_track} ",
|
||||
"contents": rhs_title,
|
||||
"suffix": " " + rhs_length,
|
||||
"suffix": f" {rhs_length}",
|
||||
}
|
||||
return (lhs, rhs)
|
||||
|
||||
|
|
@ -681,7 +676,7 @@ class AlbumChange(ChangeRepresentation):
|
|||
# Print tracks from previous medium
|
||||
self.print_tracklist(lines)
|
||||
lines = []
|
||||
print_(self.indent_detail + header)
|
||||
print_(f"{self.indent_detail}{header}")
|
||||
# Save new medium details for future comparison.
|
||||
medium, disctitle = track_info.medium, track_info.disctitle
|
||||
|
||||
|
|
@ -697,11 +692,9 @@ class AlbumChange(ChangeRepresentation):
|
|||
# Missing and unmatched tracks.
|
||||
if self.match.extra_tracks:
|
||||
print_(
|
||||
"Missing tracks ({0}/{1} - {2:.1%}):".format(
|
||||
len(self.match.extra_tracks),
|
||||
len(self.match.info.tracks),
|
||||
len(self.match.extra_tracks) / len(self.match.info.tracks),
|
||||
)
|
||||
"Missing tracks"
|
||||
f" ({len(self.match.extra_tracks)}/{len(self.match.info.tracks)} -"
|
||||
f" {len(self.match.extra_tracks) / len(self.match.info.tracks):.1%}):"
|
||||
)
|
||||
for track_info in self.match.extra_tracks:
|
||||
line = f" ! {track_info.title} (#{self.format_index(track_info)})"
|
||||
|
|
@ -711,9 +704,9 @@ class AlbumChange(ChangeRepresentation):
|
|||
if self.match.extra_items:
|
||||
print_(f"Unmatched tracks ({len(self.match.extra_items)}):")
|
||||
for item in self.match.extra_items:
|
||||
line = " ! {} (#{})".format(item.title, self.format_index(item))
|
||||
line = f" ! {item.title} (#{self.format_index(item)})"
|
||||
if item.length:
|
||||
line += " ({})".format(human_seconds_short(item.length))
|
||||
line += f" ({human_seconds_short(item.length)})"
|
||||
print_(ui.colorize("text_warning", line))
|
||||
|
||||
|
||||
|
|
@ -769,7 +762,7 @@ def summarize_items(items, singleton):
|
|||
"""
|
||||
summary_parts = []
|
||||
if not singleton:
|
||||
summary_parts.append("{} items".format(len(items)))
|
||||
summary_parts.append(f"{len(items)} items")
|
||||
|
||||
format_counts = {}
|
||||
for item in items:
|
||||
|
|
@ -789,10 +782,11 @@ def summarize_items(items, singleton):
|
|||
average_bitrate = sum([item.bitrate for item in items]) / len(items)
|
||||
total_duration = sum([item.length for item in items])
|
||||
total_filesize = sum([item.filesize for item in items])
|
||||
summary_parts.append("{}kbps".format(int(average_bitrate / 1000)))
|
||||
summary_parts.append(f"{int(average_bitrate / 1000)}kbps")
|
||||
if items[0].format == "FLAC":
|
||||
sample_bits = "{}kHz/{} bit".format(
|
||||
round(int(items[0].samplerate) / 1000, 1), items[0].bitdepth
|
||||
sample_bits = (
|
||||
f"{round(int(items[0].samplerate) / 1000, 1)}kHz"
|
||||
f"/{items[0].bitdepth} bit"
|
||||
)
|
||||
summary_parts.append(sample_bits)
|
||||
summary_parts.append(human_seconds_short(total_duration))
|
||||
|
|
@ -885,7 +879,7 @@ def choose_candidate(
|
|||
if singleton:
|
||||
print_("No matching recordings found.")
|
||||
else:
|
||||
print_("No matching release found for {} tracks.".format(itemcount))
|
||||
print_(f"No matching release found for {itemcount} tracks.")
|
||||
print_(
|
||||
"For help, see: "
|
||||
"https://beets.readthedocs.org/en/latest/faq.html#nomatch"
|
||||
|
|
@ -910,40 +904,38 @@ def choose_candidate(
|
|||
# Display list of candidates.
|
||||
print_("")
|
||||
print_(
|
||||
'Finding tags for {} "{} - {}".'.format(
|
||||
"track" if singleton else "album",
|
||||
item.artist if singleton else cur_artist,
|
||||
item.title if singleton else cur_album,
|
||||
)
|
||||
f"Finding tags for {'track' if singleton else 'album'} "
|
||||
f'"{item.artist if singleton else cur_artist} -'
|
||||
f' {item.title if singleton else cur_album}".'
|
||||
)
|
||||
|
||||
print_(ui.indent(2) + "Candidates:")
|
||||
print_(" Candidates:")
|
||||
for i, match in enumerate(candidates):
|
||||
# Index, metadata, and distance.
|
||||
index0 = "{0}.".format(i + 1)
|
||||
index0 = f"{i + 1}."
|
||||
index = dist_colorize(index0, match.distance)
|
||||
dist = "({:.1f}%)".format((1 - match.distance) * 100)
|
||||
dist = f"({(1 - match.distance) * 100:.1f}%)"
|
||||
distance = dist_colorize(dist, match.distance)
|
||||
metadata = "{0} - {1}".format(
|
||||
match.info.artist,
|
||||
match.info.title if singleton else match.info.album,
|
||||
metadata = (
|
||||
f"{match.info.artist} -"
|
||||
f" {match.info.title if singleton else match.info.album}"
|
||||
)
|
||||
if i == 0:
|
||||
metadata = dist_colorize(metadata, match.distance)
|
||||
else:
|
||||
metadata = ui.colorize("text_highlight_minor", metadata)
|
||||
line1 = [index, distance, metadata]
|
||||
print_(ui.indent(2) + " ".join(line1))
|
||||
print_(f" {' '.join(line1)}")
|
||||
|
||||
# Penalties.
|
||||
penalties = penalty_string(match.distance, 3)
|
||||
if penalties:
|
||||
print_(ui.indent(13) + penalties)
|
||||
print_(f"{' ' * 13}{penalties}")
|
||||
|
||||
# Disambiguation
|
||||
disambig = disambig_string(match.info)
|
||||
if disambig:
|
||||
print_(ui.indent(13) + disambig)
|
||||
print_(f"{' ' * 13}{disambig}")
|
||||
|
||||
# Ask the user for a choice.
|
||||
sel = ui.input_options(choice_opts, numrange=(1, len(candidates)))
|
||||
|
|
@ -1015,7 +1007,7 @@ def manual_id(session, task):
|
|||
|
||||
Input an ID, either for an album ("release") or a track ("recording").
|
||||
"""
|
||||
prompt = "Enter {} ID:".format("release" if task.is_album else "recording")
|
||||
prompt = f"Enter {'release' if task.is_album else 'recording'} ID:"
|
||||
search_id = input_(prompt).strip()
|
||||
|
||||
if task.is_album:
|
||||
|
|
@ -1043,7 +1035,7 @@ class TerminalImportSession(importer.ImportSession):
|
|||
|
||||
path_str0 = displayable_path(task.paths, "\n")
|
||||
path_str = ui.colorize("import_path", path_str0)
|
||||
items_str0 = "({} items)".format(len(task.items))
|
||||
items_str0 = f"({len(task.items)} items)"
|
||||
items_str = ui.colorize("import_path_items", items_str0)
|
||||
print_(" ".join([path_str, items_str]))
|
||||
|
||||
|
|
@ -1156,7 +1148,7 @@ class TerminalImportSession(importer.ImportSession):
|
|||
that's already in the library.
|
||||
"""
|
||||
log.warning(
|
||||
"This {0} is already in the library!",
|
||||
"This {} is already in the library!",
|
||||
("album" if task.is_album else "item"),
|
||||
)
|
||||
|
||||
|
|
@ -1217,8 +1209,8 @@ class TerminalImportSession(importer.ImportSession):
|
|||
|
||||
def should_resume(self, path):
|
||||
return ui.input_yn(
|
||||
"Import of the directory:\n{}\n"
|
||||
"was interrupted. Resume (Y/n)?".format(displayable_path(path))
|
||||
f"Import of the directory:\n{displayable_path(path)}\n"
|
||||
"was interrupted. Resume (Y/n)?"
|
||||
)
|
||||
|
||||
def _get_choices(self, task):
|
||||
|
|
@ -1288,11 +1280,10 @@ class TerminalImportSession(importer.ImportSession):
|
|||
dup_choices = [c for c in all_choices if c.short == short]
|
||||
for c in dup_choices[1:]:
|
||||
log.warning(
|
||||
"Prompt choice '{0}' removed due to conflict "
|
||||
"with '{1}' (short letter: '{2}')",
|
||||
c.long,
|
||||
dup_choices[0].long,
|
||||
c.short,
|
||||
"Prompt choice '{0.long}' removed due to conflict "
|
||||
"with '{1[0].long}' (short letter: '{0.short}')",
|
||||
c,
|
||||
dup_choices,
|
||||
)
|
||||
extra_choices.remove(c)
|
||||
|
||||
|
|
@ -1317,7 +1308,8 @@ def import_files(lib, paths: list[bytes], query):
|
|||
loghandler = logging.FileHandler(logpath, encoding="utf-8")
|
||||
except OSError:
|
||||
raise ui.UserError(
|
||||
f"Could not open log file for writing: {displayable_path(logpath)}"
|
||||
"Could not open log file for writing:"
|
||||
f" {displayable_path(logpath)}"
|
||||
)
|
||||
else:
|
||||
loghandler = None
|
||||
|
|
@ -1362,9 +1354,7 @@ def import_func(lib, opts, args: list[str]):
|
|||
for path in byte_paths:
|
||||
if not os.path.exists(syspath(normpath(path))):
|
||||
raise ui.UserError(
|
||||
"no such file or directory: {}".format(
|
||||
displayable_path(path)
|
||||
)
|
||||
f"no such file or directory: {displayable_path(path)}"
|
||||
)
|
||||
|
||||
# Check the directories from the logfiles, but don't throw an error in
|
||||
|
|
@ -1374,9 +1364,7 @@ def import_func(lib, opts, args: list[str]):
|
|||
for path in paths_from_logfiles:
|
||||
if not os.path.exists(syspath(normpath(path))):
|
||||
log.warning(
|
||||
"No such file or directory: {}".format(
|
||||
displayable_path(path)
|
||||
)
|
||||
"No such file or directory: {}", displayable_path(path)
|
||||
)
|
||||
continue
|
||||
|
||||
|
|
@ -1650,9 +1638,8 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None):
|
|||
# Did the item change since last checked?
|
||||
if item.current_mtime() <= item.mtime:
|
||||
log.debug(
|
||||
"skipping {0} because mtime is up to date ({1})",
|
||||
displayable_path(item.path),
|
||||
item.mtime,
|
||||
"skipping {0.filepath} because mtime is up to date ({0.mtime})",
|
||||
item,
|
||||
)
|
||||
continue
|
||||
|
||||
|
|
@ -1660,9 +1647,7 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None):
|
|||
try:
|
||||
item.read()
|
||||
except library.ReadError as exc:
|
||||
log.error(
|
||||
"error reading {0}: {1}", displayable_path(item.path), exc
|
||||
)
|
||||
log.error("error reading {.filepath}: {}", item, exc)
|
||||
continue
|
||||
|
||||
# Special-case album artist when it matches track artist. (Hacky
|
||||
|
|
@ -1703,7 +1688,7 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None):
|
|||
continue
|
||||
album = lib.get_album(album_id)
|
||||
if not album: # Empty albums have already been removed.
|
||||
log.debug("emptied album {0}", album_id)
|
||||
log.debug("emptied album {}", album_id)
|
||||
continue
|
||||
first_item = album.items().get()
|
||||
|
||||
|
|
@ -1714,7 +1699,7 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None):
|
|||
|
||||
# Move album art (and any inconsistent items).
|
||||
if move and lib.directory in ancestry(first_item.path):
|
||||
log.debug("moving album {0}", album_id)
|
||||
log.debug("moving album {}", album_id)
|
||||
|
||||
# Manually moving and storing the album.
|
||||
items = list(album.items())
|
||||
|
|
@ -1808,7 +1793,7 @@ def remove_items(lib, query, album, delete, force):
|
|||
if not force:
|
||||
# Prepare confirmation with user.
|
||||
album_str = (
|
||||
" in {} album{}".format(len(albums), "s" if len(albums) > 1 else "")
|
||||
f" in {len(albums)} album{'s' if len(albums) > 1 else ''}"
|
||||
if album
|
||||
else ""
|
||||
)
|
||||
|
|
@ -1816,14 +1801,17 @@ def remove_items(lib, query, album, delete, force):
|
|||
if delete:
|
||||
fmt = "$path - $title"
|
||||
prompt = "Really DELETE"
|
||||
prompt_all = "Really DELETE {} file{}{}".format(
|
||||
len(items), "s" if len(items) > 1 else "", album_str
|
||||
prompt_all = (
|
||||
"Really DELETE"
|
||||
f" {len(items)} file{'s' if len(items) > 1 else ''}{album_str}"
|
||||
)
|
||||
else:
|
||||
fmt = ""
|
||||
prompt = "Really remove from the library?"
|
||||
prompt_all = "Really remove {} item{}{} from the library?".format(
|
||||
len(items), "s" if len(items) > 1 else "", album_str
|
||||
prompt_all = (
|
||||
"Really remove"
|
||||
f" {len(items)} item{'s' if len(items) > 1 else ''}{album_str}"
|
||||
" from the library?"
|
||||
)
|
||||
|
||||
# Helpers for printing affected items
|
||||
|
|
@ -1892,7 +1880,7 @@ def show_stats(lib, query, exact):
|
|||
try:
|
||||
total_size += os.path.getsize(syspath(item.path))
|
||||
except OSError as exc:
|
||||
log.info("could not get size of {}: {}", item.path, exc)
|
||||
log.info("could not get size of {.path}: {}", item, exc)
|
||||
else:
|
||||
total_size += int(item.length * item.bitrate / 8)
|
||||
total_time += item.length
|
||||
|
|
@ -1902,27 +1890,17 @@ def show_stats(lib, query, exact):
|
|||
if item.album_id:
|
||||
albums.add(item.album_id)
|
||||
|
||||
size_str = "" + human_bytes(total_size)
|
||||
size_str = human_bytes(total_size)
|
||||
if exact:
|
||||
size_str += f" ({total_size} bytes)"
|
||||
|
||||
print_(
|
||||
"""Tracks: {}
|
||||
Total time: {}{}
|
||||
{}: {}
|
||||
Artists: {}
|
||||
Albums: {}
|
||||
Album artists: {}""".format(
|
||||
total_items,
|
||||
human_seconds(total_time),
|
||||
f" ({total_time:.2f} seconds)" if exact else "",
|
||||
"Total size" if exact else "Approximate total size",
|
||||
size_str,
|
||||
len(artists),
|
||||
len(albums),
|
||||
len(album_artists),
|
||||
),
|
||||
)
|
||||
print_(f"""Tracks: {total_items}
|
||||
Total time: {human_seconds(total_time)}
|
||||
{f" ({total_time:.2f} seconds)" if exact else ""}
|
||||
{"Total size" if exact else "Approximate total size"}: {size_str}
|
||||
Artists: {len(artists)}
|
||||
Albums: {len(albums)}
|
||||
Album artists: {len(album_artists)}""")
|
||||
|
||||
|
||||
def stats_func(lib, opts, args):
|
||||
|
|
@ -1943,7 +1921,7 @@ default_commands.append(stats_cmd)
|
|||
|
||||
|
||||
def show_version(lib, opts, args):
|
||||
print_("beets version %s" % beets.__version__)
|
||||
print_(f"beets version {beets.__version__}")
|
||||
print_(f"Python version {python_version()}")
|
||||
# Show plugins.
|
||||
names = sorted(p.name for p in plugins.find_plugins())
|
||||
|
|
@ -1977,7 +1955,7 @@ def modify_items(lib, mods, dels, query, write, move, album, confirm, inherit):
|
|||
|
||||
# Apply changes *temporarily*, preview them, and collect modified
|
||||
# objects.
|
||||
print_("Modifying {} {}s.".format(len(objs), "album" if album else "item"))
|
||||
print_(f"Modifying {len(objs)} {'album' if album else 'item'}s.")
|
||||
changed = []
|
||||
templates = {
|
||||
key: functemplate.template(value) for key, value in mods.items()
|
||||
|
|
@ -2007,7 +1985,7 @@ def modify_items(lib, mods, dels, query, write, move, album, confirm, inherit):
|
|||
extra = ""
|
||||
|
||||
changed = ui.input_select_objects(
|
||||
"Really modify%s" % extra,
|
||||
f"Really modify{extra}",
|
||||
changed,
|
||||
lambda o: print_and_modify(o, mods, dels),
|
||||
)
|
||||
|
|
@ -2159,7 +2137,7 @@ def move_items(
|
|||
act = "copy" if copy else "move"
|
||||
entity = "album" if album else "item"
|
||||
log.info(
|
||||
"{0} {1} {2}{3}{4}.",
|
||||
"{} {} {}{}{}.",
|
||||
action,
|
||||
len(objs),
|
||||
entity,
|
||||
|
|
@ -2185,7 +2163,7 @@ def move_items(
|
|||
else:
|
||||
if confirm:
|
||||
objs = ui.input_select_objects(
|
||||
"Really %s" % act,
|
||||
f"Really {act}",
|
||||
objs,
|
||||
lambda o: show_path_changes(
|
||||
[(o.path, o.destination(basedir=dest))]
|
||||
|
|
@ -2193,7 +2171,7 @@ def move_items(
|
|||
)
|
||||
|
||||
for obj in objs:
|
||||
log.debug("moving: {0}", util.displayable_path(obj.path))
|
||||
log.debug("moving: {.filepath}", obj)
|
||||
|
||||
if export:
|
||||
# Copy without affecting the database.
|
||||
|
|
@ -2213,9 +2191,7 @@ def move_func(lib, opts, args):
|
|||
if dest is not None:
|
||||
dest = normpath(dest)
|
||||
if not os.path.isdir(syspath(dest)):
|
||||
raise ui.UserError(
|
||||
"no such directory: {}".format(displayable_path(dest))
|
||||
)
|
||||
raise ui.UserError(f"no such directory: {displayable_path(dest)}")
|
||||
|
||||
move_items(
|
||||
lib,
|
||||
|
|
@ -2278,16 +2254,14 @@ def write_items(lib, query, pretend, force):
|
|||
for item in items:
|
||||
# Item deleted?
|
||||
if not os.path.exists(syspath(item.path)):
|
||||
log.info("missing file: {0}", util.displayable_path(item.path))
|
||||
log.info("missing file: {.filepath}", item)
|
||||
continue
|
||||
|
||||
# Get an Item object reflecting the "clean" (on-disk) state.
|
||||
try:
|
||||
clean_item = library.Item.from_path(item.path)
|
||||
except library.ReadError as exc:
|
||||
log.error(
|
||||
"error reading {0}: {1}", displayable_path(item.path), exc
|
||||
)
|
||||
log.error("error reading {.filepath}: {}", item, exc)
|
||||
continue
|
||||
|
||||
# Check for and display changes.
|
||||
|
|
@ -2480,30 +2454,27 @@ def completion_script(commands):
|
|||
yield "_beet() {\n"
|
||||
|
||||
# Command names
|
||||
yield " local commands='%s'\n" % " ".join(command_names)
|
||||
yield f" local commands={' '.join(command_names)!r}\n"
|
||||
yield "\n"
|
||||
|
||||
# Command aliases
|
||||
yield " local aliases='%s'\n" % " ".join(aliases.keys())
|
||||
yield f" local aliases={' '.join(aliases.keys())!r}\n"
|
||||
for alias, cmd in aliases.items():
|
||||
yield " local alias__{}={}\n".format(alias.replace("-", "_"), cmd)
|
||||
yield f" local alias__{alias.replace('-', '_')}={cmd}\n"
|
||||
yield "\n"
|
||||
|
||||
# Fields
|
||||
yield " fields='%s'\n" % " ".join(
|
||||
set(
|
||||
list(library.Item._fields.keys())
|
||||
+ list(library.Album._fields.keys())
|
||||
)
|
||||
)
|
||||
fields = library.Item._fields.keys() | library.Album._fields.keys()
|
||||
yield f" fields={' '.join(fields)!r}\n"
|
||||
|
||||
# Command options
|
||||
for cmd, opts in options.items():
|
||||
for option_type, option_list in opts.items():
|
||||
if option_list:
|
||||
option_list = " ".join(option_list)
|
||||
yield " local {}__{}='{}'\n".format(
|
||||
option_type, cmd.replace("-", "_"), option_list
|
||||
yield (
|
||||
" local"
|
||||
f" {option_type}__{cmd.replace('-', '_')}='{option_list}'\n"
|
||||
)
|
||||
|
||||
yield " _beet_dispatch\n"
|
||||
|
|
|
|||
|
|
@ -47,6 +47,7 @@ from typing import (
|
|||
NamedTuple,
|
||||
TypeVar,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
|
||||
from unidecode import unidecode
|
||||
|
|
@ -112,7 +113,7 @@ class HumanReadableError(Exception):
|
|||
elif hasattr(self.reason, "strerror"): # i.e., EnvironmentError
|
||||
return self.reason.strerror
|
||||
else:
|
||||
return '"{}"'.format(str(self.reason))
|
||||
return f'"{self.reason}"'
|
||||
|
||||
def get_message(self):
|
||||
"""Create the human-readable description of the error, sans
|
||||
|
|
@ -126,7 +127,7 @@ class HumanReadableError(Exception):
|
|||
"""
|
||||
if self.tb:
|
||||
logger.debug(self.tb)
|
||||
logger.error("{0}: {1}", self.error_kind, self.args[0])
|
||||
logger.error("{0.error_kind}: {0.args[0]}", self)
|
||||
|
||||
|
||||
class FilesystemError(HumanReadableError):
|
||||
|
|
@ -142,18 +143,16 @@ class FilesystemError(HumanReadableError):
|
|||
def get_message(self):
|
||||
# Use a nicer English phrasing for some specific verbs.
|
||||
if self.verb in ("move", "copy", "rename"):
|
||||
clause = "while {} {} to {}".format(
|
||||
self._gerund(),
|
||||
displayable_path(self.paths[0]),
|
||||
displayable_path(self.paths[1]),
|
||||
clause = (
|
||||
f"while {self._gerund()} {displayable_path(self.paths[0])} to"
|
||||
f" {displayable_path(self.paths[1])}"
|
||||
)
|
||||
elif self.verb in ("delete", "write", "create", "read"):
|
||||
clause = "while {} {}".format(
|
||||
self._gerund(), displayable_path(self.paths[0])
|
||||
)
|
||||
clause = f"while {self._gerund()} {displayable_path(self.paths[0])}"
|
||||
else:
|
||||
clause = "during {} of paths {}".format(
|
||||
self.verb, ", ".join(displayable_path(p) for p in self.paths)
|
||||
clause = (
|
||||
f"during {self.verb} of paths"
|
||||
f" {', '.join(displayable_path(p) for p in self.paths)}"
|
||||
)
|
||||
|
||||
return f"{self._reasonstr()} {clause}"
|
||||
|
|
@ -223,12 +222,12 @@ def sorted_walk(
|
|||
# Get all the directories and files at this level.
|
||||
try:
|
||||
contents = os.listdir(syspath(bytes_path))
|
||||
except OSError as exc:
|
||||
except OSError:
|
||||
if logger:
|
||||
logger.warning(
|
||||
"could not list directory {}: {}".format(
|
||||
displayable_path(bytes_path), exc.strerror
|
||||
)
|
||||
"could not list directory {}",
|
||||
displayable_path(bytes_path),
|
||||
exc_info=True,
|
||||
)
|
||||
return
|
||||
dirs = []
|
||||
|
|
@ -436,8 +435,8 @@ def syspath(path: PathLike, prefix: bool = True) -> str:
|
|||
if prefix and not str_path.startswith(WINDOWS_MAGIC_PREFIX):
|
||||
if str_path.startswith("\\\\"):
|
||||
# UNC path. Final path should look like \\?\UNC\...
|
||||
str_path = "UNC" + str_path[1:]
|
||||
str_path = WINDOWS_MAGIC_PREFIX + str_path
|
||||
str_path = f"UNC{str_path[1:]}"
|
||||
str_path = f"{WINDOWS_MAGIC_PREFIX}{str_path}"
|
||||
|
||||
return str_path
|
||||
|
||||
|
|
@ -509,8 +508,8 @@ def move(path: bytes, dest: bytes, replace: bool = False):
|
|||
basename = os.path.basename(bytestring_path(dest))
|
||||
dirname = os.path.dirname(bytestring_path(dest))
|
||||
tmp = tempfile.NamedTemporaryFile(
|
||||
suffix=syspath(b".beets", prefix=False),
|
||||
prefix=syspath(b"." + basename + b".", prefix=False),
|
||||
suffix=".beets",
|
||||
prefix=f".{os.fsdecode(basename)}.",
|
||||
dir=syspath(dirname),
|
||||
delete=False,
|
||||
)
|
||||
|
|
@ -719,7 +718,7 @@ def truncate_path(str_path: str) -> str:
|
|||
path = Path(str_path)
|
||||
parent_parts = [truncate_str(p, max_length) for p in path.parts[:-1]]
|
||||
stem = truncate_str(path.stem, max_length - len(path.suffix))
|
||||
return str(Path(*parent_parts, stem)) + path.suffix
|
||||
return f"{Path(*parent_parts, stem)}{path.suffix}"
|
||||
|
||||
|
||||
def _legalize_stage(
|
||||
|
|
@ -838,9 +837,10 @@ def get_most_common_tags(
|
|||
"country",
|
||||
"media",
|
||||
"albumdisambig",
|
||||
"data_source",
|
||||
]
|
||||
for field in fields:
|
||||
values = [item[field] for item in items if item]
|
||||
values = [item.get(field) for item in items if item]
|
||||
likelies[field], freq = plurality(values)
|
||||
consensus[field] = freq == len(values)
|
||||
|
||||
|
|
@ -1053,7 +1053,7 @@ def par_map(transform: Callable[[T], Any], items: Sequence[T]) -> None:
|
|||
pool.join()
|
||||
|
||||
|
||||
class cached_classproperty:
|
||||
class cached_classproperty(Generic[T]):
|
||||
"""Descriptor implementing cached class properties.
|
||||
|
||||
Provides class-level dynamic property behavior where the getter function is
|
||||
|
|
@ -1061,9 +1061,9 @@ class cached_classproperty:
|
|||
instance properties, this operates on the class rather than instances.
|
||||
"""
|
||||
|
||||
cache: ClassVar[dict[tuple[Any, str], Any]] = {}
|
||||
cache: ClassVar[dict[tuple[type[object], str], object]] = {}
|
||||
|
||||
name: str
|
||||
name: str = ""
|
||||
|
||||
# Ideally, we would like to use `Callable[[type[T]], Any]` here,
|
||||
# however, `mypy` is unable to see this as a **class** property, and thinks
|
||||
|
|
@ -1079,21 +1079,21 @@ class cached_classproperty:
|
|||
# "Callable[[Album], ...]"; expected "Callable[[type[Album]], ...]"
|
||||
#
|
||||
# Therefore, we just use `Any` here, which is not ideal, but works.
|
||||
def __init__(self, getter: Callable[[Any], Any]) -> None:
|
||||
def __init__(self, getter: Callable[..., T]) -> None:
|
||||
"""Initialize the descriptor with the property getter function."""
|
||||
self.getter = getter
|
||||
self.getter: Callable[..., T] = getter
|
||||
|
||||
def __set_name__(self, owner: Any, name: str) -> None:
|
||||
def __set_name__(self, owner: object, name: str) -> None:
|
||||
"""Capture the attribute name this descriptor is assigned to."""
|
||||
self.name = name
|
||||
|
||||
def __get__(self, instance: Any, owner: type[Any]) -> Any:
|
||||
def __get__(self, instance: object, owner: type[object]) -> T:
|
||||
"""Compute and cache if needed, and return the property value."""
|
||||
key = owner, self.name
|
||||
key: tuple[type[object], str] = owner, self.name
|
||||
if key not in self.cache:
|
||||
self.cache[key] = self.getter(owner)
|
||||
|
||||
return self.cache[key]
|
||||
return cast(T, self.cache[key])
|
||||
|
||||
|
||||
class LazySharedInstance(Generic[T]):
|
||||
|
|
|
|||
|
|
@ -54,7 +54,7 @@ def resize_url(url: str, maxwidth: int, quality: int = 0) -> str:
|
|||
if quality > 0:
|
||||
params["q"] = quality
|
||||
|
||||
return "{}?{}".format(PROXY_URL, urlencode(params))
|
||||
return f"{PROXY_URL}?{urlencode(params)}"
|
||||
|
||||
|
||||
class LocalBackendNotAvailableError(Exception):
|
||||
|
|
@ -255,7 +255,7 @@ class IMBackend(LocalBackend):
|
|||
path_out = get_temp_filename(__name__, "resize_IM_", path_in)
|
||||
|
||||
log.debug(
|
||||
"artresizer: ImageMagick resizing {0} to {1}",
|
||||
"artresizer: ImageMagick resizing {} to {}",
|
||||
displayable_path(path_in),
|
||||
displayable_path(path_out),
|
||||
)
|
||||
|
|
@ -287,7 +287,7 @@ class IMBackend(LocalBackend):
|
|||
util.command_output(cmd)
|
||||
except subprocess.CalledProcessError:
|
||||
log.warning(
|
||||
"artresizer: IM convert failed for {0}",
|
||||
"artresizer: IM convert failed for {}",
|
||||
displayable_path(path_in),
|
||||
)
|
||||
return path_in
|
||||
|
|
@ -306,9 +306,9 @@ class IMBackend(LocalBackend):
|
|||
except subprocess.CalledProcessError as exc:
|
||||
log.warning("ImageMagick size query failed")
|
||||
log.debug(
|
||||
"`convert` exited with (status {}) when "
|
||||
"`convert` exited with (status {.returncode}) when "
|
||||
"getting size with command {}:\n{}",
|
||||
exc.returncode,
|
||||
exc,
|
||||
cmd,
|
||||
exc.output.strip(),
|
||||
)
|
||||
|
|
@ -441,8 +441,8 @@ class IMBackend(LocalBackend):
|
|||
convert_proc.wait()
|
||||
if convert_proc.returncode:
|
||||
log.debug(
|
||||
"ImageMagick convert failed with status {}: {!r}",
|
||||
convert_proc.returncode,
|
||||
"ImageMagick convert failed with status {.returncode}: {!r}",
|
||||
convert_proc,
|
||||
convert_stderr,
|
||||
)
|
||||
return None
|
||||
|
|
@ -452,7 +452,7 @@ class IMBackend(LocalBackend):
|
|||
if compare_proc.returncode:
|
||||
if compare_proc.returncode != 1:
|
||||
log.debug(
|
||||
"ImageMagick compare failed: {0}, {1}",
|
||||
"ImageMagick compare failed: {}, {}",
|
||||
displayable_path(im2),
|
||||
displayable_path(im1),
|
||||
)
|
||||
|
|
@ -472,7 +472,7 @@ class IMBackend(LocalBackend):
|
|||
log.debug("IM output is not a number: {0!r}", out_str)
|
||||
return None
|
||||
|
||||
log.debug("ImageMagick compare score: {0}", phash_diff)
|
||||
log.debug("ImageMagick compare score: {}", phash_diff)
|
||||
return phash_diff <= compare_threshold
|
||||
|
||||
@property
|
||||
|
|
@ -523,7 +523,7 @@ class PILBackend(LocalBackend):
|
|||
from PIL import Image
|
||||
|
||||
log.debug(
|
||||
"artresizer: PIL resizing {0} to {1}",
|
||||
"artresizer: PIL resizing {} to {}",
|
||||
displayable_path(path_in),
|
||||
displayable_path(path_out),
|
||||
)
|
||||
|
|
@ -552,7 +552,7 @@ class PILBackend(LocalBackend):
|
|||
for i in range(5):
|
||||
# 5 attempts is an arbitrary choice
|
||||
filesize = os.stat(syspath(path_out)).st_size
|
||||
log.debug("PIL Pass {0} : Output size: {1}B", i, filesize)
|
||||
log.debug("PIL Pass {} : Output size: {}B", i, filesize)
|
||||
if filesize <= max_filesize:
|
||||
return path_out
|
||||
# The relationship between filesize & quality will be
|
||||
|
|
@ -569,7 +569,7 @@ class PILBackend(LocalBackend):
|
|||
progressive=False,
|
||||
)
|
||||
log.warning(
|
||||
"PIL Failed to resize file to below {0}B", max_filesize
|
||||
"PIL Failed to resize file to below {}B", max_filesize
|
||||
)
|
||||
return path_out
|
||||
|
||||
|
|
@ -577,7 +577,7 @@ class PILBackend(LocalBackend):
|
|||
return path_out
|
||||
except OSError:
|
||||
log.error(
|
||||
"PIL cannot create thumbnail for '{0}'",
|
||||
"PIL cannot create thumbnail for '{}'",
|
||||
displayable_path(path_in),
|
||||
)
|
||||
return path_in
|
||||
|
|
@ -696,7 +696,7 @@ class ArtResizer:
|
|||
for backend_cls in BACKEND_CLASSES:
|
||||
try:
|
||||
self.local_method = backend_cls()
|
||||
log.debug(f"artresizer: method is {self.local_method.NAME}")
|
||||
log.debug("artresizer: method is {.local_method.NAME}", self)
|
||||
break
|
||||
except LocalBackendNotAvailableError:
|
||||
continue
|
||||
|
|
|
|||
|
|
@ -559,7 +559,7 @@ def spawn(coro):
|
|||
and child coroutines run concurrently.
|
||||
"""
|
||||
if not isinstance(coro, types.GeneratorType):
|
||||
raise ValueError("%s is not a coroutine" % coro)
|
||||
raise ValueError(f"{coro} is not a coroutine")
|
||||
return SpawnEvent(coro)
|
||||
|
||||
|
||||
|
|
@ -569,7 +569,7 @@ def call(coro):
|
|||
returns a value using end(), then this event returns that value.
|
||||
"""
|
||||
if not isinstance(coro, types.GeneratorType):
|
||||
raise ValueError("%s is not a coroutine" % coro)
|
||||
raise ValueError(f"{coro} is not a coroutine")
|
||||
return DelegationEvent(coro)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -136,7 +136,7 @@ class Symbol:
|
|||
self.original = original
|
||||
|
||||
def __repr__(self):
|
||||
return "Symbol(%s)" % repr(self.ident)
|
||||
return f"Symbol({self.ident!r})"
|
||||
|
||||
def evaluate(self, env):
|
||||
"""Evaluate the symbol in the environment, returning a Unicode
|
||||
|
|
@ -152,7 +152,7 @@ class Symbol:
|
|||
def translate(self):
|
||||
"""Compile the variable lookup."""
|
||||
ident = self.ident
|
||||
expr = ex_rvalue(VARIABLE_PREFIX + ident)
|
||||
expr = ex_rvalue(f"{VARIABLE_PREFIX}{ident}")
|
||||
return [expr], {ident}, set()
|
||||
|
||||
|
||||
|
|
@ -165,9 +165,7 @@ class Call:
|
|||
self.original = original
|
||||
|
||||
def __repr__(self):
|
||||
return "Call({}, {}, {})".format(
|
||||
repr(self.ident), repr(self.args), repr(self.original)
|
||||
)
|
||||
return f"Call({self.ident!r}, {self.args!r}, {self.original!r})"
|
||||
|
||||
def evaluate(self, env):
|
||||
"""Evaluate the function call in the environment, returning a
|
||||
|
|
@ -180,7 +178,7 @@ class Call:
|
|||
except Exception as exc:
|
||||
# Function raised exception! Maybe inlining the name of
|
||||
# the exception will help debug.
|
||||
return "<%s>" % str(exc)
|
||||
return f"<{exc}>"
|
||||
return str(out)
|
||||
else:
|
||||
return self.original
|
||||
|
|
@ -213,7 +211,7 @@ class Call:
|
|||
)
|
||||
)
|
||||
|
||||
subexpr_call = ex_call(FUNCTION_PREFIX + self.ident, arg_exprs)
|
||||
subexpr_call = ex_call(f"{FUNCTION_PREFIX}{self.ident}", arg_exprs)
|
||||
return [subexpr_call], varnames, funcnames
|
||||
|
||||
|
||||
|
|
@ -226,7 +224,7 @@ class Expression:
|
|||
self.parts = parts
|
||||
|
||||
def __repr__(self):
|
||||
return "Expression(%s)" % (repr(self.parts))
|
||||
return f"Expression({self.parts!r})"
|
||||
|
||||
def evaluate(self, env):
|
||||
"""Evaluate the entire expression in the environment, returning
|
||||
|
|
@ -298,9 +296,6 @@ class Parser:
|
|||
GROUP_CLOSE,
|
||||
ESCAPE_CHAR,
|
||||
)
|
||||
special_char_re = re.compile(
|
||||
r"[%s]|\Z" % "".join(re.escape(c) for c in special_chars)
|
||||
)
|
||||
escapable_chars = (SYMBOL_DELIM, FUNC_DELIM, GROUP_CLOSE, ARG_SEP)
|
||||
terminator_chars = (GROUP_CLOSE,)
|
||||
|
||||
|
|
@ -312,24 +307,18 @@ class Parser:
|
|||
"""
|
||||
# Append comma (ARG_SEP) to the list of special characters only when
|
||||
# parsing function arguments.
|
||||
extra_special_chars = ()
|
||||
special_char_re = self.special_char_re
|
||||
if self.in_argument:
|
||||
extra_special_chars = (ARG_SEP,)
|
||||
special_char_re = re.compile(
|
||||
r"[%s]|\Z"
|
||||
% "".join(
|
||||
re.escape(c)
|
||||
for c in self.special_chars + extra_special_chars
|
||||
)
|
||||
)
|
||||
extra_special_chars = (ARG_SEP,) if self.in_argument else ()
|
||||
special_chars = (*self.special_chars, *extra_special_chars)
|
||||
special_char_re = re.compile(
|
||||
rf"[{''.join(map(re.escape, special_chars))}]|\Z"
|
||||
)
|
||||
|
||||
text_parts = []
|
||||
|
||||
while self.pos < len(self.string):
|
||||
char = self.string[self.pos]
|
||||
|
||||
if char not in self.special_chars + extra_special_chars:
|
||||
if char not in special_chars:
|
||||
# A non-special character. Skip to the next special
|
||||
# character, treating the interstice as literal text.
|
||||
next_pos = (
|
||||
|
|
@ -566,9 +555,9 @@ class Template:
|
|||
|
||||
argnames = []
|
||||
for varname in varnames:
|
||||
argnames.append(VARIABLE_PREFIX + varname)
|
||||
argnames.append(f"{VARIABLE_PREFIX}{varname}")
|
||||
for funcname in funcnames:
|
||||
argnames.append(FUNCTION_PREFIX + funcname)
|
||||
argnames.append(f"{FUNCTION_PREFIX}{funcname}")
|
||||
|
||||
func = compile_func(
|
||||
argnames,
|
||||
|
|
@ -578,9 +567,9 @@ class Template:
|
|||
def wrapper_func(values={}, functions={}):
|
||||
args = {}
|
||||
for varname in varnames:
|
||||
args[VARIABLE_PREFIX + varname] = values[varname]
|
||||
args[f"{VARIABLE_PREFIX}{varname}"] = values[varname]
|
||||
for funcname in funcnames:
|
||||
args[FUNCTION_PREFIX + funcname] = functions[funcname]
|
||||
args[f"{FUNCTION_PREFIX}{funcname}"] = functions[funcname]
|
||||
parts = func(**args)
|
||||
return "".join(parts)
|
||||
|
||||
|
|
|
|||
|
|
@ -58,7 +58,8 @@ def extract_release_id(source: str, id_: str) -> str | None:
|
|||
source_pattern = PATTERN_BY_SOURCE[source.lower()]
|
||||
except KeyError:
|
||||
log.debug(
|
||||
f"Unknown source '{source}' for ID extraction. Returning id/url as-is."
|
||||
"Unknown source '{}' for ID extraction. Returning id/url as-is.",
|
||||
source,
|
||||
)
|
||||
return id_
|
||||
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ def human_seconds_short(interval):
|
|||
string.
|
||||
"""
|
||||
interval = int(interval)
|
||||
return "%i:%02i" % (interval // 60, interval % 60)
|
||||
return f"{interval // 60}:{interval % 60:02d}"
|
||||
|
||||
|
||||
def human_bytes(size):
|
||||
|
|
|
|||
3
beetsplug/_utils/__init__.py
Normal file
3
beetsplug/_utils/__init__.py
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
from . import art, vfs
|
||||
|
||||
__all__ = ["art", "vfs"]
|
||||
|
|
@ -38,11 +38,7 @@ def get_art(log, item):
|
|||
try:
|
||||
mf = mediafile.MediaFile(syspath(item.path))
|
||||
except mediafile.UnreadableFileError as exc:
|
||||
log.warning(
|
||||
"Could not extract art from {0}: {1}",
|
||||
displayable_path(item.path),
|
||||
exc,
|
||||
)
|
||||
log.warning("Could not extract art from {.filepath}: {}", item, exc)
|
||||
return
|
||||
|
||||
return mf.art
|
||||
|
|
@ -83,16 +79,16 @@ def embed_item(
|
|||
|
||||
# Get the `Image` object from the file.
|
||||
try:
|
||||
log.debug("embedding {0}", displayable_path(imagepath))
|
||||
log.debug("embedding {}", displayable_path(imagepath))
|
||||
image = mediafile_image(imagepath, maxwidth)
|
||||
except OSError as exc:
|
||||
log.warning("could not read image file: {0}", exc)
|
||||
log.warning("could not read image file: {}", exc)
|
||||
return
|
||||
|
||||
# Make sure the image kind is safe (some formats only support PNG
|
||||
# and JPEG).
|
||||
if image.mime_type not in ("image/jpeg", "image/png"):
|
||||
log.info("not embedding image of unsupported type: {}", image.mime_type)
|
||||
log.info("not embedding image of unsupported type: {.mime_type}", image)
|
||||
return
|
||||
|
||||
item.try_write(path=itempath, tags={"images": [image]}, id3v23=id3v23)
|
||||
|
|
@ -110,11 +106,11 @@ def embed_album(
|
|||
"""Embed album art into all of the album's items."""
|
||||
imagepath = album.artpath
|
||||
if not imagepath:
|
||||
log.info("No album art present for {0}", album)
|
||||
log.info("No album art present for {}", album)
|
||||
return
|
||||
if not os.path.isfile(syspath(imagepath)):
|
||||
log.info(
|
||||
"Album art not found at {0} for {1}",
|
||||
"Album art not found at {} for {}",
|
||||
displayable_path(imagepath),
|
||||
album,
|
||||
)
|
||||
|
|
@ -122,7 +118,7 @@ def embed_album(
|
|||
if maxwidth:
|
||||
imagepath = resize_image(log, imagepath, maxwidth, quality)
|
||||
|
||||
log.info("Embedding album art into {0}", album)
|
||||
log.info("Embedding album art into {}", album)
|
||||
|
||||
for item in album.items():
|
||||
embed_item(
|
||||
|
|
@ -143,8 +139,7 @@ def resize_image(log, imagepath, maxwidth, quality):
|
|||
specified quality level.
|
||||
"""
|
||||
log.debug(
|
||||
"Resizing album art to {0} pixels wide and encoding at quality \
|
||||
level {1}",
|
||||
"Resizing album art to {} pixels wide and encoding at quality level {}",
|
||||
maxwidth,
|
||||
quality,
|
||||
)
|
||||
|
|
@ -184,18 +179,18 @@ def extract(log, outpath, item):
|
|||
art = get_art(log, item)
|
||||
outpath = bytestring_path(outpath)
|
||||
if not art:
|
||||
log.info("No album art present in {0}, skipping.", item)
|
||||
log.info("No album art present in {}, skipping.", item)
|
||||
return
|
||||
|
||||
# Add an extension to the filename.
|
||||
ext = mediafile.image_extension(art)
|
||||
if not ext:
|
||||
log.warning("Unknown image type in {0}.", displayable_path(item.path))
|
||||
log.warning("Unknown image type in {.filepath}.", item)
|
||||
return
|
||||
outpath += bytestring_path("." + ext)
|
||||
outpath += bytestring_path(f".{ext}")
|
||||
|
||||
log.info(
|
||||
"Extracting album art from: {0} to: {1}",
|
||||
"Extracting album art from: {} to: {}",
|
||||
item,
|
||||
displayable_path(outpath),
|
||||
)
|
||||
|
|
@ -213,7 +208,7 @@ def extract_first(log, outpath, items):
|
|||
|
||||
def clear(log, lib, query):
|
||||
items = lib.items(query)
|
||||
log.info("Clearing album art from {0} items", len(items))
|
||||
log.info("Clearing album art from {} items", len(items))
|
||||
for item in items:
|
||||
log.debug("Clearing art for {0}", item)
|
||||
log.debug("Clearing art for {}", item)
|
||||
item.try_write(tags={"images": None})
|
||||
|
|
@ -16,17 +16,25 @@
|
|||
libraries.
|
||||
"""
|
||||
|
||||
from typing import Any, NamedTuple
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, NamedTuple
|
||||
|
||||
from beets import util
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from beets.library import Library
|
||||
|
||||
|
||||
class Node(NamedTuple):
|
||||
files: dict[str, Any]
|
||||
dirs: dict[str, Any]
|
||||
files: dict[str, int]
|
||||
# Maps filenames to Item ids.
|
||||
|
||||
dirs: dict[str, Node]
|
||||
# Maps directory names to child nodes.
|
||||
|
||||
|
||||
def _insert(node, path, itemid):
|
||||
def _insert(node: Node, path: list[str], itemid: int):
|
||||
"""Insert an item into a virtual filesystem node."""
|
||||
if len(path) == 1:
|
||||
# Last component. Insert file.
|
||||
|
|
@ -40,7 +48,7 @@ def _insert(node, path, itemid):
|
|||
_insert(node.dirs[dirname], rest, itemid)
|
||||
|
||||
|
||||
def libtree(lib):
|
||||
def libtree(lib: Library) -> Node:
|
||||
"""Generates a filesystem-like directory tree for the files
|
||||
contained in `lib`. Filesystem nodes are (files, dirs) named
|
||||
tuples in which both components are dictionaries. The first
|
||||
|
|
@ -42,9 +42,7 @@ def call(args):
|
|||
try:
|
||||
return util.command_output(args).stdout
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise ABSubmitError(
|
||||
"{} exited with status {}".format(args[0], e.returncode)
|
||||
)
|
||||
raise ABSubmitError(f"{args[0]} exited with status {e.returncode}")
|
||||
|
||||
|
||||
class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
||||
|
|
@ -63,9 +61,7 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
|||
# Explicit path to extractor
|
||||
if not os.path.isfile(self.extractor):
|
||||
raise ui.UserError(
|
||||
"Extractor command does not exist: {0}.".format(
|
||||
self.extractor
|
||||
)
|
||||
f"Extractor command does not exist: {self.extractor}."
|
||||
)
|
||||
else:
|
||||
# Implicit path to extractor, search for it in path
|
||||
|
|
@ -101,8 +97,8 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
|||
"with an HTTP scheme"
|
||||
)
|
||||
elif base_url[-1] != "/":
|
||||
base_url = base_url + "/"
|
||||
self.url = base_url + "{mbid}/low-level"
|
||||
base_url = f"{base_url}/"
|
||||
self.url = f"{base_url}{{mbid}}/low-level"
|
||||
|
||||
def commands(self):
|
||||
cmd = ui.Subcommand(
|
||||
|
|
@ -122,8 +118,10 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
|||
dest="pretend_fetch",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="pretend to perform action, but show \
|
||||
only files which would be processed",
|
||||
help=(
|
||||
"pretend to perform action, but show only files which would be"
|
||||
" processed"
|
||||
),
|
||||
)
|
||||
cmd.func = self.command
|
||||
return [cmd]
|
||||
|
|
|
|||
|
|
@@ -97,7 +97,7 @@ class AcousticPlugin(plugins.BeetsPlugin):
                 "with an HTTP scheme"
             )
         elif self.base_url[-1] != "/":
-            self.base_url = self.base_url + "/"
+            self.base_url = f"{self.base_url}/"

         if self.config["auto"]:
             self.register_listener("import_task_files", self.import_task_files)

@@ -153,7 +153,7 @@ class AcousticPlugin(plugins.BeetsPlugin):
         try:
             data.update(res.json())
         except ValueError:
-            self._log.debug("Invalid Response: {}", res.text)
+            self._log.debug("Invalid Response: {.text}", res)
             return {}

         return data

@@ -300,4 +300,4 @@ class AcousticPlugin(plugins.BeetsPlugin):
 def _generate_urls(base_url, mbid):
     """Generates AcousticBrainz end point urls for given `mbid`."""
     for level in LEVELS:
-        yield base_url + mbid + level
+        yield f"{base_url}{mbid}{level}"
@@ -236,14 +236,14 @@ class AURADocument:
         # Not the last page so work out links.next url
         if not self.args:
             # No existing arguments, so current page is 0
-            next_url = request.url + "?page=1"
+            next_url = f"{request.url}?page=1"
         elif not self.args.get("page", None):
             # No existing page argument, so add one to the end
-            next_url = request.url + "&page=1"
+            next_url = f"{request.url}&page=1"
         else:
             # Increment page token by 1
             next_url = request.url.replace(
-                f"page={page}", "page={}".format(page + 1)
+                f"page={page}", f"page={page + 1}"
             )
         # Get only the items in the page range
         data = [

@@ -427,9 +427,7 @@ class TrackDocument(AURADocument):
             return self.error(
                 "404 Not Found",
                 "No track with the requested id.",
-                "There is no track with an id of {} in the library.".format(
-                    track_id
-                ),
+                f"There is no track with an id of {track_id} in the library.",
             )
         return self.single_resource_document(
             self.get_resource_object(self.lib, track)

@@ -513,9 +511,7 @@ class AlbumDocument(AURADocument):
             return self.error(
                 "404 Not Found",
                 "No album with the requested id.",
-                "There is no album with an id of {} in the library.".format(
-                    album_id
-                ),
+                f"There is no album with an id of {album_id} in the library.",
             )
         return self.single_resource_document(
             self.get_resource_object(self.lib, album)

@@ -600,9 +596,7 @@ class ArtistDocument(AURADocument):
             return self.error(
                 "404 Not Found",
                 "No artist with the requested id.",
-                "There is no artist with an id of {} in the library.".format(
-                    artist_id
-                ),
+                f"There is no artist with an id of {artist_id} in the library.",
             )
         return self.single_resource_document(artist_resource)


@@ -703,7 +697,7 @@ class ImageDocument(AURADocument):
         relationships = {}
         # Split id into [parent_type, parent_id, filename]
         id_split = image_id.split("-")
-        relationships[id_split[0] + "s"] = {
+        relationships[f"{id_split[0]}s"] = {
             "data": [{"type": id_split[0], "id": id_split[1]}]
         }


@@ -727,9 +721,7 @@ class ImageDocument(AURADocument):
             return self.error(
                 "404 Not Found",
                 "No image with the requested id.",
-                "There is no image with an id of {} in the library.".format(
-                    image_id
-                ),
+                f"There is no image with an id of {image_id} in the library.",
             )
         return self.single_resource_document(image_resource)


@@ -775,9 +767,7 @@ def audio_file(track_id):
         return AURADocument.error(
             "404 Not Found",
             "No track with the requested id.",
-            "There is no track with an id of {} in the library.".format(
-                track_id
-            ),
+            f"There is no track with an id of {track_id} in the library.",
         )

     path = os.fsdecode(track.path)

@@ -785,9 +775,8 @@ def audio_file(track_id):
         return AURADocument.error(
             "404 Not Found",
             "No audio file for the requested track.",
-            (
-                "There is no audio file for track {} at the expected location"
-            ).format(track_id),
+            f"There is no audio file for track {track_id} at the expected"
+            " location",
         )

     file_mimetype = guess_type(path)[0]

@@ -795,10 +784,8 @@ def audio_file(track_id):
         return AURADocument.error(
             "500 Internal Server Error",
             "Requested audio file has an unknown mimetype.",
-            (
-                "The audio file for track {} has an unknown mimetype. "
-                "Its file extension is {}."
-            ).format(track_id, path.split(".")[-1]),
+            f"The audio file for track {track_id} has an unknown mimetype. "
+            f"Its file extension is {path.split('.')[-1]}.",
         )

     # Check that the Accept header contains the file's mimetype

@@ -810,10 +797,8 @@ def audio_file(track_id):
         return AURADocument.error(
             "406 Not Acceptable",
             "Unsupported MIME type or bitrate parameter in Accept header.",
-            (
-                "The audio file for track {} is only available as {} and "
-                "bitrate parameters are not supported."
-            ).format(track_id, file_mimetype),
+            f"The audio file for track {track_id} is only available as"
+            f" {file_mimetype} and bitrate parameters are not supported.",
         )

     return send_file(

@@ -896,9 +881,7 @@ def image_file(image_id):
         return AURADocument.error(
             "404 Not Found",
             "No image with the requested id.",
-            "There is no image with an id of {} in the library".format(
-                image_id
-            ),
+            f"There is no image with an id of {image_id} in the library",
         )
     return send_file(img_path)
@@ -110,9 +110,7 @@ class BadFiles(BeetsPlugin):
         self._log.debug("checking path: {}", dpath)
         if not os.path.exists(item.path):
             ui.print_(
-                "{}: file does not exist".format(
-                    ui.colorize("text_error", dpath)
-                )
+                f"{ui.colorize('text_error', dpath)}: file does not exist"
             )

         # Run the checker against the file if one is found

@@ -129,37 +127,32 @@ class BadFiles(BeetsPlugin):
         except CheckerCommandError as e:
             if e.errno == errno.ENOENT:
                 self._log.error(
-                    "command not found: {} when validating file: {}",
-                    e.checker,
-                    e.path,
+                    "command not found: {0.checker} when validating file: {0.path}",
+                    e,
                 )
             else:
-                self._log.error("error invoking {}: {}", e.checker, e.msg)
+                self._log.error("error invoking {0.checker}: {0.msg}", e)
             return []

         error_lines = []

         if status > 0:
             error_lines.append(
-                "{}: checker exited with status {}".format(
-                    ui.colorize("text_error", dpath), status
-                )
+                f"{ui.colorize('text_error', dpath)}: checker exited with"
+                f" status {status}"
             )
             for line in output:
                 error_lines.append(f"  {line}")

         elif errors > 0:
             error_lines.append(
-                "{}: checker found {} errors or warnings".format(
-                    ui.colorize("text_warning", dpath), errors
-                )
+                f"{ui.colorize('text_warning', dpath)}: checker found"
+                f" {status} errors or warnings"
             )
             for line in output:
                 error_lines.append(f"  {line}")
         elif self.verbose:
-            error_lines.append(
-                "{}: ok".format(ui.colorize("text_success", dpath))
-            )
+            error_lines.append(f"{ui.colorize('text_success', dpath)}: ok")

         return error_lines


@@ -180,9 +173,8 @@ class BadFiles(BeetsPlugin):
     def on_import_task_before_choice(self, task, session):
         if hasattr(task, "_badfiles_checks_failed"):
             ui.print_(
-                "{} one or more files failed checks:".format(
-                    ui.colorize("text_warning", "BAD")
-                )
+                f"{ui.colorize('text_warning', 'BAD')} one or more files failed"
+                " checks:"
             )
             for error in task._badfiles_checks_failed:
                 for error_line in error:
@@ -110,7 +110,7 @@ class BeatportClient:
         :returns: OAuth resource owner key and secret as unicode
         """
         self.api.parse_authorization_response(
-            "https://beets.io/auth?" + auth_data
+            f"https://beets.io/auth?{auth_data}"
         )
         access_data = self.api.fetch_access_token(
             self._make_url("/identity/1/oauth/access-token")

@@ -200,8 +200,8 @@ class BeatportClient:
     def _make_url(self, endpoint: str) -> str:
         """Get complete URL for a given API endpoint."""
         if not endpoint.startswith("/"):
-            endpoint = "/" + endpoint
-        return self._api_base + endpoint
+            endpoint = f"/{endpoint}"
+        return f"{self._api_base}{endpoint}"

     def _get(self, endpoint: str, **kwargs) -> list[JSONDict]:
         """Perform a GET request on a given API endpoint.

@@ -212,14 +212,10 @@ class BeatportClient:
         try:
             response = self.api.get(self._make_url(endpoint), params=kwargs)
         except Exception as e:
-            raise BeatportAPIError(
-                "Error connecting to Beatport API: {}".format(e)
-            )
+            raise BeatportAPIError(f"Error connecting to Beatport API: {e}")
         if not response:
             raise BeatportAPIError(
-                "Error {0.status_code} for '{0.request.path_url}".format(
-                    response
-                )
+                f"Error {response.status_code} for '{response.request.path_url}"
             )
         return response.json()["results"]


@@ -275,15 +271,14 @@ class BeatportRelease(BeatportObject):
         self.genre = data.get("genre")

         if "slug" in data:
-            self.url = "https://beatport.com/release/{}/{}".format(
-                data["slug"], data["id"]
+            self.url = (
+                f"https://beatport.com/release/{data['slug']}/{data['id']}"
             )

     def __str__(self) -> str:
-        return "<BeatportRelease: {} - {} ({})>".format(
-            self.artists_str(),
-            self.name,
-            self.catalog_number,
+        return (
+            "<BeatportRelease: "
+            f"{self.artists_str()} - {self.name} ({self.catalog_number})>"
         )


@@ -311,9 +306,7 @@ class BeatportTrack(BeatportObject):
         except ValueError:
             pass
         if "slug" in data:
-            self.url = "https://beatport.com/track/{}/{}".format(
-                data["slug"], data["id"]
-            )
+            self.url = f"https://beatport.com/track/{data['slug']}/{data['id']}"
         self.track_number = data.get("trackNumber")
         self.bpm = data.get("bpm")
         self.initial_key = str((data.get("key") or {}).get("shortName"))

@@ -335,7 +328,6 @@ class BeatportPlugin(MetadataSourcePlugin):
                 "apikey": "57713c3906af6f5def151b33601389176b37b429",
                 "apisecret": "b3fe08c93c80aefd749fe871a16cd2bb32e2b954",
                 "tokenfile": "beatport_token.json",
-                "source_weight": 0.5,
             }
         )
         self.config["apikey"].redact = True

@@ -373,7 +365,7 @@ class BeatportPlugin(MetadataSourcePlugin):
         try:
             url = auth_client.get_authorize_url()
         except AUTH_ERRORS as e:
-            self._log.debug("authentication error: {0}", e)
+            self._log.debug("authentication error: {}", e)
             raise beets.ui.UserError("communication with Beatport failed")

         beets.ui.print_("To authenticate with Beatport, visit:")

@@ -384,11 +376,11 @@ class BeatportPlugin(MetadataSourcePlugin):
         try:
             token, secret = auth_client.get_access_token(data)
         except AUTH_ERRORS as e:
-            self._log.debug("authentication error: {0}", e)
+            self._log.debug("authentication error: {}", e)
             raise beets.ui.UserError("Beatport token request failed")

         # Save the token for later use.
-        self._log.debug("Beatport token {0}, secret {1}", token, secret)
+        self._log.debug("Beatport token {}, secret {}", token, secret)
         with open(self._tokenfile(), "w") as f:
             json.dump({"token": token, "secret": secret}, f)


@@ -412,7 +404,7 @@ class BeatportPlugin(MetadataSourcePlugin):
         try:
             yield from self._get_releases(query)
         except BeatportAPIError as e:
-            self._log.debug("API Error: {0} (query: {1})", e, query)
+            self._log.debug("API Error: {} (query: {})", e, query)
             return

     def item_candidates(

@@ -422,14 +414,14 @@ class BeatportPlugin(MetadataSourcePlugin):
         try:
             return self._get_tracks(query)
         except BeatportAPIError as e:
-            self._log.debug("API Error: {0} (query: {1})", e, query)
+            self._log.debug("API Error: {} (query: {})", e, query)
             return []

     def album_for_id(self, album_id: str):
         """Fetches a release by its Beatport ID and returns an AlbumInfo object
         or None if the query is not a valid ID or release is not found.
         """
-        self._log.debug("Searching for release {0}", album_id)
+        self._log.debug("Searching for release {}", album_id)

         if not (release_id := self._extract_id(album_id)):
             self._log.debug("Not a valid Beatport release ID.")

@@ -444,7 +436,7 @@ class BeatportPlugin(MetadataSourcePlugin):
         """Fetches a track by its Beatport ID and returns a TrackInfo object
         or None if the track is not a valid Beatport ID or track is not found.
         """
-        self._log.debug("Searching for track {0}", track_id)
+        self._log.debug("Searching for track {}", track_id)
         # TODO: move to extractor
         match = re.search(r"(^|beatport\.com/track/.+/)(\d+)$", track_id)
         if not match:
@@ -17,10 +17,11 @@
 import cProfile
 import timeit

-from beets import importer, library, plugins, ui, vfs
+from beets import importer, library, plugins, ui
 from beets.autotag import match
 from beets.plugins import BeetsPlugin
 from beets.util.functemplate import Template
+from beetsplug._utils import vfs


 def aunique_benchmark(lib, prof):
@@ -30,10 +30,11 @@ from typing import TYPE_CHECKING

 import beets
 import beets.ui
-from beets import dbcore, logging, vfs
+from beets import dbcore, logging
 from beets.library import Item
 from beets.plugins import BeetsPlugin
 from beets.util import as_string, bluelet
+from beetsplug._utils import vfs

 if TYPE_CHECKING:
     from beets.dbcore.query import Query

@@ -52,7 +53,7 @@ except ImportError as e:
 PROTOCOL_VERSION = "0.16.0"
 BUFSIZE = 1024

-HELLO = "OK MPD %s" % PROTOCOL_VERSION
+HELLO = f"OK MPD {PROTOCOL_VERSION}"
 CLIST_BEGIN = "command_list_begin"
 CLIST_VERBOSE_BEGIN = "command_list_ok_begin"
 CLIST_END = "command_list_end"

@@ -282,7 +283,7 @@ class BaseServer:
         if not self.ctrl_sock:
             self.ctrl_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
             self.ctrl_sock.connect((self.ctrl_host, self.ctrl_port))
-        self.ctrl_sock.sendall((message + "\n").encode("utf-8"))
+        self.ctrl_sock.sendall((f"{message}\n").encode("utf-8"))

     def _send_event(self, event):
         """Notify subscribed connections of an event."""

@@ -376,13 +377,13 @@ class BaseServer:
         if self.password and not conn.authenticated:
             # Not authenticated. Show limited list of commands.
             for cmd in SAFE_COMMANDS:
-                yield "command: " + cmd
+                yield f"command: {cmd}"

         else:
             # Authenticated. Show all commands.
             for func in dir(self):
                 if func.startswith("cmd_"):
-                    yield "command: " + func[4:]
+                    yield f"command: {func[4:]}"

     def cmd_notcommands(self, conn):
         """Lists all unavailable commands."""

@@ -392,7 +393,7 @@ class BaseServer:
                 if func.startswith("cmd_"):
                     cmd = func[4:]
                     if cmd not in SAFE_COMMANDS:
-                        yield "command: " + cmd
+                        yield f"command: {cmd}"

         else:
             # Authenticated. No commands are unavailable.

@@ -406,22 +407,22 @@ class BaseServer:
         playlist, playlistlength, and xfade.
         """
         yield (
-            "repeat: " + str(int(self.repeat)),
-            "random: " + str(int(self.random)),
-            "consume: " + str(int(self.consume)),
-            "single: " + str(int(self.single)),
-            "playlist: " + str(self.playlist_version),
-            "playlistlength: " + str(len(self.playlist)),
-            "mixrampdb: " + str(self.mixrampdb),
+            f"repeat: {int(self.repeat)}",
+            f"random: {int(self.random)}",
+            f"consume: {int(self.consume)}",
+            f"single: {int(self.single)}",
+            f"playlist: {self.playlist_version}",
+            f"playlistlength: {len(self.playlist)}",
+            f"mixrampdb: {self.mixrampdb}",
         )

         if self.volume > 0:
-            yield "volume: " + str(self.volume)
+            yield f"volume: {self.volume}"

         if not math.isnan(self.mixrampdelay):
-            yield "mixrampdelay: " + str(self.mixrampdelay)
+            yield f"mixrampdelay: {self.mixrampdelay}"
         if self.crossfade > 0:
-            yield "xfade: " + str(self.crossfade)
+            yield f"xfade: {self.crossfade}"

         if self.current_index == -1:
             state = "stop"

@@ -429,20 +430,20 @@ class BaseServer:
             state = "pause"
         else:
             state = "play"
-        yield "state: " + state
+        yield f"state: {state}"

         if self.current_index != -1:  # i.e., paused or playing
             current_id = self._item_id(self.playlist[self.current_index])
-            yield "song: " + str(self.current_index)
-            yield "songid: " + str(current_id)
+            yield f"song: {self.current_index}"
+            yield f"songid: {current_id}"
             if len(self.playlist) > self.current_index + 1:
                 # If there's a next song, report its index too.
                 next_id = self._item_id(self.playlist[self.current_index + 1])
-                yield "nextsong: " + str(self.current_index + 1)
-                yield "nextsongid: " + str(next_id)
+                yield f"nextsong: {self.current_index + 1}"
+                yield f"nextsongid: {next_id}"

         if self.error:
-            yield "error: " + self.error
+            yield f"error: {self.error}"

     def cmd_clearerror(self, conn):
         """Removes the persistent error state of the server. This

@@ -522,7 +523,7 @@ class BaseServer:

     def cmd_replay_gain_status(self, conn):
         """Get the replaygain mode."""
-        yield "replay_gain_mode: " + str(self.replay_gain_mode)
+        yield f"replay_gain_mode: {self.replay_gain_mode}"

     def cmd_clear(self, conn):
         """Clear the playlist."""

@@ -643,8 +644,8 @@ class BaseServer:
         Also a dummy implementation.
         """
         for idx, track in enumerate(self.playlist):
-            yield "cpos: " + str(idx)
-            yield "Id: " + str(track.id)
+            yield f"cpos: {idx}"
+            yield f"Id: {track.id}"

     def cmd_currentsong(self, conn):
         """Sends information about the currently-playing song."""

@@ -759,11 +760,11 @@ class Connection:
         """Create a new connection for the accepted socket `client`."""
         self.server = server
         self.sock = sock
-        self.address = "{}:{}".format(*sock.sock.getpeername())
+        self.address = ":".join(map(str, sock.sock.getpeername()))

     def debug(self, message, kind=" "):
         """Log a debug message about this connection."""
-        self.server._log.debug("{}[{}]: {}", kind, self.address, message)
+        self.server._log.debug("{}[{.address}]: {}", kind, self, message)

     def run(self):
         pass

@@ -899,9 +900,7 @@ class MPDConnection(Connection):
                 return
             except BPDIdleError as e:
                 self.idle_subscriptions = e.subsystems
-                self.debug(
-                    "awaiting: {}".format(" ".join(e.subsystems)), kind="z"
-                )
+                self.debug(f"awaiting: {' '.join(e.subsystems)}", kind="z")
                 yield bluelet.call(self.server.dispatch_events())


@@ -913,7 +912,7 @@ class ControlConnection(Connection):
         super().__init__(server, sock)

     def debug(self, message, kind=" "):
-        self.server._log.debug("CTRL {}[{}]: {}", kind, self.address, message)
+        self.server._log.debug("CTRL {}[{.address}]: {}", kind, self, message)

     def run(self):
         """Listen for control commands and delegate to `ctrl_*` methods."""

@@ -933,7 +932,7 @@ class ControlConnection(Connection):
                     func = command.delegate("ctrl_", self)
                     yield bluelet.call(func(*command.args))
                 except (AttributeError, TypeError) as e:
-                    yield self.send("ERROR: {}".format(e.args[0]))
+                    yield self.send(f"ERROR: {e.args[0]}")
                 except Exception:
                     yield self.send(
                         ["ERROR: server error", traceback.format_exc().rstrip()]

@@ -992,7 +991,7 @@ class Command:
         of arguments.
         """
         # Attempt to get correct command function.
-        func_name = prefix + self.name
+        func_name = f"{prefix}{self.name}"
         if not hasattr(target, func_name):
             raise AttributeError(f'unknown command "{self.name}"')
         func = getattr(target, func_name)

@@ -1011,7 +1010,7 @@ class Command:
         # If the command accepts a variable number of arguments skip the check.
         if wrong_num and not argspec.varargs:
             raise TypeError(
-                'wrong number of arguments for "{}"'.format(self.name),
+                f'wrong number of arguments for "{self.name}"',
                 self.name,
             )
@@ -1110,10 +1109,8 @@ class Server(BaseServer):
         self.lib = library
         self.player = gstplayer.GstPlayer(self.play_finished)
         self.cmd_update(None)
-        log.info("Server ready and listening on {}:{}".format(host, port))
-        log.debug(
-            "Listening for control signals on {}:{}".format(host, ctrl_port)
-        )
+        log.info("Server ready and listening on {}:{}", host, port)
+        log.debug("Listening for control signals on {}:{}", host, ctrl_port)

     def run(self):
         self.player.run()

@@ -1128,23 +1125,21 @@ class Server(BaseServer):

     def _item_info(self, item):
         info_lines = [
-            "file: " + as_string(item.destination(relative_to_libdir=True)),
-            "Time: " + str(int(item.length)),
-            "duration: " + f"{item.length:.3f}",
-            "Id: " + str(item.id),
+            f"file: {as_string(item.destination(relative_to_libdir=True))}",
+            f"Time: {int(item.length)}",
+            "duration: {item.length:.3f}",
+            f"Id: {item.id}",
         ]

         try:
             pos = self._id_to_index(item.id)
-            info_lines.append("Pos: " + str(pos))
+            info_lines.append(f"Pos: {pos}")
         except ArgumentNotFoundError:
             # Don't include position if not in playlist.
             pass

         for tagtype, field in self.tagtype_map.items():
-            info_lines.append(
-                "{}: {}".format(tagtype, str(getattr(item, field)))
-            )
+            info_lines.append(f"{tagtype}: {getattr(item, field)}")

         return info_lines

@@ -1207,7 +1202,7 @@ class Server(BaseServer):

     def _path_join(self, p1, p2):
         """Smashes together two BPD paths."""
-        out = p1 + "/" + p2
+        out = f"{p1}/{p2}"
         return out.replace("//", "/").replace("//", "/")

     def cmd_lsinfo(self, conn, path="/"):

@@ -1225,7 +1220,7 @@ class Server(BaseServer):
             if dirpath.startswith("/"):
                 # Strip leading slash (libmpc rejects this).
                 dirpath = dirpath[1:]
-            yield "directory: %s" % dirpath
+            yield f"directory: {dirpath}"

     def _listall(self, basepath, node, info=False):
         """Helper function for recursive listing. If info, show

@@ -1237,7 +1232,7 @@ class Server(BaseServer):
                 item = self.lib.get_item(node)
                 yield self._item_info(item)
             else:
-                yield "file: " + basepath
+                yield f"file: {basepath}"
         else:
             # List a directory. Recurse into both directories and files.
             for name, itemid in sorted(node.files.items()):

@@ -1246,7 +1241,7 @@ class Server(BaseServer):
                 yield from self._listall(newpath, itemid, info)
             for name, subdir in sorted(node.dirs.items()):
                 newpath = self._path_join(basepath, name)
-                yield "directory: " + newpath
+                yield f"directory: {newpath}"
                 yield from self._listall(newpath, subdir, info)

     def cmd_listall(self, conn, path="/"):

@@ -1280,7 +1275,7 @@ class Server(BaseServer):
         for item in self._all_items(self._resolve_path(path)):
             self.playlist.append(item)
             if send_id:
-                yield "Id: " + str(item.id)
+                yield f"Id: {item.id}"
         self.playlist_version += 1
         self._send_event("playlist")

@@ -1302,20 +1297,13 @@ class Server(BaseServer):
             item = self.playlist[self.current_index]

             yield (
-                "bitrate: " + str(item.bitrate / 1000),
-                "audio: {}:{}:{}".format(
-                    str(item.samplerate),
-                    str(item.bitdepth),
-                    str(item.channels),
-                ),
+                f"bitrate: {item.bitrate / 1000}",
+                f"audio: {item.samplerate}:{item.bitdepth}:{item.channels}",
             )

             (pos, total) = self.player.time()
             yield (
-                "time: {}:{}".format(
-                    str(int(pos)),
-                    str(int(total)),
-                ),
+                f"time: {int(pos)}:{int(total)}",
                 "elapsed: " + f"{pos:.3f}",
                 "duration: " + f"{total:.3f}",
             )

@@ -1335,13 +1323,13 @@ class Server(BaseServer):
             artists, albums, songs, totaltime = tx.query(statement)[0]

         yield (
-            "artists: " + str(artists),
-            "albums: " + str(albums),
-            "songs: " + str(songs),
-            "uptime: " + str(int(time.time() - self.startup_time)),
-            "playtime: " + "0",  # Missing.
-            "db_playtime: " + str(int(totaltime)),
-            "db_update: " + str(int(self.updated_time)),
+            f"artists: {artists}",
+            f"albums: {albums}",
+            f"songs: {songs}",
+            f"uptime: {int(time.time() - self.startup_time)}",
+            "playtime: 0",  # Missing.
+            f"db_playtime: {int(totaltime)}",
+            f"db_update: {int(self.updated_time)}",
         )

     def cmd_decoders(self, conn):

@@ -1383,7 +1371,7 @@ class Server(BaseServer):
         searching.
         """
         for tag in self.tagtype_map:
-            yield "tagtype: " + tag
+            yield f"tagtype: {tag}"

     def _tagtype_lookup(self, tag):
         """Uses `tagtype_map` to look up the beets column name for an

@@ -1458,12 +1446,9 @@ class Server(BaseServer):

         clause, subvals = query.clause()
         statement = (
-            "SELECT DISTINCT "
-            + show_key
-            + " FROM items WHERE "
-            + clause
-            + " ORDER BY "
-            + show_key
+            f"SELECT DISTINCT {show_key}"
+            f" FROM items WHERE {clause}"
+            f" ORDER BY {show_key}"
         )
         self._log.debug(statement)
         with self.lib.transaction() as tx:

@@ -1473,7 +1458,7 @@ class Server(BaseServer):
                 if not row[0]:
                     # Skip any empty values of the field.
                     continue
-                yield show_tag_canon + ": " + str(row[0])
+                yield f"{show_tag_canon}: {row[0]}"

     def cmd_count(self, conn, tag, value):
         """Returns the number and total time of songs matching the

@@ -1487,8 +1472,8 @@ class Server(BaseServer):
         ):
             songs += 1
             playtime += item.length
-        yield "songs: " + str(songs)
-        yield "playtime: " + str(int(playtime))
+        yield f"songs: {songs}"
+        yield f"playtime: {int(playtime)}"

     # Persistent playlist manipulation. In MPD this is an optional feature so
     # these dummy implementations match MPD's behaviour with the feature off.
@@ -129,7 +129,7 @@ class GstPlayer:
         self.player.set_state(Gst.State.NULL)
         if isinstance(path, str):
             path = path.encode("utf-8")
-        uri = "file://" + urllib.parse.quote(path)
+        uri = f"file://{urllib.parse.quote(path)}"
         self.player.set_property("uri", uri)
         self.player.set_state(Gst.State.PLAYING)
         self.playing = True
@@ -73,12 +73,12 @@ class BPMPlugin(BeetsPlugin):

         item = items[0]
         if item["bpm"]:
-            self._log.info("Found bpm {0}", item["bpm"])
+            self._log.info("Found bpm {}", item["bpm"])
             if not overwrite:
                 return

         self._log.info(
-            "Press Enter {0} times to the rhythm or Ctrl-D to exit",
+            "Press Enter {} times to the rhythm or Ctrl-D to exit",
             self.config["max_strokes"].get(int),
         )
         new_bpm = bpm(self.config["max_strokes"].get(int))

@@ -86,4 +86,4 @@ class BPMPlugin(BeetsPlugin):
         if write:
             item.try_write()
         item.store()
-        self._log.info("Added new bpm {0}", item["bpm"])
+        self._log.info("Added new bpm {}", item["bpm"])
@@ -82,8 +82,8 @@ class BPSyncPlugin(BeetsPlugin):

             if not self.is_beatport_track(item):
                 self._log.info(
-                    "Skipping non-{} singleton: {}",
-                    self.beatport_plugin.data_source,
+                    "Skipping non-{.beatport_plugin.data_source} singleton: {}",
+                    self,
                     item,
                 )
                 continue

@@ -107,8 +107,8 @@ class BPSyncPlugin(BeetsPlugin):
             return False
         if not album.mb_albumid.isnumeric():
             self._log.info(
-                "Skipping album with invalid {} ID: {}",
-                self.beatport_plugin.data_source,
+                "Skipping album with invalid {.beatport_plugin.data_source} ID: {}",
+                self,
                 album,
             )
             return False

@@ -117,8 +117,8 @@ class BPSyncPlugin(BeetsPlugin):
             return items
         if not all(self.is_beatport_track(item) for item in items):
             self._log.info(
-                "Skipping non-{} release: {}",
-                self.beatport_plugin.data_source,
+                "Skipping non-{.beatport_plugin.data_source} release: {}",
+                self,
                 album,
             )
             return False

@@ -139,9 +139,7 @@ class BPSyncPlugin(BeetsPlugin):
             albuminfo = self.beatport_plugin.album_for_id(album.mb_albumid)
             if not albuminfo:
                 self._log.info(
-                    "Release ID {} not found for album {}",
-                    album.mb_albumid,
-                    album,
+                    "Release ID {0.mb_albumid} not found for album {0}", album
                 )
                 continue
@@ -41,7 +41,7 @@ def span_from_str(span_str):
     def normalize_year(d, yearfrom):
         """Convert string to a 4 digits year"""
         if yearfrom < 100:
-            raise BucketError("%d must be expressed on 4 digits" % yearfrom)
+            raise BucketError(f"{yearfrom} must be expressed on 4 digits")

         # if two digits only, pick closest year that ends by these two
         # digits starting from yearfrom

@@ -55,14 +55,13 @@ def span_from_str(span_str):
     years = [int(x) for x in re.findall(r"\d+", span_str)]
     if not years:
         raise ui.UserError(
-            "invalid range defined for year bucket '%s': no "
-            "year found" % span_str
+            f"invalid range defined for year bucket {span_str!r}: no year found"
         )
     try:
         years = [normalize_year(x, years[0]) for x in years]
     except BucketError as exc:
         raise ui.UserError(
-            "invalid range defined for year bucket '%s': %s" % (span_str, exc)
+            f"invalid range defined for year bucket {span_str!r}: {exc}"
        )

     res = {"from": years[0], "str": span_str}

@@ -125,22 +124,19 @@ def str2fmt(s):
         "fromnchars": len(m.group("fromyear")),
         "tonchars": len(m.group("toyear")),
     }
-    res["fmt"] = "{}%s{}{}{}".format(
-        m.group("bef"),
-        m.group("sep"),
-        "%s" if res["tonchars"] else "",
-        m.group("after"),
+    res["fmt"] = (
+        f"{m['bef']}{{}}{m['sep']}{'{}' if res['tonchars'] else ''}{m['after']}"
     )
     return res


 def format_span(fmt, yearfrom, yearto, fromnchars, tonchars):
     """Return a span string representation."""
-    args = str(yearfrom)[-fromnchars:]
+    args = [str(yearfrom)[-fromnchars:]]
     if tonchars:
-        args = (str(yearfrom)[-fromnchars:], str(yearto)[-tonchars:])
+        args.append(str(yearto)[-tonchars:])

-    return fmt % args
+    return fmt.format(*args)


 def extract_modes(spans):

@@ -169,14 +165,12 @@ def build_alpha_spans(alpha_spans_str, alpha_regexs):
         else:
             raise ui.UserError(
                 "invalid range defined for alpha bucket "
-                "'%s': no alphanumeric character found" % elem
+                f"'{elem}': no alphanumeric character found"
             )
         spans.append(
             re.compile(
-                "^["
-                + ASCII_DIGITS[begin_index : end_index + 1]
-                + ASCII_DIGITS[begin_index : end_index + 1].upper()
-                + "]"
+                rf"^[{ASCII_DIGITS[begin_index : end_index + 1]}]",
+                re.IGNORECASE,
             )
         )
     return spans
@@ -90,7 +90,7 @@ def acoustid_match(log, path):
         duration, fp = acoustid.fingerprint_file(util.syspath(path))
     except acoustid.FingerprintGenerationError as exc:
         log.error(
-            "fingerprinting of {0} failed: {1}",
+            "fingerprinting of {} failed: {}",
             util.displayable_path(repr(path)),
             exc,
         )

@@ -98,15 +98,17 @@ def acoustid_match(log, path):
     fp = fp.decode()
     _fingerprints[path] = fp
     try:
-        res = acoustid.lookup(API_KEY, fp, duration, meta="recordings releases")
+        res = acoustid.lookup(
+            API_KEY, fp, duration, meta="recordings releases", timeout=10
+        )
     except acoustid.AcoustidError as exc:
         log.debug(
-            "fingerprint matching {0} failed: {1}",
+            "fingerprint matching {} failed: {}",
             util.displayable_path(repr(path)),
             exc,
         )
         return None
-    log.debug("chroma: fingerprinted {0}", util.displayable_path(repr(path)))
+    log.debug("chroma: fingerprinted {}", util.displayable_path(repr(path)))

     # Ensure the response is usable and parse it.
     if res["status"] != "ok" or not res.get("results"):

@@ -144,7 +146,7 @@ def acoustid_match(log, path):
     release_ids = [rel["id"] for rel in releases]

     log.debug(
-        "matched recordings {0} on releases {1}", recording_ids, release_ids
+        "matched recordings {} on releases {}", recording_ids, release_ids
     )
     _matches[path] = recording_ids, release_ids

@@ -209,7 +211,7 @@ class AcoustidPlugin(MetadataSourcePlugin):
             if album:
                 albums.append(album)

-        self._log.debug("acoustid album candidates: {0}", len(albums))
+        self._log.debug("acoustid album candidates: {}", len(albums))
         return albums

     def item_candidates(self, item, artist, title) -> Iterable[TrackInfo]:

@@ -222,7 +224,7 @@ class AcoustidPlugin(MetadataSourcePlugin):
             track = self.mb.track_for_id(recording_id)
             if track:
                 tracks.append(track)
-        self._log.debug("acoustid item candidates: {0}", len(tracks))
+        self._log.debug("acoustid item candidates: {}", len(tracks))
         return tracks

     def album_for_id(self, *args, **kwargs):

@@ -290,11 +292,11 @@ def submit_items(log, userkey, items, chunksize=64):

     def submit_chunk():
         """Submit the current accumulated fingerprint data."""
-        log.info("submitting {0} fingerprints", len(data))
+        log.info("submitting {} fingerprints", len(data))
         try:
-            acoustid.submit(API_KEY, userkey, data)
+            acoustid.submit(API_KEY, userkey, data, timeout=10)
         except acoustid.AcoustidError as exc:
-            log.warning("acoustid submission error: {0}", exc)
+            log.warning("acoustid submission error: {}", exc)
         del data[:]

     for item in items:

@@ -341,31 +343,23 @@ def fingerprint_item(log, item, write=False):
     """
     # Get a fingerprint and length for this track.
     if not item.length:
-        log.info("{0}: no duration available", util.displayable_path(item.path))
+        log.info("{.filepath}: no duration available", item)
     elif item.acoustid_fingerprint:
         if write:
-            log.info(
-                "{0}: fingerprint exists, skipping",
-                util.displayable_path(item.path),
-            )
+            log.info("{.filepath}: fingerprint exists, skipping", item)
         else:
-            log.info(
-                "{0}: using existing fingerprint",
-                util.displayable_path(item.path),
-            )
+            log.info("{.filepath}: using existing fingerprint", item)
             return item.acoustid_fingerprint
     else:
-        log.info("{0}: fingerprinting", util.displayable_path(item.path))
+        log.info("{.filepath}: fingerprinting", item)
         try:
             _, fp = acoustid.fingerprint_file(util.syspath(item.path))
             item.acoustid_fingerprint = fp.decode()
             if write:
-                log.info(
-                    "{0}: writing fingerprint", util.displayable_path(item.path)
-                )
+                log.info("{.filepath}: writing fingerprint", item)
                 item.try_write()
             if item._db:
                 item.store()
             return item.acoustid_fingerprint
         except acoustid.FingerprintGenerationError as exc:
-            log.info("fingerprint generation failed: {0}", exc)
+            log.info("fingerprint generation failed: {}", exc)
@@ -25,12 +25,13 @@ from string import Template
 import mediafile
 from confuse import ConfigTypeError, Optional

-from beets import art, config, plugins, ui, util
+from beets import config, plugins, ui, util
 from beets.library import Item, parse_query_string
 from beets.plugins import BeetsPlugin
 from beets.util import par_map
 from beets.util.artresizer import ArtResizer
 from beets.util.m3u import M3UFile
+from beetsplug._utils import art

 _fs_lock = threading.Lock()
 _temp_files = []  # Keep track of temporary transcoded files for deletion.

@@ -64,9 +65,7 @@ def get_format(fmt=None):
         command = format_info["command"]
         extension = format_info.get("extension", fmt)
     except KeyError:
-        raise ui.UserError(
-            'convert: format {} needs the "command" field'.format(fmt)
-        )
+        raise ui.UserError(f'convert: format {fmt} needs the "command" field')
     except ConfigTypeError:
         command = config["convert"]["formats"][fmt].get(str)
         extension = fmt

@@ -77,8 +76,8 @@ def get_format(fmt=None):
             command = config["convert"]["command"].as_str()
         elif "opts" in keys:
             # Undocumented option for backwards compatibility with < 1.3.1.
-            command = "ffmpeg -i $source -y {} $dest".format(
-                config["convert"]["opts"].as_str()
+            command = (
+                f"ffmpeg -i $source -y {config['convert']['opts'].as_str()} $dest"
             )
         if "extension" in keys:
             extension = config["convert"]["extension"].as_str()

@@ -123,20 +122,28 @@ class ConvertPlugin(BeetsPlugin):
                 "threads": os.cpu_count(),
                 "format": "mp3",
                 "id3v23": "inherit",
+                "write_metadata": True,
                 "formats": {
                     "aac": {
-                        "command": "ffmpeg -i $source -y -vn -acodec aac "
-                        "-aq 1 $dest",
+                        "command": (
+                            "ffmpeg -i $source -y -vn -acodec aac -aq 1 $dest"
+                        ),
                         "extension": "m4a",
                     },
                     "alac": {
-                        "command": "ffmpeg -i $source -y -vn -acodec alac $dest",
+                        "command": (
+                            "ffmpeg -i $source -y -vn -acodec alac $dest"
+                        ),
                         "extension": "m4a",
                     },
                     "flac": "ffmpeg -i $source -y -vn -acodec flac $dest",
                     "mp3": "ffmpeg -i $source -y -vn -aq 2 $dest",
-                    "opus": "ffmpeg -i $source -y -vn -acodec libopus -ab 96k $dest",
-                    "ogg": "ffmpeg -i $source -y -vn -acodec libvorbis -aq 3 $dest",
+                    "opus": (
+                        "ffmpeg -i $source -y -vn -acodec libopus -ab 96k $dest"
+                    ),
+                    "ogg": (
+                        "ffmpeg -i $source -y -vn -acodec libvorbis -aq 3 $dest"
+                    ),
                     "wma": "ffmpeg -i $source -y -vn -acodec wmav2 -vn $dest",
                 },
                 "max_bitrate": None,

@@ -171,16 +178,17 @@ class ConvertPlugin(BeetsPlugin):
             "--threads",
             action="store",
             type="int",
-            help="change the number of threads, \
-defaults to maximum available processors",
+            help=(
+                "change the number of threads, defaults to maximum available"
+                " processors"
+            ),
         )
         cmd.parser.add_option(
             "-k",
             "--keep-new",
             action="store_true",
             dest="keep_new",
-            help="keep only the converted \
-and move the old files",
+            help="keep only the converted and move the old files",
         )
         cmd.parser.add_option(
             "-d", "--dest", action="store", help="set the destination directory"

@@ -204,16 +212,16 @@ class ConvertPlugin(BeetsPlugin):
             "--link",
             action="store_true",
             dest="link",
-            help="symlink files that do not \
-need transcoding.",
+            help="symlink files that do not need transcoding.",
         )
         cmd.parser.add_option(
             "-H",
             "--hardlink",
             action="store_true",
             dest="hardlink",
-            help="hardlink files that do not \
-need transcoding. Overrides --link.",
+            help=(
+                "hardlink files that do not need transcoding. Overrides --link."
+            ),
         )
         cmd.parser.add_option(
             "-m",

@@ -282,7 +290,7 @@ class ConvertPlugin(BeetsPlugin):
         quiet = self.config["quiet"].get(bool)

         if not quiet and not pretend:
-            self._log.info("Encoding {0}", util.displayable_path(source))
+            self._log.info("Encoding {}", util.displayable_path(source))

         command = os.fsdecode(command)
         source = os.fsdecode(source)

@@ -301,7 +309,7 @@ class ConvertPlugin(BeetsPlugin):
                 encode_cmd.append(os.fsdecode(args[i]))

         if pretend:
-            self._log.info("{0}", " ".join(args))
+            self._log.info("{}", " ".join(args))
             return

         try:
@@ -309,26 +317,25 @@ class ConvertPlugin(BeetsPlugin):
         except subprocess.CalledProcessError as exc:
             # Something went wrong (probably Ctrl+C), remove temporary files
             self._log.info(
-                "Encoding {0} failed. Cleaning up...",
+                "Encoding {} failed. Cleaning up...",
                 util.displayable_path(source),
             )
             self._log.debug(
-                "Command {0} exited with status {1}: {2}",
+                "Command {0} exited with status {1.returncode}: {1.output}",
                 args,
-                exc.returncode,
-                exc.output,
+                exc,
             )
             util.remove(dest)
             util.prune_dirs(os.path.dirname(dest))
             raise
         except OSError as exc:
             raise ui.UserError(
-                "convert: couldn't invoke '{}': {}".format(" ".join(args), exc)
+                f"convert: couldn't invoke {' '.join(args)!r}: {exc}"
             )

         if not quiet and not pretend:
             self._log.info(
-                "Finished encoding {0}", util.displayable_path(source)
+                "Finished encoding {}", util.displayable_path(source)
             )

     def convert_item(

@@ -356,7 +363,7 @@ class ConvertPlugin(BeetsPlugin):
             try:
                 mediafile.MediaFile(util.syspath(item.path))
             except mediafile.UnreadableFileError as exc:
-                self._log.error("Could not open file to convert: {0}", exc)
+                self._log.error("Could not open file to convert: {}", exc)
                 continue

             # When keeping the new file in the library, we first move the

@@ -382,21 +389,20 @@ class ConvertPlugin(BeetsPlugin):

             if os.path.exists(util.syspath(dest)):
                 self._log.info(
-                    "Skipping {0} (target file exists)",
-                    util.displayable_path(item.path),
+                    "Skipping {.filepath} (target file exists)", item
                 )
                 continue

             if keep_new:
                 if pretend:
                     self._log.info(
-                        "mv {0} {1}",
-                        util.displayable_path(item.path),
+                        "mv {.filepath} {}",
+                        item,
                         util.displayable_path(original),
                     )
                 else:
                     self._log.info(
-                        "Moving to {0}", util.displayable_path(original)
+                        "Moving to {}", util.displayable_path(original)
                     )
                     util.move(item.path, original)

@@ -412,10 +418,10 @@ class ConvertPlugin(BeetsPlugin):
                     msg = "ln" if hardlink else ("ln -s" if link else "cp")

                     self._log.info(
-                        "{2} {0} {1}",
+                        "{} {} {}",
+                        msg,
                         util.displayable_path(original),
                         util.displayable_path(converted),
-                        msg,
                     )
                 else:
                     # No transcoding necessary.

@@ -425,9 +431,7 @@ class ConvertPlugin(BeetsPlugin):
                         else ("Linking" if link else "Copying")
                     )

-                    self._log.info(
-                        "{1} {0}", util.displayable_path(item.path), msg
-                    )
+                    self._log.info("{} {.filepath}", msg, item)

                     if hardlink:
                         util.hardlink(original, converted)

@@ -443,8 +447,9 @@ class ConvertPlugin(BeetsPlugin):
             if id3v23 == "inherit":
                 id3v23 = None

-            # Write tags from the database to the converted file.
-            item.try_write(path=converted, id3v23=id3v23)
+            # Write tags from the database to the file if requested
+            if self.config["write_metadata"].get(bool):
+                item.try_write(path=converted, id3v23=id3v23)

             if keep_new:
                 # If we're keeping the transcoded file, read it again (after

@@ -458,8 +463,7 @@ class ConvertPlugin(BeetsPlugin):
             if album and album.artpath:
                 maxwidth = self._get_art_resize(album.artpath)
                 self._log.debug(
-                    "embedding album art from {}",
-                    util.displayable_path(album.artpath),
+                    "embedding album art from {.art_filepath}", album
                 )
                 art.embed_item(
                     self._log,

@@ -517,8 +521,7 @@ class ConvertPlugin(BeetsPlugin):

         if os.path.exists(util.syspath(dest)):
             self._log.info(
-                "Skipping {0} (target file exists)",
-                util.displayable_path(album.artpath),
+                "Skipping {.art_filepath} (target file exists)", album
             )
             return

@@ -528,8 +531,8 @@ class ConvertPlugin(BeetsPlugin):
         # Either copy or resize (while copying) the image.
         if maxwidth is not None:
             self._log.info(
-                "Resizing cover art from {0} to {1}",
-                util.displayable_path(album.artpath),
+                "Resizing cover art from {.art_filepath} to {}",
+                album,
                 util.displayable_path(dest),
             )
             if not pretend:

@@ -539,10 +542,10 @@ class ConvertPlugin(BeetsPlugin):
                 msg = "ln" if hardlink else ("ln -s" if link else "cp")

                 self._log.info(
-                    "{2} {0} {1}",
-                    util.displayable_path(album.artpath),
-                    util.displayable_path(dest),
+                    "{} {.art_filepath} {}",
                     msg,
+                    album,
+                    util.displayable_path(dest),
                 )
             else:
                 msg = (

@@ -552,10 +555,10 @@ class ConvertPlugin(BeetsPlugin):
                 )

                 self._log.info(
-                    "{2} cover art from {0} to {1}",
-                    util.displayable_path(album.artpath),
-                    util.displayable_path(dest),
+                    "{} cover art from {.art_filepath} to {}",
                     msg,
+                    album,
+                    util.displayable_path(dest),
                 )
                 if hardlink:
                     util.hardlink(album.artpath, dest)

@@ -616,7 +619,7 @@ class ConvertPlugin(BeetsPlugin):
         # Playlist paths are understood as relative to the dest directory.
         pl_normpath = util.normpath(playlist)
         pl_dir = os.path.dirname(pl_normpath)
-        self._log.info("Creating playlist file {0}", pl_normpath)
+        self._log.info("Creating playlist file {}", pl_normpath)
         # Generates a list of paths to media files, ensures the paths are
         # relative to the playlist's location and translates the unicode
         # strings we get from item.destination to bytes.

@@ -644,7 +647,7 @@ class ConvertPlugin(BeetsPlugin):
         tmpdir = self.config["tmpdir"].get()
         if tmpdir:
             tmpdir = os.fsdecode(util.bytestring_path(tmpdir))
-        fd, dest = tempfile.mkstemp(os.fsdecode(b"." + ext), dir=tmpdir)
+        fd, dest = tempfile.mkstemp(f".{os.fsdecode(ext)}", dir=tmpdir)
         os.close(fd)
         dest = util.bytestring_path(dest)
         _temp_files.append(dest)  # Delete the transcode later.

@@ -666,7 +669,7 @@ class ConvertPlugin(BeetsPlugin):
         if self.config["delete_originals"]:
             self._log.log(
                 logging.DEBUG if self.config["quiet"] else logging.INFO,
-                "Removing original file {0}",
+                "Removing original file {}",
                 source_path,
             )
             util.remove(source_path, False)
@@ -21,7 +21,6 @@ import time
 from typing import TYPE_CHECKING, Literal, Sequence

 import requests
-import unidecode

 from beets import ui
 from beets.autotag import AlbumInfo, TrackInfo

@@ -50,6 +49,9 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
     album_url = "https://api.deezer.com/album/"
     track_url = "https://api.deezer.com/track/"

+    def __init__(self) -> None:
+        super().__init__()
+
     def commands(self):
         """Add beet UI commands to interact with Deezer."""
         deezer_update_cmd = ui.Subcommand(

@@ -97,7 +99,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
                 f"Invalid `release_date` returned by {self.data_source} API: "
                 f"{release_date!r}"
             )
-        tracks_obj = self.fetch_data(self.album_url + deezer_id + "/tracks")
+        tracks_obj = self.fetch_data(f"{self.album_url}{deezer_id}/tracks")
         if tracks_obj is None:
             return None
         try:

@@ -170,7 +172,7 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
         # the track's disc).
         if not (
             album_tracks_obj := self.fetch_data(
-                self.album_url + str(track_data["album"]["id"]) + "/tracks"
+                f"{self.album_url}{track_data['album']['id']}/tracks"
             )
         ):
             return None

@@ -216,27 +218,6 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
             deezer_updated=time.time(),
         )

-    @staticmethod
-    def _construct_search_query(
-        filters: SearchFilter, keywords: str = ""
-    ) -> str:
-        """Construct a query string with the specified filters and keywords to
-        be provided to the Deezer Search API
-        (https://developers.deezer.com/api/search).
-
-        :param filters: Field filters to apply.
-        :param keywords: (Optional) Query keywords to use.
-        :return: Query string to be provided to the Search API.
-        """
-        query_components = [
-            keywords,
-            " ".join(f'{k}:"{v}"' for k, v in filters.items()),
-        ]
-        query = " ".join([q for q in query_components if q])
-        if not isinstance(query, str):
-            query = query.decode("utf8")
-        return unidecode.unidecode(query)
-
     def _search_api(
         self,
         query_type: Literal[

@@ -250,37 +231,42 @@ class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
             "user",
         ],
         filters: SearchFilter,
-        keywords="",
+        query_string: str = "",
     ) -> Sequence[IDResponse]:
-        """Query the Deezer Search API for the specified ``keywords``, applying
+        """Query the Deezer Search API for the specified ``query_string``, applying
         the provided ``filters``.

         :param filters: Field filters to apply.
-        :param keywords: Query keywords to use.
+        :param query_string: Additional query to include in the search.
         :return: JSON data for the class:`Response <Response>` object or None
             if no search results are returned.
         """
-        query = self._construct_search_query(keywords=keywords, filters=filters)
-        self._log.debug(f"Searching {self.data_source} for '{query}'")
+        query = self._construct_search_query(
+            query_string=query_string, filters=filters
+        )
+        self._log.debug("Searching {.data_source} for '{}'", self, query)
         try:
             response = requests.get(
-                self.search_url + query_type,
-                params={"q": query},
+                f"{self.search_url}{query_type}",
+                params={
+                    "q": query,
+                    "limit": self.config["search_limit"].get(),
+                },
                 timeout=10,
             )
             response.raise_for_status()
         except requests.exceptions.RequestException as e:
             self._log.error(
-                "Error fetching data from {} API\n Error: {}",
-                self.data_source,
+                "Error fetching data from {.data_source} API\n Error: {}",
+                self,
                 e,
             )
             return ()
         response_data: Sequence[IDResponse] = response.json().get("data", [])
         self._log.debug(
-            "Found {} result(s) from {} for '{}'",
+            "Found {} result(s) from {.data_source} for '{}'",
             len(response_data),
-            self.data_source,
+            self,
             query,
         )
         return response_data
@@ -27,13 +27,13 @@ import time
 import traceback
 from functools import cache
 from string import ascii_lowercase
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING, Sequence, cast

 import confuse
 from discogs_client import Client, Master, Release
 from discogs_client.exceptions import DiscogsAPIError
 from requests.exceptions import ConnectionError
-from typing_extensions import TypedDict
+from typing_extensions import NotRequired, TypedDict

 import beets
 import beets.ui

@@ -76,6 +76,8 @@ TRACK_INDEX_RE = re.compile(
     re.VERBOSE,
 )

+DISAMBIGUATION_RE = re.compile(r" \(\d+\)")
+

 class ReleaseFormat(TypedDict):
     name: str

@@ -83,6 +85,42 @@ class ReleaseFormat(TypedDict):
     descriptions: list[str] | None


+class Artist(TypedDict):
+    name: str
+    anv: str
+    join: str
+    role: str
+    tracks: str
+    id: str
+    resource_url: str
+
+
+class Track(TypedDict):
+    position: str
+    type_: str
+    title: str
+    duration: str
+    artists: list[Artist]
+    extraartists: NotRequired[list[Artist]]
+
+
+class TrackWithSubtracks(Track):
+    sub_tracks: list[TrackWithSubtracks]
+
+
+class IntermediateTrackInfo(TrackInfo):
+    """Allows work with string mediums from
+    get_track_info"""
+
+    def __init__(
+        self,
+        medium_str: str | None,
+        **kwargs,
+    ) -> None:
+        self.medium_str = medium_str
+        super().__init__(**kwargs)
+
+
 class DiscogsPlugin(MetadataSourcePlugin):
     def __init__(self):
         super().__init__()
@@ -91,12 +129,17 @@ class DiscogsPlugin(MetadataSourcePlugin):
                 "apikey": API_KEY,
                 "apisecret": API_SECRET,
                 "tokenfile": "discogs_token.json",
-                "source_weight": 0.5,
                 "user_token": "",
                 "separator": ", ",
                 "index_tracks": False,
                 "append_style_genre": False,
                 "search_limit": 5,
+                "strip_disambiguation": True,
+                "featured_string": "Feat.",
+                "anv": {
+                    "artist_credit": True,
+                    "artist": False,
+                    "album_artist": False,
+                },
             }
         )
         self.config["apikey"].redact = True
@ -104,7 +147,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
|
|||
self.config["user_token"].redact = True
|
||||
self.setup()
|
||||
|
||||
def setup(self, session=None):
|
||||
def setup(self, session=None) -> None:
|
||||
"""Create the `discogs_client` field. Authenticate if necessary."""
|
||||
c_key = self.config["apikey"].as_str()
|
||||
c_secret = self.config["apisecret"].as_str()
|
||||
|
|
@ -130,22 +173,22 @@ class DiscogsPlugin(MetadataSourcePlugin):
|
|||
|
||||
self.discogs_client = Client(USER_AGENT, c_key, c_secret, token, secret)
|
||||
|
||||
def reset_auth(self):
|
||||
def reset_auth(self) -> None:
|
||||
"""Delete token file & redo the auth steps."""
|
||||
os.remove(self._tokenfile())
|
||||
self.setup()
|
||||
|
||||
def _tokenfile(self):
|
||||
def _tokenfile(self) -> str:
|
||||
"""Get the path to the JSON file for storing the OAuth token."""
|
||||
return self.config["tokenfile"].get(confuse.Filename(in_app_dir=True))
|
||||
|
||||
def authenticate(self, c_key, c_secret):
|
||||
def authenticate(self, c_key: str, c_secret: str) -> tuple[str, str]:
|
||||
# Get the link for the OAuth page.
|
||||
auth_client = Client(USER_AGENT, c_key, c_secret)
|
||||
try:
|
||||
_, _, url = auth_client.get_authorize_url()
|
||||
except CONNECTION_ERRORS as e:
|
||||
self._log.debug("connection error: {0}", e)
|
||||
self._log.debug("connection error: {}", e)
|
||||
raise beets.ui.UserError("communication with Discogs failed")
|
||||
|
||||
beets.ui.print_("To authenticate with Discogs, visit:")
|
||||
|
|
@ -158,11 +201,11 @@ class DiscogsPlugin(MetadataSourcePlugin):
|
|||
except DiscogsAPIError:
|
||||
raise beets.ui.UserError("Discogs authorization failed")
|
||||
except CONNECTION_ERRORS as e:
|
||||
self._log.debug("connection error: {0}", e)
|
||||
self._log.debug("connection error: {}", e)
|
||||
raise beets.ui.UserError("Discogs token request failed")
|
||||
|
||||
# Save the token for later use.
|
||||
self._log.debug("Discogs token {0}, secret {1}", token, secret)
|
||||
self._log.debug("Discogs token {}, secret {}", token, secret)
|
||||
with open(self._tokenfile(), "w") as f:
|
||||
json.dump({"token": token, "secret": secret}, f)
|
||||
|
||||
|
|

@@ -202,7 +245,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
        """Fetches an album by its Discogs ID and returns an AlbumInfo object
        or None if the album is not found.
        """
        self._log.debug("Searching for release {0}", album_id)
        self._log.debug("Searching for release {}", album_id)

        discogs_id = self._extract_id(album_id)

@@ -216,7 +259,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
        except DiscogsAPIError as e:
            if e.status_code != 404:
                self._log.debug(
                    "API Error: {0} (query: {1})",
                    "API Error: {} (query: {})",
                    e,
                    result.data["resource_url"],
                )

@@ -250,7 +293,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
        try:
            results = self.discogs_client.search(query, type="release")
            results.per_page = self.config["search_limit"].as_number()
            results.per_page = self.config["search_limit"].get()
            releases = results.page(1)
        except CONNECTION_ERRORS:
            self._log.debug(

@@ -266,7 +309,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
        """Fetches a master release given its Discogs ID and returns its year
        or None if the master release is not found.
        """
        self._log.debug("Getting master release {0}", master_id)
        self._log.debug("Getting master release {}", master_id)
        result = Master(self.discogs_client, {"id": master_id})

        try:

@@ -274,7 +317,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
        except DiscogsAPIError as e:
            if e.status_code != 404:
                self._log.debug(
                    "API Error: {0} (query: {1})",
                    "API Error: {} (query: {})",
                    e,
                    result.data["resource_url"],
                )

@@ -300,7 +343,26 @@ class DiscogsPlugin(MetadataSourcePlugin):
        return media, albumtype

    def get_album_info(self, result):
    def get_artist_with_anv(
        self, artists: list[Artist], use_anv: bool = False
    ) -> tuple[str, str | None]:
        """Iterate through a Discogs artist list and, if the artist ANV
        is to be used, map it to the name.
        Calls the parent class get_artist method."""
        artist_list: list[dict[str | int, str]] = []
        for artist_data in artists:
            a: dict[str | int, str] = {
                "name": artist_data["name"],
                "id": artist_data["id"],
                "join": artist_data.get("join", ""),
            }
            if use_anv and (anv := artist_data.get("anv", "")):
                a["name"] = anv
            artist_list.append(a)
        artist, artist_id = self.get_artist(artist_list, join_key="join")
        return self.strip_disambiguation(artist), artist_id

    def get_album_info(self, result: Release) -> AlbumInfo | None:
        """Returns an AlbumInfo object for a discogs Release object."""
        # Explicitly reload the `Release` fields, as they might not yet be
        # present if the result is from a `discogs_client.search()`.

@@ -328,16 +390,29 @@ class DiscogsPlugin(MetadataSourcePlugin):
            self._log.warning("Release does not contain the required fields")
            return None

        artist, artist_id = self.get_artist(
            [a.data for a in result.artists], join_key="join"
        artist_data = [a.data for a in result.artists]
        album_artist, album_artist_id = self.get_artist_with_anv(artist_data)
        album_artist_anv, _ = self.get_artist_with_anv(
            artist_data, use_anv=True
        )
        artist_credit = album_artist_anv

        album = re.sub(r" +", " ", result.title)
        album_id = result.data["id"]
        # Use `.data` to access the tracklist directly instead of the
        # convenient `.tracklist` property, which will strip out useful artist
        # information and leave us with skeleton `Artist` objects that will
        # each make an API call just to get the same data back.
        tracks = self.get_tracks(result.data["tracklist"])
        tracks = self.get_tracks(
            result.data["tracklist"],
            (album_artist, album_artist_anv, album_artist_id),
        )

        # Assign ANV to the proper fields for tagging
        if not self.config["anv"]["artist_credit"]:
            artist_credit = album_artist
        if self.config["anv"]["album_artist"]:
            album_artist = album_artist_anv

        # Extract information for the optional AlbumInfo fields, if possible.
        va = result.data["artists"][0].get("name", "").lower() == "various"

@@ -363,15 +438,20 @@ class DiscogsPlugin(MetadataSourcePlugin):
        label = catalogno = labelid = None
        if result.data.get("labels"):
            label = result.data["labels"][0].get("name")
            label = self.strip_disambiguation(
                result.data["labels"][0].get("name")
            )
            catalogno = result.data["labels"][0].get("catno")
            labelid = result.data["labels"][0].get("id")

        cover_art_url = self.select_cover_art(result)

        # Additional cleanups (various artists name, catalog number, media).
        # Additional cleanups
        # (various artists name, catalog number, media, disambiguation).
        if va:
            artist = config["va_name"].as_str()
            va_name = config["va_name"].as_str()
            album_artist = va_name
            artist_credit = va_name
        if catalogno == "none":
            catalogno = None
        # Explicitly set the `media` for the tracks, since it is expected by

@@ -379,13 +459,9 @@ class DiscogsPlugin(MetadataSourcePlugin):
        for track in tracks:
            track.media = media
            track.medium_total = mediums.count(track.medium)
            if not track.artist:  # get_track_info often fails to find artist
                track.artist = artist
            if not track.artist_id:
                track.artist_id = artist_id
            # Discogs does not have track IDs. Invent our own IDs as proposed
            # in #2336.
            track.track_id = str(album_id) + "-" + track.track_alt
            track.track_id = f"{album_id}-{track.track_alt}"
            track.data_url = data_url
            track.data_source = "Discogs"

@@ -398,8 +474,9 @@ class DiscogsPlugin(MetadataSourcePlugin):
        return AlbumInfo(
            album=album,
            album_id=album_id,
            artist=artist,
            artist_id=artist_id,
            artist=album_artist,
            artist_credit=artist_credit,
            artist_id=album_artist_id,
            tracks=tracks,
            albumtype=albumtype,
            va=va,

@@ -417,11 +494,11 @@ class DiscogsPlugin(MetadataSourcePlugin):
            data_url=data_url,
            discogs_albumid=discogs_albumid,
            discogs_labelid=labelid,
            discogs_artistid=artist_id,
            discogs_artistid=album_artist_id,
            cover_art_url=cover_art_url,
        )

    def select_cover_art(self, result):
    def select_cover_art(self, result: Release) -> str | None:
        """Returns the best candidate image, if any, from a Discogs `Release` object."""
        if result.data.get("images") and len(result.data.get("images")) > 0:
            # The first image in this list appears to be the one displayed first

@@ -431,7 +508,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
        return None

    def format(self, classification):
    def format(self, classification: Iterable[str]) -> str | None:
        if classification:
            return (
                self.config["separator"].as_str().join(sorted(classification))

@@ -439,22 +516,17 @@ class DiscogsPlugin(MetadataSourcePlugin):
        else:
            return None

    def get_tracks(self, tracklist):
        """Returns a list of TrackInfo objects for a discogs tracklist."""
        try:
            clean_tracklist = self.coalesce_tracks(tracklist)
        except Exception as exc:
            # FIXME: this is an extra precaution for making sure there are no
            # side effects after #2222. It should be removed after further
            # testing.
            self._log.debug("{}", traceback.format_exc())
            self._log.error("uncaught exception in coalesce_tracks: {}", exc)
            clean_tracklist = tracklist
        tracks = []
    def _process_clean_tracklist(
        self,
        clean_tracklist: list[Track],
        album_artist_data: tuple[str, str, str | None],
    ) -> tuple[list[TrackInfo], dict[int, str], int, list[str], list[str]]:
        # Distinct works and intra-work divisions, as defined by index tracks.
        tracks: list[TrackInfo] = []
        index_tracks = {}
        index = 0
        # Distinct works and intra-work divisions, as defined by index tracks.
        divisions, next_divisions = [], []
        divisions: list[str] = []
        next_divisions: list[str] = []
        for track in clean_tracklist:
            # Only real tracks have `position`. Otherwise, it's an index track.
            if track["position"]:

@@ -464,7 +536,9 @@ class DiscogsPlugin(MetadataSourcePlugin):
                # divisions.
                divisions += next_divisions
                del next_divisions[:]
                track_info = self.get_track_info(track, index, divisions)
                track_info = self.get_track_info(
                    track, index, divisions, album_artist_data
                )
                track_info.track_alt = track["position"]
                tracks.append(track_info)
            else:

@@ -476,7 +550,29 @@ class DiscogsPlugin(MetadataSourcePlugin):
                except IndexError:
                    pass
                index_tracks[index + 1] = track["title"]
        return tracks, index_tracks, index, divisions, next_divisions

    def get_tracks(
        self,
        tracklist: list[Track],
        album_artist_data: tuple[str, str, str | None],
    ) -> list[TrackInfo]:
        """Returns a list of TrackInfo objects for a discogs tracklist."""
        try:
            clean_tracklist: list[Track] = self.coalesce_tracks(
                cast(list[TrackWithSubtracks], tracklist)
            )
        except Exception as exc:
            # FIXME: this is an extra precaution for making sure there are no
            # side effects after #2222. It should be removed after further
            # testing.
            self._log.debug("{}", traceback.format_exc())
            self._log.error("uncaught exception in coalesce_tracks: {}", exc)
            clean_tracklist = tracklist
        processed = self._process_clean_tracklist(
            clean_tracklist, album_artist_data
        )
        tracks, index_tracks, index, divisions, next_divisions = processed
        # Fix up medium and medium_index for each track. Discogs position is
        # unreliable, but tracks are in order.
        medium = None

@@ -485,8 +581,8 @@ class DiscogsPlugin(MetadataSourcePlugin):
        # If a medium has two sides (ie. vinyl or cassette), each pair of
        # consecutive sides should belong to the same medium.
        if all([track.medium is not None for track in tracks]):
            m = sorted({track.medium.lower() for track in tracks})
        if all([track.medium_str is not None for track in tracks]):
            m = sorted({track.medium_str.lower() for track in tracks})
            # If all track.medium are single consecutive letters, assume it is
            # a 2-sided medium.
            if "".join(m) in ascii_lowercase:

@@ -500,17 +596,17 @@ class DiscogsPlugin(MetadataSourcePlugin):
            # side_count is the number of mediums or medium sides (in the case
            # of two-sided mediums) that were seen before.
            medium_is_index = (
                track.medium
                track.medium_str
                and not track.medium_index
                and (
                    len(track.medium) != 1
                    len(track.medium_str) != 1
                    or
                    # Not within standard incremental medium values (A, B, C, ...).
                    ord(track.medium) - 64 != side_count + 1
                    ord(track.medium_str) - 64 != side_count + 1
                )
            )

            if not medium_is_index and medium != track.medium:
            if not medium_is_index and medium != track.medium_str:
                side_count += 1
                if sides_per_medium == 2:
                    if side_count % sides_per_medium:

@@ -521,7 +617,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
                    # Medium changed. Reset index_count.
                    medium_count += 1
                    index_count = 0
                medium = track.medium
                medium = track.medium_str

            index_count += 1
            medium_count = 1 if medium_count == 0 else medium_count

@@ -537,22 +633,27 @@ class DiscogsPlugin(MetadataSourcePlugin):
                disctitle = None
            track.disctitle = disctitle

        return tracks
        return cast(list[TrackInfo], tracks)

    def coalesce_tracks(self, raw_tracklist):
    def coalesce_tracks(
        self, raw_tracklist: list[TrackWithSubtracks]
    ) -> list[Track]:
        """Pre-process a tracklist, merging subtracks into a single track. The
        title for the merged track is the one from the previous index track,
        if present; otherwise it is a combination of the subtracks titles.
        """

        def add_merged_subtracks(tracklist, subtracks):
        def add_merged_subtracks(
            tracklist: list[TrackWithSubtracks],
            subtracks: list[TrackWithSubtracks],
        ) -> None:
            """Modify `tracklist` in place, merging a list of `subtracks`
            into a single track in `tracklist`."""
            # Calculate position based on first subtrack, without subindex.
            idx, medium_idx, sub_idx = self.get_track_index(
                subtracks[0]["position"]
            )
            position = "{}{}".format(idx or "", medium_idx or "")
            position = f"{idx or ''}{medium_idx or ''}"

            if tracklist and not tracklist[-1]["position"]:
                # Assume the previous index track contains the track title.

@@ -574,8 +675,8 @@ class DiscogsPlugin(MetadataSourcePlugin):
                # option is set
                if self.config["index_tracks"]:
                    for subtrack in subtracks:
                        subtrack["title"] = "{}: {}".format(
                            index_track["title"], subtrack["title"]
                        subtrack["title"] = (
                            f"{index_track['title']}: {subtrack['title']}"
                        )
                tracklist.extend(subtracks)
            else:

@@ -585,8 +686,8 @@ class DiscogsPlugin(MetadataSourcePlugin):
                tracklist.append(track)

        # Pre-process the tracklist, trying to identify subtracks.
        subtracks = []
        tracklist = []
        subtracks: list[TrackWithSubtracks] = []
        tracklist: list[TrackWithSubtracks] = []
        prev_subindex = ""
        for track in raw_tracklist:
            # Regular subtrack (track with subindex).

@@ -621,10 +722,32 @@ class DiscogsPlugin(MetadataSourcePlugin):
        if subtracks:
            add_merged_subtracks(tracklist, subtracks)

        return tracklist
        return cast(list[Track], tracklist)

    def get_track_info(self, track, index, divisions):
    def strip_disambiguation(self, text: str) -> str:
        """Removes Discogs-specific disambiguations from a string.
        Turns 'Label Name (5)' into 'Label Name' or 'Artist (1) & Another Artist (2)'
        into 'Artist & Another Artist'. Does nothing if strip_disambiguation is False."""
        if not self.config["strip_disambiguation"]:
            return text
        return DISAMBIGUATION_RE.sub("", text)

    def get_track_info(
        self,
        track: Track,
        index: int,
        divisions: list[str],
        album_artist_data: tuple[str, str, str | None],
    ) -> IntermediateTrackInfo:
        """Returns a TrackInfo object for a discogs track."""

        artist, artist_anv, artist_id = album_artist_data
        artist_credit = artist_anv
        if not self.config["anv"]["artist_credit"]:
            artist_credit = artist
        if self.config["anv"]["artist"]:
            artist = artist_anv

        title = track["title"]
        if self.config["index_tracks"]:
            prefix = ", ".join(divisions)

@@ -632,18 +755,44 @@ class DiscogsPlugin(MetadataSourcePlugin):
                title = f"{prefix}: {title}"
        track_id = None
        medium, medium_index, _ = self.get_track_index(track["position"])
        artist, artist_id = self.get_artist(
            track.get("artists", []), join_key="join"
        )

        # If artists are found on the track, we will use those instead
        if artists := track.get("artists", []):
            artist, artist_id = self.get_artist_with_anv(
                artists, self.config["anv"]["artist"]
            )
            artist_credit, _ = self.get_artist_with_anv(
                artists, self.config["anv"]["artist_credit"]
            )
        length = self.get_track_length(track["duration"])
        return TrackInfo(

        # Add featured artists
        if extraartists := track.get("extraartists", []):
            featured_list = [
                artist
                for artist in extraartists
                if "Featuring" in artist["role"]
            ]
            featured, _ = self.get_artist_with_anv(
                featured_list, self.config["anv"]["artist"]
            )
            featured_credit, _ = self.get_artist_with_anv(
                featured_list, self.config["anv"]["artist_credit"]
            )
            if featured:
                artist += f" {self.config['featured_string']} {featured}"
                artist_credit += (
                    f" {self.config['featured_string']} {featured_credit}"
                )
        return IntermediateTrackInfo(
            title=title,
            track_id=track_id,
            artist_credit=artist_credit,
            artist=artist,
            artist_id=artist_id,
            length=length,
            index=index,
            medium=medium,
            medium_str=medium,
            medium_index=medium_index,
        )

@@ -664,7 +813,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
        return medium or None, index or None, subindex or None

    def get_track_length(self, duration):
    def get_track_length(self, duration: str) -> int | None:
        """Returns the track length in seconds for a discogs duration."""
        try:
            length = time.strptime(duration, "%M:%S")
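
To illustrate the ANV handling added above: a minimal stand-alone sketch of the name substitution in `get_artist_with_anv`, assuming a simplified join (the real code delegates joining to `MetadataSourcePlugin.get_artist` using Discogs "join" strings, and also strips disambiguation suffixes):

# Hypothetical simplified version; the " & " join is an assumption for the demo.
def artist_with_anv(artists: list[dict], use_anv: bool = False) -> str:
    names = []
    for a in artists:
        # Prefer the artist name variation (ANV) when requested and present.
        name = (a.get("anv") or a["name"]) if use_anv else a["name"]
        names.append(name)
    return " & ".join(names)

artists = [
    {"name": "Artist (2)", "anv": "ARTIST"},
    {"name": "Other Artist", "anv": ""},
]
print(artist_with_anv(artists))                # Artist (2) & Other Artist
print(artist_with_anv(artists, use_anv=True))  # ARTIST & Other Artist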

@@ -150,7 +150,7 @@ class DuplicatesPlugin(BeetsPlugin):
        count = self.config["count"].get(bool)
        delete = self.config["delete"].get(bool)
        remove = self.config["remove"].get(bool)
        fmt = self.config["format"].get(str)
        fmt_tmpl = self.config["format"].get(str)
        full = self.config["full"].get(bool)
        keys = self.config["keys"].as_str_seq()
        merge = self.config["merge"].get(bool)

@@ -175,15 +175,14 @@ class DuplicatesPlugin(BeetsPlugin):
                return

            if path:
                fmt = "$path"
                fmt_tmpl = "$path"

            # Default format string for count mode.
            if count and not fmt:
            if count and not fmt_tmpl:
                if album:
                    fmt = "$albumartist - $album"
                    fmt_tmpl = "$albumartist - $album"
                else:
                    fmt = "$albumartist - $album - $title"
                fmt += ": {0}"
                    fmt_tmpl = "$albumartist - $album - $title"

            if checksum:
                for i in items:

@@ -207,7 +206,7 @@ class DuplicatesPlugin(BeetsPlugin):
                    delete=delete,
                    remove=remove,
                    tag=tag,
                    fmt=fmt.format(obj_count),
                    fmt=f"{fmt_tmpl}: {obj_count}",
                )

        self._command.func = _dup

@@ -255,28 +254,24 @@ class DuplicatesPlugin(BeetsPlugin):
        checksum = getattr(item, key, False)
        if not checksum:
            self._log.debug(
                "key {0} on item {1} not cached:computing checksum",
                "key {} on item {.filepath} not cached:computing checksum",
                key,
                displayable_path(item.path),
                item,
            )
            try:
                checksum = command_output(args).stdout
                setattr(item, key, checksum)
                item.store()
                self._log.debug(
                    "computed checksum for {0} using {1}", item.title, key
                    "computed checksum for {.title} using {}", item, key
                )
            except subprocess.CalledProcessError as e:
                self._log.debug(
                    "failed to checksum {0}: {1}",
                    displayable_path(item.path),
                    e,
                )
                self._log.debug("failed to checksum {.filepath}: {}", item, e)
        else:
            self._log.debug(
                "key {0} on item {1} cached:not computing checksum",
                "key {} on item {.filepath} cached:not computing checksum",
                key,
                displayable_path(item.path),
                item,
            )
        return key, checksum

@@ -294,15 +289,15 @@ class DuplicatesPlugin(BeetsPlugin):
        values = [v for v in values if v not in (None, "")]
        if strict and len(values) < len(keys):
            self._log.debug(
                "some keys {0} on item {1} are null or empty: skipping",
                "some keys {} on item {.filepath} are null or empty: skipping",
                keys,
                displayable_path(obj.path),
                obj,
            )
        elif not strict and not len(values):
            self._log.debug(
                "all keys {0} on item {1} are null or empty: skipping",
                "all keys {} on item {.filepath} are null or empty: skipping",
                keys,
                displayable_path(obj.path),
                obj,
            )
        else:
            key = tuple(values)

@@ -360,11 +355,11 @@ class DuplicatesPlugin(BeetsPlugin):
            value = getattr(o, f, None)
            if value:
                self._log.debug(
                    "key {0} on item {1} is null "
                    "or empty: setting from item {2}",
                    "key {} on item {} is null "
                    "or empty: setting from item {.filepath}",
                    f,
                    displayable_path(objs[0].path),
                    displayable_path(o.path),
                    o,
                )
                setattr(objs[0], f, value)
                objs[0].store()

@@ -384,11 +379,11 @@ class DuplicatesPlugin(BeetsPlugin):
                missing.album_id = objs[0].id
                missing.add(i._db)
                self._log.debug(
                    "item {0} missing from album {1}:"
                    " merging from {2} into {3}",
                    "item {} missing from album {}:"
                    " merging from {.filepath} into {}",
                    missing,
                    objs[0],
                    displayable_path(o.path),
                    o,
                    displayable_path(missing.destination()),
                )
                missing.move(operation=MoveOperation.COPY)
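
The log calls above drop explicit indices ({0}, {1}) in favor of auto-numbered fields, and several now read attributes directly in the format string ({.filepath}, {.title}). This is standard str.format field syntax, which beets' lazy {}-style logger applies when the record is emitted: an empty field name auto-numbers, and a dotted suffix performs attribute lookup. A small illustration with a stand-in Item class:

class Item:
    def __init__(self, title: str, filepath: str) -> None:
        self.title = title
        self.filepath = filepath

item = Item("Song", "/music/song.mp3")

# "{.title}" auto-numbers to argument 0 and then reads its .title attribute;
# the bare "{}" takes the next argument.
print("computed checksum for {.title} using {}".format(item, "md5"))
# -> computed checksum for Song using md5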

@@ -46,9 +46,7 @@ def edit(filename, log):
    try:
        subprocess.call(cmd)
    except OSError as exc:
        raise ui.UserError(
            "could not run editor command {!r}: {}".format(cmd[0], exc)
        )
        raise ui.UserError(f"could not run editor command {cmd[0]!r}: {exc}")


def dump(arg):

@@ -71,9 +69,7 @@ def load(s):
    for d in yaml.safe_load_all(s):
        if not isinstance(d, dict):
            raise ParseError(
                "each entry must be a dictionary; found {}".format(
                    type(d).__name__
                )
                f"each entry must be a dictionary; found {type(d).__name__}"
            )

        # Convert all keys to strings. They started out as strings,
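
The UserError rewrite above folds the !r conversion into the f-string; !r applies repr() to the value in either spelling, so the two forms render identically:

cmd = ["nano", "config.yaml"]
exc = OSError("No such file or directory")

old = "could not run editor command {!r}: {}".format(cmd[0], exc)
new = f"could not run editor command {cmd[0]!r}: {exc}"
assert old == new  # could not run editor command 'nano': No such file or directory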

@@ -20,11 +20,12 @@ from mimetypes import guess_extension
import requests

from beets import art, config, ui
from beets import config, ui
from beets.plugins import BeetsPlugin
from beets.ui import print_
from beets.util import bytestring_path, displayable_path, normpath, syspath
from beets.util.artresizer import ArtResizer
from beetsplug._utils import art


def _confirm(objs, album):

@@ -35,8 +36,9 @@ def _confirm(objs, album):
    to items).
    """
    noun = "album" if album else "file"
    prompt = "Modify artwork for {} {}{} (Y/n)?".format(
        len(objs), noun, "s" if len(objs) > 1 else ""
    prompt = (
        "Modify artwork for"
        f" {len(objs)} {noun}{'s' if len(objs) > 1 else ''} (Y/n)?"
    )

    # Show all the items or albums.

@@ -110,9 +112,7 @@ class EmbedCoverArtPlugin(BeetsPlugin):
                imagepath = normpath(opts.file)
                if not os.path.isfile(syspath(imagepath)):
                    raise ui.UserError(
                        "image file {} not found".format(
                            displayable_path(imagepath)
                        )
                        f"image file {displayable_path(imagepath)} not found"
                    )

                items = lib.items(args)

@@ -137,7 +137,7 @@ class EmbedCoverArtPlugin(BeetsPlugin):
                    response = requests.get(opts.url, timeout=5)
                    response.raise_for_status()
                except requests.exceptions.RequestException as e:
                    self._log.error("{}".format(e))
                    self._log.error("{}", e)
                    return
                extension = guess_extension(response.headers["Content-Type"])
                if extension is None:

@@ -149,7 +149,7 @@ class EmbedCoverArtPlugin(BeetsPlugin):
                    with open(tempimg, "wb") as f:
                        f.write(response.content)
                except Exception as e:
                    self._log.error("Unable to save image: {}".format(e))
                    self._log.error("Unable to save image: {}", e)
                    return
                items = lib.items(args)
                # Confirm with user.

@@ -274,7 +274,7 @@ class EmbedCoverArtPlugin(BeetsPlugin):
        """
        if self.config["remove_art_file"] and album.artpath:
            if os.path.isfile(syspath(album.artpath)):
                self._log.debug("Removing album art file for {0}", album)
                self._log.debug("Removing album art file for {}", album)
                os.remove(syspath(album.artpath))
                album.artpath = None
                album.store()
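
The _confirm rewrite above puts a conditional expression inside an f-string replacement field to pluralize the noun; a runnable sketch of just that prompt logic:

def prompt(objs: list, album: bool) -> str:
    noun = "album" if album else "file"
    # Expressions, including conditionals, are legal inside f-string braces.
    return (
        "Modify artwork for"
        f" {len(objs)} {noun}{'s' if len(objs) > 1 else ''} (Y/n)?"
    )

print(prompt(["a"], album=True))        # Modify artwork for 1 album (Y/n)?
print(prompt(["a", "b"], album=False))  # Modify artwork for 2 files (Y/n)?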

@@ -38,9 +38,7 @@ def api_url(host, port, endpoint):
        hostname_list.insert(0, "http://")
    hostname = "".join(hostname_list)

    joined = urljoin(
        "{hostname}:{port}".format(hostname=hostname, port=port), endpoint
    )
    joined = urljoin(f"{hostname}:{port}", endpoint)

    scheme, netloc, path, query_string, fragment = urlsplit(joined)
    query_params = parse_qs(query_string)

@@ -81,12 +79,12 @@ def create_headers(user_id, token=None):
    headers = {}

    authorization = (
        'MediaBrowser UserId="{user_id}", '
        f'MediaBrowser UserId="{user_id}", '
        'Client="other", '
        'Device="beets", '
        'DeviceId="beets", '
        'Version="0.0.0"'
    ).format(user_id=user_id)
    )

    headers["x-emby-authorization"] = authorization

@@ -186,7 +184,7 @@ class EmbyUpdate(BeetsPlugin):
        # Get user information from the Emby API.
        user = get_user(host, port, username)
        if not user:
            self._log.warning(f"User {username} could not be found.")
            self._log.warning("User {} could not be found.", username)
            return
        userid = user[0]["Id"]

@@ -198,7 +196,7 @@ class EmbyUpdate(BeetsPlugin):
        # Get authentication token.
        token = get_token(host, port, headers, auth_data)
        if not token:
            self._log.warning("Could not get token for user {0}", username)
            self._log.warning("Could not get token for user {}", username)
            return

        # Recreate headers with a token.
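
In the create_headers change above, only the first literal contains a placeholder, so only it needs the f prefix; Python still concatenates adjacent string literals at compile time:

user_id = "1234"
authorization = (
    f'MediaBrowser UserId="{user_id}", '  # the only piece with a field
    'Client="other", '
    'Device="beets", '
    'DeviceId="beets", '
    'Version="0.0.0"'
)
print(authorization)
# MediaBrowser UserId="1234", Client="other", Device="beets", DeviceId="beets", Version="0.0.0"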

@@ -150,7 +150,7 @@ class ExportPlugin(BeetsPlugin):
            try:
                data, item = data_emitter(included_keys or "*")
            except (mediafile.UnreadableFileError, OSError) as ex:
                self._log.error("cannot read file: {0}", ex)
                self._log.error("cannot read file: {}", ex)
                continue

            for key, value in data.items():

@@ -36,10 +36,10 @@ from beets.util.config import sanitize_pairs
if TYPE_CHECKING:
    from collections.abc import Iterable, Iterator, Sequence
    from logging import Logger

    from beets.importer import ImportSession, ImportTask
    from beets.library import Album, Library
    from beets.logging import BeetsLogger as Logger

try:
    from bs4 import BeautifulSoup, Tag

@@ -133,7 +133,7 @@ class Candidate:
            # get_size returns None if no local imaging backend is available
            if not self.size:
                self.size = ArtResizer.shared.get_size(self.path)
                self._log.debug("image size: {}", self.size)
                self._log.debug("image size: {.size}", self)

            if not self.size:
                self._log.warning(

@@ -151,7 +151,7 @@ class Candidate:
        # Check minimum dimension.
        if plugin.minwidth and self.size[0] < plugin.minwidth:
            self._log.debug(
                "image too small ({} < {})", self.size[0], plugin.minwidth
                "image too small ({} < {.minwidth})", self.size[0], plugin
            )
            return ImageAction.BAD

@@ -162,10 +162,10 @@ class Candidate:
            if edge_diff > plugin.margin_px:
                self._log.debug(
                    "image is not close enough to being "
                    "square, ({} - {} > {})",
                    "square, ({} - {} > {.margin_px})",
                    long_edge,
                    short_edge,
                    plugin.margin_px,
                    plugin,
                )
                return ImageAction.BAD
            elif plugin.margin_percent:

@@ -190,7 +190,7 @@ class Candidate:
        downscale = False
        if plugin.maxwidth and self.size[0] > plugin.maxwidth:
            self._log.debug(
                "image needs rescaling ({} > {})", self.size[0], plugin.maxwidth
                "image needs rescaling ({} > {.maxwidth})", self.size[0], plugin
            )
            downscale = True

@@ -200,9 +200,9 @@ class Candidate:
            filesize = os.stat(syspath(self.path)).st_size
            if filesize > plugin.max_filesize:
                self._log.debug(
                    "image needs resizing ({}B > {}B)",
                    "image needs resizing ({}B > {.max_filesize}B)",
                    filesize,
                    plugin.max_filesize,
                    plugin,
                )
                downsize = True

@@ -213,9 +213,9 @@ class Candidate:
        reformat = fmt != plugin.cover_format
        if reformat:
            self._log.debug(
                "image needs reformatting: {} -> {}",
                "image needs reformatting: {} -> {.cover_format}",
                fmt,
                plugin.cover_format,
                plugin,
            )

        skip_check_for = skip_check_for or []

@@ -329,7 +329,7 @@ def _logged_get(log: Logger, *args, **kwargs) -> requests.Response:
            prepped.url, {}, None, None, None
        )
        send_kwargs.update(settings)
        log.debug("{}: {}", message, prepped.url)
        log.debug("{}: {.url}", message, prepped)
        return s.send(prepped, **send_kwargs)

@@ -542,14 +542,14 @@ class CoverArtArchive(RemoteArtSource):
        try:
            response = self.request(url)
        except requests.RequestException:
            self._log.debug("{}: error receiving response", self.NAME)
            self._log.debug("{.NAME}: error receiving response", self)
            return

        try:
            data = response.json()
        except ValueError:
            self._log.debug(
                "{}: error loading response: {}", self.NAME, response.text
                "{.NAME}: error loading response: {.text}", self, response
            )
            return

@@ -593,7 +593,7 @@ class CoverArtArchive(RemoteArtSource):
class Amazon(RemoteArtSource):
    NAME = "Amazon"
    ID = "amazon"
    URL = "https://images.amazon.com/images/P/%s.%02i.LZZZZZZZ.jpg"
    URL = "https://images.amazon.com/images/P/{}.{:02d}.LZZZZZZZ.jpg"
    INDICES = (1, 2)

    def get(

@@ -606,7 +606,7 @@ class Amazon(RemoteArtSource):
        if album.asin:
            for index in self.INDICES:
                yield self._candidate(
                    url=self.URL % (album.asin, index),
                    url=self.URL.format(album.asin, index),
                    match=MetadataMatch.EXACT,
                )
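
The Amazon URL template above trades %-style placeholders for str.format fields; {:02d} reproduces the zero-padded two-digit index of %02i (the ASIN below is an invented example value):

OLD_URL = "https://images.amazon.com/images/P/%s.%02i.LZZZZZZZ.jpg"
NEW_URL = "https://images.amazon.com/images/P/{}.{:02d}.LZZZZZZZ.jpg"

asin, index = "B000002L7R", 1
assert OLD_URL % (asin, index) == NEW_URL.format(asin, index)
# https://images.amazon.com/images/P/B000002L7R.01.LZZZZZZZ.jpg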

@@ -629,7 +629,7 @@ class AlbumArtOrg(RemoteArtSource):
        # Get the page from albumart.org.
        try:
            resp = self.request(self.URL, params={"asin": album.asin})
            self._log.debug("scraped art URL: {}", resp.url)
            self._log.debug("scraped art URL: {.url}", resp)
        except requests.RequestException:
            self._log.debug("error scraping art page")
            return

@@ -682,7 +682,7 @@ class GoogleImages(RemoteArtSource):
        """
        if not (album.albumartist and album.album):
            return
        search_string = (album.albumartist + "," + album.album).encode("utf-8")
        search_string = f"{album.albumartist},{album.album}".encode("utf-8")

        try:
            response = self.request(

@@ -702,7 +702,7 @@ class GoogleImages(RemoteArtSource):
        try:
            data = response.json()
        except ValueError:
            self._log.debug("google: error loading response: {}", response.text)
            self._log.debug("google: error loading response: {.text}", response)
            return

        if "error" in data:

@@ -723,7 +723,7 @@ class FanartTV(RemoteArtSource):
    NAME = "fanart.tv"
    ID = "fanarttv"
    API_URL = "https://webservice.fanart.tv/v3/"
    API_ALBUMS = API_URL + "music/albums/"
    API_ALBUMS = f"{API_URL}music/albums/"
    PROJECT_KEY = "61a7d0ab4e67162b7a0c7c35915cd48e"

    def __init__(self, *args, **kwargs):

@@ -750,7 +750,7 @@ class FanartTV(RemoteArtSource):
        try:
            response = self.request(
                self.API_ALBUMS + album.mb_releasegroupid,
                f"{self.API_ALBUMS}{album.mb_releasegroupid}",
                headers={
                    "api-key": self.PROJECT_KEY,
                    "client-key": self.client_key,

@@ -764,7 +764,7 @@ class FanartTV(RemoteArtSource):
            data = response.json()
        except ValueError:
            self._log.debug(
                "fanart.tv: error loading response: {}", response.text
                "fanart.tv: error loading response: {.text}", response
            )
            return

@@ -820,7 +820,7 @@ class ITunesStore(RemoteArtSource):
            return

        payload = {
            "term": album.albumartist + " " + album.album,
            "term": f"{album.albumartist} {album.album}",
            "entity": "album",
            "media": "music",
            "limit": 200,

@@ -947,14 +947,14 @@ class Wikipedia(RemoteArtSource):
            data = dbpedia_response.json()
            results = data["results"]["bindings"]
            if results:
                cover_filename = "File:" + results[0]["coverFilename"]["value"]
                cover_filename = f"File:{results[0]['coverFilename']['value']}"
                page_id = results[0]["pageId"]["value"]
            else:
                self._log.debug("wikipedia: album not found on dbpedia")
        except (ValueError, KeyError, IndexError):
            self._log.debug(
                "wikipedia: error scraping dbpedia response: {}",
                dbpedia_response.text,
                "wikipedia: error scraping dbpedia response: {.text}",
                dbpedia_response,
            )

        # Ensure we have a filename before attempting to query wikipedia

@@ -996,7 +996,7 @@ class Wikipedia(RemoteArtSource):
            results = data["query"]["pages"][page_id]["images"]
            for result in results:
                if re.match(
                    re.escape(lpart) + r".*?\." + re.escape(rpart),
                    rf"{re.escape(lpart)}.*?\.{re.escape(rpart)}",
                    result["title"],
                ):
                    cover_filename = result["title"]

@@ -1179,7 +1179,7 @@ class LastFM(RemoteArtSource):
            if "error" in data:
                if data["error"] == 6:
                    self._log.debug(
                        "lastfm: no results for {}", album.mb_albumid
                        "lastfm: no results for {.mb_albumid}", album
                    )
                else:
                    self._log.error(

@@ -1200,7 +1200,7 @@ class LastFM(RemoteArtSource):
                    url=images[size], size=self.SIZES[size]
                )
        except ValueError:
            self._log.debug("lastfm: error loading response: {}", response.text)
            self._log.debug("lastfm: error loading response: {.text}", response)
            return

@@ -1227,7 +1227,7 @@ class Spotify(RemoteArtSource):
        paths: None | Sequence[bytes],
    ) -> Iterator[Candidate]:
        try:
            url = self.SPOTIFY_ALBUM_URL + album.items().get().spotify_album_id
            url = f"{self.SPOTIFY_ALBUM_URL}{album.items().get().spotify_album_id}"
        except AttributeError:
            self._log.debug("Fetchart: no Spotify album ID found")
            return

@@ -1244,7 +1244,7 @@ class Spotify(RemoteArtSource):
            soup = BeautifulSoup(html, "html.parser")
        except ValueError:
            self._log.debug(
                "Spotify: error loading response: {}", response.text
                "Spotify: error loading response: {.text}", response
            )
            return

@@ -1541,9 +1541,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
                out = candidate
                assert out.path is not None  # help mypy
                self._log.debug(
                    "using {0.LOC} image {1}",
                    source,
                    util.displayable_path(out.path),
                    "using {.LOC} image {.path}", source, out
                )
                break
            # Remove temporary files for invalid candidates.

@@ -1576,7 +1574,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
                    message = ui.colorize(
                        "text_highlight_minor", "has album art"
                    )
                self._log.info("{0}: {1}", album, message)
                self._log.info("{}: {}", album, message)
            else:
                # In ordinary invocations, look for images on the
                # filesystem. When forcing, however, always go to the Web

@@ -1589,4 +1587,4 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
                message = ui.colorize("text_success", "found album art")
            else:
                message = ui.colorize("text_error", "no art found")
            self._log.info("{0}: {1}", album, message)
            self._log.info("{}: {}", album, message)

@@ -89,8 +89,9 @@ class FishPlugin(BeetsPlugin):
            "-o",
            "--output",
            default="~/.config/fish/completions/beet.fish",
            help="where to save the script. default: "
            "~/.config/fish/completions",
            help=(
                "where to save the script. default: ~/.config/fish/completions"
            ),
        )
        return [cmd]

@@ -122,23 +123,13 @@ class FishPlugin(BeetsPlugin):
        for name in names:
            cmd_names_help.append((name, cmd.help))
        # Concatenate the string
        totstring = HEAD + "\n"
        totstring = f"{HEAD}\n"
        totstring += get_cmds_list([name[0] for name in cmd_names_help])
        totstring += "" if nobasicfields else get_standard_fields(fields)
        totstring += get_extravalues(lib, extravalues) if extravalues else ""
        totstring += (
            "\n"
            + "# ====== {} =====".format("setup basic beet completion")
            + "\n" * 2
        )
        totstring += "\n# ====== setup basic beet completion =====\n\n"
        totstring += get_basic_beet_options()
        totstring += (
            "\n"
            + "# ====== {} =====".format(
                "setup field completion for subcommands"
            )
            + "\n"
        )
        totstring += "\n# ====== setup field completion for subcommands =====\n"
        totstring += get_subcommands(cmd_names_help, nobasicfields, extravalues)
        # Set up completion for all the command options
        totstring += get_all_commands(beetcmds)

@@ -150,23 +141,19 @@ class FishPlugin(BeetsPlugin):
def _escape(name):
    # Escape ? in fish
    if name == "?":
        name = "\\" + name
        name = f"\\{name}"
    return name


def get_cmds_list(cmds_names):
    # Make a list of all Beets core & plugin commands
    substr = ""
    substr += "set CMDS " + " ".join(cmds_names) + ("\n" * 2)
    return substr
    return f"set CMDS {' '.join(cmds_names)}\n\n"


def get_standard_fields(fields):
    # Make a list of album/track fields and append with ':'
    fields = (field + ":" for field in fields)
    substr = ""
    substr += "set FIELDS " + " ".join(fields) + ("\n" * 2)
    return substr
    fields = (f"{field}:" for field in fields)
    return f"set FIELDS {' '.join(fields)}\n\n"


def get_extravalues(lib, extravalues):

@@ -175,14 +162,8 @@ def get_extravalues(lib, extravalues):
    word = ""
    values_set = get_set_of_values_for_field(lib, extravalues)
    for fld in extravalues:
        extraname = fld.upper() + "S"
        word += (
            "set "
            + extraname
            + " "
            + " ".join(sorted(values_set[fld]))
            + ("\n" * 2)
        )
        extraname = f"{fld.upper()}S"
        word += f"set {extraname} {' '.join(sorted(values_set[fld]))}\n\n"
    return word

@@ -226,35 +207,29 @@ def get_subcommands(cmd_name_and_help, nobasicfields, extravalues):
    for cmdname, cmdhelp in cmd_name_and_help:
        cmdname = _escape(cmdname)

        word += (
            "\n"
            + "# ------ {} -------".format("fieldsetups for " + cmdname)
            + "\n"
        )
        word += f"\n# ------ fieldsetups for {cmdname} -------\n"
        word += BL_NEED2.format(
            ("-a " + cmdname), ("-f " + "-d " + wrap(clean_whitespace(cmdhelp)))
            f"-a {cmdname}", f"-f -d {wrap(clean_whitespace(cmdhelp))}"
        )

        if nobasicfields is False:
            word += BL_USE3.format(
                cmdname,
                ("-a " + wrap("$FIELDS")),
                ("-f " + "-d " + wrap("fieldname")),
                f"-a {wrap('$FIELDS')}",
                f"-f -d {wrap('fieldname')}",
            )

        if extravalues:
            for f in extravalues:
                setvar = wrap("$" + f.upper() + "S")
                word += (
                    " ".join(
                        BL_EXTRA3.format(
                            (cmdname + " " + f + ":"),
                            ("-f " + "-A " + "-a " + setvar),
                            ("-d " + wrap(f)),
                        ).split()
                    )
                    + "\n"
                setvar = wrap(f"${f.upper()}S")
                word += " ".join(
                    BL_EXTRA3.format(
                        f"{cmdname} {f}:",
                        f"-f -A -a {setvar}",
                        f"-d {wrap(f)}",
                    ).split()
                )
                word += "\n"
    return word

@@ -267,59 +242,44 @@ def get_all_commands(beetcmds):
        for name in names:
            name = _escape(name)

            word += "\n"
            word += (
                ("\n" * 2)
                + "# ====== {} =====".format("completions for " + name)
                + "\n"
            )
            word += f"\n\n\n# ====== completions for {name} =====\n"

            for option in cmd.parser._get_all_options()[1:]:
                cmd_l = (
                    (" -l " + option._long_opts[0].replace("--", ""))
                    f" -l {option._long_opts[0].replace('--', '')}"
                    if option._long_opts
                    else ""
                )
                cmd_s = (
                    (" -s " + option._short_opts[0].replace("-", ""))
                    f" -s {option._short_opts[0].replace('-', '')}"
                    if option._short_opts
                    else ""
                )
                cmd_need_arg = " -r " if option.nargs in [1] else ""
                cmd_helpstr = (
                    (" -d " + wrap(" ".join(option.help.split())))
                    f" -d {wrap(' '.join(option.help.split()))}"
                    if option.help
                    else ""
                )
                cmd_arglist = (
                    (" -a " + wrap(" ".join(option.choices)))
                    f" -a {wrap(' '.join(option.choices))}"
                    if option.choices
                    else ""
                )

                word += (
                    " ".join(
                        BL_USE3.format(
                            name,
                            (
                                cmd_need_arg
                                + cmd_s
                                + cmd_l
                                + " -f "
                                + cmd_arglist
                            ),
                            cmd_helpstr,
                        ).split()
                    )
                    + "\n"
                word += " ".join(
                    BL_USE3.format(
                        name,
                        f"{cmd_need_arg}{cmd_s}{cmd_l} -f {cmd_arglist}",
                        cmd_helpstr,
                    ).split()
                )
                word += "\n"

            word = word + " ".join(
                BL_USE3.format(
                    name,
                    ("-s " + "h " + "-l " + "help" + " -f "),
                    ("-d " + wrap("print help") + "\n"),
                ).split()
            word = word + BL_USE3.format(
                name,
                "-s h -l help -f",
                f"-d {wrap('print help')}",
            )
    return word

@@ -332,9 +292,9 @@ def clean_whitespace(word):
def wrap(word):
    # Need " or ' around strings but watch out if they're in the string
    sptoken = '"'
    if ('"') in word and ("'") in word:
    if '"' in word and ("'") in word:
        word.replace('"', sptoken)
        return '"' + word + '"'
        return f'"{word}"'

    tok = '"' if "'" in word else "'"
    return tok + word + tok
    return f"{tok}{word}{tok}"
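
The quoting helper above picks whichever quote character is absent from the string; a simplified runnable sketch of the common path (the mixed-quote branch is omitted):

def wrap(word: str) -> str:
    # Use double quotes when the string contains a single quote, else single.
    tok = '"' if "'" in word else "'"
    return f"{tok}{word}{tok}"

print(wrap("print help"))  # 'print help'
print(wrap("don't stop"))  # "don't stop"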

@@ -12,8 +12,8 @@
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.

"""If the title is empty, try to extract track and title from the
filename.
"""If the title is empty, try to extract it from the filename
(possibly also extracting the track and artist).
"""

import os

@@ -25,12 +25,12 @@ from beets.util import displayable_path
# Filename field extraction patterns.
PATTERNS = [
    # Useful patterns.
    r"^(?P<artist>.+)[\-_](?P<title>.+)[\-_](?P<tag>.*)$",
    r"^(?P<track>\d+)[\s.\-_]+(?P<artist>.+)[\-_](?P<title>.+)[\-_](?P<tag>.*)$",
    r"^(?P<artist>.+)[\-_](?P<title>.+)$",
    r"^(?P<track>\d+)[\s.\-_]+(?P<artist>.+)[\-_](?P<title>.+)$",
    r"^(?P<track>\d+)[\s.\-_]+(?P<title>.+)$",
    r"^(?P<track>\d+)\s+(?P<title>.+)$",
    (
        r"^(?P<track>\d+)\.?\s*-\s*(?P<artist>.+?)\s*-\s*(?P<title>.+?)"
        r"(\s*-\s*(?P<tag>.*))?$"
    ),
    r"^(?P<artist>.+?)\s*-\s*(?P<title>.+?)(\s*-\s*(?P<tag>.*))?$",
    r"^(?P<track>\d+)\.?[\s_-]+(?P<title>.+)$",
    r"^(?P<title>.+) by (?P<artist>.+)$",
    r"^(?P<track>\d+).*$",
    r"^(?P<title>.+)$",

@@ -98,6 +98,7 @@ def apply_matches(d, log):
    # Given both an "artist" and "title" field, assume that one is
    # *actually* the artist, which must be uniform, and use the other
    # for the title. This, of course, won't work for VA albums.
    # Only check for "artist": patterns containing it also contain "title".
    if "artist" in keys:
        if equal_fields(d, "artist"):
            artist = some_map["artist"]

@@ -112,21 +113,22 @@ def apply_matches(d, log):
        for item in d:
            if not item.artist:
                item.artist = artist
                log.info("Artist replaced with: {}".format(item.artist))

    # No artist field: remaining field is the title.
    else:
                log.info("Artist replaced with: {.artist}", item)
    # Otherwise, if the pattern contains "title", use that for title_field.
    elif "title" in keys:
        title_field = "title"
    else:
        title_field = None

    # Apply the title and track.
    # Apply the title and track, if any.
    for item in d:
        if bad_title(item.title):
        if title_field and bad_title(item.title):
            item.title = str(d[item][title_field])
            log.info("Title replaced with: {}".format(item.title))
            log.info("Title replaced with: {.title}", item)

        if "track" in d[item] and item.track == 0:
            item.track = int(d[item]["track"])
            log.info("Track replaced with: {}".format(item.track))
            log.info("Track replaced with: {.track}", item)


# Plugin structure and hook into import process.

@@ -160,6 +162,7 @@ class FromFilenamePlugin(plugins.BeetsPlugin):
        # Look for useful information in the filenames.
        for pattern in PATTERNS:
            self._log.debug(f"Trying pattern: {pattern}")
            d = all_matches(names, pattern)
            if d:
                apply_matches(d, self._log)
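
As an illustration of the reworked patterns above, the new combined pattern tolerates an optional dot after the track number and an optional trailing tag (the filename here is invented):

import re

PATTERN = (
    r"^(?P<track>\d+)\.?\s*-\s*(?P<artist>.+?)\s*-\s*(?P<title>.+?)"
    r"(\s*-\s*(?P<tag>.*))?$"
)

m = re.match(PATTERN, "01 - Some Artist - Some Title - 320kbps")
print(m.groupdict())
# {'track': '01', 'artist': 'Some Artist', 'title': 'Some Title', 'tag': '320kbps'}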

@@ -20,21 +20,26 @@ import re
from typing import TYPE_CHECKING

from beets import plugins, ui
from beets.util import displayable_path

if TYPE_CHECKING:
    from beets.importer import ImportSession, ImportTask
    from beets.library import Item


def split_on_feat(artist: str) -> tuple[str, str | None]:
def split_on_feat(
    artist: str,
    for_artist: bool = True,
    custom_words: list[str] | None = None,
) -> tuple[str, str | None]:
    """Given an artist string, split the "main" artist from any artist
    on the right-hand side of a string like "feat". Return the main
    artist, which is always a string, and the featuring artist, which
    may be a string or None if none is present.
    """
    # split on the first "feat".
    regex = re.compile(plugins.feat_tokens(), re.IGNORECASE)
    regex = re.compile(
        plugins.feat_tokens(for_artist, custom_words), re.IGNORECASE
    )
    parts = tuple(s.strip() for s in regex.split(artist, 1))
    if len(parts) == 1:
        return parts[0], None

@@ -43,43 +48,54 @@ def split_on_feat(artist: str) -> tuple[str, str | None]:
    return parts


def contains_feat(title: str) -> bool:
def contains_feat(title: str, custom_words: list[str] | None = None) -> bool:
    """Determine whether the title contains a "featured" marker."""
    return bool(
        re.search(
            plugins.feat_tokens(for_artist=False),
            plugins.feat_tokens(for_artist=False, custom_words=custom_words),
            title,
            flags=re.IGNORECASE,
        )
    )


def find_feat_part(artist: str, albumartist: str) -> str | None:
def find_feat_part(
    artist: str,
    albumartist: str | None,
    custom_words: list[str] | None = None,
) -> str | None:
    """Attempt to find featured artists in the item's artist fields and
    return the results. Returns None if no featured artist found.
    """
    # Look for the album artist in the artist field. If it's not
    # present, give up.
    albumartist_split = artist.split(albumartist, 1)
    if len(albumartist_split) <= 1:
        return None
    # Handle a wider variety of extraction cases if the album artist is
    # contained within the track artist.
    if albumartist and albumartist in artist:
        albumartist_split = artist.split(albumartist, 1)

    # If the last element of the split (the right-hand side of the
    # album artist) is nonempty, then it probably contains the
    # featured artist.
    elif albumartist_split[1] != "":
        # Extract the featured artist from the right-hand side.
        _, feat_part = split_on_feat(albumartist_split[1])
        return feat_part
        # If the last element of the split (the right-hand side of the
        # album artist) is nonempty, then it probably contains the
        # featured artist.
        if albumartist_split[1] != "":
            # Extract the featured artist from the right-hand side.
            _, feat_part = split_on_feat(
                albumartist_split[1], custom_words=custom_words
            )
            return feat_part

    # Otherwise, if there's nothing on the right-hand side, look for a
    # featuring artist on the left-hand side.
    else:
        lhs, rhs = split_on_feat(albumartist_split[0])
        if lhs:
            return lhs
        # Otherwise, if there's nothing on the right-hand side,
        # look for a featuring artist on the left-hand side.
        else:
            lhs, _ = split_on_feat(
                albumartist_split[0], custom_words=custom_words
            )
            if lhs:
                return lhs

    return None
    # Fall back to conservative handling of the track artist without relying
    # on albumartist, which covers compilations using a 'Various Artists'
    # albumartist and album tracks by a guest artist featuring a third artist.
    _, feat_part = split_on_feat(artist, False, custom_words)
    return feat_part


class FtInTitlePlugin(plugins.BeetsPlugin):

@@ -90,8 +106,10 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
            {
                "auto": True,
                "drop": False,
                "format": "feat. {0}",
                "format": "feat. {}",
                "keep_in_artist": False,
                "preserve_album_artist": True,
                "custom_words": [],
            }
        )

@@ -116,10 +134,20 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
            self.config.set_args(opts)
            drop_feat = self.config["drop"].get(bool)
            keep_in_artist_field = self.config["keep_in_artist"].get(bool)
            preserve_album_artist = self.config["preserve_album_artist"].get(
                bool
            )
            custom_words = self.config["custom_words"].get(list)
            write = ui.should_write()

            for item in lib.items(args):
                if self.ft_in_title(item, drop_feat, keep_in_artist_field):
                if self.ft_in_title(
                    item,
                    drop_feat,
                    keep_in_artist_field,
                    preserve_album_artist,
                    custom_words,
                ):
                    item.store()
                    if write:
                        item.try_write()

@@ -131,9 +159,17 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
        """Import hook for moving featuring artist automatically."""
        drop_feat = self.config["drop"].get(bool)
        keep_in_artist_field = self.config["keep_in_artist"].get(bool)
        preserve_album_artist = self.config["preserve_album_artist"].get(bool)
        custom_words = self.config["custom_words"].get(list)

        for item in task.imported_items():
            if self.ft_in_title(item, drop_feat, keep_in_artist_field):
            if self.ft_in_title(
                item,
                drop_feat,
                keep_in_artist_field,
                preserve_album_artist,
                custom_words,
            ):
                item.store()

    def update_metadata(

@@ -142,6 +178,7 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
        feat_part: str,
        drop_feat: bool,
        keep_in_artist_field: bool,
        custom_words: list[str],
    ) -> None:
        """Choose how to add new artists to the title and set the new
        metadata. Also, print out messages about any changes that are made.

@@ -151,23 +188,28 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
        # In case the artist is kept, do not update the artist fields.
        if keep_in_artist_field:
            self._log.info(
                "artist: {0} (Not changing due to keep_in_artist)", item.artist
                "artist: {.artist} (Not changing due to keep_in_artist)", item
            )
        else:
            self._log.info("artist: {0} -> {1}", item.artist, item.albumartist)
            item.artist = item.albumartist
            track_artist, _ = split_on_feat(
                item.artist, custom_words=custom_words
            )
            self._log.info("artist: {0.artist} -> {1}", item, track_artist)
            item.artist = track_artist

        if item.artist_sort:
            # Just strip the featured artist from the sort name.
            item.artist_sort, _ = split_on_feat(item.artist_sort)
            item.artist_sort, _ = split_on_feat(
                item.artist_sort, custom_words=custom_words
            )

        # Only update the title if it does not already contain a featured
        # artist and if we do not drop featuring information.
        if not drop_feat and not contains_feat(item.title):
        if not drop_feat and not contains_feat(item.title, custom_words):
            feat_format = self.config["format"].as_str()
            new_format = feat_format.format(feat_part)
            new_title = f"{item.title} {new_format}"
            self._log.info("title: {0} -> {1}", item.title, new_title)
            self._log.info("title: {.title} -> {}", item, new_title)
            item.title = new_title

    def ft_in_title(

@@ -175,6 +217,8 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
        item: Item,
        drop_feat: bool,
        keep_in_artist_field: bool,
        preserve_album_artist: bool,
        custom_words: list[str],
    ) -> bool:
        """Look for featured artists in the item's artist fields and move
        them to the title.

@@ -188,22 +232,24 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
        # Check whether there is a featured artist on this track and the
        # artist field does not exactly match the album artist field. In
        # that case, we attempt to move the featured artist to the title.
        if not albumartist or albumartist == artist:
        if preserve_album_artist and albumartist and artist == albumartist:
            return False

        _, featured = split_on_feat(artist)
        _, featured = split_on_feat(artist, custom_words=custom_words)
        if not featured:
            return False

        self._log.info("{}", displayable_path(item.path))
        self._log.info("{.filepath}", item)

        # Attempt to find the featured artist.
        feat_part = find_feat_part(artist, albumartist)
        feat_part = find_feat_part(artist, albumartist, custom_words)

        if not feat_part:
            self._log.info("no featuring artists found")
            return False

        # If we have a featuring artist, move it to the title.
        self.update_metadata(item, feat_part, drop_feat, keep_in_artist_field)
        self.update_metadata(
            item, feat_part, drop_feat, keep_in_artist_field, custom_words
        )
        return True
@@ -62,7 +62,7 @@ class HookPlugin(BeetsPlugin):
    def create_and_register_hook(self, event, command):
        def hook_function(**kwargs):
            if command is None or len(command) == 0:
                self._log.error('invalid command "{0}"', command)
                self._log.error('invalid command "{}"', command)
                return

            # For backwards compatibility, use a string formatter that decodes

@@ -74,7 +74,7 @@ class HookPlugin(BeetsPlugin):
            ]

            self._log.debug(
                'running command "{0}" for event {1}',
                'running command "{}" for event {}',
                " ".join(command_pieces),
                event,
            )

@@ -83,9 +83,9 @@ class HookPlugin(BeetsPlugin):
                subprocess.check_call(command_pieces)
            except subprocess.CalledProcessError as exc:
                self._log.error(
                    "hook for {0} exited with status {1}", event, exc.returncode
                    "hook for {} exited with status {.returncode}", event, exc
                )
            except OSError as exc:
                self._log.error("hook for {0} failed: {1}", event, exc)
                self._log.error("hook for {} failed: {}", event, exc)

        self.register_listener(event, hook_function)

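A note on the pattern repeated throughout these hunks: the plugin loggers take str.format-style templates plus raw arguments, so interpolation is deferred until a record is actually emitted, and the change here is only dropping the redundant explicit indices ({0}, {1}) in favour of auto-numbered {} fields. A minimal, illustrative sketch of that deferred formatting (the wrapper class below is a stand-in, not a beets internal):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("demo")

    class LazyFormat:
        """Wrap a str.format template; interpolation runs only if emitted."""

        def __init__(self, fmt, *args):
            self.fmt, self.args = fmt, args

        def __str__(self):
            return self.fmt.format(*self.args)

    class Proc:
        returncode = 1

    # "{}" fields auto-number left to right; "{.returncode}" performs an
    # attribute lookup on the matching positional argument, which is why the
    # bare exception object can be passed instead of exc.returncode.
    logger.info(
        LazyFormat("hook for {} exited with status {.returncode}", "import", Proc())
    )
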
@@ -70,10 +70,10 @@ class IHatePlugin(BeetsPlugin):
            self._log.debug("processing your hate")
            if self.do_i_hate_this(task, skip_queries):
                task.choice_flag = Action.SKIP
                self._log.info("skipped: {0}", summary(task))
                self._log.info("skipped: {}", summary(task))
                return
            if self.do_i_hate_this(task, warn_queries):
                self._log.info("you may hate this: {0}", summary(task))
                self._log.info("you may hate this: {}", summary(task))
            else:
                self._log.debug("nothing to do")
        else:

@@ -94,7 +94,7 @@ class ImportAddedPlugin(BeetsPlugin):
        mtime = os.stat(util.syspath(source)).st_mtime
        self.item_mtime[destination] = mtime
        self._log.debug(
            "Recorded mtime {0} for item '{1}' imported from '{2}'",
            "Recorded mtime {} for item '{}' imported from '{}'",
            mtime,
            util.displayable_path(destination),
            util.displayable_path(source),

@@ -103,9 +103,9 @@ class ImportAddedPlugin(BeetsPlugin):
    def update_album_times(self, lib, album):
        if self.reimported_album(album):
            self._log.debug(
                "Album '{0}' is reimported, skipping import of "
                "Album '{.filepath}' is reimported, skipping import of "
                "added dates for the album and its items.",
                util.displayable_path(album.path),
                album,
            )
            return

@@ -119,18 +119,17 @@ class ImportAddedPlugin(BeetsPlugin):
            item.store()
        album.added = min(album_mtimes)
        self._log.debug(
            "Import of album '{0}', selected album.added={1} "
            "Import of album '{0.album}', selected album.added={0.added} "
            "from item file mtimes.",
            album.album,
            album.added,
            album,
        )
        album.store()

    def update_item_times(self, lib, item):
        if self.reimported_item(item):
            self._log.debug(
                "Item '{0}' is reimported, skipping import of added date.",
                util.displayable_path(item.path),
                "Item '{.filepath}' is reimported, skipping import of added date.",
                item,
            )
            return
        mtime = self.item_mtime.pop(item.path, None)

@@ -139,9 +138,8 @@ class ImportAddedPlugin(BeetsPlugin):
            if self.config["preserve_mtimes"].get(bool):
                self.write_item_mtime(item, mtime)
            self._log.debug(
                "Import of item '{0}', selected item.added={1}",
                util.displayable_path(item.path),
                item.added,
                "Import of item '{0.filepath}', selected item.added={0.added}",
                item,
            )
            item.store()

@@ -153,7 +151,6 @@ class ImportAddedPlugin(BeetsPlugin):
        if self.config["preserve_write_mtimes"].get(bool):
            self.write_item_mtime(item, item.added)
        self._log.debug(
            "Write of item '{0}', selected item.added={1}",
            util.displayable_path(item.path),
            item.added,
            "Write of item '{0.filepath}', selected item.added={0.added}",
            item,
        )

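The importadded hunks lean on a str.format feature worth spelling out: an explicit index lets a single positional argument feed several fields, including attribute lookups, so one object replaces two pre-extracted values. A quick interpreter-level check (the Album stand-in here is made up):

    from types import SimpleNamespace

    album = SimpleNamespace(album="Blue Train", added=1696118400.0)

    # One argument, referenced twice by index, with attribute access.
    msg = "Import of album '{0.album}', selected album.added={0.added}".format(album)
    print(msg)  # Import of album 'Blue Train', selected album.added=1696118400.0
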
@@ -50,7 +50,7 @@ def _build_m3u_filename(basename):
    path = normpath(
        os.path.join(
            config["importfeeds"]["dir"].as_filename(),
            date + "_" + basename + ".m3u",
            f"{date}_{basename}.m3u",
        )
    )
    return path

@@ -136,7 +136,7 @@ class ImportFeedsPlugin(BeetsPlugin):
        if "echo" in formats:
            self._log.info("Location of imported music:")
            for path in paths:
                self._log.info(" {0}", path)
                self._log.info(" {}", path)

    def album_imported(self, lib, album):
        self._record_items(lib, album.album, album.items())

@@ -117,7 +117,6 @@ def print_data(data, item=None, fmt=None):
        return

    maxwidth = max(len(key) for key in formatted)
    lineformat = f"{{0:>{maxwidth}}}: {{1}}"

    if path:
        ui.print_(displayable_path(path))

@@ -126,7 +125,7 @@ def print_data(data, item=None, fmt=None):
        value = formatted[field]
        if isinstance(value, list):
            value = "; ".join(value)
        ui.print_(lineformat.format(field, value))
        ui.print_(f"{field:>{maxwidth}}: {value}")


def print_data_keys(data, item=None):

@@ -139,12 +138,11 @@ def print_data_keys(data, item=None):
    if len(formatted) == 0:
        return

    line_format = "{0}{{0}}".format(" " * 4)
    if path:
        ui.print_(displayable_path(path))

    for field in sorted(formatted):
        ui.print_(line_format.format(field))
        ui.print_(f"    {field}")


class InfoPlugin(BeetsPlugin):

@@ -221,7 +219,7 @@ class InfoPlugin(BeetsPlugin):
        try:
            data, item = data_emitter(included_keys or "*")
        except (mediafile.UnreadableFileError, OSError) as ex:
            self._log.error("cannot read file: {0}", ex)
            self._log.error("cannot read file: {}", ex)
            continue

        if opts.summarize:

@@ -28,8 +28,7 @@ class InlineError(Exception):

    def __init__(self, code, exc):
        super().__init__(
            ("error in inline path field code:\n%s\n%s: %s")
            % (code, type(exc).__name__, str(exc))
            f"error in inline path field code:\n{code}\n{type(exc).__name__}: {exc}"
        )


@@ -37,7 +36,8 @@ def _compile_func(body):
    """Given Python code for a function body, return a compiled
    callable that invokes that code.
    """
    body = "def {}():\n    {}".format(FUNC_NAME, body.replace("\n", "\n    "))
    body = body.replace("\n", "\n    ")
    body = f"def {FUNC_NAME}():\n    {body}"
    code = compile(body, "inline", "exec")
    env = {}
    eval(code, env)

@@ -60,14 +60,14 @@ class InlinePlugin(BeetsPlugin):
        for key, view in itertools.chain(
            config["item_fields"].items(), config["pathfields"].items()
        ):
            self._log.debug("adding item field {0}", key)
            self._log.debug("adding item field {}", key)
            func = self.compile_inline(view.as_str(), False)
            if func is not None:
                self.template_fields[key] = func

        # Album fields.
        for key, view in config["album_fields"].items():
            self._log.debug("adding album field {0}", key)
            self._log.debug("adding album field {}", key)
            func = self.compile_inline(view.as_str(), True)
            if func is not None:
                self.album_template_fields[key] = func

@@ -87,7 +87,7 @@ class InlinePlugin(BeetsPlugin):
            func = _compile_func(python_code)
        except SyntaxError:
            self._log.error(
                "syntax error in inline field definition:\n{0}",
                "syntax error in inline field definition:\n{}",
                traceback.format_exc(),
            )
            return

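The inline hunk above reindents the user's statement block and wraps it in a function definition before compiling. A standalone sketch of that wrap-and-compile trick, under the assumption that the body is trusted configuration text (FUNC_NAME and the example body are placeholders):

    FUNC_NAME = "__INLINE_FUNC__"

    def compile_func(body: str):
        """Compile a Python function body string into a callable."""
        # Indent every line of the body so it sits inside the def block.
        body = body.replace("\n", "\n    ")
        source = f"def {FUNC_NAME}():\n    {body}"
        code = compile(source, "inline", "exec")
        env: dict = {}
        exec(code, env)  # defines FUNC_NAME inside env
        return env[FUNC_NAME]

    func = compile_func("x = 2\nreturn x * 21")
    print(func())  # 42
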
@@ -77,7 +77,7 @@ class IPFSPlugin(BeetsPlugin):
                for album in lib.albums(args):
                    if len(album.items()) == 0:
                        self._log.info(
                            "{0} does not contain items, aborting", album
                            "{} does not contain items, aborting", album
                        )

                    self.ipfs_add(album)

@@ -122,13 +122,13 @@ class IPFSPlugin(BeetsPlugin):
            return False
        try:
            if album.ipfs:
                self._log.debug("{0} already added", album_dir)
                self._log.debug("{} already added", album_dir)
                # Already added to ipfs
                return False
        except AttributeError:
            pass

        self._log.info("Adding {0} to ipfs", album_dir)
        self._log.info("Adding {} to ipfs", album_dir)

        if self.config["nocopy"]:
            cmd = "ipfs add --nocopy -q -r".split()

@@ -138,7 +138,7 @@ class IPFSPlugin(BeetsPlugin):
        try:
            output = util.command_output(cmd).stdout.split()
        except (OSError, subprocess.CalledProcessError) as exc:
            self._log.error("Failed to add {0}, error: {1}", album_dir, exc)
            self._log.error("Failed to add {}, error: {}", album_dir, exc)
            return False
        length = len(output)


@@ -146,12 +146,12 @@ class IPFSPlugin(BeetsPlugin):
            line = line.strip()
            if linenr == length - 1:
                # last printed line is the album hash
                self._log.info("album: {0}", line)
                self._log.info("album: {}", line)
                album.ipfs = line
            else:
                try:
                    item = album.items()[linenr]
                    self._log.info("item: {0}", line)
                    self._log.info("item: {}", line)
                    item.ipfs = line
                    item.store()
                except IndexError:

@@ -180,11 +180,11 @@ class IPFSPlugin(BeetsPlugin):
            util.command_output(cmd)
        except (OSError, subprocess.CalledProcessError) as err:
            self._log.error(
                "Failed to get {0} from ipfs.\n{1}", _hash, err.output
                "Failed to get {} from ipfs.\n{.output}", _hash, err
            )
            return False

        self._log.info("Getting {0} from ipfs", _hash)
        self._log.info("Getting {} from ipfs", _hash)
        imp = ui.commands.TerminalImportSession(
            lib, loghandler=None, query=None, paths=[_hash]
        )

@@ -208,7 +208,7 @@ class IPFSPlugin(BeetsPlugin):
            msg = f"Failed to publish library. Error: {err}"
            self._log.error(msg)
            return False
        self._log.info("hash of library: {0}", output)
        self._log.info("hash of library: {}", output)

    def ipfs_import(self, lib, args):
        _hash = args[0]

@@ -232,7 +232,7 @@ class IPFSPlugin(BeetsPlugin):
        try:
            util.command_output(cmd)
        except (OSError, subprocess.CalledProcessError):
            self._log.error(f"Could not import {_hash}")
            self._log.error("Could not import {}", _hash)
            return False

        # add all albums from remotes into a combined library

@@ -306,7 +306,7 @@ class IPFSPlugin(BeetsPlugin):
                items.append(item)
        if len(items) < 1:
            return False
        self._log.info("Adding '{0}' to temporary library", album)
        self._log.info("Adding '{}' to temporary library", album)
        new_album = tmplib.add_album(items)
        new_album.ipfs = album.ipfs
        new_album.store(inherit=False)

@@ -65,7 +65,7 @@ class KeyFinderPlugin(BeetsPlugin):
                    command + [util.syspath(item.path)]
                ).stdout
            except (subprocess.CalledProcessError, OSError) as exc:
                self._log.error("execution failed: {0}", exc)
                self._log.error("execution failed: {}", exc)
                continue

            try:

@@ -73,7 +73,7 @@ class KeyFinderPlugin(BeetsPlugin):
            except IndexError:
                # Sometimes keyfinder-cli returns 0 but with no key, usually
                # when the file is silent or corrupt, so we log and skip.
                self._log.error("no key returned for path: {0}", item.path)
                self._log.error("no key returned for path: {.path}", item)
                continue

            try:

@@ -84,9 +84,7 @@ class KeyFinderPlugin(BeetsPlugin):

            item["initial_key"] = key
            self._log.info(
                "added computed initial key {0} for {1}",
                key,
                util.displayable_path(item.path),
                "added computed initial key {} for {.filepath}", key, item
            )

            if write:

@@ -96,10 +96,10 @@ class KodiUpdate(BeetsPlugin):
                continue

            self._log.info(
                "Kodi update triggered for {0}:{1}",
                "Kodi update triggered for {}:{}",
                instance["host"],
                instance["port"],
            )
        except requests.exceptions.RequestException as e:
            self._log.warning("Kodi update failed: {0}", str(e))
            self._log.warning("Kodi update failed: {}", str(e))
            continue

@@ -22,17 +22,23 @@ The scraper script used is available here:
https://gist.github.com/1241307
"""

import codecs
from __future__ import annotations

import os
import traceback
from typing import Union
from functools import singledispatchmethod
from pathlib import Path
from typing import TYPE_CHECKING, Union

import pylast
import yaml

from beets import config, library, plugins, ui
from beets.library import Album, Item
from beets.util import normpath, plurality, unique_list
from beets.util import plurality, unique_list

if TYPE_CHECKING:
    from beets.library import LibModel

LASTFM = pylast.LastFMNetwork(api_key=plugins.LASTFM_KEY)


@@ -42,10 +48,6 @@ PYLAST_EXCEPTIONS = (
    pylast.NetworkError,
)

REPLACE = {
    "\u2010": "-",
}


# Canonicalization tree processing.


@@ -104,7 +106,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):
                "separator": ", ",
                "prefer_specific": False,
                "title_case": True,
                "extended_debug": False,
                "pretend": False,
            }
        )
        self.setup()

@@ -115,39 +117,54 @@ class LastGenrePlugin(plugins.BeetsPlugin):
            self.import_stages = [self.imported]

        self._genre_cache = {}
        self.whitelist = self._load_whitelist()
        self.c14n_branches, self.canonicalize = self._load_c14n_tree()

        # Read the whitelist file if enabled.
        self.whitelist = set()
    def _load_whitelist(self) -> set[str]:
        """Load the whitelist from a text file.

        Default whitelist is used if config is True, empty string or set to "nothing".
        """
        whitelist = set()
        wl_filename = self.config["whitelist"].get()
        if wl_filename in (True, ""):  # Indicates the default whitelist.
        if wl_filename in (True, "", None):  # Indicates the default whitelist.
            wl_filename = WHITELIST
        if wl_filename:
            wl_filename = normpath(wl_filename)
            with open(wl_filename, "rb") as f:
                for line in f:
                    line = line.decode("utf-8").strip().lower()
                    if line and not line.startswith("#"):
                        self.whitelist.add(line)
            self._log.debug("Loading whitelist {}", wl_filename)
            text = Path(wl_filename).expanduser().read_text(encoding="utf-8")
            for line in text.splitlines():
                if (line := line.strip().lower()) and not line.startswith("#"):
                    whitelist.add(line)

        # Read the genres tree for canonicalization if enabled.
        self.c14n_branches = []
        return whitelist

    def _load_c14n_tree(self) -> tuple[list[list[str]], bool]:
        """Load the canonicalization tree from a YAML file.

        Default tree is used if config is True, empty string, set to "nothing"
        or if prefer_specific is enabled.
        """
        c14n_branches: list[list[str]] = []
        c14n_filename = self.config["canonical"].get()
        self.canonicalize = c14n_filename is not False

        canonicalize = c14n_filename is not False
        # Default tree
        if c14n_filename in (True, ""):
            c14n_filename = C14N_TREE
        elif not self.canonicalize and self.config["prefer_specific"].get():
        if c14n_filename in (True, "", None) or (
            # prefer_specific requires a tree, load default tree
            not canonicalize and self.config["prefer_specific"].get()
        ):
            c14n_filename = C14N_TREE

        # Read the tree
        if c14n_filename:
            self._log.debug("Loading canonicalization tree {0}", c14n_filename)
            c14n_filename = normpath(c14n_filename)
            with codecs.open(c14n_filename, "r", encoding="utf-8") as f:
            self._log.debug("Loading canonicalization tree {}", c14n_filename)
            with Path(c14n_filename).expanduser().open(encoding="utf-8") as f:
                genres_tree = yaml.safe_load(f)
            flatten_tree(genres_tree, [], self.c14n_branches)
            flatten_tree(genres_tree, [], c14n_branches)
        return c14n_branches, canonicalize

    def _tunelog(self, msg, *args, **kwargs):
        """Log tuning messages at DEBUG level when verbosity level is high enough."""
        if config["verbose"].as_number() >= 3:
            self._log.debug(msg, *args, **kwargs)

    @property
    def sources(self) -> tuple[str, ...]:

@@ -184,31 +201,28 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        return [p[1] for p in depth_tag_pairs]

    def _resolve_genres(self, tags: list[str]) -> list[str]:
        """Filter, deduplicate, sort, canonicalize provided genres list.
        """Canonicalize, sort and filter a list of genres.

        - Returns an empty list if the input tags list is empty.
        - If canonicalization is enabled, it extends the list by incorporating
          parent genres from the canonicalization tree. When a whitelist is set,
          only parent tags that pass a validity check (_is_valid) are included;
          otherwise, it adds the oldest ancestor.
        - During canonicalization, it stops adding parent tags if the count of
          tags reaches the configured limit (count).
          otherwise, it adds the oldest ancestor. Adding parent tags is stopped
          when the count of tags reaches the configured limit (count).
        - The tags list is then deduplicated to ensure only unique genres are
          retained.
        - Optionally, if the 'prefer_specific' configuration is enabled, the
          list is sorted by the specificity (depth in the canonicalization tree)
          of the genres.
        - The method then filters the tag list, ensuring that only valid
          genres (those that pass the _is_valid method) are kept. If a
          whitelist is set, only genres in the whitelist are considered valid
          (which may even result in no genres at all being retained).
        - Finally, the filtered list of genres, limited to
          the configured count is returned.
        - If the 'prefer_specific' configuration is enabled, the list is sorted
          by the specificity (depth in the canonicalization tree) of the genres.
        - Finally applies whitelist filtering to ensure that only valid
          genres are kept. (This may result in no genres at all being retained).
        - Returns the filtered list of genres, limited to the configured count.
        """
        if not tags:
            return []

        count = self.config["count"].get(int)

        # Canonicalization (if enabled)
        if self.canonicalize:
            # Extend the list to consider tags parents in the c14n tree
            tags_all = []

@@ -242,8 +256,8 @@ class LastGenrePlugin(plugins.BeetsPlugin):

        # c14n only adds allowed genres but we may have had forbidden genres in
        # the original tags list
        valid_tags = self._filter_valid_genres(tags)
        return valid_tags[: self.config["count"].get(int)]
        valid_tags = [t for t in tags if self._is_valid(t)]
        return valid_tags[:count]

    def fetch_genre(self, lastfm_obj):
        """Return the genre for a pylast entity or None if no suitable genre

@@ -252,12 +266,6 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        min_weight = self.config["min_weight"].get(int)
        return self._tags_for(lastfm_obj, min_weight)

    def _filter_valid_genres(self, genres: list[str]) -> list[str]:
        """Filter list of genres, only keep valid."""
        if not genres:
            return []
        return [x for x in genres if self._is_valid(x)]

    def _is_valid(self, genre: str) -> bool:
        """Check if the genre is valid.


@@ -281,7 +289,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        """
        # Shortcut if we're missing metadata.
        if any(not s for s in args):
            return None
            return []

        key = f"{entity}.{'-'.join(str(a) for a in args)}"
        if key not in self._genre_cache:

@@ -289,34 +297,27 @@ class LastGenrePlugin(plugins.BeetsPlugin):
            self._genre_cache[key] = self.fetch_genre(method(*args))

        genre = self._genre_cache[key]
        if self.config["extended_debug"]:
            self._log.debug(f"last.fm (unfiltered) {entity} tags: {genre}")
        self._tunelog("last.fm (unfiltered) {} tags: {}", entity, genre)
        return genre

    def fetch_album_genre(self, obj):
        """Return the album genre for this Item or Album."""
        return self._filter_valid_genres(
            self._last_lookup(
                "album", LASTFM.get_album, obj.albumartist, obj.album
            )
        """Return raw album genres from Last.fm for this Item or Album."""
        return self._last_lookup(
            "album", LASTFM.get_album, obj.albumartist, obj.album
        )

    def fetch_album_artist_genre(self, obj):
        """Return the album artist genre for this Item or Album."""
        return self._filter_valid_genres(
            self._last_lookup("artist", LASTFM.get_artist, obj.albumartist)
        )
        """Return raw album artist genres from Last.fm for this Item or Album."""
        return self._last_lookup("artist", LASTFM.get_artist, obj.albumartist)

    def fetch_artist_genre(self, item):
        """Returns the track artist genre for this Item."""
        return self._filter_valid_genres(
            self._last_lookup("artist", LASTFM.get_artist, item.artist)
        )
        """Returns raw track artist genres from Last.fm for this Item."""
        return self._last_lookup("artist", LASTFM.get_artist, item.artist)

    def fetch_track_genre(self, obj):
        """Returns the track genre for this Item."""
        return self._filter_valid_genres(
            self._last_lookup("track", LASTFM.get_track, obj.artist, obj.title)
        """Returns raw track genres from Last.fm for this Item."""
        return self._last_lookup(
            "track", LASTFM.get_track, obj.artist, obj.title
        )

    # Main processing: _get_genre() and helpers.

@@ -330,7 +331,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):

        return self.config["separator"].as_str().join(formatted)

    def _get_existing_genres(self, obj: Union[Album, Item]) -> list[str]:
    def _get_existing_genres(self, obj: LibModel) -> list[str]:
        """Return a list of genres for this Item or Album. Empty string genres
        are removed."""
        separator = self.config["separator"].get()

@@ -346,14 +347,12 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        self, old: list[str], new: list[str]
    ) -> list[str]:
        """Combine old and new genres and process via _resolve_genres."""
        self._log.debug(f"valid last.fm tags: {new}")
        self._log.debug(f"existing genres taken into account: {old}")
        self._log.debug("raw last.fm tags: {}", new)
        self._log.debug("existing genres taken into account: {}", old)
        combined = old + new
        return self._resolve_genres(combined)

    def _get_genre(
        self, obj: Union[Album, Item]
    ) -> tuple[Union[str, None], ...]:
    def _get_genre(self, obj: LibModel) -> tuple[Union[str, None], ...]:
        """Get the final genre string for an Album or Item object.

        `self.sources` specifies allowed genre sources. Starting with the first

@@ -372,9 +371,22 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        applied, while "artist, any" means only new last.fm genres are included
        and the whitelist feature was disabled.
        """

        def _try_resolve_stage(stage_label: str, keep_genres, new_genres):
            """Try to resolve genres for a given stage and log the result."""
            resolved_genres = self._combine_resolve_and_log(
                keep_genres, new_genres
            )
            if resolved_genres:
                suffix = "whitelist" if self.whitelist else "any"
                label = f"{stage_label}, {suffix}"
                if keep_genres:
                    label = f"keep + {label}"
                return self._format_and_stringify(resolved_genres), label
            return None

        keep_genres = []
        new_genres = []
        label = ""
        genres = self._get_existing_genres(obj)

        if genres and not self.config["force"]:

@@ -394,20 +406,26 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        # album artist, or most popular track genre.
        if isinstance(obj, library.Item) and "track" in self.sources:
            if new_genres := self.fetch_track_genre(obj):
                label = "track"
            if result := _try_resolve_stage(
                "track", keep_genres, new_genres
            ):
                return result

        if not new_genres and "album" in self.sources:
        if "album" in self.sources:
            if new_genres := self.fetch_album_genre(obj):
                label = "album"
            if result := _try_resolve_stage(
                "album", keep_genres, new_genres
            ):
                return result

        if not new_genres and "artist" in self.sources:
        if "artist" in self.sources:
            new_genres = []
            if isinstance(obj, library.Item):
                new_genres = self.fetch_artist_genre(obj)
                label = "artist"
                stage_label = "artist"
            elif obj.albumartist != config["va_name"].as_str():
                new_genres = self.fetch_album_artist_genre(obj)
                label = "album artist"
                stage_label = "album artist"
            else:
                # For "Various Artists", pick the most popular track genre.
                item_genres = []

@@ -422,24 +440,18 @@ class LastGenrePlugin(plugins.BeetsPlugin):
                if item_genres:
                    most_popular, rank = plurality(item_genres)
                    new_genres = [most_popular]
                    label = "most popular track"
                    stage_label = "most popular track"
                    self._log.debug(
                        'Most popular track genre "{}" ({}) for VA album.',
                        most_popular,
                        rank,
                    )

        # Return with a combined or freshly fetched genre list.
        if new_genres:
            resolved_genres = self._combine_resolve_and_log(
                keep_genres, new_genres
            )
            if resolved_genres:
                suffix = "whitelist" if self.whitelist else "any"
                label += f", {suffix}"
                if keep_genres:
                    label = f"keep + {label}"
                return self._format_and_stringify(resolved_genres), label
            if new_genres:
                if result := _try_resolve_stage(
                    stage_label, keep_genres, new_genres
                ):
                    return result

        # Nothing found, leave original if configured and valid.
        if obj.genre and self.config["keep_existing"]:

@@ -455,8 +467,47 @@ class LastGenrePlugin(plugins.BeetsPlugin):

    # Beets plugin hooks and CLI.

    def _fetch_and_log_genre(self, obj: LibModel) -> None:
        """Fetch genre and log it."""
        self._log.info(str(obj))
        obj.genre, label = self._get_genre(obj)
        self._log.debug("Resolved ({}): {}", label, obj.genre)

        ui.show_model_changes(obj, fields=["genre"], print_obj=False)

    @singledispatchmethod
    def _process(self, obj: LibModel, write: bool) -> None:
        """Process an object, dispatching to the appropriate method."""
        raise NotImplementedError

    @_process.register
    def _process_track(self, obj: Item, write: bool) -> None:
        """Process a single track/item."""
        self._fetch_and_log_genre(obj)
        if not self.config["pretend"]:
            obj.try_sync(write=write, move=False)

    @_process.register
    def _process_album(self, obj: Album, write: bool) -> None:
        """Process an entire album."""
        self._fetch_and_log_genre(obj)
        if "track" in self.sources:
            for item in obj.items():
                self._process(item, write)

        if not self.config["pretend"]:
            obj.try_sync(
                write=write, move=False, inherit="track" not in self.sources
            )

    def commands(self):
        lastgenre_cmd = ui.Subcommand("lastgenre", help="fetch genres")
        lastgenre_cmd.parser.add_option(
            "-p",
            "--pretend",
            action="store_true",
            help="show actions but do nothing",
        )
        lastgenre_cmd.parser.add_option(
            "-f",
            "--force",

@@ -506,94 +557,20 @@ class LastGenrePlugin(plugins.BeetsPlugin):
            dest="album",
            help="match albums instead of items (default)",
        )
        lastgenre_cmd.parser.add_option(
            "-d",
            "--debug",
            action="store_true",
            dest="extended_debug",
            help="extended last.fm debug logging",
        )
        lastgenre_cmd.parser.set_defaults(album=True)

        def lastgenre_func(lib, opts, args):
            write = ui.should_write()
            self.config.set_args(opts)

            if opts.album:
                # Fetch genres for whole albums
                for album in lib.albums(args):
                    album.genre, src = self._get_genre(album)
                    self._log.info(
                        'genre for album "{0.album}" ({1}): {0.genre}',
                        album,
                        src,
                    )
                    if "track" in self.sources:
                        album.store(inherit=False)
                    else:
                        album.store()

                    for item in album.items():
                        # If we're using track-level sources, also look up each
                        # track on the album.
                        if "track" in self.sources:
                            item.genre, src = self._get_genre(item)
                            item.store()
                            self._log.info(
                                'genre for track "{0.title}" ({1}): {0.genre}',
                                item,
                                src,
                            )

                        if write:
                            item.try_write()
            else:
                # Just query singletons, i.e. items that are not part of
                # an album
                for item in lib.items(args):
                    item.genre, src = self._get_genre(item)
                    item.store()
                    self._log.info(
                        "genre for track {0.title} ({1}): {0.genre}", item, src
                    )
            method = lib.albums if opts.album else lib.items
            for obj in method(args):
                self._process(obj, write=ui.should_write())

        lastgenre_cmd.func = lastgenre_func
        return [lastgenre_cmd]

    def imported(self, session, task):
        """Event hook called when an import task finishes."""
        if task.is_album:
            album = task.album
            album.genre, src = self._get_genre(album)
            self._log.debug(
                'genre for album "{0.album}" ({1}): {0.genre}', album, src
            )

            # If we're using track-level sources, store the album genre only,
            # then also look up individual track genres.
            if "track" in self.sources:
                album.store(inherit=False)
                for item in album.items():
                    item.genre, src = self._get_genre(item)
                    self._log.debug(
                        'genre for track "{0.title}" ({1}): {0.genre}',
                        item,
                        src,
                    )
                    item.store()
            # Store the album genre and inherit to tracks.
            else:
                album.store()

        else:
            item = task.item
            item.genre, src = self._get_genre(item)
            self._log.debug(
                'genre for track "{0.title}" ({1}): {0.genre}',
                item,
                src,
            )
            item.store()
        self._process(task.album if task.is_album else task.item, write=False)

    def _tags_for(self, obj, min_weight=None):
        """Core genre identification routine.

@@ -613,12 +590,12 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        try:
            res = obj.get_top_tags()
        except PYLAST_EXCEPTIONS as exc:
            self._log.debug("last.fm error: {0}", exc)
            self._log.debug("last.fm error: {}", exc)
            return []
        except Exception as exc:
            # Isolate bugs in pylast.
            self._log.debug("{}", traceback.format_exc())
            self._log.error("error in pylast library: {0}", exc)
            self._log.error("error in pylast library: {}", exc)
            return []

        # Filter by weight (optionally).

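The rewritten lastgenre CLI funnels albums and items through one _process entry point via functools.singledispatchmethod, which selects the registered implementation from the runtime type of the first non-self argument. A reduced, runnable sketch with stand-in classes (not the beets models):

    from functools import singledispatchmethod

    class Item: ...
    class Album: ...

    class Processor:
        @singledispatchmethod
        def process(self, obj, write: bool) -> None:
            raise NotImplementedError(type(obj))

        @process.register
        def _(self, obj: Item, write: bool) -> None:
            print("item", write)

        @process.register
        def _(self, obj: Album, write: bool) -> None:
            print("album", write)

    Processor().process(Item(), True)    # item True
    Processor().process(Album(), False)  # album False

Since Python 3.8, register can infer the dispatch type from the annotation on obj, which is what the @_process.register decorators in the hunk rely on.
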
@@ -9,6 +9,7 @@
- cape jazz
- chimurenga
- coupé-décalé
- egyptian
- fuji music
- genge
- highlife

@@ -35,6 +36,7 @@
- sega
- seggae
- semba
- shangaan electro
- soukous
- taarab
- zouglou

@@ -133,6 +135,7 @@
- chutney
- chutney soca
- compas
- folklore argentino
- mambo
- merengue
- méringue

@@ -185,6 +188,7 @@
- humor
- parody music
- stand-up
- kabarett
- country:
- alternative country:
- cowpunk

@@ -250,7 +254,6 @@
- acid breaks
- baltimore club
- big beat
- breakbeat hardcore
- broken beat
- florida breaks
- nu skool breaks

@@ -287,12 +290,15 @@
- jump-up
- liquid funk
- neurofunk
- oldschool jungle:
- jungle:
- darkside jungle
- ragga jungle
- oldschool jungle
- raggacore
- sambass
- techstep
- leftfield
- halftime
- electro:
- crunk
- electro backbeat

@@ -343,6 +349,7 @@
- hardcore:
- bouncy house
- bouncy techno
- breakbeat hardcore
- breakcore
- digital hardcore
- doomcore

@@ -400,6 +407,8 @@
- power electronics
- power noise
- witch house
- juke:
- footwork
- post-disco:
- boogie
- dance-pop

@@ -414,6 +423,7 @@
- techno:
- acid techno
- detroit techno
- dub techno
- free tekno
- ghettotech
- minimal

@@ -469,7 +479,6 @@
- chap hop
- christian hip hop
- conscious hip hop
- country-rap
- crunkcore
- cumbia rap
- east coast hip hop:

@@ -481,6 +490,7 @@
- freestyle rap
- g-funk
- gangsta rap
- glitch hop
- golden age hip hop
- hip hop soul
- hip pop

@@ -521,11 +531,14 @@
- west coast hip hop:
- chicano rap
- jerkin'
- austrian hip hop
- german hip hop
- jazz:
- asian american jazz
- avant-garde jazz
- bebop
- boogie-woogie
- brass band
- british dance band
- chamber jazz
- continental jazz

@@ -568,14 +581,13 @@
- vocal jazz
- west coast gypsy jazz
- west coast jazz
- other:
- worldbeat
- kids music:
- kinderlieder
- pop:
- adult contemporary
- arab pop
- baroque pop
- bubblegum pop
- chanson
- christian pop
- classical crossover
- europop:

@@ -640,6 +652,7 @@
- beat music
- chinese rock
- christian rock
- classic rock
- dark cabaret
- desert rock
- experimental rock

@@ -720,6 +733,7 @@
- art punk
- christian punk
- deathrock
- deutschpunk
- folk punk:
- celtic punk
- gypsy punk

@@ -762,5 +776,18 @@
- dancehall
- ska:
- 2 tone
- dub
- rocksteady
- dub
- soundtrack:
- singer-songwriter:
- cantautorato
- cantautor
- cantautora
- chanson
- canción de autor
- nueva canción
- world:
- world dub
- world fusion
- worldbeat

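These genres-tree.yaml entries feed the flatten_tree call in the lastgenre hunk above: each nested genre becomes a root-to-leaf path whose ancestors the canonicalizer can walk. A guess at what that flattening looks like (the real beets helper may differ in details such as lowercasing):

    def flatten_tree(elem, path, branches):
        """Flatten a nested YAML genre tree into root-to-leaf paths."""
        if isinstance(elem, dict):
            for key, sub in elem.items():
                flatten_tree(sub, path + [key], branches)
        elif isinstance(elem, list):
            for sub in elem:
                flatten_tree(sub, path, branches)
        else:
            branches.append(path + [str(elem)])

    branches: list[list[str]] = []
    tree = {"electronic": [{"techno": ["acid techno", "dub techno"]}, "house"]}
    flatten_tree(tree, [], branches)
    print(branches)
    # [['electronic', 'techno', 'acid techno'],
    #  ['electronic', 'techno', 'dub techno'],
    #  ['electronic', 'house']]
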
@@ -160,10 +160,14 @@ calypso jazz
calypso-style baila
campursari
canatronic
canción de autor
candombe
canon
canrock
cantata
cantautorato
cantautor
cantautora
cante chico
cante jondo
canterbury scene

@@ -371,6 +375,7 @@ desert rock
desi
detroit blues
detroit techno
dub techno
dhamar
dhimotiká
dhrupad

@@ -684,7 +689,7 @@ indo rock
indonesian pop
indoyíftika
industrial death metal
industrial hip-hop
industrial hip hop
industrial metal
industrial music
industrial musical

@@ -1069,10 +1074,10 @@ nortec
norteño
northern soul
nota
nu breaks
nu jazz
nu metal
nu soul
nu skool breaks
nueva canción
nyatiti
néo kýma

@@ -70,7 +70,7 @@ class CustomUser(pylast.User):
        tuple with the total number of pages of results. Includes an MBID, if
        found.
        """
        doc = self._request(self.ws_prefix + "." + method, cacheable, params)
        doc = self._request(f"{self.ws_prefix}.{method}", cacheable, params)

        toptracks_node = doc.getElementsByTagName("toptracks")[0]
        total_pages = int(toptracks_node.getAttribute("totalPages"))

@@ -120,7 +120,7 @@ def import_lastfm(lib, log):
    if not user:
        raise ui.UserError("You must specify a user name for lastimport")

    log.info("Fetching last.fm library for @{0}", user)
    log.info("Fetching last.fm library for @{}", user)

    page_total = 1
    page_current = 0

@@ -130,7 +130,7 @@ def import_lastfm(lib, log):
    # Iterate through a yet to be known page total count
    while page_current < page_total:
        log.info(
            "Querying page #{0}{1}...",
            "Querying page #{}{}...",
            page_current + 1,
            f"/{page_total}" if page_total > 1 else "",
        )

@@ -147,27 +147,27 @@ def import_lastfm(lib, log):
                unknown_total += unknown
                break
            else:
                log.error("ERROR: unable to read page #{0}", page_current + 1)
                log.error("ERROR: unable to read page #{}", page_current + 1)
                if retry < retry_limit:
                    log.info(
                        "Retrying page #{0}... ({1}/{2} retry)",
                        "Retrying page #{}... ({}/{} retry)",
                        page_current + 1,
                        retry + 1,
                        retry_limit,
                    )
                else:
                    log.error(
                        "FAIL: unable to fetch page #{0}, ",
                        "tried {1} times",
                        "FAIL: unable to fetch page #{}, ",
                        "tried {} times",
                        page_current,
                        retry + 1,
                    )
        page_current += 1

    log.info("... done!")
    log.info("finished processing {0} song pages", page_total)
    log.info("{0} unknown play-counts", unknown_total)
    log.info("{0} play-counts imported", found_total)
    log.info("finished processing {} song pages", page_total)
    log.info("{} unknown play-counts", unknown_total)
    log.info("{} play-counts imported", found_total)


def fetch_tracks(user, page, limit):

@@ -201,7 +201,7 @@ def process_tracks(lib, tracks, log):
    total = len(tracks)
    total_found = 0
    total_fails = 0
    log.info("Received {0} tracks in this page, processing...", total)
    log.info("Received {} tracks in this page, processing...", total)

    for num in range(0, total):
        song = None

@@ -220,7 +220,7 @@ def process_tracks(lib, tracks, log):
            else None
        )

        log.debug("query: {0} - {1} ({2})", artist, title, album)
        log.debug("query: {} - {} ({})", artist, title, album)

        # First try to query by musicbrainz's trackid
        if trackid:

@@ -231,7 +231,7 @@ def process_tracks(lib, tracks, log):
        # If not, try just album/title
        if song is None:
            log.debug(
                "no album match, trying by album/title: {0} - {1}", album, title
                "no album match, trying by album/title: {} - {}", album, title
            )
            query = dbcore.AndQuery(
                [

@@ -268,10 +268,9 @@ def process_tracks(lib, tracks, log):
            count = int(song.get("play_count", 0))
            new_count = int(tracks[num].get("playcount", 1))
            log.debug(
                "match: {0} - {1} ({2}) updating: play_count {3} => {4}",
                song.artist,
                song.title,
                song.album,
                "match: {0.artist} - {0.title} ({0.album}) updating:"
                " play_count {1} => {2}",
                song,
                count,
                new_count,
            )

@@ -280,11 +279,11 @@ def process_tracks(lib, tracks, log):
            total_found += 1
        else:
            total_fails += 1
            log.info(" - No match: {0} - {1} ({2})", artist, title, album)
            log.info(" - No match: {} - {} ({})", artist, title, album)

    if total_fails > 0:
        log.info(
            "Acquired {0}/{1} play-counts ({2} unknown)",
            "Acquired {}/{} play-counts ({} unknown)",
            total_found,
            total,
            total_fails,

@@ -13,7 +13,6 @@ from beetsplug.lastimport import process_tracks
class ListenBrainzPlugin(BeetsPlugin):
    """A Beets plugin for interacting with ListenBrainz."""

    data_source = "ListenBrainz"
    ROOT = "http://api.listenbrainz.org/1/"

    def __init__(self):

@@ -27,7 +26,7 @@ class ListenBrainzPlugin(BeetsPlugin):
    def commands(self):
        """Add beet UI commands to interact with ListenBrainz."""
        lbupdate_cmd = ui.Subcommand(
            "lbimport", help=f"Import {self.data_source} history"
            "lbimport", help="Import ListenBrainz history"
        )

        def func(lib, opts, args):

@@ -42,14 +41,14 @@ class ListenBrainzPlugin(BeetsPlugin):
            unknown_total = 0
            ls = self.get_listens()
            tracks = self.get_tracks_from_listens(ls)
            log.info(f"Found {len(ls)} listens")
            log.info("Found {} listens", len(ls))
            if tracks:
                found, unknown = process_tracks(lib, tracks, log)
                found_total += found
                unknown_total += unknown
            log.info("... done!")
            log.info("{0} unknown play-counts", unknown_total)
            log.info("{0} play-counts imported", found_total)
            log.info("{} unknown play-counts", unknown_total)
            log.info("{} play-counts imported", found_total)

    def _make_request(self, url, params=None):
        """Makes a request to the ListenBrainz API."""

@@ -63,7 +62,7 @@ class ListenBrainzPlugin(BeetsPlugin):
            response.raise_for_status()
            return response.json()
        except requests.exceptions.RequestException as e:
            self._log.debug(f"Invalid Search Error: {e}")
            self._log.debug("Invalid Search Error: {}", e)
            return None

    def get_listens(self, min_ts=None, max_ts=None, count=None):

@@ -156,7 +155,7 @@ class ListenBrainzPlugin(BeetsPlugin):
            playlist_info = playlist.get("playlist")
            if playlist_info.get("creator") == "listenbrainz":
                title = playlist_info.get("title")
                self._log.debug(f"Playlist title: {title}")
                self._log.debug("Playlist title: {}", title)
                playlist_type = (
                    "Exploration" if "Exploration" in title else "Jams"
                )

@@ -179,9 +178,7 @@ class ListenBrainzPlugin(BeetsPlugin):
            listenbrainz_playlists, key=lambda x: x["date"], reverse=True
        )
        for playlist in listenbrainz_playlists:
            self._log.debug(
                f"Playlist: {playlist['type']} - {playlist['date']}"
            )
            self._log.debug("Playlist: {0[type]} - {0[date]}", playlist)
        return listenbrainz_playlists

    def get_playlist(self, identifier):

@@ -42,10 +42,9 @@ from beets.autotag.distance import string_dist
from beets.util.config import sanitize_choices

if TYPE_CHECKING:
    from logging import Logger

    from beets.importer import ImportTask
    from beets.library import Item, Library
    from beets.logging import BeetsLogger as Logger

    from ._typing import (
        GeniusAPI,

@@ -154,7 +153,7 @@ def search_pairs(item):
        # examples include (live), (remix), and (acoustic).
        r"(.+?)\s+[(].*[)]$",
        # Remove any featuring artists from the title
        r"(.*?) {}".format(plugins.feat_tokens(for_artist=False)),
        rf"(.*?) {plugins.feat_tokens(for_artist=False)}",
        # Remove part of title after colon ':' for songs with subtitles
        r"(.+?)\s*:.*",
    ]

@@ -186,7 +185,7 @@ def slug(text: str) -> str:


class RequestHandler:
    _log: beets.logging.Logger
    _log: Logger

    def debug(self, message: str, *args) -> None:
        """Log a debug message with the class name."""

@@ -508,9 +507,9 @@ class SearchBackend(SoupMixin, Backend):
            # log out the candidate that did not make it but was close.
            # This may show a matching candidate with some noise in the name
            self.debug(
                "({}, {}) does not match ({}, {}) but dist was close: {:.2f}",
                result.artist,
                result.title,
                "({0.artist}, {0.title}) does not match ({1}, {2}) but dist"
                " was close: {3:.2f}",
                result,
                target_artist,
                target_title,
                max_dist,

@@ -582,7 +581,7 @@ class Tekstowo(SearchBackend):
    """Fetch lyrics from Tekstowo.pl."""

    BASE_URL = "https://www.tekstowo.pl"
    SEARCH_URL = BASE_URL + "/szukaj,{}.html"
    SEARCH_URL = f"{BASE_URL}/szukaj,{{}}.html"

    def build_url(self, artist, title):
        artistitle = f"{artist.title()} {title.title()}"

@@ -644,7 +643,7 @@ class Google(SearchBackend):
        re.IGNORECASE | re.VERBOSE,
    )
    #: Split cleaned up URL title into artist and title parts.
    URL_TITLE_PARTS_RE = re.compile(r" +(?:[ :|-]+|par|by) +")
    URL_TITLE_PARTS_RE = re.compile(r" +(?:[ :|-]+|par|by) +|, ")

    SOURCE_DIST_FACTOR = {"www.azlyrics.com": 0.5, "www.songlyrics.com": 0.6}


@@ -702,8 +701,8 @@ class Google(SearchBackend):
            result_artist, result_title = "", parts[0]
        else:
            # sort parts by their similarity to the artist
            parts.sort(key=lambda p: cls.get_part_dist(artist, title, p))
            result_artist, result_title = parts[0], " ".join(parts[1:])
            result_artist = min(parts, key=lambda p: string_dist(artist, p))
            result_title = min(parts, key=lambda p: string_dist(title, p))

        return SearchResult(result_artist, result_title, item["link"])


@@ -746,7 +745,9 @@ class Translator(RequestHandler):
    TRANSLATE_URL = "https://api.cognitive.microsofttranslator.com/translate"
    LINE_PARTS_RE = re.compile(r"^(\[\d\d:\d\d.\d\d\]|) *(.*)$")
    SEPARATOR = " | "
    remove_translations = partial(re.compile(r" / [^\n]+").sub, "")
    remove_translations = staticmethod(
        partial(re.compile(r" / [^\n]+").sub, "")
    )

    _log: Logger
    api_key: str

@@ -838,15 +839,16 @@ class Translator(RequestHandler):
        lyrics_language = langdetect.detect(new_lyrics).upper()
        if lyrics_language == self.to_language:
            self.info(
                "🔵 Lyrics are already in the target language {}",
                self.to_language,
                "🔵 Lyrics are already in the target language {.to_language}",
                self,
            )
            return new_lyrics

        if self.from_languages and lyrics_language not in self.from_languages:
            self.info(
                "🔵 Configuration {} does not permit translating from {}",
                self.from_languages,
                "🔵 Configuration {.from_languages} does not permit translating"
                " from {}",
                self,
                lyrics_language,
            )
            return new_lyrics

@@ -854,7 +856,7 @@ class Translator(RequestHandler):
        lyrics, *url = new_lyrics.split("\n\nSource: ")
        with self.handle_request():
            translated_lines = self.append_translations(lyrics.splitlines())
            self.info("🟢 Translated lyrics to {}", self.to_language)
            self.info("🟢 Translated lyrics to {.to_language}", self)
            return "\n\nSource: ".join(["\n".join(translated_lines), *url])


@@ -1090,7 +1092,7 @@ class LyricsPlugin(RequestHandler, plugins.BeetsPlugin):
            return

        if lyrics := self.find_lyrics(item):
            self.info("🟢 Found lyrics: {0}", item)
            self.info("🟢 Found lyrics: {}", item)
            if translator := self.translator:
                lyrics = translator.translate(lyrics, item.lyrics)
        else:

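The Google backend change above replaces a sort-then-join over the URL title parts with two independent min() picks, each keyed by string distance, so artist and title are chosen separately. A sketch of the pattern, with difflib standing in for beets' own string_dist:

    import difflib

    def string_dist(a: str, b: str) -> float:
        """Toy distance: 0.0 for identical strings, ~1.0 for disjoint ones."""
        return 1.0 - difflib.SequenceMatcher(None, a.lower(), b.lower()).ratio()

    parts = ["Amy Winehouse", "Back to Black", "Lyrics"]
    artist, title = "amy winehouse", "back to black"

    # Each field independently grabs the closest part; in principle the same
    # part could win both, which the new code appears to accept.
    result_artist = min(parts, key=lambda p: string_dist(artist, p))
    result_title = min(parts, key=lambda p: string_dist(title, p))
    print(result_artist, "-", result_title)  # Amy Winehouse - Back to Black
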
@@ -83,9 +83,7 @@ class MusicBrainzCollectionPlugin(BeetsPlugin):
        collection = self.config["collection"].as_str()
        if collection:
            if collection not in collection_ids:
                raise ui.UserError(
                    "invalid collection ID: {}".format(collection)
                )
                raise ui.UserError(f"invalid collection ID: {collection}")
            return collection

        # No specified collection. Just return the first collection ID

@@ -156,10 +154,10 @@ class MusicBrainzCollectionPlugin(BeetsPlugin):
            if re.match(UUID_REGEX, aid):
                album_ids.append(aid)
            else:
                self._log.info("skipping invalid MBID: {0}", aid)
                self._log.info("skipping invalid MBID: {}", aid)

        # Submit to MusicBrainz.
        self._log.info("Updating MusicBrainz collection {0}...", collection_id)
        self._log.info("Updating MusicBrainz collection {}...", collection_id)
        submit_albums(collection_id, album_ids)
        if remove_missing:
            self.remove_missing(collection_id, lib.albums())

@@ -73,7 +73,7 @@ class MBSubmitPlugin(BeetsPlugin):
            subprocess.Popen([picard_path] + paths)
            self._log.info("launched picard from\n{}", picard_path)
        except OSError as exc:
            self._log.error(f"Could not open picard, got error:\n{exc}")
            self._log.error("Could not open picard, got error:\n{}", exc)

    def print_tracks(self, session, task):
        for i in sorted(task.items, key=lambda i: i.track):

@@ -49,7 +49,7 @@ def load_meta_sources():
    meta_sources = {}

    for module_path, class_name in SOURCES.items():
        module = import_module(METASYNC_MODULE + "." + module_path)
        module = import_module(f"{METASYNC_MODULE}.{module_path}")
        meta_sources[class_name.lower()] = getattr(module, class_name)

    return meta_sources

@@ -117,13 +117,13 @@ class MetaSyncPlugin(BeetsPlugin):
            try:
                cls = META_SOURCES[player]
            except KeyError:
                self._log.error("Unknown metadata source '{}'".format(player))
                self._log.error("Unknown metadata source '{}'", player)

            try:
                meta_source_instances[player] = cls(self.config, self._log)
            except (ImportError, ConfigValueError) as e:
                self._log.error(
                    f"Failed to instantiate metadata source {player!r}: {e}"
                    "Failed to instantiate metadata source {!r}: {}", player, e
                )

        # Avoid needlessly iterating over items

@@ -44,11 +44,12 @@ class Amarok(MetaSource):
        "amarok_lastplayed": types.DATE,
    }

    query_xml = '<query version="1.0"> \
                    <filters> \
                        <and><include field="filename" value=%s /></and> \
                    </filters> \
                </query>'
    query_xml = """
    <query version="1.0">
        <filters>
            <and><include field="filename" value={} /></and>
        </filters>
    </query>"""

    def __init__(self, config, log):
        super().__init__(config, log)

@@ -68,7 +69,7 @@ class Amarok(MetaSource):
        # of the result set. So query for the filename and then try to match
        # the correct item from the results we get back
        results = self.collection.Query(
            self.query_xml % quoteattr(basename(path))
            self.query_xml.format(quoteattr(basename(path)))
        )
        for result in results:
            if result["xesam:url"] != path:

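Worth noting about the Amarok hunk: the template swaps %-interpolation for str.format but keeps quoteattr, which escapes the value and wraps it in quotes itself, so the template deliberately has no quotes around the {} placeholder. A minimal standalone check of that behaviour:

    from xml.sax.saxutils import quoteattr

    query_xml = """
    <query version="1.0">
        <filters>
            <and><include field="filename" value={} /></and>
        </filters>
    </query>"""

    # quoteattr returns the value already wrapped in quotes, with XML escapes.
    print(query_xml.format(quoteattr('My "best" <songs>.mp3')))
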
@@ -76,12 +76,12 @@ class Itunes(MetaSource):
        library_path = config["itunes"]["library"].as_filename()

        try:
            self._log.debug(f"loading iTunes library from {library_path}")
            self._log.debug("loading iTunes library from {}", library_path)
            with create_temporary_copy(library_path) as library_copy:
                with open(library_copy, "rb") as library_copy_f:
                    raw_library = plistlib.load(library_copy_f)
        except OSError as e:
            raise ConfigValueError("invalid iTunes library: " + e.strerror)
            raise ConfigValueError(f"invalid iTunes library: {e.strerror}")
        except Exception:
            # It's likely the user configured their '.itl' library (<> xml)
            if os.path.splitext(library_path)[1].lower() != ".xml":

@@ -91,7 +91,7 @@ class Itunes(MetaSource):
                )
            else:
                hint = ""
            raise ConfigValueError("invalid iTunes library" + hint)
            raise ConfigValueError(f"invalid iTunes library{hint}")

        # Make the iTunes library queryable using the path
        self.collection = {

@@ -104,7 +104,7 @@ class Itunes(MetaSource):
        result = self.collection.get(util.bytestring_path(item.path).lower())

        if not result:
            self._log.warning(f"no iTunes match found for {item}")
            self._log.warning("no iTunes match found for {}", item)
            return

        item.itunes_rating = result.get("Rating")

@@ -226,8 +226,8 @@ class MissingPlugin(BeetsPlugin):
            for track_info in album_info.tracks:
                if track_info.track_id not in item_mbids:
                    self._log.debug(
                        "track {0} in album {1}",
                        track_info.track_id,
                        album_info.album_id,
                        "track {.track_id} in album {.album_id}",
                        track_info,
                        album_info,
                    )
                    yield _item(track_info, album_info, album.id)

@@ -51,8 +51,8 @@ class MPDClientWrapper:
        if not self.strip_path.endswith("/"):
            self.strip_path += "/"

        self._log.debug("music_directory: {0}", self.music_directory)
        self._log.debug("strip_path: {0}", self.strip_path)
        self._log.debug("music_directory: {.music_directory}", self)
        self._log.debug("strip_path: {.strip_path}", self)

        self.client = mpd.MPDClient()


@@ -64,7 +64,7 @@ class MPDClientWrapper:
        if host[0] in ["/", "~"]:
            host = os.path.expanduser(host)

        self._log.info("connecting to {0}:{1}", host, port)
        self._log.info("connecting to {}:{}", host, port)
        try:
            self.client.connect(host, port)
        except OSError as e:

@@ -89,7 +89,7 @@ class MPDClientWrapper:
            try:
                return getattr(self.client, command)()
            except (OSError, mpd.ConnectionError) as err:
                self._log.error("{0}", err)
                self._log.error("{}", err)

            if retries <= 0:
                # if we exited without breaking, we couldn't reconnect in time :(

@@ -123,7 +123,7 @@ class MPDClientWrapper:
            result = os.path.join(self.music_directory, file)
        else:
            result = entry["file"]
        self._log.debug("returning: {0}", result)
        self._log.debug("returning: {}", result)
        return result, entry.get("id")

    def status(self):

@@ -169,7 +169,7 @@ class MPDStats:
        if item:
            return item
        else:
            self._log.info("item not found: {0}", displayable_path(path))
            self._log.info("item not found: {}", displayable_path(path))

    def update_item(self, item, attribute, value=None, increment=None):
        """Update the beets item. Set attribute to value or increment the value

@@ -188,10 +188,10 @@ class MPDStats:
        item.store()

        self._log.debug(
            "updated: {0} = {1} [{2}]",
            "updated: {} = {} [{.filepath}]",
            attribute,
            item[attribute],
            displayable_path(item.path),
            item,
        )

    def update_rating(self, item, skipped):

@@ -234,12 +234,12 @@ class MPDStats:
    def handle_played(self, song):
        """Updates the play count of a song."""
        self.update_item(song["beets_item"], "play_count", increment=1)
        self._log.info("played {0}", displayable_path(song["path"]))
        self._log.info("played {}", displayable_path(song["path"]))

    def handle_skipped(self, song):
        """Updates the skip count of a song."""
        self.update_item(song["beets_item"], "skip_count", increment=1)
        self._log.info("skipped {0}", displayable_path(song["path"]))
        self._log.info("skipped {}", displayable_path(song["path"]))

    def on_stop(self, status):
        self._log.info("stop")

@@ -278,11 +278,11 @@ class MPDStats:
            self.handle_song_change(self.now_playing)

        if is_url(path):
            self._log.info("playing stream {0}", displayable_path(path))
            self._log.info("playing stream {}", displayable_path(path))
            self.now_playing = None
            return

        self._log.info("playing {0}", displayable_path(path))
        self._log.info("playing {}", displayable_path(path))

        self.now_playing = {
            "started": time.time(),

@@ -307,12 +307,12 @@ class MPDStats:
            if "player" in events:
                status = self.mpd.status()

                handler = getattr(self, "on_" + status["state"], None)
                handler = getattr(self, f"on_{status['state']}", None)

                if handler:
                    handler(status)
                else:
                    self._log.debug('unhandled status "{0}"', status)
                    self._log.debug('unhandled status "{}"', status)

            events = self.mpd.events()

@ -101,8 +101,8 @@ class MPDUpdatePlugin(BeetsPlugin):
|
|||
|
||||
try:
|
||||
s = BufferedSocket(host, port)
|
||||
except OSError as e:
|
||||
self._log.warning("MPD connection failed: {0}", str(e.strerror))
|
||||
except OSError:
|
||||
self._log.warning("MPD connection failed", exc_info=True)
|
||||
return
|
||||
|
||||
resp = s.readline()
|
||||
|
|
@ -111,7 +111,7 @@ class MPDUpdatePlugin(BeetsPlugin):
|
|||
return
|
||||
|
||||
if password:
|
||||
s.send(b'password "%s"\n' % password.encode("utf8"))
|
||||
s.send(f'password "{password}"\n'.encode())
|
||||
resp = s.readline()
|
||||
if b"OK" not in resp:
|
||||
self._log.warning("Authentication failed: {0!r}", resp)
|
||||
|
|
|
|||
|
|
@ -18,12 +18,14 @@ from __future__ import annotations
|
|||
|
||||
import traceback
|
||||
from collections import Counter
|
||||
from contextlib import suppress
|
||||
from functools import cached_property
|
||||
from itertools import product
|
||||
from typing import TYPE_CHECKING, Any, Iterable, Sequence
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import musicbrainzngs
|
||||
from confuse.exceptions import NotFoundError
|
||||
|
||||
import beets
|
||||
import beets.autotag.hooks
|
||||
|
|
@ -68,9 +70,7 @@ class MusicBrainzAPIError(util.HumanReadableError):
|
|||
super().__init__(reason, verb, tb)
|
||||
|
||||
def get_message(self):
|
||||
return "{} in {} with query {}".format(
|
||||
self._reasonstr(), self.verb, repr(self.query)
|
||||
)
|
||||
return f"{self._reasonstr()} in {self.verb} with query {self.query!r}"
|
||||
|
||||
|
||||
RELEASE_INCLUDES = list(
|
||||
|
|
@ -203,7 +203,7 @@ def _multi_artist_credit(
|
|||
|
||||
|
||||
def track_url(trackid: str) -> str:
|
||||
return urljoin(BASE_URL, "recording/" + trackid)
|
||||
return urljoin(BASE_URL, f"recording/{trackid}")
|
||||
|
||||
|
||||
def _flatten_artist_credit(credit: list[JSONDict]) -> tuple[str, str, str]:
|
||||
|
|
@ -248,7 +248,7 @@ def _get_related_artist_names(relations, relation_type):
|
|||
|
||||
|
||||
def album_url(albumid: str) -> str:
|
||||
return urljoin(BASE_URL, "release/" + albumid)
|
||||
return urljoin(BASE_URL, f"release/{albumid}")
|
||||
|
||||
|
||||
def _preferred_release_event(
|
||||
|
|
@ -293,7 +293,7 @@ def _set_date_str(
|
|||
continue
|
||||
|
||||
if original:
|
||||
key = "original_" + key
|
||||
key = f"original_{key}"
|
||||
setattr(info, key, date_num)
|
||||
|
||||
|
||||
|
|
@ -373,7 +373,6 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
"https": False,
|
||||
"ratelimit": 1,
|
||||
"ratelimit_interval": 1,
|
||||
"searchlimit": 5,
|
||||
"genres": False,
|
||||
"external_ids": {
|
||||
"discogs": False,
|
||||
|
|
@ -385,6 +384,15 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
"extra_tags": [],
|
||||
},
|
||||
)
|
||||
# TODO: Remove in 3.0.0
|
||||
with suppress(NotFoundError):
|
||||
self.config["search_limit"] = self.config["match"][
|
||||
"searchlimit"
|
||||
].get()
|
||||
self._log.warning(
|
||||
"'musicbrainz.searchlimit' option is deprecated and will be "
|
||||
"removed in 3.0.0. Use 'musicbrainz.search_limit' instead."
|
||||
)
|
||||
hostname = self.config["host"].as_str()
|
||||
https = self.config["https"].get(bool)
|
||||
# Only call set_hostname when a custom server is configured. Since
|
||||
|
|
@ -801,7 +809,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
)
|
||||
try:
|
||||
method = getattr(musicbrainzngs, f"search_{query_type}s")
|
||||
res = method(limit=self.config["searchlimit"].get(int), **filters)
|
||||
res = method(limit=self.config["search_limit"].get(), **filters)
|
||||
except musicbrainzngs.MusicBrainzError as exc:
|
||||
raise MusicBrainzAPIError(
|
||||
exc, f"{query_type} search", filters, traceback.format_exc()
|
||||
|
|
@ -838,7 +846,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
"""
|
||||
self._log.debug("Requesting MusicBrainz release {}", album_id)
|
||||
if not (albumid := self._extract_id(album_id)):
|
||||
self._log.debug("Invalid MBID ({0}).", album_id)
|
||||
self._log.debug("Invalid MBID ({}).", album_id)
|
||||
return None
|
||||
|
||||
try:
|
||||
|
|
@ -875,7 +883,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
or None if no track is found. May raise a MusicBrainzAPIError.
|
||||
"""
|
||||
if not (trackid := self._extract_id(track_id)):
|
||||
self._log.debug("Invalid MBID ({0}).", track_id)
|
||||
self._log.debug("Invalid MBID ({}).", track_id)
|
||||
return None
|
||||
|
||||
try:
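
The deprecation shim in the @@ -385,6 +384,15 @@ hunk above only copies the old searchlimit value when the option is actually set; contextlib.suppress skips the whole block, including the warning, when the config lookup raises NotFoundError. A rough standalone sketch of that pattern, with a plain dict and KeyError standing in for the confuse view and its NotFoundError:

    from contextlib import suppress

    config = {"searchlimit": 5}  # drop the key to see the block skipped

    with suppress(KeyError):
        limit = config["searchlimit"]  # raises KeyError when unset
        print(f"'searchlimit' is deprecated; migrating value {limit}")
    print("initialisation continues either way")
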
@@ -179,10 +179,8 @@ class ParentWorkPlugin(BeetsPlugin):

         if not item.mb_workid:
             self._log.info(
-                "No work for {}, \
-add one at https://musicbrainz.org/recording/{}",
+                "No work for {0}, add one at https://musicbrainz.org/recording/{0.mb_trackid}",
                 item,
-                item.mb_trackid,
             )
             return

@@ -28,6 +28,11 @@ from beets.util import get_temp_filename
 # If this is missing, they're placed at the end.
 ARGS_MARKER = "$args"

+# Indicate where the playlist file (with absolute path) should be inserted into
+# the command string. If this is missing, its placed at the end, but before
+# arguments.
+PLS_MARKER = "$playlist"
+

 def play(
     command_str,

@@ -43,7 +48,7 @@ def play(
     """
     # Print number of tracks or albums to be played, log command to be run.
     item_type += "s" if len(selection) > 1 else ""
-    ui.print_("Playing {} {}.".format(len(selection), item_type))
+    ui.print_(f"Playing {len(selection)} {item_type}.")
     log.debug("executing command: {} {!r}", command_str, open_args)

     try:

@@ -132,8 +137,23 @@ class PlayPlugin(BeetsPlugin):
             return

-        open_args = self._playlist_or_paths(paths)
+        open_args_str = [
+            p.decode("utf-8") for p in self._playlist_or_paths(paths)
+        ]
         command_str = self._command_str(opts.args)

+        if PLS_MARKER in command_str:
+            if not config["play"]["raw"]:
+                command_str = command_str.replace(
+                    PLS_MARKER, "".join(open_args_str)
+                )
+                self._log.debug(
+                    "command altered by PLS_MARKER to: {}", command_str
+                )
+                open_args = []
+            else:
+                command_str = command_str.replace(PLS_MARKER, " ")
+
         # Check if the selection exceeds configured threshold. If True,
         # cancel, otherwise proceed with play command.
         if opts.yes or not self._exceeds_threshold(

@@ -154,7 +174,7 @@ class PlayPlugin(BeetsPlugin):
             return f"{command_str} {args}"
         else:
             # Don't include the marker in the command.
-            return command_str.replace(" " + ARGS_MARKER, "")
+            return command_str.replace(f" {ARGS_MARKER}", "")

     def _playlist_or_paths(self, paths):
         """Return either the raw paths of items or a playlist of the items."""

@@ -162,6 +182,7 @@ class PlayPlugin(BeetsPlugin):
             return paths
         else:
-            return [self._create_tmp_playlist(paths)]
+            return [shlex.quote(self._create_tmp_playlist(paths))]

     def _exceeds_threshold(
         self, selection, command_str, open_args, item_type="track"

@@ -179,9 +200,7 @@ class PlayPlugin(BeetsPlugin):
            ui.print_(
                ui.colorize(
                    "text_warning",
-                    "You are about to queue {} {}.".format(
-                        len(selection), item_type
-                    ),
+                    f"You are about to queue {len(selection)} {item_type}.",
                )
            )
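
A rough sketch of what the new $playlist marker does, assumed from the @@ -132,8 +137,23 @@ hunk above (the helper below is illustrative, not part of the plugin): when the marker appears in the configured command and raw mode is off, the playlist path is spliced into the command string itself instead of being appended as a trailing argument.

    PLS_MARKER = "$playlist"

    def splice_playlist(command_str: str, open_args_str: list[str], raw: bool):
        """Hypothetical helper mirroring the PLS_MARKER branch above."""
        if PLS_MARKER not in command_str:
            return command_str, open_args_str  # paths stay as trailing args
        if raw:
            return command_str.replace(PLS_MARKER, " "), open_args_str
        # Non-raw mode: embed the playlist path and clear the trailing args.
        return command_str.replace(PLS_MARKER, "".join(open_args_str)), []

    print(splice_playlist("vlc $playlist --fullscreen", ["/tmp/beets.m3u"], False))
    # -> ('vlc /tmp/beets.m3u --fullscreen', [])
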
@@ -123,7 +123,7 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin):

     def cli_exit(self, lib):
         for playlist in self.find_playlists():
-            self._log.info(f"Updating playlist: {playlist}")
+            self._log.info("Updating playlist: {}", playlist)
             base_dir = beets.util.bytestring_path(
                 self.relative_to
                 if self.relative_to

@@ -133,21 +133,16 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin):
            try:
                self.update_playlist(playlist, base_dir)
            except beets.util.FilesystemError:
-                self._log.error(
-                    "Failed to update playlist: {}".format(
-                        beets.util.displayable_path(playlist)
-                    )
-                )
+                self._log.error("Failed to update playlist: {}", playlist)

    def find_playlists(self):
        """Find M3U playlists in the playlist directory."""
+        playlist_dir = beets.util.syspath(self.playlist_dir)
        try:
-            dir_contents = os.listdir(beets.util.syspath(self.playlist_dir))
+            dir_contents = os.listdir(playlist_dir)
        except OSError:
            self._log.warning(
-                "Unable to open playlist directory {}".format(
-                    beets.util.displayable_path(self.playlist_dir)
-                )
+                "Unable to open playlist directory {.playlist_dir}", self
            )
            return

@@ -195,9 +190,10 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin):

        if changes or deletions:
            self._log.info(
-                "Updated playlist {} ({} changes, {} deletions)".format(
-                    filename, changes, deletions
-                )
+                "Updated playlist {} ({} changes, {} deletions)",
+                filename,
+                changes,
+                deletions,
            )
            beets.util.copy(new_playlist, filename, replace=True)
            beets.util.remove(new_playlist)

@@ -22,9 +22,7 @@ def get_music_section(
 ):
     """Getting the section key for the music library in Plex."""
     api_endpoint = append_token("library/sections", token)
-    url = urljoin(
-        "{}://{}:{}".format(get_protocol(secure), host, port), api_endpoint
-    )
+    url = urljoin(f"{get_protocol(secure)}://{host}:{port}", api_endpoint)

     # Sends request.
     r = requests.get(

@@ -54,9 +52,7 @@ def update_plex(host, port, token, library_name, secure, ignore_cert_errors):
     )
     api_endpoint = f"library/sections/{section_key}/refresh"
     api_endpoint = append_token(api_endpoint, token)
-    url = urljoin(
-        "{}://{}:{}".format(get_protocol(secure), host, port), api_endpoint
-    )
+    url = urljoin(f"{get_protocol(secure)}://{host}:{port}", api_endpoint)

     # Sends request and returns requests object.
     r = requests.get(

@@ -70,7 +66,7 @@ def update_plex(host, port, token, library_name, secure, ignore_cert_errors):
 def append_token(url, token):
     """Appends the Plex Home token to the api call if required."""
     if token:
-        url += "?" + urlencode({"X-Plex-Token": token})
+        url += f"?{urlencode({'X-Plex-Token': token})}"
     return url
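
For reference, the f-string form builds the same URL as the removed .format call; with hypothetical values:

    from urllib.parse import urlencode

    url = "http://localhost:32400/library/sections"
    url += f"?{urlencode({'X-Plex-Token': 'abc123'})}"
    print(url)  # http://localhost:32400/library/sections?X-Plex-Token=abc123
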
@@ -70,9 +70,7 @@ def call(args: list[str], log: Logger, **kwargs: Any):
         return command_output(args, **kwargs)
     except subprocess.CalledProcessError as e:
         log.debug(e.output.decode("utf8", "ignore"))
-        raise ReplayGainError(
-            "{} exited with status {}".format(args[0], e.returncode)
-        )
+        raise ReplayGainError(f"{args[0]} exited with status {e.returncode}")


 def db_to_lufs(db: float) -> float:

@@ -143,9 +141,8 @@ class RgTask:
         item.rg_track_peak = track_gain.peak
         item.store()
         self._log.debug(
-            "applied track gain {0} LU, peak {1} of FS",
-            item.rg_track_gain,
-            item.rg_track_peak,
+            "applied track gain {0.rg_track_gain} LU, peak {0.rg_track_peak} of FS",
+            item,
         )

     def _store_album_gain(self, item: Item, album_gain: Gain):

@@ -157,9 +154,8 @@ class RgTask:
         item.rg_album_peak = album_gain.peak
         item.store()
         self._log.debug(
-            "applied album gain {0} LU, peak {1} of FS",
-            item.rg_album_gain,
-            item.rg_album_peak,
+            "applied album gain {0.rg_album_gain} LU, peak {0.rg_album_peak} of FS",
+            item,
         )

     def _store_track(self, write: bool):

@@ -170,15 +166,14 @@ class RgTask:
            # `track_gains` without throwing FatalReplayGainError
            # => raise non-fatal exception & continue
            raise ReplayGainError(
-                "ReplayGain backend `{}` failed for track {}".format(
-                    self.backend_name, item
-                )
+                f"ReplayGain backend `{self.backend_name}` failed for track"
+                f" {item}"
            )

        self._store_track_gain(item, self.track_gains[0])
        if write:
            item.try_write()
-        self._log.debug("done analyzing {0}", item)
+        self._log.debug("done analyzing {}", item)

    def _store_album(self, write: bool):
        """Store track/album gains for all tracks of the task in the database."""

@@ -191,17 +186,15 @@ class RgTask:
            # `album_gain` without throwing FatalReplayGainError
            # => raise non-fatal exception & continue
            raise ReplayGainError(
-                "ReplayGain backend `{}` failed "
-                "for some tracks in album {}".format(
-                    self.backend_name, self.album
-                )
+                f"ReplayGain backend `{self.backend_name}` failed "
+                f"for some tracks in album {self.album}"
            )
        for item, track_gain in zip(self.items, self.track_gains):
            self._store_track_gain(item, track_gain)
            self._store_album_gain(item, self.album_gain)
            if write:
                item.try_write()
-            self._log.debug("done analyzing {0}", item)
+            self._log.debug("done analyzing {}", item)

    def store(self, write: bool):
        """Store computed gains for the items of this task in the database."""

@@ -235,7 +228,7 @@ class R128Task(RgTask):
     def _store_track_gain(self, item: Item, track_gain: Gain):
         item.r128_track_gain = track_gain.gain
         item.store()
-        self._log.debug("applied r128 track gain {0} LU", item.r128_track_gain)
+        self._log.debug("applied r128 track gain {.r128_track_gain} LU", item)

     def _store_album_gain(self, item: Item, album_gain: Gain):
         """

@@ -244,7 +237,7 @@ class R128Task(RgTask):
         """
         item.r128_album_gain = album_gain.gain
         item.store()
-        self._log.debug("applied r128 album gain {0} LU", item.r128_album_gain)
+        self._log.debug("applied r128 album gain {.r128_album_gain} LU", item)


 AnyRgTask = TypeVar("AnyRgTask", bound=RgTask)

@@ -385,10 +378,7 @@ class FfmpegBackend(Backend):
         album_gain = target_level_lufs - album_gain

         self._log.debug(
-            "{}: gain {} LU, peak {}",
-            task.album,
-            album_gain,
-            album_peak,
+            "{.album}: gain {} LU, peak {}", task, album_gain, album_peak
         )

         task.album_gain = Gain(album_gain, album_peak)

@@ -431,9 +421,9 @@ class FfmpegBackend(Backend):
         target_level_lufs = db_to_lufs(target_level)

         # call ffmpeg
-        self._log.debug(f"analyzing {item}")
+        self._log.debug("analyzing {}", item)
         cmd = self._construct_cmd(item, peak_method)
-        self._log.debug("executing {0}", " ".join(map(displayable_path, cmd)))
+        self._log.debug("executing {}", " ".join(map(displayable_path, cmd)))
         output = call(cmd, self._log).stderr.splitlines()

         # parse output

@@ -501,12 +491,10 @@ class FfmpegBackend(Backend):
                if self._parse_float(b"M: " + line[1]) >= gating_threshold:
                    n_blocks += 1
            self._log.debug(
-                "{}: {} blocks over {} LUFS".format(
-                    item, n_blocks, gating_threshold
-                )
+                "{}: {} blocks over {} LUFS", item, n_blocks, gating_threshold
            )

-        self._log.debug("{}: gain {} LU, peak {}".format(item, gain, peak))
+        self._log.debug("{}: gain {} LU, peak {}", item, gain, peak)

        return Gain(gain, peak), n_blocks

@@ -526,9 +514,7 @@ class FfmpegBackend(Backend):
            if output[i].startswith(search):
                return i
        raise ReplayGainError(
-            "ffmpeg output: missing {} after line {}".format(
-                repr(search), start_line
-            )
+            f"ffmpeg output: missing {search!r} after line {start_line}"
        )

    def _parse_float(self, line: bytes) -> float:

@@ -575,7 +561,7 @@ class CommandBackend(Backend):
            # Explicit executable path.
            if not os.path.isfile(self.command):
                raise FatalReplayGainError(
-                    "replaygain command does not exist: {}".format(self.command)
+                    f"replaygain command does not exist: {self.command}"
                )
        else:
            # Check whether the program is in $PATH.

@@ -663,8 +649,8 @@ class CommandBackend(Backend):
            cmd = cmd + ["-d", str(int(target_level - 89))]
        cmd = cmd + [syspath(i.path) for i in items]

-        self._log.debug("analyzing {0} files", len(items))
-        self._log.debug("executing {0}", " ".join(map(displayable_path, cmd)))
+        self._log.debug("analyzing {} files", len(items))
+        self._log.debug("executing {}", " ".join(map(displayable_path, cmd)))
        output = call(cmd, self._log).stdout
        self._log.debug("analysis finished")
        return self.parse_tool_output(

@@ -680,7 +666,7 @@ class CommandBackend(Backend):
        for line in text.split(b"\n")[1 : num_lines + 1]:
            parts = line.split(b"\t")
            if len(parts) != 6 or parts[0] == b"File":
-                self._log.debug("bad tool output: {0}", text)
+                self._log.debug("bad tool output: {}", text)
                raise ReplayGainError("mp3gain failed")

            # _file = parts[0]

@@ -1105,9 +1091,8 @@ class AudioToolsBackend(Backend):
        )

        self._log.debug(
-            "ReplayGain for track {0} - {1}: {2:.2f}, {3:.2f}",
-            item.artist,
-            item.title,
+            "ReplayGain for track {0.artist} - {0.title}: {1:.2f}, {2:.2f}",
+            item,
            rg_track_gain,
            rg_track_peak,
        )

@@ -1132,7 +1117,7 @@ class AudioToolsBackend(Backend):
            )
            track_gains.append(Gain(gain=rg_track_gain, peak=rg_track_peak))
            self._log.debug(
-                "ReplayGain for track {0}: {1:.2f}, {2:.2f}",
+                "ReplayGain for track {}: {.2f}, {.2f}",
                item,
                rg_track_gain,
                rg_track_peak,

@@ -1145,8 +1130,8 @@ class AudioToolsBackend(Backend):
            rg_album_gain, task.target_level
        )
        self._log.debug(
-            "ReplayGain for album {0}: {1:.2f}, {2:.2f}",
-            task.items[0].album,
+            "ReplayGain for album {.items[0].album}: {.2f}, {.2f}",
+            task,
            rg_album_gain,
            rg_album_peak,
        )

@@ -1229,10 +1214,8 @@ class ReplayGainPlugin(BeetsPlugin):

        if self.backend_name not in BACKENDS:
            raise ui.UserError(
-                "Selected ReplayGain backend {} is not supported. "
-                "Please select one of: {}".format(
-                    self.backend_name, ", ".join(BACKENDS.keys())
-                )
+                f"Selected ReplayGain backend {self.backend_name} is not"
+                f" supported. Please select one of: {', '.join(BACKENDS)}"
            )

        # FIXME: Consider renaming the configuration option to 'peak_method'

@@ -1240,10 +1223,9 @@ class ReplayGainPlugin(BeetsPlugin):
        peak_method = self.config["peak"].as_str()
        if peak_method not in PeakMethod.__members__:
            raise ui.UserError(
-                "Selected ReplayGain peak method {} is not supported. "
-                "Please select one of: {}".format(
-                    peak_method, ", ".join(PeakMethod.__members__)
-                )
+                f"Selected ReplayGain peak method {peak_method} is not"
+                " supported. Please select one of:"
+                f" {', '.join(PeakMethod.__members__)}"
            )
        # This only applies to plain old rg tags, r128 doesn't store peak
        # values.

@@ -1348,19 +1330,19 @@ class ReplayGainPlugin(BeetsPlugin):
        items, nothing is done.
        """
        if not force and not self.album_requires_gain(album):
-            self._log.info("Skipping album {0}", album)
+            self._log.info("Skipping album {}", album)
            return

        items_iter = iter(album.items())
        use_r128 = self.should_use_r128(next(items_iter))
        if any(use_r128 != self.should_use_r128(i) for i in items_iter):
            self._log.error(
-                "Cannot calculate gain for album {0} (incompatible formats)",
+                "Cannot calculate gain for album {} (incompatible formats)",
                album,
            )
            return

-        self._log.info("analyzing {0}", album)
+        self._log.info("analyzing {}", album)

        discs: dict[int, list[Item]] = {}
        if self.config["per_disc"].get(bool):

@@ -1384,7 +1366,7 @@ class ReplayGainPlugin(BeetsPlugin):
                callback=store_cb,
            )
        except ReplayGainError as e:
-            self._log.info("ReplayGain error: {0}", e)
+            self._log.info("ReplayGain error: {}", e)
        except FatalReplayGainError as e:
            raise ui.UserError(f"Fatal replay gain error: {e}")

@@ -1396,7 +1378,7 @@ class ReplayGainPlugin(BeetsPlugin):
        in the item, nothing is done.
        """
        if not force and not self.track_requires_gain(item):
-            self._log.info("Skipping track {0}", item)
+            self._log.info("Skipping track {}", item)
            return

        use_r128 = self.should_use_r128(item)

@@ -1413,7 +1395,7 @@ class ReplayGainPlugin(BeetsPlugin):
                callback=store_cb,
            )
        except ReplayGainError as e:
-            self._log.info("ReplayGain error: {0}", e)
+            self._log.info("ReplayGain error: {}", e)
        except FatalReplayGainError as e:
            raise ui.UserError(f"Fatal replay gain error: {e}")

@@ -1526,18 +1508,16 @@ class ReplayGainPlugin(BeetsPlugin):
        if opts.album:
            albums = lib.albums(args)
            self._log.info(
-                "Analyzing {} albums ~ {} backend...".format(
-                    len(albums), self.backend_name
-                )
+                f"Analyzing {len(albums)} albums ~"
+                f" {self.backend_name} backend..."
            )
            for album in albums:
                self.handle_album(album, write, force)
        else:
            items = lib.items(args)
            self._log.info(
-                "Analyzing {} tracks ~ {} backend...".format(
-                    len(items), self.backend_name
-                )
+                f"Analyzing {len(items)} tracks ~"
+                f" {self.backend_name} backend..."
            )
            for item in items:
                self.handle_track(item, write, force)

@@ -1556,8 +1536,10 @@ class ReplayGainPlugin(BeetsPlugin):
            "--threads",
            dest="threads",
            type=int,
-            help="change the number of threads, \
-defaults to maximum available processors",
+            help=(
+                "change the number of threads, defaults to maximum available"
+                " processors"
+            ),
        )
        cmd.parser.add_option(
            "-f",

@@ -1565,8 +1547,10 @@ class ReplayGainPlugin(BeetsPlugin):
            dest="force",
            action="store_true",
            default=False,
-            help="analyze all files, including those that "
-            "already have ReplayGain metadata",
+            help=(
+                "analyze all files, including those that already have"
+                " ReplayGain metadata"
+            ),
        )
        cmd.parser.add_option(
            "-w",
@@ -57,9 +57,9 @@ class RewritePlugin(BeetsPlugin):
                raise ui.UserError("invalid rewrite specification")
            if fieldname not in library.Item._fields:
                raise ui.UserError(
-                    "invalid field name (%s) in rewriter" % fieldname
+                    f"invalid field name ({fieldname}) in rewriter"
                )
-            self._log.debug("adding template field {0}", key)
+            self._log.debug("adding template field {}", key)
            pattern = re.compile(pattern.lower())
            rules[fieldname].append((pattern, value))
            if fieldname == "artist":

@@ -59,9 +59,7 @@ class ScrubPlugin(BeetsPlugin):
        def scrub_func(lib, opts, args):
            # Walk through matching files and remove tags.
            for item in lib.items(args):
-                self._log.info(
-                    "scrubbing: {0}", util.displayable_path(item.path)
-                )
+                self._log.info("scrubbing: {.filepath}", item)
                self._scrub_item(item, opts.write)

        scrub_cmd = ui.Subcommand("scrub", help="clean audio tags")

@@ -110,7 +108,7 @@ class ScrubPlugin(BeetsPlugin):
            f.save()
        except (OSError, mutagen.MutagenError) as exc:
            self._log.error(
-                "could not scrub {0}: {1}", util.displayable_path(path), exc
+                "could not scrub {}: {}", util.displayable_path(path), exc
            )

    def _scrub_item(self, item, restore):

@@ -124,7 +122,7 @@ class ScrubPlugin(BeetsPlugin):
                util.syspath(item.path), config["id3v23"].get(bool)
            )
        except mediafile.UnreadableFileError as exc:
-            self._log.error("could not open file to scrub: {0}", exc)
+            self._log.error("could not open file to scrub: {}", exc)
            return
        images = mf.images

@@ -144,12 +142,10 @@ class ScrubPlugin(BeetsPlugin):
            mf.images = images
            mf.save()
        except mediafile.UnreadableFileError as exc:
-            self._log.error("could not write tags: {0}", exc)
+            self._log.error("could not write tags: {}", exc)

    def import_task_files(self, session, task):
        """Automatically scrub imported files."""
        for item in task.imported_items():
-            self._log.debug(
-                "auto-scrubbing {0}", util.displayable_path(item.path)
-            )
+            self._log.debug("auto-scrubbing {.filepath}", item)
            self._scrub_item(item, ui.should_write())

@@ -138,10 +138,9 @@ class SmartPlaylistPlugin(BeetsPlugin):
                if name in args
            }
            if not playlists:
+                unmatched = [name for name, _, _ in self._unmatched_playlists]
                raise ui.UserError(
-                    "No playlist matching any of {} found".format(
-                        [name for name, _, _ in self._unmatched_playlists]
-                    )
+                    f"No playlist matching any of {unmatched} found"
                )

            self._matched_playlists = playlists

@@ -235,7 +234,7 @@ class SmartPlaylistPlugin(BeetsPlugin):
        for playlist in self._unmatched_playlists:
            n, (q, _), (a_q, _) = playlist
            if self.matches(model, q, a_q):
-                self._log.debug("{0} will be updated because of {1}", n, model)
+                self._log.debug("{} will be updated because of {}", n, model)
                self._matched_playlists.add(playlist)
                self.register_listener("cli_exit", self.update_playlists)

@@ -244,12 +243,12 @@ class SmartPlaylistPlugin(BeetsPlugin):
    def update_playlists(self, lib, pretend=False):
        if pretend:
            self._log.info(
-                "Showing query results for {0} smart playlists...",
+                "Showing query results for {} smart playlists...",
                len(self._matched_playlists),
            )
        else:
            self._log.info(
-                "Updating {0} smart playlists...", len(self._matched_playlists)
+                "Updating {} smart playlists...", len(self._matched_playlists)
            )

        playlist_dir = self.config["playlist_dir"].as_filename()

@@ -268,7 +267,7 @@ class SmartPlaylistPlugin(BeetsPlugin):
            if pretend:
                self._log.info("Results for playlist {}:", name)
            else:
-                self._log.info("Creating playlist {0}", name)
+                self._log.info("Creating playlist {}", name)
            items = []

            if query:

@@ -331,8 +330,9 @@ class SmartPlaylistPlugin(BeetsPlugin):
                    for key, value in attr
                ]
                attrs = "".join(al)
-                comment = "#EXTINF:{}{},{} - {}\n".format(
-                    int(item.length), attrs, item.artist, item.title
+                comment = (
+                    f"#EXTINF:{int(item.length)}{attrs},"
+                    f"{item.artist} - {item.title}\n"
                )
                f.write(comment.encode("utf-8") + entry.uri + b"\n")
        # Send an event when playlists were updated.

@@ -340,13 +340,11 @@ class SmartPlaylistPlugin(BeetsPlugin):

        if pretend:
            self._log.info(
-                "Displayed results for {0} playlists",
+                "Displayed results for {} playlists",
                len(self._matched_playlists),
            )
        else:
-            self._log.info(
-                "{0} playlists updated", len(self._matched_playlists)
-            )
+            self._log.info("{} playlists updated", len(self._matched_playlists))


 class PlaylistItem:

@@ -29,7 +29,6 @@ from typing import TYPE_CHECKING, Any, Literal, Sequence, Union

-import confuse
 import requests
 import unidecode

 from beets import ui
 from beets.autotag.hooks import AlbumInfo, TrackInfo

@@ -131,15 +130,11 @@ class SpotifyPlugin(
                "mode": "list",
                "tiebreak": "popularity",
                "show_failures": False,
-                "artist_field": "albumartist",
-                "album_field": "album",
-                "track_field": "title",
-                "region_filter": None,
                "regex": [],
                "client_id": "4e414367a1d14c75a5c5129a627fcab8",
                "client_secret": "f82bdc09b2254f1a8286815d02fd46dc",
                "tokenfile": "spotify_token.json",
                "search_query_ascii": False,
            }
        )
        self.config["client_id"].redact = True

@@ -170,8 +165,9 @@ class SpotifyPlugin(
        c_secret: str = self.config["client_secret"].as_str()

        headers = {
-            "Authorization": "Basic {}".format(
-                base64.b64encode(f"{c_id}:{c_secret}".encode()).decode()
+            "Authorization": (
+                "Basic"
+                f" {base64.b64encode(f'{c_id}:{c_secret}'.encode()).decode()}"
            )
        }
        response = requests.post(

@@ -184,14 +180,12 @@ class SpotifyPlugin(
            response.raise_for_status()
        except requests.exceptions.HTTPError as e:
            raise ui.UserError(
-                "Spotify authorization failed: {}\n{}".format(e, response.text)
+                f"Spotify authorization failed: {e}\n{response.text}"
            )
        self.access_token = response.json()["access_token"]

        # Save the token for later use.
-        self._log.debug(
-            "{} access token: {}", self.data_source, self.access_token
-        )
+        self._log.debug("{0.data_source} access token: {0.access_token}", self)
        with open(self._tokenfile(), "w") as f:
            json.dump({"access_token": self.access_token}, f)

@@ -229,16 +223,16 @@ class SpotifyPlugin(
            self._log.error("ReadTimeout.")
            raise APIError("Request timed out.")
        except requests.exceptions.ConnectionError as e:
-            self._log.error(f"Network error: {e}")
+            self._log.error("Network error: {}", e)
            raise APIError("Network error.")
        except requests.exceptions.RequestException as e:
            if e.response is None:
-                self._log.error(f"Request failed: {e}")
+                self._log.error("Request failed: {}", e)
                raise APIError("Request failed.")
            if e.response.status_code == 401:
                self._log.debug(
-                    f"{self.data_source} access token has expired. "
-                    f"Reauthenticating."
+                    "{.data_source} access token has expired. Reauthenticating.",
+                    self,
                )
                self._authenticate()
                return self._handle_response(

@@ -257,7 +251,7 @@ class SpotifyPlugin(
                    "Retry-After", DEFAULT_WAITING_TIME
                )
                self._log.debug(
-                    f"Too many API requests. Retrying after {seconds} seconds."
+                    "Too many API requests. Retrying after {} seconds.", seconds
                )
                time.sleep(int(seconds) + 1)
                return self._handle_response(

@@ -278,7 +272,7 @@ class SpotifyPlugin(
                    f"URL:\n{url}\nparams:\n{params}"
                )
            else:
-                self._log.error(f"Request failed. Error: {e}")
+                self._log.error("Request failed. Error: {}", e)
                raise APIError("Request failed.")

    def album_for_id(self, album_id: str) -> AlbumInfo | None:

@@ -293,7 +287,9 @@ class SpotifyPlugin(
        if not (spotify_id := self._extract_id(album_id)):
            return None

-        album_data = self._handle_response("get", self.album_url + spotify_id)
+        album_data = self._handle_response(
+            "get", f"{self.album_url}{spotify_id}"
+        )
        if album_data["name"] == "":
            self._log.debug("Album removed from Spotify: {}", album_id)
            return None

@@ -316,9 +312,7 @@ class SpotifyPlugin(
        else:
            raise ui.UserError(
                "Invalid `release_date_precision` returned "
-                "by {} API: '{}'".format(
-                    self.data_source, release_date_precision
-                )
+                f"by {self.data_source} API: '{release_date_precision}'"
            )

        tracks_data = album_data["tracks"]

@@ -411,7 +405,7 @@ class SpotifyPlugin(
        # release) and `track.medium_total` (total number of tracks on
        # the track's disc).
        album_data = self._handle_response(
-            "get", self.album_url + track_data["album"]["id"]
+            "get", f"{self.album_url}{track_data['album']['id']}"
        )
        medium_total = 0
        for i, track_data in enumerate(album_data["tracks"]["items"], start=1):

@@ -422,62 +416,43 @@ class SpotifyPlugin(
        track.medium_total = medium_total
        return track

-    def _construct_search_query(
-        self, filters: SearchFilter, keywords: str = ""
-    ) -> str:
-        """Construct a query string with the specified filters and keywords to
-        be provided to the Spotify Search API
-        (https://developer.spotify.com/documentation/web-api/reference/search).
-
-        :param filters: (Optional) Field filters to apply.
-        :param keywords: (Optional) Query keywords to use.
-        :return: Query string to be provided to the Search API.
-        """
-
-        query_components = [
-            keywords,
-            " ".join(f"{k}:{v}" for k, v in filters.items()),
-        ]
-        query = " ".join([q for q in query_components if q])
-        if not isinstance(query, str):
-            query = query.decode("utf8")
-
-        if self.config["search_query_ascii"].get():
-            query = unidecode.unidecode(query)
-
-        return query
-
    def _search_api(
        self,
        query_type: Literal["album", "track"],
        filters: SearchFilter,
-        keywords: str = "",
+        query_string: str = "",
    ) -> Sequence[SearchResponseAlbums | SearchResponseTracks]:
-        """Query the Spotify Search API for the specified ``keywords``,
+        """Query the Spotify Search API for the specified ``query_string``,
        applying the provided ``filters``.

        :param query_type: Item type to search across. Valid types are:
            'album', 'artist', 'playlist', and 'track'.
-        :param filters: (Optional) Field filters to apply.
-        :param keywords: (Optional) Query keywords to use.
+        :param filters: Field filters to apply.
+        :param query_string: Additional query to include in the search.
        """
-        query = self._construct_search_query(keywords=keywords, filters=filters)
+        query = self._construct_search_query(
+            filters=filters, query_string=query_string
+        )

-        self._log.debug(f"Searching {self.data_source} for '{query}'")
+        self._log.debug("Searching {.data_source} for '{}'", self, query)
        try:
            response = self._handle_response(
                "get",
                self.search_url,
-                params={"q": query, "type": query_type},
+                params={
+                    "q": query,
+                    "type": query_type,
+                    "limit": self.config["search_limit"].get(),
+                },
            )
        except APIError as e:
            self._log.debug("Spotify API error: {}", e)
            return ()
-        response_data = response.get(query_type + "s", {}).get("items", [])
+        response_data = response.get(f"{query_type}s", {}).get("items", [])
        self._log.debug(
-            "Found {} result(s) from {} for '{}'",
+            "Found {} result(s) from {.data_source} for '{}'",
            len(response_data),
-            self.data_source,
+            self,
            query,
        )
        return response_data

@@ -497,17 +472,17 @@ class SpotifyPlugin(
            "-m",
            "--mode",
            action="store",
-            help='"open" to open {} with playlist, '
-            '"list" to print (default)'.format(self.data_source),
+            help=(
+                f'"open" to open {self.data_source} with playlist, '
+                '"list" to print (default)'
+            ),
        )
        spotify_cmd.parser.add_option(
            "-f",
            "--show-failures",
            action="store_true",
            dest="show_failures",
-            help="list tracks that did not match a {} ID".format(
-                self.data_source
-            ),
+            help=f"list tracks that did not match a {self.data_source} ID",
        )
        spotify_cmd.func = queries

@@ -540,7 +515,7 @@ class SpotifyPlugin(

        if self.config["mode"].get() not in ["list", "open"]:
            self._log.warning(
-                "{0} is not a valid mode", self.config["mode"].get()
+                "{} is not a valid mode", self.config["mode"].get()
            )
            return False

@@ -563,8 +538,8 @@ class SpotifyPlugin(

        if not items:
            self._log.debug(
-                "Your beets query returned no items, skipping {}.",
-                self.data_source,
+                "Your beets query returned no items, skipping {.data_source}.",
+                self,
            )
            return

@@ -585,19 +560,25 @@ class SpotifyPlugin(
                        regex["search"], regex["replace"], value
                    )

-            # Custom values can be passed in the config (just in case)
-            artist = item[self.config["artist_field"].get()]
-            album = item[self.config["album_field"].get()]
-            keywords = item[self.config["track_field"].get()]
+            artist = item["artist"] or item["albumartist"]
+            album = item["album"]
+            query_string = item["title"]

            # Query the Web API for each track, look for the items' JSON data
-            query_filters: SearchFilter = {"artist": artist, "album": album}
+            query_filters: SearchFilter = {}
+            if artist:
+                query_filters["artist"] = artist
+            if album:
+                query_filters["album"] = album
+
            response_data_tracks = self._search_api(
-                query_type="track", keywords=keywords, filters=query_filters
+                query_type="track",
+                query_string=query_string,
+                filters=query_filters,
            )
            if not response_data_tracks:
                query = self._construct_search_query(
-                    keywords=keywords, filters=query_filters
+                    query_string=query_string, filters=query_filters
                )

                failures.append(query)

@@ -617,8 +598,8 @@ class SpotifyPlugin(
                    or self.config["tiebreak"].get() == "first"
                ):
                    self._log.debug(
-                        "{} track(s) found, count: {}",
-                        self.data_source,
+                        "{.data_source} track(s) found, count: {}",
+                        self,
                        len(response_data_tracks),
                    )
                    chosen_result = response_data_tracks[0]

@@ -641,19 +622,19 @@ class SpotifyPlugin(
        if failure_count > 0:
            if self.config["show_failures"].get():
                self._log.info(
-                    "{} track(s) did not match a {} ID:",
+                    "{} track(s) did not match a {.data_source} ID:",
                    failure_count,
-                    self.data_source,
+                    self,
                )
                for track in failures:
                    self._log.info("track: {}", track)
                self._log.info("")
            else:
                self._log.warning(
-                    "{} track(s) did not match a {} ID:\n"
+                    "{} track(s) did not match a {.data_source} ID:\n"
                    "use --show-failures to display",
                    failure_count,
-                    self.data_source,
+                    self,
                )

        return results

@@ -670,20 +651,18 @@ class SpotifyPlugin(
            spotify_ids = [track_data["id"] for track_data in results]
            if self.config["mode"].get() == "open":
                self._log.info(
-                    "Attempting to open {} with playlist".format(
-                        self.data_source
-                    )
+                    "Attempting to open {.data_source} with playlist", self
                )
-                spotify_url = "spotify:trackset:Playlist:" + ",".join(
-                    spotify_ids
+                spotify_url = (
+                    f"spotify:trackset:Playlist:{','.join(spotify_ids)}"
                )
                webbrowser.open(spotify_url)
            else:
                for spotify_id in spotify_ids:
-                    print(self.open_track_url + spotify_id)
+                    print(f"{self.open_track_url}{spotify_id}")
        else:
            self._log.warning(
-                f"No {self.data_source} tracks found from beets query"
+                "No {.data_source} tracks found from beets query", self
            )

    def _fetch_info(self, items, write, force):

@@ -715,12 +694,10 @@ class SpotifyPlugin(
            audio_features = self.track_audio_features(spotify_track_id)
            if audio_features is None:
                self._log.info("No audio features found for: {}", item)
-                continue
-            for feature in audio_features.keys():
-                if feature in self.spotify_audio_features.keys():
-                    item[self.spotify_audio_features[feature]] = audio_features[
-                        feature
-                    ]
+            else:
+                for feature, value in audio_features.items():
+                    if feature in self.spotify_audio_features:
+                        item[self.spotify_audio_features[feature]] = value
            item["spotify_updated"] = time.time()
            item.store()
            if write:

@@ -728,7 +705,7 @@ class SpotifyPlugin(

    def track_info(self, track_id: str):
        """Fetch a track's popularity and external IDs using its Spotify ID."""
-        track_data = self._handle_response("get", self.track_url + track_id)
+        track_data = self._handle_response("get", f"{self.track_url}{track_id}")
        external_ids = track_data.get("external_ids", {})
        popularity = track_data.get("popularity")
        self._log.debug(

@@ -747,7 +724,7 @@ class SpotifyPlugin(
        """Fetch track audio features by its Spotify ID."""
        try:
            return self._handle_response(
-                "get", self.audio_features_url + track_id
+                "get", f"{self.audio_features_url}{track_id}"
            )
        except APIError as e:
            self._log.debug("Spotify API error: {}", e)
@@ -168,9 +168,7 @@ class SubsonicPlaylistPlugin(BeetsPlugin):
        params["v"] = "1.12.0"
        params["c"] = "beets"
        resp = requests.get(
-            "{}/rest/{}?{}".format(
-                self.config["base_url"].get(), endpoint, urlencode(params)
-            ),
+            f"{self.config['base_url'].get()}/rest/{endpoint}?{urlencode(params)}",
            timeout=10,
        )
        return resp

@@ -182,5 +180,5 @@ class SubsonicPlaylistPlugin(BeetsPlugin):
        for track in tracks:
            if track not in output:
                output[track] = ";"
-            output[track] += name + ";"
+            output[track] += f"{name};"
        return output

@@ -74,7 +74,7 @@ class SubsonicUpdate(BeetsPlugin):
        # Pick the random sequence and salt the password
        r = string.ascii_letters + string.digits
        salt = "".join([random.choice(r) for _ in range(6)])
-        salted_password = password + salt
+        salted_password = f"{password}{salt}"
        token = hashlib.md5(salted_password.encode("utf-8")).hexdigest()

        # Put together the payload of the request to the server and the URL

@@ -101,14 +101,14 @@ class SubsonicUpdate(BeetsPlugin):
            context_path = ""
        url = f"http://{host}:{port}{context_path}"

-        return url + f"/rest/{endpoint}"
+        return f"{url}/rest/{endpoint}"

    def start_scan(self):
        user = self.config["user"].as_str()
        auth = self.config["auth"].as_str()
        url = self.__format_url("startScan")
-        self._log.debug("URL is {0}", url)
-        self._log.debug("auth type is {0}", self.config["auth"])
+        self._log.debug("URL is {}", url)
+        self._log.debug("auth type is {.config[auth]}", self)

        if auth == "token":
            salt, token = self.__create_token()

@@ -145,14 +145,15 @@ class SubsonicUpdate(BeetsPlugin):
                and json["subsonic-response"]["status"] == "ok"
            ):
                count = json["subsonic-response"]["scanStatus"]["count"]
-                self._log.info(f"Updating Subsonic; scanning {count} tracks")
+                self._log.info("Updating Subsonic; scanning {} tracks", count)
            elif (
                response.status_code == 200
                and json["subsonic-response"]["status"] == "failed"
            ):
                error_message = json["subsonic-response"]["error"]["message"]
-                self._log.error(f"Error: {error_message}")
+                self._log.error(
+                    "Error: {[subsonic-response][error][message]}", json
+                )
            else:
-                self._log.error("Error: {0}", json)
+                self._log.error("Error: {}", json)
        except Exception as error:
-            self._log.error(f"Error: {error}")
+            self._log.error("Error: {}", error)
Some files were not shown because too many files have changed in this diff.