Mirror of https://github.com/beetbox/beets.git (synced 2026-02-08 08:25:23 +01:00)

Merge branch 'master' into generic-cached-classproperty
Commit 625f05adc1
211 changed files with 16896 additions and 8111 deletions
@@ -27,6 +27,14 @@ af102c3e2f1c7a49e99839e2825906fe01780eec
910354a6c617ed5aa643cff666205b43e1557373
# pyupgrade beetsplug and tests
1ec87a3bdd737abe46c6e614051bf9e314db4619
# Updates docstrings in library.py.
8c5ced3ee11a353546034189736c6001115135a4
# Fixes inconsistencies in ending quote placements for single-line docstrings.
bbd32639b4c469fe3d6668f1e3bb17d8ba7a70ce
# Fixes linting errors by removing trailing whitespaces.
acf576c455e59e8197359d4517f8c0a5a9f362bb
# Alters docstrings in library.py to be imperative-style.
2f42c8b1c019a90448d33d940b609c18ba644cbc

# 2022
# Reformat flake8 config comments
@@ -37,22 +45,78 @@ abc3dfbf429b179fac25bd1dff72d577cd4d04c7
a6e5201ff3fad4c69bf24d17bace2ef744b9f51b

# 2024
# Replace assertTrue
0ecc345143cf89fabe74bb2e95eedfa1114857a3
# Replace assertFalse
cb82917fe0d5476c74bb946f91ea0d9a9f019c9b
# Replace assertIsNone
5d4911e905d3a89793332eb851035e6529c0725e
# Replace assertIsNotNone
2616bcc950e592745713f28db0192293410ed3e3
# Replace assertIn
11e948121cde969f9ea27caa545a6508145572fb
# Replace assertNotIn
6631b6aef6da3e09d3531de6df7995dd5396398f
# Replace assertEqual
9a05d27acfef3788d10dd0a8db72a6c8c15dfbe9
# Replace assertNotEqual
f9359df0d15ea8ee8e3c80bc198e779f185160cb
# Replace assertIsInstance
eda0ef11d67f482fe50bbe581685b8b6a284afb9
# Replace assertLess and assertLessEqual
6a3380bcb5e803e825bd9485fcc4b70d352947eb
# Replace assertGreater and assertGreaterEqual
46bdb84b464ffec3f0ce88d53467391be7b7046f
# Replace assertCountEqual
fdb8c28271e8b22d458330598a524067ca37026e
# Replace assertListEqual
fcc4d8481df295019945ac7973906f960c58c9fb
# Use f-string syntax
4b69b493d2630b723684f259ee9e7e07c480e8ee
# Reformat the codebase
85a17ee5039628a6f3cdcb7a03d7d1bd530fbe89
# Fix lint issues
f36bc497c8c8f89004f3f6879908d3f0b25123e1
# Remove some lint exclusions and fix the issues
5f78d1b82b2292d5ce0c99623ba0ec444b80d24c
# Use PEP585 lowercase collections typing annotations
51f9dd229e64f5106d69f87906a94e75604f346b
# Remove unnecessary quotes from types
fbfdfd54446fab6782ef0629da303f14f0a2ecdf
# Replace Union types by PEP604 pipe character
7ef1b61070ed4ed79c4720d019968baf38e38050
# Update deprecated imports
161b0522bbf7f4984173fee4128416b05f6cc5f3
# Move imports required for typing under the TYPE_CHECKING block
5c81f94cf7ced476673d0fa948cc7ecda00bae99

# 2025
# Fix formatting
c490ac5810b70f3cf5fd8649669838e8fdb19f4d
# Importer restructure
9147577b2b19f43ca827e9650261a86fb0450cef
# Move functionality under MusicBrainz plugin
529aaac7dced71266c6d69866748a7d044ec20ff
# musicbrainz: reorder methods
5dc6f45110b99f0cc8dbb94251f9b1f6d69583fa
# Copy paste query, types from library to dbcore
1a045c91668c771686f4c871c84f1680af2e944b
# Library restructure (split library.py into multiple modules)
0ad4e19d4f870db757373f44d12ff3be2441363a
# Split library file into different files inside library folder.
98377ab5f6fc1829d79211b376bfd8d82bafaf33
# Use pathlib.Path in test_smartplaylist.py
d017270196dc8e0e2a4051afa5d05213946cbbbc
# Replace assertIsFile
ca4fa6ba10807f4a48a428d23e45c023c15dfa7d
# Replace assertIsDir
43b8cce063b1a1ef079266f362272307fb328d73
# Replace assertFileTag and assertNoFileTag
c6b5b3bed31704f7fe8632a6aef1a2348028348f
# Replace assertAlbumImport
3c8179a762c4387f9c40a12e3b9e560ff1c194ec
# Replace assertCount
72caf0d2cdc8fcefe1c252bdb0ac9b11b90cc649
# Docs: fix linting issues
769dcdc88a1263638ae25944ba6b2be3e8933666
# Reformat all docs using docstrfmt
@@ -73,3 +137,21 @@ d93ddf8dd43e4f9ed072a03829e287c78d2570a2
33f1a5d0bef8ca08be79ee7a0d02a018d502680d
# Moved art.py utility module from beets into beetsplug
28aee0fde463f1e18dfdba1994e2bdb80833722f
# Refactor `ui/commands.py` into multiple modules
59c93e70139f70e9fd1c6f3c1bceb005945bec33
# Moved ui.commands._utils into ui.commands.utils
25ae330044abf04045e3f378f72bbaed739fb30d
# Refactor test_ui_command.py into multiple modules
a59e41a88365e414db3282658d2aa456e0b3468a
# pyupgrade Python 3.10
301637a1609831947cb5dd90270ed46c24b1ab1b
# Fix changelog formatting
658b184c59388635787b447983ecd3a575f4fe56
# Configure future-annotations
ac7f3d9da95c2d0a32e5c908ea68480518a1582d
# Configure ruff for py310
c46069654628040316dea9db85d01b263db3ba9e
# Enable RUF rules
4749599913a42e02e66b37db9190de11d6be2cdf
# Address RUF012
bc71ec308eb938df1d349f6857634ddf2a82e339
.github/CODEOWNERS (4 changes)
@@ -2,4 +2,6 @@
* @beetbox/maintainers

# Specific ownerships:
/beets/metadata_plugins.py @semohr
+/beetsplug/titlecase.py @henry-oberholtzer
+/beetsplug/mbpseudo.py @asardaes
.github/workflows/ci.yaml (14 changes)
@@ -20,10 +20,10 @@ jobs:
      fail-fast: false
      matrix:
        platform: [ubuntu-latest, windows-latest]
-        python-version: ["3.9", "3.10", "3.11", "3.12"]
+        python-version: ["3.10", "3.11", "3.12", "3.13"]
    runs-on: ${{ matrix.platform }}
    env:
-      IS_MAIN_PYTHON: ${{ matrix.python-version == '3.9' && matrix.platform == 'ubuntu-latest' }}
+      IS_MAIN_PYTHON: ${{ matrix.python-version == '3.10' && matrix.platform == 'ubuntu-latest' }}
    steps:
      - uses: actions/checkout@v5
      - name: Install Python tools
@@ -39,7 +39,15 @@ jobs:
        if: matrix.platform == 'ubuntu-latest'
        run: |
          sudo apt update
-          sudo apt install --yes --no-install-recommends ffmpeg gobject-introspection gstreamer1.0-plugins-base python3-gst-1.0 libcairo2-dev libgirepository-2.0-dev pandoc imagemagick
+          sudo apt install --yes --no-install-recommends \
+            ffmpeg \
+            gobject-introspection \
+            gstreamer1.0-plugins-base \
+            python3-gst-1.0 \
+            libcairo2-dev \
+            libgirepository-2.0-dev \
+            pandoc \
+            imagemagick

      - name: Get changed lyrics files
        id: lyrics-update
.github/workflows/integration_test.yaml (6 changes)
@@ -3,6 +3,10 @@ on:
  workflow_dispatch:
  schedule:
    - cron: "0 0 * * SUN" # run every Sunday at midnight

+env:
+  PYTHON_VERSION: "3.10"

jobs:
  test_integration:
    runs-on: ubuntu-latest
@@ -12,7 +16,7 @@ jobs:
        uses: BrandonLWhite/pipx-install-action@v1.0.3
      - uses: actions/setup-python@v6
        with:
-          python-version: 3.9
+          python-version: ${{ env.PYTHON_VERSION }}
          cache: poetry

      - name: Install dependencies
@@ -12,7 +12,7 @@ concurrency:
  cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}

env:
-  PYTHON_VERSION: 3.9
+  PYTHON_VERSION: "3.10"

jobs:
  changed-files:
.github/workflows/make_release.yaml (2 changes)
@@ -8,7 +8,7 @@ on:
        required: true

env:
-  PYTHON_VERSION: 3.9
+  PYTHON_VERSION: "3.10"
  NEW_VERSION: ${{ inputs.version }}
  NEW_TAG: v${{ inputs.version }}
@@ -124,12 +124,12 @@ command. Instead, you can activate the virtual environment in your shell with:

    $ poetry shell

-You should see ``(beets-py3.9)`` prefix in your shell prompt. Now you can run
+You should see ``(beets-py3.10)`` prefix in your shell prompt. Now you can run
commands directly, for example:

::

-    $ (beets-py3.9) pytest
+    $ (beets-py3.10) pytest

Additionally, poethepoet_ task runner assists us with the most common
operations. Formatting, linting, testing are defined as ``poe`` tasks in
@@ -286,31 +286,6 @@ according to the specifications required by the project.
Similarly, run ``poe format-docs`` and ``poe lint-docs`` to ensure consistent
documentation formatting and check for any issues.

-Handling Paths
-~~~~~~~~~~~~~~
-
-A great deal of convention deals with the handling of **paths**. Paths are
-stored internally—in the database, for instance—as byte strings (i.e., ``bytes``
-instead of ``str`` in Python 3). This is because POSIX operating systems’ path
-names are only reliably usable as byte strings—operating systems typically
-recommend but do not require that filenames use a given encoding, so violations
-of any reported encoding are inevitable. On Windows, the strings are always
-encoded with UTF-8; on Unix, the encoding is controlled by the filesystem. Here
-are some guidelines to follow:
-
-- If you have a Unicode path or you’re not sure whether something is Unicode or
-  not, pass it through ``bytestring_path`` function in the ``beets.util`` module
-  to convert it to bytes.
-- Pass every path name through the ``syspath`` function (also in ``beets.util``)
-  before sending it to any *operating system* file operation (``open``, for
-  example). This is necessary to use long filenames (which, maddeningly, must be
-  Unicode) on Windows. This allows us to consistently store bytes in the
-  database but use the native encoding rule on both POSIX and Windows.
-- Similarly, the ``displayable_path`` utility function converts bytestring paths
-  to a Unicode string for displaying to the user. Every time you want to print
-  out a string to the terminal or log it with the ``logging`` module, feed it
-  through this function.

Editor Settings
~~~~~~~~~~~~~~~
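The path-handling convention removed above is easiest to see in code. A
minimal sketch, assuming only that ``bytestring_path``, ``syspath``, and
``displayable_path`` are importable from ``beets.util`` as the section
describes (the file path itself is hypothetical)::

    from beets.util import bytestring_path, displayable_path, syspath

    # Normalize an arbitrary (possibly Unicode) path to the internal bytes form.
    path = bytestring_path("Music/album/track.flac")

    # Wrap OS-level calls with syspath(), which handles Windows long filenames.
    with open(syspath(path), "rb") as f:
        header = f.read(4)

    # Convert back to readable text before printing or logging.
    print(f"read {len(header)} bytes from {displayable_path(path)}")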
@@ -85,7 +85,7 @@ simple if you know a little Python.

.. _transcode audio: https://beets.readthedocs.org/page/plugins/convert.html

-.. _writing your own plugin: https://beets.readthedocs.org/page/dev/plugins.html
+.. _writing your own plugin: https://beets.readthedocs.org/page/dev/plugins/index.html

Install
-------
@@ -79,7 +79,7 @@ Because beets is designed as a library, your music

.. _transcode audio: https://beets.readthedocs.org/page/plugins/convert.html

-.. _writing your own plugin: https://beets.readthedocs.org/page/dev/plugins.html
+.. _writing your own plugin: https://beets.readthedocs.org/page/dev/plugins/index.html

Install
-------
@@ -17,22 +17,18 @@ from sys import stderr

import confuse

-from .util import deprecate_imports
+from .util.deprecation import deprecate_imports

-__version__ = "2.5.1"
+__version__ = "2.6.1"
__author__ = "Adrian Sampson <adrian@radbox.org>"


def __getattr__(name: str):
    """Handle deprecated imports."""
    return deprecate_imports(
-        old_module=__name__,
-        new_module_by_name={
-            "art": "beetsplug._utils",
-            "vfs": "beetsplug._utils",
-        },
-        name=name,
-        version="3.0.0",
+        __name__,
+        {"art": "beetsplug._utils", "vfs": "beetsplug._utils"},
+        name,
    )
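The module-level ``__getattr__`` above is the PEP 562 deprecation-shim
pattern. A standalone sketch of the same idea, with hypothetical
``oldpkg``/``newpkg`` names rather than the real ``deprecate_imports``
helper:

    import importlib
    import warnings

    _NEW_MODULE_BY_NAME = {"art": "newpkg._utils", "vfs": "newpkg._utils"}

    def __getattr__(name: str):
        # Called only when normal attribute lookup on this module fails.
        if new_module := _NEW_MODULE_BY_NAME.get(name):
            warnings.warn(
                f"'oldpkg.{name}' is deprecated; import it from '{new_module}'",
                DeprecationWarning,
                stacklevel=2,
            )
            return getattr(importlib.import_module(new_module), name)
        raise AttributeError(f"module 'oldpkg' has no attribute {name!r}")

Because the hook only fires on failed lookups, normal imports pay no cost.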
@@ -16,39 +16,33 @@

from __future__ import annotations

-import warnings
from importlib import import_module
-from typing import TYPE_CHECKING, Union
+from typing import TYPE_CHECKING

from beets import config, logging

# Parts of external interface.
from beets.util import unique_list
+from beets.util.deprecation import deprecate_for_maintainers, deprecate_imports

-from ..util import deprecate_imports
from .hooks import AlbumInfo, AlbumMatch, TrackInfo, TrackMatch
from .match import Proposal, Recommendation, tag_album, tag_item

if TYPE_CHECKING:
-    from collections.abc import Mapping, Sequence
+    from collections.abc import Sequence

    from beets.library import Album, Item, LibModel


def __getattr__(name: str):
    if name == "current_metadata":
-        warnings.warn(
-            (
-                f"'beets.autotag.{name}' is deprecated and will be removed in"
-                " 3.0.0. Use 'beets.util.get_most_common_tags' instead."
-            ),
-            DeprecationWarning,
-            stacklevel=2,
+        deprecate_for_maintainers(
+            f"'beets.autotag.{name}'", "'beets.util.get_most_common_tags'"
        )
        return import_module("beets.util").get_most_common_tags

    return deprecate_imports(
-        __name__, {"Distance": "beets.autotag.distance"}, name, "3.0.0"
+        __name__, {"Distance": "beets.autotag.distance"}, name
    )
@@ -117,8 +111,8 @@ SPECIAL_FIELDS = {


def _apply_metadata(
-    info: Union[AlbumInfo, TrackInfo],
-    db_obj: Union[Album, Item],
+    info: AlbumInfo | TrackInfo,
+    db_obj: Album | Item,
    nullable_fields: Sequence[str] = [],
):
    """Set the db_obj's metadata to match the info."""
@@ -210,11 +204,11 @@ def apply_album_metadata(album_info: AlbumInfo, album: Album):
    correct_list_fields(album)


-def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]):
-    """Set the items' metadata to match an AlbumInfo object using a
-    mapping from Items to TrackInfo objects.
-    """
-    for item, track_info in mapping.items():
+def apply_metadata(
+    album_info: AlbumInfo, item_info_pairs: list[tuple[Item, TrackInfo]]
+):
+    """Set items metadata to match corresponding tagged info."""
+    for item, track_info in item_info_pairs:
        # Artist or artist credit.
        if config["artist_credit"]:
            item.artist = (
@@ -422,7 +422,7 @@ def track_distance(
def distance(
    items: Sequence[Item],
    album_info: AlbumInfo,
-    mapping: dict[Item, TrackInfo],
+    item_info_pairs: list[tuple[Item, TrackInfo]],
) -> Distance:
    """Determines how "significant" an album metadata change would be.
    Returns a Distance object. `album_info` is an AlbumInfo object
@@ -518,16 +518,16 @@ def distance(

    # Tracks.
    dist.tracks = {}
-    for item, track in mapping.items():
+    for item, track in item_info_pairs:
        dist.tracks[track] = track_distance(item, track, album_info.va)
        dist.add("tracks", dist.tracks[track].distance)

    # Missing tracks.
-    for _ in range(len(album_info.tracks) - len(mapping)):
+    for _ in range(len(album_info.tracks) - len(item_info_pairs)):
        dist.add("missing_tracks", 1.0)

    # Unmatched tracks.
-    for _ in range(len(items) - len(mapping)):
+    for _ in range(len(items) - len(item_info_pairs)):
        dist.add("unmatched_tracks", 1.0)

    dist.add_data_source(likelies["data_source"], album_info.data_source)
@@ -17,10 +17,14 @@
from __future__ import annotations

from copy import deepcopy
-from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar
+from dataclasses import dataclass
+from functools import cached_property
+from typing import TYPE_CHECKING, Any, TypeVar

from typing_extensions import Self

+from beets.util import cached_classproperty

if TYPE_CHECKING:
    from beets.library import Item
@@ -54,6 +58,10 @@ class AttrDict(dict[str, V]):
class Info(AttrDict[Any]):
    """Container for metadata about a musical entity."""

+    @cached_property
+    def name(self) -> str:
+        raise NotImplementedError

    def __init__(
        self,
        album: str | None = None,
@@ -95,6 +103,10 @@ class AlbumInfo(Info):
    user items, and later to drive tagging decisions once selected.
    """

+    @cached_property
+    def name(self) -> str:
+        return self.album or ""

    def __init__(
        self,
        tracks: list[TrackInfo],
@@ -167,6 +179,10 @@ class TrackInfo(Info):
    stand alone for singleton matching.
    """

+    @cached_property
+    def name(self) -> str:
+        return self.title or ""

    def __init__(
        self,
        *,
@@ -214,16 +230,32 @@ class TrackInfo(Info):


# Structures that compose all the information for a candidate match.


-class AlbumMatch(NamedTuple):
+@dataclass
+class Match:
    distance: Distance
    info: Info

+    @cached_classproperty
+    def type(cls) -> str:
+        return cls.__name__.removesuffix("Match")  # type: ignore[attr-defined]
+
+
+@dataclass
+class AlbumMatch(Match):
+    info: AlbumInfo
    mapping: dict[Item, TrackInfo]
    extra_items: list[Item]
    extra_tracks: list[TrackInfo]

+    @property
+    def item_info_pairs(self) -> list[tuple[Item, TrackInfo]]:
+        return list(self.mapping.items())

-class TrackMatch(NamedTuple):
-    distance: Distance
+    @property
+    def items(self) -> list[Item]:
+        return [i for i, _ in self.item_info_pairs]
+
+
+@dataclass
+class TrackMatch(Match):
    info: TrackInfo
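The ``type`` classproperty on ``Match`` derives a human-readable kind from
the subclass name. A tiny self-contained sketch of that naming trick, using
a plain ``classmethod`` in place of beets' ``cached_classproperty``:

    class Match:
        @classmethod
        def type(cls) -> str:
            # "AlbumMatch" -> "Album", "TrackMatch" -> "Track"
            return cls.__name__.removesuffix("Match")

    class AlbumMatch(Match):
        pass

    class TrackMatch(Match):
        pass

    assert AlbumMatch.type() == "Album"
    assert TrackMatch.type() == "Track"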
@@ -24,8 +24,8 @@ from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar
import lap
import numpy as np

-from beets import config, logging, metadata_plugins
-from beets.autotag import AlbumInfo, AlbumMatch, TrackInfo, TrackMatch, hooks
+from beets import config, logging, metadata_plugins, plugins
+from beets.autotag import AlbumMatch, TrackMatch, hooks
from beets.util import get_most_common_tags

from .distance import VA_ARTISTS, distance, track_distance
@@ -33,6 +33,7 @@ from .distance import VA_ARTISTS, distance, track_distance
if TYPE_CHECKING:
    from collections.abc import Iterable, Sequence

+    from beets.autotag import AlbumInfo, TrackInfo
    from beets.library import Item

# Global logger.
@@ -69,7 +70,7 @@ class Proposal(NamedTuple):
def assign_items(
    items: Sequence[Item],
    tracks: Sequence[TrackInfo],
-) -> tuple[dict[Item, TrackInfo], list[Item], list[TrackInfo]]:
+) -> tuple[list[tuple[Item, TrackInfo]], list[Item], list[TrackInfo]]:
    """Given a list of Items and a list of TrackInfo objects, find the
    best mapping between them. Returns a mapping from Items to TrackInfo
    objects, a set of extra Items, and a set of extra TrackInfo
@@ -95,7 +96,7 @@ def assign_items(
    extra_items.sort(key=lambda i: (i.disc, i.track, i.title))
    extra_tracks = list(set(tracks) - set(mapping.values()))
    extra_tracks.sort(key=lambda t: (t.index, t.title))
-    return mapping, extra_items, extra_tracks
+    return list(mapping.items()), extra_items, extra_tracks


def match_by_id(items: Iterable[Item]) -> AlbumInfo | None:
@@ -217,10 +218,12 @@ def _add_candidate(
        return

    # Find mapping between the items and the track info.
-    mapping, extra_items, extra_tracks = assign_items(items, info.tracks)
+    item_info_pairs, extra_items, extra_tracks = assign_items(
+        items, info.tracks
+    )

    # Get the change distance.
-    dist = distance(items, info, mapping)
+    dist = distance(items, info, item_info_pairs)

    # Skip matches with ignored penalties.
    penalties = [key for key, _ in dist]
@@ -232,14 +235,14 @@ def _add_candidate(

    log.debug("Success. Distance: {}", dist)
    results[info.album_id] = hooks.AlbumMatch(
-        dist, info, mapping, extra_items, extra_tracks
+        dist, info, dict(item_info_pairs), extra_items, extra_tracks
    )


def tag_album(
    items,
    search_artist: str | None = None,
-    search_album: str | None = None,
+    search_name: str | None = None,
    search_ids: list[str] = [],
) -> tuple[str, str, Proposal]:
    """Return a tuple of the current artist name, the current album
@@ -274,12 +277,17 @@ def tag_album(
        log.debug("Searching for album ID: {}", search_id)
        if info := metadata_plugins.album_for_id(search_id):
            _add_candidate(items, candidates, info)
+            if opt_candidate := candidates.get(info.album_id):
+                plugins.send("album_matched", match=opt_candidate)

    # Use existing metadata or text search.
    else:
        # Try search based on current ID.
        if info := match_by_id(items):
            _add_candidate(items, candidates, info)
+            for candidate in candidates.values():
+                plugins.send("album_matched", match=candidate)

        rec = _recommendation(list(candidates.values()))
        log.debug("Album ID match recommendation is {}", rec)
        if candidates and not config["import"]["timid"]:
@@ -295,10 +303,10 @@ def tag_album(
    )

    # Search terms.
-    if not (search_artist and search_album):
+    if not (search_artist and search_name):
        # No explicit search terms -- use current metadata.
-        search_artist, search_album = cur_artist, cur_album
-        log.debug("Search terms: {} - {}", search_artist, search_album)
+        search_artist, search_name = cur_artist, cur_album
+        log.debug("Search terms: {} - {}", search_artist, search_name)

    # Is this album likely to be a "various artist" release?
    va_likely = (
@@ -310,9 +318,11 @@ def tag_album(

    # Get the results from the data sources.
    for matched_candidate in metadata_plugins.candidates(
-        items, search_artist, search_album, va_likely
+        items, search_artist, search_name, va_likely
    ):
        _add_candidate(items, candidates, matched_candidate)
+        if opt_candidate := candidates.get(matched_candidate.album_id):
+            plugins.send("album_matched", match=opt_candidate)

    log.debug("Evaluating {} candidates.", len(candidates))
    # Sort and get the recommendation.
@@ -324,7 +334,7 @@ def tag_album(
def tag_item(
    item,
    search_artist: str | None = None,
-    search_title: str | None = None,
+    search_name: str | None = None,
    search_ids: list[str] | None = None,
) -> Proposal:
    """Find metadata for a single track. Return a `Proposal` consisting
@@ -366,12 +376,12 @@ def tag_item(

    # Search terms.
    search_artist = search_artist or item.artist
-    search_title = search_title or item.title
-    log.debug("Item search terms: {} - {}", search_artist, search_title)
+    search_name = search_name or item.title
+    log.debug("Item search terms: {} - {}", search_artist, search_name)

    # Get and evaluate candidate metadata.
    for track_info in metadata_plugins.item_candidates(
-        item, search_artist, search_title
+        item, search_artist, search_name
    ):
        dist = track_distance(item, track_info, incl_artist=True)
        candidates[track_info.track_id] = hooks.TrackMatch(dist, track_info)
@@ -10,6 +10,8 @@ plugins: [musicbrainz]

pluginpath: []

+raise_on_error: no
+
# --------------- Import ---------------

clutter: ["Thumbs.DB", ".DS_Store"]
@@ -16,7 +16,7 @@
Library.
"""

-from .db import Database, Model, Results
+from .db import Database, Index, Model, Results
from .query import (
    AndQuery,
    FieldQuery,
@@ -36,6 +36,7 @@ __all__ = [
    "AndQuery",
    "Database",
    "FieldQuery",
+    "Index",
    "InvalidQueryError",
    "MatchQuery",
    "Model",
@@ -26,31 +26,42 @@ import threading
import time
from abc import ABC
from collections import defaultdict
-from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence
+from collections.abc import (
+    Callable,
+    Generator,
+    Iterable,
+    Iterator,
+    Mapping,
+    Sequence,
+)
+from functools import cached_property
-from sqlite3 import Connection, sqlite_version_info
-from typing import TYPE_CHECKING, Any, AnyStr, Callable, Generic
+from typing import TYPE_CHECKING, Any, AnyStr, ClassVar, Generic, NamedTuple

-from typing_extensions import TypeVar  # default value support
+from typing_extensions import (
+    Self,
+    TypeVar,  # default value support
+)
from unidecode import unidecode

import beets

from ..util import cached_classproperty, functemplate
from . import types
-from .query import (
-    FieldQueryType,
-    FieldSort,
-    MatchQuery,
-    NullSort,
-    Query,
-    Sort,
-    TrueQuery,
-)
+from .query import MatchQuery, NullSort, TrueQuery

if TYPE_CHECKING:
    from collections.abc import (
        Callable,
        Generator,
        Iterable,
        Iterator,
        Sequence,
    )
    from sqlite3 import Connection
    from types import TracebackType

-    from .query import SQLiteType
+    from .query import FieldQueryType, FieldSort, Query, Sort, SQLiteType

D = TypeVar("D", bound="Database", default=Any)
@@ -76,6 +87,10 @@ class DBCustomFunctionError(Exception):
    )


+class NotFoundError(LookupError):
+    pass


class FormattedMapping(Mapping[str, str]):
    """A `dict`-like formatted view of a model.
@@ -90,6 +105,8 @@ class FormattedMapping(Mapping[str, str]):
    are replaced.
    """

+    model: Model

    ALL_KEYS = "*"

    def __init__(
@@ -289,7 +306,7 @@ class Model(ABC, Generic[D]):
    """The flex field SQLite table name.
    """

-    _fields: dict[str, types.Type] = {}
+    _fields: ClassVar[dict[str, types.Type]] = {}
    """A mapping indicating available "fixed" fields on this type. The
    keys are field names and the values are `Type` objects.
    """
@@ -299,12 +316,17 @@ class Model(ABC, Generic[D]):
    terms.
    """

+    _indices: Sequence[Index] = ()
+    """A sequence of `Index` objects that describe the indices to be
+    created for this table.
+    """

    @cached_classproperty
    def _types(cls) -> dict[str, types.Type]:
        """Optional types for non-fixed (flexible and computed) fields."""
        return {}

-    _sorts: dict[str, type[FieldSort]] = {}
+    _sorts: ClassVar[dict[str, type[FieldSort]]] = {}
    """Optional named sort criteria. The keys are strings and the values
    are subclasses of `Sort`.
    """
@@ -353,6 +375,22 @@ class Model(ABC, Generic[D]):
        """Fields in the related table."""
        return cls._relation._fields.keys() - cls.shared_db_fields

+    @cached_property
+    def db(self) -> D:
+        """Get the database associated with this object.
+
+        This validates that the database is attached and the object has an id.
+        """
+        return self._check_db()
+
+    def get_fresh_from_db(self) -> Self:
+        """Load this object from the database."""
+        model_cls = self.__class__
+        if obj := self.db._get(model_cls, self.id):
+            return obj
+
+        raise NotFoundError(f"No matching {model_cls.__name__} found") from None

    @classmethod
    def _getters(cls: type[Model]):
        """Return a mapping from field names to getter functions."""
@@ -592,7 +630,6 @@ class Model(ABC, Generic[D]):
        """
        if fields is None:
            fields = self._fields
-        db = self._check_db()

        # Build assignments for query.
        assignments = []
@@ -604,7 +641,7 @@ class Model(ABC, Generic[D]):
            value = self._type(key).to_sql(self[key])
            subvars.append(value)

-        with db.transaction() as tx:
+        with self.db.transaction() as tx:
            # Main table update.
            if assignments:
                query = f"UPDATE {self._table} SET {','.join(assignments)} WHERE id=?"
@@ -638,21 +675,16 @@ class Model(ABC, Generic[D]):
        If check_revision is true, the database is only queried loaded when a
        transaction has been committed since the item was last loaded.
        """
-        db = self._check_db()
-        if not self._dirty and db.revision == self._revision:
+        if not self._dirty and self.db.revision == self._revision:
            # Exit early
            return
-        stored_obj = db._get(type(self), self.id)
-        assert stored_obj is not None, f"object {self.id} not in DB"
-        self._values_fixed = LazyConvertDict(self)
-        self._values_flex = LazyConvertDict(self)
-        self.update(dict(stored_obj))

+        self.__dict__.update(self.get_fresh_from_db().__dict__)
        self.clear_dirty()

    def remove(self):
        """Remove the object's associated rows from the database."""
-        db = self._check_db()
-        with db.transaction() as tx:
+        with self.db.transaction() as tx:
            tx.mutate(f"DELETE FROM {self._table} WHERE id=?", (self.id,))
            tx.mutate(
                f"DELETE FROM {self._flex_table} WHERE entity_id=?", (self.id,)
@@ -668,7 +700,7 @@ class Model(ABC, Generic[D]):
        """
        if db:
            self._db = db
-        db = self._check_db(False)
+        db = self._check_db(need_id=False)

        with db.transaction() as tx:
            new_id = tx.mutate(f"INSERT INTO {self._table} DEFAULT VALUES")
@@ -689,7 +721,7 @@ class Model(ABC, Generic[D]):
        self,
        included_keys: str = _formatter.ALL_KEYS,
        for_path: bool = False,
-    ):
+    ) -> FormattedMapping:
        """Get a mapping containing all values on this object formatted
        as human-readable unicode strings.
        """
@@ -733,9 +765,9 @@ class Model(ABC, Generic[D]):
        Remove the database connection as sqlite connections are not
        picklable.
        """
-        state = self.__dict__.copy()
-        state["_db"] = None
-        return state
+        return {
+            k: v for k, v in self.__dict__.items() if k not in {"_db", "db"}
+        }


# Database controller and supporting interfaces.
@@ -1059,6 +1091,7 @@ class Database:
        for model_cls in self._models:
            self._make_table(model_cls._table, model_cls._fields)
            self._make_attribute_table(model_cls._flex_table)
+            self._create_indices(model_cls._table, model_cls._indices)

    # Primitive access control: connections and transactions.
@@ -1097,6 +1130,16 @@ class Database:
            # call conn.close() in _close()
            check_same_thread=False,
        )

+        if sys.version_info >= (3, 12) and sqlite3.sqlite_version_info >= (
+            3,
+            29,
+            0,
+        ):
+            # If possible, disable double-quoted strings
+            conn.setconfig(sqlite3.SQLITE_DBCONFIG_DQS_DDL, 0)
+            conn.setconfig(sqlite3.SQLITE_DBCONFIG_DQS_DML, 0)

        self.add_functions(conn)

        if self.supports_extensions:
@@ -1236,6 +1279,19 @@ class Database:
                ON {flex_table} (entity_id);
            """)

+    def _create_indices(
+        self,
+        table: str,
+        indices: Sequence[Index],
+    ):
+        """Create indices for the given table if they don't exist."""
+        with self.transaction() as tx:
+            for index in indices:
+                tx.script(
+                    f"CREATE INDEX IF NOT EXISTS {index.name} "
+                    f"ON {table} ({', '.join(index.columns)});"
+                )

    # Querying.

    def _fetch(
@@ -1296,12 +1352,15 @@ class Database:
            sort if sort.is_slow() else None,  # Slow sort component.
        )

-    def _get(
-        self,
-        model_cls: type[AnyModel],
-        id,
-    ) -> AnyModel | None:
-        """Get a Model object by its id or None if the id does not
-        exist.
-        """
-        return self._fetch(model_cls, MatchQuery("id", id)).get()
+    def _get(self, model_cls: type[AnyModel], id_: int) -> AnyModel | None:
+        """Get a Model object by its id or None if the id does not exist."""
+        return self._fetch(model_cls, MatchQuery("id", id_)).get()


+class Index(NamedTuple):
+    """A helper class to represent the index
+    information in the database schema.
+    """
+
+    name: str
+    columns: tuple[str, ...]
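For illustration, here is the SQL the new ``_create_indices`` helper would
emit for the ``Item`` index added later in this diff (``idx_item_album_id``
on ``items.album_id``); the ``Index`` tuple is copied from above and the
table name comes from ``Item._table``:

    from typing import NamedTuple

    class Index(NamedTuple):
        name: str
        columns: tuple[str, ...]

    idx = Index("idx_item_album_id", ("album_id",))
    sql = (
        f"CREATE INDEX IF NOT EXISTS {idx.name} "
        f"ON items ({', '.join(idx.columns)});"
    )
    print(sql)
    # CREATE INDEX IF NOT EXISTS idx_item_album_id ON items (album_id);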
@@ -20,17 +20,19 @@ import os
import re
import unicodedata
from abc import ABC, abstractmethod
-from collections.abc import Iterator, MutableSequence, Sequence
+from collections.abc import Sequence
from datetime import datetime, timedelta
from functools import cached_property, reduce
from operator import mul, or_
from re import Pattern
-from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union
+from typing import TYPE_CHECKING, Any, ClassVar, Generic, TypeVar

from beets import util
from beets.util.units import raw_seconds_short

if TYPE_CHECKING:
+    from collections.abc import Iterator, MutableSequence

    from beets.dbcore.db import AnyModel, Model

P = TypeVar("P", default=Any)
@@ -122,7 +124,7 @@ class Query(ABC):
        return hash(type(self))


-SQLiteType = Union[str, bytes, float, int, memoryview, None]
+SQLiteType = str | bytes | float | int | memoryview | None
AnySQLiteType = TypeVar("AnySQLiteType", bound=SQLiteType)
FieldQueryType = type["FieldQuery"]
@@ -689,7 +691,12 @@ class Period:
        ("%Y-%m-%dT%H:%M", "%Y-%m-%d %H:%M"),  # minute
        ("%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"),  # second
    )
-    relative_units = {"y": 365, "m": 30, "w": 7, "d": 1}
+    relative_units: ClassVar[dict[str, int]] = {
+        "y": 365,
+        "m": 30,
+        "w": 7,
+        "d": 1,
+    }
    relative_re = "(?P<sign>[+|-]?)(?P<quantity>[0-9]+)(?P<timespan>[y|m|w|d])"

    def __init__(self, date: datetime, precision: str):
@@ -250,7 +250,7 @@ def parse_sorted_query(
    # Split up query in to comma-separated subqueries, each representing
    # an AndQuery, which need to be joined together in one OrQuery
    subquery_parts = []
-    for part in parts + [","]:
+    for part in [*parts, ","]:
        if part.endswith(","):
            # Ensure we can catch "foo, bar" as well as "foo , bar"
            last_subquery_part = part[:-1]
@@ -20,7 +20,7 @@ import re
import time
import typing
from abc import ABC
-from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast
+from typing import TYPE_CHECKING, Any, ClassVar, Generic, TypeVar, cast

import beets
from beets import util
@@ -406,7 +406,7 @@ class MusicalKey(String):
    The standard format is C, Cm, C#, C#m, etc.
    """

-    ENHARMONIC = {
+    ENHARMONIC: ClassVar[dict[str, str]] = {
        r"db": "c#",
        r"eb": "d#",
        r"gb": "f#",
@@ -28,11 +28,11 @@ from .tasks import (
# Note: Stages are not exposed to the public API

__all__ = [
-    "ImportSession",
-    "ImportAbortError",
    "Action",
-    "ImportTask",
    "ArchiveImportTask",
+    "ImportAbortError",
+    "ImportSession",
+    "ImportTask",
    "SentinelImportTask",
    "SingletonImportTask",
]
@@ -15,9 +15,9 @@ from __future__ import annotations

import os
import time
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING

-from beets import config, dbcore, library, logging, plugins, util
+from beets import config, logging, plugins, util
from beets.importer.tasks import Action
from beets.util import displayable_path, normpath, pipeline, syspath
@@ -25,6 +25,9 @@ from . import stages as stagefuncs
from .state import ImportState

if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+    from beets import dbcore, library
    from beets.util import PathBytes

    from .tasks import ImportTask
@@ -16,7 +16,7 @@ from __future__ import annotations

import itertools
import logging
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING

from beets import config, plugins
from beets.util import MoveOperation, displayable_path, pipeline
@@ -30,6 +30,8 @@ from .tasks import (
)

if TYPE_CHECKING:
+    from collections.abc import Callable
+
    from beets import library

    from .session import ImportSession
@@ -386,5 +388,5 @@ def _extend_pipeline(tasks, *stages):
    else:
        task_iter = tasks

-    ipl = pipeline.Pipeline([task_iter] + list(stages))
+    ipl = pipeline.Pipeline([task_iter, *list(stages)])
    return pipeline.multiple(ipl.pull())
@@ -20,9 +20,10 @@ import re
import shutil
import time
from collections import defaultdict
+from collections.abc import Callable
from enum import Enum
from tempfile import mkdtemp
-from typing import TYPE_CHECKING, Any, Callable, Iterable, Sequence
+from typing import TYPE_CHECKING, Any

import mediafile
@@ -32,6 +33,8 @@ from beets.dbcore.query import PathQuery
from .state import ImportState

if TYPE_CHECKING:
+    from collections.abc import Iterable, Sequence
+
    from beets.autotag.match import Recommendation

    from .session import ImportSession
@@ -231,7 +234,7 @@ class ImportTask(BaseImportTask):
        or APPLY (in which case the data comes from the choice).
        """
        if self.choice_flag in (Action.ASIS, Action.RETAG):
-            likelies, consensus = util.get_most_common_tags(self.items)
+            likelies, _ = util.get_most_common_tags(self.items)
            return likelies
        elif self.choice_flag is Action.APPLY and self.match:
            return self.match.info.copy()
@@ -244,21 +247,21 @@ class ImportTask(BaseImportTask):
        matched items.
        """
        if self.choice_flag in (Action.ASIS, Action.RETAG):
-            return list(self.items)
+            return self.items
        elif self.choice_flag == Action.APPLY and isinstance(
            self.match, autotag.AlbumMatch
        ):
-            return list(self.match.mapping.keys())
+            return self.match.items
        else:
-            assert False
+            return []

    def apply_metadata(self):
        """Copy metadata from match info to the items."""
        if config["import"]["from_scratch"]:
-            for item in self.match.mapping:
+            for item in self.match.items:
                item.clear()

-        autotag.apply_metadata(self.match.info, self.match.mapping)
+        autotag.apply_metadata(self.match.info, self.match.item_info_pairs)

    def duplicate_items(self, lib: library.Library):
        duplicate_items = []
@@ -677,6 +680,8 @@ class SingletonImportTask(ImportTask):
        return [self.item]

    def apply_metadata(self):
+        if config["import"]["from_scratch"]:
+            self.item.clear()
        autotag.apply_item_metadata(self.item, self.match.info)

    def _emit_imported(self, lib):
@@ -889,7 +894,7 @@ class ArchiveImportTask(SentinelImportTask):
            # The (0, 0, -1) is added to date_time because the
            # function time.mktime expects a 9-element tuple.
            # The -1 indicates that the DST flag is unknown.
-            date_time = time.mktime(f.date_time + (0, 0, -1))
+            date_time = time.mktime((*f.date_time, 0, 0, -1))
            fullpath = os.path.join(extract_to, f.filename)
            os.utime(fullpath, (date_time, date_time))
@@ -1,4 +1,4 @@
-from beets.util import deprecate_imports
+from beets.util.deprecation import deprecate_imports

from .exceptions import FileOperationError, ReadError, WriteError
from .library import Library
@@ -13,17 +13,17 @@ NEW_MODULE_BY_NAME = dict.fromkeys(


def __getattr__(name: str):
-    return deprecate_imports(__name__, NEW_MODULE_BY_NAME, name, "3.0.0")
+    return deprecate_imports(__name__, NEW_MODULE_BY_NAME, name)


__all__ = [
-    "Library",
-    "LibModel",
    "Album",
-    "Item",
-    "parse_query_parts",
-    "parse_query_string",
    "FileOperationError",
+    "Item",
+    "LibModel",
+    "Library",
    "ReadError",
    "WriteError",
+    "parse_query_parts",
+    "parse_query_string",
]
@@ -125,24 +125,20 @@ class Library(dbcore.Database):
        return self._fetch(Item, query, sort or self.get_default_item_sort())

    # Convenience accessors.

-    def get_item(self, id):
+    def get_item(self, id_: int) -> Item | None:
        """Fetch a :class:`Item` by its ID.

        Return `None` if no match is found.
        """
-        return self._get(Item, id)
+        return self._get(Item, id_)

-    def get_album(self, item_or_id):
+    def get_album(self, item_or_id: Item | int) -> Album | None:
        """Given an album ID or an item associated with an album, return
        a :class:`Album` object for the album.

        If no such album exists, return `None`.
        """
-        if isinstance(item_or_id, int):
-            album_id = item_or_id
-        else:
-            album_id = item_or_id.album_id
-        if album_id is None:
-            return None
-        return self._get(Album, album_id)
+        album_id = (
+            item_or_id if isinstance(item_or_id, int) else item_or_id.album_id
+        )
+        return self._get(Album, album_id) if album_id else None
@@ -7,7 +7,7 @@ import time
import unicodedata
from functools import cached_property
from pathlib import Path
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, ClassVar

from mediafile import MediaFile, UnreadableFileError
@@ -40,6 +40,7 @@ class LibModel(dbcore.Model["Library"]):
    # Config key that specifies how an instance should be formatted.
    _format_config_key: str
    path: bytes
+    length: float

    @cached_classproperty
    def _types(cls) -> dict[str, types.Type]:
@@ -229,7 +230,7 @@ class Album(LibModel):
    _table = "albums"
    _flex_table = "album_attributes"
    _always_dirty = True
-    _fields = {
+    _fields: ClassVar[dict[str, types.Type]] = {
        "id": types.PRIMARY_ID,
        "artpath": types.NullPathType(),
        "added": types.DATE,
@@ -281,13 +282,13 @@ class Album(LibModel):
    def _types(cls) -> dict[str, types.Type]:
        return {**super()._types, "path": types.PathType()}

-    _sorts = {
+    _sorts: ClassVar[dict[str, type[dbcore.query.FieldSort]]] = {
        "albumartist": dbcore.query.SmartArtistSort,
        "artist": dbcore.query.SmartArtistSort,
    }

    # List of keys that are set on an album's items.
-    item_keys = [
+    item_keys: ClassVar[list[str]] = [
        "added",
        "albumartist",
        "albumartists",
@@ -616,13 +617,20 @@ class Album(LibModel):
        for item in self.items():
            item.try_sync(write, move)

+    @cached_property
+    def length(self) -> float:  # type: ignore[override] # still writable since we override __setattr__
+        """Return the total length of all items in this album in seconds."""
+        return sum(item.length for item in self.items())


class Item(LibModel):
    """Represent a song or track."""

+    album_id: int | None

    _table = "items"
    _flex_table = "item_attributes"
-    _fields = {
+    _fields: ClassVar[dict[str, types.Type]] = {
        "id": types.PRIMARY_ID,
        "path": types.PathType(),
        "album_id": types.FOREIGN_ID,
@@ -716,6 +724,7 @@ class Item(LibModel):
        "mtime": types.DATE,
        "added": types.DATE,
    }
+    _indices = (dbcore.Index("idx_item_album_id", ("album_id",)),)

    _search_fields = (
        "artist",
@@ -742,7 +751,9 @@ class Item(LibModel):

    _formatter = FormattedItemMapping

-    _sorts = {"artist": dbcore.query.SmartArtistSort}
+    _sorts: ClassVar[dict[str, type[dbcore.query.FieldSort]]] = {
+        "artist": dbcore.query.SmartArtistSort
+    }

    @cached_classproperty
    def _queries(cls) -> dict[str, FieldQueryType]:
@@ -1143,7 +1154,6 @@ class Item(LibModel):
        If `store` is `False` however, the item won't be stored and it will
        have to be manually stored after invoking this method.
        """
-        self._check_db()
        dest = self.destination(basedir=basedir)

        # Create necessary ancestry for the move.
@@ -1183,9 +1193,8 @@ class Item(LibModel):
        is true, returns just the fragment of the path underneath the library
        base directory.
        """
-        db = self._check_db()
-        basedir = basedir or db.directory
-        path_formats = path_formats or db.path_formats
+        basedir = basedir or self.db.directory
+        path_formats = path_formats or self.db.path_formats

        # Use a path format based on a query, falling back on the
        # default.
@@ -1224,7 +1233,7 @@ class Item(LibModel):
        )

        lib_path_str, fallback = util.legalize_path(
-            subpath, db.replacements, self.filepath.suffix
+            subpath, self.db.replacements, self.filepath.suffix
        )
        if fallback:
            # Print an error message if legalization fell back to
@@ -22,6 +22,7 @@ calls (`debug`, `info`, etc).

from __future__ import annotations

+import re
import threading
from copy import copy
from logging import (
@@ -34,10 +35,25 @@ from logging import (
    Handler,
    Logger,
    NullHandler,
-    RootLogger,
    StreamHandler,
)
-from typing import TYPE_CHECKING, Any, Mapping, TypeVar, Union, overload
+from typing import TYPE_CHECKING, Any, TypeVar, overload

+if TYPE_CHECKING:
+    from collections.abc import Mapping
+    from logging import RootLogger
+    from types import TracebackType

+T = TypeVar("T")

+# see https://github.com/python/typeshed/blob/main/stdlib/logging/__init__.pyi
+_SysExcInfoType = (
+    tuple[type[BaseException], BaseException, TracebackType | None]
+    | tuple[None, None, None]
+)
+_ExcInfoType = _SysExcInfoType | BaseException | bool | None
+_ArgsType = tuple[object, ...] | Mapping[str, object]


__all__ = [
    "DEBUG",
@@ -53,17 +69,13 @@ __all__ = [
    "getLogger",
]

-if TYPE_CHECKING:
-    T = TypeVar("T")
-    from types import TracebackType
-
-    # see https://github.com/python/typeshed/blob/main/stdlib/logging/__init__.pyi
-    _SysExcInfoType = Union[
-        tuple[type[BaseException], BaseException, Union[TracebackType, None]],
-        tuple[None, None, None],
-    ]
-    _ExcInfoType = Union[None, bool, _SysExcInfoType, BaseException]
-    _ArgsType = Union[tuple[object, ...], Mapping[str, object]]
+# Regular expression to match:
+# - C0 control characters (0x00-0x1F) except useful whitespace (\t, \n, \r)
+# - DEL control character (0x7f)
+# - C1 control characters (0x80-0x9F)
+# Used to sanitize log messages that could disrupt terminal output
+_CONTROL_CHAR_REGEX = re.compile(r"[\x00-\x08\x0b\x0c\x0e-\x1f\x7f\x80-\x9f]")
+_UNICODE_REPLACEMENT_CHARACTER = "\ufffd"


def _logsafe(val: T) -> str | T:
@@ -80,6 +92,10 @@ def _logsafe(val: T) -> str | T:
        # type, and (b) warn the developer if they do this for other
        # bytestrings.
        return val.decode("utf-8", "replace")
+    if isinstance(val, str):
+        # Sanitize log messages by replacing control characters that can disrupt
+        # terminals.
+        return _CONTROL_CHAR_REGEX.sub(_UNICODE_REPLACEMENT_CHARACTER, val)

    # Other objects are used as-is so field access, etc., still works in
    # the format string. Relies on a working __str__ implementation.
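A quick illustration of the sanitizing behavior added above, reusing the
exact regex and replacement character from the diff (the commented output is
my expectation, not captured program output):

    import re

    _CONTROL_CHAR_REGEX = re.compile(r"[\x00-\x08\x0b\x0c\x0e-\x1f\x7f\x80-\x9f]")
    _UNICODE_REPLACEMENT_CHARACTER = "\ufffd"

    title = "tricky\x1b[2Jtitle"  # embeds an ANSI escape sequence (ESC, 0x1b)
    print(_CONTROL_CHAR_REGEX.sub(_UNICODE_REPLACEMENT_CHARACTER, title))
    # ESC is replaced with U+FFFD; tabs, newlines, and normal text pass through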
@@ -13,17 +13,11 @@
# included in all copies or substantial portions of the Software.


-import warnings
-
import mediafile

-warnings.warn(
-    "beets.mediafile is deprecated; use mediafile instead",
-    # Show the location of the `import mediafile` statement as the warning's
-    # source, rather than this file, such that the offending module can be
-    # identified easily.
-    stacklevel=2,
-)
+from .util.deprecation import deprecate_for_maintainers
+
+deprecate_for_maintainers("'beets.mediafile'", "'mediafile'", stacklevel=2)

# Import everything from the mediafile module into this module.
for key, value in mediafile.__dict__.items():
@@ -31,4 +25,4 @@ for key, value in mediafile.__dict__.items():
    globals()[key] = value

# Cleanup namespace.
-del key, value, warnings, mediafile
+del key, value, mediafile
@@ -9,69 +9,98 @@ from __future__ import annotations

import abc
import re
-from functools import cache, cached_property
-from typing import TYPE_CHECKING, Generic, Literal, Sequence, TypedDict, TypeVar
+from contextlib import contextmanager, nullcontext
+from functools import cache, cached_property, wraps
+from typing import TYPE_CHECKING, Generic, Literal, TypedDict, TypeVar

import unidecode
from confuse import NotFoundError
from typing_extensions import NotRequired

from beets import config, logging
from beets.util import cached_classproperty
from beets.util.id_extractors import extract_release_id

-from .plugins import BeetsPlugin, find_plugins, notify_info_yielded, send
+from .plugins import BeetsPlugin, find_plugins, notify_info_yielded

if TYPE_CHECKING:
-    from collections.abc import Iterable
+    from collections.abc import Callable, Iterable, Iterator, Sequence

    from .autotag.hooks import AlbumInfo, Item, TrackInfo

+Ret = TypeVar("Ret")

# Global logger.
log = logging.getLogger("beets")


@cache
def find_metadata_source_plugins() -> list[MetadataSourcePlugin]:
    """Return a list of all loaded metadata source plugins."""
    # TODO: Make this an isinstance(MetadataSourcePlugin, ...) check in v3.0.0
    # This should also allow us to remove the type: ignore comments below.
    return [p for p in find_plugins() if hasattr(p, "data_source")]  # type: ignore[misc]


+@contextmanager
+def handle_plugin_error(plugin: MetadataSourcePlugin, method_name: str):
+    """Safely call a plugin method, catching and logging exceptions."""
+    try:
+        yield
+    except Exception as e:
+        log.error("Error in '{}.{}': {}", plugin.data_source, method_name, e)
+        log.debug("Exception details:", exc_info=True)
+
+
+def _yield_from_plugins(
+    func: Callable[..., Iterable[Ret]],
+) -> Callable[..., Iterator[Ret]]:
+    method_name = func.__name__
+
+    @wraps(func)
+    def wrapper(*args, **kwargs) -> Iterator[Ret]:
+        for plugin in find_metadata_source_plugins():
+            method = getattr(plugin, method_name)
+            with (
+                nullcontext()
+                if config["raise_on_error"]
+                else handle_plugin_error(plugin, method_name)
+            ):
+                yield from filter(None, method(*args, **kwargs))
+
+    return wrapper


@notify_info_yielded("albuminfo_received")
-def candidates(*args, **kwargs) -> Iterable[AlbumInfo]:
-    """Return matching album candidates from all metadata source plugins."""
-    for plugin in find_metadata_source_plugins():
-        yield from plugin.candidates(*args, **kwargs)
+@_yield_from_plugins
+def candidates(*args, **kwargs) -> Iterator[AlbumInfo]:
+    yield from ()


@notify_info_yielded("trackinfo_received")
-def item_candidates(*args, **kwargs) -> Iterable[TrackInfo]:
-    """Return matching track candidates from all metadata source plugins."""
-    for plugin in find_metadata_source_plugins():
-        yield from plugin.item_candidates(*args, **kwargs)
+@_yield_from_plugins
+def item_candidates(*args, **kwargs) -> Iterator[TrackInfo]:
+    yield from ()


+@notify_info_yielded("albuminfo_received")
+@_yield_from_plugins
+def albums_for_ids(*args, **kwargs) -> Iterator[AlbumInfo]:
+    yield from ()
+
+
+@notify_info_yielded("trackinfo_received")
+@_yield_from_plugins
+def tracks_for_ids(*args, **kwargs) -> Iterator[TrackInfo]:
+    yield from ()


def album_for_id(_id: str) -> AlbumInfo | None:
    """Get AlbumInfo object for the given ID string.

    A single ID can yield just a single album, so we return the first match.
    """
-    for plugin in find_metadata_source_plugins():
-        if info := plugin.album_for_id(album_id=_id):
-            send("albuminfo_received", info=info)
-            return info
-
-    return None
+    return next(albums_for_ids([_id]), None)


def track_for_id(_id: str) -> TrackInfo | None:
    """Get TrackInfo object for the given ID string.

    A single ID can yield just a single track, so we return the first match.
    """
-    for plugin in find_metadata_source_plugins():
-        if info := plugin.track_for_id(_id):
-            send("trackinfo_received", info=info)
-            return info
-
-    return None
+    return next(tracks_for_ids([_id]), None)


@cache
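A self-contained sketch of the dispatch pattern introduced above: iterate
all plugins, call the same-named method on each, and either log failures or
re-raise depending on a switch. ``PLUGINS`` and ``RAISE_ON_ERROR`` here are
stand-ins, not beets APIs:

    from contextlib import contextmanager, nullcontext
    from functools import wraps

    PLUGINS: list = []      # stand-in for find_metadata_source_plugins()
    RAISE_ON_ERROR = False  # stand-in for config["raise_on_error"]

    @contextmanager
    def _log_errors(plugin, method_name):
        try:
            yield
        except Exception as exc:
            print(f"Error in {plugin!r}.{method_name}: {exc}")

    def yield_from_plugins(func):
        method_name = func.__name__

        @wraps(func)
        def wrapper(*args, **kwargs):
            for plugin in PLUGINS:
                method = getattr(plugin, method_name)
                ctx = (
                    nullcontext()
                    if RAISE_ON_ERROR
                    else _log_errors(plugin, method_name)
                )
                with ctx:
                    # Drop falsy results, exactly like filter(None, ...) above.
                    yield from filter(None, method(*args, **kwargs))

        return wrapper

The decorated function's body is never executed (hence ``yield from ()`` in
the real module); it exists only to carry the name and signature.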
|||
119
beets/plugins.py
119
beets/plugins.py
|
|
@@ -20,12 +20,10 @@ import abc
import inspect
import re
import sys
import warnings
from collections import defaultdict
from functools import cached_property, wraps
from importlib import import_module
from pathlib import Path
from types import GenericAlias
from typing import TYPE_CHECKING, Any, ClassVar, Literal, TypeVar

import mediafile

@@ -34,9 +32,10 @@ from typing_extensions import ParamSpec
import beets
from beets import logging
from beets.util import unique_list
from beets.util.deprecation import deprecate_for_maintainers, deprecate_for_user

if TYPE_CHECKING:
    from collections.abc import Callable, Iterable, Sequence
    from collections.abc import Callable, Iterable, Iterator, Sequence

    from confuse import ConfigView

@@ -59,7 +58,6 @@ if TYPE_CHECKING:
    P = ParamSpec("P")
    Ret = TypeVar("Ret", bound=Any)
    Listener = Callable[..., Any]
    IterF = Callable[P, Iterable[Ret]]


PLUGIN_NAMESPACE = "beetsplug"

@@ -72,6 +70,7 @@ EventType = Literal[
    "album_imported",
    "album_removed",
    "albuminfo_received",
    "album_matched",
    "before_choose_candidate",
    "before_item_moved",
    "cli_exit",

@@ -141,7 +140,13 @@ class PluginLogFilter(logging.Filter):
# Managing the plugins themselves.


class BeetsPlugin(metaclass=abc.ABCMeta):
class BeetsPluginMeta(abc.ABCMeta):
    template_funcs: ClassVar[TFuncMap[str]] = {}
    template_fields: ClassVar[TFuncMap[Item]] = {}
    album_template_fields: ClassVar[TFuncMap[Album]] = {}


class BeetsPlugin(metaclass=BeetsPluginMeta):
    """The base class for all beets plugins. Plugins provide
    functionality by defining a subclass of BeetsPlugin and overriding
    the abstract methods defined here.

@@ -151,9 +156,10 @@ class BeetsPlugin(metaclass=abc.ABCMeta):
        list
    )
    listeners: ClassVar[dict[EventType, list[Listener]]] = defaultdict(list)
    template_funcs: TFuncMap[str] | None = None
    template_fields: TFuncMap[Item] | None = None
    album_template_fields: TFuncMap[Album] | None = None

    template_funcs: TFuncMap[str]
    template_fields: TFuncMap[Item]
    album_template_fields: TFuncMap[Album]

    name: str
    config: ConfigView

@@ -161,7 +167,7 @@ class BeetsPlugin(metaclass=abc.ABCMeta):
    import_stages: list[ImportStageFunc]

    def __init_subclass__(cls) -> None:
        """Enable legacy metadata‐source plugins to work with the new interface.
        """Enable legacy metadata source plugins to work with the new interface.

        When a plugin subclass of BeetsPlugin defines a `data_source` attribute
        but does not inherit from MetadataSourcePlugin, this hook:

@@ -184,11 +190,12 @@ class BeetsPlugin(metaclass=abc.ABCMeta):
        ):
            return

        warnings.warn(
            f"{cls.__name__} is used as a legacy metadata source. "
            "It should extend MetadataSourcePlugin instead of BeetsPlugin. "
            "Support for this will be removed in the v3.0.0 release!",
            DeprecationWarning,
        deprecate_for_maintainers(
            (
                f"'{cls.__name__}' is used as a legacy metadata source since it"
                " inherits 'beets.plugins.BeetsPlugin'. Support for this"
            ),
            "'beets.metadata_plugins.MetadataSourcePlugin'",
            stacklevel=3,
        )

@@ -219,14 +226,10 @@ class BeetsPlugin(metaclass=abc.ABCMeta):
        self.name = name or self.__module__.split(".")[-1]
        self.config = beets.config[self.name]

        # Set class attributes if they are not already set
        # for the type of plugin.
        if not self.template_funcs:
            self.template_funcs = {}
        if not self.template_fields:
            self.template_fields = {}
        if not self.album_template_fields:
            self.album_template_fields = {}
        # create per-instance storage for template fields and functions
        self.template_funcs = {}
        self.template_fields = {}
        self.album_template_fields = {}

        self.early_import_stages = []
        self.import_stages = []

@@ -256,16 +259,19 @@ class BeetsPlugin(metaclass=abc.ABCMeta):
        ):
            return

        message = (
            "'source_weight' configuration option is deprecated and will be"
            " removed in v3.0.0. Use 'data_source_mismatch_penalty' instead"
        )
        for source in self.config.root().sources:
            if "source_weight" in (source.get(self.name) or {}):
                if source.filename:  # user config
                    self._log.warning(message)
                    deprecate_for_user(
                        self._log,
                        f"'{self.name}.source_weight' configuration option",
                        f"'{self.name}.data_source_mismatch_penalty'",
                    )
                else:  # 3rd-party plugin config
                    warnings.warn(message, DeprecationWarning, stacklevel=0)
                    deprecate_for_maintainers(
                        "'source_weight' configuration option",
                        "'data_source_mismatch_penalty'",
                    )

    def commands(self) -> Sequence[Subcommand]:
        """Should return a list of beets.ui.Subcommand objects for

@@ -368,8 +374,6 @@ class BeetsPlugin(metaclass=abc.ABCMeta):
        """

        def helper(func: TFunc[str]) -> TFunc[str]:
            if cls.template_funcs is None:
                cls.template_funcs = {}
            cls.template_funcs[name] = func
            return func

@@ -384,8 +388,6 @@ class BeetsPlugin(metaclass=abc.ABCMeta):
        """

        def helper(func: TFunc[Item]) -> TFunc[Item]:
            if cls.template_fields is None:
                cls.template_fields = {}
            cls.template_fields[name] = func
            return func
@@ -414,16 +416,22 @@ def get_plugin_names() -> list[str]:
    # *contain* a `beetsplug` package.
    sys.path += paths
    plugins = unique_list(beets.config["plugins"].as_str_seq())
    # TODO: Remove in v3.0.0
    if (
        "musicbrainz" not in plugins
        and "musicbrainz" in beets.config
        and beets.config["musicbrainz"].get().get("enabled")
    ):
        plugins.append("musicbrainz")

    beets.config.add({"disabled_plugins": []})
    disabled_plugins = set(beets.config["disabled_plugins"].as_str_seq())
    # TODO: Remove in v3.0.0
    mb_enabled = beets.config["musicbrainz"].flatten().get("enabled")
    if mb_enabled:
        deprecate_for_user(
            log,
            "'musicbrainz.enabled' configuration option",
            "'plugins' configuration to explicitly add 'musicbrainz'",
        )
        if "musicbrainz" not in plugins:
            plugins.append("musicbrainz")
    elif mb_enabled is False:
        deprecate_for_user(log, "'musicbrainz.enabled' configuration option")
        disabled_plugins.add("musicbrainz")

    return [p for p in plugins if p not in disabled_plugins]
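A runnable sketch of the precedence the new `get_plugin_names` applies, with plain lists standing in for beets' confuse-backed config (the helper name is illustrative): the deprecated `musicbrainz.enabled` flag can still force the plugin on or off, and `disabled_plugins` filters the final list.

def resolve_plugins(requested, disabled, mb_enabled=None):
    """Mirror get_plugin_names: a legacy musicbrainz.enabled flag maps
    onto the plugins/disabled_plugins mechanism, then disabled names
    are filtered out."""
    plugins = list(dict.fromkeys(requested))  # de-duplicate, keep order
    disabled = set(disabled)
    if mb_enabled:  # deprecated: musicbrainz.enabled: yes
        if "musicbrainz" not in plugins:
            plugins.append("musicbrainz")
    elif mb_enabled is False:  # deprecated: musicbrainz.enabled: no
        disabled.add("musicbrainz")
    return [p for p in plugins if p not in disabled]

assert resolve_plugins(["fetchart"], [], mb_enabled=True) == ["fetchart", "musicbrainz"]
assert resolve_plugins(["fetchart", "musicbrainz"], [], mb_enabled=False) == ["fetchart"]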
@@ -449,9 +457,6 @@ def _get_plugin(name: str) -> BeetsPlugin | None:
    for obj in reversed(namespace.__dict__.values()):
        if (
            inspect.isclass(obj)
            and not isinstance(
                obj, GenericAlias
            )  # seems to be needed for python <= 3.9 only
            and issubclass(obj, BeetsPlugin)
            and obj != BeetsPlugin
            and not inspect.isabstract(obj)

@@ -542,7 +547,7 @@ def named_queries(model_cls: type[AnyModel]) -> dict[str, FieldQueryType]:

def notify_info_yielded(
    event: EventType,
) -> Callable[[IterF[P, Ret]], IterF[P, Ret]]:
) -> Callable[[Callable[P, Iterable[Ret]]], Callable[P, Iterator[Ret]]]:
    """Makes a generator send the event 'event' every time it yields.
    This decorator is supposed to decorate a generator, but any function
    returning an iterable should work.
@@ -550,9 +555,11 @@ def notify_info_yielded(
    'send'.
    """

    def decorator(func: IterF[P, Ret]) -> IterF[P, Ret]:
    def decorator(
        func: Callable[P, Iterable[Ret]],
    ) -> Callable[P, Iterator[Ret]]:
        @wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> Iterable[Ret]:
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> Iterator[Ret]:
            for v in func(*args, **kwargs):
                send(event, info=v)
                yield v
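The retyping above matters because the wrapper is itself a generator, so the decorated function always returns an Iterator even when the wrapped function returned a plain list. A minimal standalone sketch with a stub `send` in place of beets' event dispatcher:

from collections.abc import Iterable, Iterator
from functools import wraps

events: list[tuple[str, int]] = []

def send(event: str, info: int) -> None:
    events.append((event, info))  # stand-in for beets.plugins.send

def notify_info_yielded(event: str):
    """Fire `event` for every value the wrapped function produces,
    re-yielding each value as it passes through."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs) -> Iterator:
            for v in func(*args, **kwargs):
                send(event, info=v)
                yield v
        return wrapper
    return decorator

@notify_info_yielded("trackinfo_received")
def fetch() -> Iterable[int]:
    return [1, 2]  # a plain list works: the wrapper re-yields it lazily

assert list(fetch()) == [1, 2]
assert events == [("trackinfo_received", 1), ("trackinfo_received", 2)]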
@@ -568,8 +575,7 @@ def template_funcs() -> TFuncMap[str]:
    """
    funcs: TFuncMap[str] = {}
    for plugin in find_plugins():
        if plugin.template_funcs:
            funcs.update(plugin.template_funcs)
        funcs.update(plugin.template_funcs)
    return funcs
@@ -595,21 +601,20 @@ F = TypeVar("F")


def _check_conflicts_and_merge(
    plugin: BeetsPlugin, plugin_funcs: dict[str, F] | None, funcs: dict[str, F]
    plugin: BeetsPlugin, plugin_funcs: dict[str, F], funcs: dict[str, F]
) -> None:
    """Check the provided template functions for conflicts and merge into funcs.

    Raises a `PluginConflictError` if a plugin defines template functions
    for fields that another plugin has already defined template functions for.
    """
    if plugin_funcs:
        if not plugin_funcs.keys().isdisjoint(funcs.keys()):
            conflicted_fields = ", ".join(plugin_funcs.keys() & funcs.keys())
            raise PluginConflictError(
                f"Plugin {plugin.name} defines template functions for "
                f"{conflicted_fields} that conflict with another plugin."
            )
        funcs.update(plugin_funcs)
    if not plugin_funcs.keys().isdisjoint(funcs.keys()):
        conflicted_fields = ", ".join(plugin_funcs.keys() & funcs.keys())
        raise PluginConflictError(
            f"Plugin {plugin.name} defines template functions for "
            f"{conflicted_fields} that conflict with another plugin."
        )
    funcs.update(plugin_funcs)


def item_field_getters() -> TFuncMap[Item]:
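The `if plugin_funcs:` guard could be dropped above because the template attributes are now always dicts, never None. A self-contained sketch of the conflict-then-merge behavior (the exception class is redeclared locally for illustration):

class PluginConflictError(Exception):
    """Raised when two plugins register the same template function name."""

def check_conflicts_and_merge(plugin_name: str, plugin_funcs: dict, funcs: dict) -> None:
    """Merge plugin_funcs into funcs, refusing overlapping keys."""
    if not plugin_funcs.keys().isdisjoint(funcs.keys()):
        conflicted = ", ".join(plugin_funcs.keys() & funcs.keys())
        raise PluginConflictError(
            f"Plugin {plugin_name} defines template functions for "
            f"{conflicted} that conflict with another plugin."
        )
    funcs.update(plugin_funcs)

merged: dict = {}
check_conflicts_and_merge("inline", {"upper": str.upper}, merged)
try:
    check_conflicts_and_merge("other", {"upper": str.title}, merged)
except PluginConflictError as exc:
    print(exc)  # Plugin other defines template functions for upper ...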
112  beets/random.py
@@ -1,112 +0,0 @@
# This file is part of beets.
# Copyright 2016, Philippe Mongeau.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.

"""Get a random song or album from the library."""

import random
from itertools import groupby
from operator import attrgetter


def _length(obj, album):
    """Get the duration of an item or album."""
    if album:
        return sum(i.length for i in obj.items())
    else:
        return obj.length


def _equal_chance_permutation(objs, field="albumartist", random_gen=None):
    """Generate (lazily) a permutation of the objects where every group
    with equal values for `field` has an equal chance of appearing in
    any given position.
    """
    rand = random_gen or random

    # Group the objects by artist so we can sample from them.
    key = attrgetter(field)
    objs.sort(key=key)
    objs_by_artists = {}
    for artist, v in groupby(objs, key):
        objs_by_artists[artist] = list(v)

    # While we still have artists with music to choose from, pick one
    # randomly and pick a track from that artist.
    while objs_by_artists:
        # Choose an artist and an object for that artist, removing
        # this choice from the pool.
        artist = rand.choice(list(objs_by_artists.keys()))
        objs_from_artist = objs_by_artists[artist]
        i = rand.randint(0, len(objs_from_artist) - 1)
        yield objs_from_artist.pop(i)

        # Remove the artist if we've used up all of its objects.
        if not objs_from_artist:
            del objs_by_artists[artist]


def _take(iter, num):
    """Return a list containing the first `num` values in `iter` (or
    fewer, if the iterable ends early).
    """
    out = []
    for val in iter:
        out.append(val)
        num -= 1
        if num <= 0:
            break
    return out


def _take_time(iter, secs, album):
    """Return a list containing the first values in `iter`, which should
    be Item or Album objects, that add up to the given amount of time in
    seconds.
    """
    out = []
    total_time = 0.0
    for obj in iter:
        length = _length(obj, album)
        if total_time + length <= secs:
            out.append(obj)
            total_time += length
    return out


def random_objs(
    objs, album, number=1, time=None, equal_chance=False, random_gen=None
):
    """Get a random subset of the provided `objs`.

    If `number` is provided, produce that many matches. Otherwise, if
    `time` is provided, instead select a list whose total time is close
    to that number of minutes. If `equal_chance` is true, give each
    artist an equal chance of being included so that artists with more
    songs are not represented disproportionately.
    """
    rand = random_gen or random

    # Permute the objects either in a straightforward way or an
    # artist-balanced way.
    if equal_chance:
        perm = _equal_chance_permutation(objs)
    else:
        perm = objs
        rand.shuffle(perm)  # N.B. This shuffles the original list.

    # Select objects by time or count.
    if time:
        return _take_time(perm, time * 60, album)
    else:
        return _take(perm, number)
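A toy illustration of why the removed `_equal_chance_permutation` grouped by artist before sampling (self-contained; the Track tuple and counts are made up for the demo):

import random
from collections import namedtuple

Track = namedtuple("Track", "albumartist title")
tracks = [Track("A", t) for t in "12345"] + [Track("B", "x")]

# Group once, as _equal_chance_permutation does, then draw the artist
# uniformly: artists "A" (5 tracks) and "B" (1 track) each get a 50%
# shot at the first slot instead of 5-in-6 vs 1-in-6.
by_artist: dict[str, list[Track]] = {}
for t in tracks:
    by_artist.setdefault(t.albumartist, []).append(t)

rng = random.Random(42)
first = [rng.choice(sorted(by_artist)) for _ in range(1000)]
print(first.count("A") / 1000)  # ~0.5, not ~0.83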
@@ -107,7 +107,11 @@ def item(lib=None, **kwargs):

# Dummy import session.
def import_session(lib=None, loghandler=None, paths=[], query=[], cli=False):
    cls = commands.TerminalImportSession if cli else importer.ImportSession
    cls = (
        commands.import_.session.TerminalImportSession
        if cli
        else importer.ImportSession
    )
    return cls(lib, loghandler, paths, query)
@@ -54,7 +54,7 @@ from beets.autotag.hooks import AlbumInfo, TrackInfo
from beets.importer import ImportSession
from beets.library import Item, Library
from beets.test import _common
from beets.ui.commands import TerminalImportSession
from beets.ui.commands.import_.session import TerminalImportSession
from beets.util import (
    MoveOperation,
    bytestring_path,
@@ -120,7 +120,7 @@ def capture_stdout():

def has_program(cmd, args=["--version"]):
    """Returns `True` if `cmd` can be executed."""
    full_cmd = [cmd] + args
    full_cmd = [cmd, *args]
    try:
        with open(os.devnull, "wb") as devnull:
            subprocess.check_call(
@@ -364,15 +364,17 @@ class TestHelper(ConfigMixin):
        items.append(item)
        return self.lib.add_album(items)

    def create_mediafile_fixture(self, ext="mp3", images=[]):
    def create_mediafile_fixture(self, ext="mp3", images=[], target_dir=None):
        """Copy a fixture mediafile with the extension to `temp_dir`.

        `images` is a subset of 'png', 'jpg', and 'tiff'. For each
        specified extension a cover art image is added to the media
        file.
        """
        if not target_dir:
            target_dir = self.temp_dir
        src = os.path.join(_common.RSRC, util.bytestring_path(f"full.{ext}"))
        handle, path = mkstemp(dir=self.temp_dir)
        handle, path = mkstemp(dir=target_dir)
        path = bytestring_path(path)
        os.close(handle)
        shutil.copyfile(syspath(src), syspath(path))
@@ -524,7 +526,7 @@ class ImportHelper(TestHelper):
    autotagging library and several assertions for the library.
    """

    default_import_config = {
    default_import_config: ClassVar[dict[str, bool]] = {
        "autotag": True,
        "copy": True,
        "hardlink": False,
@@ -880,7 +882,7 @@ class FetchImageHelper:
    def run(self, *args, **kwargs):
        super().run(*args, **kwargs)

    IMAGEHEADER: dict[str, bytes] = {
    IMAGEHEADER: ClassVar[dict[str, bytes]] = {
        "image/jpeg": b"\xff\xd8\xff\x00\x00\x00JFIF",
        "image/png": b"\211PNG\r\n\032\n",
        "image/gif": b"GIF89a",
@@ -23,16 +23,15 @@ import errno
import optparse
import os.path
import re
import shutil
import sqlite3
import struct
import sys
import textwrap
import traceback
import warnings
from difflib import SequenceMatcher
from functools import cache
from itertools import chain
from typing import Any, Callable, Literal
from typing import TYPE_CHECKING, Any, Literal

import confuse

@@ -40,8 +39,15 @@ from beets import config, library, logging, plugins, util
from beets.dbcore import db
from beets.dbcore import query as db_query
from beets.util import as_string
from beets.util.deprecation import deprecate_for_maintainers
from beets.util.functemplate import template

if TYPE_CHECKING:
    from collections.abc import Callable, Iterable

    from beets.dbcore.db import FormattedMapping


# On Windows platforms, use colorama to support "ANSI" terminal colors.
if sys.platform == "win32":
    try:
@@ -111,11 +117,7 @@ def decargs(arglist):
    .. deprecated:: 2.4.0
        This function will be removed in 3.0.0.
    """
    warnings.warn(
        "decargs() is deprecated and will be removed in version 3.0.0.",
        DeprecationWarning,
        stacklevel=2,
    )
    deprecate_for_maintainers("'beets.ui.decargs'")
    return arglist
@@ -470,13 +472,13 @@ CODE_BY_COLOR = {
    "normal": 0,
    "bold": 1,
    "faint": 2,
    # "italic": 3,
    "italic": 3,
    "underline": 4,
    # "blink_slow": 5,
    # "blink_rapid": 6,
    "blink_slow": 5,
    "blink_rapid": 6,
    "inverse": 7,
    # "conceal": 8,
    # "crossed_out": 9
    "conceal": 8,
    "crossed_out": 9,
    # Text colors.
    "black": 30,
    "red": 31,

@@ -486,6 +488,14 @@ CODE_BY_COLOR = {
    "magenta": 35,
    "cyan": 36,
    "white": 37,
    "bright_black": 90,
    "bright_red": 91,
    "bright_green": 92,
    "bright_yellow": 93,
    "bright_blue": 94,
    "bright_magenta": 95,
    "bright_cyan": 96,
    "bright_white": 97,
    # Background colors.
    "bg_black": 40,
    "bg_red": 41,

@@ -495,6 +505,14 @@ CODE_BY_COLOR = {
    "bg_magenta": 45,
    "bg_cyan": 46,
    "bg_white": 47,
    "bg_bright_black": 100,
    "bg_bright_red": 101,
    "bg_bright_green": 102,
    "bg_bright_yellow": 103,
    "bg_bright_blue": 104,
    "bg_bright_magenta": 105,
    "bg_bright_cyan": 106,
    "bg_bright_white": 107,
}
RESET_COLOR = f"{COLOR_ESCAPE}[39;49;00m"
# Precompile common ANSI-escape regex patterns
@@ -699,27 +717,11 @@ def get_replacements():
    return replacements


def term_width():
@cache
def term_width() -> int:
    """Get the width (columns) of the terminal."""
    fallback = config["ui"]["terminal_width"].get(int)

    # The fcntl and termios modules are not available on non-Unix
    # platforms, so we fall back to a constant.
    try:
        import fcntl
        import termios
    except ImportError:
        return fallback

    try:
        buf = fcntl.ioctl(0, termios.TIOCGWINSZ, " " * 4)
    except OSError:
        return fallback
    try:
        height, width = struct.unpack("hh", buf)
    except struct.error:
        return fallback
    return width
    columns, _ = shutil.get_terminal_size(fallback=(0, 0))
    return columns if columns else config["ui"]["terminal_width"].get(int)


def split_into_lines(string, width_tuple):
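The `term_width` rewrite above replaces the fcntl/termios ioctl dance with the standard library's `shutil.get_terminal_size`. A standalone sketch of the same fallback behavior, with a constant standing in for the `ui.terminal_width` config lookup:

import shutil
from functools import cache

CONFIGURED_FALLBACK = 80  # stands in for config["ui"]["terminal_width"]

@cache  # the width is looked up once and memoized for the process
def term_width() -> int:
    # shutil.get_terminal_size consults $COLUMNS, then the tty ioctl;
    # with fallback=(0, 0) a failed lookup yields columns == 0.
    columns, _ = shutil.get_terminal_size(fallback=(0, 0))
    return columns if columns else CONFIGURED_FALLBACK

print(term_width())  # real width on a tty, 80 when detection fails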
@@ -1045,42 +1047,47 @@ def print_newline_layout(
FLOAT_EPSILON = 0.01


def _field_diff(field, old, old_fmt, new, new_fmt):
def _field_diff(
    field: str, old: FormattedMapping, new: FormattedMapping
) -> str | None:
    """Given two Model objects and their formatted views, format their values
    for `field` and highlight changes among them. Return a human-readable
    string. If the value has not changed, return None instead.
    """
    oldval = old.get(field)
    newval = new.get(field)

    # If no change, abort.
    if (
    if (oldval := old.model.get(field)) == (newval := new.model.get(field)) or (
        isinstance(oldval, float)
        and isinstance(newval, float)
        and abs(oldval - newval) < FLOAT_EPSILON
    ):
        return None
    elif oldval == newval:
        return None

    # Get formatted values for output.
    oldstr = old_fmt.get(field, "")
    newstr = new_fmt.get(field, "")
    oldstr, newstr = old.get(field, ""), new.get(field, "")
    if field not in new:
        return colorize("text_diff_removed", f"{field}: {oldstr}")

    if field not in old:
        return colorize("text_diff_added", f"{field}: {newstr}")

    # For strings, highlight changes. For others, colorize the whole
    # thing.
    if isinstance(oldval, str):
        oldstr, newstr = colordiff(oldval, newstr)
        oldstr, newstr = colordiff(oldstr, newstr)
    else:
        oldstr = colorize("text_diff_removed", oldstr)
        newstr = colorize("text_diff_added", newstr)

    return f"{oldstr} -> {newstr}"
    return f"{field}: {oldstr} -> {newstr}"


def show_model_changes(
    new, old=None, fields=None, always=False, print_obj: bool = True
):
    new: library.LibModel,
    old: library.LibModel | None = None,
    fields: Iterable[str] | None = None,
    always: bool = False,
    print_obj: bool = True,
) -> bool:
    """Given a Model object, print a list of changes from its pristine
    version stored in the database. Return a boolean indicating whether
    any changes were found.
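The new guard in `_field_diff` folds the exact-equality check and the float-tolerance check into one expression using walrus assignments. A standalone sketch of just that short-circuit (plain dicts stand in for the formatted-mapping objects):

FLOAT_EPSILON = 0.01

def values_unchanged(old: dict, new: dict, field: str) -> bool:
    """True when the raw values match exactly, or are floats within
    FLOAT_EPSILON of each other -- the same early exit _field_diff
    takes before formatting anything."""
    return (oldval := old.get(field)) == (newval := new.get(field)) or (
        isinstance(oldval, float)
        and isinstance(newval, float)
        and abs(oldval - newval) < FLOAT_EPSILON
    )

assert values_unchanged({"bpm": 120.001}, {"bpm": 120.002}, "bpm")  # within epsilon
assert not values_unchanged({"bpm": 120.0}, {"bpm": 121.0}, "bpm")  # real change
assert values_unchanged({"title": "x"}, {"title": "x"}, "title")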
@@ -1090,7 +1097,7 @@ def show_model_changes(
    restrict the detection to. `always` indicates whether the object is
    always identified, regardless of whether any changes are present.
    """
    old = old or new._db._get(type(new), new.id)
    old = old or new.get_fresh_from_db()

    # Keep the formatted views around instead of re-creating them in each
    # iteration step
@@ -1098,105 +1105,28 @@ def show_model_changes(
    new_fmt = new.formatted()

    # Build up lines showing changed fields.
    changes = []
    for field in old:
        # Subset of the fields. Never show mtime.
        if field == "mtime" or (fields and field not in fields):
            continue
    diff_fields = (set(old) | set(new)) - {"mtime"}
    if allowed_fields := set(fields or {}):
        diff_fields &= allowed_fields

        # Detect and show difference for this field.
        line = _field_diff(field, old, old_fmt, new, new_fmt)
        if line:
            changes.append(f"  {field}: {line}")

    # New fields.
    for field in set(new) - set(old):
        if fields and field not in fields:
            continue

        changes.append(
            f"  {field}: {colorize('text_highlight', new_fmt[field])}"
        )
    changes = [
        d
        for f in sorted(diff_fields)
        if (d := _field_diff(f, old_fmt, new_fmt))
    ]

    # Print changes.
    if print_obj and (changes or always):
        print_(format(old))
    if changes:
        print_("\n".join(changes))
        print_(textwrap.indent("\n".join(changes), "  "))

    return bool(changes)


def show_path_changes(path_changes):
    """Given a list of tuples (source, destination) that indicate the
    path changes, log the changes as INFO-level output to the beets log.
    The output is guaranteed to be unicode.

    Every pair is shown on a single line if the terminal width permits it,
    else it is split over two lines. E.g.,

    Source -> Destination

    vs.

    Source
      -> Destination
    """
    sources, destinations = zip(*path_changes)

    # Ensure unicode output
    sources = list(map(util.displayable_path, sources))
    destinations = list(map(util.displayable_path, destinations))

    # Calculate widths for terminal split
    col_width = (term_width() - len(" -> ")) // 2
    max_width = len(max(sources + destinations, key=len))

    if max_width > col_width:
        # Print every change over two lines
        for source, dest in zip(sources, destinations):
            color_source, color_dest = colordiff(source, dest)
            print_(f"{color_source} \n  -> {color_dest}")
    else:
        # Print every change on a single line, and add a header
        title_pad = max_width - len("Source ") + len(" -> ")

        print_(f"Source {' ' * title_pad} Destination")
        for source, dest in zip(sources, destinations):
            pad = max_width - len(source)
            color_source, color_dest = colordiff(source, dest)
            print_(f"{color_source} {' ' * pad} -> {color_dest}")


# Helper functions for option parsing.


def _store_dict(option, opt_str, value, parser):
    """Custom action callback to parse options which have ``key=value``
    pairs as values. All such pairs passed for this option are
    aggregated into a dictionary.
    """
    dest = option.dest
    option_values = getattr(parser.values, dest, None)

    if option_values is None:
        # This is the first supplied ``key=value`` pair of option.
        # Initialize empty dictionary and get a reference to it.
        setattr(parser.values, dest, {})
        option_values = getattr(parser.values, dest)

    try:
        key, value = value.split("=", 1)
        if not (key and value):
            raise ValueError
    except ValueError:
        raise UserError(
            f"supplied argument `{value}' is not of the form `key=value'"
        )

    option_values[key] = value


class CommonOptionsParser(optparse.OptionParser):
    """Offers a simple way to add common formatting options.
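The `_store_dict` callback above is standard optparse machinery: repeated `KEY=VALUE` arguments accumulate into one dict on `parser.values`. A runnable sketch of the same pattern (using `parser.error` instead of beets' `UserError`):

import optparse

def store_dict(option, opt_str, value, parser):
    """optparse 'callback' action: collect repeated KEY=VALUE options
    into a single dict, as beets' --set flag does."""
    option_values = getattr(parser.values, option.dest, None)
    if option_values is None:
        setattr(parser.values, option.dest, {})
        option_values = getattr(parser.values, option.dest)
    try:
        key, val = value.split("=", 1)
        if not (key and val):
            raise ValueError
    except ValueError:
        parser.error(f"argument {value!r} is not of the form key=value")
    option_values[key] = val

parser = optparse.OptionParser()
parser.add_option(
    "--set", dest="set_fields", action="callback",
    callback=store_dict, type="string", metavar="FIELD=VALUE",
)
opts, _ = parser.parse_args(["--set", "genre=Jazz", "--set", "year=1959"])
assert opts.set_fields == {"genre": "Jazz", "year": "1959"}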
@@ -1682,9 +1612,9 @@ def _raw_main(args: list[str], lib=None) -> None:
        and subargs[0] == "config"
        and ("-e" in subargs or "--edit" in subargs)
    ):
        from beets.ui.commands import config_edit
        from beets.ui.commands.config import config_edit

        return config_edit()
        return config_edit(options)

    test_lib = bool(lib)
    subcommands, lib = _setup(options, lib)
2490  beets/ui/commands.py  (diff suppressed because it is too large)
67    beets/ui/commands/__init__.py  (new file)
@@ -0,0 +1,67 @@
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.

"""This module provides the default commands for beets' command-line
interface.
"""

from beets.util.deprecation import deprecate_imports

from .completion import completion_cmd
from .config import config_cmd
from .fields import fields_cmd
from .help import HelpCommand
from .import_ import import_cmd
from .list import list_cmd
from .modify import modify_cmd
from .move import move_cmd
from .remove import remove_cmd
from .stats import stats_cmd
from .update import update_cmd
from .version import version_cmd
from .write import write_cmd


def __getattr__(name: str):
    """Handle deprecated imports."""
    return deprecate_imports(
        __name__,
        {
            "TerminalImportSession": "beets.ui.commands.import_.session",
            "PromptChoice": "beets.util",
        },
        name,
    )


# The list of default subcommands. This is populated with Subcommand
# objects that can be fed to a SubcommandsOptionParser.
default_commands = [
    fields_cmd,
    HelpCommand(),
    import_cmd,
    list_cmd,
    update_cmd,
    remove_cmd,
    stats_cmd,
    version_cmd,
    modify_cmd,
    move_cmd,
    write_cmd,
    config_cmd,
    completion_cmd,
]


__all__ = ["default_commands"]
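The module-level `__getattr__` above is the PEP 562 hook that `deprecate_imports` builds on: old import paths keep working but warn and point at the new location. A minimal sketch of the underlying pattern (the `_MOVED` table and module name here are illustrative, not the beets helper's exact API):

# deprecation_shim.py
import importlib
import warnings

_MOVED = {"TerminalImportSession": "beets.ui.commands.import_.session"}

def __getattr__(name: str):
    """Called only when `name` is not found in this module, so normal
    attribute access pays no cost."""
    if new_module := _MOVED.get(name):
        warnings.warn(
            f"{__name__}.{name} moved to {new_module}.{name}",
            DeprecationWarning,
            stacklevel=2,
        )
        return getattr(importlib.import_module(new_module), name)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")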
117  beets/ui/commands/completion.py  (new file)
@@ -0,0 +1,117 @@
"""The 'completion' command: print shell script for command line completion."""

import os
import re

from beets import library, logging, plugins, ui
from beets.util import syspath

# Global logger.
log = logging.getLogger("beets")


def print_completion(*args):
    from beets.ui.commands import default_commands

    for line in completion_script(default_commands + plugins.commands()):
        ui.print_(line, end="")
    if not any(os.path.isfile(syspath(p)) for p in BASH_COMPLETION_PATHS):
        log.warning(
            "Warning: Unable to find the bash-completion package. "
            "Command line completion might not work."
        )


completion_cmd = ui.Subcommand(
    "completion",
    help="print shell script that provides command line completion",
)
completion_cmd.func = print_completion
completion_cmd.hide = True


BASH_COMPLETION_PATHS = [
    b"/etc/bash_completion",
    b"/usr/share/bash-completion/bash_completion",
    b"/usr/local/share/bash-completion/bash_completion",
    # SmartOS
    b"/opt/local/share/bash-completion/bash_completion",
    # Homebrew (before bash-completion2)
    b"/usr/local/etc/bash_completion",
]


def completion_script(commands):
    """Yield the full completion shell script as strings.

    ``commands`` is a list of ``ui.Subcommand`` instances to generate
    completion data for.
    """
    base_script = os.path.join(
        os.path.dirname(__file__), "./completion_base.sh"
    )
    with open(base_script) as base_script:
        yield base_script.read()

    options = {}
    aliases = {}
    command_names = []

    # Collect subcommands
    for cmd in commands:
        name = cmd.name
        command_names.append(name)

        for alias in cmd.aliases:
            if re.match(r"^\w+$", alias):
                aliases[alias] = name

        options[name] = {"flags": [], "opts": []}
        for opts in cmd.parser._get_all_options()[1:]:
            if opts.action in ("store_true", "store_false"):
                option_type = "flags"
            else:
                option_type = "opts"

            options[name][option_type].extend(
                opts._short_opts + opts._long_opts
            )

    # Add global options
    options["_global"] = {
        "flags": ["-v", "--verbose"],
        "opts": "-l --library -c --config -d --directory -h --help".split(" "),
    }

    # Add flags common to all commands
    options["_common"] = {"flags": ["-h", "--help"]}

    # Start generating the script
    yield "_beet() {\n"

    # Command names
    yield f"  local commands={' '.join(command_names)!r}\n"
    yield "\n"

    # Command aliases
    yield f"  local aliases={' '.join(aliases.keys())!r}\n"
    for alias, cmd in aliases.items():
        yield f"  local alias__{alias.replace('-', '_')}={cmd}\n"
    yield "\n"

    # Fields
    fields = library.Item._fields.keys() | library.Album._fields.keys()
    yield f"  fields={' '.join(fields)!r}\n"

    # Command options
    for cmd, opts in options.items():
        for option_type, option_list in opts.items():
            if option_list:
                option_list = " ".join(option_list)
                yield (
                    "  local"
                    f" {option_type}__{cmd.replace('-', '_')}='{option_list}'\n"
                )

    yield "  _beet_dispatch\n"
    yield "}\n"
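The script generator leans on Python's `!r` conversion to produce shell-quoted `local` assignments. A tiny standalone sketch of that quoting step (the names are made up for the demo):

command_names = ["import", "list", "modify"]
aliases = {"im": "import", "ls": "list"}

# repr() of a str yields single-quoted output, which is what bash wants:
print(f"  local commands={' '.join(command_names)!r}")
#   local commands='import list modify'
for alias, cmd in aliases.items():
    # dashes are not valid in shell variable names, hence the replace()
    print(f"  local alias__{alias.replace('-', '_')}={cmd}")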
93  beets/ui/commands/config.py  (new file)
@@ -0,0 +1,93 @@
"""The 'config' command: show and edit user configuration."""

import os

from beets import config, ui
from beets.util import displayable_path, editor_command, interactive_open


def config_func(lib, opts, args):
    # Make sure lazy configuration is loaded
    config.resolve()

    # Print paths.
    if opts.paths:
        filenames = []
        for source in config.sources:
            if not opts.defaults and source.default:
                continue
            if source.filename:
                filenames.append(source.filename)

        # In case the user config file does not exist, prepend it to the
        # list.
        user_path = config.user_config_path()
        if user_path not in filenames:
            filenames.insert(0, user_path)

        for filename in filenames:
            ui.print_(displayable_path(filename))

    # Open in editor.
    elif opts.edit:
        # Note: This branch *should* be unreachable
        # since the normal flow should be short-circuited
        # by the special case in ui._raw_main
        config_edit(opts)

    # Dump configuration.
    else:
        config_out = config.dump(full=opts.defaults, redact=opts.redact)
        if config_out.strip() != "{}":
            ui.print_(config_out)
        else:
            print("Empty configuration")


def config_edit(cli_options):
    """Open a program to edit the user configuration.
    An empty config file is created if no existing config file exists.
    """
    path = cli_options.config or config.user_config_path()
    editor = editor_command()
    try:
        if not os.path.isfile(path):
            open(path, "w+").close()
        interactive_open([path], editor)
    except OSError as exc:
        message = f"Could not edit configuration: {exc}"
        if not editor:
            message += (
                ". Please set the VISUAL (or EDITOR) environment variable"
            )
        raise ui.UserError(message)


config_cmd = ui.Subcommand("config", help="show or edit the user configuration")
config_cmd.parser.add_option(
    "-p",
    "--paths",
    action="store_true",
    help="show files that configuration was loaded from",
)
config_cmd.parser.add_option(
    "-e",
    "--edit",
    action="store_true",
    help="edit user configuration with $VISUAL (or $EDITOR)",
)
config_cmd.parser.add_option(
    "-d",
    "--defaults",
    action="store_true",
    help="include the default configuration",
)
config_cmd.parser.add_option(
    "-c",
    "--clear",
    action="store_false",
    dest="redact",
    default=True,
    help="do not redact sensitive fields",
)
config_cmd.func = config_func
41  beets/ui/commands/fields.py  (new file)
@@ -0,0 +1,41 @@
"""The `fields` command: show available fields for queries and format strings."""

import textwrap

from beets import library, ui


def _print_keys(query):
    """Given a SQLite query result, print the `key` field of each
    returned row, with indentation of 2 spaces.
    """
    for row in query:
        ui.print_(f"  {row['key']}")


def fields_func(lib, opts, args):
    def _print_rows(names):
        names.sort()
        ui.print_(textwrap.indent("\n".join(names), "  "))

    ui.print_("Item fields:")
    _print_rows(library.Item.all_keys())

    ui.print_("Album fields:")
    _print_rows(library.Album.all_keys())

    with lib.transaction() as tx:
        # The SQL uses DISTINCT to return unique values from the query
        unique_fields = "SELECT DISTINCT key FROM ({})"

        ui.print_("Item flexible attributes:")
        _print_keys(tx.query(unique_fields.format(library.Item._flex_table)))

        ui.print_("Album flexible attributes:")
        _print_keys(tx.query(unique_fields.format(library.Album._flex_table)))


fields_cmd = ui.Subcommand(
    "fields", help="show fields available for queries and format strings"
)
fields_cmd.func = fields_func
22  beets/ui/commands/help.py  (new file)
@@ -0,0 +1,22 @@
"""The 'help' command: show help information for commands."""

from beets import ui


class HelpCommand(ui.Subcommand):
    def __init__(self):
        super().__init__(
            "help",
            aliases=("?",),
            help="give detailed help on a specific sub-command",
        )

    def func(self, lib, opts, args):
        if args:
            cmdname = args[0]
            helpcommand = self.root_parser._subcommand_for_name(cmdname)
            if not helpcommand:
                raise ui.UserError(f"unknown command '{cmdname}'")
            helpcommand.print_help()
        else:
            self.root_parser.print_help()
341  beets/ui/commands/import_/__init__.py  (new file)
@@ -0,0 +1,341 @@
"""The `import` command: import new music into the library."""

import os

from beets import config, logging, plugins, ui
from beets.util import displayable_path, normpath, syspath

from .session import TerminalImportSession

# Global logger.
log = logging.getLogger("beets")


def paths_from_logfile(path):
    """Parse the logfile and yield skipped paths to pass to the `import`
    command.
    """
    with open(path, encoding="utf-8") as fp:
        for i, line in enumerate(fp, start=1):
            verb, sep, paths = line.rstrip("\n").partition(" ")
            if not sep:
                raise ValueError(f"line {i} is invalid")

            # Ignore informational lines that don't need to be re-imported.
            if verb in {"import", "duplicate-keep", "duplicate-replace"}:
                continue

            if verb not in {"asis", "skip", "duplicate-skip"}:
                raise ValueError(f"line {i} contains unknown verb {verb}")

            yield os.path.commonpath(paths.split("; "))


def parse_logfiles(logfiles):
    """Parse all `logfiles` and yield paths from them."""
    for logfile in logfiles:
        try:
            yield from paths_from_logfile(syspath(normpath(logfile)))
        except ValueError as err:
            raise ui.UserError(
                f"malformed logfile {displayable_path(logfile)}: {err}"
            ) from err
        except OSError as err:
            raise ui.UserError(
                f"unreadable logfile {displayable_path(logfile)}: {err}"
            ) from err


def import_files(lib, paths: list[bytes], query):
    """Import the files in the given list of paths or matching the
    query.
    """
    # Check parameter consistency.
    if config["import"]["quiet"] and config["import"]["timid"]:
        raise ui.UserError("can't be both quiet and timid")

    # Open the log.
    if config["import"]["log"].get() is not None:
        logpath = syspath(config["import"]["log"].as_filename())
        try:
            loghandler = logging.FileHandler(logpath, encoding="utf-8")
        except OSError:
            raise ui.UserError(
                "Could not open log file for writing:"
                f" {displayable_path(logpath)}"
            )
    else:
        loghandler = None

    # Never ask for input in quiet mode.
    if config["import"]["resume"].get() == "ask" and config["import"]["quiet"]:
        config["import"]["resume"] = False

    session = TerminalImportSession(lib, loghandler, paths, query)
    session.run()

    # Emit event.
    plugins.send("import", lib=lib, paths=paths)


def import_func(lib, opts, args: list[str]):
    config["import"].set_args(opts)

    # Special case: --copy flag suppresses import_move (which would
    # otherwise take precedence).
    if opts.copy:
        config["import"]["move"] = False

    if opts.library:
        query = args
        byte_paths = []
    else:
        query = None
        paths = args

    # The paths from the logfiles go into a separate list to allow handling
    # errors differently from user-specified paths.
    paths_from_logfiles = list(parse_logfiles(opts.from_logfiles or []))

    if not paths and not paths_from_logfiles:
        raise ui.UserError("no path specified")

    byte_paths = [os.fsencode(p) for p in paths]
    paths_from_logfiles = [os.fsencode(p) for p in paths_from_logfiles]

    # Check the user-specified directories.
    for path in byte_paths:
        if not os.path.exists(syspath(normpath(path))):
            raise ui.UserError(
                f"no such file or directory: {displayable_path(path)}"
            )

    # Check the directories from the logfiles, but don't throw an error in
    # case those paths don't exist. Maybe some of those paths have already
    # been imported and moved separately, so logging a warning should
    # suffice.
    for path in paths_from_logfiles:
        if not os.path.exists(syspath(normpath(path))):
            log.warning(
                "No such file or directory: {}", displayable_path(path)
            )
            continue

        byte_paths.append(path)

    # If all paths were read from a logfile, and none of them exist, throw
    # an error
    if not byte_paths:
        raise ui.UserError("none of the paths are importable")

    import_files(lib, byte_paths, query)


def _store_dict(option, opt_str, value, parser):
    """Custom action callback to parse options which have ``key=value``
    pairs as values. All such pairs passed for this option are
    aggregated into a dictionary.
    """
    dest = option.dest
    option_values = getattr(parser.values, dest, None)

    if option_values is None:
        # This is the first supplied ``key=value`` pair of option.
        # Initialize empty dictionary and get a reference to it.
        setattr(parser.values, dest, {})
        option_values = getattr(parser.values, dest)

    try:
        key, value = value.split("=", 1)
        if not (key and value):
            raise ValueError
    except ValueError:
        raise ui.UserError(
            f"supplied argument `{value}' is not of the form `key=value'"
        )

    option_values[key] = value


import_cmd = ui.Subcommand(
    "import", help="import new music", aliases=("imp", "im")
)
import_cmd.parser.add_option(
    "-c",
    "--copy",
    action="store_true",
    default=None,
    help="copy tracks into library directory (default)",
)
import_cmd.parser.add_option(
    "-C",
    "--nocopy",
    action="store_false",
    dest="copy",
    help="don't copy tracks (opposite of -c)",
)
import_cmd.parser.add_option(
    "-m",
    "--move",
    action="store_true",
    dest="move",
    help="move tracks into the library (overrides -c)",
)
import_cmd.parser.add_option(
    "-w",
    "--write",
    action="store_true",
    default=None,
    help="write new metadata to files' tags (default)",
)
import_cmd.parser.add_option(
    "-W",
    "--nowrite",
    action="store_false",
    dest="write",
    help="don't write metadata (opposite of -w)",
)
import_cmd.parser.add_option(
    "-a",
    "--autotag",
    action="store_true",
    dest="autotag",
    help="infer tags for imported files (default)",
)
import_cmd.parser.add_option(
    "-A",
    "--noautotag",
    action="store_false",
    dest="autotag",
    help="don't infer tags for imported files (opposite of -a)",
)
import_cmd.parser.add_option(
    "-p",
    "--resume",
    action="store_true",
    default=None,
    help="resume importing if interrupted",
)
import_cmd.parser.add_option(
    "-P",
    "--noresume",
    action="store_false",
    dest="resume",
    help="do not try to resume importing",
)
import_cmd.parser.add_option(
    "-q",
    "--quiet",
    action="store_true",
    dest="quiet",
    help="never prompt for input: skip albums instead",
)
import_cmd.parser.add_option(
    "--quiet-fallback",
    type="string",
    dest="quiet_fallback",
    help="decision in quiet mode when no strong match: skip or asis",
)
import_cmd.parser.add_option(
    "-l",
    "--log",
    dest="log",
    help="file to log untaggable albums for later review",
)
import_cmd.parser.add_option(
    "-s",
    "--singletons",
    action="store_true",
    help="import individual tracks instead of full albums",
)
import_cmd.parser.add_option(
    "-t",
    "--timid",
    dest="timid",
    action="store_true",
    help="always confirm all actions",
)
import_cmd.parser.add_option(
    "-L",
    "--library",
    dest="library",
    action="store_true",
    help="retag items matching a query",
)
import_cmd.parser.add_option(
    "-i",
    "--incremental",
    dest="incremental",
    action="store_true",
    help="skip already-imported directories",
)
import_cmd.parser.add_option(
    "-I",
    "--noincremental",
    dest="incremental",
    action="store_false",
    help="do not skip already-imported directories",
)
import_cmd.parser.add_option(
    "-R",
    "--incremental-skip-later",
    action="store_true",
    dest="incremental_skip_later",
    help="do not record skipped files during incremental import",
)
import_cmd.parser.add_option(
    "-r",
    "--noincremental-skip-later",
    action="store_false",
    dest="incremental_skip_later",
    help="record skipped files during incremental import",
)
import_cmd.parser.add_option(
    "--from-scratch",
    dest="from_scratch",
    action="store_true",
    help="erase existing metadata before applying new metadata",
)
import_cmd.parser.add_option(
    "--flat",
    dest="flat",
    action="store_true",
    help="import an entire tree as a single album",
)
import_cmd.parser.add_option(
    "-g",
    "--group-albums",
    dest="group_albums",
    action="store_true",
    help="group tracks in a folder into separate albums",
)
import_cmd.parser.add_option(
    "--pretend",
    dest="pretend",
    action="store_true",
    help="just print the files to import",
)
import_cmd.parser.add_option(
    "-S",
    "--search-id",
    dest="search_ids",
    action="append",
    metavar="ID",
    help="restrict matching to a specific metadata backend ID",
)
import_cmd.parser.add_option(
    "--from-logfile",
    dest="from_logfiles",
    action="append",
    metavar="PATH",
    help="read skipped paths from an existing logfile",
)
import_cmd.parser.add_option(
    "--set",
    dest="set_fields",
    action="callback",
    callback=_store_dict,
    metavar="FIELD=VALUE",
    help="set the given fields to the supplied values",
)
import_cmd.func = import_func
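For reference, `paths_from_logfile` reads beets' import log format (`<verb> <path>[; <path>...]`), skips informational verbs, and collapses multi-path entries to their common directory. A self-contained sketch demonstrating that behavior on a temporary logfile:

import os
import tempfile

def paths_from_logfile(path):
    """Yield re-importable paths from an import log, as above."""
    with open(path, encoding="utf-8") as fp:
        for i, line in enumerate(fp, start=1):
            verb, sep, paths = line.rstrip("\n").partition(" ")
            if not sep:
                raise ValueError(f"line {i} is invalid")
            if verb in {"import", "duplicate-keep", "duplicate-replace"}:
                continue  # informational; nothing to re-import
            if verb not in {"asis", "skip", "duplicate-skip"}:
                raise ValueError(f"line {i} contains unknown verb {verb}")
            yield os.path.commonpath(paths.split("; "))

with tempfile.NamedTemporaryFile("w", suffix=".log", delete=False) as f:
    f.write("import /music/done\nskip /music/a/1; /music/a/2\n")
print(list(paths_from_logfile(f.name)))  # ['/music/a']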
563  beets/ui/commands/import_/display.py  (new file)
@ -0,0 +1,563 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from functools import cached_property
|
||||
from typing import TYPE_CHECKING, TypedDict
|
||||
|
||||
from typing_extensions import NotRequired
|
||||
|
||||
from beets import config, ui
|
||||
from beets.autotag import hooks
|
||||
from beets.util import displayable_path
|
||||
from beets.util.units import human_seconds_short
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
import confuse
|
||||
|
||||
from beets import autotag
|
||||
from beets.autotag.distance import Distance
|
||||
from beets.library.models import Item
|
||||
from beets.ui import ColorName
|
||||
|
||||
VARIOUS_ARTISTS = "Various Artists"
|
||||
|
||||
|
||||
class Side(TypedDict):
|
||||
prefix: str
|
||||
contents: str
|
||||
suffix: str
|
||||
width: NotRequired[int]
|
||||
|
||||
|
||||
@dataclass
|
||||
class ChangeRepresentation:
|
||||
"""Keeps track of all information needed to generate a (colored) text
|
||||
representation of the changes that will be made if an album or singleton's
|
||||
tags are changed according to `match`, which must be an AlbumMatch or
|
||||
TrackMatch object, accordingly.
|
||||
"""
|
||||
|
||||
cur_artist: str
|
||||
cur_name: str
|
||||
match: autotag.hooks.Match
|
||||
|
||||
@cached_property
|
||||
def changed_prefix(self) -> str:
|
||||
return ui.colorize("changed", "\u2260")
|
||||
|
||||
@cached_property
|
||||
def _indentation_config(self) -> confuse.Subview:
|
||||
return config["ui"]["import"]["indentation"]
|
||||
|
||||
@cached_property
|
||||
def indent_header(self) -> str:
|
||||
return ui.indent(self._indentation_config["match_header"].as_number())
|
||||
|
||||
@cached_property
|
||||
def indent_detail(self) -> str:
|
||||
return ui.indent(self._indentation_config["match_details"].as_number())
|
||||
|
||||
@cached_property
|
||||
def indent_tracklist(self) -> str:
|
||||
return ui.indent(
|
||||
self._indentation_config["match_tracklist"].as_number()
|
||||
)
|
||||
|
||||
@cached_property
|
||||
def layout(self) -> int:
|
||||
return config["ui"]["import"]["layout"].as_choice(
|
||||
{"column": 0, "newline": 1}
|
||||
)
|
||||
|
||||
def print_layout(
|
||||
self,
|
||||
indent: str,
|
||||
left: Side,
|
||||
right: Side,
|
||||
separator: str = " -> ",
|
||||
max_width: int | None = None,
|
||||
) -> None:
|
||||
if not max_width:
|
||||
# If no max_width provided, use terminal width
|
||||
max_width = ui.term_width()
|
||||
if self.layout == 0:
|
||||
ui.print_column_layout(indent, left, right, separator, max_width)
|
||||
else:
|
||||
ui.print_newline_layout(indent, left, right, separator, max_width)
|
||||
|
||||
def show_match_header(self) -> None:
|
||||
"""Print out a 'header' identifying the suggested match (album name,
|
||||
artist name,...) and summarizing the changes that would be made should
|
||||
the user accept the match.
|
||||
"""
|
||||
# Print newline at beginning of change block.
|
||||
ui.print_("")
|
||||
|
||||
# 'Match' line and similarity.
|
||||
ui.print_(
|
||||
f"{self.indent_header}Match ({dist_string(self.match.distance)}):"
|
||||
)
|
||||
|
||||
artist_name_str = f"{self.match.info.artist} - {self.match.info.name}"
|
||||
ui.print_(
|
||||
self.indent_header
|
||||
+ dist_colorize(artist_name_str, self.match.distance)
|
||||
)
|
||||
|
||||
# Penalties.
|
||||
penalties = penalty_string(self.match.distance)
|
||||
if penalties:
|
||||
ui.print_(f"{self.indent_header}{penalties}")
|
||||
|
||||
# Disambiguation.
|
||||
disambig = disambig_string(self.match.info)
|
||||
if disambig:
|
||||
ui.print_(f"{self.indent_header}{disambig}")
|
||||
|
||||
# Data URL.
|
||||
if self.match.info.data_url:
|
||||
url = ui.colorize("text_faint", f"{self.match.info.data_url}")
|
||||
ui.print_(f"{self.indent_header}{url}")
|
||||
|
||||
def show_match_details(self) -> None:
|
||||
"""Print out the details of the match, including changes in album name
|
||||
and artist name.
|
||||
"""
|
||||
# Artist.
|
||||
artist_l, artist_r = self.cur_artist or "", self.match.info.artist
|
||||
if artist_r == VARIOUS_ARTISTS:
|
||||
# Hide artists for VA releases.
|
||||
artist_l, artist_r = "", ""
|
||||
left: Side
|
||||
right: Side
|
||||
if artist_l != artist_r:
|
||||
artist_l, artist_r = ui.colordiff(artist_l, artist_r)
|
||||
left = {
|
||||
"prefix": f"{self.changed_prefix} Artist: ",
|
||||
"contents": artist_l,
|
||||
"suffix": "",
|
||||
}
|
||||
right = {"prefix": "", "contents": artist_r, "suffix": ""}
|
||||
self.print_layout(self.indent_detail, left, right)
|
||||
|
||||
else:
|
||||
ui.print_(f"{self.indent_detail}*", "Artist:", artist_r)
|
||||
|
||||
if self.cur_name:
|
||||
type_ = self.match.type
|
||||
        name_l, name_r = self.cur_name or "", self.match.info.name
        if self.cur_name != self.match.info.name != VARIOUS_ARTISTS:
            name_l, name_r = ui.colordiff(name_l, name_r)
            left = {
                "prefix": f"{self.changed_prefix} {type_}: ",
                "contents": name_l,
                "suffix": "",
            }
            right = {"prefix": "", "contents": name_r, "suffix": ""}
            self.print_layout(self.indent_detail, left, right)
        else:
            ui.print_(f"{self.indent_detail}*", f"{type_}:", name_r)

    def make_medium_info_line(self, track_info: hooks.TrackInfo) -> str:
        """Construct a line with the current medium's info."""
        track_media = track_info.get("media", "Media")
        # Build output string.
        if self.match.info.mediums > 1 and track_info.disctitle:
            return (
                f"* {track_media} {track_info.medium}: {track_info.disctitle}"
            )
        elif self.match.info.mediums > 1:
            return f"* {track_media} {track_info.medium}"
        elif track_info.disctitle:
            return f"* {track_media}: {track_info.disctitle}"
        else:
            return ""

    def format_index(self, track_info: hooks.TrackInfo | Item) -> str:
        """Return a string representing the track index of the given
        TrackInfo or Item object.
        """
        if isinstance(track_info, hooks.TrackInfo):
            index = track_info.index
            medium_index = track_info.medium_index
            medium = track_info.medium
            mediums = self.match.info.mediums
        else:
            index = medium_index = track_info.track
            medium = track_info.disc
            mediums = track_info.disctotal
        if config["per_disc_numbering"]:
            if mediums and mediums > 1:
                return f"{medium}-{medium_index}"
            else:
                return str(medium_index if medium_index is not None else index)
        else:
            return str(index)

    def make_track_numbers(
        self, item: Item, track_info: hooks.TrackInfo
    ) -> tuple[str, str, bool]:
        """Format colored track indices."""
        cur_track = self.format_index(item)
        new_track = self.format_index(track_info)
        changed = False
        # Choose color based on change.
        highlight_color: ColorName
        if cur_track != new_track:
            changed = True
            if item.track in (track_info.index, track_info.medium_index):
                highlight_color = "text_highlight_minor"
            else:
                highlight_color = "text_highlight"
        else:
            highlight_color = "text_faint"

        lhs_track = ui.colorize(highlight_color, f"(#{cur_track})")
        rhs_track = ui.colorize(highlight_color, f"(#{new_track})")
        return lhs_track, rhs_track, changed

    @staticmethod
    def make_track_titles(
        item: Item, track_info: hooks.TrackInfo
    ) -> tuple[str, str, bool]:
        """Format colored track titles."""
        new_title = track_info.name
        if not item.title.strip():
            # If there's no title, we use the filename. Don't colordiff.
            cur_title = displayable_path(os.path.basename(item.path))
            return cur_title, new_title, True
        else:
            # If there is a title, highlight differences.
            cur_title = item.title.strip()
            cur_col, new_col = ui.colordiff(cur_title, new_title)
            return cur_col, new_col, cur_title != new_title

    @staticmethod
    def make_track_lengths(
        item: Item, track_info: hooks.TrackInfo
    ) -> tuple[str, str, bool]:
        """Format colored track lengths."""
        changed = False
        highlight_color: ColorName
        if (
            item.length
            and track_info.length
            and abs(item.length - track_info.length)
            >= config["ui"]["length_diff_thresh"].as_number()
        ):
            highlight_color = "text_highlight"
            changed = True
        else:
            highlight_color = "text_highlight_minor"

        # Handle nonetype lengths by setting to 0
        cur_length0 = item.length if item.length else 0
        new_length0 = track_info.length if track_info.length else 0
        # format into string
        cur_length = f"({human_seconds_short(cur_length0)})"
        new_length = f"({human_seconds_short(new_length0)})"
        # colorize
        lhs_length = ui.colorize(highlight_color, cur_length)
        rhs_length = ui.colorize(highlight_color, new_length)

        return lhs_length, rhs_length, changed

    def make_line(
        self, item: Item, track_info: hooks.TrackInfo
    ) -> tuple[Side, Side]:
        """Extract changes from item -> new TrackInfo object, and colorize
        appropriately. Returns (lhs, rhs) for column printing.
        """
        # Track titles.
        lhs_title, rhs_title, diff_title = self.make_track_titles(
            item, track_info
        )
        # Track number change.
        lhs_track, rhs_track, diff_track = self.make_track_numbers(
            item, track_info
        )
        # Length change.
        lhs_length, rhs_length, diff_length = self.make_track_lengths(
            item, track_info
        )

        changed = diff_title or diff_track or diff_length

        # Construct lhs and rhs dicts.
        # We previously printed the penalties here; that is no longer the
        # case, so the 'info' dictionary is unneeded.
        # penalties = penalty_string(self.match.distance.tracks[track_info])

        lhs: Side = {
            "prefix": f"{self.changed_prefix if changed else '*'} {lhs_track} ",
            "contents": lhs_title,
            "suffix": f" {lhs_length}",
        }
        rhs: Side = {"prefix": "", "contents": "", "suffix": ""}
        if not changed:
            # Only return the left side, as nothing changed.
            return (lhs, rhs)
        else:
            # Construct a dictionary for the "changed to" side
            rhs = {
                "prefix": f"{rhs_track} ",
                "contents": rhs_title,
                "suffix": f" {rhs_length}",
            }
            return (lhs, rhs)

    def print_tracklist(self, lines: list[tuple[Side, Side]]) -> None:
        """Calculate column widths for tracks stored as (left, right) line
        tuples, then print each line of the tracklist.
        """
        if len(lines) == 0:
            # If no lines provided, e.g. details not required, do nothing.
            return

        def get_width(side: Side) -> int:
            """Return the width of left or right in uncolorized characters."""
            try:
                return len(
                    ui.uncolorize(
                        " ".join(
                            [side["prefix"], side["contents"], side["suffix"]]
                        )
                    )
                )
            except KeyError:
                # An empty dictionary -> Nothing to report
                return 0

        # Check how to fit content into terminal window
        indent_width = len(self.indent_tracklist)
        terminal_width = ui.term_width()
        joiner_width = len("".join(["* ", " -> "]))
        col_width = (terminal_width - indent_width - joiner_width) // 2
        max_width_l = max(get_width(line_tuple[0]) for line_tuple in lines)
        max_width_r = max(get_width(line_tuple[1]) for line_tuple in lines)

        if ((max_width_l <= col_width) and (max_width_r <= col_width)) or (
            ((max_width_l > col_width) or (max_width_r > col_width))
            and ((max_width_l + max_width_r) <= col_width * 2)
        ):
            # All content fits. Either both maximum widths are below column
            # widths, or one of the columns is larger than allowed but the
            # other is smaller than allowed.
            # In this case we can afford to shrink the columns to fit their
            # largest string
            col_width_l = max_width_l
            col_width_r = max_width_r
        else:
            # Not all content fits - stick with original half/half split
            col_width_l = col_width
            col_width_r = col_width

        # Print out each line, using the calculated width from above.
        for left, right in lines:
            left["width"] = col_width_l
            right["width"] = col_width_r
            self.print_layout(self.indent_tracklist, left, right)


class AlbumChange(ChangeRepresentation):
    match: autotag.hooks.AlbumMatch

    def show_match_tracks(self) -> None:
        """Print out the tracks of the match, summarizing changes the match
        suggests for them.
        """
        pairs = sorted(
            self.match.item_info_pairs, key=lambda pair: pair[1].index or 0
        )
        # Build up LHS and RHS for track difference display. The `lines` list
        # contains `(left, right)` tuples.
        lines: list[tuple[Side, Side]] = []
        medium = disctitle = None
        for item, track_info in pairs:
            # If the track is the first on a new medium, show medium
            # number and title.
            if medium != track_info.medium or disctitle != track_info.disctitle:
                # Create header for new medium
                header = self.make_medium_info_line(track_info)
                if header != "":
                    # Print tracks from previous medium
                    self.print_tracklist(lines)
                    lines = []
                    ui.print_(f"{self.indent_detail}{header}")
                # Save new medium details for future comparison.
                medium, disctitle = track_info.medium, track_info.disctitle

            # Construct the line tuple for the track.
            left, right = self.make_line(item, track_info)
            if right["contents"] != "":
                lines.append((left, right))
            else:
                if config["import"]["detail"]:
                    lines.append((left, right))
        self.print_tracklist(lines)

        # Missing and unmatched tracks.
        if self.match.extra_tracks:
            ui.print_(
                "Missing tracks"
                f" ({len(self.match.extra_tracks)}/{len(self.match.info.tracks)} -"
                f" {len(self.match.extra_tracks) / len(self.match.info.tracks):.1%}):"
            )
            for track_info in self.match.extra_tracks:
                line = f" ! {track_info.title} (#{self.format_index(track_info)})"
                if track_info.length:
                    line += f" ({human_seconds_short(track_info.length)})"
                ui.print_(ui.colorize("text_warning", line))
        if self.match.extra_items:
            ui.print_(f"Unmatched tracks ({len(self.match.extra_items)}):")
            for item in self.match.extra_items:
                line = f" ! {item.title} (#{self.format_index(item)})"
                if item.length:
                    line += f" ({human_seconds_short(item.length)})"
                ui.print_(ui.colorize("text_warning", line))


class TrackChange(ChangeRepresentation):
    """Track change representation, comparing item with match."""

    match: autotag.hooks.TrackMatch


def show_change(
    cur_artist: str, cur_album: str, match: hooks.AlbumMatch
) -> None:
    """Print out a representation of the changes that will be made if an
    album's tags are changed according to `match`, which must be an AlbumMatch
    object.
    """
    change = AlbumChange(cur_artist, cur_album, match)

    # Print the match header.
    change.show_match_header()

    # Print the match details.
    change.show_match_details()

    # Print the match tracks.
    change.show_match_tracks()


def show_item_change(item: Item, match: hooks.TrackMatch) -> None:
    """Print out the change that would occur by tagging `item` with the
    metadata from `match`, a TrackMatch object.
    """
    change = TrackChange(item.artist, item.title, match)
    # Print the match header.
    change.show_match_header()
    # Print the match details.
    change.show_match_details()


def disambig_string(info: hooks.Info) -> str:
    """Generate a string for an AlbumInfo or TrackInfo object that
    provides context that helps disambiguate similar-looking albums and
    tracks.
    """
    if isinstance(info, hooks.AlbumInfo):
        disambig = get_album_disambig_fields(info)
    elif isinstance(info, hooks.TrackInfo):
        disambig = get_singleton_disambig_fields(info)
    else:
        return ""

    return ", ".join(disambig)


def get_singleton_disambig_fields(info: hooks.TrackInfo) -> Sequence[str]:
    out = []
    chosen_fields = config["match"]["singleton_disambig_fields"].as_str_seq()
    calculated_values = {
        "index": f"Index {info.index}",
        "track_alt": f"Track {info.track_alt}",
        "album": (
            f"[{info.album}]"
            if (
                config["import"]["singleton_album_disambig"].get()
                and info.get("album")
            )
            else ""
        ),
    }

    for field in chosen_fields:
        if field in calculated_values:
            out.append(str(calculated_values[field]))
        else:
            try:
                out.append(str(info[field]))
            except (AttributeError, KeyError):
                print(f"Disambiguation string key {field} does not exist.")

    return out


def get_album_disambig_fields(info: hooks.AlbumInfo) -> Sequence[str]:
    out = []
    chosen_fields = config["match"]["album_disambig_fields"].as_str_seq()
    calculated_values = {
        "media": (
            f"{info.mediums}x{info.media}"
            if (info.mediums and info.mediums > 1)
            else info.media
        ),
    }

    for field in chosen_fields:
        if field in calculated_values:
            out.append(str(calculated_values[field]))
        else:
            try:
                out.append(str(info[field]))
            except (AttributeError, KeyError):
                print(f"Disambiguation string key {field} does not exist.")

    return out


def dist_colorize(string: str, dist: Distance) -> str:
    """Formats a string as a colorized similarity string according to
    a distance.
    """
    if dist <= config["match"]["strong_rec_thresh"].as_number():
        string = ui.colorize("text_success", string)
    elif dist <= config["match"]["medium_rec_thresh"].as_number():
        string = ui.colorize("text_warning", string)
    else:
        string = ui.colorize("text_error", string)
    return string


def dist_string(dist: Distance) -> str:
    """Formats a distance (a float) as a colorized similarity percentage
    string.
    """
    string = f"{(1 - dist) * 100:.1f}%"
    return dist_colorize(string, dist)


def penalty_string(distance: Distance, limit: int | None = None) -> str:
    """Returns a colorized string that indicates all the penalties
    applied to a distance object.
    """
    penalties = []
    for key in distance.keys():
        key = key.replace("album_", "")
        key = key.replace("track_", "")
        key = key.replace("_", " ")
        penalties.append(key)
    if penalties:
        if limit and len(penalties) > limit:
            penalties = [*penalties[:limit], "..."]
        # Prefix penalty string with U+2260: Not Equal To
        penalty_string = f"\u2260 {', '.join(penalties)}"
        return ui.colorize("changed", penalty_string)

    return ""
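A minimal usage sketch of the two helpers above, assuming a Distance instance `d` produced by the autotagger (the variable names are illustrative):

    # dist_string renders a distance as a colorized percentage, while
    # penalty_string lists the penalty sources, capped at three here.
    header = f"{dist_string(d)} {penalty_string(d, limit=3)}"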
545 beets/ui/commands/import_/session.py Normal file
@@ -0,0 +1,545 @@
from collections import Counter
from itertools import chain

from beets import autotag, config, importer, logging, plugins, ui
from beets.autotag import Recommendation
from beets.util import PromptChoice, displayable_path
from beets.util.units import human_bytes, human_seconds_short

from .display import (
    disambig_string,
    dist_colorize,
    penalty_string,
    show_change,
    show_item_change,
)

# Global logger.
log = logging.getLogger("beets")


class TerminalImportSession(importer.ImportSession):
    """An import session that runs in a terminal."""

    def choose_match(self, task):
        """Given an initial autotagging of items, go through an interactive
        dance with the user to ask for a choice of metadata. Returns an
        AlbumMatch object, ASIS, or SKIP.
        """
        # Show what we're tagging.
        ui.print_()

        path_str0 = displayable_path(task.paths, "\n")
        path_str = ui.colorize("import_path", path_str0)
        items_str0 = f"({len(task.items)} items)"
        items_str = ui.colorize("import_path_items", items_str0)
        ui.print_(" ".join([path_str, items_str]))

        # Let plugins display info or prompt the user before we go through the
        # process of selecting a candidate.
        results = plugins.send(
            "import_task_before_choice", session=self, task=task
        )
        actions = [action for action in results if action]

        if len(actions) == 1:
            return actions[0]
        elif len(actions) > 1:
            raise plugins.PluginConflictError(
                "Only one handler for `import_task_before_choice` may return "
                "an action."
            )

        # Take immediate action if appropriate.
        action = _summary_judgment(task.rec)
        if action == importer.Action.APPLY:
            match = task.candidates[0]
            show_change(task.cur_artist, task.cur_album, match)
            return match
        elif action is not None:
            return action

        # Loop until we have a choice.
        while True:
            # Ask for a choice from the user. The result of
            # `choose_candidate` may be an `importer.Action`, an
            # `AlbumMatch` object for a specific selection, or a
            # `PromptChoice`.
            choices = self._get_choices(task)
            choice = choose_candidate(
                task.candidates,
                False,
                task.rec,
                task.cur_artist,
                task.cur_album,
                itemcount=len(task.items),
                choices=choices,
            )

            # Basic choices that require no more action here.
            if choice in (importer.Action.SKIP, importer.Action.ASIS):
                # Pass selection to main control flow.
                return choice

            # Plugin-provided choices. We invoke the associated callback
            # function.
            elif choice in choices:
                post_choice = choice.callback(self, task)
                if isinstance(post_choice, importer.Action):
                    return post_choice
                elif isinstance(post_choice, autotag.Proposal):
                    # Use the new candidates and continue around the loop.
                    task.candidates = post_choice.candidates
                    task.rec = post_choice.recommendation

            # Otherwise, we have a specific match selection.
            else:
                # We have a candidate! Finish tagging. Here, choice is an
                # AlbumMatch object.
                assert isinstance(choice, autotag.AlbumMatch)
                return choice

    def choose_item(self, task):
        """Ask the user for a choice about tagging a single item. Returns
        either an action constant or a TrackMatch object.
        """
        ui.print_()
        ui.print_(displayable_path(task.item.path))
        candidates, rec = task.candidates, task.rec

        # Take immediate action if appropriate.
        action = _summary_judgment(task.rec)
        if action == importer.Action.APPLY:
            match = candidates[0]
            show_item_change(task.item, match)
            return match
        elif action is not None:
            return action

        while True:
            # Ask for a choice.
            choices = self._get_choices(task)
            choice = choose_candidate(
                candidates, True, rec, item=task.item, choices=choices
            )

            if choice in (importer.Action.SKIP, importer.Action.ASIS):
                return choice

            elif choice in choices:
                post_choice = choice.callback(self, task)
                if isinstance(post_choice, importer.Action):
                    return post_choice
                elif isinstance(post_choice, autotag.Proposal):
                    candidates = post_choice.candidates
                    rec = post_choice.recommendation

            else:
                # Chose a candidate.
                assert isinstance(choice, autotag.TrackMatch)
                return choice

    def resolve_duplicate(self, task, found_duplicates):
        """Decide what to do when a new album or item seems similar to one
        that's already in the library.
        """
        log.warning(
            "This {} is already in the library!",
            ("album" if task.is_album else "item"),
        )

        if config["import"]["quiet"]:
            # In quiet mode, don't prompt -- just skip.
            log.info("Skipping.")
            sel = "s"
        else:
            # Print some detail about the existing and new items so the
            # user can make an informed decision.
            for duplicate in found_duplicates:
                ui.print_(
                    "Old: "
                    + summarize_items(
                        (
                            list(duplicate.items())
                            if task.is_album
                            else [duplicate]
                        ),
                        not task.is_album,
                    )
                )
                if config["import"]["duplicate_verbose_prompt"]:
                    if task.is_album:
                        for dup in duplicate.items():
                            print(f" {dup}")
                    else:
                        print(f" {duplicate}")

            ui.print_(
                "New: "
                + summarize_items(
                    task.imported_items(),
                    not task.is_album,
                )
            )
            if config["import"]["duplicate_verbose_prompt"]:
                for item in task.imported_items():
                    print(f" {item}")

            sel = ui.input_options(
                ("Skip new", "Keep all", "Remove old", "Merge all")
            )

        if sel == "s":
            # Skip new.
            task.set_choice(importer.Action.SKIP)
        elif sel == "k":
            # Keep both. Do nothing; leave the choice intact.
            pass
        elif sel == "r":
            # Remove old.
            task.should_remove_duplicates = True
        elif sel == "m":
            task.should_merge_duplicates = True
        else:
            assert False

    def should_resume(self, path):
        return ui.input_yn(
            f"Import of the directory:\n{displayable_path(path)}\n"
            "was interrupted. Resume (Y/n)?"
        )
    def _get_choices(self, task):
        """Get the list of prompt choices that should be presented to the
        user. This consists of both built-in choices and ones provided by
        plugins.

        The `before_choose_candidate` event is sent to the plugins, with
        session and task as its parameters. Plugins are responsible for
        checking the right conditions and returning a list of `PromptChoice`s,
        which is flattened and checked for conflicts.

        If two or more choices have the same short letter, a warning is
        emitted and all but one are discarded, giving preference to the
        default importer choices.

        Returns a list of `PromptChoice`s.
        """
        # Standard, built-in choices.
        choices = [
            PromptChoice("s", "Skip", lambda s, t: importer.Action.SKIP),
            PromptChoice("u", "Use as-is", lambda s, t: importer.Action.ASIS),
        ]
        if task.is_album:
            choices += [
                PromptChoice(
                    "t", "as Tracks", lambda s, t: importer.Action.TRACKS
                ),
                PromptChoice(
                    "g", "Group albums", lambda s, t: importer.Action.ALBUMS
                ),
            ]
        choices += [
            PromptChoice("e", "Enter search", manual_search),
            PromptChoice("i", "enter Id", manual_id),
            PromptChoice("b", "aBort", abort_action),
        ]

        # Send the before_choose_candidate event and flatten list.
        extra_choices = list(
            chain(
                *plugins.send(
                    "before_choose_candidate", session=self, task=task
                )
            )
        )

        # Add a "dummy" choice for the other baked-in option, for
        # duplicate checking.
        all_choices = [
            PromptChoice("a", "Apply", None),
            *choices,
            *extra_choices,
        ]

        # Check for conflicts.
        short_letters = [c.short for c in all_choices]
        if len(short_letters) != len(set(short_letters)):
            # Duplicate short letter has been found.
            duplicates = [
                i for i, count in Counter(short_letters).items() if count > 1
            ]
            for short in duplicates:
                # Keep the first of the choices, removing the rest.
                dup_choices = [c for c in all_choices if c.short == short]
                for c in dup_choices[1:]:
                    log.warning(
                        "Prompt choice '{0.long}' removed due to conflict "
                        "with '{1[0].long}' (short letter: '{0.short}')",
                        c,
                        dup_choices,
                    )
                    extra_choices.remove(c)

        return choices + extra_choices
def summarize_items(items, singleton):
    """Produces a brief summary line describing a set of items. Used for
    manually resolving duplicates during import.

    `items` is a list of `Item` objects. `singleton` indicates whether
    this is an album or single-item import (if the latter, then `items`
    should only have one element).
    """
    summary_parts = []
    if not singleton:
        summary_parts.append(f"{len(items)} items")

    format_counts = {}
    for item in items:
        format_counts[item.format] = format_counts.get(item.format, 0) + 1
    if len(format_counts) == 1:
        # A single format.
        summary_parts.append(items[0].format)
    else:
        # Enumerate all the formats by decreasing frequencies:
        for fmt, count in sorted(
            format_counts.items(),
            key=lambda fmt_and_count: (-fmt_and_count[1], fmt_and_count[0]),
        ):
            summary_parts.append(f"{fmt} {count}")

    if items:
        average_bitrate = sum([item.bitrate for item in items]) / len(items)
        total_duration = sum([item.length for item in items])
        total_filesize = sum([item.filesize for item in items])
        summary_parts.append(f"{int(average_bitrate / 1000)}kbps")
        if items[0].format == "FLAC":
            sample_bits = (
                f"{round(int(items[0].samplerate) / 1000, 1)}kHz"
                f"/{items[0].bitdepth} bit"
            )
            summary_parts.append(sample_bits)
        summary_parts.append(human_seconds_short(total_duration))
        summary_parts.append(human_bytes(total_filesize))

    return ", ".join(summary_parts)


def _summary_judgment(rec):
    """Determines whether a decision should be made without even asking
    the user. This occurs in quiet mode and when an action is chosen for
    NONE recommendations. Return None if the user should be queried.
    Otherwise, returns an action. May also print to the console if a
    summary judgment is made.
    """

    if config["import"]["quiet"]:
        if rec == Recommendation.strong:
            return importer.Action.APPLY
        else:
            action = config["import"]["quiet_fallback"].as_choice(
                {
                    "skip": importer.Action.SKIP,
                    "asis": importer.Action.ASIS,
                }
            )
    elif config["import"]["timid"]:
        return None
    elif rec == Recommendation.none:
        action = config["import"]["none_rec_action"].as_choice(
            {
                "skip": importer.Action.SKIP,
                "asis": importer.Action.ASIS,
                "ask": None,
            }
        )
    else:
        return None

    if action == importer.Action.SKIP:
        ui.print_("Skipping.")
    elif action == importer.Action.ASIS:
        ui.print_("Importing as-is.")
    return action
def choose_candidate(
    candidates,
    singleton,
    rec,
    cur_artist=None,
    cur_album=None,
    item=None,
    itemcount=None,
    choices=[],
):
    """Given a sorted list of candidates, ask the user for a selection
    of which candidate to use. Applies to both full albums and
    singletons (tracks). Candidates are either AlbumMatch or TrackMatch
    objects depending on `singleton`. For albums, `cur_artist`,
    `cur_album`, and `itemcount` must be provided. For singletons,
    `item` must be provided.

    `choices` is a list of `PromptChoice`s to be used in each prompt.

    Returns one of the following:
    * the result of the choice, which may be SKIP or ASIS
    * a candidate (an AlbumMatch/TrackMatch object)
    * a chosen `PromptChoice` from `choices`
    """
    # Sanity check.
    if singleton:
        assert item is not None
    else:
        assert cur_artist is not None
        assert cur_album is not None

    # Build helper variables for the prompt choices.
    choice_opts = tuple(c.long for c in choices)
    choice_actions = {c.short: c for c in choices}

    # Zero candidates.
    if not candidates:
        if singleton:
            ui.print_("No matching recordings found.")
        else:
            ui.print_(f"No matching release found for {itemcount} tracks.")
            ui.print_(
                "For help, see: "
                "https://beets.readthedocs.org/en/latest/faq.html#nomatch"
            )
        sel = ui.input_options(choice_opts)
        if sel in choice_actions:
            return choice_actions[sel]
        else:
            assert False

    # Is the change good enough?
    bypass_candidates = False
    if rec != Recommendation.none:
        match = candidates[0]
        bypass_candidates = True

    while True:
        # Display and choose from candidates.
        require = rec <= Recommendation.low

        if not bypass_candidates:
            # Display list of candidates.
            ui.print_("")
            ui.print_(
                f"Finding tags for {'track' if singleton else 'album'} "
                f'"{item.artist if singleton else cur_artist} -'
                f' {item.title if singleton else cur_album}".'
            )

            ui.print_(" Candidates:")
            for i, match in enumerate(candidates):
                # Index, metadata, and distance.
                index0 = f"{i + 1}."
                index = dist_colorize(index0, match.distance)
                dist = f"({(1 - match.distance) * 100:.1f}%)"
                distance = dist_colorize(dist, match.distance)
                metadata = f"{match.info.artist} - {match.info.name}"
                if i == 0:
                    metadata = dist_colorize(metadata, match.distance)
                else:
                    metadata = ui.colorize("text_highlight_minor", metadata)
                line1 = [index, distance, metadata]
                ui.print_(f" {' '.join(line1)}")

                # Penalties.
                penalties = penalty_string(match.distance, 3)
                if penalties:
                    ui.print_(f"{' ' * 13}{penalties}")

                # Disambiguation
                disambig = disambig_string(match.info)
                if disambig:
                    ui.print_(f"{' ' * 13}{disambig}")

            # Ask the user for a choice.
            sel = ui.input_options(choice_opts, numrange=(1, len(candidates)))
            if sel == "m":
                pass
            elif sel in choice_actions:
                return choice_actions[sel]
            else:  # Numerical selection.
                match = candidates[sel - 1]
                if sel != 1:
                    # When choosing anything but the first match,
                    # disable the default action.
                    require = True
        bypass_candidates = False

        # Show what we're about to do.
        if singleton:
            show_item_change(item, match)
        else:
            show_change(cur_artist, cur_album, match)

        # Exact match => tag automatically if we're not in timid mode.
        if rec == Recommendation.strong and not config["import"]["timid"]:
            return match

        # Ask for confirmation.
        default = config["import"]["default_action"].as_choice(
            {
                "apply": "a",
                "skip": "s",
                "asis": "u",
                "none": None,
            }
        )
        if default is None:
            require = True
        # Bell ring when user interaction is needed.
        if config["import"]["bell"]:
            ui.print_("\a", end="")
        sel = ui.input_options(
            ("Apply", "More candidates", *choice_opts),
            require=require,
            default=default,
        )
        if sel == "a":
            return match
        elif sel in choice_actions:
            return choice_actions[sel]


def manual_search(session, task):
    """Get a new `Proposal` using manual search criteria.

    Input either an artist and album (for full albums) or artist and
    track name (for singletons) for manual search.
    """
    artist = ui.input_("Artist:").strip()
    name = ui.input_("Album:" if task.is_album else "Track:").strip()

    if task.is_album:
        _, _, prop = autotag.tag_album(task.items, artist, name)
        return prop
    else:
        return autotag.tag_item(task.item, artist, name)


def manual_id(session, task):
    """Get a new `Proposal` using a manually-entered ID.

    Input an ID, either for an album ("release") or a track ("recording").
    """
    prompt = f"Enter {'release' if task.is_album else 'recording'} ID:"
    search_id = ui.input_(prompt).strip()

    if task.is_album:
        _, _, prop = autotag.tag_album(task.items, search_ids=search_id.split())
        return prop
    else:
        return autotag.tag_item(task.item, search_ids=search_id.split())


def abort_action(session, task):
    """A prompt choice callback that aborts the importer."""
    raise importer.ImportAbortError()
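A sketch of the plugin side of `_get_choices`, under the conventions documented there (the handler name and choice are illustrative): a `before_choose_candidate` handler returns a list of `PromptChoice`s whose callbacks receive the session and the task.

    def before_choose_candidate_listener(session, task):
        # Offer one extra prompt choice; returning None from the callback
        # is neither an Action nor a Proposal, so the prompt is shown again.
        return [PromptChoice("p", "Print tracks", lambda s, t: None)]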
25 beets/ui/commands/list.py Normal file
@@ -0,0 +1,25 @@
"""The 'list' command: query and show library contents."""
|
||||
|
||||
from beets import ui
|
||||
|
||||
|
||||
def list_items(lib, query, album, fmt=""):
|
||||
"""Print out items in lib matching query. If album, then search for
|
||||
albums instead of single items.
|
||||
"""
|
||||
if album:
|
||||
for album in lib.albums(query):
|
||||
ui.print_(format(album, fmt))
|
||||
else:
|
||||
for item in lib.items(query):
|
||||
ui.print_(format(item, fmt))
|
||||
|
||||
|
||||
def list_func(lib, opts, args):
|
||||
list_items(lib, args, opts.album)
|
||||
|
||||
|
||||
list_cmd = ui.Subcommand("list", help="query the library", aliases=("ls",))
|
||||
list_cmd.parser.usage += "\nExample: %prog -f '$album: $title' artist:beatles"
|
||||
list_cmd.parser.add_all_common_options()
|
||||
list_cmd.func = list_func
|
||||
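The usage string above doubles as a concrete CLI example; the equivalent direct call might look like this (assuming an open beets Library `lib`):

    # Print each matching item as "$album: $title".
    list_items(lib, "artist:beatles", album=False, fmt="$album: $title")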
162 beets/ui/commands/modify.py Normal file
@@ -0,0 +1,162 @@
"""The `modify` command: change metadata fields."""
|
||||
|
||||
from beets import library, ui
|
||||
from beets.util import functemplate
|
||||
|
||||
from .utils import do_query
|
||||
|
||||
|
||||
def modify_items(lib, mods, dels, query, write, move, album, confirm, inherit):
|
||||
"""Modifies matching items according to user-specified assignments and
|
||||
deletions.
|
||||
|
||||
`mods` is a dictionary of field and value pairse indicating
|
||||
assignments. `dels` is a list of fields to be deleted.
|
||||
"""
|
||||
    # Parse key=value specifications into a dictionary.
    model_cls = library.Album if album else library.Item

    # Get the items to modify.
    items, albums = do_query(lib, query, album, False)
    objs = albums if album else items

    # Apply changes *temporarily*, preview them, and collect modified
    # objects.
    ui.print_(f"Modifying {len(objs)} {'album' if album else 'item'}s.")
    changed = []
    templates = {
        key: functemplate.template(value) for key, value in mods.items()
    }
    for obj in objs:
        obj_mods = {
            key: model_cls._parse(key, obj.evaluate_template(templates[key]))
            for key in mods.keys()
        }
        if print_and_modify(obj, obj_mods, dels) and obj not in changed:
            changed.append(obj)

    # Still something to do?
    if not changed:
        ui.print_("No changes to make.")
        return

    # Confirm action.
    if confirm:
        if write and move:
            extra = ", move and write tags"
        elif write:
            extra = " and write tags"
        elif move:
            extra = " and move"
        else:
            extra = ""

        changed = ui.input_select_objects(
            f"Really modify{extra}",
            changed,
            lambda o: print_and_modify(o, mods, dels),
        )

    # Apply changes to database and files
    with lib.transaction():
        for obj in changed:
            obj.try_sync(write, move, inherit)


def print_and_modify(obj, mods, dels):
    """Print the modifications to an item and return a bool indicating
    whether any changes were made.

    `mods` is a dictionary of fields and values to update on the object;
    `dels` is a sequence of fields to delete.
    """
    obj.update(mods)
    for field in dels:
        try:
            del obj[field]
        except KeyError:
            pass
    return ui.show_model_changes(obj)


def modify_parse_args(args):
    """Split the arguments for the modify subcommand into query parts,
    assignments (field=value), and deletions (field!). Returns the result as
    a three-tuple in that order.
    """
    mods = {}
    dels = []
    query = []
    for arg in args:
        if arg.endswith("!") and "=" not in arg and ":" not in arg:
            dels.append(arg[:-1])  # Strip trailing !.
        elif "=" in arg and ":" not in arg.split("=", 1)[0]:
            key, val = arg.split("=", 1)
            mods[key] = val
        else:
            query.append(arg)
    return query, mods, dels


def modify_func(lib, opts, args):
    query, mods, dels = modify_parse_args(args)
    if not mods and not dels:
        raise ui.UserError("no modifications specified")
    modify_items(
        lib,
        mods,
        dels,
        query,
        ui.should_write(opts.write),
        ui.should_move(opts.move),
        opts.album,
        not opts.yes,
        opts.inherit,
    )


modify_cmd = ui.Subcommand(
    "modify", help="change metadata fields", aliases=("mod",)
)
modify_cmd.parser.add_option(
    "-m",
    "--move",
    action="store_true",
    dest="move",
    help="move files in the library directory",
)
modify_cmd.parser.add_option(
    "-M",
    "--nomove",
    action="store_false",
    dest="move",
    help="don't move files in library",
)
modify_cmd.parser.add_option(
    "-w",
    "--write",
    action="store_true",
    default=None,
    help="write new metadata to files' tags (default)",
)
modify_cmd.parser.add_option(
    "-W",
    "--nowrite",
    action="store_false",
    dest="write",
    help="don't write metadata (opposite of -w)",
)
modify_cmd.parser.add_album_option()
modify_cmd.parser.add_format_option(target="item")
modify_cmd.parser.add_option(
    "-y", "--yes", action="store_true", help="skip confirmation"
)
modify_cmd.parser.add_option(
    "-I",
    "--noinherit",
    action="store_false",
    dest="inherit",
    default=True,
    help="when modifying albums, don't also change item data",
)
modify_cmd.func = modify_func
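A worked example of `modify_parse_args` following its three parsing rules (the argument values are illustrative):

    query, mods, dels = modify_parse_args(
        ["artist:beatles", "album=Revolver", "comments!"]
    )
    # query == ["artist:beatles"]    (neither "=" nor a trailing "!")
    # mods == {"album": "Revolver"}  ("=" with no ":" in the key)
    # dels == ["comments"]           (trailing "!", no "=" or ":")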
200 beets/ui/commands/move.py Normal file
@@ -0,0 +1,200 @@
"""The 'move' command: Move/copy files to the library or a new base directory."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from beets import logging, ui
|
||||
from beets.util import MoveOperation, displayable_path, normpath, syspath
|
||||
|
||||
from .utils import do_query
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from beets.util import PathLike
|
||||
|
||||
# Global logger.
|
||||
log = logging.getLogger("beets")
|
||||
|
||||
|
||||
def show_path_changes(path_changes):
|
||||
"""Given a list of tuples (source, destination) that indicate the
|
||||
path changes, log the changes as INFO-level output to the beets log.
|
||||
The output is guaranteed to be unicode.
|
||||
|
||||
Every pair is shown on a single line if the terminal width permits it,
|
||||
else it is split over two lines. E.g.,
|
||||
|
||||
Source -> Destination
|
||||
|
||||
vs.
|
||||
|
||||
Source
|
||||
-> Destination
|
||||
"""
|
||||
sources, destinations = zip(*path_changes)
|
||||
|
||||
# Ensure unicode output
|
||||
sources = list(map(displayable_path, sources))
|
||||
destinations = list(map(displayable_path, destinations))
|
||||
|
||||
# Calculate widths for terminal split
|
||||
col_width = (ui.term_width() - len(" -> ")) // 2
|
||||
max_width = len(max(sources + destinations, key=len))
|
||||
|
||||
if max_width > col_width:
|
||||
# Print every change over two lines
|
||||
for source, dest in zip(sources, destinations):
|
||||
color_source, color_dest = ui.colordiff(source, dest)
|
||||
ui.print_(f"{color_source} \n -> {color_dest}")
|
||||
else:
|
||||
# Print every change on a single line, and add a header
|
||||
title_pad = max_width - len("Source ") + len(" -> ")
|
||||
|
||||
ui.print_(f"Source {' ' * title_pad} Destination")
|
||||
for source, dest in zip(sources, destinations):
|
||||
pad = max_width - len(source)
|
||||
color_source, color_dest = ui.colordiff(source, dest)
|
||||
ui.print_(f"{color_source} {' ' * pad} -> {color_dest}")
|
||||
|
||||
|
||||
def move_items(
|
||||
lib,
|
||||
dest_path: PathLike,
|
||||
query,
|
||||
copy,
|
||||
album,
|
||||
pretend,
|
||||
confirm=False,
|
||||
export=False,
|
||||
):
|
||||
"""Moves or copies items to a new base directory, given by dest. If
|
||||
dest is None, then the library's base directory is used, making the
|
||||
command "consolidate" files.
|
||||
"""
|
||||
    dest = os.fsencode(dest_path) if dest_path else dest_path
    items, albums = do_query(lib, query, album, False)
    objs = albums if album else items
    num_objs = len(objs)

    # Filter out files that don't need to be moved.
    def isitemmoved(item):
        return item.path != item.destination(basedir=dest)

    def isalbummoved(album):
        return any(isitemmoved(i) for i in album.items())

    objs = [o for o in objs if (isalbummoved if album else isitemmoved)(o)]
    num_unmoved = num_objs - len(objs)
    # Report unmoved files that match the query.
    unmoved_msg = ""
    if num_unmoved > 0:
        unmoved_msg = f" ({num_unmoved} already in place)"

    copy = copy or export  # Exporting always copies.
    action = "Copying" if copy else "Moving"
    act = "copy" if copy else "move"
    entity = "album" if album else "item"
    log.info(
        "{} {} {}{}{}.",
        action,
        len(objs),
        entity,
        "s" if len(objs) != 1 else "",
        unmoved_msg,
    )
    if not objs:
        return

    if pretend:
        if album:
            show_path_changes(
                [
                    (item.path, item.destination(basedir=dest))
                    for obj in objs
                    for item in obj.items()
                ]
            )
        else:
            show_path_changes(
                [(obj.path, obj.destination(basedir=dest)) for obj in objs]
            )
    else:
        if confirm:
            objs = ui.input_select_objects(
                f"Really {act}",
                objs,
                lambda o: show_path_changes(
                    [(o.path, o.destination(basedir=dest))]
                ),
            )

        for obj in objs:
            log.debug("moving: {.filepath}", obj)

            if export:
                # Copy without affecting the database.
                obj.move(
                    operation=MoveOperation.COPY, basedir=dest, store=False
                )
            else:
                # Ordinary move/copy: store the new path.
                if copy:
                    obj.move(operation=MoveOperation.COPY, basedir=dest)
                else:
                    obj.move(operation=MoveOperation.MOVE, basedir=dest)


def move_func(lib, opts, args):
    dest = opts.dest
    if dest is not None:
        dest = normpath(dest)
        if not os.path.isdir(syspath(dest)):
            raise ui.UserError(f"no such directory: {displayable_path(dest)}")

    move_items(
        lib,
        dest,
        args,
        opts.copy,
        opts.album,
        opts.pretend,
        opts.timid,
        opts.export,
    )


move_cmd = ui.Subcommand("move", help="move or copy items", aliases=("mv",))
move_cmd.parser.add_option(
    "-d", "--dest", metavar="DIR", dest="dest", help="destination directory"
)
move_cmd.parser.add_option(
    "-c",
    "--copy",
    default=False,
    action="store_true",
    help="copy instead of moving",
)
move_cmd.parser.add_option(
    "-p",
    "--pretend",
    default=False,
    action="store_true",
    help="show how files would be moved, but don't touch anything",
)
move_cmd.parser.add_option(
    "-t",
    "--timid",
    dest="timid",
    action="store_true",
    help="always confirm all actions",
)
move_cmd.parser.add_option(
    "-e",
    "--export",
    default=False,
    action="store_true",
    help="copy without changing the database path",
)
move_cmd.parser.add_album_option()
move_cmd.func = move_func
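A minimal sketch of `show_path_changes` (the paths are hypothetical); each tuple pairs a source with its destination:

    show_path_changes(
        [
            (b"/old/a.mp3", b"/music/Artist/Album/a.mp3"),
            (b"/old/b.mp3", b"/music/Artist/Album/b.mp3"),
        ]
    )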
84 beets/ui/commands/remove.py Normal file
@@ -0,0 +1,84 @@
"""The `remove` command: remove items from the library (and optionally delete files)."""
|
||||
|
||||
from beets import ui
|
||||
|
||||
from .utils import do_query
|
||||
|
||||
|
||||
def remove_items(lib, query, album, delete, force):
|
||||
"""Remove items matching query from lib. If album, then match and
|
||||
remove whole albums. If delete, also remove files from disk.
|
||||
"""
|
||||
# Get the matching items.
|
||||
items, albums = do_query(lib, query, album)
|
||||
objs = albums if album else items
|
||||
|
||||
# Confirm file removal if not forcing removal.
|
||||
if not force:
|
||||
# Prepare confirmation with user.
|
||||
album_str = (
|
||||
f" in {len(albums)} album{'s' if len(albums) > 1 else ''}"
|
||||
if album
|
||||
else ""
|
||||
)
|
||||
|
||||
if delete:
|
||||
fmt = "$path - $title"
|
||||
prompt = "Really DELETE"
|
||||
prompt_all = (
|
||||
"Really DELETE"
|
||||
f" {len(items)} file{'s' if len(items) > 1 else ''}{album_str}"
|
||||
)
|
||||
else:
|
||||
fmt = ""
|
||||
prompt = "Really remove from the library?"
|
||||
prompt_all = (
|
||||
"Really remove"
|
||||
f" {len(items)} item{'s' if len(items) > 1 else ''}{album_str}"
|
||||
" from the library?"
|
||||
)
|
||||
|
||||
# Helpers for printing affected items
|
||||
def fmt_track(t):
|
||||
ui.print_(format(t, fmt))
|
||||
|
||||
def fmt_album(a):
|
||||
ui.print_()
|
||||
for i in a.items():
|
||||
fmt_track(i)
|
||||
|
||||
fmt_obj = fmt_album if album else fmt_track
|
||||
|
||||
# Show all the items.
|
||||
for o in objs:
|
||||
fmt_obj(o)
|
||||
|
||||
# Confirm with user.
|
||||
objs = ui.input_select_objects(
|
||||
prompt, objs, fmt_obj, prompt_all=prompt_all
|
||||
)
|
||||
|
||||
if not objs:
|
||||
return
|
||||
|
||||
# Remove (and possibly delete) items.
|
||||
with lib.transaction():
|
||||
for obj in objs:
|
||||
obj.remove(delete)
|
||||
|
||||
|
||||
def remove_func(lib, opts, args):
|
||||
remove_items(lib, args, opts.album, opts.delete, opts.force)
|
||||
|
||||
|
||||
remove_cmd = ui.Subcommand(
|
||||
"remove", help="remove matching items from the library", aliases=("rm",)
|
||||
)
|
||||
remove_cmd.parser.add_option(
|
||||
"-d", "--delete", action="store_true", help="also remove files from disk"
|
||||
)
|
||||
remove_cmd.parser.add_option(
|
||||
"-f", "--force", action="store_true", help="do not ask when removing items"
|
||||
)
|
||||
remove_cmd.parser.add_album_option()
|
||||
remove_cmd.func = remove_func
|
||||
62 beets/ui/commands/stats.py Normal file
@@ -0,0 +1,62 @@
"""The 'stats' command: show library statistics."""
|
||||
|
||||
import os
|
||||
|
||||
from beets import logging, ui
|
||||
from beets.util import syspath
|
||||
from beets.util.units import human_bytes, human_seconds
|
||||
|
||||
# Global logger.
|
||||
log = logging.getLogger("beets")
|
||||
|
||||
|
||||
def show_stats(lib, query, exact):
|
||||
"""Shows some statistics about the matched items."""
|
||||
items = lib.items(query)
|
||||
|
||||
total_size = 0
|
||||
total_time = 0.0
|
||||
total_items = 0
|
||||
artists = set()
|
||||
albums = set()
|
||||
album_artists = set()
|
||||
|
||||
for item in items:
|
||||
if exact:
|
||||
try:
|
||||
total_size += os.path.getsize(syspath(item.path))
|
||||
except OSError as exc:
|
||||
log.info("could not get size of {.path}: {}", item, exc)
|
||||
else:
|
||||
total_size += int(item.length * item.bitrate / 8)
|
||||
total_time += item.length
|
||||
total_items += 1
|
||||
artists.add(item.artist)
|
||||
album_artists.add(item.albumartist)
|
||||
if item.album_id:
|
||||
albums.add(item.album_id)
|
||||
|
||||
size_str = human_bytes(total_size)
|
||||
if exact:
|
||||
size_str += f" ({total_size} bytes)"
|
||||
|
||||
ui.print_(f"""Tracks: {total_items}
|
||||
Total time: {human_seconds(total_time)}
|
||||
{f" ({total_time:.2f} seconds)" if exact else ""}
|
||||
{"Total size" if exact else "Approximate total size"}: {size_str}
|
||||
Artists: {len(artists)}
|
||||
Albums: {len(albums)}
|
||||
Album artists: {len(album_artists)}""")
|
||||
|
||||
|
||||
def stats_func(lib, opts, args):
|
||||
show_stats(lib, args, opts.exact)
|
||||
|
||||
|
||||
stats_cmd = ui.Subcommand(
|
||||
"stats", help="show statistics about the library or a query"
|
||||
)
|
||||
stats_cmd.parser.add_option(
|
||||
"-e", "--exact", action="store_true", help="exact size and time"
|
||||
)
|
||||
stats_cmd.func = stats_func
|
||||
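A back-of-the-envelope check of the approximate-size formula used when --exact is off, assuming a 240-second track at 320 kbps (bitrate stored in bits per second):

    approx_bytes = int(240.0 * 320_000 / 8)  # 9_600_000 bytes, roughly 9 MiB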
196 beets/ui/commands/update.py Normal file
@@ -0,0 +1,196 @@
"""The `update` command: Update library contents according to on-disk tags."""
|
||||
|
||||
import os
|
||||
|
||||
from beets import library, logging, ui
|
||||
from beets.util import ancestry, syspath
|
||||
|
||||
from .utils import do_query
|
||||
|
||||
# Global logger.
|
||||
log = logging.getLogger("beets")
|
||||
|
||||
|
||||
def update_items(lib, query, album, move, pretend, fields, exclude_fields=None):
|
||||
"""For all the items matched by the query, update the library to
|
||||
reflect the item's embedded tags.
|
||||
:param fields: The fields to be stored. If not specified, all fields will
|
||||
be.
|
||||
:param exclude_fields: The fields to not be stored. If not specified, all
|
||||
fields will be.
|
||||
"""
|
||||
    with lib.transaction():
        items, _ = do_query(lib, query, album)
        if move and fields is not None and "path" not in fields:
            # Special case: if an item needs to be moved, the path field has
            # to be updated; otherwise the new path will not be reflected in
            # the database.
            fields.append("path")
        if fields is None:
            # no fields were provided, update all media fields
            item_fields = fields or library.Item._media_fields
            if move and "path" not in item_fields:
                # move is enabled, add 'path' to the list of fields to update
                item_fields.add("path")
        else:
            # fields was provided, just update those
            item_fields = fields
        # get all the album fields to update
        album_fields = fields or library.Album._fields.keys()
        if exclude_fields:
            # remove any excluded fields from the item and album sets
            item_fields = [f for f in item_fields if f not in exclude_fields]
            album_fields = [f for f in album_fields if f not in exclude_fields]

        # Walk through the items and pick up their changes.
        affected_albums = set()
        for item in items:
            # Item deleted?
            if not item.path or not os.path.exists(syspath(item.path)):
                ui.print_(format(item))
                ui.print_(ui.colorize("text_error", " deleted"))
                if not pretend:
                    item.remove(True)
                affected_albums.add(item.album_id)
                continue

            # Did the item change since last checked?
            if item.current_mtime() <= item.mtime:
                log.debug(
                    "skipping {0.filepath} because mtime is up to date ({0.mtime})",
                    item,
                )
                continue

            # Read new data.
            try:
                item.read()
            except library.ReadError as exc:
                log.error("error reading {.filepath}: {}", item, exc)
                continue

            # Special-case album artist when it matches track artist. (Hacky
            # but necessary for preserving album-level metadata for non-
            # autotagged imports.)
            if not item.albumartist:
                old_item = lib.get_item(item.id)
                if old_item.albumartist == old_item.artist == item.artist:
                    item.albumartist = old_item.albumartist
                    item._dirty.discard("albumartist")

            # Check for and display changes.
            changed = ui.show_model_changes(item, fields=item_fields)

            # Save changes.
            if not pretend:
                if changed:
                    # Move the item if it's in the library.
                    if move and lib.directory in ancestry(item.path):
                        item.move(store=False)

                    item.store(fields=item_fields)
                    affected_albums.add(item.album_id)
                else:
                    # The file's mtime was different, but there were no
                    # changes to the metadata. Store the new mtime,
                    # which is set in the call to read(), so we don't
                    # check this again in the future.
                    item.store(fields=item_fields)

        # Skip album changes while pretending.
        if pretend:
            return

        # Modify affected albums to reflect changes in their items.
        for album_id in affected_albums:
            if album_id is None:  # Singletons.
                continue
            album = lib.get_album(album_id)
            if not album:  # Empty albums have already been removed.
                log.debug("emptied album {}", album_id)
                continue
            first_item = album.items().get()

            # Update album structure to reflect an item in it.
            for key in library.Album.item_keys:
                album[key] = first_item[key]
            album.store(fields=album_fields)

            # Move album art (and any inconsistent items).
            if move and lib.directory in ancestry(first_item.path):
                log.debug("moving album {}", album_id)

                # Manually moving and storing the album.
                items = list(album.items())
                for item in items:
                    item.move(store=False, with_album=False)
                    item.store(fields=item_fields)
                album.move(store=False)
                album.store(fields=album_fields)


def update_func(lib, opts, args):
    # Verify that the library folder exists to prevent accidental wipes.
    if not os.path.isdir(syspath(lib.directory)):
        ui.print_("Library path is unavailable or does not exist.")
        ui.print_(lib.directory)
        if not ui.input_yn("Are you sure you want to continue (y/n)?", True):
            return
    update_items(
        lib,
        args,
        opts.album,
        ui.should_move(opts.move),
        opts.pretend,
        opts.fields,
        opts.exclude_fields,
    )


update_cmd = ui.Subcommand(
    "update",
    help="update the library",
    aliases=(
        "upd",
        "up",
    ),
)
update_cmd.parser.add_album_option()
update_cmd.parser.add_format_option()
update_cmd.parser.add_option(
    "-m",
    "--move",
    action="store_true",
    dest="move",
    help="move files in the library directory",
)
update_cmd.parser.add_option(
    "-M",
    "--nomove",
    action="store_false",
    dest="move",
    help="don't move files in library",
)
update_cmd.parser.add_option(
    "-p",
    "--pretend",
    action="store_true",
    help="show all changes but do nothing",
)
update_cmd.parser.add_option(
    "-F",
    "--field",
    default=None,
    action="append",
    dest="fields",
    help="list of fields to update",
)
update_cmd.parser.add_option(
    "-e",
    "--exclude-field",
    default=None,
    action="append",
    dest="exclude_fields",
    help="list of fields to exclude from updates",
)
update_cmd.func = update_func
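A direct-call sketch of the command above (the query and field name are illustrative): previewing a genre-only refresh without moving files or touching the database.

    update_items(
        lib,                 # an open beets Library
        ["artist:beatles"],  # query
        False,               # album
        False,               # move
        True,                # pretend: show changes only
        ["genre"],           # fields to update
    )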
29 beets/ui/commands/utils.py Normal file
@@ -0,0 +1,29 @@
"""Utility functions for beets UI commands."""
|
||||
|
||||
from beets import ui
|
||||
|
||||
|
||||
def do_query(lib, query, album, also_items=True):
|
||||
"""For commands that operate on matched items, performs a query
|
||||
and returns a list of matching items and a list of matching
|
||||
albums. (The latter is only nonempty when album is True.) Raises
|
||||
a UserError if no items match. also_items controls whether, when
|
||||
fetching albums, the associated items should be fetched also.
|
||||
"""
|
||||
if album:
|
||||
albums = list(lib.albums(query))
|
||||
items = []
|
||||
if also_items:
|
||||
for al in albums:
|
||||
items += al.items()
|
||||
|
||||
else:
|
||||
albums = []
|
||||
items = list(lib.items(query))
|
||||
|
||||
if album and not albums:
|
||||
raise ui.UserError("No matching albums found.")
|
||||
elif not album and not items:
|
||||
raise ui.UserError("No matching items found.")
|
||||
|
||||
return items, albums
|
||||
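A usage sketch, assuming an open beets Library `lib`:

    items, albums = do_query(lib, "artist:beatles", album=True)
    # `albums` holds the matching Album objects; with the default
    # also_items=True, `items` collects every track of those albums.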
23 beets/ui/commands/version.py Normal file
@@ -0,0 +1,23 @@
"""The 'version' command: show version information."""
|
||||
|
||||
from platform import python_version
|
||||
|
||||
import beets
|
||||
from beets import plugins, ui
|
||||
|
||||
|
||||
def show_version(*args):
|
||||
ui.print_(f"beets version {beets.__version__}")
|
||||
ui.print_(f"Python version {python_version()}")
|
||||
# Show plugins.
|
||||
names = sorted(p.name for p in plugins.find_plugins())
|
||||
if names:
|
||||
ui.print_("plugins:", ", ".join(names))
|
||||
else:
|
||||
ui.print_("no plugins loaded")
|
||||
|
||||
|
||||
version_cmd = ui.Subcommand("version", help="output version information")
|
||||
version_cmd.func = show_version
|
||||
|
||||
__all__ = ["version_cmd"]
|
||||
60 beets/ui/commands/write.py Normal file
@@ -0,0 +1,60 @@
"""The `write` command: write tag information to files."""
|
||||
|
||||
import os
|
||||
|
||||
from beets import library, logging, ui
|
||||
from beets.util import syspath
|
||||
|
||||
from .utils import do_query
|
||||
|
||||
# Global logger.
|
||||
log = logging.getLogger("beets")
|
||||
|
||||
|
||||
def write_items(lib, query, pretend, force):
|
||||
"""Write tag information from the database to the respective files
|
||||
in the filesystem.
|
||||
"""
|
||||
items, _ = do_query(lib, query, False, False)
|
||||
|
||||
for item in items:
|
||||
# Item deleted?
|
||||
if not os.path.exists(syspath(item.path)):
|
||||
log.info("missing file: {.filepath}", item)
|
||||
continue
|
||||
|
||||
# Get an Item object reflecting the "clean" (on-disk) state.
|
||||
try:
|
||||
clean_item = library.Item.from_path(item.path)
|
||||
except library.ReadError as exc:
|
||||
log.error("error reading {.filepath}: {}", item, exc)
|
||||
continue
|
||||
|
||||
# Check for and display changes.
|
||||
changed = ui.show_model_changes(
|
||||
item, clean_item, library.Item._media_tag_fields, force
|
||||
)
|
||||
if (changed or force) and not pretend:
|
||||
# We use `try_sync` here to keep the mtime up to date in the
|
||||
# database.
|
||||
item.try_sync(True, False)
|
||||
|
||||
|
||||
def write_func(lib, opts, args):
|
||||
write_items(lib, args, opts.pretend, opts.force)
|
||||
|
||||
|
||||
write_cmd = ui.Subcommand("write", help="write tag information to files")
|
||||
write_cmd.parser.add_option(
|
||||
"-p",
|
||||
"--pretend",
|
||||
action="store_true",
|
||||
help="show all changes but do nothing",
|
||||
)
|
||||
write_cmd.parser.add_option(
|
||||
"-f",
|
||||
"--force",
|
||||
action="store_true",
|
||||
help="write tags even if the existing tags match the database",
|
||||
)
|
||||
write_cmd.func = write_func
|
||||
@@ -28,7 +28,6 @@ import sys
import tempfile
import threading
import traceback
import warnings
import weakref
from collections import Counter
from collections.abc import Sequence
@@ -43,12 +42,10 @@ from typing import (
    TYPE_CHECKING,
    Any,
    AnyStr,
    Callable,
    ClassVar,
    Generic,
    NamedTuple,
    TypeVar,
    Union,
    cast,
)

|
|
@@ -58,7 +55,7 @@ import beets
from beets.util import hidden

if TYPE_CHECKING:
    from collections.abc import Iterable, Iterator
    from collections.abc import Callable, Iterable, Iterator
    from logging import Logger

    from beets.library import Item
@@ -69,8 +66,8 @@ WINDOWS_MAGIC_PREFIX = "\\\\?\\"
T = TypeVar("T")
T_co = TypeVar("T_co", covariant=True)
_R_co = TypeVar("_R_co", covariant=True)
PathLike = Union[str, bytes, Path]
StrPath = Union[str, Path]
StrPath = str | Path
PathLike = StrPath | bytes
Replacements = Sequence[tuple[Pattern[str], str]]

# Here for now to allow for an easy replace later on
@@ -173,6 +170,12 @@ class MoveOperation(Enum):
    REFLINK_AUTO = 5


class PromptChoice(NamedTuple):
    short: str
    long: str
    callback: Any


def normpath(path: PathLike) -> bytes:
    """Provide the canonical form of the path suitable for storing in
    the database.
@@ -582,10 +585,14 @@ def hardlink(path: bytes, dest: bytes, replace: bool = False):
    if samefile(path, dest):
        return

    if os.path.exists(syspath(dest)) and not replace:
    # Dereference symlinks, expand "~", and convert relative paths to absolute
    origin_path = Path(os.fsdecode(path)).expanduser().resolve()
    dest_path = Path(os.fsdecode(dest)).expanduser().resolve()

    if dest_path.exists() and not replace:
        raise FilesystemError("file exists", "rename", (path, dest))
    try:
        os.link(syspath(path), syspath(dest))
        dest_path.hardlink_to(origin_path)
    except NotImplementedError:
        raise FilesystemError(
            "OS does not support hard links.",
            "link",
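
For reference, `Path.hardlink_to` (Python 3.10+) turns the path it is called
on into a hard link to the given target, mirroring the old
`os.link(source, dest)` call. A small illustration with placeholder names:

    from pathlib import Path

    origin = Path("original.flac")
    dest = Path("copy.flac")
    dest.hardlink_to(origin)  # afterwards, both names share one inode
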
@@ -1205,26 +1212,3 @@ def get_temp_filename(
def unique_list(elements: Iterable[T]) -> list[T]:
    """Return a list with unique elements in the original order."""
    return list(dict.fromkeys(elements))


def deprecate_imports(
    old_module: str, new_module_by_name: dict[str, str], name: str, version: str
) -> Any:
    """Handle deprecated module imports by redirecting to new locations.

    Facilitates gradual migration of module structure by intercepting import
    attempts for relocated functionality. Issues deprecation warnings while
    transparently providing access to the moved implementation, allowing
    existing code to continue working during transition periods.
    """
    if new_module := new_module_by_name.get(name):
        warnings.warn(
            (
                f"'{old_module}.{name}' is deprecated and will be removed"
                f" in {version}. Use '{new_module}.{name}' instead."
            ),
            DeprecationWarning,
            stacklevel=2,
        )
        return getattr(import_module(new_module), name)
    raise AttributeError(f"module '{old_module}' has no attribute '{name}'")
@@ -24,9 +24,10 @@ import platform
import re
import subprocess
from abc import ABC, abstractmethod
from contextlib import suppress
from enum import Enum
from itertools import chain
from typing import Any, ClassVar, Mapping
from typing import TYPE_CHECKING, Any, ClassVar
from urllib.parse import urlencode

from beets import logging, util
@@ -37,6 +38,9 @@ from beets.util import (
    syspath,
)

if TYPE_CHECKING:
    from collections.abc import Mapping

PROXY_URL = "https://images.weserv.nl/"

log = logging.getLogger("beets")
@@ -265,7 +269,8 @@ class IMBackend(LocalBackend):
        # with regards to the height.
        # ImageMagick already seems to default to no interlace, but we include
        # it here for the sake of explicitness.
        cmd: list[str] = self.convert_cmd + [
        cmd: list[str] = [
            *self.convert_cmd,
            syspath(path_in, prefix=False),
            "-resize",
            f"{maxwidth}x>",
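
This and the following hunks swap list concatenation for unpacking. Both
spellings build the same command list; a quick sketch with made-up values:

    convert_cmd = ["magick"]
    cmd = [*convert_cmd, "-resize", "500x>"]  # one list literal
    assert cmd == convert_cmd + ["-resize", "500x>"]
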
@@ -295,7 +300,8 @@ class IMBackend(LocalBackend):
        return path_out

    def get_size(self, path_in: bytes) -> tuple[int, int] | None:
        cmd: list[str] = self.identify_cmd + [
        cmd: list[str] = [
            *self.identify_cmd,
            "-format",
            "%w %h",
            syspath(path_in, prefix=False),
@@ -333,7 +339,8 @@ class IMBackend(LocalBackend):
        if not path_out:
            path_out = get_temp_filename(__name__, "deinterlace_IM_", path_in)

        cmd = self.convert_cmd + [
        cmd = [
            *self.convert_cmd,
            syspath(path_in, prefix=False),
            "-interlace",
            "none",
@@ -348,7 +355,7 @@ class IMBackend(LocalBackend):
        return path_in

    def get_format(self, path_in: bytes) -> str | None:
        cmd = self.identify_cmd + ["-format", "%[magick]", syspath(path_in)]
        cmd = [*self.identify_cmd, "-format", "%[magick]", syspath(path_in)]

        try:
            # Image formats should really only be ASCII strings such as "PNG",
@@ -365,7 +372,8 @@ class IMBackend(LocalBackend):
        target: bytes,
        deinterlaced: bool,
    ) -> bytes:
        cmd = self.convert_cmd + [
        cmd = [
            *self.convert_cmd,
            syspath(source),
            *(["-interlace", "none"] if deinterlaced else []),
            syspath(target),
@@ -397,14 +405,16 @@ class IMBackend(LocalBackend):
        # to grayscale and then pipe them into the `compare` command.
        # On Windows, ImageMagick doesn't support the magic \\?\ prefix
        # on paths, so we pass `prefix=False` to `syspath`.
        convert_cmd = self.convert_cmd + [
        convert_cmd = [
            *self.convert_cmd,
            syspath(im2, prefix=False),
            syspath(im1, prefix=False),
            "-colorspace",
            "gray",
            "MIFF:-",
        ]
        compare_cmd = self.compare_cmd + [
        compare_cmd = [
            *self.compare_cmd,
            "-define",
            "phash:colorspaces=sRGB,HCLp",
            "-metric",
@@ -484,7 +494,7 @@ class IMBackend(LocalBackend):
            ("-set", k, v) for k, v in metadata.items()
        )
        str_file = os.fsdecode(file)
        command = self.convert_cmd + [str_file, *assignments, str_file]
        command = [*self.convert_cmd, str_file, *assignments, str_file]

        util.command_output(command)
@@ -825,7 +835,7 @@ class ArtResizer:
            "jpeg": "jpg",
        }.get(new_format, new_format)

        fname, ext = os.path.splitext(path_in)
        fname, _ = os.path.splitext(path_in)
        path_new = fname + b"." + new_format.encode("utf8")

        # allows the exception to propagate, while still making sure a changed
@@ -837,7 +847,8 @@ class ArtResizer:
        )
        finally:
            if result_path != path_in:
                os.unlink(path_in)
                with suppress(OSError):
                    os.unlink(path_in)
        return result_path

    @property
60  beets/util/deprecation.py  Normal file

@@ -0,0 +1,60 @@
from __future__ import annotations

import warnings
from importlib import import_module
from typing import TYPE_CHECKING, Any

from packaging.version import Version

import beets

if TYPE_CHECKING:
    from logging import Logger


def _format_message(old: str, new: str | None = None) -> str:
    next_major = f"{Version(beets.__version__).major + 1}.0.0"
    msg = f"{old} is deprecated and will be removed in version {next_major}."
    if new:
        msg += f" Use {new} instead."

    return msg


def deprecate_for_user(
    logger: Logger, old: str, new: str | None = None
) -> None:
    logger.warning(_format_message(old, new))


def deprecate_for_maintainers(
    old: str, new: str | None = None, stacklevel: int = 1
) -> None:
    """Issue a deprecation warning visible to maintainers during development.

    Emits a DeprecationWarning that alerts developers about deprecated code
    patterns. Unlike user-facing warnings, these are primarily for internal
    code maintenance and appear during test runs or with warnings enabled.
    """
    warnings.warn(
        _format_message(old, new), DeprecationWarning, stacklevel=stacklevel + 1
    )


def deprecate_imports(
    old_module: str, new_module_by_name: dict[str, str], name: str
) -> Any:
    """Handle deprecated module imports by redirecting to new locations.

    Facilitates gradual migration of module structure by intercepting import
    attempts for relocated functionality. Issues deprecation warnings while
    transparently providing access to the moved implementation, allowing
    existing code to continue working during transition periods.
    """
    if new_module := new_module_by_name.get(name):
        deprecate_for_maintainers(
            f"'{old_module}.{name}'", f"'{new_module}.{name}'", stacklevel=2
        )

        return getattr(import_module(new_module), name)
    raise AttributeError(f"module '{old_module}' has no attribute '{name}'")
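
A sketch of how a shim module might wire this up (the name mapping below is
hypothetical):

    # In the old module: forward attribute access to the new home.
    def __getattr__(name: str):
        return deprecate_imports(
            __name__, {"sanitize_path": "beets.util.paths"}, name
        )
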
@@ -105,8 +105,6 @@ def compile_func(arg_names, statements, name="_the_func", debug=False):
        decorator_list=[],
    )

    # The ast.Module signature changed in 3.8 to accept a list of types to
    # ignore.
    mod = ast.Module([func_def], [])

    ast.fix_missing_locations(mod)
@@ -20,10 +20,9 @@ import os
import stat
import sys
from pathlib import Path
from typing import Union


def is_hidden(path: Union[bytes, Path]) -> bool:
def is_hidden(path: bytes | Path) -> bool:
    """
    Determine whether the given path is treated as a 'hidden file' by the OS.
    """
@@ -36,10 +36,13 @@ from __future__ import annotations
import queue
import sys
from threading import Lock, Thread
from typing import Callable, Generator, TypeVar
from typing import TYPE_CHECKING, TypeVar

from typing_extensions import TypeVarTuple, Unpack

if TYPE_CHECKING:
    from collections.abc import Callable, Generator

BUBBLE = "__PIPELINE_BUBBLE__"
POISON = "__PIPELINE_POISON__"
@@ -189,7 +192,7 @@ def stage(
        task: R | T | None = None
        while True:
            task = yield task
            task = func(*(args + (task,)))
            task = func(*args, task)

    return coro
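
With the new spelling, a stage still receives its fixed arguments followed by
the current task. A usage sketch (names are illustrative):

    @stage
    def add_n(n, task):
        # Invoked as func(*args, task): args == (n,), task is the pipeline item.
        return task + n
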
@@ -213,7 +216,7 @@ def mutator_stage(func: Callable[[Unpack[A], T], R]):
        task = None
        while True:
            task = yield task
            func(*(args + (task,)))
            func(*args, task)

    return coro
@@ -206,9 +206,14 @@ def extract_first(log, outpath, items):
    return real_path


def clear_item(item, log):
    if mediafile.MediaFile(syspath(item.path)).images:
        log.debug("Clearing art for {}", item)
        item.try_write(tags={"images": None})


def clear(log, lib, query):
    items = lib.items(query)
    log.info("Clearing album art from {} items", len(items))
    for item in items:
        log.debug("Clearing art for {}", item)
        item.try_write(tags={"images": None})
        clear_item(item, log)
290  beetsplug/_utils/musicbrainz.py  Normal file

@@ -0,0 +1,290 @@
"""Helpers for communicating with the MusicBrainz webservice.
|
||||
|
||||
Provides rate-limited HTTP session and convenience methods to fetch and
|
||||
normalize API responses.
|
||||
|
||||
This module centralizes request handling and response shaping so callers can
|
||||
work with consistently structured data without embedding HTTP or rate-limit
|
||||
logic throughout the codebase.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import operator
|
||||
from dataclasses import dataclass, field
|
||||
from functools import cached_property, singledispatchmethod, wraps
|
||||
from itertools import groupby
|
||||
from typing import TYPE_CHECKING, Any, Literal, ParamSpec, TypedDict, TypeVar
|
||||
|
||||
from requests_ratelimiter import LimiterMixin
|
||||
from typing_extensions import NotRequired, Unpack
|
||||
|
||||
from beets import config, logging
|
||||
|
||||
from .requests import RequestHandler, TimeoutAndRetrySession
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
from requests import Response
|
||||
|
||||
from .._typing import JSONDict
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LimiterTimeoutSession(LimiterMixin, TimeoutAndRetrySession):
|
||||
"""HTTP session that enforces rate limits."""
|
||||
|
||||
|
||||
Entity = Literal[
|
||||
"area",
|
||||
"artist",
|
||||
"collection",
|
||||
"event",
|
||||
"genre",
|
||||
"instrument",
|
||||
"label",
|
||||
"place",
|
||||
"recording",
|
||||
"release",
|
||||
"release-group",
|
||||
"series",
|
||||
"work",
|
||||
"url",
|
||||
]
|
||||
|
||||
|
||||
class LookupKwargs(TypedDict, total=False):
|
||||
includes: NotRequired[list[str]]
|
||||
|
||||
|
||||
class PagingKwargs(TypedDict, total=False):
|
||||
limit: NotRequired[int]
|
||||
offset: NotRequired[int]
|
||||
|
||||
|
||||
class SearchKwargs(PagingKwargs):
|
||||
query: NotRequired[str]
|
||||
|
||||
|
||||
class BrowseKwargs(LookupKwargs, PagingKwargs, total=False):
|
||||
pass
|
||||
|
||||
|
||||
class BrowseReleaseGroupsKwargs(BrowseKwargs, total=False):
|
||||
artist: NotRequired[str]
|
||||
collection: NotRequired[str]
|
||||
release: NotRequired[str]
|
||||
|
||||
|
||||
class BrowseRecordingsKwargs(BrowseReleaseGroupsKwargs, total=False):
|
||||
work: NotRequired[str]
|
||||
|
||||
|
||||
P = ParamSpec("P")
|
||||
R = TypeVar("R")
|
||||
|
||||
|
||||
def require_one_of(*keys: str) -> Callable[[Callable[P, R]], Callable[P, R]]:
|
||||
required = frozenset(keys)
|
||||
|
||||
def deco(func: Callable[P, R]) -> Callable[P, R]:
|
||||
@wraps(func)
|
||||
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||
# kwargs is a real dict at runtime; safe to inspect here
|
||||
if not required & kwargs.keys():
|
||||
required_str = ", ".join(sorted(required))
|
||||
raise ValueError(
|
||||
f"At least one of {required_str} filter is required"
|
||||
)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return deco
|
||||
|
||||
|
||||
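
An illustration of the guard with a hypothetical function (no HTTP involved):

    @require_one_of("artist", "release")
    def browse(**kwargs):
        return kwargs

    try:
        browse(limit=5)  # no required filter present
    except ValueError as err:
        print(err)  # "At least one of artist, release filter is required"
    print(browse(artist="xyz"))  # {'artist': 'xyz'}
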
@dataclass
class MusicBrainzAPI(RequestHandler):
    """High-level interface to the MusicBrainz WS/2 API.

    Responsibilities:

    - Configure the API host and request rate from application configuration.
    - Offer helpers to fetch common entity types and to run searches.
    - Normalize MusicBrainz responses so relation lists are grouped by target
      type for easier downstream consumption.

    Documentation: https://musicbrainz.org/doc/MusicBrainz_API
    """

    api_host: str = field(init=False)
    rate_limit: float = field(init=False)

    def __post_init__(self) -> None:
        mb_config = config["musicbrainz"]
        mb_config.add(
            {
                "host": "musicbrainz.org",
                "https": False,
                "ratelimit": 1,
                "ratelimit_interval": 1,
            }
        )

        hostname = mb_config["host"].as_str()
        if hostname == "musicbrainz.org":
            self.api_host, self.rate_limit = "https://musicbrainz.org", 1.0
        else:
            https = mb_config["https"].get(bool)
            self.api_host = f"http{'s' if https else ''}://{hostname}"
            self.rate_limit = (
                mb_config["ratelimit"].get(int)
                / mb_config["ratelimit_interval"].as_number()
            )

    @cached_property
    def api_root(self) -> str:
        return f"{self.api_host}/ws/2"

    def create_session(self) -> LimiterTimeoutSession:
        return LimiterTimeoutSession(per_second=self.rate_limit)

    def request(self, *args, **kwargs) -> Response:
        """Ensure all requests specify JSON response format by default."""
        kwargs.setdefault("params", {})
        kwargs["params"]["fmt"] = "json"
        return super().request(*args, **kwargs)

    def _get_resource(
        self, resource: str, includes: list[str] | None = None, **kwargs
    ) -> JSONDict:
        """Retrieve and normalize data from the API resource endpoint.

        If requested, includes are appended to the request. The response is
        passed through a normalizer that groups relation entries by their
        target type so that callers receive a consistently structured mapping.
        """
        if includes:
            kwargs["inc"] = "+".join(includes)

        return self._group_relations(
            self.get_json(f"{self.api_root}/{resource}", params=kwargs)
        )

    def _lookup(
        self, entity: Entity, id_: str, **kwargs: Unpack[LookupKwargs]
    ) -> JSONDict:
        return self._get_resource(f"{entity}/{id_}", **kwargs)

    def _browse(self, entity: Entity, **kwargs) -> list[JSONDict]:
        return self._get_resource(entity, **kwargs).get(f"{entity}s", [])

    def search(
        self,
        entity: Entity,
        filters: dict[str, str],
        **kwargs: Unpack[SearchKwargs],
    ) -> list[JSONDict]:
        """Search for MusicBrainz entities matching the given filters.

        * Query is constructed by combining the provided filters using AND logic
        * Each filter key-value pair is formatted as 'key:"value"' unless
          - 'key' is empty, in which case only the value is used, '"value"'
          - 'value' is empty, in which case the filter is ignored
        * Values are lowercased and stripped of whitespace.
        """
        query = " AND ".join(
            ":".join(filter(None, (k, f'"{_v}"')))
            for k, v in filters.items()
            if (_v := v.lower().strip())
        )
        log.debug("Searching for MusicBrainz {}s with: {!r}", entity, query)
        kwargs["query"] = query
        return self._get_resource(entity, **kwargs)[f"{entity}s"]
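
    # A worked example of the query construction above (assumed input):
    #   filters = {"release": " Abbey Road ", "artist": "The Beatles", "label": ""}
    # yields the query string:
    #   release:"abbey road" AND artist:"the beatles"
    # The empty "label" value is dropped; an empty key would emit just '"value"'.
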
    def get_release(self, id_: str, **kwargs: Unpack[LookupKwargs]) -> JSONDict:
        """Retrieve a release by its MusicBrainz ID."""
        return self._lookup("release", id_, **kwargs)

    def get_recording(
        self, id_: str, **kwargs: Unpack[LookupKwargs]
    ) -> JSONDict:
        """Retrieve a recording by its MusicBrainz ID."""
        return self._lookup("recording", id_, **kwargs)

    def get_work(self, id_: str, **kwargs: Unpack[LookupKwargs]) -> JSONDict:
        """Retrieve a work by its MusicBrainz ID."""
        return self._lookup("work", id_, **kwargs)

    @require_one_of("artist", "collection", "release", "work")
    def browse_recordings(
        self, **kwargs: Unpack[BrowseRecordingsKwargs]
    ) -> list[JSONDict]:
        """Browse recordings related to the given entities.

        At least one of artist, collection, release, or work must be provided.
        """
        return self._browse("recording", **kwargs)

    @require_one_of("artist", "collection", "release")
    def browse_release_groups(
        self, **kwargs: Unpack[BrowseReleaseGroupsKwargs]
    ) -> list[JSONDict]:
        """Browse release groups related to the given entities.

        At least one of artist, collection, or release must be provided.
        """
        return self._get_resource("release-group", **kwargs)["release-groups"]

    @singledispatchmethod
    @classmethod
    def _group_relations(cls, data: Any) -> Any:
        """Normalize MusicBrainz 'relations' into type-keyed fields recursively.

        This helper rewrites payloads that use a generic 'relations' list into
        a structure that is easier to consume downstream. When a mapping
        contains 'relations', those entries are regrouped by their 'target-type'
        and stored under keys like '<target-type>-relations'. The original
        'relations' key is removed to avoid ambiguous access patterns.

        The transformation is applied recursively so that nested objects and
        sequences are normalized consistently, while non-container values are
        left unchanged.
        """
        return data

    @_group_relations.register(list)
    @classmethod
    def _(cls, data: list[Any]) -> list[Any]:
        return [cls._group_relations(i) for i in data]

    @_group_relations.register(dict)
    @classmethod
    def _(cls, data: JSONDict) -> JSONDict:
        for k, v in list(data.items()):
            if k == "relations":
                get_target_type = operator.methodcaller("get", "target-type")
                for target_type, group in groupby(
                    sorted(v, key=get_target_type), get_target_type
                ):
                    relations = [
                        {k: v for k, v in item.items() if k != "target-type"}
                        for item in group
                    ]
                    data[f"{target_type}-relations"] = cls._group_relations(
                        relations
                    )
                data.pop("relations")
            else:
                data[k] = cls._group_relations(v)
        return data
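
    # An input/output sketch of the normalization (payload shape assumed from
    # the MusicBrainz API):
    #   {"relations": [
    #       {"target-type": "url", "url": {"resource": "https://example.com"}},
    #       {"target-type": "artist", "artist": {"name": "X"}},
    #   ]}
    # becomes
    #   {"url-relations": [{"url": {"resource": "https://example.com"}}],
    #    "artist-relations": [{"artist": {"name": "X"}}]}
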


class MusicBrainzAPIMixin:
    """Mixin that provides a cached MusicBrainzAPI helper instance."""

    @cached_property
    def mb_api(self) -> MusicBrainzAPI:
        return MusicBrainzAPI()
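
A sketch of a consumer (hypothetical class; `mb_api` is created lazily and
shared across calls):

    class ReleaseTitleFetcher(MusicBrainzAPIMixin):
        def title(self, mbid: str) -> str:
            return self.mb_api.get_release(mbid)["title"]
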
195  beetsplug/_utils/requests.py  Normal file

@@ -0,0 +1,195 @@
from __future__ import annotations

import atexit
import threading
from contextlib import contextmanager
from functools import cached_property
from http import HTTPStatus
from typing import TYPE_CHECKING, Any, ClassVar, Generic, Protocol, TypeVar

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

from beets import __version__

if TYPE_CHECKING:
    from collections.abc import Iterator


class BeetsHTTPError(requests.exceptions.HTTPError):
    STATUS: ClassVar[HTTPStatus]

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(
            f"HTTP Error: {self.STATUS.value} {self.STATUS.phrase}",
            *args,
            **kwargs,
        )


class HTTPNotFoundError(BeetsHTTPError):
    STATUS = HTTPStatus.NOT_FOUND


class Closeable(Protocol):
    """Protocol for objects that have a close method."""

    def close(self) -> None: ...


C = TypeVar("C", bound=Closeable)


class SingletonMeta(type, Generic[C]):
    """Metaclass ensuring a single shared instance per class.

    Creates one instance per class type on first instantiation, reusing it
    for all subsequent calls. Automatically registers cleanup on program exit
    for proper resource management.
    """

    _instances: ClassVar[dict[type[Any], Any]] = {}
    _lock: ClassVar[threading.Lock] = threading.Lock()

    def __call__(cls, *args: Any, **kwargs: Any) -> C:
        if cls not in cls._instances:
            with cls._lock:
                if cls not in SingletonMeta._instances:
                    instance = super().__call__(*args, **kwargs)
                    SingletonMeta._instances[cls] = instance
                    atexit.register(instance.close)
        return SingletonMeta._instances[cls]
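
A self-contained behavior sketch of the metaclass:

    class _DemoResource(metaclass=SingletonMeta):
        def close(self) -> None:  # registered with atexit on first creation
            print("closing shared resource")

    a = _DemoResource()
    b = _DemoResource()
    assert a is b  # one shared instance per class
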

class TimeoutAndRetrySession(requests.Session, metaclass=SingletonMeta):
    """HTTP session with sensible defaults.

    * default beets User-Agent header
    * default request timeout
    * automatic retries on transient connection or server errors
    * raises exceptions for HTTP error status codes
    """

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.headers["User-Agent"] = f"beets/{__version__} https://beets.io/"

        retry = Retry(
            total=6,
            backoff_factor=0.5,
            # Retry on server errors
            status_forcelist=[
                HTTPStatus.INTERNAL_SERVER_ERROR,
                HTTPStatus.BAD_GATEWAY,
                HTTPStatus.SERVICE_UNAVAILABLE,
                HTTPStatus.GATEWAY_TIMEOUT,
            ],
        )
        adapter = HTTPAdapter(max_retries=retry)
        self.mount("https://", adapter)
        self.mount("http://", adapter)

    def request(self, *args, **kwargs):
        """Execute HTTP request with automatic timeout and status validation.

        Ensures all requests have a timeout (defaults to 10 seconds) and raises
        an exception for HTTP error status codes.
        """
        kwargs.setdefault("timeout", 10)
        r = super().request(*args, **kwargs)
        r.raise_for_status()

        return r


class RequestHandler:
    """Manages HTTP requests with custom error handling and session management.

    Provides a reusable interface for making HTTP requests with automatic
    conversion of standard HTTP errors to beets-specific exceptions. Supports
    custom session types and error mappings that can be overridden by
    subclasses.

    Usage:
        Subclass and override :meth:`RequestHandler.create_session`,
        :attr:`RequestHandler.explicit_http_errors` or
        :meth:`RequestHandler.status_to_error` to customize behavior.

        Use

        - :meth:`RequestHandler.get_json` to get JSON response data
        - :meth:`RequestHandler.get` to get an HTTP response object
        - :meth:`RequestHandler.request` to invoke arbitrary HTTP methods

        Feel free to define common methods that are used in multiple plugins.
    """

    #: List of custom exceptions to be raised for specific status codes.
    explicit_http_errors: ClassVar[list[type[BeetsHTTPError]]] = [
        HTTPNotFoundError
    ]

    def create_session(self) -> TimeoutAndRetrySession:
        """Create a new HTTP session instance.

        Can be overridden by subclasses to provide custom session types.
        """
        return TimeoutAndRetrySession()

    @cached_property
    def session(self) -> TimeoutAndRetrySession:
        return self.create_session()

    def status_to_error(
        self, code: int
    ) -> type[requests.exceptions.HTTPError] | None:
        """Map HTTP status codes to beets-specific exception types.

        Searches the configured explicit HTTP errors for a matching status code.
        Returns None if no specific error type is registered for the given code.
        """
        return next(
            (e for e in self.explicit_http_errors if e.STATUS == code), None
        )

    @contextmanager
    def handle_http_error(self) -> Iterator[None]:
        """Convert standard HTTP errors to beets-specific exceptions.

        Wraps operations that may raise HTTPError, automatically translating
        recognized status codes into their corresponding beets exception types.
        Unrecognized errors are re-raised unchanged.
        """
        try:
            yield
        except requests.exceptions.HTTPError as e:
            if beets_error := self.status_to_error(e.response.status_code):
                raise beets_error(response=e.response) from e

            raise

    def request(self, *args, **kwargs) -> requests.Response:
        """Perform HTTP request using the session with automatic error handling.

        Delegates to the underlying session method while converting recognized
        HTTP errors to beets-specific exceptions through the error handler.
        """
        with self.handle_http_error():
            return self.session.request(*args, **kwargs)

    def get(self, *args, **kwargs) -> requests.Response:
        """Perform HTTP GET request with automatic error handling."""
        return self.request("get", *args, **kwargs)

    def put(self, *args, **kwargs) -> requests.Response:
        """Perform HTTP PUT request with automatic error handling."""
        return self.request("put", *args, **kwargs)

    def delete(self, *args, **kwargs) -> requests.Response:
        """Perform HTTP DELETE request with automatic error handling."""
        return self.request("delete", *args, **kwargs)

    def get_json(self, *args, **kwargs):
        """Fetch and parse JSON data from an HTTP endpoint."""
        return self.get(*args, **kwargs).json()
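
A sketch of a plugin-side subclass (the endpoint URL is hypothetical):

    class ExampleClient(RequestHandler):
        def lookup(self, term: str):
            # A 404 surfaces as HTTPNotFoundError via handle_http_error().
            return self.get_json(
                "https://api.example.com/search", params={"q": term}
            )
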

@@ -15,6 +15,7 @@
"""Fetch various AcousticBrainz metadata using MBID."""

from collections import defaultdict
from typing import ClassVar

import requests

@@ -55,7 +56,7 @@ ABSCHEME = {

class AcousticPlugin(plugins.BeetsPlugin):
    item_types = {
    item_types: ClassVar[dict[str, types.Type]] = {
        "average_loudness": types.Float(6),
        "chords_changes_rate": types.Float(6),
        "chords_key": types.STRING,
@@ -14,11 +14,17 @@

"""Adds an album template field for formatted album types."""

from beets.library import Album
from __future__ import annotations

from typing import TYPE_CHECKING

from beets.plugins import BeetsPlugin

from .musicbrainz import VARIOUS_ARTISTS_ID

if TYPE_CHECKING:
    from beets.library import Album


class AlbumTypesPlugin(BeetsPlugin):
    """Adds an album template field for formatted album types."""
@@ -14,12 +14,13 @@

"""An AURA server using Flask."""

from __future__ import annotations

import os
import re
from collections.abc import Mapping
from dataclasses import dataclass
from mimetypes import guess_type
from typing import ClassVar
from typing import TYPE_CHECKING, ClassVar

from flask import (
    Blueprint,

@@ -40,12 +41,17 @@ from beets.dbcore.query import (
    NotQuery,
    RegexpQuery,
    SlowFieldSort,
    SQLiteType,
)
from beets.library import Album, Item, LibModel, Library
from beets.library import Album, Item
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand, _open_library

if TYPE_CHECKING:
    from collections.abc import Mapping

    from beets.dbcore.query import SQLiteType
    from beets.library import LibModel, Library

# Constants

# AURA server information
@@ -19,14 +19,7 @@ from __future__ import annotations
import json
import re
from datetime import datetime, timedelta
from typing import (
    TYPE_CHECKING,
    Iterable,
    Iterator,
    Literal,
    Sequence,
    overload,
)
from typing import TYPE_CHECKING, Literal, overload

import confuse
from requests_oauthlib import OAuth1Session

@@ -42,6 +35,8 @@ from beets.autotag.hooks import AlbumInfo, TrackInfo
from beets.metadata_plugins import MetadataSourcePlugin

if TYPE_CHECKING:
    from collections.abc import Iterable, Iterator, Sequence

    from beets.importer import ImportSession
    from beets.library import Item
@@ -26,7 +26,7 @@ import sys
import time
import traceback
from string import Template
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, ClassVar

import beets
import beets.ui

@@ -283,7 +283,7 @@ class BaseServer:
        if not self.ctrl_sock:
            self.ctrl_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.ctrl_sock.connect((self.ctrl_host, self.ctrl_port))
        self.ctrl_sock.sendall((f"{message}\n").encode("utf-8"))
        self.ctrl_sock.sendall((f"{message}\n").encode())

    def _send_event(self, event):
        """Notify subscribed connections of an event."""

@@ -1037,7 +1037,7 @@ class Command:
            raise BPDError(ERROR_PERMISSION, "insufficient privileges")

        try:
            args = [conn] + self.args
            args = [conn, *self.args]
            results = func(*args)
            if results:
                for data in results:

@@ -1344,7 +1344,7 @@ class Server(BaseServer):
    # Searching.

    tagtype_map = {
    tagtype_map: ClassVar[dict[str, str]] = {
        "Artist": "artist",
        "ArtistSort": "artist_sort",
        "Album": "album",
@@ -27,8 +27,17 @@ import gi

from beets import ui

gi.require_version("Gst", "1.0")
from gi.repository import GLib, Gst  # noqa: E402
try:
    gi.require_version("Gst", "1.0")
except ValueError as e:
    # In some scenarios, gi may be importable, but we get a ValueError when
    # trying to specify the required version. This is problematic in the test
    # suite where test_bpd.py has a call to
    # pytest.importorskip("beetsplug.bpd"). Re-raising as an ImportError
    # makes it so the test collector functions as intended.
    raise ImportError from e

from gi.repository import GLib, Gst

Gst.init(None)
@@ -106,7 +115,7 @@ class GstPlayer:
        elif message.type == Gst.MessageType.ERROR:
            # error
            self.player.set_state(Gst.State.NULL)
            err, debug = message.parse_error()
            err, _ = message.parse_error()
            print(f"Error: {err}")
            self.playing = False

@@ -196,7 +205,7 @@ class GstPlayer:
    def seek(self, position):
        """Seeks to position (in seconds)."""
        cur_pos, cur_len = self.time()
        _, cur_len = self.time()
        if position > cur_len:
            self.stop()
            return
@@ -73,7 +73,7 @@ class BPSyncPlugin(BeetsPlugin):
        """Retrieve and apply info from the autotagger for items matched by
        query.
        """
        for item in lib.items(query + ["singleton:true"]):
        for item in lib.items([*query, "singleton:true"]):
            if not item.mb_trackid:
                self._log.info(
                    "Skipping singleton with no mb_trackid: {}", item

@@ -149,14 +149,14 @@ class BPSyncPlugin(BeetsPlugin):
            library_trackid_to_item = {
                int(item.mb_trackid): item for item in items
            }
            item_to_trackinfo = {
                item: beatport_trackid_to_trackinfo[track_id]
            item_info_pairs = [
                (item, beatport_trackid_to_trackinfo[track_id])
                for track_id, item in library_trackid_to_item.items()
            }
            ]

            self._log.info("applying changes to {}", album)
            with lib.transaction():
                autotag.apply_metadata(albuminfo, item_to_trackinfo)
                autotag.apply_metadata(albuminfo, item_info_pairs)
                changed = False
                # Find any changed item to apply Beatport changes to album.
                any_changed_item = items[0]
@@ -16,20 +16,26 @@
autotagger. Requires the pyacoustid library.
"""

from __future__ import annotations

import re
from collections import defaultdict
from functools import cached_property, partial
from typing import Iterable
from typing import TYPE_CHECKING

import acoustid
import confuse

from beets import config, ui, util
from beets.autotag.distance import Distance
from beets.autotag.hooks import TrackInfo
from beets.metadata_plugins import MetadataSourcePlugin
from beetsplug.musicbrainz import MusicBrainzPlugin

if TYPE_CHECKING:
    from collections.abc import Iterable

    from beets.autotag.hooks import TrackInfo

API_KEY = "1vOwZtEn"
SCORE_THRESH = 0.5
TRACK_ID_WEIGHT = 10.0
@@ -95,12 +95,18 @@ def in_no_convert(item: Item) -> bool:
    return False


def should_transcode(item, fmt):
def should_transcode(item, fmt, force: bool = False):
    """Determine whether the item should be transcoded as part of
    conversion (i.e., its bitrate is high or it has the wrong format).

    If ``force`` is True, safety checks like ``no_convert`` and
    ``never_convert_lossy_files`` are ignored and the item is always
    transcoded.
    """
    if force:
        return True
    if in_no_convert(item) or (
        config["convert"]["never_convert_lossy_files"]
        config["convert"]["never_convert_lossy_files"].get(bool)
        and item.format.lower() not in LOSSLESS_FORMATS
    ):
        return False
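
Illustrative calls showing the effect of the new flag:

    # should_transcode(item, "mp3")              -> respects no_convert and
    #                                               never_convert_lossy_files
    # should_transcode(item, "mp3", force=True)  -> always True, skips checks
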
@@ -236,6 +242,16 @@ class ConvertPlugin(BeetsPlugin):
            drive, relative paths pointing to media files
            will be used.""",
        )
        cmd.parser.add_option(
            "-F",
            "--force",
            action="store_true",
            dest="force",
            help=(
                "force transcoding. Ignores no_convert, "
                "never_convert_lossy_files, and max_bitrate"
            ),
        )
        cmd.parser.add_album_option()
        cmd.func = self.convert_func
        return [cmd]

@@ -258,10 +274,15 @@ class ConvertPlugin(BeetsPlugin):
            pretend,
            hardlink,
            link,
            playlist,
            _,
            force,
        ) = self._get_opts_and_config(empty_opts)

        items = task.imported_items()

        # Filter items based on should_transcode function
        items = [item for item in items if should_transcode(item, fmt)]

        self._parallel_convert(
            dest,
            False,

@@ -272,6 +293,7 @@ class ConvertPlugin(BeetsPlugin):
            hardlink,
            threads,
            items,
            force,
        )

    # Utilities converted from functions to methods on logging overhaul
@@ -347,6 +369,7 @@ class ConvertPlugin(BeetsPlugin):
        pretend=False,
        link=False,
        hardlink=False,
        force=False,
    ):
        """A pipeline thread that converts `Item` objects from a
        library.

@@ -372,11 +395,11 @@ class ConvertPlugin(BeetsPlugin):
        if keep_new:
            original = dest
            converted = item.path
            if should_transcode(item, fmt):
            if should_transcode(item, fmt, force):
                converted = replace_ext(converted, ext)
        else:
            original = item.path
            if should_transcode(item, fmt):
            if should_transcode(item, fmt, force):
                dest = replace_ext(dest, ext)
            converted = dest
@@ -406,7 +429,7 @@ class ConvertPlugin(BeetsPlugin):
            )
            util.move(item.path, original)

            if should_transcode(item, fmt):
            if should_transcode(item, fmt, force):
                linked = False
                try:
                    self.encode(command, original, converted, pretend)

@@ -577,6 +600,7 @@ class ConvertPlugin(BeetsPlugin):
            hardlink,
            link,
            playlist,
            force,
        ) = self._get_opts_and_config(opts)

        if opts.album:

@@ -613,6 +637,7 @@ class ConvertPlugin(BeetsPlugin):
                hardlink,
                threads,
                items,
                force,
            )

        if playlist:

@@ -735,7 +760,7 @@ class ConvertPlugin(BeetsPlugin):
        else:
            hardlink = self.config["hardlink"].get(bool)
            link = self.config["link"].get(bool)

        force = getattr(opts, "force", False)
        return (
            dest,
            threads,

@@ -745,6 +770,7 @@ class ConvertPlugin(BeetsPlugin):
            hardlink,
            link,
            playlist,
            force,
        )

    def _parallel_convert(

@@ -758,13 +784,21 @@ class ConvertPlugin(BeetsPlugin):
        hardlink,
        threads,
        items,
        force,
    ):
        """Run the convert_item function for every item, on as many threads
        as defined in threads.
        """
        convert = [
            self.convert_item(
                dest, keep_new, path_formats, fmt, pretend, link, hardlink
                dest,
                keep_new,
                path_formats,
                fmt,
                pretend,
                link,
                hardlink,
                force,
            )
            for _ in range(threads)
        ]
@@ -18,27 +18,26 @@ from __future__ import annotations
import collections
import time
from typing import TYPE_CHECKING, Literal, Sequence
from typing import TYPE_CHECKING, ClassVar, Literal

import requests

from beets import ui
from beets.autotag import AlbumInfo, TrackInfo
from beets.dbcore import types
from beets.metadata_plugins import (
    IDResponse,
    SearchApiMetadataSourcePlugin,
    SearchFilter,
)
from beets.metadata_plugins import IDResponse, SearchApiMetadataSourcePlugin

if TYPE_CHECKING:
    from collections.abc import Sequence

    from beets.library import Item, Library
    from beets.metadata_plugins import SearchFilter

    from ._typing import JSONDict


class DeezerPlugin(SearchApiMetadataSourcePlugin[IDResponse]):
    item_types = {
    item_types: ClassVar[dict[str, types.Type]] = {
        "deezer_track_rank": types.INTEGER,
        "deezer_track_id": types.INTEGER,
        "deezer_updated": types.DATE,
@@ -27,13 +27,12 @@ import time
import traceback
from functools import cache
from string import ascii_lowercase
from typing import TYPE_CHECKING, Sequence, cast
from typing import TYPE_CHECKING

import confuse
from discogs_client import Client, Master, Release
from discogs_client.exceptions import DiscogsAPIError
from requests.exceptions import ConnectionError
from typing_extensions import NotRequired, TypedDict

import beets
import beets.ui
@@ -42,15 +41,20 @@ from beets.autotag.distance import string_dist
from beets.autotag.hooks import AlbumInfo, TrackInfo
from beets.metadata_plugins import MetadataSourcePlugin

from .states import DISAMBIGUATION_RE, ArtistState, TracklistState

if TYPE_CHECKING:
    from collections.abc import Callable, Iterable
    from collections.abc import Callable, Iterable, Sequence

    from beets.library import Item

    from .types import ReleaseFormat, Track

USER_AGENT = f"beets/{beets.__version__} +https://beets.io/"
API_KEY = "rAzVUQYRaoFjeBjyWuWZ"
API_SECRET = "plxtUTqoCzwxZpqdPysCwGuBSmZNdZVy"


# Exceptions that discogs_client should really handle but does not.
CONNECTION_ERRORS = (
    ConnectionError,
@@ -60,7 +64,6 @@ CONNECTION_ERRORS = (
    DiscogsAPIError,
)


TRACK_INDEX_RE = re.compile(
    r"""
    (.*?)  # medium: everything before medium_index.
@@ -76,50 +79,6 @@
    re.VERBOSE,
)

DISAMBIGUATION_RE = re.compile(r" \(\d+\)")


class ReleaseFormat(TypedDict):
    name: str
    qty: int
    descriptions: list[str] | None


class Artist(TypedDict):
    name: str
    anv: str
    join: str
    role: str
    tracks: str
    id: str
    resource_url: str


class Track(TypedDict):
    position: str
    type_: str
    title: str
    duration: str
    artists: list[Artist]
    extraartists: NotRequired[list[Artist]]


class TrackWithSubtracks(Track):
    sub_tracks: list[TrackWithSubtracks]


class IntermediateTrackInfo(TrackInfo):
    """Allow working with string mediums from
    get_track_info."""

    def __init__(
        self,
        medium_str: str | None,
        **kwargs,
    ) -> None:
        self.medium_str = medium_str
        super().__init__(**kwargs)


class DiscogsPlugin(MetadataSourcePlugin):
    def __init__(self):
|
|||
for track in album.tracks:
|
||||
if track.track_id == track_id:
|
||||
return track
|
||||
|
||||
return None
|
||||
|
||||
def get_albums(self, query: str) -> Iterable[AlbumInfo]:
|
||||
|
|
@@ -343,25 +301,6 @@ class DiscogsPlugin(MetadataSourcePlugin):
        return media, albumtype

    def get_artist_with_anv(
        self, artists: list[Artist], use_anv: bool = False
    ) -> tuple[str, str | None]:
        """Iterate through a discogs result, fetching its data;
        if the artist ANV is to be used, map it to the name.
        Calls the parent class get_artist method."""
        artist_list: list[dict[str | int, str]] = []
        for artist_data in artists:
            a: dict[str | int, str] = {
                "name": artist_data["name"],
                "id": artist_data["id"],
                "join": artist_data.get("join", ""),
            }
            if use_anv and (anv := artist_data.get("anv", "")):
                a["name"] = anv
            artist_list.append(a)
        artist, artist_id = self.get_artist(artist_list, join_key="join")
        return self.strip_disambiguation(artist), artist_id

    def get_album_info(self, result: Release) -> AlbumInfo | None:
        """Returns an AlbumInfo object for a discogs Release object."""
        # Explicitly reload the `Release` fields, as they might not be yet
@@ -391,11 +330,10 @@ class DiscogsPlugin(MetadataSourcePlugin):
            return None

        artist_data = [a.data for a in result.artists]
        album_artist, album_artist_id = self.get_artist_with_anv(artist_data)
        album_artist_anv, _ = self.get_artist_with_anv(
            artist_data, use_anv=True
        # Information for the album artist
        albumartist = ArtistState.from_config(
            self.config, artist_data, for_album_artist=True
        )
        artist_credit = album_artist_anv

        album = re.sub(r" +", " ", result.title)
        album_id = result.data["id"]
@@ -405,19 +343,13 @@ class DiscogsPlugin(MetadataSourcePlugin):
        # each make an API call just to get the same data back.
        tracks = self.get_tracks(
            result.data["tracklist"],
            (album_artist, album_artist_anv, album_artist_id),
            ArtistState.from_config(self.config, artist_data),
        )

        # Assign ANV to the proper fields for tagging
        if not self.config["anv"]["artist_credit"]:
            artist_credit = album_artist
        if self.config["anv"]["album_artist"]:
            album_artist = album_artist_anv

        # Extract information for the optional AlbumInfo fields, if possible.
        va = result.data["artists"][0].get("name", "").lower() == "various"
        va = albumartist.artist == config["va_name"].as_str()
        year = result.data.get("year")
        mediums = [t.medium for t in tracks]
        mediums = [t["medium"] for t in tracks]
        country = result.data.get("country")
        data_url = result.data.get("uri")
        style = self.format(result.data.get("styles"))
@@ -447,11 +379,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
        cover_art_url = self.select_cover_art(result)

        # Additional cleanups
        # (various artists name, catalog number, media, disambiguation).
        if va:
            va_name = config["va_name"].as_str()
            album_artist = va_name
            artist_credit = va_name
        # (catalog number, media, disambiguation).
        if catalogno == "none":
            catalogno = None
        # Explicitly set the `media` for the tracks, since it is expected by
@@ -474,9 +402,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
        return AlbumInfo(
            album=album,
            album_id=album_id,
            artist=album_artist,
            artist_credit=artist_credit,
            artist_id=album_artist_id,
            **albumartist.info,  # Unpacks values to satisfy the keyword arguments
            tracks=tracks,
            albumtype=albumtype,
            va=va,
@@ -494,7 +420,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
            data_url=data_url,
            discogs_albumid=discogs_albumid,
            discogs_labelid=labelid,
            discogs_artistid=album_artist_id,
            discogs_artistid=albumartist.artist_id,
            cover_art_url=cover_art_url,
        )
@@ -516,63 +442,22 @@ class DiscogsPlugin(MetadataSourcePlugin):
        else:
            return None

    def _process_clean_tracklist(
        self,
        clean_tracklist: list[Track],
        album_artist_data: tuple[str, str, str | None],
    ) -> tuple[list[TrackInfo], dict[int, str], int, list[str], list[str]]:
        # Distinct works and intra-work divisions, as defined by index tracks.
        tracks: list[TrackInfo] = []
        index_tracks = {}
        index = 0
        divisions: list[str] = []
        next_divisions: list[str] = []
        for track in clean_tracklist:
            # Only real tracks have `position`. Otherwise, it's an index track.
            if track["position"]:
                index += 1
                if next_divisions:
                    # End of a block of index tracks: update the current
                    # divisions.
                    divisions += next_divisions
                    del next_divisions[:]
                track_info = self.get_track_info(
                    track, index, divisions, album_artist_data
                )
                track_info.track_alt = track["position"]
                tracks.append(track_info)
            else:
                next_divisions.append(track["title"])
                # We expect new levels of division at the beginning of the
                # tracklist (and possibly elsewhere).
                try:
                    divisions.pop()
                except IndexError:
                    pass
                index_tracks[index + 1] = track["title"]
        return tracks, index_tracks, index, divisions, next_divisions

    def get_tracks(
        self,
        tracklist: list[Track],
        album_artist_data: tuple[str, str, str | None],
        albumartistinfo: ArtistState,
    ) -> list[TrackInfo]:
        """Returns a list of TrackInfo objects for a discogs tracklist."""
        try:
            clean_tracklist: list[Track] = self.coalesce_tracks(
                cast(list[TrackWithSubtracks], tracklist)
            )
            clean_tracklist: list[Track] = self._coalesce_tracks(tracklist)
        except Exception as exc:
            # FIXME: this is an extra precaution for making sure there are no
            # side effects after #2222. It should be removed after further
            # testing.
            self._log.debug("{}", traceback.format_exc())
            self._log.error("uncaught exception in coalesce_tracks: {}", exc)
            self._log.error("uncaught exception in _coalesce_tracks: {}", exc)
            clean_tracklist = tracklist
        processed = self._process_clean_tracklist(
            clean_tracklist, album_artist_data
        )
        tracks, index_tracks, index, divisions, next_divisions = processed
        t = TracklistState.build(self, clean_tracklist, albumartistinfo)
        # Fix up medium and medium_index for each track. Discogs position is
        # unreliable, but tracks are in order.
        medium = None
@@ -581,32 +466,36 @@ class DiscogsPlugin(MetadataSourcePlugin):
        # If a medium has two sides (ie. vinyl or cassette), each pair of
        # consecutive sides should belong to the same medium.
        if all([track.medium_str is not None for track in tracks]):
            m = sorted({track.medium_str.lower() for track in tracks})
        if all([medium is not None for medium in t.mediums]):
            m = sorted(
                {medium.lower() if medium else "" for medium in t.mediums}
            )
            # If all track.medium are single consecutive letters, assume it is
            # a 2-sided medium.
            if "".join(m) in ascii_lowercase:
                sides_per_medium = 2

        for track in tracks:
        for i, track in enumerate(t.tracks):
            # Handle special case where a different medium does not indicate a
            # new disc, when there is no medium_index and the ordinal of medium
            # is not sequential. For example, I, II, III, IV, V. Assume these
            # are the track index, not the medium.
            # side_count is the number of mediums or medium sides (in the case
            # of two-sided mediums) that were seen before.
            medium_str = t.mediums[i]
            medium_index = t.medium_indices[i]
            medium_is_index = (
                track.medium_str
                and not track.medium_index
                medium_str
                and not medium_index
                and (
                    len(track.medium_str) != 1
                    len(medium_str) != 1
                    or
                    # Not within standard incremental medium values (A, B, C, ...).
                    ord(track.medium_str) - 64 != side_count + 1
                    ord(medium_str) - 64 != side_count + 1
                )
            )

            if not medium_is_index and medium != track.medium_str:
            if not medium_is_index and medium != medium_str:
                side_count += 1
                if sides_per_medium == 2:
                    if side_count % sides_per_medium:
@@ -617,7 +506,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
                # Medium changed. Reset index_count.
                medium_count += 1
                index_count = 0
                medium = track.medium_str
                medium = medium_str

            index_count += 1
            medium_count = 1 if medium_count == 0 else medium_count
@@ -625,69 +514,25 @@ class DiscogsPlugin(MetadataSourcePlugin):
        # Get `disctitle` from Discogs index tracks. Assume that an index track
        # before the first track of each medium is a disc title.
        for track in tracks:
        for track in t.tracks:
            if track.medium_index == 1:
                if track.index in index_tracks:
                    disctitle = index_tracks[track.index]
                if track.index in t.index_tracks:
                    disctitle = t.index_tracks[track.index]
                else:
                    disctitle = None
                track.disctitle = disctitle

        return cast(list[TrackInfo], tracks)
        return t.tracks

    def coalesce_tracks(
        self, raw_tracklist: list[TrackWithSubtracks]
    ) -> list[Track]:
    def _coalesce_tracks(self, raw_tracklist: list[Track]) -> list[Track]:
        """Pre-process a tracklist, merging subtracks into a single track. The
        title for the merged track is the one from the previous index track,
        if present; otherwise it is a combination of the subtracks titles.
        """

        def add_merged_subtracks(
            tracklist: list[TrackWithSubtracks],
            subtracks: list[TrackWithSubtracks],
        ) -> None:
            """Modify `tracklist` in place, merging a list of `subtracks`
            into a single track in `tracklist`."""
            # Calculate position based on first subtrack, without subindex.
            idx, medium_idx, sub_idx = self.get_track_index(
                subtracks[0]["position"]
            )
            position = f"{idx or ''}{medium_idx or ''}"

            if tracklist and not tracklist[-1]["position"]:
                # Assume the previous index track contains the track title.
                if sub_idx:
                    # "Convert" the track title to a real track, discarding the
                    # subtracks assuming they are logical divisions of a
                    # physical track (12.2.9 Subtracks).
                    tracklist[-1]["position"] = position
                else:
                    # Promote the subtracks to real tracks, discarding the
                    # index track, assuming the subtracks are physical tracks.
                    index_track = tracklist.pop()
                    # Fix artists when they are specified on the index track.
                    if index_track.get("artists"):
                        for subtrack in subtracks:
                            if not subtrack.get("artists"):
                                subtrack["artists"] = index_track["artists"]
                    # Concatenate index with track title when index_tracks
                    # option is set
                    if self.config["index_tracks"]:
                        for subtrack in subtracks:
                            subtrack["title"] = (
                                f"{index_track['title']}: {subtrack['title']}"
                            )
                    tracklist.extend(subtracks)
            else:
                # Merge the subtracks, pick a title, and append the new track.
                track = subtracks[0].copy()
                track["title"] = " / ".join([t["title"] for t in subtracks])
                tracklist.append(track)

        # Pre-process the tracklist, trying to identify subtracks.
        subtracks: list[TrackWithSubtracks] = []
        tracklist: list[TrackWithSubtracks] = []

        subtracks: list[Track] = []
        tracklist: list[Track] = []
        prev_subindex = ""
        for track in raw_tracklist:
            # Regular subtrack (track with subindex).
@@ -699,7 +544,7 @@ class DiscogsPlugin(MetadataSourcePlugin):
                subtracks.append(track)
            else:
                # Subtrack part of a new group (..., 1.3, *2.1*, ...).
                add_merged_subtracks(tracklist, subtracks)
                self._add_merged_subtracks(tracklist, subtracks)
                subtracks = [track]
            prev_subindex = subindex.rjust(len(raw_tracklist))
            continue
@ -708,21 +553,64 @@ class DiscogsPlugin(MetadataSourcePlugin):
            if not track["position"] and "sub_tracks" in track:
                # Append the index track, assuming it contains the track title.
                tracklist.append(track)
-                add_merged_subtracks(tracklist, track["sub_tracks"])
+                self._add_merged_subtracks(tracklist, track["sub_tracks"])
                continue

            # Regular track or index track without nested sub_tracks.
            if subtracks:
-                add_merged_subtracks(tracklist, subtracks)
+                self._add_merged_subtracks(tracklist, subtracks)
                subtracks = []
                prev_subindex = ""
            tracklist.append(track)

        # Merge and add the remaining subtracks, if any.
        if subtracks:
-            add_merged_subtracks(tracklist, subtracks)
+            self._add_merged_subtracks(tracklist, subtracks)

-        return cast(list[Track], tracklist)
+        return tracklist

+    def _add_merged_subtracks(
+        self,
+        tracklist: list[Track],
+        subtracks: list[Track],
+    ) -> None:
+        """Modify `tracklist` in place, merging a list of `subtracks` into
+        a single track in `tracklist`."""
+        # Calculate position based on first subtrack, without subindex.
+        idx, medium_idx, sub_idx = self.get_track_index(
+            subtracks[0]["position"]
+        )
+        position = f"{idx or ''}{medium_idx or ''}"
+
+        if tracklist and not tracklist[-1]["position"]:
+            # Assume the previous index track contains the track title.
+            if sub_idx:
+                # "Convert" the track title to a real track, discarding the
+                # subtracks assuming they are logical divisions of a
+                # physical track (12.2.9 Subtracks).
+                tracklist[-1]["position"] = position
+            else:
+                # Promote the subtracks to real tracks, discarding the
+                # index track, assuming the subtracks are physical tracks.
+                index_track = tracklist.pop()
+                # Fix artists when they are specified on the index track.
+                if index_track.get("artists"):
+                    for subtrack in subtracks:
+                        if not subtrack.get("artists"):
+                            subtrack["artists"] = index_track["artists"]
+                # Concatenate index with track title when index_tracks
+                # option is set
+                if self.config["index_tracks"]:
+                    for subtrack in subtracks:
+                        subtrack["title"] = (
+                            f"{index_track['title']}: {subtrack['title']}"
+                        )
+                tracklist.extend(subtracks)
+        else:
+            # Merge the subtracks, pick a title, and append the new track.
+            track = subtracks[0].copy()
+            track["title"] = " / ".join([t["title"] for t in subtracks])
+            tracklist.append(track)

    def strip_disambiguation(self, text: str) -> str:
        """Remove Discogs-specific disambiguations from a string.
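
For orientation, here is a minimal standalone sketch of the subtrack-coalescing idea the method above implements (hypothetical positions and titles; the real code goes through get_track_index() and the plugin config):

    raw = [
        {"position": "1", "title": "Intro"},
        {"position": "2.1", "title": "Part One"},
        {"position": "2.2", "title": "Part Two"},
    ]

    def merge(group):
        # Merge a finished subtrack group into a single track.
        merged = group[0].copy()
        merged["position"] = group[0]["position"].split(".")[0]
        merged["title"] = " / ".join(t["title"] for t in group)
        return merged

    tracklist, subtracks = [], []
    for track in raw:
        if "." in track["position"]:  # subtrack such as "2.1"
            subtracks.append(track)
            continue
        if subtracks:  # flush a finished subtrack group
            tracklist.append(merge(subtracks))
            subtracks = []
        tracklist.append(track)
    if subtracks:
        tracklist.append(merge(subtracks))

    # tracklist -> positions "1" and "2"; track 2 is "Part One / Part Two"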
@ -737,17 +625,10 @@ class DiscogsPlugin(MetadataSourcePlugin):
        track: Track,
        index: int,
        divisions: list[str],
-        album_artist_data: tuple[str, str, str | None],
-    ) -> IntermediateTrackInfo:
+        albumartistinfo: ArtistState,
+    ) -> tuple[TrackInfo, str | None, str | None]:
        """Return a TrackInfo object for a Discogs track."""

-        artist, artist_anv, artist_id = album_artist_data
-        artist_credit = artist_anv
-        if not self.config["anv"]["artist_credit"]:
-            artist_credit = artist
-        if self.config["anv"]["artist"]:
-            artist = artist_anv
-
        title = track["title"]
        if self.config["index_tracks"]:
            prefix = ", ".join(divisions)
@ -756,44 +637,26 @@ class DiscogsPlugin(MetadataSourcePlugin):
        track_id = None
        medium, medium_index, _ = self.get_track_index(track["position"])

-        # If artists are found on the track, we will use those instead
-        if artists := track.get("artists", []):
-            artist, artist_id = self.get_artist_with_anv(
-                artists, self.config["anv"]["artist"]
-            )
-            artist_credit, _ = self.get_artist_with_anv(
-                artists, self.config["anv"]["artist_credit"]
-            )
        length = self.get_track_length(track["duration"])
+        # If artists are found on the track, we will use those instead
+        artistinfo = ArtistState.from_config(
+            self.config,
+            [
+                *(track.get("artists") or albumartistinfo.raw_artists),
+                *track.get("extraartists", []),
+            ],
+        )

-        # Add featured artists
-        if extraartists := track.get("extraartists", []):
-            featured_list = [
-                artist
-                for artist in extraartists
-                if "Featuring" in artist["role"]
-            ]
-            featured, _ = self.get_artist_with_anv(
-                featured_list, self.config["anv"]["artist"]
-            )
-            featured_credit, _ = self.get_artist_with_anv(
-                featured_list, self.config["anv"]["artist_credit"]
-            )
-            if featured:
-                artist += f" {self.config['featured_string']} {featured}"
-                artist_credit += (
-                    f" {self.config['featured_string']} {featured_credit}"
-                )
-        return IntermediateTrackInfo(
-            title=title,
-            track_id=track_id,
-            artist_credit=artist_credit,
-            artist=artist,
-            artist_id=artist_id,
-            length=length,
-            index=index,
-            medium_str=medium,
-            medium_index=medium_index,
+        return (
+            TrackInfo(
+                title=title,
+                track_id=track_id,
+                **artistinfo.info,
+                length=length,
+                index=index,
+            ),
+            medium,
+            medium_index,
        )

    @staticmethod
237
beetsplug/discogs/states.py
Normal file

@ -0,0 +1,237 @@
# This file is part of beets.
# Copyright 2025, Sarunas Nejus, Henry Oberholtzer.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.

"""Dataclasses for managing artist credits and tracklists from Discogs."""

from __future__ import annotations

import re
from dataclasses import asdict, dataclass, field
from functools import cached_property
from typing import TYPE_CHECKING, NamedTuple

from beets import config

from .types import ArtistInfo

if TYPE_CHECKING:
    from confuse import ConfigView

    from beets.autotag.hooks import TrackInfo

    from . import DiscogsPlugin
    from .types import Artist, Track, TracklistInfo

DISAMBIGUATION_RE = re.compile(r" \(\d+\)")


@dataclass
class ArtistState:
    """Represent Discogs artist credits.

    This object centralizes the plugin's policy for which Discogs artist fields
    to prefer (name vs. ANV), how to treat 'Various', how to format join
    phrases, and how to separate featured artists. It exposes both per-artist
    components and fully joined strings for common tag targets like 'artist' and
    'artist_credit'.
    """

    class ValidArtist(NamedTuple):
        """A normalized, render-ready artist entry extracted from Discogs data.

        Instances represent the subset of Discogs artist information needed for
        tagging, including the join token following the artist and whether the
        entry is considered a featured appearance.
        """

        id: str
        name: str
        credit: str
        join: str
        is_feat: bool

        def get_artist(self, property_name: str) -> str:
            """Return the requested display field with its trailing join token.

            The join token is normalized so commas become ', ' and other join
            phrases are surrounded with spaces, producing a single fragment that
            can be concatenated to form a full artist string.
            """
            join = {",": ", ", "": ""}.get(self.join, f" {self.join} ")
            return f"{getattr(self, property_name)}{join}"

    raw_artists: list[Artist]
    use_anv: bool
    use_credit_anv: bool
    featured_string: str
    should_strip_disambiguation: bool

    @property
    def info(self) -> ArtistInfo:
        """Expose the state in the shape expected by downstream tag mapping."""
        return {k: getattr(self, k) for k in ArtistInfo.__annotations__}  # type: ignore[return-value]

    def strip_disambiguation(self, text: str) -> str:
        """Strip Discogs disambiguation suffixes from an artist or label string.

        This removes Discogs-specific numeric suffixes like 'Name (5)' and can
        be applied to multi-artist strings as well (e.g., 'A (1) & B (2)'). When
        the feature is disabled, the input is returned unchanged.
        """
        if self.should_strip_disambiguation:
            return DISAMBIGUATION_RE.sub("", text)
        return text

    @cached_property
    def valid_artists(self) -> list[ValidArtist]:
        """Build the ordered, filtered list of artists used for rendering.

        The resulting list normalizes Discogs entries by:
        - substituting the configured 'Various Artists' name when Discogs uses
          'Various'
        - choosing between name and ANV according to plugin settings
        - excluding non-empty roles unless they indicate a featured appearance
        - capturing join tokens so the original credit formatting is preserved
        """
        va_name = config["va_name"].as_str()
        return [
            self.ValidArtist(
                str(a["id"]),
                self.strip_disambiguation(anv if self.use_anv else name),
                self.strip_disambiguation(anv if self.use_credit_anv else name),
                a["join"].strip(),
                is_feat,
            )
            for a in self.raw_artists
            if (
                (name := va_name if a["name"] == "Various" else a["name"])
                and (anv := a["anv"] or name)
                and (
                    (is_feat := ("featuring" in a["role"].lower()))
                    or not a["role"]
                )
            )
        ]

    @property
    def artists_ids(self) -> list[str]:
        """Return Discogs artist IDs for all valid artists, preserving order."""
        return [a.id for a in self.valid_artists]

    @property
    def artist_id(self) -> str:
        """Return the primary Discogs artist ID."""
        return self.artists_ids[0]

    @property
    def artists(self) -> list[str]:
        """Return the per-artist display names used for the 'artist' field."""
        return [a.name for a in self.valid_artists]

    @property
    def artists_credit(self) -> list[str]:
        """Return the per-artist display names used for the credit field."""
        return [a.credit for a in self.valid_artists]

    @property
    def artist(self) -> str:
        """Return the fully rendered artist string using display names."""
        return self.join_artists("name")

    @property
    def artist_credit(self) -> str:
        """Return the fully rendered artist credit string."""
        return self.join_artists("credit")

    def join_artists(self, property_name: str) -> str:
        """Render a single artist string with join phrases and featured artists.

        Non-featured artists are concatenated using their join tokens. Featured
        artists are appended after the configured 'featured' marker, preserving
        Discogs order while keeping featured credits separate from the main
        artist string.
        """
        non_featured = [a for a in self.valid_artists if not a.is_feat]
        featured = [a for a in self.valid_artists if a.is_feat]

        artist = "".join(a.get_artist(property_name) for a in non_featured)
        if featured:
            if "feat" not in artist:
                artist += f" {self.featured_string} "

            artist += ", ".join(a.get_artist(property_name) for a in featured)

        return artist

    @classmethod
    def from_config(
        cls,
        config: ConfigView,
        artists: list[Artist],
        for_album_artist: bool = False,
    ) -> ArtistState:
        return cls(
            artists,
            config["anv"]["album_artist" if for_album_artist else "artist"].get(
                bool
            ),
            config["anv"]["artist_credit"].get(bool),
            config["featured_string"].as_str(),
            config["strip_disambiguation"].get(bool),
        )


@dataclass
class TracklistState:
    index: int = 0
    index_tracks: dict[int, str] = field(default_factory=dict)
    tracks: list[TrackInfo] = field(default_factory=list)
    divisions: list[str] = field(default_factory=list)
    next_divisions: list[str] = field(default_factory=list)
    mediums: list[str | None] = field(default_factory=list)
    medium_indices: list[str | None] = field(default_factory=list)

    @property
    def info(self) -> TracklistInfo:
        return asdict(self)  # type: ignore[return-value]

    @classmethod
    def build(
        cls,
        plugin: DiscogsPlugin,
        clean_tracklist: list[Track],
        albumartistinfo: ArtistState,
    ) -> TracklistState:
        state = cls()
        for track in clean_tracklist:
            if track["position"]:
                state.index += 1
                if state.next_divisions:
                    state.divisions += state.next_divisions
                    state.next_divisions.clear()
                track_info, medium, medium_index = plugin.get_track_info(
                    track, state.index, state.divisions, albumartistinfo
                )
                track_info.track_alt = track["position"]
                state.tracks.append(track_info)
                state.mediums.append(medium or None)
                state.medium_indices.append(medium_index or None)
            else:
                state.next_divisions.append(track["title"])
                try:
                    state.divisions.pop()
                except IndexError:
                    pass
                state.index_tracks[state.index + 1] = track["title"]
        return state
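
A quick sketch of how ArtistState renders credits, assuming a beets environment with the default config; the dicts below carry only the keys the class reads (the real Artist TypedDict has more fields), and the values are hypothetical:

    state = ArtistState(
        raw_artists=[
            {"id": "1", "name": "Aphex Twin (2)", "anv": "AFX",
             "join": "", "role": ""},
            {"id": "2", "name": "Luke Vibert", "anv": "",
             "join": "", "role": "Featuring"},
        ],
        use_anv=True,
        use_credit_anv=False,
        featured_string="feat.",
        should_strip_disambiguation=True,
    )
    # state.artist        -> "AFX feat. Luke Vibert"
    # state.artist_credit -> "Aphex Twin feat. Luke Vibert"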
67
beetsplug/discogs/types.py
Normal file

@ -0,0 +1,67 @@
# This file is part of beets.
# Copyright 2025, Sarunas Nejus, Henry Oberholtzer.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.

from __future__ import annotations

from typing import TYPE_CHECKING

from typing_extensions import NotRequired, TypedDict

if TYPE_CHECKING:
    from beets.autotag.hooks import TrackInfo


class ReleaseFormat(TypedDict):
    name: str
    qty: int
    descriptions: list[str] | None


class Artist(TypedDict):
    name: str
    anv: str
    join: str
    role: str
    tracks: str
    id: str
    resource_url: str


class Track(TypedDict):
    position: str
    type_: str
    title: str
    duration: str
    artists: list[Artist]
    extraartists: NotRequired[list[Artist]]
    sub_tracks: NotRequired[list[Track]]


class ArtistInfo(TypedDict):
    artist: str
    artists: list[str]
    artist_credit: str
    artists_credit: list[str]
    artist_id: str
    artists_ids: list[str]


class TracklistInfo(TypedDict):
    index: int
    index_tracks: dict[int, str]
    tracks: list[TrackInfo]
    divisions: list[str]
    next_divisions: list[str]
    mediums: list[str | None]
    medium_indices: list[str | None]
@ -25,7 +25,8 @@ import yaml
from beets import plugins, ui, util
from beets.dbcore import types
from beets.importer import Action
-from beets.ui.commands import PromptChoice, _do_query
+from beets.ui.commands.utils import do_query
+from beets.util import PromptChoice

# These "safe" types can avoid the format/parse cycle that most fields go
# through: they are safe to edit with native YAML types.

@ -176,7 +177,7 @@ class EditPlugin(plugins.BeetsPlugin):
    def _edit_command(self, lib, opts, args):
        """The CLI command function for the `beet edit` command."""
        # Get the objects to edit.
-        items, albums = _do_query(lib, args, opts.album, False)
+        items, albums = do_query(lib, args, opts.album, False)
        objs = albums if opts.album else items
        if not objs:
            ui.print_("Nothing to edit.")

@ -274,23 +275,18 @@ class EditPlugin(plugins.BeetsPlugin):
                ui.print_("No changes to apply.")
                return False

            # Confirm the changes.
+            # For cancel/keep-editing, restore objects to their original
+            # in-memory state so temp edits don't leak into the session
            choice = ui.input_options(
                ("continue Editing", "apply", "cancel")
            )
            if choice == "a":  # Apply.
                return True
            elif choice == "c":  # Cancel.
+                self.apply_data(objs, new_data, old_data)
                return False
            elif choice == "e":  # Keep editing.
-                # Reset the temporary changes to the objects. I we have a
-                # copy from above, use that, else reload from the database.
-                objs = [
-                    (old_obj or obj) for old_obj, obj in zip(objs_old, objs)
-                ]
-                for obj in objs:
-                    if not obj.id < 0:
-                        obj.load()
+                self.apply_data(objs, new_data, old_data)
                continue

        # Remove the temporary file before returning.

@ -379,9 +375,7 @@ class EditPlugin(plugins.BeetsPlugin):
            # to the files if needed without re-applying metadata.
            return Action.RETAG
        else:
-            # Edit cancelled / no edits made. Revert changes.
-            for obj in task.items:
-                obj.read()
            return None

    def importer_edit_candidate(self, session, task):
        """Callback for invoking the functionality during an interactive
@ -62,6 +62,7 @@ class EmbedCoverArtPlugin(BeetsPlugin):
                "ifempty": False,
                "remove_art_file": False,
                "quality": 0,
+                "clearart_on_import": False,
            }
        )

@ -82,6 +83,9 @@ class EmbedCoverArtPlugin(BeetsPlugin):

        self.register_listener("art_set", self.process_album)

+        if self.config["clearart_on_import"].get(bool):
+            self.register_listener("import_task_files", self.import_task_files)
+
    def commands(self):
        # Embed command.
        embed_cmd = ui.Subcommand(

@ -278,3 +282,9 @@ class EmbedCoverArtPlugin(BeetsPlugin):
            os.remove(syspath(album.artpath))
            album.artpath = None
            album.store()
+
+    def import_task_files(self, session, task):
+        """Automatically clear art from imported files."""
+        for item in task.imported_items():
+            self._log.debug("clearart-on-import {.filepath}", item)
+            art.clear_item(item, self._log)
@ -148,7 +148,7 @@ class ExportPlugin(BeetsPlugin):
                album=opts.album,
            ):
                try:
-                    data, item = data_emitter(included_keys or "*")
+                    data, _ = data_emitter(included_keys or "*")
                except (mediafile.UnreadableFileError, OSError) as ex:
                    self._log.error("cannot read file: {}", ex)
                    continue
@ -23,7 +23,7 @@ from collections import OrderedDict
from contextlib import closing
from enum import Enum
from functools import cached_property
-from typing import TYPE_CHECKING, AnyStr, ClassVar, Literal, Tuple, Type
+from typing import TYPE_CHECKING, AnyStr, ClassVar, Literal

import confuse
import requests

@ -86,7 +86,7 @@ class Candidate:
        path: None | bytes = None,
        url: None | str = None,
        match: None | MetadataMatch = None,
-        size: None | Tuple[int, int] = None,
+        size: None | tuple[int, int] = None,
    ):
        self._log = log
        self.path = path

@ -355,7 +355,7 @@ class ArtSource(RequestMixin, ABC):
    # Specify whether this source fetches local or remote images
    LOC: ClassVar[SourceLocation]
    # A list of methods to match metadata, sorted by descending accuracy
-    VALID_MATCHING_CRITERIA: list[str] = ["default"]
+    VALID_MATCHING_CRITERIA: ClassVar[list[str]] = ["default"]
    # A human-readable name for the art source
    NAME: ClassVar[str]
    # The key to select the art source in the config. This value will also be

@ -518,8 +518,8 @@ class RemoteArtSource(ArtSource):
class CoverArtArchive(RemoteArtSource):
    NAME = "Cover Art Archive"
    ID = "coverart"
-    VALID_MATCHING_CRITERIA = ["release", "releasegroup"]
-    VALID_THUMBNAIL_SIZES = [250, 500, 1200]
+    VALID_MATCHING_CRITERIA: ClassVar[list[str]] = ["release", "releasegroup"]
+    VALID_THUMBNAIL_SIZES: ClassVar[list[int]] = [250, 500, 1200]

    URL = "https://coverartarchive.org/release/{mbid}"
    GROUP_URL = "https://coverartarchive.org/release-group/{mbid}"

@ -682,7 +682,7 @@ class GoogleImages(RemoteArtSource):
        """
        if not (album.albumartist and album.album):
            return
-        search_string = f"{album.albumartist},{album.album}".encode("utf-8")
+        search_string = f"{album.albumartist},{album.album}".encode()

        try:
            response = self.request(

@ -867,7 +867,7 @@ class ITunesStore(RemoteArtSource):
            )
        except KeyError as e:
            self._log.debug(
-                "Malformed itunes candidate: {} not found in {}",  # NOQA E501
+                "Malformed itunes candidate: {} not found in {}",
                e,
                list(c.keys()),
            )

@ -1101,6 +1101,16 @@ class FileSystem(LocalArtSource):
            else:
                remaining.append(fn)

+        # Fall back to a configured image.
+        if plugin.fallback:
+            self._log.debug(
+                "using fallback art file {}",
+                util.displayable_path(plugin.fallback),
+            )
+            yield self._candidate(
+                path=plugin.fallback, match=MetadataMatch.FALLBACK
+            )
+
        # Fall back to any image in the folder.
        if remaining and not plugin.cautious:
            self._log.debug(

@ -1118,7 +1128,7 @@ class LastFM(RemoteArtSource):
    ID = "lastfm"

    # Sizes in priority order.
-    SIZES = OrderedDict(
+    SIZES: ClassVar[dict[str, tuple[int, int]]] = OrderedDict(
        [
            ("mega", (300, 300)),
            ("extralarge", (300, 300)),

@ -1293,7 +1303,7 @@ class CoverArtUrl(RemoteArtSource):


# All art sources. The order they will be tried in is specified by the config.
-ART_SOURCES: set[Type[ArtSource]] = {
+ART_SOURCES: set[type[ArtSource]] = {
    FileSystem,
    CoverArtArchive,
    ITunesStore,

@ -1332,6 +1342,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
            "enforce_ratio": False,
            "cautious": False,
            "cover_names": ["cover", "front", "art", "album", "folder"],
+            "fallback": None,
            "sources": [
                "filesystem",
                "coverart",

@ -1380,6 +1391,9 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
        cover_names = self.config["cover_names"].as_str_seq()
        self.cover_names = list(map(util.bytestring_path, cover_names))
        self.cautious = self.config["cautious"].get(bool)
+        self.fallback = self.config["fallback"].get(
+            confuse.Optional(confuse.Filename())
+        )
        self.store_source = self.config["store_source"].get(bool)

        self.cover_format = self.config["cover_format"].get(

@ -1574,7 +1588,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
                    message = ui.colorize(
                        "text_highlight_minor", "has album art"
                    )
-                    self._log.info("{}: {}", album, message)
+                    ui.print_(f"{album}: {message}")
                else:
                    # In ordinary invocations, look for images on the
                    # filesystem. When forcing, however, always go to the Web

@ -1587,4 +1601,4 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
            message = ui.colorize("text_success", "found album art")
        else:
            message = ui.colorize("text_error", "no art found")
-        self._log.info("{}: {}", album, message)
+        ui.print_(f"{album}: {message}")
@ -183,16 +183,16 @@ def get_basic_beet_options():
    BL_NEED2.format("-l format-item", "-f -d 'print with custom format'")
    + BL_NEED2.format("-l format-album", "-f -d 'print with custom format'")
    + BL_NEED2.format(
-        "-s l -l library", "-f -r -d 'library database file to use'"
+        "-s l -l library", "-F -r -d 'library database file to use'"
    )
    + BL_NEED2.format(
-        "-s d -l directory", "-f -r -d 'destination music directory'"
+        "-s d -l directory", "-F -r -d 'destination music directory'"
    )
    + BL_NEED2.format(
        "-s v -l verbose", "-f -d 'print debugging information'"
    )
    + BL_NEED2.format(
-        "-s c -l config", "-f -r -d 'path to configuration file'"
+        "-s c -l config", "-F -r -d 'path to configuration file'"
    )
    + BL_NEED2.format(
        "-s h -l help", "-f -d 'print this help message and exit'"

@ -216,7 +216,7 @@ def get_subcommands(cmd_name_and_help, nobasicfields, extravalues):
            word += BL_USE3.format(
                cmdname,
                f"-a {wrap('$FIELDS')}",
-                f"-f -d {wrap('fieldname')}",
+                f"-d {wrap('fieldname')}",
            )

        if extravalues:

@ -270,7 +270,7 @@ def get_all_commands(beetcmds):
            word += " ".join(
                BL_USE3.format(
                    name,
-                    f"{cmd_need_arg}{cmd_s}{cmd_l} -f {cmd_arglist}",
+                    f"{cmd_need_arg}{cmd_s}{cmd_l} {cmd_arglist}",
                    cmd_helpstr,
                ).split()
            )

@ -278,7 +278,7 @@ def get_all_commands(beetcmds):

            word = word + BL_USE3.format(
                name,
-                "-s h -l help -f",
+                "-s h -l help",
                f"-d {wrap('print help')}",
            )
            return word
@ -88,7 +88,7 @@ def apply_matches(d, log):
    """Given a mapping from items to field dicts, apply the fields to
    the objects.
    """
-    some_map = list(d.values())[0]
+    some_map = next(iter(d.values()))
    keys = some_map.keys()

    # Only proceed if the "tag" field is equal across all filenames.
@ -17,13 +17,38 @@
from __future__ import annotations

import re
+from functools import cached_property, lru_cache
from typing import TYPE_CHECKING

-from beets import plugins, ui
+from beets import config, plugins, ui

if TYPE_CHECKING:
    from beets.importer import ImportSession, ImportTask
-    from beets.library import Item
+    from beets.library import Album, Item


+DEFAULT_BRACKET_KEYWORDS: tuple[str, ...] = (
+    "abridged",
+    "acapella",
+    "club",
+    "demo",
+    "edit",
+    "edition",
+    "extended",
+    "instrumental",
+    "live",
+    "mix",
+    "radio",
+    "release",
+    "remaster",
+    "remastered",
+    "remix",
+    "rmx",
+    "unabridged",
+    "unreleased",
+    "version",
+    "vip",
+)


def split_on_feat(
@ -36,11 +61,23 @@ def split_on_feat(
    artist, which is always a string, and the featuring artist, which
    may be a string or None if none is present.
    """
-    # split on the first "feat".
-    regex = re.compile(
-        plugins.feat_tokens(for_artist, custom_words), re.IGNORECASE
-    )
-    parts = tuple(s.strip() for s in regex.split(artist, 1))
+    # Try explicit featuring tokens first (ft, feat, featuring, etc.)
+    # to avoid splitting on generic separators like "&" when both are present
+    regex_explicit = re.compile(
+        plugins.feat_tokens(for_artist=False, custom_words=custom_words),
+        re.IGNORECASE,
+    )
+    parts = tuple(s.strip() for s in regex_explicit.split(artist, 1))
+    if len(parts) == 2:
+        return parts
+
+    # Fall back to all tokens including generic separators if no explicit match
+    if for_artist:
+        regex = re.compile(
+            plugins.feat_tokens(for_artist, custom_words), re.IGNORECASE
+        )
+        parts = tuple(s.strip() for s in regex.split(artist, 1))
+
    if len(parts) == 1:
        return parts[0], None
    else:
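
Illustrative behaviour of the two-stage split (hedged: the exact token sets come from plugins.feat_tokens(), so coverage may differ):

    # split_on_feat("Alice & Bob feat. Carol", True, [])
    #   first pass splits on the explicit "feat." token:
    #   -> ("Alice & Bob", "Carol")
    #
    # split_on_feat("Alice & Bob", True, [])
    #   no explicit token; the for_artist fallback may split on "&":
    #   -> ("Alice", "Bob")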
@ -98,7 +135,46 @@ def find_feat_part(
    return feat_part


+def _album_artist_no_feat(album: Album) -> str:
+    custom_words = config["ftintitle"]["custom_words"].as_str_seq()
+    return split_on_feat(album["albumartist"], False, list(custom_words))[0]
+
+
class FtInTitlePlugin(plugins.BeetsPlugin):
+    @cached_property
+    def bracket_keywords(self) -> list[str]:
+        return self.config["bracket_keywords"].as_str_seq()
+
+    @staticmethod
+    @lru_cache(maxsize=256)
+    def _bracket_position_pattern(keywords: tuple[str, ...]) -> re.Pattern[str]:
+        """
+        Build a compiled regex to find the first bracketed segment that contains
+        any of the provided keywords.
+
+        Cached by keyword tuple to avoid recompiling on every track/title.
+        """
+        kw_inner = "|".join(map(re.escape, keywords))
+
+        # If we have keywords, require one of them to appear in the bracket text.
+        # If kw == "", the lookahead becomes true and we match any bracket content.
+        kw = rf"\b(?={kw_inner})\b" if kw_inner else ""
+        return re.compile(
+            rf"""
+            (?:  # non-capturing group for the split
+                \s*?  # optional whitespace before brackets
+                (?=  # any bracket containing a keyword
+                    \([^)]*{kw}.*?\)
+                    | \[[^]]*{kw}.*?\]
+                    | <[^>]*{kw}.*? >
+                    | \{{[^}}]*{kw}.*?\}}
+                    | $  # or the end of the string
+                )
+            )
+            """,
+            re.IGNORECASE | re.VERBOSE,
+        )
+
    def __init__(self) -> None:
        super().__init__()

@ -110,6 +186,7 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
                "keep_in_artist": False,
                "preserve_album_artist": True,
                "custom_words": [],
+                "bracket_keywords": list(DEFAULT_BRACKET_KEYWORDS),
            }
        )

@ -129,6 +206,10 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
        if self.config["auto"]:
            self.import_stages = [self.imported]

+        self.album_template_fields["album_artist_no_feat"] = (
+            _album_artist_no_feat
+        )
+
    def commands(self) -> list[ui.Subcommand]:
        def func(lib, opts, args):
            self.config.set_args(opts)

@ -207,8 +288,10 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
        # artist and if we do not drop featuring information.
        if not drop_feat and not contains_feat(item.title, custom_words):
            feat_format = self.config["format"].as_str()
-            new_format = feat_format.format(feat_part)
-            new_title = f"{item.title} {new_format}"
+            formatted = feat_format.format(feat_part)
+            new_title = self.insert_ft_into_title(
+                item.title, formatted, self.bracket_keywords
+            )
            self._log.info("title: {.title} -> {}", item, new_title)
            item.title = new_title

@ -253,3 +336,28 @@ class FtInTitlePlugin(plugins.BeetsPlugin):
            item, feat_part, drop_feat, keep_in_artist_field, custom_words
        )
        return True
+
+    @staticmethod
+    def find_bracket_position(
+        title: str, keywords: list[str] | None = None
+    ) -> int | None:
+        normalized = (
+            DEFAULT_BRACKET_KEYWORDS if keywords is None else tuple(keywords)
+        )
+        pattern = FtInTitlePlugin._bracket_position_pattern(normalized)
+        m: re.Match[str] | None = pattern.search(title)
+        return m.start() if m else None
+
+    @classmethod
+    def insert_ft_into_title(
+        cls, title: str, feat_part: str, keywords: list[str] | None = None
+    ) -> str:
+        """Insert featured artist before the first bracket containing
+        remix/edit keywords if present.
+        """
+        normalized = (
+            DEFAULT_BRACKET_KEYWORDS if keywords is None else tuple(keywords)
+        )
+        pattern = cls._bracket_position_pattern(normalized)
+        parts = pattern.split(title, maxsplit=1)
+        return f" {feat_part} ".join(parts).strip()
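
Expected effect of the bracket-aware insertion, assuming the default keyword list (illustrative titles):

    # FtInTitlePlugin.insert_ft_into_title(
    #     "One More Time (Club Mix)", "feat. Romanthony"
    # )
    #   -> "One More Time feat. Romanthony (Club Mix)"
    #
    # With no keyword bracket, the featured part lands at the end:
    # FtInTitlePlugin.insert_ft_into_title("One More Time", "feat. Romanthony")
    #   -> "One More Time feat. Romanthony"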
@ -1,27 +0,0 @@
# This file is part of beets.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.

"""Deprecation warning for the removed gmusic plugin."""

from beets.plugins import BeetsPlugin


class Gmusic(BeetsPlugin):
    def __init__(self):
        super().__init__()

        self._log.warning(
            "The 'gmusic' plugin has been removed following the"
            " shutdown of Google Play Music. Remove the plugin"
            " from your configuration to silence this warning."
        )
169
beetsplug/importsource.py
Normal file

@ -0,0 +1,169 @@
"""Adds a `source_path` attribute to imported albums indicating from what path
|
||||
the album was imported from. Also suggests removing that source path in case
|
||||
you've removed the album from the library.
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
from shutil import rmtree
|
||||
|
||||
from beets.dbcore.query import PathQuery
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beets.ui import colorize as colorize_text
|
||||
from beets.ui import input_options
|
||||
|
||||
|
||||
class ImportSourcePlugin(BeetsPlugin):
|
||||
"""Main plugin class."""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the plugin and read configuration."""
|
||||
super().__init__()
|
||||
self.config.add(
|
||||
{
|
||||
"suggest_removal": False,
|
||||
}
|
||||
)
|
||||
self.import_stages = [self.import_stage]
|
||||
self.register_listener("item_removed", self.suggest_removal)
|
||||
# In order to stop future removal suggestions for an album we keep
|
||||
# track of `mb_albumid`s in this set.
|
||||
self.stop_suggestions_for_albums = set()
|
||||
# During reimports (import --library) both the import_task_choice and
|
||||
# the item_removed event are triggered. The item_removed event is
|
||||
# triggered first. For the import_task_choice event we prevent removal
|
||||
# suggestions using the existing stop_suggestions_for_album mechanism.
|
||||
self.register_listener(
|
||||
"import_task_choice", self.prevent_suggest_removal
|
||||
)
|
||||
|
||||
def prevent_suggest_removal(self, session, task):
|
||||
if task.skip:
|
||||
return
|
||||
for item in task.imported_items():
|
||||
if "mb_albumid" in item:
|
||||
self.stop_suggestions_for_albums.add(item.mb_albumid)
|
||||
|
||||
def import_stage(self, _, task):
|
||||
"""Event handler for albums import finished."""
|
||||
for item in task.imported_items():
|
||||
# During reimports (import --library), we prevent overwriting the
|
||||
# source_path attribute with the path from the music library
|
||||
if "source_path" in item:
|
||||
self._log.info(
|
||||
"Preserving source_path of reimported item {}", item.id
|
||||
)
|
||||
continue
|
||||
item["source_path"] = item.path
|
||||
item.try_sync(write=False, move=False)
|
||||
|
||||
def suggest_removal(self, item):
|
||||
"""Prompts the user to delete the original path the item was imported from."""
|
||||
if (
|
||||
not self.config["suggest_removal"]
|
||||
or item.mb_albumid in self.stop_suggestions_for_albums
|
||||
):
|
||||
return
|
||||
|
||||
if "source_path" not in item:
|
||||
self._log.warning(
|
||||
"Item without source_path (probably imported before plugin "
|
||||
"usage): {}",
|
||||
item.filepath,
|
||||
)
|
||||
return
|
||||
|
||||
srcpath = Path(os.fsdecode(item.source_path))
|
||||
if not srcpath.is_file():
|
||||
self._log.warning(
|
||||
"Original source file no longer exists or is not accessible: {}",
|
||||
srcpath,
|
||||
)
|
||||
return
|
||||
|
||||
if not (
|
||||
os.access(srcpath, os.W_OK)
|
||||
and os.access(srcpath.parent, os.W_OK | os.X_OK)
|
||||
):
|
||||
self._log.warning(
|
||||
"Original source file cannot be deleted (insufficient permissions): {}",
|
||||
srcpath,
|
||||
)
|
||||
return
|
||||
|
||||
# We ask the user whether they'd like to delete the item's source
|
||||
# directory
|
||||
item_path = colorize_text("text_warning", item.filepath)
|
||||
source_path = colorize_text("text_warning", srcpath)
|
||||
|
||||
print(
|
||||
f"The item:\n{item_path}\nis originated from:\n{source_path}\n"
|
||||
"What would you like to do?"
|
||||
)
|
||||
|
||||
resp = input_options(
|
||||
[
|
||||
"Delete the item's source",
|
||||
"Recursively delete the source's directory",
|
||||
"do Nothing",
|
||||
"do nothing and Stop suggesting to delete items from this album",
|
||||
],
|
||||
require=True,
|
||||
)
|
||||
|
||||
# Handle user response
|
||||
if resp == "d":
|
||||
self._log.info(
|
||||
"Deleting the item's source file: {}",
|
||||
srcpath,
|
||||
)
|
||||
srcpath.unlink()
|
||||
|
||||
elif resp == "r":
|
||||
self._log.info(
|
||||
"Searching for other items with a source_path attr containing: {}",
|
||||
srcpath.parent,
|
||||
)
|
||||
|
||||
source_dir_query = PathQuery(
|
||||
"source_path",
|
||||
srcpath.parent,
|
||||
# The "source_path" attribute may not be present in all
|
||||
# items of the library, so we avoid errors with this:
|
||||
fast=False,
|
||||
)
|
||||
|
||||
print("Doing so will delete the following items' sources as well:")
|
||||
for searched_item in item._db.items(source_dir_query):
|
||||
print(colorize_text("text_warning", searched_item.filepath))
|
||||
|
||||
print("Would you like to continue?")
|
||||
continue_resp = input_options(
|
||||
["Yes", "delete None", "delete just the File"],
|
||||
require=False, # Yes is the a default
|
||||
)
|
||||
|
||||
if continue_resp == "y":
|
||||
self._log.info(
|
||||
"Deleting the item's source directory: {}",
|
||||
srcpath.parent,
|
||||
)
|
||||
rmtree(srcpath.parent)
|
||||
|
||||
elif continue_resp == "n":
|
||||
self._log.info("doing nothing - aborting hook function")
|
||||
return
|
||||
|
||||
elif continue_resp == "f":
|
||||
self._log.info(
|
||||
"removing just the item's original source: {}",
|
||||
srcpath,
|
||||
)
|
||||
srcpath.unlink()
|
||||
|
||||
elif resp == "s":
|
||||
self.stop_suggestions_for_albums.add(item.mb_albumid)
|
||||
|
||||
else:
|
||||
self._log.info("Doing nothing")
|
||||
|
|
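
A hedged usage sketch: once items carry the flexible source_path attribute, it can be queried like any other beets field (the library path and query below are assumptions for illustration):

    from beets.library import Library

    lib = Library("musiclibrary.db")  # hypothetical database path
    for item in lib.items("artist:Nirvana"):
        # source_path is only present for items imported with this plugin
        print(item.title, item.get("source_path"))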
@ -61,18 +61,18 @@ class InlinePlugin(BeetsPlugin):
            config["item_fields"].items(), config["pathfields"].items()
        ):
            self._log.debug("adding item field {}", key)
-            func = self.compile_inline(view.as_str(), False)
+            func = self.compile_inline(view.as_str(), False, key)
            if func is not None:
                self.template_fields[key] = func

        # Album fields.
        for key, view in config["album_fields"].items():
            self._log.debug("adding album field {}", key)
-            func = self.compile_inline(view.as_str(), True)
+            func = self.compile_inline(view.as_str(), True, key)
            if func is not None:
                self.album_template_fields[key] = func

-    def compile_inline(self, python_code, album):
+    def compile_inline(self, python_code, album, field_name):
        """Given a Python expression or function body, compile it as a path
        field function. The returned function takes a single argument, an
        Item, and returns a Unicode string. If the expression cannot be

@ -97,7 +97,12 @@ class InlinePlugin(BeetsPlugin):
        is_expr = True

        def _dict_for(obj):
-            out = dict(obj)
+            out = {}
+            for key in obj.keys(computed=False):
+                if key == field_name:
+                    continue
+                out[key] = obj._get(key)

            if album:
                out["items"] = list(obj.items())
            return out
@ -62,7 +62,7 @@ class KeyFinderPlugin(BeetsPlugin):

        try:
            output = util.command_output(
-                command + [util.syspath(item.path)]
+                [*command, util.syspath(item.path)]
            ).stdout
        except (subprocess.CalledProcessError, OSError) as exc:
            self._log.error("execution failed: {}", exc)
@ -28,7 +28,7 @@ import os
import traceback
from functools import singledispatchmethod
from pathlib import Path
-from typing import TYPE_CHECKING, Union
+from typing import TYPE_CHECKING, Any

import pylast
import yaml

@ -38,6 +38,9 @@ from beets.library import Album, Item
from beets.util import plurality, unique_list

if TYPE_CHECKING:
+    import optparse
+    from collections.abc import Callable
+
    from beets.library import LibModel

LASTFM = pylast.LastFMNetwork(api_key=plugins.LASTFM_KEY)

@ -52,7 +55,11 @@ PYLAST_EXCEPTIONS = (
# Canonicalization tree processing.


-def flatten_tree(elem, path, branches):
+def flatten_tree(
+    elem: dict[Any, Any] | list[Any] | str,
+    path: list[str],
+    branches: list[list[str]],
+) -> None:
    """Flatten nested lists/dictionaries into lists of strings
    (branches).
    """

@ -61,15 +68,15 @@ def flatten_tree(elem, path, branches):

    if isinstance(elem, dict):
        for k, v in elem.items():
-            flatten_tree(v, path + [k], branches)
+            flatten_tree(v, [*path, k], branches)
    elif isinstance(elem, list):
        for sub in elem:
            flatten_tree(sub, path, branches)
    else:
-        branches.append(path + [str(elem)])
+        branches.append([*path, str(elem)])


-def find_parents(candidate, branches):
+def find_parents(candidate: str, branches: list[list[str]]) -> list[str]:
    """Find parent genres of a given genre, ordered from the closest to
    the furthest parent.
    """
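
A small illustration of flatten_tree on a hypothetical genre tree:

    branches: list[list[str]] = []
    flatten_tree(
        {"electronic": ["house", {"techno": ["detroit techno"]}]}, [], branches
    )
    # branches ->
    #   [["electronic", "house"],
    #    ["electronic", "techno", "detroit techno"]]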
@ -89,7 +96,7 @@ C14N_TREE = os.path.join(os.path.dirname(__file__), "genres-tree.yaml")


class LastGenrePlugin(plugins.BeetsPlugin):
-    def __init__(self):
+    def __init__(self) -> None:
        super().__init__()

        self.config.add(

@ -106,18 +113,17 @@ class LastGenrePlugin(plugins.BeetsPlugin):
                "separator": ", ",
                "prefer_specific": False,
                "title_case": True,
-                "extended_debug": False,
                "pretend": False,
            }
        )
        self.setup()

-    def setup(self):
+    def setup(self) -> None:
        """Setup plugin from config options"""
        if self.config["auto"]:
            self.import_stages = [self.imported]

-        self._genre_cache = {}
+        self._genre_cache: dict[str, list[str]] = {}
        self.whitelist = self._load_whitelist()
        self.c14n_branches, self.canonicalize = self._load_c14n_tree()

@ -162,6 +168,11 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        flatten_tree(genres_tree, [], c14n_branches)
        return c14n_branches, canonicalize

+    def _tunelog(self, msg: str, *args: Any, **kwargs: Any) -> None:
+        """Log tuning messages at DEBUG level when the verbosity level is high enough."""
+        if config["verbose"].as_number() >= 3:
+            self._log.debug(msg, *args, **kwargs)
+
    @property
    def sources(self) -> tuple[str, ...]:
        """A tuple of allowed genre sources. May contain 'track',

@ -178,7 +189,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):

    # More canonicalization and general helpers.

-    def _get_depth(self, tag):
+    def _get_depth(self, tag: str) -> int | None:
        """Find the depth of a tag in the genres tree."""
        depth = None
        for key, value in enumerate(self.c14n_branches):

@ -187,7 +198,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):
            break
        return depth

-    def _sort_by_depth(self, tags):
+    def _sort_by_depth(self, tags: list[str]) -> list[str]:
        """Given a list of tags, sort the tags by their depths in the
        genre tree.
        """

@ -255,9 +266,11 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        valid_tags = [t for t in tags if self._is_valid(t)]
        return valid_tags[:count]

-    def fetch_genre(self, lastfm_obj):
-        """Return the genre for a pylast entity or None if no suitable genre
-        can be found. Ex. 'Electronic, House, Dance'
+    def fetch_genre(
+        self, lastfm_obj: pylast.Album | pylast.Artist | pylast.Track
+    ) -> list[str]:
+        """Return genres for a pylast entity. Returns an empty list if
+        no suitable genres are found.
        """
        min_weight = self.config["min_weight"].get(int)
        return self._tags_for(lastfm_obj, min_weight)

@ -274,8 +287,10 @@ class LastGenrePlugin(plugins.BeetsPlugin):

    # Cached last.fm entity lookups.

-    def _last_lookup(self, entity, method, *args):
-        """Get a genre based on the named entity using the callable `method`
+    def _last_lookup(
+        self, entity: str, method: Callable[..., Any], *args: str
+    ) -> list[str]:
+        """Get genres based on the named entity using the callable `method`
        whose arguments are given in the sequence `args`. The genre lookup
        is cached based on the entity name and the arguments.

@ -289,32 +304,27 @@ class LastGenrePlugin(plugins.BeetsPlugin):

        key = f"{entity}.{'-'.join(str(a) for a in args)}"
        if key not in self._genre_cache:
-            args = [a.replace("\u2010", "-") for a in args]
-            self._genre_cache[key] = self.fetch_genre(method(*args))
+            args_replaced = [a.replace("\u2010", "-") for a in args]
+            self._genre_cache[key] = self.fetch_genre(method(*args_replaced))

        genre = self._genre_cache[key]
-        if self.config["extended_debug"]:
-            self._log.debug("last.fm (unfiltered) {} tags: {}", entity, genre)
+        self._tunelog("last.fm (unfiltered) {} tags: {}", entity, genre)
        return genre

-    def fetch_album_genre(self, obj):
-        """Return raw album genres from Last.fm for this Item or Album."""
+    def fetch_album_genre(self, albumartist: str, albumtitle: str) -> list[str]:
+        """Return genres from Last.fm for the album by albumartist."""
        return self._last_lookup(
-            "album", LASTFM.get_album, obj.albumartist, obj.album
+            "album", LASTFM.get_album, albumartist, albumtitle
        )

-    def fetch_album_artist_genre(self, obj):
-        """Return raw album artist genres from Last.fm for this Item or Album."""
-        return self._last_lookup("artist", LASTFM.get_artist, obj.albumartist)
+    def fetch_artist_genre(self, artist: str) -> list[str]:
+        """Return genres from Last.fm for the artist."""
+        return self._last_lookup("artist", LASTFM.get_artist, artist)

-    def fetch_artist_genre(self, item):
-        """Returns raw track artist genres from Last.fm for this Item."""
-        return self._last_lookup("artist", LASTFM.get_artist, item.artist)
-
-    def fetch_track_genre(self, obj):
-        """Returns raw track genres from Last.fm for this Item."""
+    def fetch_track_genre(self, trackartist: str, tracktitle: str) -> list[str]:
+        """Return genres from Last.fm for the track by artist."""
        return self._last_lookup(
-            "track", LASTFM.get_track, obj.artist, obj.title
+            "track", LASTFM.get_track, trackartist, tracktitle
        )

    # Main processing: _get_genre() and helpers.
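
For reference, the cache key built by _last_lookup looks like this (illustrative values):

    entity, args = "album", ("Beatles", "Abbey Road")
    key = f"{entity}.{'-'.join(str(a) for a in args)}"
    assert key == "album.Beatles-Abbey Road"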
@ -349,7 +359,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        combined = old + new
        return self._resolve_genres(combined)

-    def _get_genre(self, obj: LibModel) -> tuple[Union[str, None], ...]:
+    def _get_genre(self, obj: LibModel) -> tuple[str | None, ...]:
        """Get the final genre string for an Album or Item object.

        `self.sources` specifies allowed genre sources. Starting with the first

@ -369,7 +379,9 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        and the whitelist feature was disabled.
        """

-        def _try_resolve_stage(stage_label: str, keep_genres, new_genres):
+        def _try_resolve_stage(
+            stage_label: str, keep_genres: list[str], new_genres: list[str]
+        ) -> tuple[str, str] | None:
            """Try to resolve genres for a given stage and log the result."""
            resolved_genres = self._combine_resolve_and_log(
                keep_genres, new_genres

@ -402,14 +414,14 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        # Run through stages: track, album, artist,
        # album artist, or most popular track genre.
        if isinstance(obj, library.Item) and "track" in self.sources:
-            if new_genres := self.fetch_track_genre(obj):
+            if new_genres := self.fetch_track_genre(obj.artist, obj.title):
                if result := _try_resolve_stage(
                    "track", keep_genres, new_genres
                ):
                    return result

        if "album" in self.sources:
-            if new_genres := self.fetch_album_genre(obj):
+            if new_genres := self.fetch_album_genre(obj.albumartist, obj.album):
                if result := _try_resolve_stage(
                    "album", keep_genres, new_genres
                ):

@ -418,20 +430,36 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        if "artist" in self.sources:
            new_genres = []
            if isinstance(obj, library.Item):
-                new_genres = self.fetch_artist_genre(obj)
+                new_genres = self.fetch_artist_genre(obj.artist)
                stage_label = "artist"
            elif obj.albumartist != config["va_name"].as_str():
-                new_genres = self.fetch_album_artist_genre(obj)
+                new_genres = self.fetch_artist_genre(obj.albumartist)
                stage_label = "album artist"
+                if not new_genres:
+                    self._tunelog(
+                        'No album artist genre found for "{}", '
+                        "trying multi-valued field...",
+                        obj.albumartist,
+                    )
+                    for albumartist in obj.albumartists:
+                        self._tunelog(
+                            'Fetching artist genre for "{}"', albumartist
+                        )
+                        new_genres += self.fetch_artist_genre(albumartist)
+                    if new_genres:
+                        stage_label = "multi-valued album artist"
            else:
                # For "Various Artists", pick the most popular track genre.
                item_genres = []
+                assert isinstance(obj, Album)  # Type narrowing for mypy
                for item in obj.items():
                    item_genre = None
                    if "track" in self.sources:
-                        item_genre = self.fetch_track_genre(item)
+                        item_genre = self.fetch_track_genre(
+                            item.artist, item.title
+                        )
                    if not item_genre:
-                        item_genre = self.fetch_artist_genre(item)
+                        item_genre = self.fetch_artist_genre(item.artist)
                    if item_genre:
                        item_genres += item_genre
                if item_genres:

@ -454,6 +482,13 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        if obj.genre and self.config["keep_existing"]:
            if not self.whitelist or self._is_valid(obj.genre.lower()):
                return obj.genre, "original fallback"
+            else:
+                # If the original genre doesn't match a whitelisted genre, check
+                # if we can canonicalize it to find a matching, whitelisted genre!
+                if result := _try_resolve_stage(
+                    "original fallback", keep_genres, []
+                ):
+                    return result

        # Return fallback string.
        if fallback := self.config["fallback"].get():

@ -497,7 +532,7 @@ class LastGenrePlugin(plugins.BeetsPlugin):
            write=write, move=False, inherit="track" not in self.sources
        )

-    def commands(self):
+    def commands(self) -> list[ui.Subcommand]:
        lastgenre_cmd = ui.Subcommand("lastgenre", help="fetch genres")
        lastgenre_cmd.parser.add_option(
            "-p",

@ -554,16 +589,11 @@ class LastGenrePlugin(plugins.BeetsPlugin):
            dest="album",
            help="match albums instead of items (default)",
        )
-        lastgenre_cmd.parser.add_option(
-            "-d",
-            "--debug",
-            action="store_true",
-            dest="extended_debug",
-            help="extended last.fm debug logging",
-        )
        lastgenre_cmd.parser.set_defaults(album=True)

-        def lastgenre_func(lib, opts, args):
+        def lastgenre_func(
+            lib: library.Library, opts: optparse.Values, args: list[str]
+        ) -> None:
            self.config.set_args(opts)

            method = lib.albums if opts.album else lib.items

@ -573,10 +603,16 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        lastgenre_cmd.func = lastgenre_func
        return [lastgenre_cmd]

-    def imported(self, session, task):
+    def imported(
+        self, session: library.Session, task: library.ImportTask
+    ) -> None:
        self._process(task.album if task.is_album else task.item, write=False)

-    def _tags_for(self, obj, min_weight=None):
+    def _tags_for(
+        self,
+        obj: pylast.Album | pylast.Artist | pylast.Track,
+        min_weight: int | None = None,
+    ) -> list[str]:
        """Core genre identification routine.

        Given a pylast entity (album or track), return a list of

@ -588,11 +624,12 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        # Work around an inconsistency in pylast where
        # Album.get_top_tags() does not return TopItem instances.
        # https://github.com/pylast/pylast/issues/86
+        obj_to_query: Any = obj
        if isinstance(obj, pylast.Album):
-            obj = super(pylast.Album, obj)
+            obj_to_query = super(pylast.Album, obj)

        try:
-            res = obj.get_top_tags()
+            res: Any = obj_to_query.get_top_tags()
        except PYLAST_EXCEPTIONS as exc:
            self._log.debug("last.fm error: {}", exc)
            return []

@ -607,6 +644,6 @@ class LastGenrePlugin(plugins.BeetsPlugin):
        res = [el for el in res if (int(el.weight or 0)) >= min_weight]

        # Get strings from tags.
-        res = [el.item.get_name().lower() for el in res]
+        tags: list[str] = [el.item.get_name().lower() for el in res]

-        return res
+        return tags
@ -2,15 +2,16 @@
|
|||
|
||||
import datetime
|
||||
|
||||
import musicbrainzngs
|
||||
import requests
|
||||
|
||||
from beets import config, ui
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beetsplug.lastimport import process_tracks
|
||||
|
||||
from ._utils.musicbrainz import MusicBrainzAPIMixin
|
||||
|
||||
class ListenBrainzPlugin(BeetsPlugin):
|
||||
|
||||
class ListenBrainzPlugin(MusicBrainzAPIMixin, BeetsPlugin):
|
||||
"""A Beets plugin for interacting with ListenBrainz."""
|
||||
|
||||
ROOT = "http://api.listenbrainz.org/1/"
|
||||
|
|
@ -129,17 +130,16 @@ class ListenBrainzPlugin(BeetsPlugin):
|
|||
)
|
||||
return tracks
|
||||
|
||||
def get_mb_recording_id(self, track):
|
||||
def get_mb_recording_id(self, track) -> str | None:
|
||||
"""Returns the MusicBrainz recording ID for a track."""
|
||||
resp = musicbrainzngs.search_recordings(
|
||||
query=track["track_metadata"].get("track_name"),
|
||||
release=track["track_metadata"].get("release_name"),
|
||||
strict=True,
|
||||
results = self.mb_api.search(
|
||||
"recording",
|
||||
{
|
||||
"": track["track_metadata"].get("track_name"),
|
||||
"release": track["track_metadata"].get("release_name"),
|
||||
},
|
||||
)
|
||||
if resp.get("recording-count") == "1":
|
||||
return resp.get("recording-list")[0].get("id")
|
||||
else:
|
||||
return None
|
||||
return next((r["id"] for r in results), None)
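
The rewritten lookup above collapses the old recording-count check into a single first-match-or-None expression. The idiom, sketched with made-up result dicts:

results = [{"id": "abc"}, {"id": "def"}]
assert next((r["id"] for r in results), None) == "abc"
assert next((r["id"] for r in []), None) is None  # default instead of StopIteration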

def get_playlists_createdfor(self, username):
"""Returns a list of playlists created by a user."""

@ -207,17 +207,16 @@ class ListenBrainzPlugin(BeetsPlugin):
track_info = []
for track in tracks:
identifier = track.get("identifier")
resp = musicbrainzngs.get_recording_by_id(
recording = self.mb_api.get_recording(
identifier, includes=["releases", "artist-credits"]
)
recording = resp.get("recording")
title = recording.get("title")
artist_credit = recording.get("artist-credit", [])
if artist_credit:
artist = artist_credit[0].get("artist", {}).get("name")
else:
artist = None
releases = recording.get("release-list", [])
releases = recording.get("releases", [])
if releases:
album = releases[0].get("title")
date = releases[0].get("date")

@ -16,7 +16,6 @@

from __future__ import annotations

import atexit
import itertools
import math
import re

@ -25,10 +24,9 @@ from contextlib import contextmanager, suppress
from dataclasses import dataclass
from functools import cached_property, partial, total_ordering
from html import unescape
from http import HTTPStatus
from itertools import groupby
from pathlib import Path
from typing import TYPE_CHECKING, Iterable, Iterator, NamedTuple
from typing import TYPE_CHECKING, ClassVar, NamedTuple
from urllib.parse import quote, quote_plus, urlencode, urlparse

import langdetect

@ -36,12 +34,17 @@ import requests
from bs4 import BeautifulSoup
from unidecode import unidecode

import beets
from beets import plugins, ui
from beets.autotag.distance import string_dist
from beets.util.config import sanitize_choices

from ._utils.requests import HTTPNotFoundError, RequestHandler

if TYPE_CHECKING:
from collections.abc import Iterable, Iterator

import confuse

from beets.importer import ImportTask
from beets.library import Item, Library
from beets.logging import BeetsLogger as Logger

@ -54,41 +57,12 @@ if TYPE_CHECKING:
TranslatorAPI,
)

USER_AGENT = f"beets/{beets.__version__}"
INSTRUMENTAL_LYRICS = "[Instrumental]"


class NotFoundError(requests.exceptions.HTTPError):
pass


class CaptchaError(requests.exceptions.HTTPError):
pass


class TimeoutSession(requests.Session):
def request(self, *args, **kwargs):
"""Wrap the request method to raise an exception on HTTP errors."""
kwargs.setdefault("timeout", 10)
r = super().request(*args, **kwargs)
if r.status_code == HTTPStatus.NOT_FOUND:
raise NotFoundError("HTTP Error: Not Found", response=r)
if 300 <= r.status_code < 400:
raise CaptchaError("Captcha is required", response=r)

r.raise_for_status()

return r


r_session = TimeoutSession()
r_session.headers.update({"User-Agent": USER_AGENT})


@atexit.register
def close_session():
"""Close the requests session on shut down."""
r_session.close()
def __init__(self, *args, **kwargs) -> None:
super().__init__("Captcha is required", *args, **kwargs)


# Utilities.

@ -184,9 +158,18 @@ def slug(text: str) -> str:
return re.sub(r"\W+", "-", unidecode(text).lower().strip()).strip("-")


class RequestHandler:
class LyricsRequestHandler(RequestHandler):
_log: Logger

def status_to_error(self, code: int) -> type[requests.HTTPError] | None:
if err := super().status_to_error(code):
return err

if 300 <= code < 400:
return CaptchaError

return None

def debug(self, message: str, *args) -> None:
"""Log a debug message with the class name."""
self._log.debug(f"{self.__class__.__name__}: {message}", *args)
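
status_to_error() above is an overridable hook that maps HTTP status codes to exception types; the subclass defers to the base mapping first, then adds its captcha rule for redirects. A reduced sketch of the pattern (the base class here is illustrative, not beets' actual RequestHandler):

import requests

class CaptchaError(requests.HTTPError):
    pass

class BaseHandler:
    def status_to_error(self, code: int) -> type[requests.HTTPError] | None:
        return requests.HTTPError if code >= 400 else None

class LyricsHandler(BaseHandler):
    def status_to_error(self, code: int) -> type[requests.HTTPError] | None:
        # The base mapping wins; otherwise treat redirects as captcha walls.
        if err := super().status_to_error(code):
            return err
        if 300 <= code < 400:
            return CaptchaError
        return None

assert LyricsHandler().status_to_error(302) is CaptchaError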

@ -206,7 +189,7 @@ class RequestHandler:

return f"{url}?{urlencode(params)}"

def fetch_text(
def get_text(
self, url: str, params: JSONDict | None = None, **kwargs
) -> str:
"""Return text / HTML data from the given URL.

@ -216,21 +199,21 @@ class RequestHandler:
"""
url = self.format_url(url, params)
self.debug("Fetching HTML from {}", url)
r = r_session.get(url, **kwargs)
r = self.get(url, **kwargs)
r.encoding = None
return r.text

def fetch_json(self, url: str, params: JSONDict | None = None, **kwargs):
def get_json(self, url: str, params: JSONDict | None = None, **kwargs):
"""Return JSON data from the given URL."""
url = self.format_url(url, params)
self.debug("Fetching JSON from {}", url)
return r_session.get(url, **kwargs).json()
return super().get_json(url, **kwargs)

def post_json(self, url: str, params: JSONDict | None = None, **kwargs):
"""Send POST request and return JSON response."""
url = self.format_url(url, params)
self.debug("Posting JSON to {}", url)
return r_session.post(url, **kwargs).json()
return self.request("post", url, **kwargs).json()

@contextmanager
def handle_request(self) -> Iterator[None]:

@ -249,8 +232,10 @@ class BackendClass(type):
return cls.__name__.lower()


class Backend(RequestHandler, metaclass=BackendClass):
def __init__(self, config, log):
class Backend(LyricsRequestHandler, metaclass=BackendClass):
config: confuse.Subview

def __init__(self, config: confuse.Subview, log: Logger) -> None:
self._log = log
self.config = config

@ -354,10 +339,10 @@ class LRCLib(Backend):
if album:
get_params["album_name"] = album

yield self.fetch_json(self.SEARCH_URL, params=base_params)
yield self.get_json(self.SEARCH_URL, params=base_params)

with suppress(NotFoundError):
yield [self.fetch_json(self.GET_URL, params=get_params)]
with suppress(HTTPNotFoundError):
yield [self.get_json(self.GET_URL, params=get_params)]

@classmethod
def pick_best_match(cls, lyrics: Iterable[LRCLyrics]) -> LRCLyrics | None:

@ -382,7 +367,7 @@ class LRCLib(Backend):
class MusiXmatch(Backend):
URL_TEMPLATE = "https://www.musixmatch.com/lyrics/{}/{}"

REPLACEMENTS = {
REPLACEMENTS: ClassVar[dict[str, str]] = {
r"\s+": "-",
"<": "Less_Than",
">": "Greater_Than",

@ -405,7 +390,7 @@ class MusiXmatch(Backend):
def fetch(self, artist: str, title: str, *_) -> tuple[str, str] | None:
url = self.build_url(artist, title)

html = self.fetch_text(url)
html = self.get_text(url)
if "We detected that your IP is blocked" in html:
self.warn("Failed: Blocked IP address")
return None

@ -530,7 +515,7 @@ class SearchBackend(SoupMixin, Backend):
def fetch(self, artist: str, title: str, *_) -> tuple[str, str] | None:
"""Fetch lyrics for the given artist and title."""
for result in self.get_results(artist, title):
if (html := self.fetch_text(result.url)) and (
if (html := self.get_text(result.url)) and (
lyrics := self.scrape(html)
):
return lyrics, result.url

@ -560,7 +545,7 @@ class Genius(SearchBackend):
return {"Authorization": f"Bearer {self.config['genius_api_key']}"}

def search(self, artist: str, title: str) -> Iterable[SearchResult]:
search_data: GeniusAPI.Search = self.fetch_json(
search_data: GeniusAPI.Search = self.get_json(
self.SEARCH_URL,
params={"q": f"{artist} {title}"},
headers=self.headers,

@ -589,7 +574,7 @@ class Tekstowo(SearchBackend):
return self.SEARCH_URL.format(quote_plus(unidecode(artistitle)))

def search(self, artist: str, title: str) -> Iterable[SearchResult]:
if html := self.fetch_text(self.build_url(title, artist)):
if html := self.get_text(self.build_url(title, artist)):
soup = self.get_soup(html)
for tag in soup.select("div[class=flex-group] > a[title*=' - ']"):
artist, title = str(tag["title"]).split(" - ", 1)

@ -615,7 +600,7 @@ class Google(SearchBackend):
SEARCH_URL = "https://www.googleapis.com/customsearch/v1"

#: Exclude some letras.mus.br pages which do not contain lyrics.
EXCLUDE_PAGES = [
EXCLUDE_PAGES: ClassVar[list[str]] = [
"significado.html",
"traduccion.html",
"traducao.html",

@ -645,9 +630,12 @@ class Google(SearchBackend):
#: Split cleaned up URL title into artist and title parts.
URL_TITLE_PARTS_RE = re.compile(r" +(?:[ :|-]+|par|by) +|, ")

SOURCE_DIST_FACTOR = {"www.azlyrics.com": 0.5, "www.songlyrics.com": 0.6}
SOURCE_DIST_FACTOR: ClassVar[dict[str, float]] = {
"www.azlyrics.com": 0.5,
"www.songlyrics.com": 0.6,
}

ignored_domains: set[str] = set()
ignored_domains: ClassVar[set[str]] = set()

@classmethod
def pre_process_html(cls, html: str) -> str:
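
Several of the hunks above annotate mutable class-level defaults (REPLACEMENTS, EXCLUDE_PAGES, SOURCE_DIST_FACTOR, ignored_domains) with ClassVar. The annotation tells type checkers the attribute is shared class state rather than a per-instance field; the runtime sharing it documents looks like this:

from typing import ClassVar

class Backend:
    ignored_domains: ClassVar[set[str]] = set()  # one set shared by all instances

    def ignore(self, domain: str) -> None:
        self.ignored_domains.add(domain)  # mutates the shared class-level set

a, b = Backend(), Backend()
a.ignore("example.com")
assert "example.com" in b.ignored_domains  # visible through every instance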

@ -655,12 +643,12 @@ class Google(SearchBackend):
html = Html.remove_ads(super().pre_process_html(html))
return Html.remove_formatting(Html.merge_paragraphs(html))

def fetch_text(self, *args, **kwargs) -> str:
def get_text(self, *args, **kwargs) -> str:
"""Handle an error so that we can continue with the next URL."""
kwargs.setdefault("allow_redirects", False)
with self.handle_request():
try:
return super().fetch_text(*args, **kwargs)
return super().get_text(*args, **kwargs)
except CaptchaError:
self.ignored_domains.add(urlparse(args[0]).netloc)
raise

@ -716,7 +704,7 @@ class Google(SearchBackend):
"excludeTerms": ", ".join(self.EXCLUDE_PAGES),
}

data: GoogleCustomSearchAPI.Response = self.fetch_json(
data: GoogleCustomSearchAPI.Response = self.get_json(
self.SEARCH_URL, params=params
)
for item in data.get("items", []):

@ -741,11 +729,13 @@ class Google(SearchBackend):


@dataclass
class Translator(RequestHandler):
class Translator(LyricsRequestHandler):
TRANSLATE_URL = "https://api.cognitive.microsofttranslator.com/translate"
LINE_PARTS_RE = re.compile(r"^(\[\d\d:\d\d.\d\d\]|) *(.*)$")
SEPARATOR = " | "
remove_translations = partial(re.compile(r" / [^\n]+").sub, "")
remove_translations = staticmethod(
partial(re.compile(r" / [^\n]+").sub, "")
)

_log: Logger
api_key: str
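
Wrapping the precompiled substitution in staticmethod() above makes the class attribute explicit: partial objects are not descriptors, so they were never going to be bound to the instance, but the wrapper spells that out for readers and type checkers. A compact sketch:

import re
from functools import partial

class Translator:
    # Strips " / translation" suffixes; staticmethod documents that no
    # self argument is ever injected when this is called via an instance.
    remove_translations = staticmethod(partial(re.compile(r" / [^\n]+").sub, ""))

assert Translator.remove_translations("hello / bonjour") == "hello"
assert Translator().remove_translations("hi / salut\nbye / ciao") == "hi\nbye"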

@ -949,14 +939,14 @@ class RestFiles:
ui.print_(textwrap.dedent(text))


class LyricsPlugin(RequestHandler, plugins.BeetsPlugin):
BACKEND_BY_NAME = {
class LyricsPlugin(LyricsRequestHandler, plugins.BeetsPlugin):
BACKEND_BY_NAME: ClassVar[dict[str, type[Backend]]] = {
b.name: b for b in [LRCLib, Google, Genius, Tekstowo, MusiXmatch]
}

@cached_property
def backends(self) -> list[Backend]:
user_sources = self.config["sources"].get()
user_sources = self.config["sources"].as_str_seq()

chosen = sanitize_choices(user_sources, self.BACKEND_BY_NAME)
if "google" in chosen and not self.config["google_API_key"].get():

@ -13,48 +13,151 @@
# included in all copies or substantial portions of the Software.


from __future__ import annotations

import re
from dataclasses import dataclass, field
from functools import cached_property
from typing import TYPE_CHECKING, ClassVar

import musicbrainzngs
from requests.auth import HTTPDigestAuth

from beets import config, ui
from beets import __version__, config, ui
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand

SUBMISSION_CHUNK_SIZE = 200
FETCH_CHUNK_SIZE = 100
UUID_REGEX = r"^[a-f0-9]{8}(-[a-f0-9]{4}){3}-[a-f0-9]{12}$"
from ._utils.musicbrainz import MusicBrainzAPI

if TYPE_CHECKING:
from collections.abc import Iterable, Iterator

from requests import Response

from beets.importer import ImportSession, ImportTask
from beets.library import Album, Library

from ._typing import JSONDict

UUID_PAT = re.compile(r"^[a-f0-9]{8}(-[a-f0-9]{4}){3}-[a-f0-9]{12}$")


def mb_call(func, *args, **kwargs):
"""Call a MusicBrainz API function and catch exceptions."""
try:
return func(*args, **kwargs)
except musicbrainzngs.AuthenticationError:
raise ui.UserError("authentication with MusicBrainz failed")
except (musicbrainzngs.ResponseError, musicbrainzngs.NetworkError) as exc:
raise ui.UserError(f"MusicBrainz API error: {exc}")
except musicbrainzngs.UsageError:
raise ui.UserError("MusicBrainz credentials missing")
@dataclass
class MusicBrainzUserAPI(MusicBrainzAPI):
"""MusicBrainz API client with user authentication.

In order to retrieve private user collections and modify them, we need to
authenticate the requests with the user's MusicBrainz credentials.

def submit_albums(collection_id, release_ids):
"""Add all of the release IDs to the indicated collection. Multiple
requests are made if there are many release IDs to submit.
See documentation for authentication details:
https://musicbrainz.org/doc/MusicBrainz_API#Authentication

Note that the documentation misleadingly states HTTP 'basic' authentication,
and I had to reverse-engineer musicbrainzngs to discover that it actually
uses HTTP 'digest' authentication.
"""
for i in range(0, len(release_ids), SUBMISSION_CHUNK_SIZE):
chunk = release_ids[i : i + SUBMISSION_CHUNK_SIZE]
mb_call(musicbrainzngs.add_releases_to_collection, collection_id, chunk)

auth: HTTPDigestAuth = field(init=False)

class MusicBrainzCollectionPlugin(BeetsPlugin):
def __init__(self):
super().__init__()
def __post_init__(self) -> None:
super().__post_init__()
config["musicbrainz"]["pass"].redact = True
musicbrainzngs.auth(
self.auth = HTTPDigestAuth(
config["musicbrainz"]["user"].as_str(),
config["musicbrainz"]["pass"].as_str(),
)

def request(self, *args, **kwargs) -> Response:
"""Authenticate and include required client param in all requests."""
kwargs.setdefault("params", {})
kwargs["params"]["client"] = f"beets-{__version__}"
kwargs["auth"] = self.auth
return super().request(*args, **kwargs)

def browse_collections(self) -> list[JSONDict]:
"""Get all collections for the authenticated user."""
return self._browse("collection")
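
The docstring above records that these endpoints require HTTP digest, not basic, authentication, and request() adds the mandatory client parameter everywhere. Sketched with plain requests (URL shape and credentials are placeholders, not a tested endpoint):

import requests
from requests.auth import HTTPDigestAuth

auth = HTTPDigestAuth("mb-user", "mb-password")  # placeholder credentials
resp = requests.put(
    "https://musicbrainz.org/ws/2/collection/<collection-id>/releases/<mbid>",
    params={"client": "beets-x.y.z", "fmt": "json"},  # client identifies the app
    auth=auth,
)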


@dataclass
class MBCollection:
"""Representation of a user's MusicBrainz collection.

Provides convenient, chunked operations for retrieving releases and updating
the collection via the MusicBrainz web API. Fetch and submission limits are
controlled by class-level constants to avoid oversized requests.
"""

SUBMISSION_CHUNK_SIZE: ClassVar[int] = 200
FETCH_CHUNK_SIZE: ClassVar[int] = 100

data: JSONDict
mb_api: MusicBrainzUserAPI

@property
def id(self) -> str:
"""Unique identifier assigned to the collection by MusicBrainz."""
return self.data["id"]

@property
def release_count(self) -> int:
"""Total number of releases recorded in the collection."""
return self.data["release-count"]

@property
def releases_url(self) -> str:
"""Complete API endpoint URL for listing releases in this collection."""
return f"{self.mb_api.api_root}/collection/{self.id}/releases"

@property
def releases(self) -> list[JSONDict]:
"""Retrieve all releases in the collection, fetched in successive pages.

The fetch is performed in chunks and returns a flattened sequence of
release records.
"""
offsets = list(range(0, self.release_count, self.FETCH_CHUNK_SIZE))
return [r for offset in offsets for r in self.get_releases(offset)]

def get_releases(self, offset: int) -> list[JSONDict]:
"""Fetch a single page of releases beginning at a given position."""
return self.mb_api.get_json(
self.releases_url,
params={"limit": self.FETCH_CHUNK_SIZE, "offset": offset},
)["releases"]

@classmethod
def get_id_chunks(cls, id_list: list[str]) -> Iterator[list[str]]:
"""Yield successive sublists of identifiers sized for safe submission.

Splits a long sequence of identifiers into batches that respect the
service's submission limits to avoid oversized requests.
"""
for i in range(0, len(id_list), cls.SUBMISSION_CHUNK_SIZE):
yield id_list[i : i + cls.SUBMISSION_CHUNK_SIZE]

def add_releases(self, releases: list[str]) -> None:
"""Add releases to the collection in batches."""
for chunk in self.get_id_chunks(releases):
# Need to escape semicolons: https://github.com/psf/requests/issues/6990
self.mb_api.put(f"{self.releases_url}/{'%3B'.join(chunk)}")

def remove_releases(self, releases: list[str]) -> None:
"""Remove releases from the collection in chunks."""
for chunk in self.get_id_chunks(releases):
# Need to escape semicolons: https://github.com/psf/requests/issues/6990
self.mb_api.delete(f"{self.releases_url}/{'%3B'.join(chunk)}")
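
get_id_chunks() above is the standard slice-stepping batcher; the final chunk simply comes out shorter. In isolation:

def chunks(ids: list[str], size: int):
    for i in range(0, len(ids), size):  # 0, size, 2*size, ...
        yield ids[i : i + size]

assert list(chunks(["a", "b", "c", "d", "e"], 2)) == [["a", "b"], ["c", "d"], ["e"]]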


def submit_albums(collection: MBCollection, release_ids):
"""Add all of the release IDs to the indicated collection. Multiple
requests are made if there are many release IDs to submit.
"""
collection.add_releases(release_ids)


class MusicBrainzCollectionPlugin(BeetsPlugin):
def __init__(self) -> None:
super().__init__()
self.config.add(
{
"auto": False,

@ -65,47 +168,32 @@ class MusicBrainzCollectionPlugin(BeetsPlugin):
if self.config["auto"]:
self.import_stages = [self.imported]

def _get_collection(self):
collections = mb_call(musicbrainzngs.get_collections)
if not collections["collection-list"]:
@cached_property
def mb_api(self) -> MusicBrainzUserAPI:
return MusicBrainzUserAPI()

@cached_property
def collection(self) -> MBCollection:
if not (collections := self.mb_api.browse_collections()):
raise ui.UserError("no collections exist for user")

# Get all release collection IDs, avoiding event collections
collection_ids = [
x["id"]
for x in collections["collection-list"]
if x["entity-type"] == "release"
]
if not collection_ids:
if not (
collection_by_id := {
c["id"]: c for c in collections if c["entity-type"] == "release"
}
):
raise ui.UserError("No release collection found.")

# Check that the collection exists so we can present a nice error
collection = self.config["collection"].as_str()
if collection:
if collection not in collection_ids:
raise ui.UserError(f"invalid collection ID: {collection}")
return collection
if collection_id := self.config["collection"].as_str():
if not (collection := collection_by_id.get(collection_id)):
raise ui.UserError(f"invalid collection ID: {collection_id}")
else:
# No specified collection. Just return the first collection ID
collection = next(iter(collection_by_id.values()))

# No specified collection. Just return the first collection ID
return collection_ids[0]

def _get_albums_in_collection(self, id):
def _fetch(offset):
res = mb_call(
musicbrainzngs.get_releases_in_collection,
id,
limit=FETCH_CHUNK_SIZE,
offset=offset,
)["collection"]
return [x["id"] for x in res["release-list"]], res["release-count"]

offset = 0
albums_in_collection, release_count = _fetch(offset)
for i in range(0, release_count, FETCH_CHUNK_SIZE):
albums_in_collection += _fetch(offset)[0]
offset += FETCH_CHUNK_SIZE

return albums_in_collection
return MBCollection(collection, self.mb_api)

def commands(self):
mbupdate = Subcommand("mbupdate", help="Update MusicBrainz collection")

@ -120,45 +208,33 @@ class MusicBrainzCollectionPlugin(BeetsPlugin):
mbupdate.func = self.update_collection
return [mbupdate]

def remove_missing(self, collection_id, lib_albums):
lib_ids = {x.mb_albumid for x in lib_albums}
albums_in_collection = self._get_albums_in_collection(collection_id)
remove_me = list(set(albums_in_collection) - lib_ids)
for i in range(0, len(remove_me), FETCH_CHUNK_SIZE):
chunk = remove_me[i : i + FETCH_CHUNK_SIZE]
mb_call(
musicbrainzngs.remove_releases_from_collection,
collection_id,
chunk,
)

def update_collection(self, lib, opts, args):
def update_collection(self, lib: Library, opts, args) -> None:
self.config.set_args(opts)
remove_missing = self.config["remove"].get(bool)
self.update_album_list(lib, lib.albums(), remove_missing)

def imported(self, session, task):
def imported(self, session: ImportSession, task: ImportTask) -> None:
"""Add each imported album to the collection."""
if task.is_album:
self.update_album_list(session.lib, [task.album])
self.update_album_list(
session.lib, [task.album], remove_missing=False
)

def update_album_list(self, lib, album_list, remove_missing=False):
def update_album_list(
self, lib: Library, albums: Iterable[Album], remove_missing: bool
) -> None:
"""Update the MusicBrainz collection from a list of Beets albums"""
collection_id = self._get_collection()
collection = self.collection

# Get a list of all the album IDs.
album_ids = []
for album in album_list:
aid = album.mb_albumid
if aid:
if re.match(UUID_REGEX, aid):
album_ids.append(aid)
else:
self._log.info("skipping invalid MBID: {}", aid)
album_ids = [id_ for a in albums if UUID_PAT.match(id_ := a.mb_albumid)]

# Submit to MusicBrainz.
self._log.info("Updating MusicBrainz collection {}...", collection_id)
submit_albums(collection_id, album_ids)
self._log.info("Updating MusicBrainz collection {}...", collection.id)
collection.add_releases(album_ids)
if remove_missing:
self.remove_missing(collection_id, lib.albums())
lib_ids = {x.mb_albumid for x in lib.albums()}
albums_in_collection = {r["id"] for r in collection.releases}
collection.remove_releases(list(albums_in_collection - lib_ids))

self._log.info("...MusicBrainz collection updated.")

350 beetsplug/mbpseudo.py Normal file

@ -0,0 +1,350 @@
# This file is part of beets.
# Copyright 2025, Alexis Sarda-Espinosa.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.

"""Adds pseudo-releases from MusicBrainz as candidates during import."""

from __future__ import annotations

import itertools
from copy import deepcopy
from typing import TYPE_CHECKING, Any

import mediafile
from typing_extensions import override

from beets import config
from beets.autotag.distance import distance
from beets.autotag.hooks import AlbumInfo
from beets.autotag.match import assign_items
from beets.plugins import find_plugins
from beets.util.id_extractors import extract_release_id
from beetsplug.musicbrainz import (
MusicBrainzPlugin,
_merge_pseudo_and_actual_album,
_preferred_alias,
)

if TYPE_CHECKING:
from collections.abc import Iterable, Sequence

from beets.autotag import AlbumMatch
from beets.autotag.distance import Distance
from beets.library import Item
from beetsplug._typing import JSONDict

_STATUS_PSEUDO = "Pseudo-Release"


class MusicBrainzPseudoReleasePlugin(MusicBrainzPlugin):
def __init__(self) -> None:
super().__init__()

self.config.add(
{
"scripts": [],
"custom_tags_only": False,
"album_custom_tags": {
"album_transl": "album",
"album_artist_transl": "artist",
},
"track_custom_tags": {
"title_transl": "title",
"artist_transl": "artist",
},
}
)

self._scripts = self.config["scripts"].as_str_seq()
self._log.debug("Desired scripts: {0}", self._scripts)

album_custom_tags = self.config["album_custom_tags"].get().keys()
track_custom_tags = self.config["track_custom_tags"].get().keys()
self._log.debug(
"Custom tags for albums and tracks: {0} + {1}",
album_custom_tags,
track_custom_tags,
)
for custom_tag in album_custom_tags | track_custom_tags:
if not isinstance(custom_tag, str):
continue

media_field = mediafile.MediaField(
mediafile.MP3DescStorageStyle(custom_tag),
mediafile.MP4StorageStyle(
f"----:com.apple.iTunes:{custom_tag}"
),
mediafile.StorageStyle(custom_tag),
mediafile.ASFStorageStyle(custom_tag),
)
try:
self.add_media_field(custom_tag, media_field)
except ValueError:
# ignore errors due to duplicates
pass

self.register_listener("pluginload", self._on_plugins_loaded)
self.register_listener("album_matched", self._adjust_final_album_match)

# noinspection PyMethodMayBeStatic
def _on_plugins_loaded(self):
for plugin in find_plugins():
if isinstance(plugin, MusicBrainzPlugin) and not isinstance(
plugin, MusicBrainzPseudoReleasePlugin
):
raise RuntimeError(
"The musicbrainz plugin should not be enabled together with"
" the mbpseudo plugin"
)

@override
def candidates(
self,
items: Sequence[Item],
artist: str,
album: str,
va_likely: bool,
) -> Iterable[AlbumInfo]:
if len(self._scripts) == 0:
yield from super().candidates(items, artist, album, va_likely)
else:
for album_info in super().candidates(
items, artist, album, va_likely
):
if isinstance(album_info, PseudoAlbumInfo):
self._log.debug(
"Using {0} release for distance calculations for album {1}",
album_info.determine_best_ref(items),
album_info.album_id,
)
yield album_info  # first yield pseudo to give it priority
yield album_info.get_official_release()
else:
yield album_info

@override
def album_info(self, release: JSONDict) -> AlbumInfo:
official_release = super().album_info(release)

if release.get("status") == _STATUS_PSEUDO:
return official_release

if (ids := self._intercept_mb_release(release)) and (
album_id := self._extract_id(ids[0])
):
raw_pseudo_release = self.mb_api.get_release(album_id)
pseudo_release = super().album_info(raw_pseudo_release)

if self.config["custom_tags_only"].get(bool):
self._replace_artist_with_alias(
raw_pseudo_release, pseudo_release
)
self._add_custom_tags(official_release, pseudo_release)
return official_release
else:
return PseudoAlbumInfo(
pseudo_release=_merge_pseudo_and_actual_album(
pseudo_release, official_release
),
official_release=official_release,
)
else:
return official_release

def _intercept_mb_release(self, data: JSONDict) -> list[str]:
album_id = data["id"] if "id" in data else None
if self._has_desired_script(data) or not isinstance(album_id, str):
return []

return [
pr_id
for rel in data.get("release-relations", [])
if (pr_id := self._wanted_pseudo_release_id(album_id, rel))
is not None
]

def _has_desired_script(self, release: JSONDict) -> bool:
if len(self._scripts) == 0:
return False
elif script := release.get("text-representation", {}).get("script"):
return script in self._scripts
else:
return False

def _wanted_pseudo_release_id(
self,
album_id: str,
relation: JSONDict,
) -> str | None:
if (
len(self._scripts) == 0
or relation.get("type", "") != "transl-tracklisting"
or relation.get("direction", "") != "forward"
or "release" not in relation
):
return None

release = relation["release"]
if "id" in release and self._has_desired_script(release):
self._log.debug(
"Adding pseudo-release {0} for main release {1}",
release["id"],
album_id,
)
return release["id"]
else:
return None

def _replace_artist_with_alias(
self,
raw_pseudo_release: JSONDict,
pseudo_release: AlbumInfo,
):
"""Use the pseudo-release's language to search for artist
alias if the user hasn't configured import languages."""

if len(config["import"]["languages"].as_str_seq()) > 0:
return

lang = raw_pseudo_release.get("text-representation", {}).get("language")
artist_credits = raw_pseudo_release.get("release-group", {}).get(
"artist-credit", []
)
aliases = [
artist_credit.get("artist", {}).get("aliases", [])
for artist_credit in artist_credits
]

if lang and len(lang) >= 2 and len(aliases) > 0:
locale = lang[0:2]
aliases_flattened = list(itertools.chain.from_iterable(aliases))
self._log.debug(
"Using locale '{0}' to search aliases {1}",
locale,
aliases_flattened,
)
if alias_dict := _preferred_alias(aliases_flattened, [locale]):
if alias := alias_dict.get("name"):
self._log.debug("Got alias '{0}'", alias)
pseudo_release.artist = alias
for track in pseudo_release.tracks:
track.artist = alias

def _add_custom_tags(
self,
official_release: AlbumInfo,
pseudo_release: AlbumInfo,
):
for tag_key, pseudo_key in (
self.config["album_custom_tags"].get().items()
):
official_release[tag_key] = pseudo_release[pseudo_key]

track_custom_tags = self.config["track_custom_tags"].get().items()
for track, pseudo_track in zip(
official_release.tracks, pseudo_release.tracks
):
for tag_key, pseudo_key in track_custom_tags:
track[tag_key] = pseudo_track[pseudo_key]

def _adjust_final_album_match(self, match: AlbumMatch):
album_info = match.info
if isinstance(album_info, PseudoAlbumInfo):
self._log.debug(
"Switching {0} to pseudo-release source for final proposal",
album_info.album_id,
)
album_info.use_pseudo_as_ref()
new_pairs, *_ = assign_items(match.items, album_info.tracks)
album_info.mapping = dict(new_pairs)

if album_info.data_source == self.data_source:
album_info.data_source = "MusicBrainz"

@override
def _extract_id(self, url: str) -> str | None:
return extract_release_id("MusicBrainz", url)


class PseudoAlbumInfo(AlbumInfo):
"""This is a not-so-ugly hack.

We want the pseudo-release to result in a distance that is lower or equal to that of
the official release, otherwise it won't qualify as a good candidate. However, if
the input is in a script that's different from the pseudo-release (and we want to
translate/transliterate it in the library), it will receive unwanted penalties.

This class is essentially a view of the ``AlbumInfo`` of both official and
pseudo-releases, where it's possible to change the details that are exposed to other
parts of the auto-tagger, enabling a "fair" distance calculation based on the
current input's script but still preferring the translation/transliteration in the
final proposal.
"""

def __init__(
self,
pseudo_release: AlbumInfo,
official_release: AlbumInfo,
**kwargs,
):
super().__init__(pseudo_release.tracks, **kwargs)
self.__dict__["_pseudo_source"] = True
self.__dict__["_official_release"] = official_release
for k, v in pseudo_release.items():
if k not in kwargs:
self[k] = v

def get_official_release(self) -> AlbumInfo:
return self.__dict__["_official_release"]

def determine_best_ref(self, items: Sequence[Item]) -> str:
self.use_pseudo_as_ref()
pseudo_dist = self._compute_distance(items)

self.use_official_as_ref()
official_dist = self._compute_distance(items)

if official_dist < pseudo_dist:
self.use_official_as_ref()
return "official"
else:
self.use_pseudo_as_ref()
return "pseudo"

def _compute_distance(self, items: Sequence[Item]) -> Distance:
mapping, _, _ = assign_items(items, self.tracks)
return distance(items, self, mapping)

def use_pseudo_as_ref(self):
self.__dict__["_pseudo_source"] = True

def use_official_as_ref(self):
self.__dict__["_pseudo_source"] = False

def __getattr__(self, attr: str) -> Any:
# ensure we don't duplicate an official release's id, always return pseudo's
if self.__dict__["_pseudo_source"] or attr == "album_id":
return super().__getattr__(attr)
else:
return self.__dict__["_official_release"].__getattr__(attr)

def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)

memo[id(self)] = result
result.__dict__.update(self.__dict__)
for k, v in self.items():
result[k] = deepcopy(v, memo)

return result
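
PseudoAlbumInfo is a switchable facade: plain attributes resolve normally, and everything else is routed to whichever of the two AlbumInfo objects is currently the reference. The delegation trick, reduced to toy dicts (not beets' real AlbumInfo):

class DualView:
    def __init__(self, pseudo: dict, official: dict) -> None:
        # Write through __dict__ to avoid triggering __getattr__/__setattr__.
        self.__dict__["_pseudo_source"] = True
        self.__dict__["_pseudo"] = pseudo
        self.__dict__["_official"] = official

    def __getattr__(self, attr: str):
        # Only invoked when normal attribute lookup fails.
        src = self.__dict__["_pseudo" if self.__dict__["_pseudo_source"] else "_official"]
        try:
            return src[attr]
        except KeyError:
            raise AttributeError(attr) from None

view = DualView({"album": "transliterated"}, {"album": "official"})
assert view.album == "transliterated"
view.__dict__["_pseudo_source"] = False
assert view.album == "official"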

@ -26,8 +26,7 @@ import subprocess
from beets import ui
from beets.autotag import Recommendation
from beets.plugins import BeetsPlugin
from beets.ui.commands import PromptChoice
from beets.util import displayable_path
from beets.util import PromptChoice, displayable_path
from beetsplug.info import print_data



@ -70,7 +69,7 @@ class MBSubmitPlugin(BeetsPlugin):
paths.append(displayable_path(p))
try:
picard_path = self.config["picard_path"].as_str()
subprocess.Popen([picard_path] + paths)
subprocess.Popen([picard_path, *paths])
self._log.info("launched picard from\n{}", picard_path)
except OSError as exc:
self._log.error("Could not open picard, got error:\n{}", exc)
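
The Popen change above swaps list concatenation for in-place unpacking; besides reading cleaner, unpacking accepts any iterable of paths:

cmd = "picard"
paths = ("a.flac", "b.flac")  # a tuple works here; `+` would need list(paths)
assert [cmd, *paths] == [cmd] + list(paths)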

@ -71,7 +71,7 @@ class MBSyncPlugin(BeetsPlugin):
"""Retrieve and apply info from the autotagger for items matched by
query.
"""
for item in lib.items(query + ["singleton:true"]):
for item in lib.items([*query, "singleton:true"]):
if not item.mb_trackid:
self._log.info(
"Skipping singleton with no mb_trackid: {}", item

@ -121,18 +121,20 @@ class MBSyncPlugin(BeetsPlugin):
# Construct a track mapping according to MBIDs (release track MBIDs
# first, if available, and recording MBIDs otherwise). This should
# work for albums that have missing or extra tracks.
mapping = {}
item_info_pairs = []
items = list(album.items())
for item in items:
if (
item.mb_releasetrackid
and item.mb_releasetrackid in releasetrack_index
):
mapping[item] = releasetrack_index[item.mb_releasetrackid]
item_info_pairs.append(
(item, releasetrack_index[item.mb_releasetrackid])
)
else:
candidates = track_index[item.mb_trackid]
if len(candidates) == 1:
mapping[item] = candidates[0]
item_info_pairs.append((item, candidates[0]))
else:
# If there are multiple copies of a recording, they are
# disambiguated using their disc and track number.

@ -141,13 +143,13 @@ class MBSyncPlugin(BeetsPlugin):
c.medium_index == item.track
and c.medium == item.disc
):
mapping[item] = c
item_info_pairs.append((item, c))
break

# Apply.
self._log.debug("applying changes to {}", album)
with lib.transaction():
autotag.apply_metadata(album_info, mapping)
autotag.apply_metadata(album_info, item_info_pairs)
changed = False
# Find any changed item to apply changes to album.
any_changed_item = items[0]
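
The mapping rewrite above trades a dict keyed by item for an ordered list of (item, track_info) pairs, presumably so apply_metadata receives every pairing in order rather than one entry per key. The behavioral difference in miniature:

mapping = {}
mapping["item-1"] = "info-a"
mapping["item-1"] = "info-b"  # keyed storage silently overwrites
assert len(mapping) == 1

pairs = [("item-1", "info-a"), ("item-1", "info-b")]  # every pairing survives
assert len(pairs) == 2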

@ -14,14 +14,20 @@

"""Synchronize information from music player libraries"""

from __future__ import annotations

from abc import ABCMeta, abstractmethod
from importlib import import_module
from typing import TYPE_CHECKING, ClassVar

from confuse import ConfigValueError

from beets import ui
from beets.plugins import BeetsPlugin

if TYPE_CHECKING:
from beets.dbcore import types

METASYNC_MODULE = "beetsplug.metasync"

# Dictionary to map the MODULE and the CLASS NAME of meta sources

@ -32,8 +38,9 @@ SOURCES = {


class MetaSource(metaclass=ABCMeta):
item_types: ClassVar[dict[str, types.Type]]

def __init__(self, config, log):
self.item_types = {}
self.config = config
self._log = log

@ -17,6 +17,7 @@
from datetime import datetime
from os.path import basename
from time import mktime
from typing import ClassVar
from xml.sax.saxutils import quoteattr

from beets.dbcore import types

@ -35,7 +36,7 @@ dbus = import_dbus()


class Amarok(MetaSource):
item_types = {
item_types: ClassVar[dict[str, types.Type]] = {
"amarok_rating": types.INTEGER,
"amarok_score": types.FLOAT,
"amarok_uid": types.STRING,

@ -20,6 +20,7 @@ import shutil
import tempfile
from contextlib import contextmanager
from time import mktime
from typing import ClassVar
from urllib.parse import unquote, urlparse

from confuse import ConfigValueError

@ -58,7 +59,7 @@ def _norm_itunes_path(path):


class Itunes(MetaSource):
item_types = {
item_types: ClassVar[dict[str, types.Type]] = {
"itunes_rating": types.INTEGER,  # 0..100 scale
"itunes_playcount": types.INTEGER,
"itunes_skipcount": types.INTEGER,

@ -15,18 +15,26 @@

"""List missing tracks."""

from collections import defaultdict
from collections.abc import Iterator
from __future__ import annotations

import musicbrainzngs
from musicbrainzngs.musicbrainz import MusicBrainzError
from collections import defaultdict
from typing import TYPE_CHECKING, ClassVar

import requests

from beets import config, metadata_plugins
from beets.dbcore import types
from beets.library import Album, Item, Library
from beets.library import Item
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand, print_

from ._utils.musicbrainz import MusicBrainzAPIMixin

if TYPE_CHECKING:
from collections.abc import Iterator

from beets.library import Album, Library

MB_ARTIST_QUERY = r"mb_albumartistid::^\w{8}-\w{4}-\w{4}-\w{4}-\w{12}$"


@ -85,10 +93,10 @@ def _item(track_info, album_info, album_id):
)


class MissingPlugin(BeetsPlugin):
class MissingPlugin(MusicBrainzAPIMixin, BeetsPlugin):
"""List missing tracks"""

album_types = {
album_types: ClassVar[dict[str, types.Type]] = {
"missing": types.INTEGER,
}

@ -189,19 +197,19 @@ class MissingPlugin(BeetsPlugin):
calculating_total = self.config["total"].get()
for (artist, artist_id), album_ids in album_ids_by_artist.items():
try:
resp = musicbrainzngs.browse_release_groups(artist=artist_id)
except MusicBrainzError as err:
resp = self.mb_api.browse_release_groups(artist=artist_id)
except requests.exceptions.RequestException:
self._log.info(
"Couldn't fetch info for artist '{}' ({}) - '{}'",
"Couldn't fetch info for artist '{}' ({})",
artist,
artist_id,
err,
exc_info=True,
)
continue

missing_titles = [
f"{artist} - {rg['title']}"
for rg in resp["release-group-list"]
for rg in resp
if rg["id"] not in album_ids
]
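
The new except block above drops the exception text from the message and passes exc_info=True instead, which attaches the full traceback to the log record. The same pattern with the stdlib logger (beets' logger wraps this, so treat the exact call shape as illustrative):

import logging

log = logging.getLogger("missing")
try:
    raise ConnectionError("boom")
except ConnectionError:
    # exc_info=True appends the formatted traceback; no manual interpolation.
    log.info("Couldn't fetch info for artist '%s' (%s)", "Foo", "mbid", exc_info=True)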

@ -15,6 +15,7 @@

import os
import time
from typing import ClassVar

import mpd


@ -318,7 +319,7 @@ class MPDStats:


class MPDStatsPlugin(plugins.BeetsPlugin):
item_types = {
item_types: ClassVar[dict[str, types.Type]] = {
"play_count": types.INTEGER,
"skip_count": types.INTEGER,
"last_played": types.DATE,

@ -16,24 +16,27 @@

from __future__ import annotations

import traceback
from collections import Counter
from contextlib import suppress
from functools import cached_property
from itertools import product
from typing import TYPE_CHECKING, Any, Iterable, Sequence
from typing import TYPE_CHECKING, Any
from urllib.parse import urljoin

import musicbrainzngs
from confuse.exceptions import NotFoundError

import beets
import beets.autotag.hooks
from beets import config, plugins, util
from beets.metadata_plugins import MetadataSourcePlugin
from beets.util.deprecation import deprecate_for_user
from beets.util.id_extractors import extract_release_id

from ._utils.musicbrainz import MusicBrainzAPIMixin
from ._utils.requests import HTTPNotFoundError

if TYPE_CHECKING:
from collections.abc import Iterable, Sequence
from typing import Literal

from beets.library import Item

@ -55,55 +58,33 @@ FIELDS_TO_MB_KEYS = {
"year": "date",
}

musicbrainzngs.set_useragent("beets", beets.__version__, "https://beets.io/")

RELEASE_INCLUDES = [
"artists",
"media",
"recordings",
"release-groups",
"labels",
"artist-credits",
"aliases",
"recording-level-rels",
"work-rels",
"work-level-rels",
"artist-rels",
"isrcs",
"url-rels",
"release-rels",
"genres",
"tags",
]

class MusicBrainzAPIError(util.HumanReadableError):
"""An error while talking to MusicBrainz. The `query` field is the
parameter to the action and may have any type.
"""

def __init__(self, reason, verb, query, tb=None):
self.query = query
if isinstance(reason, musicbrainzngs.WebServiceError):
reason = "MusicBrainz not reachable"
super().__init__(reason, verb, tb)

def get_message(self):
return f"{self._reasonstr()} in {self.verb} with query {self.query!r}"


RELEASE_INCLUDES = list(
{
"artists",
"media",
"recordings",
"release-groups",
"labels",
"artist-credits",
"aliases",
"recording-level-rels",
"work-rels",
"work-level-rels",
"artist-rels",
"isrcs",
"url-rels",
"release-rels",
"tags",
}
& set(musicbrainzngs.VALID_INCLUDES["release"])
)

TRACK_INCLUDES = list(
{
"artists",
"aliases",
"isrcs",
"work-level-rels",
"artist-rels",
}
& set(musicbrainzngs.VALID_INCLUDES["recording"])
)
TRACK_INCLUDES = [
"artists",
"aliases",
"isrcs",
"work-level-rels",
"artist-rels",
]

BROWSE_INCLUDES = [
"artist-credits",

@ -112,19 +93,18 @@ BROWSE_INCLUDES = [
"recording-rels",
"release-rels",
]
if "work-level-rels" in musicbrainzngs.VALID_BROWSE_INCLUDES["recording"]:
BROWSE_INCLUDES.append("work-level-rels")
BROWSE_CHUNKSIZE = 100
BROWSE_MAXTRACKS = 500


def _preferred_alias(aliases: list[JSONDict]):
"""Given an list of alias structures for an artist credit, select
and return the user's preferred alias alias or None if no matching
alias is found.
def _preferred_alias(
aliases: list[JSONDict], languages: list[str] | None = None
) -> JSONDict | None:
"""Given a list of alias structures for an artist credit, select
and return the user's preferred alias or None if no matching
"""
if not aliases:
return
return None

# Only consider aliases that have locales set.
valid_aliases = [a for a in aliases if "locale" in a]

@ -134,15 +114,18 @@ def _preferred_alias(aliases: list[JSONDict]):
ignored_alias_types = [a.lower() for a in ignored_alias_types]

# Search configured locales in order.
for locale in config["import"]["languages"].as_str_seq():
if languages is None:
languages = config["import"]["languages"].as_str_seq()

for locale in languages:
# Find matching primary aliases for this locale that are not
# being ignored
matches = []
for alias in valid_aliases:
if (
alias["locale"] == locale
and "primary" in alias
and alias.get("type", "").lower() not in ignored_alias_types
and alias.get("primary")
and (alias.get("type") or "").lower() not in ignored_alias_types
):
matches.append(alias)

@ -152,6 +135,8 @@ def _preferred_alias(aliases: list[JSONDict]):

return matches[0]

return None


def _multi_artist_credit(
credit: list[JSONDict], include_join_phrase: bool

@ -164,36 +149,33 @@ def _multi_artist_credit(
artist_sort_parts = []
artist_credit_parts = []
for el in credit:
if isinstance(el, str):
# Join phrase.
if include_join_phrase:
artist_parts.append(el)
artist_credit_parts.append(el)
artist_sort_parts.append(el)
alias = _preferred_alias(el["artist"].get("aliases", ()))

# An artist.
if alias:
cur_artist_name = alias["name"]
else:
alias = _preferred_alias(el["artist"].get("alias-list", ()))
cur_artist_name = el["artist"]["name"]
artist_parts.append(cur_artist_name)

# An artist.
if alias:
cur_artist_name = alias["alias"]
else:
cur_artist_name = el["artist"]["name"]
artist_parts.append(cur_artist_name)
# Artist sort name.
if alias:
artist_sort_parts.append(alias["sort-name"])
elif "sort-name" in el["artist"]:
artist_sort_parts.append(el["artist"]["sort-name"])
else:
artist_sort_parts.append(cur_artist_name)

# Artist sort name.
if alias:
artist_sort_parts.append(alias["sort-name"])
elif "sort-name" in el["artist"]:
artist_sort_parts.append(el["artist"]["sort-name"])
else:
artist_sort_parts.append(cur_artist_name)
# Artist credit.
if "name" in el:
artist_credit_parts.append(el["name"])
else:
artist_credit_parts.append(cur_artist_name)

# Artist credit.
if "name" in el:
artist_credit_parts.append(el["name"])
else:
artist_credit_parts.append(cur_artist_name)
if include_join_phrase and (joinphrase := el.get("joinphrase")):
artist_parts.append(joinphrase)
artist_sort_parts.append(joinphrase)
artist_credit_parts.append(joinphrase)

return (
artist_parts,

@ -263,10 +245,11 @@ def _preferred_release_event(
].as_str_seq()

for country in preferred_countries:
for event in release.get("release-event-list", {}):
for event in release.get("release-events", {}):
try:
if country in event["area"]["iso-3166-1-code-list"]:
return country, event["date"]
if area := event.get("area"):
if country in area["iso-3166-1-codes"]:
return country, event["date"]
except KeyError:
pass

@ -297,33 +280,9 @@ def _set_date_str(
setattr(info, key, date_num)


def _is_translation(r):
_trans_key = "transl-tracklisting"
return r["type"] == _trans_key and r["direction"] == "backward"


def _find_actual_release_from_pseudo_release(
pseudo_rel: JSONDict,
) -> JSONDict | None:
try:
relations = pseudo_rel["release"]["release-relation-list"]
except KeyError:
return None

# currently we only support trans(liter)ation's
translations = [r for r in relations if _is_translation(r)]

if not translations:
return None

actual_id = translations[0]["target"]

return musicbrainzngs.get_release_by_id(actual_id, RELEASE_INCLUDES)


def _merge_pseudo_and_actual_album(
pseudo: beets.autotag.hooks.AlbumInfo, actual: beets.autotag.hooks.AlbumInfo
) -> beets.autotag.hooks.AlbumInfo | None:
) -> beets.autotag.hooks.AlbumInfo:
"""
Merges a pseudo release with its actual release.

@ -361,7 +320,11 @@ def _merge_pseudo_and_actual_album(
|
|||
return merged
|
||||
|
||||
|
||||
class MusicBrainzPlugin(MetadataSourcePlugin):
|
||||
class MusicBrainzPlugin(MusicBrainzAPIMixin, MetadataSourcePlugin):
|
||||
@cached_property
|
||||
def genres_field(self) -> str:
|
||||
return f"{self.config['genres_tag'].as_choice(['genre', 'tag'])}s"
|
||||
|
||||
def __init__(self):
|
||||
"""Set up the python-musicbrainz-ngs module according to settings
|
||||
from the beets configuration. This should be called at startup.
|
||||
|
|
@ -369,11 +332,8 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
super().__init__()
|
||||
self.config.add(
|
||||
{
|
||||
"host": "musicbrainz.org",
|
||||
"https": False,
|
||||
"ratelimit": 1,
|
||||
"ratelimit_interval": 1,
|
||||
"genres": False,
|
||||
"genres_tag": "genre",
|
||||
"external_ids": {
|
||||
"discogs": False,
|
||||
"bandcamp": False,
|
||||
|
|
@ -389,20 +349,11 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
self.config["search_limit"] = self.config["match"][
|
||||
"searchlimit"
|
||||
].get()
|
||||
self._log.warning(
|
||||
"'musicbrainz.searchlimit' option is deprecated and will be "
|
||||
"removed in 3.0.0. Use 'musicbrainz.search_limit' instead."
|
||||
deprecate_for_user(
|
||||
self._log,
|
||||
"'musicbrainz.searchlimit' configuration option",
|
||||
"'musicbrainz.search_limit'",
|
||||
)
|
||||
hostname = self.config["host"].as_str()
|
||||
https = self.config["https"].get(bool)
|
||||
# Only call set_hostname when a custom server is configured. Since
|
||||
# musicbrainz-ngs connects to musicbrainz.org with HTTPS by default
|
||||
if hostname != "musicbrainz.org":
|
||||
musicbrainzngs.set_hostname(hostname, https)
|
||||
musicbrainzngs.set_rate_limit(
|
||||
self.config["ratelimit_interval"].as_number(),
|
||||
self.config["ratelimit"].get(int),
|
||||
)
|
||||
|
||||
def track_info(
|
||||
self,
|
||||
|
|
@ -449,9 +400,9 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
info.artists_ids = _artist_ids(recording["artist-credit"])
|
||||
info.artist_id = info.artists_ids[0]
|
||||
|
||||
if recording.get("artist-relation-list"):
|
||||
if recording.get("artist-relations"):
|
||||
info.remixer = _get_related_artist_names(
|
||||
recording["artist-relation-list"], relation_type="remixer"
|
||||
recording["artist-relations"], relation_type="remixer"
|
||||
)
|
||||
|
||||
if recording.get("length"):
|
||||
|
|
@ -459,13 +410,13 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
|
||||
info.trackdisambig = recording.get("disambiguation")
|
||||
|
||||
if recording.get("isrc-list"):
|
||||
info.isrc = ";".join(recording["isrc-list"])
|
||||
if recording.get("isrcs"):
|
||||
info.isrc = ";".join(recording["isrcs"])
|
||||
|
||||
lyricist = []
|
||||
composer = []
|
||||
composer_sort = []
|
||||
for work_relation in recording.get("work-relation-list", ()):
|
||||
for work_relation in recording.get("work-relations", ()):
|
||||
if work_relation["type"] != "performance":
|
||||
continue
|
||||
info.work = work_relation["work"]["title"]
|
||||
|
|
@ -474,7 +425,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
|
|||
info.work_disambig = work_relation["work"]["disambiguation"]
|
||||
|
||||
for artist_relation in work_relation["work"].get(
|
||||
"artist-relation-list", ()
|
||||
"artist-relations", ()
|
||||
):
|
||||
if "type" in artist_relation:
|
||||
type = artist_relation["type"]
|
||||
|
|
@@ -492,7 +443,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
         info.composer_sort = ", ".join(composer_sort)

         arranger = []
-        for artist_relation in recording.get("artist-relation-list", ()):
+        for artist_relation in recording.get("artist-relations", ()):
             if "type" in artist_relation:
                 type = artist_relation["type"]
                 if type == "arranger":
@@ -524,9 +475,9 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
             release["artist-credit"], include_join_phrase=False
         )

-        ntracks = sum(len(m["track-list"]) for m in release["medium-list"])
+        ntracks = sum(len(m["tracks"]) for m in release["media"])

-        # The MusicBrainz API omits 'artist-relation-list' and 'work-relation-list'
+        # The MusicBrainz API omits 'relations'
         # when the release has more than 500 tracks. So we use browse_recordings
         # on chunks of tracks to recover the same information in this case.
         if ntracks > BROWSE_MAXTRACKS:
@@ -535,35 +486,32 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
             for i in range(0, ntracks, BROWSE_CHUNKSIZE):
                 self._log.debug("Retrieving tracks starting at {}", i)
                 recording_list.extend(
-                    musicbrainzngs.browse_recordings(
-                        release=release["id"],
-                        limit=BROWSE_CHUNKSIZE,
-                        includes=BROWSE_INCLUDES,
-                        offset=i,
-                    )["recording-list"]
+                    self.mb_api.browse_recordings(
+                        release=release["id"], offset=i
+                    )
                 )
             track_map = {r["id"]: r for r in recording_list}
-            for medium in release["medium-list"]:
-                for recording in medium["track-list"]:
+            for medium in release["media"]:
+                for recording in medium["tracks"]:
                     recording_info = track_map[recording["recording"]["id"]]
                     recording["recording"] = recording_info

         # Basic info.
         track_infos = []
         index = 0
-        for medium in release["medium-list"]:
+        for medium in release["media"]:
             disctitle = medium.get("title")
             format = medium.get("format")

             if format in config["match"]["ignored_media"].as_str_seq():
                 continue

-            all_tracks = medium["track-list"]
+            all_tracks = medium["tracks"]
             if (
-                "data-track-list" in medium
+                "data-tracks" in medium
                 and not config["match"]["ignore_data_tracks"]
             ):
-                all_tracks += medium["data-track-list"]
+                all_tracks += medium["data-tracks"]
             track_count = len(all_tracks)

             if "pregap" in medium:
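# --- Editor's note: illustrative sketch, not part of the diff. ---
# The block above pages through recordings because the API drops relation
# data on releases with more than BROWSE_MAXTRACKS tracks. The pagination
# shape, with fetch_page standing in for the browse call (hypothetical):
BROWSE_CHUNKSIZE = 100  # assumption: page size per browse request

def browse_all(fetch_page, ntracks):
    recordings = []
    for offset in range(0, ntracks, BROWSE_CHUNKSIZE):
        # Each call returns one page of recordings starting at `offset`.
        recordings.extend(fetch_page(offset))
    return recordings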
@@ -578,7 +526,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin):

             if (
                 "video" in track["recording"]
-                and track["recording"]["video"] == "true"
+                and track["recording"]["video"]
                 and config["match"]["ignore_video_tracks"]
             ):
                 continue
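# --- Editor's note: illustrative sketch, not part of the diff. ---
# The old XML-backed client returned the "video" flag as the *string*
# "true"/"false", so an equality check was required; the JSON API returns a
# real boolean, so plain truthiness now does the right thing:
assert bool("false") is True  # why the old code had to compare == "true"
assert bool(False) is False   # why the new code can test the value directly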
@@ -632,7 +580,7 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
             artists=artists_names,
             artists_ids=album_artist_ids,
             tracks=track_infos,
-            mediums=len(release["medium-list"]),
+            mediums=len(release["media"]),
             artist_sort=artist_sort_name,
             artists_sort=artists_sort_names,
             artist_credit=artist_credit_name,
@@ -659,12 +607,8 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
         if release.get("disambiguation"):
             info.albumdisambig = release.get("disambiguation")

-        # Get the "classic" Release type. This data comes from a legacy API
-        # feature before MusicBrainz supported multiple release types.
-        if "type" in release["release-group"]:
-            reltype = release["release-group"]["type"]
-            if reltype:
-                info.albumtype = reltype.lower()
+        if reltype := release["release-group"].get("primary-type"):
+            info.albumtype = reltype.lower()

         # Set the new-style "primary" and "secondary" release types.
         albumtypes = []
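# --- Editor's note: illustrative sketch, not part of the diff. ---
# The walrus form collapses the old three-step membership test, lookup, and
# truthiness check into one guarded assignment:
release_group = {"primary-type": "Album"}
if reltype := release_group.get("primary-type"):
    albumtype = reltype.lower()
assert albumtype == "album"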
@@ -672,9 +616,9 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
             rel_primarytype = release["release-group"]["primary-type"]
             if rel_primarytype:
                 albumtypes.append(rel_primarytype.lower())
-        if "secondary-type-list" in release["release-group"]:
-            if release["release-group"]["secondary-type-list"]:
-                for sec_type in release["release-group"]["secondary-type-list"]:
+        if "secondary-types" in release["release-group"]:
+            if release["release-group"]["secondary-types"]:
+                for sec_type in release["release-group"]["secondary-types"]:
                     albumtypes.append(sec_type.lower())
         info.albumtypes = albumtypes
@@ -690,8 +634,8 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
             _set_date_str(info, release_group_date, True)

         # Label name.
-        if release.get("label-info-list"):
-            label_info = release["label-info-list"][0]
+        if release.get("label-info"):
+            label_info = release["label-info"][0]
             if label_info.get("label"):
                 label = label_info["label"]["name"]
                 if label != "[no label]":
@@ -705,18 +649,18 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
             info.language = rep.get("language")

         # Media (format).
-        if release["medium-list"]:
+        if release["media"]:
             # If all media are the same, use that medium name
-            if len({m.get("format") for m in release["medium-list"]}) == 1:
-                info.media = release["medium-list"][0].get("format")
+            if len({m.get("format") for m in release["media"]}) == 1:
+                info.media = release["media"][0].get("format")
             # Otherwise, let's just call it "Media"
             else:
                 info.media = "Media"

         if self.config["genres"]:
             sources = [
-                release["release-group"].get("tag-list", []),
-                release.get("tag-list", []),
+                release["release-group"].get(self.genres_field, []),
+                release.get(self.genres_field, []),
             ]
             genres: Counter[str] = Counter()
             for source in sources:
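# --- Editor's note: illustrative sketch, not part of the diff. ---
# The media-naming rule above: one shared format names the release's media,
# mixed formats fall back to the generic "Media" label (assuming a
# non-empty media list, which the guarding `if` ensures):
def media_name(media: list[dict]) -> str:
    formats = {m.get("format") for m in media}
    return formats.pop() if len(formats) == 1 else "Media"

assert media_name([{"format": "CD"}, {"format": "CD"}]) == "CD"
assert media_name([{"format": "CD"}, {"format": "Vinyl"}]) == "Media"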
@@ -732,11 +676,11 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
         wanted_sources = {
             site for site, wanted in external_ids.items() if wanted
         }
-        if wanted_sources and (url_rels := release.get("url-relation-list")):
+        if wanted_sources and (url_rels := release.get("url-relations")):
             urls = {}

             for source, url in product(wanted_sources, url_rels):
-                if f"{source}.com" in (target := url["target"]):
+                if f"{source}.com" in (target := url["url"]["resource"]):
                     urls[source] = target
                     self._log.debug(
                         "Found link to {} release via MusicBrainz",
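# --- Editor's note: illustrative sketch, not part of the diff. ---
# product() pairs every wanted external site with every URL relation, so a
# substring test can pick out matching links (hypothetical data shape):
from itertools import product

url_rels = [{"url": {"resource": "https://www.discogs.com/release/123"}}]
urls = {}
for source, url in product({"discogs", "bandcamp"}, url_rels):
    if f"{source}.com" in (target := url["url"]["resource"]):
        urls[source] = target
assert urls == {"discogs": "https://www.discogs.com/release/123"}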
@@ -774,17 +718,20 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
     def get_album_criteria(
         self, items: Sequence[Item], artist: str, album: str, va_likely: bool
     ) -> dict[str, str]:
-        criteria = {
-            "release": album,
-            "alias": album,
-            "tracks": str(len(items)),
-        } | ({"arid": VARIOUS_ARTISTS_ID} if va_likely else {"artist": artist})
+        criteria = {"release": album} | (
+            {"arid": VARIOUS_ARTISTS_ID} if va_likely else {"artist": artist}
+        )

         for tag, mb_field in self.extra_mb_field_by_tag.items():
-            most_common, _ = util.plurality(i.get(tag) for i in items)
-            value = str(most_common)
-            if tag == "catalognum":
-                value = value.replace(" ", "")
+            if tag == "tracks":
+                value = str(len(items))
+            elif tag == "alias":
+                value = album
+            else:
+                most_common, _ = util.plurality(i.get(tag) for i in items)
+                value = str(most_common)
+                if tag == "catalognum":
+                    value = value.replace(" ", "")

             criteria[mb_field] = value
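# --- Editor's note: illustrative sketch, not part of the diff. ---
# util.plurality picks the most common value across the items being
# imported, so one outlier tag does not skew the search criteria. A
# hypothetical inline equivalent (the real helper may differ, e.g. on
# empty input):
from collections import Counter

def plurality(values):
    value, freq = Counter(values).most_common(1)[0]
    return value, freq

assert plurality(["XL-123", "XL-123", "XL 123"]) == ("XL-123", 2)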
@@ -801,20 +748,9 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
        using the provided criteria. Handles API errors by converting them into
        MusicBrainzAPIError exceptions with contextual information.
        """
-        filters = {
-            k: _v for k, v in filters.items() if (_v := v.lower().strip())
-        }
-        self._log.debug(
-            "Searching for MusicBrainz {}s with: {!r}", query_type, filters
+        return self.mb_api.search(
+            query_type, filters, limit=self.config["search_limit"].get()
         )
-        try:
-            method = getattr(musicbrainzngs, f"search_{query_type}s")
-            res = method(limit=self.config["search_limit"].get(), **filters)
-        except musicbrainzngs.MusicBrainzError as exc:
-            raise MusicBrainzAPIError(
-                exc, f"{query_type} search", filters, traceback.format_exc()
-            )
-        return res[f"{query_type}-list"]

     def candidates(
         self,
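# --- Editor's note: illustrative sketch, not part of the diff. ---
# The dict comprehension in the removed lines normalizes filters and drops
# entries that are empty after lowercasing and stripping:
filters = {"release": "  Abbey Road ", "artist": "   "}
cleaned = {k: _v for k, v in filters.items() if (_v := v.lower().strip())}
assert cleaned == {"release": "abbey road"}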
@@ -826,7 +762,10 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
         criteria = self.get_album_criteria(items, artist, album, va_likely)
         release_ids = (r["id"] for r in self._search_api("release", criteria))

-        yield from filter(None, map(self.album_for_id, release_ids))
+        for id_ in release_ids:
+            with suppress(HTTPNotFoundError):
+                if album_info := self.album_for_id(id_):
+                    yield album_info

     def item_candidates(
         self, item: Item, artist: str, title: str
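# --- Editor's note: illustrative sketch, not part of the diff. ---
# The explicit loop confines a not-found error to the one candidate that
# raised it, instead of letting it abort the whole generator:
from contextlib import suppress

class HTTPNotFoundError(Exception):  # stand-in for the real client error
    pass

def candidates(ids, lookup):
    for id_ in ids:
        with suppress(HTTPNotFoundError):
            if info := lookup(id_):
                yield info  # skipped silently when lookup() raised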
@@ -849,29 +788,35 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
             self._log.debug("Invalid MBID ({}).", album_id)
             return None

         # A 404 error here is fine. e.g. re-importing a release that has
         # been deleted on MusicBrainz.
         try:
-            res = musicbrainzngs.get_release_by_id(albumid, RELEASE_INCLUDES)
-
-            # resolve linked release relations
-            actual_res = None
-
-            if res["release"].get("status") == "Pseudo-Release":
-                actual_res = _find_actual_release_from_pseudo_release(res)
-
-        except musicbrainzngs.ResponseError:
-            self._log.debug("Album ID match failed.")
+            res = self.mb_api.get_release(albumid, includes=RELEASE_INCLUDES)
+        except HTTPNotFoundError:
+            self._log.debug("Release {} not found on MusicBrainz.", albumid)
             return None
-        except musicbrainzngs.MusicBrainzError as exc:
-            raise MusicBrainzAPIError(
-                exc, "get release by ID", albumid, traceback.format_exc()
-            )
+
+        # resolve linked release relations
+        actual_res = None
+
+        if res.get("status") == "Pseudo-Release" and (
+            relations := res.get("release-relations")
+        ):
+            for rel in relations:
+                if (
+                    rel["type"] == "transl-tracklisting"
+                    and rel["direction"] == "backward"
+                ):
+                    actual_res = self.mb_api.get_release(
+                        rel["release"]["id"], includes=RELEASE_INCLUDES
+                    )

         # release is potentially a pseudo release
-        release = self.album_info(res["release"])
+        release = self.album_info(res)

         # should be None unless we're dealing with a pseudo release
         if actual_res is not None:
-            actual_release = self.album_info(actual_res["release"])
+            actual_release = self.album_info(actual_res)
             return _merge_pseudo_and_actual_album(release, actual_release)
         else:
             return release
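# --- Editor's note: illustrative sketch, not part of the diff. ---
# Pseudo-release resolution above follows a backward "transl-tracklisting"
# relation to reach the actual release (hypothetical data shape):
relations = [
    {
        "type": "transl-tracklisting",
        "direction": "backward",
        "release": {"id": "actual-id"},
    },
]
actual_id = next(
    (
        rel["release"]["id"]
        for rel in relations
        if rel["type"] == "transl-tracklisting"
        and rel["direction"] == "backward"
    ),
    None,
)
assert actual_id == "actual-id"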
@@ -886,13 +831,9 @@ class MusicBrainzPlugin(MetadataSourcePlugin):
             self._log.debug("Invalid MBID ({}).", track_id)
             return None

-        try:
-            res = musicbrainzngs.get_recording_by_id(trackid, TRACK_INCLUDES)
-        except musicbrainzngs.ResponseError:
-            self._log.debug("Track ID match failed.")
-            return None
-        except musicbrainzngs.MusicBrainzError as exc:
-            raise MusicBrainzAPIError(
-                exc, "get recording by ID", trackid, traceback.format_exc()
+        with suppress(HTTPNotFoundError):
+            return self.track_info(
+                self.mb_api.get_recording(trackid, includes=TRACK_INCLUDES)
             )
-        return self.track_info(res["recording"])

+        return None
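# --- Editor's note: illustrative sketch, not part of the diff. ---
# When get_recording raises HTTPNotFoundError, suppress() swallows it,
# control falls past the `with` block, and the method returns None:
from contextlib import suppress

def lookup(key: str):
    with suppress(KeyError):
        return {"found": 1}[key]
    return None  # reached only when the lookup raised

assert lookup("found") == 1
assert lookup("missing") is None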