Merge branch 'master' into formatted-modify

This commit is contained in:
Adrian Sampson 2022-08-20 16:37:52 -07:00 committed by GitHub
commit 7af40db050
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
95 changed files with 5024 additions and 1363 deletions

View file

@ -12,7 +12,7 @@ jobs:
strategy:
matrix:
platform: [ubuntu-latest, windows-latest]
python-version: [3.6, 3.7, 3.8, 3.9, 3.10.0-rc.2]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11-dev']
env:
PY_COLORS: 1
@ -45,17 +45,17 @@ jobs:
sudo apt install ffmpeg # For replaygain
- name: Test older Python versions with tox
if: matrix.python-version != '3.9' && matrix.python-version != '3.10.0-rc.2'
if: matrix.python-version != '3.10' && matrix.python-version != '3.11-dev'
run: |
tox -e py-test
- name: Test latest Python version with tox and get coverage
if: matrix.python-version == '3.9'
if: matrix.python-version == '3.10'
run: |
tox -vv -e py-cov
- name: Test nightly Python version with tox
if: matrix.python-version == '3.10.0-rc.2'
if: matrix.python-version == '3.11-dev'
# continue-on-error is not ideal since it doesn't give a visible
# warning, but there doesn't seem to be anything better:
# https://github.com/actions/toolkit/issues/399
@ -64,7 +64,7 @@ jobs:
tox -e py-test
- name: Upload code coverage
if: matrix.python-version == '3.9'
if: matrix.python-version == '3.10'
run: |
pip install codecov || true
codecov || true
@ -78,10 +78,10 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
- name: Set up Python 3.10
uses: actions/setup-python@v2
with:
python-version: 3.9
python-version: '3.10'
- name: Install base dependencies
run: |
@ -100,10 +100,10 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
- name: Set up Python 3.10
uses: actions/setup-python@v2
with:
python-version: 3.9
python-version: '3.10'
- name: Install base dependencies
run: |

2
.gitignore vendored
View file

@ -29,7 +29,7 @@ downloads/
eggs/
.eggs/
lib/
lib64/
lib64
parts/
sdist/
var/

View file

@ -203,11 +203,10 @@ There are a few coding conventions we use in beets:
instead. In particular, we have our own logging shim, so you'll see
``from beets import logging`` in most files.
- Always log Unicode strings (e.g., ``log.debug(u"hello world")``).
- The loggers use
`str.format <http://docs.python.org/library/stdtypes.html#str.format>`__-style
logging instead of ``%``-style, so you can type
``log.debug(u"{0}", obj)`` to do your formatting.
``log.debug("{0}", obj)`` to do your formatting.
- Exception handlers must use ``except A as B:`` instead of
``except A, B:``.

View file

@ -59,7 +59,7 @@ shockingly simple if you know a little Python.
.. _writing your own plugin:
https://beets.readthedocs.org/page/dev/plugins.html
.. _HTML5 Audio:
http://www.w3.org/TR/html-markup/audio.html
https://html.spec.whatwg.org/multipage/media.html#the-audio-element
.. _albums that are missing tracks:
https://beets.readthedocs.org/page/plugins/missing.html
.. _duplicate tracks and albums:

View file

@ -54,7 +54,7 @@ Beets는 라이브러리로 디자인 되었기 때문에, 당신이 음악들
.. _writing your own plugin:
https://beets.readthedocs.org/page/dev/plugins.html
.. _HTML5 Audio:
http://www.w3.org/TR/html-markup/audio.html
https://html.spec.whatwg.org/multipage/media.html#the-audio-element
.. _albums that are missing tracks:
https://beets.readthedocs.org/page/plugins/missing.html
.. _duplicate tracks and albums:

11
SECURITY.md Normal file
View file

@ -0,0 +1,11 @@
# Security Policy
## Supported Versions
We currently support only the latest release of beets.
## Reporting a Vulnerability
To report a security vulnerability, please send email to [our Zulip team][z].
[z]: mailto:email.218c36e48d78cf125c0a6219a6c2a417.show-sender@streams.zulipchat.com

View file

@ -16,7 +16,7 @@
import confuse
from sys import stderr
__version__ = '1.5.1'
__version__ = '1.6.1'
__author__ = 'Adrian Sampson <adrian@radbox.org>'

View file

@ -17,8 +17,6 @@ music and items' embedded album art.
"""
import subprocess
import platform
from tempfile import NamedTemporaryFile
import os
@ -53,14 +51,22 @@ def embed_item(log, item, imagepath, maxwidth=None, itempath=None,
quality=0):
"""Embed an image into the item's media file.
"""
# Conditions and filters.
# Conditions.
if compare_threshold:
if not check_art_similarity(log, item, imagepath, compare_threshold):
is_similar = check_art_similarity(
log, item, imagepath, compare_threshold)
if is_similar is None:
log.warning('Error while checking art similarity; skipping.')
return
elif not is_similar:
log.info('Image not similar; skipping.')
return
if ifempty and get_art(log, item):
log.info('media file already contained art')
return
# Filters.
if maxwidth and not as_album:
imagepath = resize_image(log, imagepath, maxwidth, quality)
@ -115,76 +121,30 @@ def resize_image(log, imagepath, maxwidth, quality):
return imagepath
def check_art_similarity(log, item, imagepath, compare_threshold):
def check_art_similarity(
log,
item,
imagepath,
compare_threshold,
artresizer=None,
):
"""A boolean indicating if an image is similar to embedded item art.
If no embedded art exists, always return `True`. If the comparison fails
for some reason, the return value is `None`.
This must only be called if `ArtResizer.shared.can_compare` is `True`.
"""
with NamedTemporaryFile(delete=True) as f:
art = extract(log, f.name, item)
if art:
is_windows = platform.system() == "Windows"
if not art:
return True
# Converting images to grayscale tends to minimize the weight
# of colors in the diff score. So we first convert both images
# to grayscale and then pipe them into the `compare` command.
# On Windows, ImageMagick doesn't support the magic \\?\ prefix
# on paths, so we pass `prefix=False` to `syspath`.
convert_cmd = ['convert', syspath(imagepath, prefix=False),
syspath(art, prefix=False),
'-colorspace', 'gray', 'MIFF:-']
compare_cmd = ['compare', '-metric', 'PHASH', '-', 'null:']
log.debug('comparing images with pipeline {} | {}',
convert_cmd, compare_cmd)
convert_proc = subprocess.Popen(
convert_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=not is_windows,
)
compare_proc = subprocess.Popen(
compare_cmd,
stdin=convert_proc.stdout,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=not is_windows,
)
if artresizer is None:
artresizer = ArtResizer.shared
# Check the convert output. We're not interested in the
# standard output; that gets piped to the next stage.
convert_proc.stdout.close()
convert_stderr = convert_proc.stderr.read()
convert_proc.stderr.close()
convert_proc.wait()
if convert_proc.returncode:
log.debug(
'ImageMagick convert failed with status {}: {!r}',
convert_proc.returncode,
convert_stderr,
)
return
# Check the compare output.
stdout, stderr = compare_proc.communicate()
if compare_proc.returncode:
if compare_proc.returncode != 1:
log.debug('ImageMagick compare failed: {0}, {1}',
displayable_path(imagepath),
displayable_path(art))
return
out_str = stderr
else:
out_str = stdout
try:
phash_diff = float(out_str)
except ValueError:
log.debug('IM output is not a number: {0!r}', out_str)
return
log.debug('ImageMagick compare score: {0}', phash_diff)
return phash_diff <= compare_threshold
return True
return artresizer.compare(art, imagepath, compare_threshold)
def extract(log, outpath, item):

View file

@ -598,6 +598,14 @@ def tracks_for_id(track_id):
yield t
def invoke_mb(call_func, *args):
try:
return call_func(*args)
except mb.MusicBrainzAPIError as exc:
exc.log(log)
return ()
@plugins.notify_info_yielded('albuminfo_received')
def album_candidates(items, artist, album, va_likely, extra_tags):
"""Search for album matches. ``items`` is a list of Item objects
@ -609,25 +617,19 @@ def album_candidates(items, artist, album, va_likely, extra_tags):
constrain the search.
"""
# Base candidates if we have album and artist to match.
if artist and album:
try:
yield from mb.match_album(artist, album, len(items),
extra_tags)
except mb.MusicBrainzAPIError as exc:
exc.log(log)
if config["musicbrainz"]["enabled"]:
# Base candidates if we have album and artist to match.
if artist and album:
yield from invoke_mb(mb.match_album, artist, album, len(items),
extra_tags)
# Also add VA matches from MusicBrainz where appropriate.
if va_likely and album:
try:
yield from mb.match_album(None, album, len(items),
extra_tags)
except mb.MusicBrainzAPIError as exc:
exc.log(log)
# Also add VA matches from MusicBrainz where appropriate.
if va_likely and album:
yield from invoke_mb(mb.match_album, None, album, len(items),
extra_tags)
# Candidates from plugins.
yield from plugins.candidates(items, artist, album, va_likely,
extra_tags)
yield from plugins.candidates(items, artist, album, va_likely, extra_tags)
@plugins.notify_info_yielded('trackinfo_received')
@ -638,11 +640,8 @@ def item_candidates(item, artist, title):
"""
# MusicBrainz candidates.
if artist and title:
try:
yield from mb.match_track(artist, title)
except mb.MusicBrainzAPIError as exc:
exc.log(log)
if config["musicbrainz"]["enabled"] and artist and title:
yield from invoke_mb(mb.match_track, artist, title)
# Plugin candidates.
yield from plugins.item_candidates(item, artist, title)

View file

@ -357,8 +357,8 @@ def _add_candidate(items, results, info):
log.debug('No tracks.')
return
# Don't duplicate.
if info.album_id in results:
# Prevent duplicates.
if info.album_id and info.album_id in results:
log.debug('Duplicate.')
return

View file

@ -55,6 +55,11 @@ aunique:
disambiguators: albumtype year label catalognum albumdisambig releasegroupdisambig
bracket: '[]'
sunique:
keys: artist title
disambiguators: year trackdisambig
bracket: '[]'
overwrite_null:
album: []
track: []
@ -101,6 +106,7 @@ paths:
statefile: state.pickle
musicbrainz:
enabled: yes
host: musicbrainz.org
https: no
ratelimit: 1

View file

@ -177,6 +177,23 @@ class StringFieldQuery(FieldQuery):
raise NotImplementedError()
class StringQuery(StringFieldQuery):
"""A query that matches a whole string in a specific item field."""
def col_clause(self):
search = (self.pattern
.replace('\\', '\\\\')
.replace('%', '\\%')
.replace('_', '\\_'))
clause = self.field + " like ? escape '\\'"
subvals = [search]
return clause, subvals
@classmethod
def string_match(cls, pattern, value):
return pattern.lower() == value.lower()
class SubstringQuery(StringFieldQuery):
"""A query that matches a substring in a specific item field."""
@ -443,7 +460,7 @@ class AndQuery(MutableCollectionQuery):
return self.clause_with_joiner('and')
def match(self, item):
return all([q.match(item) for q in self.subqueries])
return all(q.match(item) for q in self.subqueries)
class OrQuery(MutableCollectionQuery):
@ -453,7 +470,7 @@ class OrQuery(MutableCollectionQuery):
return self.clause_with_joiner('or')
def match(self, item):
return any([q.match(item) for q in self.subqueries])
return any(q.match(item) for q in self.subqueries)
class NotQuery(Query):

View file

@ -726,8 +726,8 @@ class ImportTask(BaseImportTask):
item.update(changes)
def manipulate_files(self, operation=None, write=False, session=None):
""" Copy, move, link, hardlink or reflink (depending on `operation`) the files
as well as write metadata.
""" Copy, move, link, hardlink or reflink (depending on `operation`)
the files as well as write metadata.
`operation` should be an instance of `util.MoveOperation`.

View file

@ -53,8 +53,9 @@ class PathQuery(dbcore.FieldQuery):
"""
def __init__(self, field, pattern, fast=True, case_sensitive=None):
"""Create a path query. `pattern` must be a path, either to a
file or a directory.
"""Create a path query.
`pattern` must be a path, either to a file or a directory.
`case_sensitive` can be a bool or `None`, indicating that the
behavior should depend on the filesystem.
@ -140,8 +141,10 @@ class DateType(types.Float):
class PathType(types.Type):
"""A dbcore type for filesystem paths. These are represented as
`bytes` objects, in keeping with the Unix filesystem abstraction.
"""A dbcore type for filesystem paths.
These are represented as `bytes` objects, in keeping with
the Unix filesystem abstraction.
"""
sql = 'BLOB'
@ -149,8 +152,9 @@ class PathType(types.Type):
model_type = bytes
def __init__(self, nullable=False):
"""Create a path type object. `nullable` controls whether the
type may be missing, i.e., None.
"""Create a path type object.
`nullable` controls whether the type may be missing, i.e., None.
"""
self.nullable = nullable
@ -282,7 +286,8 @@ PF_KEY_DEFAULT = 'default'
# Exceptions.
class FileOperationError(Exception):
"""Indicates an error when interacting with a file on disk.
"""Indicate an error when interacting with a file on disk.
Possibilities include an unsupported media type, a permissions
error, and an unhandled Mutagen exception.
"""
@ -296,8 +301,10 @@ class FileOperationError(Exception):
self.reason = reason
def text(self):
"""Get a string representing the error. Describes both the
underlying reason and the file path in question.
"""Get a string representing the error.
Describe both the underlying reason and the file path
in question.
"""
return '{}: {}'.format(
util.displayable_path(self.path),
@ -310,16 +317,14 @@ class FileOperationError(Exception):
class ReadError(FileOperationError):
"""An error while reading a file (i.e. in `Item.read`).
"""
"""An error while reading a file (i.e. in `Item.read`)."""
def __str__(self):
return 'error reading ' + super().text()
class WriteError(FileOperationError):
"""An error while writing a file (i.e. in `Item.write`).
"""
"""An error while writing a file (i.e. in `Item.write`)."""
def __str__(self):
return 'error writing ' + super().text()
@ -328,12 +333,10 @@ class WriteError(FileOperationError):
# Item and Album model classes.
class LibModel(dbcore.Model):
"""Shared concrete functionality for Items and Albums.
"""
"""Shared concrete functionality for Items and Albums."""
# Config key that specifies how an instance should be formatted.
_format_config_key = None
"""Config key that specifies how an instance should be formatted.
"""
def _template_funcs(self):
funcs = DefaultTemplateFunctions(self, self._db).functions()
@ -410,6 +413,7 @@ class FormattedItemMapping(dbcore.db.FormattedMapping):
def _get(self, key):
"""Get the value for a key, either from the album or the item.
Raise a KeyError for invalid keys.
"""
if self.for_path and key in self.album_keys:
@ -422,8 +426,10 @@ class FormattedItemMapping(dbcore.db.FormattedMapping):
raise KeyError(key)
def __getitem__(self, key):
"""Get the value for a key. `artist` and `albumartist`
are fallback values for each other when not set.
"""Get the value for a key.
`artist` and `albumartist` are fallback values for each other
when not set.
"""
value = self._get(key)
@ -448,6 +454,7 @@ class FormattedItemMapping(dbcore.db.FormattedMapping):
class Item(LibModel):
"""Represent a song or track."""
_table = 'items'
_flex_table = 'item_attributes'
_fields = {
@ -515,8 +522,8 @@ class Item(LibModel):
'rg_track_peak': types.NULL_FLOAT,
'rg_album_gain': types.NULL_FLOAT,
'rg_album_peak': types.NULL_FLOAT,
'r128_track_gain': types.NullPaddedInt(6),
'r128_album_gain': types.NullPaddedInt(6),
'r128_track_gain': types.NULL_FLOAT,
'r128_album_gain': types.NULL_FLOAT,
'original_year': types.PaddedInt(4),
'original_month': types.PaddedInt(2),
'original_day': types.PaddedInt(2),
@ -524,6 +531,9 @@ class Item(LibModel):
'length': DurationType(),
'bitrate': types.ScaledInt(1000, 'kbps'),
'bitrate_mode': types.STRING,
'encoder_info': types.STRING,
'encoder_settings': types.STRING,
'format': types.STRING,
'samplerate': types.ScaledInt(1000, 'kHz'),
'bitdepth': types.INTEGER,
@ -539,22 +549,18 @@ class Item(LibModel):
'data_source': types.STRING,
}
# Set of item fields that are backed by `MediaFile` fields.
# Any kind of field (fixed, flexible, and computed) may be a media
# field. Only these fields are read from disk in `read` and written in
# `write`.
_media_fields = set(MediaFile.readable_fields()) \
.intersection(_fields.keys())
"""Set of item fields that are backed by `MediaFile` fields.
Any kind of field (fixed, flexible, and computed) may be a media
field. Only these fields are read from disk in `read` and written in
`write`.
"""
# Set of item fields that are backed by *writable* `MediaFile` tag
# fields.
# This excludes fields that represent audio data, such as `bitrate` or
# `length`.
_media_tag_fields = set(MediaFile.fields()).intersection(_fields.keys())
"""Set of item fields that are backed by *writable* `MediaFile` tag
fields.
This excludes fields that represent audio data, such as `bitrate` or
`length`.
"""
_formatter = FormattedItemMapping
@ -562,8 +568,8 @@ class Item(LibModel):
_format_config_key = 'format_item'
# Cached album object. Read-only.
__album = None
"""Cached album object. Read-only."""
@property
def _cached_album(self):
@ -594,8 +600,7 @@ class Item(LibModel):
@classmethod
def from_path(cls, path):
"""Creates a new item from the media file at the specified path.
"""
"""Create a new item from the media file at the specified path."""
# Initiate with values that aren't read from files.
i = cls(album_id=None)
i.read(path)
@ -603,8 +608,7 @@ class Item(LibModel):
return i
def __setitem__(self, key, value):
"""Set the item's value for a standard field or a flexattr.
"""
"""Set the item's value for a standard field or a flexattr."""
# Encode unicode paths and read buffers.
if key == 'path':
if isinstance(value, str):
@ -621,7 +625,9 @@ class Item(LibModel):
def __getitem__(self, key):
"""Get the value for a field, falling back to the album if
necessary. Raise a KeyError if the field is not available.
necessary.
Raise a KeyError if the field is not available.
"""
try:
return super().__getitem__(key)
@ -641,8 +647,9 @@ class Item(LibModel):
)
def keys(self, computed=False, with_album=True):
"""Get a list of available field names. `with_album`
controls whether the album's fields are included.
"""Get a list of available field names.
`with_album` controls whether the album's fields are included.
"""
keys = super().keys(computed=computed)
if with_album and self._cached_album:
@ -653,7 +660,9 @@ class Item(LibModel):
def get(self, key, default=None, with_album=True):
"""Get the value for a given key or `default` if it does not
exist. Set `with_album` to false to skip album fallback.
exist.
Set `with_album` to false to skip album fallback.
"""
try:
return self._get(key, default, raise_=with_album)
@ -663,8 +672,9 @@ class Item(LibModel):
return default
def update(self, values):
"""Set all key/value pairs in the mapping. If mtime is
specified, it is not reset (as it might otherwise be).
"""Set all key/value pairs in the mapping.
If mtime is specified, it is not reset (as it might otherwise be).
"""
super().update(values)
if self.mtime == 0 and 'mtime' in values:
@ -690,10 +700,10 @@ class Item(LibModel):
"""Read the metadata from the associated file.
If `read_path` is specified, read metadata from that file
instead. Updates all the properties in `_media_fields`
instead. Update all the properties in `_media_fields`
from the media file.
Raises a `ReadError` if the file could not be read.
Raise a `ReadError` if the file could not be read.
"""
if read_path is None:
read_path = self.path
@ -769,10 +779,10 @@ class Item(LibModel):
plugins.send('after_write', item=self, path=path)
def try_write(self, *args, **kwargs):
"""Calls `write()` but catches and logs `FileOperationError`
"""Call `write()` but catch and log `FileOperationError`
exceptions.
Returns `False` if an exception was caught and `True` otherwise.
Return `False` if an exception was caught and `True` otherwise.
"""
try:
self.write(*args, **kwargs)
@ -782,7 +792,7 @@ class Item(LibModel):
return False
def try_sync(self, write, move, with_album=True):
"""Synchronize the item with the database and, possibly, updates its
"""Synchronize the item with the database and, possibly, update its
tags on disk and its path (by moving the file).
`write` indicates whether to write new tags into the file. Similarly,
@ -806,7 +816,7 @@ class Item(LibModel):
# Files themselves.
def move_file(self, dest, operation=MoveOperation.MOVE):
"""Move, copy, link or hardlink the item's depending on `operation`,
"""Move, copy, link or hardlink the item depending on `operation`,
updating the path value if the move succeeds.
If a file exists at `dest`, then it is slightly modified to be unique.
@ -848,7 +858,7 @@ class Item(LibModel):
self.path = dest
def current_mtime(self):
"""Returns the current mtime of the file, rounded to the nearest
"""Return the current mtime of the file, rounded to the nearest
integer.
"""
return int(os.path.getmtime(syspath(self.path)))
@ -867,9 +877,12 @@ class Item(LibModel):
# Model methods.
def remove(self, delete=False, with_album=True):
"""Removes the item. If `delete`, then the associated file is
removed from disk. If `with_album`, then the item's album (if
any) is removed if it the item was the last in the album.
"""Remove the item.
If `delete`, then the associated file is removed from disk.
If `with_album`, then the item's album (if any) is removed
if the item was the last in the album.
"""
super().remove()
@ -892,9 +905,10 @@ class Item(LibModel):
def move(self, operation=MoveOperation.MOVE, basedir=None,
with_album=True, store=True):
"""Move the item to its designated location within the library
directory (provided by destination()). Subdirectories are
created as needed. If the operation succeeds, the item's path
field is updated to reflect the new location.
directory (provided by destination()).
Subdirectories are created as needed. If the operation succeeds,
the item's path field is updated to reflect the new location.
Instead of moving the item it can also be copied, linked or hardlinked
depending on `operation` which should be an instance of
@ -908,8 +922,8 @@ class Item(LibModel):
By default, the item is stored to the database if it is in the
database, so any dirty fields prior to the move() call will be written
as a side effect.
If `store` is `False` however, the item won't be stored and you'll
have to manually store it after invoking this method.
If `store` is `False` however, the item won't be stored and it will
have to be manually stored after invoking this method.
"""
self._check_db()
dest = self.destination(basedir=basedir)
@ -938,18 +952,21 @@ class Item(LibModel):
# Templating.
def destination(self, fragment=False, basedir=None, platform=None,
path_formats=None):
"""Returns the path in the library directory designated for the
item (i.e., where the file ought to be). fragment makes this
method return just the path fragment underneath the root library
directory; the path is also returned as Unicode instead of
encoded as a bytestring. basedir can override the library's base
directory for the destination.
path_formats=None, replacements=None):
"""Return the path in the library directory designated for the
item (i.e., where the file ought to be).
fragment makes this method return just the path fragment underneath
the root library directory; the path is also returned as Unicode
instead of encoded as a bytestring. basedir can override the library's
base directory for the destination.
"""
self._check_db()
platform = platform or sys.platform
basedir = basedir or self._db.directory
path_formats = path_formats or self._db.path_formats
if replacements is None:
replacements = self._db.replacements
# Use a path format based on a query, falling back on the
# default.
@ -994,7 +1011,7 @@ class Item(LibModel):
maxlen = util.max_filename_length(self._db.directory)
subpath, fellback = util.legalize_path(
subpath, self._db.replacements, maxlen,
subpath, replacements, maxlen,
os.path.splitext(self.path)[1], fragment
)
if fellback:
@ -1014,9 +1031,10 @@ class Item(LibModel):
class Album(LibModel):
"""Provides access to information about albums stored in a
library. Reflects the library's "albums" table, including album
art.
"""Provide access to information about albums stored in a
library.
Reflects the library's "albums" table, including album art.
"""
_table = 'albums'
_flex_table = 'album_attributes'
@ -1056,7 +1074,7 @@ class Album(LibModel):
'releasegroupdisambig': types.STRING,
'rg_album_gain': types.NULL_FLOAT,
'rg_album_peak': types.NULL_FLOAT,
'r128_album_gain': types.NullPaddedInt(6),
'r128_album_gain': types.NULL_FLOAT,
'original_year': types.PaddedInt(4),
'original_month': types.PaddedInt(2),
'original_day': types.PaddedInt(2),
@ -1074,6 +1092,7 @@ class Album(LibModel):
'artist': SmartArtistSort,
}
# List of keys that are set on an album's items.
item_keys = [
'added',
'albumartist',
@ -1111,8 +1130,6 @@ class Album(LibModel):
'original_month',
'original_day',
]
"""List of keys that are set on an album's items.
"""
_format_config_key = 'format_album'
@ -1126,20 +1143,26 @@ class Album(LibModel):
return getters
def items(self):
"""Returns an iterable over the items associated with this
"""Return an iterable over the items associated with this
album.
"""
return self._db.items(dbcore.MatchQuery('album_id', self.id))
def remove(self, delete=False, with_items=True):
"""Removes this album and all its associated items from the
library. If delete, then the items' files are also deleted
from disk, along with any album art. The directories
containing the album are also removed (recursively) if empty.
"""Remove this album and all its associated items from the
library.
If delete, then the items' files are also deleted from disk,
along with any album art. The directories containing the album are
also removed (recursively) if empty.
Set with_items to False to avoid removing the album's items.
"""
super().remove()
# Send a 'album_removed' signal to plugins
plugins.send('album_removed', album=self)
# Delete art file.
if delete:
artpath = self.artpath
@ -1203,8 +1226,8 @@ class Album(LibModel):
By default, the album is stored to the database, persisting any
modifications to its metadata. If `store` is `False` however,
the album is not stored automatically, and you'll have to manually
store it after invoking this method.
the album is not stored automatically, and it will have to be manually
stored after invoking this method.
"""
basedir = basedir or self._db.directory
@ -1225,7 +1248,7 @@ class Album(LibModel):
self.store()
def item_dir(self):
"""Returns the directory containing the album's first item,
"""Return the directory containing the album's first item,
provided that such an item exists.
"""
item = self.items().get()
@ -1234,8 +1257,7 @@ class Album(LibModel):
return os.path.dirname(item.path)
def _albumtotal(self):
"""Return the total number of tracks on all discs on the album
"""
"""Return the total number of tracks on all discs on the album."""
if self.disctotal == 1 or not beets.config['per_disc_numbering']:
return self.items()[0].tracktotal
@ -1255,8 +1277,10 @@ class Album(LibModel):
return total
def art_destination(self, image, item_dir=None):
"""Returns a path to the destination for the album art image
for the album. `image` is the path of the image that will be
"""Return a path to the destination for the album art image
for the album.
`image` is the path of the image that will be
moved there (used for its extension).
The path construction uses the existing path of the album's
@ -1284,11 +1308,12 @@ class Album(LibModel):
return bytestring_path(dest)
def set_art(self, path, copy=True):
"""Sets the album's cover art to the image at the given path.
"""Set the album's cover art to the image at the given path.
The image is copied (or moved) into place, replacing any
existing art.
Sends an 'art_set' event with `self` as the sole argument.
Send an 'art_set' event with `self` as the sole argument.
"""
path = bytestring_path(path)
oldart = self.artpath
@ -1315,10 +1340,12 @@ class Album(LibModel):
plugins.send('art_set', album=self)
def store(self, fields=None):
"""Update the database with the album information. The album's
tracks are also updated.
:param fields: The fields to be stored. If not specified, all fields
will be.
"""Update the database with the album information.
The album's tracks are also updated.
`fields` represents the fields to be stored. If not specified,
all fields will be.
"""
# Get modified track fields.
track_updates = {}
@ -1355,36 +1382,26 @@ def parse_query_parts(parts, model_cls):
`Query` and `Sort` they represent.
Like `dbcore.parse_sorted_query`, with beets query prefixes and
special path query detection.
ensuring that implicit path queries are made explicit with 'path::<query>'
"""
# Get query types and their prefix characters.
prefixes = {':': dbcore.query.RegexpQuery}
prefixes = {
':': dbcore.query.RegexpQuery,
'=~': dbcore.query.StringQuery,
'=': dbcore.query.MatchQuery,
}
prefixes.update(plugins.queries())
# Special-case path-like queries, which are non-field queries
# containing path separators (/).
path_parts = []
non_path_parts = []
for s in parts:
if PathQuery.is_path_query(s):
path_parts.append(s)
else:
non_path_parts.append(s)
parts = [f"path:{s}" if PathQuery.is_path_query(s) else s for s in parts]
case_insensitive = beets.config['sort_case_insensitive'].get(bool)
query, sort = dbcore.parse_sorted_query(
model_cls, non_path_parts, prefixes, case_insensitive
return dbcore.parse_sorted_query(
model_cls, parts, prefixes, case_insensitive
)
# Add path queries to aggregate query.
# Match field / flexattr depending on whether the model has the path field
fast_path_query = 'path' in model_cls._fields
query.subqueries += [PathQuery('path', s, fast_path_query)
for s in path_parts]
return query, sort
def parse_query_string(s, model_cls):
"""Given a beets query string, return the `Query` and `Sort` they
@ -1403,10 +1420,11 @@ def parse_query_string(s, model_cls):
def _sqlite_bytelower(bytestring):
""" A custom ``bytelower`` sqlite function so we can compare
bytestrings in a semi case insensitive fashion. This is to work
around sqlite builds that are compiled with
``-DSQLITE_LIKE_DOESNT_MATCH_BLOBS``. See
``https://github.com/beetbox/beets/issues/2172`` for details.
bytestrings in a semi case insensitive fashion.
This is to work around sqlite builds that are compiled with
``-DSQLITE_LIKE_DOESNT_MATCH_BLOBS``. See
``https://github.com/beetbox/beets/issues/2172`` for details.
"""
return bytestring.lower()
@ -1414,8 +1432,7 @@ def _sqlite_bytelower(bytestring):
# The Library: interface to the database.
class Library(dbcore.Database):
"""A database of music containing songs and albums.
"""
"""A database of music containing songs and albums."""
_models = (Item, Album)
def __init__(self, path='library.blb',
@ -1441,7 +1458,9 @@ class Library(dbcore.Database):
def add(self, obj):
"""Add the :class:`Item` or :class:`Album` object to the library
database. Return the object's new id.
database.
Return the object's new id.
"""
obj.add(self)
self._memotable = {}
@ -1477,8 +1496,10 @@ class Library(dbcore.Database):
# Querying.
def _fetch(self, model_cls, query, sort=None):
"""Parse a query and fetch. If a order specification is present
in the query string the `sort` argument is ignored.
"""Parse a query and fetch.
If an order specification is present in the query string
the `sort` argument is ignored.
"""
# Parse the query, if necessary.
try:
@ -1501,40 +1522,38 @@ class Library(dbcore.Database):
@staticmethod
def get_default_album_sort():
"""Get a :class:`Sort` object for albums from the config option.
"""
"""Get a :class:`Sort` object for albums from the config option."""
return dbcore.sort_from_strings(
Album, beets.config['sort_album'].as_str_seq())
@staticmethod
def get_default_item_sort():
"""Get a :class:`Sort` object for items from the config option.
"""
"""Get a :class:`Sort` object for items from the config option."""
return dbcore.sort_from_strings(
Item, beets.config['sort_item'].as_str_seq())
def albums(self, query=None, sort=None):
"""Get :class:`Album` objects matching the query.
"""
"""Get :class:`Album` objects matching the query."""
return self._fetch(Album, query, sort or self.get_default_album_sort())
def items(self, query=None, sort=None):
"""Get :class:`Item` objects matching the query.
"""
"""Get :class:`Item` objects matching the query."""
return self._fetch(Item, query, sort or self.get_default_item_sort())
# Convenience accessors.
def get_item(self, id):
"""Fetch an :class:`Item` by its ID. Returns `None` if no match is
found.
"""Fetch a :class:`Item` by its ID.
Return `None` if no match is found.
"""
return self._get(Item, id)
def get_album(self, item_or_id):
"""Given an album ID or an item associated with an album, return
an :class:`Album` object for the album. If no such album exists,
returns `None`.
a :class:`Album` object for the album.
If no such album exists, return `None`.
"""
if isinstance(item_or_id, int):
album_id = item_or_id
@ -1549,30 +1568,37 @@ class Library(dbcore.Database):
def _int_arg(s):
"""Convert a string argument to an integer for use in a template
function. May raise a ValueError.
function.
May raise a ValueError.
"""
return int(s.strip())
class DefaultTemplateFunctions:
"""A container class for the default functions provided to path
templates. These functions are contained in an object to provide
templates.
These functions are contained in an object to provide
additional context to the functions -- specifically, the Item being
evaluated.
"""
_prefix = 'tmpl_'
def __init__(self, item=None, lib=None):
"""Parametrize the functions. If `item` or `lib` is None, then
some functions (namely, ``aunique``) will always evaluate to the
empty string.
"""Parametrize the functions.
If `item` or `lib` is None, then some functions (namely, ``aunique``)
will always evaluate to the empty string.
"""
self.item = item
self.lib = lib
def functions(self):
"""Returns a dictionary containing the functions defined in this
object. The keys are function names (as exposed in templates)
"""Return a dictionary containing the functions defined in this
object.
The keys are function names (as exposed in templates)
and the values are Python functions.
"""
out = {}
@ -1587,7 +1613,7 @@ class DefaultTemplateFunctions:
@staticmethod
def tmpl_upper(s):
    """Convert a string to upper case."""
    # The merge left two docstring variants here ("Covert"/"Convert");
    # only the corrected one is kept.
    return s.upper()
@staticmethod
@ -1625,21 +1651,20 @@ class DefaultTemplateFunctions:
@staticmethod
def tmpl_asciify(s):
    """Translate non-ASCII characters to their ASCII equivalents."""
    # The merge left two docstring variants here; keep a single one.
    # The replacement for the path separator comes from configuration.
    return util.asciify_path(s, beets.config['path_sep_replace'].as_str())
@staticmethod
def tmpl_time(s, fmt):
    """Format a time value using `strftime`.

    `s` is parsed according to the configured `time_format` and then
    re-rendered using `fmt`. (The merge left two docstring variants
    here; a single one is kept.)
    """
    cur_fmt = beets.config['time_format'].as_str()
    return time.strftime(fmt, time.strptime(s, cur_fmt))
def tmpl_aunique(self, keys=None, disam=None, bracket=None):
"""Generate a string that is guaranteed to be unique among all
albums in the library who share the same set of keys. A fields
from "disam" is used in the string if one is sufficient to
albums in the library who share the same set of keys.
A fields from "disam" is used in the string if one is sufficient to
disambiguate the albums. Otherwise, a fallback opaque value is
used. Both "keys" and "disam" should be given as
whitespace-separated lists of field names, while "bracket" is a
@ -1658,15 +1683,89 @@ class DefaultTemplateFunctions:
if album_id is None:
return ''
memokey = ('aunique', keys, disam, album_id)
memokey = self._tmpl_unique_memokey('aunique', keys, disam, album_id)
memoval = self.lib._memotable.get(memokey)
if memoval is not None:
return memoval
keys = keys or beets.config['aunique']['keys'].as_str()
disam = disam or beets.config['aunique']['disambiguators'].as_str()
album = self.lib.get_album(album_id)
return self._tmpl_unique(
'aunique', keys, disam, bracket, album_id, album, album.item_keys,
# Do nothing for singletons.
lambda a: a is None)
def tmpl_sunique(self, keys=None, disam=None, bracket=None):
    """Generate a string that is guaranteed to be unique among all
    singletons in the library that share the same set of keys.

    A field from "disam" is used in the string if one is sufficient to
    disambiguate the items; otherwise a fallback opaque value is used.
    Both "keys" and "disam" are whitespace-separated lists of field
    names, and "bracket" is a pair of characters used as brackets
    around the disambiguator (or empty for no brackets).
    """
    # Fast path: without an item and a library nothing can be done.
    if not self.item or not self.lib:
        return ''

    if not isinstance(self.item, Item):
        raise NotImplementedError("sunique is only implemented for items")
    item_id = self.item.id
    if item_id is None:
        return ''

    return self._tmpl_unique(
        'sunique', keys, disam, bracket, item_id, self.item,
        Item.all_keys(),
        # Singletons only: skip anything that belongs to an album.
        lambda i: i.album_id is not None,
        initial_subqueries=[dbcore.query.NoneQuery('album_id', True)])
def _tmpl_unique_memokey(self, name, keys, disam, item_id):
"""Get the memokey for the unique template named "name" for the
specific parameters.
"""
return (name, keys, disam, item_id)
def _tmpl_unique(self, name, keys, disam, bracket, item_id, db_item,
item_keys, skip_item, initial_subqueries=None):
"""Generate a string that is guaranteed to be unique among all items of
the same type as "db_item" who share the same set of keys.
A field from "disam" is used in the string if one is sufficient to
disambiguate the items. Otherwise, a fallback opaque value is
used. Both "keys" and "disam" should be given as
whitespace-separated lists of field names, while "bracket" is a
pair of characters to be used as brackets surrounding the
disambiguator or empty to have no brackets.
"name" is the name of the templates. It is also the name of the
configuration section where the default values of the parameters
are stored.
"skip_item" is a function that must return True when the template
should return an empty string.
"initial_subqueries" is a list of subqueries that should be included
in the query to find the ambigous items.
"""
memokey = self._tmpl_unique_memokey(name, keys, disam, item_id)
memoval = self.lib._memotable.get(memokey)
if memoval is not None:
return memoval
if skip_item(db_item):
self.lib._memotable[memokey] = ''
return ''
keys = keys or beets.config[name]['keys'].as_str()
disam = disam or beets.config[name]['disambiguators'].as_str()
if bracket is None:
bracket = beets.config['aunique']['bracket'].as_str()
bracket = beets.config[name]['bracket'].as_str()
keys = keys.split()
disam = disam.split()
@ -1678,44 +1777,44 @@ class DefaultTemplateFunctions:
bracket_l = ''
bracket_r = ''
album = self.lib.get_album(album_id)
if not album:
# Do nothing for singletons.
self.lib._memotable[memokey] = ''
return ''
# Find matching albums to disambiguate with.
# Find matching items to disambiguate with.
subqueries = []
if initial_subqueries is not None:
subqueries.extend(initial_subqueries)
for key in keys:
value = album.get(key, '')
subqueries.append(dbcore.MatchQuery(key, value))
albums = self.lib.albums(dbcore.AndQuery(subqueries))
value = db_item.get(key, '')
# Use slow queries for flexible attributes.
fast = key in item_keys
subqueries.append(dbcore.MatchQuery(key, value, fast))
query = dbcore.AndQuery(subqueries)
ambigous_items = (self.lib.items(query)
if isinstance(db_item, Item)
else self.lib.albums(query))
# If there's only one album to matching these details, then do
# If there's only one item to matching these details, then do
# nothing.
if len(albums) == 1:
if len(ambigous_items) == 1:
self.lib._memotable[memokey] = ''
return ''
# Find the first disambiguator that distinguishes the albums.
# Find the first disambiguator that distinguishes the items.
for disambiguator in disam:
# Get the value for each album for the current field.
disam_values = {a.get(disambiguator, '') for a in albums}
# Get the value for each item for the current field.
disam_values = {s.get(disambiguator, '') for s in ambigous_items}
# If the set of unique values is equal to the number of
# albums in the disambiguation set, we're done -- this is
# items in the disambiguation set, we're done -- this is
# sufficient disambiguation.
if len(disam_values) == len(albums):
if len(disam_values) == len(ambigous_items):
break
else:
# No disambiguator distinguished all fields.
res = f' {bracket_l}{album.id}{bracket_r}'
res = f' {bracket_l}{item_id}{bracket_r}'
self.lib._memotable[memokey] = res
return res
# Flatten disambiguation value into a string.
disam_value = album.formatted(for_path=True).get(disambiguator)
disam_value = db_item.formatted(for_path=True).get(disambiguator)
# Return empty string if disambiguator is empty.
if disam_value:
@ -1728,14 +1827,15 @@ class DefaultTemplateFunctions:
@staticmethod
def tmpl_first(s, count=1, skip=0, sep='; ', join_str='; '):
""" Gets the item(s) from x to y in a string separated by something
and join then with something
"""Get the item(s) from x to y in a string separated by something
and join then with something.
:param s: the string
:param count: The number of items included
:param skip: The number of items skipped
:param sep: the separator. Usually is '; ' (default) or '/ '
:param join_str: the string which will join the items, default '; '.
Args:
s: the string
count: The number of items included
skip: The number of items skipped
sep: the separator. Usually is '; ' (default) or '/ '
join_str: the string which will join the items, default '; '.
"""
skip = int(skip)
count = skip + int(count)
@ -1745,10 +1845,13 @@ class DefaultTemplateFunctions:
""" If field exists return trueval or the field (default)
otherwise, emit return falseval (if provided).
:param field: The name of the field
:param trueval: The string if the condition is true
:param falseval: The string if the condition is false
:return: The string, based on condition
Args:
field: The name of the field
trueval: The string if the condition is true
falseval: The string if the condition is false
Returns:
The string, based on condition.
"""
if field in self.item:
return trueval if trueval else self.item.formatted().get(field)

View file

@ -16,11 +16,18 @@
import mediafile
import warnings
warnings.warn("beets.mediafile is deprecated; use mediafile instead")
warnings.warn(
"beets.mediafile is deprecated; use mediafile instead",
# Show the location of the `import mediafile` statement as the warning's
# source, rather than this file, such that the offending module can be
# identified easily.
stacklevel=2,
)
# Import everything from the mediafile module into this module.
for key, value in mediafile.__dict__.items():
if key not in ['__name__']:
globals()[key] = value
# Cleanup namespace.
del key, value, warnings, mediafile

View file

@ -581,7 +581,7 @@ def _colordiff(a, b, highlight='text_highlight',
a_out.append(colorize(color, a[a_start:a_end]))
b_out.append(colorize(color, b[b_start:b_end]))
else:
assert(False)
assert False
return ''.join(a_out), ''.join(b_out)
@ -757,15 +757,21 @@ def show_path_changes(path_changes):
if max_width > col_width:
# Print every change over two lines
for source, dest in zip(sources, destinations):
log.info('{0} \n -> {1}', source, dest)
color_source, color_dest = colordiff(source, dest)
print_('{0} \n -> {1}'.format(color_source, color_dest))
else:
# Print every change on a single line, and add a header
title_pad = max_width - len('Source ') + len(' -> ')
log.info('Source {0} Destination', ' ' * title_pad)
print_('Source {0} Destination'.format(' ' * title_pad))
for source, dest in zip(sources, destinations):
pad = max_width - len(source)
log.info('{0} {1} -> {2}', source, ' ' * pad, dest)
color_source, color_dest = colordiff(source, dest)
print_('{0} {1} -> {2}'.format(
color_source,
' ' * pad,
color_dest,
))
# Helper functions for option parsing.
@ -1122,8 +1128,12 @@ def _load_plugins(options, config):
else:
plugin_list = config['plugins'].as_str_seq()
# Exclude any plugins that were specified on the command line
if options.exclude is not None:
plugin_list = [p for p in plugin_list
if p not in options.exclude.split(',')]
plugins.load_plugins(plugin_list)
plugins.send("pluginload")
return plugins
@ -1139,16 +1149,6 @@ def _setup(options, lib=None):
plugins = _load_plugins(options, config)
# Get the default subcommands.
from beets.ui.commands import default_commands
subcommands = list(default_commands)
subcommands.extend(plugins.commands())
if lib is None:
lib = _open_library(config)
plugins.send("library_opened", lib=lib)
# Add types and queries defined by plugins.
plugin_types_album = plugins.types(library.Album)
library.Album._types.update(plugin_types_album)
@ -1160,6 +1160,18 @@ def _setup(options, lib=None):
library.Item._queries.update(plugins.named_queries(library.Item))
library.Album._queries.update(plugins.named_queries(library.Album))
plugins.send("pluginload")
# Get the default subcommands.
from beets.ui.commands import default_commands
subcommands = list(default_commands)
subcommands.extend(plugins.commands())
if lib is None:
lib = _open_library(config)
plugins.send("library_opened", lib=lib)
return subcommands, plugins, lib
@ -1200,10 +1212,22 @@ def _configure(options):
return config
def _ensure_db_directory_exists(path):
    """Make sure the directory holding the database file exists.

    An in-memory database needs no directory. Otherwise, when the
    directory is missing, ask the user whether to create it.
    """
    if path == b':memory:':
        return
    dirname = os.path.dirname(path)
    if os.path.isdir(dirname):
        return
    prompt = ("The database directory {} does not exist. "
              "Create it (Y/n)?".format(util.displayable_path(dirname)))
    if input_yn(prompt):
        os.makedirs(dirname)
def _open_library(config):
"""Create a new library instance from the configuration.
"""
dbpath = util.bytestring_path(config['library'].as_filename())
_ensure_db_directory_exists(dbpath)
try:
lib = library.Library(
dbpath,
@ -1242,6 +1266,8 @@ def _raw_main(args, lib=None):
help='path to configuration file')
parser.add_option('-p', '--plugins', dest='plugins',
help='a comma-separated list of plugins to load')
parser.add_option('-P', '--disable-plugins', dest='exclude',
help='a comma-separated list of plugins to disable')
parser.add_option('-h', '--help', dest='help', action='store_true',
help='show this help message and exit')
parser.add_option('--version', dest='version', action='store_true',

View file

@ -79,6 +79,43 @@ def _do_query(lib, query, album, also_items=True):
return items, albums
def _paths_from_logfile(path):
"""Parse the logfile and yield skipped paths to pass to the `import`
command.
"""
with open(path, mode="r", encoding="utf-8") as fp:
for i, line in enumerate(fp, start=1):
verb, sep, paths = line.rstrip("\n").partition(" ")
if not sep:
raise ValueError(f"line {i} is invalid")
# Ignore informational lines that don't need to be re-imported.
if verb in {"import", "duplicate-keep", "duplicate-replace"}:
continue
if verb not in {"asis", "skip", "duplicate-skip"}:
raise ValueError(f"line {i} contains unknown verb {verb}")
yield os.path.commonpath(paths.split("; "))
def _parse_logfiles(logfiles):
    """Parse each logfile in `logfiles` and yield the skipped paths.

    Raise `ui.UserError` when a logfile is malformed or unreadable.
    """
    for logfile in logfiles:
        try:
            yield from _paths_from_logfile(syspath(normpath(logfile)))
        except ValueError as err:
            msg = 'malformed logfile {}: {}'.format(
                util.displayable_path(logfile), str(err))
            raise ui.UserError(msg) from err
        except IOError as err:
            msg = 'unreadable logfile {}: {}'.format(
                util.displayable_path(logfile), str(err))
            raise ui.UserError(msg) from err
# fields: Shows a list of available fields for queries and format strings.
def _print_keys(query):
@ -913,12 +950,6 @@ def import_files(lib, paths, query):
"""Import the files in the given list of paths or matching the
query.
"""
# Check the user-specified directories.
for path in paths:
if not os.path.exists(syspath(normpath(path))):
raise ui.UserError('no such file or directory: {}'.format(
displayable_path(path)))
# Check parameter consistency.
if config['import']['quiet'] and config['import']['timid']:
raise ui.UserError("can't be both quiet and timid")
@ -960,7 +991,12 @@ def import_func(lib, opts, args):
else:
query = None
paths = args
if not paths:
# The paths from the logfiles go into a separate list to allow handling
# errors differently from user-specified paths.
paths_from_logfiles = list(_parse_logfiles(opts.from_logfiles or []))
if not paths and not paths_from_logfiles:
raise ui.UserError('no path specified')
# On Python 2, we used to get filenames as raw bytes, which is
@ -969,6 +1005,31 @@ def import_func(lib, opts, args):
# filename.
paths = [p.encode(util.arg_encoding(), 'surrogateescape')
for p in paths]
paths_from_logfiles = [p.encode(util.arg_encoding(), 'surrogateescape')
for p in paths_from_logfiles]
# Check the user-specified directories.
for path in paths:
if not os.path.exists(syspath(normpath(path))):
raise ui.UserError('no such file or directory: {}'.format(
displayable_path(path)))
# Check the directories from the logfiles, but don't throw an error in
# case those paths don't exist. Maybe some of those paths have already
# been imported and moved separately, so logging a warning should
# suffice.
for path in paths_from_logfiles:
if not os.path.exists(syspath(normpath(path))):
log.warning('No such file or directory: {}'.format(
displayable_path(path)))
continue
paths.append(path)
# If all paths were read from a logfile, and none of them exist, throw
# an error
if not paths:
raise ui.UserError('none of the paths are importable')
import_files(lib, paths, query)
@ -1061,6 +1122,11 @@ import_cmd.parser.add_option(
metavar='ID',
help='restrict matching to a specific metadata backend ID'
)
import_cmd.parser.add_option(
'--from-logfile', dest='from_logfiles', action='append',
metavar='PATH',
help='read skipped paths from an existing logfile'
)
import_cmd.parser.add_option(
'--set', dest='set_fields', action='callback',
callback=_store_dict,

View file

@ -19,6 +19,7 @@ import sys
import errno
import locale
import re
import tempfile
import shutil
import fnmatch
import functools
@ -478,24 +479,46 @@ def move(path, dest, replace=False):
instead, in which case metadata will *not* be preserved. Paths are
translated to system paths.
"""
if os.path.isdir(syspath(path)):
raise FilesystemError(u'source is directory', 'move', (path, dest))
if os.path.isdir(syspath(dest)):
raise FilesystemError(u'destination is directory', 'move',
(path, dest))
if samefile(path, dest):
return
path = syspath(path)
dest = syspath(dest)
if os.path.exists(dest) and not replace:
if os.path.exists(syspath(dest)) and not replace:
raise FilesystemError('file exists', 'rename', (path, dest))
# First, try renaming the file.
try:
os.rename(path, dest)
os.replace(syspath(path), syspath(dest))
except OSError:
# Otherwise, copy and delete the original.
# Copy the file to a temporary destination.
basename = os.path.basename(bytestring_path(dest))
dirname = os.path.dirname(bytestring_path(dest))
tmp = tempfile.NamedTemporaryFile(
suffix=syspath(b'.beets', prefix=False),
prefix=syspath(b'.' + basename, prefix=False),
dir=syspath(dirname),
delete=False,
)
try:
shutil.copyfile(path, dest)
os.remove(path)
with open(syspath(path), 'rb') as f:
shutil.copyfileobj(f, tmp)
finally:
tmp.close()
# Move the copied file into place.
try:
os.replace(tmp.name, syspath(dest))
tmp = None
os.remove(syspath(path))
except OSError as exc:
raise FilesystemError(exc, 'move', (path, dest),
traceback.format_exc())
finally:
if tmp is not None:
os.remove(tmp)
def link(path, dest, replace=False):

View file

@ -16,18 +16,17 @@
public resizing proxy if neither is available.
"""
from itertools import chain
import subprocess
import os
import os.path
import platform
import re
from tempfile import NamedTemporaryFile
from urllib.parse import urlencode
from beets import logging
from beets import util
# Resizing methods
PIL = 1
IMAGEMAGICK = 2
WEBPROXY = 3
from beets.util import bytestring_path, displayable_path, py3_path, syspath
PROXY_URL = 'https://images.weserv.nl/'
@ -54,145 +53,417 @@ def temp_file_for(path):
specified path.
"""
ext = os.path.splitext(path)[1]
with NamedTemporaryFile(suffix=util.py3_path(ext), delete=False) as f:
return util.bytestring_path(f.name)
with NamedTemporaryFile(suffix=py3_path(ext), delete=False) as f:
return bytestring_path(f.name)
def pil_resize(maxwidth, path_in, path_out=None, quality=0, max_filesize=0):
"""Resize using Python Imaging Library (PIL). Return the output path
of resized image.
"""
path_out = path_out or temp_file_for(path_in)
from PIL import Image
class LocalBackendNotAvailableError(Exception):
    """Raised when a local image backend (PIL or ImageMagick) cannot be
    found or used on this system.
    """
log.debug('artresizer: PIL resizing {0} to {1}',
util.displayable_path(path_in), util.displayable_path(path_out))
try:
im = Image.open(util.syspath(path_in))
size = maxwidth, maxwidth
im.thumbnail(size, Image.ANTIALIAS)
_NOT_AVAILABLE = object()
if quality == 0:
# Use PIL's default quality.
quality = -1
im.save(util.py3_path(path_out), quality=quality)
if max_filesize > 0:
# If maximum filesize is set, we attempt to lower the quality of
# jpeg conversion by a proportional amount, up to 3 attempts
# First, set the maximum quality to either provided, or 95
if quality > 0:
lower_qual = quality
else:
lower_qual = 95
for i in range(5):
# 5 attempts is an abitrary choice
filesize = os.stat(util.syspath(path_out)).st_size
log.debug("PIL Pass {0} : Output size: {1}B", i, filesize)
if filesize <= max_filesize:
return path_out
# The relationship between filesize & quality will be
# image dependent.
lower_qual -= 10
# Restrict quality dropping below 10
if lower_qual < 10:
lower_qual = 10
# Use optimize flag to improve filesize decrease
im.save(
util.py3_path(path_out), quality=lower_qual, optimize=True
)
log.warning("PIL Failed to resize file to below {0}B",
max_filesize)
return path_out
class LocalBackend:
    """Common base for locally-installed image backends.

    Subclasses implement `version()`, which raises
    `LocalBackendNotAvailableError` when the backend is missing.
    """

    @classmethod
    def available(cls):
        """Return True when this backend is usable on this system."""
        try:
            cls.version()
        except LocalBackendNotAvailableError:
            return False
        return True
class IMBackend(LocalBackend):
NAME = "ImageMagick"
# These fields are used as a cache for `version()`. `_legacy` indicates
# whether the modern `magick` binary is available or whether to fall back
# to the old-style `convert`, `identify`, etc. commands.
_version = None
_legacy = None
@classmethod
def version(cls):
"""Obtain and cache ImageMagick version.
Raises `LocalBackendNotAvailableError` if not available.
"""
if cls._version is None:
for cmd_name, legacy in (('magick', False), ('convert', True)):
try:
out = util.command_output([cmd_name, "--version"]).stdout
except (subprocess.CalledProcessError, OSError) as exc:
log.debug('ImageMagick version check failed: {}', exc)
cls._version = _NOT_AVAILABLE
else:
if b'imagemagick' in out.lower():
pattern = br".+ (\d+)\.(\d+)\.(\d+).*"
match = re.search(pattern, out)
if match:
cls._version = (int(match.group(1)),
int(match.group(2)),
int(match.group(3)))
cls._legacy = legacy
if cls._version is _NOT_AVAILABLE:
raise LocalBackendNotAvailableError()
else:
return cls._version
def __init__(self):
    """Create an ImageMagick wrapper for local image operations.

    Probes for ImageMagick (raising `LocalBackendNotAvailableError`
    when it is missing) and selects the command style to use.
    """
    self.version()

    # Modern ImageMagick ships a single `magick` entry point; legacy
    # installations provide separate convert/identify/compare tools.
    legacy = self._legacy
    self.convert_cmd = ['convert'] if legacy else ['magick']
    self.identify_cmd = ['identify'] if legacy else ['magick', 'identify']
    self.compare_cmd = ['compare'] if legacy else ['magick', 'compare']
def resize(self, maxwidth, path_in, path_out=None, quality=0,
           max_filesize=0):
    """Resize an image with ImageMagick and return the output path.

    Images wider than `maxwidth` are shrunk, keeping the aspect
    ratio. Nonzero `quality` and `max_filesize` are forwarded to the
    JPEG encoder. On failure, the input path is returned unchanged.
    """
    if path_out is None:
        path_out = temp_file_for(path_in)
    log.debug('artresizer: ImageMagick resizing {0} to {1}',
              displayable_path(path_in), displayable_path(path_out))

    # "-resize WIDTHx>" only shrinks images wider than WIDTH while
    # preserving the aspect ratio. No interlacing is already
    # ImageMagick's default; it is spelled out for explicitness.
    cmd = self.convert_cmd + [
        syspath(path_in, prefix=False),
        '-resize', f'{maxwidth}x>',
        '-interlace', 'none',
    ]

    if quality > 0:
        cmd += ['-quality', f'{quality}']

    if max_filesize > 0:
        # "-define jpeg:extent=SIZEb" asks ImageMagick to target an
        # output file of at most SIZE bytes.
        cmd += ['-define', f'jpeg:extent={max_filesize}b']

    cmd.append(syspath(path_out, prefix=False))

    try:
        util.command_output(cmd)
    except subprocess.CalledProcessError:
        log.warning('artresizer: IM convert failed for {0}',
                    displayable_path(path_in))
        return path_in
    return path_out
def get_size(self, path_in):
    """Return the (width, height) of the image at `path_in`, or None
    when ImageMagick fails or its output cannot be parsed.
    """
    cmd = self.identify_cmd + [
        '-format', '%w %h', syspath(path_in, prefix=False)
    ]

    try:
        out = util.command_output(cmd).stdout
    except subprocess.CalledProcessError as exc:
        log.warning('ImageMagick size query failed')
        log.debug(
            '`convert` exited with (status {}) when '
            'getting size with command {}:\n{}',
            exc.returncode, cmd, exc.output.strip()
        )
        return None
    try:
        return tuple(map(int, out.split(b' ')))
    except (IndexError, ValueError):
        # int() raises ValueError on unparsable output; catching only
        # IndexError (as before) let that crash instead of warning.
        log.warning('Could not understand IM output: {0!r}', out)
        return None
def deinterlace(self, path_in, path_out=None):
path_out = path_out or temp_file_for(path_in)
cmd = self.convert_cmd + [
syspath(path_in, prefix=False),
'-interlace', 'none',
syspath(path_out, prefix=False),
]
try:
util.command_output(cmd)
return path_out
except OSError:
log.error("PIL cannot create thumbnail for '{0}'",
util.displayable_path(path_in))
return path_in
except subprocess.CalledProcessError:
# FIXME: Should probably issue a warning?
return path_in
def get_format(self, filepath):
cmd = self.identify_cmd + [
'-format', '%[magick]',
syspath(filepath)
]
def im_resize(maxwidth, path_in, path_out=None, quality=0, max_filesize=0):
"""Resize using ImageMagick.
try:
return util.command_output(cmd).stdout
except subprocess.CalledProcessError:
# FIXME: Should probably issue a warning?
return None
Use the ``magick`` program or ``convert`` on older versions. Return
the output path of resized image.
"""
path_out = path_out or temp_file_for(path_in)
log.debug('artresizer: ImageMagick resizing {0} to {1}',
util.displayable_path(path_in), util.displayable_path(path_out))
def convert_format(self, source, target, deinterlaced):
cmd = self.convert_cmd + [
syspath(source),
*(["-interlace", "none"] if deinterlaced else []),
syspath(target),
]
# "-resize WIDTHx>" shrinks images with the width larger
# than the given width while maintaining the aspect ratio
# with regards to the height.
cmd = ArtResizer.shared.im_convert_cmd + [
util.syspath(path_in, prefix=False),
'-resize', f'{maxwidth}x>',
]
try:
subprocess.check_call(
cmd,
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL
)
return target
except subprocess.CalledProcessError:
# FIXME: Should probably issue a warning?
return source
if quality > 0:
cmd += ['-quality', f'{quality}']
@property
def can_compare(self):
    """Whether `compare` is usable; the PHASH metric requires an
    ImageMagick newer than 6.8.7.
    """
    return self.version() > (6, 8, 7)
# "-define jpeg:extent=SIZEb" sets the target filesize for imagemagick to
# SIZE in bytes.
if max_filesize > 0:
cmd += ['-define', f'jpeg:extent={max_filesize}b']
def compare(self, im1, im2, compare_threshold):
is_windows = platform.system() == "Windows"
cmd.append(util.syspath(path_out, prefix=False))
try:
util.command_output(cmd)
except subprocess.CalledProcessError:
log.warning('artresizer: IM convert failed for {0}',
util.displayable_path(path_in))
return path_in
return path_out
BACKEND_FUNCS = {
PIL: pil_resize,
IMAGEMAGICK: im_resize,
}
def pil_getsize(path_in):
    """Return the (width, height) of the image at `path_in` using PIL,
    or None (implicitly) when the file cannot be read.
    """
    from PIL import Image
    try:
        return Image.open(util.syspath(path_in)).size
    except OSError as exc:
        log.error("PIL could not read file {}: {}",
                  util.displayable_path(path_in), exc)
def im_getsize(path_in):
cmd = ArtResizer.shared.im_identify_cmd + \
['-format', '%w %h', util.syspath(path_in, prefix=False)]
try:
out = util.command_output(cmd).stdout
except subprocess.CalledProcessError as exc:
log.warning('ImageMagick size query failed')
log.debug(
'`convert` exited with (status {}) when '
'getting size with command {}:\n{}',
exc.returncode, cmd, exc.output.strip()
# Converting images to grayscale tends to minimize the weight
# of colors in the diff score. So we first convert both images
# to grayscale and then pipe them into the `compare` command.
# On Windows, ImageMagick doesn't support the magic \\?\ prefix
# on paths, so we pass `prefix=False` to `syspath`.
convert_cmd = self.convert_cmd + [
syspath(im2, prefix=False), syspath(im1, prefix=False),
'-colorspace', 'gray', 'MIFF:-'
]
compare_cmd = self.compare_cmd + [
'-metric', 'PHASH', '-', 'null:',
]
log.debug('comparing images with pipeline {} | {}',
convert_cmd, compare_cmd)
convert_proc = subprocess.Popen(
convert_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=not is_windows,
)
return
try:
return tuple(map(int, out.split(b' ')))
except IndexError:
log.warning('Could not understand IM output: {0!r}', out)
compare_proc = subprocess.Popen(
compare_cmd,
stdin=convert_proc.stdout,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=not is_windows,
)
# Check the convert output. We're not interested in the
# standard output; that gets piped to the next stage.
convert_proc.stdout.close()
convert_stderr = convert_proc.stderr.read()
convert_proc.stderr.close()
convert_proc.wait()
if convert_proc.returncode:
log.debug(
'ImageMagick convert failed with status {}: {!r}',
convert_proc.returncode,
convert_stderr,
)
return None
# Check the compare output.
stdout, stderr = compare_proc.communicate()
if compare_proc.returncode:
if compare_proc.returncode != 1:
log.debug('ImageMagick compare failed: {0}, {1}',
displayable_path(im2), displayable_path(im1))
return None
out_str = stderr
else:
out_str = stdout
try:
phash_diff = float(out_str)
except ValueError:
log.debug('IM output is not a number: {0!r}', out_str)
return None
log.debug('ImageMagick compare score: {0}', phash_diff)
return phash_diff <= compare_threshold
@property
def can_write_metadata(self):
    """ImageMagick can always embed metadata via `-set`."""
    return True
def write_metadata(self, file, metadata):
    """Write the key/value pairs in `metadata` into the image `file`
    in place, using ImageMagick's `-set` option.
    """
    assignments = []
    for key, value in metadata.items():
        assignments += ['-set', key, value]
    command = self.convert_cmd + [file] + assignments + [file]
    util.command_output(command)
BACKEND_GET_SIZE = {
PIL: pil_getsize,
IMAGEMAGICK: im_getsize,
}
class PILBackend(LocalBackend):
NAME = "PIL"
@classmethod
def version(cls):
    """Check that PIL is importable.

    Raise `LocalBackendNotAvailableError` when PIL is not installed;
    otherwise return None (PIL's actual version is not inspected).
    """
    try:
        __import__('PIL', fromlist=['Image'])
    except ImportError:
        raise LocalBackendNotAvailableError()
def __init__(self):
    """Create a PIL-backed wrapper for local image operations.

    Raise `LocalBackendNotAvailableError` when PIL is missing.
    """
    # `version()` probes for PIL and raises if it is unavailable.
    self.version()
def resize(self, maxwidth, path_in, path_out=None, quality=0,
           max_filesize=0):
    """Resize using Python Imaging Library (PIL). Return the output path
    of resized image.

    `quality` is the JPEG quality (0 means PIL's default) and
    `max_filesize`, when nonzero, is a best-effort byte limit for the
    output. On failure, the input path is returned unchanged.
    """
    path_out = path_out or temp_file_for(path_in)
    from PIL import Image

    log.debug('artresizer: PIL resizing {0} to {1}',
              displayable_path(path_in), displayable_path(path_out))
    try:
        im = Image.open(syspath(path_in))
        size = maxwidth, maxwidth
        im.thumbnail(size, Image.ANTIALIAS)

        if quality == 0:
            # Use PIL's default quality.
            quality = -1

        # progressive=False only affects JPEGs and is the default,
        # but we include it here for explicitness.
        im.save(py3_path(path_out), quality=quality, progressive=False)

        if max_filesize > 0:
            # If a maximum filesize is set, attempt to lower the JPEG
            # quality step by step until the output fits.
            # First, set the starting quality to the provided value, or 95.
            if quality > 0:
                lower_qual = quality
            else:
                lower_qual = 95
            for i in range(5):
                # 5 attempts is an arbitrary choice.
                filesize = os.stat(syspath(path_out)).st_size
                log.debug("PIL Pass {0} : Output size: {1}B", i, filesize)
                if filesize <= max_filesize:
                    return path_out
                # The relationship between filesize & quality will be
                # image dependent.
                lower_qual -= 10
                # Restrict quality from dropping below 10.
                if lower_qual < 10:
                    lower_qual = 10
                # Use optimize flag to improve filesize decrease.
                im.save(py3_path(path_out), quality=lower_qual,
                        optimize=True, progressive=False)
            log.warning("PIL Failed to resize file to below {0}B",
                        max_filesize)
            return path_out
        else:
            return path_out
    except OSError:
        log.error("PIL cannot create thumbnail for '{0}'",
                  displayable_path(path_in))
        return path_in
def get_size(self, path_in):
from PIL import Image
try:
im = Image.open(syspath(path_in))
return im.size
except OSError as exc:
log.error("PIL could not read file {}: {}",
displayable_path(path_in), exc)
return None
def deinterlace(self, path_in, path_out=None):
path_out = path_out or temp_file_for(path_in)
from PIL import Image
try:
im = Image.open(syspath(path_in))
im.save(py3_path(path_out), progressive=False)
return path_out
except IOError:
# FIXME: Should probably issue a warning?
return path_in
def get_format(self, filepath):
from PIL import Image, UnidentifiedImageError
try:
with Image.open(syspath(filepath)) as im:
return im.format
except (ValueError, TypeError, UnidentifiedImageError,
FileNotFoundError):
log.exception("failed to detect image format for {}", filepath)
return None
def convert_format(self, source, target, deinterlaced):
from PIL import Image, UnidentifiedImageError
try:
with Image.open(syspath(source)) as im:
im.save(py3_path(target), progressive=not deinterlaced)
return target
except (ValueError, TypeError, UnidentifiedImageError,
FileNotFoundError, OSError):
log.exception("failed to convert image {} -> {}", source, target)
return source
@property
def can_compare(self):
return False
def compare(self, im1, im2, compare_threshold):
# It is an error to call this when ArtResizer.can_compare is not True.
raise NotImplementedError()
@property
def can_write_metadata(self):
return True
def write_metadata(self, file, metadata):
from PIL import Image, PngImagePlugin
# FIXME: Detect and handle other file types (currently, the only user
# is the thumbnails plugin, which generates PNG images).
im = Image.open(file)
meta = PngImagePlugin.PngInfo()
for k, v in metadata.items():
meta.add_text(k, v, 0)
im.save(file, "PNG", pnginfo=meta)
class Shareable(type):
@ -213,6 +484,12 @@ class Shareable(type):
return cls._instance
# Local backends in order of preference; ArtResizer instantiates the
# first one whose constructor does not raise
# LocalBackendNotAvailableError.
BACKEND_CLASSES = [
    IMBackend,
    PILBackend,
]
class ArtResizer(metaclass=Shareable):
"""A singleton class that performs image resizes.
"""
@ -220,21 +497,25 @@ class ArtResizer(metaclass=Shareable):
def __init__(self):
"""Create a resizer object with an inferred method.
"""
self.method = self._check_method()
log.debug("artresizer: method is {0}", self.method)
self.can_compare = self._can_compare()
# Check if a local backend is available, and store an instance of the
# backend class. Otherwise, fall back to the web proxy.
for backend_cls in BACKEND_CLASSES:
try:
self.local_method = backend_cls()
log.debug(f"artresizer: method is {self.local_method.NAME}")
break
except LocalBackendNotAvailableError:
continue
else:
log.debug("artresizer: method is WEBPROXY")
self.local_method = None
# Use ImageMagick's magick binary when it's available. If it's
# not, fall back to the older, separate convert and identify
# commands.
if self.method[0] == IMAGEMAGICK:
self.im_legacy = self.method[2]
if self.im_legacy:
self.im_convert_cmd = ['convert']
self.im_identify_cmd = ['identify']
else:
self.im_convert_cmd = ['magick']
self.im_identify_cmd = ['magick', 'identify']
@property
def method(self):
    """The name of the active resizing method: the local backend's
    ``NAME``, or ``"WEBPROXY"`` when no local backend is available.
    """
    return self.local_method.NAME if self.local else "WEBPROXY"
def resize(
self, maxwidth, path_in, path_out=None, quality=0, max_filesize=0
@ -245,10 +526,23 @@ class ArtResizer(metaclass=Shareable):
For WEBPROXY, returns `path_in` unmodified.
"""
if self.local:
func = BACKEND_FUNCS[self.method[0]]
return func(maxwidth, path_in, path_out,
quality=quality, max_filesize=max_filesize)
return self.local_method.resize(
maxwidth, path_in, path_out,
quality=quality, max_filesize=max_filesize
)
else:
# Handled by `proxy_url` already.
return path_in
def deinterlace(self, path_in, path_out=None):
    """Deinterlace an image and return the output path.

    Only available locally; with the web-proxy method the input
    path is returned untouched.
    """
    if not self.local:
        # FIXME: Should probably issue a warning?
        return path_in
    return self.local_method.deinterlace(path_in, path_out)
def proxy_url(self, maxwidth, url, quality=0):
@ -257,6 +551,7 @@ class ArtResizer(metaclass=Shareable):
Otherwise, the URL is returned unmodified.
"""
if self.local:
# Going to be handled by `resize()`.
return url
else:
return resize_url(url, maxwidth, quality)
@ -266,76 +561,98 @@ class ArtResizer(metaclass=Shareable):
"""A boolean indicating whether the resizing method is performed
locally (i.e., PIL or ImageMagick).
"""
return self.method[0] in BACKEND_FUNCS
return self.local_method is not None
def get_size(self, path_in):
"""Return the size of an image file as an int couple (width, height)
in pixels.
Only available locally
Only available locally.
"""
if self.local:
func = BACKEND_GET_SIZE[self.method[0]]
return func(path_in)
return self.local_method.get_size(path_in)
else:
# FIXME: Should probably issue a warning?
return path_in
def _can_compare(self):
def get_format(self, path_in):
    """Return the format of the image at `path_in` as a string.

    Only available locally; returns None with the web-proxy method.
    """
    if not self.local:
        # FIXME: Should probably issue a warning?
        return None
    return self.local_method.get_format(path_in)
def reformat(self, path_in, new_format, deinterlaced=True):
    """Converts image to desired format, updating its extension, but
    keeping the same filename.

    Only available locally. `path_in` is a bytestring path; on success
    the original file is deleted and the new path returned, otherwise
    `path_in` is returned unchanged.
    """
    if not self.local:
        # FIXME: Should probably issue a warning?
        return path_in

    new_format = new_format.lower()
    # A nonexhaustive map of image "types" to extensions overrides
    new_format = {
        'jpeg': 'jpg',
    }.get(new_format, new_format)

    # Build the target path from the original name with the new
    # extension (bytes concatenation: paths here are bytestrings).
    fname, ext = os.path.splitext(path_in)
    path_new = fname + b'.' + new_format.encode('utf8')

    # allows the exception to propagate, while still making sure a changed
    # file path was removed
    result_path = path_in
    try:
        result_path = self.local_method.convert_format(
            path_in, path_new, deinterlaced
        )
    finally:
        # Only delete the original when conversion produced a new file.
        if result_path != path_in:
            os.unlink(path_in)
    return result_path
@property
def can_compare(self):
"""A boolean indicating whether image comparison is available"""
return self.method[0] == IMAGEMAGICK and self.method[1] > (6, 8, 7)
@staticmethod
def _check_method():
    """Return a tuple indicating an available method and its version.

    The result has at least two elements:
    - The method, either WEBPROXY, PIL, or IMAGEMAGICK.
    - The version.

    If the method is IMAGEMAGICK, there is also a third element: a
    bool flag indicating whether to use the `magick` binary or
    legacy single-purpose executables (`convert`, `identify`, etc.)
    """
    # Prefer ImageMagick; fall back to PIL, then the web proxy.
    version = get_im_version()
    if version:
        version, legacy = version
        return IMAGEMAGICK, version, legacy

    version = get_pil_version()
    if version:
        return PIL, version

    # NOTE: `(0)` is just the integer 0, not a one-element tuple.
    return WEBPROXY, (0)
def get_im_version():
"""Get the ImageMagick version and legacy flag as a pair. Or return
None if ImageMagick is not available.
"""
for cmd_name, legacy in ((['magick'], False), (['convert'], True)):
cmd = cmd_name + ['--version']
try:
out = util.command_output(cmd).stdout
except (subprocess.CalledProcessError, OSError) as exc:
log.debug('ImageMagick version check failed: {}', exc)
if self.local:
return self.local_method.can_compare
else:
if b'imagemagick' in out.lower():
pattern = br".+ (\d+)\.(\d+)\.(\d+).*"
match = re.search(pattern, out)
if match:
version = (int(match.group(1)),
int(match.group(2)),
int(match.group(3)))
return version, legacy
return False
return None
def compare(self, im1, im2, compare_threshold):
    """Return a boolean indicating whether two images are similar.

    Only available locally; returns None with the web-proxy method.
    """
    if not self.local:
        # FIXME: Should probably issue a warning?
        return None
    return self.local_method.compare(im1, im2, compare_threshold)
def get_pil_version():
    """Return a placeholder version tuple if PIL/Pillow is importable.

    Yields ``(0,)`` when the library is present and ``None`` when the
    import fails; only availability matters, not the actual version.
    """
    try:
        __import__('PIL', fromlist=['Image'])
    except ImportError:
        return None
    return (0,)
@property
def can_write_metadata(self):
    """A boolean indicating whether writing image metadata is supported."""
    if not self.local:
        return False
    return self.local_method.can_write_metadata
def write_metadata(self, file, metadata):
    """Write key-value metadata to the image file.

    Only available locally. Currently, expects the image to be a
    PNG file; a no-op with the web-proxy method.
    """
    if not self.local:
        # FIXME: Should probably issue a warning?
        return
    self.local_method.write_metadata(file, metadata)

View file

@ -16,7 +16,13 @@
import confuse
import warnings
warnings.warn("beets.util.confit is deprecated; use confuse instead")
warnings.warn(
"beets.util.confit is deprecated; use confuse instead",
# Show the location of the `import confit` statement as the warning's
# source, rather than this file, such that the offending module can be
# identified easily.
stacklevel=2,
)
# Import everything from the confuse module into this module.
for key, value in confuse.__dict__.items():

View file

@ -233,9 +233,10 @@ class AcousticPlugin(plugins.BeetsPlugin):
item.try_write()
def _map_data_to_scheme(self, data, scheme):
"""Given `data` as a structure of nested dictionaries, and `scheme` as a
structure of nested dictionaries , `yield` tuples `(attr, val)` where
`attr` and `val` are corresponding leaf nodes in `scheme` and `data`.
"""Given `data` as a structure of nested dictionaries, and
`scheme` as a structure of nested dictionaries , `yield` tuples
`(attr, val)` where `attr` and `val` are corresponding leaf
nodes in `scheme` and `data`.
As its name indicates, `scheme` defines how the data is structured,
so this function tries to find leaf nodes in `data` that correspond
@ -321,7 +322,7 @@ class AcousticPlugin(plugins.BeetsPlugin):
else:
yield v, subdata[k]
else:
self._log.warning('Acousticbrainz did not provide info'
self._log.warning('Acousticbrainz did not provide info '
'about {}', k)
self._log.debug('Data {} could not be mapped to scheme {} '
'because key {} was not found', subdata, v, k)

View file

@ -17,6 +17,7 @@
from mimetypes import guess_type
import re
import os.path
from os.path import isfile, getsize
from beets.plugins import BeetsPlugin
@ -595,6 +596,24 @@ class ArtistDocument(AURADocument):
return self.single_resource_document(artist_resource)
def safe_filename(fn):
    """Check whether a string is a simple (non-path) filename.

    For example, `foo.txt` is safe because it is a "plain" filename. But
    `foo/bar.txt` and `../foo.txt` and `.` are all non-safe because they
    can traverse to other directories other than the current one.
    """
    # An empty string is not a usable filename at all; reject it so the
    # caller cannot end up addressing the directory itself.
    if not fn:
        return False
    # Rule out any directories.
    if os.path.basename(fn) != fn:
        return False
    # In single names, rule out Unix directory traversal names.
    if fn in ('.', '..'):
        return False
    return True
class ImageDocument(AURADocument):
"""Class for building documents for /images/(id) endpoints."""
@ -616,6 +635,8 @@ class ImageDocument(AURADocument):
parent_type = id_split[0]
parent_id = id_split[1]
img_filename = "-".join(id_split[2:])
if not safe_filename(img_filename):
return None
# Get the path to the directory parent's images are in
if parent_type == "album":
@ -631,7 +652,7 @@ class ImageDocument(AURADocument):
# Images for other resource types are not supported
return None
img_path = dir_path + "/" + img_filename
img_path = os.path.join(dir_path, img_filename)
# Check the image actually exists
if isfile(img_path):
return img_path

View file

@ -510,7 +510,7 @@ class BaseServer:
"""Remove the song at index from the playlist."""
index = cast_arg(int, index)
try:
del(self.playlist[index])
del self.playlist[index]
except IndexError:
raise ArgumentIndexError()
self.playlist_version += 1

View file

@ -22,6 +22,7 @@ import subprocess
import tempfile
import shlex
from string import Template
import logging
from beets import ui, util, plugins, config
from beets.plugins import BeetsPlugin
@ -36,7 +37,7 @@ _temp_files = [] # Keep track of temporary transcoded files for deletion.
# Some convenient alternate names for formats.
ALIASES = {
'wma': 'windows media',
'windows media': 'wma',
'vorbis': 'ogg',
}
@ -137,6 +138,7 @@ class ConvertPlugin(BeetsPlugin):
},
'max_bitrate': 500,
'auto': False,
'auto_keep': False,
'tmpdir': None,
'quiet': False,
'embed': True,
@ -147,7 +149,7 @@ class ConvertPlugin(BeetsPlugin):
'album_art_maxwidth': 0,
'delete_originals': False,
})
self.early_import_stages = [self.auto_convert]
self.early_import_stages = [self.auto_convert, self.auto_convert_keep]
self.register_listener('import_task_files', self._cleanup)
@ -183,6 +185,16 @@ class ConvertPlugin(BeetsPlugin):
par_map(lambda item: self.convert_on_import(config.lib, item),
task.imported_items())
def auto_convert_keep(self, config, task):
    """Import-stage hook: when the `auto_keep` option is enabled,
    convert the imported items with `keep_new=False` using the
    plugin's configured settings.
    """
    if self.config['auto_keep']:
        # Use the `convert` command parser's default option values so
        # that only configuration (not CLI flags) determines behavior.
        empty_opts = self.commands()[0].parser.get_default_values()
        (dest, threads, path_formats, fmt,
         pretend, hardlink, link) = self._get_opts_and_config(empty_opts)

        items = task.imported_items()
        self._parallel_convert(dest, False, path_formats, fmt,
                               pretend, link, hardlink, threads, items)
# Utilities converted from functions to methods on logging overhaul
def encode(self, command, source, dest, pretend=False):
@ -342,9 +354,10 @@ class ConvertPlugin(BeetsPlugin):
if self.config['embed'] and not linked:
album = item._cached_album
if album and album.artpath:
maxwidth = self._get_art_resize(album.artpath)
self._log.debug('embedding album art from {}',
util.displayable_path(album.artpath))
art.embed_item(self._log, item, album.artpath,
art.embed_item(self._log, item, album.artpath, maxwidth,
itempath=converted, id3v23=id3v23)
if keep_new:
@ -388,20 +401,10 @@ class ConvertPlugin(BeetsPlugin):
return
# Decide whether we need to resize the cover-art image.
resize = False
maxwidth = None
if self.config['album_art_maxwidth']:
maxwidth = self.config['album_art_maxwidth'].get(int)
size = ArtResizer.shared.get_size(album.artpath)
self._log.debug('image size: {}', size)
if size:
resize = size[0] > maxwidth
else:
self._log.warning('Could not get size of image (please see '
'documentation for dependencies).')
maxwidth = self._get_art_resize(album.artpath)
# Either copy or resize (while copying) the image.
if resize:
if maxwidth is not None:
self._log.info('Resizing cover art from {0} to {1}',
util.displayable_path(album.artpath),
util.displayable_path(dest))
@ -431,31 +434,8 @@ class ConvertPlugin(BeetsPlugin):
util.copy(album.artpath, dest)
def convert_func(self, lib, opts, args):
dest = opts.dest or self.config['dest'].get()
if not dest:
raise ui.UserError('no convert destination set')
dest = util.bytestring_path(dest)
threads = opts.threads or self.config['threads'].get(int)
path_formats = ui.get_path_formats(self.config['paths'] or None)
fmt = opts.format or self.config['format'].as_str().lower()
if opts.pretend is not None:
pretend = opts.pretend
else:
pretend = self.config['pretend'].get(bool)
if opts.hardlink is not None:
hardlink = opts.hardlink
link = False
elif opts.link is not None:
hardlink = False
link = opts.link
else:
hardlink = self.config['hardlink'].get(bool)
link = self.config['link'].get(bool)
(dest, threads, path_formats, fmt,
pretend, hardlink, link) = self._get_opts_and_config(opts)
if opts.album:
albums = lib.albums(ui.decargs(args))
@ -480,16 +460,8 @@ class ConvertPlugin(BeetsPlugin):
self.copy_album_art(album, dest, path_formats, pretend,
link, hardlink)
convert = [self.convert_item(dest,
opts.keep_new,
path_formats,
fmt,
pretend,
link,
hardlink)
for _ in range(threads)]
pipe = util.pipeline.Pipeline([iter(items), convert])
pipe.run_parallel()
self._parallel_convert(dest, opts.keep_new, path_formats, fmt,
pretend, link, hardlink, threads, items)
def convert_on_import(self, lib, item):
"""Transcode a file automatically after it is imported into the
@ -523,12 +495,83 @@ class ConvertPlugin(BeetsPlugin):
item.store()
if self.config['delete_originals']:
self._log.info('Removing original file {0}', source_path)
self._log.log(
logging.DEBUG if self.config['quiet'] else logging.INFO,
'Removing original file {0}',
source_path,
)
util.remove(source_path, False)
def _get_art_resize(self, artpath):
    """Decide whether the album art at `artpath` must be shrunk.

    Returns the configured maximum width when the image is wider than
    allowed, and None when no resizing is needed (or the
    `album_art_maxwidth` setting is unset, or the size is unknown).
    """
    if not self.config['album_art_maxwidth']:
        return None

    maxwidth = self.config['album_art_maxwidth'].get(int)
    size = ArtResizer.shared.get_size(artpath)
    self._log.debug('image size: {}', size)

    if not size:
        self._log.warning('Could not get size of image (please see '
                          'documentation for dependencies).')
        return None

    return maxwidth if size[0] > maxwidth else None
def _cleanup(self, task, session):
for path in task.old_paths:
if path in _temp_files:
if os.path.isfile(path):
util.remove(path)
_temp_files.remove(path)
def _get_opts_and_config(self, opts):
    """Resolve the parameters the convert machinery needs.

    Command-line option values take precedence; configuration values
    are used where no option was given. Raises `ui.UserError` when no
    destination is set anywhere.
    """
    dest = opts.dest or self.config['dest'].get()
    if not dest:
        raise ui.UserError('no convert destination set')
    dest = util.bytestring_path(dest)

    threads = opts.threads or self.config['threads'].get(int)
    path_formats = ui.get_path_formats(self.config['paths'] or None)
    fmt = opts.format or self.config['format'].as_str().lower()

    pretend = (opts.pretend if opts.pretend is not None
               else self.config['pretend'].get(bool))

    # An explicit --hardlink wins over --link; otherwise both come
    # from configuration.
    if opts.hardlink is not None:
        hardlink, link = opts.hardlink, False
    elif opts.link is not None:
        hardlink, link = False, opts.link
    else:
        hardlink = self.config['hardlink'].get(bool)
        link = self.config['link'].get(bool)

    return dest, threads, path_formats, fmt, pretend, hardlink, link
def _parallel_convert(self, dest, keep_new, path_formats, fmt,
                      pretend, link, hardlink, threads, items):
    """Run `convert_item` over every item in `items`, using a pipeline
    with `threads` parallel conversion stages.
    """
    stages = []
    for _ in range(threads):
        stages.append(self.convert_item(dest, keep_new, path_formats,
                                        fmt, pretend, link, hardlink))
    pipe = util.pipeline.Pipeline([iter(items), stages])
    pipe.run_parallel()

View file

@ -77,11 +77,16 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin):
"by {} API: '{}'".format(self.data_source, release_date)
)
tracks_data = requests.get(
tracks_obj = requests.get(
self.album_url + deezer_id + '/tracks'
).json()['data']
).json()
tracks_data = tracks_obj['data']
if not tracks_data:
return None
while "next" in tracks_obj:
tracks_obj = requests.get(tracks_obj['next']).json()
tracks_data.extend(tracks_obj['data'])
tracks = []
medium_totals = collections.defaultdict(int)
for i, track_data in enumerate(tracks_data, start=1):
@ -128,9 +133,9 @@ class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin):
artist=artist,
artist_id=artist_id,
length=track_data['duration'],
index=track_data['track_position'],
medium=track_data['disk_number'],
medium_index=track_data['track_position'],
index=track_data.get('track_position'),
medium=track_data.get('disk_number'),
medium_index=track_data.get('track_position'),
data_source=self.data_source,
data_url=track_data['link'],
)

View file

@ -57,6 +57,7 @@ class DiscogsPlugin(BeetsPlugin):
'user_token': '',
'separator': ', ',
'index_tracks': False,
'append_style_genre': False,
})
self.config['apikey'].redact = True
self.config['apisecret'].redact = True
@ -157,6 +158,11 @@ class DiscogsPlugin(BeetsPlugin):
if not self.discogs_client:
return
if not album and not artist:
self._log.debug('Skipping Discogs query. Files missing album and '
'artist tags.')
return []
if va_likely:
query = album
else:
@ -313,8 +319,14 @@ class DiscogsPlugin(BeetsPlugin):
country = result.data.get('country')
data_url = result.data.get('uri')
style = self.format(result.data.get('styles'))
genre = self.format(result.data.get('genres'))
discogs_albumid = self.extract_release_id(result.data.get('uri'))
base_genre = self.format(result.data.get('genres'))
if self.config['append_style_genre'] and style:
genre = self.config['separator'].as_str().join([base_genre, style])
else:
genre = base_genre
discogs_albumid = self.extract_release_id_regex(result.data.get('uri'))
# Extract information for the optional AlbumInfo fields that are
# contained on nested discogs fields.
@ -366,12 +378,6 @@ class DiscogsPlugin(BeetsPlugin):
else:
return None
def extract_release_id(self, uri):
if uri:
return uri.split("/")[-1]
else:
return None
def get_tracks(self, tracklist):
"""Returns a list of TrackInfo objects for a discogs tracklist.
"""

View file

@ -79,14 +79,16 @@ class ExportPlugin(BeetsPlugin):
})
def commands(self):
# TODO: Add option to use albums
cmd = ui.Subcommand('export', help='export data from beets')
cmd.func = self.run
cmd.parser.add_option(
'-l', '--library', action='store_true',
help='show library fields instead of tags',
)
cmd.parser.add_option(
'-a', '--album', action='store_true',
help='show album fields instead of tracks (implies "--library")',
)
cmd.parser.add_option(
'--append', action='store_true', default=False,
help='if should append data to the file',
@ -121,14 +123,20 @@ class ExportPlugin(BeetsPlugin):
}
)
items = []
data_collector = library_data if opts.library else tag_data
if opts.library or opts.album:
data_collector = library_data
else:
data_collector = tag_data
included_keys = []
for keys in opts.included_keys:
included_keys.extend(keys.split(','))
for data_emitter in data_collector(lib, ui.decargs(args)):
items = []
for data_emitter in data_collector(
lib, ui.decargs(args),
album=opts.album,
):
try:
data, item = data_emitter(included_keys or '*')
except (mediafile.UnreadableFileError, OSError) as ex:
@ -139,8 +147,6 @@ class ExportPlugin(BeetsPlugin):
if isinstance(value, bytes):
data[key] = util.displayable_path(value)
items += [data]
if file_format_is_line_based:
export_format.export(data, **format_options)
else:

View file

@ -49,6 +49,8 @@ class Candidate:
CANDIDATE_EXACT = 1
CANDIDATE_DOWNSCALE = 2
CANDIDATE_DOWNSIZE = 3
CANDIDATE_DEINTERLACE = 4
CANDIDATE_REFORMAT = 5
MATCH_EXACT = 0
MATCH_FALLBACK = 1
@ -72,12 +74,15 @@ class Candidate:
Return `CANDIDATE_DOWNSCALE` if the file must be rescaled.
Return `CANDIDATE_DOWNSIZE` if the file must be resized, and possibly
also rescaled.
Return `CANDIDATE_DEINTERLACE` if the file must be deinterlaced.
Return `CANDIDATE_REFORMAT` if the file has to be converted.
"""
if not self.path:
return self.CANDIDATE_BAD
if (not (plugin.enforce_ratio or plugin.minwidth or plugin.maxwidth
or plugin.max_filesize)):
or plugin.max_filesize or plugin.deinterlace
or plugin.cover_format)):
return self.CANDIDATE_EXACT
# get_size returns None if no local imaging backend is available
@ -140,10 +145,23 @@ class Candidate:
filesize, plugin.max_filesize)
downsize = True
# Check image format
reformat = False
if plugin.cover_format:
fmt = ArtResizer.shared.get_format(self.path)
reformat = fmt != plugin.cover_format
if reformat:
self._log.debug('image needs reformatting: {} -> {}',
fmt, plugin.cover_format)
if downscale:
return self.CANDIDATE_DOWNSCALE
elif downsize:
return self.CANDIDATE_DOWNSIZE
elif plugin.deinterlace:
return self.CANDIDATE_DEINTERLACE
elif reformat:
return self.CANDIDATE_REFORMAT
else:
return self.CANDIDATE_EXACT
@ -163,6 +181,14 @@ class Candidate:
ArtResizer.shared.resize(max(self.size), self.path,
quality=plugin.quality,
max_filesize=plugin.max_filesize)
elif self.check == self.CANDIDATE_DEINTERLACE:
self.path = ArtResizer.shared.deinterlace(self.path)
elif self.check == self.CANDIDATE_REFORMAT:
self.path = ArtResizer.shared.reformat(
self.path,
plugin.cover_format,
deinterlaced=plugin.deinterlace,
)
def _logged_get(log, *args, **kwargs):
@ -336,8 +362,9 @@ class CoverArtArchive(RemoteArtSource):
GROUP_URL = 'https://coverartarchive.org/release-group/{mbid}'
def get(self, album, plugin, paths):
"""Return the Cover Art Archive and Cover Art Archive release group URLs
using album MusicBrainz release ID and release group ID.
"""Return the Cover Art Archive and Cover Art Archive release
group URLs using album MusicBrainz release ID and release group
ID.
"""
def get_image_urls(url, size_suffix=None):
@ -916,6 +943,8 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
'lastfm_key': None,
'store_source': False,
'high_resolution': False,
'deinterlace': False,
'cover_format': None,
})
self.config['google_key'].redact = True
self.config['fanarttv_key'].redact = True
@ -933,6 +962,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
confuse.String(pattern=self.PAT_PERCENT)]))
self.margin_px = None
self.margin_percent = None
self.deinterlace = self.config['deinterlace'].get(bool)
if type(self.enforce_ratio) is str:
if self.enforce_ratio[-1] == '%':
self.margin_percent = float(self.enforce_ratio[:-1]) / 100
@ -951,6 +981,10 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
self.src_removed = (config['import']['delete'].get(bool) or
config['import']['move'].get(bool))
self.cover_format = self.config['cover_format'].get(
confuse.Optional(str)
)
if self.config['auto']:
# Enable two import hooks when fetching is enabled.
self.import_stages = [self.fetch_art]

View file

@ -81,6 +81,12 @@ class FishPlugin(BeetsPlugin):
choices=library.Item.all_keys() +
library.Album.all_keys(),
help='include specified field *values* in completions')
cmd.parser.add_option(
'-o',
'--output',
default='~/.config/fish/completions/beet.fish',
help='where to save the script. default: '
'~/.config/fish/completions')
return [cmd]
def run(self, lib, opts, args):
@ -89,14 +95,13 @@ class FishPlugin(BeetsPlugin):
# If specified, also collect the values for these fields.
# Make a giant string of all the above, formatted in a way that
# allows Fish to do tab completion for the `beet` command.
home_dir = os.path.expanduser("~")
completion_dir = os.path.join(home_dir, '.config/fish/completions')
try:
os.makedirs(completion_dir)
except OSError:
if not os.path.isdir(completion_dir):
raise
completion_file_path = os.path.join(completion_dir, 'beet.fish')
completion_file_path = os.path.expanduser(opts.output)
completion_dir = os.path.dirname(completion_file_path)
if completion_dir != '':
os.makedirs(completion_dir, exist_ok=True)
nobasicfields = opts.noFields # Do not complete for album/track fields
extravalues = opts.extravalues # e.g., Also complete artists names
beetcmds = sorted(

View file

@ -1,5 +1,4 @@
# This file is part of beets.
# Copyright 2017, Tigran Kostandyan.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -12,124 +11,15 @@
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Upload files to Google Play Music and list songs in its library."""
import os.path
"""Deprecation warning for the removed gmusic plugin."""
from beets.plugins import BeetsPlugin
from beets import ui
from beets import config
from beets.ui import Subcommand
from gmusicapi import Musicmanager, Mobileclient
from gmusicapi.exceptions import NotLoggedIn
import gmusicapi.clients
class Gmusic(BeetsPlugin):
def __init__(self):
super().__init__()
self.m = Musicmanager()
# OAUTH_FILEPATH was moved in gmusicapi 12.0.0.
if hasattr(Musicmanager, 'OAUTH_FILEPATH'):
oauth_file = Musicmanager.OAUTH_FILEPATH
else:
oauth_file = gmusicapi.clients.OAUTH_FILEPATH
self.config.add({
'auto': False,
'uploader_id': '',
'uploader_name': '',
'device_id': '',
'oauth_file': oauth_file,
})
if self.config['auto']:
self.import_stages = [self.autoupload]
def commands(self):
gupload = Subcommand('gmusic-upload',
help='upload your tracks to Google Play Music')
gupload.func = self.upload
search = Subcommand('gmusic-songs',
help='list of songs in Google Play Music library')
search.parser.add_option('-t', '--track', dest='track',
action='store_true',
help='Search by track name')
search.parser.add_option('-a', '--artist', dest='artist',
action='store_true',
help='Search by artist')
search.func = self.search
return [gupload, search]
def authenticate(self):
if self.m.is_authenticated():
return
# Checks for OAuth2 credentials,
# if they don't exist - performs authorization
oauth_file = self.config['oauth_file'].as_filename()
if os.path.isfile(oauth_file):
uploader_id = self.config['uploader_id']
uploader_name = self.config['uploader_name']
self.m.login(oauth_credentials=oauth_file,
uploader_id=uploader_id.as_str().upper() or None,
uploader_name=uploader_name.as_str() or None)
else:
self.m.perform_oauth(oauth_file)
def upload(self, lib, opts, args):
items = lib.items(ui.decargs(args))
files = self.getpaths(items)
self.authenticate()
ui.print_('Uploading your files...')
self.m.upload(filepaths=files)
ui.print_('Your files were successfully added to library')
def autoupload(self, session, task):
items = task.imported_items()
files = self.getpaths(items)
self.authenticate()
self._log.info('Uploading files to Google Play Music...', files)
self.m.upload(filepaths=files)
self._log.info('Your files were successfully added to your '
+ 'Google Play Music library')
def getpaths(self, items):
return [x.path for x in items]
def search(self, lib, opts, args):
password = config['gmusic']['password']
email = config['gmusic']['email']
uploader_id = config['gmusic']['uploader_id']
device_id = config['gmusic']['device_id']
password.redact = True
email.redact = True
# Since Musicmanager doesn't support library management
# we need to use mobileclient interface
mobile = Mobileclient()
try:
new_device_id = (device_id.as_str()
or uploader_id.as_str().replace(':', '')
or Mobileclient.FROM_MAC_ADDRESS).upper()
mobile.login(email.as_str(), password.as_str(), new_device_id)
files = mobile.get_all_songs()
except NotLoggedIn:
ui.print_(
'Authentication error. Please check your email and password.'
)
return
if not args:
for i, file in enumerate(files, start=1):
print(i, ui.colorize('blue', file['artist']),
file['title'], ui.colorize('red', file['album']))
else:
if opts.track:
self.match(files, args, 'title')
else:
self.match(files, args, 'artist')
@staticmethod
def match(files, args, search_by):
for file in files:
if ' '.join(ui.decargs(args)) in file[search_by]:
print(file['artist'], file['title'], file['album'])
self._log.warning("The 'gmusic' plugin has been removed following the"
" shutdown of Google Play Music. Remove the plugin"
" from your configuration to silence this warning.")

View file

@ -34,6 +34,7 @@ class ImportAddedPlugin(BeetsPlugin):
register('item_copied', self.record_import_mtime)
register('item_linked', self.record_import_mtime)
register('item_hardlinked', self.record_import_mtime)
register('item_reflinked', self.record_import_mtime)
register('album_imported', self.update_album_times)
register('item_imported', self.update_item_times)
register('after_write', self.update_after_write_time)
@ -49,7 +50,8 @@ class ImportAddedPlugin(BeetsPlugin):
def record_if_inplace(self, task, session):
if not (session.config['copy'] or session.config['move'] or
session.config['link'] or session.config['hardlink']):
session.config['link'] or session.config['hardlink'] or
session.config['reflink']):
self._log.debug("In place import detected, recording mtimes from "
"source paths")
items = [task.item] \

View file

@ -25,7 +25,7 @@ from beets.library import Item
from beets.util import displayable_path, normpath, syspath
def tag_data(lib, args):
def tag_data(lib, args, album=False):
query = []
for arg in args:
path = normpath(arg)
@ -69,8 +69,8 @@ def tag_data_emitter(path):
return emitter
def library_data(lib, args):
for item in lib.items(args):
def library_data(lib, args, album=False):
    """Yield a data emitter for each album (when `album` is true) or
    item in the library matching `args`.
    """
    objs = lib.albums(args) if album else lib.items(args)
    for obj in objs:
        yield library_data_emitter(obj)
@ -156,6 +156,10 @@ class InfoPlugin(BeetsPlugin):
'-l', '--library', action='store_true',
help='show library fields instead of tags',
)
cmd.parser.add_option(
'-a', '--album', action='store_true',
help='show album fields instead of tracks (implies "--library")',
)
cmd.parser.add_option(
'-s', '--summarize', action='store_true',
help='summarize the tags of all files',
@ -186,7 +190,7 @@ class InfoPlugin(BeetsPlugin):
dictionary and only prints that. If two files have different values
for the same tag, the value is set to '[various]'
"""
if opts.library:
if opts.library or opts.album:
data_collector = library_data
else:
data_collector = tag_data
@ -199,7 +203,10 @@ class InfoPlugin(BeetsPlugin):
first = True
summary = {}
for data_emitter in data_collector(lib, ui.decargs(args)):
for data_emitter in data_collector(
lib, ui.decargs(args),
album=opts.album,
):
try:
data, item = data_emitter(included_keys or '*')
except (mediafile.UnreadableFileError, OSError) as ex:

View file

@ -54,11 +54,12 @@ class KodiUpdate(BeetsPlugin):
super().__init__()
# Adding defaults.
config['kodi'].add({
config['kodi'].add([{
'host': 'localhost',
'port': 8080,
'user': 'kodi',
'pwd': 'kodi'})
'pwd': 'kodi'
}])
config['kodi']['pwd'].redact = True
self.register_listener('database_change', self.listen_for_db_change)
@ -72,24 +73,34 @@ class KodiUpdate(BeetsPlugin):
"""
self._log.info('Requesting a Kodi library update...')
# Try to send update request.
try:
r = update_kodi(
config['kodi']['host'].get(),
config['kodi']['port'].get(),
config['kodi']['user'].get(),
config['kodi']['pwd'].get())
r.raise_for_status()
kodi = config['kodi'].get()
except requests.exceptions.RequestException as e:
self._log.warning('Kodi update failed: {0}',
str(e))
return
# Backwards compatibility in case not configured as an array
if not isinstance(kodi, list):
kodi = [kodi]
json = r.json()
if json.get('result') != 'OK':
self._log.warning('Kodi update failed: JSON response was {0!r}',
json)
return
for instance in kodi:
# Try to send update request.
try:
r = update_kodi(
instance['host'],
instance['port'],
instance['user'],
instance['pwd']
)
r.raise_for_status()
self._log.info('Kodi update triggered')
json = r.json()
if json.get('result') != 'OK':
self._log.warning(
'Kodi update failed: JSON response was {0!r}', json
)
continue
self._log.info(
'Kodi update triggered for {0}:{1}',
instance['host'], instance['port']
)
except requests.exceptions.RequestException as e:
self._log.warning('Kodi update failed: {0}', str(e))
continue

101
beetsplug/limit.py Normal file
View file

@ -0,0 +1,101 @@
# This file is part of beets.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Adds head/tail functionality to list/ls.
1. Implemented as `lslimit` command with `--head` and `--tail` options. This is
the idiomatic way to use this plugin.
2. Implemented as query prefix `<` for head functionality only. This is the
composable way to use the plugin (plays nicely with anything that uses the
query language).
"""
from beets.dbcore import FieldQuery
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand, decargs, print_
from collections import deque
from itertools import islice
def lslimit(lib, opts, args):
    """Query command with head/tail."""
    head, tail = opts.head, opts.tail

    # The two options are mutually exclusive, and whichever one is given
    # must be non-negative.
    if head is not None and tail is not None:
        raise ValueError("Only use one of --head and --tail")
    if (head or tail or 0) < 0:
        raise ValueError("Limit value must be non-negative")

    query = decargs(args)
    results = lib.albums(query) if opts.album else lib.items(query)

    # `islice` keeps the first `head` results; a bounded `deque` keeps the
    # last `tail` results.
    if head is not None:
        results = islice(results, head)
    elif tail is not None:
        results = deque(results, tail)

    for result in results:
        print_(format(result))
# The `lslimit` subcommand: like `beet ls` but with optional limits.
lslimit_cmd = Subcommand(
    "lslimit", help="query with optional head or tail")
lslimit_cmd.parser.add_option(
    "--head", action="store", type="int", default=None)
lslimit_cmd.parser.add_option(
    "--tail", action="store", type="int", default=None)
lslimit_cmd.parser.add_all_common_options()
lslimit_cmd.func = lslimit
class LimitPlugin(BeetsPlugin):
    """Query limit functionality via command and query prefix."""

    def commands(self):
        """Expose `lslimit` subcommand."""
        return [lslimit_cmd]

    def queries(self):
        # The query class is defined inside this method so that each call
        # to `queries()` produces a class with a fresh counter.
        # NOTE(review): the class-level `n`/`N` state persists for the
        # lifetime of the returned class — confirm it is re-created for
        # each independent query in a long-running process.
        class HeadQuery(FieldQuery):
            """This inner class pattern allows the query to track state.

            `n` counts matches seen so far; `N` is the limit, parsed from
            the query pattern on first use.
            """
            n = 0
            N = None

            @classmethod
            def value_match(cls, pattern, value):
                if cls.N is None:
                    cls.N = int(pattern)
                    if cls.N < 0:
                        raise ValueError("Limit value must be non-negative")
                # Every call counts one candidate; match only the first N.
                cls.n += 1
                return cls.n <= cls.N

        return {
            "<": HeadQuery
        }

View file

@ -377,7 +377,7 @@ class Genius(Backend):
data = {'q': title + " " + artist.lower()}
try:
response = requests.get(
search_url, data=data, headers=self.headers)
search_url, params=data, headers=self.headers)
except requests.RequestException as exc:
self._log.debug('Genius API request failed: {0}', exc)
return None
@ -387,6 +387,10 @@ class Genius(Backend):
except ValueError:
return None
def replace_br(self, lyrics_div):
for br in lyrics_div.find_all("br"):
br.replace_with("\n")
def _scrape_lyrics_from_html(self, html):
"""Scrape lyrics from a given genius.com html"""
@ -401,7 +405,12 @@ class Genius(Backend):
# all of the lyrics can be found already correctly formatted
# Sometimes, though, it packages the lyrics into separate divs, most
# likely for easier ad placement
lyrics_div = soup.find("div", class_="lyrics")
lyrics_div = soup.find("div", {"data-lyrics-container": True})
if lyrics_div:
self.replace_br(lyrics_div)
if not lyrics_div:
self._log.debug('Received unusual song page html')
verse_div = soup.find("div",
@ -417,13 +426,17 @@ class Genius(Backend):
return None
lyrics_div = verse_div.parent
for br in lyrics_div.find_all("br"):
br.replace_with("\n")
self.replace_br(lyrics_div)
ads = lyrics_div.find_all("div",
class_=re.compile("InreadAd__Container"))
for ad in ads:
ad.replace_with("\n")
footers = lyrics_div.find_all("div",
class_=re.compile("Lyrics__Footer"))
for footer in footers:
footer.replace_with("")
return lyrics_div.get_text()
@ -488,11 +501,11 @@ class Tekstowo(Backend):
if not soup:
return None
lyrics_div = soup.find("div", class_="song-text")
lyrics_div = soup.select("div.song-text > div.inner-text")
if not lyrics_div:
return None
return lyrics_div.get_text()
return lyrics_div[0].get_text()
def remove_credits(text):

View file

@ -22,7 +22,7 @@ import subprocess
import sys
import warnings
from multiprocessing.pool import ThreadPool, RUN
from six.moves import queue
import queue
from threading import Thread, Event
from beets import ui
@ -94,21 +94,138 @@ def lufs_to_db(db):
# gain: in LU to reference level
# peak: part of full scale (FS is 1.0)
Gain = collections.namedtuple("Gain", "gain peak")
# album_gain: Gain object
# track_gains: list of Gain objects
AlbumGain = collections.namedtuple("AlbumGain", "album_gain track_gains")
class Peak(enum.Enum):
none = 0
class PeakMethod(enum.Enum):
true = 1
sample = 2
class RgTask():
    """State and methods for a single replaygain calculation (rg version).

    Bundles the state (parameters and results) of a single replaygain
    calculation (either for one item, one disc, or one full album).

    This class provides methods to store the resulting gains and peaks as
    plain old rg tags.
    """

    def __init__(self, items, album, target_level, peak_method, backend_name,
                 log):
        # Parameters of the calculation; `items` are the tracks to analyse
        # and `album` is the enclosing album (or None for a single track).
        self.items = items
        self.album = album
        self.target_level = target_level
        self.peak_method = peak_method
        self.backend_name = backend_name
        self._log = log
        # Results, filled in later by a backend's compute_*_gain methods.
        self.album_gain = None
        self.track_gains = None

    def _store_track_gain(self, item, track_gain):
        """Store track gain for a single item in the database.
        """
        item.rg_track_gain = track_gain.gain
        item.rg_track_peak = track_gain.peak
        item.store()
        self._log.debug('applied track gain {0} LU, peak {1} of FS',
                        item.rg_track_gain, item.rg_track_peak)

    def _store_album_gain(self, item):
        """Store album gain for a single item in the database.

        The caller needs to ensure that `self.album_gain is not None`.
        """
        item.rg_album_gain = self.album_gain.gain
        item.rg_album_peak = self.album_gain.peak
        item.store()
        self._log.debug('applied album gain {0} LU, peak {1} of FS',
                        item.rg_album_gain, item.rg_album_peak)

    def _store_track(self, write):
        """Store track gain for the first track of the task in the database.

        Raises `ReplayGainError` when the backend did not produce exactly
        one track gain.
        """
        item = self.items[0]
        if self.track_gains is None or len(self.track_gains) != 1:
            # In some cases, backends fail to produce a valid
            # `track_gains` without throwing FatalReplayGainError
            # => raise non-fatal exception & continue
            raise ReplayGainError(
                "ReplayGain backend `{}` failed for track {}"
                .format(self.backend_name, item)
            )

        self._store_track_gain(item, self.track_gains[0])
        if write:
            item.try_write()
        self._log.debug('done analyzing {0}', item)

    def _store_album(self, write):
        """Store track/album gains for all tracks of the task in the database.

        Raises `ReplayGainError` when the backend did not produce an album
        gain plus one track gain per item.
        """
        if (self.album_gain is None or self.track_gains is None
                or len(self.track_gains) != len(self.items)):
            # In some cases, backends fail to produce a valid
            # `album_gain` without throwing FatalReplayGainError
            # => raise non-fatal exception & continue
            raise ReplayGainError(
                "ReplayGain backend `{}` failed "
                "for some tracks in album {}"
                .format(self.backend_name, self.album)
            )
        for item, track_gain in zip(self.items, self.track_gains):
            self._store_track_gain(item, track_gain)
            self._store_album_gain(item)
            if write:
                item.try_write()
            self._log.debug('done analyzing {0}', item)

    def store(self, write):
        """Store computed gains for the items of this task in the database.

        Dispatches to album or single-track storage depending on whether
        this task was created for an album.
        """
        if self.album is not None:
            self._store_album(write)
        else:
            self._store_track(write)
class R128Task(RgTask):
    """State and methods for a single replaygain calculation (r128 version).

    Bundles the state (parameters and results) of a single replaygain
    calculation (either for one item, one disc, or one full album).

    This class provides methods to store the resulting gains and peaks as
    R128 tags.
    """

    def __init__(self, items, album, target_level, backend_name, log):
        # R128_* tags do not store the track/album peak, so the peak
        # method is fixed to None.
        super().__init__(items, album, target_level, None, backend_name,
                         log)

    def _store_track_gain(self, item, track_gain):
        """Store the track gain in the item's R128 tag field."""
        item.r128_track_gain = track_gain.gain
        item.store()
        self._log.debug('applied r128 track gain {0} LU',
                        item.r128_track_gain)

    def _store_album_gain(self, item):
        """Store the album gain in the item's R128 tag field.

        The caller needs to ensure that `self.album_gain is not None`.
        """
        item.r128_album_gain = self.album_gain.gain
        item.store()
        self._log.debug('applied r128 album gain {0} LU',
                        item.r128_album_gain)
class Backend:
"""An abstract class representing engine for calculating RG values.
"""
NAME = ""
do_parallel = False
def __init__(self, config, log):
@ -117,15 +234,15 @@ class Backend:
"""
self._log = log
def compute_track_gain(self, items, target_level, peak):
"""Computes the track gain of the given tracks, returns a list
of Gain objects.
def compute_track_gain(self, task):
"""Computes the track gain for the tracks belonging to `task`, and sets
the `track_gains` attribute on the task. Returns `task`.
"""
raise NotImplementedError()
def compute_album_gain(self, items, target_level, peak):
"""Computes the album gain of the given album, returns an
AlbumGain object.
def compute_album_gain(self, task):
"""Computes the album gain for the album belonging to `task`, and sets
the `album_gain` attribute on the task. Returns `task`.
"""
raise NotImplementedError()
@ -135,6 +252,7 @@ class FfmpegBackend(Backend):
"""A replaygain backend using ffmpeg's ebur128 filter.
"""
NAME = "ffmpeg"
do_parallel = True
def __init__(self, config, log):
@ -165,27 +283,28 @@ class FfmpegBackend(Backend):
"the --enable-libebur128 configuration option is required."
)
def compute_track_gain(self, items, target_level, peak):
"""Computes the track gain of the given tracks, returns a list
of Gain objects (the track gains).
def compute_track_gain(self, task):
"""Computes the track gain for the tracks belonging to `task`, and sets
the `track_gains` attribute on the task. Returns `task`.
"""
gains = []
for item in items:
for item in task.items:
gains.append(
self._analyse_item(
item,
target_level,
peak,
task.target_level,
task.peak_method,
count_blocks=False,
)[0] # take only the gain, discarding number of gating blocks
)
return gains
task.track_gains = gains
return task
def compute_album_gain(self, items, target_level, peak):
"""Computes the album gain of the given album, returns an
AlbumGain object.
def compute_album_gain(self, task):
"""Computes the album gain for the album belonging to `task`, and sets
the `album_gain` attribute on the task. Returns `task`.
"""
target_level_lufs = db_to_lufs(target_level)
target_level_lufs = db_to_lufs(task.target_level)
# analyse tracks
# list of track Gain objects
@ -197,9 +316,9 @@ class FfmpegBackend(Backend):
# total number of BS.1770 gating blocks
n_blocks = 0
for item in items:
for item in task.items:
track_gain, track_n_blocks = self._analyse_item(
item, target_level, peak
item, task.target_level, task.peak_method
)
track_gains.append(track_gain)
@ -234,10 +353,12 @@ class FfmpegBackend(Backend):
self._log.debug(
"{}: gain {} LU, peak {}"
.format(items, album_gain, album_peak)
.format(task.items, album_gain, album_peak)
)
return AlbumGain(Gain(album_gain, album_peak), track_gains)
task.album_gain = Gain(album_gain, album_peak)
task.track_gains = track_gains
return task
def _construct_cmd(self, item, peak_method):
"""Construct the shell command to analyse items."""
@ -250,13 +371,15 @@ class FfmpegBackend(Backend):
"-map",
"a:0",
"-filter",
f"ebur128=peak={peak_method}",
"ebur128=peak={}".format(
"none" if peak_method is None else peak_method.name),
"-f",
"null",
"-",
]
def _analyse_item(self, item, target_level, peak, count_blocks=True):
def _analyse_item(self, item, target_level, peak_method,
count_blocks=True):
"""Analyse item. Return a pair of a Gain object and the number
of gating blocks above the threshold.
@ -264,7 +387,6 @@ class FfmpegBackend(Backend):
will be 0.
"""
target_level_lufs = db_to_lufs(target_level)
peak_method = peak.name
# call ffmpeg
self._log.debug(f"analyzing {item}")
@ -276,12 +398,13 @@ class FfmpegBackend(Backend):
# parse output
if peak == Peak.none:
if peak_method is None:
peak = 0
else:
line_peak = self._find_line(
output,
f" {peak_method.capitalize()} peak:".encode(),
# `peak_method` is non-`None` in this arm of the conditional
f" {peak_method.name.capitalize()} peak:".encode(),
start_line=len(output) - 1, step_size=-1,
)
peak = self._parse_float(
@ -379,6 +502,7 @@ class FfmpegBackend(Backend):
# mpgain/aacgain CLI tool backend.
class CommandBackend(Backend):
NAME = "command"
do_parallel = True
def __init__(self, config, log):
@ -412,28 +536,33 @@ class CommandBackend(Backend):
self.noclip = config['noclip'].get(bool)
def compute_track_gain(self, items, target_level, peak):
"""Computes the track gain of the given tracks, returns a list
of TrackGain objects.
def compute_track_gain(self, task):
"""Computes the track gain for the tracks belonging to `task`, and sets
the `track_gains` attribute on the task. Returns `task`.
"""
supported_items = list(filter(self.format_supported, items))
output = self.compute_gain(supported_items, target_level, False)
return output
supported_items = list(filter(self.format_supported, task.items))
output = self.compute_gain(supported_items, task.target_level, False)
task.track_gains = output
return task
def compute_album_gain(self, items, target_level, peak):
"""Computes the album gain of the given album, returns an
AlbumGain object.
def compute_album_gain(self, task):
"""Computes the album gain for the album belonging to `task`, and sets
the `album_gain` attribute on the task. Returns `task`.
"""
# TODO: What should be done when not all tracks in the album are
# supported?
supported_items = list(filter(self.format_supported, items))
if len(supported_items) != len(items):
supported_items = list(filter(self.format_supported, task.items))
if len(supported_items) != len(task.items):
self._log.debug('tracks are of unsupported format')
return AlbumGain(None, [])
task.album_gain = None
task.track_gains = None
return task
output = self.compute_gain(supported_items, target_level, True)
return AlbumGain(output[-1], output[:-1])
output = self.compute_gain(supported_items, task.target_level, True)
task.album_gain = output[-1]
task.track_gains = output[:-1]
return task
def format_supported(self, item):
"""Checks whether the given item is supported by the selected tool.
@ -508,6 +637,8 @@ class CommandBackend(Backend):
# GStreamer-based backend.
class GStreamerBackend(Backend):
NAME = "gstreamer"
def __init__(self, config, log):
super().__init__(config, log)
self._import_gst()
@ -612,21 +743,28 @@ class GStreamerBackend(Backend):
if self._error is not None:
raise self._error
def compute_track_gain(self, items, target_level, peak):
self.compute(items, target_level, False)
if len(self._file_tags) != len(items):
def compute_track_gain(self, task):
"""Computes the track gain for the tracks belonging to `task`, and sets
the `track_gains` attribute on the task. Returns `task`.
"""
self.compute(task.items, task.target_level, False)
if len(self._file_tags) != len(task.items):
raise ReplayGainError("Some tracks did not receive tags")
ret = []
for item in items:
for item in task.items:
ret.append(Gain(self._file_tags[item]["TRACK_GAIN"],
self._file_tags[item]["TRACK_PEAK"]))
return ret
task.track_gains = ret
return task
def compute_album_gain(self, items, target_level, peak):
items = list(items)
self.compute(items, target_level, True)
def compute_album_gain(self, task):
"""Computes the album gain for the album belonging to `task`, and sets
the `album_gain` attribute on the task. Returns `task`.
"""
items = list(task.items)
self.compute(items, task.target_level, True)
if len(self._file_tags) != len(items):
raise ReplayGainError("Some items in album did not receive tags")
@ -648,7 +786,9 @@ class GStreamerBackend(Backend):
except KeyError:
raise ReplayGainError("results missing for album")
return AlbumGain(Gain(gain, peak), track_gains)
task.album_gain = Gain(gain, peak)
task.track_gains = track_gains
return task
def close(self):
self._bus.remove_signal_watch()
@ -764,14 +904,14 @@ class GStreamerBackend(Backend):
def _on_pad_added(self, decbin, pad):
sink_pad = self._conv.get_compatible_pad(pad, None)
assert(sink_pad is not None)
assert sink_pad is not None
pad.link(sink_pad)
def _on_pad_removed(self, decbin, pad):
# Called when the decodebin element is disconnected from the
# rest of the pipeline while switching input files
peer = pad.get_peer()
assert(peer is None)
assert peer is None
class AudioToolsBackend(Backend):
@ -779,6 +919,7 @@ class AudioToolsBackend(Backend):
<http://audiotools.sourceforge.net/>`_ and its capabilities to read more
file formats and compute ReplayGain values using it replaygain module.
"""
NAME = "audiotools"
def __init__(self, config, log):
super().__init__(config, log)
@ -840,12 +981,14 @@ class AudioToolsBackend(Backend):
return
return rg
def compute_track_gain(self, items, target_level, peak):
"""Compute ReplayGain values for the requested items.
:return list: list of :class:`Gain` objects
def compute_track_gain(self, task):
"""Computes the track gain for the tracks belonging to `task`, and sets
the `track_gains` attribute on the task. Returns `task`.
"""
return [self._compute_track_gain(item, target_level) for item in items]
gains = [self._compute_track_gain(i, task.target_level)
for i in task.items]
task.track_gains = gains
return task
def _with_target_level(self, gain, target_level):
"""Return `gain` relative to `target_level`.
@ -890,23 +1033,22 @@ class AudioToolsBackend(Backend):
item.artist, item.title, rg_track_gain, rg_track_peak)
return Gain(gain=rg_track_gain, peak=rg_track_peak)
def compute_album_gain(self, items, target_level, peak):
"""Compute ReplayGain values for the requested album and its items.
:rtype: :class:`AlbumGain`
def compute_album_gain(self, task):
"""Computes the album gain for the album belonging to `task`, and sets
the `album_gain` attribute on the task. Returns `task`.
"""
# The first item is taken and opened to get the sample rate to
# initialize the replaygain object. The object is used for all the
# tracks in the album to get the album values.
item = list(items)[0]
item = list(task.items)[0]
audiofile = self.open_audio_file(item)
rg = self.init_replaygain(audiofile, item)
track_gains = []
for item in items:
for item in task.items:
audiofile = self.open_audio_file(item)
rg_track_gain, rg_track_peak = self._title_gain(
rg, audiofile, target_level
rg, audiofile, task.target_level
)
track_gains.append(
Gain(gain=rg_track_gain, peak=rg_track_peak)
@ -917,14 +1059,14 @@ class AudioToolsBackend(Backend):
# After getting the values for all tracks, it's possible to get the
# album values.
rg_album_gain, rg_album_peak = rg.album_gain()
rg_album_gain = self._with_target_level(rg_album_gain, target_level)
rg_album_gain = self._with_target_level(
rg_album_gain, task.target_level)
self._log.debug('ReplayGain for album {0}: {1:.2f}, {2:.2f}',
items[0].album, rg_album_gain, rg_album_peak)
task.items[0].album, rg_album_gain, rg_album_peak)
return AlbumGain(
Gain(gain=rg_album_gain, peak=rg_album_peak),
track_gains=track_gains
)
task.album_gain = Gain(gain=rg_album_gain, peak=rg_album_peak)
task.track_gains = track_gains
return task
class ExceptionWatcher(Thread):
@ -956,22 +1098,19 @@ class ExceptionWatcher(Thread):
# Main plugin logic.
BACKEND_CLASSES = [
CommandBackend,
GStreamerBackend,
AudioToolsBackend,
FfmpegBackend,
]
BACKENDS = {b.NAME: b for b in BACKEND_CLASSES}
class ReplayGainPlugin(BeetsPlugin):
"""Provides ReplayGain analysis.
"""
backends = {
"command": CommandBackend,
"gstreamer": GStreamerBackend,
"audiotools": AudioToolsBackend,
"ffmpeg": FfmpegBackend,
}
peak_methods = {
"true": Peak.true,
"sample": Peak.sample,
}
def __init__(self):
super().__init__()
@ -989,30 +1128,36 @@ class ReplayGainPlugin(BeetsPlugin):
'r128_targetlevel': lufs_to_db(-23),
})
self.overwrite = self.config['overwrite'].get(bool)
self.per_disc = self.config['per_disc'].get(bool)
# FIXME: Consider renaming the configuration option and deprecating the
# old name 'overwrite'.
self.force_on_import = self.config['overwrite'].get(bool)
# Remember which backend is used for CLI feedback
self.backend_name = self.config['backend'].as_str()
if self.backend_name not in self.backends:
if self.backend_name not in BACKENDS:
raise ui.UserError(
"Selected ReplayGain backend {} is not supported. "
"Please select one of: {}".format(
self.backend_name,
', '.join(self.backends.keys())
', '.join(BACKENDS.keys())
)
)
# FIXME: Consider renaming the configuration option to 'peak_method'
# and deprecating the old name 'peak'.
peak_method = self.config["peak"].as_str()
if peak_method not in self.peak_methods:
if peak_method not in PeakMethod.__members__:
raise ui.UserError(
"Selected ReplayGain peak method {} is not supported. "
"Please select one of: {}".format(
peak_method,
', '.join(self.peak_methods.keys())
', '.join(PeakMethod.__members__)
)
)
self._peak_method = self.peak_methods[peak_method]
# This only applies to plain old rg tags, r128 doesn't store peak
# values.
self.peak_method = PeakMethod[peak_method]
# On-import analysis.
if self.config['auto']:
@ -1024,7 +1169,7 @@ class ReplayGainPlugin(BeetsPlugin):
self.r128_whitelist = self.config['r128'].as_str_seq()
try:
self.backend_instance = self.backends[self.backend_name](
self.backend_instance = BACKENDS[self.backend_name](
self.config, self._log
)
except (ReplayGainError, FatalReplayGainError) as e:
@ -1037,70 +1182,66 @@ class ReplayGainPlugin(BeetsPlugin):
"""
return item.format in self.r128_whitelist
@staticmethod
def has_r128_track_data(item):
return item.r128_track_gain is not None
@staticmethod
def has_rg_track_data(item):
return (item.rg_track_gain is not None
and item.rg_track_peak is not None)
def track_requires_gain(self, item):
return self.overwrite or \
(self.should_use_r128(item) and not item.r128_track_gain) or \
(not self.should_use_r128(item) and
(not item.rg_track_gain or not item.rg_track_peak))
if self.should_use_r128(item):
if not self.has_r128_track_data(item):
return True
else:
if not self.has_rg_track_data(item):
return True
return False
@staticmethod
def has_r128_album_data(item):
return (item.r128_track_gain is not None
and item.r128_album_gain is not None)
@staticmethod
def has_rg_album_data(item):
return (item.rg_album_gain is not None
and item.rg_album_peak is not None)
def album_requires_gain(self, album):
# Skip calculating gain only when *all* files don't need
# recalculation. This way, if any file among an album's tracks
# needs recalculation, we still get an accurate album gain
# value.
return self.overwrite or \
any([self.should_use_r128(item) and
(not item.r128_track_gain or not item.r128_album_gain)
for item in album.items()]) or \
any([not self.should_use_r128(item) and
(not item.rg_album_gain or not item.rg_album_peak)
for item in album.items()])
for item in album.items():
if self.should_use_r128(item):
if not self.has_r128_album_data(item):
return True
else:
if not self.has_rg_album_data(item):
return True
def store_track_gain(self, item, track_gain):
item.rg_track_gain = track_gain.gain
item.rg_track_peak = track_gain.peak
item.store()
self._log.debug('applied track gain {0} LU, peak {1} of FS',
item.rg_track_gain, item.rg_track_peak)
return False
def store_album_gain(self, item, album_gain):
item.rg_album_gain = album_gain.gain
item.rg_album_peak = album_gain.peak
item.store()
self._log.debug('applied album gain {0} LU, peak {1} of FS',
item.rg_album_gain, item.rg_album_peak)
def store_track_r128_gain(self, item, track_gain):
item.r128_track_gain = track_gain.gain
item.store()
self._log.debug('applied r128 track gain {0} LU',
item.r128_track_gain)
def store_album_r128_gain(self, item, album_gain):
item.r128_album_gain = album_gain.gain
item.store()
self._log.debug('applied r128 album gain {0} LU',
item.r128_album_gain)
def tag_specific_values(self, items):
"""Return some tag specific values.
Returns a tuple (store_track_gain, store_album_gain, target_level,
peak_method).
"""
if any([self.should_use_r128(item) for item in items]):
store_track_gain = self.store_track_r128_gain
store_album_gain = self.store_album_r128_gain
target_level = self.config['r128_targetlevel'].as_number()
peak = Peak.none # R128_* tags do not store the track/album peak
def create_task(self, items, use_r128, album=None):
if use_r128:
return R128Task(
items, album,
self.config["r128_targetlevel"].as_number(),
self.backend_instance.NAME,
self._log,
)
else:
store_track_gain = self.store_track_gain
store_album_gain = self.store_album_gain
target_level = self.config['targetlevel'].as_number()
peak = self._peak_method
return store_track_gain, store_album_gain, target_level, peak
return RgTask(
items, album,
self.config["targetlevel"].as_number(),
self.peak_method,
self.backend_instance.NAME,
self._log,
)
def handle_album(self, album, write, force=False):
"""Compute album and track replay gain store it in all of the
@ -1114,8 +1255,9 @@ class ReplayGainPlugin(BeetsPlugin):
self._log.info('Skipping album {0}', album)
return
if (any([self.should_use_r128(item) for item in album.items()]) and not
all([self.should_use_r128(item) for item in album.items()])):
items_iter = iter(album.items())
use_r128 = self.should_use_r128(next(items_iter))
if any(use_r128 != self.should_use_r128(i) for i in items_iter):
self._log.error(
"Cannot calculate gain for album {0} (incompatible formats)",
album)
@ -1123,11 +1265,8 @@ class ReplayGainPlugin(BeetsPlugin):
self._log.info('analyzing {0}', album)
tag_vals = self.tag_specific_values(album.items())
store_track_gain, store_album_gain, target_level, peak = tag_vals
discs = {}
if self.per_disc:
if self.config['per_disc'].get(bool):
for item in album.items():
if discs.get(item.disc) is None:
discs[item.disc] = []
@ -1136,34 +1275,12 @@ class ReplayGainPlugin(BeetsPlugin):
discs[1] = album.items()
for discnumber, items in discs.items():
def _store_album(album_gain):
if not album_gain or not album_gain.album_gain \
or len(album_gain.track_gains) != len(items):
# In some cases, backends fail to produce a valid
# `album_gain` without throwing FatalReplayGainError
# => raise non-fatal exception & continue
raise ReplayGainError(
"ReplayGain backend `{}` failed "
"for some tracks in album {}"
.format(self.backend_name, album)
)
for item, track_gain in zip(items,
album_gain.track_gains):
store_track_gain(item, track_gain)
store_album_gain(item, album_gain.album_gain)
if write:
item.try_write()
self._log.debug('done analyzing {0}', item)
task = self.create_task(items, use_r128, album=album)
try:
self._apply(
self.backend_instance.compute_album_gain, args=(),
kwds={
"items": list(items),
"target_level": target_level,
"peak": peak
},
callback=_store_album
self.backend_instance.compute_album_gain,
args=[task], kwds={},
callback=lambda task: task.store(write)
)
except ReplayGainError as e:
self._log.info("ReplayGain error: {0}", e)
@ -1182,33 +1299,14 @@ class ReplayGainPlugin(BeetsPlugin):
self._log.info('Skipping track {0}', item)
return
tag_vals = self.tag_specific_values([item])
store_track_gain, store_album_gain, target_level, peak = tag_vals
def _store_track(track_gains):
if not track_gains or len(track_gains) != 1:
# In some cases, backends fail to produce a valid
# `track_gains` without throwing FatalReplayGainError
# => raise non-fatal exception & continue
raise ReplayGainError(
"ReplayGain backend `{}` failed for track {}"
.format(self.backend_name, item)
)
store_track_gain(item, track_gains[0])
if write:
item.try_write()
self._log.debug('done analyzing {0}', item)
use_r128 = self.should_use_r128(item)
task = self.create_task([item], use_r128)
try:
self._apply(
self.backend_instance.compute_track_gain, args=(),
kwds={
"items": [item],
"target_level": target_level,
"peak": peak,
},
callback=_store_track
self.backend_instance.compute_track_gain,
args=[task], kwds={},
callback=lambda task: task.store(write)
)
except ReplayGainError as e:
self._log.info("ReplayGain error: {0}", e)
@ -1308,9 +1406,9 @@ class ReplayGainPlugin(BeetsPlugin):
"""
if self.config['auto']:
if task.is_album:
self.handle_album(task.album, False)
self.handle_album(task.album, False, self.force_on_import)
else:
self.handle_track(task.item, False)
self.handle_track(task.item, False, self.force_on_import)
def command_func(self, lib, opts, args):
try:

View file

@ -1,5 +1,6 @@
# This file is part of beets.
# Copyright 2019, Rahul Ahuja.
# Copyright 2022, Alok Saboo.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -16,24 +17,50 @@
Spotify playlist construction.
"""
import re
import json
import base64
import webbrowser
import collections
import datetime
import json
import re
import time
import webbrowser
import unidecode
import requests
import confuse
import requests
import unidecode
from beets import ui
from beets.autotag.hooks import AlbumInfo, TrackInfo
from beets.plugins import MetadataSourcePlugin, BeetsPlugin
from beets.dbcore import types
from beets.library import DateType
from beets.plugins import BeetsPlugin, MetadataSourcePlugin
DEFAULT_WAITING_TIME = 5
class SpotifyAPIError(Exception):
pass
class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
data_source = 'Spotify'
item_types = {
'spotify_track_popularity': types.INTEGER,
'spotify_acousticness': types.FLOAT,
'spotify_danceability': types.FLOAT,
'spotify_energy': types.FLOAT,
'spotify_instrumentalness': types.FLOAT,
'spotify_key': types.FLOAT,
'spotify_liveness': types.FLOAT,
'spotify_loudness': types.FLOAT,
'spotify_mode': types.INTEGER,
'spotify_speechiness': types.FLOAT,
'spotify_tempo': types.FLOAT,
'spotify_time_signature': types.INTEGER,
'spotify_valence': types.FLOAT,
'spotify_updated': DateType(),
}
# Base URLs for the Spotify API
# Documentation: https://developer.spotify.com/web-api
oauth_token_url = 'https://accounts.spotify.com/api/token'
@ -41,6 +68,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
search_url = 'https://api.spotify.com/v1/search'
album_url = 'https://api.spotify.com/v1/albums/'
track_url = 'https://api.spotify.com/v1/tracks/'
audio_features_url = 'https://api.spotify.com/v1/audio-features/'
# Spotify IDs consist of 22 alphanumeric characters
# (zero-left-padded base62 representation of randomly generated UUID4)
@ -49,6 +77,21 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
'match_group': 2,
}
spotify_audio_features = {
'acousticness': 'spotify_acousticness',
'danceability': 'spotify_danceability',
'energy': 'spotify_energy',
'instrumentalness': 'spotify_instrumentalness',
'key': 'spotify_key',
'liveness': 'spotify_liveness',
'loudness': 'spotify_loudness',
'mode': 'spotify_mode',
'speechiness': 'spotify_speechiness',
'tempo': 'spotify_tempo',
'time_signature': 'spotify_time_signature',
'valence': 'spotify_valence',
}
def __init__(self):
super().__init__()
self.config.add(
@ -146,6 +189,17 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
)
self._authenticate()
return self._handle_response(request_type, url, params=params)
elif response.status_code == 429:
seconds = response.headers.get('Retry-After',
DEFAULT_WAITING_TIME)
self._log.debug('Too many API requests. Retrying after {} \
seconds.', seconds)
time.sleep(int(seconds) + 1)
return self._handle_response(request_type, url, params=params)
elif response.status_code == 404:
raise SpotifyAPIError("API Error: {}\nURL: {}\nparams: {}".
format(response.status_code, url,
params))
else:
raise ui.UserError(
'{} API error:\n{}\nURL:\n{}\nparams:\n{}'.format(
@ -194,9 +248,16 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
)
)
tracks_data = album_data['tracks']
tracks_items = tracks_data['items']
while tracks_data['next']:
tracks_data = self._handle_response(requests.get,
tracks_data['next'])
tracks_items.extend(tracks_data['items'])
tracks = []
medium_totals = collections.defaultdict(int)
for i, track_data in enumerate(album_data['tracks']['items'], start=1):
for i, track_data in enumerate(tracks_items, start=1):
track = self._get_track(track_data)
track.index = i
medium_totals[track.medium] += 1
@ -207,8 +268,10 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
return AlbumInfo(
album=album_data['name'],
album_id=spotify_id,
spotify_album_id=spotify_id,
artist=artist,
artist_id=artist_id,
spotify_artist_id=artist_id,
tracks=tracks,
albumtype=album_data['album_type'],
va=len(album_data['artists']) == 1
@ -232,11 +295,20 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
:rtype: beets.autotag.hooks.TrackInfo
"""
artist, artist_id = self.get_artist(track_data['artists'])
# Get album information for spotify tracks
try:
album = track_data['album']['name']
except KeyError:
album = None
return TrackInfo(
title=track_data['name'],
track_id=track_data['id'],
spotify_track_id=track_data['id'],
artist=artist,
album=album,
artist_id=artist_id,
spotify_artist_id=artist_id,
length=track_data['duration_ms'] / 1000,
index=track_data['track_number'],
medium=track_data['disc_number'],
@ -305,8 +377,8 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
return unidecode.unidecode(query)
def _search_api(self, query_type, filters=None, keywords=''):
"""Query the Spotify Search API for the specified ``keywords``, applying
the provided ``filters``.
"""Query the Spotify Search API for the specified ``keywords``,
applying the provided ``filters``.
:param query_type: Item type to search across. Valid types are:
'album', 'artist', 'playlist', and 'track'.
@ -327,15 +399,17 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
self._log.debug(
f"Searching {self.data_source} for '{query}'"
)
response_data = (
self._handle_response(
try:
response = self._handle_response(
requests.get,
self.search_url,
params={'q': query, 'type': query_type},
)
.get(query_type + 's', {})
.get('items', [])
)
except SpotifyAPIError as e:
self._log.debug('Spotify API error: {}', e)
return []
response_data = (response.get(query_type + 's', {})
.get('items', []))
self._log.debug(
"Found {} result(s) from {} for '{}'",
len(response_data),
@ -345,6 +419,7 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
return response_data
def commands(self):
# autotagger import command
def queries(lib, opts, args):
success = self._parse_opts(opts)
if success:
@ -371,7 +446,22 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
),
)
spotify_cmd.func = queries
return [spotify_cmd]
# spotifysync command
sync_cmd = ui.Subcommand('spotifysync',
help="fetch track attributes from Spotify")
sync_cmd.parser.add_option(
'-f', '--force', dest='force_refetch',
action='store_true', default=False,
help='re-download data when already present'
)
def func(lib, opts, args):
items = lib.items(ui.decargs(args))
self._fetch_info(items, ui.should_write(), opts.force_refetch)
sync_cmd.func = func
return [spotify_cmd, sync_cmd]
def _parse_opts(self, opts):
if opts.mode:
@ -525,3 +615,57 @@ class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
self._log.warning(
f'No {self.data_source} tracks found from beets query'
)
def _fetch_info(self, items, write, force):
    """Obtain track information from Spotify.

    For every item in *items*, fetch the track popularity and audio
    features from the Spotify API and store them as flexible fields.
    Items that already hold popularity data are skipped unless *force*
    is true; tags are written back to files when *write* is true.
    """
    total = len(items)
    self._log.debug('Total {} tracks', total)

    for position, item in enumerate(items, start=1):
        self._log.info('Processing {}/{} tracks - {} ',
                       position, total, item)

        # Unless re-downloading is forced, skip items that already
        # carry popularity data.
        if not force and 'spotify_track_popularity' in item:
            self._log.debug('Popularity already present for: {}',
                            item)
            continue

        try:
            track_id = item.spotify_track_id
        except AttributeError:
            self._log.debug('No track_id present for: {}', item)
            continue

        item['spotify_track_popularity'] = self.track_popularity(track_id)

        features = self.track_audio_features(track_id)
        if features is None:
            self._log.info('No audio features found for: {}', item)
            continue

        # Map the API's feature names onto beets field names; unknown
        # keys in the payload are ignored.
        for name, value in features.items():
            field = self.spotify_audio_features.get(name)
            if field is not None:
                item[field] = value

        item['spotify_updated'] = datetime.datetime.now()
        item.store()
        if write:
            item.try_write()
def track_popularity(self, track_id=None):
    """Return the Spotify popularity score for the track *track_id*.

    Queries the Spotify track endpoint and extracts the ``popularity``
    field from the returned JSON payload.
    """
    endpoint = self.track_url + track_id
    payload = self._handle_response(requests.get, endpoint)
    popularity = payload['popularity']
    self._log.debug('track_data: {}', popularity)
    return popularity
def track_audio_features(self, track_id=None):
    """Return the audio-features payload for *track_id*, or ``None``.

    A ``SpotifyAPIError`` (e.g. a 404 for a track without audio
    features) is logged and swallowed so callers can treat "no data"
    uniformly.
    """
    url = self.audio_features_url + track_id
    try:
        features = self._handle_response(requests.get, url)
    except SpotifyAPIError as exc:
        self._log.debug('Spotify API error: {}', exc)
        return None
    return features

View file

@ -22,7 +22,6 @@ Spec: standards.freedesktop.org/thumbnail-spec/latest/index.html
from hashlib import md5
import os
import shutil
from itertools import chain
from pathlib import PurePosixPath
import ctypes
import ctypes.util
@ -32,7 +31,7 @@ from xdg import BaseDirectory
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand, decargs
from beets import util
from beets.util.artresizer import ArtResizer, get_im_version, get_pil_version
from beets.util.artresizer import ArtResizer
BASE_DIR = os.path.join(BaseDirectory.xdg_cache_home, "thumbnails")
@ -49,7 +48,6 @@ class ThumbnailsPlugin(BeetsPlugin):
'dolphin': False,
})
self.write_metadata = None
if self.config['auto'] and self._check_local_ok():
self.register_listener('art_set', self.process_album)
@ -90,14 +88,12 @@ class ThumbnailsPlugin(BeetsPlugin):
if not os.path.exists(dir):
os.makedirs(dir)
if get_im_version():
self.write_metadata = write_metadata_im
tool = "IM"
else:
assert get_pil_version() # since we're local
self.write_metadata = write_metadata_pil
tool = "PIL"
self._log.debug("using {0} to write metadata", tool)
if not ArtResizer.shared.can_write_metadata:
raise RuntimeError(
f"Thumbnails: ArtResizer backend {ArtResizer.shared.method}"
f" unexpectedly cannot write image metadata."
)
self._log.debug(f"using {ArtResizer.shared.method} to write metadata")
uri_getter = GioURI()
if not uri_getter.available:
@ -171,7 +167,7 @@ class ThumbnailsPlugin(BeetsPlugin):
metadata = {"Thumb::URI": self.get_uri(album.artpath),
"Thumb::MTime": str(mtime)}
try:
self.write_metadata(image_path, metadata)
ArtResizer.shared.write_metadata(image_path, metadata)
except Exception:
self._log.exception("could not write metadata to {0}",
util.displayable_path(image_path))
@ -188,26 +184,6 @@ class ThumbnailsPlugin(BeetsPlugin):
self._log.debug("Wrote file {0}", util.displayable_path(outfilename))
def write_metadata_im(file, metadata):
"""Enrich the file metadata with `metadata` dict thanks to IM."""
command = ['convert', file] + \
list(chain.from_iterable(('-set', k, v)
for k, v in metadata.items())) + [file]
util.command_output(command)
return True
def write_metadata_pil(file, metadata):
"""Enrich the file metadata with `metadata` dict thanks to PIL."""
from PIL import Image, PngImagePlugin
im = Image.open(file)
meta = PngImagePlugin.PngInfo()
for k, v in metadata.items():
meta.add_text(k, v, 0)
im.save(file, "PNG", pnginfo=meta)
return True
class URIGetter:
available = False
name = "Abstract base"

View file

@ -32,7 +32,8 @@ class Unimported(BeetsPlugin):
super().__init__()
self.config.add(
{
'ignore_extensions': []
'ignore_extensions': [],
'ignore_subdirectories': []
}
)

View file

@ -261,7 +261,7 @@ class QueryConverter(PathConverter):
for query in queries]
def to_url(self, value):
return ','.join([v.replace(os.sep, '\\') for v in value])
return '/'.join([v.replace(os.sep, '\\') for v in value])
class EverythingConverter(PathConverter):
@ -324,7 +324,7 @@ def item_file(item_id):
response = flask.send_file(
item_path,
as_attachment=True,
attachment_filename=safe_filename
download_name=safe_filename
)
response.headers['Content-Length'] = os.path.getsize(item_path)
return response

View file

@ -1,9 +1,17 @@
# Don't post a comment on pull requests.
comment: off
# I think this disables commit statuses?
# Sets non-blocking status checks
# https://docs.codecov.com/docs/commit-status#informational
coverage:
status:
project: no
patch: no
changes: no
status:
project:
default:
informational: true
patch:
default:
informational: true
changes: no
github_checks:
annotations: false

View file

@ -1,34 +1,140 @@
Changelog
=========
1.5.1 (in development)
1.6.1 (in development)
----------------------
This release now requires Python 3.6 or later (it removes support for Python
2.7, 3.4, and 3.5).
Changelog goes here!
New features:
* Added `spotify_updated` field to track when the information was last updated.
* We now import and tag the `album` information when importing singletons using Spotify source.
:bug:`4398`
* :doc:`/plugins/spotify`: The plugin now provides an additional command
`spotifysync` that allows getting track popularity and audio features
information from Spotify.
:bug:`4094`
* :doc:`/plugins/spotify`: The plugin now records Spotify-specific IDs in the
`spotify_album_id`, `spotify_artist_id`, and `spotify_track_id` fields.
:bug:`4348`
* Create the parental directories for database if they do not exist.
:bug:`3808` :bug:`4327`
* :ref:`musicbrainz-config`: a new :ref:`musicbrainz.enabled` option allows disabling
the MusicBrainz metadata source during the autotagging process
* :doc:`/plugins/kodiupdate`: Now supports multiple kodi instances
:bug:`4101`
* Add the item fields ``bitrate_mode``, ``encoder_info`` and ``encoder_settings``.
* Add :ref:`exact match <exact-match>` queries, using the prefixes ``=`` and
``=~``.
:bug:`4251`
* :doc:`/plugins/discogs`: Permit appending style to genre
* :doc:`/plugins/convert`: Add a new `auto_keep` option that automatically
converts files but keeps the *originals* in the library.
:bug:`1840` :bug:`4302`
* Added a ``-P`` (or ``--disable-plugins``) flag to specify one/multiple plugin(s) to be
disabled at startup.
* :ref:`import-options`: Add support for re-running the importer on paths in
log files that were created with the ``-l`` (or ``--logfile``) argument.
:bug:`4379` :bug:`4387`
* Add :ref:`%sunique{} <sunique>` template to disambiguate between singletons.
:bug:`4438`
Bug fixes:
* We now respect the Spotify API's rate limiting, which avoids crashing when the API reports code 429 (too many requests).
:bug:`4370`
* Fix implicit paths OR queries (e.g. ``beet list /path/ , /other-path/``)
which have previously been returning the entire library.
:bug:`1865`
* The Discogs release ID is now populated correctly to the discogs_albumid
field again (it was no longer working after Discogs changed their release URL
format).
:bug:`4225`
* The autotagger no longer considers all matches without a MusicBrainz ID as
duplicates of each other.
:bug:`4299`
* :doc:`/plugins/convert`: Resize album art when embedding
:bug:`2116`
* :doc:`/plugins/deezer`: Fix auto tagger pagination issues (fetch beyond the
first 25 tracks of a release).
* :doc:`/plugins/spotify`: Fix auto tagger pagination issues (fetch beyond the
first 50 tracks of a release).
* :doc:`/plugins/lyrics`: Fix Genius search by using query params instead of body.
* :doc:`/plugins/unimported`: The new ``ignore_subdirectories`` configuration
option added in 1.6.0 now has a default value if it hasn't been set.
* :doc:`/plugins/deezer`: Tolerate missing fields when searching for singleton
tracks.
:bug:`4116`
* :doc:`/plugins/replaygain`: The type of the internal ``r128_track_gain`` and
``r128_album_gain`` fields was changed from integer to float to fix loss of
precision due to truncation.
:bug:`4169`
* Fix a regression in the previous release that caused a `TypeError` when
moving files across filesystems.
:bug:`4168`
* :doc:`/plugins/convert`: Deleting the original files during conversion no
longer logs output when the ``quiet`` flag is enabled.
* :doc:`plugins/web`: Fix handling of "query" requests. Previously queries
consisting of more than one token (separated by a slash) always returned an
empty result.
* :doc:`/plugins/discogs`: Skip Discogs query on insufficiently tagged files
(artist and album tags missing) to prevent arbitrary candidate results.
:bug:`4227`
* :doc:`plugins/lyrics`: Fixed issues with the Tekstowo.pl and Genius
backends where some non-lyrics content got included in the lyrics
* :doc:`plugins/limit`: Better header formatting to improve index
* :doc:`plugins/replaygain`: Correctly handle the ``overwrite`` config option,
which forces recomputing ReplayGain values on import even for tracks
that already have the tags.
* :doc:`plugins/embedart`: Fix a crash when using recent versions of
ImageMagick and the ``compare_threshold`` option.
:bug:`4272`
* :doc:`plugins/lyrics`: Fixed issue with Genius header being included in lyrics,
added test case of up-to-date Genius html
* :doc:`plugins/importadded`: Fix a bug with the recently added reflink import option
that causes a crash when the ImportAdded plugin is enabled.
:bug:`4389`
* :doc:`plugins/convert`: Fix a bug with the `wma` format alias.
* :doc:`/plugins/web`: Fix get file from item.
For packagers:
* As noted above, the minimum Python version is now 3.6.
* We fixed a flaky test, named `test_album_art` in the `test_zero.py` file,
that some distributions had disabled. Disabling this test should no longer
be necessary.
:bug:`4037` :bug:`4038`
* This version of beets no longer depends on the `six`_ library.
:bug:`4030`
* We fixed a version for the dependency on the `Confuse`_ library.
:bug:`4167`
* The minimum required version of :pypi:`mediafile` is now 0.9.0.
Other new things:
* :doc:`/plugins/limit`: Limit query results to head or tail (``lslimit``
command only)
* :doc:`/plugins/fish`: Add ``--output`` option.
1.6.0 (November 27, 2021)
-------------------------
This release is our first experiment with time-based releases! We are aiming
to publish a new release of beets every 3 months. We therefore have a healthy
but not dizzyingly long list of new features and fixes.
With this release, beets now requires Python 3.6 or later (it removes support
for Python 2.7, 3.4, and 3.5). There are also a few other dependency
changes---if you're a maintainer of a beets package for a package manager,
thank you for your ongoing efforts, and please see the list of notes below.
Major new features:
* Include the genre tags from the release group when the musicbrainz genre
option is set, and sort them by the number of votes. Thanks to
:user:`aereaux`.
* Primary and secondary release types from MusicBrainz are now stored in
``albumtypes`` field. Thanks to :user:`edgars-supe`.
* When fetching genres from MusicBrainz, we now include genres from the
release group (in addition to the release). We also prioritize genres based
on the number of votes.
Thanks to :user:`aereaux`.
* Primary and secondary release types from MusicBrainz are now stored in a new
``albumtypes`` field.
Thanks to :user:`edgars-supe`.
:bug:`2200`
* :doc:`/plugins/albumtypes`: An accompanying plugin for formatting
``albumtypes``. Thanks to :user:`edgars-supe`.
* An accompanying new :doc:`/plugins/albumtypes` includes some options for
formatting this new ``albumtypes`` field.
Thanks to :user:`edgars-supe`.
* The :ref:`modify-cmd` and :ref:`import-cmd` can now use
:doc:`/reference/pathformat` formats when setting fields.
@ -40,17 +146,68 @@ Other new things:
permission.
* :doc:`/plugins/unimported`: Support excluding specific
subdirectories in library.
* :doc:`/plugins/permissions`: The plugin now sets cover art permissions to
match the audio file permissions.
* :doc:`/plugins/unimported`: A new configuration option supports excluding
specific subdirectories in library.
* :doc:`/plugins/info`: Add support for an ``--album`` flag.
* :doc:`/plugins/export`: Similarly add support for an ``--album`` flag.
* ``beet move`` now highlights path differences in color (when enabled).
* When moving files and a direct rename of a file is not possible (for
example, when crossing filesystems), beets now copies to a temporary file in
the target folder first and then moves to the destination instead of
directly copying the target path. This gets us closer to always updating
files atomically.
Thanks to :user:`catap`.
:bug:`4060`
* :doc:`/plugins/fetchart`: Add a new option to store cover art as
non-progressive image. This is useful for DAPs that do not support
progressive images. Set ``deinterlace: yes`` in your configuration to enable
this conversion.
* :doc:`/plugins/fetchart`: Add a new option to change the file format of
cover art images. This may also be useful for DAPs that only support some
image formats.
* Support flexible attributes in ``%aunique``.
:bug:`2678` :bug:`3553`
* Make ``%aunique`` faster, especially when using inline fields.
:bug:`4145`
Bug fixes:
* :doc:`/plugins/lyrics`: Fix crash bug when beautifulsoup4 is not installed.
* :doc:`/plugins/lyrics`: Fix a crash when Beautiful Soup is not installed.
:bug:`4027`
* :doc:`/plugins/discogs`: Support a new Discogs URL format for IDs.
:bug:`4080`
* :doc:`/plugins/discogs`: Remove built-in rate-limiting because the Discogs
Python library we use now has its own rate-limiting.
:bug:`4108`
* :doc:`/plugins/export`: Fix some duplicated output.
* :doc:`/plugins/aura`: Fix a potential security hole when serving image
files.
:bug:`4160`
* :doc:`/plugins/discogs`: Adapt regex to new URL format.
:bug:`4080`
For plugin developers:
* :py:meth:`beets.library.Item.destination` now accepts a `replacements`
argument to be used in favor of the default.
* The `pluginload` event is now sent after plugin types and queries are
available, not before.
* A new plugin event, `album_removed`, is called when an album is removed from
the library (even when its file is not deleted from disk).
Here are some notes for packagers:
* As noted above, the minimum Python version is now 3.6.
* We fixed a flaky test, named `test_album_art` in the `test_zero.py` file,
that some distributions had disabled. Disabling this test should no longer
be necessary.
:bug:`4037` :bug:`4038`
* This version of beets no longer depends on the `six`_ library.
:bug:`4030`
* The `gmusic` plugin was removed since Google Play Music has been shut down.
Thus, the optional dependency on `gmusicapi` does not exist anymore.
:bug:`4089`
* :doc:`/plugins/discogs`: Remove requests rate-limit code from the plugin in favor of the Discogs library's built-in capability.
:bug:`4108`
1.5.0 (August 19, 2021)
-----------------------

View file

@ -11,8 +11,8 @@ master_doc = 'index'
project = 'beets'
copyright = '2016, Adrian Sampson'
version = '1.5'
release = '1.5.1'
version = '1.6'
release = '1.6.1'
pygments_style = 'sphinx'

View file

@ -36,7 +36,8 @@ found therein. Here's a skeleton of a plugin file::
Once you have your ``BeetsPlugin`` subclass, there's a variety of things your
plugin can do. (Read on!)
To use your new plugin, make sure your ``beetsplug`` directory is in the Python
To use your new plugin, make sure the directory that contains your
``beetsplug`` directory is in the Python
path (using ``PYTHONPATH`` or by installing in a `virtualenv`_, for example).
Then, as described above, edit your ``config.yaml`` to include
``plugins: myawesomeplugin`` (substituting the name of the Python module
@ -143,6 +144,9 @@ The events currently available are:
command finishes adding an album to the library. Parameters: ``lib``,
``album``
* `album_removed`: called with an ``Album`` object every time an album is
removed from the library (even when its file is not deleted from disk).
* `item_copied`: called with an ``Item`` object whenever its file is copied.
Parameters: ``item``, ``source`` path, ``destination`` path

View file

@ -46,7 +46,7 @@ Beets works on Python 3.6 or later.
* On **NixOS**, there's a `package <NixOS_>`_ you can install with ``nix-env -i beets``.
.. _DNF package: https://apps.fedoraproject.org/packages/beets
.. _DNF package: https://packages.fedoraproject.org/pkgs/beets/
.. _SlackBuild: https://slackbuilds.org/repository/14.2/multimedia/beets/
.. _FreeBSD: http://portsmon.freebsd.org/portoverview.py?category=audio&portname=beets
.. _AUR: https://aur.archlinux.org/packages/beets-git/
@ -94,16 +94,18 @@ Installing on Windows
Installing beets on Windows can be tricky. Following these steps might help you
get it right:
1. If you don't have it, `install Python`_ (you want Python 3.6). The
1. If you don't have it, `install Python`_ (you want at least Python 3.6). The
installer should give you the option to "add Python to PATH." Check this
box. If you do that, you can skip the next step.
2. If you haven't done so already, set your ``PATH`` environment variable to
include Python and its scripts. To do so, you have to get the "Properties"
window for "My Computer", then choose the "Advanced" tab, then hit the
"Environment Variables" button, and then look for the ``PATH`` variable in
the table. Add the following to the end of the variable's value:
``;C:\Python36;C:\Python36\Scripts``. You may need to adjust these paths to
include Python and its scripts. To do so, open the "Settings" application,
then access the "System" screen, then access the "About" tab, and then hit
"Advanced system settings" located on the right side of the screen. This
should open the "System Properties" screen, then select the "Advanced" tab,
then hit the "Environmental Variables..." button, and then look for the PATH
variable in the table. Add the following to the end of the variable's value:
``;C:\Python36;C:\Python36\Scripts``. You may need to adjust these paths to
point to your Python installation.
3. Now install beets by running: ``pip install beets``

View file

@ -80,6 +80,8 @@ all of these limitations.
Now that that's out of the way, let's tag some music.
.. _import-options:
Options
-------
@ -101,7 +103,8 @@ command-line options you should know:
* ``beet import -l LOGFILE``: write a message to ``LOGFILE`` every time you skip
an album or choose to take its tags "as-is" (see below) or the album is
skipped as a duplicate; this lets you come back later and reexamine albums
that weren't tagged successfully
that weren't tagged successfully. Run ``beet import --from-logfile=LOGFILE`` to
rerun the importer on such paths from the logfile.
* ``beet import -q``: quiet mode. Never prompt for input and, instead,
conservatively skip any albums that need your opinion. The ``-ql`` combination

View file

@ -12,7 +12,7 @@ Then you can get a more detailed look at beets' features in the
:doc:`/reference/cli/` and :doc:`/reference/config` references. You might also
be interested in exploring the :doc:`plugins </plugins/index>`.
If you still need help, your can drop by the ``#beets`` IRC channel on
If you still need help, you can drop by the ``#beets`` IRC channel on
Libera.Chat, drop by `the discussion board`_, send email to
`the mailing list`_, or `file a bug`_ in the issue tracker. Please let us know
where you think this documentation can be improved.

View file

@ -32,7 +32,7 @@ from MusicBrainz and other sources.
If you have a Beatport ID or a URL for a release or track you want to tag, you
can just enter one of the two at the "enter Id" prompt in the importer. You can
also search for an id like so:
also search for an id like so::
beet import path/to/music/library --search-id id

View file

@ -66,13 +66,23 @@ file. The available options are:
default configuration) non-MP3 files over the maximum bitrate before adding
them to your library.
Default: ``no``.
- **auto_keep**: Convert your files automatically on import to **dest** but
import the non transcoded version. It uses the default format you have
defined in your config file.
Default: ``no``.
.. note:: You probably want to use only one of the `auto` and `auto_keep`
options, not both. Enabling both will convert your files twice on import,
which you probably don't want.
- **tmpdir**: The directory where temporary files will be stored during import.
Default: none (system default),
- **copy_album_art**: Copy album art when copying or transcoding albums matched
using the ``-a`` option. Default: ``no``.
- **album_art_maxwidth**: Downscale album art if it's too big. The resize
operation reduces image width to at most ``maxwidth`` pixels while
preserving the aspect ratio.
preserving the aspect ratio. The specified image size will apply to both
embedded album art and external image files.
- **dest**: The directory where the files will be converted (or copied) to.
Default: none.
- **embed**: Embed album art in converted items. Default: ``yes``.
@ -155,7 +165,7 @@ command to use to transcode audio. The tokens ``$source`` and ``$dest`` in the
command are replaced with the paths to the existing and new file.
The plugin in comes with default commands for the most common audio
formats: `mp3`, `alac`, `flac`, `aac`, `opus`, `ogg`, `wmv`. For
formats: `mp3`, `alac`, `flac`, `aac`, `opus`, `ogg`, `wma`. For
details have a look at the output of ``beet config -d``.
For a one-command-fits-all solution use the ``convert.command`` and

View file

@ -19,7 +19,8 @@ authentication credentials via a personal access token or an OAuth2
authorization.
Matches from Discogs will now show up during import alongside matches from
MusicBrainz.
MusicBrainz. The search terms sent to the Discogs API are based on the artist
and album tags of your tracks. If those are empty no query will be issued.
If you have a Discogs ID for an album you want to tag, you can also enter it
at the "enter Id" prompt in the importer.
@ -50,7 +51,7 @@ This plugin can be configured like other metadata source plugins as described in
There is one additional option in the ``discogs:`` section, ``index_tracks``.
Index tracks (see the `Discogs guidelines
<https://support.discogs.com/hc/en-us/articles/360005055373-Database-Guidelines-12-Tracklisting#12.13>`_),
<https://support.discogs.com/hc/en-us/articles/360005055373-Database-Guidelines-12-Tracklisting#Index_Tracks_And_Headings>`_),
along with headers, mark divisions between distinct works on the same release
or within works. When ``index_tracks`` is enabled::
@ -75,6 +76,15 @@ whereas with ``index_tracks`` disabled you'd get::
This option is useful when importing classical music.
Other configurations available under ``discogs:`` are:
- **append_style_genre**: Appends the Discogs style (if found) to the genre tag. This can be useful if you want more granular genres to categorize your music.
For example, a release in Discogs might have a genre of "Electronic" and a style of "Techno": enabling this setting would set the genre to be "Electronic, Techno" (assuming default separator of ``", "``) instead of just "Electronic".
Default: ``false``
- **separator**: How to join multiple genre and style values from Discogs into a string.
Default: ``", "``
Troubleshooting
---------------

View file

@ -34,6 +34,9 @@ The ``export`` command has these command-line options:
* ``--library`` or ``-l``: Show data from the library database instead of the
files' tags.
* ``--album`` or ``-a``: Show data from albums instead of tracks (implies
``--library``).
* ``--output`` or ``-o``: Path for an output file. If not informed, will print
the data in the console.

View file

@ -86,6 +86,14 @@ file. The available options are:
- **high_resolution**: If enabled, fetchart retrieves artwork in the highest
resolution it can find (warning: image files can sometimes reach >20MB).
Default: ``no``.
- **deinterlace**: If enabled, `Pillow`_ or `ImageMagick`_ backends are
instructed to store cover art as non-progressive JPEG. You might need this if
you use DAPs that don't support progressive images.
Default: ``no``.
- **cover_format**: If enabled, forced the cover image into the specified
format. Most often, this will be either ``JPEG`` or ``PNG`` [#imgformats]_.
Also respects ``deinterlace``.
Default: None (leave unchanged).
Note: ``maxwidth`` and ``enforce_ratio`` options require either `ImageMagick`_
or `Pillow`_.
@ -101,6 +109,12 @@ or `Pillow`_.
.. _beets custom search engine: https://cse.google.com.au:443/cse/publicurl?cx=001442825323518660753:hrh5ch1gjzm
.. _Pillow: https://github.com/python-pillow/Pillow
.. _ImageMagick: https://www.imagemagick.org/
.. [#imgformats] Other image formats are available, though the full list
depends on your system and what backend you are using. If you're using the
ImageMagick backend, you can use ``magick identify -list format`` to get a
full list of all supported formats, and you can use the Python function
PIL.features.pilinfo() to print a list of all supported formats in Pillow
(``python3 -c 'import PIL.features as f; f.pilinfo()'``).
Here's an example that makes plugin select only images that contain ``front`` or
``back`` keywords in their filenames and prioritizes the iTunes source over
@ -226,10 +240,10 @@ in your configuration.
.. _registering a personal fanart.tv API key: https://fanart.tv/get-an-api-key/
More detailed information can be found `on their blog`_. Specifically, the
More detailed information can be found `on their Wiki`_. Specifically, the
personal key will give you earlier access to new art.
.. _on their blog: https://fanart.tv/2015/01/personal-api-keys/
.. _on their Wiki: https://wiki.fanart.tv/General/personal%20api/
Last.fm
'''''''

View file

@ -50,3 +50,8 @@ with care when specified fields contain a large number of values. Libraries with
for example, very large numbers of genres/artists may result in higher memory
utilization, completion latency, et cetera. This option is not meant to replace
database queries altogether.
By default, the completion file will be generated at
``~/.config/fish/completions/``.
If you want to save it somewhere else, you can use the ``-o`` or ``--output``
option.

View file

@ -1,87 +1,5 @@
Gmusic Plugin
=============
The ``gmusic`` plugin lets you upload songs to Google Play Music and query
songs in your library.
Installation
------------
The plugin requires :pypi:`gmusicapi`. You can install it using ``pip``::
pip install gmusicapi
.. _gmusicapi: https://github.com/simon-weber/gmusicapi/
Then, you can enable the ``gmusic`` plugin in your configuration (see
:ref:`using-plugins`).
Usage
-----
Configuration is required before use. Below is an example configuration::
gmusic:
email: user@example.com
password: seekrit
auto: yes
uploader_id: 00:11:22:33:AA:BB
device_id: 00112233AABB
oauth_file: ~/.config/beets/oauth.cred
To upload tracks to Google Play Music, use the ``gmusic-upload`` command::
beet gmusic-upload [QUERY]
If you don't include a query, the plugin will upload your entire collection.
To list your music collection, use the ``gmusic-songs`` command::
beet gmusic-songs [-at] [ARGS]
Use the ``-a`` option to search by artist and ``-t`` to search by track. For
example::
beet gmusic-songs -a John Frusciante
beet gmusic-songs -t Black Hole Sun
For a list of all songs in your library, run ``beet gmusic-songs`` without any
arguments.
Configuration
-------------
To configure the plugin, make a ``gmusic:`` section in your configuration file.
The available options are:
- **email**: Your Google account email address.
Default: none.
- **password**: Password to your Google account. Required to query songs in
your collection.
For accounts with 2-step-verification, an
`app password <https://support.google.com/accounts/answer/185833?hl=en>`__
will need to be generated. An app password for an account without
2-step-verification is not required but is recommended.
Default: none.
- **auto**: Set to ``yes`` to automatically upload new imports to Google Play
Music.
Default: ``no``
- **uploader_id**: Unique id as a MAC address, eg ``00:11:22:33:AA:BB``.
This option should be set before the maximum number of authorized devices is
reached.
If provided, use the same id for all future runs on this, and other, beets
installations as to not reach the maximum number of authorized devices.
Default: device's MAC address.
- **device_id**: Unique device ID for authorized devices. It is usually
the same as your MAC address with the colons removed, eg ``00112233AABB``.
This option only needs to be set if you receive an `InvalidDeviceId`
exception. Below the exception will be a list of valid device IDs.
Default: none.
- **oauth_file**: Filepath for oauth credentials file.
Default: `{user_data_dir} <https://pypi.org/project/appdirs/>`__/gmusicapi/oauth.cred
Refer to the `Google Play Music Help
<https://support.google.com/googleplaymusic/answer/3139562?hl=en>`__
page for more details on authorized devices.
The ``gmusic`` plugin interfaced beets to Google Play Music. It was
removed after that service was shut down.

View file

@ -98,6 +98,7 @@ following to your configuration::
kodiupdate
lastgenre
lastimport
limit
loadext
lyrics
mbcollection
@ -231,7 +232,6 @@ Miscellaneous
* :doc:`filefilter`: Automatically skip files during the import process based
on regular expressions.
* :doc:`fuzzy`: Search albums and tracks with fuzzy string matching.
* :doc:`gmusic`: Search and upload files to Google Play Music.
* :doc:`hook`: Run a command when an event is emitted by beets.
* :doc:`ihate`: Automatically skip albums and tracks during the import process.
* :doc:`info`: Print music files' tags to the console.
@ -279,11 +279,16 @@ Here are a few of the plugins written by the beets community:
* `beets-autofix`_ automates repetitive tasks to keep your library in order.
* `beets-audible`_ adds Audible as a tagger data source and provides
other features for managing audiobook collections.
* `beets-barcode`_ lets you scan or enter barcodes for physical media to
search for their metadata.
* `beetcamp`_ enables **bandcamp.com** autotagger with a fairly extensive amount of metadata.
* `beetstream`_ is a server implementation of the `SubSonic API`_ specification, allowing you to stream your music on a multitude of clients.
* `beets-bpmanalyser`_ analyses songs and calculates their tempo (BPM).
* `beets-check`_ automatically checksums your files to detect corruption.
@ -338,6 +343,8 @@ Here are a few of the plugins written by the beets community:
.. _beets-barcode: https://github.com/8h2a/beets-barcode
.. _beetcamp: https://github.com/snejus/beetcamp
.. _beetstream: https://github.com/BinaryBrain/Beetstream
.. _SubSonic API: http://www.subsonic.org/pages/api.jsp
.. _beets-check: https://github.com/geigerzaehler/beets-check
.. _beets-copyartifacts: https://github.com/adammillerio/beets-copyartifacts
.. _dsedivec: https://github.com/dsedivec/beets-plugins
@ -370,3 +377,4 @@ Here are a few of the plugins written by the beets community:
.. _beets-bpmanalyser: https://github.com/adamjakab/BeetsPluginBpmAnalyser
.. _beets-originquery: https://github.com/x1ppy/beets-originquery
.. _drop2beets: https://github.com/martinkirch/drop2beets
.. _beets-audible: https://github.com/Neurrone/beets-audible

View file

@ -31,6 +31,8 @@ Additional command-line options include:
* ``--library`` or ``-l``: Show data from the library database instead of the
files' tags.
* ``--album`` or ``-a``: Show data from albums instead of tracks (implies
``--library``).
* ``--summarize`` or ``-s``: Merge all the information from multiple files
into a single list of values. If the tags differ across the files, print
``[various]``.

View file

@ -16,6 +16,19 @@ which looks like this::
user: kodi
pwd: kodi
To update multiple Kodi instances, specify them as an array::
kodi:
- host: x.x.x.x
port: 8080
user: kodi
pwd: kodi
- host: y.y.y.y
port: 8081
user: kodi2
pwd: kodi2
To use the ``kodiupdate`` plugin you need to install the `requests`_ library with::
pip install requests

58
docs/plugins/limit.rst Normal file
View file

@ -0,0 +1,58 @@
Limit Query Plugin
==================
``limit`` is a plugin to limit a query to the first or last set of
results. We also provide a query prefix ``'<n'`` to inline the same
behavior in the ``list`` command. They are analogous to piping results:
$ beet [list|ls] [QUERY] | [head|tail] -n n
There are two provided interfaces:
1. ``beet lslimit [--head n | --tail n] [QUERY]`` returns the head or
tail of a query
2. ``beet [list|ls] [QUERY] '<n'`` returns the head of a query
There are two differences in behavior:
1. The query prefix does not support tail.
2. The query prefix could appear anywhere in the query but will only
have the same behavior as the ``lslimit`` command and piping to ``head``
when it appears last.
Performance for the query prefix is much worse due to the current
singleton-based implementation.
So why does the query prefix exist? Because it composes with any other
query-based API or plugin (see :doc:`/reference/query`). For example,
you can use the query prefix in ``smartplaylist``
(see :doc:`/plugins/smartplaylist`) to limit the number of tracks in a smart
playlist for applications like most played and recently added.
Configuration
-------------
Enable the ``limit`` plugin in your configuration (see
:ref:`using-plugins`).
Examples
--------
First 10 tracks
$ beet ls | head -n 10
$ beet lslimit --head 10
$ beet ls '<10'
Last 10 tracks
$ beet ls | tail -n 10
$ beet lslimit --tail 10
100 most recently released tracks
$ beet lslimit --head 100 year- month- day-
$ beet ls year- month- day- '<100'
$ beet lslimit --tail 100 year+ month+ day+

View file

@ -112,7 +112,10 @@ configuration file. The available options are:
- **backend**: The analysis backend; either ``gstreamer``, ``command``, ``audiotools``
or ``ffmpeg``.
Default: ``command``.
- **overwrite**: Re-analyze files that already have ReplayGain tags.
- **overwrite**: On import, re-analyze files that already have ReplayGain tags.
Note that, for historical reasons, the name of this option is somewhat
unfortunate: It does not decide whether tags are written to the files (which
is controlled by the :ref:`import.write <config-import-write>` option).
Default: ``no``.
- **targetlevel**: A number of decibels for the target loudness level for files
using ``REPLAYGAIN_`` tags.

View file

@ -8,9 +8,9 @@ Also, the plugin can use the Spotify `Album`_ and `Track`_ APIs to provide
metadata matches for the importer.
.. _Spotify: https://www.spotify.com/
.. _Spotify Search API: https://developer.spotify.com/documentation/web-api/reference/#category-search
.. _Album: https://developer.spotify.com/documentation/web-api/reference/#endpoint-get-an-album
.. _Track: https://developer.spotify.com/documentation/web-api/reference/#endpoint-get-track
.. _Spotify Search API: https://developer.spotify.com/documentation/web-api/reference/#/operations/search
.. _Album: https://developer.spotify.com/documentation/web-api/reference/#/operations/get-an-album
.. _Track: https://developer.spotify.com/documentation/web-api/reference/#/operations/get-track
Why Use This Plugin?
--------------------
@ -19,6 +19,7 @@ Why Use This Plugin?
* You have playlists or albums you'd like to make available in Spotify from Beets without having to search for each artist/album/track.
* You want to check which tracks in your library are available on Spotify.
* You want to autotag music with metadata from the Spotify API.
* You want to obtain track popularity and audio features (e.g., danceability)
Basic Usage
-----------
@ -58,7 +59,7 @@ configuration options are provided.
The default options should work as-is, but there are some options you can put
in config.yaml under the ``spotify:`` section:
- **mode**: One of the following:
- **mode**: One of the following:
- ``list``: Print out the playlist as a list of links. This list can then
be pasted in to a new or existing Spotify playlist.
@ -105,3 +106,40 @@ Here's an example::
}
]
Obtaining Track Popularity and Audio Features from Spotify
----------------------------------------------------------
Spotify provides information on track `popularity`_ and audio `features`_ that
can be used for music discovery.
.. _popularity: https://developer.spotify.com/documentation/web-api/reference/#/operations/get-track
.. _features: https://developer.spotify.com/documentation/web-api/reference/#/operations/get-audio-features
The ``spotify`` plugin provides an additional command ``spotifysync`` to obtain
these track attributes from Spotify:
* ``beet spotifysync [-f]``: obtain popularity and audio features information
for every track in the library. By default, ``spotifysync`` will skip tracks
that already have this information populated. Using the ``-f`` or ``--force``
option will download the data even for tracks that already have it. Please
note that ``spotifysync`` works on tracks that have the Spotify track
identifiers. So run ``spotifysync`` only after importing your music, during
which Spotify identifiers will be added for tracks where Spotify is chosen as
the tag source.
In addition to ``popularity``, the command currently sets these audio features
for all tracks with a Spotify track ID:
* ``acousticness``
* ``danceability``
* ``energy``
* ``instrumentalness``
* ``key``
* ``liveness``
* ``loudness``
* ``mode``
* ``speechiness``
* ``tempo``
* ``time_signature``
* ``valence``

View file

@ -86,7 +86,9 @@ Optional command flags:
that weren't tagged successfully---either because they're not in the
MusicBrainz database or because something's wrong with the files. Use the
``-l`` option to specify a filename to log every time you skip an album
or import it "as-is" or an album gets skipped as a duplicate.
or import it "as-is" or an album gets skipped as a duplicate. You can later
review the file manually or import skipped paths from the logfile
automatically by using the ``--from-logfile LOGFILE`` argument.
* Relatedly, the ``-q`` (quiet) option can help with large imports by
autotagging without ever bothering to ask for user input. Whenever the
@ -268,11 +270,11 @@ field name with an exclamation point: ``field!``.
Values support the same template syntax as beets'
:doc:`path formats <pathformat>`.
The ``-a`` switch operates on albums instead of individual tracks. Without
this flag, the command will only change *track-level* data, even if all the
tracks belong to the same album. If you want to change an *album-level* field,
such as ``year`` or ``albumartist``, you'll want to use the ``-a`` flag to
avoid a confusing situation where the data for individual tracks conflicts
The ``-a`` switch also operates on albums in addition to the individual tracks.
Without this flag, the command will only change *track-level* data, even if all
the tracks belong to the same album. If you want to change an *album-level*
field, such as ``year`` or ``albumartist``, you'll want to use the ``-a`` flag
to avoid a confusing situation where the data for individual tracks conflicts
with the data for the whole album.
Items will automatically be moved around when necessary if they're in your
@ -450,6 +452,9 @@ import ...``.
specified, the plugin list in your configuration is ignored. The long form
of this argument also allows specifying no plugins, effectively disabling
all plugins: ``--plugins=``.
* ``-P plugins``: specify a comma-separated list of plugins to disable in a
specific beets run. This will override ``-p`` if used with it. To disable all plugins, use
``--plugins=`` instead.
Beets also uses the ``BEETSDIR`` environment variable to look for
configuration and data.

View file

@ -327,6 +327,25 @@ The defaults look like this::
See :ref:`aunique` for more details.
.. _config-sunique:
sunique
~~~~~~~
Like :ref:`config-aunique` above for albums, these options control the
generation of a unique string to disambiguate *singletons* that share similar
metadata.
The defaults look like this::
sunique:
keys: artist title
disambiguators: year trackdisambig
bracket: '[]'
See :ref:`sunique` for more details.
.. _terminal_encoding:
terminal_encoding
@ -744,6 +763,17 @@ to one request per second.
.. _limited: https://musicbrainz.org/doc/XML_Web_Service/Rate_Limiting
.. _Building search indexes: https://musicbrainz.org/doc/Development/Search_server_setup
.. _musicbrainz.enabled:
enabled
~~~~~~~
This option allows you to disable using MusicBrainz as a metadata source. This applies
if you use plugins that fetch data from alternative sources and should make the import
process quicker.
Default: ``yes``.
.. _searchlimit:
searchlimit

View file

@ -73,6 +73,8 @@ These functions are built in to beets:
option.
* ``%aunique{identifiers,disambiguators,brackets}``: Provides a unique string
to disambiguate similar albums in the database. See :ref:`aunique`, below.
* ``%sunique{identifiers,disambiguators,brackets}``: Similarly, a unique string
to disambiguate similar singletons in the database. See :ref:`sunique`, below.
* ``%time{date_time,format}``: Return the date and time in any format accepted
by `strftime`_. For example, to get the year some music was added to your
library, use ``%time{$added,%Y}``.
@ -145,6 +147,18 @@ its import time. Only the second album will receive a disambiguation string. If
you want to add the disambiguation string to both albums, just run ``beet move``
(possibly restricted by a query) to update the paths for the albums.
.. _sunique:
Singleton Disambiguation
------------------------
It is also possible to have singleton tracks with the same name and the same
artist. Beets provides the ``%sunique{}`` template to avoid giving these
tracks the same file path.
It has the same arguments as the :ref:`%aunique <aunique>` template, but the default
values are different. The default identifiers are ``artist title`` and the
default disambiguators are ``year trackdisambig``.
Syntax Details
--------------
@ -235,6 +249,9 @@ Audio information:
* length (in seconds)
* bitrate (in kilobits per second, with units: e.g., "192kbps")
* bitrate_mode (e.g., "CBR", "VBR" or "ABR", only available for the MP3 format)
* encoder_info (e.g., "LAME 3.97.0", only available for some formats)
* encoder_settings (e.g., "-V2", only available for the MP3 format)
* format (e.g., "MP3" or "FLAC")
* channels
* bitdepth (only available for some formats)

View file

@ -93,14 +93,47 @@ backslashes are not part of beets' syntax; I'm just using the escaping
functionality of my shell (bash or zsh, for instance) to pass ``the rebel`` as a
single argument instead of two.
.. _exact-match:
Exact Matches
-------------
While ordinary queries perform *substring* matches, beets can also match whole
strings by adding either ``=`` (case-sensitive) or ``=~`` (ignore case) after
the field name's colon and before the expression::
$ beet list artist:air
$ beet list artist:=~air
$ beet list artist:=AIR
The first query is a simple substring one that returns tracks by Air, AIR, and
Air Supply. The second query returns tracks by Air and AIR, since both are a
case-insensitive match for the entire expression, but does not return anything
by Air Supply. The third query, which requires a case-sensitive exact match,
returns tracks by AIR only.
Exact matches may be performed on phrases as well::
$ beet list artist:=~"dave matthews"
$ beet list artist:="Dave Matthews"
Both of these queries return tracks by Dave Matthews, but not by Dave Matthews
Band.
To search for exact matches across *all* fields, just prefix the expression with
a single ``=`` or ``=~``::
$ beet list =~crash
$ beet list ="American Football"
.. _regex:
Regular Expressions
-------------------
While ordinary keywords perform simple substring matches, beets also supports
regular expression matching for more advanced queries. To run a regex query, use
an additional ``:`` between the field name and the expression::
In addition to simple substring and exact matches, beets also supports regular
expression matching for more advanced queries. To run a regex query, use an
additional ``:`` between the field name and the expression::
$ beet list "artist::Ann(a|ie)"

View file

@ -151,7 +151,7 @@ _beet_subcmd_options() {
libfile=("$matchany" ':file:database file:{_files -g *.db}')
regex_words+=("$opt:$optdesc:\$libfile")
;;
(DIR|DIRECTORY)
(DIR|DIRECTORY|DEST)
local -a dirs
dirs=("$matchany" ':dir:directory:_dirs')
regex_words+=("$opt:$optdesc:\$dirs")

View file

@ -276,7 +276,7 @@ def prep():
cur_version = get_version()
# Tag.
subprocess.check_output(['git', 'tag', f'v{cur_version}'])
subprocess.check_call(['git', 'tag', f'v{cur_version}'])
# Build.
with chdir(BASE):

View file

@ -69,6 +69,7 @@ per-file-ignores =
./beetsplug/permissions.py:D
./beetsplug/spotify.py:D
./beetsplug/lastgenre/__init__.py:D
./beetsplug/limit.py:D
./beetsplug/mbcollection.py:D
./beetsplug/metasync/amarok.py:D
./beetsplug/metasync/itunes.py:D
@ -161,6 +162,7 @@ per-file-ignores =
./test/test_library.py:D
./test/test_ui_commands.py:D
./test/test_lyrics.py:D
./test/test_limit.py:D
./test/test_beatport.py:D
./test/test_random.py:D
./test/test_embyupdate.py:D

View file

@ -54,7 +54,7 @@ if 'sdist' in sys.argv:
setup(
name='beets',
version='1.5.1',
version='1.6.1',
description='music tagger and library organizer',
author='Adrian Sampson',
author_email='adrian@radbox.org',
@ -88,8 +88,8 @@ setup(
'unidecode',
'musicbrainzngs>=0.4',
'pyyaml',
'mediafile>=0.2.0',
'confuse>=1.0.0',
'mediafile>=0.9.0',
'confuse>=1.5.0',
'munkres>=1.0.0',
'jellyfish',
] + (
@ -126,7 +126,6 @@ setup(
'embedart': ['Pillow'],
'embyupdate': ['requests'],
'chroma': ['pyacoustid'],
'gmusic': ['gmusicapi'],
'discogs': ['python3-discogs-client>=2.3.10'],
'beatport': ['requests-oauthlib>=0.6.1'],
'kodiupdate': ['requests'],
@ -136,7 +135,7 @@ setup(
'mpdstats': ['python-mpd2>=0.4.2'],
'plexupdate': ['requests'],
'web': ['flask', 'flask-cors'],
'import': ['rarfile'],
'import': ['rarfile', 'py7zr'],
'thumbnails': ['pyxdg', 'Pillow'],
'metasync': ['dbus-python'],
'sonosupdate': ['soco'],

View file

@ -37,7 +37,7 @@ import shutil
import subprocess
from tempfile import mkdtemp, mkstemp
from contextlib import contextmanager
from six import StringIO
from io import StringIO
from enum import Enum
import beets
@ -373,21 +373,23 @@ class TestHelper:
items.append(item)
return items
def add_album_fixture(self, track_count=1, ext='mp3', disc_count=1):
    """Add an album with real media files to the database.

    Creates ``disc_count`` discs with ``track_count`` tracks each; every
    track is a copy of the bundled ``full.<ext>`` resource file. Album and
    track titles contain non-ASCII characters so unicode path handling is
    exercised. Returns the newly added album.
    """
    fixture_path = os.path.join(
        _common.RSRC, util.bytestring_path('full.' + ext))
    added = []
    for disc in range(1, disc_count + 1):
        for index in range(track_count):
            track = Item.from_path(fixture_path)
            track.album = '\u00e4lbum'  # Check unicode paths
            track.title = f't\u00eftle {index}'
            track.disc = disc
            # mtime needs to be set last since other assignments reset it.
            track.mtime = 12345
            track.add(self.lib)
            track.move(operation=MoveOperation.COPY)
            track.store()
            added.append(track)
    return self.lib.add_album(added)
def create_mediafile_fixture(self, ext='mp3', images=[]):

File diff suppressed because one or more lines are too long

View file

@ -57,6 +57,6 @@ Black_magic_woman: |
u_n_eye: |
let see cool bed for sometimes are place told in yeah or ride open hide blame knee your my borders
perfect i of laying lies they love the night all out saying fast things said that on face hit hell
no low not bullets bullet fly time maybe over is roof a it know now airplane where tekst and tonight
brakes just waste we go an to you was going eye start need insane cross gotta historia mood life with
hurts too whoa me fight little every oh would thousand but high tekstu lay space do down private edycji
no low not bullets bullet fly time maybe over is roof a it know now airplane where and tonight
brakes just waste we go an to you was going eye start need insane cross gotta mood life with
hurts too whoa me fight little every oh would thousand but high lay space do down private

View file

@ -0,0 +1,766 @@
{
"album_type": "compilation",
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/0LyfQWJT6nXafLPZqxe9Of"
},
"href": "https://api.spotify.com/v1/artists/0LyfQWJT6nXafLPZqxe9Of",
"id": "0LyfQWJT6nXafLPZqxe9Of",
"name": "Various Artists",
"type": "artist",
"uri": "spotify:artist:0LyfQWJT6nXafLPZqxe9Of"
}
],
"available_markets": [],
"copyrights": [
{
"text": "2013 Back Lot Music",
"type": "C"
},
{
"text": "2013 Back Lot Music",
"type": "P"
}
],
"external_ids": {
"upc": "857970002363"
},
"external_urls": {
"spotify": "https://open.spotify.com/album/5l3zEmMrOhOzG8d8s83GOL"
},
"genres": [],
"href": "https://api.spotify.com/v1/albums/5l3zEmMrOhOzG8d8s83GOL",
"id": "5l3zEmMrOhOzG8d8s83GOL",
"images": [
{
"height": 640,
"url": "https://i.scdn.co/image/ab67616d0000b27399140a62d43aec760f6172a2",
"width": 640
},
{
"height": 300,
"url": "https://i.scdn.co/image/ab67616d00001e0299140a62d43aec760f6172a2",
"width": 300
},
{
"height": 64,
"url": "https://i.scdn.co/image/ab67616d0000485199140a62d43aec760f6172a2",
"width": 64
}
],
"label": "Back Lot Music",
"name": "Despicable Me 2 (Original Motion Picture Soundtrack)",
"popularity": 0,
"release_date": "2013-06-18",
"release_date_precision": "day",
"total_tracks": 24,
"tracks": {
"href": "https://api.spotify.com/v1/albums/5l3zEmMrOhOzG8d8s83GOL/tracks?offset=0&limit=50",
"items": [
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/5nLYd9ST4Cnwy6NHaCxbj8"
},
"href": "https://api.spotify.com/v1/artists/5nLYd9ST4Cnwy6NHaCxbj8",
"id": "5nLYd9ST4Cnwy6NHaCxbj8",
"name": "CeeLo Green",
"type": "artist",
"uri": "spotify:artist:5nLYd9ST4Cnwy6NHaCxbj8"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 221805,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/3EiEbQAR44icEkz3rsMI0N"
},
"href": "https://api.spotify.com/v1/tracks/3EiEbQAR44icEkz3rsMI0N",
"id": "3EiEbQAR44icEkz3rsMI0N",
"is_local": false,
"name": "Scream",
"preview_url": null,
"track_number": 1,
"type": "track",
"uri": "spotify:track:3EiEbQAR44icEkz3rsMI0N"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/3NVrWkcHOtmPbMSvgHmijZ"
},
"href": "https://api.spotify.com/v1/artists/3NVrWkcHOtmPbMSvgHmijZ",
"id": "3NVrWkcHOtmPbMSvgHmijZ",
"name": "The Minions",
"type": "artist",
"uri": "spotify:artist:3NVrWkcHOtmPbMSvgHmijZ"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 39065,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/1G4Z91vvEGTYd2ZgOD0MuN"
},
"href": "https://api.spotify.com/v1/tracks/1G4Z91vvEGTYd2ZgOD0MuN",
"id": "1G4Z91vvEGTYd2ZgOD0MuN",
"is_local": false,
"name": "Another Irish Drinking Song",
"preview_url": null,
"track_number": 2,
"type": "track",
"uri": "spotify:track:1G4Z91vvEGTYd2ZgOD0MuN"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RdwBSPQiwcmiDo9kixcl8"
},
"href": "https://api.spotify.com/v1/artists/2RdwBSPQiwcmiDo9kixcl8",
"id": "2RdwBSPQiwcmiDo9kixcl8",
"name": "Pharrell Williams",
"type": "artist",
"uri": "spotify:artist:2RdwBSPQiwcmiDo9kixcl8"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 176078,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/7DKqhn3Aa0NT9N9GAcagda"
},
"href": "https://api.spotify.com/v1/tracks/7DKqhn3Aa0NT9N9GAcagda",
"id": "7DKqhn3Aa0NT9N9GAcagda",
"is_local": false,
"name": "Just a Cloud Away",
"preview_url": null,
"track_number": 3,
"type": "track",
"uri": "spotify:track:7DKqhn3Aa0NT9N9GAcagda"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RdwBSPQiwcmiDo9kixcl8"
},
"href": "https://api.spotify.com/v1/artists/2RdwBSPQiwcmiDo9kixcl8",
"id": "2RdwBSPQiwcmiDo9kixcl8",
"name": "Pharrell Williams",
"type": "artist",
"uri": "spotify:artist:2RdwBSPQiwcmiDo9kixcl8"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 233305,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/6NPVjNh8Jhru9xOmyQigds"
},
"href": "https://api.spotify.com/v1/tracks/6NPVjNh8Jhru9xOmyQigds",
"id": "6NPVjNh8Jhru9xOmyQigds",
"is_local": false,
"name": "Happy",
"preview_url": null,
"track_number": 4,
"type": "track",
"uri": "spotify:track:6NPVjNh8Jhru9xOmyQigds"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/3NVrWkcHOtmPbMSvgHmijZ"
},
"href": "https://api.spotify.com/v1/artists/3NVrWkcHOtmPbMSvgHmijZ",
"id": "3NVrWkcHOtmPbMSvgHmijZ",
"name": "The Minions",
"type": "artist",
"uri": "spotify:artist:3NVrWkcHOtmPbMSvgHmijZ"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 98211,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/5HSqCeDCn2EEGR5ORwaHA0"
},
"href": "https://api.spotify.com/v1/tracks/5HSqCeDCn2EEGR5ORwaHA0",
"id": "5HSqCeDCn2EEGR5ORwaHA0",
"is_local": false,
"name": "I Swear",
"preview_url": null,
"track_number": 5,
"type": "track",
"uri": "spotify:track:5HSqCeDCn2EEGR5ORwaHA0"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/3NVrWkcHOtmPbMSvgHmijZ"
},
"href": "https://api.spotify.com/v1/artists/3NVrWkcHOtmPbMSvgHmijZ",
"id": "3NVrWkcHOtmPbMSvgHmijZ",
"name": "The Minions",
"type": "artist",
"uri": "spotify:artist:3NVrWkcHOtmPbMSvgHmijZ"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 175291,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/2Ls4QknWvBoGSeAlNKw0Xj"
},
"href": "https://api.spotify.com/v1/tracks/2Ls4QknWvBoGSeAlNKw0Xj",
"id": "2Ls4QknWvBoGSeAlNKw0Xj",
"is_local": false,
"name": "Y.M.C.A.",
"preview_url": null,
"track_number": 6,
"type": "track",
"uri": "spotify:track:2Ls4QknWvBoGSeAlNKw0Xj"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RdwBSPQiwcmiDo9kixcl8"
},
"href": "https://api.spotify.com/v1/artists/2RdwBSPQiwcmiDo9kixcl8",
"id": "2RdwBSPQiwcmiDo9kixcl8",
"name": "Pharrell Williams",
"type": "artist",
"uri": "spotify:artist:2RdwBSPQiwcmiDo9kixcl8"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 206105,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/1XkUmKLbm1tzVtrkdj2Ou8"
},
"href": "https://api.spotify.com/v1/tracks/1XkUmKLbm1tzVtrkdj2Ou8",
"id": "1XkUmKLbm1tzVtrkdj2Ou8",
"is_local": false,
"name": "Fun, Fun, Fun",
"preview_url": null,
"track_number": 7,
"type": "track",
"uri": "spotify:track:1XkUmKLbm1tzVtrkdj2Ou8"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RdwBSPQiwcmiDo9kixcl8"
},
"href": "https://api.spotify.com/v1/artists/2RdwBSPQiwcmiDo9kixcl8",
"id": "2RdwBSPQiwcmiDo9kixcl8",
"name": "Pharrell Williams",
"type": "artist",
"uri": "spotify:artist:2RdwBSPQiwcmiDo9kixcl8"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 254705,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/42lHGtAZd6xVLC789afLWt"
},
"href": "https://api.spotify.com/v1/tracks/42lHGtAZd6xVLC789afLWt",
"id": "42lHGtAZd6xVLC789afLWt",
"is_local": false,
"name": "Despicable Me",
"preview_url": null,
"track_number": 8,
"type": "track",
"uri": "spotify:track:42lHGtAZd6xVLC789afLWt"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 126825,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/7uAC260NViRKyYW4st4vri"
},
"href": "https://api.spotify.com/v1/tracks/7uAC260NViRKyYW4st4vri",
"id": "7uAC260NViRKyYW4st4vri",
"is_local": false,
"name": "PX-41 Labs",
"preview_url": null,
"track_number": 9,
"type": "track",
"uri": "spotify:track:7uAC260NViRKyYW4st4vri"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 87118,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/6YLmc6yT7OGiNwbShHuEN2"
},
"href": "https://api.spotify.com/v1/tracks/6YLmc6yT7OGiNwbShHuEN2",
"id": "6YLmc6yT7OGiNwbShHuEN2",
"is_local": false,
"name": "The Fairy Party",
"preview_url": null,
"track_number": 10,
"type": "track",
"uri": "spotify:track:6YLmc6yT7OGiNwbShHuEN2"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 339478,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/5lwsXhSXKFoxoGOFLZdQX6"
},
"href": "https://api.spotify.com/v1/tracks/5lwsXhSXKFoxoGOFLZdQX6",
"id": "5lwsXhSXKFoxoGOFLZdQX6",
"is_local": false,
"name": "Lucy And The AVL",
"preview_url": null,
"track_number": 11,
"type": "track",
"uri": "spotify:track:5lwsXhSXKFoxoGOFLZdQX6"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 87478,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/2FlWtPuBMGo0a0X7LGETyk"
},
"href": "https://api.spotify.com/v1/tracks/2FlWtPuBMGo0a0X7LGETyk",
"id": "2FlWtPuBMGo0a0X7LGETyk",
"is_local": false,
"name": "Goodbye Nefario",
"preview_url": null,
"track_number": 12,
"type": "track",
"uri": "spotify:track:2FlWtPuBMGo0a0X7LGETyk"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 86998,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/3YnhGNADeUaoBTjB1uGUjh"
},
"href": "https://api.spotify.com/v1/tracks/3YnhGNADeUaoBTjB1uGUjh",
"id": "3YnhGNADeUaoBTjB1uGUjh",
"is_local": false,
"name": "Time for Bed",
"preview_url": null,
"track_number": 13,
"type": "track",
"uri": "spotify:track:3YnhGNADeUaoBTjB1uGUjh"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 180265,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/6npUKThV4XI20VLW5ryr5O"
},
"href": "https://api.spotify.com/v1/tracks/6npUKThV4XI20VLW5ryr5O",
"id": "6npUKThV4XI20VLW5ryr5O",
"is_local": false,
"name": "Break-In",
"preview_url": null,
"track_number": 14,
"type": "track",
"uri": "spotify:track:6npUKThV4XI20VLW5ryr5O"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 95011,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/1qyFlqVfbgyiM7tQ2Jy9vC"
},
"href": "https://api.spotify.com/v1/tracks/1qyFlqVfbgyiM7tQ2Jy9vC",
"id": "1qyFlqVfbgyiM7tQ2Jy9vC",
"is_local": false,
"name": "Stalking Floyd Eaglesan",
"preview_url": null,
"track_number": 15,
"type": "track",
"uri": "spotify:track:1qyFlqVfbgyiM7tQ2Jy9vC"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 189771,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/4DRQctGiqjJkbFa7iTK4pb"
},
"href": "https://api.spotify.com/v1/tracks/4DRQctGiqjJkbFa7iTK4pb",
"id": "4DRQctGiqjJkbFa7iTK4pb",
"is_local": false,
"name": "Moving to Australia",
"preview_url": null,
"track_number": 16,
"type": "track",
"uri": "spotify:track:4DRQctGiqjJkbFa7iTK4pb"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 85878,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/1TSjM9GY2oN6RO6aYGN25n"
},
"href": "https://api.spotify.com/v1/tracks/1TSjM9GY2oN6RO6aYGN25n",
"id": "1TSjM9GY2oN6RO6aYGN25n",
"is_local": false,
"name": "Going to Save the World",
"preview_url": null,
"track_number": 17,
"type": "track",
"uri": "spotify:track:1TSjM9GY2oN6RO6aYGN25n"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 87158,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/3AEMuoglM1myQ8ouIyh8LG"
},
"href": "https://api.spotify.com/v1/tracks/3AEMuoglM1myQ8ouIyh8LG",
"id": "3AEMuoglM1myQ8ouIyh8LG",
"is_local": false,
"name": "El Macho",
"preview_url": null,
"track_number": 18,
"type": "track",
"uri": "spotify:track:3AEMuoglM1myQ8ouIyh8LG"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 47438,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/2d7fEVYdZnjlya3MPEma21"
},
"href": "https://api.spotify.com/v1/tracks/2d7fEVYdZnjlya3MPEma21",
"id": "2d7fEVYdZnjlya3MPEma21",
"is_local": false,
"name": "Jillian",
"preview_url": null,
"track_number": 19,
"type": "track",
"uri": "spotify:track:2d7fEVYdZnjlya3MPEma21"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 89398,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/7h8WnOo4Fh6NvfTUnR7nOa"
},
"href": "https://api.spotify.com/v1/tracks/7h8WnOo4Fh6NvfTUnR7nOa",
"id": "7h8WnOo4Fh6NvfTUnR7nOa",
"is_local": false,
"name": "Take Her Home",
"preview_url": null,
"track_number": 20,
"type": "track",
"uri": "spotify:track:7h8WnOo4Fh6NvfTUnR7nOa"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 212691,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/25A9ZlegjJ0z2fI1PgTqy2"
},
"href": "https://api.spotify.com/v1/tracks/25A9ZlegjJ0z2fI1PgTqy2",
"id": "25A9ZlegjJ0z2fI1PgTqy2",
"is_local": false,
"name": "El Macho's Lair",
"preview_url": null,
"track_number": 21,
"type": "track",
"uri": "spotify:track:25A9ZlegjJ0z2fI1PgTqy2"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 117745,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/48GwOCuPhWKDktq3efmfRg"
},
"href": "https://api.spotify.com/v1/tracks/48GwOCuPhWKDktq3efmfRg",
"id": "48GwOCuPhWKDktq3efmfRg",
"is_local": false,
"name": "Home Invasion",
"preview_url": null,
"track_number": 22,
"type": "track",
"uri": "spotify:track:48GwOCuPhWKDktq3efmfRg"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RaHCHhZWBXn460JpMaicz"
},
"href": "https://api.spotify.com/v1/artists/2RaHCHhZWBXn460JpMaicz",
"id": "2RaHCHhZWBXn460JpMaicz",
"name": "Heitor Pereira",
"type": "artist",
"uri": "spotify:artist:2RaHCHhZWBXn460JpMaicz"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 443251,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/6dZkl2egcKVm8rO9W7pPWa"
},
"href": "https://api.spotify.com/v1/tracks/6dZkl2egcKVm8rO9W7pPWa",
"id": "6dZkl2egcKVm8rO9W7pPWa",
"is_local": false,
"name": "The Big Battle",
"preview_url": null,
"track_number": 23,
"type": "track",
"uri": "spotify:track:6dZkl2egcKVm8rO9W7pPWa"
},
{
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/3NVrWkcHOtmPbMSvgHmijZ"
},
"href": "https://api.spotify.com/v1/artists/3NVrWkcHOtmPbMSvgHmijZ",
"id": "3NVrWkcHOtmPbMSvgHmijZ",
"name": "The Minions",
"type": "artist",
"uri": "spotify:artist:3NVrWkcHOtmPbMSvgHmijZ"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 13886,
"explicit": false,
"external_urls": {
"spotify": "https://open.spotify.com/track/2L0OyiAepqAbKvUZfWovOJ"
},
"href": "https://api.spotify.com/v1/tracks/2L0OyiAepqAbKvUZfWovOJ",
"id": "2L0OyiAepqAbKvUZfWovOJ",
"is_local": false,
"name": "Ba Do Bleep",
"preview_url": null,
"track_number": 24,
"type": "track",
"uri": "spotify:track:2L0OyiAepqAbKvUZfWovOJ"
}
],
"limit": 50,
"next": null,
"offset": 0,
"previous": null,
"total": 24
},
"type": "album",
"uri": "spotify:album:5l3zEmMrOhOzG8d8s83GOL"
}

View file

@ -0,0 +1,77 @@
{
"album": {
"album_type": "compilation",
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/0LyfQWJT6nXafLPZqxe9Of"
},
"href": "https://api.spotify.com/v1/artists/0LyfQWJT6nXafLPZqxe9Of",
"id": "0LyfQWJT6nXafLPZqxe9Of",
"name": "Various Artists",
"type": "artist",
"uri": "spotify:artist:0LyfQWJT6nXafLPZqxe9Of"
}
],
"available_markets": [],
"external_urls": {
"spotify": "https://open.spotify.com/album/5l3zEmMrOhOzG8d8s83GOL"
},
"href": "https://api.spotify.com/v1/albums/5l3zEmMrOhOzG8d8s83GOL",
"id": "5l3zEmMrOhOzG8d8s83GOL",
"images": [
{
"height": 640,
"url": "https://i.scdn.co/image/ab67616d0000b27399140a62d43aec760f6172a2",
"width": 640
},
{
"height": 300,
"url": "https://i.scdn.co/image/ab67616d00001e0299140a62d43aec760f6172a2",
"width": 300
},
{
"height": 64,
"url": "https://i.scdn.co/image/ab67616d0000485199140a62d43aec760f6172a2",
"width": 64
}
],
"name": "Despicable Me 2 (Original Motion Picture Soundtrack)",
"release_date": "2013-06-18",
"release_date_precision": "day",
"total_tracks": 24,
"type": "album",
"uri": "spotify:album:5l3zEmMrOhOzG8d8s83GOL"
},
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/2RdwBSPQiwcmiDo9kixcl8"
},
"href": "https://api.spotify.com/v1/artists/2RdwBSPQiwcmiDo9kixcl8",
"id": "2RdwBSPQiwcmiDo9kixcl8",
"name": "Pharrell Williams",
"type": "artist",
"uri": "spotify:artist:2RdwBSPQiwcmiDo9kixcl8"
}
],
"available_markets": [],
"disc_number": 1,
"duration_ms": 233305,
"explicit": false,
"external_ids": {
"isrc": "USQ4E1300686"
},
"external_urls": {
"spotify": "https://open.spotify.com/track/6NPVjNh8Jhru9xOmyQigds"
},
"href": "https://api.spotify.com/v1/tracks/6NPVjNh8Jhru9xOmyQigds",
"id": "6NPVjNh8Jhru9xOmyQigds",
"is_local": false,
"name": "Happy",
"popularity": 1,
"preview_url": null,
"track_number": 4,
"type": "track",
"uri": "spotify:track:6NPVjNh8Jhru9xOmyQigds"
}

View file

@ -31,7 +31,7 @@ from beets import library
from beets import importer
from beets import logging
from beets import util
from beets.util.artresizer import ArtResizer, WEBPROXY
from beets.util.artresizer import ArtResizer
import confuse
@ -787,7 +787,7 @@ class ArtForAlbumTest(UseThePlugin):
"""Skip the test if the art resizer doesn't have ImageMagick or
PIL (so comparisons and measurements are unavailable).
"""
if ArtResizer.shared.method[0] == WEBPROXY:
if not ArtResizer.shared.local:
self.skipTest("ArtResizer has no local imaging backend available")
def test_respect_minwidth(self):

View file

@ -16,17 +16,36 @@
import unittest
from unittest.mock import patch
import os
from test import _common
from test.helper import TestHelper
from beets.util import syspath
from beets.util.artresizer import (
pil_resize,
im_resize,
get_im_version,
get_pil_version,
)
from beets.util import command_output, syspath
from beets.util.artresizer import IMBackend, PILBackend
class DummyIMBackend(IMBackend):
    """An `IMBackend` stand-in that pretends ImageMagick is installed.

    The fabricated version (7, 0, 0) is recent enough that callers
    relying on image comparison take the ImageMagick code path.
    """

    def __init__(self):
        """Set canned attributes instead of probing for ImageMagick."""
        self.version = (7, 0, 0)
        self.legacy = False
        # ImageMagick 7 command style: everything goes through `magick`.
        entry_point = 'magick'
        self.convert_cmd = [entry_point]
        self.identify_cmd = [entry_point, 'identify']
        self.compare_cmd = [entry_point, 'compare']
class DummyPILBackend(PILBackend):
    """A `PILBackend` stand-in that pretends PIL is installed."""

    def __init__(self):
        """Do nothing, deliberately skipping the parent's availability probe."""
class ArtResizerFileSizeTest(_common.TestCase, TestHelper):
@ -43,10 +62,10 @@ class ArtResizerFileSizeTest(_common.TestCase, TestHelper):
"""Called after each test, unloading all plugins."""
self.teardown_beets()
def _test_img_resize(self, resize_func):
def _test_img_resize(self, backend):
"""Test resizing based on file size, given a resize_func."""
# Check quality setting unaffected by new parameter
im_95_qual = resize_func(
im_95_qual = backend.resize(
225,
self.IMG_225x225,
quality=95,
@ -56,7 +75,7 @@ class ArtResizerFileSizeTest(_common.TestCase, TestHelper):
self.assertExists(im_95_qual)
# Attempt a lower filesize with same quality
im_a = resize_func(
im_a = backend.resize(
225,
self.IMG_225x225,
quality=95,
@ -68,7 +87,7 @@ class ArtResizerFileSizeTest(_common.TestCase, TestHelper):
os.stat(syspath(im_95_qual)).st_size)
# Attempt with lower initial quality
im_75_qual = resize_func(
im_75_qual = backend.resize(
225,
self.IMG_225x225,
quality=75,
@ -76,7 +95,7 @@ class ArtResizerFileSizeTest(_common.TestCase, TestHelper):
)
self.assertExists(im_75_qual)
im_b = resize_func(
im_b = backend.resize(
225,
self.IMG_225x225,
quality=95,
@ -87,15 +106,55 @@ class ArtResizerFileSizeTest(_common.TestCase, TestHelper):
self.assertLess(os.stat(syspath(im_b)).st_size,
os.stat(syspath(im_75_qual)).st_size)
@unittest.skipUnless(get_pil_version(), "PIL not available")
@unittest.skipUnless(PILBackend.available(), "PIL not available")
def test_pil_file_resize(self):
"""Test PIL resize function is lowering file size."""
self._test_img_resize(pil_resize)
self._test_img_resize(PILBackend())
@unittest.skipUnless(get_im_version(), "ImageMagick not available")
@unittest.skipUnless(IMBackend.available(), "ImageMagick not available")
def test_im_file_resize(self):
"""Test IM resize function is lowering file size."""
self._test_img_resize(im_resize)
self._test_img_resize(IMBackend())
@unittest.skipUnless(PILBackend.available(), "PIL not available")
def test_pil_file_deinterlace(self):
    """Test PIL deinterlace function.

    Check if the `PILBackend.deinterlace()` function returns images
    that are non-progressive
    """
    path = PILBackend().deinterlace(self.IMG_225x225)
    from PIL import Image
    with Image.open(path) as img:
        # NOTE(review): presumably PIL sets a 'progression' key in
        # `img.info` only for progressive JPEGs, so its absence means
        # the output is non-progressive — confirm against Pillow docs.
        self.assertFalse('progression' in img.info)
@unittest.skipUnless(IMBackend.available(), "ImageMagick not available")
def test_im_file_deinterlace(self):
    """Test ImageMagick deinterlace function.

    Check if the `IMBackend.deinterlace()` function returns images
    that are non-progressive.
    """
    im = IMBackend()
    path = im.deinterlace(self.IMG_225x225)
    # Ask ImageMagick itself for the interlace mode of the produced
    # file; the identify output b'None' indicates non-progressive.
    cmd = im.identify_cmd + [
        '-format', '%[interlace]', syspath(path, prefix=False),
    ]
    out = command_output(cmd).stdout
    self.assertTrue(out == b'None')
@patch('beets.util.artresizer.util')
def test_write_metadata_im(self, mock_util):
    """Test writing image metadata."""
    metadata = {"a": "A", "b": "B"}
    im = DummyIMBackend()
    im.write_metadata("foo", metadata)
    # The backend may emit the `-set` pairs in either iteration order
    # of the metadata dict, so accept both orderings of the convert
    # command before failing.
    try:
        command = im.convert_cmd + "foo -set a A -set b B foo".split()
        mock_util.command_output.assert_called_once_with(command)
    except AssertionError:
        command = im.convert_cmd + "foo -set b B -set a A foo".split()
        mock_util.command_output.assert_called_once_with(command)
def suite():

View file

@ -48,7 +48,8 @@ class TestHelper(helper.TestHelper):
shell_quote(stub), tag)
def assertFileTag(self, path, tag): # noqa
"""Assert that the path is a file and the files content ends with `tag`.
"""Assert that the path is a file and the files content ends
with `tag`.
"""
display_tag = tag
tag = tag.encode('utf-8')
@ -107,7 +108,8 @@ class ImportConvertTest(unittest.TestCase, TestHelper):
item = self.lib.items().get()
self.assertFileTag(item.path, 'convert')
@unittest.skipIf(sys.platform, 'win32') # FIXME: fails on windows
# FIXME: fails on windows
@unittest.skipIf(sys.platform == 'win32', 'win32')
def test_import_original_on_convert_error(self):
# `false` exits with non-zero code
self.config['convert']['command'] = 'false'
@ -122,9 +124,18 @@ class ImportConvertTest(unittest.TestCase, TestHelper):
self.importer.run()
for path in self.importer.paths:
for root, dirnames, filenames in os.walk(path):
self.assertTrue(len(fnmatch.filter(filenames, '*.mp3')) == 0,
'Non-empty import directory {}'
.format(util.displayable_path(path)))
self.assertEqual(len(fnmatch.filter(filenames, '*.mp3')), 0,
'Non-empty import directory {}'
.format(util.displayable_path(path)))
def get_count_of_import_files(self):
import_file_count = 0
for path in self.importer.paths:
for root, _, filenames in os.walk(path):
import_file_count += len(filenames)
return import_file_count
class ConvertCommand:
@ -264,7 +275,7 @@ class NeverConvertLossyFilesTest(unittest.TestCase, TestHelper,
self.unload_plugins()
self.teardown_beets()
def test_transcode_from_lossles(self):
def test_transcode_from_lossless(self):
[item] = self.add_item_fixtures(ext='flac')
with control_stdin('y'):
self.run_convert_path(item.path)

View file

@ -763,5 +763,6 @@ class ResultsIteratorTest(unittest.TestCase):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')

View file

@ -20,6 +20,8 @@ from test import _common
from test._common import Bag
from test.helper import capture_log
from beets import config
from beetsplug.discogs import DiscogsPlugin
@ -335,6 +337,7 @@ class DGAlbumInfoTest(_common.TestCase):
def test_parse_minimal_release(self):
"""Test parsing of a release with the minimal amount of information."""
data = {'id': 123,
'uri': 'https://www.discogs.com/release/123456-something',
'tracklist': [self._make_track('A', '1', '01:01')],
'artists': [{'name': 'ARTIST NAME', 'id': 321, 'join': ''}],
'title': 'TITLE'}
@ -373,6 +376,33 @@ class DGAlbumInfoTest(_common.TestCase):
match = ''
self.assertEqual(match, expected)
def test_default_genre_style_settings(self):
    """Test genre default settings, genres to genre, styles to style"""
    release = self._make_release_from_positions(['1', '2'])
    d = DiscogsPlugin().get_album_info(release)
    # With default config, Discogs genres and styles are kept in their
    # own comma-joined fields.
    self.assertEqual(d.genre, 'GENRE1, GENRE2')
    self.assertEqual(d.style, 'STYLE1, STYLE2')
def test_append_style_to_genre(self):
    """Test appending style to genre if config enabled"""
    config['discogs']['append_style_genre'] = True
    release = self._make_release_from_positions(['1', '2'])
    d = DiscogsPlugin().get_album_info(release)
    # Styles are appended after the genres in `genre`, while `style`
    # itself is left unchanged.
    self.assertEqual(d.genre, 'GENRE1, GENRE2, STYLE1, STYLE2')
    self.assertEqual(d.style, 'STYLE1, STYLE2')
def test_append_style_to_genre_no_style(self):
    """Test nothing appended to genre if style is empty"""
    config['discogs']['append_style_genre'] = True
    release = self._make_release_from_positions(['1', '2'])
    release.data['styles'] = []
    d = DiscogsPlugin().get_album_info(release)
    self.assertEqual(d.genre, 'GENRE1, GENRE2')
    # An empty style list produces no style value at all.
    self.assertEqual(d.style, None)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)

View file

@ -21,6 +21,7 @@ import unittest
from test import _common
from test.helper import TestHelper
from test.test_art_resize import DummyIMBackend
from mediafile import MediaFile
from beets import config, logging, ui
@ -216,16 +217,30 @@ class EmbedartCliTest(_common.TestCase, TestHelper):
self.assertEqual(mediafile.images[0].data, self.image_data)
@patch('beets.art.subprocess')
class DummyArtResizer(ArtResizer):
"""An `ArtResizer` which pretends that ImageMagick is available, and has
a sufficiently recent version to support image comparison.
"""
def __init__(self):
self.local_method = DummyIMBackend()
@patch('beets.util.artresizer.subprocess')
@patch('beets.art.extract')
class ArtSimilarityTest(unittest.TestCase):
def setUp(self):
self.item = _common.item()
self.log = logging.getLogger('beets.embedart')
self.artresizer = DummyArtResizer()
def _similarity(self, threshold):
return art.check_art_similarity(self.log, self.item, b'path',
threshold)
return art.check_art_similarity(
self.log,
self.item,
b'path',
threshold,
artresizer=self.artresizer,
)
def _popen(self, status=0, stdout="", stderr=""):
"""Create a mock `Popen` object."""

View file

@ -63,7 +63,8 @@ class HookTest(_common.TestCase, TestHelper):
self.assertIn('hook: invalid command ""', logs)
@unittest.skipIf(sys.platform, 'win32') # FIXME: fails on windows
# FIXME: fails on windows
@unittest.skipIf(sys.platform == 'win32', 'win32')
def test_hook_non_zero_exit(self):
self._add_hook('test_event', 'sh -c "exit 1"')
@ -86,7 +87,8 @@ class HookTest(_common.TestCase, TestHelper):
message.startswith("hook: hook for test_event failed: ")
for message in logs))
@unittest.skipIf(sys.platform, 'win32') # FIXME: fails on windows
# FIXME: fails on windows
@unittest.skipIf(sys.platform == 'win32', 'win32')
def test_hook_no_arguments(self):
temporary_paths = [
get_temporary_path() for i in range(self.TEST_HOOK_COUNT)
@ -105,7 +107,8 @@ class HookTest(_common.TestCase, TestHelper):
self.assertTrue(os.path.isfile(path))
os.remove(path)
@unittest.skipIf(sys.platform, 'win32') # FIXME: fails on windows
# FIXME: fails on windows
@unittest.skipIf(sys.platform == 'win32', 'win32')
def test_hook_event_substitution(self):
temporary_directory = tempfile._get_default_tempdir()
event_names = [f'test_event_event_{i}' for i in
@ -126,7 +129,8 @@ class HookTest(_common.TestCase, TestHelper):
self.assertTrue(os.path.isfile(path))
os.remove(path)
@unittest.skipIf(sys.platform, 'win32') # FIXME: fails on windows
# FIXME: fails on windows
@unittest.skipIf(sys.platform == 'win32', 'win32')
def test_hook_argument_substitution(self):
temporary_paths = [
get_temporary_path() for i in range(self.TEST_HOOK_COUNT)
@ -145,7 +149,8 @@ class HookTest(_common.TestCase, TestHelper):
self.assertTrue(os.path.isfile(path))
os.remove(path)
@unittest.skipIf(sys.platform, 'win32') # FIXME: fails on windows
# FIXME: fails on windows
@unittest.skipIf(sys.platform == 'win32', 'win32')
def test_hook_bytes_interpolation(self):
temporary_paths = [
get_temporary_path().encode('utf-8')

View file

@ -21,7 +21,7 @@ import shutil
import unicodedata
import sys
import stat
from six import StringIO
from io import StringIO
from tempfile import mkstemp
from zipfile import ZipFile
from tarfile import TarFile
@ -228,14 +228,14 @@ class ImportHelper(TestHelper):
)
def assert_file_in_lib(self, *segments):
"""Join the ``segments`` and assert that this path exists in the library
directory
"""Join the ``segments`` and assert that this path exists in the
library directory.
"""
self.assertExists(os.path.join(self.libdir, *segments))
def assert_file_not_in_lib(self, *segments):
"""Join the ``segments`` and assert that this path exists in the library
directory
"""Join the ``segments`` and assert that this path does not
exist in the library directory.
"""
self.assertNotExists(os.path.join(self.libdir, *segments))
@ -462,8 +462,8 @@ class ImportPasswordRarTest(ImportZipTest):
class ImportSingletonTest(_common.TestCase, ImportHelper):
"""Test ``APPLY`` and ``ASIS`` choices for an import session with singletons
config set to True.
"""Test ``APPLY`` and ``ASIS`` choices for an import session with
singletons config set to True.
"""
def setUp(self):

View file

@ -148,7 +148,10 @@ class GetSetTest(_common.TestCase):
class DestinationTest(_common.TestCase):
def setUp(self):
super().setUp()
self.lib = beets.library.Library(':memory:')
# default directory is ~/Music and the only reason why it was switched
# to ~/.Music is to confirm that tests works well when path to
# temporary directory contains .
self.lib = beets.library.Library(':memory:', '~/.Music')
self.i = item(self.lib)
def tearDown(self):
@ -224,7 +227,7 @@ class DestinationTest(_common.TestCase):
self.i.album = '.something'
dest = self.i.destination()
self.assertTrue(b'something' in dest)
self.assertFalse(b'/.' in dest)
self.assertFalse(b'/.something' in dest)
def test_destination_preserves_legitimate_slashes(self):
self.i.artist = 'one'
@ -449,6 +452,16 @@ class DestinationTest(_common.TestCase):
self.assertEqual(self.i.destination(),
np('base/ber/foo'))
def test_destination_with_replacements_argument(self):
self.lib.directory = b'base'
self.lib.replacements = [(re.compile(r'a'), 'f')]
self.lib.path_formats = [('default', '$album/$title')]
self.i.title = 'foo'
self.i.album = 'bar'
replacements = [(re.compile(r'a'), 'e')]
self.assertEqual(self.i.destination(replacements=replacements),
np('base/ber/foo'))
@unittest.skip('unimplemented: #359')
def test_destination_with_empty_component(self):
self.lib.directory = b'base'
@ -781,6 +794,101 @@ class DisambiguationTest(_common.TestCase, PathFormattingMixin):
self._setf('foo%aunique{albumartist album,year,}/$title')
self._assert_dest(b'/base/foo 2001/the title', self.i1)
def test_key_flexible_attribute(self):
    """When a flexible attribute ('flex') is part of the %aunique key
    set, albums that differ in it need no disambiguation suffix.
    """
    album1 = self.lib.get_album(self.i1)
    album1.flex = 'flex1'
    album2 = self.lib.get_album(self.i2)
    album2.flex = 'flex2'
    album1.store()
    album2.store()
    self._setf('foo%aunique{albumartist album flex,year}/$title')
    # No bracketed disambiguator in the destination path.
    self._assert_dest(b'/base/foo/the title', self.i1)
class SingletonDisambiguationTest(_common.TestCase, PathFormattingMixin):
    """Tests for the %sunique{} path-template function, which adds a
    disambiguating suffix for singleton tracks that would otherwise map
    to the same destination path.
    """

    def setUp(self):
        super().setUp()
        # In-memory library with two singletons that differ only in year.
        self.lib = beets.library.Library(':memory:')
        self.lib.directory = b'/base'
        self.lib.path_formats = [('default', 'path')]
        self.i1 = item()
        self.i1.year = 2001
        self.lib.add(self.i1)
        self.i2 = item()
        self.i2.year = 2002
        self.lib.add(self.i2)
        self.lib._connection().commit()

        # Default template: disambiguate items matching on artist+title
        # by their year.
        self._setf('foo/$title%sunique{artist title,year}')

    def tearDown(self):
        super().tearDown()
        self.lib._connection().close()

    def test_sunique_expands_to_disambiguating_year(self):
        self._assert_dest(b'/base/foo/the title [2001]', self.i1)

    def test_sunique_with_default_arguments_uses_trackdisambig(self):
        self.i1.trackdisambig = 'live version'
        self.i1.year = self.i2.year
        self.i1.store()
        self._setf('foo/$title%sunique{}')
        self._assert_dest(b'/base/foo/the title [live version]', self.i1)

    def test_sunique_expands_to_nothing_for_distinct_singletons(self):
        self.i2.title = 'different track'
        self.i2.store()
        self._assert_dest(b'/base/foo/the title', self.i1)

    def test_sunique_does_not_match_album(self):
        # An item that belongs to an album must not trigger singleton
        # disambiguation.
        self.lib.add_album([self.i2])
        self._assert_dest(b'/base/foo/the title', self.i1)

    def test_sunique_use_fallback_numbers_when_identical(self):
        self.i2.year = self.i1.year
        self.i2.store()
        self._assert_dest(b'/base/foo/the title [1]', self.i1)
        self._assert_dest(b'/base/foo/the title [2]', self.i2)

    def test_sunique_falls_back_to_second_distinguishing_field(self):
        # month is identical, so year (the second field) disambiguates.
        self._setf('foo/$title%sunique{albumartist album,month year}')
        self._assert_dest(b'/base/foo/the title [2001]', self.i1)

    def test_sunique_sanitized(self):
        # Path separators in the disambiguator are replaced ('/' -> '_').
        self.i2.year = self.i1.year
        self.i1.trackdisambig = 'foo/bar'
        self.i2.store()
        self.i1.store()
        self._setf('foo/$title%sunique{artist title,trackdisambig}')
        self._assert_dest(b'/base/foo/the title [foo_bar]', self.i1)

    def test_drop_empty_disambig_string(self):
        self.i1.trackdisambig = None
        self.i2.trackdisambig = 'foo'
        self.i1.store()
        self.i2.store()
        self._setf('foo/$title%sunique{albumartist album,trackdisambig}')
        self._assert_dest(b'/base/foo/the title', self.i1)

    def test_change_brackets(self):
        # Third argument customizes the bracket characters.
        self._setf('foo/$title%sunique{artist title,year,()}')
        self._assert_dest(b'/base/foo/the title (2001)', self.i1)

    def test_remove_brackets(self):
        # Empty third argument drops the brackets entirely.
        self._setf('foo/$title%sunique{artist title,year,}')
        self._assert_dest(b'/base/foo/the title 2001', self.i1)

    def test_key_flexible_attribute(self):
        # Items differing in a flexible key attribute need no suffix.
        self.i1.flex = 'flex1'
        self.i2.flex = 'flex2'
        self.i1.store()
        self.i2.store()
        self._setf('foo/$title%sunique{artist title flex,year}')
        self._assert_dest(b'/base/foo/the title', self.i1)
class PluginDestinationTest(_common.TestCase):
def setUp(self):

105
test/test_limit.py Normal file
View file

@ -0,0 +1,105 @@
# This file is part of beets.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Tests for the 'limit' plugin."""
import unittest
from test.helper import TestHelper
class LimitPluginTest(unittest.TestCase, TestHelper):
    """Unit tests for LimitPlugin.

    Note: query prefix tests do not work correctly with `run_with_output`.
    """

    def setUp(self):
        """Create a library of sequentially numbered test tracks."""
        self.setup_beets()
        self.load_plugins("limit")

        # we'll create an even number of tracks in the library
        self.num_test_items = 10
        assert self.num_test_items % 2 == 0
        fixtures = self.add_item_fixtures(count=self.num_test_items)
        for item_no, item in enumerate(fixtures):
            item.track = item_no + 1
            item.store()

        # our limit tests will use half of this number
        self.num_limit = self.num_test_items // 2
        # the query-prefix form of the same limit, e.g. "'<5'"
        self.num_limit_prefix = f"'<{self.num_limit}'"

        # a subset of tests has only `num_limit` results, identified by a
        # range filter on the track number
        self.track_head_range = f"track:..{self.num_limit}"
        self.track_tail_range = f"track:{self.num_limit + 1}.."

    def tearDown(self):
        """Unload all plugins and destroy the temporary library."""
        self.unload_plugins()
        self.teardown_beets()

    def test_no_limit(self):
        """Returns all when there is no limit or filter."""
        result = self.run_with_output("lslimit")
        self.assertEqual(result.count("\n"), self.num_test_items)

    def test_lslimit_head(self):
        """Returns the expected number with `lslimit --head`."""
        result = self.run_with_output("lslimit", "--head", str(self.num_limit))
        self.assertEqual(result.count("\n"), self.num_limit)

    def test_lslimit_tail(self):
        """Returns the expected number with `lslimit --tail`."""
        result = self.run_with_output("lslimit", "--tail", str(self.num_limit))
        self.assertEqual(result.count("\n"), self.num_limit)

    def test_lslimit_head_invariant(self):
        """Returns the expected number with `lslimit --head` and a filter."""
        result = self.run_with_output(
            "lslimit", "--head", str(self.num_limit), self.track_tail_range)
        self.assertEqual(result.count("\n"), self.num_limit)

    def test_lslimit_tail_invariant(self):
        """Returns the expected number with `lslimit --tail` and a filter."""
        result = self.run_with_output(
            "lslimit", "--tail", str(self.num_limit), self.track_head_range)
        self.assertEqual(result.count("\n"), self.num_limit)

    def test_prefix(self):
        """Returns the expected number with the query prefix."""
        result = self.lib.items(self.num_limit_prefix)
        self.assertEqual(len(result), self.num_limit)

    def test_prefix_when_correctly_ordered(self):
        """Returns the expected number with the query prefix and filter when
        the prefix portion (correctly) appears last."""
        correct_order = self.track_tail_range + " " + self.num_limit_prefix
        result = self.lib.items(correct_order)
        self.assertEqual(len(result), self.num_limit)

    def test_prefix_when_incorrectly_ordered(self):
        """Returns no results with the query prefix and filter when the prefix
        portion (incorrectly) appears first."""
        incorrect_order = self.num_limit_prefix + " " + self.track_tail_range
        result = self.lib.items(incorrect_order)
        self.assertEqual(len(result), 0)
def suite():
    """Build the test suite for this module."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromName(__name__)


if __name__ == '__main__':
    unittest.main(defaultTest='suite')

View file

@ -3,7 +3,7 @@
import sys
import threading
import logging as log
from six import StringIO
from io import StringIO
import unittest
import beets.logging as blog
@ -175,7 +175,7 @@ class ConcurrentEventsTest(TestCase, helper.TestHelper):
self.lock1 = threading.Lock()
self.lock2 = threading.Lock()
self.test_case = test_case
self.exc_info = None
self.exc = None
self.t1_step = self.t2_step = 0
def log_all(self, name):
@ -190,9 +190,8 @@ class ConcurrentEventsTest(TestCase, helper.TestHelper):
self.lock1.acquire()
self.test_case.assertEqual(self._log.level, log.INFO)
self.t1_step = 2
except Exception:
import sys
self.exc_info = sys.exc_info()
except Exception as e:
self.exc = e
def listener2(self):
try:
@ -201,9 +200,8 @@ class ConcurrentEventsTest(TestCase, helper.TestHelper):
self.lock2.acquire()
self.test_case.assertEqual(self._log.level, log.DEBUG)
self.t2_step = 2
except Exception:
import sys
self.exc_info = sys.exc_info()
except Exception as e:
self.exc = e
def setUp(self):
self.setup_beets(disk=True)
@ -215,8 +213,8 @@ class ConcurrentEventsTest(TestCase, helper.TestHelper):
dp = self.DummyPlugin(self)
def check_dp_exc():
if dp.exc_info:
raise None.with_traceback(dp.exc_info[2])
if dp.exc:
raise dp.exc
try:
dp.lock1.acquire()

View file

@ -231,13 +231,24 @@ class MockFetchUrl:
return content
def is_lyrics_content_ok(title, text):
"""Compare lyrics text to expected lyrics for given title."""
if not text:
return
keywords = set(LYRICS_TEXTS[google.slugify(title)].split())
words = {x.strip(".?, ") for x in text.lower().split()}
return keywords <= words
class LyricsAssertions:
"""A mixin with lyrics-specific assertions."""
def assertLyricsContentOk(self, title, text, msg=""): # noqa: N802
"""Compare lyrics text to expected lyrics for given title."""
if not text:
return
keywords = set(LYRICS_TEXTS[google.slugify(title)].split())
words = {x.strip(".?, ()") for x in text.lower().split()}
if not keywords <= words:
details = (
f"{keywords!r} is not a subset of {words!r}."
f" Words only in expected set {keywords - words!r},"
f" Words only in result set {words - keywords!r}."
)
self.fail(f"{details} : {msg}")
LYRICS_ROOT_DIR = os.path.join(_common.RSRC, b'lyrics')
@ -255,7 +266,7 @@ class LyricsGoogleBaseTest(unittest.TestCase):
self.skipTest('Beautiful Soup 4 not available')
class LyricsPluginSourcesTest(LyricsGoogleBaseTest):
class LyricsPluginSourcesTest(LyricsGoogleBaseTest, LyricsAssertions):
"""Check that beets google custom search engine sources are correctly
scraped.
"""
@ -325,17 +336,16 @@ class LyricsPluginSourcesTest(LyricsGoogleBaseTest):
os.environ.get('INTEGRATION_TEST', '0') == '1',
'integration testing not enabled')
def test_backend_sources_ok(self):
"""Test default backends with songs known to exist in respective databases.
"""Test default backends with songs known to exist in respective
databases.
"""
errors = []
# Don't test any sources marked as skipped.
sources = [s for s in self.DEFAULT_SOURCES if not s.get("skip", False)]
for s in sources:
res = s['backend'](self.plugin.config, self.plugin._log).fetch(
s['artist'], s['title'])
if not is_lyrics_content_ok(s['title'], res):
errors.append(s['backend'].__name__)
self.assertFalse(errors)
with self.subTest(s['backend'].__name__):
backend = s['backend'](self.plugin.config, self.plugin._log)
res = backend.fetch(s['artist'], s['title'])
self.assertLyricsContentOk(s['title'], res)
@unittest.skipUnless(
os.environ.get('INTEGRATION_TEST', '0') == '1',
@ -351,10 +361,10 @@ class LyricsPluginSourcesTest(LyricsGoogleBaseTest):
res = lyrics.scrape_lyrics_from_html(
raw_backend.fetch_url(url))
self.assertTrue(google.is_lyrics(res), url)
self.assertTrue(is_lyrics_content_ok(s['title'], res), url)
self.assertLyricsContentOk(s['title'], res, url)
class LyricsGooglePluginMachineryTest(LyricsGoogleBaseTest):
class LyricsGooglePluginMachineryTest(LyricsGoogleBaseTest, LyricsAssertions):
"""Test scraping heuristics on a fake html page.
"""
@ -372,8 +382,7 @@ class LyricsGooglePluginMachineryTest(LyricsGoogleBaseTest):
url = self.source['url'] + self.source['path']
res = lyrics.scrape_lyrics_from_html(raw_backend.fetch_url(url))
self.assertTrue(google.is_lyrics(res), url)
self.assertTrue(is_lyrics_content_ok(self.source['title'], res),
url)
self.assertLyricsContentOk(self.source['title'], res, url)
@patch.object(lyrics.Backend, 'fetch_url', MockFetchUrl())
def test_is_page_candidate_exact_match(self):
@ -449,7 +458,7 @@ class GeniusScrapeLyricsFromHtmlTest(GeniusBaseTest):
def test_good_lyrics(self):
"""Ensure we are able to scrape a page with lyrics"""
url = 'https://genius.com/Wu-tang-clan-cream-lyrics'
url = 'https://genius.com/Ttng-chinchilla-lyrics'
mock = MockFetchUrl()
self.assertIsNotNone(genius._scrape_lyrics_from_html(mock(url)))

View file

@ -72,7 +72,8 @@ class PlayPluginTest(unittest.TestCase, TestHelper):
self.run_and_assert(
open_mock, ['title:aNiceTitle'], 'echo other')
@unittest.skipIf(sys.platform, 'win32') # FIXME: fails on windows
# FIXME: fails on windows
@unittest.skipIf(sys.platform == 'win32', 'win32')
def test_relative_to(self, open_mock):
self.config['play']['command'] = 'echo'
self.config['play']['relative_to'] = '/something'

View file

@ -31,7 +31,10 @@ from beets.dbcore.query import (NoneQuery, ParsingError,
InvalidQueryArgumentValueError)
from beets.library import Library, Item
from beets import util
import platform
# Because the absolute path begins with something like C:, we
# can't disambiguate it from an ordinary query.
WIN32_NO_IMPLICIT_PATHS = 'Implicit paths are not supported on Windows'
class TestHelper(helper.TestHelper):
@ -94,16 +97,19 @@ class DummyDataTestCase(_common.TestCase, AssertsMixin):
items[0].album = 'baz'
items[0].year = 2001
items[0].comp = True
items[0].genre = 'rock'
items[1].title = 'baz qux'
items[1].artist = 'two'
items[1].album = 'baz'
items[1].year = 2002
items[1].comp = True
items[1].genre = 'Rock'
items[2].title = 'beets 4 eva'
items[2].artist = 'three'
items[2].album = 'foo'
items[2].year = 2003
items[2].comp = False
items[2].genre = 'Hard Rock'
for item in items:
self.lib.add(item)
self.album = self.lib.add_album(items[:2])
@ -132,6 +138,22 @@ class GetTest(DummyDataTestCase):
results = self.lib.items(q)
self.assert_items_matched(results, ['baz qux'])
def test_get_one_keyed_exact(self):
q = 'genre:=rock'
results = self.lib.items(q)
self.assert_items_matched(results, ['foo bar'])
q = 'genre:=Rock'
results = self.lib.items(q)
self.assert_items_matched(results, ['baz qux'])
q = 'genre:="Hard Rock"'
results = self.lib.items(q)
self.assert_items_matched(results, ['beets 4 eva'])
def test_get_one_keyed_exact_nocase(self):
q = 'genre:=~"hard rock"'
results = self.lib.items(q)
self.assert_items_matched(results, ['beets 4 eva'])
def test_get_one_keyed_regexp(self):
q = 'artist::t.+r'
results = self.lib.items(q)
@ -142,6 +164,16 @@ class GetTest(DummyDataTestCase):
results = self.lib.items(q)
self.assert_items_matched(results, ['beets 4 eva'])
def test_get_one_unkeyed_exact(self):
q = '=rock'
results = self.lib.items(q)
self.assert_items_matched(results, ['foo bar'])
def test_get_one_unkeyed_exact_nocase(self):
q = '=~"hard rock"'
results = self.lib.items(q)
self.assert_items_matched(results, ['beets 4 eva'])
def test_get_one_unkeyed_regexp(self):
q = ':x$'
results = self.lib.items(q)
@ -159,6 +191,11 @@ class GetTest(DummyDataTestCase):
# objects.
self.assert_items_matched(results, [])
def test_get_no_matches_exact(self):
q = 'genre:="hard rock"'
results = self.lib.items(q)
self.assert_items_matched(results, [])
def test_term_case_insensitive(self):
q = 'oNE'
results = self.lib.items(q)
@ -182,6 +219,14 @@ class GetTest(DummyDataTestCase):
results = self.lib.items(q)
self.assert_items_matched(results, ['beets 4 eva'])
def test_keyed_matches_exact_nocase(self):
q = 'genre:=~rock'
results = self.lib.items(q)
self.assert_items_matched(results, [
'foo bar',
'baz qux',
])
def test_unkeyed_term_matches_multiple_columns(self):
q = 'baz'
results = self.lib.items(q)
@ -350,6 +395,16 @@ class MatchTest(_common.TestCase):
q = dbcore.query.SubstringQuery('disc', '6')
self.assertTrue(q.match(self.item))
def test_exact_match_nocase_positive(self):
q = dbcore.query.StringQuery('genre', 'the genre')
self.assertTrue(q.match(self.item))
q = dbcore.query.StringQuery('genre', 'THE GENRE')
self.assertTrue(q.match(self.item))
def test_exact_match_nocase_negative(self):
q = dbcore.query.StringQuery('genre', 'genre')
self.assertFalse(q.match(self.item))
def test_year_match_positive(self):
q = dbcore.query.NumericQuery('year', '1')
self.assertTrue(q.match(self.item))
@ -425,7 +480,8 @@ class PathQueryTest(_common.LibTestCase, TestHelper, AssertsMixin):
results = self.lib.albums(q)
self.assert_albums_matched(results, [])
@unittest.skipIf(sys.platform, 'win32') # FIXME: fails on windows
# FIXME: fails on windows
@unittest.skipIf(sys.platform == 'win32', 'win32')
def test_parent_directory_no_slash(self):
q = 'path:/a'
results = self.lib.items(q)
@ -434,7 +490,8 @@ class PathQueryTest(_common.LibTestCase, TestHelper, AssertsMixin):
results = self.lib.albums(q)
self.assert_albums_matched(results, ['path album'])
@unittest.skipIf(sys.platform, 'win32') # FIXME: fails on windows
# FIXME: fails on windows
@unittest.skipIf(sys.platform == 'win32', 'win32')
def test_parent_directory_with_slash(self):
q = 'path:/a/'
results = self.lib.items(q)
@ -467,6 +524,7 @@ class PathQueryTest(_common.LibTestCase, TestHelper, AssertsMixin):
results = self.lib.albums(q)
self.assert_albums_matched(results, ['path album'])
@unittest.skipIf(sys.platform == 'win32', WIN32_NO_IMPLICIT_PATHS)
def test_slashed_query_matches_path(self):
q = '/a/b'
results = self.lib.items(q)
@ -475,7 +533,7 @@ class PathQueryTest(_common.LibTestCase, TestHelper, AssertsMixin):
results = self.lib.albums(q)
self.assert_albums_matched(results, ['path album'])
@unittest.skip('unfixed (#1865)')
@unittest.skipIf(sys.platform == 'win32', WIN32_NO_IMPLICIT_PATHS)
def test_path_query_in_or_query(self):
q = '/a/b , /a/b'
results = self.lib.items(q)
@ -595,12 +653,8 @@ class PathQueryTest(_common.LibTestCase, TestHelper, AssertsMixin):
self.assertFalse(is_path('foo:bar/'))
self.assertFalse(is_path('foo:/bar'))
@unittest.skipIf(sys.platform == 'win32', WIN32_NO_IMPLICIT_PATHS)
def test_detect_absolute_path(self):
if platform.system() == 'Windows':
# Because the absolute path begins with something like C:, we
# can't disambiguate it from an ordinary query.
self.skipTest('Windows absolute paths do not work as queries')
# Don't patch `os.path.exists`; we'll actually create a file when
# it exists.
self.patcher_exists.stop()

View file

@ -41,52 +41,77 @@ def reset_replaygain(item):
item['rg_track_gain'] = None
item['rg_album_gain'] = None
item['rg_album_gain'] = None
item['r128_track_gain'] = None
item['r128_album_gain'] = None
item.write()
item.store()
item.store()
item.store()
class GstBackendMixin():
backend = 'gstreamer'
has_r128_support = True
def test_backend(self):
"""Check whether the backend actually has all required functionality.
"""
try:
# Check if required plugins can be loaded by instantiating a
# GStreamerBackend (via its .__init__).
config['replaygain']['targetlevel'] = 89
GStreamerBackend(config['replaygain'], None)
except FatalGstreamerPluginReplayGainError as e:
# Skip the test if plugins could not be loaded.
self.skipTest(str(e))
class CmdBackendMixin():
backend = 'command'
has_r128_support = False
def test_backend(self):
"""Check whether the backend actually has all required functionality.
"""
pass
class FfmpegBackendMixin():
backend = 'ffmpeg'
has_r128_support = True
def test_backend(self):
"""Check whether the backend actually has all required functionality.
"""
pass
class ReplayGainCliTestBase(TestHelper):
def setUp(self):
# Implemented by Mixins, see above. This may decide to skip the test.
self.test_backend()
self.setup_beets(disk=True)
self.config['replaygain']['backend'] = self.backend
try:
self.load_plugins('replaygain')
except Exception:
import sys
# store exception info so an error in teardown does not swallow it
exc_info = sys.exc_info()
try:
self.teardown_beets()
self.unload_plugins()
except Exception:
# if load_plugins() failed then setup is incomplete and
# teardown operations may fail. In particular # {Item,Album}
# may not have the _original_types attribute in unload_plugins
pass
raise None.with_traceback(exc_info[2])
self.teardown_beets()
self.unload_plugins()
album = self.add_album_fixture(2)
def _add_album(self, *args, **kwargs):
album = self.add_album_fixture(*args, **kwargs)
for item in album.items():
reset_replaygain(item)
return album
def tearDown(self):
self.teardown_beets()
self.unload_plugins()
def _reset_replaygain(self, item):
item['rg_track_peak'] = None
item['rg_track_gain'] = None
item['rg_album_peak'] = None
item['rg_album_gain'] = None
item['r128_track_gain'] = None
item['r128_album_gain'] = None
item.write()
item.store()
def test_cli_saves_track_gain(self):
self._add_album(2)
for item in self.lib.items():
self.assertIsNone(item.rg_track_peak)
self.assertIsNone(item.rg_track_gain)
@ -112,15 +137,85 @@ class ReplayGainCliTestBase(TestHelper):
mediafile.rg_track_gain, item.rg_track_gain, places=2)
def test_cli_skips_calculated_tracks(self):
album_rg = self._add_album(1)
item_rg = album_rg.items()[0]
if self.has_r128_support:
album_r128 = self._add_album(1, ext="opus")
item_r128 = album_r128.items()[0]
self.run_command('replaygain')
item = self.lib.items()[0]
peak = item.rg_track_peak
item.rg_track_gain = 0.0
item_rg.load()
self.assertIsNotNone(item_rg.rg_track_gain)
self.assertIsNotNone(item_rg.rg_track_peak)
self.assertIsNone(item_rg.r128_track_gain)
item_rg.rg_track_gain += 1.0
item_rg.rg_track_peak += 1.0
item_rg.store()
rg_track_gain = item_rg.rg_track_gain
rg_track_peak = item_rg.rg_track_peak
if self.has_r128_support:
item_r128.load()
self.assertIsNotNone(item_r128.r128_track_gain)
self.assertIsNone(item_r128.rg_track_gain)
self.assertIsNone(item_r128.rg_track_peak)
item_r128.r128_track_gain += 1.0
item_r128.store()
r128_track_gain = item_r128.r128_track_gain
self.run_command('replaygain')
self.assertEqual(item.rg_track_gain, 0.0)
self.assertEqual(item.rg_track_peak, peak)
item_rg.load()
self.assertEqual(item_rg.rg_track_gain, rg_track_gain)
self.assertEqual(item_rg.rg_track_peak, rg_track_peak)
if self.has_r128_support:
item_r128.load()
self.assertEqual(item_r128.r128_track_gain, r128_track_gain)
def test_cli_does_not_skip_wrong_tag_type(self):
"""Check that items that have tags of the wrong type won't be skipped.
"""
if not self.has_r128_support:
# This test is a lot less interesting if the backend cannot write
# both tag types.
self.skipTest("r128 tags for opus not supported on backend {}"
.format(self.backend))
album_rg = self._add_album(1)
item_rg = album_rg.items()[0]
album_r128 = self._add_album(1, ext="opus")
item_r128 = album_r128.items()[0]
item_rg.r128_track_gain = 0.0
item_rg.store()
item_r128.rg_track_gain = 0.0
item_r128.rg_track_peak = 42.0
item_r128.store()
self.run_command('replaygain')
item_rg.load()
item_r128.load()
self.assertIsNotNone(item_rg.rg_track_gain)
self.assertIsNotNone(item_rg.rg_track_peak)
# FIXME: Should the plugin null this field?
# self.assertIsNone(item_rg.r128_track_gain)
self.assertIsNotNone(item_r128.r128_track_gain)
# FIXME: Should the plugin null these fields?
# self.assertIsNone(item_r128.rg_track_gain)
# self.assertIsNone(item_r128.rg_track_peak)
def test_cli_saves_album_gain_to_file(self):
self._add_album(2)
for item in self.lib.items():
mediafile = MediaFile(item.path)
self.assertIsNone(mediafile.rg_album_peak)
@ -143,13 +238,11 @@ class ReplayGainCliTestBase(TestHelper):
self.assertNotEqual(max(peaks), 0.0)
def test_cli_writes_only_r128_tags(self):
if self.backend == "command":
# opus not supported by command backend
return
if not self.has_r128_support:
self.skipTest("r128 tags for opus not supported on backend {}"
.format(self.backend))
album = self.add_album_fixture(2, ext="opus")
for item in album.items():
self._reset_replaygain(item)
album = self._add_album(2, ext="opus")
self.run_command('replaygain', '-a')
@ -162,51 +255,126 @@ class ReplayGainCliTestBase(TestHelper):
self.assertIsNotNone(mediafile.r128_track_gain)
self.assertIsNotNone(mediafile.r128_album_gain)
def test_target_level_has_effect(self):
item = self.lib.items()[0]
def test_targetlevel_has_effect(self):
album = self._add_album(1)
item = album.items()[0]
def analyse(target_level):
self.config['replaygain']['targetlevel'] = target_level
self._reset_replaygain(item)
self.run_command('replaygain', '-f')
mediafile = MediaFile(item.path)
return mediafile.rg_track_gain
item.load()
return item.rg_track_gain
gain_relative_to_84 = analyse(84)
gain_relative_to_89 = analyse(89)
# check that second calculation did work
if gain_relative_to_84 is not None:
self.assertIsNotNone(gain_relative_to_89)
self.assertNotEqual(gain_relative_to_84, gain_relative_to_89)
def test_r128_targetlevel_has_effect(self):
if not self.has_r128_support:
self.skipTest("r128 tags for opus not supported on backend {}"
.format(self.backend))
album = self._add_album(1, ext="opus")
item = album.items()[0]
def analyse(target_level):
self.config['replaygain']['r128_targetlevel'] = target_level
self.run_command('replaygain', '-f')
item.load()
return item.r128_track_gain
gain_relative_to_84 = analyse(84)
gain_relative_to_89 = analyse(89)
self.assertNotEqual(gain_relative_to_84, gain_relative_to_89)
def test_per_disc(self):
# Use the per_disc option and add a little more concurrency.
album = self._add_album(track_count=4, disc_count=3)
self.config['replaygain']['per_disc'] = True
self.run_command('replaygain', '-a')
# FIXME: Add fixtures with known track/album gain (within a suitable
# tolerance) so that we can actually check per-disc operation here.
for item in album.items():
self.assertIsNotNone(item.rg_track_gain)
self.assertIsNotNone(item.rg_album_gain)
@unittest.skipIf(not GST_AVAILABLE, 'gstreamer cannot be found')
class ReplayGainGstCliTest(ReplayGainCliTestBase, unittest.TestCase):
backend = 'gstreamer'
def setUp(self):
try:
# Check if required plugins can be loaded by instantiating a
# GStreamerBackend (via its .__init__).
config['replaygain']['targetlevel'] = 89
GStreamerBackend(config['replaygain'], None)
except FatalGstreamerPluginReplayGainError as e:
# Skip the test if plugins could not be loaded.
self.skipTest(str(e))
super().setUp()
class ReplayGainGstCliTest(ReplayGainCliTestBase, unittest.TestCase,
GstBackendMixin):
pass
@unittest.skipIf(not GAIN_PROG_AVAILABLE, 'no *gain command found')
class ReplayGainCmdCliTest(ReplayGainCliTestBase, unittest.TestCase):
backend = 'command'
class ReplayGainCmdCliTest(ReplayGainCliTestBase, unittest.TestCase,
CmdBackendMixin):
pass
@unittest.skipIf(not FFMPEG_AVAILABLE, 'ffmpeg cannot be found')
class ReplayGainFfmpegTest(ReplayGainCliTestBase, unittest.TestCase):
backend = 'ffmpeg'
class ReplayGainFfmpegCliTest(ReplayGainCliTestBase, unittest.TestCase,
FfmpegBackendMixin):
pass
class ImportTest(TestHelper):
threaded = False
def setUp(self):
# Implemented by Mixins, see above. This may decide to skip the test.
self.test_backend()
self.setup_beets(disk=True)
self.config['threaded'] = self.threaded
self.config['replaygain']['backend'] = self.backend
try:
self.load_plugins('replaygain')
except Exception:
self.teardown_beets()
self.unload_plugins()
self.importer = self.create_importer()
def tearDown(self):
self.unload_plugins()
self.teardown_beets()
def test_import_converted(self):
self.importer.run()
for item in self.lib.items():
# FIXME: Add fixtures with known track/album gain (within a
# suitable tolerance) so that we can actually check correct
# operation here.
self.assertIsNotNone(item.rg_track_gain)
self.assertIsNotNone(item.rg_album_gain)
@unittest.skipIf(not GST_AVAILABLE, 'gstreamer cannot be found')
class ReplayGainGstImportTest(ImportTest, unittest.TestCase,
GstBackendMixin):
pass
@unittest.skipIf(not GAIN_PROG_AVAILABLE, 'no *gain command found')
class ReplayGainCmdImportTest(ImportTest, unittest.TestCase,
CmdBackendMixin):
pass
@unittest.skipIf(not FFMPEG_AVAILABLE, 'ffmpeg cannot be found')
class ReplayGainFfmpegImportTest(ImportTest, unittest.TestCase,
FfmpegBackendMixin):
pass
@unittest.skipIf(not FFMPEG_AVAILABLE, 'ffmpeg cannot be found')
class ReplayGainFfmpegThreadedImportTest(ImportTest, unittest.TestCase,
FfmpegBackendMixin):
threaded = True
def suite():

View file

@ -10,7 +10,7 @@ from beets import config
from beets.library import Item
from beetsplug import spotify
from test.helper import TestHelper
from six.moves.urllib.parse import parse_qs, urlparse
from urllib.parse import parse_qs, urlparse
class ArgumentsMock:
@ -127,6 +127,68 @@ class SpotifyPluginTest(_common.TestCase, TestHelper):
self.assertIn('album:Despicable Me 2', query)
self.assertEqual(params['type'], ['track'])
@responses.activate
def test_track_for_id(self):
"""Tests if plugin is able to fetch a track by its Spotify ID"""
# Mock the Spotify 'Get Track' call
json_file = os.path.join(
_common.RSRC, b'spotify', b'track_info.json'
)
with open(json_file, 'rb') as f:
response_body = f.read()
responses.add(
responses.GET,
spotify.SpotifyPlugin.track_url + '6NPVjNh8Jhru9xOmyQigds',
body=response_body,
status=200,
content_type='application/json',
)
# Mock the Spotify 'Get Album' call
json_file = os.path.join(
_common.RSRC, b'spotify', b'album_info.json'
)
with open(json_file, 'rb') as f:
response_body = f.read()
responses.add(
responses.GET,
spotify.SpotifyPlugin.album_url + '5l3zEmMrOhOzG8d8s83GOL',
body=response_body,
status=200,
content_type='application/json',
)
# Mock the Spotify 'Search' call
json_file = os.path.join(
_common.RSRC, b'spotify', b'track_request.json'
)
with open(json_file, 'rb') as f:
response_body = f.read()
responses.add(
responses.GET,
spotify.SpotifyPlugin.search_url,
body=response_body,
status=200,
content_type='application/json',
)
track_info = self.spotify.track_for_id('6NPVjNh8Jhru9xOmyQigds')
item = Item(
mb_trackid=track_info.track_id,
albumartist=track_info.artist,
title=track_info.title,
length=track_info.length
)
item.add(self.lib)
results = self.spotify._match_library_tracks(self.lib, "Happy")
self.assertEqual(1, len(results))
self.assertEqual("6NPVjNh8Jhru9xOmyQigds", results[0]['id'])
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)

View file

@ -23,7 +23,6 @@ from test.helper import TestHelper
from beets.util import bytestring_path
from beetsplug.thumbnails import (ThumbnailsPlugin, NORMAL_DIR, LARGE_DIR,
write_metadata_im, write_metadata_pil,
PathlibURI, GioURI)
@ -34,22 +33,11 @@ class ThumbnailsTest(unittest.TestCase, TestHelper):
def tearDown(self):
self.teardown_beets()
@patch('beetsplug.thumbnails.util')
def test_write_metadata_im(self, mock_util):
metadata = {"a": "A", "b": "B"}
write_metadata_im("foo", metadata)
try:
command = "convert foo -set a A -set b B foo".split(' ')
mock_util.command_output.assert_called_once_with(command)
except AssertionError:
command = "convert foo -set b B -set a A foo".split(' ')
mock_util.command_output.assert_called_once_with(command)
@patch('beetsplug.thumbnails.ArtResizer')
@patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok')
@patch('beetsplug.thumbnails.os.stat')
def test_add_tags(self, mock_stat, _):
def test_add_tags(self, mock_stat, _, mock_artresizer):
plugin = ThumbnailsPlugin()
plugin.write_metadata = Mock()
plugin.get_uri = Mock(side_effect={b"/path/to/cover":
"COVER_URI"}.__getitem__)
album = Mock(artpath=b"/path/to/cover")
@ -59,24 +47,25 @@ class ThumbnailsTest(unittest.TestCase, TestHelper):
metadata = {"Thumb::URI": "COVER_URI",
"Thumb::MTime": "12345"}
plugin.write_metadata.assert_called_once_with(b"/path/to/thumbnail",
metadata)
mock_artresizer.shared.write_metadata.assert_called_once_with(
b"/path/to/thumbnail",
metadata,
)
mock_stat.assert_called_once_with(album.artpath)
@patch('beetsplug.thumbnails.os')
@patch('beetsplug.thumbnails.ArtResizer')
@patch('beetsplug.thumbnails.get_im_version')
@patch('beetsplug.thumbnails.get_pil_version')
@patch('beetsplug.thumbnails.GioURI')
def test_check_local_ok(self, mock_giouri, mock_pil, mock_im,
mock_artresizer, mock_os):
def test_check_local_ok(self, mock_giouri, mock_artresizer, mock_os):
# test local resizing capability
mock_artresizer.shared.local = False
mock_artresizer.shared.can_write_metadata = False
plugin = ThumbnailsPlugin()
self.assertFalse(plugin._check_local_ok())
# test dirs creation
mock_artresizer.shared.local = True
mock_artresizer.shared.can_write_metadata = True
def exists(path):
if path == NORMAL_DIR:
@ -91,20 +80,14 @@ class ThumbnailsTest(unittest.TestCase, TestHelper):
# test metadata writer function
mock_os.path.exists = lambda _: True
mock_pil.return_value = False
mock_im.return_value = False
with self.assertRaises(AssertionError):
mock_artresizer.shared.local = True
mock_artresizer.shared.can_write_metadata = False
with self.assertRaises(RuntimeError):
ThumbnailsPlugin()
mock_pil.return_value = True
self.assertEqual(ThumbnailsPlugin().write_metadata, write_metadata_pil)
mock_im.return_value = True
self.assertEqual(ThumbnailsPlugin().write_metadata, write_metadata_im)
mock_pil.return_value = False
self.assertEqual(ThumbnailsPlugin().write_metadata, write_metadata_im)
mock_artresizer.shared.local = True
mock_artresizer.shared.can_write_metadata = True
self.assertTrue(ThumbnailsPlugin()._check_local_ok())
# test URI getter function

View file

@ -748,6 +748,40 @@ class ImportTest(_common.TestCase):
self.assertRaises(ui.UserError, commands.import_files, None, [],
None)
def test_parse_paths_from_logfile(self):
if os.path.__name__ == 'ntpath':
logfile_content = (
"import started Wed Jun 15 23:08:26 2022\n"
"asis C:\\music\\Beatles, The\\The Beatles; C:\\music\\Beatles, The\\The Beatles\\CD 01; C:\\music\\Beatles, The\\The Beatles\\CD 02\n" # noqa: E501
"duplicate-replace C:\\music\\Bill Evans\\Trio '65\n"
"skip C:\\music\\Michael Jackson\\Bad\n"
"skip C:\\music\\Soulwax\\Any Minute Now\n"
)
expected_paths = [
"C:\\music\\Beatles, The\\The Beatles",
"C:\\music\\Michael Jackson\\Bad",
"C:\\music\\Soulwax\\Any Minute Now",
]
else:
logfile_content = (
"import started Wed Jun 15 23:08:26 2022\n"
"asis /music/Beatles, The/The Beatles; /music/Beatles, The/The Beatles/CD 01; /music/Beatles, The/The Beatles/CD 02\n" # noqa: E501
"duplicate-replace /music/Bill Evans/Trio '65\n"
"skip /music/Michael Jackson/Bad\n"
"skip /music/Soulwax/Any Minute Now\n"
)
expected_paths = [
"/music/Beatles, The/The Beatles",
"/music/Michael Jackson/Bad",
"/music/Soulwax/Any Minute Now",
]
logfile = os.path.join(self.temp_dir, b"logfile.log")
with open(logfile, mode="w") as fp:
fp.write(logfile_content)
actual_paths = list(commands._paths_from_logfile(logfile))
self.assertEqual(actual_paths, expected_paths)
@_common.slow_test()
class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions):
@ -937,7 +971,8 @@ class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions):
# '--config', cli_overwrite_config_path, 'test')
# self.assertEqual(config['anoption'].get(), 'cli overwrite')
@unittest.skipIf(sys.platform, 'win32') # FIXME: fails on windows
# FIXME: fails on windows
@unittest.skipIf(sys.platform == 'win32', 'win32')
def test_cli_config_paths_resolve_relative_to_user_dir(self):
cli_config_path = os.path.join(self.temp_dir, b'config.yaml')
with open(cli_config_path, 'w') as file:

View file

@ -15,11 +15,16 @@
"""Test module for file ui/__init__.py
"""
import os
import shutil
import unittest
from test import _common
from random import random
from copy import deepcopy
from beets import ui
from test import _common
from test.helper import control_stdin
from beets import config
class InputMethodsTest(_common.TestCase):
@ -121,8 +126,39 @@ class InitTest(_common.LibTestCase):
self.assertEqual(h, ui.human_seconds(i))
class ParentalDirCreation(_common.TestCase):
def test_create_yes(self):
non_exist_path = _common.util.py3_path(os.path.join(
self.temp_dir, b'nonexist', str(random()).encode()))
# Deepcopy instead of recovering because exceptions might
# occcur; wish I can use a golang defer here.
test_config = deepcopy(config)
test_config['library'] = non_exist_path
with control_stdin('y'):
ui._open_library(test_config)
def test_create_no(self):
non_exist_path_parent = _common.util.py3_path(
os.path.join(self.temp_dir, b'nonexist'))
non_exist_path = _common.util.py3_path(os.path.join(
non_exist_path_parent.encode(), str(random()).encode()))
test_config = deepcopy(config)
test_config['library'] = non_exist_path
with control_stdin('n'):
try:
ui._open_library(test_config)
except ui.UserError:
if os.path.exists(non_exist_path_parent):
shutil.rmtree(non_exist_path_parent)
raise OSError("Parent directories should not be created.")
else:
raise OSError("Parent directories should not be created.")
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')

View file

@ -182,6 +182,21 @@ class PathTruncationTest(_common.TestCase):
self.assertEqual(p, 'abcde/f.ext')
class ConfitDeprecationTest(_common.TestCase):
def test_confit_deprecattion_warning_origin(self):
"""Test that importing `confit` raises a warning.
In addition, ensure that the warning originates from the actual
import statement, not the `confit` module.
"""
# See https://github.com/beetbox/beets/discussions/4024
with self.assertWarns(UserWarning) as w:
import beets.util.confit # noqa: F401
self.assertIn(__file__, w.filename)
self.assertNotIn("confit.py", w.filename)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)

View file

@ -667,6 +667,16 @@ class WebPluginTest(_common.LibTestCase):
# Remove the item
self.lib.get_item(item_id).remove()
def test_get_item_file(self):
ipath = os.path.join(self.temp_dir, b'testfile2.mp3')
shutil.copy(os.path.join(_common.RSRC, b'full.mp3'), ipath)
self.assertTrue(os.path.exists(ipath))
item_id = self.lib.add(Item.from_path(ipath))
response = self.client.get('/item/' + str(item_id) + '/file')
self.assertEqual(response.status_code, 200)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)

View file

@ -17,14 +17,15 @@ files = beets beetsplug beet test setup.py docs
deps =
{test,cov}: {[_test]deps}
lint: {[_lint]deps}
passenv = INTEGRATION_TEST
commands =
test: python -bb -m pytest -rs {posargs}
cov: coverage run -m pytest -rs {posargs}
lint: python -m flake8 {posargs} {[_lint]files}
[testenv:docs]
basepython = python3.9
deps = sphinx
basepython = python3.10
deps = sphinx<4.4.0
commands = sphinx-build -W -q -b html docs {envtmpdir}/html {posargs}
# checks all links in the docs