mirror of https://github.com/beetbox/beets.git

Merge branch 'master' into fix_#2873

Commit 51d922df27
60 changed files with 3230 additions and 470 deletions
.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 8 lines)

@@ -0,0 +1,8 @@
blank_issues_enabled: false
contact_links:
  - name: 💡 Have an idea for a new feature?
    url: https://github.com/beetbox/beets/discussions
    about: Create a new idea discussion!
  - name: 🙇 Need help with beets?
    url: https://github.com/beetbox/beets/discussions
    about: Create a new help discussion if it hasn't been asked before!
.github/ISSUE_TEMPLATE/feature-request.md (vendored, 39 lines changed)

@@ -1,26 +1,35 @@
---
name: "\U0001F680 Feature request"
about: Suggest a new idea for beets
about: "Formalize a feature request from GitHub Discussions"

---

### Use case

I'm trying to use beets to...


### Solution
<!--
Do you have a proposal for how beets should work?

Try to be as specific as possible—for example, you could propose the name for
a new command-line option or refer to the particular ID3 frame you wish
were supported.
If you're landing here as a user, we ask you bring up your idea in the
Discussions (https://github.com/beetbox/beets/discussions).
-->

### Proposed solution

<!-- What is the solution to this feature request? -->

### Objective

<!-- Ref to Discussions -->

#### Goals

<!-- What is the purpose of the feature request? -->

#### Non-goals

### Alternatives
<!--
Have you tried using an existing plugin to do something similar?
Is there any current feature that _almost_ does what you need?
What else could be accomplished with this feature request, but is currently out
of scope?
-->

#### Anti-goals

<!--
What could go wrong (side effects) if we implement this feature request?
-->
.github/workflows/ci.yaml (vendored, 41 lines changed)

@@ -1,12 +1,18 @@
name: ci
on: [push, pull_request]
on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
jobs:
  test:
    runs-on: ${{ matrix.platform }}
    strategy:
      matrix:
        platform: [ubuntu-latest]
        python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9-dev]
        python-version: [2.7, 3.6, 3.7, 3.8, 3.9, 3.10-dev]

    env:
      PY_COLORS: 1

@@ -24,18 +30,31 @@ jobs:
          python -m pip install --upgrade pip
          python -m pip install tox sphinx

      - name: Test with tox
        if: matrix.python-version != '3.8'
      - name: Install optional dependencies
        run: |
          sudo apt-get install ffmpeg  # For replaygain

      - name: Test older Python versions with tox
        if: matrix.python-version != '3.9' && matrix.python-version != '3.10-dev'
        run: |
          tox -e py-test

      - name: Test with tox and get coverage
        if: matrix.python-version == '3.8'
      - name: Test latest Python version with tox and get coverage
        if: matrix.python-version == '3.9'
        run: |
          tox -vv -e py-cov

      - name: Test nightly Python version with tox
        if: matrix.python-version == '3.10-dev'
        # continue-on-error is not ideal since it doesn't give a visible
        # warning, but there doesn't seem to be anything better:
        # https://github.com/actions/toolkit/issues/399
        continue-on-error: true
        run: |
          tox -e py-test

      - name: Upload code coverage
        if: matrix.python-version == '3.8'
        if: matrix.python-version == '3.9'
        run: |
          pip install codecov || true
          codecov || true

@@ -49,10 +68,10 @@ jobs:
    steps:
      - uses: actions/checkout@v2

      - name: Set up Python 2.7
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 2.7
          python-version: 3.9

      - name: Install base dependencies
        run: |

@@ -71,10 +90,10 @@ jobs:
    steps:
      - uses: actions/checkout@v2

      - name: Set up Python 3.8
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.8
          python-version: 3.9

      - name: Install base dependencies
        run: |
README.rst (21 lines changed)

@@ -81,8 +81,8 @@ Install
-------

You can install beets by typing ``pip install beets``.
Beets has also been packaged in the `software repositories`_ of several distributions.
Check out the `Getting Started`_ guide for more information.
Beets has also been packaged in the `software repositories`_ of several
distributions. Check out the `Getting Started`_ guide for more information.

.. _Getting Started: https://beets.readthedocs.org/page/guides/main.html
.. _software repositories: https://repology.org/project/beets/versions

@@ -90,7 +90,9 @@ Check out the `Getting Started`_ guide for more information.
Contribute
----------

Thank you for considering contributing to ``beets``! Whether you're a programmer or not, you should be able to find all the info you need at `CONTRIBUTING.rst`_.
Thank you for considering contributing to ``beets``! Whether you're a
programmer or not, you should be able to find all the info you need at
`CONTRIBUTING.rst`_.

.. _CONTRIBUTING.rst: https://github.com/beetbox/beets/blob/master/CONTRIBUTING.rst

@@ -105,12 +107,19 @@ news and updates.

Contact
-------
* Encountered a bug you'd like to report or have an idea for a new feature? Check out our `issue tracker`_! If your issue or feature hasn't already been reported, please `open a new ticket`_ and we'll be in touch with you shortly. If you'd like to vote on a feature/bug, simply give a :+1: on issues you'd like to see prioritized over others.
* Need help/support, would like to start a discussion, or would just like to introduce yourself to the team? Check out our `forums`_!
* Encountered a bug you'd like to report? Check out our `issue tracker`_!
* If your issue hasn't already been reported, please `open a new ticket`_
  and we'll be in touch with you shortly.
* If you'd like to vote on a feature/bug, simply give a :+1: on issues
  you'd like to see prioritized over others.
* Need help/support, would like to start a discussion, have an idea for a new
  feature, or would just like to introduce yourself to the team? Check out
  `GitHub Discussions`_ or `Discourse`_!

.. _GitHub Discussions: https://github.com/beetbox/beets/discussions
.. _issue tracker: https://github.com/beetbox/beets/issues
.. _open a new ticket: https://github.com/beetbox/beets/issues/new/choose
.. _forums: https://discourse.beets.io/
.. _Discourse: https://discourse.beets.io/

Authors
-------
beets/autotag/mb.py

@@ -23,6 +23,7 @@ import traceback
from six.moves.urllib.parse import urljoin

from beets import logging
from beets import plugins
import beets.autotag.hooks
import beets
from beets import util

@@ -70,14 +71,14 @@ log = logging.getLogger('beets')
RELEASE_INCLUDES = ['artists', 'media', 'recordings', 'release-groups',
                    'labels', 'artist-credits', 'aliases',
                    'recording-level-rels', 'work-rels',
                    'work-level-rels', 'artist-rels']
                    'work-level-rels', 'artist-rels', 'isrcs']
BROWSE_INCLUDES = ['artist-credits', 'work-rels',
                   'artist-rels', 'recording-rels', 'release-rels']
if "work-level-rels" in musicbrainzngs.VALID_BROWSE_INCLUDES['recording']:
    BROWSE_INCLUDES.append("work-level-rels")
BROWSE_CHUNKSIZE = 100
BROWSE_MAXTRACKS = 500
TRACK_INCLUDES = ['artists', 'aliases']
TRACK_INCLUDES = ['artists', 'aliases', 'isrcs']
if 'work-level-rels' in musicbrainzngs.VALID_INCLUDES['recording']:
    TRACK_INCLUDES += ['work-level-rels', 'artist-rels']
if 'genres' in musicbrainzngs.VALID_INCLUDES['recording']:

@@ -97,7 +98,11 @@ def configure():
    from the beets configuration. This should be called at startup.
    """
    hostname = config['musicbrainz']['host'].as_str()
    musicbrainzngs.set_hostname(hostname)
    https = config['musicbrainz']['https'].get(bool)
    # Only call set_hostname when a custom server is configured, since
    # musicbrainz-ngs connects to musicbrainz.org with HTTPS by default.
    if hostname != "musicbrainz.org":
        musicbrainzngs.set_hostname(hostname, https)
    musicbrainzngs.set_rate_limit(
        config['musicbrainz']['ratelimit_interval'].as_number(),
        config['musicbrainz']['ratelimit'].get(int),

@@ -223,6 +228,11 @@ def track_info(recording, index=None, medium=None, medium_index=None,
    if recording.get('length'):
        info.length = int(recording['length']) / (1000.0)

    info.trackdisambig = recording.get('disambiguation')

    if recording.get('isrc-list'):
        info.isrc = ';'.join(recording['isrc-list'])

    lyricist = []
    composer = []
    composer_sort = []

@@ -259,6 +269,11 @@ def track_info(recording, index=None, medium=None, medium_index=None,
    if arranger:
        info.arranger = u', '.join(arranger)

    # Supplementary fields provided by plugins
    extra_trackdatas = plugins.send('mb_track_extract', data=recording)
    for extra_trackdata in extra_trackdatas:
        info.update(extra_trackdata)

    info.decode()
    return info

@@ -447,6 +462,10 @@ def album_info(release):
    if config['musicbrainz']['genres'] and genres:
        info.genre = ';'.join(g['name'] for g in genres)

    extra_albumdatas = plugins.send('mb_album_extract', data=release)
    for extra_albumdata in extra_albumdatas:
        info.update(extra_albumdata)

    info.decode()
    return info
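A minimal plugin sketch (hypothetical plugin and field names) for the new
`mb_track_extract` / `mb_album_extract` events sent above; every dict a
handler returns is merged into the TrackInfo or AlbumInfo via info.update():

    from beets.plugins import BeetsPlugin

    class MBExtraData(BeetsPlugin):
        """Copy an extra field off the raw MusicBrainz recording dict."""

        def __init__(self):
            super(MBExtraData, self).__init__()
            self.register_listener('mb_track_extract', self.track_extract)

        def track_extract(self, data):
            # `data` is the musicbrainzngs recording dict passed via
            # plugins.send('mb_track_extract', data=recording).
            return {'recording_title': data.get('title', u'')}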
beets/config_default.yaml

@@ -102,6 +102,7 @@ statefile: state.pickle

musicbrainz:
    host: musicbrainz.org
    https: no
    ratelimit: 1
    ratelimit_interval: 1.0
    searchlimit: 5
beets/dbcore/db.py

@@ -52,14 +52,24 @@ class FormattedMapping(Mapping):
    The accessor `mapping[key]` returns the formatted version of
    `model[key]` as a unicode string.

    The `included_keys` parameter allows filtering the fields that are
    returned. By default all fields are returned. Limiting to specific keys can
    avoid expensive per-item database queries.

    If `for_path` is true, all path separators in the formatted values
    are replaced.
    """

    def __init__(self, model, for_path=False):
    ALL_KEYS = '*'

    def __init__(self, model, included_keys=ALL_KEYS, for_path=False):
        self.for_path = for_path
        self.model = model
        self.model_keys = model.keys(True)
        if included_keys == self.ALL_KEYS:
            # Performance note: this triggers a database query.
            self.model_keys = self.model.keys(True)
        else:
            self.model_keys = included_keys

    def __getitem__(self, key):
        if key in self.model_keys:

@@ -257,6 +267,11 @@ class Model(object):
    value is the same as the old value (e.g., `o.f = o.f`).
    """

    _revision = -1
    """A revision number from when the model was loaded from or written
    to the database.
    """

    @classmethod
    def _getters(cls):
        """Return a mapping from field names to getter functions.

@@ -309,9 +324,11 @@ class Model(object):

    def clear_dirty(self):
        """Mark all fields as *clean* (i.e., not needing to be stored to
        the database).
        the database). Also update the revision.
        """
        self._dirty = set()
        if self._db:
            self._revision = self._db.revision

    def _check_db(self, need_id=True):
        """Ensure that this object is associated with a database row: it

@@ -351,9 +368,9 @@ class Model(object):
        """
        return cls._fields.get(key) or cls._types.get(key) or types.DEFAULT

    def __getitem__(self, key):
        """Get the value for a field. Raise a KeyError if the field is
        not available.
    def _get(self, key, default=None, raise_=False):
        """Get the value for a field, or `default`. Alternatively,
        raise a KeyError if the field is not available.
        """
        getters = self._getters()
        if key in getters:  # Computed.

@@ -365,8 +382,18 @@ class Model(object):
            return self._type(key).null
        elif key in self._values_flex:  # Flexible.
            return self._values_flex[key]
        else:
        elif raise_:
            raise KeyError(key)
        else:
            return default

    get = _get

    def __getitem__(self, key):
        """Get the value for a field. Raise a KeyError if the field is
        not available.
        """
        return self._get(key, raise_=True)

    def _setitem(self, key, value):
        """Assign the value for a field, return whether new and old value

@@ -441,19 +468,10 @@ class Model(object):
        for key in self:
            yield key, self[key]

    def get(self, key, default=None):
        """Get the value for a given key or `default` if it does not
        exist.
        """
        if key in self:
            return self[key]
        else:
            return default

    def __contains__(self, key):
        """Determine whether `key` is an attribute on this object.
        """
        return key in self.keys(True)
        return key in self.keys(computed=True)

    def __iter__(self):
        """Iterate over the available field names (excluding computed

@@ -538,8 +556,14 @@ class Model(object):

    def load(self):
        """Refresh the object's metadata from the library database.

        The database is only queried when a transaction has been
        committed since the item was last loaded.
        """
        self._check_db()
        if not self._dirty and self._db.revision == self._revision:
            # Exit early
            return
        stored_obj = self._db._get(type(self), self.id)
        assert stored_obj is not None, u"object {0} not in DB".format(self.id)
        self._values_fixed = LazyConvertDict(self)

@@ -590,11 +614,11 @@ class Model(object):

    _formatter = FormattedMapping

    def formatted(self, for_path=False):
    def formatted(self, included_keys=_formatter.ALL_KEYS, for_path=False):
        """Get a mapping containing all values on this object formatted
        as human-readable unicode strings.
        """
        return self._formatter(self, for_path)
        return self._formatter(self, included_keys, for_path)

    def evaluate_template(self, template, for_path=False):
        """Evaluate a template (a string or a `Template` object) using

@@ -604,7 +628,7 @@ class Model(object):
        # Perform substitution.
        if isinstance(template, six.string_types):
            template = functemplate.template(template)
        return template.substitute(self.formatted(for_path),
        return template.substitute(self.formatted(for_path=for_path),
                                   self._template_funcs())

    # Parsing.

@@ -714,10 +738,10 @@ class Results(object):
    def _get_indexed_flex_attrs(self):
        """Index flexible attributes by the entity id they belong to.
        """
        flex_values = dict()
        flex_values = {}
        for row in self.flex_rows:
            if row['entity_id'] not in flex_values:
                flex_values[row['entity_id']] = dict()
                flex_values[row['entity_id']] = {}

            flex_values[row['entity_id']][row['key']] = row['value']

@@ -794,6 +818,12 @@ class Transaction(object):
    """A context manager for safe, concurrent access to the database.
    All SQL commands should be executed through a transaction.
    """

    _mutated = False
    """A flag storing whether a mutation has been executed in the
    current transaction.
    """

    def __init__(self, db):
        self.db = db

@@ -815,12 +845,15 @@ class Transaction(object):
        entered but not yet exited transaction. If it is the last active
        transaction, the database updates are committed.
        """
        # Beware of races; currently secured by db._db_lock
        self.db.revision += self._mutated
        with self.db._tx_stack() as stack:
            assert stack.pop() is self
            empty = not stack
        if empty:
            # Ending a "root" transaction. End the SQLite transaction.
            self.db._connection().commit()
            self._mutated = False
            self.db._db_lock.release()

    def query(self, statement, subvals=()):

@@ -836,7 +869,6 @@ class Transaction(object):
        """
        try:
            cursor = self.db._connection().execute(statement, subvals)
            return cursor.lastrowid
        except sqlite3.OperationalError as e:
            # In two specific cases, SQLite reports an error while accessing
            # the underlying database file. We surface these exceptions as

@@ -846,9 +878,14 @@ class Transaction(object):
                raise DBAccessError(e.args[0])
            else:
                raise
        else:
            self._mutated = True
            return cursor.lastrowid

    def script(self, statements):
        """Execute a string containing multiple SQL statements."""
        # We don't know whether this mutates, but quite likely it does.
        self._mutated = True
        self.db._connection().executescript(statements)

@@ -864,6 +901,11 @@ class Database(object):
    supports_extensions = hasattr(sqlite3.Connection, 'enable_load_extension')
    """Whether or not the current version of SQLite supports extensions"""

    revision = 0
    """The current revision of the database. To be increased whenever
    data is written in a transaction.
    """

    def __init__(self, path, timeout=5.0):
        self.path = path
        self.timeout = timeout
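A short usage sketch of the new accessor and revision bookkeeping above
(assumes an existing beets library database at the given path):

    from beets import library

    lib = library.Library('library.db')   # path is a placeholder
    item = lib.items().get()              # first matching item, or None
    if item is not None:
        print(item.get('bitrate', 0))     # default instead of KeyError
        item.load()                       # queries the database once
        item.load()                       # returns early: revision unchanged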
beets/importer.py

@@ -187,7 +187,7 @@ class ImportSession(object):
        self.logger = self._setup_logging(loghandler)
        self.paths = paths
        self.query = query
        self._is_resuming = dict()
        self._is_resuming = {}
        self._merged_items = set()
        self._merged_dirs = set()

@@ -572,10 +572,11 @@ class ImportTask(BaseImportTask):
            util.prune_dirs(os.path.dirname(item.path),
                            lib.directory)

    def set_fields(self):
    def set_fields(self, lib):
        """Sets the fields given at CLI or configuration to the specified
        values.
        values, for both the album and all its items.
        """
        items = self.imported_items()
        for field, view in config['import']['set_fields'].items():
            value = view.get()
            log.debug(u'Set field {1}={2} for {0}',

@@ -583,6 +584,11 @@ class ImportTask(BaseImportTask):
                      field,
                      value)
            self.album[field] = value
            for item in items:
                item[field] = value
        with lib.transaction():
            for item in items:
                item.store()
            self.album.store()

    def finalize(self, session):

@@ -786,7 +792,7 @@ class ImportTask(BaseImportTask):
            if (not dup_item.album_id or
                    dup_item.album_id in replaced_album_ids):
                continue
            replaced_album = dup_item.get_album()
            replaced_album = dup_item._cached_album
            if replaced_album:
                replaced_album_ids.add(dup_item.album_id)
                self.replaced_albums[replaced_album.path] = replaced_album

@@ -946,9 +952,9 @@ class SingletonImportTask(ImportTask):
    def reload(self):
        self.item.load()

    def set_fields(self):
    def set_fields(self, lib):
        """Sets the fields given at CLI or configuration to the specified
        values.
        values, for the singleton item.
        """
        for field, view in config['import']['set_fields'].items():
            value = view.get()

@@ -1054,6 +1060,12 @@ class ArchiveImportTask(SentinelImportTask):
            pass
        else:
            cls._handlers.append((is_rarfile, RarFile))
        try:
            from py7zr import is_7zfile, SevenZipFile
        except ImportError:
            pass
        else:
            cls._handlers.append((is_7zfile, SevenZipFile))

        return cls._handlers

@@ -1510,7 +1522,7 @@ def apply_choice(session, task):
    # because then the ``ImportTask`` won't have an `album` for which
    # it can set the fields.
    if config['import']['set_fields']:
        task.set_fields()
        task.set_fields(session.lib)


@pipeline.mutator_stage
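One way to drive `set_fields` from Python, as a hedged sketch (field names
and values are examples only); with this change the task persists the values
itself, in a single transaction, via the library handle it now receives:

    from beets import config

    config['import']['set_fields'] = {'genre': 'Jazz', 'reviewed': '0'}
    # Later, inside apply_choice():
    #     task.set_fields(session.lib)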
beets/library.py (102 lines changed)

@@ -374,8 +374,19 @@ class FormattedItemMapping(dbcore.db.FormattedMapping):
    Album-level fields take precedence if `for_path` is true.
    """

    def __init__(self, item, for_path=False):
        super(FormattedItemMapping, self).__init__(item, for_path)
    ALL_KEYS = '*'

    def __init__(self, item, included_keys=ALL_KEYS, for_path=False):
        # We treat album and item keys specially here,
        # so exclude transitive album keys from the model's keys.
        super(FormattedItemMapping, self).__init__(item, included_keys=[],
                                                   for_path=for_path)
        self.included_keys = included_keys
        if included_keys == self.ALL_KEYS:
            # Performance note: this triggers a database query.
            self.model_keys = item.keys(computed=True, with_album=False)
        else:
            self.model_keys = included_keys
        self.item = item

    @lazy_property

@@ -386,15 +397,19 @@ class FormattedItemMapping(dbcore.db.FormattedMapping):
    def album_keys(self):
        album_keys = []
        if self.album:
            for key in self.album.keys(True):
            if self.included_keys == self.ALL_KEYS:
                # Performance note: this triggers a database query.
                for key in self.album.keys(computed=True):
                    if key in Album.item_keys \
                            or key not in self.item._fields.keys():
                        album_keys.append(key)
            else:
                album_keys = self.included_keys
        return album_keys

    @lazy_property
    @property
    def album(self):
        return self.item.get_album()
        return self.item._cached_album

    def _get(self, key):
        """Get the value for a key, either from the album or the item.

@@ -418,11 +433,14 @@ class FormattedItemMapping(dbcore.db.FormattedMapping):
        # `artist` and `albumartist` fields fall back to one another.
        # This is helpful in path formats when the album artist is unset
        # on as-is imports.
        try:
            if key == 'artist' and not value:
                return self._get('albumartist')
            elif key == 'albumartist' and not value:
                return self._get('artist')
            else:
        except KeyError:
            pass

        return value

    def __iter__(self):

@@ -477,12 +495,14 @@ class Item(LibModel):
        'mb_artistid': types.STRING,
        'mb_albumartistid': types.STRING,
        'mb_releasetrackid': types.STRING,
        'trackdisambig': types.STRING,
        'albumtype': types.STRING,
        'label': types.STRING,
        'acoustid_fingerprint': types.STRING,
        'acoustid_id': types.STRING,
        'mb_releasegroupid': types.STRING,
        'asin': types.STRING,
        'isrc': types.STRING,
        'catalognum': types.STRING,
        'script': types.STRING,
        'language': types.STRING,

@@ -544,6 +564,29 @@ class Item(LibModel):

    _format_config_key = 'format_item'

    __album = None
    """Cached album object. Read-only."""

    @property
    def _cached_album(self):
        """The Album object that this item belongs to, if any, or
        None if the item is a singleton or is not associated with a
        library.
        The instance is cached and refreshed on access.

        DO NOT MODIFY!
        If you want a copy to modify, use :meth:`get_album`.
        """
        if not self.__album and self._db:
            self.__album = self._db.get_album(self)
        elif self.__album:
            self.__album.load()
        return self.__album

    @_cached_album.setter
    def _cached_album(self, album):
        self.__album = album

    @classmethod
    def _getters(cls):
        getters = plugins.item_field_getters()

@@ -570,12 +613,57 @@ class Item(LibModel):
                value = bytestring_path(value)
        elif isinstance(value, BLOB_TYPE):
            value = bytes(value)
        elif key == 'album_id':
            self._cached_album = None

        changed = super(Item, self)._setitem(key, value)

        if changed and key in MediaFile.fields():
            self.mtime = 0  # Reset mtime on dirty.

    def __getitem__(self, key):
        """Get the value for a field, falling back to the album if
        necessary. Raise a KeyError if the field is not available.
        """
        try:
            return super(Item, self).__getitem__(key)
        except KeyError:
            if self._cached_album:
                return self._cached_album[key]
            raise

    def __repr__(self):
        # This must not use `with_album=True`, because that might access
        # the database. When debugging, that is not guaranteed to succeed, and
        # can even deadlock due to the database lock.
        return '{0}({1})'.format(
            type(self).__name__,
            ', '.join('{0}={1!r}'.format(k, self[k])
                      for k in self.keys(with_album=False)),
        )

    def keys(self, computed=False, with_album=True):
        """Get a list of available field names. `with_album`
        controls whether the album's fields are included.
        """
        keys = super(Item, self).keys(computed=computed)
        if with_album and self._cached_album:
            keys = set(keys)
            keys.update(self._cached_album.keys(computed=computed))
            keys = list(keys)
        return keys

    def get(self, key, default=None, with_album=True):
        """Get the value for a given key or `default` if it does not
        exist. Set `with_album` to false to skip album fallback.
        """
        try:
            return self._get(key, default, raise_=with_album)
        except KeyError:
            if self._cached_album:
                return self._cached_album.get(key, default)
            return default

    def update(self, values):
        """Set all key/value pairs in the mapping. If mtime is
        specified, it is not reset (as it might otherwise be).

@@ -1630,7 +1718,7 @@ class DefaultTemplateFunctions(object):
            return res

        # Flatten disambiguation value into a string.
        disam_value = album.formatted(True).get(disambiguator)
        disam_value = album.formatted(for_path=True).get(disambiguator)

        # Return empty string if disambiguator is empty.
        if disam_value:
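A brief sketch of the cached-album semantics introduced above (assumes `lib`
and an album-bound item as in the earlier sketch):

    item = lib.items().get()
    shared = item._cached_album                 # cached instance: do not modify
    copy = item.get_album()                     # fresh copy, safe to modify
    print(item.get('genre', with_album=False))  # skip the album fallback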
beets/plugins.py

@@ -130,29 +130,30 @@ class BeetsPlugin(object):
        be sent for backwards-compatibility.
        """
        if six.PY2:
            func_args = inspect.getargspec(func).args
            argspec = inspect.getargspec(func)
            func_args = argspec.args
            has_varkw = argspec.keywords is not None
        else:
            func_args = inspect.getfullargspec(func).args
            argspec = inspect.getfullargspec(func)
            func_args = argspec.args
            has_varkw = argspec.varkw is not None

        @wraps(func)
        def wrapper(*args, **kwargs):
            assert self._log.level == logging.NOTSET

            verbosity = beets.config['verbose'].get(int)
            log_level = max(logging.DEBUG, base_log_level - 10 * verbosity)
            self._log.setLevel(log_level)
            try:
                if not has_varkw:
                    kwargs = dict((k, v) for k, v in kwargs.items()
                                  if k in func_args)

                try:
                    return func(*args, **kwargs)
                except TypeError as exc:
                    if exc.args[0].startswith(func.__name__):
                        # caused by 'func' and not stuff internal to 'func'
                        kwargs = dict((arg, val) for arg, val in kwargs.items()
                                      if arg in func_args)
                        return func(*args, **kwargs)
                    else:
                        raise
            finally:
                self._log.setLevel(logging.NOTSET)

        return wrapper

    def queries(self):
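The kwargs filtering above replaces the old TypeError-based retry. A
self-contained illustration of the same technique:

    import inspect

    def call_with_known_args(func, **kwargs):
        # Only filter kwargs when `func` cannot swallow extras itself
        # through a **kwargs parameter.
        spec = inspect.getfullargspec(func)
        if spec.varkw is None:
            kwargs = {k: v for k, v in kwargs.items() if k in spec.args}
        return func(**kwargs)

    def handler(task):                # no **kwargs: extras are dropped
        return task

    print(call_with_known_args(handler, task=1, session=2))  # -> 1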
beets/ui/__init__.py

@@ -666,10 +666,10 @@ def term_width():
FLOAT_EPSILON = 0.01


def _field_diff(field, old, new):
    """Given two Model objects, format their values for `field` and
    highlight changes among them. Return a human-readable string. If the
    value has not changed, return None instead.
def _field_diff(field, old, old_fmt, new, new_fmt):
    """Given two Model objects and their formatted views, format their values
    for `field` and highlight changes among them. Return a human-readable
    string. If the value has not changed, return None instead.
    """
    oldval = old.get(field)
    newval = new.get(field)

@@ -682,8 +682,8 @@ def _field_diff(field, old, new):
        return None

    # Get formatted values for output.
    oldstr = old.formatted().get(field, u'')
    newstr = new.formatted().get(field, u'')
    oldstr = old_fmt.get(field, u'')
    newstr = new_fmt.get(field, u'')

    # For strings, highlight changes. For others, colorize the whole
    # thing.

@@ -708,6 +708,11 @@ def show_model_changes(new, old=None, fields=None, always=False):
    """
    old = old or new._db._get(type(new), new.id)

    # Keep the formatted views around instead of re-creating them in each
    # iteration step.
    old_fmt = old.formatted()
    new_fmt = new.formatted()

    # Build up lines showing changed fields.
    changes = []
    for field in old:

@@ -716,7 +721,7 @@ def show_model_changes(new, old=None, fields=None, always=False):
            continue

        # Detect and show difference for this field.
        line = _field_diff(field, old, new)
        line = _field_diff(field, old, old_fmt, new, new_fmt)
        if line:
            changes.append(u'  {0}: {1}'.format(field, line))

@@ -727,7 +732,7 @@ def show_model_changes(new, old=None, fields=None, always=False):

        changes.append(u'  {0}: {1}'.format(
            field,
            colorize('text_highlight', new.formatted()[field])
            colorize('text_highlight', new_fmt[field])
        ))

    # Print changes.

@@ -791,11 +796,14 @@ def _store_dict(option, opt_str, value, parser):
    if option_values is None:
        # This is the first supplied ``key=value`` pair of option.
        # Initialize empty dictionary and get a reference to it.
        setattr(parser.values, dest, dict())
        setattr(parser.values, dest, {})
        option_values = getattr(parser.values, dest)

    # Decode the argument using the platform's argument encoding.
    value = util.text_string(value, util.arg_encoding())

    try:
        key, value = map(lambda s: util.text_string(s), value.split('='))
        key, value = value.split('=', 1)
        if not (key and value):
            raise ValueError
    except ValueError:

@@ -1155,8 +1163,13 @@ def _setup(options, lib=None):
    plugins.send("library_opened", lib=lib)

    # Add types and queries defined by plugins.
    library.Item._types.update(plugins.types(library.Item))
    library.Album._types.update(plugins.types(library.Album))
    plugin_types_album = plugins.types(library.Album)
    library.Album._types.update(plugin_types_album)
    item_types = plugin_types_album.copy()
    item_types.update(library.Item._types)
    item_types.update(plugins.types(library.Item))
    library.Item._types = item_types

    library.Item._queries.update(plugins.named_queries(library.Item))
    library.Album._queries.update(plugins.named_queries(library.Album))
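A tiny, runnable illustration of the `_store_dict` parsing fix above:
split('=', 1) splits on the first '=' only, so values may themselves
contain '=' characters:

    key, value = 'comments=x=y'.split('=', 1)
    print(key, value)    # -> comments x=y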
beets/ui/commands.py

@@ -698,6 +698,19 @@ class TerminalImportSession(importer.ImportSession):
        print_(displayable_path(task.paths, u'\n') +
               u' ({0} items)'.format(len(task.items)))

        # Let plugins display info or prompt the user before we go through the
        # process of selecting a candidate.
        results = plugins.send('import_task_before_choice',
                               session=self, task=task)
        actions = [action for action in results if action]

        if len(actions) == 1:
            return actions[0]
        elif len(actions) > 1:
            raise plugins.PluginConflictException(
                u'Only one handler for `import_task_before_choice` may return '
                u'an action.')

        # Take immediate action if appropriate.
        action = _summary_judgment(task.rec)
        if action == importer.action.APPLY:
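A hedged plugin sketch (hypothetical plugin name) for the new
`import_task_before_choice` event: a handler may prompt the user and return
an importer action, and at most one registered handler may do so, otherwise
PluginConflictException is raised:

    from beets import importer, plugins

    class SkipEverything(plugins.BeetsPlugin):
        def __init__(self):
            super(SkipEverything, self).__init__()
            self.register_listener('import_task_before_choice',
                                   self.before_choice)

        def before_choice(self, session, task):
            # Return None to fall through to the normal prompt.
            return importer.action.SKIP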
beets/util/artresizer.py

@@ -64,12 +64,13 @@ def temp_file_for(path):
    return util.bytestring_path(f.name)


def pil_resize(maxwidth, path_in, path_out=None, quality=0):
def pil_resize(maxwidth, path_in, path_out=None, quality=0, max_filesize=0):
    """Resize using Python Imaging Library (PIL). Return the output path
    of resized image.
    """
    path_out = path_out or temp_file_for(path_in)
    from PIL import Image

    log.debug(u'artresizer: PIL resizing {0} to {1}',
              util.displayable_path(path_in), util.displayable_path(path_out))

@@ -83,6 +84,35 @@ def pil_resize(maxwidth, path_in, path_out=None, quality=0):
            quality = -1

        im.save(util.py3_path(path_out), quality=quality)

        if max_filesize > 0:
            # If a maximum filesize is set, we attempt to lower the quality
            # of the JPEG conversion by a proportional amount, up to 5
            # attempts. First, set the maximum quality to either the
            # provided value, or 95.
            if quality > 0:
                lower_qual = quality
            else:
                lower_qual = 95
            for i in range(5):
                # 5 attempts is an arbitrary choice
                filesize = os.stat(util.syspath(path_out)).st_size
                log.debug(u"PIL Pass {0} : Output size: {1}B", i, filesize)
                if filesize <= max_filesize:
                    return path_out
                # The relationship between filesize & quality will be
                # image dependent.
                lower_qual -= 10
                # Restrict quality from dropping below 10
                if lower_qual < 10:
                    lower_qual = 10
                # Use optimize flag to improve filesize decrease
                im.save(
                    util.py3_path(path_out), quality=lower_qual, optimize=True
                )
            log.warning(u"PIL Failed to resize file to below {0}B",
                        max_filesize)
            return path_out

        else:
            return path_out
    except IOError:
        log.error(u"PIL cannot create thumbnail for '{0}'",

@@ -90,7 +120,7 @@ def pil_resize(maxwidth, path_in, path_out=None, quality=0):
        return path_in


def im_resize(maxwidth, path_in, path_out=None, quality=0):
def im_resize(maxwidth, path_in, path_out=None, quality=0, max_filesize=0):
    """Resize using ImageMagick.

    Use the ``magick`` program or ``convert`` on older versions. Return

@@ -111,6 +141,11 @@ def im_resize(maxwidth, path_in, path_out=None, quality=0):
    if quality > 0:
        cmd += ['-quality', '{0}'.format(quality)]

    # "-define jpeg:extent=SIZEb" sets the target filesize for ImageMagick
    # to SIZE in bytes.
    if max_filesize > 0:
        cmd += ['-define', 'jpeg:extent={0}b'.format(max_filesize)]

    cmd.append(util.syspath(path_out, prefix=False))

    try:

@@ -131,6 +166,7 @@ BACKEND_FUNCS = {

def pil_getsize(path_in):
    from PIL import Image

    try:
        im = Image.open(util.syspath(path_in))
        return im.size

@@ -171,6 +207,7 @@ class Shareable(type):
    lazily-created shared instance of ``MyClass`` while calling
    ``MyClass()`` to construct a new object works as usual.
    """

    def __init__(cls, name, bases, dict):
        super(Shareable, cls).__init__(name, bases, dict)
        cls._instance = None

@@ -205,7 +242,9 @@ class ArtResizer(six.with_metaclass(Shareable, object)):
            self.im_convert_cmd = ['magick']
            self.im_identify_cmd = ['magick', 'identify']

    def resize(self, maxwidth, path_in, path_out=None, quality=0):
    def resize(
        self, maxwidth, path_in, path_out=None, quality=0, max_filesize=0
    ):
        """Manipulate an image file according to the method, returning a
        new path. For PIL or IMAGEMAGICK methods, resizes the image to a
        temporary file and encodes with the specified quality level.

@@ -213,7 +252,8 @@ class ArtResizer(six.with_metaclass(Shareable, object)):
        """
        if self.local:
            func = BACKEND_FUNCS[self.method[0]]
            return func(maxwidth, path_in, path_out, quality=quality)
            return func(maxwidth, path_in, path_out,
                        quality=quality, max_filesize=max_filesize)
        else:
            return path_in
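A hedged usage sketch for the new max_filesize parameter (the input path is
a placeholder; ArtResizer.shared is provided by the Shareable metaclass):

    from beets.util.artresizer import ArtResizer

    out = ArtResizer.shared.resize(
        1000,                     # maximum width in pixels
        b'/tmp/cover.jpg',        # input image, as a bytestring path
        quality=0,                # keep the backend's default quality
        max_filesize=200 * 1024,  # aim for at most ~200 KiB
    )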
966
beetsplug/aura.py
Normal file
966
beetsplug/aura.py
Normal file
|
|
@ -0,0 +1,966 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of beets.
|
||||
# Copyright 2020, Callum Brown.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
|
||||
"""An AURA server using Flask."""
|
||||
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from mimetypes import guess_type
|
||||
import re
|
||||
from os.path import isfile, getsize
|
||||
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beets.ui import Subcommand, _open_library
|
||||
from beets import config
|
||||
from beets.util import py3_path
|
||||
from beets.library import Item, Album
|
||||
from beets.dbcore.query import (
|
||||
MatchQuery,
|
||||
NotQuery,
|
||||
RegexpQuery,
|
||||
AndQuery,
|
||||
FixedFieldSort,
|
||||
SlowFieldSort,
|
||||
MultipleSort,
|
||||
)
|
||||
|
||||
from flask import (
|
||||
Blueprint,
|
||||
Flask,
|
||||
current_app,
|
||||
send_file,
|
||||
make_response,
|
||||
request,
|
||||
)
|
||||
|
||||
|
||||
# Constants
|
||||
|
||||
# AURA server information
|
||||
# TODO: Add version information
|
||||
SERVER_INFO = {
|
||||
"aura-version": "0",
|
||||
"server": "beets-aura",
|
||||
"server-version": "0.1",
|
||||
"auth-required": False,
|
||||
"features": ["albums", "artists", "images"],
|
||||
}
|
||||
|
||||
# Maps AURA Track attribute to beets Item attribute
|
||||
TRACK_ATTR_MAP = {
|
||||
# Required
|
||||
"title": "title",
|
||||
"artist": "artist",
|
||||
# Optional
|
||||
"album": "album",
|
||||
"track": "track", # Track number on album
|
||||
"tracktotal": "tracktotal",
|
||||
"disc": "disc",
|
||||
"disctotal": "disctotal",
|
||||
"year": "year",
|
||||
"month": "month",
|
||||
"day": "day",
|
||||
"bpm": "bpm",
|
||||
"genre": "genre",
|
||||
"recording-mbid": "mb_trackid", # beets trackid is MB recording
|
||||
"track-mbid": "mb_releasetrackid",
|
||||
"composer": "composer",
|
||||
"albumartist": "albumartist",
|
||||
"comments": "comments",
|
||||
# Optional for Audio Metadata
|
||||
# TODO: Support the mimetype attribute, format != mime type
|
||||
# "mimetype": track.format,
|
||||
"duration": "length",
|
||||
"framerate": "samplerate",
|
||||
# I don't think beets has a framecount field
|
||||
# "framecount": ???,
|
||||
"channels": "channels",
|
||||
"bitrate": "bitrate",
|
||||
"bitdepth": "bitdepth",
|
||||
"size": "filesize",
|
||||
}
|
||||
|
||||
# Maps AURA Album attribute to beets Album attribute
|
||||
ALBUM_ATTR_MAP = {
|
||||
# Required
|
||||
"title": "album",
|
||||
"artist": "albumartist",
|
||||
# Optional
|
||||
"tracktotal": "albumtotal",
|
||||
"disctotal": "disctotal",
|
||||
"year": "year",
|
||||
"month": "month",
|
||||
"day": "day",
|
||||
"genre": "genre",
|
||||
"release-mbid": "mb_albumid",
|
||||
"release-group-mbid": "mb_releasegroupid",
|
||||
}
|
||||
|
||||
# Maps AURA Artist attribute to beets Item field
|
||||
# Artists are not first-class in beets, so information is extracted from
|
||||
# beets Items.
|
||||
ARTIST_ATTR_MAP = {
|
||||
# Required
|
||||
"name": "artist",
|
||||
# Optional
|
||||
"artist-mbid": "mb_artistid",
|
||||
}
|
||||
|
||||
|
||||
class AURADocument:
|
||||
"""Base class for building AURA documents."""
|
||||
|
||||
@staticmethod
|
||||
def error(status, title, detail):
|
||||
"""Make a response for an error following the JSON:API spec.
|
||||
|
||||
Args:
|
||||
status: An HTTP status code string, e.g. "404 Not Found".
|
||||
title: A short, human-readable summary of the problem.
|
||||
detail: A human-readable explanation specific to this
|
||||
occurrence of the problem.
|
||||
"""
|
||||
document = {
|
||||
"errors": [{"status": status, "title": title, "detail": detail}]
|
||||
}
|
||||
return make_response(document, status)
|
||||
|
||||
def translate_filters(self):
|
||||
"""Translate filters from request arguments to a beets Query."""
|
||||
# The format of each filter key in the request parameter is:
|
||||
# filter[<attribute>]. This regex extracts <attribute>.
|
||||
pattern = re.compile(r"filter\[(?P<attribute>[a-zA-Z0-9_-]+)\]")
|
||||
queries = []
|
||||
for key, value in request.args.items():
|
||||
match = pattern.match(key)
|
||||
if match:
|
||||
# Extract attribute name from key
|
||||
aura_attr = match.group("attribute")
|
||||
# Get the beets version of the attribute name
|
||||
beets_attr = self.attribute_map.get(aura_attr, aura_attr)
|
||||
converter = self.get_attribute_converter(beets_attr)
|
||||
value = converter(value)
|
||||
# Add exact match query to list
|
||||
# Use a slow query so it works with all fields
|
||||
queries.append(MatchQuery(beets_attr, value, fast=False))
|
||||
# NOTE: AURA doesn't officially support multiple queries
|
||||
return AndQuery(queries)
|
||||
|
||||
def translate_sorts(self, sort_arg):
|
||||
"""Translate an AURA sort parameter into a beets Sort.
|
||||
|
||||
Args:
|
||||
sort_arg: The value of the 'sort' query parameter; a comma
|
||||
separated list of fields to sort by, in order.
|
||||
E.g. "-year,title".
|
||||
"""
|
||||
# Change HTTP query parameter to a list
|
||||
aura_sorts = sort_arg.strip(",").split(",")
|
||||
sorts = []
|
||||
for aura_attr in aura_sorts:
|
||||
if aura_attr[0] == "-":
|
||||
ascending = False
|
||||
# Remove leading "-"
|
||||
aura_attr = aura_attr[1:]
|
||||
else:
|
||||
# JSON:API default
|
||||
ascending = True
|
||||
# Get the beets version of the attribute name
|
||||
beets_attr = self.attribute_map.get(aura_attr, aura_attr)
|
||||
# Use slow sort so it works with all fields (inc. computed)
|
||||
sorts.append(SlowFieldSort(beets_attr, ascending=ascending))
|
||||
return MultipleSort(sorts)
|
||||
|
||||
def paginate(self, collection):
|
||||
"""Get a page of the collection and the URL to the next page.
|
||||
|
||||
Args:
|
||||
collection: The raw data from which resource objects can be
|
||||
built. Could be an sqlite3.Cursor object (tracks and
|
||||
albums) or a list of strings (artists).
|
||||
"""
|
||||
# Pages start from zero
|
||||
page = request.args.get("page", 0, int)
|
||||
# Use page limit defined in config by default.
|
||||
default_limit = config["aura"]["page_limit"].get(int)
|
||||
limit = request.args.get("limit", default_limit, int)
|
||||
# start = offset of first item to return
|
||||
start = page * limit
|
||||
# end = offset of last item + 1
|
||||
end = start + limit
|
||||
if end > len(collection):
|
||||
end = len(collection)
|
||||
next_url = None
|
||||
else:
|
||||
# Not the last page so work out links.next url
|
||||
if not request.args:
|
||||
# No existing arguments, so current page is 0
|
||||
next_url = request.url + "?page=1"
|
||||
elif not request.args.get("page", None):
|
||||
# No existing page argument, so add one to the end
|
||||
next_url = request.url + "&page=1"
|
||||
else:
|
||||
# Increment page token by 1
|
||||
next_url = request.url.replace(
|
||||
"page={}".format(page), "page={}".format(page + 1)
|
||||
)
|
||||
# Get only the items in the page range
|
||||
data = [self.resource_object(collection[i]) for i in range(start, end)]
|
||||
return data, next_url
|
||||
|
||||
def get_included(self, data, include_str):
|
||||
"""Build a list of resource objects for inclusion.
|
||||
|
||||
Args:
|
||||
data: An array of dicts in the form of resource objects.
|
||||
include_str: A comma separated list of resource types to
|
||||
include. E.g. "tracks,images".
|
||||
"""
|
||||
# Change HTTP query parameter to a list
|
||||
to_include = include_str.strip(",").split(",")
|
||||
# Build a list of unique type and id combinations
|
||||
# For each resource object in the primary data, iterate over it's
|
||||
# relationships. If a relationship matches one of the types
|
||||
# requested for inclusion (e.g. "albums") then add each type-id pair
|
||||
# under the "data" key to unique_identifiers, checking first that
|
||||
# it has not already been added. This ensures that no resources are
|
||||
# included more than once.
|
||||
unique_identifiers = []
|
||||
for res_obj in data:
|
||||
for rel_name, rel_obj in res_obj["relationships"].items():
|
||||
if rel_name in to_include:
|
||||
# NOTE: Assumes relationship is to-many
|
||||
for identifier in rel_obj["data"]:
|
||||
if identifier not in unique_identifiers:
|
||||
unique_identifiers.append(identifier)
|
||||
# TODO: I think this could be improved
|
||||
included = []
|
||||
for identifier in unique_identifiers:
|
||||
res_type = identifier["type"]
|
||||
if res_type == "track":
|
||||
track_id = int(identifier["id"])
|
||||
track = current_app.config["lib"].get_item(track_id)
|
||||
included.append(TrackDocument.resource_object(track))
|
||||
elif res_type == "album":
|
||||
album_id = int(identifier["id"])
|
||||
album = current_app.config["lib"].get_album(album_id)
|
||||
included.append(AlbumDocument.resource_object(album))
|
||||
elif res_type == "artist":
|
||||
artist_id = identifier["id"]
|
||||
included.append(ArtistDocument.resource_object(artist_id))
|
||||
elif res_type == "image":
|
||||
image_id = identifier["id"]
|
||||
included.append(ImageDocument.resource_object(image_id))
|
||||
else:
|
||||
raise ValueError("Invalid resource type: {}".format(res_type))
|
||||
return included
|
||||
|
||||
def all_resources(self):
|
||||
"""Build document for /tracks, /albums or /artists."""
|
||||
query = self.translate_filters()
|
||||
sort_arg = request.args.get("sort", None)
|
||||
if sort_arg:
|
||||
sort = self.translate_sorts(sort_arg)
|
||||
# For each sort field add a query which ensures all results
|
||||
# have a non-empty, non-zero value for that field.
|
||||
for s in sort.sorts:
|
||||
query.subqueries.append(
|
||||
NotQuery(
|
||||
# Match empty fields (^$) or zero fields, (^0$)
|
||||
RegexpQuery(s.field, "(^$|^0$)", fast=False)
|
||||
)
|
||||
)
|
||||
else:
|
||||
sort = None
|
||||
# Get information from the library
|
||||
collection = self.get_collection(query=query, sort=sort)
|
||||
# Convert info to AURA form and paginate it
|
||||
data, next_url = self.paginate(collection)
|
||||
document = {"data": data}
|
||||
# If there are more pages then provide a way to access them
|
||||
if next_url:
|
||||
document["links"] = {"next": next_url}
|
||||
# Include related resources for each element in "data"
|
||||
include_str = request.args.get("include", None)
|
||||
if include_str:
|
||||
document["included"] = self.get_included(data, include_str)
|
||||
return document
|
||||
|
||||
def single_resource_document(self, resource_object):
|
||||
"""Build document for a specific requested resource.
|
||||
|
||||
Args:
|
||||
resource_object: A dictionary in the form of a JSON:API
|
||||
resource object.
|
||||
"""
|
||||
document = {"data": resource_object}
|
||||
include_str = request.args.get("include", None)
|
||||
if include_str:
|
||||
# [document["data"]] is because arg needs to be list
|
||||
document["included"] = self.get_included(
|
||||
[document["data"]], include_str
|
||||
)
|
||||
return document
|
||||
|
||||
|
||||
class TrackDocument(AURADocument):
|
||||
"""Class for building documents for /tracks endpoints."""
|
||||
|
||||
attribute_map = TRACK_ATTR_MAP
|
||||
|
||||
def get_collection(self, query=None, sort=None):
|
||||
"""Get Item objects from the library.
|
||||
|
||||
Args:
|
||||
query: A beets Query object or a beets query string.
|
||||
sort: A beets Sort object.
|
||||
"""
|
||||
return current_app.config["lib"].items(query, sort)
|
||||
|
||||
def get_attribute_converter(self, beets_attr):
|
||||
"""Work out what data type an attribute should be for beets.
|
||||
|
||||
Args:
|
||||
beets_attr: The name of the beets attribute, e.g. "title".
|
||||
"""
|
||||
# filesize is a special field (read from disk not db?)
|
||||
if beets_attr == "filesize":
|
||||
converter = int
|
||||
else:
|
||||
try:
|
||||
# Look for field in list of Item fields
|
||||
# and get python type of database type.
|
||||
# See beets.library.Item and beets.dbcore.types
|
||||
converter = Item._fields[beets_attr].model_type
|
||||
except KeyError:
|
||||
# Fall back to string (NOTE: probably not good)
|
||||
converter = str
|
||||
return converter
|
||||
|
||||
@staticmethod
|
||||
def resource_object(track):
|
||||
"""Construct a JSON:API resource object from a beets Item.
|
||||
|
||||
Args:
|
||||
track: A beets Item object.
|
||||
"""
|
||||
attributes = {}
|
||||
# Use aura => beets attribute map, e.g. size => filesize
|
||||
for aura_attr, beets_attr in TRACK_ATTR_MAP.items():
|
||||
a = getattr(track, beets_attr)
|
||||
# Only set attribute if it's not None, 0, "", etc.
|
||||
# NOTE: This could result in required attributes not being set
|
||||
if a:
|
||||
attributes[aura_attr] = a
|
||||
|
||||
# JSON:API one-to-many relationship to parent album
|
||||
relationships = {
|
||||
"artists": {"data": [{"type": "artist", "id": track.artist}]}
|
||||
}
|
||||
# Only add album relationship if not singleton
|
||||
if not track.singleton:
|
||||
relationships["albums"] = {
|
||||
"data": [{"type": "album", "id": str(track.album_id)}]
|
||||
}
|
||||
|
||||
return {
|
||||
"type": "track",
|
||||
"id": str(track.id),
|
||||
"attributes": attributes,
|
||||
"relationships": relationships,
|
||||
}
|
||||
|
||||
def single_resource(self, track_id):
|
||||
"""Get track from the library and build a document.
|
||||
|
||||
Args:
|
||||
track_id: The beets id of the track (integer).
|
||||
"""
|
||||
track = current_app.config["lib"].get_item(track_id)
|
||||
if not track:
|
||||
return self.error(
|
||||
"404 Not Found",
|
||||
"No track with the requested id.",
|
||||
"There is no track with an id of {} in the library.".format(
|
||||
track_id
|
||||
),
|
||||
)
|
||||
return self.single_resource_document(self.resource_object(track))
|
||||
|
||||
|
||||
class AlbumDocument(AURADocument):
|
||||
"""Class for building documents for /albums endpoints."""
|
||||
|
||||
attribute_map = ALBUM_ATTR_MAP
|
||||
|
||||
def get_collection(self, query=None, sort=None):
|
||||
"""Get Album objects from the library.
|
||||
|
||||
Args:
|
||||
query: A beets Query object or a beets query string.
|
||||
sort: A beets Sort object.
|
||||
"""
|
||||
return current_app.config["lib"].albums(query, sort)
|
||||
|
||||
def get_attribute_converter(self, beets_attr):
|
||||
"""Work out what data type an attribute should be for beets.
|
||||
|
||||
Args:
|
||||
beets_attr: The name of the beets attribute, e.g. "title".
|
||||
"""
|
||||
try:
|
||||
# Look for field in list of Album fields
|
||||
# and get python type of database type.
|
||||
# See beets.library.Album and beets.dbcore.types
|
||||
converter = Album._fields[beets_attr].model_type
|
||||
except KeyError:
|
||||
# Fall back to string (NOTE: probably not good)
|
||||
converter = str
|
||||
return converter
|
||||
|
||||
@staticmethod
|
||||
def resource_object(album):
|
||||
"""Construct a JSON:API resource object from a beets Album.
|
||||
|
||||
Args:
|
||||
album: A beets Album object.
|
||||
"""
|
||||
attributes = {}
|
||||
# Use aura => beets attribute name map
|
||||
for aura_attr, beets_attr in ALBUM_ATTR_MAP.items():
|
||||
a = getattr(album, beets_attr)
|
||||
# Only set attribute if it's not None, 0, "", etc.
|
||||
# NOTE: This could mean required attributes are not set
|
||||
if a:
|
||||
attributes[aura_attr] = a
|
||||
|
||||
# Get beets Item objects for all tracks in the album sorted by
|
||||
# track number. Sorting is not required but it's nice.
|
||||
query = MatchQuery("album_id", album.id)
|
||||
sort = FixedFieldSort("track", ascending=True)
|
||||
tracks = current_app.config["lib"].items(query, sort)
|
||||
# JSON:API one-to-many relationship to tracks on the album
|
||||
relationships = {
|
||||
"tracks": {
|
||||
"data": [{"type": "track", "id": str(t.id)} for t in tracks]
|
||||
}
|
||||
}
|
||||
# Add images relationship if album has associated images
|
||||
if album.artpath:
|
||||
path = py3_path(album.artpath)
|
||||
filename = path.split("/")[-1]
|
||||
image_id = "album-{}-{}".format(album.id, filename)
|
||||
relationships["images"] = {
|
||||
"data": [{"type": "image", "id": image_id}]
|
||||
}
|
||||
# Add artist relationship if artist name is same on tracks
|
||||
# Tracks are used to define artists so don't albumartist
|
||||
# Check for all tracks in case some have featured artists
|
||||
if album.albumartist in [t.artist for t in tracks]:
|
||||
relationships["artists"] = {
|
||||
"data": [{"type": "artist", "id": album.albumartist}]
|
||||
}
|
||||
|
||||
return {
|
||||
"type": "album",
|
||||
"id": str(album.id),
|
||||
"attributes": attributes,
|
||||
"relationships": relationships,
|
||||
}
|
||||
|
||||
def single_resource(self, album_id):
|
||||
"""Get album from the library and build a document.
|
||||
|
||||
Args:
|
||||
album_id: The beets id of the album (integer).
|
||||
"""
|
||||
album = current_app.config["lib"].get_album(album_id)
|
||||
if not album:
|
||||
return self.error(
|
||||
"404 Not Found",
|
||||
"No album with the requested id.",
|
||||
"There is no album with an id of {} in the library.".format(
|
||||
album_id
|
||||
),
|
||||
)
|
||||
return self.single_resource_document(self.resource_object(album))
|
||||
|
||||
|
||||
class ArtistDocument(AURADocument):
    """Class for building documents for /artists endpoints."""

    attribute_map = ARTIST_ATTR_MAP

    def get_collection(self, query=None, sort=None):
        """Get a list of artist names from the library.

        Args:
            query: A beets Query object or a beets query string.
            sort: A beets Sort object.
        """
        # Gets only tracks with matching artist information
        tracks = current_app.config["lib"].items(query, sort)
        collection = []
        for track in tracks:
            # Do not add duplicates
            if track.artist not in collection:
                collection.append(track.artist)
        return collection

    def get_attribute_converter(self, beets_attr):
        """Work out what data type an attribute should be for beets.

        Args:
            beets_attr: The name of the beets attribute, e.g. "artist".
        """
        try:
            # Look for field in list of Item fields
            # and get python type of database type.
            # See beets.library.Item and beets.dbcore.types
            converter = Item._fields[beets_attr].model_type
        except KeyError:
            # Fall back to string (NOTE: probably not good)
            converter = str
        return converter

    @staticmethod
    def resource_object(artist_id):
        """Construct a JSON:API resource object for the given artist.

        Args:
            artist_id: A string which is the artist's name.
        """
        # Get tracks where artist field exactly matches artist_id
        query = MatchQuery("artist", artist_id)
        tracks = current_app.config["lib"].items(query)
        if not tracks:
            return None

        # Get artist information from the first track
        # NOTE: It could be that the first track doesn't have a
        # MusicBrainz id but later tracks do, which isn't ideal.
        attributes = {}
        # Use aura => beets attribute map, e.g. artist => name
        for aura_attr, beets_attr in ARTIST_ATTR_MAP.items():
            a = getattr(tracks[0], beets_attr)
            # Only set attribute if it's not None, 0, "", etc.
            # NOTE: This could mean required attributes are not set
            if a:
                attributes[aura_attr] = a

        relationships = {
            "tracks": {
                "data": [{"type": "track", "id": str(t.id)} for t in tracks]
            }
        }
        album_query = MatchQuery("albumartist", artist_id)
        albums = current_app.config["lib"].albums(query=album_query)
        if len(albums) != 0:
            relationships["albums"] = {
                "data": [{"type": "album", "id": str(a.id)} for a in albums]
            }

        return {
            "type": "artist",
            "id": artist_id,
            "attributes": attributes,
            "relationships": relationships,
        }

    def single_resource(self, artist_id):
        """Get info for the requested artist and build a document.

        Args:
            artist_id: A string which is the artist's name.
        """
        artist_resource = self.resource_object(artist_id)
        if not artist_resource:
            return self.error(
                "404 Not Found",
                "No artist with the requested id.",
                "There is no artist with an id of {} in the library.".format(
                    artist_id
                ),
            )
        return self.single_resource_document(artist_resource)


class ImageDocument(AURADocument):
    """Class for building documents for /images/(id) endpoints."""

    @staticmethod
    def get_image_path(image_id):
        """Work out the full path to the image with the given id.

        Returns None if there is no such image.

        Args:
            image_id: A string in the form
                "<parent_type>-<parent_id>-<img_filename>".
        """
        # Split image_id into its constituent parts
        id_split = image_id.split("-")
        if len(id_split) < 3:
            # image_id is not in the required format
            return None
        parent_type = id_split[0]
        parent_id = id_split[1]
        img_filename = "-".join(id_split[2:])

        # Get the path to the directory parent's images are in
        if parent_type == "album":
            album = current_app.config["lib"].get_album(int(parent_id))
            if not album or not album.artpath:
                return None
            # Cut the filename off of artpath
            # This is in preparation for supporting images in the same
            # directory that are not tracked by beets.
            artpath = py3_path(album.artpath)
            dir_path = "/".join(artpath.split("/")[:-1])
        else:
            # Images for other resource types are not supported
            return None

        img_path = dir_path + "/" + img_filename
        # Check the image actually exists
        if isfile(img_path):
            return img_path
        else:
            return None
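
    # Illustrative note: for image_id "album-13-cover.jpg" the split above
    # yields parent_type "album", parent_id "13" and img_filename "cover.jpg".
    # Filenames that themselves contain "-" survive because the tail of the
    # split is re-joined.
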
    @staticmethod
    def resource_object(image_id):
        """Construct a JSON:API resource object for the given image.

        Args:
            image_id: A string in the form
                "<parent_type>-<parent_id>-<img_filename>".
        """
        # Could be called as a static method, so can't use
        # self.get_image_path()
        image_path = ImageDocument.get_image_path(image_id)
        if not image_path:
            return None

        attributes = {
            "role": "cover",
            "mimetype": guess_type(image_path)[0],
            "size": getsize(image_path),
        }
        try:
            from PIL import Image
        except ImportError:
            pass
        else:
            im = Image.open(image_path)
            attributes["width"] = im.width
            attributes["height"] = im.height

        relationships = {}
        # Split id into [parent_type, parent_id, filename]
        id_split = image_id.split("-")
        relationships[id_split[0] + "s"] = {
            "data": [{"type": id_split[0], "id": id_split[1]}]
        }

        return {
            "id": image_id,
            "type": "image",
            # Remove attributes that are None, 0, "", etc.
            "attributes": {k: v for k, v in attributes.items() if v},
            "relationships": relationships,
        }

    def single_resource(self, image_id):
        """Get info for the requested image and build a document.

        Args:
            image_id: A string in the form
                "<parent_type>-<parent_id>-<img_filename>".
        """
        image_resource = self.resource_object(image_id)
        if not image_resource:
            return self.error(
                "404 Not Found",
                "No image with the requested id.",
                "There is no image with an id of {} in the library.".format(
                    image_id
                ),
            )
        return self.single_resource_document(image_resource)

# Initialise flask blueprint
aura_bp = Blueprint("aura_bp", __name__)


@aura_bp.route("/server")
def server_info():
    """Respond with info about the server."""
    return {"data": {"type": "server", "id": "0", "attributes": SERVER_INFO}}


# Track endpoints


@aura_bp.route("/tracks")
def all_tracks():
    """Respond with a list of all tracks and related information."""
    doc = TrackDocument()
    return doc.all_resources()


@aura_bp.route("/tracks/<int:track_id>")
def single_track(track_id):
    """Respond with info about the specified track.

    Args:
        track_id: The id of the track provided in the URL (integer).
    """
    doc = TrackDocument()
    return doc.single_resource(track_id)


@aura_bp.route("/tracks/<int:track_id>/audio")
def audio_file(track_id):
    """Supply an audio file for the specified track.

    Args:
        track_id: The id of the track provided in the URL (integer).
    """
    track = current_app.config["lib"].get_item(track_id)
    if not track:
        return AURADocument.error(
            "404 Not Found",
            "No track with the requested id.",
            "There is no track with an id of {} in the library.".format(
                track_id
            ),
        )

    path = py3_path(track.path)
    if not isfile(path):
        return AURADocument.error(
            "404 Not Found",
            "No audio file for the requested track.",
            (
                "There is no audio file for track {} at the expected location"
            ).format(track_id),
        )

    file_mimetype = guess_type(path)[0]
    if not file_mimetype:
        return AURADocument.error(
            "500 Internal Server Error",
            "Requested audio file has an unknown mimetype.",
            (
                "The audio file for track {} has an unknown mimetype. "
                "Its file extension is {}."
            ).format(track_id, path.split(".")[-1]),
        )

    # Check that the Accept header contains the file's mimetype
    # Takes into account */* and audio/*
    # Adding support for the bitrate parameter would require some effort, so I
    # left it out. This means the client could be sent an error even if the
    # audio doesn't need transcoding.
    if not request.accept_mimetypes.best_match([file_mimetype]):
        return AURADocument.error(
            "406 Not Acceptable",
            "Unsupported MIME type or bitrate parameter in Accept header.",
            (
                "The audio file for track {} is only available as {} and "
                "bitrate parameters are not supported."
            ).format(track_id, file_mimetype),
        )

    return send_file(
        path,
        mimetype=file_mimetype,
        # Handles filename in Content-Disposition header
        as_attachment=True,
        # Tries to upgrade the stream to support range requests
        conditional=True,
    )

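# A minimal sketch of the content negotiation above, using Flask's test
# client against an app from create_app() below (the track id is made up):
#
#     client = create_app().test_client()
#     ok = client.get("/aura/tracks/1/audio", headers={"Accept": "audio/*"})
#     bad = client.get("/aura/tracks/1/audio", headers={"Accept": "text/html"})
#     # `bad` is a 406 response: text/html cannot match an audio mimetype.
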
# Album endpoints


@aura_bp.route("/albums")
def all_albums():
    """Respond with a list of all albums and related information."""
    doc = AlbumDocument()
    return doc.all_resources()


@aura_bp.route("/albums/<int:album_id>")
def single_album(album_id):
    """Respond with info about the specified album.

    Args:
        album_id: The id of the album provided in the URL (integer).
    """
    doc = AlbumDocument()
    return doc.single_resource(album_id)


# Artist endpoints
# Artist ids are their names


@aura_bp.route("/artists")
def all_artists():
    """Respond with a list of all artists and related information."""
    doc = ArtistDocument()
    return doc.all_resources()


# Using the path converter allows slashes in artist_id
@aura_bp.route("/artists/<path:artist_id>")
def single_artist(artist_id):
    """Respond with info about the specified artist.

    Args:
        artist_id: The id of the artist provided in the URL. A string
            which is the artist's name.
    """
    doc = ArtistDocument()
    return doc.single_resource(artist_id)


# Image endpoints
# Image ids are in the form <parent_type>-<parent_id>-<img_filename>
# For example: album-13-cover.jpg


@aura_bp.route("/images/<string:image_id>")
def single_image(image_id):
    """Respond with info about the specified image.

    Args:
        image_id: The id of the image provided in the URL. A string in
            the form "<parent_type>-<parent_id>-<img_filename>".
    """
    doc = ImageDocument()
    return doc.single_resource(image_id)


@aura_bp.route("/images/<string:image_id>/file")
def image_file(image_id):
    """Supply an image file for the specified image.

    Args:
        image_id: The id of the image provided in the URL. A string in
            the form "<parent_type>-<parent_id>-<img_filename>".
    """
    img_path = ImageDocument.get_image_path(image_id)
    if not img_path:
        return AURADocument.error(
            "404 Not Found",
            "No image with the requested id.",
            "There is no image with an id of {} in the library.".format(
                image_id
            ),
        )
    return send_file(img_path)

# WSGI app


def create_app():
    """An application factory for use by a WSGI server."""
    config["aura"].add(
        {
            "host": u"127.0.0.1",
            "port": 8337,
            "cors": [],
            "cors_supports_credentials": False,
            "page_limit": 500,
        }
    )

    app = Flask(__name__)
    # Register AURA blueprint view functions under a URL prefix
    app.register_blueprint(aura_bp, url_prefix="/aura")
    # AURA specifies mimetype MUST be this
    app.config["JSONIFY_MIMETYPE"] = "application/vnd.api+json"
    # Disable auto-sorting of JSON keys
    app.config["JSON_SORT_KEYS"] = False
    # Provide a way to access the beets library
    # The normal method of using the Library and config provided in the
    # command function is not used because create_app() could be called
    # by an external WSGI server.
    # NOTE: this uses a 'private' function from beets.ui.__init__
    app.config["lib"] = _open_library(config)

    # Enable CORS if required
    cors = config["aura"]["cors"].as_str_seq(list)
    if cors:
        from flask_cors import CORS

        # "Accept" is the only header clients use
        app.config["CORS_ALLOW_HEADERS"] = "Accept"
        app.config["CORS_RESOURCES"] = {r"/aura/*": {"origins": cors}}
        app.config["CORS_SUPPORTS_CREDENTIALS"] = config["aura"][
            "cors_supports_credentials"
        ].get(bool)
        CORS(app)

    return app


# Beets Plugin Hook


class AURAPlugin(BeetsPlugin):
    """The BeetsPlugin subclass for the AURA server plugin."""

    def __init__(self):
        """Add configuration options for the AURA plugin."""
        super(AURAPlugin, self).__init__()

    def commands(self):
        """Add subcommand used to run the AURA server."""

        def run_aura(lib, opts, args):
            """Run the application using Flask's built-in server.

            Args:
                lib: A beets Library object (not used).
                opts: Command line options. An optparse.Values object.
                args: The list of arguments to process (not used).
            """
            app = create_app()
            # Start the built-in server (not intended for production)
            app.run(
                host=self.config["host"].get(str),
                port=self.config["port"].get(int),
                debug=opts.debug,
                threaded=True,
            )

        run_aura_cmd = Subcommand("aura", help=u"run an AURA server")
        run_aura_cmd.parser.add_option(
            u"-d",
            u"--debug",
            action="store_true",
            default=False,
            help=u"use Flask debug mode",
        )
        run_aura_cmd.func = run_aura
        return [run_aura_cmd]
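
# A hedged deployment sketch: because create_app() is a standard application
# factory, the API can also be served without the `beet aura` subcommand by
# any WSGI server, for example (hypothetical invocation):
#
#     gunicorn 'beetsplug.aura:create_app()'
#
# The Flask development server started by run_aura() above remains the
# simplest way to try the API locally.
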
@@ -30,6 +30,7 @@ from beets.plugins import BeetsPlugin
from beets.ui import Subcommand
from beets.util import displayable_path, par_map
from beets import ui
from beets import importer


class CheckerCommandException(Exception):

@@ -54,6 +55,11 @@ class BadFiles(BeetsPlugin):
        super(BadFiles, self).__init__()
        self.verbose = False

        self.register_listener('import_task_start',
                               self.on_import_task_start)
        self.register_listener('import_task_before_choice',
                               self.on_import_task_before_choice)

    def run_command(self, cmd):
        self._log.debug(u"running command: {}",
                        displayable_path(list2cmdline(cmd)))

@@ -115,7 +121,7 @@ class BadFiles(BeetsPlugin):
        if not checker:
            self._log.error(u"no checker specified in the config for {}",
                            ext)
            return
            return []
        path = item.path
        if not isinstance(path, six.text_type):
            path = item.path.decode(sys.getfilesystemencoding())

@@ -130,25 +136,75 @@ class BadFiles(BeetsPlugin):
                )
            else:
                self._log.error(u"error invoking {}: {}", e.checker, e.msg)
            return
            return []

        error_lines = []

        if status > 0:
            ui.print_(u"{}: checker exited with status {}"
            error_lines.append(
                u"{}: checker exited with status {}"
                .format(ui.colorize('text_error', dpath), status))
            for line in output:
                ui.print_(u"  {}".format(line))
                error_lines.append(u"  {}".format(line))

        elif errors > 0:
            ui.print_(u"{}: checker found {} errors or warnings"
            error_lines.append(
                u"{}: checker found {} errors or warnings"
                .format(ui.colorize('text_warning', dpath), errors))
            for line in output:
                ui.print_(u"  {}".format(line))
                error_lines.append(u"  {}".format(line))
        elif self.verbose:
            ui.print_(u"{}: ok".format(ui.colorize('text_success', dpath)))
            error_lines.append(
                u"{}: ok".format(ui.colorize('text_success', dpath)))

        return error_lines

    def on_import_task_start(self, task, session):
        if not self.config['check_on_import'].get(False):
            return

        checks_failed = []

        for item in task.items:
            error_lines = self.check_item(item)
            if error_lines:
                checks_failed.append(error_lines)

        if checks_failed:
            task._badfiles_checks_failed = checks_failed

    def on_import_task_before_choice(self, task, session):
        if hasattr(task, '_badfiles_checks_failed'):
            ui.print_('{} one or more files failed checks:'
                      .format(ui.colorize('text_warning', 'BAD')))
            for error in task._badfiles_checks_failed:
                for error_line in error:
                    ui.print_(error_line)

            ui.print_()
            ui.print_('What would you like to do?')

            sel = ui.input_options(['aBort', 'skip', 'continue'])

            if sel == 's':
                return importer.action.SKIP
            elif sel == 'c':
                return None
            elif sel == 'b':
                raise importer.ImportAbort()
            else:
                raise Exception('Unexpected selection: {}'.format(sel))

    def command(self, lib, opts, args):
        # Get items from arguments
        items = lib.items(ui.decargs(args))
        self.verbose = opts.verbose
        par_map(self.check_item, items)

        def check_and_print(item):
            for error_line in self.check_item(item):
                ui.print_(error_line)

        par_map(check_and_print, items)

    def commands(self):
        bad_command = Subcommand('bad',
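
# Note on the prompt above (a reading of the code, not new behaviour):
# ui.input_options(['aBort', 'skip', 'continue']) returns the capitalised
# letter of the chosen option in lower case, i.e. 'b', 's' or 'c', which is
# exactly what the `sel` checks in on_import_task_before_choice expect.
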
85  beetsplug/bareasc.py  Normal file
@@ -0,0 +1,85 @@
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Philippe Mongeau.
# Copyright 2021, Graham R. Cobb.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# This module is adapted from Fuzzy in accordance with the licence of
# that module.

"""Provides a bare-ASCII matching query."""

from __future__ import division, absolute_import, print_function

from beets import ui
from beets.ui import print_, decargs
from beets.plugins import BeetsPlugin
from beets.dbcore.query import StringFieldQuery
from unidecode import unidecode
import six


class BareascQuery(StringFieldQuery):
    """Compare items using bare ASCII, without accents etc."""
    @classmethod
    def string_match(cls, pattern, val):
        """Convert both pattern and string to plain ASCII before matching.

        If pattern is all lower case, also convert string to lower case so
        the match is also case-insensitive.
        """
        # smartcase
        if pattern.islower():
            val = val.lower()
        pattern = unidecode(pattern)
        val = unidecode(val)
        return pattern in val


class BareascPlugin(BeetsPlugin):
    """Plugin to provide bare-ASCII option for beets matching."""
    def __init__(self):
        """Default prefix for selecting bare-ASCII matching is #."""
        super(BareascPlugin, self).__init__()
        self.config.add({
            'prefix': '#',
        })

    def queries(self):
        """Register bare-ASCII matching."""
        prefix = self.config['prefix'].as_str()
        return {prefix: BareascQuery}

    def commands(self):
        """Add bareasc command as unidecode version of 'list'."""
        cmd = ui.Subcommand('bareasc',
                            help='unidecode version of beet list command')
        cmd.parser.usage += u"\n" \
            u'Example: %prog -f \'$album: $title\' artist:beatles'
        cmd.parser.add_all_common_options()
        cmd.func = self.unidecode_list
        return [cmd]

    def unidecode_list(self, lib, opts, args):
        """Emulate normal 'list' command but with unidecode output."""
        query = decargs(args)
        album = opts.album
        # Copied from commands.py - list_items
        if album:
            for album in lib.albums(query):
                bare = unidecode(six.ensure_text(str(album)))
                print_(six.ensure_text(bare))
        else:
            for item in lib.items(query):
                bare = unidecode(six.ensure_text(str(item)))
                print_(six.ensure_text(bare))
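
# Illustrative behaviour of BareascQuery.string_match above (assumes beets
# and unidecode are importable; the values are made up):
#
#     from beetsplug.bareasc import BareascQuery
#     BareascQuery.string_match(u'beyonce', u'Beyoncé')   # True (smartcase)
#     BareascQuery.string_match(u'Beyonce', u'beyoncé')   # False (case kept)
#
# At the command line the same matching is reached through the configured
# prefix, e.g. `beet list '#beyonce'`.
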
@@ -21,6 +21,7 @@ use of the wide range of MPD clients.
from __future__ import division, absolute_import, print_function

import re
import sys
from string import Template
import traceback
import random

@@ -334,7 +335,7 @@ class BaseServer(object):

    def cmd_kill(self, conn):
        """Exits the server process."""
        exit(0)
        sys.exit(0)

    def cmd_close(self, conn):
        """Closes the connection."""

@@ -348,7 +348,7 @@ class ConvertPlugin(BeetsPlugin):
            item.store()  # Store new path and audio data.

            if self.config['embed'] and not linked:
                album = item.get_album()
                album = item._cached_album
                if album and album.artpath:
                    self._log.debug(u'embedding album art from {}',
                                    util.displayable_path(album.artpath))

@@ -1,58 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2016 Bruno Cauet
# Split an album-file in tracks thanks a cue file

from __future__ import division, absolute_import, print_function

import subprocess
from os import path
from glob import glob

from beets.util import command_output, displayable_path
from beets.plugins import BeetsPlugin
from beets.autotag import TrackInfo


class CuePlugin(BeetsPlugin):
    def __init__(self):
        super(CuePlugin, self).__init__()
        # this does not seem supported by shnsplit
        self.config.add({
            'keep_before': .1,
            'keep_after': .9,
        })

        # self.register_listener('import_task_start', self.look_for_cues)

    def candidates(self, items, artist, album, va_likely, extra_tags=None):
        import pdb
        pdb.set_trace()

    def item_candidates(self, item, artist, album):
        dir = path.dirname(item.path)
        cues = glob.glob(path.join(dir, "*.cue"))
        if not cues:
            return
        if len(cues) > 1:
            self._log.info(u"Found multiple cue files doing nothing: {0}",
                           list(map(displayable_path, cues)))

        cue_file = cues[0]
        self._log.info("Found {} for {}", displayable_path(cue_file), item)

        try:
            # careful: will ask for input in case of conflicts
            command_output(['shnsplit', '-f', cue_file, item.path])
        except (subprocess.CalledProcessError, OSError):
            self._log.exception(u'shnsplit execution failed')
            return

        tracks = glob(path.join(dir, "*.wav"))
        self._log.info("Generated {0} tracks", len(tracks))
        for t in tracks:
            title = "dunno lol"
            track_id = "wtf"
            index = int(path.basename(t)[len("split-track"):-len(".wav")])
            yield TrackInfo(title=title, track_id=track_id, index=index,
                            artist=artist)
        # generate TrackInfo instances

@@ -239,13 +239,10 @@ class DiscogsPlugin(BeetsPlugin):
        # cause a query to return no results, even if they match the artist or
        # album title. Use `re.UNICODE` flag to avoid stripping non-english
        # word characters.
        # FIXME: Encode as ASCII to work around a bug:
        # https://github.com/beetbox/beets/issues/1051
        # When the library is fixed, we should encode as UTF-8.
        query = re.sub(r'(?u)\W+', ' ', query).encode('ascii', "replace")
        query = re.sub(r'(?u)\W+', ' ', query)
        # Strip medium information from query. Things like "CD1" and "disk 1"
        # can also negate an otherwise positive result.
        query = re.sub(br'(?i)\b(CD|disc)\s*\d+', b'', query)
        query = re.sub(r'(?i)\b(CD|disc)\s*\d+', '', query)

        self.request_start()
        try:
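
# A worked example of the sanitisation above (the query is made up). With the
# new str-based code:
#
#     query = u'Artist - Album (CD1)'
#     query = re.sub(r'(?u)\W+', ' ', query)               # u'Artist Album CD1 '
#     query = re.sub(r'(?i)\b(CD|disc)\s*\d+', '', query)  # u'Artist Album  '
#
# leaving only word characters, with medium markers such as "CD1" removed.
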
@@ -26,14 +26,15 @@ from xml.etree import ElementTree
from datetime import datetime, date
from beets.plugins import BeetsPlugin
from beets import ui
from beets import util
import mediafile
from beetsplug.info import make_key_filter, library_data, tag_data
from beetsplug.info import library_data, tag_data


class ExportEncoder(json.JSONEncoder):
    """Deals with dates because JSON doesn't have a standard."""
    def default(self, o):
        if isinstance(o, datetime) or isinstance(o, date):
        if isinstance(o, (datetime, date)):
            return o.isoformat()
        return json.JSONEncoder.default(self, o)


@@ -129,16 +130,18 @@ class ExportPlugin(BeetsPlugin):
        for keys in opts.included_keys:
            included_keys.extend(keys.split(','))

        key_filter = make_key_filter(included_keys)

        for data_emitter in data_collector(lib, ui.decargs(args)):
            try:
                data, item = data_emitter()
                data, item = data_emitter(included_keys or '*')
            except (mediafile.UnreadableFileError, IOError) as ex:
                self._log.error(u'cannot read file: {0}', ex)
                continue

            data = key_filter(data)
            for key, value in data.items():
                if isinstance(value, bytes):
                    data[key] = util.displayable_path(value)

            items += [data]

            if file_format_is_line_based:
                export_format.export(data, **format_options)

@@ -51,6 +51,7 @@ class Candidate(object):
    CANDIDATE_BAD = 0
    CANDIDATE_EXACT = 1
    CANDIDATE_DOWNSCALE = 2
    CANDIDATE_DOWNSIZE = 3

    MATCH_EXACT = 0
    MATCH_FALLBACK = 1

@@ -71,12 +72,15 @@ class Candidate(object):

        Return `CANDIDATE_BAD` if the file is unusable.
        Return `CANDIDATE_EXACT` if the file is usable as-is.
        Return `CANDIDATE_DOWNSCALE` if the file must be resized.
        Return `CANDIDATE_DOWNSCALE` if the file must be rescaled.
        Return `CANDIDATE_DOWNSIZE` if the file must be resized, and possibly
        also rescaled.
        """
        if not self.path:
            return self.CANDIDATE_BAD

        if not (plugin.enforce_ratio or plugin.minwidth or plugin.maxwidth):
        if (not (plugin.enforce_ratio or plugin.minwidth or plugin.maxwidth
                 or plugin.max_filesize)):
            return self.CANDIDATE_EXACT

        # get_size returns None if no local imaging backend is available

@@ -87,14 +91,15 @@ class Candidate(object):
        if not self.size:
            self._log.warning(u'Could not get size of image (please see '
                              u'documentation for dependencies). '
                              u'The configuration options `minwidth` and '
                              u'`enforce_ratio` may be violated.')
                              u'The configuration options `minwidth`, '
                              u'`enforce_ratio` and `max_filesize` '
                              u'may be violated.')
            return self.CANDIDATE_EXACT

        short_edge = min(self.size)
        long_edge = max(self.size)

        # Check minimum size.
        # Check minimum dimension.
        if plugin.minwidth and self.size[0] < plugin.minwidth:
            self._log.debug(u'image too small ({} < {})',
                            self.size[0], plugin.minwidth)

@@ -122,12 +127,27 @@ class Candidate(object):
                            self.size[0], self.size[1])
            return self.CANDIDATE_BAD

        # Check maximum size.
        # Check maximum dimension.
        downscale = False
        if plugin.maxwidth and self.size[0] > plugin.maxwidth:
            self._log.debug(u'image needs resizing ({} > {})',
            self._log.debug(u'image needs rescaling ({} > {})',
                            self.size[0], plugin.maxwidth)
            return self.CANDIDATE_DOWNSCALE
            downscale = True

        # Check filesize.
        downsize = False
        if plugin.max_filesize:
            filesize = os.stat(syspath(self.path)).st_size
            if filesize > plugin.max_filesize:
                self._log.debug(u'image needs resizing ({}B > {}B)',
                                filesize, plugin.max_filesize)
                downsize = True

        if downscale:
            return self.CANDIDATE_DOWNSCALE
        elif downsize:
            return self.CANDIDATE_DOWNSIZE
        else:
            return self.CANDIDATE_EXACT

    def validate(self, plugin):

@@ -135,9 +155,17 @@ class Candidate(object):
        return self.check

    def resize(self, plugin):
        if plugin.maxwidth and self.check == self.CANDIDATE_DOWNSCALE:
            self.path = ArtResizer.shared.resize(plugin.maxwidth, self.path,
                                                 quality=plugin.quality)
        if self.check == self.CANDIDATE_DOWNSCALE:
            self.path = \
                ArtResizer.shared.resize(plugin.maxwidth, self.path,
                                         quality=plugin.quality,
                                         max_filesize=plugin.max_filesize)
        elif self.check == self.CANDIDATE_DOWNSIZE:
            # dimensions are correct, so maxwidth is set to maximum dimension
            self.path = \
                ArtResizer.shared.resize(max(self.size), self.path,
                                         quality=plugin.quality,
                                         max_filesize=plugin.max_filesize)

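# A hedged summary of the decision logic above (the thresholds are made up):
# with maxwidth=1500 and max_filesize=1000000,
#
#     a 3000px-wide image      -> CANDIDATE_DOWNSCALE (rescaled to 1500px)
#     a 1200px, 2 MB image     -> CANDIDATE_DOWNSIZE (recompressed, same size)
#     a 1200px, 200 kB image   -> CANDIDATE_EXACT (used as-is)
#
# When both conditions hold, downscaling wins; resize() passes max_filesize
# in either case, so the size cap is still honoured.
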
def _logged_get(log, *args, **kwargs):

@@ -504,7 +532,7 @@ class FanartTV(RemoteArtSource):

        matches = []
        # can there be more than one releasegroupid per response?
        for mbid, art in data.get(u'albums', dict()).items():
        for mbid, art in data.get(u'albums', {}).items():
            # there might be more art referenced, e.g. cdart, and an albumcover
            # might not be present, even if the request was successful
            if album.mb_releasegroupid == mbid and u'albumcover' in art:

@@ -892,6 +920,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
        'minwidth': 0,
        'maxwidth': 0,
        'quality': 0,
        'max_filesize': 0,
        'enforce_ratio': False,
        'cautious': False,
        'cover_names': ['cover', 'front', 'art', 'album', 'folder'],

@@ -910,6 +939,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):

        self.minwidth = self.config['minwidth'].get(int)
        self.maxwidth = self.config['maxwidth'].get(int)
        self.max_filesize = self.config['max_filesize'].get(int)
        self.quality = self.config['quality'].get(int)

        # allow both pixel and percentage-based margin specifications

@@ -110,7 +110,7 @@ class FishPlugin(BeetsPlugin):
    # Collect commands, their aliases, and their help text
    cmd_names_help = []
    for cmd in beetcmds:
        names = [alias for alias in cmd.aliases]
        names = list(cmd.aliases)
        names.append(cmd.name)
        for name in names:
            cmd_names_help.append((name, cmd.help))

@@ -238,7 +238,7 @@ def get_all_commands(beetcmds):
    # Formatting for Fish to complete command options
    word = ""
    for cmd in beetcmds:
        names = [alias for alias in cmd.aliases]
        names = list(cmd.aliases)
        names.append(cmd.name)
        for name in names:
            name = _escape(name)


@@ -27,7 +27,7 @@ class ImportAddedPlugin(BeetsPlugin):
        # album.path for old albums that were replaced by a reimported album
        self.replaced_album_paths = None
        # item path in the library to the mtime of the source file
        self.item_mtime = dict()
        self.item_mtime = {}

        register = self.register_listener
        register('import_task_created', self.check_config)

@@ -19,7 +19,6 @@
from __future__ import division, absolute_import, print_function

import os
import re

from beets.plugins import BeetsPlugin
from beets import ui

@@ -42,15 +41,29 @@ def tag_data(lib, args):
            yield tag_data_emitter(item.path)


def tag_fields():
    fields = set(mediafile.MediaFile.readable_fields())
    fields.add('art')
    return fields


def tag_data_emitter(path):
    def emitter():
        fields = list(mediafile.MediaFile.readable_fields())
    def emitter(included_keys):
        if included_keys == '*':
            fields = tag_fields()
        else:
            fields = included_keys
        if 'images' in fields:
            # We can't serialize the image data.
            fields.remove('images')
        mf = mediafile.MediaFile(syspath(path))
        tags = {}
        for field in fields:
            tags[field] = getattr(mf, field)
        tags['art'] = mf.art is not None
            if field == 'art':
                tags[field] = mf.art is not None
            else:
                tags[field] = getattr(mf, field, None)

        # create a temporary Item to take advantage of __format__
        item = Item.from_path(syspath(path))

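# A sketch of the new emitter contract (path and fields made up): every
# emitter now receives the requested keys, so filtering happens at the
# source instead of through a separate key filter:
#
#     data, item = tag_data_emitter('/music/a.mp3')('*')             # all fields
#     data, item = tag_data_emitter('/music/a.mp3')(['title', 'art'])  # just two
#
# '*' expands to tag_fields(), i.e. MediaFile.readable_fields() plus 'art'.
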
@@ -64,8 +77,8 @@ def library_data(lib, args):


def library_data_emitter(item):
    def emitter():
        data = dict(item.formatted())
    def emitter(included_keys):
        data = dict(item.formatted(included_keys=included_keys))

        return data, item
    return emitter

@@ -185,18 +198,16 @@ class InfoPlugin(BeetsPlugin):
            included_keys.extend(keys.split(','))
        # Drop path even if user provides it multiple times
        included_keys = [k for k in included_keys if k != 'path']
        key_filter = make_key_filter(included_keys)

        first = True
        summary = {}
        for data_emitter in data_collector(lib, ui.decargs(args)):
            try:
                data, item = data_emitter()
                data, item = data_emitter(included_keys or '*')
            except (mediafile.UnreadableFileError, IOError) as ex:
                self._log.error(u'cannot read file: {0}', ex)
                continue

            data = key_filter(data)
            if opts.summarize:
                update_summary(summary, data)
            else:

@@ -211,34 +222,3 @@ class InfoPlugin(BeetsPlugin):

        if opts.summarize:
            print_data(summary)


def make_key_filter(include):
    """Return a function that filters a dictionary.

    The returned filter takes a dictionary and returns another
    dictionary that only includes the key-value pairs where the key
    glob-matches one of the keys in `include`.
    """
    # By default, if no field inclusions are specified, include
    # everything but `path`.
    if not include:
        def filter_(data):
            return {k: v for k, v in data.items()
                    if k != 'path'}
        return filter_

    matchers = []
    for key in include:
        key = re.escape(key)
        key = key.replace(r'\*', '.*')
        matchers.append(re.compile(key + '$'))

    def filter_(data):
        filtered = dict()
        for key, value in data.items():
            if any([m.match(key) for m in matchers]):
                filtered[key] = value
        return filtered

    return filter_

@@ -291,14 +291,21 @@ class MusiXmatch(Backend):
            self._log.warning(u'we are blocked at MusixMatch: url %s failed'
                              % url)
            return
        html_part = html.split('<p class="mxm-lyrics__content')[-1]
        lyrics = extract_text_between(html_part, '>', '</p>')
        html_parts = html.split('<p class="mxm-lyrics__content')
        # Sometimes lyrics come in 2 or more parts
        lyrics_parts = []
        for html_part in html_parts:
            lyrics_parts.append(extract_text_between(html_part, '>', '</p>'))
        lyrics = '\n'.join(lyrics_parts)
        lyrics = lyrics.strip(',"').replace('\\n', '\n')
        # another odd case: sometimes only that string remains, for
        # missing songs. this seems to happen after being blocked
        # above, when filling in the CAPTCHA.
        if "Instant lyrics for all your music." in lyrics:
            return
        # sometimes there are non-existent lyrics with some content
        if 'Lyrics | Musixmatch' in lyrics:
            return
        return lyrics


@@ -401,6 +408,62 @@ class Genius(Backend):
        return lyrics_div.get_text()


class Tekstowo(Backend):
    # Fetch lyrics from Tekstowo.pl.

    BASE_URL = 'http://www.tekstowo.pl'
    URL_PATTERN = BASE_URL + '/wyszukaj.html?search-title=%s&search-artist=%s'

    def fetch(self, artist, title):
        url = self.build_url(title, artist)
        search_results = self.fetch_url(url)
        song_page_url = self.parse_search_results(search_results)

        if not song_page_url:
            return None

        song_page_html = self.fetch_url(song_page_url)
        return self.extract_lyrics(song_page_html)

    def parse_search_results(self, html):
        if not HAS_BEAUTIFUL_SOUP:
            return None

        html = _scrape_strip_cruft(html)
        html = _scrape_merge_paragraphs(html)

        try:
            html = BeautifulSoup(html, "html.parser")
        except HTMLParseError:
            return None

        song_rows = html.find("div", class_="content"). \
            find("div", class_="card"). \
            find_all("div", class_="box-przeboje")

        if not song_rows:
            return None

        song_row = song_rows[0]

        if not song_row:
            return None

        href = song_row.find('a').get('href')
        return self.BASE_URL + href

    def extract_lyrics(self, html):
        html = _scrape_strip_cruft(html)
        html = _scrape_merge_paragraphs(html)

        try:
            html = BeautifulSoup(html, "html.parser")
        except HTMLParseError:
            return None

        return html.find("div", class_="song-text").get_text()


def remove_credits(text):
    """Remove first/last line of text if it contains the word 'lyrics'
    eg 'Lyrics by songsdatabase.com'
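
# A hedged sketch of the Tekstowo flow above (title and artist made up):
# build_url() fills URL_PATTERN, giving roughly
#
#     http://www.tekstowo.pl/wyszukaj.html?search-title=Title&search-artist=Artist
#
# after which the first "box-przeboje" result row is followed to the song
# page, whose "song-text" div is returned as the lyrics.
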
@@ -593,11 +656,13 @@ class Google(Backend):


class LyricsPlugin(plugins.BeetsPlugin):
    SOURCES = ['google', 'musixmatch', 'genius']
    SOURCES = ['google', 'musixmatch', 'genius', 'tekstowo']
    BS_SOURCES = ['google', 'genius', 'tekstowo']
    SOURCE_BACKENDS = {
        'google': Google,
        'musixmatch': MusiXmatch,
        'genius': Genius,
        'tekstowo': Tekstowo,
    }

    def __init__(self):

@@ -636,6 +701,9 @@ class LyricsPlugin(plugins.BeetsPlugin):
        sources = plugins.sanitize_choices(
            self.config['sources'].as_str_seq(), available_sources)

        if not HAS_BEAUTIFUL_SOUP:
            sources = self.sanitize_bs_sources(sources)

        if 'google' in sources:
            if not self.config['google_API_key'].get():
                # We log a *debug* message here because the default

@@ -645,18 +713,6 @@ class LyricsPlugin(plugins.BeetsPlugin):
                self._log.debug(u'Disabling google source: '
                                u'no API key configured.')
                sources.remove('google')
            elif not HAS_BEAUTIFUL_SOUP:
                self._log.warning(u'To use the google lyrics source, you must '
                                  u'install the beautifulsoup4 module. See '
                                  u'the documentation for further details.')
                sources.remove('google')

        if 'genius' in sources and not HAS_BEAUTIFUL_SOUP:
            self._log.debug(
                u'The Genius backend requires BeautifulSoup, which is not '
                u'installed, so the source is disabled.'
            )
            sources.remove('genius')

        self.config['bing_lang_from'] = [
            x.lower() for x in self.config['bing_lang_from'].as_str_seq()]

@@ -670,6 +726,17 @@ class LyricsPlugin(plugins.BeetsPlugin):
        self.backends = [self.SOURCE_BACKENDS[source](self.config, self._log)
                         for source in sources]

    def sanitize_bs_sources(self, sources):
        for source in self.BS_SOURCES:
            if source in sources:
                self._log.debug(u'To use the %s lyrics source, you must '
                                u'install the beautifulsoup4 module. See '
                                u'the documentation for further details.'
                                % source)
                sources.remove(source)

        return sources

    def get_bing_access_token(self):
        params = {
            'client_id': 'beets',
@@ -123,7 +123,7 @@ class MBSyncPlugin(BeetsPlugin):
        # Map release track and recording MBIDs to their information.
        # Recordings can appear multiple times on a release, so each MBID
        # maps to a list of TrackInfo objects.
        releasetrack_index = dict()
        releasetrack_index = {}
        track_index = defaultdict(list)
        for track_info in album_info.tracks:
            releasetrack_index[track_info.release_track_id] = track_info

@@ -69,6 +69,7 @@ class Itunes(MetaSource):
        'itunes_skipcount': types.INTEGER,
        'itunes_lastplayed': DateType(),
        'itunes_lastskipped': DateType(),
        'itunes_dateadded': DateType(),
    }

    def __init__(self, config, log):

@@ -124,3 +125,7 @@ class Itunes(MetaSource):
        if result.get('Skip Date'):
            item.itunes_lastskipped = mktime(
                result.get('Skip Date').timetuple())

        if result.get('Date Added'):
            item.itunes_dateadded = mktime(
                result.get('Date Added').timetuple())
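
# A hedged note on the date handling above: the iTunes library parser exposes
# dates as datetime objects, and mktime() turns them into the epoch seconds
# that DateType stores, e.g.:
#
#     from time import mktime
#     from datetime import datetime
#     mktime(datetime(2021, 3, 1).timetuple())
#     # -> e.g. 1614556800.0; the exact value depends on the local timezone
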
@@ -216,7 +216,7 @@ class MissingPlugin(BeetsPlugin):
        """Query MusicBrainz to determine items missing from `album`.
        """
        item_mbids = [x.mb_trackid for x in album.items()]
        if len([i for i in album.items()]) < album.albumtotal:
        if len(list(album.items())) < album.albumtotal:
            # fetch missing items
            # TODO: Implement caching that without breaking other stuff
            album_info = hooks.album_for_mbid(album.mb_albumid)

@@ -1059,40 +1059,30 @@ class ReplayGainPlugin(BeetsPlugin):
                    (not item.rg_album_gain or not item.rg_album_peak)
                    for item in album.items()])

    def _store(self, item):
        """Store an item to the database.
        When testing, item.store() sometimes fails non-destructively with
        sqlite.OperationalError.
        This method is here to be patched to a retry-once helper function
        in test_replaygain.py, so that it can still fail appropriately
        outside of these tests.
        """
        item.store()

    def store_track_gain(self, item, track_gain):
        item.rg_track_gain = track_gain.gain
        item.rg_track_peak = track_gain.peak
        self._store(item)
        item.store()
        self._log.debug(u'applied track gain {0} LU, peak {1} of FS',
                        item.rg_track_gain, item.rg_track_peak)

    def store_album_gain(self, item, album_gain):
        item.rg_album_gain = album_gain.gain
        item.rg_album_peak = album_gain.peak
        self._store(item)
        item.store()
        self._log.debug(u'applied album gain {0} LU, peak {1} of FS',
                        item.rg_album_gain, item.rg_album_peak)

    def store_track_r128_gain(self, item, track_gain):
        item.r128_track_gain = track_gain.gain
        self._store(item)
        item.store()

        self._log.debug(u'applied r128 track gain {0} LU',
                        item.r128_track_gain)

    def store_album_r128_gain(self, item, album_gain):
        item.r128_album_gain = album_gain.gain
        self._store(item)
        item.store()
        self._log.debug(u'applied r128 album gain {0} LU',
                        item.r128_album_gain)


@@ -1139,7 +1129,7 @@ class ReplayGainPlugin(BeetsPlugin):
        tag_vals = self.tag_specific_values(album.items())
        store_track_gain, store_album_gain, target_level, peak = tag_vals

        discs = dict()
        discs = {}
        if self.per_disc:
            for item in album.items():
                if discs.get(item.disc) is None:

@@ -1172,7 +1162,7 @@ class ReplayGainPlugin(BeetsPlugin):
            self._apply(
                self.backend_instance.compute_album_gain, args=(),
                kwds={
                    "items": [i for i in items],
                    "items": list(items),
                    "target_level": target_level,
                    "peak": peak
                },

@@ -1288,7 +1278,7 @@ class ReplayGainPlugin(BeetsPlugin):
        try:
            self._log.info('interrupted')
            self.terminate_pool()
            exit(0)
            sys.exit(0)
        except SystemExit:
            # Silence raised SystemExit ~ exit(0)
            pass

@@ -1321,22 +1311,11 @@ class ReplayGainPlugin(BeetsPlugin):
        """
        if self.config['auto']:
            if task.is_album:
                self.handle_album(
                    task.album,
                    self.config['auto'].get(bool),
                    self.config['overwrite'].get(bool)
                )
                self.handle_album(task.album, False)
            else:
                self.handle_track(
                    task.item,
                    self.config['auto'].get(bool),
                    self.config['overwrite'].get(bool)
                )
                self.handle_track(task.item, False)

    def commands(self):
        """Return the "replaygain" ui subcommand.
        """
        def func(lib, opts, args):
    def command_func(self, lib, opts, args):
        try:
            write = ui.should_write(opts.write)
            force = opts.force

@@ -1368,6 +1347,9 @@ class ReplayGainPlugin(BeetsPlugin):
            # Silence interrupt exceptions
            pass

    def commands(self):
        """Return the "replaygain" ui subcommand.
        """
        cmd = ui.Subcommand('replaygain', help=u'analyze for ReplayGain')
        cmd.parser.add_album_option()
        cmd.parser.add_option(

@@ -1385,5 +1367,5 @@ class ReplayGainPlugin(BeetsPlugin):
        cmd.parser.add_option(
            "-W", "--nowrite", dest="write", action="store_false",
            help=u"don't write metadata (opposite of -w)")
        cmd.func = func
        cmd.func = self.command_func
        return [cmd]

@@ -148,7 +148,7 @@ class SubsonicPlaylistPlugin(BeetsPlugin):

    def send(self, endpoint, params=None):
        if params is None:
            params = dict()
            params = {}
        a, b = self.generate_token()
        params['u'] = self.config['username']
        params['t'] = a

@@ -163,7 +163,7 @@ class SubsonicPlaylistPlugin(BeetsPlugin):
        return resp

    def get_playlists(self, ids):
        output = dict()
        output = {}
        for playlist_id in ids:
            name, tracks = self.get_playlist(playlist_id)
            for track in tracks:

@@ -59,6 +59,9 @@ def _rep(obj, expand=False):
        return out

    elif isinstance(obj, beets.library.Album):
        if app.config.get('INCLUDE_PATHS', False):
            out['artpath'] = util.displayable_path(out['artpath'])
        else:
            del out['artpath']
        if expand:
            out['items'] = [_rep(item) for item in obj.items()]

@@ -113,12 +116,19 @@ def resource(name, patchable=False):
        entities = [entity for entity in entities if entity]

        if get_method() == "DELETE":

            if app.config.get('READONLY', True):
                return flask.abort(405)

            for entity in entities:
                entity.remove(delete=is_delete())

            return flask.make_response(jsonify({'deleted': True}), 200)

        elif get_method() == "PATCH" and patchable:
            if app.config.get('READONLY', True):
                return flask.abort(405)

            for entity in entities:
                entity.update(flask.request.get_json())
                entity.try_sync(True, False)  # write, don't move

@@ -159,12 +169,19 @@ def resource_query(name, patchable=False):
        entities = query_func(queries)

        if get_method() == "DELETE":

            if app.config.get('READONLY', True):
                return flask.abort(405)

            for entity in entities:
                entity.remove(delete=is_delete())

            return flask.make_response(jsonify({'deleted': True}), 200)

        elif get_method() == "PATCH" and patchable:
            if app.config.get('READONLY', True):
                return flask.abort(405)

            for entity in entities:
                entity.update(flask.request.get_json())
                entity.try_sync(True, False)  # write, don't move

@@ -241,7 +258,9 @@ class QueryConverter(PathConverter):

    def to_python(self, value):
        queries = value.split('/')
        return [query.replace('\\', os.sep) for query in queries]
        """Do not do path substitution on regex value tests"""
        return [query if '::' in query else query.replace('\\', os.sep)
                for query in queries]

    def to_url(self, value):
        return ','.join([v.replace(os.sep, '\\') for v in value])
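
# A worked example of the converter change above (queries made up): ordinary
# terms still get path-separator substitution, while regex value tests
# (those containing '::') pass through untouched:
#
#     to_python('album\\art')      # -> ['album' + os.sep + 'art']
#     to_python('title::foo\\d+')  # -> ['title::foo\\d+']
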
@@ -423,6 +442,7 @@ class WebPlugin(BeetsPlugin):
            'cors_supports_credentials': False,
            'reverse_proxy': False,
            'include_paths': False,
            'readonly': True,
        })

    def commands(self):

@@ -442,6 +462,7 @@ class WebPlugin(BeetsPlugin):
        app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False

        app.config['INCLUDE_PATHS'] = self.config['include_paths']
        app.config['READONLY'] = self.config['readonly']

        # Enable CORS if required.
        if self.config['cors']:

@ -4,84 +4,111 @@ Changelog
|
|||
1.5.0 (in development)
|
||||
----------------------
|
||||
|
||||
New features:
|
||||
This long overdue release of beets includes far too many exciting and useful
|
||||
features than could ever be satisfactorily enumerated.
|
||||
As a technical detail, it also introduces two new external libraries:
|
||||
`MediaFile`_ and `Confuse`_ used to be part of beets but are now reusable
|
||||
dependencies---packagers, please take note.
|
||||
Finally, this is the last version of beets where we intend to support Python
|
||||
2.x and 3.5; future releases will soon require Python 3.6.
|
||||
|
||||
Major new features:
|
||||
|
||||
* :doc:`/plugins/mpdstats`: Add strip_path option to help build the right local path
|
||||
from MPD information
|
||||
* Submitting acoustID information on tracks which already have a fingerprint
|
||||
:bug:`3834`
|
||||
* conversion uses par_map to parallelize conversion jobs in python3
|
||||
* Add ``title_case`` config option to lastgenre to make TitleCasing optional.
|
||||
* When config is printed with no available configuration a new message is printed.
|
||||
:bug:`3779`
|
||||
* When importing a duplicate album it ask if it should "Keep all" instead of "Keep both".
|
||||
:bug:`3569`
|
||||
* :doc:`/plugins/chroma`: Update file metadata after generating fingerprints through the `submit` command.
|
||||
* :doc:`/plugins/lastgenre`: Added more heavy metal genres: https://en.wikipedia.org/wiki/Heavy_metal_genres to genres.txt and genres-tree.yaml
|
||||
* :doc:`/plugins/subsonicplaylist`: import playlist from a subsonic server.
|
||||
* :doc:`/plugins/subsonicupdate`: Automatically choose between token and
|
||||
password-based authentication based on server version
|
||||
* A new :ref:`reflink` config option instructs the importer to create fast,
|
||||
copy-on-write file clones on filesystems that support them. Thanks to
|
||||
:user:`rubdos`.
|
||||
* A new :ref:`extra_tags` configuration option allows more tagged metadata
|
||||
to be included in MusicBrainz queries.
|
||||
* A new :doc:`/plugins/fish` adds `Fish shell`_ tab autocompletion to beets
|
||||
* :doc:`plugins/fetchart` and :doc:`plugins/embedart`: Added a new ``quality``
|
||||
option that controls the quality of the image output when the image is
|
||||
resized.
|
||||
* :doc:`plugins/keyfinder`: Added support for `keyfinder-cli`_
|
||||
Thanks to :user:`BrainDamage`.
|
||||
* :doc:`plugins/fetchart`: Added a new ``high_resolution`` config option to
|
||||
allow downloading of higher resolution iTunes artwork (at the expense of
|
||||
file size).
|
||||
:bug:`3391`
|
||||
* :doc:`plugins/discogs` now adds two extra fields: `discogs_labelid` and
|
||||
`discogs_artistid`
|
||||
:bug:`3413`
|
||||
* :doc:`/plugins/export`: Added new ``-f`` (``--format``) flag;
|
||||
which allows for the ability to export in json, jsonlines, csv and xml.
|
||||
Thanks to :user:`austinmm`.
|
||||
:bug:`3402`
|
||||
* :doc:`/plugins/unimported`: lets you find untracked files in your library directory.
|
||||
* A new :doc:`/plugins/unimported` lets you find untracked files in your
|
||||
library directory.
|
||||
* We now fetch information about `works`_ from MusicBrainz.
|
||||
MusicBrainz matches provide the fields ``work`` (the title), ``mb_workid``
|
||||
(the MBID), and ``work_disambig`` (the disambiguation string).
|
||||
Thanks to :user:`dosoe`.
|
||||
:bug:`2580` :bug:`3272`
|
||||
* :doc:`/plugins/convert`: Added new ``-l`` (``--link``) flag and ``link``
|
||||
option as well as the ``-H`` (``--hardlink``) flag and ``hardlink``
|
||||
option which symlinks or hardlinks files that do not need to
|
||||
be converted instead of copying them.
|
||||
:bug:`2324`
|
||||
* :doc:`/plugins/bpd`: BPD now supports most of the features of version 0.16
|
||||
of the MPD protocol. This is enough to get it talking to more complicated
|
||||
clients like ncmpcpp, but there are still some incompatibilities, largely due
|
||||
to MPD commands we don't support yet. Let us know if you find an MPD client
|
||||
that doesn't get along with BPD!
|
||||
:bug:`3214` :bug:`800`
|
||||
* :doc:`/plugins/replaygain`: The plugin now supports a ``per_disc`` option
|
||||
which enables calculation of album ReplayGain on disc level instead of album
|
||||
level.
|
||||
Thanks to :user:`samuelnilsson`
|
||||
:bug:`293`
|
||||
* :doc:`/plugins/replaygain`: The new ``ffmpeg`` ReplayGain backend supports
|
||||
``R128_`` tags.
|
||||
:bug:`3056`
|
||||
* :doc:`plugins/replaygain`: ``r128_targetlevel`` is a new configuration option
|
||||
for the ReplayGain plugin: It defines the reference volume for files using
|
||||
``R128_`` tags. ``targetlevel`` only configures the reference volume for
|
||||
``REPLAYGAIN_`` files.
|
||||
:bug:`3065`
|
||||
* A new :doc:`/plugins/parentwork` gets information about the original work,
|
||||
which is useful for classical music.
|
||||
Thanks to :user:`dosoe`.
|
||||
:bug:`2580` :bug:`3279`
|
||||
* :doc:`/plugins/discogs`: The field now collects the "style" field.
|
||||
* :doc:`/plugins/bpd`: BPD now supports most of the features of version 0.16
|
||||
of the MPD protocol. This is enough to get it talking to more complicated
|
||||
clients like ncmpcpp, but there are still some incompatibilities, largely due
|
||||
to MPD commands we don't support yet. (Let us know if you find an MPD client
|
||||
that doesn't get along with BPD!)
|
||||
:bug:`3214` :bug:`800`
|
||||
* A new :doc:`/plugins/deezer` can autotag tracks and albums using the
|
||||
`Deezer`_ database.
|
||||
Thanks to :user:`rhlahuja`.
|
||||
:bug:`3355`
|
||||
* A new :doc:`/plugins/bareasc` provides a new query type: `bare ASCII`
|
||||
which ignores accented characters, treating them as though they
|
||||
were the base ASCII character. To perform `bare ASCII` searches, use
|
||||
the ``#`` prefix with :ref:`list-cmd` or other commands.
|
||||
:bug:`3882`
|
||||
|
||||
Other new things:

* Enable HTTPS for MusicBrainz by default and add a ``https`` configuration
  option for custom servers.
* :doc:`/plugins/mpdstats`: Add a new ``strip_path`` option to help build the
  right local path from MPD information.
* :doc:`/plugins/convert`: Conversion can now parallelize conversion jobs on
  Python 3.
* :doc:`/plugins/lastgenre`: Add a new ``title_case`` config option to make
  title-case formatting optional.
* ``beet config`` now prints a message when no configuration file is
  available.
  :bug:`3779`
* When importing a duplicate album, the prompt now says "keep all" instead of
  "keep both" to reflect that there may be more than two albums involved.
  :bug:`3569`
* :doc:`/plugins/chroma`: The plugin now updates file metadata after
  generating fingerprints through the ``submit`` command.
* :doc:`/plugins/lastgenre`: Added more heavy metal genres to the built-in
  genre filter lists.
* A new :doc:`/plugins/subsonicplaylist` can import playlists from a Subsonic
  server.
* :doc:`/plugins/subsonicupdate`: The plugin now automatically chooses between
  token- and password-based authentication based on the server version.
* A new :ref:`extra_tags` configuration option lets you use more metadata in
  MusicBrainz queries to further narrow the search.
* A new :doc:`/plugins/fish` adds `Fish shell`_ tab autocompletion to beets.
* :doc:`plugins/fetchart` and :doc:`plugins/embedart`: Added a new ``quality``
  option that controls the quality of the image output when the image is
  resized.
* :doc:`plugins/keyfinder`: Added support for `keyfinder-cli`_.
  Thanks to :user:`BrainDamage`.
* :doc:`plugins/fetchart`: Added a new ``high_resolution`` config option to
  allow downloading of higher-resolution iTunes artwork (at the expense of
  file size).
  :bug:`3391`
* :doc:`plugins/discogs`: The plugin applies two new fields:
  ``discogs_labelid`` and ``discogs_artistid``.
  :bug:`3413`
* :doc:`/plugins/export`: Added a new ``-f`` (``--format``) flag,
  which can export your data as JSON, JSON lines, CSV, or XML.
  Thanks to :user:`austinmm`.
  :bug:`3402`
* :doc:`/plugins/convert`: Added a new ``-l`` (``--link``) flag and ``link``
  option as well as the ``-H`` (``--hardlink``) flag and ``hardlink``
  option, which symlink or hardlink files that do not need to
  be converted (instead of copying them).
  :bug:`2324`
* :doc:`/plugins/replaygain`: The plugin now supports a ``per_disc`` option
  that enables calculation of album ReplayGain on disc level instead of album
  level.
  Thanks to :user:`samuelnilsson`.
  :bug:`293`
* :doc:`/plugins/replaygain`: The new ``ffmpeg`` ReplayGain backend supports
  ``R128_`` tags.
  :bug:`3056`
* :doc:`plugins/replaygain`: A new ``r128_targetlevel`` configuration option
  defines the reference volume for files using ``R128_`` tags. ``targetlevel``
  only configures the reference volume for ``REPLAYGAIN_`` files.
  :bug:`3065`
* :doc:`/plugins/discogs`: The plugin now collects the "style" field.
  Thanks to :user:`thedevilisinthedetails`.
  :bug:`2579` :bug:`3251`
* :doc:`/plugins/absubmit`: By default, the plugin now avoids re-analyzing
  files that already have AcousticBrainz data.
  There are new ``force`` and ``pretend`` options to help control this new
  behavior.
  Thanks to :user:`SusannaMaria`.

@ -99,24 +126,21 @@ New features:

  Windows.
  Thanks to :user:`MartyLake`.
  :bug:`3331` :bug:`3334`
* The ``data_source`` field, which indicates which metadata source was used
  during an autotagging import, is now also applied as an album-level flexible
  attribute, allowing for more refined album-level searches.
  :bug:`3350` :bug:`1693`
* :doc:`/plugins/beatport`: The plugin now gets the musical key, BPM, and
  genre for each track.
  :bug:`2080`
* :doc:`/plugins/beatport`: Fix the default assignment of the musical key.
  :bug:`3377`
* :doc:`/plugins/bpsync`: Add a ``bpsync`` plugin to sync metadata changes
  from the Beatport database.
* :doc:`/plugins/beatport`: Fix the assignment of ``genre`` and rename
  ``musical_key`` to ``initial_key``.
  :bug:`3387`
* :doc:`/plugins/hook`: The plugin now treats non-zero exit codes as errors.
  :bug:`3409`
* :doc:`/plugins/subsonicupdate`: A new ``url`` configuration replaces the
  older (and now deprecated) separate ``host``, ``port``, and ``contextpath``

@ -131,27 +155,24 @@ New features:

  :bug:`3459`
* :doc:`/plugins/fetchart`: Album art can now be fetched from `last.fm`_.
  :bug:`3530`
* The classes ``AlbumInfo`` and ``TrackInfo`` now have flexible attributes,
  which made it possible to solve :bug:`1547`.
  Thanks to :user:`dosoe`.
* :doc:`/plugins/web`: The query API now interprets backslashes as path
  separators to support path queries.
  Thanks to :user:`nmeum`.
  :bug:`3567`
* ``beet import`` now handles tar archives with bzip2 or gzip compression.
  :bug:`3606`
* :doc:`/plugins/plexupdate`: Added an option to use a secure connection to
  the Plex server, and to ignore certificate validation errors if necessary.
  :bug:`2871`
* :doc:`/plugins/lyrics`: Improved searching on the Genius backend when the
  artist contains special characters.
  :bug:`3634`
* :doc:`/plugins/parentwork`: Also get the composition date of the parent
  work, instead of just the child work.
  Thanks to :user:`aereaux`.
  :bug:`3650`
* :doc:`/plugins/lyrics`: Fix a bug in the heuristic for detecting valid
  lyrics in the Google source.
  :bug:`2969`
* :doc:`/plugins/thumbnails`: Fix a bug where pathlib expected a string
  instead of bytes for a path.
@ -178,11 +199,44 @@ New features:

* :doc:`/plugins/replaygain` now does its analysis in parallel when using
  the ``command`` or ``ffmpeg`` backends.
  :bug:`3478`
* Fields in queries now fall back to the item's album and check its fields
  too. Notably, this allows querying items by an album flexible attribute,
  including in path format configuration.
  Thanks to :user:`FichteFoll`.
  :bug:`2797` :bug:`2988`
* Add ``mb_album_extract`` and ``mb_track_extract`` hooks to allow
  plugins to add new fields based on MusicBrainz data. Thanks to :user:`dosoe`.
* Removed usage of the ``bs1770gain`` ReplayGain backend.
  Thanks to :user:`SamuelCook`.
* Added a ``trackdisambig`` field that stores the recording disambiguation
  from MusicBrainz for each track.
  :bug:`1904`
* The :doc:`/plugins/aura` has arrived!
* :doc:`plugins/fetchart`: The new ``max_filesize`` option for fetchart can be
  used to target a maximum image file size.
* :doc:`/plugins/badfiles`: Checkers can now be run during import with the
  ``check_on_import`` config option.
* :doc:`/plugins/export`: Big speedups when the ``--include-keys`` option is
  used.
  Thanks to :user:`ssssam`.
* The importer now persists all fields set using :ref:`set_fields` to the
  media files of all imported tracks.
* Added 7z support via the `py7zr`_ library.
  Thanks to :user:`arogl`. :bug:`3906`
* Get ISRC identifiers from MusicBrainz.
  Thanks to :user:`aereaux`.
* :doc:`/plugins/metasync`: The ``metasync`` plugin now also fetches the
  ``Date Added`` field from iTunes databases and stores it in the
  ``itunes_dateadded`` field.
  Thanks to :user:`sandersantema`.

.. _py7zr: https://pypi.org/project/py7zr/

Fixes:

* :doc:`/plugins/lyrics`: Fixed fetching of Musixmatch lyrics that are divided
  into multiple elements on the web page.
* :doc:`/plugins/lyrics`: Fixed Musixmatch fetching when the lyrics do not
  exist.
* :doc:`/plugins/web`: Allow the use of backslashes in regex web queries.
  :bug:`3867`
* :doc:`/plugins/web`: Fixed a small bug which caused the album art path to be
  redacted even when the ``include_paths`` option is set.
  :bug:`3866`
* :doc:`/plugins/discogs`: Fixed a bug with the ``index_tracks`` option that
  sometimes caused the index to be discarded. Also removed the extra semicolon
  that was added when there is no index track.

@ -253,6 +307,8 @@ Fixes:

* Removed the ``@classmethod`` decorator from the
  ``dbcore.query.NoneQuery.match`` method, which made it fail with an
  ``AttributeError`` when called. It is now an instance method.
  :bug:`3516` :bug:`3517`
* :doc:`/plugins/lyrics`: Added a Tekstowo.pl lyrics provider.
  :bug:`3344`
* :doc:`/plugins/lyrics`: Tolerate a missing lyrics div in the Genius scraper.
  Thanks to :user:`thejli21`.
  :bug:`3535` :bug:`3554`

@ -304,9 +360,17 @@ Fixes:

  information. Thanks to :user:`dosoe`.
* :doc:`/plugins/discogs`: Replace the deprecated discogs-client library with
  the community-supported python3-discogs-client library. :bug:`3608`
* Fix :bug:`2873`: duplicates can now generate checksums. Thanks to
  :user:`wisp3rwind` for the pointer on how to solve it. Thanks to
  :user:`arogl`.
* :doc:`/plugins/chroma`: Fixed submitting AcoustID information for tracks
  that already have a fingerprint.
  :bug:`3834`
* :doc:`/plugins/web`: DELETE and PATCH methods are disallowed by default.
  Set the ``readonly: no`` web config option to enable them.
  :bug:`3870`
* Allow equals signs within a ``--set`` value when importing.
  :bug:`2984`
* :doc:`/plugins/lyrics`: Fix crashes for Tekstowo false positives.
  :bug:`3904`
* :doc:`/reference/cli`: Remove the reference to the rarfile version in the
  link.

For plugin developers:

@ -341,6 +405,16 @@ For plugin developers:

  :bug:`3355`
* The autotag hooks have been modified so that they now take ``bpm``,
  ``musical_key``, and a per-track ``genre`` as attributes.
* Item (and attribute) access on an item now falls back to the album's
  attributes as well. If you specifically want to access an item's attributes,
  use ``Item.get(key, with_album=False)``. :bug:`2988`
* ``Item.keys`` also has a ``with_album`` argument now, defaulting to ``True``.
* A ``revision`` attribute has been added to ``Database``. It is increased on
  every transaction that mutates it. :bug:`2988`
* The classes ``AlbumInfo`` and ``TrackInfo`` now convey arbitrary attributes
  instead of a fixed, built-in set of field names (which was important to
  address :bug:`1547`).
  Thanks to :user:`dosoe`.

For packagers:

@ -1940,7 +2014,7 @@ Major new features and bigger changes:

  search results you wish to see when looking up releases at MusicBrainz
  during import. :bug:`1245`
* The importer now records the data source for a match in a new
  flexible attribute ``data_source`` on items and albums. :bug:`1311`
* The colors used in the terminal interface are now configurable via the new
  config option ``colors``, nested under the option ``ui``. (Also, the `color`
  config option has been moved from top-level to under ``ui``. Beets will

@ -200,6 +200,13 @@ The events currently available are:

  import pipeline stage is a better choice (see :ref:`plugin-stage`).
  Parameters: ``task`` and ``session``.

* `import_task_before_choice`: called after the candidate search for an import
  task and before any decision is made about how or whether to import or tag.
  Can be used to present information about the task or to initiate interaction
  with the user before importing occurs. Return an importer action to take a
  specific action; only one handler may return a non-``None`` result (see the
  sketch after this list).
  Parameters: ``task`` and ``session``.

* `import_task_choice`: called after a decision has been made about an import
  task. This event can be used to initiate further interaction with the user.
  Use ``task.choice_flag`` to determine or change the action to be

@ -239,6 +246,18 @@ The events currently available are:

  :ref:`appending choices to the prompt <append_prompt_choices>` by returning a
  list of ``PromptChoices``. Parameters: ``task`` and ``session``.

* `mb_track_extract`: called after the metadata is obtained from
  MusicBrainz. The parameter is a ``dict`` containing the tags retrieved from
  MusicBrainz for a track. Plugins must return a new (potentially empty)
  ``dict`` with additional ``field: value`` pairs, which the autotagger will
  apply to the item, as flexible attributes if ``field`` is not a hardcoded
  field. Fields already present on the track are overwritten.
  Parameter: ``data``

* `mb_album_extract`: Like `mb_track_extract`, but for album tags. Overwrites
  tags set at the track level, if they have the same ``field``.
  Parameter: ``data``
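
As a minimal sketch of how such listeners hook in (the plugin class and the
specific policies below are illustrative, not part of beets), a plugin can
register handlers for these events with ``register_listener``::

    from beets.plugins import BeetsPlugin
    from beets.importer import action


    class ExampleEventsPlugin(BeetsPlugin):
        def __init__(self):
            super(ExampleEventsPlugin, self).__init__()
            self.register_listener('import_task_before_choice',
                                   self.before_choice)
            self.register_listener('mb_track_extract', self.track_extract)

        def before_choice(self, task, session):
            # Hypothetical policy: skip import tasks with fewer than three
            # items. Returning None leaves the decision to the usual prompt
            # (and to other handlers).
            if len(task.items) < 3:
                return action.SKIP

        def track_extract(self, data):
            # Return extra field/value pairs for the autotagger to apply;
            # here we just copy one MusicBrainz tag into a flexible field.
            extra = {}
            if 'title' in data:
                extra['example_title_copy'] = data['title']
            return extra
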
The included ``mpdupdate`` plugin provides an example use case for event listeners.

Extend the Autotagger

198
docs/plugins/aura.rst
Normal file
@ -0,0 +1,198 @@

AURA Plugin
===========

This plugin is a server implementation of the `AURA`_ specification using the
`Flask`_ framework. AURA is still a work in progress and doesn't yet have a
stable version, but this server should be kept up to date. You are advised to
read the :ref:`aura-issues` section.

.. _AURA: https://auraspec.readthedocs.io
.. _Flask: https://palletsprojects.com/p/flask/

Install
-------

The ``aura`` plugin depends on `Flask`_, which can be installed using
``python -m pip install flask``. Then you can enable the ``aura`` plugin in
your configuration (see :ref:`using-plugins`).

It is likely that you will need to enable :ref:`aura-cors`, which introduces
an additional dependency: `flask-cors`_. This can be installed with
``python -m pip install flask-cors``.

If `Pillow`_ is installed (``python -m pip install Pillow``) then the optional
``width`` and ``height`` attributes are included in image resource objects.

.. _flask-cors: https://flask-cors.readthedocs.io
.. _Pillow: https://pillow.readthedocs.io


Usage
-----

Use ``beet aura`` to start the AURA server.
By default Flask's built-in server is used, which will give a warning about
using it in a production environment. It is safe to ignore this warning if the
server will have only a few users.

Alternatively, you can use ``beet aura -d`` to start the server in
`development mode`_, which will reload the server every time the AURA plugin
file is changed.

You can specify the hostname and port number used by the server in your
:doc:`configuration file </reference/config>`. For more detail see the
:ref:`configuration` section below.

If you would prefer to use a different WSGI server, such as gunicorn or uWSGI,
then see :ref:`aura-external-server`.

AURA is designed to separate the client and server functionality. This plugin
provides the server but not the client, so unless you like looking at JSON you
will need a separate client. Currently the only client is `AURA Web Client`_.

By default the API is served under http://127.0.0.1:8337/aura/. For example,
information about the track with an id of 3 can be obtained at
http://127.0.0.1:8337/aura/tracks/3.

**Note the absence of a trailing slash**:
http://127.0.0.1:8337/aura/tracks/3/ returns a ``404 Not Found`` error.

.. _development mode: https://flask.palletsprojects.com/en/1.1.x/server
.. _AURA Web Client: https://sr.ht/~callum/aura-web-client/


.. _configuration:

Configuration
-------------

To configure the plugin, make an ``aura:`` section in your
configuration file. The available options are:

- **host**: The server hostname. Set this to ``0.0.0.0`` to bind to all
  interfaces. Default: ``127.0.0.1``.
- **port**: The server port.
  Default: ``8337``.
- **cors**: A YAML list of origins to allow CORS requests from (see
  :ref:`aura-cors`, below).
  Default: disabled.
- **cors_supports_credentials**: Allow authenticated requests when using CORS.
  Default: disabled.
- **page_limit**: The number of items responses should be truncated to if the
  client does not specify. Default: ``500``.
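
For example, a configuration that binds to all interfaces and shortens
responses might look like this (the values here are purely illustrative)::

    aura:
        host: 0.0.0.0
        port: 8337
        page_limit: 100
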
.. _aura-cors:

Cross-Origin Resource Sharing (CORS)
------------------------------------

`CORS`_ allows browser clients to make requests to the AURA server. You should
set the ``cors`` configuration option to a YAML list of allowed origins.

For example::

    aura:
        cors:
            - http://www.example.com
            - https://aura.example.org

Alternatively you can set it to ``'*'`` to enable access from all origins.
Note that there are security implications if you set the origin to ``'*'``,
so please research this before using it. Note the use of quote marks when
allowing all origins. Quote marks are also required when the origin is
``null``, for example when using ``file:///``.

If the server is behind a proxy that uses credentials, you might want to set
the ``cors_supports_credentials`` configuration option to true to let
in-browser clients log in. Note that this option has not been tested, so it
may not work.

.. _CORS: https://en.wikipedia.org/wiki/Cross-origin_resource_sharing


.. _aura-external-server:

Using an External WSGI Server
-----------------------------

If you would like to use a different WSGI server (not Flask's built-in one),
then you can! The ``beetsplug.aura`` module provides a WSGI callable called
``create_app()`` which can be used by many WSGI servers.

For example, to run the AURA server using `gunicorn`_ use
``gunicorn 'beetsplug.aura:create_app()'``, or for `uWSGI`_ use
``uwsgi --http :8337 --module 'beetsplug.aura:create_app()'``.
Note that these commands just show how to use the AURA app and you would
probably use something a bit different in a production environment. Read the
relevant server's documentation to figure out what you need.

.. _gunicorn: https://gunicorn.org
.. _uWSGI: https://uwsgi-docs.readthedocs.io


Reverse Proxy Support
---------------------

The plugin should work behind a reverse proxy without further configuration;
however, this has not been tested extensively. For details of what headers
must be rewritten and a sample NGINX configuration, see `Flask proxy
setups`_.

It is (reportedly) possible to run the application under a URL prefix (for
example so you could have ``/foo/aura/server`` rather than ``/aura/server``),
but you'll have to work it out for yourself :-)

If using NGINX, do **not** add a trailing slash (``/``) to the URL where the
application is running, otherwise you will get a 404. However, if you are
using Apache then you **should** add a trailing slash.

.. _Flask proxy setups: https://flask.palletsprojects.com/en/1.1.x/deploying/wsgi-standalone/#proxy-setups


.. _aura-issues:

Issues
------

As of writing there are some differences between the specification and this
implementation:

- Compound filters are not specified in AURA, but this server interprets
  multiple ``filter`` parameters as AND (see the example after this list).
  See `issue #19`_ for discussion.
- The ``bitrate`` parameter used for content negotiation is not supported.
  Adding support for this is doable, but the way Flask handles acceptable MIME
  types means it's a lot easier not to bother with it. This means an error
  could be returned even if no transcoding was required.
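
For instance, following the compound-filter behaviour described above, a
request along these lines (an illustrative sketch; see the AURA specification
for the exact filter syntax) would be treated as the conjunction of both
filters::

    GET /aura/tracks?filter[artist]=Dvorak&filter[year]=1893
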

It is possible that some attributes required by AURA could be absent from the
server's response if beets does not have a saved value for them. However, this
has not happened so far.

Beets fields (including flexible fields) that do not have an AURA equivalent
are not provided in any resource's attributes section; however, these fields
may be used for filtering.

The ``mimetype`` and ``framecount`` attributes for track resources are not
supported. The first is because beets stores the file type (e.g. ``MP3``), so
it is hard to filter by MIME type. The second is because there is no
corresponding beets field.

Artists are defined by the ``artist`` field on beets Items, which means some
albums have no ``artists`` relationship. Albums only have related artists
when their beets ``albumartist`` field is the same as the ``artist`` field on
at least one of its constituent tracks.

The only art tracked by beets is a single cover image, so only albums have
related images at the moment. This could be expanded to looking in the same
directory for other images, and relating tracks to their album's image.

There are likely to be some performance issues, especially with larger
libraries. Sorting, pagination and inclusion (most notably of images) are
probably the main offenders. On a related note, the program attempts to import
Pillow every time it constructs an image resource object, which is not good.

The beets library is accessed using a so-called private function (with a
single leading underscore), ``beets.ui.__init__._open_library()``. This
shouldn't cause any issues, but it is probably not best practice.

.. _issue #19: https://github.com/beetbox/aura/issues/19

@ -17,6 +17,7 @@ install yourself:

You can also add custom commands for a specific extension, like this::

    badfiles:
        check_on_import: yes
        commands:
            ogg: myoggchecker --opt1 --opt2
            flac: flac --test --warnings-as-errors --silent

@ -25,6 +26,10 @@ Custom commands will be run once for each file of the specified type, with the

path to the file as the last argument. Commands must return a status code
greater than zero for a file to be considered corrupt.

You can run the checkers when importing files by using the ``check_on_import``
option. When on, checkers will be run against every imported file, and
warnings and errors will be presented when selecting a tagging option.

.. _mp3val: http://mp3val.sourceforge.net/
.. _flac: https://xiph.org/flac/


69
docs/plugins/bareasc.rst
Normal file
@ -0,0 +1,69 @@

Bare-ASCII Search Plugin
========================

The ``bareasc`` plugin provides a prefixed query that searches your library
using simple ASCII character matching, with accented characters folded to
their base ASCII characters. This can be useful if you want to find a track
with accented characters in the title or artist, particularly if you are not
confident you have the accents correct. It is also not unknown for the accents
to be incorrect in the database entry or wrong in the CD information.

First, enable the plugin named ``bareasc`` (see :ref:`using-plugins`).
You'll then be able to use the ``#`` prefix to use bare-ASCII matching::

    $ beet ls '#dvorak'
    István Kertész - REQUIEM - Dvořàk: Requiem, op.89 - Confutatis maledictis

Command
-------

In addition to the query prefix, the plugin provides a utility ``bareasc``
command. This command is **exactly** the same as the ``beet list`` command
except that the output is passed through the bare-ASCII transformation before
being printed. This allows you to easily check what the library data looks
like in bare ASCII, which can be useful if you are trying to work out why a
query is not matching.

Using the same example track as above::

    $ beet bareasc 'Dvořàk'
    Istvan Kertesz - REQUIEM - Dvorak: Requiem, op.89 - Confutatis maledictis

Note: the ``bareasc`` command does *not* automatically use bare-ASCII queries.
If you want a bare-ASCII query you still need to specify the ``#`` prefix.

Notes
-----

If the query string is all in lower case, the comparison ignores case as well
as accents.

The default ``bareasc`` prefix (``#``) is used as a comment character in some
shells, so it may need to be protected (for example, in quotes) when typed
into the command line.

The bare-ASCII transliteration is quite simple. It may not give the expected
output for all languages. For example, the German u-umlaut ``ü`` is
transformed into ASCII ``u``, not into ``ue``.

The bare-ASCII transformation also changes Unicode punctuation like double
quotes, apostrophes, and even some hyphens. It is often best to leave out
punctuation in queries. Note that the punctuation changes are often not even
visible with normal terminal fonts. You can always use the ``bareasc``
command to print the transformed entries and use a command like ``diff`` to
compare with the output from the ``list`` command.

Configuration
-------------

To configure the plugin, make a ``bareasc:`` section in your configuration
file. The only available option is:

- **prefix**: The character used to designate bare-ASCII queries.
  Default: ``#``, which may need to be escaped in some shells.
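
For example, to use ``!`` as the prefix instead of the default (an
illustrative value, not a plugin default)::

    bareasc:
        prefix: '!'
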
Credits
-------

The hard work in this plugin is done in Sean Burke's
`Unidecode <https://pypi.org/project/Unidecode/>`__ library.
Thanks are due to Sean and to all the people who created the Python
version and the beets extensible query architecture.

@ -49,6 +49,13 @@ file. The available options are:

  estimate the input image quality and uses 92 if it cannot be determined, and
  PIL defaults to 75.
  Default: 0 (disabled)
- **max_filesize**: The maximum size of a target piece of cover art in bytes.
  When using an ImageMagick backend this sets
  ``-define jpeg:extent=max_filesize``. Using PIL this will reduce JPEG
  quality by up to 50% to attempt to reach the target file size. Neither
  method is *guaranteed* to reach the target size, but in most cases it
  should succeed.
  Default: 0 (disabled)
- **enforce_ratio**: Only images with a width:height ratio of 1:1 are
  considered as valid album art candidates if set to ``yes``.
  It is also possible to specify a certain deviation to the exact ratio to

@ -61,7 +61,9 @@ following to your configuration::

    absubmit
    acousticbrainz
    aura
    badfiles
    bareasc
    beatport
    bpd
    bpm

@ -184,6 +186,7 @@ Path Formats

Interoperability
----------------

* :doc:`aura`: A server implementation of the `AURA`_ specification.
* :doc:`badfiles`: Check audio file integrity.
* :doc:`embyupdate`: Automatically notifies `Emby`_ whenever the beets library
  changes.
* :doc:`fish`: Adds `Fish shell`_ tab autocompletion to ``beet`` commands.

@ -205,6 +208,7 @@ Interoperability

  library changes.


.. _AURA: https://auraspec.readthedocs.io
.. _Emby: https://emby.media
.. _Fish shell: https://fishshell.com/
.. _Plex: https://plex.tv

@ -215,6 +219,7 @@ Interoperability

Miscellaneous
-------------

* :doc:`bareasc`: Search albums and tracks with bare-ASCII string matching.
* :doc:`bpd`: A music player for your beets library that emulates `MPD`_ and is
  compatible with `MPD clients`_.
* :doc:`convert`: Transcode music and embed album art while exporting to

@ -20,14 +20,12 @@ your library::

If you just want to see specific properties you can use the
``--include-keys`` option to filter them. The argument is a
comma-separated list of field names. For example::

    $ beet info -i 'title,mb_artistid' beatles

Will only show the ``title`` and ``mb_artistid`` properties. You can add the
``-i`` option multiple times to the command line.

Additional command-line options include:

@ -3,10 +3,11 @@ Lyrics Plugin

The ``lyrics`` plugin fetches and stores song lyrics from databases on the Web.
Namely, the current version of the plugin uses `Musixmatch`_, `Genius.com`_,
`Tekstowo.pl`_, and, optionally, the Google custom search API.

.. _Musixmatch: https://www.musixmatch.com/
.. _Genius.com: https://genius.com/
.. _Tekstowo.pl: https://www.tekstowo.pl/


Fetch Lyrics During Import

@ -58,11 +59,11 @@ configuration file. The available options are:

  sources known to be scrapeable.
- **sources**: List of sources to search for lyrics. An asterisk ``*`` expands
  to all available sources.
  Default: ``google musixmatch genius tekstowo``, i.e., all the
  available sources. The ``google`` source will be automatically
  deactivated if no ``google_API_key`` is set up.
  The ``google``, ``genius``, and ``tekstowo`` sources will only be enabled if
  BeautifulSoup is installed.

Here's an example of ``config.yaml``::

@ -155,15 +156,15 @@ After that, the lyrics plugin will fall back on other declared data sources.

.. _pip: https://pip.pypa.io
.. _BeautifulSoup: https://www.crummy.com/software/BeautifulSoup/bs4/doc/

Activate Genius and Tekstowo.pl Lyrics
--------------------------------------

Using the Genius or Tekstowo.pl backends requires `BeautifulSoup`_, which
you can install using `pip`_ by typing::

    pip install beautifulsoup4

These backends are enabled by default.

.. _lyrics-translation:

@ -66,6 +66,8 @@ configuration file. The available options are:

  Default: false.
- **include_paths**: If true, includes paths in item objects.
  Default: false.
- **readonly**: If true, DELETE and PATCH operations are not allowed; only
  GET requests are permitted.
  Default: true.
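
For example, to allow clients to modify the library over HTTP (an
illustrative snippet; the plugin is read-only by default)::

    web:
        readonly: no
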

Implementation
--------------

@ -189,6 +191,8 @@ code.

Removes the item with id *6* from the beets library. If the *?delete* query
string is included, the matching file will be deleted from disk.

Only allowed if the ``readonly`` configuration option is set to ``no``.
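
As a sketch, assuming the server is running on the default host and port and
``readonly`` is set to ``no``, such a request could be issued with ``curl``::

    curl -X DELETE 'http://127.0.0.1:8337/item/6?delete'
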

``PATCH /item/6``
+++++++++++++++++

@ -203,6 +207,8 @@ Returns the updated JSON representation. ::

      ...
    }

Only allowed if the ``readonly`` configuration option is set to ``no``.

``GET /item/6,12,13``
+++++++++++++++++++++

@ -261,6 +267,8 @@ For albums, the following endpoints are provided:

* ``GET /album/5``

* ``GET /album/5/art``

* ``DELETE /album/5``

* ``GET /album/5,7``

@ -277,6 +285,7 @@ or ``/album/5,7``. In addition we can request the cover art of an album with

``GET /album/5/art``.
You can also add the ``?expand`` flag to get the individual items of an album.

``DELETE`` is only allowed if the ``readonly`` configuration option is set to
``no``.

``GET /stats``
++++++++++++++

@ -67,7 +67,7 @@ albums (the latter case is true of typical Artist/Album organizations

and many people's "downloads" folders). The path can also be a single
song or an archive. Beets supports `zip` and `tar` archives out of the
box. To extract `rar` files, install the `rarfile`_ package and the
`unrar` command. To extract `7z` files, install the `py7zr`_ package.

Optional command flags:

@ -151,7 +151,8 @@ Optional command flags:

    beet import --set genre="Alternative Rock" --set mood="emotional"

.. _rarfile: https://pypi.python.org/pypi/rarfile/
.. _py7zr: https://pypi.org/project/py7zr/

.. only:: html

@ -683,6 +683,9 @@ Here's an example::

Other field/value pairs supplied via the ``--set`` option on the command line
override any settings here for fields with the same name.

Fields are set on both the album and each individual track of the album.
Fields are persisted to the media files of each track.

Default: ``{}`` (empty).

.. _musicbrainz-config:

@ -691,15 +694,18 @@ MusicBrainz Options
-------------------

You can instruct beets to use `your own MusicBrainz database`_ instead of
the `main server`_. Use the ``host``, ``https`` and ``ratelimit`` options
under a ``musicbrainz:`` header, like so::

    musicbrainz:
        host: localhost:5000
        https: no
        ratelimit: 100

The ``host`` key, of course, controls the Web server hostname (and port,
optionally) that will be contacted by beets (default: musicbrainz.org).
The ``https`` key makes the client use HTTPS instead of HTTP. This setting
applies only to custom servers; the official MusicBrainz server always uses
HTTPS. (Default: no.)
The server must have search indices enabled (see `Building search indexes`_).

The ``ratelimit`` option, an integer, controls the number of Web service
requests

9
setup.py
@ -130,7 +130,9 @@ setup(

    ] + [
        'discogs-client' if (sys.version_info < (3, 0, 0))
        else 'python3-discogs-client'
    ] + (
        ['py7zr'] if (sys.version_info > (3, 5, 0)) else []
    ),
    'lint': [
        'flake8',
        'flake8-coding',

@ -178,8 +180,10 @@ setup(

        # embedart: ImageMagick
        # absubmit: extractor binary from https://acousticbrainz.org/download
        # keyfinder: KeyFinder
        # replaygain: python-gi and GStreamer 1.0+
        #             or mp3gain/aacgain
        #             or Python Audio Tools
        #             or ffmpeg
        # ipfs: go-ipfs

    classifiers=[

@ -196,6 +200,7 @@ setup(

        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: Implementation :: CPython',
    ],
)

BIN
test/rsrc/archive.7z
Normal file
Binary file not shown.

@ -742,14 +742,17 @@ class ArtImporterTest(UseThePlugin):


class ArtForAlbumTest(UseThePlugin):
    """Tests that fetchart.art_for_album respects the scale & filesize
    configurations (e.g., minwidth, enforce_ratio, max_filesize)
    """

    IMG_225x225 = os.path.join(_common.RSRC, b'abbey.jpg')
    IMG_348x348 = os.path.join(_common.RSRC, b'abbey-different.jpg')
    IMG_500x490 = os.path.join(_common.RSRC, b'abbey-similar.jpg')

    IMG_225x225_SIZE = os.stat(util.syspath(IMG_225x225)).st_size
    IMG_348x348_SIZE = os.stat(util.syspath(IMG_348x348)).st_size

    def setUp(self):
        super(ArtForAlbumTest, self).setUp()

@ -839,6 +842,29 @@ class ArtForAlbumTest(UseThePlugin):

        self._assertImageResized(self.IMG_225x225, False)
        self._assertImageResized(self.IMG_348x348, True)

    def test_fileresize(self):
        self._require_backend()
        self.plugin.max_filesize = self.IMG_225x225_SIZE // 2
        self._assertImageResized(self.IMG_225x225, True)

    def test_fileresize_if_necessary(self):
        self._require_backend()
        self.plugin.max_filesize = self.IMG_225x225_SIZE
        self._assertImageResized(self.IMG_225x225, False)
        self._assertImageIsValidArt(self.IMG_225x225, True)

    def test_fileresize_no_scale(self):
        self._require_backend()
        self.plugin.maxwidth = 300
        self.plugin.max_filesize = self.IMG_225x225_SIZE // 2
        self._assertImageResized(self.IMG_225x225, True)

    def test_fileresize_and_scale(self):
        self._require_backend()
        self.plugin.maxwidth = 200
        self.plugin.max_filesize = self.IMG_225x225_SIZE // 2
        self._assertImageResized(self.IMG_225x225, True)


class DeprecatedConfigTest(_common.TestCase):
    """While refactoring the plugin, the remote_priority option was deprecated,

110
test/test_art_resize.py
Normal file
@ -0,0 +1,110 @@

# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2020, David Swarbrick.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.

"""Tests for image resizing based on filesize."""

from __future__ import division, absolute_import, print_function

import unittest
import os

from test import _common
from test.helper import TestHelper
from beets.util import syspath
from beets.util.artresizer import (
    pil_resize,
    im_resize,
    get_im_version,
    get_pil_version,
)


class ArtResizerFileSizeTest(_common.TestCase, TestHelper):
    """Unittest test case for Art Resizer to a specific filesize."""

    IMG_225x225 = os.path.join(_common.RSRC, b"abbey.jpg")
    IMG_225x225_SIZE = os.stat(syspath(IMG_225x225)).st_size

    def setUp(self):
        """Called before each test, setting up beets."""
        self.setup_beets()

    def tearDown(self):
        """Called after each test, unloading all plugins."""
        self.teardown_beets()

    def _test_img_resize(self, resize_func):
        """Test resizing based on file size, given a resize_func."""
        # Check quality setting unaffected by new parameter
        im_95_qual = resize_func(
            225,
            self.IMG_225x225,
            quality=95,
            max_filesize=0,
        )
        # check valid path returned - max_filesize hasn't broken resize command
        self.assertExists(im_95_qual)

        # Attempt a lower filesize with same quality
        im_a = resize_func(
            225,
            self.IMG_225x225,
            quality=95,
            max_filesize=0.9 * os.stat(syspath(im_95_qual)).st_size,
        )
        self.assertExists(im_a)
        # target size was achieved
        self.assertLess(os.stat(syspath(im_a)).st_size,
                        os.stat(syspath(im_95_qual)).st_size)

        # Attempt with lower initial quality
        im_75_qual = resize_func(
            225,
            self.IMG_225x225,
            quality=75,
            max_filesize=0,
        )
        self.assertExists(im_75_qual)

        im_b = resize_func(
            225,
            self.IMG_225x225,
            quality=95,
            max_filesize=0.9 * os.stat(syspath(im_75_qual)).st_size,
        )
        self.assertExists(im_b)
        # Check high (initial) quality still gives a smaller filesize
        self.assertLess(os.stat(syspath(im_b)).st_size,
                        os.stat(syspath(im_75_qual)).st_size)

    @unittest.skipUnless(get_pil_version(), "PIL not available")
    def test_pil_file_resize(self):
        """Test PIL resize function is lowering file size."""
        self._test_img_resize(pil_resize)

    @unittest.skipUnless(get_im_version(), "ImageMagick not available")
    def test_im_file_resize(self):
        """Test IM resize function is lowering file size."""
        self._test_img_resize(im_resize)


def suite():
    """Run this suite of tests."""
    return unittest.TestLoader().loadTestsFromName(__name__)


if __name__ == "__main__":
    unittest.main(defaultTest="suite")
154
test/test_bareasc.py
Normal file
@ -0,0 +1,154 @@

# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2021, Graham R. Cobb.

"""Tests for the 'bareasc' plugin."""

from __future__ import division, absolute_import, print_function

import unittest

from test.helper import capture_stdout, TestHelper

from beets import logging


class BareascPluginTest(unittest.TestCase, TestHelper):
    """Test bare ASCII query matching."""

    def setUp(self):
        """Set up test environment for bare ASCII query matching."""
        self.setup_beets()
        self.log = logging.getLogger('beets.web')
        self.config['bareasc']['prefix'] = u'#'
        self.load_plugins('bareasc')

        # Add library elements. Note that self.lib.add overrides any "id=<n>"
        # and assigns the next free id number.
        self.add_item(title=u'with accents',
                      album_id=2,
                      artist=u'Antonín Dvořák')
        self.add_item(title=u'without accents',
                      artist=u'Antonín Dvorak')
        self.add_item(title=u'with umlaut',
                      album_id=2,
                      artist=u'Brüggen')
        self.add_item(title=u'without umlaut or e',
                      artist=u'Bruggen')
        self.add_item(title=u'without umlaut with e',
                      artist=u'Brueggen')

    def test_search_normal_noaccent(self):
        """Normal search, no accents, not using bare-ASCII match.

        Finds just the unaccented entry.
        """
        items = self.lib.items(u'dvorak')

        self.assertEqual(len(items), 1)
        self.assertEqual([items[0].title], [u'without accents'])

    def test_search_normal_accent(self):
        """Normal search, with accents, not using bare-ASCII match.

        Finds just the accented entry.
        """
        items = self.lib.items(u'dvořák')

        self.assertEqual(len(items), 1)
        self.assertEqual([items[0].title], [u'with accents'])

    def test_search_bareasc_noaccent(self):
        """Bare-ASCII search, no accents.

        Finds both entries.
        """
        items = self.lib.items(u'#dvorak')

        self.assertEqual(len(items), 2)
        self.assertEqual(
            {items[0].title, items[1].title},
            {u'without accents', u'with accents'}
        )

    def test_search_bareasc_accent(self):
        """Bare-ASCII search, with accents.

        Finds both entries.
        """
        items = self.lib.items(u'#dvořák')

        self.assertEqual(len(items), 2)
        self.assertEqual(
            {items[0].title, items[1].title},
            {u'without accents', u'with accents'}
        )

    def test_search_bareasc_wrong_accent(self):
        """Bare-ASCII search, with an incorrect accent.

        Finds both entries.
        """
        items = self.lib.items(u'#dvořäk')

        self.assertEqual(len(items), 2)
        self.assertEqual(
            {items[0].title, items[1].title},
            {u'without accents', u'with accents'}
        )

    def test_search_bareasc_noumlaut(self):
        """Bare-ASCII search, with no umlaut.

        Finds the entry with 'u', not 'ue', although a German speaker
        would normally replace ü with ue.

        This is expected behaviour for this simple plugin.
        """
        items = self.lib.items(u'#Bruggen')

        self.assertEqual(len(items), 2)
        self.assertEqual(
            {items[0].title, items[1].title},
            {u'without umlaut or e', u'with umlaut'}
        )

    def test_search_bareasc_umlaut(self):
        """Bare-ASCII search, with umlaut.

        Finds the entry with 'u', not 'ue', although a German speaker
        would normally replace ü with ue.

        This is expected behaviour for this simple plugin.
        """
        items = self.lib.items(u'#Brüggen')

        self.assertEqual(len(items), 2)
        self.assertEqual(
            {items[0].title, items[1].title},
            {u'without umlaut or e', u'with umlaut'}
        )

    def test_bareasc_list_output(self):
        """Bare-ASCII version of list command - check output."""
        with capture_stdout() as output:
            self.run_command('bareasc', 'with accents')

        self.assertIn('Antonin Dvorak', output.getvalue())

    def test_bareasc_format_output(self):
        """Bare-ASCII version of list -f command - check output."""
        with capture_stdout() as output:
            self.run_command('bareasc', 'with accents',
                             '-f', '$artist:: $title')

        self.assertEqual('Antonin Dvorak:: with accents\n',
                         output.getvalue())


def suite():
    """Loader."""
    return unittest.TestLoader().loadTestsFromName(__name__)


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
@ -225,6 +225,31 @@ class MigrationTest(unittest.TestCase):

        self.fail("select failed")


class TransactionTest(unittest.TestCase):
    def setUp(self):
        self.db = DatabaseFixture1(':memory:')

    def tearDown(self):
        self.db._connection().close()

    def test_mutate_increase_revision(self):
        old_rev = self.db.revision
        with self.db.transaction() as tx:
            tx.mutate(
                'INSERT INTO {0} '
                '(field_one) '
                'VALUES (?);'.format(ModelFixture1._table),
                (111,),
            )
        self.assertGreater(self.db.revision, old_rev)

    def test_query_no_increase_revision(self):
        old_rev = self.db.revision
        with self.db.transaction() as tx:
            tx.query('PRAGMA table_info(%s)' % ModelFixture1._table)
        self.assertEqual(self.db.revision, old_rev)


class ModelTest(unittest.TestCase):
    def setUp(self):
        self.db = DatabaseFixture1(':memory:')

@ -246,6 +271,30 @@ class ModelTest(unittest.TestCase):

        row = self.db._connection().execute('select * from test').fetchone()
        self.assertEqual(row['field_one'], 123)

    def test_revision(self):
        old_rev = self.db.revision
        model = ModelFixture1()
        model.add(self.db)
        model.store()
        self.assertEqual(model._revision, self.db.revision)
        self.assertGreater(self.db.revision, old_rev)

        mid_rev = self.db.revision
        model2 = ModelFixture1()
        model2.add(self.db)
        model2.store()
        self.assertGreater(model2._revision, mid_rev)
        self.assertGreater(self.db.revision, model._revision)

        # revision changed, so the model should be re-loaded
        model.load()
        self.assertEqual(model._revision, self.db.revision)

        # revision did not change, so no reload
        mod2_old_rev = model2._revision
        model2.load()
        self.assertEqual(model2._revision, mod2_old_rev)

    def test_retrieve_by_id(self):
        model = ModelFixture1()
        model.add(self.db)
@ -450,6 +450,12 @@ class ImportRarTest(ImportZipTest):

        return os.path.join(_common.RSRC, b'archive.rar')


class Import7zTest(ImportZipTest):

    def create_archive(self):
        return os.path.join(_common.RSRC, b'archive.7z')


@unittest.skip('Implement me!')
class ImportPasswordRarTest(ImportZipTest):

@ -734,10 +740,12 @@ class ImportTest(_common.TestCase, ImportHelper):

    def test_set_fields(self):
        genre = u"\U0001F3B7 Jazz"
        collection = u"To Listen"
        comments = u"managed by beets"

        config['import']['set_fields'] = {
            u'collection': collection,
            u'genre': genre,
            u'comments': comments,
        }

        # As-is album import.

@ -748,7 +756,17 @@ class ImportTest(_common.TestCase, ImportHelper):

        for album in self.lib.albums():
            album.load()  # TODO: Not sure this is necessary.
            self.assertEqual(album.genre, genre)
            self.assertEqual(album.collection, collection)
            self.assertEqual(album.comments, comments)
            for item in album.items():
                self.assertEqual(
                    item.get("genre", with_album=False),
                    genre)
                self.assertEqual(
                    item.get("collection", with_album=False),
                    collection)
                self.assertEqual(
                    item.get("comments", with_album=False),
                    comments)
            # Remove the album from the library to test again with the
            # APPLY choice.
            album.remove()

@ -761,7 +779,17 @@ class ImportTest(_common.TestCase, ImportHelper):

        for album in self.lib.albums():
            album.load()
            self.assertEqual(album.genre, genre)
            self.assertEqual(album.collection, collection)
            self.assertEqual(album.comments, comments)
            for item in album.items():
                self.assertEqual(
                    item.get("genre", with_album=False),
                    genre)
                self.assertEqual(
                    item.get("collection", with_album=False),
                    collection)
                self.assertEqual(
                    item.get("comments", with_album=False),
                    comments)


class ImportTracksTest(_common.TestCase, ImportHelper):

@ -92,17 +92,6 @@ class InfoTest(unittest.TestCase, TestHelper):

        self.assertIn(u'title: [various]', out)
        self.remove_mediafile_fixtures()

    def test_custom_format(self):
        self.add_item_fixtures()
        out = self.run_with_output('info', '--library', '--format',

@ -49,7 +49,7 @@ class IPFSPluginTest(unittest.TestCase, TestHelper):

        want_item = test_album.items()[2]
        for check_item in added_album.items():
            try:
                if check_item.get('ipfs', with_album=False):
                    ipfs_item = os.path.basename(want_item.path).decode(
                        _fsencoding(),
                    )

@ -57,7 +57,8 @@ class IPFSPluginTest(unittest.TestCase, TestHelper):

                                 ipfs_item)
                    want_path = bytestring_path(want_path)
                    self.assertEqual(check_item.path, want_path)
                    self.assertEqual(check_item.get('ipfs', with_album=False),
                                     want_item.ipfs)
                    self.assertEqual(check_item.title, want_item.title)
                    found = True
            except AttributeError:

@ -132,6 +132,21 @@ class GetSetTest(_common.TestCase):

    def test_invalid_field_raises_attributeerror(self):
        self.assertRaises(AttributeError, getattr, self.i, u'xyzzy')

    def test_album_fallback(self):
        # integration test of item-album fallback
        lib = beets.library.Library(':memory:')
        i = item(lib)
        album = lib.add_album([i])
        album['flex'] = u'foo'
        album.store()

        self.assertTrue('flex' in i)
        self.assertFalse('flex' in i.keys(with_album=False))
        self.assertEqual(i['flex'], u'foo')
        self.assertEqual(i.get('flex'), u'foo')
        self.assertEqual(i.get('flex', with_album=False), None)
        self.assertEqual(i.get('flexx'), None)


class DestinationTest(_common.TestCase):
    def setUp(self):

@ -491,6 +506,24 @@ class DestinationTest(_common.TestCase):

        dest = self.i.destination()
        self.assertEqual(dest[-2:], b'XX')

    def test_album_field_query(self):
        self.lib.directory = b'one'
        self.lib.path_formats = [(u'default', u'two'),
                                 (u'flex:foo', u'three')]
        album = self.lib.add_album([self.i])
        self.assertEqual(self.i.destination(), np('one/two'))
        album['flex'] = u'foo'
        album.store()
        self.assertEqual(self.i.destination(), np('one/three'))

    def test_album_field_in_template(self):
        self.lib.directory = b'one'
        self.lib.path_formats = [(u'default', u'$flex/two')]
        album = self.lib.add_album([self.i])
        album['flex'] = u'foo'
        album.store()
        self.assertEqual(self.i.destination(), np('one/foo/two'))


class ItemFormattedMappingTest(_common.LibTestCase):
    def test_formatted_item_value(self):

@ -274,6 +274,8 @@ class LyricsPluginSourcesTest(LyricsGoogleBaseTest):

    dict(DEFAULT_SONG, backend=lyrics.Genius,
         # GitHub Actions is on some form of Cloudflare blacklist.
         skip=os.environ.get('GITHUB_ACTIONS') == 'true'),
    dict(artist=u'Boy In Space', title=u'u n eye',
         backend=lyrics.Tekstowo),
]

GOOGLE_SOURCES = [

@ -111,7 +111,8 @@ class MBAlbumInfoTest(_common.TestCase):

        })
        return release

    def _make_track(self, title, tr_id, duration, artist=False, video=False,
                    disambiguation=None):
        track = {
            'title': title,
            'id': tr_id,

@ -131,6 +132,8 @@ class MBAlbumInfoTest(_common.TestCase):

        ]
        if video:
            track['video'] = 'true'
        if disambiguation:
            track['disambiguation'] = disambiguation
        return track

    def test_parse_release_with_year(self):

@ -445,6 +448,18 @@ class MBAlbumInfoTest(_common.TestCase):

        self.assertEqual(d.tracks[1].title, 'TITLE TWO')
        self.assertEqual(d.tracks[2].title, 'TITLE VIDEO')

    def test_track_disambiguation(self):
        tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
                  self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0,
                                   disambiguation="SECOND TRACK")]
        release = self._make_release(tracks=tracks)

        d = mb.album_info(release)
        t = d.tracks
        self.assertEqual(len(t), 2)
        self.assertEqual(t[0].trackdisambig, None)
        self.assertEqual(t[1].trackdisambig, "SECOND TRACK")


class ParseIDTest(_common.TestCase):
    def test_parse_id_correct(self):

|
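The `_make_track` change follows MusicBrainz conventions: optional fields are omitted from the response rather than sent as nulls, so the fixture only sets the key when a value is supplied. The pattern in isolation (a simplified stand-in for the fixture; the `length` key is an assumption about fields the full helper sets):

def make_track(title, tr_id, duration, disambiguation=None):
    track = {'title': title, 'id': tr_id, 'length': duration}
    # Only add optional keys when present, as real MB payloads do.
    if disambiguation:
        track['disambiguation'] = disambiguation
    return track

print(make_track('TITLE ONE', 'ID ONE', 100000.0))
print(make_track('TITLE TWO', 'ID TWO', 200000.0, 'SECOND TRACK'))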
test/test_metasync.py

@@ -100,6 +100,7 @@ class MetaSyncTest(_common.TestCase, TestHelper):
         self.assertIn('itunes_skipcount: 3', out)
         self.assertIn('itunes_lastplayed: 2015-05-04 12:20:51', out)
         self.assertIn('itunes_lastskipped: 2015-02-05 15:41:04', out)
+        self.assertIn('itunes_dateadded: 2014-04-24 09:28:38', out)
         self.assertEqual(self.lib.items()[0].itunes_rating, 60)
 
     def test_sync_from_itunes(self):

@@ -111,12 +112,16 @@ class MetaSyncTest(_common.TestCase, TestHelper):
         self.assertFalse(hasattr(self.lib.items()[0], 'itunes_lastplayed'))
         self.assertEqual(self.lib.items()[0].itunes_lastskipped,
                          _parsetime('2015-02-05 15:41:04'))
+        self.assertEqual(self.lib.items()[0].itunes_dateadded,
+                         _parsetime('2014-04-24 09:28:38'))
 
         self.assertEqual(self.lib.items()[1].itunes_rating, 100)
         self.assertEqual(self.lib.items()[1].itunes_playcount, 31)
         self.assertEqual(self.lib.items()[1].itunes_skipcount, 0)
         self.assertEqual(self.lib.items()[1].itunes_lastplayed,
                          _parsetime('2015-05-04 12:20:51'))
+        self.assertEqual(self.lib.items()[1].itunes_dateadded,
+                         _parsetime('2014-04-24 09:28:38'))
         self.assertFalse(hasattr(self.lib.items()[1], 'itunes_lastskipped'))
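`_parsetime` itself is outside this hunk; judging by the assertions, it converts 'YYYY-MM-DD HH:MM:SS' strings into epoch timestamps. A plausible stand-alone equivalent (an assumption about the helper, not its actual body):

import time
from datetime import datetime

def parsetime(s):
    # Assumed equivalent of test_metasync's _parsetime helper.
    return time.mktime(datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timetuple())

print(parsetime('2015-05-04 12:20:51'))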
test/test_query.py

@@ -109,7 +109,7 @@ class DummyDataTestCase(_common.TestCase, AssertsMixin):
         items[2].comp = False
         for item in items:
             self.lib.add(item)
-        self.lib.add_album(items[:2])
+        self.album = self.lib.add_album(items[:2])
 
     def assert_items_matched_all(self, results):
         self.assert_items_matched(results, [

@@ -300,6 +300,17 @@ class GetTest(DummyDataTestCase):
         results = self.lib.items(q)
         self.assertFalse(results)
 
+    def test_album_field_fallback(self):
+        self.album['albumflex'] = u'foo'
+        self.album.store()
+
+        q = u'albumflex:foo'
+        results = self.lib.items(q)
+        self.assert_items_matched(results, [
+            u'foo bar',
+            u'baz qux',
+        ])
+
     def test_invalid_query(self):
         with self.assertRaises(InvalidQueryArgumentValueError) as raised:
             dbcore.query.NumericQuery('year', u'199a')
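Keeping the album on `self.album` lets the new test exercise the query-side half of the fallback: an item-level query can match a field that exists only on the parent album. Sketched outside the fixture, against the same public API (the field names are examples):

import beets.library

lib = beets.library.Library(':memory:')
i = beets.library.Item(title=u'foo bar')
lib.add(i)
album = lib.add_album([i])
album['albumflex'] = u'foo'
album.store()

# The item itself has no 'albumflex' field; the query matches through
# the item-to-album fallback.
print([it.title for it in lib.items(u'albumflex:foo')])  # [u'foo bar']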
557
test/test_web.py

@@ -8,37 +8,90 @@ import json
 import unittest
 import os.path
 from six import assertCountEqual
+import shutil
 
 from test import _common
 from beets.library import Item, Album
 from beetsplug import web
 
+import platform
+
+from beets import logging
+
 
 class WebPluginTest(_common.LibTestCase):
 
     def setUp(self):
 
         super(WebPluginTest, self).setUp()
+        self.log = logging.getLogger('beets.web')
+
+        if platform.system() == 'Windows':
+            self.path_prefix = u'C:'
+        else:
+            self.path_prefix = u''
 
         # Add fixtures
         for track in self.lib.items():
             track.remove()
-        self.lib.add(Item(title=u'title', path='/path_1', id=1))
-        self.lib.add(Item(title=u'another title', path='/path_2', id=2))
-        self.lib.add(Album(album=u'album', id=3))
-        self.lib.add(Album(album=u'another album', id=4))
+
+        # Add library elements. Note that self.lib.add overrides any "id=<n>"
+        # and assigns the next free id number.
+        # The following adds will create items #1, #2 and #3
+        path1 = self.path_prefix + os.sep + \
+            os.path.join(b'path_1').decode('utf-8')
+        self.lib.add(Item(title=u'title',
+                          path=path1,
+                          album_id=2,
+                          artist='AAA Singers'))
+        path2 = self.path_prefix + os.sep + \
+            os.path.join(b'somewhere', b'a').decode('utf-8')
+        self.lib.add(Item(title=u'another title',
+                          path=path2,
+                          artist='AAA Singers'))
+        path3 = self.path_prefix + os.sep + \
+            os.path.join(b'somewhere', b'abc').decode('utf-8')
+        self.lib.add(Item(title=u'and a third',
+                          testattr='ABC',
+                          path=path3,
+                          album_id=2))
+        # The following adds will create albums #1 and #2
+        self.lib.add(Album(album=u'album',
+                           albumtest='xyz'))
+        path4 = self.path_prefix + os.sep + \
+            os.path.join(b'somewhere2', b'art_path_2').decode('utf-8')
+        self.lib.add(Album(album=u'other album',
+                           artpath=path4))
 
         web.app.config['TESTING'] = True
         web.app.config['lib'] = self.lib
         web.app.config['INCLUDE_PATHS'] = False
+        web.app.config['READONLY'] = True
         self.client = web.app.test_client()
 
     def test_config_include_paths_true(self):
         web.app.config['INCLUDE_PATHS'] = True
         response = self.client.get('/item/1')
         res_json = json.loads(response.data.decode('utf-8'))
+        expected_path = self.path_prefix + os.sep \
+            + os.path.join(b'path_1').decode('utf-8')
 
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(res_json['path'], u'/path_1')
+        self.assertEqual(res_json['path'], expected_path)
 
         web.app.config['INCLUDE_PATHS'] = False
 
+    def test_config_include_artpaths_true(self):
+        web.app.config['INCLUDE_PATHS'] = True
+        response = self.client.get('/album/2')
+        res_json = json.loads(response.data.decode('utf-8'))
+        expected_path = self.path_prefix + os.sep \
+            + os.path.join(b'somewhere2', b'art_path_2').decode('utf-8')
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['artpath'], expected_path)
+
+        web.app.config['INCLUDE_PATHS'] = False
+
     def test_config_include_paths_false(self):
         web.app.config['INCLUDE_PATHS'] = False
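The fixture paths above are assembled from byte components because beets stores paths as bytes internally; the decode step yields the unicode form the JSON layer reports. The same construction in isolation, with the platform check the setUp uses:

import os
import platform

path_prefix = u'C:' if platform.system() == 'Windows' else u''
path2 = path_prefix + os.sep + \
    os.path.join(b'somewhere', b'a').decode('utf-8')
print(path2)  # '/somewhere/a' on POSIX, 'C:\\somewhere\\a' on Windows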
@@ -48,12 +101,20 @@ class WebPluginTest(_common.LibTestCase):
         self.assertEqual(response.status_code, 200)
         self.assertNotIn('path', res_json)
 
+    def test_config_include_artpaths_false(self):
+        web.app.config['INCLUDE_PATHS'] = False
+        response = self.client.get('/album/2')
+        res_json = json.loads(response.data.decode('utf-8'))
+
+        self.assertEqual(response.status_code, 200)
+        self.assertNotIn('artpath', res_json)
+
     def test_get_all_items(self):
         response = self.client.get('/item/')
         res_json = json.loads(response.data.decode('utf-8'))
 
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(len(res_json['items']), 2)
+        self.assertEqual(len(res_json['items']), 3)
 
     def test_get_single_item_by_id(self):
         response = self.client.get('/item/1')

@@ -69,11 +130,11 @@ class WebPluginTest(_common.LibTestCase):
 
         self.assertEqual(response.status_code, 200)
         self.assertEqual(len(res_json['items']), 2)
-        response_titles = [item['title'] for item in res_json['items']]
-        assertCountEqual(self, response_titles, [u'title', u'another title'])
+        response_titles = {item['title'] for item in res_json['items']}
+        self.assertEqual(response_titles, {u'title', u'another title'})
 
     def test_get_single_item_not_found(self):
-        response = self.client.get('/item/3')
+        response = self.client.get('/item/4')
         self.assertEqual(response.status_code, 404)
 
     def test_get_single_item_by_path(self):

@@ -94,13 +155,15 @@ class WebPluginTest(_common.LibTestCase):
         self.assertEqual(response.status_code, 404)
 
     def test_get_item_empty_query(self):
+        """ testing item query: <empty> """
         response = self.client.get('/item/query/')
         res_json = json.loads(response.data.decode('utf-8'))
 
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(len(res_json['items']), 2)
+        self.assertEqual(len(res_json['items']), 3)
 
     def test_get_simple_item_query(self):
+        """ testing item query: another """
         response = self.client.get('/item/query/another')
         res_json = json.loads(response.data.decode('utf-8'))
@@ -109,13 +172,59 @@ class WebPluginTest(_common.LibTestCase):
         self.assertEqual(res_json['results'][0]['title'],
                          u'another title')
 
+    def test_query_item_string(self):
+        """ testing item query: testattr:ABC """
+        response = self.client.get('/item/query/testattr%3aABC')
+        res_json = json.loads(response.data.decode('utf-8'))
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+        self.assertEqual(res_json['results'][0]['title'],
+                         u'and a third')
+
+    def test_query_item_regex(self):
+        """ testing item query: testattr::[A-C]+ """
+        response = self.client.get('/item/query/testattr%3a%3a[A-C]%2b')
+        res_json = json.loads(response.data.decode('utf-8'))
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+        self.assertEqual(res_json['results'][0]['title'],
+                         u'and a third')
+
+    def test_query_item_regex_backslash(self):
+        # """ testing item query: testattr::\w+ """
+        response = self.client.get('/item/query/testattr%3a%3a%5cw%2b')
+        res_json = json.loads(response.data.decode('utf-8'))
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+        self.assertEqual(res_json['results'][0]['title'],
+                         u'and a third')
+
+    def test_query_item_path(self):
+        # """ testing item query: path:\somewhere\a """
+        """ Note: path queries are special: the query item must match the path
+        from the root all the way to a directory, so this matches 1 item """
+        """ Note: filesystem separators in the query must be '\' """
+
+        response = self.client.get('/item/query/path:'
+                                   + self.path_prefix
+                                   + '\\somewhere\\a')
+        res_json = json.loads(response.data.decode('utf-8'))
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+        self.assertEqual(res_json['results'][0]['title'],
+                         u'another title')
+
     def test_get_all_albums(self):
         response = self.client.get('/album/')
         res_json = json.loads(response.data.decode('utf-8'))
 
         self.assertEqual(response.status_code, 200)
         response_albums = [album['album'] for album in res_json['albums']]
-        assertCountEqual(self, response_albums, [u'album', u'another album'])
+        assertCountEqual(self, response_albums, [u'album', u'other album'])
 
     def test_get_single_album_by_id(self):
         response = self.client.get('/album/2')
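The percent-escapes in these query URLs (%3a for ':', %2b for '+', %5c for '\') can be generated instead of hand-written. A sketch using the stdlib via six, keeping '[' and ']' literal as the URLs above do; quote() emits uppercase hex, which the server treats identically:

from six.moves.urllib.parse import quote

for raw in ['testattr:ABC', 'testattr::[A-C]+', 'testattr::\\w+']:
    print('/item/query/' + quote(raw, safe='[]'))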
@@ -123,7 +232,7 @@ class WebPluginTest(_common.LibTestCase):
 
         self.assertEqual(response.status_code, 200)
         self.assertEqual(res_json['id'], 2)
-        self.assertEqual(res_json['album'], u'another album')
+        self.assertEqual(res_json['album'], u'other album')
 
     def test_get_multiple_albums_by_id(self):
         response = self.client.get('/album/1,2')

@@ -131,7 +240,7 @@ class WebPluginTest(_common.LibTestCase):
 
         self.assertEqual(response.status_code, 200)
         response_albums = [album['album'] for album in res_json['albums']]
-        assertCountEqual(self, response_albums, [u'album', u'another album'])
+        assertCountEqual(self, response_albums, [u'album', u'other album'])
 
     def test_get_album_empty_query(self):
         response = self.client.get('/album/query/')
@@ -140,6 +249,428 @@ class WebPluginTest(_common.LibTestCase):
         self.assertEqual(response.status_code, 200)
         self.assertEqual(len(res_json['albums']), 2)
 
+    def test_get_simple_album_query(self):
+        response = self.client.get('/album/query/other')
+        res_json = json.loads(response.data.decode('utf-8'))
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+        self.assertEqual(res_json['results'][0]['album'],
+                         u'other album')
+        self.assertEqual(res_json['results'][0]['id'], 2)
+
+    def test_get_album_details(self):
+        response = self.client.get('/album/2?expand')
+        res_json = json.loads(response.data.decode('utf-8'))
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['items']), 2)
+        self.assertEqual(res_json['items'][0]['album'],
+                         u'other album')
+        self.assertEqual(res_json['items'][1]['album'],
+                         u'other album')
+        response_track_titles = {item['title'] for item in res_json['items']}
+        self.assertEqual(response_track_titles, {u'title', u'and a third'})
+
+    def test_query_album_string(self):
+        """ testing query: albumtest:xy """
+        response = self.client.get('/album/query/albumtest%3axy')
+        res_json = json.loads(response.data.decode('utf-8'))
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+        self.assertEqual(res_json['results'][0]['album'],
+                         u'album')
+
+    def test_query_album_artpath_regex(self):
+        """ testing query: artpath::art_ """
+        response = self.client.get('/album/query/artpath%3a%3aart_')
+        res_json = json.loads(response.data.decode('utf-8'))
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+        self.assertEqual(res_json['results'][0]['album'],
+                         u'other album')
+
+    def test_query_album_regex_backslash(self):
+        # """ testing query: albumtest::\w+ """
+        response = self.client.get('/album/query/albumtest%3a%3a%5cw%2b')
+        res_json = json.loads(response.data.decode('utf-8'))
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+        self.assertEqual(res_json['results'][0]['album'],
+                         u'album')
+
+    def test_get_stats(self):
+        response = self.client.get('/stats')
+        res_json = json.loads(response.data.decode('utf-8'))
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['items'], 3)
+        self.assertEqual(res_json['albums'], 2)
+
+    def test_delete_item_id(self):
+
+        web.app.config['READONLY'] = False
+
+        # Create a temporary item
+        item_id = self.lib.add(Item(title=u'test_delete_item_id',
+                                    test_delete_item_id=1))
+
+        # Check we can find the temporary item we just created
+        response = self.client.get('/item/' + str(item_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['id'], item_id)
+
+        # Delete item by id
+        response = self.client.delete('/item/' + str(item_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+
+        # Check the item has gone
+        response = self.client.get('/item/' + str(item_id))
+        self.assertEqual(response.status_code, 404)
+        # Note: if this fails, the item may still be around
+        # and may cause other tests to fail
+
+    def test_delete_item_without_file(self):
+
+        web.app.config['READONLY'] = False
+
+        # Create an item with a file
+        ipath = os.path.join(self.temp_dir, b'testfile1.mp3')
+        shutil.copy(os.path.join(_common.RSRC, b'full.mp3'), ipath)
+        self.assertTrue(os.path.exists(ipath))
+        item_id = self.lib.add(Item.from_path(ipath))
+
+        # Check we can find the temporary item we just created
+        response = self.client.get('/item/' + str(item_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['id'], item_id)
+
+        # Delete item by id, without deleting file
+        response = self.client.delete('/item/' + str(item_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+
+        # Check the item has gone
+        response = self.client.get('/item/' + str(item_id))
+        self.assertEqual(response.status_code, 404)
+
+        # Check the file has not gone
+        self.assertTrue(os.path.exists(ipath))
+        os.remove(ipath)
+
+    def test_delete_item_with_file(self):
+
+        web.app.config['READONLY'] = False
+
+        # Create an item with a file
+        ipath = os.path.join(self.temp_dir, b'testfile2.mp3')
+        shutil.copy(os.path.join(_common.RSRC, b'full.mp3'), ipath)
+        self.assertTrue(os.path.exists(ipath))
+        item_id = self.lib.add(Item.from_path(ipath))
+
+        # Check we can find the temporary item we just created
+        response = self.client.get('/item/' + str(item_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['id'], item_id)
+
+        # Delete item by id, with file
+        response = self.client.delete('/item/' + str(item_id) + '?delete')
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+
+        # Check the item has gone
+        response = self.client.get('/item/' + str(item_id))
+        self.assertEqual(response.status_code, 404)
+
+        # Check the file has gone
+        self.assertFalse(os.path.exists(ipath))
+
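The pair of tests above pins down the '?delete' switch: a bare DELETE removes only the library row, while '?delete' also unlinks the file on disk. A condensed stand-alone sketch (assumes an audio file named full.mp3 in the working directory and the handlers added in this change):

import os
import shutil
import tempfile

from beets.library import Item, Library
from beetsplug import web

tmp = tempfile.mkdtemp().encode('utf-8')
path = os.path.join(tmp, b'song.mp3')
shutil.copy(b'full.mp3', path)

lib = Library(':memory:')
item_id = lib.add(Item.from_path(path))

web.app.config.update(TESTING=True, lib=lib, READONLY=False)
client = web.app.test_client()

client.delete('/item/%d?delete' % item_id)  # row and file both go
print(os.path.exists(path))                 # False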
+    def test_delete_item_query(self):
+
+        web.app.config['READONLY'] = False
+
+        # Create a temporary item
+        self.lib.add(Item(title=u'test_delete_item_query',
+                          test_delete_item_query=1))
+
+        # Check we can find the temporary item we just created
+        response = self.client.get('/item/query/test_delete_item_query')
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+
+        # Delete item by query
+        response = self.client.delete('/item/query/test_delete_item_query')
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+
+        # Check the item has gone
+        response = self.client.get('/item/query/test_delete_item_query')
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 0)
+
+    def test_delete_item_all_fails(self):
+        """ DELETE is not supported for list all """
+
+        web.app.config['READONLY'] = False
+
+        # Delete all items
+        response = self.client.delete('/item/')
+        self.assertEqual(response.status_code, 405)
+
+        # Note: if this fails, all items have gone and rest of
+        # tests will fail!
+
+    def test_delete_item_id_readonly(self):
+
+        web.app.config['READONLY'] = True
+
+        # Create a temporary item
+        item_id = self.lib.add(Item(title=u'test_delete_item_id_ro',
+                                    test_delete_item_id_ro=1))
+
+        # Check we can find the temporary item we just created
+        response = self.client.get('/item/' + str(item_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['id'], item_id)
+
+        # Try to delete item by id
+        response = self.client.delete('/item/' + str(item_id))
+        self.assertEqual(response.status_code, 405)
+
+        # Check the item has not gone
+        response = self.client.get('/item/' + str(item_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['id'], item_id)
+
+        # Remove it
+        self.lib.get_item(item_id).remove()
+
+    def test_delete_item_query_readonly(self):
+
+        web.app.config['READONLY'] = True
+
+        # Create a temporary item
+        item_id = self.lib.add(Item(title=u'test_delete_item_q_ro',
+                                    test_delete_item_q_ro=1))
+
+        # Check we can find the temporary item we just created
+        response = self.client.get('/item/query/test_delete_item_q_ro')
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+
+        # Try to delete item by query
+        response = self.client.delete('/item/query/test_delete_item_q_ro')
+        self.assertEqual(response.status_code, 405)
+
+        # Check the item has not gone
+        response = self.client.get('/item/query/test_delete_item_q_ro')
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+
+        # Remove it
+        self.lib.get_item(item_id).remove()
+
+    def test_delete_album_id(self):
+
+        web.app.config['READONLY'] = False
+
+        # Create a temporary album
+        album_id = self.lib.add(Album(album=u'test_delete_album_id',
+                                      test_delete_album_id=1))
+
+        # Check we can find the temporary album we just created
+        response = self.client.get('/album/' + str(album_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['id'], album_id)
+
+        # Delete album by id
+        response = self.client.delete('/album/' + str(album_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+
+        # Check the album has gone
+        response = self.client.get('/album/' + str(album_id))
+        self.assertEqual(response.status_code, 404)
+        # Note: if this fails, the album may still be around
+        # and may cause other tests to fail
+
+    def test_delete_album_query(self):
+
+        web.app.config['READONLY'] = False
+
+        # Create a temporary album
+        self.lib.add(Album(album=u'test_delete_album_query',
+                           test_delete_album_query=1))
+
+        # Check we can find the temporary album we just created
+        response = self.client.get('/album/query/test_delete_album_query')
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+
+        # Delete album
+        response = self.client.delete('/album/query/test_delete_album_query')
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+
+        # Check the album has gone
+        response = self.client.get('/album/query/test_delete_album_query')
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 0)
+
+    def test_delete_album_all_fails(self):
+        """ DELETE is not supported for list all """
+
+        web.app.config['READONLY'] = False
+
+        # Delete all albums
+        response = self.client.delete('/album/')
+        self.assertEqual(response.status_code, 405)
+
+        # Note: if this fails, all albums have gone and rest of
+        # tests will fail!
+
+    def test_delete_album_id_readonly(self):
+
+        web.app.config['READONLY'] = True
+
+        # Create a temporary album
+        album_id = self.lib.add(Album(album=u'test_delete_album_id_ro',
+                                      test_delete_album_id_ro=1))
+
+        # Check we can find the temporary album we just created
+        response = self.client.get('/album/' + str(album_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['id'], album_id)
+
+        # Try to delete album by id
+        response = self.client.delete('/album/' + str(album_id))
+        self.assertEqual(response.status_code, 405)
+
+        # Check the album has not gone
+        response = self.client.get('/album/' + str(album_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['id'], album_id)
+
+        # Remove it
+        self.lib.get_album(album_id).remove()
+
+    def test_delete_album_query_readonly(self):
+
+        web.app.config['READONLY'] = True
+
+        # Create a temporary album
+        album_id = self.lib.add(Album(album=u'test_delete_album_query_ro',
+                                      test_delete_album_query_ro=1))
+
+        # Check we can find the temporary album we just created
+        response = self.client.get('/album/query/test_delete_album_query_ro')
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+
+        # Try to delete album
+        response = self.client.delete(
+            '/album/query/test_delete_album_query_ro'
+        )
+        self.assertEqual(response.status_code, 405)
+
+        # Check the album has not gone
+        response = self.client.get('/album/query/test_delete_album_query_ro')
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(res_json['results']), 1)
+
+        # Remove it
+        self.lib.get_album(album_id).remove()
+
+    def test_patch_item_id(self):
+        # Note: PATCH is currently only implemented for track items, not albums
+
+        web.app.config['READONLY'] = False
+
+        # Create a temporary item
+        item_id = self.lib.add(Item(title=u'test_patch_item_id',
+                                    test_patch_f1=1,
+                                    test_patch_f2="Old"))
+
+        # Check we can find the temporary item we just created
+        response = self.client.get('/item/' + str(item_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['id'], item_id)
+        self.assertEqual(
+            [res_json['test_patch_f1'], res_json['test_patch_f2']],
+            ['1', 'Old'])
+
+        # Patch item by id
+        # patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"})
+        response = self.client.patch('/item/' + str(item_id),
+                                     json={"test_patch_f2": "New"})
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['id'], item_id)
+        self.assertEqual(
+            [res_json['test_patch_f1'], res_json['test_patch_f2']],
+            ['1', 'New'])
+
+        # Check the update has really worked
+        response = self.client.get('/item/' + str(item_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['id'], item_id)
+        self.assertEqual(
+            [res_json['test_patch_f1'], res_json['test_patch_f2']],
+            ['1', 'New'])
+
+        # Remove the item
+        self.lib.get_item(item_id).remove()
+
+    def test_patch_item_id_readonly(self):
+        # Note: PATCH is currently only implemented for track items, not albums
+
+        web.app.config['READONLY'] = True
+
+        # Create a temporary item
+        item_id = self.lib.add(Item(title=u'test_patch_item_id_ro',
+                                    test_patch_f1=2,
+                                    test_patch_f2="Old"))
+
+        # Check we can find the temporary item we just created
+        response = self.client.get('/item/' + str(item_id))
+        res_json = json.loads(response.data.decode('utf-8'))
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(res_json['id'], item_id)
+        self.assertEqual(
+            [res_json['test_patch_f1'], res_json['test_patch_f2']],
+            ['2', 'Old'])
+
+        # Patch item by id
+        # patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"})
+        response = self.client.patch('/item/' + str(item_id),
+                                     json={"test_patch_f2": "New"})
+        self.assertEqual(response.status_code, 405)
+
+        # Remove the item
+        self.lib.get_item(item_id).remove()
+
 
 def suite():
     return unittest.TestLoader().loadTestsFromName(__name__)
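End to end, the suite now covers a full REST lifecycle for an item. A condensed sketch of the round-trip (assuming the PATCH and DELETE handlers added in this change are loaded; the title values are arbitrary):

from beets.library import Item, Library
from beetsplug import web

lib = Library(':memory:')
item_id = lib.add(Item(title=u'demo'))

web.app.config.update(TESTING=True, lib=lib, READONLY=False)
client = web.app.test_client()

print(client.get('/item/%d' % item_id).status_code)          # 200
print(client.patch('/item/%d' % item_id,
                   json={'title': u'renamed'}).status_code)  # 200
print(client.delete('/item/%d' % item_id).status_code)       # 200
print(client.get('/item/%d' % item_id).status_code)          # 404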
6
tox.ini

@@ -18,12 +18,12 @@ deps =
     {test,cov}: {[_test]deps}
     lint: {[_lint]deps}
 commands =
-    test: python -bb -m pytest {posargs}
-    cov: coverage run -m pytest {posargs}
+    test: python -bb -m pytest -rs {posargs}
+    cov: coverage run -m pytest -rs {posargs}
     lint: python -m flake8 {posargs} {[_lint]files}
 
 [testenv:docs]
-basepython = python2.7
+basepython = python3.9
 deps = sphinx
 commands = sphinx-build -W -q -b html docs {envtmpdir}/html {posargs}