Mirror of https://github.com/beetbox/beets.git (synced 2026-01-30 12:02:41 +01:00)

Merge remote-tracking branch 'upstream/master' into pr/item-album-fallback

Commit eda9930c59: 157 changed files with 8910 additions and 1556 deletions

@@ -2,7 +2,6 @@
omit =
    */pyshared/*
    */python?.?/*
    */site-packages/nose/*
    */test/*
exclude_lines =
    assert False

.github/pull_request_template.md (new file, 11 lines)
@@ -0,0 +1,11 @@
## Description

Fixes #X. <!-- Insert issue number here if applicable. -->

(...)

## To Do

- [ ] Documentation. (If you've added a new command-line flag, for example, find the appropriate page under `docs/` to describe it.)
- [ ] Changelog. (Add an entry to `docs/changelog.rst` near the top of the document.)
- [ ] Tests. (Encouraged but not strictly required.)

.github/stale.yml (new file, 28 lines)
@@ -0,0 +1,28 @@
# Configuration for probot-stale - https://github.com/probot/stale

daysUntilClose: 7
staleLabel: stale

issues:
  daysUntilStale: 60
  onlyLabels:
    - needinfo
  markComment: >
    Is this still relevant? If so, what is blocking it?
    Is there anything you can do to help move it forward?

    This issue has been automatically marked as stale because it has not had
    recent activity. It will be closed if no further activity occurs. Thank you
    for your contributions.

pulls:
  daysUntilStale: 120
  markComment: >
    Is this still relevant? If so, what is blocking it?
    Is there anything you can do to help move it forward?

    This pull request has been automatically marked as stale because it has not had
    recent activity. It will be closed if no further activity occurs. Thank you
    for your contributions.

.github/workflows/ci.yaml (new file, 85 lines)
@@ -0,0 +1,85 @@
name: ci
on: [push, pull_request]
jobs:
  test:
    runs-on: ${{ matrix.platform }}
    strategy:
      matrix:
        platform: [ubuntu-latest]
        python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9-dev]

    env:
      PY_COLORS: 1

    steps:
      - uses: actions/checkout@v2

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install base dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install tox sphinx

      - name: Test with tox
        if: matrix.python-version != '3.8'
        run: |
          tox -e py-test

      - name: Test with tox and get coverage
        if: matrix.python-version == '3.8'
        run: |
          tox -vv -e py-cov

      - name: Upload code coverage
        if: matrix.python-version == '3.8'
        run: |
          pip install codecov || true
          codecov || true

  test-docs:
    runs-on: ubuntu-latest

    env:
      PY_COLORS: 1

    steps:
      - uses: actions/checkout@v2

      - name: Set up Python 2.7
        uses: actions/setup-python@v2
        with:
          python-version: 2.7

      - name: Install base dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install tox sphinx

      - name: Build and check docs using tox
        run: tox -e docs

  lint:
    runs-on: ubuntu-latest

    env:
      PY_COLORS: 1

    steps:
      - uses: actions/checkout@v2

      - name: Set up Python 3.8
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install base dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install tox sphinx

      - name: Lint with flake8
        run: tox -e py-lint

.github/workflows/integration_test.yaml (new file, 27 lines)
@@ -0,0 +1,27 @@
name: integration tests
on:
  schedule:
    - cron: '0 0 * * SUN'  # run every Sunday at midnight
jobs:
  test_integration:
    runs-on: ubuntu-latest

    env:
      PY_COLORS: 1

    steps:
      - uses: actions/checkout@v2

      - name: Set up latest Python version
        uses: actions/setup-python@v2
        with:
          python-version: 3.9-dev

      - name: Install base dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install tox sphinx

      - name: Test with tox
        run: |
          tox -e int

.gitignore (3 changed lines)
@@ -7,7 +7,6 @@

# Project Specific patterns
man
test/rsrc/lyrics/*

# The rest is from https://www.gitignore.io/api/python

@@ -54,7 +53,6 @@ htmlcov/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/

@@ -77,6 +75,7 @@ target/

# virtualenv
venv/
.venv/
ENV/

# Spyder project settings

.travis.yml (deleted file, 87 lines)
@@ -1,87 +0,0 @@
dist: trusty
sudo: required
language: python

env:
  global:
    # Undocumented feature of nose-show-skipped.
    NOSE_SHOW_SKIPPED: 1

matrix:
  include:
    - python: 2.7.13
      env: {TOX_ENV: py27-cov, COVERAGE: 1}
    - python: 2.7.13
      env: {TOX_ENV: py27-test}
    - python: 3.4
      env: {TOX_ENV: py34-test}
    - python: 3.4_with_system_site_packages
      env: {TOX_ENV: py34-test}
    - python: 3.5
      env: {TOX_ENV: py35-test}
    - python: 3.6
      env: {TOX_ENV: py36-test}
    - python: 3.7
      env: {TOX_ENV: py37-test}
      dist: xenial
    # - python: 3.8-dev
    #   env: {TOX_ENV: py38-test}
    #   dist: xenial
    # - python: pypy
    # - env: {TOX_ENV: pypy-test}
    - python: 3.6
      env: {TOX_ENV: py36-flake8}
    - python: 2.7.13
      env: {TOX_ENV: docs}
# Non-Python dependencies.
addons:
  apt:
    sources:
      - sourceline: "deb http://archive.ubuntu.com/ubuntu/ trusty multiverse"
      - sourceline: "deb http://archive.ubuntu.com/ubuntu/ trusty-updates multiverse"
    packages:
      - bash-completion
      - gir1.2-gst-plugins-base-1.0
      - gir1.2-gstreamer-1.0
      - gstreamer1.0-plugins-good
      - gstreamer1.0-plugins-bad
      - imagemagick
      - python-gi
      - python-gst-1.0
      - python3-gi
      - python3-gst-1.0
      - unrar

# To install dependencies, tell tox to do everything but actually running the
# test.
install:
  - travis_retry pip install tox sphinx
  # upgrade requests to satisfy sphinx linkcheck (for building man pages)
  - if [[ $TRAVIS_PYTHON_VERSION == *_site_packages ]]; then pip install -U requests; fi
  - travis_retry tox -e $TOX_ENV --notest

script:
  # prevents "libdc1394 error: Failed to initialize libdc1394" errors
  - sudo ln -s /dev/null /dev/raw1394
  - if [[ $TRAVIS_PYTHON_VERSION == *_site_packages ]]; then SITE_PACKAGES=--sitepackages; fi
  # pip in trusty breaks on packages prefixed with "_". See https://github.com/pypa/pip/issues/3681
  - if [[ $TRAVIS_PYTHON_VERSION == 3.4_with_system_site_packages ]]; then sudo rm -rf /usr/lib/python3/dist-packages/_lxc-0.1.egg-info; fi
  - tox -e $TOX_ENV $SITE_PACKAGES

# Report coverage to codecov.io.
before_install:
  - "[ ! -z $COVERAGE ] && travis_retry pip install codecov || true"
after_success:
  - "[ ! -z $COVERAGE ] && codecov || true"

cache:
  pip: true

notifications:
  irc:
    channels:
      - "irc.freenode.org#beets"
    use_notice: true
    skip_join: true
    on_success: change
    on_failure: always

CONTRIBUTING.rst (new file, 377 lines)
@@ -0,0 +1,377 @@
############
Contributing
############

.. contents::
    :depth: 3

Thank you!
==========

First off, thank you for considering contributing to beets! It's people like you that make beets continue to succeed.

These guidelines describe how you can help most effectively. Following them makes life easier for the development team and shows that you respect the maintainers' time; in return, the maintainers will reciprocate by helping to address your issue, reviewing changes, and finalizing pull requests.

Types of Contributions
======================

We love to get contributions from our community—you! There are many ways to contribute, whether you're a programmer or not.

Non-Programming
---------------

- Promote beets! Help get the word out by telling your friends, writing a blog post, or discussing it on a forum you frequent.
- Improve the `documentation <http://beets.readthedocs.org/>`__. It's incredibly easy to contribute here: just find a page you want to modify and hit the "Edit on GitHub" button in the upper-right. You can automatically send us a pull request for your changes.
- GUI design. For the time being, beets is a command-line-only affair. But that's mostly because we don't have any great ideas for what a good GUI should look like. If you have those great ideas, please get in touch.
- Benchmarks. We'd like to have a consistent way of measuring speed improvements in beets' tagger and other functionality, as well as a way of comparing beets' performance to other tools. You can help by compiling a library of freely-licensed music files (preferably with incorrect metadata) for testing and measurement.
- Think you have a nice config or cool use-case for beets? We'd love to hear about it! Submit a post to `our forums <https://discourse.beets.io/>`__ under the "Show and Tell" category for a chance to get featured in `the docs <https://beets.readthedocs.io/en/stable/guides/advanced.html>`__.
- Consider helping out in `our forums <https://discourse.beets.io/>`__ by responding to support requests or driving some new discussions.

Programming
-----------

- As a programmer (even if you're just a beginner!), you have a ton of opportunities to get your feet wet with beets.
- For developing plugins, or hacking away at beets, there's some good information in the `"For Developers" section of the docs <https://beets.readthedocs.io/en/stable/dev/>`__.

Getting the Source
^^^^^^^^^^^^^^^^^^

The easiest way to get started with the latest beets source is to use `pip <https://pip.pypa.io/>`__ to install an "editable" package. This can be done with one command:

.. code-block:: bash

    $ pip install -e git+https://github.com/beetbox/beets.git#egg=beets

Or, equivalently:

.. code-block:: bash

    $ git clone https://github.com/beetbox/beets.git
    $ cd beets
    $ pip install -e .

If you already have a released version of beets installed, you may need to remove it first by typing ``pip uninstall beets``. The pip command above will put the beets source in a ``src/beets`` directory and install the ``beet`` CLI script to a standard location on your system. You may want to use the ``--src`` option to specify the parent directory where the source will be checked out, and the ``--user`` option so that the package is installed to your home directory (compare with the output of ``pip install --help``).

Code Contribution Ideas
^^^^^^^^^^^^^^^^^^^^^^^

- We maintain a set of `issues marked as "bite-sized" <https://github.com/beetbox/beets/labels/bitesize>`__. These are issues that would serve as a good introduction to the codebase. Claim one and start exploring!
- Like testing? Our `test coverage <https://codecov.io/github/beetbox/beets>`__ is somewhat low. You can help out by finding low-coverage modules or checking out other `testing-related issues <https://github.com/beetbox/beets/labels/testing>`__.
- There are several ways to improve the tests in general (see :ref:`testing`) and some places to think about performance optimization (see `Optimization <https://github.com/beetbox/beets/wiki/Optimization>`__).
- Not all of our code is up to our coding conventions. In particular, the `API documentation <https://beets.readthedocs.io/en/stable/dev/api.html>`__ is currently quite sparse. You can help by adding to the docstrings in the code and to the documentation pages themselves. beets follows `PEP-257 <https://www.python.org/dev/peps/pep-0257/>`__ for docstrings, and in some places we also use `ReST autodoc syntax for Sphinx <https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html>`__ to, for example, refer to a class name.

Your First Contribution
=======================

If this is your first time contributing to an open source project, welcome! If you are confused at all about how to contribute or what to contribute, take a look at `this great tutorial <http://makeapullrequest.com/>`__, or stop by our `forums <https://discourse.beets.io/>`__ if you have any questions.

We maintain a list of issues reserved for those new to open source, labeled `"first timers only" <https://github.com/beetbox/beets/issues?q=is%3Aopen+is%3Aissue+label%3A%22first+timers+only%22>`__. Since the goal of these issues is to get users comfortable with contributing to an open source project, please do not hesitate to ask any questions.

How to Submit Your Work
=======================

Do you have a great bug fix, new feature, or documentation expansion you'd like to contribute? Follow these steps to create a GitHub pull request and your code will ship in no time.

1. Fork the beets repository and clone it (see above) to create a workspace.
2. Make your changes.
3. Add tests. If you've fixed a bug, write a test to ensure that you've actually fixed it. If there's a new feature or plugin, please contribute tests that show that your code does what it says.
4. Add documentation. If you've added a new command flag, for example, find the appropriate page under ``docs/`` where it needs to be listed.
5. Add a changelog entry to ``docs/changelog.rst`` near the top of the document.
6. Run the tests and style checker. The easiest way to run the tests is to use `tox <https://tox.readthedocs.org/en/latest/>`__. For more information on running tests, see :ref:`testing`.
7. Push to your fork and open a pull request! We'll be in touch shortly.
8. If you add commits to a pull request, please add a comment or re-request a review after you push them, since GitHub doesn't automatically notify us when commits are added.

Remember, code contributions have four parts: the code, the tests, the documentation, and the changelog entry. Thank you for contributing!

The Code
========

The documentation has an `API section <https://beets.readthedocs.io/en/stable/dev/api.html>`__ that serves as an introduction to beets' design.

Coding Conventions
==================

General
-------

There are a few coding conventions we use in beets (a small sketch pulling them together follows after this list):

- Whenever you access the library database, do so through the provided Library methods or via a Transaction object. Never call ``lib.conn.*`` directly. For example, do this:

  .. code-block:: python

      with g.lib.transaction() as tx:
          rows = tx.query('SELECT DISTINCT "{0}" FROM "{1}" ORDER BY "{2}"'
                          .format(field, model._table, sort_field))

  To fetch Item objects from the database, use ``lib.items(…)`` and supply a query as an argument. Resist the urge to write raw SQL for your query. If you must use lower-level queries into the database, do this:

  .. code-block:: python

      with lib.transaction() as tx:
          rows = tx.query('SELECT …')

  Transaction objects help control concurrent access to the database and assist in debugging conflicting accesses.
- Always use the `future imports <http://docs.python.org/library/__future__.html>`__ ``print_function``, ``division``, and ``absolute_import``, but *not* ``unicode_literals``. These help keep your code modern and will help in the eventual move to Python 3.
- ``str.format()`` should be used instead of the ``%`` operator.
- Never ``print`` informational messages; use the `logging <http://docs.python.org/library/logging.html>`__ module instead. In particular, we have our own logging shim, so you'll see ``from beets import logging`` in most files.

  - Always log Unicode strings (e.g., ``log.debug(u"hello world")``).
  - The loggers use `str.format <http://docs.python.org/library/stdtypes.html#str.format>`__-style logging instead of ``%``-style, so you can type ``log.debug(u"{0}", obj)`` to do your formatting.

- Exception handlers must use ``except A as B:`` instead of ``except A, B:``.
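
Pulling the conventions above together, a new module typically starts like the following sketch (the docstring, function, and logged message are invented purely for illustration):

.. code-block:: python

    # -*- coding: utf-8 -*-
    """An illustrative skeleton of a module header under these conventions."""
    from __future__ import division, absolute_import, print_function

    from beets import logging

    log = logging.getLogger('beets')


    def report(obj):
        # str.format-style placeholders, and always a Unicode string.
        log.debug(u'processing {0}', obj)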

Style
-----

We follow `PEP 8 <http://www.python.org/dev/peps/pep-0008/>`__ for style. You can use ``tox -e lint`` to check your code for any style errors.

Handling Paths
--------------

A great deal of convention deals with the handling of **paths**. Paths are stored internally—in the database, for instance—as byte strings (i.e., ``bytes`` instead of ``str`` in Python 3). This is because POSIX operating systems' path names are only reliably usable as byte strings—operating systems typically recommend but do not require that filenames use a given encoding, so violations of any reported encoding are inevitable. On Windows, the strings are always encoded with UTF-8; on Unix, the encoding is controlled by the filesystem. Here are some guidelines to follow (a short sketch combining the helpers follows after this list):

- If you have a Unicode path or you're not sure whether something is Unicode or not, pass it through the ``bytestring_path`` function in the ``beets.util`` module to convert it to bytes.
- Pass every path name through the ``syspath`` function (also in ``beets.util``) before sending it to any *operating system* file operation (``open``, for example). This is necessary to use long filenames (which, maddeningly, must be Unicode) on Windows. This allows us to consistently store bytes in the database but use the native encoding rule on both POSIX and Windows.
- Similarly, the ``displayable_path`` utility function converts bytestring paths to a Unicode string for displaying to the user. Every time you want to print out a string to the terminal or log it with the ``logging`` module, feed it through this function.
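
To make the convention concrete, here is a minimal sketch of how these helpers are typically combined (the ``read_file`` function and its log message are invented for illustration):

.. code-block:: python

    from beets import logging
    from beets.util import bytestring_path, syspath, displayable_path

    log = logging.getLogger('beets')


    def read_file(some_path):
        path = bytestring_path(some_path)     # normalize to bytes for storage
        with open(syspath(path), 'rb') as f:  # convert only at the OS boundary
            data = f.read()
        # Convert only when showing the path to a human.
        log.debug(u'read {0} bytes from {1}', len(data), displayable_path(path))
        return data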

Editor Settings
---------------

Personally, I work on beets with `vim <http://www.vim.org/>`__. Here are some ``.vimrc`` lines that might help with PEP 8-compliant Python coding::

    filetype indent on
    autocmd FileType python setlocal shiftwidth=4 tabstop=4 softtabstop=4 expandtab shiftround autoindent

Consider installing `this alternative Python indentation plugin <https://github.com/mitsuhiko/vim-python-combined>`__. I also like `neomake <https://github.com/neomake/neomake>`__ with its flake8 checker.

.. _testing:

Testing
=======

Running the Tests
-----------------

To run the tests for multiple Python versions, compile the docs, and check style, use `tox`_. Just type ``tox`` or use something like ``tox -e py27`` to test a specific configuration. `detox`_ makes this go faster.

You can disable a hand-selected set of "slow" tests by setting the environment variable SKIP_SLOW_TESTS before running them.

Other ways to run the tests:

- ``python testall.py`` (ditto)
- ``python -m unittest discover -p 'test_*'`` (ditto)
- `pytest`_

You can also see the latest test results on `Linux`_ and on `Windows`_.

Note: if you are on Windows and are seeing errors running tox, it may be related to `this issue`_, in which case you may have to install tox v3.8.3, e.g. ``python -m pip install tox==3.8.3``.

.. _this issue: https://github.com/tox-dev/tox/issues/1550

Coverage
^^^^^^^^

``tox -e cov`` will add coverage info for the tests. Coverage is still pretty low -- see the current status on `Codecov`_.

Red Flags
^^^^^^^^^

The `pytest-random`_ plugin makes it easy to randomize the order of tests. ``py.test test --random`` will occasionally turn up failing tests that reveal ordering dependencies—which are bad news!

Test Dependencies
^^^^^^^^^^^^^^^^^

The tests have a few more dependencies than beets itself. (The additional dependencies consist of testing utilities and dependencies of non-default plugins exercised by the test suite.) The dependencies are listed under 'test' in ``extras_require`` in `setup.py`_. To install the test dependencies, run ``python -m pip install .[test]``. Or, just run a test suite with ``tox``, which will install them automatically.

.. _setup.py: https://github.com/beetbox/beets/blob/master/setup.py#L99

Writing Tests
-------------

Writing tests is done by adding or modifying files in the `test`_ folder. Take a look at `https://github.com/beetbox/beets/blob/master/test/test_template.py#L224`_ to get a basic view of how tests are written. Despite using ``pytest`` as a test runner, we prefer to write tests using the standard `unittest`_ testing framework.

Any tests that involve sending out network traffic, e.g. an external API call, should normally be skipped and run under our weekly `integration test`_ suite. These tests can be useful in detecting external changes that would affect ``beets``. In order to do this, simply add the following snippet before the applicable test case:

.. code-block:: python

    @unittest.skipUnless(
        os.environ.get('INTEGRATION_TEST', '0') == '1',
        'integration testing not enabled')

If you do this, it is also advised to create a similar test that 'mocks' the network call and can be run under normal circumstances by our CI and others. See `unittest.mock`_ for more info.
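
As a hedged sketch of what such a pair of tests might look like (``myplugin``, ``get_tags``, and ``_api_request`` are hypothetical names used only to illustrate the pattern, not part of beets):

.. code-block:: python

    import os
    import unittest
    from unittest import mock

    from myplugin import get_tags  # hypothetical module under test


    class GetTagsTest(unittest.TestCase):
        @unittest.skipUnless(
            os.environ.get('INTEGRATION_TEST', '0') == '1',
            'integration testing not enabled')
        def test_live_lookup(self):
            # Hits the real web service, so it only runs in the weekly suite.
            self.assertIn('artist', get_tags(u'some song'))

        @mock.patch('myplugin._api_request')
        def test_mocked_lookup(self, api_request):
            # Runs in normal CI: the network call is replaced by a mock.
            api_request.return_value = {'artist': u'Some Artist'}
            self.assertEqual(get_tags(u'some song')['artist'], u'Some Artist')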

Basics
^^^^^^

- Your file should contain a class derived from ``unittest.TestCase`` (a minimal sketch follows after this list).
- Each method in this class whose name starts with *test* will be executed to test functionality.
- Test failures are raised with these methods:

  - ``self.assertEqual``
  - ``self.assertTrue``
  - ``self.assertFalse``
  - ``self.assertRaises``

- For detailed information see `Python unittest`_.
- **AVOID** using the ``start()`` and ``stop()`` methods of ``mock.patch``, as they require manual cleanup. Use the decorator or context manager forms instead.
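
Putting those points together, a minimal test file might look like this sketch (``slugify`` is a made-up stand-in for whatever code you are actually testing):

.. code-block:: python

    import unittest


    def slugify(text):
        # Stand-in for the code under test.
        return text.strip().lower().replace(u' ', u'-')


    class SlugifyTest(unittest.TestCase):
        def test_spaces_become_dashes(self):
            self.assertEqual(slugify(u'Hello World'), u'hello-world')

        def test_surrounding_whitespace_is_dropped(self):
            self.assertTrue(slugify(u'  x  ').startswith(u'x'))


    if __name__ == '__main__':
        unittest.main()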

.. _Python unittest: https://docs.python.org/2/library/unittest.html
.. _Codecov: https://codecov.io/github/beetbox/beets
.. _pytest-random: https://github.com/klrmn/pytest-random
.. _tox: http://tox.readthedocs.org
.. _detox: https://pypi.python.org/pypi/detox/
.. _pytest: http://pytest.org
.. _Linux: https://github.com/beetbox/beets/actions
.. _Windows: https://ci.appveyor.com/project/beetbox/beets/
.. _`https://github.com/beetbox/beets/blob/master/setup.py#L99`: https://github.com/beetbox/beets/blob/master/setup.py#L99
.. _test: https://github.com/beetbox/beets/tree/master/test
.. _`https://github.com/beetbox/beets/blob/master/test/test_template.py#L224`: https://github.com/beetbox/beets/blob/master/test/test_template.py#L224
.. _unittest: https://docs.python.org/3.8/library/unittest.html
.. _integration test: https://github.com/beetbox/beets/actions?query=workflow%3A%22integration+tests%22
.. _unittest.mock: https://docs.python.org/3/library/unittest.mock.html

README.rst (77 changed lines)
@@ -1,18 +1,20 @@
.. image:: http://img.shields.io/pypi/v/beets.svg
.. image:: https://img.shields.io/pypi/v/beets.svg
    :target: https://pypi.python.org/pypi/beets

.. image:: http://img.shields.io/codecov/c/github/beetbox/beets.svg
.. image:: https://img.shields.io/codecov/c/github/beetbox/beets.svg
    :target: https://codecov.io/github/beetbox/beets

.. image:: https://travis-ci.org/beetbox/beets.svg?branch=master
    :target: https://travis-ci.org/beetbox/beets
.. image:: https://github.com/beetbox/beets/workflows/ci/badge.svg?branch=master
    :target: https://github.com/beetbox/beets/actions

.. image:: https://repology.org/badge/tiny-repos/beets.svg
    :target: https://repology.org/project/beets/versions


beets
=====

Beets is the media library management system for obsessive-compulsive music
geeks.
Beets is the media library management system for obsessive music geeks.

The purpose of beets is to get your music collection right once and for all.
It catalogs your collection, automatically improving its metadata as it goes.

@@ -51,46 +53,46 @@ imagine for your music collection. Via `plugins`_, beets becomes a panacea:
If beets doesn't do what you want yet, `writing your own plugin`_ is
shockingly simple if you know a little Python.

.. _plugins: http://beets.readthedocs.org/page/plugins/
.. _MPD: http://www.musicpd.org/
.. _MusicBrainz music collection: http://musicbrainz.org/doc/Collections/
.. _plugins: https://beets.readthedocs.org/page/plugins/
.. _MPD: https://www.musicpd.org/
.. _MusicBrainz music collection: https://musicbrainz.org/doc/Collections/
.. _writing your own plugin:
    http://beets.readthedocs.org/page/dev/plugins.html
    https://beets.readthedocs.org/page/dev/plugins.html
.. _HTML5 Audio:
    http://www.w3.org/TR/html-markup/audio.html
.. _albums that are missing tracks:
    http://beets.readthedocs.org/page/plugins/missing.html
    https://beets.readthedocs.org/page/plugins/missing.html
.. _duplicate tracks and albums:
    http://beets.readthedocs.org/page/plugins/duplicates.html
    https://beets.readthedocs.org/page/plugins/duplicates.html
.. _Transcode audio:
    http://beets.readthedocs.org/page/plugins/convert.html
.. _Discogs: http://www.discogs.com/
    https://beets.readthedocs.org/page/plugins/convert.html
.. _Discogs: https://www.discogs.com/
.. _acoustic fingerprints:
    http://beets.readthedocs.org/page/plugins/chroma.html
.. _ReplayGain: http://beets.readthedocs.org/page/plugins/replaygain.html
.. _tempos: http://beets.readthedocs.org/page/plugins/acousticbrainz.html
.. _genres: http://beets.readthedocs.org/page/plugins/lastgenre.html
.. _album art: http://beets.readthedocs.org/page/plugins/fetchart.html
.. _lyrics: http://beets.readthedocs.org/page/plugins/lyrics.html
.. _MusicBrainz: http://musicbrainz.org/
    https://beets.readthedocs.org/page/plugins/chroma.html
.. _ReplayGain: https://beets.readthedocs.org/page/plugins/replaygain.html
.. _tempos: https://beets.readthedocs.org/page/plugins/acousticbrainz.html
.. _genres: https://beets.readthedocs.org/page/plugins/lastgenre.html
.. _album art: https://beets.readthedocs.org/page/plugins/fetchart.html
.. _lyrics: https://beets.readthedocs.org/page/plugins/lyrics.html
.. _MusicBrainz: https://musicbrainz.org/
.. _Beatport: https://www.beatport.com

Install
-------

You can install beets by typing ``pip install beets``. Then check out the
`Getting Started`_ guide.
You can install beets by typing ``pip install beets``.
Beets has also been packaged in the `software repositories`_ of several distributions.
Check out the `Getting Started`_ guide for more information.

.. _Getting Started: http://beets.readthedocs.org/page/guides/main.html
.. _Getting Started: https://beets.readthedocs.org/page/guides/main.html
.. _software repositories: https://repology.org/project/beets/versions

Contribute
----------

Check out the `Hacking`_ page on the wiki for tips on how to help out.
You might also be interested in the `For Developers`_ section in the docs.
Thank you for considering contributing to ``beets``! Whether you're a programmer or not, you should be able to find all the info you need at `CONTRIBUTING.rst`_.

.. _Hacking: https://github.com/beetbox/beets/wiki/Hacking
.. _For Developers: http://docs.beets.io/page/dev/
.. _CONTRIBUTING.rst: https://github.com/beetbox/beets/blob/master/CONTRIBUTING.rst

Read More
---------

@@ -98,14 +100,21 @@ Read More
Learn more about beets at `its Web site`_. Follow `@b33ts`_ on Twitter for
news and updates.

.. _its Web site: http://beets.io/
.. _@b33ts: http://twitter.com/b33ts/
.. _its Web site: https://beets.io/
.. _@b33ts: https://twitter.com/b33ts/

Contact
-------
* Encountered a bug you'd like to report or have an idea for a new feature? Check out our `issue tracker`_! If your issue or feature hasn't already been reported, please `open a new ticket`_ and we'll be in touch with you shortly. If you'd like to vote on a feature/bug, simply give a :+1: on issues you'd like to see prioritized over others.
* Need help/support, would like to start a discussion, or would just like to introduce yourself to the team? Check out our `forums`_!

.. _issue tracker: https://github.com/beetbox/beets/issues
.. _open a new ticket: https://github.com/beetbox/beets/issues/new/choose
.. _forums: https://discourse.beets.io/

Authors
-------

Beets is by `Adrian Sampson`_ with a supporting cast of thousands. For help,
please visit our `forum`_.
Beets is by `Adrian Sampson`_ with a supporting cast of thousands.

.. _forum: https://discourse.beets.io
.. _Adrian Sampson: http://www.cs.cornell.edu/~asampson/
.. _Adrian Sampson: https://www.cs.cornell.edu/~asampson/

@@ -1,7 +1,7 @@
.. image:: http://img.shields.io/pypi/v/beets.svg
.. image:: https://img.shields.io/pypi/v/beets.svg
    :target: https://pypi.python.org/pypi/beets

.. image:: http://img.shields.io/codecov/c/github/beetbox/beets.svg
.. image:: https://img.shields.io/codecov/c/github/beetbox/beets.svg
    :target: https://codecov.io/github/beetbox/beets

.. image:: https://travis-ci.org/beetbox/beets.svg?branch=master

@@ -34,7 +34,7 @@ Because beets is designed as a library, you can do almost anything you can imagine with your music
- Compute or fetch the metadata you need: `album art`_, `lyrics`_, `genres`_, `tempos`_, `ReplayGain`_ levels, or `acoustic fingerprints`_.
- Get metadata from `MusicBrainz`_, `Discogs`_, or `Beatport`_, or guess metadata from song titles or acoustic characteristics.
- `Transcode audio`_ to any format you like.
- Check your library for `duplicate tracks and albums`_ or `albums that are missing tracks`_.

@@ -45,31 +45,31 @@ Because beets is designed as a library, you can do almost anything you can imagine with your music
- Analyze music files' metadata from the command line.
- Listen to your music through a player that speaks the `MPD`_ protocol, which works with a huge variety of interfaces.

If beets doesn't yet do what you want, `writing your own plugin`_ is surprisingly simple if you know a little Python.

.. _plugins: http://beets.readthedocs.org/page/plugins/
.. _MPD: http://www.musicpd.org/
.. _MusicBrainz music collection: http://musicbrainz.org/doc/Collections/
.. _plugins: https://beets.readthedocs.org/page/plugins/
.. _MPD: https://www.musicpd.org/
.. _MusicBrainz music collection: https://musicbrainz.org/doc/Collections/
.. _writing your own plugin:
    http://beets.readthedocs.org/page/dev/plugins.html
    https://beets.readthedocs.org/page/dev/plugins.html
.. _HTML5 Audio:
    http://www.w3.org/TR/html-markup/audio.html
.. _albums that are missing tracks:
    http://beets.readthedocs.org/page/plugins/missing.html
    https://beets.readthedocs.org/page/plugins/missing.html
.. _duplicate tracks and albums:
    http://beets.readthedocs.org/page/plugins/duplicates.html
    https://beets.readthedocs.org/page/plugins/duplicates.html
.. _Transcode audio:
    http://beets.readthedocs.org/page/plugins/convert.html
.. _Discogs: http://www.discogs.com/
    https://beets.readthedocs.org/page/plugins/convert.html
.. _Discogs: https://www.discogs.com/
.. _acoustic fingerprints:
    http://beets.readthedocs.org/page/plugins/chroma.html
.. _ReplayGain: http://beets.readthedocs.org/page/plugins/replaygain.html
.. _tempos: http://beets.readthedocs.org/page/plugins/acousticbrainz.html
.. _genres: http://beets.readthedocs.org/page/plugins/lastgenre.html
.. _album art: http://beets.readthedocs.org/page/plugins/fetchart.html
.. _lyrics: http://beets.readthedocs.org/page/plugins/lyrics.html
.. _MusicBrainz: http://musicbrainz.org/
    https://beets.readthedocs.org/page/plugins/chroma.html
.. _ReplayGain: https://beets.readthedocs.org/page/plugins/replaygain.html
.. _tempos: https://beets.readthedocs.org/page/plugins/acousticbrainz.html
.. _genres: https://beets.readthedocs.org/page/plugins/lastgenre.html
.. _album art: https://beets.readthedocs.org/page/plugins/fetchart.html
.. _lyrics: https://beets.readthedocs.org/page/plugins/lyrics.html
.. _MusicBrainz: https://musicbrainz.org/
.. _Beatport: https://www.beatport.com

Install

@@ -78,7 +78,7 @@ Because beets is designed as a library, you can do almost anything you can imagine with your music
You can install beets using ``pip install beets``.
Then you can check out the `Getting Started`_ guide.

.. _Getting Started: http://beets.readthedocs.org/page/guides/main.html
.. _Getting Started: https://beets.readthedocs.org/page/guides/main.html

Contributing
------------

@@ -87,16 +87,16 @@ Because beets is designed as a library, you can do almost anything you can imagine with your music
You might also be interested in the `For Developers`_ section in the docs.

.. _Hacking: https://github.com/beetbox/beets/wiki/Hacking
.. _For Developers: http://docs.beets.io/page/dev/
.. _For Developers: https://beets.readthedocs.io/en/stable/dev/

Read More
---------

You can learn more about beets at `its Web site`_.
Follow `@b33ts`_ on Twitter for news and updates.

.. _its Web site: http://beets.io/
.. _@b33ts: http://twitter.com/b33ts/
.. _its Web site: https://beets.io/
.. _@b33ts: https://twitter.com/b33ts/

Authors
-------

@@ -105,4 +105,4 @@ Read More
If you want to help, visit the `forum`_.

.. _forum: https://discourse.beets.io
.. _Adrian Sampson: http://www.cs.cornell.edu/~asampson/
.. _Adrian Sampson: https://www.cs.cornell.edu/~asampson/

@@ -6,9 +6,6 @@ skip_commits:
  message: /\[appveyor skip\]/

environment:
  # Undocumented feature of nose-show-skipped.
  NOSE_SHOW_SKIPPED: 1

  matrix:
    - PYTHON: C:\Python27
      TOX_ENV: py27-test

@@ -15,9 +15,8 @@

from __future__ import division, absolute_import, print_function

import os

import confuse
from sys import stderr

__version__ = u'1.5.0'
__author__ = u'Adrian Sampson <adrian@radbox.org>'

@@ -32,11 +31,12 @@ class IncludeLazyConfig(confuse.LazyConfig):

        try:
            for view in self['include']:
                filename = view.as_filename()
                if os.path.isfile(filename):
                    self.set_file(filename)
                self.set_file(view.as_filename())
        except confuse.NotFoundError:
            pass
        except confuse.ConfigReadError as err:
            stderr.write("configuration `import` failed: {}"
                         .format(err.reason))


config = IncludeLazyConfig('beets', __name__)

beets/art.py (27 changed lines)

@@ -51,8 +51,8 @@ def get_art(log, item):


def embed_item(log, item, imagepath, maxwidth=None, itempath=None,
               compare_threshold=0, ifempty=False, as_album=False,
               id3v23=None):
               compare_threshold=0, ifempty=False, as_album=False, id3v23=None,
               quality=0):
    """Embed an image into the item's media file.
    """
    # Conditions and filters.

@@ -64,7 +64,7 @@ def embed_item(log, item, imagepath, maxwidth=None, itempath=None,
        log.info(u'media file already contained art')
        return
    if maxwidth and not as_album:
        imagepath = resize_image(log, imagepath, maxwidth)
        imagepath = resize_image(log, imagepath, maxwidth, quality)

    # Get the `Image` object from the file.
    try:

@@ -84,8 +84,8 @@ def embed_item(log, item, imagepath, maxwidth=None, itempath=None,
    item.try_write(path=itempath, tags={'images': [image]}, id3v23=id3v23)


def embed_album(log, album, maxwidth=None, quiet=False,
                compare_threshold=0, ifempty=False):
def embed_album(log, album, maxwidth=None, quiet=False, compare_threshold=0,
                ifempty=False, quality=0):
    """Embed album art into all of the album's items.
    """
    imagepath = album.artpath

@@ -97,20 +97,23 @@ def embed_album(log, album, maxwidth=None, quiet=False,
                  displayable_path(imagepath), album)
        return
    if maxwidth:
        imagepath = resize_image(log, imagepath, maxwidth)
        imagepath = resize_image(log, imagepath, maxwidth, quality)

    log.info(u'Embedding album art into {0}', album)

    for item in album.items():
        embed_item(log, item, imagepath, maxwidth, None,
                   compare_threshold, ifempty, as_album=True)
        embed_item(log, item, imagepath, maxwidth, None, compare_threshold,
                   ifempty, as_album=True, quality=quality)


def resize_image(log, imagepath, maxwidth):
    """Returns path to an image resized to maxwidth.
def resize_image(log, imagepath, maxwidth, quality):
    """Returns path to an image resized to maxwidth and encoded with the
    specified quality level.
    """
    log.debug(u'Resizing album art to {0} pixels wide', maxwidth)
    imagepath = ArtResizer.shared.resize(maxwidth, syspath(imagepath))
    log.debug(u'Resizing album art to {0} pixels wide and encoding at quality \
level {1}', maxwidth, quality)
    imagepath = ArtResizer.shared.resize(maxwidth, syspath(imagepath),
                                         quality=quality)
    return imagepath
|
|
|
|||
|
|
@@ -22,13 +22,54 @@ from beets import logging
from beets import config

# Parts of external interface.
from .hooks import AlbumInfo, TrackInfo, AlbumMatch, TrackMatch  # noqa
from .hooks import (  # noqa
    AlbumInfo,
    TrackInfo,
    AlbumMatch,
    TrackMatch,
    Distance,
)
from .match import tag_item, tag_album, Proposal  # noqa
from .match import Recommendation  # noqa

# Global logger.
log = logging.getLogger('beets')

# Metadata fields that are already hardcoded, or where the tag name changes.
SPECIAL_FIELDS = {
    'album': (
        'va',
        'releasegroup_id',
        'artist_id',
        'album_id',
        'mediums',
        'tracks',
        'year',
        'month',
        'day',
        'artist',
        'artist_credit',
        'artist_sort',
        'data_url'
    ),
    'track': (
        'track_alt',
        'artist_id',
        'release_track_id',
        'medium',
        'index',
        'medium_index',
        'title',
        'artist_credit',
        'artist_sort',
        'artist',
        'track_id',
        'medium_total',
        'data_url',
        'length'
    )
}


# Additional utilities for the main interface.

@@ -43,23 +84,14 @@ def apply_item_metadata(item, track_info):
        item.mb_releasetrackid = track_info.release_track_id
    if track_info.artist_id:
        item.mb_artistid = track_info.artist_id
    if track_info.data_source:
        item.data_source = track_info.data_source

    if track_info.lyricist is not None:
        item.lyricist = track_info.lyricist
    if track_info.composer is not None:
        item.composer = track_info.composer
    if track_info.composer_sort is not None:
        item.composer_sort = track_info.composer_sort
    if track_info.arranger is not None:
        item.arranger = track_info.arranger
    if track_info.work is not None:
        item.work = track_info.work
    if track_info.mb_workid is not None:
        item.mb_workid = track_info.mb_workid
    if track_info.work_disambig is not None:
        item.work_disambig = track_info.work_disambig
    for field, value in track_info.items():
        # We only overwrite fields that are not already hardcoded.
        if field in SPECIAL_FIELDS['track']:
            continue
        if value is None:
            continue
        item[field] = value

    # At the moment, the other metadata is left intact (including album
    # and track number). Perhaps these should be emptied?

@@ -151,44 +183,19 @@ def apply_metadata(album_info, mapping):
        # Track alt.
        item.track_alt = track_info.track_alt

        # Miscellaneous/nullable metadata.
        misc_fields = {
            'album': (
                'albumtype',
                'label',
                'asin',
                'catalognum',
                'script',
                'language',
                'country',
                'albumstatus',
                'albumdisambig',
                'releasegroupdisambig',
                'data_source',
            ),
            'track': (
                'disctitle',
                'lyricist',
                'media',
                'composer',
                'composer_sort',
                'arranger',
                'work',
                'mb_workid',
                'work_disambig',
            )
        }

        # Don't overwrite fields with empty values unless the
        # field is explicitly allowed to be overwritten
        for field in misc_fields['album']:
        for field, value in album_info.items():
            if field in SPECIAL_FIELDS['album']:
                continue
            clobber = field in config['overwrite_null']['album'].as_str_seq()
            value = getattr(album_info, field)
            if value is None and not clobber:
                continue
            item[field] = value

        for field in misc_fields['track']:
        for field, value in track_info.items():
            if field in SPECIAL_FIELDS['track']:
                continue
            clobber = field in config['overwrite_null']['track'].as_str_seq()
            value = getattr(track_info, field)
            if value is None and not clobber:
|
|
|||
|
|
@ -39,8 +39,25 @@ except AttributeError:
|
|||
|
||||
|
||||
# Classes used to represent candidate options.
|
||||
class AttrDict(dict):
|
||||
"""A dictionary that supports attribute ("dot") access, so `d.field`
|
||||
is equivalent to `d['field']`.
|
||||
"""
|
||||
|
||||
class AlbumInfo(object):
|
||||
def __getattr__(self, attr):
|
||||
if attr in self:
|
||||
return self.get(attr)
|
||||
else:
|
||||
raise AttributeError
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
self.__setitem__(key, value)
|
||||
|
||||
def __hash__(self):
|
||||
return id(self)
|
||||
|
||||
|
||||
class AlbumInfo(AttrDict):
|
||||
"""Describes a canonical release that may be used to match a release
|
||||
in the library. Consists of these data members:
|
||||
|
||||
|
|
@ -49,40 +66,21 @@ class AlbumInfo(object):
|
|||
- ``artist``: name of the release's primary artist
|
||||
- ``artist_id``
|
||||
- ``tracks``: list of TrackInfo objects making up the release
|
||||
- ``asin``: Amazon ASIN
|
||||
- ``albumtype``: string describing the kind of release
|
||||
- ``va``: boolean: whether the release has "various artists"
|
||||
- ``year``: release year
|
||||
- ``month``: release month
|
||||
- ``day``: release day
|
||||
- ``label``: music label responsible for the release
|
||||
- ``mediums``: the number of discs in this release
|
||||
- ``artist_sort``: name of the release's artist for sorting
|
||||
- ``releasegroup_id``: MBID for the album's release group
|
||||
- ``catalognum``: the label's catalog number for the release
|
||||
- ``script``: character set used for metadata
|
||||
- ``language``: human language of the metadata
|
||||
- ``country``: the release country
|
||||
- ``albumstatus``: MusicBrainz release status (Official, etc.)
|
||||
- ``media``: delivery mechanism (Vinyl, etc.)
|
||||
- ``albumdisambig``: MusicBrainz release disambiguation comment
|
||||
- ``releasegroupdisambig``: MusicBrainz release group
|
||||
disambiguation comment.
|
||||
- ``artist_credit``: Release-specific artist name
|
||||
- ``data_source``: The original data source (MusicBrainz, Discogs, etc.)
|
||||
- ``data_url``: The data source release URL.
|
||||
|
||||
``mediums`` along with the fields up through ``tracks`` are required.
|
||||
The others are optional and may be None.
|
||||
"""
|
||||
def __init__(self, album, album_id, artist, artist_id, tracks, asin=None,
|
||||
albumtype=None, va=False, year=None, month=None, day=None,
|
||||
label=None, mediums=None, artist_sort=None,
|
||||
releasegroup_id=None, catalognum=None, script=None,
|
||||
language=None, country=None, albumstatus=None, media=None,
|
||||
albumdisambig=None, releasegroupdisambig=None,
|
||||
artist_credit=None, original_year=None, original_month=None,
|
||||
original_day=None, data_source=None, data_url=None):
|
||||
def __init__(self, tracks, album=None, album_id=None, artist=None,
|
||||
artist_id=None, asin=None, albumtype=None, va=False,
|
||||
year=None, month=None, day=None, label=None, mediums=None,
|
||||
artist_sort=None, releasegroup_id=None, catalognum=None,
|
||||
script=None, language=None, country=None, style=None,
|
||||
genre=None, albumstatus=None, media=None, albumdisambig=None,
|
||||
releasegroupdisambig=None, artist_credit=None,
|
||||
original_year=None, original_month=None,
|
||||
original_day=None, data_source=None, data_url=None,
|
||||
discogs_albumid=None, discogs_labelid=None,
|
||||
discogs_artistid=None, **kwargs):
|
||||
self.album = album
|
||||
self.album_id = album_id
|
||||
self.artist = artist
|
||||
|
|
@ -102,6 +100,8 @@ class AlbumInfo(object):
|
|||
self.script = script
|
||||
self.language = language
|
||||
self.country = country
|
||||
self.style = style
|
||||
self.genre = genre
|
||||
self.albumstatus = albumstatus
|
||||
self.media = media
|
||||
self.albumdisambig = albumdisambig
|
||||
|
|
@ -112,6 +112,10 @@ class AlbumInfo(object):
|
|||
self.original_day = original_day
|
||||
self.data_source = data_source
|
||||
self.data_url = data_url
|
||||
self.discogs_albumid = discogs_albumid
|
||||
self.discogs_labelid = discogs_labelid
|
||||
self.discogs_artistid = discogs_artistid
|
||||
self.update(kwargs)
|
||||
|
||||
# Work around a bug in python-musicbrainz-ngs that causes some
|
||||
# strings to be bytes rather than Unicode.
|
||||
|
|
@ -121,59 +125,45 @@ class AlbumInfo(object):
|
|||
constituent `TrackInfo` objects, are decoded to Unicode.
|
||||
"""
|
||||
for fld in ['album', 'artist', 'albumtype', 'label', 'artist_sort',
|
||||
'catalognum', 'script', 'language', 'country',
|
||||
'albumstatus', 'albumdisambig', 'releasegroupdisambig',
|
||||
'artist_credit', 'media']:
|
||||
'catalognum', 'script', 'language', 'country', 'style',
|
||||
'genre', 'albumstatus', 'albumdisambig',
|
||||
'releasegroupdisambig', 'artist_credit',
|
||||
'media', 'discogs_albumid', 'discogs_labelid',
|
||||
'discogs_artistid']:
|
||||
value = getattr(self, fld)
|
||||
if isinstance(value, bytes):
|
||||
setattr(self, fld, value.decode(codec, 'ignore'))
|
||||
|
||||
if self.tracks:
|
||||
for track in self.tracks:
|
||||
track.decode(codec)
|
||||
for track in self.tracks:
|
||||
track.decode(codec)
|
||||
|
||||
def copy(self):
|
||||
dupe = AlbumInfo([])
|
||||
dupe.update(self)
|
||||
dupe.tracks = [track.copy() for track in self.tracks]
|
||||
return dupe
|
||||
|
||||
|
||||
class TrackInfo(object):
|
||||
class TrackInfo(AttrDict):
|
||||
"""Describes a canonical track present on a release. Appears as part
|
||||
of an AlbumInfo's ``tracks`` list. Consists of these data members:
|
||||
|
||||
- ``title``: name of the track
|
||||
- ``track_id``: MusicBrainz ID; UUID fragment only
|
||||
- ``release_track_id``: MusicBrainz ID respective to a track on a
|
||||
particular release; UUID fragment only
|
||||
- ``artist``: individual track artist name
|
||||
- ``artist_id``
|
||||
- ``length``: float: duration of the track in seconds
|
||||
- ``index``: position on the entire release
|
||||
- ``media``: delivery mechanism (Vinyl, etc.)
|
||||
- ``medium``: the disc number this track appears on in the album
|
||||
- ``medium_index``: the track's position on the disc
|
||||
- ``medium_total``: the number of tracks on the item's disc
|
||||
- ``artist_sort``: name of the track artist for sorting
|
||||
- ``disctitle``: name of the individual medium (subtitle)
|
||||
- ``artist_credit``: Recording-specific artist name
|
||||
- ``data_source``: The original data source (MusicBrainz, Discogs, etc.)
|
||||
- ``data_url``: The data source release URL.
|
||||
- ``lyricist``: individual track lyricist name
|
||||
- ``composer``: individual track composer name
|
||||
- ``composer_sort``: individual track composer sort name
|
||||
- ``arranger`: individual track arranger name
|
||||
- ``track_alt``: alternative track number (tape, vinyl, etc.)
|
||||
- ``work`: individual track work title
|
||||
- ``mb_workid`: individual track work id
|
||||
- ``work_disambig`: individual track work diambiguation
|
||||
|
||||
Only ``title`` and ``track_id`` are required. The rest of the fields
|
||||
may be None. The indices ``index``, ``medium``, and ``medium_index``
|
||||
are all 1-based.
|
||||
"""
|
||||
def __init__(self, title, track_id, release_track_id=None, artist=None,
|
||||
artist_id=None, length=None, index=None, medium=None,
|
||||
medium_index=None, medium_total=None, artist_sort=None,
|
||||
disctitle=None, artist_credit=None, data_source=None,
|
||||
data_url=None, media=None, lyricist=None, composer=None,
|
||||
composer_sort=None, arranger=None, track_alt=None,
|
||||
work=None, mb_workid=None, work_disambig=None):
|
||||
def __init__(self, title=None, track_id=None, release_track_id=None,
|
||||
artist=None, artist_id=None, length=None, index=None,
|
||||
medium=None, medium_index=None, medium_total=None,
|
||||
artist_sort=None, disctitle=None, artist_credit=None,
|
||||
data_source=None, data_url=None, media=None, lyricist=None,
|
||||
composer=None, composer_sort=None, arranger=None,
|
||||
track_alt=None, work=None, mb_workid=None,
|
||||
work_disambig=None, bpm=None, initial_key=None, genre=None,
|
||||
**kwargs):
|
||||
self.title = title
|
||||
self.track_id = track_id
|
||||
self.release_track_id = release_track_id
|
||||
|
|
@ -198,6 +188,10 @@ class TrackInfo(object):
|
|||
self.work = work
|
||||
self.mb_workid = mb_workid
|
||||
self.work_disambig = work_disambig
|
||||
self.bpm = bpm
|
||||
self.initial_key = initial_key
|
||||
self.genre = genre
|
||||
self.update(kwargs)
|
||||
|
||||
# As above, work around a bug in python-musicbrainz-ngs.
|
||||
def decode(self, codec='utf-8'):
|
||||
|
|
@ -210,6 +204,11 @@ class TrackInfo(object):
|
|||
if isinstance(value, bytes):
|
||||
setattr(self, fld, value.decode(codec, 'ignore'))
|
||||
|
||||
def copy(self):
|
||||
dupe = TrackInfo()
|
||||
dupe.update(self)
|
||||
return dupe
|
||||
|
||||
|
||||
# Candidate distance scoring.
|
||||
|
||||
|
|
@ -333,7 +332,7 @@ class Distance(object):
|
|||
self._penalties = {}
|
||||
|
||||
@LazyClassProperty
|
||||
def _weights(cls): # noqa
|
||||
def _weights(cls): # noqa: N805
|
||||
"""A dictionary from keys to floating-point weights.
|
||||
"""
|
||||
weights_view = config['match']['distance_weights']
|
||||
|
|
@ -600,17 +599,21 @@ def tracks_for_id(track_id):
|
|||
|
||||
|
||||
@plugins.notify_info_yielded(u'albuminfo_received')
|
||||
def album_candidates(items, artist, album, va_likely):
|
||||
def album_candidates(items, artist, album, va_likely, extra_tags):
|
||||
"""Search for album matches. ``items`` is a list of Item objects
|
||||
that make up the album. ``artist`` and ``album`` are the respective
|
||||
names (strings), which may be derived from the item list or may be
|
||||
entered by the user. ``va_likely`` is a boolean indicating whether
|
||||
the album is likely to be a "various artists" release.
|
||||
the album is likely to be a "various artists" release. ``extra_tags``
|
||||
is an optional dictionary of additional tags used to further
|
||||
constrain the search.
|
||||
"""
|
||||
|
||||
# Base candidates if we have album and artist to match.
|
||||
if artist and album:
|
||||
try:
|
||||
for candidate in mb.match_album(artist, album, len(items)):
|
||||
for candidate in mb.match_album(artist, album, len(items),
|
||||
extra_tags):
|
||||
yield candidate
|
||||
except mb.MusicBrainzAPIError as exc:
|
||||
exc.log(log)
|
||||
|
|
@ -618,13 +621,15 @@ def album_candidates(items, artist, album, va_likely):
|
|||
# Also add VA matches from MusicBrainz where appropriate.
|
||||
if va_likely and album:
|
||||
try:
|
||||
for candidate in mb.match_album(None, album, len(items)):
|
||||
for candidate in mb.match_album(None, album, len(items),
|
||||
extra_tags):
|
||||
yield candidate
|
||||
except mb.MusicBrainzAPIError as exc:
|
||||
exc.log(log)
|
||||
|
||||
# Candidates from plugins.
|
||||
for candidate in plugins.candidates(items, artist, album, va_likely):
|
||||
for candidate in plugins.candidates(items, artist, album, va_likely,
|
||||
extra_tags):
|
||||
yield candidate
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -447,6 +447,12 @@ def tag_album(items, search_artist=None, search_album=None,
|
|||
search_artist, search_album = cur_artist, cur_album
|
||||
log.debug(u'Search terms: {0} - {1}', search_artist, search_album)
|
||||
|
||||
extra_tags = None
|
||||
if config['musicbrainz']['extra_tags']:
|
||||
tag_list = config['musicbrainz']['extra_tags'].get()
|
||||
extra_tags = {k: v for (k, v) in likelies.items() if k in tag_list}
|
||||
log.debug(u'Additional search terms: {0}', extra_tags)
|
||||
|
||||
# Is this album likely to be a "various artist" release?
|
||||
va_likely = ((not consensus['artist']) or
|
||||
(search_artist.lower() in VA_ARTISTS) or
|
||||
|
|
@ -457,7 +463,8 @@ def tag_album(items, search_artist=None, search_album=None,
|
|||
for matched_candidate in hooks.album_candidates(items,
|
||||
search_artist,
|
||||
search_album,
|
||||
va_likely):
|
||||
va_likely,
|
||||
extra_tags):
|
||||
_add_candidate(items, candidates, matched_candidate)
|
||||
|
||||
log.debug(u'Evaluating {0} candidates.', len(candidates))
|
||||
|
|
|
|||
|
|
@ -38,8 +38,16 @@ else:
SKIPPED_TRACKS = ['[data track]']

FIELDS_TO_MB_KEYS = {
'catalognum': 'catno',
'country': 'country',
'label': 'label',
'media': 'format',
'year': 'date',
}

musicbrainzngs.set_useragent('beets', beets.__version__,
'http://beets.io/')
'https://beets.io/')


class MusicBrainzAPIError(util.HumanReadableException):
@ -185,8 +193,8 @@ def track_info(recording, index=None, medium=None, medium_index=None,
|
|||
the number of tracks on the medium. Each number is a 1-based index.
|
||||
"""
|
||||
info = beets.autotag.hooks.TrackInfo(
|
||||
recording['title'],
|
||||
recording['id'],
|
||||
title=recording['title'],
|
||||
track_id=recording['id'],
|
||||
index=index,
|
||||
medium=medium,
|
||||
medium_index=medium_index,
|
||||
|
|
@ -333,11 +341,11 @@ def album_info(release):
|
|||
track_infos.append(ti)
|
||||
|
||||
info = beets.autotag.hooks.AlbumInfo(
|
||||
release['title'],
|
||||
release['id'],
|
||||
artist_name,
|
||||
release['artist-credit'][0]['artist']['id'],
|
||||
track_infos,
|
||||
album=release['title'],
|
||||
album_id=release['id'],
|
||||
artist=artist_name,
|
||||
artist_id=release['artist-credit'][0]['artist']['id'],
|
||||
tracks=track_infos,
|
||||
mediums=len(release['medium-list']),
|
||||
artist_sort=artist_sort_name,
|
||||
artist_credit=artist_credit_name,
|
||||
|
|
@ -411,13 +419,13 @@ def album_info(release):
|
|||
return info
|
||||
|
||||
|
||||
def match_album(artist, album, tracks=None):
|
||||
def match_album(artist, album, tracks=None, extra_tags=None):
|
||||
"""Searches for a single album ("release" in MusicBrainz parlance)
|
||||
and returns an iterator over AlbumInfo objects. May raise a
|
||||
MusicBrainzAPIError.
|
||||
|
||||
The query consists of an artist name, an album name, and,
|
||||
optionally, a number of tracks on the album.
|
||||
optionally, a number of tracks on the album and any other extra tags.
|
||||
"""
|
||||
# Build search criteria.
|
||||
criteria = {'release': album.lower().strip()}
|
||||
|
|
@ -429,6 +437,16 @@ def match_album(artist, album, tracks=None):
if tracks is not None:
criteria['tracks'] = six.text_type(tracks)

# Additional search cues from existing metadata.
if extra_tags:
for tag in extra_tags:
key = FIELDS_TO_MB_KEYS[tag]
value = six.text_type(extra_tags.get(tag, '')).lower().strip()
if key == 'catno':
value = value.replace(u' ', '')
if value:
criteria[key] = value

# Abort if we have no search terms.
if not any(criteria.values()):
return
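As an aside (not part of the changeset): a minimal standalone sketch of how the new extra_tags data is folded into the MusicBrainz search criteria, assuming the FIELDS_TO_MB_KEYS mapping introduced above; the sample field values are hypothetical.

# Sketch only; mirrors the criteria-building logic added in this change.
FIELDS_TO_MB_KEYS = {
    'catalognum': 'catno',
    'country': 'country',
    'label': 'label',
    'media': 'format',
    'year': 'date',
}

def build_criteria(artist, album, tracks=None, extra_tags=None):
    criteria = {'release': album.lower().strip(),
                'artist': artist.lower().strip()}
    if tracks is not None:
        criteria['tracks'] = str(tracks)
    for tag, value in (extra_tags or {}).items():
        key = FIELDS_TO_MB_KEYS[tag]
        value = str(value).lower().strip()
        if key == 'catno':
            value = value.replace(' ', '')
        if value:
            criteria[key] = value
    return criteria

# Example with made-up values:
# build_criteria('Artist', 'Album', tracks=10,
#                extra_tags={'year': 2001, 'media': 'CD'})
# -> {'release': 'album', 'artist': 'artist', 'tracks': '10',
#     'date': '2001', 'format': 'cd'}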
|
|
|
|||
|
|
@ -44,6 +44,7 @@ replace:
|
|||
'^\s+': ''
|
||||
'^-': _
|
||||
path_sep_replace: _
|
||||
drive_sep_replace: _
|
||||
asciify_paths: false
|
||||
art_filename: cover
|
||||
max_filename_length: 0
|
||||
|
|
@ -103,6 +104,7 @@ musicbrainz:
|
|||
ratelimit: 1
|
||||
ratelimit_interval: 1.0
|
||||
searchlimit: 5
|
||||
extra_tags: []
|
||||
|
||||
match:
|
||||
strong_rec_thresh: 0.04
|
||||
|
|
|
|||
|
|
@ -19,6 +19,7 @@ from __future__ import division, absolute_import, print_function
|
|||
|
||||
import time
|
||||
import os
|
||||
import re
|
||||
from collections import defaultdict
|
||||
import threading
|
||||
import sqlite3
|
||||
|
|
@ -84,6 +85,11 @@ class FormattedMapping(Mapping):
|
|||
|
||||
if self.for_path:
|
||||
sep_repl = beets.config['path_sep_replace'].as_str()
|
||||
sep_drive = beets.config['drive_sep_replace'].as_str()
|
||||
|
||||
if re.match(r'^\w:', value):
|
||||
value = re.sub(r'(?<=^\w):', sep_drive, value)
|
||||
|
||||
for sep in (os.path.sep, os.path.altsep):
|
||||
if sep:
|
||||
value = value.replace(sep, sep_repl)
|
||||
|
|
@ -189,7 +195,7 @@ class LazyConvertDict(object):
|
|||
|
||||
class Model(object):
|
||||
"""An abstract object representing an object in the database. Model
|
||||
objects act like dictionaries (i.e., the allow subscript access like
|
||||
objects act like dictionaries (i.e., they allow subscript access like
|
||||
``obj['field']``). The same field set is available via attribute
|
||||
access as a shortcut (i.e., ``obj.field``). Three kinds of attributes are
|
||||
available:
|
||||
|
|
|
|||
|
|
@ -156,12 +156,8 @@ class NoneQuery(FieldQuery):
|
|||
def col_clause(self):
|
||||
return self.field + " IS NULL", ()
|
||||
|
||||
@classmethod
|
||||
def match(cls, item):
|
||||
try:
|
||||
return item[cls.field] is None
|
||||
except KeyError:
|
||||
return True
|
||||
def match(self, item):
|
||||
return item.get(self.field) is None
|
||||
|
||||
def __repr__(self):
|
||||
return "{0.__class__.__name__}({0.field!r}, {0.fast})".format(self)
|
||||
|
|
|
|||
|
|
@ -97,7 +97,7 @@ class Type(object):
|
|||
For fixed fields the type of `value` is determined by the column
|
||||
type affinity given in the `sql` property and the SQL to Python
|
||||
mapping of the database adapter. For more information see:
|
||||
http://www.sqlite.org/datatype3.html
|
||||
https://www.sqlite.org/datatype3.html
|
||||
https://docs.python.org/2/library/sqlite3.html#sqlite-and-python-types
|
||||
|
||||
Flexible fields have the type affinity `TEXT`. This means the
|
||||
|
|
@ -131,6 +131,14 @@ class Integer(Type):
|
|||
query = query.NumericQuery
|
||||
model_type = int
|
||||
|
||||
def normalize(self, value):
|
||||
try:
|
||||
return self.model_type(round(float(value)))
|
||||
except ValueError:
|
||||
return self.null
|
||||
except TypeError:
|
||||
return self.null
|
||||
|
||||
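For illustration only (not part of the diff), a self-contained sketch of what the new Integer.normalize() behaviour amounts to: values are rounded to the nearest integer, and unparsable input falls back to the type's null value instead of raising.

# Standalone approximation of the normalize() method added above.
class IntegerSketch(object):
    null = 0
    model_type = int

    def normalize(self, value):
        try:
            return self.model_type(round(float(value)))
        except (ValueError, TypeError):
            return self.null

t = IntegerSketch()
assert t.normalize('3.7') == 4
assert t.normalize(None) == 0
assert t.normalize('not a number') == 0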
|
||||
class PaddedInt(Integer):
|
||||
"""An integer field that is formatted with a given number of digits,
|
||||
|
|
|
|||
|
|
@ -754,6 +754,8 @@ class ImportTask(BaseImportTask):
|
|||
self.record_replaced(lib)
|
||||
self.remove_replaced(lib)
|
||||
self.album = lib.add_album(self.imported_items())
|
||||
if 'data_source' in self.imported_items()[0]:
|
||||
self.album.data_source = self.imported_items()[0].data_source
|
||||
self.reimport_metadata(lib)
|
||||
|
||||
def record_replaced(self, lib):
|
||||
|
|
@ -1032,8 +1034,8 @@ class ArchiveImportTask(SentinelImportTask):
|
|||
cls._handlers = []
|
||||
from zipfile import is_zipfile, ZipFile
|
||||
cls._handlers.append((is_zipfile, ZipFile))
|
||||
from tarfile import is_tarfile, TarFile
|
||||
cls._handlers.append((is_tarfile, TarFile))
|
||||
import tarfile
|
||||
cls._handlers.append((tarfile.is_tarfile, tarfile.open))
|
||||
try:
|
||||
from rarfile import is_rarfile, RarFile
|
||||
except ImportError:
|
||||
|
|
|
|||
|
|
@ -413,7 +413,8 @@ class FormattedItemMapping(dbcore.db.FormattedMapping):
|
|||
raise KeyError(key)
|
||||
|
||||
def __getitem__(self, key):
|
||||
"""Get the value for a key. Certain unset values are remapped.
|
||||
"""Get the value for a key. `artist` and `albumartist`
|
||||
are fallback values for each other when not set.
|
||||
"""
|
||||
value = self._get(key)
|
||||
|
||||
|
|
@ -451,6 +452,10 @@ class Item(LibModel):
|
|||
'albumartist_sort': types.STRING,
|
||||
'albumartist_credit': types.STRING,
|
||||
'genre': types.STRING,
|
||||
'style': types.STRING,
|
||||
'discogs_albumid': types.INTEGER,
|
||||
'discogs_artistid': types.INTEGER,
|
||||
'discogs_labelid': types.INTEGER,
|
||||
'lyricist': types.STRING,
|
||||
'composer': types.STRING,
|
||||
'composer_sort': types.STRING,
|
||||
|
|
@ -989,6 +994,10 @@ class Album(LibModel):
|
|||
'albumartist_credit': types.STRING,
|
||||
'album': types.STRING,
|
||||
'genre': types.STRING,
|
||||
'style': types.STRING,
|
||||
'discogs_albumid': types.INTEGER,
|
||||
'discogs_artistid': types.INTEGER,
|
||||
'discogs_labelid': types.INTEGER,
|
||||
'year': types.PaddedInt(4),
|
||||
'month': types.PaddedInt(2),
|
||||
'day': types.PaddedInt(2),
|
||||
|
|
@ -1034,6 +1043,10 @@ class Album(LibModel):
|
|||
'albumartist_credit',
|
||||
'album',
|
||||
'genre',
|
||||
'style',
|
||||
'discogs_albumid',
|
||||
'discogs_artistid',
|
||||
'discogs_labelid',
|
||||
'year',
|
||||
'month',
|
||||
'day',
|
||||
|
|
@ -1172,7 +1185,7 @@ class Album(LibModel):
|
|||
"""
|
||||
item = self.items().get()
|
||||
if not item:
|
||||
raise ValueError(u'empty album')
|
||||
raise ValueError(u'empty album for album id %d' % self.id)
|
||||
return os.path.dirname(item.path)
|
||||
|
||||
def _albumtotal(self):
|
||||
|
|
|
|||
197
beets/plugins.py
|
|
@ -20,6 +20,7 @@ from __future__ import division, absolute_import, print_function
|
|||
import traceback
|
||||
import re
|
||||
import inspect
|
||||
import abc
|
||||
from collections import defaultdict
|
||||
from functools import wraps
|
||||
|
||||
|
|
@ -29,6 +30,7 @@ from beets import logging
|
|||
import mediafile
|
||||
import six
|
||||
|
||||
|
||||
PLUGIN_NAMESPACE = 'beetsplug'
|
||||
|
||||
# Plugins using the Last.fm API can share the same API key.
|
||||
|
|
@ -170,7 +172,7 @@ class BeetsPlugin(object):
|
|||
"""
|
||||
return beets.autotag.hooks.Distance()
|
||||
|
||||
def candidates(self, items, artist, album, va_likely):
|
||||
def candidates(self, items, artist, album, va_likely, extra_tags=None):
|
||||
"""Should return a sequence of AlbumInfo objects that match the
|
||||
album whose items are provided.
|
||||
"""
|
||||
|
|
@ -204,7 +206,7 @@ class BeetsPlugin(object):
|
|||
|
||||
``descriptor`` must be an instance of ``mediafile.MediaField``.
|
||||
"""
|
||||
# Defer impor to prevent circular dependency
|
||||
# Defer import to prevent circular dependency
|
||||
from beets import library
|
||||
mediafile.MediaFile.add_field(name, descriptor)
|
||||
library.Item._media_fields.add(name)
|
||||
|
|
@ -377,11 +379,12 @@ def album_distance(items, album_info, mapping):
|
|||
return dist
|
||||
|
||||
|
||||
def candidates(items, artist, album, va_likely):
|
||||
def candidates(items, artist, album, va_likely, extra_tags=None):
|
||||
"""Gets MusicBrainz candidates for an album from each plugin.
|
||||
"""
|
||||
for plugin in find_plugins():
|
||||
for candidate in plugin.candidates(items, artist, album, va_likely):
|
||||
for candidate in plugin.candidates(items, artist, album, va_likely,
|
||||
extra_tags):
|
||||
yield candidate
|
||||
|
||||
|
||||
|
|
@ -576,3 +579,189 @@ def notify_info_yielded(event):
yield v
return decorated
return decorator


def get_distance(config, data_source, info):
"""Returns the ``data_source`` weight and the maximum source weight
for albums or individual tracks.
"""
dist = beets.autotag.Distance()
if info.data_source == data_source:
dist.add('source', config['source_weight'].as_number())
return dist
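A rough usage sketch (not part of the changeset) of how a metadata source plugin is expected to call the new get_distance() helper; the plugin name and weight are placeholder examples.

from beets.plugins import BeetsPlugin, get_distance

class ExampleSourcePlugin(BeetsPlugin):
    data_source = 'Example'  # hypothetical source name

    def __init__(self):
        super(ExampleSourcePlugin, self).__init__()
        self.config.add({'source_weight': 0.5})

    def album_distance(self, items, album_info, mapping):
        # Adds a 'source' penalty when the candidate came from this source.
        return get_distance(data_source=self.data_source,
                            info=album_info,
                            config=self.config)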
|
||||
|
||||
def apply_item_changes(lib, item, move, pretend, write):
|
||||
"""Store, move, and write the item according to the arguments.
|
||||
|
||||
:param lib: beets library.
|
||||
:type lib: beets.library.Library
|
||||
:param item: Item whose changes to apply.
|
||||
:type item: beets.library.Item
|
||||
:param move: Move the item if it's in the library.
|
||||
:type move: bool
|
||||
:param pretend: Return without moving, writing, or storing the item's
|
||||
metadata.
|
||||
:type pretend: bool
|
||||
:param write: Write the item's metadata to its media file.
|
||||
:type write: bool
|
||||
"""
|
||||
if pretend:
|
||||
return
|
||||
|
||||
from beets import util
|
||||
|
||||
# Move the item if it's in the library.
|
||||
if move and lib.directory in util.ancestry(item.path):
|
||||
item.move(with_album=False)
|
||||
|
||||
if write:
|
||||
item.try_write()
|
||||
|
||||
item.store()
|
||||
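A hedged sketch (not part of the diff) of how a sync-style plugin command might call apply_item_changes(); the command body and option names are placeholders.

from beets import ui
from beets.plugins import apply_item_changes

def sync_command(lib, opts, args):
    for item in lib.items(ui.decargs(args)):
        # ... update item fields from an external source here ...
        apply_item_changes(lib, item,
                           move=ui.should_move(opts.move),
                           pretend=opts.pretend,
                           write=ui.should_write(opts.write))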
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class MetadataSourcePlugin(object):
|
||||
def __init__(self):
|
||||
super(MetadataSourcePlugin, self).__init__()
|
||||
self.config.add({'source_weight': 0.5})
|
||||
|
||||
@abc.abstractproperty
|
||||
def id_regex(self):
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractproperty
|
||||
def data_source(self):
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractproperty
|
||||
def search_url(self):
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractproperty
|
||||
def album_url(self):
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractproperty
|
||||
def track_url(self):
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractmethod
|
||||
def _search_api(self, query_type, filters, keywords=''):
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractmethod
|
||||
def album_for_id(self, album_id):
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractmethod
|
||||
def track_for_id(self, track_id=None, track_data=None):
|
||||
raise NotImplementedError
|
||||
|
||||
@staticmethod
|
||||
def get_artist(artists, id_key='id', name_key='name'):
|
||||
"""Returns an artist string (all artists) and an artist_id (the main
|
||||
artist) for a list of artist object dicts.
|
||||
|
||||
For each artist, this function moves articles (such as 'a', 'an',
|
||||
and 'the') to the front and strips trailing disambiguation numbers. It
|
||||
returns a tuple containing the comma-separated string of all
|
||||
normalized artists and the ``id`` of the main/first artist.
|
||||
|
||||
:param artists: Iterable of artist dicts or lists returned by API.
|
||||
:type artists: list[dict] or list[list]
|
||||
:param id_key: Key or index corresponding to the value of ``id`` for
|
||||
the main/first artist. Defaults to 'id'.
|
||||
:type id_key: str or int
|
||||
:param name_key: Key or index corresponding to values of names
|
||||
to concatenate for the artist string (containing all artists).
|
||||
Defaults to 'name'.
|
||||
:type name_key: str or int
|
||||
:return: Normalized artist string.
|
||||
:rtype: str
|
||||
"""
|
||||
artist_id = None
|
||||
artist_names = []
|
||||
for artist in artists:
|
||||
if not artist_id:
|
||||
artist_id = artist[id_key]
|
||||
name = artist[name_key]
|
||||
# Strip disambiguation number.
|
||||
name = re.sub(r' \(\d+\)$', '', name)
|
||||
# Move articles to the front.
|
||||
name = re.sub(r'^(.*?), (a|an|the)$', r'\2 \1', name, flags=re.I)
|
||||
artist_names.append(name)
|
||||
artist = ', '.join(artist_names).replace(' ,', ',') or None
|
||||
return artist, artist_id
|
||||
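An example of the documented normalization (illustration only; the artist dicts are made up):

from beets.plugins import MetadataSourcePlugin

artists = [
    {'id': 42, 'name': 'Beatles, The'},
    {'id': 43, 'name': 'Example Artist (2)'},
]
artist, artist_id = MetadataSourcePlugin.get_artist(artists)
# artist == 'The Beatles, Example Artist'
# artist_id == 42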
|
||||
def _get_id(self, url_type, id_):
|
||||
"""Parse an ID from its URL if necessary.
|
||||
|
||||
:param url_type: Type of URL. Either 'album' or 'track'.
|
||||
:type url_type: str
|
||||
:param id_: Album/track ID or URL.
|
||||
:type id_: str
|
||||
:return: Album/track ID.
|
||||
:rtype: str
|
||||
"""
|
||||
self._log.debug(
|
||||
u"Searching {} for {} '{}'", self.data_source, url_type, id_
|
||||
)
|
||||
match = re.search(self.id_regex['pattern'].format(url_type), str(id_))
|
||||
if match:
|
||||
id_ = match.group(self.id_regex['match_group'])
|
||||
if id_:
|
||||
return id_
|
||||
return None
|
||||
|
||||
def candidates(self, items, artist, album, va_likely, extra_tags=None):
|
||||
"""Returns a list of AlbumInfo objects for Search API results
|
||||
matching an ``album`` and ``artist`` (if not various).
|
||||
|
||||
:param items: List of items comprised by an album to be matched.
|
||||
:type items: list[beets.library.Item]
|
||||
:param artist: The artist of the album to be matched.
|
||||
:type artist: str
|
||||
:param album: The name of the album to be matched.
|
||||
:type album: str
|
||||
:param va_likely: True if the album to be matched likely has
|
||||
Various Artists.
|
||||
:type va_likely: bool
|
||||
:return: Candidate AlbumInfo objects.
|
||||
:rtype: list[beets.autotag.hooks.AlbumInfo]
|
||||
"""
|
||||
query_filters = {'album': album}
|
||||
if not va_likely:
|
||||
query_filters['artist'] = artist
|
||||
results = self._search_api(query_type='album', filters=query_filters)
|
||||
albums = [self.album_for_id(album_id=r['id']) for r in results]
|
||||
return [a for a in albums if a is not None]
|
||||
|
||||
def item_candidates(self, item, artist, title):
|
||||
"""Returns a list of TrackInfo objects for Search API results
|
||||
matching ``title`` and ``artist``.
|
||||
|
||||
:param item: Singleton item to be matched.
|
||||
:type item: beets.library.Item
|
||||
:param artist: The artist of the track to be matched.
|
||||
:type artist: str
|
||||
:param title: The title of the track to be matched.
|
||||
:type title: str
|
||||
:return: Candidate TrackInfo objects.
|
||||
:rtype: list[beets.autotag.hooks.TrackInfo]
|
||||
"""
|
||||
tracks = self._search_api(
|
||||
query_type='track', keywords=title, filters={'artist': artist}
|
||||
)
|
||||
return [self.track_for_id(track_data=track) for track in tracks]
|
||||
|
||||
def album_distance(self, items, album_info, mapping):
|
||||
return get_distance(
|
||||
data_source=self.data_source, info=album_info, config=self.config
|
||||
)
|
||||
|
||||
def track_distance(self, item, track_info):
|
||||
return get_distance(
|
||||
data_source=self.data_source, info=track_info, config=self.config
|
||||
)
|
||||
|
|
|
|||
|
|
@ -204,7 +204,7 @@ def input_(prompt=None):
|
|||
"""
|
||||
# raw_input incorrectly sends prompts to stderr, not stdout, so we
|
||||
# use print_() explicitly to display prompts.
|
||||
# http://bugs.python.org/issue1927
|
||||
# https://bugs.python.org/issue1927
|
||||
if prompt:
|
||||
print_(prompt, end=u' ')
|
||||
|
||||
|
|
@ -475,7 +475,7 @@ def human_seconds_short(interval):
|
|||
# Colorization.
|
||||
|
||||
# ANSI terminal colorization code heavily inspired by pygments:
|
||||
# http://dev.pocoo.org/hg/pygments-main/file/b2deea5b5030/pygments/console.py
|
||||
# https://bitbucket.org/birkenfeld/pygments-main/src/default/pygments/console.py
|
||||
# (pygments is by Tim Hatch, Armin Ronacher, et al.)
|
||||
COLOR_ESCAPE = "\x1b["
|
||||
DARK_COLORS = {
|
||||
|
|
@ -929,7 +929,7 @@ class CommonOptionsParser(optparse.OptionParser, object):
|
|||
#
|
||||
# This is a fairly generic subcommand parser for optparse. It is
|
||||
# maintained externally here:
|
||||
# http://gist.github.com/462717
|
||||
# https://gist.github.com/462717
|
||||
# There you will also find a better description of the code and a more
|
||||
# succinct example program.
|
||||
|
||||
|
|
|
|||
|
|
@ -241,7 +241,8 @@ def show_change(cur_artist, cur_album, match):
|
|||
if mediums and mediums > 1:
|
||||
return u'{0}-{1}'.format(medium, medium_index)
|
||||
else:
|
||||
return six.text_type(medium_index or index)
|
||||
return six.text_type(medium_index if medium_index is not None
|
||||
else index)
|
||||
else:
|
||||
return six.text_type(index)
|
||||
|
||||
|
|
@ -476,10 +477,11 @@ def summarize_items(items, singleton):
|
|||
def _summary_judgment(rec):
|
||||
"""Determines whether a decision should be made without even asking
|
||||
the user. This occurs in quiet mode and when an action is chosen for
|
||||
NONE recommendations. Return an action or None if the user should be
|
||||
queried. May also print to the console if a summary judgment is
|
||||
made.
|
||||
NONE recommendations. Return None if the user should be queried.
|
||||
Otherwise, returns an action. May also print to the console if a
|
||||
summary judgment is made.
|
||||
"""
|
||||
|
||||
if config['import']['quiet']:
|
||||
if rec == Recommendation.strong:
|
||||
return importer.action.APPLY
|
||||
|
|
@ -488,14 +490,14 @@ def _summary_judgment(rec):
|
|||
'skip': importer.action.SKIP,
|
||||
'asis': importer.action.ASIS,
|
||||
})
|
||||
|
||||
elif config['import']['timid']:
|
||||
return None
|
||||
elif rec == Recommendation.none:
|
||||
action = config['import']['none_rec_action'].as_choice({
|
||||
'skip': importer.action.SKIP,
|
||||
'asis': importer.action.ASIS,
|
||||
'ask': None,
|
||||
})
|
||||
|
||||
else:
|
||||
return None
|
||||
|
||||
|
|
@ -542,7 +544,7 @@ def choose_candidate(candidates, singleton, rec, cur_artist=None,
|
|||
print_(u"No matching release found for {0} tracks."
|
||||
.format(itemcount))
|
||||
print_(u'For help, see: '
|
||||
u'http://beets.readthedocs.org/en/latest/faq.html#nomatch')
|
||||
u'https://beets.readthedocs.org/en/latest/faq.html#nomatch')
|
||||
sel = ui.input_options(choice_opts)
|
||||
if sel in choice_actions:
|
||||
return choice_actions[sel]
|
||||
|
|
@ -1183,6 +1185,12 @@ def update_items(lib, query, album, move, pretend, fields):
|
|||
|
||||
|
||||
def update_func(lib, opts, args):
|
||||
# Verify that the library folder exists to prevent accidental wipes.
|
||||
if not os.path.isdir(lib.directory):
|
||||
ui.print_("Library path is unavailable or does not exist.")
|
||||
ui.print_(lib.directory)
|
||||
if not ui.input_yn("Are you sure you want to continue (y/n)?", True):
|
||||
return
|
||||
update_items(lib, decargs(args), opts.album, ui.should_move(opts.move),
|
||||
opts.pretend, opts.fields)
|
||||
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ import re
|
|||
import shutil
|
||||
import fnmatch
|
||||
import functools
|
||||
from collections import Counter
|
||||
from collections import Counter, namedtuple
|
||||
from multiprocessing.pool import ThreadPool
|
||||
import traceback
|
||||
import subprocess
|
||||
|
|
@ -223,6 +223,13 @@ def sorted_walk(path, ignore=(), ignore_hidden=False, logger=None):
|
|||
yield res
|
||||
|
||||
|
||||
def path_as_posix(path):
|
||||
"""Return the string representation of the path with forward (/)
|
||||
slashes.
|
||||
"""
|
||||
return path.replace(b'\\', b'/')
|
||||
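For example (sketch, not part of the diff):

# path_as_posix(b'music\\Artist\\Album') == b'music/Artist/Album'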
|
||||
|
||||
def mkdirall(path):
|
||||
"""Make all the enclosing directories of path (like mkdir -p on the
|
||||
parent).
|
||||
|
|
@ -412,7 +419,7 @@ def syspath(path, prefix=True):
|
|||
path = path.decode(encoding, 'replace')
|
||||
|
||||
# Add the magic prefix if it isn't already there.
|
||||
# http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx
|
||||
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx
|
||||
if prefix and not path.startswith(WINDOWS_MAGIC_PREFIX):
|
||||
if path.startswith(u'\\\\'):
|
||||
# UNC path. Final path should look like \\?\UNC\...
|
||||
|
|
@ -563,7 +570,7 @@ def unique_path(path):
|
|||
# Note: The Windows "reserved characters" are, of course, allowed on
|
||||
# Unix. They are forbidden here because they cause problems on Samba
|
||||
# shares, which are sufficiently common as to cause frequent problems.
|
||||
# http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx
|
||||
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx
|
||||
CHAR_REPLACE = [
|
||||
(re.compile(r'[\\/]'), u'_'), # / and \ -- forbidden everywhere.
|
||||
(re.compile(r'^\.'), u'_'), # Leading dot (hidden files on Unix).
|
||||
|
|
@ -763,7 +770,11 @@ def cpu_count():
|
|||
num = 0
|
||||
elif sys.platform == 'darwin':
|
||||
try:
|
||||
num = int(command_output(['/usr/sbin/sysctl', '-n', 'hw.ncpu']))
|
||||
num = int(command_output([
|
||||
'/usr/sbin/sysctl',
|
||||
'-n',
|
||||
'hw.ncpu',
|
||||
]).stdout)
|
||||
except (ValueError, OSError, subprocess.CalledProcessError):
|
||||
num = 0
|
||||
else:
|
||||
|
|
@ -794,9 +805,16 @@ def convert_command_args(args):
|
|||
return [convert(a) for a in args]
|
||||
|
||||
|
||||
# stdout and stderr as bytes
|
||||
CommandOutput = namedtuple("CommandOutput", ("stdout", "stderr"))
|
||||
|
||||
|
||||
def command_output(cmd, shell=False):
|
||||
"""Runs the command and returns its output after it has exited.
|
||||
|
||||
Returns a CommandOutput. The attributes ``stdout`` and ``stderr`` contain
|
||||
byte strings of the respective output streams.
|
||||
|
||||
``cmd`` is a list of arguments starting with the command names. The
|
||||
arguments are bytes on Unix and strings on Windows.
|
||||
If ``shell`` is true, ``cmd`` is assumed to be a string and passed to a
|
||||
|
|
@ -831,7 +849,7 @@ def command_output(cmd, shell=False):
cmd=' '.join(cmd),
output=stdout + stderr,
)
return stdout
return CommandOutput(stdout, stderr)
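Callers now receive a named tuple instead of bare stdout; a hedged usage sketch (the command shown is only an example):

from beets import util

out = util.command_output(['ffmpeg', '-version'])
stdout_bytes = out.stdout   # what the old return value used to be
stderr_bytes = out.stderr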
|
||||
|
||||
def max_filename_length(path, limit=MAX_FILENAME_LENGTH):
|
||||
|
|
|
|||
|
|
@ -40,14 +40,19 @@ else:
log = logging.getLogger('beets')


def resize_url(url, maxwidth):
def resize_url(url, maxwidth, quality=0):
"""Return a proxied image URL that resizes the original image to
maxwidth (preserving aspect ratio).
"""
return '{0}?{1}'.format(PROXY_URL, urlencode({
params = {
'url': url.replace('http://', ''),
'w': maxwidth,
}))
}

if quality > 0:
params['q'] = quality

return '{0}?{1}'.format(PROXY_URL, urlencode(params))
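A hedged example of the new behaviour (the PROXY_URL value is elided and parameter order may differ by Python version):

# resize_url('http://example.com/cover.jpg', 600)
#   -> PROXY_URL + '?url=example.com%2Fcover.jpg&w=600'
# resize_url('http://example.com/cover.jpg', 600, quality=75)
#   -> PROXY_URL + '?url=example.com%2Fcover.jpg&w=600&q=75'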
|
||||
|
||||
def temp_file_for(path):
|
||||
|
|
@ -59,7 +64,7 @@ def temp_file_for(path):
|
|||
return util.bytestring_path(f.name)
|
||||
|
||||
|
||||
def pil_resize(maxwidth, path_in, path_out=None):
|
||||
def pil_resize(maxwidth, path_in, path_out=None, quality=0):
|
||||
"""Resize using Python Imaging Library (PIL). Return the output path
|
||||
of resized image.
|
||||
"""
|
||||
|
|
@ -72,7 +77,7 @@ def pil_resize(maxwidth, path_in, path_out=None):
|
|||
im = Image.open(util.syspath(path_in))
|
||||
size = maxwidth, maxwidth
|
||||
im.thumbnail(size, Image.ANTIALIAS)
|
||||
im.save(util.py3_path(path_out))
|
||||
im.save(util.py3_path(path_out), quality=quality)
|
||||
return path_out
|
||||
except IOError:
|
||||
log.error(u"PIL cannot create thumbnail for '{0}'",
|
||||
|
|
@ -80,7 +85,7 @@ def pil_resize(maxwidth, path_in, path_out=None):
|
|||
return path_in
|
||||
|
||||
|
||||
def im_resize(maxwidth, path_in, path_out=None):
|
||||
def im_resize(maxwidth, path_in, path_out=None, quality=0):
|
||||
"""Resize using ImageMagick.
|
||||
|
||||
Use the ``magick`` program or ``convert`` on older versions. Return
|
||||
|
|
@ -93,10 +98,15 @@ def im_resize(maxwidth, path_in, path_out=None):
|
|||
# "-resize WIDTHx>" shrinks images with the width larger
|
||||
# than the given width while maintaining the aspect ratio
|
||||
# with regards to the height.
|
||||
cmd = ArtResizer.shared.im_convert_cmd + \
|
||||
[util.syspath(path_in, prefix=False),
|
||||
'-resize', '{0}x>'.format(maxwidth),
|
||||
util.syspath(path_out, prefix=False)]
|
||||
cmd = ArtResizer.shared.im_convert_cmd + [
|
||||
util.syspath(path_in, prefix=False),
|
||||
'-resize', '{0}x>'.format(maxwidth),
|
||||
]
|
||||
|
||||
if quality > 0:
|
||||
cmd += ['-quality', '{0}'.format(quality)]
|
||||
|
||||
cmd.append(util.syspath(path_out, prefix=False))
|
||||
|
||||
try:
|
||||
util.command_output(cmd)
|
||||
|
|
@ -129,7 +139,7 @@ def im_getsize(path_in):
|
|||
['-format', '%w %h', util.syspath(path_in, prefix=False)]
|
||||
|
||||
try:
|
||||
out = util.command_output(cmd)
|
||||
out = util.command_output(cmd).stdout
|
||||
except subprocess.CalledProcessError as exc:
|
||||
log.warning(u'ImageMagick size query failed')
|
||||
log.debug(
|
||||
|
|
@ -190,18 +200,19 @@ class ArtResizer(six.with_metaclass(Shareable, object)):
|
|||
self.im_convert_cmd = ['magick']
|
||||
self.im_identify_cmd = ['magick', 'identify']
|
||||
|
||||
def resize(self, maxwidth, path_in, path_out=None):
|
||||
def resize(self, maxwidth, path_in, path_out=None, quality=0):
|
||||
"""Manipulate an image file according to the method, returning a
|
||||
new path. For PIL or IMAGEMAGIC methods, resizes the image to a
|
||||
temporary file. For WEBPROXY, returns `path_in` unmodified.
|
||||
temporary file and encodes with the specified quality level.
|
||||
For WEBPROXY, returns `path_in` unmodified.
|
||||
"""
|
||||
if self.local:
|
||||
func = BACKEND_FUNCS[self.method[0]]
|
||||
return func(maxwidth, path_in, path_out)
|
||||
return func(maxwidth, path_in, path_out, quality=quality)
|
||||
else:
|
||||
return path_in
|
||||
|
||||
def proxy_url(self, maxwidth, url):
|
||||
def proxy_url(self, maxwidth, url, quality=0):
|
||||
"""Modifies an image URL according the method, returning a new
|
||||
URL. For WEBPROXY, a URL on the proxy server is returned.
|
||||
Otherwise, the URL is returned unmodified.
|
||||
|
|
@ -209,7 +220,7 @@ class ArtResizer(six.with_metaclass(Shareable, object)):
|
|||
if self.local:
|
||||
return url
|
||||
else:
|
||||
return resize_url(url, maxwidth)
|
||||
return resize_url(url, maxwidth, quality)
|
||||
|
||||
@property
|
||||
def local(self):
|
||||
|
|
@ -265,7 +276,7 @@ def get_im_version():
|
|||
cmd = cmd_name + ['--version']
|
||||
|
||||
try:
|
||||
out = util.command_output(cmd)
|
||||
out = util.command_output(cmd).stdout
|
||||
except (subprocess.CalledProcessError, OSError) as exc:
|
||||
log.debug(u'ImageMagick version check failed: {}', exc)
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -73,15 +73,26 @@ def ex_literal(val):
|
|||
"""An int, float, long, bool, string, or None literal with the given
|
||||
value.
|
||||
"""
|
||||
if val is None:
|
||||
return ast.Name('None', ast.Load())
|
||||
elif isinstance(val, six.integer_types):
|
||||
return ast.Num(val)
|
||||
elif isinstance(val, bool):
|
||||
return ast.Name(bytes(val), ast.Load())
|
||||
elif isinstance(val, six.string_types):
|
||||
return ast.Str(val)
|
||||
raise TypeError(u'no literal for {0}'.format(type(val)))
|
||||
if sys.version_info[:2] < (3, 4):
|
||||
if val is None:
|
||||
return ast.Name('None', ast.Load())
|
||||
elif isinstance(val, six.integer_types):
|
||||
return ast.Num(val)
|
||||
elif isinstance(val, bool):
|
||||
return ast.Name(bytes(val), ast.Load())
|
||||
elif isinstance(val, six.string_types):
|
||||
return ast.Str(val)
|
||||
raise TypeError(u'no literal for {0}'.format(type(val)))
|
||||
elif sys.version_info[:2] < (3, 6):
|
||||
if val in [None, True, False]:
|
||||
return ast.NameConstant(val)
|
||||
elif isinstance(val, six.integer_types):
|
||||
return ast.Num(val)
|
||||
elif isinstance(val, six.string_types):
|
||||
return ast.Str(val)
|
||||
raise TypeError(u'no literal for {0}'.format(type(val)))
|
||||
else:
|
||||
return ast.Constant(val)
|
||||
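A short illustration (not part of the changeset) of what ex_literal() now produces on each interpreter, per the branches above:

# ex_literal(5):
#   Python < 3.6  -> ast.Num(5)
#   Python >= 3.6 -> ast.Constant(5)
# ex_literal(None):
#   Python < 3.4         -> ast.Name('None', ast.Load())
#   3.4 <= Python < 3.6  -> ast.NameConstant(None)
#   Python >= 3.6        -> ast.Constant(None)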
|
||||
|
||||
def ex_varassign(name, expr):
|
||||
|
|
|
|||
|
|
@ -32,6 +32,9 @@ from beets import plugins
|
|||
from beets import util
|
||||
from beets import ui
|
||||
|
||||
# We use this field to check whether AcousticBrainz info is present.
|
||||
PROBE_FIELD = 'mood_acoustic'
|
||||
|
||||
|
||||
class ABSubmitError(Exception):
|
||||
"""Raised when failing to analyse file with extractor."""
|
||||
|
|
@ -43,7 +46,7 @@ def call(args):
|
|||
Raise a AnalysisABSubmitError on failure.
|
||||
"""
|
||||
try:
|
||||
return util.command_output(args)
|
||||
return util.command_output(args).stdout
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise ABSubmitError(
|
||||
u'{0} exited with status {1}'.format(args[0], e.returncode)
|
||||
|
|
@ -55,7 +58,11 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
|||
def __init__(self):
|
||||
super(AcousticBrainzSubmitPlugin, self).__init__()
|
||||
|
||||
self.config.add({'extractor': u''})
|
||||
self.config.add({
|
||||
'extractor': u'',
|
||||
'force': False,
|
||||
'pretend': False
|
||||
})
|
||||
|
||||
self.extractor = self.config['extractor'].as_str()
|
||||
if self.extractor:
|
||||
|
|
@ -73,8 +80,8 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
|||
call([self.extractor])
|
||||
except OSError:
|
||||
raise ui.UserError(
|
||||
u'No extractor command found: please install the '
|
||||
u'extractor binary from http://acousticbrainz.org/download'
|
||||
u'No extractor command found: please install the extractor'
|
||||
u' binary from https://acousticbrainz.org/download'
|
||||
)
|
||||
except ABSubmitError:
|
||||
# Extractor found, will exit with an error if not called with
|
||||
|
|
@ -98,12 +105,24 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
|||
'absubmit',
|
||||
help=u'calculate and submit AcousticBrainz analysis'
|
||||
)
|
||||
cmd.parser.add_option(
|
||||
u'-f', u'--force', dest='force_refetch',
|
||||
action='store_true', default=False,
|
||||
help=u're-download data when already present'
|
||||
)
|
||||
cmd.parser.add_option(
|
||||
u'-p', u'--pretend', dest='pretend_fetch',
|
||||
action='store_true', default=False,
|
||||
help=u'pretend to perform action, but show \
|
||||
only files which would be processed'
|
||||
)
|
||||
cmd.func = self.command
|
||||
return [cmd]
|
||||
|
||||
def command(self, lib, opts, args):
|
||||
# Get items from arguments
|
||||
items = lib.items(ui.decargs(args))
|
||||
self.opts = opts
|
||||
util.par_map(self.analyze_submit, items)
|
||||
|
||||
def analyze_submit(self, item):
|
||||
|
|
@ -113,12 +132,22 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
|||
|
||||
def _get_analysis(self, item):
|
||||
mbid = item['mb_trackid']
|
||||
# If file has no mbid skip it.
|
||||
|
||||
# Avoid re-analyzing files that already have AB data.
|
||||
if not self.opts.force_refetch and not self.config['force']:
|
||||
if item.get(PROBE_FIELD):
|
||||
return None
|
||||
|
||||
# If file has no MBID, skip it.
|
||||
if not mbid:
|
||||
self._log.info(u'Not analysing {}, missing '
|
||||
u'musicbrainz track id.', item)
|
||||
return None
|
||||
|
||||
if self.opts.pretend_fetch or self.config['pretend']:
|
||||
self._log.info(u'pretend action - extract item: {}', item)
|
||||
return None
|
||||
|
||||
# Temporary file to save extractor output to, extractor only works
|
||||
# if an output file is given. Here we use a temporary file to copy
|
||||
# the data into a python object and then remove the file from the
|
||||
|
|
@ -135,7 +164,7 @@ class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
|
|||
item=item, error=e
|
||||
)
|
||||
return None
|
||||
with open(filename, 'rb') as tmp_file:
|
||||
with open(filename, 'r') as tmp_file:
|
||||
analysis = json.load(tmp_file)
|
||||
# Add the hash to the output.
|
||||
analysis['metadata']['version']['essentia_build_sha'] = \
|
||||
|
|
|
|||
|
|
@ -74,6 +74,9 @@ ABSCHEME = {
|
|||
'sad': 'mood_sad'
|
||||
}
|
||||
},
|
||||
'moods_mirex': {
|
||||
'value': 'moods_mirex'
|
||||
},
|
||||
'ismir04_rhythm': {
|
||||
'value': 'rhythm'
|
||||
},
|
||||
|
|
@ -82,6 +85,9 @@ ABSCHEME = {
|
|||
'tonal': 'tonal'
|
||||
}
|
||||
},
|
||||
'timbre': {
|
||||
'value': 'timbre'
|
||||
},
|
||||
'voice_instrumental': {
|
||||
'value': 'voice_instrumental'
|
||||
},
|
||||
|
|
@ -124,7 +130,9 @@ class AcousticPlugin(plugins.BeetsPlugin):
|
|||
'mood_party': types.Float(6),
|
||||
'mood_relaxed': types.Float(6),
|
||||
'mood_sad': types.Float(6),
|
||||
'moods_mirex': types.STRING,
|
||||
'rhythm': types.Float(6),
|
||||
'timbre': types.STRING,
|
||||
'tonal': types.Float(6),
|
||||
'voice_instrumental': types.STRING,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -28,13 +28,13 @@ from requests_oauthlib.oauth1_session import (TokenRequestDenied, TokenMissing,
|
|||
|
||||
import beets
|
||||
import beets.ui
|
||||
from beets.autotag.hooks import AlbumInfo, TrackInfo, Distance
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beets.autotag.hooks import AlbumInfo, TrackInfo
|
||||
from beets.plugins import BeetsPlugin, MetadataSourcePlugin, get_distance
|
||||
import confuse
|
||||
|
||||
|
||||
AUTH_ERRORS = (TokenRequestDenied, TokenMissing, VerifierMissing)
|
||||
USER_AGENT = u'beets/{0} +http://beets.io/'.format(beets.__version__)
|
||||
USER_AGENT = u'beets/{0} +https://beets.io/'.format(beets.__version__)
|
||||
|
||||
|
||||
class BeatportAPIError(Exception):
|
||||
|
|
@ -109,7 +109,7 @@ class BeatportClient(object):
|
|||
:rtype: (unicode, unicode) tuple
|
||||
"""
|
||||
self.api.parse_authorization_response(
|
||||
"http://beets.io/auth?" + auth_data)
|
||||
"https://beets.io/auth?" + auth_data)
|
||||
access_data = self.api.fetch_access_token(
|
||||
self._make_url('/identity/1/oauth/access-token'))
|
||||
return access_data['oauth_token'], access_data['oauth_token_secret']
|
||||
|
|
@ -150,9 +150,11 @@ class BeatportClient(object):
|
|||
:rtype: :py:class:`BeatportRelease`
|
||||
"""
|
||||
response = self._get('/catalog/3/releases', id=beatport_id)
|
||||
release = BeatportRelease(response[0])
|
||||
release.tracks = self.get_release_tracks(beatport_id)
|
||||
return release
|
||||
if response:
|
||||
release = BeatportRelease(response[0])
|
||||
release.tracks = self.get_release_tracks(beatport_id)
|
||||
return release
|
||||
return None
|
||||
|
||||
def get_release_tracks(self, beatport_id):
|
||||
""" Get all tracks for a given release.
|
||||
|
|
@ -224,8 +226,9 @@ class BeatportRelease(BeatportObject):
|
|||
if 'category' in data:
|
||||
self.category = data['category']
|
||||
if 'slug' in data:
|
||||
self.url = "http://beatport.com/release/{0}/{1}".format(
|
||||
self.url = "https://beatport.com/release/{0}/{1}".format(
|
||||
data['slug'], data['id'])
|
||||
self.genre = data.get('genre')
|
||||
|
||||
|
||||
@six.python_2_unicode_compatible
|
||||
|
|
@ -252,12 +255,24 @@ class BeatportTrack(BeatportObject):
|
|||
except ValueError:
|
||||
pass
|
||||
if 'slug' in data:
|
||||
self.url = "http://beatport.com/track/{0}/{1}".format(data['slug'],
|
||||
data['id'])
|
||||
self.url = "https://beatport.com/track/{0}/{1}" \
|
||||
.format(data['slug'], data['id'])
|
||||
self.track_number = data.get('trackNumber')
|
||||
self.bpm = data.get('bpm')
|
||||
self.initial_key = six.text_type(
|
||||
(data.get('key') or {}).get('shortName')
|
||||
)
|
||||
|
||||
# Use 'subgenre' and if not present, 'genre' as a fallback.
|
||||
if data.get('subGenres'):
|
||||
self.genre = six.text_type(data['subGenres'][0].get('name'))
|
||||
elif data.get('genres'):
|
||||
self.genre = six.text_type(data['genres'][0].get('name'))
|
||||
|
||||
|
||||
class BeatportPlugin(BeetsPlugin):
|
||||
data_source = 'Beatport'
|
||||
|
||||
def __init__(self):
|
||||
super(BeatportPlugin, self).__init__()
|
||||
self.config.add({
|
||||
|
|
@ -321,24 +336,26 @@ class BeatportPlugin(BeetsPlugin):
|
|||
return self.config['tokenfile'].get(confuse.Filename(in_app_dir=True))
|
||||
|
||||
def album_distance(self, items, album_info, mapping):
|
||||
"""Returns the beatport source weight and the maximum source weight
|
||||
"""Returns the Beatport source weight and the maximum source weight
|
||||
for albums.
|
||||
"""
|
||||
dist = Distance()
|
||||
if album_info.data_source == 'Beatport':
|
||||
dist.add('source', self.config['source_weight'].as_number())
|
||||
return dist
|
||||
return get_distance(
|
||||
data_source=self.data_source,
|
||||
info=album_info,
|
||||
config=self.config
|
||||
)
|
||||
|
||||
def track_distance(self, item, track_info):
|
||||
"""Returns the beatport source weight and the maximum source weight
|
||||
"""Returns the Beatport source weight and the maximum source weight
|
||||
for individual tracks.
|
||||
"""
|
||||
dist = Distance()
|
||||
if track_info.data_source == 'Beatport':
|
||||
dist.add('source', self.config['source_weight'].as_number())
|
||||
return dist
|
||||
return get_distance(
|
||||
data_source=self.data_source,
|
||||
info=track_info,
|
||||
config=self.config
|
||||
)
|
||||
|
||||
def candidates(self, items, artist, release, va_likely):
|
||||
def candidates(self, items, artist, release, va_likely, extra_tags=None):
|
||||
"""Returns a list of AlbumInfo objects for beatport search results
|
||||
matching release and artist (if not various).
|
||||
"""
|
||||
|
|
@ -365,27 +382,31 @@ class BeatportPlugin(BeetsPlugin):
|
|||
|
||||
def album_for_id(self, release_id):
|
||||
"""Fetches a release by its Beatport ID and returns an AlbumInfo object
|
||||
or None if the release is not found.
|
||||
or None if the query is not a valid ID or release is not found.
|
||||
"""
|
||||
self._log.debug(u'Searching for release {0}', release_id)
|
||||
match = re.search(r'(^|beatport\.com/release/.+/)(\d+)$', release_id)
|
||||
if not match:
|
||||
self._log.debug(u'Not a valid Beatport release ID.')
|
||||
return None
|
||||
release = self.client.get_release(match.group(2))
|
||||
album = self._get_album_info(release)
|
||||
return album
|
||||
if release:
|
||||
return self._get_album_info(release)
|
||||
return None
|
||||
|
||||
def track_for_id(self, track_id):
|
||||
"""Fetches a track by its Beatport ID and returns a TrackInfo object
|
||||
or None if the track is not found.
|
||||
or None if the track is not a valid Beatport ID or track is not found.
|
||||
"""
|
||||
self._log.debug(u'Searching for track {0}', track_id)
|
||||
match = re.search(r'(^|beatport\.com/track/.+/)(\d+)$', track_id)
|
||||
if not match:
|
||||
self._log.debug(u'Not a valid Beatport track ID.')
|
||||
return None
|
||||
bp_track = self.client.get_track(match.group(2))
|
||||
track = self._get_track_info(bp_track)
|
||||
return track
|
||||
if bp_track is not None:
|
||||
return self._get_track_info(bp_track)
|
||||
return None
|
||||
|
||||
def _get_releases(self, query):
|
||||
"""Returns a list of AlbumInfo objects for a beatport search query.
|
||||
|
|
@ -419,7 +440,8 @@ class BeatportPlugin(BeetsPlugin):
|
|||
day=release.release_date.day,
|
||||
label=release.label_name,
|
||||
catalognum=release.catalog_number, media=u'Digital',
|
||||
data_source=u'Beatport', data_url=release.url)
|
||||
data_source=self.data_source, data_url=release.url,
|
||||
genre=release.genre)
|
||||
|
||||
def _get_track_info(self, track):
|
||||
"""Returns a TrackInfo object for a Beatport Track object.
|
||||
|
|
@ -433,25 +455,17 @@ class BeatportPlugin(BeetsPlugin):
|
|||
artist=artist, artist_id=artist_id,
|
||||
length=length, index=track.track_number,
|
||||
medium_index=track.track_number,
|
||||
data_source=u'Beatport', data_url=track.url)
|
||||
data_source=self.data_source, data_url=track.url,
|
||||
bpm=track.bpm, initial_key=track.initial_key,
|
||||
genre=track.genre)
|
||||
|
||||
def _get_artist(self, artists):
|
||||
"""Returns an artist string (all artists) and an artist_id (the main
|
||||
artist) for a list of Beatport release or track artists.
|
||||
"""
|
||||
artist_id = None
|
||||
bits = []
|
||||
for artist in artists:
|
||||
if not artist_id:
|
||||
artist_id = artist[0]
|
||||
name = artist[1]
|
||||
# Strip disambiguation number.
|
||||
name = re.sub(r' \(\d+\)$', '', name)
|
||||
# Move articles to the front.
|
||||
name = re.sub(r'^(.*?), (a|an|the)$', r'\2 \1', name, flags=re.I)
|
||||
bits.append(name)
|
||||
artist = ', '.join(bits).replace(' ,', ',') or None
|
||||
return artist, artist_id
|
||||
return MetadataSourcePlugin.get_artist(
|
||||
artists=artists, id_key=0, name_key=1
|
||||
)
|
||||
|
||||
def _get_tracks(self, query):
|
||||
"""Returns a list of TrackInfo objects for a Beatport query.
|
||||
|
|
|
|||
|
|
@ -639,6 +639,8 @@ class BaseServer(object):
|
|||
self.playlist.pop(old_index)
|
||||
if self.current_index > old_index:
|
||||
self.current_index -= 1
|
||||
self.playlist_version += 1
|
||||
self._send_event("playlist")
|
||||
if self.current_index >= len(self.playlist):
|
||||
# Fallen off the end. Move to stopped state or loop.
|
||||
if self.repeat:
|
||||
|
|
|
|||
|
|
@ -64,7 +64,8 @@ class GstPlayer(object):
|
|||
"""
|
||||
|
||||
# Set up the Gstreamer player. From the pygst tutorial:
|
||||
# http://pygstdocs.berlios.de/pygst-tutorial/playbin.html
|
||||
# https://pygstdocs.berlios.de/pygst-tutorial/playbin.html (gone)
|
||||
# https://brettviren.github.io/pygst-tutorial-org/pygst-tutorial.html
|
||||
####
|
||||
# Updated to GStreamer 1.0 with:
|
||||
# https://wiki.ubuntu.com/Novacut/GStreamer1.0
|
||||
|
|
|
|||
188
beetsplug/bpsync.py
Normal file
|
|
@ -0,0 +1,188 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# This file is part of beets.
|
||||
# Copyright 2019, Rahul Ahuja.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
|
||||
"""Update library's tags using Beatport.
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from beets.plugins import BeetsPlugin, apply_item_changes
|
||||
from beets import autotag, library, ui, util
|
||||
|
||||
from .beatport import BeatportPlugin
|
||||
|
||||
|
||||
class BPSyncPlugin(BeetsPlugin):
|
||||
def __init__(self):
|
||||
super(BPSyncPlugin, self).__init__()
|
||||
self.beatport_plugin = BeatportPlugin()
|
||||
self.beatport_plugin.setup()
|
||||
|
||||
def commands(self):
|
||||
cmd = ui.Subcommand('bpsync', help=u'update metadata from Beatport')
|
||||
cmd.parser.add_option(
|
||||
u'-p',
|
||||
u'--pretend',
|
||||
action='store_true',
|
||||
help=u'show all changes but do nothing',
|
||||
)
|
||||
cmd.parser.add_option(
|
||||
u'-m',
|
||||
u'--move',
|
||||
action='store_true',
|
||||
dest='move',
|
||||
help=u"move files in the library directory",
|
||||
)
|
||||
cmd.parser.add_option(
|
||||
u'-M',
|
||||
u'--nomove',
|
||||
action='store_false',
|
||||
dest='move',
|
||||
help=u"don't move files in library",
|
||||
)
|
||||
cmd.parser.add_option(
|
||||
u'-W',
|
||||
u'--nowrite',
|
||||
action='store_false',
|
||||
default=None,
|
||||
dest='write',
|
||||
help=u"don't write updated metadata to files",
|
||||
)
|
||||
cmd.parser.add_format_option()
|
||||
cmd.func = self.func
|
||||
return [cmd]
|
||||
|
||||
def func(self, lib, opts, args):
|
||||
"""Command handler for the bpsync function.
|
||||
"""
|
||||
move = ui.should_move(opts.move)
|
||||
pretend = opts.pretend
|
||||
write = ui.should_write(opts.write)
|
||||
query = ui.decargs(args)
|
||||
|
||||
self.singletons(lib, query, move, pretend, write)
|
||||
self.albums(lib, query, move, pretend, write)
|
||||
|
||||
def singletons(self, lib, query, move, pretend, write):
|
||||
"""Retrieve and apply info from the autotagger for items matched by
|
||||
query.
|
||||
"""
|
||||
for item in lib.items(query + [u'singleton:true']):
|
||||
if not item.mb_trackid:
|
||||
self._log.info(
|
||||
u'Skipping singleton with no mb_trackid: {}', item
|
||||
)
|
||||
continue
|
||||
|
||||
if not self.is_beatport_track(item):
|
||||
self._log.info(
|
||||
u'Skipping non-{} singleton: {}',
|
||||
self.beatport_plugin.data_source,
|
||||
item,
|
||||
)
|
||||
continue
|
||||
|
||||
# Apply.
|
||||
trackinfo = self.beatport_plugin.track_for_id(item.mb_trackid)
|
||||
with lib.transaction():
|
||||
autotag.apply_item_metadata(item, trackinfo)
|
||||
apply_item_changes(lib, item, move, pretend, write)
|
||||
|
||||
@staticmethod
|
||||
def is_beatport_track(item):
|
||||
return (
|
||||
item.get('data_source') == BeatportPlugin.data_source
|
||||
and item.mb_trackid.isnumeric()
|
||||
)
|
||||
|
||||
def get_album_tracks(self, album):
|
||||
if not album.mb_albumid:
|
||||
self._log.info(u'Skipping album with no mb_albumid: {}', album)
|
||||
return False
|
||||
if not album.mb_albumid.isnumeric():
|
||||
self._log.info(
|
||||
u'Skipping album with invalid {} ID: {}',
|
||||
self.beatport_plugin.data_source,
|
||||
album,
|
||||
)
|
||||
return False
|
||||
items = list(album.items())
|
||||
if album.get('data_source') == self.beatport_plugin.data_source:
|
||||
return items
|
||||
if not all(self.is_beatport_track(item) for item in items):
|
||||
self._log.info(
|
||||
u'Skipping non-{} release: {}',
|
||||
self.beatport_plugin.data_source,
|
||||
album,
|
||||
)
|
||||
return False
|
||||
return items
|
||||
|
||||
def albums(self, lib, query, move, pretend, write):
|
||||
"""Retrieve and apply info from the autotagger for albums matched by
|
||||
query and their items.
|
||||
"""
|
||||
# Process matching albums.
|
||||
for album in lib.albums(query):
|
||||
# Do we have a valid Beatport album?
|
||||
items = self.get_album_tracks(album)
|
||||
if not items:
|
||||
continue
|
||||
|
||||
# Get the Beatport album information.
|
||||
albuminfo = self.beatport_plugin.album_for_id(album.mb_albumid)
|
||||
if not albuminfo:
|
||||
self._log.info(
|
||||
u'Release ID {} not found for album {}',
|
||||
album.mb_albumid,
|
||||
album,
|
||||
)
|
||||
continue
|
||||
|
||||
beatport_trackid_to_trackinfo = {
|
||||
track.track_id: track for track in albuminfo.tracks
|
||||
}
|
||||
library_trackid_to_item = {
|
||||
int(item.mb_trackid): item for item in items
|
||||
}
|
||||
item_to_trackinfo = {
|
||||
item: beatport_trackid_to_trackinfo[track_id]
|
||||
for track_id, item in library_trackid_to_item.items()
|
||||
}
|
||||
|
||||
self._log.info(u'applying changes to {}', album)
|
||||
with lib.transaction():
|
||||
autotag.apply_metadata(albuminfo, item_to_trackinfo)
|
||||
changed = False
|
||||
# Find any changed item to apply Beatport changes to album.
|
||||
any_changed_item = items[0]
|
||||
for item in items:
|
||||
item_changed = ui.show_model_changes(item)
|
||||
changed |= item_changed
|
||||
if item_changed:
|
||||
any_changed_item = item
|
||||
apply_item_changes(lib, item, move, pretend, write)
|
||||
|
||||
if pretend or not changed:
|
||||
continue
|
||||
|
||||
# Update album structure to reflect an item in it.
|
||||
for key in library.Album.item_keys:
|
||||
album[key] = any_changed_item[key]
|
||||
album.store()
|
||||
|
||||
# Move album art (and any inconsistent items).
|
||||
if move and lib.directory in util.ancestry(items[0].path):
|
||||
self._log.debug(u'moving album {}', album)
|
||||
album.move()
|
||||
|
|
@ -191,7 +191,7 @@ class AcoustidPlugin(plugins.BeetsPlugin):
|
|||
dist.add_expr('track_id', info.track_id not in recording_ids)
|
||||
return dist
|
||||
|
||||
def candidates(self, items, artist, album, va_likely):
|
||||
def candidates(self, items, artist, album, va_likely, extra_tags=None):
|
||||
albums = []
|
||||
for relid in prefix(_all_releases(items), MAX_RELEASES):
|
||||
album = hooks.album_for_mbid(relid)
|
||||
|
|
|
|||
|
|
@ -114,6 +114,8 @@ class ConvertPlugin(BeetsPlugin):
|
|||
self.config.add({
|
||||
u'dest': None,
|
||||
u'pretend': False,
|
||||
u'link': False,
|
||||
u'hardlink': False,
|
||||
u'threads': util.cpu_count(),
|
||||
u'format': u'mp3',
|
||||
u'id3v23': u'inherit',
|
||||
|
|
@ -167,6 +169,13 @@ class ConvertPlugin(BeetsPlugin):
|
|||
help=u'set the target format of the tracks')
|
||||
cmd.parser.add_option('-y', '--yes', action='store_true', dest='yes',
|
||||
help=u'do not ask for confirmation')
|
||||
cmd.parser.add_option('-l', '--link', action='store_true', dest='link',
|
||||
help=u'symlink files that do not \
|
||||
need transcoding.')
|
||||
cmd.parser.add_option('-H', '--hardlink', action='store_true',
|
||||
dest='hardlink',
|
||||
help=u'hardlink files that do not \
|
||||
need transcoding. Overrides --link.')
|
||||
cmd.parser.add_album_option()
|
||||
cmd.func = self.convert_func
|
||||
return [cmd]
|
||||
|
|
@ -251,7 +260,7 @@ class ConvertPlugin(BeetsPlugin):
|
|||
util.displayable_path(source))
|
||||
|
||||
def convert_item(self, dest_dir, keep_new, path_formats, fmt,
|
||||
pretend=False):
|
||||
pretend=False, link=False, hardlink=False):
|
||||
"""A pipeline thread that converts `Item` objects from a
|
||||
library.
|
||||
"""
|
||||
|
|
@ -299,20 +308,35 @@ class ConvertPlugin(BeetsPlugin):
|
|||
util.move(item.path, original)
|
||||
|
||||
if should_transcode(item, fmt):
|
||||
linked = False
|
||||
try:
|
||||
self.encode(command, original, converted, pretend)
|
||||
except subprocess.CalledProcessError:
|
||||
continue
|
||||
else:
|
||||
linked = link or hardlink
|
||||
if pretend:
|
||||
self._log.info(u'cp {0} {1}',
|
||||
msg = 'ln' if hardlink else ('ln -s' if link else 'cp')
|
||||
|
||||
self._log.info(u'{2} {0} {1}',
|
||||
util.displayable_path(original),
|
||||
util.displayable_path(converted))
|
||||
util.displayable_path(converted),
|
||||
msg)
|
||||
else:
|
||||
# No transcoding necessary.
|
||||
self._log.info(u'Copying {0}',
|
||||
util.displayable_path(item.path))
|
||||
util.copy(original, converted)
|
||||
msg = 'Hardlinking' if hardlink \
|
||||
else ('Linking' if link else 'Copying')
|
||||
|
||||
self._log.info(u'{1} {0}',
|
||||
util.displayable_path(item.path),
|
||||
msg)
|
||||
|
||||
if hardlink:
|
||||
util.hardlink(original, converted)
|
||||
elif link:
|
||||
util.link(original, converted)
|
||||
else:
|
||||
util.copy(original, converted)
|
||||
|
||||
if pretend:
|
||||
continue
|
||||
|
|
@ -331,7 +355,7 @@ class ConvertPlugin(BeetsPlugin):
|
|||
item.read()
|
||||
item.store() # Store new path and audio data.
|
||||
|
||||
if self.config['embed']:
|
||||
if self.config['embed'] and not linked:
|
||||
album = item._cached_album
|
||||
if album and album.artpath:
|
||||
self._log.debug(u'embedding album art from {}',
|
||||
|
|
@ -346,7 +370,8 @@ class ConvertPlugin(BeetsPlugin):
|
|||
plugins.send('after_convert', item=item,
|
||||
dest=converted, keepnew=False)
|
||||
|
||||
def copy_album_art(self, album, dest_dir, path_formats, pretend=False):
|
||||
def copy_album_art(self, album, dest_dir, path_formats, pretend=False,
|
||||
link=False, hardlink=False):
|
||||
"""Copies or converts the associated cover art of the album. Album must
|
||||
have at least one track.
|
||||
"""
|
||||
|
|
@ -400,14 +425,26 @@ class ConvertPlugin(BeetsPlugin):
|
|||
ArtResizer.shared.resize(maxwidth, album.artpath, dest)
|
||||
else:
|
||||
if pretend:
|
||||
self._log.info(u'cp {0} {1}',
|
||||
msg = 'ln' if hardlink else ('ln -s' if link else 'cp')
|
||||
|
||||
self._log.info(u'{2} {0} {1}',
|
||||
util.displayable_path(album.artpath),
|
||||
util.displayable_path(dest))
|
||||
util.displayable_path(dest),
|
||||
msg)
|
||||
else:
|
||||
self._log.info(u'Copying cover art from {0} to {1}',
|
||||
msg = 'Hardlinking' if hardlink \
|
||||
else ('Linking' if link else 'Copying')
|
||||
|
||||
self._log.info(u'{2} cover art from {0} to {1}',
|
||||
util.displayable_path(album.artpath),
|
||||
util.displayable_path(dest))
|
||||
util.copy(album.artpath, dest)
|
||||
util.displayable_path(dest),
|
||||
msg)
|
||||
if hardlink:
|
||||
util.hardlink(album.artpath, dest)
|
||||
elif link:
|
||||
util.link(album.artpath, dest)
|
||||
else:
|
||||
util.copy(album.artpath, dest)
|
||||
|
||||
def convert_func(self, lib, opts, args):
|
||||
dest = opts.dest or self.config['dest'].get()
|
||||
|
|
@ -426,6 +463,16 @@ class ConvertPlugin(BeetsPlugin):
|
|||
else:
|
||||
pretend = self.config['pretend'].get(bool)
|
||||
|
||||
if opts.hardlink is not None:
|
||||
hardlink = opts.hardlink
|
||||
link = False
|
||||
elif opts.link is not None:
|
||||
hardlink = False
|
||||
link = opts.link
|
||||
else:
|
||||
hardlink = self.config['hardlink'].get(bool)
|
||||
link = self.config['link'].get(bool)
|
||||
|
||||
if opts.album:
|
||||
albums = lib.albums(ui.decargs(args))
|
||||
items = [i for a in albums for i in a.items()]
|
||||
|
|
@ -446,13 +493,16 @@ class ConvertPlugin(BeetsPlugin):
|
|||
|
||||
if opts.album and self.config['copy_album_art']:
|
||||
for album in albums:
|
||||
self.copy_album_art(album, dest, path_formats, pretend)
|
||||
self.copy_album_art(album, dest, path_formats, pretend,
|
||||
link, hardlink)
|
||||
|
||||
convert = [self.convert_item(dest,
|
||||
opts.keep_new,
|
||||
path_formats,
|
||||
fmt,
|
||||
pretend)
|
||||
pretend,
|
||||
link,
|
||||
hardlink)
|
||||
for _ in range(threads)]
|
||||
pipe = util.pipeline.Pipeline([iter(items), convert])
|
||||
pipe.run_parallel()
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ class CuePlugin(BeetsPlugin):
|
|||
|
||||
# self.register_listener('import_task_start', self.look_for_cues)
|
||||
|
||||
def candidates(self, items, artist, album, va_likely):
|
||||
def candidates(self, items, artist, album, va_likely, extra_tags=None):
|
||||
import pdb
|
||||
pdb.set_trace()
|
||||
|
||||
|
|
@ -53,5 +53,6 @@ class CuePlugin(BeetsPlugin):
|
|||
title = "dunno lol"
|
||||
track_id = "wtf"
|
||||
index = int(path.basename(t)[len("split-track"):-len(".wav")])
|
||||
yield TrackInfo(title, track_id, index=index, artist=artist)
|
||||
yield TrackInfo(title=title, track_id=track_id, index=index,
|
||||
artist=artist)
|
||||
# generate TrackInfo instances
233
beetsplug/deezer.py
Normal file
@ -0,0 +1,233 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# This file is part of beets.
|
||||
# Copyright 2019, Rahul Ahuja.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
|
||||
"""Adds Deezer release and track search support to the autotagger
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, division
|
||||
|
||||
import collections
|
||||
|
||||
import six
|
||||
import unidecode
|
||||
import requests
|
||||
|
||||
from beets import ui
|
||||
from beets.autotag import AlbumInfo, TrackInfo
|
||||
from beets.plugins import MetadataSourcePlugin, BeetsPlugin
|
||||
|
||||
|
||||
class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin):
|
||||
data_source = 'Deezer'
|
||||
|
||||
# Base URLs for the Deezer API
|
||||
# Documentation: https://developers.deezer.com/api/
|
||||
search_url = 'https://api.deezer.com/search/'
|
||||
album_url = 'https://api.deezer.com/album/'
|
||||
track_url = 'https://api.deezer.com/track/'
|
||||
|
||||
id_regex = {
|
||||
'pattern': r'(^|deezer\.com/)([a-z]*/)?({}/)?(\d+)',
|
||||
'match_group': 4,
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
super(DeezerPlugin, self).__init__()
|
||||
|
||||
def album_for_id(self, album_id):
|
||||
"""Fetch an album by its Deezer ID or URL and return an
|
||||
AlbumInfo object or None if the album is not found.
|
||||
|
||||
:param album_id: Deezer ID or URL for the album.
|
||||
:type album_id: str
|
||||
:return: AlbumInfo object for album.
|
||||
:rtype: beets.autotag.hooks.AlbumInfo or None
|
||||
"""
|
||||
deezer_id = self._get_id('album', album_id)
|
||||
if deezer_id is None:
|
||||
return None
|
||||
|
||||
album_data = requests.get(self.album_url + deezer_id).json()
|
||||
artist, artist_id = self.get_artist(album_data['contributors'])
|
||||
|
||||
release_date = album_data['release_date']
|
||||
date_parts = [int(part) for part in release_date.split('-')]
|
||||
num_date_parts = len(date_parts)
|
||||
|
||||
if num_date_parts == 3:
|
||||
year, month, day = date_parts
|
||||
elif num_date_parts == 2:
|
||||
year, month = date_parts
|
||||
day = None
|
||||
elif num_date_parts == 1:
|
||||
year = date_parts[0]
|
||||
month = None
|
||||
day = None
|
||||
else:
|
||||
raise ui.UserError(
|
||||
u"Invalid `release_date` returned "
|
||||
u"by {} API: '{}'".format(self.data_source, release_date)
|
||||
)
|
||||
|
||||
tracks_data = requests.get(
|
||||
self.album_url + deezer_id + '/tracks'
|
||||
).json()['data']
|
||||
if not tracks_data:
|
||||
return None
|
||||
tracks = []
|
||||
medium_totals = collections.defaultdict(int)
|
||||
for i, track_data in enumerate(tracks_data, start=1):
|
||||
track = self._get_track(track_data)
|
||||
track.index = i
|
||||
medium_totals[track.medium] += 1
|
||||
tracks.append(track)
|
||||
for track in tracks:
|
||||
track.medium_total = medium_totals[track.medium]
|
||||
|
||||
return AlbumInfo(
|
||||
album=album_data['title'],
|
||||
album_id=deezer_id,
|
||||
artist=artist,
|
||||
artist_credit=self.get_artist([album_data['artist']])[0],
|
||||
artist_id=artist_id,
|
||||
tracks=tracks,
|
||||
albumtype=album_data['record_type'],
|
||||
va=len(album_data['contributors']) == 1
|
||||
and artist.lower() == 'various artists',
|
||||
year=year,
|
||||
month=month,
|
||||
day=day,
|
||||
label=album_data['label'],
|
||||
mediums=max(medium_totals.keys()),
|
||||
data_source=self.data_source,
|
||||
data_url=album_data['link'],
|
||||
)
|
||||
|
||||
def _get_track(self, track_data):
|
||||
"""Convert a Deezer track object dict to a TrackInfo object.
|
||||
|
||||
:param track_data: Deezer Track object dict
|
||||
:type track_data: dict
|
||||
:return: TrackInfo object for track
|
||||
:rtype: beets.autotag.hooks.TrackInfo
|
||||
"""
|
||||
artist, artist_id = self.get_artist(
|
||||
track_data.get('contributors', [track_data['artist']])
|
||||
)
|
||||
return TrackInfo(
|
||||
title=track_data['title'],
|
||||
track_id=track_data['id'],
|
||||
artist=artist,
|
||||
artist_id=artist_id,
|
||||
length=track_data['duration'],
|
||||
index=track_data['track_position'],
|
||||
medium=track_data['disk_number'],
|
||||
medium_index=track_data['track_position'],
|
||||
data_source=self.data_source,
|
||||
data_url=track_data['link'],
|
||||
)
|
||||
|
||||
def track_for_id(self, track_id=None, track_data=None):
|
||||
"""Fetch a track by its Deezer ID or URL and return a
|
||||
TrackInfo object or None if the track is not found.
|
||||
|
||||
:param track_id: (Optional) Deezer ID or URL for the track. Either
|
||||
``track_id`` or ``track_data`` must be provided.
|
||||
:type track_id: str
|
||||
:param track_data: (Optional) Simplified track object dict. May be
|
||||
provided instead of ``track_id`` to avoid unnecessary API calls.
|
||||
:type track_data: dict
|
||||
:return: TrackInfo object for track
|
||||
:rtype: beets.autotag.hooks.TrackInfo or None
|
||||
"""
|
||||
if track_data is None:
|
||||
deezer_id = self._get_id('track', track_id)
|
||||
if deezer_id is None:
|
||||
return None
|
||||
track_data = requests.get(self.track_url + deezer_id).json()
|
||||
track = self._get_track(track_data)
|
||||
|
||||
# Get album's tracks to set `track.index` (position on the entire
|
||||
# release) and `track.medium_total` (total number of tracks on
|
||||
# the track's disc).
|
||||
album_tracks_data = requests.get(
|
||||
self.album_url + str(track_data['album']['id']) + '/tracks'
|
||||
).json()['data']
|
||||
medium_total = 0
|
||||
for i, track_data in enumerate(album_tracks_data, start=1):
|
||||
if track_data['disk_number'] == track.medium:
|
||||
medium_total += 1
|
||||
if track_data['id'] == track.track_id:
|
||||
track.index = i
|
||||
track.medium_total = medium_total
|
||||
return track
|
||||
|
||||
@staticmethod
|
||||
def _construct_search_query(filters=None, keywords=''):
|
||||
"""Construct a query string with the specified filters and keywords to
|
||||
be provided to the Deezer Search API
|
||||
(https://developers.deezer.com/api/search).
|
||||
|
||||
:param filters: (Optional) Field filters to apply.
|
||||
:type filters: dict
|
||||
:param keywords: (Optional) Query keywords to use.
|
||||
:type keywords: str
|
||||
:return: Query string to be provided to the Search API.
|
||||
:rtype: str
|
||||
"""
|
||||
query_components = [
|
||||
keywords,
|
||||
' '.join('{}:"{}"'.format(k, v) for k, v in filters.items()),
|
||||
]
|
||||
query = ' '.join([q for q in query_components if q])
|
||||
if not isinstance(query, six.text_type):
|
||||
query = query.decode('utf8')
|
||||
return unidecode.unidecode(query)
|
||||
|
||||
def _search_api(self, query_type, filters=None, keywords=''):
|
||||
"""Query the Deezer Search API for the specified ``keywords``, applying
|
||||
the provided ``filters``.
|
||||
|
||||
:param query_type: The Deezer Search API method to use. Valid types
|
||||
are: 'album', 'artist', 'history', 'playlist', 'podcast',
|
||||
'radio', 'track', and 'user'.
|
||||
:type query_type: str
|
||||
:param filters: (Optional) Field filters to apply.
|
||||
:type filters: dict
|
||||
:param keywords: (Optional) Query keywords to use.
|
||||
:type keywords: str
|
||||
:return: JSON data for the class:`Response <Response>` object or None
|
||||
if no search results are returned.
|
||||
:rtype: dict or None
|
||||
"""
|
||||
query = self._construct_search_query(
|
||||
keywords=keywords, filters=filters
|
||||
)
|
||||
if not query:
|
||||
return None
|
||||
self._log.debug(
|
||||
u"Searching {} for '{}'".format(self.data_source, query)
|
||||
)
|
||||
response = requests.get(
|
||||
self.search_url + query_type, params={'q': query}
|
||||
)
|
||||
response.raise_for_status()
|
||||
response_data = response.json().get('data', [])
|
||||
self._log.debug(
|
||||
u"Found {} result(s) from {} for '{}'",
|
||||
len(response_data),
|
||||
self.data_source,
|
||||
query,
|
||||
)
|
||||
return response_data
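The _construct_search_query helper above joins free-text keywords with field:"value" filters and transliterates the result to ASCII. A minimal standalone sketch of that behaviour, with invented example values:

    import unidecode

    def construct_search_query(filters=None, keywords=''):
        # Keywords first, then field:"value" pairs, then strip accents.
        filters = filters or {}
        parts = [keywords,
                 ' '.join('{}:"{}"'.format(k, v) for k, v in filters.items())]
        query = ' '.join(part for part in parts if part)
        return unidecode.unidecode(query)

    # construct_search_query({'artist': 'Björk'}, 'Homogenic')
    # -> 'Homogenic artist:"Bjork"'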
@ -20,8 +20,8 @@ from __future__ import division, absolute_import, print_function
|
|||
|
||||
import beets.ui
|
||||
from beets import config
|
||||
from beets.autotag.hooks import AlbumInfo, TrackInfo, Distance
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beets.autotag.hooks import AlbumInfo, TrackInfo
|
||||
from beets.plugins import MetadataSourcePlugin, BeetsPlugin, get_distance
|
||||
import confuse
|
||||
from discogs_client import Release, Master, Client
|
||||
from discogs_client.exceptions import DiscogsAPIError
|
||||
|
|
@ -37,7 +37,9 @@ import traceback
|
|||
from string import ascii_lowercase
|
||||
|
||||
|
||||
USER_AGENT = u'beets/{0} +http://beets.io/'.format(beets.__version__)
|
||||
USER_AGENT = u'beets/{0} +https://beets.io/'.format(beets.__version__)
|
||||
API_KEY = 'rAzVUQYRaoFjeBjyWuWZ'
|
||||
API_SECRET = 'plxtUTqoCzwxZpqdPysCwGuBSmZNdZVy'
|
||||
|
||||
# Exceptions that discogs_client should really handle but does not.
|
||||
CONNECTION_ERRORS = (ConnectionError, socket.error, http_client.HTTPException,
|
||||
|
|
@ -50,11 +52,13 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
def __init__(self):
|
||||
super(DiscogsPlugin, self).__init__()
|
||||
self.config.add({
|
||||
'apikey': 'rAzVUQYRaoFjeBjyWuWZ',
|
||||
'apisecret': 'plxtUTqoCzwxZpqdPysCwGuBSmZNdZVy',
|
||||
'apikey': API_KEY,
|
||||
'apisecret': API_SECRET,
|
||||
'tokenfile': 'discogs_token.json',
|
||||
'source_weight': 0.5,
|
||||
'user_token': '',
|
||||
'separator': u', ',
|
||||
'index_tracks': False,
|
||||
})
|
||||
self.config['apikey'].redact = True
|
||||
self.config['apisecret'].redact = True
|
||||
|
|
@ -156,12 +160,22 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
def album_distance(self, items, album_info, mapping):
|
||||
"""Returns the album distance.
|
||||
"""
|
||||
dist = Distance()
|
||||
if album_info.data_source == 'Discogs':
|
||||
dist.add('source', self.config['source_weight'].as_number())
|
||||
return dist
|
||||
return get_distance(
|
||||
data_source='Discogs',
|
||||
info=album_info,
|
||||
config=self.config
|
||||
)
|
||||
|
||||
def candidates(self, items, artist, album, va_likely):
|
||||
def track_distance(self, item, track_info):
|
||||
"""Returns the track distance.
|
||||
"""
|
||||
return get_distance(
|
||||
data_source='Discogs',
|
||||
info=track_info,
|
||||
config=self.config
|
||||
)
|
||||
|
||||
def candidates(self, items, artist, album, va_likely, extra_tags=None):
|
||||
"""Returns a list of AlbumInfo objects for discogs search results
|
||||
matching an album and artist (if not various).
|
||||
"""
|
||||
|
|
@ -207,7 +221,8 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
getattr(result, 'title')
|
||||
except DiscogsAPIError as e:
|
||||
if e.status_code != 404:
|
||||
self._log.debug(u'API Error: {0} (query: {1})', e, result._uri)
|
||||
self._log.debug(u'API Error: {0} (query: {1})', e,
|
||||
result.data['resource_url'])
|
||||
if e.status_code == 401:
|
||||
self.reset_auth()
|
||||
return self.album_for_id(album_id)
|
||||
|
|
@ -259,7 +274,8 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
return year
|
||||
except DiscogsAPIError as e:
|
||||
if e.status_code != 404:
|
||||
self._log.debug(u'API Error: {0} (query: {1})', e, result._uri)
|
||||
self._log.debug(u'API Error: {0} (query: {1})', e,
|
||||
result.data['resource_url'])
|
||||
if e.status_code == 401:
|
||||
self.reset_auth()
|
||||
return self.get_master_year(master_id)
|
||||
|
|
@ -287,7 +303,9 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
self._log.warning(u"Release does not contain the required fields")
|
||||
return None
|
||||
|
||||
artist, artist_id = self.get_artist([a.data for a in result.artists])
|
||||
artist, artist_id = MetadataSourcePlugin.get_artist(
|
||||
[a.data for a in result.artists]
|
||||
)
|
||||
album = re.sub(r' +', ' ', result.title)
|
||||
album_id = result.data['id']
|
||||
# Use `.data` to access the tracklist directly instead of the
|
||||
|
|
@ -302,10 +320,13 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
mediums = [t.medium for t in tracks]
|
||||
country = result.data.get('country')
|
||||
data_url = result.data.get('uri')
|
||||
style = self.format(result.data.get('styles'))
|
||||
genre = self.format(result.data.get('genres'))
|
||||
discogs_albumid = self.extract_release_id(result.data.get('uri'))
|
||||
|
||||
# Extract information for the optional AlbumInfo fields that are
|
||||
# contained on nested discogs fields.
|
||||
albumtype = media = label = catalogno = None
|
||||
albumtype = media = label = catalogno = labelid = None
|
||||
if result.data.get('formats'):
|
||||
albumtype = ', '.join(
|
||||
result.data['formats'][0].get('descriptions', [])) or None
|
||||
|
|
@ -313,6 +334,7 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
if result.data.get('labels'):
|
||||
label = result.data['labels'][0].get('name')
|
||||
catalogno = result.data['labels'][0].get('catno')
|
||||
labelid = result.data['labels'][0].get('id')
|
||||
|
||||
# Additional cleanups (various artists name, catalog number, media).
|
||||
if va:
|
||||
|
|
@ -334,36 +356,29 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
# a master release, otherwise fetch the master release.
|
||||
original_year = self.get_master_year(master_id) if master_id else year
|
||||
|
||||
return AlbumInfo(album, album_id, artist, artist_id, tracks, asin=None,
|
||||
albumtype=albumtype, va=va, year=year, month=None,
|
||||
day=None, label=label, mediums=len(set(mediums)),
|
||||
artist_sort=None, releasegroup_id=master_id,
|
||||
catalognum=catalogno, script=None, language=None,
|
||||
country=country, albumstatus=None, media=media,
|
||||
albumdisambig=None, artist_credit=None,
|
||||
original_year=original_year, original_month=None,
|
||||
original_day=None, data_source='Discogs',
|
||||
data_url=data_url)
|
||||
return AlbumInfo(album=album, album_id=album_id, artist=artist,
|
||||
artist_id=artist_id, tracks=tracks,
|
||||
albumtype=albumtype, va=va, year=year,
|
||||
label=label, mediums=len(set(mediums)),
|
||||
releasegroup_id=master_id, catalognum=catalogno,
|
||||
country=country, style=style, genre=genre,
|
||||
media=media, original_year=original_year,
|
||||
data_source='Discogs', data_url=data_url,
|
||||
discogs_albumid=discogs_albumid,
|
||||
discogs_labelid=labelid, discogs_artistid=artist_id)
|
||||
|
||||
def get_artist(self, artists):
|
||||
"""Returns an artist string (all artists) and an artist_id (the main
|
||||
artist) for a list of discogs album or track artists.
|
||||
"""
|
||||
artist_id = None
|
||||
bits = []
|
||||
for i, artist in enumerate(artists):
|
||||
if not artist_id:
|
||||
artist_id = artist['id']
|
||||
name = artist['name']
|
||||
# Strip disambiguation number.
|
||||
name = re.sub(r' \(\d+\)$', '', name)
|
||||
# Move articles to the front.
|
||||
name = re.sub(r'(?i)^(.*?), (a|an|the)$', r'\2 \1', name)
|
||||
bits.append(name)
|
||||
if artist['join'] and i < len(artists) - 1:
|
||||
bits.append(artist['join'])
|
||||
artist = ' '.join(bits).replace(' ,', ',') or None
|
||||
return artist, artist_id
|
||||
def format(self, classification):
|
||||
if classification:
|
||||
return self.config['separator'].as_str() \
|
||||
.join(sorted(classification))
|
||||
else:
|
||||
return None
|
||||
|
||||
def extract_release_id(self, uri):
|
||||
if uri:
|
||||
return uri.split("/")[-1]
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_tracks(self, tracklist):
|
||||
"""Returns a list of TrackInfo objects for a discogs tracklist.
|
||||
|
|
@ -380,14 +395,28 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
tracks = []
|
||||
index_tracks = {}
|
||||
index = 0
|
||||
# Distinct works and intra-work divisions, as defined by index tracks.
|
||||
divisions, next_divisions = [], []
|
||||
for track in clean_tracklist:
|
||||
# Only real tracks have `position`. Otherwise, it's an index track.
|
||||
if track['position']:
|
||||
index += 1
|
||||
track_info = self.get_track_info(track, index)
|
||||
if next_divisions:
|
||||
# End of a block of index tracks: update the current
|
||||
# divisions.
|
||||
divisions += next_divisions
|
||||
del next_divisions[:]
|
||||
track_info = self.get_track_info(track, index, divisions)
|
||||
track_info.track_alt = track['position']
|
||||
tracks.append(track_info)
|
||||
else:
|
||||
next_divisions.append(track['title'])
|
||||
# We expect new levels of division at the beginning of the
|
||||
# tracklist (and possibly elsewhere).
|
||||
try:
|
||||
divisions.pop()
|
||||
except IndexError:
|
||||
pass
|
||||
index_tracks[index + 1] = track['title']
|
||||
|
||||
# Fix up medium and medium_index for each track. Discogs position is
|
||||
|
|
@ -522,18 +551,22 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
|
||||
return tracklist
|
||||
|
||||
def get_track_info(self, track, index):
|
||||
def get_track_info(self, track, index, divisions):
|
||||
"""Returns a TrackInfo object for a discogs track.
|
||||
"""
|
||||
title = track['title']
|
||||
if self.config['index_tracks']:
|
||||
prefix = ', '.join(divisions)
|
||||
title = ': '.join([prefix, title])
|
||||
track_id = None
|
||||
medium, medium_index, _ = self.get_track_index(track['position'])
|
||||
artist, artist_id = self.get_artist(track.get('artists', []))
|
||||
artist, artist_id = MetadataSourcePlugin.get_artist(
|
||||
track.get('artists', [])
|
||||
)
|
||||
length = self.get_track_length(track['duration'])
|
||||
return TrackInfo(title, track_id, artist=artist, artist_id=artist_id,
|
||||
length=length, index=index,
|
||||
medium=medium, medium_index=medium_index,
|
||||
artist_sort=None, disctitle=None, artist_credit=None)
|
||||
return TrackInfo(title=title, track_id=track_id, artist=artist,
|
||||
artist_id=artist_id, length=length, index=index,
|
||||
medium=medium, medium_index=medium_index)
|
||||
|
||||
def get_track_index(self, position):
|
||||
"""Returns the medium, medium index and subtrack index for a discogs
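When index_tracks is enabled, the division titles collected in get_tracks above are prefixed onto each real track title. A rough standalone sketch of the resulting formatting; the example titles are invented:

    def title_with_divisions(title, divisions):
        # divisions holds the work/act headings gathered from Discogs
        # index tracks; they are joined with ', ' and prefixed with ': '.
        if divisions:
            return ': '.join([', '.join(divisions), title])
        return title

    # title_with_divisions('Aria', ['Goldberg Variations'])
    # -> 'Goldberg Variations: Aria'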
@ -205,7 +205,7 @@ class DuplicatesPlugin(BeetsPlugin):
|
|||
u'computing checksum',
|
||||
key, displayable_path(item.path))
|
||||
try:
|
||||
checksum = command_output(args)
|
||||
checksum = command_output(args).stdout
|
||||
setattr(item, key, checksum)
|
||||
item.store()
|
||||
self._log.debug(u'computed checksum for {0} using {1}',
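Several hunks in this commit switch from util.command_output(args) to util.command_output(args).stdout, which suggests the helper now returns a result object exposing the captured streams rather than a bare string. A hypothetical sketch of such an interface, not the actual beets implementation:

    import collections
    import subprocess

    CommandOutput = collections.namedtuple('CommandOutput', ('stdout', 'stderr'))

    def command_output(cmd):
        # Capture both streams and raise on a non-zero exit status, so
        # callers can use command_output(cmd).stdout as in the diff.
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        if proc.returncode:
            raise subprocess.CalledProcessError(proc.returncode, cmd)
        return CommandOutput(stdout, stderr)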
@ -59,7 +59,8 @@ class EmbedCoverArtPlugin(BeetsPlugin):
|
|||
'auto': True,
|
||||
'compare_threshold': 0,
|
||||
'ifempty': False,
|
||||
'remove_art_file': False
|
||||
'remove_art_file': False,
|
||||
'quality': 0,
|
||||
})
|
||||
|
||||
if self.config['maxwidth'].get(int) and not ArtResizer.shared.local:
|
||||
|
|
@ -86,6 +87,7 @@ class EmbedCoverArtPlugin(BeetsPlugin):
|
|||
u"-y", u"--yes", action="store_true", help=u"skip confirmation"
|
||||
)
|
||||
maxwidth = self.config['maxwidth'].get(int)
|
||||
quality = self.config['quality'].get(int)
|
||||
compare_threshold = self.config['compare_threshold'].get(int)
|
||||
ifempty = self.config['ifempty'].get(bool)
|
||||
|
||||
|
|
@ -104,8 +106,9 @@ class EmbedCoverArtPlugin(BeetsPlugin):
|
|||
return
|
||||
|
||||
for item in items:
|
||||
art.embed_item(self._log, item, imagepath, maxwidth, None,
|
||||
compare_threshold, ifempty)
|
||||
art.embed_item(self._log, item, imagepath, maxwidth,
|
||||
None, compare_threshold, ifempty,
|
||||
quality=quality)
|
||||
else:
|
||||
albums = lib.albums(decargs(args))
|
||||
|
||||
|
|
@ -114,8 +117,9 @@ class EmbedCoverArtPlugin(BeetsPlugin):
|
|||
return
|
||||
|
||||
for album in albums:
|
||||
art.embed_album(self._log, album, maxwidth, False,
|
||||
compare_threshold, ifempty)
|
||||
art.embed_album(self._log, album, maxwidth,
|
||||
False, compare_threshold, ifempty,
|
||||
quality=quality)
|
||||
self.remove_artfile(album)
|
||||
|
||||
embed_cmd.func = embed_func
@ -18,8 +18,10 @@
|
|||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
import json
|
||||
import codecs
|
||||
import json
|
||||
import csv
|
||||
from xml.etree import ElementTree
|
||||
|
||||
from datetime import datetime, date
|
||||
from beets.plugins import BeetsPlugin
|
||||
|
|
@ -44,7 +46,7 @@ class ExportPlugin(BeetsPlugin):
|
|||
self.config.add({
|
||||
'default_format': 'json',
|
||||
'json': {
|
||||
# json module formatting options
|
||||
# JSON module formatting options.
|
||||
'formatting': {
|
||||
'ensure_ascii': False,
|
||||
'indent': 4,
|
||||
|
|
@ -52,6 +54,19 @@ class ExportPlugin(BeetsPlugin):
|
|||
'sort_keys': True
|
||||
}
|
||||
},
|
||||
'csv': {
|
||||
# CSV module formatting options.
|
||||
'formatting': {
|
||||
# The delimiter used to separate columns.
|
||||
'delimiter': ',',
|
||||
# The dialect to use when formatting the file output.
|
||||
'dialect': 'excel'
|
||||
}
|
||||
},
|
||||
'xml': {
|
||||
# XML module formatting options.
|
||||
'formatting': {}
|
||||
}
|
||||
# TODO: Use something like the edit plugin
|
||||
# 'item_fields': []
|
||||
})
|
||||
|
|
@ -78,17 +93,21 @@ class ExportPlugin(BeetsPlugin):
|
|||
u'-o', u'--output',
|
||||
help=u'path for the output file. If not given, will print the data'
|
||||
)
|
||||
cmd.parser.add_option(
|
||||
u'-f', u'--format', default='json',
|
||||
help=u"the output format: json (default), csv, or xml"
|
||||
)
|
||||
return [cmd]
|
||||
|
||||
def run(self, lib, opts, args):
|
||||
|
||||
file_path = opts.output
|
||||
file_format = self.config['default_format'].get(str)
|
||||
file_mode = 'a' if opts.append else 'w'
|
||||
file_format = opts.format or self.config['default_format'].get(str)
|
||||
format_options = self.config[file_format]['formatting'].get(dict)
|
||||
|
||||
export_format = ExportFormat.factory(
|
||||
file_format, **{
|
||||
file_type=file_format,
|
||||
**{
|
||||
'file_path': file_path,
|
||||
'file_mode': file_mode
|
||||
}
|
||||
|
|
@ -100,6 +119,7 @@ class ExportPlugin(BeetsPlugin):
|
|||
included_keys = []
|
||||
for keys in opts.included_keys:
|
||||
included_keys.extend(keys.split(','))
|
||||
|
||||
key_filter = make_key_filter(included_keys)
|
||||
|
||||
for data_emitter in data_collector(lib, ui.decargs(args)):
|
||||
|
|
@ -117,35 +137,69 @@ class ExportPlugin(BeetsPlugin):
|
|||
|
||||
class ExportFormat(object):
|
||||
"""The output format type"""
|
||||
|
||||
@classmethod
|
||||
def factory(cls, type, **kwargs):
|
||||
if type == "json":
|
||||
if kwargs['file_path']:
|
||||
return JsonFileFormat(**kwargs)
|
||||
else:
|
||||
return JsonPrintFormat()
|
||||
raise NotImplementedError()
|
||||
|
||||
def export(self, data, **kwargs):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class JsonPrintFormat(ExportFormat):
|
||||
"""Outputs to the console"""
|
||||
|
||||
def export(self, data, **kwargs):
|
||||
json.dump(data, sys.stdout, cls=ExportEncoder, **kwargs)
|
||||
|
||||
|
||||
class JsonFileFormat(ExportFormat):
|
||||
"""Saves in a json file"""
|
||||
|
||||
def __init__(self, file_path, file_mode=u'w', encoding=u'utf-8'):
|
||||
self.path = file_path
|
||||
self.mode = file_mode
|
||||
self.encoding = encoding
|
||||
# creates a file object to write/append or sets to stdout
|
||||
self.out_stream = codecs.open(self.path, self.mode, self.encoding) \
|
||||
if self.path else sys.stdout
|
||||
|
||||
@classmethod
|
||||
def factory(cls, file_type, **kwargs):
|
||||
if file_type == "json":
|
||||
return JsonFormat(**kwargs)
|
||||
elif file_type == "csv":
|
||||
return CSVFormat(**kwargs)
|
||||
elif file_type == "xml":
|
||||
return XMLFormat(**kwargs)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
def export(self, data, **kwargs):
|
||||
with codecs.open(self.path, self.mode, self.encoding) as f:
|
||||
json.dump(data, f, cls=ExportEncoder, **kwargs)
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class JsonFormat(ExportFormat):
|
||||
"""Saves in a json file"""
|
||||
def __init__(self, file_path, file_mode=u'w', encoding=u'utf-8'):
|
||||
super(JsonFormat, self).__init__(file_path, file_mode, encoding)
|
||||
|
||||
def export(self, data, **kwargs):
|
||||
json.dump(data, self.out_stream, cls=ExportEncoder, **kwargs)
|
||||
|
||||
|
||||
class CSVFormat(ExportFormat):
|
||||
"""Saves in a csv file"""
|
||||
def __init__(self, file_path, file_mode=u'w', encoding=u'utf-8'):
|
||||
super(CSVFormat, self).__init__(file_path, file_mode, encoding)
|
||||
|
||||
def export(self, data, **kwargs):
|
||||
header = list(data[0].keys()) if data else []
|
||||
writer = csv.DictWriter(self.out_stream, fieldnames=header, **kwargs)
|
||||
writer.writeheader()
|
||||
writer.writerows(data)
|
||||
|
||||
|
||||
class XMLFormat(ExportFormat):
|
||||
"""Saves in a xml file"""
|
||||
def __init__(self, file_path, file_mode=u'w', encoding=u'utf-8'):
|
||||
super(XMLFormat, self).__init__(file_path, file_mode, encoding)
|
||||
|
||||
def export(self, data, **kwargs):
|
||||
# Creates the XML file structure.
|
||||
library = ElementTree.Element(u'library')
|
||||
tracks = ElementTree.SubElement(library, u'tracks')
|
||||
if data and isinstance(data[0], dict):
|
||||
for index, item in enumerate(data):
|
||||
track = ElementTree.SubElement(tracks, u'track')
|
||||
for key, value in item.items():
|
||||
track_details = ElementTree.SubElement(track, key)
|
||||
track_details.text = value
|
||||
# Depending on the Python version, the required encoding differs.
|
||||
try:
|
||||
data = ElementTree.tostring(library, encoding='unicode', **kwargs)
|
||||
except LookupError:
|
||||
data = ElementTree.tostring(library, encoding='utf-8', **kwargs)
|
||||
|
||||
self.out_stream.write(data)
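CSVFormat.export above derives the CSV header from the keys of the first item and hands the formatting options (delimiter, dialect) straight to csv.DictWriter. A small standalone sketch of the same pattern, with invented example data:

    import csv
    import sys

    def export_csv(rows, out=sys.stdout, **formatting):
        # Header comes from the keys of the first row, as in CSVFormat.
        header = list(rows[0].keys()) if rows else []
        writer = csv.DictWriter(out, fieldnames=header, **formatting)
        writer.writeheader()
        writer.writerows(rows)

    # export_csv([{'title': 'Blue Monday', 'artist': 'New Order'}],
    #            delimiter=',', dialect='excel')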
@ -21,6 +21,7 @@ from contextlib import closing
|
|||
import os
|
||||
import re
|
||||
from tempfile import NamedTemporaryFile
|
||||
from collections import OrderedDict
|
||||
|
||||
import requests
|
||||
|
||||
|
|
@ -135,7 +136,8 @@ class Candidate(object):
|
|||
|
||||
def resize(self, plugin):
|
||||
if plugin.maxwidth and self.check == self.CANDIDATE_DOWNSCALE:
|
||||
self.path = ArtResizer.shared.resize(plugin.maxwidth, self.path)
|
||||
self.path = ArtResizer.shared.resize(plugin.maxwidth, self.path,
|
||||
quality=plugin.quality)
|
||||
|
||||
|
||||
def _logged_get(log, *args, **kwargs):
|
||||
|
|
@ -164,9 +166,14 @@ def _logged_get(log, *args, **kwargs):
|
|||
message = 'getting URL'
|
||||
|
||||
req = requests.Request('GET', *args, **req_kwargs)
|
||||
|
||||
with requests.Session() as s:
|
||||
s.headers = {'User-Agent': 'beets'}
|
||||
prepped = s.prepare_request(req)
|
||||
settings = s.merge_environment_settings(
|
||||
prepped.url, {}, None, None, None
|
||||
)
|
||||
send_kwargs.update(settings)
|
||||
log.debug('{}: {}', message, prepped.url)
|
||||
return s.send(prepped, **send_kwargs)
|
||||
|
||||
|
|
@ -203,6 +210,9 @@ class ArtSource(RequestMixin):
|
|||
def fetch_image(self, candidate, plugin):
|
||||
raise NotImplementedError()
|
||||
|
||||
def cleanup(self, candidate):
|
||||
pass
|
||||
|
||||
|
||||
class LocalArtSource(ArtSource):
|
||||
IS_LOCAL = True
|
||||
|
|
@ -284,10 +294,18 @@ class RemoteArtSource(ArtSource):
|
|||
self._log.debug(u'error fetching art: {}', exc)
|
||||
return
|
||||
|
||||
def cleanup(self, candidate):
|
||||
if candidate.path:
|
||||
try:
|
||||
util.remove(path=candidate.path)
|
||||
except util.FilesystemError as exc:
|
||||
self._log.debug(u'error cleaning up tmp art: {}', exc)
|
||||
|
||||
|
||||
class CoverArtArchive(RemoteArtSource):
|
||||
NAME = u"Cover Art Archive"
|
||||
VALID_MATCHING_CRITERIA = ['release', 'releasegroup']
|
||||
VALID_THUMBNAIL_SIZES = [250, 500, 1200]
|
||||
|
||||
if util.SNI_SUPPORTED:
|
||||
URL = 'https://coverartarchive.org/release/{mbid}/front'
|
||||
|
|
@ -300,18 +318,39 @@ class CoverArtArchive(RemoteArtSource):
|
|||
"""Return the Cover Art Archive and Cover Art Archive release group URLs
|
||||
using album MusicBrainz release ID and release group ID.
|
||||
"""
|
||||
release_url = self.URL.format(mbid=album.mb_albumid)
|
||||
release_group_url = self.GROUP_URL.format(mbid=album.mb_releasegroupid)
|
||||
|
||||
# Cover Art Archive API offers pre-resized thumbnails at several sizes.
|
||||
# If the maxwidth config matches one of the already available sizes
|
||||
# fetch it directly instead of fetching the full sized image and
|
||||
# resizing it.
|
||||
size_suffix = None
|
||||
if plugin.maxwidth in self.VALID_THUMBNAIL_SIZES:
|
||||
size_suffix = "-" + str(plugin.maxwidth)
|
||||
|
||||
if 'release' in self.match_by and album.mb_albumid:
|
||||
yield self._candidate(url=self.URL.format(mbid=album.mb_albumid),
|
||||
if size_suffix:
|
||||
release_thumbnail_url = release_url + size_suffix
|
||||
yield self._candidate(url=release_thumbnail_url,
|
||||
match=Candidate.MATCH_EXACT)
|
||||
yield self._candidate(url=release_url,
|
||||
match=Candidate.MATCH_EXACT)
|
||||
if 'releasegroup' in self.match_by and album.mb_releasegroupid:
|
||||
yield self._candidate(
|
||||
url=self.GROUP_URL.format(mbid=album.mb_releasegroupid),
|
||||
match=Candidate.MATCH_FALLBACK)
|
||||
if size_suffix:
|
||||
release_group_thumbnail_url = release_group_url + size_suffix
|
||||
yield self._candidate(url=release_group_thumbnail_url,
|
||||
match=Candidate.MATCH_FALLBACK)
|
||||
yield self._candidate(url=release_group_url,
|
||||
match=Candidate.MATCH_FALLBACK)
|
||||
|
||||
|
||||
class Amazon(RemoteArtSource):
|
||||
NAME = u"Amazon"
|
||||
URL = 'http://images.amazon.com/images/P/%s.%02i.LZZZZZZZ.jpg'
|
||||
if util.SNI_SUPPORTED:
|
||||
URL = 'https://images.amazon.com/images/P/%s.%02i.LZZZZZZZ.jpg'
|
||||
else:
|
||||
URL = 'http://images.amazon.com/images/P/%s.%02i.LZZZZZZZ.jpg'
|
||||
INDICES = (1, 2)
|
||||
|
||||
def get(self, album, plugin, paths):
|
||||
|
|
@ -325,7 +364,10 @@ class Amazon(RemoteArtSource):
|
|||
|
||||
class AlbumArtOrg(RemoteArtSource):
|
||||
NAME = u"AlbumArt.org scraper"
|
||||
URL = 'http://www.albumart.org/index_detail.php'
|
||||
if util.SNI_SUPPORTED:
|
||||
URL = 'https://www.albumart.org/index_detail.php'
|
||||
else:
|
||||
URL = 'http://www.albumart.org/index_detail.php'
|
||||
PAT = r'href\s*=\s*"([^>"]*)"[^>]*title\s*=\s*"View larger image"'
|
||||
|
||||
def get(self, album, plugin, paths):
|
||||
|
|
@ -499,12 +541,18 @@ class ITunesStore(RemoteArtSource):
|
|||
payload['term'])
|
||||
return
|
||||
|
||||
if self._config['high_resolution']:
|
||||
image_suffix = '100000x100000-999'
|
||||
else:
|
||||
image_suffix = '1200x1200bb'
|
||||
|
||||
for c in candidates:
|
||||
try:
|
||||
if (c['artistName'] == album.albumartist
|
||||
and c['collectionName'] == album.album):
|
||||
art_url = c['artworkUrl100']
|
||||
art_url = art_url.replace('100x100', '1200x1200')
|
||||
art_url = art_url.replace('100x100bb',
|
||||
image_suffix)
|
||||
yield self._candidate(url=art_url,
|
||||
match=Candidate.MATCH_EXACT)
|
||||
except KeyError as e:
|
||||
|
|
@ -514,7 +562,8 @@ class ITunesStore(RemoteArtSource):
|
|||
|
||||
try:
|
||||
fallback_art_url = candidates[0]['artworkUrl100']
|
||||
fallback_art_url = fallback_art_url.replace('100x100', '1200x1200')
|
||||
fallback_art_url = fallback_art_url.replace('100x100bb',
|
||||
image_suffix)
|
||||
yield self._candidate(url=fallback_art_url,
|
||||
match=Candidate.MATCH_FALLBACK)
|
||||
except KeyError as e:
|
||||
|
|
@ -723,11 +772,72 @@ class FileSystem(LocalArtSource):
|
|||
match=Candidate.MATCH_FALLBACK)
|
||||
|
||||
|
||||
class LastFM(RemoteArtSource):
|
||||
NAME = u"Last.fm"
|
||||
|
||||
# Sizes in priority order.
|
||||
SIZES = OrderedDict([
|
||||
('mega', (300, 300)),
|
||||
('extralarge', (300, 300)),
|
||||
('large', (174, 174)),
|
||||
('medium', (64, 64)),
|
||||
('small', (34, 34)),
|
||||
])
|
||||
|
||||
if util.SNI_SUPPORTED:
|
||||
API_URL = 'https://ws.audioscrobbler.com/2.0'
|
||||
else:
|
||||
API_URL = 'http://ws.audioscrobbler.com/2.0'
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(LastFM, self).__init__(*args, **kwargs)
|
||||
self.key = self._config['lastfm_key'].get(),
|
||||
|
||||
def get(self, album, plugin, paths):
|
||||
if not album.mb_albumid:
|
||||
return
|
||||
|
||||
try:
|
||||
response = self.request(self.API_URL, params={
|
||||
'method': 'album.getinfo',
|
||||
'api_key': self.key,
|
||||
'mbid': album.mb_albumid,
|
||||
'format': 'json',
|
||||
})
|
||||
except requests.RequestException:
|
||||
self._log.debug(u'lastfm: error receiving response')
|
||||
return
|
||||
|
||||
try:
|
||||
data = response.json()
|
||||
|
||||
if 'error' in data:
|
||||
if data['error'] == 6:
|
||||
self._log.debug('lastfm: no results for {}',
|
||||
album.mb_albumid)
|
||||
else:
|
||||
self._log.error(
|
||||
'lastfm: failed to get album info: {} ({})',
|
||||
data['message'], data['error'])
|
||||
else:
|
||||
images = {image['size']: image['#text']
|
||||
for image in data['album']['image']}
|
||||
|
||||
# Provide candidates in order of size.
|
||||
for size in self.SIZES.keys():
|
||||
if size in images:
|
||||
yield self._candidate(url=images[size],
|
||||
size=self.SIZES[size])
|
||||
except ValueError:
|
||||
self._log.debug(u'lastfm: error loading response: {}'
|
||||
.format(response.text))
|
||||
return
|
||||
|
||||
# Try each source in turn.
|
||||
|
||||
SOURCES_ALL = [u'filesystem',
|
||||
u'coverart', u'itunes', u'amazon', u'albumart',
|
||||
u'wikipedia', u'google', u'fanarttv']
|
||||
u'wikipedia', u'google', u'fanarttv', u'lastfm']
|
||||
|
||||
ART_SOURCES = {
|
||||
u'filesystem': FileSystem,
|
||||
|
|
@ -738,6 +848,7 @@ ART_SOURCES = {
|
|||
u'wikipedia': Wikipedia,
|
||||
u'google': GoogleImages,
|
||||
u'fanarttv': FanartTV,
|
||||
u'lastfm': LastFM,
|
||||
}
|
||||
SOURCE_NAMES = {v: k for k, v in ART_SOURCES.items()}
|
||||
|
||||
|
|
@ -759,6 +870,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
|
|||
'auto': True,
|
||||
'minwidth': 0,
|
||||
'maxwidth': 0,
|
||||
'quality': 0,
|
||||
'enforce_ratio': False,
|
||||
'cautious': False,
|
||||
'cover_names': ['cover', 'front', 'art', 'album', 'folder'],
|
||||
|
|
@ -767,13 +879,17 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
|
|||
'google_key': None,
|
||||
'google_engine': u'001442825323518660753:hrh5ch1gjzm',
|
||||
'fanarttv_key': None,
|
||||
'lastfm_key': None,
|
||||
'store_source': False,
|
||||
'high_resolution': False,
|
||||
})
|
||||
self.config['google_key'].redact = True
|
||||
self.config['fanarttv_key'].redact = True
|
||||
self.config['lastfm_key'].redact = True
|
||||
|
||||
self.minwidth = self.config['minwidth'].get(int)
|
||||
self.maxwidth = self.config['maxwidth'].get(int)
|
||||
self.quality = self.config['quality'].get(int)
|
||||
|
||||
# allow both pixel and percentage-based margin specifications
|
||||
self.enforce_ratio = self.config['enforce_ratio'].get(
|
||||
|
|
@ -809,6 +925,9 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
|
|||
if not self.config['google_key'].get() and \
|
||||
u'google' in available_sources:
|
||||
available_sources.remove(u'google')
|
||||
if not self.config['lastfm_key'].get() and \
|
||||
u'lastfm' in available_sources:
|
||||
available_sources.remove(u'lastfm')
|
||||
available_sources = [(s, c)
|
||||
for s in available_sources
|
||||
for c in ART_SOURCES[s].VALID_MATCHING_CRITERIA]
|
||||
|
|
@ -889,7 +1008,7 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
|
|||
cmd.parser.add_option(
|
||||
u'-q', u'--quiet', dest='quiet',
|
||||
action='store_true', default=False,
|
||||
help=u'shows only quiet art'
|
||||
help=u'quiet mode: do not output albums that already have artwork'
|
||||
)
|
||||
|
||||
def func(lib, opts, args):
|
||||
|
|
@ -903,9 +1022,10 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
|
|||
def art_for_album(self, album, paths, local_only=False):
|
||||
"""Given an Album object, returns a path to downloaded art for the
|
||||
album (or None if no art is found). If `maxwidth`, then images are
|
||||
resized to this maximum pixel size. If `local_only`, then only local
|
||||
image files from the filesystem are returned; no network requests
|
||||
are made.
|
||||
resized to this maximum pixel size. If `quality` then resized images
|
||||
are saved at the specified quality level. If `local_only`, then only
|
||||
local image files from the filesystem are returned; no network
|
||||
requests are made.
|
||||
"""
|
||||
out = None
|
||||
|
||||
|
|
@ -926,6 +1046,8 @@ class FetchArtPlugin(plugins.BeetsPlugin, RequestMixin):
|
|||
u'using {0.LOC_STR} image {1}'.format(
|
||||
source, util.displayable_path(out.path)))
|
||||
break
|
||||
# Remove temporary files for invalid candidates.
|
||||
source.cleanup(candidate)
|
||||
if out:
|
||||
break
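The Cover Art Archive change earlier in this commit prefers a pre-sized thumbnail when maxwidth matches one of the sizes the archive already serves, and only then falls back to the full image. A minimal sketch of that URL selection; the release ID below is a placeholder:

    VALID_THUMBNAIL_SIZES = [250, 500, 1200]
    URL = 'https://coverartarchive.org/release/{mbid}/front'

    def caa_release_urls(mbid, maxwidth=0):
        # Yield the thumbnail URL first if one exists for this width,
        # then the full-size image as a fallback.
        release_url = URL.format(mbid=mbid)
        if maxwidth in VALID_THUMBNAIL_SIZES:
            yield release_url + '-' + str(maxwidth)
        yield release_url

    # list(caa_release_urls('some-mbid', maxwidth=500))
    # -> ['https://coverartarchive.org/release/some-mbid/front-500',
    #     'https://coverartarchive.org/release/some-mbid/front']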
|
||||
|
||||
|
|
|
|||
276
beetsplug/fish.py
Normal file
|
|
@ -0,0 +1,276 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# This file is part of beets.
|
||||
# Copyright 2015, winters jean-marie.
|
||||
# Copyright 2020, Justin Mayer <https://justinmayer.com>
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
|
||||
"""This plugin generates tab completions for Beets commands for the Fish shell
|
||||
<https://fishshell.com/>, including completions for Beets commands, plugin
|
||||
commands, and option flags. Also generated are completions for all the album
|
||||
and track fields, suggesting for example `genre:` or `album:` when querying the
|
||||
Beets database. Completions for the *values* of those fields are not generated
|
||||
by default but can be added via the `-e` / `--extravalues` flag. For example:
|
||||
`beet fish -e genre -e albumartist`
|
||||
"""
|
||||
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beets import library, ui
|
||||
from beets.ui import commands
|
||||
from operator import attrgetter
|
||||
import os
|
||||
BL_NEED2 = """complete -c beet -n '__fish_beet_needs_command' {} {}\n"""
|
||||
BL_USE3 = """complete -c beet -n '__fish_beet_using_command {}' {} {}\n"""
|
||||
BL_SUBS = """complete -c beet -n '__fish_at_level {} ""' {} {}\n"""
|
||||
BL_EXTRA3 = """complete -c beet -n '__fish_beet_use_extra {}' {} {}\n"""
|
||||
|
||||
HEAD = '''
|
||||
function __fish_beet_needs_command
|
||||
set cmd (commandline -opc)
|
||||
if test (count $cmd) -eq 1
|
||||
return 0
|
||||
end
|
||||
return 1
|
||||
end
|
||||
|
||||
function __fish_beet_using_command
|
||||
set cmd (commandline -opc)
|
||||
set needle (count $cmd)
|
||||
if test $needle -gt 1
|
||||
if begin test $argv[1] = $cmd[2];
|
||||
and not contains -- $cmd[$needle] $FIELDS; end
|
||||
return 0
|
||||
end
|
||||
end
|
||||
return 1
|
||||
end
|
||||
|
||||
function __fish_beet_use_extra
|
||||
set cmd (commandline -opc)
|
||||
set needle (count $cmd)
|
||||
if test $argv[2] = $cmd[$needle]
|
||||
return 0
|
||||
end
|
||||
return 1
|
||||
end
|
||||
'''
|
||||
|
||||
|
||||
class FishPlugin(BeetsPlugin):
|
||||
|
||||
def commands(self):
|
||||
cmd = ui.Subcommand('fish', help='generate Fish shell tab completions')
|
||||
cmd.func = self.run
|
||||
cmd.parser.add_option('-f', '--noFields', action='store_true',
|
||||
default=False,
|
||||
help='omit album/track field completions')
|
||||
cmd.parser.add_option(
|
||||
'-e',
|
||||
'--extravalues',
|
||||
action='append',
|
||||
type='choice',
|
||||
choices=library.Item.all_keys() +
|
||||
library.Album.all_keys(),
|
||||
help='include specified field *values* in completions')
|
||||
return [cmd]
|
||||
|
||||
def run(self, lib, opts, args):
|
||||
# Gather the commands from Beets core and its plugins.
|
||||
# Collect the album and track fields.
|
||||
# If specified, also collect the values for these fields.
|
||||
# Make a giant string of all the above, formatted in a way that
|
||||
# allows Fish to do tab completion for the `beet` command.
|
||||
home_dir = os.path.expanduser("~")
|
||||
completion_dir = os.path.join(home_dir, '.config/fish/completions')
|
||||
try:
|
||||
os.makedirs(completion_dir)
|
||||
except OSError:
|
||||
if not os.path.isdir(completion_dir):
|
||||
raise
|
||||
completion_file_path = os.path.join(completion_dir, 'beet.fish')
|
||||
nobasicfields = opts.noFields # Do not complete for album/track fields
|
||||
extravalues = opts.extravalues  # e.g., also complete artist names
|
||||
beetcmds = sorted(
|
||||
(commands.default_commands +
|
||||
commands.plugins.commands()),
|
||||
key=attrgetter('name'))
|
||||
fields = sorted(set(
|
||||
library.Album.all_keys() + library.Item.all_keys()))
|
||||
# Collect commands, their aliases, and their help text
|
||||
cmd_names_help = []
|
||||
for cmd in beetcmds:
|
||||
names = [alias for alias in cmd.aliases]
|
||||
names.append(cmd.name)
|
||||
for name in names:
|
||||
cmd_names_help.append((name, cmd.help))
|
||||
# Concatenate the string
|
||||
totstring = HEAD + "\n"
|
||||
totstring += get_cmds_list([name[0] for name in cmd_names_help])
|
||||
totstring += '' if nobasicfields else get_standard_fields(fields)
|
||||
totstring += get_extravalues(lib, extravalues) if extravalues else ''
|
||||
totstring += "\n" + "# ====== {} =====".format(
|
||||
"setup basic beet completion") + "\n" * 2
|
||||
totstring += get_basic_beet_options()
|
||||
totstring += "\n" + "# ====== {} =====".format(
|
||||
"setup field completion for subcommands") + "\n"
|
||||
totstring += get_subcommands(
|
||||
cmd_names_help, nobasicfields, extravalues)
|
||||
# Set up completion for all the command options
|
||||
totstring += get_all_commands(beetcmds)
|
||||
|
||||
with open(completion_file_path, 'w') as fish_file:
|
||||
fish_file.write(totstring)
|
||||
|
||||
|
||||
def get_cmds_list(cmds_names):
|
||||
# Make a list of all Beets core & plugin commands
|
||||
substr = ''
|
||||
substr += (
|
||||
"set CMDS " + " ".join(cmds_names) + ("\n" * 2)
|
||||
)
|
||||
return substr
|
||||
|
||||
|
||||
def get_standard_fields(fields):
|
||||
# Make a list of album/track fields and append with ':'
|
||||
fields = (field + ":" for field in fields)
|
||||
substr = ''
|
||||
substr += (
|
||||
"set FIELDS " + " ".join(fields) + ("\n" * 2)
|
||||
)
|
||||
return substr
|
||||
|
||||
|
||||
def get_extravalues(lib, extravalues):
|
||||
# Make a list of all values from an album/track field.
|
||||
# 'beet ls albumartist: <TAB>' yields completions for ABBA, Beatles, etc.
|
||||
word = ''
|
||||
values_set = get_set_of_values_for_field(lib, extravalues)
|
||||
for fld in extravalues:
|
||||
extraname = fld.upper() + 'S'
|
||||
word += (
|
||||
"set " + extraname + " " + " ".join(sorted(values_set[fld]))
|
||||
+ ("\n" * 2)
|
||||
)
|
||||
return word
|
||||
|
||||
|
||||
def get_set_of_values_for_field(lib, fields):
|
||||
# Get unique values from a specified album/track field
|
||||
fields_dict = {}
|
||||
for each in fields:
|
||||
fields_dict[each] = set()
|
||||
for item in lib.items():
|
||||
for field in fields:
|
||||
fields_dict[field].add(wrap(item[field]))
|
||||
return fields_dict
|
||||
|
||||
|
||||
def get_basic_beet_options():
|
||||
word = (
|
||||
BL_NEED2.format("-l format-item",
|
||||
"-f -d 'print with custom format'") +
|
||||
BL_NEED2.format("-l format-album",
|
||||
"-f -d 'print with custom format'") +
|
||||
BL_NEED2.format("-s l -l library",
|
||||
"-f -r -d 'library database file to use'") +
|
||||
BL_NEED2.format("-s d -l directory",
|
||||
"-f -r -d 'destination music directory'") +
|
||||
BL_NEED2.format("-s v -l verbose",
|
||||
"-f -d 'print debugging information'") +
|
||||
|
||||
BL_NEED2.format("-s c -l config",
|
||||
"-f -r -d 'path to configuration file'") +
|
||||
BL_NEED2.format("-s h -l help",
|
||||
"-f -d 'print this help message and exit'"))
|
||||
return word
|
||||
|
||||
|
||||
def get_subcommands(cmd_name_and_help, nobasicfields, extravalues):
|
||||
# Formatting for Fish to complete our fields/values
|
||||
word = ""
|
||||
for cmdname, cmdhelp in cmd_name_and_help:
|
||||
word += "\n" + "# ------ {} -------".format(
|
||||
"fieldsetups for " + cmdname) + "\n"
|
||||
word += (
|
||||
BL_NEED2.format(
|
||||
("-a " + cmdname),
|
||||
("-f " + "-d " + wrap(clean_whitespace(cmdhelp)))))
|
||||
|
||||
if nobasicfields is False:
|
||||
word += (
|
||||
BL_USE3.format(
|
||||
cmdname,
|
||||
("-a " + wrap("$FIELDS")),
|
||||
("-f " + "-d " + wrap("fieldname"))))
|
||||
|
||||
if extravalues:
|
||||
for f in extravalues:
|
||||
setvar = wrap("$" + f.upper() + "S")
|
||||
word += " ".join(BL_EXTRA3.format(
|
||||
(cmdname + " " + f + ":"),
|
||||
('-f ' + '-A ' + '-a ' + setvar),
|
||||
('-d ' + wrap(f))).split()) + "\n"
|
||||
return word
|
||||
|
||||
|
||||
def get_all_commands(beetcmds):
|
||||
# Formatting for Fish to complete command options
|
||||
word = ""
|
||||
for cmd in beetcmds:
|
||||
names = [alias for alias in cmd.aliases]
|
||||
names.append(cmd.name)
|
||||
for name in names:
|
||||
word += "\n"
|
||||
word += ("\n" * 2) + "# ====== {} =====".format(
|
||||
"completions for " + name) + "\n"
|
||||
|
||||
for option in cmd.parser._get_all_options()[1:]:
|
||||
cmd_l = (" -l " + option._long_opts[0].replace('--', '')
|
||||
)if option._long_opts else ''
|
||||
cmd_s = (" -s " + option._short_opts[0].replace('-', '')
|
||||
) if option._short_opts else ''
|
||||
cmd_need_arg = ' -r ' if option.nargs in [1] else ''
|
||||
cmd_helpstr = (" -d " + wrap(' '.join(option.help.split()))
|
||||
) if option.help else ''
|
||||
cmd_arglist = (' -a ' + wrap(" ".join(option.choices))
|
||||
) if option.choices else ''
|
||||
|
||||
word += " ".join(BL_USE3.format(
|
||||
name,
|
||||
(cmd_need_arg + cmd_s + cmd_l + " -f " + cmd_arglist),
|
||||
cmd_helpstr).split()) + "\n"
|
||||
|
||||
word = (word + " ".join(BL_USE3.format(
|
||||
name,
|
||||
("-s " + "h " + "-l " + "help" + " -f "),
|
||||
('-d ' + wrap("print help") + "\n")
|
||||
).split()))
|
||||
return word
|
||||
|
||||
|
||||
def clean_whitespace(word):
|
||||
# Remove excess whitespace and tabs in a string
|
||||
return " ".join(word.split())
|
||||
|
||||
|
||||
def wrap(word):
|
||||
# Need " or ' around strings but watch out if they're in the string
|
||||
sptoken = '\"'
|
||||
if ('"') in word and ("'") in word:
|
||||
word.replace('"', sptoken)
|
||||
return '"' + word + '"'
|
||||
|
||||
tok = '"' if "'" in word else "'"
|
||||
return tok + word + tok
|
||||
|
|
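The template strings at the top of fish.py are filled in with option flags and a quoted description. As an illustration, formatting BL_NEED2 with one of the option pairs used in get_basic_beet_options() yields a single Fish completion line:

    BL_NEED2 = """complete -c beet -n '__fish_beet_needs_command' {} {}\n"""

    line = BL_NEED2.format("-s h -l help",
                           "-f -d 'print this help message and exit'")
    # line ==
    # "complete -c beet -n '__fish_beet_needs_command' -s h -l help "
    # "-f -d 'print this help message and exit'\n"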
@ -105,7 +105,10 @@ class HookPlugin(BeetsPlugin):
|
|||
u' '.join(command_pieces), event)
|
||||
|
||||
try:
|
||||
subprocess.Popen(command_pieces).wait()
|
||||
subprocess.check_call(command_pieces)
|
||||
except subprocess.CalledProcessError as exc:
|
||||
self._log.error(u'hook for {0} exited with status {1}',
|
||||
event, exc.returncode)
|
||||
except OSError as exc:
|
||||
self._log.error(u'hook for {0} failed: {1}', event, exc)
|
||||
|
||||
|
|
|
|||
|
|
@ -124,7 +124,7 @@ class ImportAddedPlugin(BeetsPlugin):
|
|||
util.displayable_path(item.path), item.added)
|
||||
item.store()
|
||||
|
||||
def update_after_write_time(self, item):
|
||||
def update_after_write_time(self, item, path):
|
||||
"""Update the mtime of the item's file with the item.added value
|
||||
after each write of the item if `preserve_write_mtimes` is enabled.
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -123,7 +123,7 @@ class IPFSPlugin(BeetsPlugin):
|
|||
cmd = "ipfs add -q -r".split()
|
||||
cmd.append(album_dir)
|
||||
try:
|
||||
output = util.command_output(cmd).split()
|
||||
output = util.command_output(cmd).stdout.split()
|
||||
except (OSError, subprocess.CalledProcessError) as exc:
|
||||
self._log.error(u'Failed to add {0}, error: {1}', album_dir, exc)
|
||||
return False
|
||||
|
|
@ -151,6 +151,8 @@ class IPFSPlugin(BeetsPlugin):
|
|||
def ipfs_get(self, lib, query):
|
||||
query = query[0]
|
||||
# Check if query is a hash
|
||||
# TODO: generalize to other hashes; probably use a multihash
|
||||
# implementation
|
||||
if query.startswith("Qm") and len(query) == 46:
|
||||
self.ipfs_get_from_hash(lib, query)
|
||||
else:
|
||||
|
|
@ -183,7 +185,7 @@ class IPFSPlugin(BeetsPlugin):
|
|||
else:
|
||||
cmd = "ipfs add -q ".split()
|
||||
cmd.append(tmp.name)
|
||||
output = util.command_output(cmd)
|
||||
output = util.command_output(cmd).stdout
|
||||
except (OSError, subprocess.CalledProcessError) as err:
|
||||
msg = "Failed to publish library. Error: {0}".format(err)
|
||||
self._log.error(msg)
|
||||
|
|
@ -197,7 +199,7 @@ class IPFSPlugin(BeetsPlugin):
|
|||
else:
|
||||
lib_name = _hash
|
||||
lib_root = os.path.dirname(lib.path)
|
||||
remote_libs = lib_root + "/remotes"
|
||||
remote_libs = os.path.join(lib_root, b"remotes")
|
||||
if not os.path.exists(remote_libs):
|
||||
try:
|
||||
os.makedirs(remote_libs)
|
||||
|
|
@ -205,7 +207,7 @@ class IPFSPlugin(BeetsPlugin):
|
|||
msg = "Could not create {0}. Error: {1}".format(remote_libs, e)
|
||||
self._log.error(msg)
|
||||
return False
|
||||
path = remote_libs + "/" + lib_name + ".db"
|
||||
path = os.path.join(remote_libs, lib_name.encode() + b".db")
|
||||
if not os.path.exists(path):
|
||||
cmd = "ipfs get {0} -o".format(_hash).split()
|
||||
cmd.append(path)
|
||||
|
|
@ -216,7 +218,7 @@ class IPFSPlugin(BeetsPlugin):
|
|||
return False
|
||||
|
||||
# add all albums from remotes into a combined library
|
||||
jpath = remote_libs + "/joined.db"
|
||||
jpath = os.path.join(remote_libs, b"joined.db")
|
||||
jlib = library.Library(jpath)
|
||||
nlib = library.Library(path)
|
||||
for album in nlib.albums():
|
||||
|
|
@ -244,7 +246,7 @@ class IPFSPlugin(BeetsPlugin):
|
|||
return
|
||||
|
||||
for album in albums:
|
||||
ui.print_(format(album, fmt), " : ", album.ipfs)
|
||||
ui.print_(format(album, fmt), " : ", album.ipfs.decode())
|
||||
|
||||
def query(self, lib, args):
|
||||
rlib = self.get_remote_lib(lib)
|
||||
|
|
@ -253,8 +255,8 @@ class IPFSPlugin(BeetsPlugin):
|
|||
|
||||
def get_remote_lib(self, lib):
|
||||
lib_root = os.path.dirname(lib.path)
|
||||
remote_libs = lib_root + "/remotes"
|
||||
path = remote_libs + "/joined.db"
|
||||
remote_libs = os.path.join(lib_root, b"remotes")
|
||||
path = os.path.join(remote_libs, b"joined.db")
|
||||
if not os.path.isfile(path):
|
||||
raise IOError
|
||||
return library.Library(path)
|
||||
|
|
|
|||
|
|
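The ipfs changes replace string concatenation with os.path.join and switch the fixed path components to bytes, which suggests lib.path (and therefore lib_root) is a bytestring here. A small sketch under that assumption, with an invented hash:

    import os

    def remote_db_path(lib_root, lib_name):
        # Every component joined onto a bytes root must itself be bytes,
        # hence b"remotes" and lib_name.encode() in the diff above.
        remote_libs = os.path.join(lib_root, b"remotes")
        return os.path.join(remote_libs, lib_name.encode() + b".db")

    # remote_db_path(b'/home/user/.config/beets', 'QmExampleHash')
    # -> b'/home/user/.config/beets/remotes/QmExampleHash.db'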
@ -18,6 +18,7 @@
|
|||
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os.path
|
||||
import subprocess
|
||||
|
||||
from beets import ui
|
||||
|
|
@ -52,21 +53,25 @@ class KeyFinderPlugin(BeetsPlugin):
|
|||
|
||||
def find_key(self, items, write=False):
|
||||
overwrite = self.config['overwrite'].get(bool)
|
||||
bin = self.config['bin'].as_str()
|
||||
command = [self.config['bin'].as_str()]
|
||||
# The KeyFinder GUI program needs the -f flag before the path.
|
||||
# keyfinder-cli is similar, but just wants the path with no flag.
|
||||
if 'keyfinder-cli' not in os.path.basename(command[0]).lower():
|
||||
command.append('-f')
|
||||
|
||||
for item in items:
|
||||
if item['initial_key'] and not overwrite:
|
||||
continue
|
||||
|
||||
try:
|
||||
output = util.command_output([bin, '-f',
|
||||
util.syspath(item.path)])
|
||||
output = util.command_output(command + [util.syspath(
|
||||
item.path)]).stdout
|
||||
except (subprocess.CalledProcessError, OSError) as exc:
|
||||
self._log.error(u'execution failed: {0}', exc)
|
||||
continue
|
||||
except UnicodeEncodeError:
|
||||
# Workaround for Python 2 Windows bug.
|
||||
# http://bugs.python.org/issue1759845
|
||||
# https://bugs.python.org/issue1759845
|
||||
self._log.error(u'execution failed for Unicode path: {0!r}',
|
||||
item.path)
|
||||
continue
|
||||
|
|
|
|||
|
|
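The comment in the keyfinder hunk above explains that the KeyFinder GUI binary needs -f before the path while keyfinder-cli takes the bare path. A standalone sketch of that command construction; the binary names are examples:

    import os

    def build_keyfinder_command(binary, path):
        # keyfinder-cli wants just the path; anything else gets -f first.
        command = [binary]
        if 'keyfinder-cli' not in os.path.basename(binary).lower():
            command.append('-f')
        return command + [path]

    # build_keyfinder_command('KeyFinder', '/music/track.mp3')
    # -> ['KeyFinder', '-f', '/music/track.mp3']
    # build_keyfinder_command('keyfinder-cli', '/music/track.mp3')
    # -> ['keyfinder-cli', '/music/track.mp3']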
@ -648,35 +648,51 @@
|
|||
- glam rock
|
||||
- hard rock
|
||||
- heavy metal:
|
||||
- alternative metal
|
||||
- alternative metal:
|
||||
- funk metal
|
||||
- black metal:
|
||||
- viking metal
|
||||
- christian metal
|
||||
- death metal:
|
||||
- death/doom
|
||||
- goregrind
|
||||
- melodic death metal
|
||||
- technical death metal
|
||||
- doom metal
|
||||
- doom metal:
|
||||
- epic doom metal
|
||||
- funeral doom
|
||||
- drone metal
|
||||
- epic metal
|
||||
- folk metal:
|
||||
- celtic metal
|
||||
- medieval metal
|
||||
- pagan metal
|
||||
- funk metal
|
||||
- glam metal
|
||||
- gothic metal
|
||||
- industrial metal:
|
||||
- industrial death metal
|
||||
- metalcore:
|
||||
- deathcore
|
||||
- mathcore:
|
||||
- djent
|
||||
- power metal
|
||||
- synthcore
|
||||
- neoclassical metal
|
||||
- post-metal
|
||||
- power metal:
|
||||
- progressive power metal
|
||||
- progressive metal
|
||||
- sludge metal
|
||||
- speed metal
|
||||
- stoner rock
|
||||
- stoner rock:
|
||||
- stoner metal
|
||||
- symphonic metal
|
||||
- thrash metal:
|
||||
- crossover thrash
|
||||
- groove metal
|
||||
- progressive thrash metal
|
||||
- teutonic thrash metal
|
||||
- traditional heavy metal
|
||||
- math rock
|
||||
- new wave:
|
||||
- world fusion
|
||||
|
|
@ -719,6 +735,7 @@
|
|||
- street punk
|
||||
- thrashcore
|
||||
- horror punk
|
||||
- oi!
|
||||
- pop punk
|
||||
- psychobilly
|
||||
- riot grrrl
|
||||
|
|
|
|||
|
|
@ -450,6 +450,8 @@ emo rap
|
|||
emocore
|
||||
emotronic
|
||||
enka
|
||||
epic doom metal
|
||||
epic metal
|
||||
eremwu eu
|
||||
ethereal pop
|
||||
ethereal wave
|
||||
|
|
@ -1024,6 +1026,7 @@ neo-medieval
|
|||
neo-prog
|
||||
neo-psychedelia
|
||||
neoclassical
|
||||
neoclassical metal
|
||||
neoclassical music
|
||||
neofolk
|
||||
neotraditional country
|
||||
|
|
@ -1176,8 +1179,10 @@ progressive folk
|
|||
progressive folk music
|
||||
progressive house
|
||||
progressive metal
|
||||
progressive power metal
|
||||
progressive rock
|
||||
progressive trance
|
||||
progressive thrash metal
|
||||
protopunk
|
||||
psych folk
|
||||
psychedelic music
|
||||
|
|
@ -1396,6 +1401,7 @@ symphonic metal
|
|||
symphonic poem
|
||||
symphonic rock
|
||||
symphony
|
||||
synthcore
|
||||
synthpop
|
||||
synthpunk
|
||||
t'ong guitar
|
||||
|
|
@ -1428,6 +1434,7 @@ tejano
|
|||
tejano music
|
||||
tekno
|
||||
tembang sunda
|
||||
teutonic thrash metal
|
||||
texas blues
|
||||
thai pop
|
||||
thillana
|
||||
|
|
@ -1444,6 +1451,7 @@ toeshey
|
|||
togaku
|
||||
trad jazz
|
||||
traditional bluegrass
|
||||
traditional heavy metal
|
||||
traditional pop music
|
||||
trallalero
|
||||
trance
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# This file is part of beets.
|
||||
# Copyright 2016, Rafael Bodill http://github.com/rafi
|
||||
# Copyright 2016, Rafael Bodill https://github.com/rafi
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
|
|
|
|||
|
|
@ -55,6 +55,7 @@ except ImportError:
|
|||
|
||||
from beets import plugins
|
||||
from beets import ui
|
||||
from beets import util
|
||||
import beets
|
||||
|
||||
DIV_RE = re.compile(r'<(/?)div>?', re.I)
|
||||
|
|
@ -186,6 +187,9 @@ def search_pairs(item):
|
|||
In addition to the artist and title obtained from the `item` the
|
||||
method tries to strip extra information like parenthesized suffixes
|
||||
and featured artists from the strings and add them as candidates.
|
||||
The artist sort name is added as a fallback candidate to help in
|
||||
cases where artist name includes special characters or is in a
|
||||
non-latin script.
|
||||
The method also tries to split multiple titles separated with `/`.
|
||||
"""
|
||||
def generate_alternatives(string, patterns):
|
||||
|
|
@ -199,12 +203,16 @@ def search_pairs(item):
|
|||
alternatives.append(match.group(1))
|
||||
return alternatives
|
||||
|
||||
title, artist = item.title, item.artist
|
||||
title, artist, artist_sort = item.title, item.artist, item.artist_sort
|
||||
|
||||
patterns = [
|
||||
# Remove any featuring artists from the artists name
|
||||
r"(.*?) {0}".format(plugins.feat_tokens())]
|
||||
artists = generate_alternatives(artist, patterns)
|
||||
# Use the artist_sort as fallback only if it differs from artist to avoid
|
||||
# repeated remote requests with the same search terms
|
||||
if artist != artist_sort:
|
||||
artists.append(artist_sort)
|
||||
|
||||
patterns = [
|
||||
# Remove a parenthesized suffix from a title string. Common
|
||||
|
|
@ -351,62 +359,95 @@ class Genius(Backend):
|
|||
'User-Agent': USER_AGENT,
|
||||
}
|
||||
|
||||
def lyrics_from_song_api_path(self, song_api_path):
|
||||
song_url = self.base_url + song_api_path
|
||||
response = requests.get(song_url, headers=self.headers)
|
||||
json = response.json()
|
||||
path = json["response"]["song"]["path"]
|
||||
|
||||
# Gotta go regular html scraping... come on Genius.
|
||||
page_url = "https://genius.com" + path
|
||||
try:
|
||||
page = requests.get(page_url)
|
||||
except requests.RequestException as exc:
|
||||
self._log.debug(u'Genius page request for {0} failed: {1}',
|
||||
page_url, exc)
|
||||
return None
|
||||
html = BeautifulSoup(page.text, "html.parser")
|
||||
|
||||
# Remove script tags that they put in the middle of the lyrics.
|
||||
[h.extract() for h in html('script')]
|
||||
|
||||
# At least Genius is nice and has a tag called 'lyrics'!
|
||||
# Updated css where the lyrics are based in HTML.
|
||||
lyrics = html.find("div", class_="lyrics").get_text()
|
||||
|
||||
return lyrics
|
||||
|
||||
def fetch(self, artist, title):
|
||||
"""Fetch lyrics from genius.com
|
||||
|
||||
Because genius doesn't allow accesssing lyrics via the api,
|
||||
we first query the api for a url matching our artist & title,
|
||||
then attempt to scrape that url for the lyrics.
|
||||
"""
|
||||
json = self._search(artist, title)
|
||||
if not json:
|
||||
self._log.debug(u'Genius API request returned invalid JSON')
|
||||
return None
|
||||
|
||||
# find a matching artist in the json
|
||||
for hit in json["response"]["hits"]:
|
||||
hit_artist = hit["result"]["primary_artist"]["name"]
|
||||
|
||||
if slug(hit_artist) == slug(artist):
|
||||
return self._scrape_lyrics_from_html(
|
||||
self.fetch_url(hit["result"]["url"]))
|
||||
|
||||
self._log.debug(u'Genius failed to find a matching artist for \'{0}\'',
|
||||
artist)
|
||||
|
||||
def _search(self, artist, title):
|
||||
"""Searches the genius api for a given artist and title
|
||||
|
||||
https://docs.genius.com/#search-h2
|
||||
|
||||
:returns: json response
|
||||
"""
|
||||
search_url = self.base_url + "/search"
|
||||
data = {'q': title}
|
||||
data = {'q': title + " " + artist.lower()}
|
||||
try:
|
||||
response = requests.get(search_url, data=data,
|
||||
headers=self.headers)
|
||||
response = requests.get(
|
||||
search_url, data=data, headers=self.headers)
|
||||
except requests.RequestException as exc:
|
||||
self._log.debug(u'Genius API request failed: {0}', exc)
|
||||
return None
|
||||
|
||||
try:
|
||||
json = response.json()
|
||||
return response.json()
|
||||
except ValueError:
|
||||
self._log.debug(u'Genius API request returned invalid JSON')
|
||||
return None
|
||||
|
||||
song_info = None
|
||||
for hit in json["response"]["hits"]:
|
||||
if hit["result"]["primary_artist"]["name"] == artist:
|
||||
song_info = hit
|
||||
break
|
||||
def _scrape_lyrics_from_html(self, html):
|
||||
"""Scrape lyrics from a given genius.com html"""
|
||||
|
||||
if song_info:
|
||||
song_api_path = song_info["result"]["api_path"]
|
||||
return self.lyrics_from_song_api_path(song_api_path)
|
||||
html = BeautifulSoup(html, "html.parser")
|
||||
|
||||
# Remove script tags that they put in the middle of the lyrics.
|
||||
[h.extract() for h in html('script')]
|
||||
|
||||
# Most of the time, the page contains a div with class="lyrics" where
|
||||
# all of the lyrics can be found already correctly formatted
|
||||
# Sometimes, though, it packages the lyrics into separate divs, most
|
||||
# likely for easier ad placement
|
||||
lyrics_div = html.find("div", class_="lyrics")
|
||||
if not lyrics_div:
|
||||
self._log.debug(u'Received unusual song page html')
|
||||
verse_div = html.find("div",
|
||||
class_=re.compile("Lyrics__Container"))
|
||||
if not verse_div:
|
||||
if html.find("div",
|
||||
class_=re.compile("LyricsPlaceholder__Message"),
|
||||
string="This song is an instrumental"):
|
||||
self._log.debug('Detected instrumental')
|
||||
return "[Instrumental]"
|
||||
else:
|
||||
self._log.debug("Couldn't scrape page using known layouts")
|
||||
return None
|
||||
|
||||
lyrics_div = verse_div.parent
|
||||
for br in lyrics_div.find_all("br"):
|
||||
br.replace_with("\n")
|
||||
ads = lyrics_div.find_all("div",
|
||||
class_=re.compile("InreadAd__Container"))
|
||||
for ad in ads:
|
||||
ad.replace_with("\n")
|
||||
|
||||
return lyrics_div.get_text()
|
||||
|
||||
|
||||
class LyricsWiki(SymbolsReplaced):
|
||||
"""Fetch lyrics from LyricsWiki."""
|
||||
|
||||
URL_PATTERN = 'http://lyrics.wikia.com/%s:%s'
|
||||
if util.SNI_SUPPORTED:
|
||||
URL_PATTERN = 'https://lyrics.wikia.com/%s:%s'
|
||||
else:
|
||||
URL_PATTERN = 'http://lyrics.wikia.com/%s:%s'
|
||||
|
||||
def fetch(self, artist, title):
|
||||
url = self.build_url(artist, title)
|
||||
|
|
@ -522,7 +563,7 @@ class Google(Backend):
|
|||
|
||||
bad_triggers = ['lyrics', 'copyright', 'property', 'links']
|
||||
if artist:
|
||||
bad_triggers_occ += [artist]
|
||||
bad_triggers += [artist]
|
||||
|
||||
for item in bad_triggers:
|
||||
bad_triggers_occ += [item] * len(re.findall(r'\W%s\W' % item,
|
||||
|
|
@ -740,7 +781,8 @@ class LyricsPlugin(plugins.BeetsPlugin):
|
|||
write = ui.should_write()
|
||||
if opts.writerest:
|
||||
self.writerest_indexes(opts.writerest)
|
||||
for item in lib.items(ui.decargs(args)):
|
||||
items = lib.items(ui.decargs(args))
|
||||
for item in items:
|
||||
if not opts.local_only and not self.config['local']:
|
||||
self.fetch_item_lyrics(
|
||||
lib, item, write,
|
||||
|
|
@ -750,10 +792,10 @@ class LyricsPlugin(plugins.BeetsPlugin):
|
|||
if opts.printlyr:
|
||||
ui.print_(item.lyrics)
|
||||
if opts.writerest:
|
||||
self.writerest(opts.writerest, item)
|
||||
if opts.writerest:
|
||||
# flush last artist
|
||||
self.writerest(opts.writerest, None)
|
||||
self.appendrest(opts.writerest, item)
|
||||
if opts.writerest and items:
|
||||
# flush last artist & write to ReST
|
||||
self.writerest(opts.writerest)
|
||||
ui.print_(u'ReST files generated. to build, use one of:')
|
||||
ui.print_(u' sphinx-build -b html %s _build/html'
|
||||
% opts.writerest)
|
||||
|
|
@ -765,26 +807,21 @@ class LyricsPlugin(plugins.BeetsPlugin):
|
|||
cmd.func = func
|
||||
return [cmd]
|
||||
|
||||
def writerest(self, directory, item):
|
||||
"""Write the item to an ReST file
|
||||
def appendrest(self, directory, item):
|
||||
"""Append the item to an ReST file
|
||||
|
||||
This will keep state (in the `rest` variable) in order to avoid
|
||||
writing continuously to the same files.
|
||||
"""
|
||||
|
||||
if item is None or slug(self.artist) != slug(item.albumartist):
|
||||
if self.rest is not None:
|
||||
path = os.path.join(directory, 'artists',
|
||||
slug(self.artist) + u'.rst')
|
||||
with open(path, 'wb') as output:
|
||||
output.write(self.rest.encode('utf-8'))
|
||||
self.rest = None
|
||||
if item is None:
|
||||
return
|
||||
if slug(self.artist) != slug(item.albumartist):
|
||||
# Write current file and start a new one ~ item.albumartist
|
||||
self.writerest(directory)
|
||||
self.artist = item.albumartist.strip()
|
||||
self.rest = u"%s\n%s\n\n.. contents::\n :local:\n\n" \
|
||||
% (self.artist,
|
||||
u'=' * len(self.artist))
|
||||
|
||||
if self.album != item.album:
|
||||
tmpalbum = self.album = item.album.strip()
|
||||
if self.album == '':
|
||||
|
|
@ -796,6 +833,15 @@ class LyricsPlugin(plugins.BeetsPlugin):
|
|||
u'~' * len(title_str),
|
||||
block)
|
||||
|
||||
def writerest(self, directory):
|
||||
"""Write self.rest to a ReST file
|
||||
"""
|
||||
if self.rest is not None and self.artist is not None:
|
||||
path = os.path.join(directory, 'artists',
|
||||
slug(self.artist) + u'.rst')
|
||||
with open(path, 'wb') as output:
|
||||
output.write(self.rest.encode('utf-8'))
|
||||
|
||||
def writerest_indexes(self, directory):
|
||||
"""Write conf.py and index.rst files necessary for Sphinx
|
||||
|
||||
|
|
@ -877,7 +923,7 @@ class LyricsPlugin(plugins.BeetsPlugin):
|
|||
return _scrape_strip_cruft(lyrics, True)
|
||||
|
||||
def append_translation(self, text, to_lang):
|
||||
import xml.etree.ElementTree as ET
|
||||
from xml.etree import ElementTree
|
||||
|
||||
if not self.bing_auth_token:
|
||||
self.bing_auth_token = self.get_bing_access_token()
|
||||
|
|
@ -895,7 +941,8 @@ class LyricsPlugin(plugins.BeetsPlugin):
|
|||
self.bing_auth_token = None
|
||||
return self.append_translation(text, to_lang)
|
||||
return text
|
||||
lines_translated = ET.fromstring(r.text.encode('utf-8')).text
|
||||
lines_translated = ElementTree.fromstring(
|
||||
r.text.encode('utf-8')).text
|
||||
# Use a translation mapping dict to build resulting lyrics
|
||||
translations = dict(zip(text_lines, lines_translated.split('|')))
|
||||
result = ''
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ This plugin allows the user to print track information in a format that is
|
|||
parseable by the MusicBrainz track parser [1]. Programmatic submitting is not
|
||||
implemented by MusicBrainz yet.
|
||||
|
||||
[1] http://wiki.musicbrainz.org/History:How_To_Parse_Track_Listings
|
||||
[1] https://wiki.musicbrainz.org/History:How_To_Parse_Track_Listings
|
||||
"""
|
||||
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@
|
|||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beets.plugins import BeetsPlugin, apply_item_changes
|
||||
from beets import autotag, library, ui, util
|
||||
from beets.autotag import hooks
|
||||
from collections import defaultdict
|
||||
|
|
@ -27,19 +27,6 @@ import re
|
|||
MBID_REGEX = r"(\d|\w){8}-(\d|\w){4}-(\d|\w){4}-(\d|\w){4}-(\d|\w){12}"
|
||||
|
||||
|
||||
def apply_item_changes(lib, item, move, pretend, write):
|
||||
"""Store, move and write the item according to the arguments.
|
||||
"""
|
||||
if not pretend:
|
||||
# Move the item if it's in the library.
|
||||
if move and lib.directory in util.ancestry(item.path):
|
||||
item.move(with_album=False)
|
||||
|
||||
if write:
|
||||
item.try_write()
|
||||
item.store()
|
||||
|
||||
|
||||
class MBSyncPlugin(BeetsPlugin):
|
||||
def __init__(self):
|
||||
super(MBSyncPlugin, self).__init__()
|
||||
|
|
|
|||
|
|
@ -49,7 +49,7 @@ class Amarok(MetaSource):
|
|||
'amarok_lastplayed': DateType(),
|
||||
}
|
||||
|
||||
queryXML = u'<query version="1.0"> \
|
||||
query_xml = u'<query version="1.0"> \
|
||||
<filters> \
|
||||
<and><include field="filename" value=%s /></and> \
|
||||
</filters> \
|
||||
|
|
@ -72,7 +72,7 @@ class Amarok(MetaSource):
|
|||
# of the result set. So query for the filename and then try to match
|
||||
# the correct item from the results we get back
|
||||
results = self.collection.Query(
|
||||
self.queryXML % quoteattr(basename(path))
|
||||
self.query_xml % quoteattr(basename(path))
|
||||
)
|
||||
for result in results:
|
||||
if result['xesam:url'] != path:
|
||||
|
|
|
|||
212
beetsplug/parentwork.py
Normal file
212
beetsplug/parentwork.py
Normal file
|
|
@ -0,0 +1,212 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# This file is part of beets.
|
||||
# Copyright 2017, Dorian Soergel.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
|
||||
"""Gets parent work, its disambiguation and id, composer, composer sort name
|
||||
and work composition date
|
||||
"""
|
||||
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from beets import ui
|
||||
from beets.plugins import BeetsPlugin
|
||||
|
||||
import musicbrainzngs
|
||||
|
||||
|
||||
def direct_parent_id(mb_workid, work_date=None):
|
||||
"""Given a Musicbrainz work id, find the id one of the works the work is
|
||||
part of and the first composition date it encounters.
|
||||
"""
|
||||
work_info = musicbrainzngs.get_work_by_id(mb_workid,
|
||||
includes=["work-rels",
|
||||
"artist-rels"])
|
||||
if 'artist-relation-list' in work_info['work'] and work_date is None:
|
||||
for artist in work_info['work']['artist-relation-list']:
|
||||
if artist['type'] == 'composer':
|
||||
if 'end' in artist.keys():
|
||||
work_date = artist['end']
|
||||
|
||||
if 'work-relation-list' in work_info['work']:
|
||||
for direct_parent in work_info['work']['work-relation-list']:
|
||||
if direct_parent['type'] == 'parts' \
|
||||
and direct_parent.get('direction') == 'backward':
|
||||
direct_id = direct_parent['work']['id']
|
||||
return direct_id, work_date
|
||||
return None, work_date
|
||||
|
||||
|
||||
def work_parent_id(mb_workid):
|
||||
"""Find the parent work id and composition date of a work given its id.
|
||||
"""
|
||||
work_date = None
|
||||
while True:
|
||||
new_mb_workid, work_date = direct_parent_id(mb_workid, work_date)
|
||||
if not new_mb_workid:
|
||||
return mb_workid, work_date
|
||||
mb_workid = new_mb_workid
|
||||
return mb_workid, work_date
|
||||
|
||||
|
||||
def find_parentwork_info(mb_workid):
|
||||
"""Get the MusicBrainz information dict about a parent work, including
|
||||
the artist relations, and the composition date for a work's parent work.
|
||||
"""
|
||||
parent_id, work_date = work_parent_id(mb_workid)
|
||||
work_info = musicbrainzngs.get_work_by_id(parent_id,
|
||||
includes=["artist-rels"])
|
||||
return work_info, work_date
|
||||
|
||||
|
||||
class ParentWorkPlugin(BeetsPlugin):
|
||||
def __init__(self):
|
||||
super(ParentWorkPlugin, self).__init__()
|
||||
|
||||
self.config.add({
|
||||
'auto': False,
|
||||
'force': False,
|
||||
})
|
||||
|
||||
if self.config['auto']:
|
||||
self.import_stages = [self.imported]
|
||||
|
||||
def commands(self):
|
||||
|
||||
def func(lib, opts, args):
|
||||
self.config.set_args(opts)
|
||||
force_parent = self.config['force'].get(bool)
|
||||
write = ui.should_write()
|
||||
|
||||
for item in lib.items(ui.decargs(args)):
|
||||
changed = self.find_work(item, force_parent)
|
||||
if changed:
|
||||
item.store()
|
||||
if write:
|
||||
item.try_write()
|
||||
command = ui.Subcommand(
|
||||
'parentwork',
|
||||
help=u'fetche parent works, composers and dates')
|
||||
|
||||
command.parser.add_option(
|
||||
u'-f', u'--force', dest='force',
|
||||
action='store_true', default=None,
|
||||
help=u're-fetch when parent work is already present')
|
||||
|
||||
command.func = func
|
||||
return [command]
|
||||
|
||||
def imported(self, session, task):
|
||||
"""Import hook for fetching parent works automatically.
|
||||
"""
|
||||
force_parent = self.config['force'].get(bool)
|
||||
|
||||
for item in task.imported_items():
|
||||
self.find_work(item, force_parent)
|
||||
item.store()
|
||||
|
||||
def get_info(self, item, work_info):
|
||||
"""Given the parent work info dict, fetch parent_composer,
|
||||
parent_composer_sort, parentwork, parentwork_disambig, mb_workid and
|
||||
composer_ids.
|
||||
"""
|
||||
|
||||
parent_composer = []
|
||||
parent_composer_sort = []
|
||||
parentwork_info = {}
|
||||
|
||||
composer_exists = False
|
||||
if 'artist-relation-list' in work_info['work']:
|
||||
for artist in work_info['work']['artist-relation-list']:
|
||||
if artist['type'] == 'composer':
|
||||
parent_composer.append(artist['artist']['name'])
|
||||
parent_composer_sort.append(artist['artist']['sort-name'])
|
||||
if 'end' in artist.keys():
|
||||
parentwork_info["parentwork_date"] = artist['end']
|
||||
|
||||
parentwork_info['parent_composer'] = u', '.join(parent_composer)
|
||||
parentwork_info['parent_composer_sort'] = u', '.join(
|
||||
parent_composer_sort)
|
||||
|
||||
if not composer_exists:
|
||||
self._log.debug(
|
||||
'no composer for {}; add one at '
|
||||
'https://musicbrainz.org/work/{}',
|
||||
item, work_info['work']['id'],
|
||||
)
|
||||
|
||||
parentwork_info['parentwork'] = work_info['work']['title']
|
||||
parentwork_info['mb_parentworkid'] = work_info['work']['id']
|
||||
|
||||
if 'disambiguation' in work_info['work']:
|
||||
parentwork_info['parentwork_disambig'] = work_info[
|
||||
'work']['disambiguation']
|
||||
|
||||
else:
|
||||
parentwork_info['parentwork_disambig'] = None
|
||||
|
||||
return parentwork_info
|
||||
|
||||
def find_work(self, item, force):
|
||||
"""Finds the parent work of a recording and populates the tags
|
||||
accordingly.
|
||||
|
||||
The parent work is found recursively, by finding the direct parent
|
||||
repeatedly until there are no more links in the chain. We return the
|
||||
final, topmost work in the chain.
|
||||
|
||||
Namely, the tags parentwork, parentwork_disambig, mb_parentworkid,
|
||||
parent_composer, parent_composer_sort and work_date are populated.
|
||||
"""
|
||||
|
||||
if not item.mb_workid:
|
||||
self._log.info('No work for {}, \
|
||||
add one at https://musicbrainz.org/recording/{}', item, item.mb_trackid)
|
||||
return
|
||||
|
||||
hasparent = hasattr(item, 'parentwork')
|
||||
work_changed = True
|
||||
if hasattr(item, 'parentwork_workid_current'):
|
||||
work_changed = item.parentwork_workid_current != item.mb_workid
|
||||
if force or not hasparent or work_changed:
|
||||
try:
|
||||
work_info, work_date = find_parentwork_info(item.mb_workid)
|
||||
except musicbrainzngs.musicbrainz.WebServiceError as e:
|
||||
self._log.debug("error fetching work: {}", e)
|
||||
return
|
||||
parent_info = self.get_info(item, work_info)
|
||||
parent_info['parentwork_workid_current'] = item.mb_workid
|
||||
if 'parent_composer' in parent_info:
|
||||
self._log.debug("Work fetched: {} - {}",
|
||||
parent_info['parentwork'],
|
||||
parent_info['parent_composer'])
|
||||
else:
|
||||
self._log.debug("Work fetched: {} - no parent composer",
|
||||
parent_info['parentwork'])
|
||||
|
||||
elif hasparent:
|
||||
self._log.debug("{}: Work present, skipping", item)
|
||||
return
|
||||
|
||||
# apply all non-null values to the item
|
||||
for key, value in parent_info.items():
|
||||
if value:
|
||||
item[key] = value
|
||||
|
||||
if work_date:
|
||||
item['work_date'] = work_date
|
||||
return ui.show_model_changes(
|
||||
item, fields=['parentwork', 'parentwork_disambig',
|
||||
'mb_parentworkid', 'parent_composer',
|
||||
'parent_composer_sort', 'work_date',
|
||||
'parentwork_workid_current', 'parentwork_date'])
|
||||
|
|
@ -18,6 +18,7 @@ import os
|
|||
import fnmatch
|
||||
import tempfile
|
||||
import beets
|
||||
from beets.util import path_as_posix
|
||||
|
||||
|
||||
class PlaylistQuery(beets.dbcore.Query):
|
||||
|
|
@ -70,7 +71,7 @@ class PlaylistQuery(beets.dbcore.Query):
|
|||
if not self.paths:
|
||||
# Playlist is empty
|
||||
return '0', ()
|
||||
clause = 'path IN ({0})'.format(', '.join('?' for path in self.paths))
|
||||
clause = 'path IN ({0})'.format(', '.join('?' for path in self.paths))
|
||||
return clause, (beets.library.BLOB_TYPE(p) for p in self.paths)
|
||||
|
||||
def match(self, item):
|
||||
|
|
@ -86,6 +87,7 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin):
|
|||
'auto': False,
|
||||
'playlist_dir': '.',
|
||||
'relative_to': 'library',
|
||||
'forward_slash': False,
|
||||
})
|
||||
|
||||
self.playlist_dir = self.config['playlist_dir'].as_filename()
|
||||
|
|
@ -160,6 +162,8 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin):
|
|||
try:
|
||||
new_path = self.changes[beets.util.normpath(lookup)]
|
||||
except KeyError:
|
||||
if self.config['forward_slash']:
|
||||
line = path_as_posix(line)
|
||||
tempfp.write(line)
|
||||
else:
|
||||
if new_path is None:
|
||||
|
|
@ -170,8 +174,10 @@ class PlaylistPlugin(beets.plugins.BeetsPlugin):
|
|||
changes += 1
|
||||
if is_relative:
|
||||
new_path = os.path.relpath(new_path, base_dir)
|
||||
|
||||
tempfp.write(line.replace(original_path, new_path))
|
||||
line = line.replace(original_path, new_path)
|
||||
if self.config['forward_slash']:
|
||||
line = path_as_posix(line)
|
||||
tempfp.write(line)
|
||||
|
||||
if changes or deletions:
|
||||
self._log.info(
|
||||
|
|
|
|||
|
|
@ -12,39 +12,49 @@ Put something like the following in your config.yaml to configure:
|
|||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import requests
|
||||
import xml.etree.ElementTree as ET
|
||||
from xml.etree import ElementTree
|
||||
from six.moves.urllib.parse import urljoin, urlencode
|
||||
from beets import config
|
||||
from beets.plugins import BeetsPlugin
|
||||
|
||||
|
||||
def get_music_section(host, port, token, library_name):
|
||||
def get_music_section(host, port, token, library_name, secure,
|
||||
ignore_cert_errors):
|
||||
"""Getting the section key for the music library in Plex.
|
||||
"""
|
||||
api_endpoint = append_token('library/sections', token)
|
||||
url = urljoin('http://{0}:{1}'.format(host, port), api_endpoint)
|
||||
url = urljoin('{0}://{1}:{2}'.format(get_protocol(secure), host,
|
||||
port), api_endpoint)
|
||||
|
||||
# Sends request.
|
||||
r = requests.get(url)
|
||||
r = requests.get(url, verify=not ignore_cert_errors)
|
||||
|
||||
# Parse xml tree and extract music section key.
|
||||
tree = ET.fromstring(r.content)
|
||||
tree = ElementTree.fromstring(r.content)
|
||||
for child in tree.findall('Directory'):
|
||||
if child.get('title') == library_name:
|
||||
return child.get('key')
|
||||
|
||||
|
||||
def update_plex(host, port, token, library_name):
|
||||
def update_plex(host, port, token, library_name, secure,
|
||||
ignore_cert_errors):
|
||||
"""Ignore certificate errors if configured to.
|
||||
"""
|
||||
if ignore_cert_errors:
|
||||
import urllib3
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
"""Sends request to the Plex api to start a library refresh.
|
||||
"""
|
||||
# Getting section key and build url.
|
||||
section_key = get_music_section(host, port, token, library_name)
|
||||
section_key = get_music_section(host, port, token, library_name,
|
||||
secure, ignore_cert_errors)
|
||||
api_endpoint = 'library/sections/{0}/refresh'.format(section_key)
|
||||
api_endpoint = append_token(api_endpoint, token)
|
||||
url = urljoin('http://{0}:{1}'.format(host, port), api_endpoint)
|
||||
url = urljoin('{0}://{1}:{2}'.format(get_protocol(secure), host,
|
||||
port), api_endpoint)
|
||||
|
||||
# Sends request and returns requests object.
|
||||
r = requests.get(url)
|
||||
r = requests.get(url, verify=not ignore_cert_errors)
|
||||
return r
|
||||
|
||||
|
||||
|
|
@ -56,6 +66,13 @@ def append_token(url, token):
|
|||
return url
|
||||
|
||||
|
||||
def get_protocol(secure):
|
||||
if secure:
|
||||
return 'https'
|
||||
else:
|
||||
return 'http'
|
||||
|
||||
|
||||
class PlexUpdate(BeetsPlugin):
|
||||
def __init__(self):
|
||||
super(PlexUpdate, self).__init__()
|
||||
|
|
@ -65,7 +82,9 @@ class PlexUpdate(BeetsPlugin):
|
|||
u'host': u'localhost',
|
||||
u'port': 32400,
|
||||
u'token': u'',
|
||||
u'library_name': u'Music'})
|
||||
u'library_name': u'Music',
|
||||
u'secure': False,
|
||||
u'ignore_cert_errors': False})
|
||||
|
||||
config['plex']['token'].redact = True
|
||||
self.register_listener('database_change', self.listen_for_db_change)
|
||||
|
|
@ -85,7 +104,9 @@ class PlexUpdate(BeetsPlugin):
|
|||
config['plex']['host'].get(),
|
||||
config['plex']['port'].get(),
|
||||
config['plex']['token'].get(),
|
||||
config['plex']['library_name'].get())
|
||||
config['plex']['library_name'].get(),
|
||||
config['plex']['secure'].get(bool),
|
||||
config['plex']['ignore_cert_errors'].get(bool))
|
||||
self._log.info(u'... started.')
|
||||
|
||||
except requests.exceptions.RequestException:
|
||||
|
|
|
|||
|
|
@ -18,8 +18,11 @@ from __future__ import division, absolute_import, print_function
|
|||
import subprocess
|
||||
import os
|
||||
import collections
|
||||
import math
|
||||
import sys
|
||||
import warnings
|
||||
import enum
|
||||
import re
|
||||
import xml.parsers.expat
|
||||
from six.moves import zip
|
||||
|
||||
|
|
@ -47,12 +50,12 @@ class FatalGstreamerPluginReplayGainError(FatalReplayGainError):
|
|||
loading the required plugins."""
|
||||
|
||||
|
||||
def call(args):
|
||||
def call(args, **kwargs):
|
||||
"""Execute the command and return its output or raise a
|
||||
ReplayGainError on failure.
|
||||
"""
|
||||
try:
|
||||
return command_output(args)
|
||||
return command_output(args, **kwargs)
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise ReplayGainError(
|
||||
u"{0} exited with status {1}".format(args[0], e.returncode)
|
||||
|
|
@ -64,12 +67,45 @@ def call(args):
|
|||
raise ReplayGainError(u"argument encoding failed")
|
||||
|
||||
|
||||
def after_version(version_a, version_b):
|
||||
return tuple(int(s) for s in version_a.split('.')) \
|
||||
>= tuple(int(s) for s in version_b.split('.'))
|
||||
|
||||
|
||||
def db_to_lufs(db):
|
||||
"""Convert db to LUFS.
|
||||
|
||||
According to https://wiki.hydrogenaud.io/index.php?title=
|
||||
ReplayGain_2.0_specification#Reference_level
|
||||
"""
|
||||
return db - 107
|
||||
|
||||
|
||||
def lufs_to_db(db):
|
||||
"""Convert LUFS to db.
|
||||
|
||||
According to https://wiki.hydrogenaud.io/index.php?title=
|
||||
ReplayGain_2.0_specification#Reference_level
|
||||
"""
|
||||
return db + 107
|
||||
|
||||
|
||||
# Backend base and plumbing classes.
|
||||
|
||||
# gain: in LU to reference level
|
||||
# peak: part of full scale (FS is 1.0)
|
||||
Gain = collections.namedtuple("Gain", "gain peak")
|
||||
# album_gain: Gain object
|
||||
# track_gains: list of Gain objects
|
||||
AlbumGain = collections.namedtuple("AlbumGain", "album_gain track_gains")
|
||||
|
||||
|
||||
class Peak(enum.Enum):
|
||||
none = 0
|
||||
true = 1
|
||||
sample = 2
|
||||
|
||||
|
||||
class Backend(object):
|
||||
"""An abstract class representing engine for calculating RG values.
|
||||
"""
|
||||
|
|
@ -80,12 +116,16 @@ class Backend(object):
|
|||
"""
|
||||
self._log = log
|
||||
|
||||
def compute_track_gain(self, items):
|
||||
def compute_track_gain(self, items, target_level, peak):
|
||||
"""Computes the track gain of the given tracks, returns a list
|
||||
of Gain objects.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def compute_album_gain(self, album):
|
||||
# TODO: implement album gain in terms of track gain of the
|
||||
# individual tracks which can be used for any backend.
|
||||
def compute_album_gain(self, items, target_level, peak):
|
||||
"""Computes the album gain of the given album, returns an
|
||||
AlbumGain object.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
|
|
@ -95,45 +135,55 @@ class Bs1770gainBackend(Backend):
|
|||
its flavors EBU R128, ATSC A/85 and Replaygain 2.0.
|
||||
"""
|
||||
|
||||
methods = {
|
||||
-24: "atsc",
|
||||
-23: "ebu",
|
||||
-18: "replaygain",
|
||||
}
|
||||
|
||||
def __init__(self, config, log):
|
||||
super(Bs1770gainBackend, self).__init__(config, log)
|
||||
config.add({
|
||||
'chunk_at': 5000,
|
||||
'method': 'replaygain',
|
||||
'method': '',
|
||||
})
|
||||
self.chunk_at = config['chunk_at'].as_number()
|
||||
self.method = '--' + config['method'].as_str()
|
||||
# backward compatibility to `method` config option
|
||||
self.__method = config['method'].as_str()
|
||||
|
||||
cmd = 'bs1770gain'
|
||||
try:
|
||||
call([cmd, self.method])
|
||||
version_out = call([cmd, '--version'])
|
||||
self.command = cmd
|
||||
self.version = re.search(
|
||||
'bs1770gain ([0-9]+.[0-9]+.[0-9]+), ',
|
||||
version_out.stdout.decode('utf-8')
|
||||
).group(1)
|
||||
except OSError:
|
||||
raise FatalReplayGainError(
|
||||
u'Is bs1770gain installed? Is your method in config correct?'
|
||||
u'Is bs1770gain installed?'
|
||||
)
|
||||
if not self.command:
|
||||
raise FatalReplayGainError(
|
||||
u'no replaygain command found: install bs1770gain'
|
||||
)
|
||||
|
||||
def compute_track_gain(self, items):
|
||||
def compute_track_gain(self, items, target_level, peak):
|
||||
"""Computes the track gain of the given tracks, returns a list
|
||||
of TrackGain objects.
|
||||
"""
|
||||
|
||||
output = self.compute_gain(items, False)
|
||||
output = self.compute_gain(items, target_level, False)
|
||||
return output
|
||||
|
||||
def compute_album_gain(self, album):
|
||||
def compute_album_gain(self, items, target_level, peak):
|
||||
"""Computes the album gain of the given album, returns an
|
||||
AlbumGain object.
|
||||
"""
|
||||
# TODO: What should be done when not all tracks in the album are
|
||||
# supported?
|
||||
|
||||
supported_items = album.items()
|
||||
output = self.compute_gain(supported_items, True)
|
||||
output = self.compute_gain(items, target_level, True)
|
||||
|
||||
if not output:
|
||||
raise ReplayGainError(u'no output from bs1770gain')
|
||||
|
|
@ -158,7 +208,7 @@ class Bs1770gainBackend(Backend):
|
|||
else:
|
||||
break
|
||||
|
||||
def compute_gain(self, items, is_album):
|
||||
def compute_gain(self, items, target_level, is_album):
|
||||
"""Computes the track or album gain of a list of items, returns
|
||||
a list of TrackGain objects.
|
||||
When computing album gain, the last TrackGain object returned is
|
||||
|
|
@ -179,23 +229,45 @@ class Bs1770gainBackend(Backend):
|
|||
i = 0
|
||||
for chunk in self.isplitter(items, self.chunk_at):
|
||||
i += 1
|
||||
returnchunk = self.compute_chunk_gain(chunk, is_album)
|
||||
returnchunk = self.compute_chunk_gain(
|
||||
chunk,
|
||||
is_album,
|
||||
target_level
|
||||
)
|
||||
albumgaintot += returnchunk[-1].gain
|
||||
albumpeaktot = max(albumpeaktot, returnchunk[-1].peak)
|
||||
returnchunks = returnchunks + returnchunk[0:-1]
|
||||
returnchunks.append(Gain(albumgaintot / i, albumpeaktot))
|
||||
return returnchunks
|
||||
else:
|
||||
return self.compute_chunk_gain(items, is_album)
|
||||
return self.compute_chunk_gain(items, is_album, target_level)
|
||||
|
||||
def compute_chunk_gain(self, items, is_album):
|
||||
def compute_chunk_gain(self, items, is_album, target_level):
|
||||
"""Compute ReplayGain values and return a list of results
|
||||
dictionaries as given by `parse_tool_output`.
|
||||
"""
|
||||
# choose method
|
||||
target_level = db_to_lufs(target_level)
|
||||
if self.__method != "":
|
||||
# backward compatibility to `method` option
|
||||
method = self.__method
|
||||
gain_adjustment = target_level \
|
||||
- [k for k, v in self.methods.items() if v == method][0]
|
||||
elif target_level in self.methods:
|
||||
method = self.methods[target_level]
|
||||
gain_adjustment = 0
|
||||
else:
|
||||
lufs_target = -23
|
||||
method = self.methods[lufs_target]
|
||||
gain_adjustment = target_level - lufs_target
|
||||
|
||||
# Construct shell command.
|
||||
cmd = [self.command]
|
||||
cmd += [self.method]
|
||||
cmd += ["--" + method]
|
||||
cmd += ['--xml', '-p']
|
||||
if after_version(self.version, '0.6.0'):
|
||||
cmd += ['--unit=ebu'] # set units to LU
|
||||
cmd += ['--suppress-progress'] # don't print % to XML output
|
||||
|
||||
# Workaround for Windows: the underlying tool fails on paths
|
||||
# with the \\?\ prefix, so we don't use it here. This
|
||||
|
|
@ -207,10 +279,17 @@ class Bs1770gainBackend(Backend):
|
|||
self._log.debug(
|
||||
u'executing {0}', u' '.join(map(displayable_path, args))
|
||||
)
|
||||
output = call(args)
|
||||
output = call(args).stdout
|
||||
|
||||
self._log.debug(u'analysis finished: {0}', output)
|
||||
results = self.parse_tool_output(output, path_list, is_album)
|
||||
|
||||
if gain_adjustment:
|
||||
results = [
|
||||
Gain(res.gain + gain_adjustment, res.peak)
|
||||
for res in results
|
||||
]
|
||||
|
||||
self._log.debug(u'{0} items, {1} results', len(items), len(results))
|
||||
return results
|
||||
|
||||
|
|
@ -223,6 +302,7 @@ class Bs1770gainBackend(Backend):
|
|||
album_gain = {} # mutable variable so it can be set from handlers
|
||||
parser = xml.parsers.expat.ParserCreate(encoding='utf-8')
|
||||
state = {'file': None, 'gain': None, 'peak': None}
|
||||
album_state = {'gain': None, 'peak': None}
|
||||
|
||||
def start_element_handler(name, attrs):
|
||||
if name == u'track':
|
||||
|
|
@ -231,9 +311,13 @@ class Bs1770gainBackend(Backend):
|
|||
raise ReplayGainError(
|
||||
u'duplicate filename in bs1770gain output')
|
||||
elif name == u'integrated':
|
||||
state['gain'] = float(attrs[u'lu'])
|
||||
if 'lu' in attrs:
|
||||
state['gain'] = float(attrs[u'lu'])
|
||||
elif name == u'sample-peak':
|
||||
state['peak'] = float(attrs[u'factor'])
|
||||
if 'factor' in attrs:
|
||||
state['peak'] = float(attrs[u'factor'])
|
||||
elif 'amplitude' in attrs:
|
||||
state['peak'] = float(attrs[u'amplitude'])
|
||||
|
||||
def end_element_handler(name):
|
||||
if name == u'track':
|
||||
|
|
@ -249,6 +333,17 @@ class Bs1770gainBackend(Backend):
|
|||
'the output of bs1770gain')
|
||||
album_gain["album"] = Gain(state['gain'], state['peak'])
|
||||
state['gain'] = state['peak'] = None
|
||||
elif len(per_file_gain) == len(path_list):
|
||||
if state['gain'] is not None:
|
||||
album_state['gain'] = state['gain']
|
||||
if state['peak'] is not None:
|
||||
album_state['peak'] = state['peak']
|
||||
if album_state['gain'] is not None \
|
||||
and album_state['peak'] is not None:
|
||||
album_gain["album"] = Gain(
|
||||
album_state['gain'], album_state['peak'])
|
||||
state['gain'] = state['peak'] = None
|
||||
|
||||
parser.StartElementHandler = start_element_handler
|
||||
parser.EndElementHandler = end_element_handler
|
||||
|
||||
|
|
@ -279,6 +374,250 @@ class Bs1770gainBackend(Backend):
|
|||
return out
|
||||
|
||||
|
||||
# ffmpeg backend
|
||||
class FfmpegBackend(Backend):
|
||||
"""A replaygain backend using ffmpeg's ebur128 filter.
|
||||
"""
|
||||
def __init__(self, config, log):
|
||||
super(FfmpegBackend, self).__init__(config, log)
|
||||
self._ffmpeg_path = "ffmpeg"
|
||||
|
||||
# check that ffmpeg is installed
|
||||
try:
|
||||
ffmpeg_version_out = call([self._ffmpeg_path, "-version"])
|
||||
except OSError:
|
||||
raise FatalReplayGainError(
|
||||
u"could not find ffmpeg at {0}".format(self._ffmpeg_path)
|
||||
)
|
||||
incompatible_ffmpeg = True
|
||||
for line in ffmpeg_version_out.stdout.splitlines():
|
||||
if line.startswith(b"configuration:"):
|
||||
if b"--enable-libebur128" in line:
|
||||
incompatible_ffmpeg = False
|
||||
if line.startswith(b"libavfilter"):
|
||||
version = line.split(b" ", 1)[1].split(b"/", 1)[0].split(b".")
|
||||
version = tuple(map(int, version))
|
||||
if version >= (6, 67, 100):
|
||||
incompatible_ffmpeg = False
|
||||
if incompatible_ffmpeg:
|
||||
raise FatalReplayGainError(
|
||||
u"Installed FFmpeg version does not support ReplayGain."
|
||||
u"calculation. Either libavfilter version 6.67.100 or above or"
|
||||
u"the --enable-libebur128 configuration option is required."
|
||||
)
|
||||
|
||||
def compute_track_gain(self, items, target_level, peak):
|
||||
"""Computes the track gain of the given tracks, returns a list
|
||||
of Gain objects (the track gains).
|
||||
"""
|
||||
gains = []
|
||||
for item in items:
|
||||
gains.append(
|
||||
self._analyse_item(
|
||||
item,
|
||||
target_level,
|
||||
peak,
|
||||
count_blocks=False,
|
||||
)[0] # take only the gain, discarding number of gating blocks
|
||||
)
|
||||
return gains
|
||||
|
||||
def compute_album_gain(self, items, target_level, peak):
|
||||
"""Computes the album gain of the given album, returns an
|
||||
AlbumGain object.
|
||||
"""
|
||||
target_level_lufs = db_to_lufs(target_level)
|
||||
|
||||
# analyse tracks
|
||||
# list of track Gain objects
|
||||
track_gains = []
|
||||
# maximum peak
|
||||
album_peak = 0
|
||||
# sum of BS.1770 gating block powers
|
||||
sum_powers = 0
|
||||
# total number of BS.1770 gating blocks
|
||||
n_blocks = 0
|
||||
|
||||
for item in items:
|
||||
track_gain, track_n_blocks = self._analyse_item(
|
||||
item, target_level, peak
|
||||
)
|
||||
track_gains.append(track_gain)
|
||||
|
||||
# album peak is maximum track peak
|
||||
album_peak = max(album_peak, track_gain.peak)
|
||||
|
||||
# prepare album_gain calculation
|
||||
# total number of blocks is sum of track blocks
|
||||
n_blocks += track_n_blocks
|
||||
|
||||
# convert `LU to target_level` -> LUFS
|
||||
track_loudness = target_level_lufs - track_gain.gain
|
||||
# This reverses ITU-R BS.1770-4 p. 6 equation (5) to convert
|
||||
# from loudness to power. The result is the average gating
|
||||
# block power.
|
||||
track_power = 10**((track_loudness + 0.691) / 10)
|
||||
|
||||
# Weight that average power by the number of gating blocks to
|
||||
# get the sum of all their powers. Add that to the sum of all
|
||||
# block powers in this album.
|
||||
sum_powers += track_power * track_n_blocks
|
||||
|
||||
# calculate album gain
|
||||
if n_blocks > 0:
|
||||
# compare ITU-R BS.1770-4 p. 6 equation (5)
|
||||
# Album gain is the replaygain of the concatenation of all tracks.
|
||||
album_gain = -0.691 + 10 * math.log10(sum_powers / n_blocks)
|
||||
else:
|
||||
album_gain = -70
|
||||
# convert LUFS -> `LU to target_level`
|
||||
album_gain = target_level_lufs - album_gain
|
||||
|
||||
self._log.debug(
|
||||
u"{0}: gain {1} LU, peak {2}"
|
||||
.format(items, album_gain, album_peak)
|
||||
)
|
||||
|
||||
return AlbumGain(Gain(album_gain, album_peak), track_gains)
|
||||
|
||||
def _construct_cmd(self, item, peak_method):
|
||||
"""Construct the shell command to analyse items."""
|
||||
return [
|
||||
self._ffmpeg_path,
|
||||
"-nostats",
|
||||
"-hide_banner",
|
||||
"-i",
|
||||
item.path,
|
||||
"-map",
|
||||
"a:0",
|
||||
"-filter",
|
||||
"ebur128=peak={0}".format(peak_method),
|
||||
"-f",
|
||||
"null",
|
||||
"-",
|
||||
]
|
||||
|
||||
def _analyse_item(self, item, target_level, peak, count_blocks=True):
|
||||
"""Analyse item. Return a pair of a Gain object and the number
|
||||
of gating blocks above the threshold.
|
||||
|
||||
If `count_blocks` is False, the number of gating blocks returned
|
||||
will be 0.
|
||||
"""
|
||||
target_level_lufs = db_to_lufs(target_level)
|
||||
peak_method = peak.name
|
||||
|
||||
# call ffmpeg
|
||||
self._log.debug(u"analyzing {0}".format(item))
|
||||
cmd = self._construct_cmd(item, peak_method)
|
||||
self._log.debug(
|
||||
u'executing {0}', u' '.join(map(displayable_path, cmd))
|
||||
)
|
||||
output = call(cmd).stderr.splitlines()
|
||||
|
||||
# parse output
|
||||
|
||||
if peak == Peak.none:
|
||||
peak = 0
|
||||
else:
|
||||
line_peak = self._find_line(
|
||||
output,
|
||||
" {0} peak:".format(peak_method.capitalize()).encode(),
|
||||
start_line=len(output) - 1, step_size=-1,
|
||||
)
|
||||
peak = self._parse_float(
|
||||
output[self._find_line(
|
||||
output, b" Peak:",
|
||||
line_peak,
|
||||
)]
|
||||
)
|
||||
# convert TPFS -> part of FS
|
||||
peak = 10**(peak / 20)
|
||||
|
||||
line_integrated_loudness = self._find_line(
|
||||
output, b" Integrated loudness:",
|
||||
start_line=len(output) - 1, step_size=-1,
|
||||
)
|
||||
gain = self._parse_float(
|
||||
output[self._find_line(
|
||||
output, b" I:",
|
||||
line_integrated_loudness,
|
||||
)]
|
||||
)
|
||||
# convert LUFS -> LU from target level
|
||||
gain = target_level_lufs - gain
|
||||
|
||||
# count BS.1770 gating blocks
|
||||
n_blocks = 0
|
||||
if count_blocks:
|
||||
gating_threshold = self._parse_float(
|
||||
output[self._find_line(
|
||||
output, b" Threshold:",
|
||||
start_line=line_integrated_loudness,
|
||||
)]
|
||||
)
|
||||
for line in output:
|
||||
if not line.startswith(b"[Parsed_ebur128"):
|
||||
continue
|
||||
if line.endswith(b"Summary:"):
|
||||
continue
|
||||
line = line.split(b"M:", 1)
|
||||
if len(line) < 2:
|
||||
continue
|
||||
if self._parse_float(b"M: " + line[1]) >= gating_threshold:
|
||||
n_blocks += 1
|
||||
self._log.debug(
|
||||
u"{0}: {1} blocks over {2} LUFS"
|
||||
.format(item, n_blocks, gating_threshold)
|
||||
)
|
||||
|
||||
self._log.debug(
|
||||
u"{0}: gain {1} LU, peak {2}"
|
||||
.format(item, gain, peak)
|
||||
)
|
||||
|
||||
return Gain(gain, peak), n_blocks
|
||||
|
||||
def _find_line(self, output, search, start_line=0, step_size=1):
|
||||
"""Return index of line beginning with `search`.
|
||||
|
||||
Begins searching at index `start_line` in `output`.
|
||||
"""
|
||||
end_index = len(output) if step_size > 0 else -1
|
||||
for i in range(start_line, end_index, step_size):
|
||||
if output[i].startswith(search):
|
||||
return i
|
||||
raise ReplayGainError(
|
||||
u"ffmpeg output: missing {0} after line {1}"
|
||||
.format(repr(search), start_line)
|
||||
)
|
||||
|
||||
def _parse_float(self, line):
|
||||
"""Extract a float from a key value pair in `line`.
|
||||
|
||||
This format is expected: /[^:]:[[:space:]]*value.*/, where `value` is
|
||||
the float.
|
||||
"""
|
||||
# extract value
|
||||
value = line.split(b":", 1)
|
||||
if len(value) < 2:
|
||||
raise ReplayGainError(
|
||||
u"ffmpeg output: expected key value pair, found {0}"
|
||||
.format(line)
|
||||
)
|
||||
value = value[1].lstrip()
|
||||
# strip unit
|
||||
value = value.split(b" ", 1)[0]
|
||||
# cast value to float
|
||||
try:
|
||||
return float(value)
|
||||
except ValueError:
|
||||
raise ReplayGainError(
|
||||
u"ffmpeg output: expected float value, found {0}"
|
||||
.format(value)
|
||||
)
|
||||
|
||||
|
||||
# mpgain/aacgain CLI tool backend.
|
||||
class CommandBackend(Backend):
|
||||
|
||||
|
|
@ -312,30 +651,28 @@ class CommandBackend(Backend):
|
|||
)
|
||||
|
||||
self.noclip = config['noclip'].get(bool)
|
||||
target_level = config['targetlevel'].as_number()
|
||||
self.gain_offset = int(target_level - 89)
|
||||
|
||||
def compute_track_gain(self, items):
|
||||
def compute_track_gain(self, items, target_level, peak):
|
||||
"""Computes the track gain of the given tracks, returns a list
|
||||
of TrackGain objects.
|
||||
"""
|
||||
supported_items = list(filter(self.format_supported, items))
|
||||
output = self.compute_gain(supported_items, False)
|
||||
output = self.compute_gain(supported_items, target_level, False)
|
||||
return output
|
||||
|
||||
def compute_album_gain(self, album):
|
||||
def compute_album_gain(self, items, target_level, peak):
|
||||
"""Computes the album gain of the given album, returns an
|
||||
AlbumGain object.
|
||||
"""
|
||||
# TODO: What should be done when not all tracks in the album are
|
||||
# supported?
|
||||
|
||||
supported_items = list(filter(self.format_supported, album.items()))
|
||||
if len(supported_items) != len(album.items()):
|
||||
supported_items = list(filter(self.format_supported, items))
|
||||
if len(supported_items) != len(items):
|
||||
self._log.debug(u'tracks are of unsupported format')
|
||||
return AlbumGain(None, [])
|
||||
|
||||
output = self.compute_gain(supported_items, True)
|
||||
output = self.compute_gain(supported_items, target_level, True)
|
||||
return AlbumGain(output[-1], output[:-1])
|
||||
|
||||
def format_supported(self, item):
|
||||
|
|
@ -347,7 +684,7 @@ class CommandBackend(Backend):
|
|||
return False
|
||||
return True
|
||||
|
||||
def compute_gain(self, items, is_album):
|
||||
def compute_gain(self, items, target_level, is_album):
|
||||
"""Computes the track or album gain of a list of items, returns
|
||||
a list of TrackGain objects.
|
||||
|
||||
|
|
@ -374,12 +711,12 @@ class CommandBackend(Backend):
|
|||
else:
|
||||
# Disable clipping warning.
|
||||
cmd = cmd + ['-c']
|
||||
cmd = cmd + ['-d', str(self.gain_offset)]
|
||||
cmd = cmd + ['-d', str(int(target_level - 89))]
|
||||
cmd = cmd + [syspath(i.path) for i in items]
|
||||
|
||||
self._log.debug(u'analyzing {0} files', len(items))
|
||||
self._log.debug(u"executing {0}", " ".join(map(displayable_path, cmd)))
|
||||
output = call(cmd)
|
||||
output = call(cmd).stdout
|
||||
self._log.debug(u'analysis finished')
|
||||
return self.parse_tool_output(output,
|
||||
len(items) + (1 if is_album else 0))
|
||||
|
|
@ -437,8 +774,6 @@ class GStreamerBackend(Backend):
|
|||
# to rganalsys should have their gain computed, even if it
|
||||
# already exists.
|
||||
self._rg.set_property("forced", True)
|
||||
self._rg.set_property("reference-level",
|
||||
config["targetlevel"].as_number())
|
||||
self._sink = self.Gst.ElementFactory.make("fakesink", "sink")
|
||||
|
||||
self._pipe = self.Gst.Pipeline()
|
||||
|
|
@ -499,7 +834,7 @@ class GStreamerBackend(Backend):
|
|||
self.GLib = GLib
|
||||
self.Gst = Gst
|
||||
|
||||
def compute(self, files, album):
|
||||
def compute(self, files, target_level, album):
|
||||
self._error = None
|
||||
self._files = list(files)
|
||||
|
||||
|
|
@ -508,6 +843,8 @@ class GStreamerBackend(Backend):
|
|||
|
||||
self._file_tags = collections.defaultdict(dict)
|
||||
|
||||
self._rg.set_property("reference-level", target_level)
|
||||
|
||||
if album:
|
||||
self._rg.set_property("num-tracks", len(self._files))
|
||||
|
||||
|
|
@ -516,8 +853,8 @@ class GStreamerBackend(Backend):
|
|||
if self._error is not None:
|
||||
raise self._error
|
||||
|
||||
def compute_track_gain(self, items):
|
||||
self.compute(items, False)
|
||||
def compute_track_gain(self, items, target_level, peak):
|
||||
self.compute(items, target_level, False)
|
||||
if len(self._file_tags) != len(items):
|
||||
raise ReplayGainError(u"Some tracks did not receive tags")
|
||||
|
||||
|
|
@ -528,9 +865,9 @@ class GStreamerBackend(Backend):
|
|||
|
||||
return ret
|
||||
|
||||
def compute_album_gain(self, album):
|
||||
items = list(album.items())
|
||||
self.compute(items, True)
|
||||
def compute_album_gain(self, items, target_level, peak):
|
||||
items = list(items)
|
||||
self.compute(items, target_level, True)
|
||||
if len(self._file_tags) != len(items):
|
||||
raise ReplayGainError(u"Some items in album did not receive tags")
|
||||
|
||||
|
|
@ -714,7 +1051,7 @@ class AudioToolsBackend(Backend):
|
|||
file format is not supported
|
||||
"""
|
||||
try:
|
||||
audiofile = self._mod_audiotools.open(item.path)
|
||||
audiofile = self._mod_audiotools.open(py3_path(syspath(item.path)))
|
||||
except IOError:
|
||||
raise ReplayGainError(
|
||||
u"File {} was not found".format(item.path)
|
||||
|
|
@ -744,14 +1081,21 @@ class AudioToolsBackend(Backend):
|
|||
return
|
||||
return rg
|
||||
|
||||
def compute_track_gain(self, items):
|
||||
def compute_track_gain(self, items, target_level, peak):
|
||||
"""Compute ReplayGain values for the requested items.
|
||||
|
||||
:return list: list of :class:`Gain` objects
|
||||
"""
|
||||
return [self._compute_track_gain(item) for item in items]
|
||||
return [self._compute_track_gain(item, target_level) for item in items]
|
||||
|
||||
def _title_gain(self, rg, audiofile):
|
||||
def _with_target_level(self, gain, target_level):
|
||||
"""Return `gain` relative to `target_level`.
|
||||
|
||||
Assumes `gain` is relative to 89 db.
|
||||
"""
|
||||
return gain + (target_level - 89)
|
||||
|
||||
def _title_gain(self, rg, audiofile, target_level):
|
||||
"""Get the gain result pair from PyAudioTools using the `ReplayGain`
|
||||
instance `rg` for the given `audiofile`.
|
||||
|
||||
|
|
@ -761,14 +1105,15 @@ class AudioToolsBackend(Backend):
|
|||
try:
|
||||
# The method needs an audiotools.PCMReader instance that can
|
||||
# be obtained from an audiofile instance.
|
||||
return rg.title_gain(audiofile.to_pcm())
|
||||
gain, peak = rg.title_gain(audiofile.to_pcm())
|
||||
except ValueError as exc:
|
||||
# `audiotools.replaygain` can raise a `ValueError` if the sample
|
||||
# rate is incorrect.
|
||||
self._log.debug(u'error in rg.title_gain() call: {}', exc)
|
||||
raise ReplayGainError(u'audiotools audio data error')
|
||||
return self._with_target_level(gain, target_level), peak
|
||||
|
||||
def _compute_track_gain(self, item):
|
||||
def _compute_track_gain(self, item, target_level):
|
||||
"""Compute ReplayGain value for the requested item.
|
||||
|
||||
:rtype: :class:`Gain`
|
||||
|
|
@ -778,30 +1123,32 @@ class AudioToolsBackend(Backend):
|
|||
|
||||
# Each call to title_gain on a ReplayGain object returns peak and gain
|
||||
# of the track.
|
||||
rg_track_gain, rg_track_peak = self._title_gain(rg, audiofile)
|
||||
rg_track_gain, rg_track_peak = self._title_gain(
|
||||
rg, audiofile, target_level
|
||||
)
|
||||
|
||||
self._log.debug(u'ReplayGain for track {0} - {1}: {2:.2f}, {3:.2f}',
|
||||
item.artist, item.title, rg_track_gain, rg_track_peak)
|
||||
return Gain(gain=rg_track_gain, peak=rg_track_peak)
|
||||
|
||||
def compute_album_gain(self, album):
|
||||
def compute_album_gain(self, items, target_level, peak):
|
||||
"""Compute ReplayGain values for the requested album and its items.
|
||||
|
||||
:rtype: :class:`AlbumGain`
|
||||
"""
|
||||
self._log.debug(u'Analysing album {0}', album)
|
||||
|
||||
# The first item is taken and opened to get the sample rate to
|
||||
# initialize the replaygain object. The object is used for all the
|
||||
# tracks in the album to get the album values.
|
||||
item = list(album.items())[0]
|
||||
item = list(items)[0]
|
||||
audiofile = self.open_audio_file(item)
|
||||
rg = self.init_replaygain(audiofile, item)
|
||||
|
||||
track_gains = []
|
||||
for item in album.items():
|
||||
for item in items:
|
||||
audiofile = self.open_audio_file(item)
|
||||
rg_track_gain, rg_track_peak = self._title_gain(rg, audiofile)
|
||||
rg_track_gain, rg_track_peak = self._title_gain(
|
||||
rg, audiofile, target_level
|
||||
)
|
||||
track_gains.append(
|
||||
Gain(gain=rg_track_gain, peak=rg_track_peak)
|
||||
)
|
||||
|
|
@ -811,8 +1158,9 @@ class AudioToolsBackend(Backend):
|
|||
# After getting the values for all tracks, it's possible to get the
|
||||
# album values.
|
||||
rg_album_gain, rg_album_peak = rg.album_gain()
|
||||
rg_album_gain = self._with_target_level(rg_album_gain, target_level)
|
||||
self._log.debug(u'ReplayGain for album {0}: {1:.2f}, {2:.2f}',
|
||||
album, rg_album_gain, rg_album_peak)
|
||||
items[0].album, rg_album_gain, rg_album_peak)
|
||||
|
||||
return AlbumGain(
|
||||
Gain(gain=rg_album_gain, peak=rg_album_peak),
|
||||
|
|
@ -831,6 +1179,12 @@ class ReplayGainPlugin(BeetsPlugin):
|
|||
"gstreamer": GStreamerBackend,
|
||||
"audiotools": AudioToolsBackend,
|
||||
"bs1770gain": Bs1770gainBackend,
|
||||
"ffmpeg": FfmpegBackend,
|
||||
}
|
||||
|
||||
peak_methods = {
|
||||
"true": Peak.true,
|
||||
"sample": Peak.sample,
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
|
|
@ -841,11 +1195,15 @@ class ReplayGainPlugin(BeetsPlugin):
|
|||
'overwrite': False,
|
||||
'auto': True,
|
||||
'backend': u'command',
|
||||
'per_disc': False,
|
||||
'peak': 'true',
|
||||
'targetlevel': 89,
|
||||
'r128': ['Opus'],
|
||||
'r128_targetlevel': lufs_to_db(-23),
|
||||
})
|
||||
|
||||
self.overwrite = self.config['overwrite'].get(bool)
|
||||
self.per_disc = self.config['per_disc'].get(bool)
|
||||
backend_name = self.config['backend'].as_str()
|
||||
if backend_name not in self.backends:
|
||||
raise ui.UserError(
|
||||
|
|
@ -855,6 +1213,16 @@ class ReplayGainPlugin(BeetsPlugin):
|
|||
u', '.join(self.backends.keys())
|
||||
)
|
||||
)
|
||||
peak_method = self.config["peak"].as_str()
|
||||
if peak_method not in self.peak_methods:
|
||||
raise ui.UserError(
|
||||
u"Selected ReplayGain peak method {0} is not supported. "
|
||||
u"Please select one of: {1}".format(
|
||||
peak_method,
|
||||
u', '.join(self.peak_methods.keys())
|
||||
)
|
||||
)
|
||||
self._peak_method = self.peak_methods[peak_method]
|
||||
|
||||
# On-import analysis.
|
||||
if self.config['auto']:
|
||||
|
|
@ -871,8 +1239,6 @@ class ReplayGainPlugin(BeetsPlugin):
|
|||
raise ui.UserError(
|
||||
u'replaygain initialization failed: {0}'.format(e))
|
||||
|
||||
self.r128_backend_instance = ''
|
||||
|
||||
def should_use_r128(self, item):
|
||||
"""Checks the plugin setting to decide whether the calculation
|
||||
should be done using the EBU R128 standard and use R128_ tags instead.
|
||||
|
|
@ -902,29 +1268,47 @@ class ReplayGainPlugin(BeetsPlugin):
|
|||
item.rg_track_gain = track_gain.gain
|
||||
item.rg_track_peak = track_gain.peak
|
||||
item.store()
|
||||
|
||||
self._log.debug(u'applied track gain {0}, peak {1}',
|
||||
self._log.debug(u'applied track gain {0} LU, peak {1} of FS',
|
||||
item.rg_track_gain, item.rg_track_peak)
|
||||
|
||||
def store_album_gain(self, item, album_gain):
|
||||
item.rg_album_gain = album_gain.gain
|
||||
item.rg_album_peak = album_gain.peak
|
||||
item.store()
|
||||
self._log.debug(u'applied album gain {0} LU, peak {1} of FS',
|
||||
item.rg_album_gain, item.rg_album_peak)
|
||||
|
||||
def store_track_r128_gain(self, item, track_gain):
|
||||
item.r128_track_gain = int(round(track_gain.gain * pow(2, 8)))
|
||||
item.r128_track_gain = track_gain.gain
|
||||
item.store()
|
||||
|
||||
self._log.debug(u'applied r128 track gain {0}', item.r128_track_gain)
|
||||
self._log.debug(u'applied r128 track gain {0} LU',
|
||||
item.r128_track_gain)
|
||||
|
||||
def store_album_gain(self, album, album_gain):
|
||||
album.rg_album_gain = album_gain.gain
|
||||
album.rg_album_peak = album_gain.peak
|
||||
album.store()
|
||||
def store_album_r128_gain(self, item, album_gain):
|
||||
item.r128_album_gain = album_gain.gain
|
||||
item.store()
|
||||
self._log.debug(u'applied r128 album gain {0} LU',
|
||||
item.r128_album_gain)
|
||||
|
||||
self._log.debug(u'applied album gain {0}, peak {1}',
|
||||
album.rg_album_gain, album.rg_album_peak)
|
||||
def tag_specific_values(self, items):
|
||||
"""Return some tag specific values.
|
||||
|
||||
def store_album_r128_gain(self, album, album_gain):
|
||||
album.r128_album_gain = int(round(album_gain.gain * pow(2, 8)))
|
||||
album.store()
|
||||
Returns a tuple (store_track_gain, store_album_gain, target_level,
|
||||
peak_method).
|
||||
"""
|
||||
if any([self.should_use_r128(item) for item in items]):
|
||||
store_track_gain = self.store_track_r128_gain
|
||||
store_album_gain = self.store_album_r128_gain
|
||||
target_level = self.config['r128_targetlevel'].as_number()
|
||||
peak = Peak.none # R128_* tags do not store the track/album peak
|
||||
else:
|
||||
store_track_gain = self.store_track_gain
|
||||
store_album_gain = self.store_album_gain
|
||||
target_level = self.config['targetlevel'].as_number()
|
||||
peak = self._peak_method
|
||||
|
||||
self._log.debug(u'applied r128 album gain {0}', album.r128_album_gain)
|
||||
return store_track_gain, store_album_gain, target_level, peak
|
||||
|
||||
def handle_album(self, album, write, force=False):
|
||||
"""Compute album and track replay gain store it in all of the
|
||||
|
|
@ -942,40 +1326,44 @@ class ReplayGainPlugin(BeetsPlugin):
|
|||
|
||||
if (any([self.should_use_r128(item) for item in album.items()]) and not
|
||||
all(([self.should_use_r128(item) for item in album.items()]))):
|
||||
raise ReplayGainError(
|
||||
u"Mix of ReplayGain and EBU R128 detected"
|
||||
u" for some tracks in album {0}".format(album)
|
||||
)
|
||||
self._log.error(
|
||||
u"Cannot calculate gain for album {0} (incompatible formats)",
|
||||
album)
|
||||
return
|
||||
|
||||
if any([self.should_use_r128(item) for item in album.items()]):
|
||||
if self.r128_backend_instance == '':
|
||||
self.init_r128_backend()
|
||||
backend_instance = self.r128_backend_instance
|
||||
store_track_gain = self.store_track_r128_gain
|
||||
store_album_gain = self.store_album_r128_gain
|
||||
tag_vals = self.tag_specific_values(album.items())
|
||||
store_track_gain, store_album_gain, target_level, peak = tag_vals
|
||||
|
||||
discs = dict()
|
||||
if self.per_disc:
|
||||
for item in album.items():
|
||||
if discs.get(item.disc) is None:
|
||||
discs[item.disc] = []
|
||||
discs[item.disc].append(item)
|
||||
else:
|
||||
backend_instance = self.backend_instance
|
||||
store_track_gain = self.store_track_gain
|
||||
store_album_gain = self.store_album_gain
|
||||
discs[1] = album.items()
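(Aside, not part of the diff: the grouping above can be sketched with a defaultdict; the names below are illustrative stand-ins, assuming items expose a ``disc`` attribute as in beets.)

    from collections import defaultdict

    def group_by_disc(items, per_disc):
        # With per_disc enabled, gain is computed per disc; otherwise all
        # items land in a single group under disc number 1.
        discs = defaultdict(list)
        for item in items:
            discs[item.disc if per_disc else 1].append(item)
        return dict(discs)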
|
||||
|
||||
try:
|
||||
album_gain = backend_instance.compute_album_gain(album)
|
||||
if len(album_gain.track_gains) != len(album.items()):
|
||||
raise ReplayGainError(
|
||||
u"ReplayGain backend failed "
|
||||
u"for some tracks in album {0}".format(album)
|
||||
for discnumber, items in discs.items():
|
||||
try:
|
||||
album_gain = self.backend_instance.compute_album_gain(
|
||||
items, target_level, peak
|
||||
)
|
||||
if len(album_gain.track_gains) != len(items):
|
||||
raise ReplayGainError(
|
||||
u"ReplayGain backend failed "
|
||||
u"for some tracks in album {0}".format(album)
|
||||
)
|
||||
|
||||
store_album_gain(album, album_gain.album_gain)
|
||||
for item, track_gain in zip(album.items(), album_gain.track_gains):
|
||||
store_track_gain(item, track_gain)
|
||||
if write:
|
||||
item.try_write()
|
||||
except ReplayGainError as e:
|
||||
self._log.info(u"ReplayGain error: {0}", e)
|
||||
except FatalReplayGainError as e:
|
||||
raise ui.UserError(
|
||||
u"Fatal replay gain error: {0}".format(e))
|
||||
for item, track_gain in zip(items, album_gain.track_gains):
|
||||
store_track_gain(item, track_gain)
|
||||
store_album_gain(item, album_gain.album_gain)
|
||||
if write:
|
||||
item.try_write()
|
||||
except ReplayGainError as e:
|
||||
self._log.info(u"ReplayGain error: {0}", e)
|
||||
except FatalReplayGainError as e:
|
||||
raise ui.UserError(
|
||||
u"Fatal replay gain error: {0}".format(e))
|
||||
|
||||
def handle_track(self, item, write, force=False):
|
||||
"""Compute track replay gain and store it in the item.
|
||||
|
|
@ -990,17 +1378,13 @@ class ReplayGainPlugin(BeetsPlugin):
|
|||
|
||||
self._log.info(u'analyzing {0}', item)
|
||||
|
||||
if self.should_use_r128(item):
|
||||
if self.r128_backend_instance == '':
|
||||
self.init_r128_backend()
|
||||
backend_instance = self.r128_backend_instance
|
||||
store_track_gain = self.store_track_r128_gain
|
||||
else:
|
||||
backend_instance = self.backend_instance
|
||||
store_track_gain = self.store_track_gain
|
||||
tag_vals = self.tag_specific_values([item])
|
||||
store_track_gain, store_album_gain, target_level, peak = tag_vals
|
||||
|
||||
try:
|
||||
track_gains = backend_instance.compute_track_gain([item])
|
||||
track_gains = self.backend_instance.compute_track_gain(
|
||||
[item], target_level, peak
|
||||
)
|
||||
if len(track_gains) != 1:
|
||||
raise ReplayGainError(
|
||||
u"ReplayGain backend failed for track {0}".format(item)
|
||||
|
|
@ -1015,19 +1399,6 @@ class ReplayGainPlugin(BeetsPlugin):
|
|||
raise ui.UserError(
|
||||
u"Fatal replay gain error: {0}".format(e))
|
||||
|
||||
def init_r128_backend(self):
|
||||
backend_name = 'bs1770gain'
|
||||
|
||||
try:
|
||||
self.r128_backend_instance = self.backends[backend_name](
|
||||
self.config, self._log
|
||||
)
|
||||
except (ReplayGainError, FatalReplayGainError) as e:
|
||||
raise ui.UserError(
|
||||
u'replaygain initialization failed: {0}'.format(e))
|
||||
|
||||
self.r128_backend_instance.method = '--ebu'
|
||||
|
||||
def imported(self, session, task):
|
||||
"""Add replay gain info to items or albums of ``task``.
|
||||
"""
|
||||
beetsplug/smartplaylist.py
@ -21,7 +21,7 @@ from __future__ import division, absolute_import, print_function
|
|||
from beets.plugins import BeetsPlugin
|
||||
from beets import ui
|
||||
from beets.util import (mkdirall, normpath, sanitize_path, syspath,
|
||||
bytestring_path)
|
||||
bytestring_path, path_as_posix)
|
||||
from beets.library import Item, Album, parse_query_string
|
||||
from beets.dbcore import OrQuery
|
||||
from beets.dbcore.query import MultipleSort, ParsingError
|
||||
|
|
@ -37,7 +37,8 @@ class SmartPlaylistPlugin(BeetsPlugin):
|
|||
'relative_to': None,
|
||||
'playlist_dir': u'.',
|
||||
'auto': True,
|
||||
'playlists': []
|
||||
'playlists': [],
|
||||
'forward_slash': False,
|
||||
})
|
||||
|
||||
self._matched_playlists = None
|
||||
|
|
@ -104,17 +105,18 @@ class SmartPlaylistPlugin(BeetsPlugin):
|
|||
|
||||
playlist_data = (playlist['name'],)
|
||||
try:
|
||||
for key, Model in (('query', Item), ('album_query', Album)):
|
||||
for key, model_cls in (('query', Item),
|
||||
('album_query', Album)):
|
||||
qs = playlist.get(key)
|
||||
if qs is None:
|
||||
query_and_sort = None, None
|
||||
elif isinstance(qs, six.string_types):
|
||||
query_and_sort = parse_query_string(qs, Model)
|
||||
query_and_sort = parse_query_string(qs, model_cls)
|
||||
elif len(qs) == 1:
|
||||
query_and_sort = parse_query_string(qs[0], Model)
|
||||
query_and_sort = parse_query_string(qs[0], model_cls)
|
||||
else:
|
||||
# multiple queries and sorts
|
||||
queries, sorts = zip(*(parse_query_string(q, Model)
|
||||
queries, sorts = zip(*(parse_query_string(q, model_cls)
|
||||
for q in qs))
|
||||
query = OrQuery(queries)
|
||||
final_sorts = []
|
||||
|
|
@ -206,6 +208,8 @@ class SmartPlaylistPlugin(BeetsPlugin):
|
|||
mkdirall(m3u_path)
|
||||
with open(syspath(m3u_path), 'wb') as f:
|
||||
for path in m3us[m3u]:
|
||||
if self.config['forward_slash'].get():
|
||||
path = path_as_posix(path)
|
||||
f.write(path + b'\n')
|
||||
|
||||
self._log.info(u"{0} playlists updated", len(self._matched_playlists))
|
||||
beetsplug/spotify.py
@ -1,5 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# This file is part of beets.
|
||||
# Copyright 2019, Rahul Ahuja.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
|
||||
"""Adds Spotify release and track search support to the autotagger, along with
|
||||
Spotify playlist construction.
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import re
|
||||
|
|
@ -11,22 +27,30 @@ import collections
|
|||
import six
|
||||
import unidecode
|
||||
import requests
|
||||
import confuse
|
||||
|
||||
from beets import ui
|
||||
from beets.plugins import BeetsPlugin
|
||||
import confuse
|
||||
from beets.autotag.hooks import AlbumInfo, TrackInfo, Distance
|
||||
from beets.autotag.hooks import AlbumInfo, TrackInfo
|
||||
from beets.plugins import MetadataSourcePlugin, BeetsPlugin
|
||||
|
||||
|
||||
class SpotifyPlugin(BeetsPlugin):
|
||||
class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
|
||||
data_source = 'Spotify'
|
||||
|
||||
# Base URLs for the Spotify API
|
||||
# Documentation: https://developer.spotify.com/web-api
|
||||
oauth_token_url = 'https://accounts.spotify.com/api/token'
|
||||
open_track_url = 'http://open.spotify.com/track/'
|
||||
open_track_url = 'https://open.spotify.com/track/'
|
||||
search_url = 'https://api.spotify.com/v1/search'
|
||||
album_url = 'https://api.spotify.com/v1/albums/'
|
||||
track_url = 'https://api.spotify.com/v1/tracks/'
|
||||
playlist_partial = 'spotify:trackset:Playlist:'
|
||||
|
||||
# Spotify IDs consist of 22 alphanumeric characters
|
||||
# (zero-left-padded base62 representation of randomly generated UUID4)
|
||||
id_regex = {
|
||||
'pattern': r'(^|open\.spotify\.com/{}/)([0-9A-Za-z]{{22}})',
|
||||
'match_group': 2,
|
||||
}
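(Aside, not part of the diff: a minimal sketch of how the pattern above extracts an ID from either a bare ID or an open.spotify.com URL; the example values are made up.)

    import re

    pattern = r'(^|open\.spotify\.com/{}/)([0-9A-Za-z]{{22}})'

    def extract_id(url_type, id_or_url):
        # Group 2 holds the 22-character base62 ID in both cases.
        match = re.search(pattern.format(url_type), id_or_url)
        return match.group(2) if match else None

    extract_id('track', 'https://open.spotify.com/track/6rqhFgbbKwnb9MLmUQDhG6')
    extract_id('track', '6rqhFgbbKwnb9MLmUQDhG6')  # same ID either way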
|
||||
|
||||
def __init__(self):
|
||||
super(SpotifyPlugin, self).__init__()
|
||||
|
|
@ -43,7 +67,6 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
'client_id': '4e414367a1d14c75a5c5129a627fcab8',
|
||||
'client_secret': 'f82bdc09b2254f1a8286815d02fd46dc',
|
||||
'tokenfile': 'spotify_token.json',
|
||||
'source_weight': 0.5,
|
||||
}
|
||||
)
|
||||
self.config['client_secret'].redact = True
|
||||
|
|
@ -93,7 +116,9 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
self.access_token = response.json()['access_token']
|
||||
|
||||
# Save the token for later use.
|
||||
self._log.debug(u'Spotify access token: {}', self.access_token)
|
||||
self._log.debug(
|
||||
u'{} access token: {}', self.data_source, self.access_token
|
||||
)
|
||||
with open(self.tokenfile, 'w') as f:
|
||||
json.dump({'access_token': self.access_token}, f)
|
||||
|
||||
|
|
@ -119,31 +144,19 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
if response.status_code != 200:
|
||||
if u'token expired' in response.text:
|
||||
self._log.debug(
|
||||
'Spotify access token has expired. Reauthenticating.'
|
||||
'{} access token has expired. Reauthenticating.',
|
||||
self.data_source,
|
||||
)
|
||||
self._authenticate()
|
||||
return self._handle_response(request_type, url, params=params)
|
||||
else:
|
||||
raise ui.UserError(u'Spotify API error:\n{}', response.text)
|
||||
raise ui.UserError(
|
||||
u'{} API error:\n{}\nURL:\n{}\nparams:\n{}'.format(
|
||||
self.data_source, response.text, url, params
|
||||
)
|
||||
)
|
||||
return response.json()
|
||||
|
||||
def _get_spotify_id(self, url_type, id_):
|
||||
"""Parse a Spotify ID from its URL if necessary.
|
||||
|
||||
:param url_type: Type of Spotify URL, either 'album' or 'track'.
|
||||
:type url_type: str
|
||||
:param id_: Spotify ID or URL.
|
||||
:type id_: str
|
||||
:return: Spotify ID.
|
||||
:rtype: str
|
||||
"""
|
||||
# Spotify IDs consist of 22 alphanumeric characters
|
||||
# (zero-left-padded base62 representation of randomly generated UUID4)
|
||||
id_regex = r'(^|open\.spotify\.com/{}/)([0-9A-Za-z]{{22}})'
|
||||
self._log.debug(u'Searching for {} {}', url_type, id_)
|
||||
match = re.search(id_regex.format(url_type), id_)
|
||||
return match.group(2) if match else None
|
||||
|
||||
def album_for_id(self, album_id):
|
||||
"""Fetch an album by its Spotify ID or URL and return an
|
||||
AlbumInfo object or None if the album is not found.
|
||||
|
|
@ -153,61 +166,63 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
:return: AlbumInfo object for album
|
||||
:rtype: beets.autotag.hooks.AlbumInfo or None
|
||||
"""
|
||||
spotify_id = self._get_spotify_id('album', album_id)
|
||||
spotify_id = self._get_id('album', album_id)
|
||||
if spotify_id is None:
|
||||
return None
|
||||
|
||||
response_data = self._handle_response(
|
||||
album_data = self._handle_response(
|
||||
requests.get, self.album_url + spotify_id
|
||||
)
|
||||
artist, artist_id = self._get_artist(response_data['artists'])
|
||||
artist, artist_id = self.get_artist(album_data['artists'])
|
||||
|
||||
date_parts = [
|
||||
int(part) for part in response_data['release_date'].split('-')
|
||||
int(part) for part in album_data['release_date'].split('-')
|
||||
]
|
||||
|
||||
release_date_precision = response_data['release_date_precision']
|
||||
release_date_precision = album_data['release_date_precision']
|
||||
if release_date_precision == 'day':
|
||||
year, month, day = date_parts
|
||||
elif release_date_precision == 'month':
|
||||
year, month = date_parts
|
||||
day = None
|
||||
elif release_date_precision == 'year':
|
||||
year = date_parts
|
||||
year = date_parts[0]
|
||||
month = None
|
||||
day = None
|
||||
else:
|
||||
raise ui.UserError(
|
||||
u"Invalid `release_date_precision` returned "
|
||||
u"by Spotify API: '{}'".format(release_date_precision)
|
||||
u"by {} API: '{}'".format(
|
||||
self.data_source, release_date_precision
|
||||
)
|
||||
)
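(Aside, not part of the diff: a standalone sketch of the mapping implemented above; the date strings are hypothetical API values.)

    def parse_release_date(release_date, precision):
        # 'day' -> (year, month, day); 'month' -> (year, month, None);
        # 'year' -> (year, None, None), mirroring the branches above.
        parts = [int(p) for p in release_date.split('-')]
        if precision == 'day':
            return tuple(parts)
        if precision == 'month':
            return parts[0], parts[1], None
        if precision == 'year':
            return parts[0], None, None
        raise ValueError(precision)

    assert parse_release_date('2019-07', 'month') == (2019, 7, None)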
|
||||
|
||||
tracks = []
|
||||
medium_totals = collections.defaultdict(int)
|
||||
for i, track_data in enumerate(response_data['tracks']['items']):
|
||||
for i, track_data in enumerate(album_data['tracks']['items'], start=1):
|
||||
track = self._get_track(track_data)
|
||||
track.index = i + 1
|
||||
track.index = i
|
||||
medium_totals[track.medium] += 1
|
||||
tracks.append(track)
|
||||
for track in tracks:
|
||||
track.medium_total = medium_totals[track.medium]
|
||||
|
||||
return AlbumInfo(
|
||||
album=response_data['name'],
|
||||
album=album_data['name'],
|
||||
album_id=spotify_id,
|
||||
artist=artist,
|
||||
artist_id=artist_id,
|
||||
tracks=tracks,
|
||||
albumtype=response_data['album_type'],
|
||||
va=len(response_data['artists']) == 1
|
||||
albumtype=album_data['album_type'],
|
||||
va=len(album_data['artists']) == 1
|
||||
and artist.lower() == 'various artists',
|
||||
year=year,
|
||||
month=month,
|
||||
day=day,
|
||||
label=response_data['label'],
|
||||
label=album_data['label'],
|
||||
mediums=max(medium_totals.keys()),
|
||||
data_source='Spotify',
|
||||
data_url=response_data['external_urls']['spotify'],
|
||||
data_source=self.data_source,
|
||||
data_url=album_data['external_urls']['spotify'],
|
||||
)
|
||||
|
||||
def _get_track(self, track_data):
|
||||
|
|
@ -219,7 +234,7 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
:return: TrackInfo object for track
|
||||
:rtype: beets.autotag.hooks.TrackInfo
|
||||
"""
|
||||
artist, artist_id = self._get_artist(track_data['artists'])
|
||||
artist, artist_id = self.get_artist(track_data['artists'])
|
||||
return TrackInfo(
|
||||
title=track_data['name'],
|
||||
track_id=track_data['id'],
|
||||
|
|
@ -229,7 +244,7 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
index=track_data['track_number'],
|
||||
medium=track_data['disc_number'],
|
||||
medium_index=track_data['track_number'],
|
||||
data_source='Spotify',
|
||||
data_source=self.data_source,
|
||||
data_url=track_data['external_urls']['spotify'],
|
||||
)
|
||||
|
||||
|
|
@ -247,7 +262,7 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
:rtype: beets.autotag.hooks.TrackInfo or None
|
||||
"""
|
||||
if track_data is None:
|
||||
spotify_id = self._get_spotify_id('track', track_id)
|
||||
spotify_id = self._get_id('track', track_id)
|
||||
if spotify_id is None:
|
||||
return None
|
||||
track_data = self._handle_response(
|
||||
|
|
@ -262,107 +277,14 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
requests.get, self.album_url + track_data['album']['id']
|
||||
)
|
||||
medium_total = 0
|
||||
for i, track_data in enumerate(album_data['tracks']['items']):
|
||||
for i, track_data in enumerate(album_data['tracks']['items'], start=1):
|
||||
if track_data['disc_number'] == track.medium:
|
||||
medium_total += 1
|
||||
if track_data['id'] == track.track_id:
|
||||
track.index = i + 1
|
||||
track.index = i
|
||||
track.medium_total = medium_total
|
||||
return track
|
||||
|
||||
@staticmethod
|
||||
def _get_artist(artists):
|
||||
"""Returns an artist string (all artists) and an artist_id (the main
|
||||
artist) for a list of Spotify artist object dicts.
|
||||
|
||||
:param artists: Iterable of simplified Spotify artist objects
|
||||
(https://developer.spotify.com/documentation/web-api/reference/object-model/#artist-object-simplified)
|
||||
:type artists: list[dict]
|
||||
:return: Normalized artist string
|
||||
:rtype: str
|
||||
"""
|
||||
artist_id = None
|
||||
artist_names = []
|
||||
for artist in artists:
|
||||
if not artist_id:
|
||||
artist_id = artist['id']
|
||||
name = artist['name']
|
||||
# Move articles to the front.
|
||||
name = re.sub(r'^(.*?), (a|an|the)$', r'\2 \1', name, flags=re.I)
|
||||
artist_names.append(name)
|
||||
artist = ', '.join(artist_names).replace(' ,', ',') or None
|
||||
return artist, artist_id
|
||||
|
||||
def album_distance(self, items, album_info, mapping):
|
||||
"""Returns the Spotify source weight and the maximum source weight
|
||||
for albums.
|
||||
"""
|
||||
dist = Distance()
|
||||
if album_info.data_source == 'Spotify':
|
||||
dist.add('source', self.config['source_weight'].as_number())
|
||||
return dist
|
||||
|
||||
def track_distance(self, item, track_info):
|
||||
"""Returns the Spotify source weight and the maximum source weight
|
||||
for individual tracks.
|
||||
"""
|
||||
dist = Distance()
|
||||
if track_info.data_source == 'Spotify':
|
||||
dist.add('source', self.config['source_weight'].as_number())
|
||||
return dist
|
||||
|
||||
def candidates(self, items, artist, album, va_likely):
|
||||
"""Returns a list of AlbumInfo objects for Spotify Search API results
|
||||
matching an ``album`` and ``artist`` (if not various).
|
||||
|
||||
:param items: List of items comprised by an album to be matched.
|
||||
:type items: list[beets.library.Item]
|
||||
:param artist: The artist of the album to be matched.
|
||||
:type artist: str
|
||||
:param album: The name of the album to be matched.
|
||||
:type album: str
|
||||
:param va_likely: True if the album to be matched likely has
|
||||
Various Artists.
|
||||
:type va_likely: bool
|
||||
:return: Candidate AlbumInfo objects.
|
||||
:rtype: list[beets.autotag.hooks.AlbumInfo]
|
||||
"""
|
||||
query_filters = {'album': album}
|
||||
if not va_likely:
|
||||
query_filters['artist'] = artist
|
||||
response_data = self._search_spotify(
|
||||
query_type='album', filters=query_filters
|
||||
)
|
||||
if response_data is None:
|
||||
return []
|
||||
return [
|
||||
self.album_for_id(album_id=album_data['id'])
|
||||
for album_data in response_data['albums']['items']
|
||||
]
|
||||
|
||||
def item_candidates(self, item, artist, title):
|
||||
"""Returns a list of TrackInfo objects for Spotify Search API results
|
||||
matching ``title`` and ``artist``.
|
||||
|
||||
:param item: Singleton item to be matched.
|
||||
:type item: beets.library.Item
|
||||
:param artist: The artist of the track to be matched.
|
||||
:type artist: str
|
||||
:param title: The title of the track to be matched.
|
||||
:type title: str
|
||||
:return: Candidate TrackInfo objects.
|
||||
:rtype: list[beets.autotag.hooks.TrackInfo]
|
||||
"""
|
||||
response_data = self._search_spotify(
|
||||
query_type='track', keywords=title, filters={'artist': artist}
|
||||
)
|
||||
if response_data is None:
|
||||
return []
|
||||
return [
|
||||
self.track_for_id(track_data=track_data)
|
||||
for track_data in response_data['tracks']['items']
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def _construct_search_query(filters=None, keywords=''):
|
||||
"""Construct a query string with the specified filters and keywords to
|
||||
|
|
@ -385,14 +307,12 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
query = query.decode('utf8')
|
||||
return unidecode.unidecode(query)
|
||||
|
||||
def _search_spotify(self, query_type, filters=None, keywords=''):
|
||||
def _search_api(self, query_type, filters=None, keywords=''):
|
||||
"""Query the Spotify Search API for the specified ``keywords``, applying
|
||||
the provided ``filters``.
|
||||
|
||||
:param query_type: A comma-separated list of item types to search
|
||||
across. Valid types are: 'album', 'artist', 'playlist', and
|
||||
'track'. Search results include hits from all the specified item
|
||||
types.
|
||||
:param query_type: Item type to search across. Valid types are:
|
||||
'album', 'artist', 'playlist', and 'track'.
|
||||
:type query_type: str
|
||||
:param filters: (Optional) Field filters to apply.
|
||||
:type filters: dict
|
||||
|
|
@ -407,19 +327,25 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
)
|
||||
if not query:
|
||||
return None
|
||||
self._log.debug(u"Searching Spotify for '{}'".format(query))
|
||||
response_data = self._handle_response(
|
||||
requests.get,
|
||||
self.search_url,
|
||||
params={'q': query, 'type': query_type},
|
||||
)
|
||||
num_results = 0
|
||||
for result_type_data in response_data.values():
|
||||
num_results += len(result_type_data['items'])
|
||||
self._log.debug(
|
||||
u"Found {} results from Spotify for '{}'", num_results, query
|
||||
u"Searching {} for '{}'".format(self.data_source, query)
|
||||
)
|
||||
return response_data if num_results > 0 else None
|
||||
response_data = (
|
||||
self._handle_response(
|
||||
requests.get,
|
||||
self.search_url,
|
||||
params={'q': query, 'type': query_type},
|
||||
)
|
||||
.get(query_type + 's', {})
|
||||
.get('items', [])
|
||||
)
|
||||
self._log.debug(
|
||||
u"Found {} result(s) from {} for '{}'",
|
||||
len(response_data),
|
||||
self.data_source,
|
||||
query,
|
||||
)
|
||||
return response_data
|
||||
|
||||
def commands(self):
|
||||
def queries(lib, opts, args):
|
||||
|
|
@ -429,21 +355,23 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
self._output_match_results(results)
|
||||
|
||||
spotify_cmd = ui.Subcommand(
|
||||
'spotify', help=u'build a Spotify playlist'
|
||||
'spotify', help=u'build a {} playlist'.format(self.data_source)
|
||||
)
|
||||
spotify_cmd.parser.add_option(
|
||||
u'-m',
|
||||
u'--mode',
|
||||
action='store',
|
||||
help=u'"open" to open Spotify with playlist, '
|
||||
u'"list" to print (default)',
|
||||
help=u'"open" to open {} with playlist, '
|
||||
u'"list" to print (default)'.format(self.data_source),
|
||||
)
|
||||
spotify_cmd.parser.add_option(
|
||||
u'-f',
|
||||
u'--show-failures',
|
||||
action='store_true',
|
||||
dest='show_failures',
|
||||
help=u'list tracks that did not match a Spotify ID',
|
||||
help=u'list tracks that did not match a {} ID'.format(
|
||||
self.data_source
|
||||
),
|
||||
)
|
||||
spotify_cmd.func = queries
|
||||
return [spotify_cmd]
|
||||
|
|
@ -483,7 +411,8 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
|
||||
if not items:
|
||||
self._log.debug(
|
||||
u'Your beets query returned no items, skipping Spotify.'
|
||||
u'Your beets query returned no items, skipping {}.',
|
||||
self.data_source,
|
||||
)
|
||||
return
|
||||
|
||||
|
|
@ -511,16 +440,15 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
|
||||
# Query the Web API for each track, look for the items' JSON data
|
||||
query_filters = {'artist': artist, 'album': album}
|
||||
response_data = self._search_spotify(
|
||||
response_data_tracks = self._search_api(
|
||||
query_type='track', keywords=keywords, filters=query_filters
|
||||
)
|
||||
if response_data is None:
|
||||
if not response_data_tracks:
|
||||
query = self._construct_search_query(
|
||||
keywords=keywords, filters=query_filters
|
||||
)
|
||||
failures.append(query)
|
||||
continue
|
||||
response_data_tracks = response_data['tracks']['items']
|
||||
|
||||
# Apply market filter if requested
|
||||
region_filter = self.config['region_filter'].get()
|
||||
|
|
@ -536,7 +464,8 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
or self.config['tiebreak'].get() == 'first'
|
||||
):
|
||||
self._log.debug(
|
||||
u'Spotify track(s) found, count: {}',
|
||||
u'{} track(s) found, count: {}',
|
||||
self.data_source,
|
||||
len(response_data_tracks),
|
||||
)
|
||||
chosen_result = response_data_tracks[0]
|
||||
|
|
@ -555,16 +484,19 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
if failure_count > 0:
|
||||
if self.config['show_failures'].get():
|
||||
self._log.info(
|
||||
u'{} track(s) did not match a Spotify ID:', failure_count
|
||||
u'{} track(s) did not match a {} ID:',
|
||||
failure_count,
|
||||
self.data_source,
|
||||
)
|
||||
for track in failures:
|
||||
self._log.info(u'track: {}', track)
|
||||
self._log.info(u'')
|
||||
else:
|
||||
self._log.warning(
|
||||
u'{} track(s) did not match a Spotify ID;\n'
|
||||
u'{} track(s) did not match a {} ID:\n'
|
||||
u'use --show-failures to display',
|
||||
failure_count,
|
||||
self.data_source,
|
||||
)
|
||||
|
||||
return results
|
||||
|
|
@ -580,11 +512,19 @@ class SpotifyPlugin(BeetsPlugin):
|
|||
if results:
|
||||
spotify_ids = [track_data['id'] for track_data in results]
|
||||
if self.config['mode'].get() == 'open':
|
||||
self._log.info(u'Attempting to open Spotify with playlist')
|
||||
spotify_url = self.playlist_partial + ",".join(spotify_ids)
|
||||
self._log.info(
|
||||
u'Attempting to open {} with playlist'.format(
|
||||
self.data_source
|
||||
)
|
||||
)
|
||||
spotify_url = 'spotify:trackset:Playlist:' + ','.join(
|
||||
spotify_ids
|
||||
)
|
||||
webbrowser.open(spotify_url)
|
||||
else:
|
||||
for spotify_id in spotify_ids:
|
||||
print(self.open_track_url + spotify_id)
|
||||
else:
|
||||
self._log.warning(u'No Spotify tracks found from beets query')
|
||||
self._log.warning(
|
||||
u'No {} tracks found from beets query'.format(self.data_source)
|
||||
)
|
||||
|
|
|
|||
beetsplug/subsonicplaylist.py (new file, 173 lines)
@@ -0,0 +1,173 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# This file is part of beets.
|
||||
# Copyright 2019, Joris Jensen
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import random
|
||||
import string
|
||||
from xml.etree import ElementTree
|
||||
from hashlib import md5
|
||||
from urllib.parse import urlencode
|
||||
|
||||
import requests
|
||||
|
||||
from beets.dbcore import AndQuery
|
||||
from beets.dbcore.query import MatchQuery
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beets.ui import Subcommand
|
||||
|
||||
__author__ = 'https://github.com/MrNuggelz'
|
||||
|
||||
|
||||
def filter_to_be_removed(items, keys):
|
||||
if len(items) > len(keys):
|
||||
dont_remove = []
|
||||
for artist, album, title in keys:
|
||||
for item in items:
|
||||
if artist == item['artist'] and \
|
||||
album == item['album'] and \
|
||||
title == item['title']:
|
||||
dont_remove.append(item)
|
||||
return [item for item in items if item not in dont_remove]
|
||||
else:
|
||||
def to_be_removed(item):
|
||||
for artist, album, title in keys:
|
||||
if artist == item['artist'] and\
|
||||
album == item['album'] and\
|
||||
title == item['title']:
|
||||
return False
|
||||
return True
|
||||
|
||||
return [item for item in items if to_be_removed(item)]
|
||||
|
||||
|
||||
class SubsonicPlaylistPlugin(BeetsPlugin):
|
||||
|
||||
def __init__(self):
|
||||
super(SubsonicPlaylistPlugin, self).__init__()
|
||||
self.config.add(
|
||||
{
|
||||
'delete': False,
|
||||
'playlist_ids': [],
|
||||
'playlist_names': [],
|
||||
'username': '',
|
||||
'password': ''
|
||||
}
|
||||
)
|
||||
self.config['password'].redact = True
|
||||
|
||||
def update_tags(self, playlist_dict, lib):
|
||||
with lib.transaction():
|
||||
for query, playlist_tag in playlist_dict.items():
|
||||
query = AndQuery([MatchQuery("artist", query[0]),
|
||||
MatchQuery("album", query[1]),
|
||||
MatchQuery("title", query[2])])
|
||||
items = lib.items(query)
|
||||
if not items:
|
||||
self._log.warn(u"{} | track not found ({})", playlist_tag,
|
||||
query)
|
||||
continue
|
||||
for item in items:
|
||||
item.subsonic_playlist = playlist_tag
|
||||
item.try_sync(write=True, move=False)
|
||||
|
||||
def get_playlist(self, playlist_id):
|
||||
xml = self.send('getPlaylist', {'id': playlist_id}).text
|
||||
playlist = ElementTree.fromstring(xml)[0]
|
||||
if playlist.attrib.get('code', '200') != '200':
|
||||
alt_error = 'error getting playlist, but no error message found'
|
||||
self._log.warn(playlist.attrib.get('message', alt_error))
|
||||
return
|
||||
|
||||
name = playlist.attrib.get('name', 'undefined')
|
||||
tracks = [(t.attrib['artist'], t.attrib['album'], t.attrib['title'])
|
||||
for t in playlist]
|
||||
return name, tracks
|
||||
|
||||
def commands(self):
|
||||
def build_playlist(lib, opts, args):
|
||||
self.config.set_args(opts)
|
||||
ids = self.config['playlist_ids'].as_str_seq()
|
||||
if self.config['playlist_names'].as_str_seq():
|
||||
playlists = ElementTree.fromstring(
|
||||
self.send('getPlaylists').text)[0]
|
||||
if playlists.attrib.get('code', '200') != '200':
|
||||
alt_error = 'error getting playlists,' \
|
||||
' but no error message found'
|
||||
self._log.warn(
|
||||
playlists.attrib.get('message', alt_error))
|
||||
return
|
||||
for name in self.config['playlist_names'].as_str_seq():
|
||||
for playlist in playlists:
|
||||
if name == playlist.attrib['name']:
|
||||
ids.append(playlist.attrib['id'])
|
||||
|
||||
playlist_dict = self.get_playlists(ids)
|
||||
|
||||
# delete old tags
|
||||
if self.config['delete']:
|
||||
existing = list(lib.items('subsonic_playlist:";"'))
|
||||
to_be_removed = filter_to_be_removed(
|
||||
existing,
|
||||
playlist_dict.keys())
|
||||
for item in to_be_removed:
|
||||
item['subsonic_playlist'] = ''
|
||||
with lib.transaction():
|
||||
item.try_sync(write=True, move=False)
|
||||
|
||||
self.update_tags(playlist_dict, lib)
|
||||
|
||||
subsonicplaylist_cmds = Subcommand(
|
||||
'subsonicplaylist', help=u'import a subsonic playlist'
|
||||
)
|
||||
subsonicplaylist_cmds.parser.add_option(
|
||||
u'-d',
|
||||
u'--delete',
|
||||
action='store_true',
|
||||
help=u'delete tag from items not in any playlist anymore',
|
||||
)
|
||||
subsonicplaylist_cmds.func = build_playlist
|
||||
return [subsonicplaylist_cmds]
|
||||
|
||||
def generate_token(self):
|
||||
salt = ''.join(random.choices(string.ascii_lowercase + string.digits))
|
||||
return md5(
|
||||
(self.config['password'].get() + salt).encode()).hexdigest(), salt
|
||||
|
||||
def send(self, endpoint, params=None):
|
||||
if params is None:
|
||||
params = dict()
|
||||
a, b = self.generate_token()
|
||||
params['u'] = self.config['username']
|
||||
params['t'] = a
|
||||
params['s'] = b
|
||||
params['v'] = '1.12.0'
|
||||
params['c'] = 'beets'
|
||||
resp = requests.get('{}/rest/{}?{}'.format(
|
||||
self.config['base_url'].get(),
|
||||
endpoint,
|
||||
urlencode(params))
|
||||
)
|
||||
return resp
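(Aside, not part of the diff: the request assembled above has roughly the following shape; the host, user, and playlist id are made-up values.)

    from urllib.parse import urlencode

    params = {'id': '42', 'u': 'alice', 't': '<md5 of password+salt>',
              's': '<salt>', 'v': '1.12.0', 'c': 'beets'}
    url = '{}/rest/{}?{}'.format('https://music.example.com', 'getPlaylist',
                                 urlencode(params))
    # https://music.example.com/rest/getPlaylist?id=42&u=alice&t=...&s=...&v=1.12.0&c=beets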
|
||||
|
||||
def get_playlists(self, ids):
|
||||
output = dict()
|
||||
for playlist_id in ids:
|
||||
name, tracks = self.get_playlist(playlist_id)
|
||||
for track in tracks:
|
||||
if track not in output:
|
||||
output[track] = ';'
|
||||
output[track] += name + ';'
|
||||
return output
beetsplug/subsonicupdate.py
@ -17,20 +17,20 @@
|
|||
Your Beets configuration file should contain
|
||||
a "subsonic" section like the following:
|
||||
subsonic:
|
||||
host: 192.168.x.y (Subsonic server IP)
|
||||
port: 4040 (default)
|
||||
user: <your username>
|
||||
pass: <your password>
|
||||
contextpath: /subsonic
|
||||
url: https://mydomain.com:443/subsonic
|
||||
user: username
|
||||
pass: password
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beets import config
|
||||
import requests
|
||||
import string
|
||||
import hashlib
|
||||
import random
|
||||
import string
|
||||
|
||||
import requests
|
||||
|
||||
from beets import config
|
||||
from beets.plugins import BeetsPlugin
|
||||
|
||||
__author__ = 'https://github.com/maffo999'
|
||||
|
||||
|
|
@ -41,47 +41,75 @@ class SubsonicUpdate(BeetsPlugin):
|
|||
|
||||
# Set default configuration values
|
||||
config['subsonic'].add({
|
||||
'host': 'localhost',
|
||||
'port': '4040',
|
||||
'user': 'admin',
|
||||
'pass': 'admin',
|
||||
'contextpath': '/',
|
||||
'url': 'http://localhost:4040',
|
||||
})
|
||||
|
||||
config['subsonic']['pass'].redact = True
|
||||
self.register_listener('import', self.loaded)
|
||||
self.register_listener('import', self.start_scan)
|
||||
|
||||
def loaded(self):
|
||||
host = config['subsonic']['host'].as_str()
|
||||
port = config['subsonic']['port'].get(int)
|
||||
user = config['subsonic']['user'].as_str()
|
||||
passw = config['subsonic']['pass'].as_str()
|
||||
contextpath = config['subsonic']['contextpath'].as_str()
|
||||
@staticmethod
|
||||
def __create_token():
|
||||
"""Create salt and token from given password.
|
||||
|
||||
# To avoid sending plaintext passwords, authentication will be
|
||||
# performed via username, a token, and a 6 random
|
||||
# letters/numbers sequence.
|
||||
# The token is the concatenation of your password and the 6 random
|
||||
# letters/numbers (the salt) which is hashed with MD5.
|
||||
:return: The generated salt and hashed token
|
||||
"""
|
||||
password = config['subsonic']['pass'].as_str()
|
||||
|
||||
# Pick the random sequence and salt the password
|
||||
r = string.ascii_letters + string.digits
|
||||
salt = "".join([random.choice(r) for n in range(6)])
|
||||
t = passw + salt
|
||||
token = hashlib.md5()
|
||||
token.update(t.encode('utf-8'))
|
||||
salt = "".join([random.choice(r) for _ in range(6)])
|
||||
salted_password = password + salt
|
||||
token = hashlib.md5(salted_password.encode('utf-8')).hexdigest()
|
||||
|
||||
# Put together the payload of the request to the server and the URL
|
||||
return salt, token
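(Aside, not part of the diff: a standalone check of the token scheme described above; the password and salt are made-up values.)

    import hashlib

    password, salt = 'sesame', 'abc123'
    token = hashlib.md5((password + salt).encode('utf-8')).hexdigest()
    # The request then carries u=<user>, t=<token>, s=<salt>; the plaintext
    # password itself is never sent.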
|
||||
|
||||
@staticmethod
|
||||
def __format_url():
|
||||
"""Get the Subsonic URL to trigger a scan. Uses either the url
|
||||
config option or the deprecated host, port, and context_path config
|
||||
options together.
|
||||
|
||||
:return: Endpoint for updating Subsonic
|
||||
"""
|
||||
|
||||
url = config['subsonic']['url'].as_str()
|
||||
if url and url.endswith('/'):
|
||||
url = url[:-1]
|
||||
|
||||
# @deprecated("Use url config option instead")
|
||||
if not url:
|
||||
host = config['subsonic']['host'].as_str()
|
||||
port = config['subsonic']['port'].get(int)
|
||||
context_path = config['subsonic']['contextpath'].as_str()
|
||||
if context_path == '/':
|
||||
context_path = ''
|
||||
url = "http://{}:{}{}".format(host, port, context_path)
|
||||
|
||||
return url + '/rest/startScan'
|
||||
|
||||
def start_scan(self):
|
||||
user = config['subsonic']['user'].as_str()
|
||||
url = self.__format_url()
|
||||
salt, token = self.__create_token()
|
||||
|
||||
payload = {
|
||||
'u': user,
|
||||
't': token.hexdigest(),
|
||||
't': token,
|
||||
's': salt,
|
||||
'v': '1.15.0', # Subsonic 6.1 and newer.
|
||||
'c': 'beets'
|
||||
}
|
||||
if contextpath == '/':
|
||||
contextpath = ''
|
||||
url = "http://{}:{}{}/rest/startScan".format(host, port, contextpath)
|
||||
|
||||
response = requests.post(url, params=payload)
|
||||
|
||||
if response.status_code != 200:
|
||||
self._log.error(u'Generic error, please try again later.')
|
||||
if response.status_code == 403:
|
||||
self._log.error(u'Server authentication failed')
|
||||
elif response.status_code == 200:
|
||||
self._log.debug(u'Updating Subsonic')
|
||||
else:
|
||||
self._log.error(
|
||||
u'Generic error, please try again later [Status Code: {}]'
|
||||
.format(response.status_code))
|
||||
beetsplug/thumbnails.py
@ -160,7 +160,7 @@ class ThumbnailsPlugin(BeetsPlugin):
|
|||
|
||||
def thumbnail_file_name(self, path):
|
||||
"""Compute the thumbnail file name
|
||||
See http://standards.freedesktop.org/thumbnail-spec/latest/x227.html
|
||||
See https://standards.freedesktop.org/thumbnail-spec/latest/x227.html
|
||||
"""
|
||||
uri = self.get_uri(path)
|
||||
hash = md5(uri.encode('utf-8')).hexdigest()
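(Aside, not part of the diff: per the freedesktop thumbnail spec, the thumbnail file name is the MD5 hex digest of the file URI plus a .png extension; the URI below is made up.)

    from hashlib import md5

    uri = 'file:///home/user/Music/album/cover.jpg'
    thumb_name = md5(uri.encode('utf-8')).hexdigest() + '.png'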
|
||||
|
|
@ -168,7 +168,7 @@ class ThumbnailsPlugin(BeetsPlugin):
|
|||
|
||||
def add_tags(self, album, image_path):
|
||||
"""Write required metadata to the thumbnail
|
||||
See http://standards.freedesktop.org/thumbnail-spec/latest/x142.html
|
||||
See https://standards.freedesktop.org/thumbnail-spec/latest/x142.html
|
||||
"""
|
||||
mtime = os.stat(album.artpath).st_mtime
|
||||
metadata = {"Thumb::URI": self.get_uri(album.artpath),
|
||||
|
|
@ -224,7 +224,7 @@ class PathlibURI(URIGetter):
|
|||
name = "Python Pathlib"
|
||||
|
||||
def uri(self, path):
|
||||
return PurePosixPath(path).as_uri()
|
||||
return PurePosixPath(util.py3_path(path)).as_uri()
|
||||
|
||||
|
||||
def copy_c_string(c_string):
|
||||
|
|
|
|||
beetsplug/unimported.py (new file, 60 lines)
@@ -0,0 +1,60 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# This file is part of beets.
|
||||
# Copyright 2019, Joris Jensen
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
|
||||
"""
|
||||
List all files in the library folder which are not listed in the
|
||||
beets library database, including art files
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
import os
|
||||
|
||||
from beets import util
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beets.ui import Subcommand, print_
|
||||
|
||||
__author__ = 'https://github.com/MrNuggelz'
|
||||
|
||||
|
||||
class Unimported(BeetsPlugin):
|
||||
|
||||
def __init__(self):
|
||||
super(Unimported, self).__init__()
|
||||
self.config.add(
|
||||
{
|
||||
'ignore_extensions': []
|
||||
}
|
||||
)
|
||||
|
||||
def commands(self):
|
||||
def print_unimported(lib, opts, args):
|
||||
ignore_exts = [('.' + x).encode() for x
|
||||
in self.config['ignore_extensions'].as_str_seq()]
|
||||
in_folder = set(
|
||||
(os.path.join(r, file) for r, d, f in os.walk(lib.directory)
|
||||
for file in f if not any(
|
||||
[file.endswith(extension) for extension in
|
||||
ignore_exts])))
|
||||
in_library = set(x.path for x in lib.items())
|
||||
art_files = set(x.artpath for x in lib.albums())
|
||||
for f in in_folder - in_library - art_files:
|
||||
print_(util.displayable_path(f))
|
||||
|
||||
unimported = Subcommand(
|
||||
'unimported',
|
||||
help='list all files in the library folder which are not listed'
|
||||
' in the beets library database')
|
||||
unimported.func = print_unimported
|
||||
return [unimported]
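(Aside, not part of the diff: a minimal standalone sketch of the same idea; the function name and the default ignore list are illustrative, not the plugin's API.)

    import os

    def unimported_files(library_dir, tracked_paths, ignore_exts=('.log',)):
        # Set difference between files on disk and the paths beets tracks
        # (library items plus album art), skipping ignored extensions.
        on_disk = {
            os.path.join(root, name)
            for root, _dirs, files in os.walk(library_dir)
            for name in files
            if not name.endswith(tuple(ignore_exts))
        }
        return sorted(on_disk - set(tracked_paths))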
|
||||
beetsplug/web/__init__.py
@ -169,7 +169,7 @@ class IdListConverter(BaseConverter):
|
|||
return ids
|
||||
|
||||
def to_url(self, value):
|
||||
return ','.join(value)
|
||||
return ','.join(str(v) for v in value)
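(Aside, not part of the diff: joining integer ids directly raises a TypeError, hence the explicit str() conversion.)

    ids = [1, 2, 3]
    ','.join(str(v) for v in ids)   # '1,2,3'
    # ','.join(ids) would raise TypeError: sequence item 0: expected str instance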
|
||||
|
||||
|
||||
class QueryConverter(PathConverter):
|
||||
|
|
@ -177,10 +177,11 @@ class QueryConverter(PathConverter):
|
|||
"""
|
||||
|
||||
def to_python(self, value):
|
||||
return value.split('/')
|
||||
queries = value.split('/')
|
||||
return [query.replace('\\', os.sep) for query in queries]
|
||||
|
||||
def to_url(self, value):
|
||||
return ','.join(value)
|
||||
return ','.join([v.replace(os.sep, '\\') for v in value])
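(Aside, not part of the diff: an illustration of the round trip above with a made-up query; os.sep is '/' on POSIX systems.)

    import os

    segment = 'albumartist:Foo/path:C\\music'   # URL path segment as received
    queries = [q.replace('\\', os.sep) for q in segment.split('/')]
    # -> ['albumartist:Foo', 'path:C/music'] on POSIX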
|
||||
|
||||
|
||||
class EverythingConverter(PathConverter):
|
||||
|
|
|
|||
|
|
@ -129,7 +129,7 @@ $.fn.player = function(debug) {
|
|||
|
||||
// Simple selection disable for jQuery.
|
||||
// Cut-and-paste from:
|
||||
// http://stackoverflow.com/questions/2700000
|
||||
// https://stackoverflow.com/questions/2700000
|
||||
$.fn.disableSelection = function() {
|
||||
$(this).attr('unselectable', 'on')
|
||||
.css('-moz-user-select', 'none')
|
||||
|
|
docs/changelog.rst
@ -6,17 +6,142 @@ Changelog
|
|||
|
||||
New features:
|
||||
|
||||
* :doc:`/plugins/lastgenre`: Added more `heavy metal genres <https://en.wikipedia.org/wiki/Heavy_metal_genres>`_ to genres.txt and genres-tree.yaml.
|
||||
* :doc:`/plugins/subsonicplaylist`: Import playlists from a Subsonic server.
|
||||
* A new :ref:`extra_tags` configuration option allows more tagged metadata
|
||||
to be included in MusicBrainz queries.
|
||||
* A new :doc:`/plugins/fish` adds `Fish shell`_ tab autocompletion to beets
|
||||
* :doc:`plugins/fetchart` and :doc:`plugins/embedart`: Added a new ``quality``
|
||||
option that controls the quality of the image output when the image is
|
||||
resized.
|
||||
* :doc:`plugins/keyfinder`: Added support for `keyfinder-cli`_
|
||||
Thanks to :user:`BrainDamage`.
|
||||
* :doc:`plugins/fetchart`: Added a new ``high_resolution`` config option to
|
||||
allow downloading of higher resolution iTunes artwork (at the expense of
|
||||
file size).
|
||||
:bug:`3391`
|
||||
* :doc:`plugins/discogs` now adds two extra fields: `discogs_labelid` and
|
||||
`discogs_artistid`
|
||||
:bug:`3413`
|
||||
* :doc:`/plugins/export`: Added a new ``-f`` (``--format``) flag,
which allows exporting in JSON, CSV, and XML.
|
||||
Thanks to :user:`austinmm`.
|
||||
:bug:`3402`
|
||||
* :doc:`/plugins/unimported`: lets you find untracked files in your library directory.
|
||||
* We now fetch information about `works`_ from MusicBrainz.
|
||||
MusicBrainz matches provide the fields ``work`` (the title), ``mb_workid``
|
||||
(the MBID), and ``work_disambig`` (the disambiguation string).
|
||||
Thanks to :user:`dosoe`.
|
||||
:bug:`2580` :bug:`3272`
|
||||
* :doc:`/plugins/convert`: Added new ``-l`` (``--link``) flag and ``link``
|
||||
option as well as the ``-H`` (``--hardlink``) flag and ``hardlink``
|
||||
option which symlinks or hardlinks files that do not need to
|
||||
be converted instead of copying them.
|
||||
:bug:`2324`
|
||||
* :doc:`/plugins/bpd`: BPD now supports most of the features of version 0.16
|
||||
of the MPD protocol. This is enough to get it talking to more complicated
|
||||
clients like ncmpcpp, but there are still some incompatibilities, largely due
|
||||
to MPD commands we don't support yet. Let us know if you find an MPD client
|
||||
that doesn't get along with BPD!
|
||||
:bug:`3214` :bug:`800`
|
||||
* :doc:`/plugins/replaygain`: The plugin now supports a ``per_disc`` option
|
||||
which enables calculation of album ReplayGain at the disc level instead of the
album level.
Thanks to :user:`samuelnilsson`.
|
||||
:bug:`293`
|
||||
* :doc:`/plugins/replaygain`: The new ``ffmpeg`` ReplayGain backend supports
|
||||
``R128_`` tags, just like the ``bs1770gain`` backend.
|
||||
:bug:`3056`
|
||||
* :doc:`plugins/replaygain`: ``r128_targetlevel`` is a new configuration option
|
||||
for the ReplayGain plugin: It defines the reference volume for files using
|
||||
``R128_`` tags. ``targetlevel`` only configures the reference volume for
|
||||
``REPLAYGAIN_`` files.
|
||||
This also deprecates the ``bs1770gain`` ReplayGain backend's ``method``
|
||||
option. Use ``targetlevel`` and ``r128_targetlevel`` instead.
|
||||
:bug:`3065`
|
||||
* A new :doc:`/plugins/parentwork` gets information about the original work,
|
||||
which is useful for classical music.
|
||||
Thanks to :user:`dosoe`.
|
||||
:bug:`2580` :bug:`3279`
|
||||
* :doc:`/plugins/discogs`: The plugin now also collects the "style" field.
|
||||
Thanks to :user:`thedevilisinthedetails`.
|
||||
:bug:`2579` :bug:`3251`
|
||||
* :doc:`/plugins/absubmit`: By default, the plugin now avoids re-analyzing
|
||||
files that already have AB data.
|
||||
There are new ``force`` and ``pretend`` options to help control this new
|
||||
behavior.
|
||||
Thanks to :user:`SusannaMaria`.
|
||||
:bug:`3318`
|
||||
* :doc:`/plugins/discogs`: The plugin now also gets genre information and a
|
||||
new ``discogs_albumid`` field from the Discogs API.
|
||||
Thanks to :user:`thedevilisinthedetails`.
|
||||
:bug:`465` :bug:`3322`
|
||||
* :doc:`/plugins/acousticbrainz`: The plugin now fetches two more additional
|
||||
fields: ``moods_mirex`` and ``timbre``.
|
||||
Thanks to :user:`malcops`.
|
||||
:bug:`2860`
|
||||
* :doc:`/plugins/playlist` and :doc:`/plugins/smartplaylist`: A new
|
||||
``forward_slash`` config option facilitates compatibility with MPD on
|
||||
Windows.
|
||||
Thanks to :user:`MartyLake`.
|
||||
:bug:`3331` :bug:`3334`
|
||||
* The 'data_source' field is now also applied as an album-level flexible
|
||||
attribute during imports, allowing for more refined album level searches.
|
||||
:bug:`3350` :bug:`1693`
|
||||
* :doc:`/plugins/deezer`: Added Deezer plugin as an import metadata provider:
|
||||
you can now match tracks and albums using the `Deezer`_ database.
|
||||
Thanks to :user:`rhlahuja`.
|
||||
:bug:`3355`
|
||||
* :doc:`/plugins/beatport`: The plugin now gets the musical key, BPM and the
|
||||
genre for each track.
|
||||
:bug:`2080`
|
||||
* :doc:`/plugins/beatport`: Fix default assignment of the musical key.
|
||||
:bug:`3377`
|
||||
* :doc:`/plugins/bpsync`: Add `bpsync` plugin to sync metadata changes
|
||||
from the Beatport database.
|
||||
* :doc:`/plugins/beatport`: Fix assignment of `genre` and rename `musical_key`
|
||||
to `initial_key`.
|
||||
:bug:`3387`
|
||||
* :doc:`/plugins/hook` now treats non-zero exit codes as errors.
|
||||
:bug:`3409`
|
||||
* :doc:`/plugins/subsonicupdate`: A new ``url`` configuration replaces the
|
||||
older (and now deprecated) separate ``host``, ``port``, and ``contextpath``
|
||||
config options. As a consequence, the plugin can now talk to Subsonic over
|
||||
HTTPS.
|
||||
Thanks to :user:`jef`.
|
||||
:bug:`3449`
|
||||
* :doc:`/plugins/discogs`: The new ``index_tracks`` option enables
|
||||
incorporation of work names and intra-work divisions into imported track
|
||||
titles.
|
||||
Thanks to :user:`cole-miller`.
|
||||
:bug:`3459`
|
||||
* :doc:`/plugins/fetchart`: Album art can now be fetched from `last.fm`_.
|
||||
:bug:`3530`
|
||||
* The classes ``AlbumInfo`` and ``TrackInfo`` now have flexible attributes,
|
||||
which made it possible to solve :bug:`1547`.
|
||||
Thanks to :user:`dosoe`.
|
||||
* :doc:`/plugins/web`: The query API now interprets backslashes as path
|
||||
separators to support path queries.
|
||||
Thanks to :user:`nmeum`.
|
||||
:bug:`3567`
|
||||
* ``beet import`` now handles tar archives with bzip2 or gzip compression.
|
||||
:bug:`3606`
|
||||
* :doc:`/plugins/plexupdate`: Add option to use secure connection to Plex
|
||||
server, and to ignore certificate validation errors if necessary.
|
||||
:bug:`2871`
|
||||
* :doc:`/plugins/lyrics`: Improved Genius backend searches when the artist
name contains special characters.
|
||||
:bug:`3634`
|
||||
* :doc:`/plugins/parentwork`: Also get the composition date of the parent work,
|
||||
instead of just the child work.
|
||||
Thanks to :user:`aereaux`.
|
||||
:bug:`3650`
|
||||
* :doc:`/plugins/lyrics`: Fix a bug in the heuristic for detecting valid
|
||||
lyrics in the Google source.
|
||||
:bug:`2969`
|
||||
* :doc:`/plugins/thumbnails`: Fix a bug where pathlib expected a string instead
|
||||
of bytes for a path.
|
||||
:bug:`3360`
|
||||
* Fields in queries now fall back to an item's album and check its fields too.
|
||||
Notably, this allows querying items by an album flex attribute, also in path
|
||||
configuration.
|
||||
|
|
@ -25,6 +150,11 @@ New features:
|
|||
|
||||
Fixes:
|
||||
|
||||
* :doc:`/plugins/fetchart`: Fixed a bug that caused fetchart to not take
|
||||
environment variables such as proxy servers into account when making requests
|
||||
:bug:`3450`
|
||||
* :doc:`/plugins/fetchart`: Temporary files for fetched album art that fail
|
||||
validation are now removed
|
||||
* :doc:`/plugins/inline`: In function-style field definitions that refer to
|
||||
flexible attributes, values could stick around from one function invocation
|
||||
to the next. This meant that, when displaying a list of objects, later
|
||||
|
|
@ -35,6 +165,84 @@ Fixes:
|
|||
fixing crashes in MPD clients like mpDris2 on seek.
|
||||
The ``playlistid`` command now works properly in its zero-argument form.
|
||||
:bug:`3214`
|
||||
* :doc:`/plugins/replaygain`: Fix a Python 3 incompatibility in the Python
|
||||
Audio Tools backend.
|
||||
:bug:`3305`
|
||||
* :doc:`/plugins/importadded`: Fixed a crash that occurred when the
|
||||
``after_write`` signal was emitted.
|
||||
:bug:`3301`
|
||||
* :doc:`plugins/replaygain`: Fix the storage format in R128 gain tags.
|
||||
:bug:`3311` :bug:`3314`
|
||||
* :doc:`/plugins/discogs`: Fixed a crash that occurred when the Master URI
|
||||
isn't set.
|
||||
:bug:`2965` :bug:`3239`
|
||||
* :doc:`/plugins/spotify`: Fix handling of year-only release dates
|
||||
returned by Spotify Albums API.
|
||||
Thanks to :user:`rhlahuja`.
|
||||
:bug:`3343`
|
||||
* Fixed a bug that caused the UI to display incorrect track numbers for tracks
|
||||
with index 0 when the ``per_disc_numbering`` option was set.
|
||||
:bug:`3346`
|
||||
* ``none_rec_action`` does not import automatically when ``timid`` is enabled.
|
||||
Thanks to :user:`RollingStar`.
|
||||
:bug:`3242`
|
||||
* Fix a bug that caused a crash when tagging items with the beatport plugin.
|
||||
:bug:`3374`
|
||||
* ``beet update`` will now confirm that the user still wants to update if
|
||||
their library folder cannot be found, preventing the user from accidentally
|
||||
wiping out their beets database.
|
||||
Thanks to :user:`logan-arens`.
|
||||
:bug:`1934`
|
||||
* :doc:`/plugins/bpd`: Fix the transition to next track when in consume mode.
|
||||
Thanks to :user:`aereaux`.
|
||||
:bug:`3437`
|
||||
* :doc:`/plugins/lyrics`: Fix a corner-case with Genius lowercase artist names
|
||||
:bug:`3446`
|
||||
* :doc:`/plugins/replaygain`: Support ``bs1770gain`` v0.6.0 and up
|
||||
:bug:`3480`
|
||||
* :doc:`/plugins/parentwork`: Don't save tracks when nothing has changed.
|
||||
:bug:`3492`
|
||||
* Added a warning when configuration files defined in the `include` directive
|
||||
of the configuration file fail to be imported.
|
||||
:bug:`3498`
|
||||
* Added a ``normalize`` method to the ``dbcore.types.INTEGER`` class, which now
properly returns integer values. This should avoid problems where fields
|
||||
like ``bpm`` would sometimes store non-integer values.
|
||||
:bug:`762` :bug:`3507` :bug:`3508`
|
||||
* Removed the ``@classmethod`` decorator from the ``dbcore.query.NoneQuery.match``
method, which failed with an ``AttributeError`` when called. It is now an instance method.
|
||||
:bug:`3516` :bug:`3517`
|
||||
* :doc:`/plugins/lyrics`: Tolerate missing lyrics div in Genius scraper.
|
||||
Thanks to :user:`thejli21`.
|
||||
:bug:`3535` :bug:`3554`
|
||||
* :doc:`/plugins/lyrics`: Use the artist sort name to search for lyrics, which
|
||||
can help find matches when the artist name has special characters.
|
||||
Thanks to :user:`hashhar`.
|
||||
:bug:`3340` :bug:`3558`
|
||||
* :doc:`/plugins/replaygain`: Trying to calculate volume gain for an album
|
||||
consisting of some formats using ``ReplayGain`` and some using ``R128``
|
||||
will no longer crash; instead the album is skipped and a message is logged.
The log message has also been rewritten to improve clarity.
|
||||
Thanks to :user:`autrimpo`.
|
||||
:bug:`3533`
|
||||
* :doc:`/plugins/lyrics`: Adapt the Genius backend to changes in markup to
|
||||
reduce the scraping failure rate.
|
||||
:bug:`3535` :bug:`3594`
|
||||
* :doc:`/plugins/lyrics`: Fix crash when writing ReST files for a query without
|
||||
results or fetched lyrics
|
||||
:bug:`2805`
|
||||
* Adapt to breaking changes in Python's ``ast`` module in 3.8
|
||||
* :doc:`/plugins/fetchart`: Attempt to fetch pre-resized thumbnails from Cover
|
||||
Art Archive if the ``maxwidth`` option matches one of the sizes supported by
|
||||
the Cover Art Archive API.
|
||||
Thanks to :user:`trolley`.
|
||||
:bug:`3637`
|
||||
* :doc:`/plugins/ipfs`: Fix Python 3 compatibility.
|
||||
Thanks to :user:`musoke`.
|
||||
:bug:`2554`
|
||||
* Fix a bug that caused metadata starting with something resembling a drive
|
||||
letter to be incorrectly split into an extra directory after the colon.
|
||||
:bug:`3685`
|
||||
|
||||
For plugin developers:
|
||||
|
||||
|
|
@ -49,6 +257,26 @@ For plugin developers:
|
|||
is almost identical apart from the name change. Again, we'll re-export at the
|
||||
old location (with a deprecation warning) for backwards compatibility, but
|
||||
might stop doing this in a future release.
|
||||
* ``beets.util.command_output`` now returns a named tuple containing both the
|
||||
standard output and the standard error data instead of just stdout alone.
|
||||
Client code will need to access the ``stdout`` attribute on the return
|
||||
value.
|
||||
Thanks to :user:`zsinskri`.
|
||||
:bug:`3329`
|
||||
* There were sporadic failures in ``test.test_player``. Hopefully these are
|
||||
fixed. If they resurface, please reopen the relevant issue.
|
||||
:bug:`3309` :bug:`3330`
|
||||
* The internal structure of the replaygain plugin has changed: there are no
longer separate R128 backend instances. Instead, the target level is passed to
|
||||
``compute_album_gain`` and ``compute_track_gain``.
|
||||
:bug:`3065`
|
||||
* The ``beets.plugins.MetadataSourcePlugin`` base class has been added to
|
||||
simplify development of plugins which query album, track, and search
|
||||
APIs to provide metadata matches for the importer. Refer to the Spotify and
|
||||
Deezer plugins for examples of using this template class.
|
||||
:bug:`3355`
|
||||
* The autotag hooks have been modified such that they now take 'bpm',
|
||||
'musical_key' and a per-track 'genre' as attributes.
|
||||
* Item (and attribute) access on an item now falls back to the album's
|
||||
attributes as well. If you specifically want to access an item's attributes,
|
||||
use ``Item.get(key, with_album=False)``. :bug:`2988`
|
||||
|
|
@ -70,10 +298,15 @@ For packagers:
|
|||
* We attempted to fix an unreliable test, so a patch to `skip <https://sources.debian.org/src/beets/1.4.7-2/debian/patches/skip-broken-test/>`_
|
||||
or `repair <https://build.opensuse.org/package/view_file/openSUSE:Factory/beets/fix_test_command_line_option_relative_to_working_dir.diff?expand=1>`_
|
||||
the test may no longer be necessary.
|
||||
* This version drops support for Python 3.4.
|
||||
|
||||
.. _Fish shell: https://fishshell.com/
|
||||
.. _MediaFile: https://github.com/beetbox/mediafile
|
||||
.. _Confuse: https://github.com/beetbox/confuse
|
||||
.. _works: https://musicbrainz.org/doc/Work
|
||||
.. _Deezer: https://www.deezer.com
|
||||
.. _keyfinder-cli: https://github.com/EvanPurkhiser/keyfinder-cli
|
||||
.. _last.fm: https://last.fm
|
||||
|
||||
|
||||
1.4.9 (May 30, 2019)
|
||||
|
|
@ -1227,7 +1460,7 @@ There are even more new features:
|
|||
don't actually need to be moved. :bug:`1583`
|
||||
|
||||
.. _Google Code-In: https://codein.withgoogle.com/
|
||||
.. _AcousticBrainz: http://acousticbrainz.org/
|
||||
.. _AcousticBrainz: https://acousticbrainz.org/
|
||||
|
||||
Fixes:
|
||||
|
||||
|
|
@ -1252,7 +1485,7 @@ Fixes:
|
|||
* :doc:`/plugins/replaygain`: Fix a crash using the Python Audio Tools
|
||||
backend. :bug:`1873`
|
||||
|
||||
.. _beets.io: http://beets.io/
|
||||
.. _beets.io: https://beets.io/
|
||||
.. _Beetbox: https://github.com/beetbox
|
||||
|
||||
|
||||
|
|
@ -1369,7 +1602,7 @@ Fixes:
|
|||
communication errors. The backend has also been disabled by default, since
|
||||
the API it depends on is currently down. :bug:`1770`
|
||||
|
||||
.. _Emby: http://emby.media
|
||||
.. _Emby: https://emby.media
|
||||
|
||||
|
||||
1.3.15 (October 17, 2015)
|
||||
|
|
@ -1531,8 +1764,8 @@ Fixes:
|
|||
* :doc:`/plugins/convert`: Fix a problem with filename encoding on Windows
|
||||
under Python 3. :bug:`2515` :bug:`2516`
|
||||
|
||||
.. _Python bug: http://bugs.python.org/issue16512
|
||||
.. _ipfs: http://ipfs.io
|
||||
.. _Python bug: https://bugs.python.org/issue16512
|
||||
.. _ipfs: https://ipfs.io
|
||||
|
||||
|
||||
1.3.13 (April 24, 2015)
|
||||
|
|
@ -1883,7 +2116,7 @@ As usual, there are loads of little fixes and improvements:
|
|||
* The :ref:`config-cmd` command can now use ``$EDITOR`` variables with
|
||||
arguments.
|
||||
|
||||
.. _API changes: http://developer.echonest.com/forums/thread/3650
|
||||
.. _API changes: https://developer.echonest.com/forums/thread/3650
|
||||
.. _Plex: https://plex.tv/
|
||||
.. _musixmatch: https://www.musixmatch.com/
|
||||
|
||||
|
|
@ -2363,7 +2596,7 @@ Fixes:
|
|||
* :doc:`/plugins/convert`: Display a useful error message when the FFmpeg
|
||||
executable can't be found.
|
||||
|
||||
.. _requests: http://www.python-requests.org/
|
||||
.. _requests: https://www.python-requests.org/
|
||||
|
||||
|
||||
1.3.3 (February 26, 2014)
|
||||
|
|
@ -2545,7 +2778,7 @@ As usual, there are also innumerable little fixes and improvements:
|
|||
|
||||
|
||||
.. _Acoustic Attributes: http://developer.echonest.com/acoustic-attributes.html
|
||||
.. _MPD: http://www.musicpd.org/
|
||||
.. _MPD: https://www.musicpd.org/
|
||||
|
||||
|
||||
1.3.1 (October 12, 2013)
|
||||
|
|
@ -2612,7 +2845,7 @@ And some fixes:
|
|||
* :doc:`/plugins/scrub`: Avoid preserving certain non-standard ID3 tags such
|
||||
as NCON.
|
||||
|
||||
.. _Opus: http://www.opus-codec.org/
|
||||
.. _Opus: https://www.opus-codec.org/
|
||||
.. _@Verrus: https://github.com/Verrus
|
||||
|
||||
|
||||
|
|
@ -2650,7 +2883,7 @@ previous versions would spit out a warning and then list your entire library.
|
|||
|
||||
There's more detail than you could ever need `on the beets blog`_.
|
||||
|
||||
.. _on the beets blog: http://beets.io/blog/flexattr.html
|
||||
.. _on the beets blog: https://beets.io/blog/flexattr.html
|
||||
|
||||
|
||||
1.2.2 (August 27, 2013)
|
||||
|
|
@ -2844,8 +3077,8 @@ And a batch of fixes:
|
|||
* :doc:`/plugins/lyrics`: Lyrics searches should now turn up more results due
|
||||
to some fixes in dealing with special characters.
|
||||
|
||||
.. _Discogs: http://discogs.com/
|
||||
.. _Beatport: http://www.beatport.com/
|
||||
.. _Discogs: https://discogs.com/
|
||||
.. _Beatport: https://www.beatport.com/
|
||||
|
||||
|
||||
1.1.0 (April 29, 2013)
|
||||
|
|
@ -2894,7 +3127,7 @@ will automatically migrate your configuration to the new system.
|
|||
header. Thanks to Uwe L. Korn.
|
||||
* :doc:`/plugins/lastgenre`: Fix an error when using genre canonicalization.
|
||||
|
||||
.. _Tomahawk: http://www.tomahawk-player.org/
|
||||
.. _Tomahawk: https://tomahawk-player.org/
|
||||
|
||||
1.1b3 (March 16, 2013)
|
||||
----------------------
|
||||
|
|
@ -3069,7 +3302,7 @@ Other new stuff:
|
|||
(YAML doesn't like tabs.)
|
||||
* Fix the ``-l`` (log path) command-line option for the ``import`` command.
|
||||
|
||||
.. _iTunes Sound Check: http://support.apple.com/kb/HT2425
|
||||
.. _iTunes Sound Check: https://support.apple.com/kb/HT2425
|
||||
|
||||
1.1b1 (January 29, 2013)
|
||||
------------------------
|
||||
|
|
@ -3078,7 +3311,7 @@ This release entirely revamps beets' configuration system. The configuration
|
|||
file is now a `YAML`_ document and is located, along with other support files,
|
||||
in a common directory (e.g., ``~/.config/beets`` on Unix-like systems).
|
||||
|
||||
.. _YAML: http://en.wikipedia.org/wiki/YAML
|
||||
.. _YAML: https://en.wikipedia.org/wiki/YAML
|
||||
|
||||
* Renamed plugins: The ``rdm`` plugin has been renamed to ``random`` and
|
||||
``fuzzy_search`` has been renamed to ``fuzzy``.
|
||||
|
|
@ -3238,9 +3471,9 @@ begins today on features for version 1.1.
|
|||
unintentionally loading the plugins they contain.
|
||||
|
||||
.. _The Echo Nest: http://the.echonest.com/
|
||||
.. _Tomahawk resolver: http://beets.io/blog/tomahawk-resolver.html
|
||||
.. _Tomahawk resolver: https://beets.io/blog/tomahawk-resolver.html
|
||||
.. _mp3gain: http://mp3gain.sourceforge.net/download.php
|
||||
.. _aacgain: http://aacgain.altosdesign.com
|
||||
.. _aacgain: https://aacgain.altosdesign.com
|
||||
|
||||
1.0b15 (July 26, 2012)
|
||||
----------------------
|
||||
|
|
@ -3349,7 +3582,7 @@ fetching cover art for your music, enable this plugin after upgrading to beets
|
|||
database with ``beet import -AWC /path/to/music``.
|
||||
* Fix ``import`` with relative path arguments on Windows.
|
||||
|
||||
.. _artist credits: http://wiki.musicbrainz.org/Artist_Credit
|
||||
.. _artist credits: https://wiki.musicbrainz.org/Artist_Credit
|
||||
|
||||
1.0b14 (May 12, 2012)
|
||||
---------------------
|
||||
|
|
@ -3507,7 +3740,7 @@ to come in the next couple of releases.
|
|||
data.
|
||||
* Fix the ``list`` command in BPD (thanks to Simon Chopin).
|
||||
|
||||
.. _Colorama: http://pypi.python.org/pypi/colorama
|
||||
.. _Colorama: https://pypi.python.org/pypi/colorama
|
||||
|
||||
1.0b12 (January 16, 2012)
|
||||
-------------------------
|
||||
|
|
@ -3620,12 +3853,12 @@ release: one for assigning genres and another for ReplayGain analysis.
|
|||
corrupted.
|
||||
|
||||
.. _KraYmer: https://github.com/KraYmer
|
||||
.. _Next Generation Schema: http://musicbrainz.org/doc/XML_Web_Service/Version_2
|
||||
.. _Next Generation Schema: https://musicbrainz.org/doc/XML_Web_Service/Version_2
|
||||
.. _python-musicbrainzngs: https://github.com/alastair/python-musicbrainzngs
|
||||
.. _acoustid: http://acoustid.org/
|
||||
.. _acoustid: https://acoustid.org/
|
||||
.. _Peter Brunner: https://github.com/Lugoues
|
||||
.. _Simon Chopin: https://github.com/laarmen
|
||||
.. _albumart.org: http://www.albumart.org/
|
||||
.. _albumart.org: https://www.albumart.org/
|
||||
|
||||
1.0b10 (September 22, 2011)
|
||||
---------------------------
|
||||
|
|
@ -3794,8 +4027,8 @@ below, for a plethora of new features.
|
|||
|
||||
* Fix a crash on album queries with item-only field names.
|
||||
|
||||
.. _xargs: http://en.wikipedia.org/wiki/xargs
|
||||
.. _unidecode: http://pypi.python.org/pypi/Unidecode/0.04.1
|
||||
.. _xargs: https://en.wikipedia.org/wiki/xargs
|
||||
.. _unidecode: https://pypi.python.org/pypi/Unidecode/0.04.1
|
||||
|
||||
1.0b8 (April 28, 2011)
|
||||
----------------------
|
||||
|
|
@ -3938,7 +4171,7 @@ new configuration options and the ability to clean up empty directory subtrees.
|
|||
|
||||
* The old "albumify" plugin for upgrading databases was removed.
|
||||
|
||||
.. _as specified by MusicBrainz: http://wiki.musicbrainz.org/ReleaseType
|
||||
.. _as specified by MusicBrainz: https://wiki.musicbrainz.org/ReleaseType
|
||||
|
||||
1.0b6 (January 20, 2011)
|
||||
------------------------
|
||||
|
|
@ -4054,7 +4287,7 @@ are also rolled into this release.
|
|||
|
||||
* Fixed escaping of ``/`` characters in paths on Windows.
|
||||
|
||||
.. _!!!: http://musicbrainz.org/artist/f26c72d3-e52c-467b-b651-679c73d8e1a7.html
|
||||
.. _!!!: https://musicbrainz.org/artist/f26c72d3-e52c-467b-b651-679c73d8e1a7.html
|
||||
|
||||
1.0b4 (August 9, 2010)
|
||||
----------------------
|
||||
|
|
@ -4243,7 +4476,7 @@ Vorbis) and an option to log untaggable albums during import.
|
|||
removed dependency on the aging ``cmdln`` module in favor of `a hand-rolled
|
||||
solution`_.
|
||||
|
||||
.. _a hand-rolled solution: http://gist.github.com/462717
|
||||
.. _a hand-rolled solution: https://gist.github.com/462717
|
||||
|
||||
1.0b1 (June 17, 2010)
|
||||
---------------------
|
||||
|
|
|
|||
3
docs/contributing.rst
Normal file
3
docs/contributing.rst
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
.. contributing:
|
||||
|
||||
.. include:: ../CONTRIBUTING.rst
|
||||
|
|
@ -1,84 +0,0 @@
|
|||
API Documentation
|
||||
=================
|
||||
|
||||
.. currentmodule:: beets.library
|
||||
|
||||
This page describes the internal API of beets' core. It's a work in
|
||||
progress---since beets is an application first and a library second, its API
|
||||
has been mainly undocumented until recently. Please file bugs if you run
|
||||
across incomplete or incorrect docs here.
|
||||
|
||||
The :class:`Library` object is the central repository for data in beets. It
|
||||
represents a database containing songs, which are :class:`Item` instances, and
|
||||
groups of items, which are :class:`Album` instances.
|
||||
|
||||
The Library Class
|
||||
-----------------
|
||||
|
||||
.. autoclass:: Library(path, directory[, path_formats[, replacements]])
|
||||
|
||||
.. automethod:: items
|
||||
|
||||
.. automethod:: albums
|
||||
|
||||
.. automethod:: get_item
|
||||
|
||||
.. automethod:: get_album
|
||||
|
||||
.. automethod:: add
|
||||
|
||||
.. automethod:: add_album
|
||||
|
||||
.. automethod:: transaction
|
||||
|
||||
Transactions
|
||||
''''''''''''
|
||||
|
||||
The :class:`Library` class provides the basic methods necessary to access and
|
||||
manipulate its contents. To perform more complicated operations atomically, or
|
||||
to interact directly with the underlying SQLite database, you must use a
|
||||
*transaction*. For example::
|
||||
|
||||
lib = Library()
|
||||
with lib.transaction() as tx:
|
||||
items = lib.items(query)
|
||||
lib.add_album(list(items))
|
||||
|
||||
.. currentmodule:: beets.dbcore.db
|
||||
|
||||
.. autoclass:: Transaction
|
||||
:members:
|
||||
|
||||
Model Classes
|
||||
-------------
|
||||
|
||||
The two model entities in beets libraries, :class:`Item` and :class:`Album`,
|
||||
share a base class, :class:`Model`, that provides common functionality and
|
||||
ORM-like abstraction.
|
||||
|
||||
The fields of model classes can be accessed using attributes (dots, as in
|
||||
``item.artist``) or items (brackets, as in ``item['artist']``). The
|
||||
:class:`Model` base class provides some methods that resemble `dict`
|
||||
objects.
|
||||
|
||||
Model base
|
||||
''''''''''
|
||||
|
||||
.. currentmodule:: beets.dbcore
|
||||
|
||||
.. autoclass:: Model
|
||||
:members:
|
||||
|
||||
Item
|
||||
''''
|
||||
|
||||
.. currentmodule:: beets.library
|
||||
|
||||
.. autoclass:: Item
|
||||
:members:
|
||||
|
||||
Album
|
||||
'''''
|
||||
|
||||
.. autoclass:: Album
|
||||
:members:
|
||||
9
docs/dev/cli.rst
Normal file
9
docs/dev/cli.rst
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
Providing a CLI
|
||||
===============
|
||||
|
||||
The ``beets.ui`` module houses interactions with the user via a terminal, the
|
||||
:doc:`/reference/cli`.
|
||||
The ``main`` function is called when the user types ``beet`` on the command line.
|
||||
The CLI functionality is organized into commands, some of which are built-in
|
||||
and some of which are provided by plugins. The built-in commands are all
|
||||
implemented in the ``beets.ui.commands`` submodule.
|
||||
19
docs/dev/importer.rst
Normal file
19
docs/dev/importer.rst
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
Music Importer
|
||||
==============
|
||||
|
||||
The importer component is responsible for the user-centric workflow that adds
|
||||
music to a library. This is one of the first aspects that a user experiences
|
||||
when using beets: it finds music in the filesystem, groups it into albums,
|
||||
finds corresponding metadata in MusicBrainz, asks the user for intervention,
|
||||
applies changes, and moves/copies files. A description of its user interface is
|
||||
given in :doc:`/guides/tagger`.
|
||||
|
||||
The workflow is implemented in the ``beets.importer`` module and is
|
||||
distinct from the core logic for matching MusicBrainz metadata (in the
|
||||
``beets.autotag`` module). The workflow is also decoupled from the command-line
|
||||
interface with the hope that, eventually, other (graphical) interfaces can be
|
||||
bolted onto the same importer implementation.
|
||||
|
||||
The importer is multithreaded and follows the pipeline pattern. Each pipeline
|
||||
stage is a Python coroutine. The ``beets.util.pipeline`` module houses
|
||||
a generic, reusable implementation of a multithreaded pipeline.
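The exact helpers in ``beets.util.pipeline`` are not reproduced here; the
following is only a generic sketch of the coroutine-pipeline pattern it
implements, using plain generator functions::

    def produce(paths):
        for path in paths:                  # first stage: emit one task per file
            yield {'path': path}

    def lookup(tasks):
        for task in tasks:                  # middle stage: enrich each task
            task['candidates'] = []         # placeholder for a metadata lookup
            yield task

    def apply_changes(tasks):
        for task in tasks:                  # final stage: consume each task
            print(task['path'], task['candidates'])

    # Wire the stages together; in the real importer each stage can run in
    # its own thread.
    apply_changes(lookup(produce(['/music/a.flac', '/music/b.flac'])))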
|
||||
|
|
@ -7,9 +7,11 @@ in hacking beets itself or creating plugins for it.
|
|||
See also the documentation for `MediaFile`_, the library used by beets to read
|
||||
and write metadata tags in media files.
|
||||
|
||||
.. _MediaFile: http://mediafile.readthedocs.io/
|
||||
.. _MediaFile: https://mediafile.readthedocs.io/
|
||||
|
||||
.. toctree::
|
||||
|
||||
plugins
|
||||
api
|
||||
library
|
||||
importer
|
||||
cli
|
||||
|
|
|
|||
279
docs/dev/library.rst
Normal file
279
docs/dev/library.rst
Normal file
|
|
@ -0,0 +1,279 @@
|
|||
Library Database API
|
||||
====================
|
||||
|
||||
.. currentmodule:: beets.library
|
||||
|
||||
This page describes the internal API of beets' core database features. It
|
||||
doesn't exhaustively document the API, but is aimed at giving an overview of
|
||||
the architecture to orient anyone who wants to dive into the code.
|
||||
|
||||
The :class:`Library` object is the central repository for data in beets. It
|
||||
represents a database containing songs, which are :class:`Item` instances, and
|
||||
groups of items, which are :class:`Album` instances.
|
||||
|
||||
The Library Class
|
||||
-----------------
|
||||
|
||||
The :class:`Library` is typically instantiated as a singleton. A single
|
||||
invocation of beets usually has only one :class:`Library`. It's powered by
|
||||
:class:`dbcore.Database` under the hood, which handles the `SQLite`_
|
||||
abstraction, something like a very minimal `ORM`_. The library is also
|
||||
responsible for handling queries to retrieve stored objects.
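A rough usage sketch (the database path and the query string are placeholders;
the constructor arguments follow the signature shown just below)::

    from beets.library import Library

    lib = Library('library.db', directory='~/Music')
    for item in lib.items('artist:beach'):
        print(item.title)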
|
||||
|
||||
.. autoclass:: Library(path, directory[, path_formats[, replacements]])
|
||||
|
||||
.. automethod:: __init__
|
||||
|
||||
You can add new items or albums to the library:
|
||||
|
||||
.. automethod:: add
|
||||
|
||||
.. automethod:: add_album
|
||||
|
||||
And there are methods for querying the database:
|
||||
|
||||
.. automethod:: items
|
||||
|
||||
.. automethod:: albums
|
||||
|
||||
.. automethod:: get_item
|
||||
|
||||
.. automethod:: get_album
|
||||
|
||||
Any modifications must go through a :class:`Transaction`, which you can get
|
||||
using this method:
|
||||
|
||||
.. automethod:: transaction
|
||||
|
||||
.. _SQLite: https://sqlite.org/
|
||||
.. _ORM: https://en.wikipedia.org/wiki/Object-relational_mapping
|
||||
|
||||
|
||||
Model Classes
|
||||
-------------
|
||||
|
||||
The two model entities in beets libraries, :class:`Item` and :class:`Album`,
|
||||
share a base class, :class:`LibModel`, that provides common functionality. That
|
||||
class itself specialises :class:`dbcore.Model`, which provides an ORM-like
|
||||
abstraction.
|
||||
|
||||
To get or change the metadata of a model (an item or album), either access its
|
||||
attributes (e.g., ``print(album.year)`` or ``album.year = 2012``) or use the
|
||||
``dict``-like interface (e.g. ``item['artist']``).
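Both styles read and write the same underlying field; for instance, with any
model object ``item`` that has been loaded from a library::

    item.artist = 'Beach House'   # attribute-style write
    print(item['artist'])         # dict-style read of the same field
    item['year'] = 2012           # dict-style write
    print(item.year)              # attribute-style read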
|
||||
|
||||
|
||||
Model base
|
||||
''''''''''
|
||||
|
||||
Models use dirty-flags to track when the object's metadata goes out of
|
||||
sync with the database. The dirty dictionary maps field names to booleans
|
||||
indicating whether the field has been written since the object was last
|
||||
synchronized (via load or store) with the database.
|
||||
|
||||
.. autoclass:: LibModel
|
||||
|
||||
.. automethod:: all_keys
|
||||
|
||||
.. automethod:: __init__
|
||||
|
||||
.. autoattribute:: _types
|
||||
|
||||
.. autoattribute:: _fields
|
||||
|
||||
There are CRUD-like methods for interacting with the database:
|
||||
|
||||
.. automethod:: store
|
||||
|
||||
.. automethod:: load
|
||||
|
||||
.. automethod:: remove
|
||||
|
||||
.. automethod:: add
|
||||
|
||||
The base class :class:`dbcore.Model` has a ``dict``-like interface, so
|
||||
the normal mapping API is supported:
|
||||
|
||||
.. automethod:: keys
|
||||
|
||||
.. automethod:: update
|
||||
|
||||
.. automethod:: items
|
||||
|
||||
.. automethod:: get
|
||||
|
||||
Item
|
||||
''''
|
||||
|
||||
Each :class:`Item` object represents a song or track. (We use the more generic
|
||||
term item because, one day, beets might support non-music media.) An item can
|
||||
either be purely abstract, in which case it's just a bag of metadata fields,
|
||||
or it can have an associated file (indicated by ``item.path``).
|
||||
|
||||
In terms of the underlying SQLite database, items are backed by a single table
|
||||
called ``items`` with one column per metadata field. The metadata fields currently
|
||||
in use are listed in ``library.py`` in ``Item._fields``.
|
||||
|
||||
To read and write a file's tags, we use the `MediaFile`_ library.
|
||||
To make changes to either the database or the tags on a file, you
|
||||
update an item's fields (e.g., ``item.title = "Let It Be"``) and then call
|
||||
``item.write()``.
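For example (a brief sketch; ``store()`` is the separate step that saves the
same change back to the beets database)::

    item.title = 'Let It Be'
    item.write()    # push the updated fields into the file's tags
    item.store()    # persist the updated fields in the database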
|
||||
|
||||
.. _MediaFile: https://mediafile.readthedocs.io/
|
||||
|
||||
Items also track their modification times (mtimes) to help detect when they
|
||||
become out of sync with on-disk metadata, mainly to speed up the
|
||||
:ref:`update-cmd` (which needs to check whether the database is in sync with
|
||||
the filesystem). This feature turns out to be sort of complicated.
|
||||
|
||||
For any :class:`Item`, there are two mtimes: the on-disk mtime (maintained by
|
||||
the OS) and the database mtime (maintained by beets). Correspondingly, there is
|
||||
on-disk metadata (ID3 tags, for example) and DB metadata. The goal with the
|
||||
mtime is to ensure that the on-disk and DB mtimes match when the on-disk and DB
|
||||
metadata are in sync; this lets beets do a quick mtime check and avoid
|
||||
rereading files in some circumstances.
|
||||
|
||||
Specifically, beets attempts to maintain the following invariant:
|
||||
|
||||
If the on-disk metadata differs from the DB metadata, then the on-disk
|
||||
mtime must be greater than the DB mtime.
|
||||
|
||||
As a result, it is always valid for the DB mtime to be zero (assuming that real
|
||||
disk mtimes are always positive). However, whenever possible, beets tries to
|
||||
set ``db_mtime = disk_mtime`` at points where it knows the metadata is
|
||||
synchronized. When it is possible that the metadata is out of sync, beets can
|
||||
then just set ``db_mtime = 0`` to return to a consistent state.
|
||||
|
||||
This leads to the following implementation policy:
|
||||
|
||||
* On every write of disk metadata (``Item.write()``), the DB mtime is updated
|
||||
to match the post-write disk mtime.
|
||||
* Same for metadata reads (``Item.read()``).
|
||||
* On every modification to DB metadata (``item.field = ...``), the DB mtime
|
||||
is reset to zero.
|
||||
|
||||
|
||||
.. autoclass:: Item
|
||||
|
||||
.. automethod:: __init__
|
||||
|
||||
.. automethod:: from_path
|
||||
|
||||
.. automethod:: get_album
|
||||
|
||||
.. automethod:: destination
|
||||
|
||||
.. automethod:: current_mtime
|
||||
|
||||
The methods ``read()`` and ``write()`` are complementary: one reads a
|
||||
file's tags and updates the item's metadata fields accordingly while the
|
||||
other takes the item's fields and writes them to the file's tags.
|
||||
|
||||
.. automethod:: read
|
||||
|
||||
.. automethod:: write
|
||||
|
||||
.. automethod:: try_write
|
||||
|
||||
.. automethod:: try_sync
|
||||
|
||||
The :class:`Item` class supplements the normal model interface with methods for
|
||||
interacting with the filesystem as well:
|
||||
|
||||
.. automethod:: move
|
||||
|
||||
.. automethod:: remove
|
||||
|
||||
Album
|
||||
'''''
|
||||
|
||||
An :class:`Album` is a collection of Items in the database. Every item in the
|
||||
database has either zero or one associated albums (accessible via
|
||||
``item.album_id``). An item that has no associated album is called a
|
||||
singleton.
|
||||
Changing fields on an album (e.g. ``album.year = 2012``) updates the album
|
||||
itself and also changes the same field in all associated items.
|
||||
|
||||
An :class:`Album` object keeps track of album-level metadata, which is (mostly)
|
||||
a subset of the track-level metadata. The album-level metadata fields are
|
||||
listed in ``Album._fields``.
|
||||
For those fields that are both item-level and album-level (e.g., ``year`` or
|
||||
``albumartist``), every item in an album should share the same value. Albums
|
||||
use an SQLite table called ``albums``, in which each column is an album
|
||||
metadata field.
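For example (a sketch; it assumes ``album`` was fetched from a library that
already contains its items)::

    album.year = 2012   # update the album-level field
    album.store()       # saves the album and forwards ``year`` to its items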
|
||||
|
||||
.. autoclass:: Album
|
||||
|
||||
.. automethod:: __init__
|
||||
|
||||
.. automethod:: item_dir
|
||||
|
||||
Albums extend the normal model interface to also forward changes to their
|
||||
items:
|
||||
|
||||
.. autoattribute:: item_keys
|
||||
|
||||
.. automethod:: store
|
||||
|
||||
.. automethod:: try_sync
|
||||
|
||||
.. automethod:: move
|
||||
|
||||
.. automethod:: remove
|
||||
|
||||
Albums also manage album art, image files that are associated with each
|
||||
album:
|
||||
|
||||
.. automethod:: set_art
|
||||
|
||||
.. automethod:: move_art
|
||||
|
||||
.. automethod:: art_destination
|
||||
|
||||
Transactions
|
||||
''''''''''''
|
||||
|
||||
The :class:`Library` class provides the basic methods necessary to access and
|
||||
manipulate its contents. To perform more complicated operations atomically, or
|
||||
to interact directly with the underlying SQLite database, you must use a
|
||||
*transaction* (see this `blog post`_ for motivation). For example::
|
||||
|
||||
lib = Library()
|
||||
with lib.transaction() as tx:
|
||||
items = lib.items(query)
|
||||
lib.add_album(list(items))
|
||||
|
||||
.. _blog post: https://beets.io/blog/sqlite-nightmare.html
|
||||
|
||||
.. currentmodule:: beets.dbcore.db
|
||||
|
||||
.. autoclass:: Transaction
|
||||
:members:
|
||||
|
||||
|
||||
Queries
|
||||
-------
|
||||
|
||||
To access albums and items in a library, we use :doc:`/reference/query`.
|
||||
In beets, the :class:`Query` abstract base class represents a criterion that
|
||||
matches items or albums in the database.
|
||||
Every subclass of :class:`Query` must provide two methods, which implement
|
||||
two different ways of identifying matching items/albums.
|
||||
|
||||
The ``clause()`` method should return an SQLite ``WHERE`` clause that matches
|
||||
appropriate albums/items. This allows for efficient batch queries.
|
||||
Correspondingly, the ``match(item)`` method should take an :class:`Item` object
|
||||
and return a boolean, indicating whether or not a specific item matches the
|
||||
criterion. This alternate implementation allows clients to determine whether
|
||||
items that have already been fetched from the database match the query.
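Sketched below is what a trivial custom query could look like; the import path
and the exact ``clause()`` return convention (SQL text plus substitution
values) are assumptions here, not verbatim beets API::

    from beets.dbcore.query import Query   # import path assumed

    class MinYearQuery(Query):
        """Match objects released during or after a given year."""

        def __init__(self, year):
            self.year = year

        def clause(self):
            # WHERE fragment plus substitution values, for efficient batch queries
            return 'year >= ?', (self.year,)

        def match(self, item):
            # per-object check for items already loaded into memory
            return item.year >= self.year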
|
||||
|
||||
There are many different types of queries. Just as an example,
|
||||
:class:`FieldQuery` determines whether a certain field matches a certain value
|
||||
(an equality query).
|
||||
:class:`AndQuery` (like its abstract superclass, :class:`CollectionQuery`)
|
||||
takes a set of other query objects and bundles them together, matching only
|
||||
albums/items that match all constituent queries.
|
||||
|
||||
Beets has a human-writable plain-text query syntax that can be parsed into
|
||||
:class:`Query` objects. Calling ``AndQuery.from_strings`` parses a list of
|
||||
query parts into a query object that can then be used with :class:`Library`
|
||||
objects.
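A hedged sketch of that flow, taking the description above at face value (the
real ``from_strings`` signature may require additional arguments)::

    parts = [u'artist:beach', u'year:2012']
    query = AndQuery.from_strings(parts)   # assumed call, per the text above
    items = lib.items(query)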
|
||||
|
|
@ -15,7 +15,7 @@ structure should look like this::
|
|||
myawesomeplugin.py
|
||||
|
||||
.. _Stack Overflow question about namespace packages:
|
||||
http://stackoverflow.com/questions/1675734/how-do-i-create-a-namespace-package-in-python/1676069#1676069
|
||||
https://stackoverflow.com/questions/1675734/how-do-i-create-a-namespace-package-in-python/1676069#1676069
|
||||
|
||||
Then, you'll need to put this stuff in ``__init__.py`` to make ``beetsplug`` a
|
||||
namespace package::
|
||||
|
|
@ -42,7 +42,7 @@ Then, as described above, edit your ``config.yaml`` to include
|
|||
``plugins: myawesomeplugin`` (substituting the name of the Python module
|
||||
containing your plugin).
|
||||
|
||||
.. _virtualenv: http://pypi.python.org/pypi/virtualenv
|
||||
.. _virtualenv: https://pypi.org/project/virtualenv
|
||||
|
||||
.. _add_subcommands:
|
||||
|
||||
|
|
@ -73,7 +73,7 @@ but it defaults to an empty parser (you can extend it later). ``help`` is a
|
|||
description of your command, and ``aliases`` is a list of shorthand versions of
|
||||
your command name.
|
||||
|
||||
.. _OptionParser instance: http://docs.python.org/library/optparse.html
|
||||
.. _OptionParser instance: https://docs.python.org/library/optparse.html
|
||||
|
||||
You'll need to add a function to your command by saying ``mycommand.func =
|
||||
myfunction``. This function should take the following parameters: ``lib`` (a
|
||||
|
|
@ -81,7 +81,7 @@ beets ``Library`` object) and ``opts`` and ``args`` (command-line options and
|
|||
arguments as returned by `OptionParser.parse_args`_).
|
||||
|
||||
.. _OptionParser.parse_args:
|
||||
http://docs.python.org/library/optparse.html#parsing-arguments
|
||||
https://docs.python.org/library/optparse.html#parsing-arguments
|
||||
|
||||
The function should use any of the utility functions defined in ``beets.ui``.
|
||||
Try running ``pydoc beets.ui`` to see what's available.
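Putting the pieces from this section together, a complete (if trivial) plugin
command might look roughly like this; the command name and message are made up
for illustration::

    from beets.plugins import BeetsPlugin
    from beets.ui import Subcommand, decargs, print_

    greet = Subcommand('greet', help='print a greeting per matching item')

    def greet_func(lib, opts, args):
        for item in lib.items(decargs(args)):   # args holds the query parts
            print_(u'Hello, {0}!'.format(item.title))

    greet.func = greet_func

    class GreetPlugin(BeetsPlugin):
        def commands(self):
            return [greet]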
|
||||
|
|
@ -301,7 +301,7 @@ To access this value, say ``self.config['foo'].get()`` at any point in your
|
|||
plugin's code. The `self.config` object is a *view* as defined by the `Confuse`_
|
||||
library.
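For instance (``foo`` is just a placeholder option name; ``add()`` supplies the
default that ``get()`` falls back to)::

    from beets.plugins import BeetsPlugin

    class MyAwesomePlugin(BeetsPlugin):
        def __init__(self):
            super(MyAwesomePlugin, self).__init__()
            self.config.add({'foo': 'bar'})    # default value

        def do_something(self):
            value = self.config['foo'].get()   # config.yaml value or the default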
|
||||
|
||||
.. _Confuse: http://confuse.readthedocs.org/
|
||||
.. _Confuse: https://confuse.readthedocs.org/
|
||||
|
||||
If you want to access configuration values *outside* of your plugin's section,
|
||||
import the `config` object from the `beets` module. That is, just put ``from
|
||||
|
|
@ -379,7 +379,7 @@ access to file tags. If you have created a descriptor you can add it through
|
|||
your plugins ``add_media_field()`` method.
|
||||
|
||||
.. automethod:: beets.plugins.BeetsPlugin.add_media_field
|
||||
.. _MediaFile: http://mediafile.readthedocs.io/
|
||||
.. _MediaFile: https://mediafile.readthedocs.io/
|
||||
|
||||
|
||||
Here's an example plugin that provides a meaningless new field "foo"::
|
||||
|
|
|
|||
22
docs/faq.rst
22
docs/faq.rst
|
|
@ -6,8 +6,8 @@ Got a question that isn't answered here? Try `IRC`_, the `discussion board`_, or
|
|||
:ref:`filing an issue <bugs>` in the bug tracker.
|
||||
|
||||
.. _IRC: irc://irc.freenode.net/beets
|
||||
.. _mailing list: http://groups.google.com/group/beets-users
|
||||
.. _discussion board: http://discourse.beets.io
|
||||
.. _mailing list: https://groups.google.com/group/beets-users
|
||||
.. _discussion board: https://discourse.beets.io
|
||||
|
||||
.. contents::
|
||||
:local:
|
||||
|
|
@ -94,14 +94,14 @@ the tracks into a single directory to force them to be tagged together.
|
|||
|
||||
An MBID looks like one of these:
|
||||
|
||||
- ``http://musicbrainz.org/release/ded77dcf-7279-457e-955d-625bd3801b87``
|
||||
- ``https://musicbrainz.org/release/ded77dcf-7279-457e-955d-625bd3801b87``
|
||||
- ``d569deba-8c6b-4d08-8c43-d0e5a1b8c7f3``
|
||||
|
||||
Beets can recognize either the hex-with-dashes UUID-style string or the
|
||||
full URL that contains it (as of 1.0b11).
|
||||
|
||||
You can get these IDs by `searching on the MusicBrainz web
|
||||
site <http://musicbrainz.org/>`__ and going to a *release* page (when
|
||||
site <https://musicbrainz.org/>`__ and going to a *release* page (when
|
||||
tagging full albums) or a *recording* page (when tagging singletons).
|
||||
Then, copy the URL of the page and paste it into beets.
|
||||
|
||||
|
|
@ -119,7 +119,7 @@ Run a command like this::
|
|||
|
||||
pip install -U beets
|
||||
|
||||
The ``-U`` flag tells `pip <http://www.pip-installer.org>`__ to upgrade
|
||||
The ``-U`` flag tells `pip <https://pip.pypa.io/>`__ to upgrade
|
||||
beets to the latest version. If you want a specific version, you can
|
||||
specify with using ``==`` like so::
|
||||
|
||||
|
|
@ -163,10 +163,10 @@ on GitHub. `Enter a new issue <https://github.com/beetbox/beets/issues/new>`__
|
|||
there to report a bug. Please follow these guidelines when reporting an issue:
|
||||
|
||||
- Most importantly: if beets is crashing, please `include the
|
||||
traceback <http://imgur.com/jacoj>`__. Tracebacks can be more
|
||||
traceback <https://imgur.com/jacoj>`__. Tracebacks can be more
|
||||
readable if you put them in a pastebin (e.g.,
|
||||
`Gist <https://gist.github.com/>`__ or
|
||||
`Hastebin <http://hastebin.com/>`__), especially when communicating
|
||||
`Hastebin <https://hastebin.com/>`__), especially when communicating
|
||||
over IRC or email.
|
||||
- Turn on beets' debug output (using the -v option: for example,
|
||||
``beet -v import ...``) and include that with your bug report. Look
|
||||
|
|
@ -188,7 +188,7 @@ there to report a bug. Please follow these guidelines when reporting an issue:
|
|||
|
||||
If you've never reported a bug before, Mozilla has some well-written
|
||||
`general guidelines for good bug
|
||||
reports <http://www.mozilla.org/bugs/>`__.
|
||||
reports <https://www.mozilla.org/bugs/>`__.
|
||||
|
||||
|
||||
.. _find-config:
|
||||
|
|
@ -237,7 +237,7 @@ Why does beets…
|
|||
There are a number of possibilities:
|
||||
|
||||
- First, make sure the album is in `the MusicBrainz
|
||||
database <http://musicbrainz.org/>`__. You
|
||||
database <https://musicbrainz.org/>`__. You
|
||||
can search on their site to make sure it's cataloged there. (If not,
|
||||
anyone can edit MusicBrainz---so consider adding the data yourself.)
|
||||
- If the album in question is a multi-disc release, see the relevant
|
||||
|
|
@ -320,7 +320,7 @@ it encounters files that *look* like music files (according to their
|
|||
extension) but seem to be broken. Most of the time, this is because the
|
||||
file is corrupted. To check whether the file is intact, try opening it
|
||||
in another media player (e.g.,
|
||||
`VLC <http://www.videolan.org/vlc/index.html>`__) to see whether it can
|
||||
`VLC <https://www.videolan.org/vlc/index.html>`__) to see whether it can
|
||||
read the file. You can also use specialized programs for checking file
|
||||
integrity---for example, type ``metaflac --list music.flac`` to check
|
||||
FLAC files.
|
||||
|
|
@ -378,4 +378,4 @@ installed using pip, the command ``pip show -f beets`` can show you where
|
|||
``beet`` was placed on your system. If you need help extending your ``$PATH``,
|
||||
try `this Super User answer`_.
|
||||
|
||||
.. _this Super User answer: http://superuser.com/a/284361/4569
|
||||
.. _this Super User answer: https://superuser.com/a/284361/4569
|
||||
|
|
|
|||
|
|
@ -93,7 +93,7 @@ everything by the Long Winters for listening on the go.
|
|||
The plugin has many more dials you can fiddle with to get your conversions how
|
||||
you like them. Check out :doc:`its documentation </plugins/convert>`.
|
||||
|
||||
.. _ffmpeg: http://www.ffmpeg.org
|
||||
.. _ffmpeg: https://www.ffmpeg.org
|
||||
|
||||
|
||||
Store any data you like
|
||||
|
|
@ -127,7 +127,7 @@ And, unlike :ref:`built-in fields <itemfields>`, such fields can be removed::
|
|||
Read more than you ever wanted to know about the *flexible attributes*
|
||||
feature `on the beets blog`_.
|
||||
|
||||
.. _on the beets blog: http://beets.io/blog/flexattr.html
|
||||
.. _on the beets blog: https://beets.io/blog/flexattr.html
|
||||
|
||||
|
||||
Choose a path style manually for some music
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ Getting Started
|
|||
Welcome to `beets`_! This guide will help you begin using it to make your music
|
||||
collection better.
|
||||
|
||||
.. _beets: http://beets.io/
|
||||
.. _beets: https://beets.io/
|
||||
|
||||
Installing
|
||||
----------
|
||||
|
|
@ -12,7 +12,7 @@ Installing
|
|||
You will need Python.
|
||||
Beets works on `Python 2.7`_ and Python 3.4 or later.
|
||||
|
||||
.. _Python 2.7: http://www.python.org/download/
|
||||
.. _Python 2.7: https://www.python.org/download/
|
||||
|
||||
* **macOS** v10.7 (Lion) and later include Python 2.7 out of the box.
|
||||
You can opt for Python 3 by installing it via `Homebrew`_:
|
||||
|
|
@ -43,13 +43,13 @@ Beets works on `Python 2.7`_ and Python 3.4 or later.
|
|||
* On **Fedora** 22 or later, there is a `DNF package`_::
|
||||
|
||||
$ sudo dnf install beets beets-plugins beets-doc
|
||||
|
||||
|
||||
* On **Solus**, run ``eopkg install beets``.
|
||||
|
||||
* On **NixOS**, there's a `package <NixOS_>`_ you can install with ``nix-env -i beets``.
|
||||
|
||||
.. _DNF package: https://apps.fedoraproject.org/packages/beets
|
||||
.. _SlackBuild: http://slackbuilds.org/repository/14.2/multimedia/beets/
|
||||
.. _SlackBuild: https://slackbuilds.org/repository/14.2/multimedia/beets/
|
||||
.. _FreeBSD: http://portsmon.freebsd.org/portoverview.py?category=audio&portname=beets
|
||||
.. _AUR: https://aur.archlinux.org/packages/beets-git/
|
||||
.. _Debian details: https://tracker.debian.org/pkg/beets
|
||||
|
|
@ -64,14 +64,14 @@ beets`` if you run into permissions problems).
|
|||
To install without pip, download beets from `its PyPI page`_ and run ``python
|
||||
setup.py install`` in the directory therein.
|
||||
|
||||
.. _its PyPI page: http://pypi.python.org/pypi/beets#downloads
|
||||
.. _pip: http://www.pip-installer.org/
|
||||
.. _its PyPI page: https://pypi.org/project/beets#downloads
|
||||
.. _pip: https://pip.pypa.io
|
||||
|
||||
The best way to upgrade beets to a new version is by running ``pip install -U
|
||||
beets``. You may want to follow `@b33ts`_ on Twitter to hear about progress on
|
||||
new versions.
|
||||
|
||||
.. _@b33ts: http://twitter.com/b33ts
|
||||
.. _@b33ts: https://twitter.com/b33ts
|
||||
|
||||
Installing on macOS 10.11 and Higher
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
@ -87,7 +87,7 @@ If this happens, you can install beets for the current user only by typing
|
|||
``~/Library/Python/3.6/bin`` to your ``$PATH``.
|
||||
|
||||
.. _System Integrity Protection: https://support.apple.com/en-us/HT204899
|
||||
.. _Homebrew: http://brew.sh
|
||||
.. _Homebrew: https://brew.sh
|
||||
|
||||
Installing on Windows
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
@ -122,10 +122,10 @@ Because I don't use Windows myself, I may have missed something. If you have
|
|||
trouble or you have more detail to contribute here, please direct it to
|
||||
`the mailing list`_.
|
||||
|
||||
.. _install Python: http://python.org/download/
|
||||
.. _install Python: https://python.org/download/
|
||||
.. _beets.reg: https://github.com/beetbox/beets/blob/master/extra/beets.reg
|
||||
.. _install pip: http://www.pip-installer.org/en/latest/installing.html#install-pip
|
||||
.. _get-pip.py: https://raw.github.com/pypa/pip/master/contrib/get-pip.py
|
||||
.. _install pip: https://pip.pypa.io/en/stable/installing/
|
||||
.. _get-pip.py: https://bootstrap.pypa.io/get-pip.py
|
||||
|
||||
|
||||
Configuring
|
||||
|
|
@ -179,7 +179,7 @@ There are approximately six million other configuration options you can set
|
|||
here, including the directory and file naming scheme. See
|
||||
:doc:`/reference/config` for a full reference.
|
||||
|
||||
.. _YAML: http://yaml.org/
|
||||
.. _YAML: https://yaml.org/
|
||||
|
||||
Importing Your Library
|
||||
----------------------
|
||||
|
|
@ -300,6 +300,6 @@ import`` gives more specific help about the ``import`` command.
|
|||
Please let me know what you think of beets via `the discussion board`_ or
|
||||
`Twitter`_.
|
||||
|
||||
.. _the mailing list: http://groups.google.com/group/beets-users
|
||||
.. _the discussion board: http://discourse.beets.io
|
||||
.. _twitter: http://twitter.com/b33ts
|
||||
.. _the mailing list: https://groups.google.com/group/beets-users
|
||||
.. _the discussion board: https://discourse.beets.io
|
||||
.. _twitter: https://twitter.com/b33ts
|
||||
|
|
|
|||
|
|
@ -272,7 +272,7 @@ Before you jump into acoustic fingerprinting with both feet, though, give beets
|
|||
a try without it. You may be surprised at how well metadata-based matching
|
||||
works.
|
||||
|
||||
.. _Chromaprint: http://acoustid.org/chromaprint
|
||||
.. _Chromaprint: https://acoustid.org/chromaprint
|
||||
|
||||
Album Art, Lyrics, Genres and Such
|
||||
----------------------------------
|
||||
|
|
@ -292,7 +292,7 @@ sure the album is present in `the MusicBrainz database`_. You can search on
|
|||
their site to make sure it's cataloged there. If not, anyone can edit
|
||||
MusicBrainz---so consider adding the data yourself.
|
||||
|
||||
.. _the MusicBrainz database: http://musicbrainz.org/
|
||||
.. _the MusicBrainz database: https://musicbrainz.org/
|
||||
|
||||
If you think beets is ignoring an album that's listed in MusicBrainz, please
|
||||
`file a bug report`_.
|
||||
|
|
@ -305,5 +305,5 @@ I Hope That Makes Sense
|
|||
If we haven't made the process clear, please post on `the discussion
|
||||
board`_ and we'll try to improve this guide.
|
||||
|
||||
.. _the mailing list: http://groups.google.com/group/beets-users
|
||||
.. _the discussion board: http://discourse.beets.io
|
||||
.. _the mailing list: https://groups.google.com/group/beets-users
|
||||
.. _the discussion board: https://discourse.beets.io
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ beets: the music geek's media organizer
|
|||
=======================================
|
||||
|
||||
Welcome to the documentation for `beets`_, the media library management system
|
||||
for obsessive-compulsive music geeks.
|
||||
for obsessive music geeks.
|
||||
|
||||
If you're new to beets, begin with the :doc:`guides/main` guide. That guide
|
||||
walks you through installing beets, setting it up how you like it, and starting
|
||||
|
|
@ -17,10 +17,10 @@ Freenode, drop by `the discussion board`_, send email to `the mailing list`_,
|
|||
or `file a bug`_ in the issue tracker. Please let us know where you think this
|
||||
documentation can be improved.
|
||||
|
||||
.. _beets: http://beets.io/
|
||||
.. _the mailing list: http://groups.google.com/group/beets-users
|
||||
.. _beets: https://beets.io/
|
||||
.. _the mailing list: https://groups.google.com/group/beets-users
|
||||
.. _file a bug: https://github.com/beetbox/beets/issues
|
||||
.. _the discussion board: http://discourse.beets.io
|
||||
.. _the discussion board: https://discourse.beets.io
|
||||
|
||||
Contents
|
||||
--------
|
||||
|
|
@ -32,6 +32,7 @@ Contents
|
|||
reference/index
|
||||
plugins/index
|
||||
faq
|
||||
contributing
|
||||
dev/index
|
||||
|
||||
.. toctree::
|
||||
|
|
|
|||
|
|
@ -7,22 +7,32 @@ The ``absubmit`` plugin lets you submit acoustic analysis results to the
|
|||
Installation
|
||||
------------
|
||||
|
||||
The ``absubmit`` plugin requires the `streaming_extractor_music`_ program to run. Its source can be found on `GitHub`_, and while it is possible to compile the extractor from source, AcousticBrainz would prefer if you used their binary (see the AcousticBrainz `FAQ`_).
|
||||
The ``absubmit`` plugin requires the `streaming_extractor_music`_ program
|
||||
to run. Its source can be found on `GitHub`_, and while it is possible to
|
||||
compile the extractor from source, AcousticBrainz would prefer if you used
|
||||
their binary (see the AcousticBrainz `FAQ`_).
|
||||
|
||||
The ``absubmit`` plugin also requires `requests`_, which you can install using `pip`_ by typing::
|
||||
The ``absubmit`` plugin also requires `requests`_, which you can install
|
||||
using `pip`_ by typing::
|
||||
|
||||
pip install requests
|
||||
|
||||
After installing both the extractor binary and requests you can enable the plugin ``absubmit`` in your configuration (see :ref:`using-plugins`).
|
||||
After installing both the extractor binary and requests you can enable
|
||||
the plugin ``absubmit`` in your configuration (see :ref:`using-plugins`).
|
||||
|
||||
Submitting Data
|
||||
---------------
|
||||
|
||||
Type::
|
||||
|
||||
beet absubmit [QUERY]
|
||||
beet absubmit [-f] [-d] [QUERY]
|
||||
|
||||
to run the analysis program and upload its results.
|
||||
to run the analysis program and upload its results. By default, the
|
||||
command will only look for AcousticBrainz data when the tracks
|
||||
don't already have it; the ``-f`` or ``--force`` switch makes it refetch
|
||||
data even when it already exists. You can use the ``-d`` or ``--dry`` switch
|
||||
to check which files will be analyzed, before you start a longer period
|
||||
of processing.
|
||||
|
||||
The plugin works on music with a MusicBrainz track ID attached. The plugin
|
||||
will also skip music that the analysis tool doesn't support.
|
||||
|
|
@ -34,16 +44,24 @@ will also skip music that the analysis tool doesn't support.
|
|||
Configuration
|
||||
-------------
|
||||
|
||||
To configure the plugin, make a ``absubmit:`` section in your configuration file. The available options are:
|
||||
To configure the plugin, make an ``absubmit:`` section in your configuration
|
||||
file. The available options are:
|
||||
|
||||
- **auto**: Analyze every file on import. Otherwise, you need to use the ``beet absubmit`` command explicitly.
|
||||
- **auto**: Analyze every file on import. Otherwise, you need to use the
|
||||
``beet absubmit`` command explicitly.
|
||||
Default: ``no``
|
||||
- **extractor**: The absolute path to the `streaming_extractor_music`_ binary.
|
||||
Default: search for the program in your ``$PATH``
|
||||
- **force**: Analyze items and submit AcousticBrainz data even for tracks
|
||||
that already have it.
|
||||
Default: ``no``.
|
||||
- **pretend**: Do not analyze and submit AcousticBrainz data; just print out
|
||||
the items which would be processed.
|
||||
Default: ``no``.
|
||||
|
||||
.. _streaming_extractor_music: http://acousticbrainz.org/download
|
||||
.. _FAQ: http://acousticbrainz.org/faq
|
||||
.. _pip: http://www.pip-installer.org/
|
||||
.. _requests: http://docs.python-requests.org/en/master/
|
||||
.. _streaming_extractor_music: https://acousticbrainz.org/download
|
||||
.. _FAQ: https://acousticbrainz.org/faq
|
||||
.. _pip: https://pip.pypa.io
|
||||
.. _requests: https://docs.python-requests.org/en/master/
|
||||
.. _github: https://github.com/MTG/essentia
|
||||
.. _AcousticBrainz: https://acousticbrainz.org
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ AcousticBrainz Plugin
|
|||
The ``acousticbrainz`` plugin gets acoustic-analysis information from the
|
||||
`AcousticBrainz`_ project.
|
||||
|
||||
.. _AcousticBrainz: http://acousticbrainz.org/
|
||||
.. _AcousticBrainz: https://acousticbrainz.org/
|
||||
|
||||
Enable the ``acousticbrainz`` plugin in your configuration (see :ref:`using-plugins`) and run it by typing::
|
||||
|
||||
|
|
@ -38,7 +38,9 @@ these fields:
|
|||
* ``mood_party``
|
||||
* ``mood_relaxed``
|
||||
* ``mood_sad``
|
||||
* ``moods_mirex``
|
||||
* ``rhythm``
|
||||
* ``timbre``
|
||||
* ``tonal``
|
||||
* ``voice_instrumental``
|
||||
|
||||
|
|
|
|||
|
|
@ -4,7 +4,9 @@ Beatport Plugin
|
|||
The ``beatport`` plugin adds support for querying the `Beatport`_ catalogue
|
||||
during the autotagging process. This can potentially be helpful for users
|
||||
whose collection includes a lot of diverse electronic music releases, for which
|
||||
both MusicBrainz and (to a lesser degree) Discogs show no matches.
|
||||
both MusicBrainz and (to a lesser degree) `Discogs`_ show no matches.
|
||||
|
||||
.. _Discogs: https://discogs.com
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
|
@ -21,16 +23,24 @@ run the :ref:`import-cmd` command after enabling the plugin, it will ask you
|
|||
to authorize with Beatport by visiting the site in a browser. On the site
|
||||
you will be asked to enter your username and password to authorize beets
|
||||
to query the Beatport API. You will then be displayed with a single line of
|
||||
text that you should paste into your terminal. This will store the
|
||||
authentication data for subsequent runs and you will not be required to
|
||||
repeat the above steps.
|
||||
text that you should paste as a whole into your terminal. This will store the
|
||||
authentication data for subsequent runs and you will not be required to repeat
|
||||
the above steps.
|
||||
|
||||
Matches from Beatport should now show up alongside matches
|
||||
from MusicBrainz and other sources.
|
||||
|
||||
If you have a Beatport ID or a URL for a release or track you want to tag, you
|
||||
can just enter one of the two at the "enter Id" prompt in the importer.
|
||||
can just enter one of the two at the "enter Id" prompt in the importer. You can
|
||||
also search for an id like so:
|
||||
|
||||
.. _requests: http://docs.python-requests.org/en/latest/
|
||||
beet import path/to/music/library --search-id id
|
||||
|
||||
Configuration
|
||||
-------------
|
||||
|
||||
This plugin can be configured like other metadata source plugins as described in :ref:`metadata-source-plugin-configuration`.
|
||||
|
||||
.. _requests: https://docs.python-requests.org/en/latest/
|
||||
.. _requests_oauthlib: https://github.com/requests/requests-oauthlib
|
||||
.. _Beatport: http://beatport.com
|
||||
.. _Beatport: https://beatport.com
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ implements the MPD protocol, so it's compatible with all the great MPD clients
|
|||
out there. I'm using `Theremin`_, `gmpc`_, `Sonata`_, and `Ario`_ successfully.
|
||||
|
||||
.. _Theremin: https://theremin.sigterm.eu/
|
||||
.. _gmpc: http://gmpc.wikia.com/wiki/Gnome_Music_Player_Client
|
||||
.. _gmpc: https://gmpc.wikia.com/wiki/Gnome_Music_Player_Client
|
||||
.. _Sonata: http://sonata.berlios.de/
|
||||
.. _Ario: http://ario-player.sourceforge.net/
|
||||
|
||||
|
|
@ -29,8 +29,8 @@ You will also need the various GStreamer plugin packages to make everything
|
|||
work. See the :doc:`/plugins/chroma` documentation for more information on
|
||||
installing GStreamer plugins.
|
||||
|
||||
.. _GStreamer WinBuilds: http://www.gstreamer-winbuild.ylatuya.es/
|
||||
.. _Homebrew: http://mxcl.github.com/homebrew/
|
||||
.. _GStreamer WinBuilds: https://www.gstreamer-winbuild.ylatuya.es/
|
||||
.. _Homebrew: https://brew.sh
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
|
@ -44,7 +44,7 @@ Then, you can run BPD by invoking::
|
|||
Fire up your favorite MPD client to start playing music. The MPD site has `a
|
||||
long list of available clients`_. Here are my favorites:
|
||||
|
||||
.. _a long list of available clients: http://mpd.wikia.com/wiki/Clients
|
||||
.. _a long list of available clients: https://mpd.wikia.com/wiki/Clients
|
||||
|
||||
* Linux: `gmpc`_, `Sonata`_
|
||||
|
||||
|
|
@ -52,9 +52,9 @@ long list of available clients`_. Here are my favorites:
|
|||
|
||||
* Windows: I don't know. Get in touch if you have a recommendation.
|
||||
|
||||
* iPhone/iPod touch: `MPoD`_
|
||||
* iPhone/iPod touch: `Rigelian`_
|
||||
|
||||
.. _MPoD: http://www.katoemba.net/makesnosenseatall/mpod/
|
||||
.. _Rigelian: https://www.rigelian.net/
|
||||
|
||||
One nice thing about MPD's (and thus BPD's) client-server architecture is that
|
||||
the client can just as easily on a different computer from the server as it can
|
||||
|
|
@ -109,7 +109,7 @@ behaviour to their MPD equivalents. BPD aims to look enough like MPD that it
|
|||
can interact with the ecosystem of clients, but doesn't try to be
|
||||
a fully-fledged MPD replacement in terms of its playback capabilities.
|
||||
|
||||
.. _the MPD protocol: http://www.musicpd.org/doc/protocol/
|
||||
.. _the MPD protocol: https://www.musicpd.org/doc/protocol/
|
||||
|
||||
These are some of the known differences between BPD and MPD:
|
||||
|
||||
|
|
|
|||
34
docs/plugins/bpsync.rst
Normal file
34
docs/plugins/bpsync.rst
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
BPSync Plugin
|
||||
=============
|
||||
|
||||
This plugin provides the ``bpsync`` command, which lets you fetch metadata
|
||||
from Beatport for albums and tracks that already have Beatport IDs.
|
||||
This plugin works similarly to :doc:`/plugins/mbsync`.
|
||||
|
||||
If you have downloaded music from Beatport, this can speed
|
||||
up the initial import if you just import "as-is" and then use ``bpsync`` to
|
||||
get up-to-date tags that are written to the files according to your beets
|
||||
configuration.
|
||||
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
Enable the ``bpsync`` plugin in your configuration (see :ref:`using-plugins`)
|
||||
and then run ``beet bpsync QUERY`` to fetch updated metadata for a part of your
|
||||
collection (or omit the query to run over your whole library).
|
||||
|
||||
This plugin treats albums and singletons (non-album tracks) separately. It
|
||||
first processes all matching singletons and then proceeds on to full albums.
|
||||
The same query is used to search for both kinds of entities.
|
||||
|
||||
The command has a few command-line options:
|
||||
|
||||
* To preview the changes that would be made without applying them, use the
|
||||
``-p`` (``--pretend``) flag.
|
||||
* By default, files will be moved (renamed) according to their metadata if
|
||||
they are inside your beets library directory. To disable this, use the
|
||||
``-M`` (``--nomove``) command-line option.
|
||||
* If you have the ``import.write`` configuration option enabled, then this
|
||||
plugin will write new metadata to files' tags. To disable this, use the
|
||||
``-W`` (``--nowrite``) option.
|
||||
|
|
@ -8,8 +8,8 @@ information at all (or have completely incorrect data). This plugin uses an
|
|||
open-source fingerprinting technology called `Chromaprint`_ and its associated
|
||||
Web service, called `Acoustid`_.
|
||||
|
||||
.. _Chromaprint: http://acoustid.org/chromaprint
|
||||
.. _acoustid: http://acoustid.org/
|
||||
.. _Chromaprint: https://acoustid.org/chromaprint
|
||||
.. _acoustid: https://acoustid.org/
|
||||
|
||||
Turning on fingerprinting can increase the accuracy of the
|
||||
autotagger---especially on files with very poor metadata---but it comes at a
|
||||
|
|
@ -31,7 +31,7 @@ First, install pyacoustid itself. You can do this using `pip`_, like so::
|
|||
|
||||
$ pip install pyacoustid
|
||||
|
||||
.. _pip: http://www.pip-installer.org/
|
||||
.. _pip: https://pip.pypa.io
|
||||
|
||||
Then, you will need to install `Chromaprint`_, either as a dynamic library or
|
||||
in the form of a command-line tool (``fpcalc``).
|
||||
|
|
@ -45,7 +45,7 @@ The simplest way to get up and running, especially on Windows, is to
|
|||
means something like ``C:\\Program Files``. On OS X or Linux, put the
|
||||
executable somewhere like ``/usr/local/bin``.
|
||||
|
||||
.. _download: http://acoustid.org/chromaprint
|
||||
.. _download: https://acoustid.org/chromaprint
|
||||
|
||||
Installing the Library
|
||||
''''''''''''''''''''''
|
||||
|
|
@ -56,7 +56,7 @@ site has links to packages for major Linux distributions. If you use
|
|||
`Homebrew`_ on Mac OS X, you can install the library with ``brew install
|
||||
chromaprint``.
|
||||
|
||||
.. _Homebrew: http://mxcl.github.com/homebrew/
|
||||
.. _Homebrew: https://brew.sh/
|
||||
|
||||
You will also need a mechanism for decoding audio files supported by the
|
||||
`audioread`_ library:
|
||||
|
|
@ -78,12 +78,12 @@ You will also need a mechanism for decoding audio files supported by the
|
|||
* On Windows, builds are provided by `GStreamer`_
|
||||
|
||||
.. _audioread: https://github.com/beetbox/audioread
|
||||
.. _pyacoustid: http://github.com/beetbox/pyacoustid
|
||||
.. _FFmpeg: http://ffmpeg.org/
|
||||
.. _MAD: http://spacepants.org/src/pymad/
|
||||
.. _pymad: http://www.underbit.com/products/mad/
|
||||
.. _Core Audio: http://developer.apple.com/technologies/mac/audio-and-video.html
|
||||
.. _Gstreamer: http://gstreamer.freedesktop.org/
|
||||
.. _pyacoustid: https://github.com/beetbox/pyacoustid
|
||||
.. _FFmpeg: https://ffmpeg.org/
|
||||
.. _MAD: https://spacepants.org/src/pymad/
|
||||
.. _pymad: https://www.underbit.com/products/mad/
|
||||
.. _Core Audio: https://developer.apple.com/technologies/mac/audio-and-video.html
|
||||
.. _Gstreamer: https://gstreamer.freedesktop.org/
|
||||
.. _PyGObject: https://wiki.gnome.org/Projects/PyGObject
|
||||
|
||||
To decode audio formats (MP3, FLAC, etc.) with GStreamer, you'll need the
|
||||
|
|
@ -96,7 +96,9 @@ Usage
|
|||
|
||||
Once you have all the dependencies sorted out, enable the ``chroma`` plugin in
|
||||
your configuration (see :ref:`using-plugins`) to benefit from fingerprinting
|
||||
the next time you run ``beet import``.
|
||||
the next time you run ``beet import``. (The plugin doesn't produce any obvious
|
||||
output by default. If you want to confirm that it's enabled, you can try
|
||||
running in verbose mode once with ``beet -v import``.)
|
||||
|
||||
You can also use the ``beet fingerprint`` command to generate fingerprints for
|
||||
items already in your library. (Provide a query to fingerprint a subset of your
|
||||
|
|
@ -132,4 +134,4 @@ Then, run ``beet submit``. (You can also provide a query to submit a subset of
|
|||
your library.) The command will use stored fingerprints if they're available;
|
||||
otherwise it will fingerprint each file before submitting it.
|
||||
|
||||
.. _get an API key: http://acoustid.org/api-key
|
||||
.. _get an API key: https://acoustid.org/api-key
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ To use the ``convert`` plugin, first enable it in your configuration (see
|
|||
:ref:`using-plugins`). By default, the plugin depends on `FFmpeg`_ to
|
||||
transcode the audio, so you might want to install it.
|
||||
|
||||
.. _FFmpeg: http://ffmpeg.org
|
||||
.. _FFmpeg: https://ffmpeg.org
|
||||
|
||||
|
||||
Usage
|
||||
|
|
@ -24,7 +24,9 @@ To convert a part of your collection, run ``beet convert QUERY``. The
|
|||
command will transcode all the files matching the query to the
|
||||
destination directory given by the ``-d`` (``--dest``) option or the
|
||||
``dest`` configuration. The path layout mirrors that of your library,
|
||||
but it may be customized through the ``paths`` configuration.
|
||||
but it may be customized through the ``paths`` configuration. Files
|
||||
that have been previously converted---and thus already exist in the
|
||||
destination directory---will be skipped.
|
||||
|
||||
The plugin uses a command-line program to transcode the audio. With the
|
||||
``-f`` (``--format``) option you can choose the transcoding command
|
||||
|
|
@ -46,6 +48,12 @@ To test your configuration without taking any actions, use the ``--pretend``
|
|||
flag. The plugin will print out the commands it will run instead of executing
|
||||
them.
|
||||
|
||||
By default, files that do not need to be transcoded will be copied to their
|
||||
destination. Passing the ``-l`` (``--link``) flag creates symbolic links
|
||||
instead, and passing ``-H`` (``--hardlink``) creates hard links.
|
||||
Note that album art embedding is disabled for files that are linked.
|
||||
Refer to the ``link`` and ``hardlink`` options below.
|
||||
|
||||
|
||||
Configuration
|
||||
-------------
|
||||
|
|
@ -91,6 +99,18 @@ file. The available options are:
|
|||
- **threads**: The number of threads to use for parallel encoding.
|
||||
By default, the plugin will detect the number of processors available and use
|
||||
them all.
|
||||
- **link**: By default, files that do not need to be transcoded will be copied
|
||||
to their destination. This option creates symbolic links instead. Note that
|
||||
options such as ``embed`` that modify the output files after the transcoding
|
||||
step will cause the original files to be modified as well if ``link`` is
|
||||
enabled. For this reason, album-art embedding is disabled
|
||||
for files that are linked.
|
||||
Default: ``false``.
|
||||
- **hardlink**: This option works similarly to ``link``, but it creates
|
||||
hard links instead of symlinks.
|
||||
This option overrides ``link``. Only works when converting to a directory
|
||||
on the same filesystem as the library.
|
||||
Default: ``false``.
|
||||
|
||||
You can also configure the format to use for transcoding (see the next
|
||||
section):
|
||||
|
|
@ -170,6 +190,6 @@ can use the :doc:`/plugins/replaygain` to do this analysis. See the LAME
|
|||
options and a thorough discussion of MP3 encoding.
|
||||
|
||||
.. _documentation: http://lame.sourceforge.net/using.php
|
||||
.. _HydrogenAudio wiki: http://wiki.hydrogenaud.io/index.php?title=LAME
|
||||
.. _gapless: http://wiki.hydrogenaud.io/index.php?title=Gapless_playback
|
||||
.. _LAME: http://lame.sourceforge.net/
|
||||
.. _HydrogenAudio wiki: https://wiki.hydrogenaud.io/index.php?title=LAME
|
||||
.. _gapless: https://wiki.hydrogenaud.io/index.php?title=Gapless_playback
|
||||
.. _LAME: https://lame.sourceforge.net/
|
||||
|
|
|
|||
25
docs/plugins/deezer.rst
Normal file
25
docs/plugins/deezer.rst
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
Deezer Plugin
|
||||
==============
|
||||
|
||||
The ``deezer`` plugin provides metadata matches for the importer using the
|
||||
`Deezer`_ `Album`_ and `Track`_ APIs.
|
||||
|
||||
.. _Deezer: https://www.deezer.com
|
||||
.. _Album: https://developers.deezer.com/api/album
|
||||
.. _Track: https://developers.deezer.com/api/track
|
||||
|
||||
Basic Usage
|
||||
-----------
|
||||
|
||||
First, enable the ``deezer`` plugin (see :ref:`using-plugins`).
|
||||
|
||||
You can enter the URL for an album or song on Deezer at the ``enter Id``
|
||||
prompt during import::
|
||||
|
||||
Enter search, enter Id, aBort, eDit, edit Candidates, plaY? i
|
||||
Enter release ID: https://www.deezer.com/en/album/572261
|
||||
|
||||
Configuration
|
||||
-------------
|
||||
|
||||
This plugin can be configured like other metadata source plugins as described in :ref:`metadata-source-plugin-configuration`.
|
||||
|
|
@ -4,7 +4,7 @@ Discogs Plugin
|
|||
The ``discogs`` plugin extends the autotagger's search capabilities to
|
||||
include matches from the `Discogs`_ database.
|
||||
|
||||
.. _Discogs: http://discogs.com
|
||||
.. _Discogs: https://discogs.com
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
|

@@ -43,6 +43,38 @@ documentation), login to `Discogs`_, and visit the

token`` button, and place the generated token in your configuration, as the
``user_token`` config option in the ``discogs`` section.

Configuration
-------------

This plugin can be configured like other metadata source plugins as described in :ref:`metadata-source-plugin-configuration`.

There is one additional option in the ``discogs:`` section, ``index_tracks``.
Index tracks (see the `Discogs guidelines
<https://support.discogs.com/hc/en-us/articles/360005055373-Database-Guidelines-12-Tracklisting#12.13>`_),
along with headers, mark divisions between distinct works on the same release
or within works. When ``index_tracks`` is enabled::

    discogs:
        index_tracks: yes

beets will incorporate the names of the divisions containing each track into
the imported track's title. For example, importing `this album
<https://www.discogs.com/Handel-Sutherland-Kirkby-Kwella-Nelson-Watkinson-Bowman-Rolfe-Johnson-Elliott-Partridge-Thomas-The-A/release/2026070>`_
would result in track names like::

    Messiah, Part I: No.1: Sinfony
    Messiah, Part II: No.22: Chorus- Behold The Lamb Of God
    Athalia, Act I, Scene I: Sinfonia

whereas with ``index_tracks`` disabled you'd get::

    No.1: Sinfony
    No.22: Chorus- Behold The Lamb Of God
    Sinfonia

This option is useful when importing classical music.

Troubleshooting
---------------

@@ -58,6 +58,13 @@ file. The available options are:

  the aspect ratio is preserved. See also :ref:`image-resizing` for further
  caveats about image resizing.
  Default: 0 (disabled).
- **quality**: The JPEG quality level to use when compressing images (when
  ``maxwidth`` is set). This should be either a number from 1 to 100 or 0 to
  use the default quality. 65–75 is usually a good starting point. The default
  behavior depends on the imaging tool used for scaling: ImageMagick tries to
  estimate the input image quality and uses 92 if it cannot be determined, and
  PIL defaults to 75.
  Default: 0 (disabled)
- **remove_art_file**: Automatically remove the album art file for the album
  after it has been embedded. This option is best used alongside the
  :doc:`FetchArt </plugins/fetchart>` plugin to download art with the purpose of

@@ -69,7 +76,7 @@ Note: ``compare_threshold`` option requires `ImageMagick`_, and ``maxwidth``

requires either `ImageMagick`_ or `Pillow`_.

.. _Pillow: https://github.com/python-pillow/Pillow
.. _ImageMagick: http://www.imagemagick.org/
.. _ImageMagick: https://www.imagemagick.org/
.. _PHASH: http://www.fmwconcepts.com/misc_tests/perceptual_hash_test_results_510/

Manually Embedding and Extracting Art

@@ -17,8 +17,8 @@ To use the ``embyupdate`` plugin you need to install the `requests`_ library with

With that all in place, you'll see beets send the "update" command to your Emby server every time you change your beets library.

.. _Emby: http://emby.media/
.. _requests: http://docs.python-requests.org/en/latest/
.. _Emby: https://emby.media/
.. _requests: https://docs.python-requests.org/en/latest/

Configuration
-------------

@@ -2,9 +2,11 @@ Export Plugin

=============

The ``export`` plugin lets you get data from the items and export the content
as `JSON`_.
as `JSON`_, `CSV`_, or `XML`_.

.. _JSON: http://www.json.org
.. _JSON: https://www.json.org
.. _CSV: https://fileinfo.com/extension/csv
.. _XML: https://fileinfo.com/extension/xml

Enable the ``export`` plugin (see :ref:`using-plugins` for help). Then, type ``beet export`` followed by a :doc:`query </reference/query>` to get the data from
your library. For example, run this::

@@ -13,6 +15,7 @@ your library. For example, run this::

to print a JSON file containing information about your Beatles tracks.


Command-Line Options
--------------------

@@ -36,30 +39,42 @@ The ``export`` command has these command-line options:

* ``--append``: Appends the data to the file instead of writing.

* ``--format`` or ``-f``: Specifies the format the data will be exported as. If not specified, JSON is used by default. The format options include csv, json, and xml.

Configuration
-------------

To configure the plugin, make an ``export:`` section in your configuration
file. Under the ``json`` key, these options are available:
file.
For JSON export, these options are available under the ``json`` key:

- **ensure_ascii**: Escape non-ASCII characters with ``\uXXXX`` entities.

- **indent**: The number of spaces for indentation.

- **separators**: A ``[item_separator, dict_separator]`` tuple.

- **sort_keys**: Sorts the keys in JSON dictionaries.

These options match the options from the `Python json module`_.
Those options match the options from the `Python json module`_.
Similarly, these options are available for the CSV format under the ``csv``
key:

- **delimiter**: Used as the separating character between fields. The default value is a comma (,).
- **dialect**: The kind of CSV file to produce. The default is `excel`.

These options match the options from the `Python csv module`_.

.. _Python json module: https://docs.python.org/2/library/json.html#basic-usage
.. _Python csv module: https://docs.python.org/3/library/csv.html#csv-fmt-params

The default options look like this::

    export:
        json:
            formatting:
                ensure_ascii: False
                ensure_ascii: false
                indent: 4
                separators: [',' , ': ']
                sort_keys: true
        csv:
            formatting:
                delimiter: ','
                dialect: excel

@@ -11,7 +11,7 @@ To use the ``fetchart`` plugin, first enable it in your configuration (see

The plugin uses `requests`_ to fetch album art from the Web.

.. _requests: http://docs.python-requests.org/en/latest/
.. _requests: https://requests.readthedocs.io/en/master/

Fetching Album Art During Import
--------------------------------

@@ -42,6 +42,13 @@ file. The available options are:

- **maxwidth**: A maximum image width to downscale fetched images if they are
  too big. The resize operation reduces image width to at most ``maxwidth``
  pixels. The height is recomputed so that the aspect ratio is preserved.
- **quality**: The JPEG quality level to use when compressing images (when
  ``maxwidth`` is set). This should be either a number from 1 to 100 or 0 to
  use the default quality. 65–75 is usually a good starting point. The default
  behavior depends on the imaging tool used for scaling: ImageMagick tries to
  estimate the input image quality and uses 92 if it cannot be determined, and
  PIL defaults to 75.
  Default: 0 (disabled)
- **enforce_ratio**: Only images with a width:height ratio of 1:1 are
  considered as valid album art candidates if set to ``yes``.
  It is also possible to specify a certain deviation to the exact ratio to

@@ -51,9 +58,9 @@ file. The available options are:

- **sources**: List of sources to search for images. An asterisk `*` expands
  to all available sources.
  Default: ``filesystem coverart itunes amazon albumart``, i.e., everything but
  ``wikipedia``, ``google`` and ``fanarttv``. Enable those sources for more
  matches at the cost of some speed. They are searched in the given order,
  thus in the default config, no remote (Web) art source are queried if
  ``wikipedia``, ``google``, ``fanarttv`` and ``lastfm``. Enable those sources
  for more matches at the cost of some speed. They are searched in the given
  order, thus in the default config, no remote (Web) art sources are queried if
  local art is found in the filesystem. To use a local image as fallback,
  move it to the end of the list. For even more fine-grained control over
  the search order, see the section on :ref:`album-art-sources` below.

@@ -64,9 +71,14 @@ file. The available options are:

  Default: The `beets custom search engine`_, which searches the entire web.
- **fanarttv_key**: The personal API key for requesting art from
  fanart.tv. See below.
- **lastfm_key**: The personal API key for requesting art from Last.fm. See
  below.
- **store_source**: If enabled, fetchart stores the artwork's source in a
  flexible tag named ``art_source``. See below for the rationale behind this.
  Default: ``no``.
- **high_resolution**: If enabled, fetchart retrieves artwork in the highest
  resolution it can find (warning: image files can sometimes reach >20MB).
  Default: ``no``.
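
To make these options concrete, here is a hedged sketch of a ``fetchart``
section that combines a few of them; the numbers are illustrative, not
recommendations::

    fetchart:
        maxwidth: 1200
        quality: 70
        store_source: yes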

Note: ``maxwidth`` and ``enforce_ratio`` options require either `ImageMagick`_
or `Pillow`_.

@@ -81,7 +93,7 @@ or `Pillow`_.

.. _beets custom search engine: https://cse.google.com.au:443/cse/publicurl?cx=001442825323518660753:hrh5ch1gjzm
.. _Pillow: https://github.com/python-pillow/Pillow
.. _ImageMagick: http://www.imagemagick.org/
.. _ImageMagick: https://www.imagemagick.org/

Here's an example that makes the plugin select only images that contain ``front`` or
``back`` keywords in their filenames and prioritizes the iTunes source over

@@ -114,8 +126,9 @@ art::

    $ beet fetchart [-q] [query]

By default the command will display all results, the ``-q`` or ``--quiet``
switch will only display results for album arts that are still missing.
By default the command will display all albums matching the ``query``. When the
``-q`` or ``--quiet`` switch is given, only albums for which artwork has been
fetched, or for which artwork could not be found will be printed.

.. _image-resizing:

@@ -135,7 +148,7 @@ On some versions of Windows, the program can be shadowed by a system-provided

environment variable so that ImageMagick comes first or use Pillow instead.

.. _Pillow: https://github.com/python-pillow/Pillow
.. _ImageMagick: http://www.imagemagick.org/
.. _ImageMagick: https://www.imagemagick.org/

.. _album-art-sources:

@@ -191,7 +204,7 @@ Optionally, you can `define a custom search engine`_. Get your search engine's

token and use it for your ``google_engine`` configuration option. The
default engine searches the entire web for cover art.

.. _define a custom search engine: http://www.google.com/cse/all
.. _define a custom search engine: https://www.google.com/cse/all

Note that the Google custom search API is limited to 100 queries per day.
After that, the fetchart plugin will fall back on other declared data sources.

@@ -211,6 +224,15 @@ personal key will give you earlier access to new art.

.. _on their blog: https://fanart.tv/2015/01/personal-api-keys/

Last.fm
'''''''

To use the Last.fm backend, you need to `register for a Last.fm API key`_. Set
the ``lastfm_key`` configuration option to your API key, then add ``lastfm`` to
the list of sources in your configuration.

.. _register for a Last.fm API key: https://www.last.fm/api/account/create
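
A hedged sketch tying this together (``YOUR_API_KEY`` is a placeholder, and the
source list simply appends ``lastfm`` to the defaults listed above)::

    fetchart:
        lastfm_key: YOUR_API_KEY
        sources: filesystem coverart itunes amazon albumart lastfm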

Storing the Artwork's Source
----------------------------


52 docs/plugins/fish.rst Normal file

@@ -0,0 +1,52 @@

Fish Plugin
===========

The ``fish`` plugin adds a ``beet fish`` command that creates a `Fish shell`_
tab-completion file named ``beet.fish`` in ``~/.config/fish/completions``.
This enables tab-completion of ``beet`` commands for the `Fish shell`_.

.. _Fish shell: https://fishshell.com/

Configuration
-------------

Enable the ``fish`` plugin (see :ref:`using-plugins`) on a system running the
`Fish shell`_.

Usage
-----

Type ``beet fish`` to generate the ``beet.fish`` completions file in
``~/.config/fish/completions/``. If you later install or disable plugins, run
``beet fish`` again to update the completions based on the enabled plugins.

For users not accustomed to tab completion: after you type ``beet`` followed by
a space in your shell prompt and then the ``TAB`` key, you should see a list of
the beets commands (and their abbreviated versions) that can be invoked in your
current environment. Similarly, typing ``beet -<TAB>`` will show you all the
option flags available to you, which also applies to subcommands such as
``beet import -<TAB>``. If you type ``beet ls`` followed by a space and then
the ``TAB`` key, you will see a list of all the album/track fields that can
be used in beets queries. For example, typing ``beet ls ge<TAB>`` will complete
to ``genre:`` and leave you ready to type the rest of your query.

Options
-------

In addition to beets commands, plugin commands, and option flags, the generated
completions also include all the album/track fields by default. If you do not
want the album/track fields included in the generated completions, use
``beet fish -f`` to generate completions only for beets/plugin commands and
option flags.

If you want generated completions to also contain album/track field *values* for
the items in your library, you can use the ``-e`` or ``--extravalues`` option.
For example: ``beet fish -e genre`` or ``beet fish -e genre -e albumartist``.
In the latter case, subsequently typing ``beet list genre: <TAB>`` will display
a list of all the genres in your library and ``beet list albumartist: <TAB>``
will show a list of the album artists in your library. Keep in mind that all of
these values will be put into the generated completions file, so use this option
with care when the specified fields contain a large number of values. Libraries
with, for example, very large numbers of genres/artists may result in higher
memory utilization, completion latency, et cetera. This option is not meant to
replace database queries altogether.

@@ -41,4 +41,4 @@ your entire collection.

Use the ``-d`` flag to remove featured artists (equivalent of the ``drop``
config option).

.. _MusicBrainz style: http://musicbrainz.org/doc/Style
.. _MusicBrainz style: https://musicbrainz.org/doc/Style

@@ -35,6 +35,27 @@ like this::

    pip install beets[fetchart,lyrics,lastgenre]

.. _metadata-source-plugin-configuration:

Using Metadata Source Plugins
-----------------------------

Some plugins provide sources for metadata in addition to MusicBrainz. These
plugins share the following configuration option:

- **source_weight**: Penalty applied to matches during import. Set to 0.0 to
  disable.
  Default: ``0.5``.

For example, to equally consider matches from Discogs and MusicBrainz, add the
following to your configuration::

    plugins: discogs

    discogs:
        source_weight: 0.0

.. toctree::
    :hidden:

@@ -44,9 +65,11 @@ like this::

    beatport
    bpd
    bpm
    bpsync
    bucket
    chroma
    convert
    deezer
    discogs
    duplicates
    edit

@@ -55,6 +78,7 @@ like this::

    export
    fetchart
    filefilter
    fish
    freedesktop
    fromfilename
    ftintitle

@@ -80,6 +104,7 @@ like this::

    missing
    mpdstats
    mpdupdate
    parentwork
    permissions
    play
    playlist

@@ -91,10 +116,12 @@ like this::

    smartplaylist
    sonosupdate
    spotify
    subsonicplaylist
    subsonicupdate
    the
    thumbnails
    types
    unimported
    web
    zero

@@ -104,10 +131,14 @@ Autotagger Extensions

* :doc:`chroma`: Use acoustic fingerprinting to identify audio files with
  missing or incorrect metadata.
* :doc:`discogs`: Search for releases in the `Discogs`_ database.
* :doc:`spotify`: Search for releases in the `Spotify`_ database.
* :doc:`deezer`: Search for releases in the `Deezer`_ database.
* :doc:`fromfilename`: Guess metadata for untagged tracks from their
  filenames.

.. _Discogs: http://www.discogs.com/
.. _Discogs: https://www.discogs.com/
.. _Spotify: https://www.spotify.com
.. _Deezer: https://www.deezer.com/

Metadata
--------

@@ -115,6 +146,7 @@ Metadata

* :doc:`absubmit`: Analyse audio with the `streaming_extractor_music`_ program and submit the metadata to the AcousticBrainz server
* :doc:`acousticbrainz`: Fetch various AcousticBrainz metadata
* :doc:`bpm`: Measure tempo using keystrokes.
* :doc:`bpsync`: Fetch updated metadata from Beatport.
* :doc:`edit`: Edit metadata from a text editor.
* :doc:`embedart`: Embed album art images into files' metadata.
* :doc:`fetchart`: Fetch album cover art from various sources.

@@ -127,16 +159,17 @@ Metadata

* :doc:`lastgenre`: Fetch genres based on Last.fm tags.
* :doc:`lastimport`: Collect play counts from Last.fm.
* :doc:`lyrics`: Automatically fetch song lyrics.
* :doc:`mbsync`: Fetch updated metadata from MusicBrainz
* :doc:`mbsync`: Fetch updated metadata from MusicBrainz.
* :doc:`metasync`: Fetch metadata from local or remote sources
* :doc:`mpdstats`: Connect to `MPD`_ and update the beets library with play
  statistics (last_played, play_count, skip_count, rating).
* :doc:`parentwork`: Fetch work titles and works they are part of.
* :doc:`replaygain`: Calculate volume normalization for players that support it.
* :doc:`scrub`: Clean extraneous metadata from music files.
* :doc:`zero`: Nullify fields by pattern or unconditionally.

.. _KeyFinder: http://www.ibrahimshaath.co.uk/keyfinder/
.. _streaming_extractor_music: http://acousticbrainz.org/download
.. _streaming_extractor_music: https://acousticbrainz.org/download

Path Formats
------------

@@ -153,6 +186,7 @@ Interoperability

* :doc:`badfiles`: Check audio file integrity.
* :doc:`embyupdate`: Automatically notifies `Emby`_ whenever the beets library changes.
* :doc:`fish`: Adds `Fish shell`_ tab autocompletion to ``beet`` commands.
* :doc:`importfeeds`: Keep track of imported files via ``.m3u`` playlist file(s) or symlinks.
* :doc:`ipfs`: Import libraries from friends and get albums from them via ipfs.
* :doc:`kodiupdate`: Automatically notifies `Kodi`_ whenever the beets library

@@ -167,12 +201,16 @@ Interoperability

* :doc:`sonosupdate`: Automatically notifies `Sonos`_ whenever the beets library
  changes.
* :doc:`thumbnails`: Get thumbnails with the cover art on your album folders.
* :doc:`subsonicupdate`: Automatically notifies `Subsonic`_ whenever the beets
  library changes.

.. _Emby: http://emby.media
.. _Plex: http://plex.tv
.. _Kodi: http://kodi.tv
.. _Sonos: http://sonos.com
.. _Emby: https://emby.media
.. _Fish shell: https://fishshell.com/
.. _Plex: https://plex.tv
.. _Kodi: https://kodi.tv
.. _Sonos: https://sonos.com
.. _Subsonic: http://www.subsonic.org/

Miscellaneous
-------------

@@ -194,14 +232,14 @@ Miscellaneous

* :doc:`mbcollection`: Maintain your MusicBrainz collection list.
* :doc:`mbsubmit`: Print an album's tracks in a MusicBrainz-friendly format.
* :doc:`missing`: List missing tracks.
* `mstream`_: A music streaming server + webapp that can be used alongside beets.
* `mstream`_: A music streaming server + webapp that can be used alongside beets.
* :doc:`random`: Randomly choose albums and tracks from your library.
* :doc:`spotify`: Create Spotify playlists from the Beets library.
* :doc:`types`: Declare types for flexible attributes.
* :doc:`web`: An experimental Web-based GUI for beets.

.. _MPD: http://www.musicpd.org/
.. _MPD clients: http://mpd.wikia.com/wiki/Clients
.. _MPD: https://www.musicpd.org/
.. _MPD clients: https://mpd.wikia.com/wiki/Clients
.. _mstream: https://github.com/IrosTheBeggar/mStream

.. _other-plugins:

@@ -263,6 +301,27 @@ Here are a few of the plugins written by the beets community:

* `beet-summarize`_ can compute lots of counts and statistics about your music
  library.

* `beets-mosaic`_ generates a montage of a mosaic from cover art.

* `beets-goingrunning`_ generates playlists to go with your running sessions.

* `beets-xtractor`_ extracts low- and high-level musical information from your songs.

* `beets-yearfixer`_ attempts to fix all missing ``original_year`` and ``year`` fields.

* `beets-autofix`_ automates repetitive tasks to keep your library in order.

* `beets-describe`_ gives you the full picture of a single attribute of your library items.

* `beets-bpmanalyser`_ analyses songs and calculates their tempo (BPM).

* `beets-originquery`_ augments MusicBrainz queries with locally-sourced data
  to improve autotagger results.

* `drop2beets`_ automatically imports singles as soon as they are dropped in a
  folder (using Linux's ``inotify``). You can also define a sub-folder
  hierarchy that sets flexible attributes along the way.

.. _beets-barcode: https://github.com/8h2a/beets-barcode
.. _beets-check: https://github.com/geigerzaehler/beets-check
.. _copyartifacts: https://github.com/sbarakat/beets-copyartifacts

@@ -284,3 +343,12 @@ Here are a few of the plugins written by the beets community:

.. _beets-popularity: https://github.com/abba23/beets-popularity
.. _beets-ydl: https://github.com/vmassuchetto/beets-ydl
.. _beet-summarize: https://github.com/steven-murray/beet-summarize
.. _beets-mosaic: https://github.com/SusannaMaria/beets-mosaic
.. _beets-goingrunning: https://pypi.org/project/beets-goingrunning
.. _beets-xtractor: https://github.com/adamjakab/BeetsPluginXtractor
.. _beets-yearfixer: https://github.com/adamjakab/BeetsPluginYearFixer
.. _beets-autofix: https://github.com/adamjakab/BeetsPluginAutofix
.. _beets-describe: https://github.com/adamjakab/BeetsPluginDescribe
.. _beets-bpmanalyser: https://github.com/adamjakab/BeetsPluginBpmAnalyser
.. _beets-originquery: https://github.com/x1ppy/beets-originquery
.. _drop2beets: https://github.com/martinkirch/drop2beets

@@ -42,4 +42,4 @@ Additional command-line options include:

* ``--keys-only`` or ``-k``: Show the name of the tags without the values.

.. _id3v2: http://id3v2.sourceforge.net
.. _mp3info: http://www.ibiblio.org/mp3info/
.. _mp3info: https://www.ibiblio.org/mp3info/

@@ -4,7 +4,7 @@ IPFS Plugin

The ``ipfs`` plugin makes it easy to share your library and music with friends.
The plugin uses `ipfs`_ for storing the library and file content.

.. _ipfs: http://ipfs.io/
.. _ipfs: https://ipfs.io/

Installation
------------

@@ -1,9 +1,9 @@

Key Finder Plugin
=================

The `keyfinder` plugin uses the `KeyFinder`_ program to detect the
musical key of track from its audio data and store it in the
`initial_key` field of your database. It does so
The `keyfinder` plugin uses either the `KeyFinder`_ or `keyfinder-cli`_
program to detect the musical key of a track from its audio data and store
it in the `initial_key` field of your database. It does so
automatically when importing music or through the ``beet keyfinder
[QUERY]`` command.

@@ -20,13 +20,16 @@ configuration file. The available options are:

  import. Otherwise, you need to use the ``beet keyfinder`` command
  explicitly.
  Default: ``yes``
- **bin**: The name of the `KeyFinder`_ program on your system or
  a path to the binary. If you installed the KeyFinder GUI on a Mac, for
  example, you want something like
- **bin**: The name of the program to use for key analysis. You can use either
  `KeyFinder`_ or `keyfinder-cli`_.
  If you installed the KeyFinder GUI on a Mac, for example, you want
  something like
  ``/Applications/KeyFinder.app/Contents/MacOS/KeyFinder``.
  If using `keyfinder-cli`_, the binary must be named ``keyfinder-cli``.
  Default: ``KeyFinder`` (i.e., search for the program in your ``$PATH``).
- **overwrite**: Calculate a key even for files that already have an
  `initial_key` value.
  Default: ``no``.
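
Putting these options together, a hedged sketch of a ``keyfinder`` section
using the ``keyfinder-cli`` backend (adjust ``bin`` if the binary is not on
your ``$PATH``) might look like::

    keyfinder:
        bin: keyfinder-cli
        overwrite: no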

.. _KeyFinder: http://www.ibrahimshaath.co.uk/keyfinder/
.. _KeyFinder: https://www.ibrahimshaath.co.uk/keyfinder/
.. _keyfinder-cli: https://github.com/EvanPurkhiser/keyfinder-cli/

Some files were not shown because too many files have changed in this diff.