Mirror of https://github.com/beetbox/beets.git, synced 2026-01-02 22:12:53 +01:00
Merge remote-tracking branch 'upstream/master' into spotify_track
This commit is contained in: commit 5fb35128d2
7 changed files with 124 additions and 11 deletions
@@ -79,6 +79,43 @@ def _do_query(lib, query, album, also_items=True):
    return items, albums


def _paths_from_logfile(path):
    """Parse the logfile and yield skipped paths to pass to the `import`
    command.
    """
    with open(path, mode="r", encoding="utf-8") as fp:
        for i, line in enumerate(fp, start=1):
            verb, sep, paths = line.rstrip("\n").partition(" ")
            if not sep:
                raise ValueError(f"line {i} is invalid")

            # Ignore informational lines that don't need to be re-imported.
            if verb in {"import", "duplicate-keep", "duplicate-replace"}:
                continue

            if verb not in {"asis", "skip", "duplicate-skip"}:
                raise ValueError(f"line {i} contains unknown verb {verb}")

            yield os.path.commonpath(paths.split("; "))


def _parse_logfiles(logfiles):
    """Parse all `logfiles` and yield paths from it."""
    for logfile in logfiles:
        try:
            yield from _paths_from_logfile(syspath(normpath(logfile)))
        except ValueError as err:
            raise ui.UserError('malformed logfile {}: {}'.format(
                util.displayable_path(logfile),
                str(err)
            )) from err
        except IOError as err:
            raise ui.UserError('unreadable logfile {}: {}'.format(
                util.displayable_path(logfile),
                str(err)
            )) from err


# fields: Shows a list of available fields for queries and format strings.

def _print_keys(query):
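
To make the parsing rules above easier to see in isolation, here is a small standalone sketch (not part of the commit) that applies the same logic to an in-memory log excerpt; the helper name paths_from_log_text and the sample paths are purely illustrative, and the printed result assumes POSIX-style paths:

# Illustrative sketch only -- mirrors the logic of _paths_from_logfile, but
# works on an in-memory string instead of a logfile on disk.
import os

SAMPLE_LOG = (
    "import started Wed Jun 15 23:08:26 2022\n"
    "asis /music/Beatles, The/The Beatles; "
    "/music/Beatles, The/The Beatles/CD 01\n"
    "skip /music/Michael Jackson/Bad\n"
)


def paths_from_log_text(text):
    for i, line in enumerate(text.splitlines(), start=1):
        verb, sep, paths = line.partition(" ")
        if not sep:
            raise ValueError(f"line {i} is invalid")
        # "import", "duplicate-keep" and "duplicate-replace" lines carry
        # nothing that needs to be re-imported.
        if verb in {"import", "duplicate-keep", "duplicate-replace"}:
            continue
        if verb not in {"asis", "skip", "duplicate-skip"}:
            raise ValueError(f"line {i} contains unknown verb {verb}")
        # Multiple paths on one line are collapsed to their common parent.
        yield os.path.commonpath(paths.split("; "))


print(list(paths_from_log_text(SAMPLE_LOG)))
# ['/music/Beatles, The/The Beatles', '/music/Michael Jackson/Bad']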
@@ -913,12 +950,6 @@ def import_files(lib, paths, query):
    """Import the files in the given list of paths or matching the
    query.
    """
-    # Check the user-specified directories.
-    for path in paths:
-        if not os.path.exists(syspath(normpath(path))):
-            raise ui.UserError('no such file or directory: {}'.format(
-                displayable_path(path)))
-
    # Check parameter consistency.
    if config['import']['quiet'] and config['import']['timid']:
        raise ui.UserError("can't be both quiet and timid")
@@ -960,7 +991,12 @@ def import_func(lib, opts, args):
    else:
        query = None
        paths = args
-        if not paths:
-            raise ui.UserError('no path specified')

        # The paths from the logfiles go into a separate list to allow handling
        # errors differently from user-specified paths.
        paths_from_logfiles = list(_parse_logfiles(opts.from_logfiles or []))

        if not paths and not paths_from_logfiles:
            raise ui.UserError('no path specified')

        # On Python 2, we used to get filenames as raw bytes, which is
@@ -969,6 +1005,31 @@ def import_func(lib, opts, args):
        # filename.
        paths = [p.encode(util.arg_encoding(), 'surrogateescape')
                 for p in paths]
        paths_from_logfiles = [p.encode(util.arg_encoding(), 'surrogateescape')
                               for p in paths_from_logfiles]

        # Check the user-specified directories.
        for path in paths:
            if not os.path.exists(syspath(normpath(path))):
                raise ui.UserError('no such file or directory: {}'.format(
                    displayable_path(path)))

        # Check the directories from the logfiles, but don't throw an error in
        # case those paths don't exist. Maybe some of those paths have already
        # been imported and moved separately, so logging a warning should
        # suffice.
        for path in paths_from_logfiles:
            if not os.path.exists(syspath(normpath(path))):
                log.warning('No such file or directory: {}'.format(
                    displayable_path(path)))
                continue

            paths.append(path)

        # If all paths were read from a logfile, and none of them exist, throw
        # an error
        if not paths:
            raise ui.UserError('none of the paths are importable')

    import_files(lib, paths, query)
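
The encoding step above relies on Python's 'surrogateescape' error handler; a tiny standalone round-trip (the byte string is illustrative, not from the commit) shows why arbitrary filename bytes survive the str/bytes conversion:

# Illustrative only: bytes that are invalid in the target encoding survive a
# decode/encode round-trip when the 'surrogateescape' handler is used.
raw = b"/music/caf\xe9"                      # b'\xe9' is not valid UTF-8
as_str = raw.decode("utf-8", "surrogateescape")
assert as_str.encode("utf-8", "surrogateescape") == raw
print(repr(as_str))                          # '/music/caf\udce9'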
@@ -1061,6 +1122,11 @@ import_cmd.parser.add_option(
    metavar='ID',
    help='restrict matching to a specific metadata backend ID'
)
import_cmd.parser.add_option(
    '--from-logfile', dest='from_logfiles', action='append',
    metavar='PATH',
    help='read skipped paths from an existing logfile'
)
import_cmd.parser.add_option(
    '--set', dest='set_fields', action='callback',
    callback=_store_dict,
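
Because the new option is registered with action='append', repeating --from-logfile collects multiple logfiles, and the destination stays None when the flag is never given, which is why import_func falls back to `opts.from_logfiles or []`. A minimal optparse sketch (standalone; the file names are illustrative) demonstrates this behaviour:

# Standalone illustration of optparse's action='append' semantics.
from optparse import OptionParser

parser = OptionParser()
parser.add_option('--from-logfile', dest='from_logfiles', action='append',
                  metavar='PATH')

opts, _ = parser.parse_args(['--from-logfile', 'first.log',
                             '--from-logfile', 'second.log'])
print(opts.from_logfiles)   # ['first.log', 'second.log']

opts, _ = parser.parse_args([])
print(opts.from_logfiles)   # None -> hence the `or []` fallback above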
@@ -321,7 +321,7 @@ class AcousticPlugin(plugins.BeetsPlugin):
                else:
                    yield v, subdata[k]
            else:
-                self._log.warning('Acousticbrainz did not provide info'
                self._log.warning('Acousticbrainz did not provide info '
                                  'about {}', k)
                self._log.debug('Data {} could not be mapped to scheme {} '
                                'because key {} was not found', subdata, v, k)
@@ -34,6 +34,7 @@ class ImportAddedPlugin(BeetsPlugin):
        register('item_copied', self.record_import_mtime)
        register('item_linked', self.record_import_mtime)
        register('item_hardlinked', self.record_import_mtime)
        register('item_reflinked', self.record_import_mtime)
        register('album_imported', self.update_album_times)
        register('item_imported', self.update_item_times)
        register('after_write', self.update_after_write_time)
@@ -49,7 +50,8 @@ class ImportAddedPlugin(BeetsPlugin):

    def record_if_inplace(self, task, session):
        if not (session.config['copy'] or session.config['move'] or
-                session.config['link'] or session.config['hardlink']):
                session.config['link'] or session.config['hardlink'] or
                session.config['reflink']):
            self._log.debug("In place import detected, recording mtimes from "
                            "source paths")
            items = [task.item] \
@@ -29,6 +29,9 @@ New features:
  :bug:`1840` :bug:`4302`
* Added a ``-P`` (or ``--disable-plugins``) flag to specify one or more
  plugins to be disabled at startup.
* :ref:`import-options`: Add support for re-running the importer on paths in
  log files that were created with the ``-l`` (or ``--logfile``) argument.
  :bug:`4379` :bug:`4387`

Bug fixes:

@@ -82,6 +85,9 @@ Bug fixes:
  :bug:`4272`
* :doc:`plugins/lyrics`: Fixed an issue with the Genius header being included
  in lyrics, and added a test case with up-to-date Genius HTML.
* :doc:`plugins/importadded`: Fix a bug with the recently added reflink import
  option that causes a crash when the ImportAdded plugin is enabled.
  :bug:`4389`

For packagers:

@@ -80,6 +80,8 @@ all of these limitations.

Now that that's out of the way, let's tag some music.

.. _import-options:

Options
-------
@@ -101,7 +103,8 @@ command-line options you should know:
* ``beet import -l LOGFILE``: write a message to ``LOGFILE`` every time you skip
  an album or choose to take its tags "as-is" (see below) or the album is
  skipped as a duplicate; this lets you come back later and reexamine albums
-  that weren't tagged successfully
  that weren't tagged successfully. Run ``beet import --from-logfile=LOGFILE``
  to rerun the importer on such paths from the logfile.

* ``beet import -q``: quiet mode. Never prompt for input and, instead,
  conservatively skip any albums that need your opinion. The ``-ql`` combination
@@ -86,7 +86,9 @@ Optional command flags:
  that weren't tagged successfully---either because they're not in the
  MusicBrainz database or because something's wrong with the files. Use the
  ``-l`` option to specify a filename to log every time you skip an album
-  or import it "as-is" or an album gets skipped as a duplicate.
  or import it "as-is" or an album gets skipped as a duplicate. You can later
  review the file manually or import skipped paths from the logfile
  automatically by using the ``--from-logfile LOGFILE`` argument.

* Relatedly, the ``-q`` (quiet) option can help with large imports by
  autotagging without ever bothering to ask for user input. Whenever the
@@ -729,6 +729,40 @@ class ImportTest(_common.TestCase):
        self.assertRaises(ui.UserError, commands.import_files, None, [],
                          None)

    def test_parse_paths_from_logfile(self):
        if os.path.__name__ == 'ntpath':
            logfile_content = (
                "import started Wed Jun 15 23:08:26 2022\n"
                "asis C:\\music\\Beatles, The\\The Beatles; C:\\music\\Beatles, The\\The Beatles\\CD 01; C:\\music\\Beatles, The\\The Beatles\\CD 02\n"  # noqa: E501
                "duplicate-replace C:\\music\\Bill Evans\\Trio '65\n"
                "skip C:\\music\\Michael Jackson\\Bad\n"
                "skip C:\\music\\Soulwax\\Any Minute Now\n"
            )
            expected_paths = [
                "C:\\music\\Beatles, The\\The Beatles",
                "C:\\music\\Michael Jackson\\Bad",
                "C:\\music\\Soulwax\\Any Minute Now",
            ]
        else:
            logfile_content = (
                "import started Wed Jun 15 23:08:26 2022\n"
                "asis /music/Beatles, The/The Beatles; /music/Beatles, The/The Beatles/CD 01; /music/Beatles, The/The Beatles/CD 02\n"  # noqa: E501
                "duplicate-replace /music/Bill Evans/Trio '65\n"
                "skip /music/Michael Jackson/Bad\n"
                "skip /music/Soulwax/Any Minute Now\n"
            )
            expected_paths = [
                "/music/Beatles, The/The Beatles",
                "/music/Michael Jackson/Bad",
                "/music/Soulwax/Any Minute Now",
            ]

        logfile = os.path.join(self.temp_dir, b"logfile.log")
        with open(logfile, mode="w") as fp:
            fp.write(logfile_content)
        actual_paths = list(commands._paths_from_logfile(logfile))
        self.assertEqual(actual_paths, expected_paths)


@_common.slow_test()
class ConfigTest(unittest.TestCase, TestHelper, _common.Assertions):