Mirror of https://github.com/beetbox/beets.git (synced 2026-01-14 20:24:36 +01:00)
commit dc5a79e35c (parent f145e3b184)
4 changed files with 39 additions and 13 deletions
@@ -544,6 +544,11 @@ class ImportTask(object):
             return
         plugins.send('album_imported', lib=lib, album=self.album)

+    def emit_created(self, session):
+        """Send the `import_task_created` event for this task.
+        """
+        plugins.send('import_task_created', session=session, task=self)
+
     def lookup_candidates(self):
         """Retrieve and store candidates for this album.
         """
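The new emit_created hook simply forwards the task and the current session to any registered plugin handlers. As a rough sketch (not part of this diff), a plugin could observe the event like this; it assumes the BeetsPlugin.register_listener helper available in newer beets releases (older ones register handlers with the @Plugin.listen decorator), and the plugin name is invented:

from beets.plugins import BeetsPlugin


class TaskWatcherPlugin(BeetsPlugin):
    def __init__(self):
        super(TaskWatcherPlugin, self).__init__()
        # Assumed registration helper; see the hedge above.
        self.register_listener('import_task_created', self.task_created)

    def task_created(self, session, task):
        # The keyword arguments mirror the plugins.send() call in the hunk
        # above: `session` is the ImportSession, `task` the new ImportTask.
        self._log.debug(u'import task created under {0}', task.toppath)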
@@ -1150,6 +1155,7 @@ def read_tasks(session):
         # Generate tasks.
         task_factory = ImportTaskFactory(toppath, session)
         for t in task_factory.tasks():
+            t.emit_created(session)
             yield t
         skipped += task_factory.skipped

@@ -1170,7 +1176,9 @@ def query_tasks(session):
     if session.config['singletons']:
         # Search for items.
         for item in session.lib.items(session.query):
-            yield SingletonImportTask(None, item)
+            task = SingletonImportTask(None, item)
+            task.emit_created(session)
+            yield task

     else:
         # Search for albums.
@@ -1185,7 +1193,9 @@ def query_tasks(session):
                 item.id = None
                 item.album_id = None

-            yield ImportTask(None, [album.item_dir()], items)
+            task = ImportTask(None, [album.item_dir()], items)
+            task.emit_created(session)
+            yield task


 @pipeline.mutator_stage
@@ -1231,7 +1241,9 @@ def user_query(session, task):
         # Set up a little pipeline for dealing with the singletons.
         def emitter(task):
             for item in task.items:
-                yield SingletonImportTask(task.toppath, item)
+                task = SingletonImportTask(task.toppath, item)
+                task.emit_created(session)
+                yield task
             yield SentinelImportTask(task.toppath, task.paths)

         ipl = pipeline.Pipeline([
@@ -1341,10 +1353,13 @@ def manipulate_files(session, task):
 def log_files(session, task):
     """A coroutine (pipeline stage) to log each file to be imported.
     """
     if task.skip:
         return

     if isinstance(task, SingletonImportTask):
         log.info(u'Singleton: {0}', displayable_path(task.item['path']))
     elif task.items:
-        log.info(u'Album {0}', displayable_path(task.paths[0]))
+        log.info(u'Album: {0}', displayable_path(task.paths[0]))
         for item in task.items:
             log.info(u'  {0}', displayable_path(item['path']))

@@ -1366,7 +1381,9 @@ def group_albums(session):
             continue
         tasks = []
         for _, items in itertools.groupby(task.items, group):
-            tasks.append(ImportTask(items=list(items)))
+            task = ImportTask(items=list(items))
+            task.emit_created(session)
+            tasks.append(task)
         tasks.append(SentinelImportTask(task.toppath, task.paths))

         task = pipeline.multiple(tasks)
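The construct-announce-yield pattern introduced above now repeats in read_tasks, query_tasks, the singleton emitter inside user_query, and group_albums. Purely as an illustration, and not part of this change, the repetition could be gathered into a tiny helper; the name _announce is hypothetical:

def _announce(session, task):
    # Hypothetical helper, not in this commit: send `import_task_created`
    # and hand the task back so a producer stage can emit it in one line.
    task.emit_created(session)
    return task

# A call site would then read, for example:
#     yield _announce(session, SingletonImportTask(None, item))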
@@ -31,11 +31,15 @@ Fixes:
   canonicalization tree. :bug:`1239` :bug:`1240`
 * Incremental imports now (once again) show a "skipped N directories" message.

-For developers: The logging system in beets has been overhauled. Plugins now
-each have their own logger, which helps by automatically adjusting the
-verbosity level in import mode and by prefixing the plugin's name. Also,
-logging calls can (and should!) use modern ``{}``-style string formatting
-lazily. See :ref:`plugin-logging` in the plugin API docs.
+For developers:
+
+* The logging system in beets has been overhauled. Plugins now each have their
+  own logger, which helps by automatically adjusting the verbosity level in
+  import mode and by prefixing the plugin's name. Also, logging calls can (and
+  should!) use modern ``{}``-style string formatting lazily. See
+  :ref:`plugin-logging` in the plugin API docs.
+* A new ``import_task_created`` event lets you manipulate import tasks
+  immediately after they are initialized.


 1.3.10 (January 5, 2015)
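To make the logging entry concrete, a minimal sketch of the per-plugin logging style it describes follows; it assumes the plugin's own logger is exposed as self._log, as documented under :ref:`plugin-logging`:

from beets.plugins import BeetsPlugin


class ExamplePlugin(BeetsPlugin):
    def __init__(self):
        super(ExamplePlugin, self).__init__()
        # self._log is this plugin's own logger: messages are prefixed with
        # the plugin name and the level is adjusted automatically during
        # import runs. The {0} argument is interpolated lazily, only when
        # the record is actually emitted.
        self._log.debug(u'{0} plugin initialized', self.name)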
@@ -174,8 +174,13 @@ The events currently available are:
 * *after_write*: called with an ``Item`` object after a file's metadata is
   written to disk (i.e., just after the file on disk is closed).

+* *import_task_created*: called immediately after an import task is
+  initialized. Plugins can use this to, for example, cancel processing of a
+  task before anything else happens. Parameters: ``task`` (an `ImportTask`)
+  and ``session`` (an `ImportSession`).
+
 * *import_task_start*: called before an import task begins processing.
-  Parameters: ``task`` (an `ImportTask`) and ``session`` (an `ImportSession`).
+  Parameters: ``task`` and ``session``.

 * *import_task_apply*: called after metadata changes have been applied in an
   import task. Parameters: ``task`` and ``session``.
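As a hedged sketch of how a plugin might hook the events listed above (not taken verbatim from these docs), handlers can be attached whose keyword parameters match the documented ``task`` and ``session`` names; it assumes the decorator-style registration (``@Plugin.listen``) of this era of beets, and the plugin name is invented:

from beets.plugins import BeetsPlugin


class AuditPlugin(BeetsPlugin):
    pass


@AuditPlugin.listen('import_task_created')
def on_task_created(session, task):
    # Runs right after the ImportTask is built, before lookup or user
    # interaction; the docs note it can be used to stop a task early.
    pass


@AuditPlugin.listen('import_task_apply')
def on_task_apply(session, task):
    # Runs after metadata changes have been applied to the task's items.
    pass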
@@ -1602,9 +1602,9 @@ class ImportPretendTest(_common.TestCase, ImportHelper):
         logs = self.__run(self.import_paths, singletons=False)

         self.assertEqual(logs, [
-            'Album %s' % displayable_path(self.import_paths[0]),
+            'Album: %s' % displayable_path(self.import_paths[0]),
             '  %s' % self.import_files[0],
-            'Album %s' % displayable_path(self.import_paths[1]),
+            'Album: %s' % displayable_path(self.import_paths[1]),
             '  %s' % self.import_paths[1]])

     def test_import_pretend_empty(self):