Merge branch 'master' of https://github.com/sampsyo/beets into olinbg-features
Commit 1f5cac687d
36 changed files with 1805 additions and 753 deletions
@ -12,7 +12,7 @@
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.

__version__ = '1.3.7'
__version__ = '1.3.8'
__author__ = 'Adrian Sampson <adrian@radbox.org>'

import beets.library
@ -14,12 +14,9 @@

"""Facilities for automatically determining files' correct metadata.
"""
import os
import logging
import re

from beets import library, mediafile, config
from beets.util import sorted_walk, ancestry, displayable_path
from beets import config

# Parts of external interface.
from .hooks import AlbumInfo, TrackInfo, AlbumMatch, TrackMatch # noqa
@ -29,130 +26,9 @@ from .match import Recommendation # noqa
|
|||
# Global logger.
|
||||
log = logging.getLogger('beets')
|
||||
|
||||
# Constants for directory walker.
|
||||
MULTIDISC_MARKERS = (r'dis[ck]', r'cd')
|
||||
MULTIDISC_PAT_FMT = r'^(.*%s[\W_]*)\d'
|
||||
|
||||
|
||||
# Additional utilities for the main interface.
|
||||
|
||||
def albums_in_dir(path):
|
||||
"""Recursively searches the given directory and returns an iterable
|
||||
of (paths, items) where paths is a list of directories and items is
|
||||
a list of Items that is probably an album. Specifically, any folder
|
||||
containing any media files is an album.
|
||||
"""
|
||||
collapse_pat = collapse_paths = collapse_items = None
|
||||
|
||||
for root, dirs, files in sorted_walk(path,
|
||||
ignore=config['ignore'].as_str_seq(),
|
||||
logger=log):
|
||||
# Get a list of items in the directory.
|
||||
items = []
|
||||
for filename in files:
|
||||
try:
|
||||
i = library.Item.from_path(os.path.join(root, filename))
|
||||
except library.ReadError as exc:
|
||||
if isinstance(exc.reason, mediafile.FileTypeError):
|
||||
# Silently ignore non-music files.
|
||||
pass
|
||||
elif isinstance(exc.reason, mediafile.UnreadableFileError):
|
||||
log.warn(u'unreadable file: {0}'.format(
|
||||
displayable_path(filename))
|
||||
)
|
||||
else:
|
||||
log.error(u'error reading {0}: {1}'.format(
|
||||
displayable_path(filename),
|
||||
exc,
|
||||
))
|
||||
else:
|
||||
items.append(i)
|
||||
|
||||
# If we're currently collapsing the constituent directories in a
|
||||
# multi-disc album, check whether we should continue collapsing
|
||||
# and add the current directory. If so, just add the directory
|
||||
# and move on to the next directory. If not, stop collapsing.
|
||||
if collapse_paths:
|
||||
if (not collapse_pat and collapse_paths[0] in ancestry(root)) or \
|
||||
(collapse_pat and
|
||||
collapse_pat.match(os.path.basename(root))):
|
||||
# Still collapsing.
|
||||
collapse_paths.append(root)
|
||||
collapse_items += items
|
||||
continue
|
||||
else:
|
||||
# Collapse finished. Yield the collapsed directory and
|
||||
# proceed to process the current one.
|
||||
if collapse_items:
|
||||
yield collapse_paths, collapse_items
|
||||
collapse_pat = collapse_paths = collapse_items = None
|
||||
|
||||
# Check whether this directory looks like the *first* directory
|
||||
# in a multi-disc sequence. There are two indicators: the file
|
||||
# is named like part of a multi-disc sequence (e.g., "Title Disc
|
||||
# 1") or it contains no items but only directories that are
|
||||
# named in this way.
|
||||
start_collapsing = False
|
||||
for marker in MULTIDISC_MARKERS:
|
||||
marker_pat = re.compile(MULTIDISC_PAT_FMT % marker, re.I)
|
||||
match = marker_pat.match(os.path.basename(root))
|
||||
|
||||
# Is this directory the root of a nested multi-disc album?
|
||||
if dirs and not items:
|
||||
# Check whether all subdirectories have the same prefix.
|
||||
start_collapsing = True
|
||||
subdir_pat = None
|
||||
for subdir in dirs:
|
||||
# The first directory dictates the pattern for
|
||||
# the remaining directories.
|
||||
if not subdir_pat:
|
||||
match = marker_pat.match(subdir)
|
||||
if match:
|
||||
subdir_pat = re.compile(
|
||||
r'^%s\d' % re.escape(match.group(1)), re.I
|
||||
)
|
||||
else:
|
||||
start_collapsing = False
|
||||
break
|
||||
|
||||
# Subsequent directories must match the pattern.
|
||||
elif not subdir_pat.match(subdir):
|
||||
start_collapsing = False
|
||||
break
|
||||
|
||||
# If all subdirectories match, don't check other
|
||||
# markers.
|
||||
if start_collapsing:
|
||||
break
|
||||
|
||||
# Is this directory the first in a flattened multi-disc album?
|
||||
elif match:
|
||||
start_collapsing = True
|
||||
# Set the current pattern to match directories with the same
|
||||
# prefix as this one, followed by a digit.
|
||||
collapse_pat = re.compile(
|
||||
r'^%s\d' % re.escape(match.group(1)), re.I
|
||||
)
|
||||
break
|
||||
|
||||
# If either of the above heuristics indicated that this is the
|
||||
# beginning of a multi-disc album, initialize the collapsed
|
||||
# directory and item lists and check the next directory.
|
||||
if start_collapsing:
|
||||
# Start collapsing; continue to the next iteration.
|
||||
collapse_paths = [root]
|
||||
collapse_items = items
|
||||
continue
|
||||
|
||||
# If it's nonempty, yield it.
|
||||
if items:
|
||||
yield [root], items
|
||||
|
||||
# Clear out any unfinished collapse.
|
||||
if collapse_paths and collapse_items:
|
||||
yield collapse_paths, collapse_items
|
||||
|
||||
|
||||
def apply_item_metadata(item, track_info):
|
||||
"""Set an item's metadata from its matched TrackInfo object.
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -55,6 +55,9 @@ list_format_item: $artist - $album - $title
list_format_album: $albumartist - $album
time_format: '%Y-%m-%d %H:%M:%S'

sort_album: smartartist+
sort_item: smartartist+

paths:
    default: $albumartist/$album%aunique{}/$track $title
    singleton: Non-Album/$artist/$title
@ -19,5 +19,6 @@ from .db import Model, Database
from .query import Query, FieldQuery, MatchQuery, AndQuery, OrQuery
from .types import Type
from .queryparse import query_from_strings
from .queryparse import sort_from_strings

# flake8: noqa
@ -24,10 +24,56 @@ import collections

import beets
from beets.util.functemplate import Template
from .query import MatchQuery
from .query import MatchQuery, build_sql
from .types import BASE_TYPE


class FormattedMapping(collections.Mapping):
    """A `dict`-like formatted view of a model.

    The accessor `mapping[key]` returns the formated version of
    `model[key]` as a unicode string.

    If `for_path` is true, all path separators in the formatted values
    are replaced.
    """

    def __init__(self, model, for_path=False):
        self.for_path = for_path
        self.model = model
        self.model_keys = model.keys(True)

    def __getitem__(self, key):
        if key in self.model_keys:
            return self._get_formatted(self.model, key)
        else:
            raise KeyError(key)

    def __iter__(self):
        return iter(self.model_keys)

    def __len__(self):
        return len(self.model_keys)

    def get(self, key, default=None):
        if default is None:
            default = self.model._type(key).format(None)
        return super(FormattedMapping, self).get(key, default)

    def _get_formatted(self, model, key):
        value = model._type(key).format(model.get(key))
        if isinstance(value, bytes):
            value = value.decode('utf8', 'ignore')

        if self.for_path:
            sep_repl = beets.config['path_sep_replace'].get(unicode)
            for sep in (os.path.sep, os.path.altsep):
                if sep:
                    value = value.replace(sep, sep_repl)

        return value


# Abstract base for model classes.

class Model(object):
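For orientation, here is a minimal, self-contained sketch of the pattern the new FormattedMapping class implements: a read-only mapping that formats model values into unicode strings lazily, on access. The FakeModel and SimpleFormattedView names below are illustrative stand-ins only, not beets classes.

# Illustrative sketch: a lazy, read-only "formatted view" of a model.
try:
    from collections.abc import Mapping   # Python 3
except ImportError:
    from collections import Mapping       # Python 2

class FakeModel(dict):
    # Stand-in for a real Model: keys(True) would include computed fields.
    def keys(self, computed=False):
        return list(dict.keys(self))

class SimpleFormattedView(Mapping):
    """Return every model value as a unicode string on access."""
    def __init__(self, model):
        self.model = model
        self.model_keys = model.keys(True)

    def __getitem__(self, key):
        if key not in self.model_keys:
            raise KeyError(key)
        value = self.model.get(key)
        return u'' if value is None else u'%s' % value

    def __iter__(self):
        return iter(self.model_keys)

    def __len__(self):
        return len(self.model_keys)

view = SimpleFormattedView(FakeModel(title='Speak to Me', track=1))
assert view['track'] == u'1'   # values are only formatted when read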
@ -380,63 +426,24 @@ class Model(object):
|
|||
|
||||
# Formatting and templating.
|
||||
|
||||
@classmethod
|
||||
def _format(cls, key, value, for_path=False):
|
||||
"""Format a value as the given field for this model.
|
||||
"""
|
||||
# Format the value as a string according to its type.
|
||||
value = cls._type(key).format(value)
|
||||
_formatter = FormattedMapping
|
||||
|
||||
# Formatting must result in a string. To deal with
|
||||
# Python2isms, implicitly convert ASCII strings.
|
||||
assert isinstance(value, basestring), \
|
||||
u'field formatter must produce strings'
|
||||
if isinstance(value, bytes):
|
||||
value = value.decode('utf8', 'ignore')
|
||||
|
||||
if for_path:
|
||||
sep_repl = beets.config['path_sep_replace'].get(unicode)
|
||||
for sep in (os.path.sep, os.path.altsep):
|
||||
if sep:
|
||||
value = value.replace(sep, sep_repl)
|
||||
|
||||
return value
|
||||
|
||||
def _get_formatted(self, key, for_path=False):
|
||||
"""Get a field value formatted as a string (`unicode` object)
|
||||
for display to the user. If `for_path` is true, then the value
|
||||
will be sanitized for inclusion in a pathname (i.e., path
|
||||
separators will be removed from the value).
|
||||
"""
|
||||
return self._format(key, self.get(key), for_path)
|
||||
|
||||
def _formatted_mapping(self, for_path=False):
|
||||
def formatted(self, for_path=False):
|
||||
"""Get a mapping containing all values on this object formatted
|
||||
as human-readable strings.
|
||||
as human-readable unicode strings.
|
||||
"""
|
||||
return FormattedMapping(self, for_path)
|
||||
|
||||
@property
|
||||
def formatted(self):
|
||||
"""A `dict`-like view containing formatted values.
|
||||
"""
|
||||
return self._formatted_mapping(False)
|
||||
return self._formatter(self, for_path)
|
||||
|
||||
def evaluate_template(self, template, for_path=False):
|
||||
"""Evaluate a template (a string or a `Template` object) using
|
||||
the object's fields. If `for_path` is true, then no new path
|
||||
separators will be added to the template.
|
||||
"""
|
||||
# Build value mapping.
|
||||
mapping = self._formatted_mapping(for_path)
|
||||
|
||||
# Get template functions.
|
||||
funcs = self._template_funcs()
|
||||
|
||||
# Perform substitution.
|
||||
if isinstance(template, basestring):
|
||||
template = Template(template)
|
||||
return template.substitute(mapping, funcs)
|
||||
return template.substitute(self.formatted(for_path),
|
||||
self._template_funcs())
|
||||
|
||||
# Parsing.
|
||||
|
||||
|
|
@ -450,73 +457,70 @@ class Model(object):
|
|||
return cls._type(key).parse(string)
|
||||
|
||||
|
||||
class FormattedMapping(collections.Mapping):
|
||||
"""A `dict`-like formatted view of a model.
|
||||
|
||||
The accessor ``mapping[key]`` returns the formated version of
|
||||
``model[key]``. The formatting is handled by `model._format()`.
|
||||
"""
|
||||
# TODO Move all formatting logic here
|
||||
# TODO Add caching
|
||||
|
||||
def __init__(self, model, for_path=False):
|
||||
self.for_path = for_path
|
||||
self.model = model
|
||||
self.model_keys = model.keys(True)
|
||||
|
||||
def __getitem__(self, key):
|
||||
if key in self.model_keys:
|
||||
return self.model._get_formatted(key, self.for_path)
|
||||
else:
|
||||
raise KeyError(key)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.model_keys)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.model_keys)
|
||||
|
||||
|
||||
# Database controller and supporting interfaces.
|
||||
|
||||
class Results(object):
|
||||
"""An item query result set. Iterating over the collection lazily
|
||||
constructs LibModel objects that reflect database rows.
|
||||
"""
|
||||
def __init__(self, model_class, rows, db, query=None):
|
||||
def __init__(self, model_class, rows, db, query=None, sort=None):
|
||||
"""Create a result set that will construct objects of type
|
||||
`model_class`, which should be a subclass of `LibModel`, out of
|
||||
the query result mapping in `rows`. The new objects are
|
||||
associated with the database `db`. If `query` is provided, it is
|
||||
used as a predicate to filter the results for a "slow query" that
|
||||
cannot be evaluated by the database directly.
|
||||
associated with the database `db`.
|
||||
If `query` is provided, it is used as a predicate to filter the results
|
||||
for a "slow query" that cannot be evaluated by the database directly.
|
||||
If `sort` is provided, it is used to sort the full list of results
|
||||
before returning. This means it is a "slow sort" and all objects must
|
||||
be built before returning the first one.
|
||||
"""
|
||||
self.model_class = model_class
|
||||
self.rows = rows
|
||||
self.db = db
|
||||
self.query = query
|
||||
self.sort = sort
|
||||
|
||||
def __iter__(self):
|
||||
"""Construct Python objects for all rows that pass the query
|
||||
predicate.
|
||||
"""
|
||||
for row in self.rows:
|
||||
# Get the flexible attributes for the object.
|
||||
with self.db.transaction() as tx:
|
||||
flex_rows = tx.query(
|
||||
'SELECT * FROM {0} WHERE entity_id=?'.format(
|
||||
self.model_class._flex_table
|
||||
),
|
||||
(row['id'],)
|
||||
)
|
||||
values = dict(row)
|
||||
flex_values = dict((row['key'], row['value']) for row in flex_rows)
|
||||
if self.sort:
|
||||
# Slow sort. Must build the full list first.
|
||||
objects = []
|
||||
for row in self.rows:
|
||||
obj = self._make_model(row)
|
||||
# check the predicate if any
|
||||
if not self.query or self.query.match(obj):
|
||||
objects.append(obj)
|
||||
# Now that we have the full list, we can sort it
|
||||
objects = self.sort.sort(objects)
|
||||
for o in objects:
|
||||
yield o
|
||||
else:
|
||||
for row in self.rows:
|
||||
obj = self._make_model(row)
|
||||
# check the predicate if any
|
||||
if not self.query or self.query.match(obj):
|
||||
yield obj
|
||||
|
||||
# Construct the Python object and yield it if it passes the
|
||||
# predicate.
|
||||
obj = self.model_class._awaken(self.db, values, flex_values)
|
||||
if not self.query or self.query.match(obj):
|
||||
yield obj
|
||||
def _make_model(self, row):
|
||||
# Get the flexible attributes for the object.
|
||||
with self.db.transaction() as tx:
|
||||
flex_rows = tx.query(
|
||||
'SELECT * FROM {0} WHERE entity_id=?'.format(
|
||||
self.model_class._flex_table
|
||||
),
|
||||
(row['id'],)
|
||||
)
|
||||
|
||||
cols = dict(row)
|
||||
values = dict((k, v) for (k, v) in cols.items()
|
||||
if not k[:4] == 'flex')
|
||||
flex_values = dict((row['key'], row['value']) for row in flex_rows)
|
||||
|
||||
# Construct the Python object
|
||||
obj = self.model_class._awaken(self.db, values, flex_values)
|
||||
return obj
|
||||
|
||||
def __len__(self):
|
||||
"""Get the number of matching objects.
|
||||
|
|
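The Results.__iter__ rewrite above splits iteration into a "slow sort" path and the old streaming path. A compact sketch of that control flow, with made-up helper names (make_obj, predicate, sort_key are not beets APIs):

# Sketch of the two iteration paths: when a sort cannot be pushed into
# SQL, every row is materialized, filtered by the slow-query predicate,
# and only then sorted in Python; otherwise objects stream one by one.
def iter_results(rows, make_obj, predicate=None, sort_key=None):
    if sort_key is not None:
        objects = [make_obj(r) for r in rows]
        if predicate is not None:
            objects = [o for o in objects if predicate(o)]
        for obj in sorted(objects, key=sort_key):
            yield obj
    else:
        for r in rows:
            obj = make_obj(r)
            if predicate is None or predicate(obj):
                yield obj

rows = [{'title': 'b'}, {'title': 'a'}]
print(list(iter_results(rows, dict, sort_key=lambda o: o['title'])))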
@ -739,24 +743,20 @@ class Database(object):

    # Querying.

    def _fetch(self, model_cls, query, order_by=None):
    def _fetch(self, model_cls, query, sort_order=None):
        """Fetch the objects of type `model_cls` matching the given
        query. The query may be given as a string, string sequence, a
        Query object, or None (to fetch everything). If provided,
        `order_by` is a SQLite ORDER BY clause for sorting.
        """
        where, subvals = query.clause()
        `sort_order` is either a SQLite ORDER BY clause for sorting or a
        Sort object.
        """

        sql, subvals, query, sort = build_sql(model_cls, query, sort_order)

        sql = "SELECT * FROM {0} WHERE {1}".format(
            model_cls._table,
            where or '1',
        )
        if order_by:
            sql += " ORDER BY {0}".format(order_by)
        with self.transaction() as tx:
            rows = tx.query(sql, subvals)

        return Results(model_cls, rows, self, None if where else query)
        return Results(model_cls, rows, self, query, sort)

    def _get(self, model_cls, id):
        """Get a Model object by its id or None if the id does not
@ -15,6 +15,7 @@
"""The Query type hierarchy for DBCore.
"""
import re
from operator import attrgetter
from beets import util
from datetime import datetime, timedelta

@ -497,3 +498,257 @@ class DateQuery(FieldQuery):
|
|||
# Match any date.
|
||||
clause = '1'
|
||||
return clause, subvals
|
||||
|
||||
|
||||
class Sort(object):
|
||||
"""An abstract class representing a sort operation for a query into the
|
||||
item database.
|
||||
"""
|
||||
def select_clause(self):
|
||||
""" Generates a select sql fragment if the sort operation requires one,
|
||||
an empty string otherwise.
|
||||
"""
|
||||
return ""
|
||||
|
||||
def union_clause(self):
|
||||
""" Generates a union sql fragment if the sort operation requires one,
|
||||
an empty string otherwise.
|
||||
"""
|
||||
return ""
|
||||
|
||||
def order_clause(self):
|
||||
"""Generates a sql fragment to be use in a ORDER BY clause or None if
|
||||
it's a slow query.
|
||||
"""
|
||||
return None
|
||||
|
||||
def sort(self, items):
|
||||
"""Return a key function that can be used with the list.sort() method.
|
||||
Meant to be used with slow sort, it must be implemented even for sort
|
||||
that can be done with sql, as they might be used in conjunction with
|
||||
slow sort.
|
||||
"""
|
||||
return sorted(items, key=lambda x: x)
|
||||
|
||||
def is_slow(self):
|
||||
return False
|
||||
|
||||
|
||||
class MultipleSort(Sort):
|
||||
"""Sort class that combines several sort criteria.
|
||||
This implementation tries to implement as many sort operation in sql,
|
||||
falling back to python sort only when necessary.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.sorts = []
|
||||
|
||||
def add_criteria(self, sort):
|
||||
self.sorts.append(sort)
|
||||
|
||||
def _sql_sorts(self):
|
||||
""" Returns the list of sort for which sql can be used
|
||||
"""
|
||||
# with several Sort, we can use SQL sorting only if there is only
|
||||
# SQL-capable Sort or if the list ends with SQl-capable Sort.
|
||||
sql_sorts = []
|
||||
for sort in reversed(self.sorts):
|
||||
if not sort.order_clause() is None:
|
||||
sql_sorts.append(sort)
|
||||
else:
|
||||
break
|
||||
sql_sorts.reverse()
|
||||
return sql_sorts
|
||||
|
||||
def select_clause(self):
|
||||
sql_sorts = self._sql_sorts()
|
||||
select_strings = []
|
||||
for sort in sql_sorts:
|
||||
select = sort.select_clause()
|
||||
if select:
|
||||
select_strings.append(select)
|
||||
|
||||
select_string = ",".join(select_strings)
|
||||
return select_string
|
||||
|
||||
def union_clause(self):
|
||||
sql_sorts = self._sql_sorts()
|
||||
union_strings = []
|
||||
for sort in sql_sorts:
|
||||
union = sort.union_clause()
|
||||
union_strings.append(union)
|
||||
|
||||
return "".join(union_strings)
|
||||
|
||||
def order_clause(self):
|
||||
sql_sorts = self._sql_sorts()
|
||||
order_strings = []
|
||||
for sort in sql_sorts:
|
||||
order = sort.order_clause()
|
||||
order_strings.append(order)
|
||||
|
||||
return ",".join(order_strings)
|
||||
|
||||
def is_slow(self):
|
||||
for sort in self.sorts:
|
||||
if sort.is_slow():
|
||||
return True
|
||||
return False
|
||||
|
||||
def sort(self, items):
|
||||
slow_sorts = []
|
||||
switch_slow = False
|
||||
for sort in reversed(self.sorts):
|
||||
if switch_slow:
|
||||
slow_sorts.append(sort)
|
||||
elif sort.order_clause() is None:
|
||||
switch_slow = True
|
||||
slow_sorts.append(sort)
|
||||
else:
|
||||
pass
|
||||
|
||||
for sort in slow_sorts:
|
||||
items = sort.sort(items)
|
||||
return items
|
||||
|
||||
|
||||
class FlexFieldSort(Sort):
|
||||
"""Sort object to sort on a flexible attribute field
|
||||
"""
|
||||
def __init__(self, model_cls, field, is_ascending):
|
||||
self.model_cls = model_cls
|
||||
self.field = field
|
||||
self.is_ascending = is_ascending
|
||||
|
||||
def select_clause(self):
|
||||
""" Return a select sql fragment.
|
||||
"""
|
||||
return "sort_flexattr{0!s}.value as flex_{0!s} ".format(self.field)
|
||||
|
||||
def union_clause(self):
|
||||
""" Returns an union sql fragment.
|
||||
"""
|
||||
union = ("LEFT JOIN {flextable} as sort_flexattr{index!s} "
|
||||
"ON {table}.id = sort_flexattr{index!s}.entity_id "
|
||||
"AND sort_flexattr{index!s}.key='{flexattr}' ").format(
|
||||
flextable=self.model_cls._flex_table,
|
||||
table=self.model_cls._table,
|
||||
index=self.field, flexattr=self.field)
|
||||
return union
|
||||
|
||||
def order_clause(self):
|
||||
""" Returns an order sql fragment.
|
||||
"""
|
||||
order = "ASC" if self.is_ascending else "DESC"
|
||||
return "flex_{0} {1} ".format(self.field, order)
|
||||
|
||||
def sort(self, items):
|
||||
return sorted(items, key=attrgetter(self.field),
|
||||
reverse=(not self.is_ascending))
|
||||
|
||||
|
||||
class FixedFieldSort(Sort):
|
||||
"""Sort object to sort on a fixed field
|
||||
"""
|
||||
def __init__(self, field, is_ascending=True):
|
||||
self.field = field
|
||||
self.is_ascending = is_ascending
|
||||
|
||||
def order_clause(self):
|
||||
order = "ASC" if self.is_ascending else "DESC"
|
||||
return "{0} {1}".format(self.field, order)
|
||||
|
||||
def sort(self, items):
|
||||
return sorted(items, key=attrgetter(self.field),
|
||||
reverse=(not self.is_ascending))
|
||||
|
||||
|
||||
class SmartArtistSort(Sort):
|
||||
""" Sort Album or Item on artist sort fields, defaulting back on
|
||||
artist field if the sort specific field is empty.
|
||||
"""
|
||||
def __init__(self, model_cls, is_ascending=True):
|
||||
self.model_cls = model_cls
|
||||
self.is_ascending = is_ascending
|
||||
|
||||
def select_clause(self):
|
||||
return ""
|
||||
|
||||
def union_clause(self):
|
||||
return ""
|
||||
|
||||
def order_clause(self):
|
||||
order = "ASC" if self.is_ascending else "DESC"
|
||||
if 'albumartist_sort' in self.model_cls._fields:
|
||||
exp1 = 'albumartist_sort'
|
||||
exp2 = 'albumartist'
|
||||
elif 'artist_sort' in self.model_cls_fields:
|
||||
exp1 = 'artist_sort'
|
||||
exp2 = 'artist'
|
||||
else:
|
||||
return ""
|
||||
|
||||
order_str = ('(CASE {0} WHEN NULL THEN {1} '
|
||||
'WHEN "" THEN {1} '
|
||||
'ELSE {0} END) {2} ').format(exp1, exp2, order)
|
||||
return order_str
|
||||
|
||||
|
||||
class ComputedFieldSort(Sort):
|
||||
|
||||
def __init__(self, model_cls, field, is_ascending=True):
|
||||
self.is_ascending = is_ascending
|
||||
self.field = field
|
||||
self._getters = model_cls._getters()
|
||||
|
||||
def is_slow(self):
|
||||
return True
|
||||
|
||||
def sort(self, items):
|
||||
return sorted(items, key=lambda x: self._getters[self.field](x),
|
||||
reverse=(not self.is_ascending))
|
||||
|
||||
special_sorts = {'smartartist': SmartArtistSort}
|
||||
|
||||
|
||||
def build_sql(model_cls, query, sort):
|
||||
""" Generate a sql statement (and the values that must be injected into it)
|
||||
from a query, sort and a model class. Query and sort objects are returned
|
||||
only for slow query and slow sort operation.
|
||||
"""
|
||||
where, subvals = query.clause()
|
||||
if where is not None:
|
||||
query = None
|
||||
|
||||
if not sort:
|
||||
sort_select = ""
|
||||
sort_union = ""
|
||||
sort_order = ""
|
||||
sort = None
|
||||
elif isinstance(sort, basestring):
|
||||
sort_select = ""
|
||||
sort_union = ""
|
||||
sort_order = " ORDER BY {0}".format(sort) \
|
||||
if sort else ""
|
||||
sort = None
|
||||
elif isinstance(sort, Sort):
|
||||
select_clause = sort.select_clause()
|
||||
sort_select = " ,{0} ".format(select_clause) \
|
||||
if select_clause else ""
|
||||
sort_union = sort.union_clause()
|
||||
order_clause = sort.order_clause()
|
||||
sort_order = " ORDER BY {0}".format(order_clause) \
|
||||
if order_clause else ""
|
||||
if sort.is_slow():
|
||||
sort = None
|
||||
|
||||
sql = ("SELECT {table}.* {sort_select} FROM {table} {sort_union} WHERE "
|
||||
"{query_clause} {sort_order}").format(
|
||||
sort_select=sort_select,
|
||||
sort_union=sort_union,
|
||||
table=model_cls._table,
|
||||
query_clause=where or '1',
|
||||
sort_order=sort_order
|
||||
)
|
||||
|
||||
return sql, subvals, query, sort
|
||||
|
|
|
|||
|
|
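To make the clause plumbing above concrete, here is a rough sketch of the statement build_sql() ends up assembling when a single flexible-attribute sort is in play. The attribute name 'mood' is hypothetical and the string is pieced together here for illustration, following the FlexFieldSort clause templates in this hunk, not captured from a running beets instance; 'items' and 'item_attributes' are the item tables named elsewhere in this commit.

# Assemble the three clause fragments the way FlexFieldSort does, then
# drop them into the build_sql() template.
field = 'mood'
select = "sort_flexattr{0}.value as flex_{0} ".format(field)
union = ("LEFT JOIN item_attributes as sort_flexattr{0} "
         "ON items.id = sort_flexattr{0}.entity_id "
         "AND sort_flexattr{0}.key='{0}' ").format(field)
order = "flex_{0} ASC ".format(field)
sql = ("SELECT items.* ,{0} FROM items {1} WHERE 1 "
       "ORDER BY {2}").format(select, union, order)
print(sql)   # one LEFT JOIN per flexible-attribute sort criterion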
@ -121,3 +121,33 @@ def query_from_strings(query_cls, model_cls, prefixes, query_parts):
    if not subqueries: # No terms in query.
        subqueries = [query.TrueQuery()]
    return query_cls(subqueries)


def construct_sort_part(model_cls, part):
    """ Creates a Sort object from a single criteria. Returns a `Sort` instance.
    """
    sort = None
    field = part[:-1]
    is_ascending = (part[-1] == '+')
    if field in model_cls._fields:
        sort = query.FixedFieldSort(field, is_ascending)
    elif field in model_cls._getters():
        # Computed field, all following fields must use the slow path.
        sort = query.ComputedFieldSort(model_cls, field, is_ascending)
    elif field in query.special_sorts:
        sort = query.special_sorts[field](model_cls, is_ascending)
    else:
        # Neither fixed nor computed : must be a flex attr.
        sort = query.FlexFieldSort(model_cls, field, is_ascending)
    return sort


def sort_from_strings(model_cls, sort_parts):
    """Creates a Sort object from a list of sort criteria strings.
    """
    if not sort_parts:
        return None
    sort = query.MultipleSort()
    for part in sort_parts:
        sort.add_criteria(construct_sort_part(model_cls, part))
    return sort
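The sort strings handled here are the same ones used in the new config defaults (sort_album: smartartist+): a field name followed by '+' or '-'. A standalone sketch of the dispatch order in construct_sort_part(); the field sets below are invented for the example and the returned strings just name the Sort class that would be chosen.

# Fixed fields can sort in SQL; computed fields must sort in Python;
# special names map to dedicated Sort classes; anything else is treated
# as a flexible attribute and sorted via a LEFT JOIN.
FIXED_FIELDS = {'album', 'albumartist', 'year'}
COMPUTED_FIELDS = {'path'}

def classify(part):
    field, ascending = part[:-1], part.endswith('+')
    if field in FIXED_FIELDS:
        kind = 'FixedFieldSort'
    elif field in COMPUTED_FIELDS:
        kind = 'ComputedFieldSort'
    elif field == 'smartartist':
        kind = 'SmartArtistSort'
    else:
        kind = 'FlexFieldSort'
    return kind, field, ascending

print([classify(p) for p in 'smartartist+ year- mood+'.split()])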
@ -18,6 +18,7 @@ autotagging music files.
|
|||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
import pickle
|
||||
import itertools
|
||||
|
|
@ -33,7 +34,7 @@ from beets import dbcore
|
|||
from beets import plugins
|
||||
from beets import util
|
||||
from beets import config
|
||||
from beets.util import pipeline
|
||||
from beets.util import pipeline, sorted_walk, ancestry
|
||||
from beets.util import syspath, normpath, displayable_path
|
||||
from enum import Enum
|
||||
from beets import mediafile
|
||||
|
|
@ -65,7 +66,12 @@ def _open_state():
|
|||
try:
|
||||
with open(config['statefile'].as_filename()) as f:
|
||||
return pickle.load(f)
|
||||
except (IOError, EOFError):
|
||||
except Exception as exc:
|
||||
# The `pickle` module can emit all sorts of exceptions during
|
||||
# unpickling, including ImportError. We use a catch-all
|
||||
# exception to avoid enumerating them all (the docs don't even have a
|
||||
# full list!).
|
||||
log.debug(u'state file could not be read: {0}'.format(exc))
|
||||
return {}
|
||||
|
||||
|
||||
|
|
@ -103,8 +109,8 @@ def progress_add(toppath, *paths):
|
|||
for path in paths:
|
||||
# Normally `progress_add` will be called with the path
|
||||
# argument increasing. This is because of the ordering in
|
||||
# `autotag.albums_in_dir`. We take advantage of that to make
|
||||
# the code faster
|
||||
# `albums_in_dir`. We take advantage of that to make the
|
||||
# code faster
|
||||
if imported and imported[len(imported) - 1] <= path:
|
||||
imported.append(path)
|
||||
else:
|
||||
|
|
@ -912,38 +918,41 @@ def read_tasks(session):
|
|||
|
||||
# A flat album import merges all items into one album.
|
||||
if session.config['flat'] and not session.config['singletons']:
|
||||
all_items = []
|
||||
for _, items in autotag.albums_in_dir(toppath):
|
||||
all_items += items
|
||||
if all_items:
|
||||
all_item_paths = []
|
||||
for _, item_paths in albums_in_dir(toppath):
|
||||
all_item_paths += item_paths
|
||||
if all_item_paths:
|
||||
if session.already_imported(toppath, [toppath]):
|
||||
log.debug(u'Skipping previously-imported path: {0}'
|
||||
.format(displayable_path(toppath)))
|
||||
skipped += 1
|
||||
continue
|
||||
all_items = read_items(all_item_paths)
|
||||
yield ImportTask(toppath, [toppath], all_items)
|
||||
yield SentinelImportTask(toppath)
|
||||
continue
|
||||
|
||||
# Produce paths under this directory.
|
||||
for paths, items in autotag.albums_in_dir(toppath):
|
||||
for dirs, paths in albums_in_dir(toppath):
|
||||
if session.config['singletons']:
|
||||
for item in items:
|
||||
if session.already_imported(toppath, [item.path]):
|
||||
for path in paths:
|
||||
if session.already_imported(toppath, [path]):
|
||||
log.debug(u'Skipping previously-imported path: {0}'
|
||||
.format(displayable_path(paths)))
|
||||
.format(displayable_path(path)))
|
||||
skipped += 1
|
||||
continue
|
||||
yield SingletonImportTask(toppath, item)
|
||||
yield SentinelImportTask(toppath, paths)
|
||||
yield SingletonImportTask(toppath, read_items([path])[0])
|
||||
yield SentinelImportTask(toppath, dirs)
|
||||
|
||||
else:
|
||||
if session.already_imported(toppath, paths):
|
||||
if session.already_imported(toppath, dirs):
|
||||
log.debug(u'Skipping previously-imported path: {0}'
|
||||
.format(displayable_path(paths)))
|
||||
.format(displayable_path(dirs)))
|
||||
skipped += 1
|
||||
continue
|
||||
yield ImportTask(toppath, paths, items)
|
||||
print(paths)
|
||||
print(read_items(paths))
|
||||
yield ImportTask(toppath, dirs, read_items(paths))
|
||||
|
||||
# Indicate the directory is finished.
|
||||
# FIXME hack to delete extracted archives
|
||||
|
|
@ -1156,3 +1165,129 @@ def group_albums(session):
|
|||
tasks.append(SentinelImportTask(task.toppath, task.paths))
|
||||
|
||||
task = pipeline.multiple(tasks)
|
||||
|
||||
|
||||
MULTIDISC_MARKERS = (r'dis[ck]', r'cd')
|
||||
MULTIDISC_PAT_FMT = r'^(.*%s[\W_]*)\d'
|
||||
|
||||
|
||||
def albums_in_dir(path):
|
||||
"""Recursively searches the given directory and returns an iterable
|
||||
of (paths, items) where paths is a list of directories and items is
|
||||
a list of Items that is probably an album. Specifically, any folder
|
||||
containing any media files is an album.
|
||||
"""
|
||||
collapse_pat = collapse_paths = collapse_items = None
|
||||
ignore = config['ignore'].as_str_seq()
|
||||
|
||||
for root, dirs, files in sorted_walk(path, ignore=ignore, logger=log):
|
||||
items = [os.path.join(root, f) for f in files]
|
||||
# If we're currently collapsing the constituent directories in a
|
||||
# multi-disc album, check whether we should continue collapsing
|
||||
# and add the current directory. If so, just add the directory
|
||||
# and move on to the next directory. If not, stop collapsing.
|
||||
if collapse_paths:
|
||||
if (not collapse_pat and collapse_paths[0] in ancestry(root)) or \
|
||||
(collapse_pat and
|
||||
collapse_pat.match(os.path.basename(root))):
|
||||
# Still collapsing.
|
||||
collapse_paths.append(root)
|
||||
collapse_items += items
|
||||
continue
|
||||
else:
|
||||
# Collapse finished. Yield the collapsed directory and
|
||||
# proceed to process the current one.
|
||||
if collapse_items:
|
||||
yield collapse_paths, collapse_items
|
||||
collapse_pat = collapse_paths = collapse_items = None
|
||||
|
||||
# Check whether this directory looks like the *first* directory
|
||||
# in a multi-disc sequence. There are two indicators: the file
|
||||
# is named like part of a multi-disc sequence (e.g., "Title Disc
|
||||
# 1") or it contains no items but only directories that are
|
||||
# named in this way.
|
||||
start_collapsing = False
|
||||
for marker in MULTIDISC_MARKERS:
|
||||
marker_pat = re.compile(MULTIDISC_PAT_FMT % marker, re.I)
|
||||
match = marker_pat.match(os.path.basename(root))
|
||||
|
||||
# Is this directory the root of a nested multi-disc album?
|
||||
if dirs and not items:
|
||||
# Check whether all subdirectories have the same prefix.
|
||||
start_collapsing = True
|
||||
subdir_pat = None
|
||||
for subdir in dirs:
|
||||
# The first directory dictates the pattern for
|
||||
# the remaining directories.
|
||||
if not subdir_pat:
|
||||
match = marker_pat.match(subdir)
|
||||
if match:
|
||||
subdir_pat = re.compile(
|
||||
r'^%s\d' % re.escape(match.group(1)), re.I
|
||||
)
|
||||
else:
|
||||
start_collapsing = False
|
||||
break
|
||||
|
||||
# Subsequent directories must match the pattern.
|
||||
elif not subdir_pat.match(subdir):
|
||||
start_collapsing = False
|
||||
break
|
||||
|
||||
# If all subdirectories match, don't check other
|
||||
# markers.
|
||||
if start_collapsing:
|
||||
break
|
||||
|
||||
# Is this directory the first in a flattened multi-disc album?
|
||||
elif match:
|
||||
start_collapsing = True
|
||||
# Set the current pattern to match directories with the same
|
||||
# prefix as this one, followed by a digit.
|
||||
collapse_pat = re.compile(
|
||||
r'^%s\d' % re.escape(match.group(1)), re.I
|
||||
)
|
||||
break
|
||||
|
||||
# If either of the above heuristics indicated that this is the
|
||||
# beginning of a multi-disc album, initialize the collapsed
|
||||
# directory and item lists and check the next directory.
|
||||
if start_collapsing:
|
||||
# Start collapsing; continue to the next iteration.
|
||||
collapse_paths = [root]
|
||||
collapse_items = items
|
||||
continue
|
||||
|
||||
# If it's nonempty, yield it.
|
||||
if items:
|
||||
yield [root], items
|
||||
|
||||
# Clear out any unfinished collapse.
|
||||
if collapse_paths and collapse_items:
|
||||
yield collapse_paths, collapse_items
|
||||
|
||||
|
||||
def read_items(paths):
|
||||
"""Return a list of items created from each path.
|
||||
|
||||
If an item could not be read it skips the item and logs an error.
|
||||
"""
|
||||
# TODO remove this method. Should be handled in ImportTask creation.
|
||||
items = []
|
||||
for path in paths:
|
||||
try:
|
||||
items.append(library.Item.from_path(path))
|
||||
except library.ReadError as exc:
|
||||
if isinstance(exc.reason, mediafile.FileTypeError):
|
||||
# Silently ignore non-music files.
|
||||
pass
|
||||
elif isinstance(exc.reason, mediafile.UnreadableFileError):
|
||||
log.warn(u'unreadable file: {0}'.format(
|
||||
displayable_path(path))
|
||||
)
|
||||
else:
|
||||
log.error(u'error reading {0}: {1}'.format(
|
||||
displayable_path(path),
|
||||
exc,
|
||||
))
|
||||
return items
|
||||
|
|
|
|||
beets/library.py
@ -216,6 +216,57 @@ class LibModel(dbcore.Model):
|
|||
plugins.send('database_change', lib=self._db)
|
||||
|
||||
|
||||
class FormattedItemMapping(dbcore.db.FormattedMapping):
|
||||
"""Add lookup for album-level fields.
|
||||
|
||||
Album-level fields take precedence if `for_path` is true.
|
||||
"""
|
||||
|
||||
def __init__(self, item, for_path=False):
|
||||
super(FormattedItemMapping, self).__init__(item, for_path)
|
||||
self.album = item.get_album()
|
||||
self.album_keys = []
|
||||
if self.album:
|
||||
for key in self.album.keys(True):
|
||||
if key in Album.item_keys or key not in item._fields.keys():
|
||||
self.album_keys.append(key)
|
||||
self.all_keys = set(self.model_keys).union(self.album_keys)
|
||||
|
||||
def _get(self, key):
|
||||
"""Get the value for a key, either from the album or the item.
|
||||
Raise a KeyError for invalid keys.
|
||||
"""
|
||||
if self.for_path and key in self.album_keys:
|
||||
return self._get_formatted(self.album, key)
|
||||
elif key in self.model_keys:
|
||||
return self._get_formatted(self.model, key)
|
||||
elif key in self.album_keys:
|
||||
return self._get_formatted(self.album, key)
|
||||
else:
|
||||
raise KeyError(key)
|
||||
|
||||
def __getitem__(self, key):
|
||||
"""Get the value for a key. Certain unset values are remapped.
|
||||
"""
|
||||
value = self._get(key)
|
||||
|
||||
# `artist` and `albumartist` fields fall back to one another.
|
||||
# This is helpful in path formats when the album artist is unset
|
||||
# on as-is imports.
|
||||
if key == 'artist' and not value:
|
||||
return self._get('albumartist')
|
||||
elif key == 'albumartist' and not value:
|
||||
return self._get('artist')
|
||||
else:
|
||||
return value
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.all_keys)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.all_keys)
|
||||
|
||||
|
||||
class Item(LibModel):
|
||||
_table = 'items'
|
||||
_flex_table = 'item_attributes'
|
||||
|
|
@ -296,6 +347,8 @@ class Item(LibModel):
|
|||
`write`.
|
||||
"""
|
||||
|
||||
_formatter = FormattedItemMapping
|
||||
|
||||
@classmethod
|
||||
def _getters(cls):
|
||||
getters = plugins.item_field_getters()
|
||||
|
|
@ -398,7 +451,7 @@ class Item(LibModel):
|
|||
try:
|
||||
mediafile = MediaFile(syspath(path),
|
||||
id3v23=beets.config['id3v23'].get(bool))
|
||||
except (OSError, IOError) as exc:
|
||||
except (OSError, IOError, UnreadableFileError) as exc:
|
||||
raise ReadError(self.path, exc)
|
||||
|
||||
mediafile.update(self)
|
||||
|
|
@ -523,12 +576,6 @@ class Item(LibModel):
|
|||
|
||||
# Templating.
|
||||
|
||||
def _formatted_mapping(self, for_path=False):
|
||||
"""Get a mapping containing string-formatted values from either
|
||||
this item or the associated album, if any.
|
||||
"""
|
||||
return FormattedItemMapping(self, for_path)
|
||||
|
||||
def destination(self, fragment=False, basedir=None, platform=None,
|
||||
path_formats=None):
|
||||
"""Returns the path in the library directory designated for the
|
||||
|
|
@ -548,7 +595,7 @@ class Item(LibModel):
|
|||
for query, path_format in path_formats:
|
||||
if query == PF_KEY_DEFAULT:
|
||||
continue
|
||||
query = get_query(query, type(self))
|
||||
(query, _) = get_query_sort(query, type(self))
|
||||
if query.match(self):
|
||||
# The query matches the item! Use the corresponding path
|
||||
# format.
|
||||
|
|
@ -604,56 +651,6 @@ class Item(LibModel):
|
|||
return normpath(os.path.join(basedir, subpath))
|
||||
|
||||
|
||||
class FormattedItemMapping(dbcore.db.FormattedMapping):
|
||||
"""A `dict`-like formatted view of an item that inherits album fields.
|
||||
|
||||
The accessor ``mapping[key]`` returns the formated version of either
|
||||
``item[key]`` or ``album[key]``. Here `album` is the album
|
||||
associated to `item` if it exists.
|
||||
"""
|
||||
def __init__(self, item, for_path=False):
|
||||
super(FormattedItemMapping, self).__init__(item, for_path)
|
||||
self.album = item.get_album()
|
||||
self.album_keys = []
|
||||
if self.album:
|
||||
for key in self.album.keys(True):
|
||||
if key in Album.item_keys or key not in item._fields.keys():
|
||||
self.album_keys.append(key)
|
||||
self.all_keys = set(self.model_keys).union(self.album_keys)
|
||||
|
||||
def _get(self, key):
|
||||
"""Get the value for a key, either from the album or the item.
|
||||
Raise a KeyError for invalid keys.
|
||||
"""
|
||||
if key in self.album_keys:
|
||||
return self.album._get_formatted(key, self.for_path)
|
||||
elif key in self.model_keys:
|
||||
return self.model._get_formatted(key, self.for_path)
|
||||
else:
|
||||
raise KeyError(key)
|
||||
|
||||
def __getitem__(self, key):
|
||||
"""Get the value for a key. Certain unset values are remapped.
|
||||
"""
|
||||
value = self._get(key)
|
||||
|
||||
# `artist` and `albumartist` fields fall back to one another.
|
||||
# This is helpful in path formats when the album artist is unset
|
||||
# on as-is imports.
|
||||
if key == 'artist' and not value:
|
||||
return self._get('albumartist')
|
||||
elif key == 'albumartist' and not value:
|
||||
return self._get('artist')
|
||||
else:
|
||||
return value
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.all_keys)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.all_keys)
|
||||
|
||||
|
||||
class Album(LibModel):
|
||||
"""Provides access to information about albums stored in a
|
||||
library. Reflects the library's "albums" table, including album
|
||||
|
|
@ -896,9 +893,10 @@ class Album(LibModel):
|
|||
|
||||
# Query construction helper.
|
||||
|
||||
def get_query(val, model_cls):
|
||||
def get_query_sort(val, model_cls):
|
||||
"""Take a value which may be None, a query string, a query string
|
||||
list, or a Query object, and return a suitable Query object.
|
||||
list, or a Query object, and return a suitable Query object and Sort
|
||||
object.
|
||||
|
||||
`model_cls` is the subclass of Model indicating which entity this
|
||||
is a query for (i.e., Album or Item) and is used to determine which
|
||||
|
|
@ -919,7 +917,7 @@ def get_query(val, model_cls):
|
|||
val = [s.decode('utf8') for s in shlex.split(val)]
|
||||
|
||||
if val is None:
|
||||
return dbcore.query.TrueQuery()
|
||||
return (dbcore.query.TrueQuery(), None)
|
||||
|
||||
elif isinstance(val, list) or isinstance(val, tuple):
|
||||
# Special-case path-like queries, which are non-field queries
|
||||
|
|
@ -937,18 +935,23 @@ def get_query(val, model_cls):
|
|||
path_parts = ()
|
||||
non_path_parts = val
|
||||
|
||||
# separate query token and sort token
|
||||
query_val = [s for s in non_path_parts if not s.endswith(('+', '-'))]
|
||||
sort_val = [s for s in non_path_parts if s.endswith(('+', '-'))]
|
||||
|
||||
# Parse remaining parts and construct an AndQuery.
|
||||
query = dbcore.query_from_strings(
|
||||
dbcore.AndQuery, model_cls, prefixes, non_path_parts
|
||||
dbcore.AndQuery, model_cls, prefixes, query_val
|
||||
)
|
||||
sort = dbcore.sort_from_strings(model_cls, sort_val)
|
||||
|
||||
# Add path queries to aggregate query.
|
||||
if path_parts:
|
||||
query.subqueries += [PathQuery('path', s) for s in path_parts]
|
||||
return query
|
||||
return query, sort
|
||||
|
||||
elif isinstance(val, dbcore.Query):
|
||||
return val
|
||||
return val, None
|
||||
|
||||
else:
|
||||
raise ValueError('query must be None or have type Query or str')
|
||||
|
|
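The token split introduced in get_query_sort() above is easy to see in isolation: terms ending in '+' or '-' are peeled off as sort criteria, everything else stays in the query. The command line below is only an example.

parts = ['artist:radiohead', 'year:2003..2007', 'smartartist+', 'year-']
query_val = [s for s in parts if not s.endswith(('+', '-'))]
sort_val = [s for s in parts if s.endswith(('+', '-'))]
print(query_val)  # ['artist:radiohead', 'year:2003..2007']
print(sort_val)   # ['smartartist+', 'year-']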
@ -1015,30 +1018,30 @@ class Library(dbcore.Database):
|
|||
|
||||
# Querying.
|
||||
|
||||
def _fetch(self, model_cls, query, order_by=None):
|
||||
"""Parse a query and fetch.
|
||||
"""
|
||||
def _fetch(self, model_cls, query, sort_order=None):
|
||||
"""Parse a query and fetch. If a order specification is present in the
|
||||
query string the sort_order argument is ignored.
|
||||
"""
|
||||
query, sort = get_query_sort(query, model_cls)
|
||||
sort = sort or sort_order
|
||||
|
||||
return super(Library, self)._fetch(
|
||||
model_cls, get_query(query, model_cls), order_by
|
||||
model_cls, query, sort
|
||||
)
|
||||
|
||||
def albums(self, query=None):
|
||||
def albums(self, query=None, sort_order=None):
|
||||
"""Get a sorted list of :class:`Album` objects matching the
|
||||
given query.
|
||||
given sort order. If a order specification is present in the query
|
||||
string the sort_order argument is ignored.
|
||||
"""
|
||||
order = '{0}, album'.format(
|
||||
_orelse("albumartist_sort", "albumartist")
|
||||
)
|
||||
return self._fetch(Album, query, order)
|
||||
return self._fetch(Album, query, sort_order)
|
||||
|
||||
def items(self, query=None):
|
||||
def items(self, query=None, sort_order=None):
|
||||
"""Get a sorted list of :class:`Item` objects matching the given
|
||||
query.
|
||||
given sort order. If a order specification is present in the query
|
||||
string the sort_order argument is ignored.
|
||||
"""
|
||||
order = '{0}, album, disc, track'.format(
|
||||
_orelse("artist_sort", "artist")
|
||||
)
|
||||
return self._fetch(Item, query, order)
|
||||
return self._fetch(Item, query, sort_order)
|
||||
|
||||
# Convenience accessors.
|
||||
|
||||
|
|
@ -1213,7 +1216,7 @@ class DefaultTemplateFunctions(object):
|
|||
return res
|
||||
|
||||
# Flatten disambiguation value into a string.
|
||||
disam_value = album._get_formatted(disambiguator, True)
|
||||
disam_value = album.formatted(True).get(disambiguator)
|
||||
res = u' [{0}]'.format(disam_value)
|
||||
self.lib._memotable[memokey] = res
|
||||
return res
|
||||
|
|
|
|||
|
|
@ -411,7 +411,7 @@ class StorageStyle(object):
        """
        try:
            return mutagen_file[self.key][0]
        except KeyError:
        except (KeyError, IndexError):
            return None

    def deserialize(self, mutagen_value):

@ -665,7 +665,7 @@ class MP3StorageStyle(StorageStyle):
    def fetch(self, mutagen_file):
        try:
            return mutagen_file[self.key].text[0]
        except KeyError:
        except (KeyError, IndexError):
            return None

    def store(self, mutagen_file, value):
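The broadened except clauses guard against tag frames that exist but are empty: the key lookup succeeds, then indexing [0] raises IndexError. A tiny sketch of that failure mode, with a plain dict standing in for a Mutagen file object:

fake_mutagen_file = {'TALB': []}   # frame present but empty

def fetch(mutagen_file, key):
    try:
        return mutagen_file[key][0]
    except (KeyError, IndexError):
        return None

assert fetch(fake_mutagen_file, 'TALB') is None   # empty frame
assert fetch(fake_mutagen_file, 'TIT2') is None   # missing frame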
@ -576,8 +576,8 @@ def _field_diff(field, old, new):
        return None

    # Get formatted values for output.
    oldstr = old.formatted.get(field, u'')
    newstr = new.formatted.get(field, u'')
    oldstr = old.formatted().get(field, u'')
    newstr = new.formatted().get(field, u'')

    # For strings, highlight changes. For others, colorize the whole
    # thing.

@ -620,7 +620,7 @@ def show_model_changes(new, old=None, fields=None, always=False):

            changes.append(u' {0}: {1}'.format(
                field,
                colorize('red', new.formatted[field])
                colorize('red', new.formatted()[field])
            ))

    # Print changes.
@ -39,6 +39,7 @@ from beets.util.functemplate import Template
from beets import library
from beets import config
from beets.util.confit import _package_path
from beets.dbcore import sort_from_strings

VARIOUS_ARTISTS = u'Various Artists'

@ -966,11 +967,18 @@ def list_items(lib, query, album, fmt):
    albums instead of single items.
    """
    tmpl = Template(ui._pick_format(album, fmt))

    if album:
        for album in lib.albums(query):
        sort_parts = str(config['sort_album']).split()
        sort_order = sort_from_strings(library.Album,
                                       sort_parts)
        for album in lib.albums(query, sort_order):
            ui.print_obj(album, lib, tmpl)
    else:
        for item in lib.items(query):
        sort_parts = str(config['sort_item']).split()
        sort_order = sort_from_strings(library.Item,
                                       sort_parts)
        for item in lib.items(query, sort_order):
            ui.print_obj(item, lib, tmpl)
@ -1178,7 +1186,7 @@ def show_stats(lib, query, exact):
|
|||
total_items = 0
|
||||
artists = set()
|
||||
albums = set()
|
||||
albumartists = set()
|
||||
album_artists = set()
|
||||
|
||||
for item in items:
|
||||
if exact:
|
||||
|
|
@ -1188,8 +1196,9 @@ def show_stats(lib, query, exact):
|
|||
total_time += item.length
|
||||
total_items += 1
|
||||
artists.add(item.artist)
|
||||
albums.add(item.album)
|
||||
albumartists.add(item.albumartist)
|
||||
album_artists.add(item.albumartist)
|
||||
if item.album_id:
|
||||
albums.add(item.album_id)
|
||||
|
||||
size_str = '' + ui.human_bytes(total_size)
|
||||
if exact:
|
||||
|
|
@ -1199,9 +1208,10 @@ def show_stats(lib, query, exact):
|
|||
Total time: {1} ({2:.2f} seconds)
|
||||
Total size: {3}
|
||||
Artists: {4}
|
||||
Album Artists: {5}
|
||||
Albums: {6}""".format(total_items, ui.human_seconds(total_time), total_time,
|
||||
size_str, len(artists), len(albumartists), len(albums)))
|
||||
Albums: {5}
|
||||
Album artists: {6}""".format(total_items, ui.human_seconds(total_time),
|
||||
total_time, size_str, len(artists), len(albums),
|
||||
len(album_artists)))
|
||||
|
||||
|
||||
def stats_func(lib, opts, args):
|
||||
|
|
@ -1242,15 +1252,16 @@ default_commands.append(version_cmd)
|
|||
|
||||
def modify_items(lib, mods, dels, query, write, move, album, confirm):
|
||||
"""Modifies matching items according to user-specified assignments and
|
||||
deletions. `mods` is a list of "field=value" strings indicating
|
||||
deletions.
|
||||
|
||||
`mods` is a dictionary of field and value pairse indicating
|
||||
assignments. `dels` is a list of fields to be deleted.
|
||||
"""
|
||||
# Parse key=value specifications into a dictionary.
|
||||
model_cls = library.Album if album else library.Item
|
||||
fsets = {}
|
||||
for mod in mods:
|
||||
key, value = mod.split('=', 1)
|
||||
fsets[key] = model_cls._parse(key, value)
|
||||
|
||||
for key, value in mods.items():
|
||||
mods[key] = model_cls._parse(key, value)
|
||||
|
||||
# Get the items to modify.
|
||||
items, albums = _do_query(lib, query, album, False)
|
||||
|
|
@ -1258,11 +1269,11 @@ def modify_items(lib, mods, dels, query, write, move, album, confirm):
|
|||
|
||||
# Apply changes *temporarily*, preview them, and collect modified
|
||||
# objects.
|
||||
print_('Modifying %i %ss.' % (len(objs), 'album' if album else 'item'))
|
||||
print_('Modifying {0} {1}s.'
|
||||
.format(len(objs), 'album' if album else 'item'))
|
||||
changed = set()
|
||||
for obj in objs:
|
||||
for field, value in fsets.iteritems():
|
||||
obj[field] = value
|
||||
obj.update(mods)
|
||||
for field in dels:
|
||||
del obj[field]
|
||||
if ui.show_model_changes(obj):
|
||||
|
|
@ -1313,14 +1324,15 @@ def modify_parse_args(args):
    assignments (field=value), and deletions (field!). Returns the result as
    a three-tuple in that order.
    """
    mods = []
    mods = {}
    dels = []
    query = []
    for arg in args:
        if arg.endswith('!') and '=' not in arg and ':' not in arg:
            dels.append(arg[:-1]) # Strip trailing !.
        elif '=' in arg and ':' not in arg.split('=', 1)[0]:
            mods.append(arg)
            key, val = arg.split('=', 1)
            mods[key] = val
        else:
            query.append(arg)
    return query, mods, dels
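A quick sketch of the reworked modify_parse_args() contract for a sample command line (the arguments themselves are invented): assignments now come back as a dict keyed by field rather than as raw "field=value" strings, which is what the modify_items() changes above rely on.

def parse(args):
    # Mirrors the parsing in the hunk above.
    mods, dels, query = {}, [], []
    for arg in args:
        if arg.endswith('!') and '=' not in arg and ':' not in arg:
            dels.append(arg[:-1])
        elif '=' in arg and ':' not in arg.split('=', 1)[0]:
            key, val = arg.split('=', 1)
            mods[key] = val
        else:
            query.append(arg)
    return query, mods, dels

print(parse(['artist:beatles', 'year=1968', 'comments!']))
# (['artist:beatles'], {'year': '1968'}, ['comments'])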
@ -198,7 +198,7 @@ def convert_item(dest_dir, keep_new, path_formats, command, ext,
            continue

        # Write tags from the database to the converted file.
        item.write(path=converted)
        item.try_write(path=converted)

        if keep_new:
            # If we're keeping the transcoded file, read it again (after
@ -17,9 +17,9 @@ discogs-client library.
|
|||
"""
|
||||
from beets.autotag.hooks import AlbumInfo, TrackInfo, Distance
|
||||
from beets.plugins import BeetsPlugin
|
||||
from discogs_client import DiscogsAPIError, Release, Search
|
||||
from discogs_client import Release, Client
|
||||
from discogs_client.exceptions import DiscogsAPIError
|
||||
import beets
|
||||
import discogs_client
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
|
|
@ -30,10 +30,6 @@ log = logging.getLogger('beets')
|
|||
urllib3_logger = logging.getLogger('requests.packages.urllib3')
|
||||
urllib3_logger.setLevel(logging.CRITICAL)
|
||||
|
||||
# Set user-agent for discogs client.
|
||||
discogs_client.user_agent = 'beets/%s +http://beets.radbox.org/' % \
|
||||
beets.__version__
|
||||
|
||||
|
||||
class DiscogsPlugin(BeetsPlugin):
|
||||
|
||||
|
|
@ -42,6 +38,8 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
self.config.add({
|
||||
'source_weight': 0.5,
|
||||
})
|
||||
self.discogs_client = Client('beets/%s +http://beets.radbox.org/' %
|
||||
beets.__version__)
|
||||
|
||||
def album_distance(self, items, album_info, mapping):
|
||||
"""Returns the album distance.
|
||||
|
|
@ -78,7 +76,7 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
album_id)
|
||||
if not match:
|
||||
return None
|
||||
result = Release(match.group(2))
|
||||
result = Release(self.discogs_client, {'id': int(match.group(2))})
|
||||
# Try to obtain title to verify that we indeed have a valid Release
|
||||
try:
|
||||
getattr(result, 'title')
|
||||
|
|
@ -96,24 +94,19 @@ class DiscogsPlugin(BeetsPlugin):
|
|||
# cause a query to return no results, even if they match the artist or
|
||||
# album title. Use `re.UNICODE` flag to avoid stripping non-english
|
||||
# word characters.
|
||||
query = re.sub(r'(?u)\W+', ' ', query)
|
||||
query = re.sub(r'(?u)\W+', ' ', query).encode('utf8')
|
||||
# Strip medium information from query, Things like "CD1" and "disk 1"
|
||||
# can also negate an otherwise positive result.
|
||||
query = re.sub(r'(?i)\b(CD|disc)\s*\d+', '', query)
|
||||
albums = []
|
||||
for result in Search(query).results():
|
||||
if isinstance(result, Release):
|
||||
albums.append(self.get_album_info(result))
|
||||
if len(albums) >= 5:
|
||||
break
|
||||
return albums
|
||||
releases = self.discogs_client.search(query, type='release').page(1)
|
||||
return [self.get_album_info(release) for release in releases[:5]]
|
||||
|
||||
def get_album_info(self, result):
|
||||
"""Returns an AlbumInfo object for a discogs Release object.
|
||||
"""
|
||||
artist, artist_id = self.get_artist([a.data for a in result.artists])
|
||||
album = re.sub(r' +', ' ', result.title)
|
||||
album_id = result.data['id']
|
||||
artist, artist_id = self.get_artist(result.data['artists'])
|
||||
# Use `.data` to access the tracklist directly instead of the
|
||||
# convenient `.tracklist` property, which will strip out useful artist
|
||||
# information and leave us with skeleton `Artist` objects that will
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@
import logging
from beets.plugins import BeetsPlugin
from beets.importer import action
from beets.library import get_query
from beets.library import get_query_sort
from beets.library import Item
from beets.library import Album

@ -57,9 +57,9 @@ class IHatePlugin(BeetsPlugin):
        for query_string in action_patterns:
            query = None
            if task.is_album:
                query = get_query(query_string, Album)
                (query, _) = get_query_sort(query_string, Album)
            else:
                query = get_query(query_string, Item)
                (query, _) = get_query_sort(query_string, Item)
            if any(query.match(item) for item in task.imported_items()):
                return True
        return False
@ -16,61 +16,137 @@
|
|||
"""
|
||||
|
||||
import os
|
||||
import logging
|
||||
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beets import ui
|
||||
from beets import mediafile
|
||||
from beets import util
|
||||
from beets.util import displayable_path, normpath, syspath
|
||||
|
||||
|
||||
def info(paths):
|
||||
# Set up fields to output.
|
||||
fields = list(mediafile.MediaFile.readable_fields())
|
||||
fields.remove('art')
|
||||
fields.remove('images')
|
||||
log = logging.getLogger('beets')
|
||||
|
||||
# Line format.
|
||||
other_fields = ['album art']
|
||||
maxwidth = max(len(name) for name in fields + other_fields)
|
||||
lineformat = u'{{0:>{0}}}: {{1}}'.format(maxwidth)
|
||||
|
||||
def run(lib, opts, args):
|
||||
"""Print tag info or library data for each file referenced by args.
|
||||
|
||||
Main entry point for the `beet info ARGS...` command.
|
||||
|
||||
If an argument is a path pointing to an existing file, then the tags
|
||||
of that file are printed. All other arguments are considered
|
||||
queries, and for each item matching all those queries the tags from
|
||||
the file are printed.
|
||||
|
||||
If `opts.summarize` is true, the function merges all tags into one
|
||||
dictionary and only prints that. If two files have different values
|
||||
for the same tag, the value is set to '[various]'
|
||||
"""
|
||||
if opts.library:
|
||||
data_collector = library_data
|
||||
else:
|
||||
data_collector = tag_data
|
||||
|
||||
first = True
|
||||
for path in paths:
|
||||
if not first:
|
||||
ui.print_()
|
||||
|
||||
path = util.normpath(path)
|
||||
if not os.path.isfile(path):
|
||||
ui.print_(u'not a file: {0}'.format(
|
||||
util.displayable_path(path)
|
||||
))
|
||||
continue
|
||||
ui.print_(path)
|
||||
summary = {}
|
||||
for data_emitter in data_collector(lib, ui.decargs(args)):
|
||||
try:
|
||||
mf = mediafile.MediaFile(path)
|
||||
except mediafile.UnreadableFileError:
|
||||
ui.print_('cannot read file: {0}'.format(
|
||||
util.displayable_path(path)
|
||||
))
|
||||
data = data_emitter()
|
||||
except mediafile.UnreadableFileError as ex:
|
||||
log.error('cannot read file: {0}'.format(ex.message))
|
||||
continue
|
||||
|
||||
# Basic fields.
|
||||
for name in fields:
|
||||
ui.print_(lineformat.format(name, getattr(mf, name)))
|
||||
# Extra stuff.
|
||||
ui.print_(lineformat.format('album art', mf.art is not None))
|
||||
if opts.summarize:
|
||||
update_summary(summary, data)
|
||||
else:
|
||||
if not first:
|
||||
ui.print_()
|
||||
else:
|
||||
print_data(data)
|
||||
first = False
|
||||
|
||||
first = False
|
||||
if opts.summarize:
|
||||
print_data(summary)
|
||||
|
||||
|
||||
def tag_data(lib, args):
|
||||
query = []
|
||||
for arg in args:
|
||||
path = normpath(arg)
|
||||
if os.path.isfile(syspath(path)):
|
||||
yield tag_data_emitter(path)
|
||||
else:
|
||||
query.append(arg)
|
||||
|
||||
if query:
|
||||
for item in lib.items(query):
|
||||
yield tag_data_emitter(item.path)
|
||||
|
||||
|
||||
def tag_data_emitter(path):
|
||||
def emitter():
|
||||
fields = list(mediafile.MediaFile.readable_fields())
|
||||
fields.remove('images')
|
||||
mf = mediafile.MediaFile(syspath(path))
|
||||
tags = {}
|
||||
for field in fields:
|
||||
tags[field] = getattr(mf, field)
|
||||
tags['art'] = mf.art is not None
|
||||
tags['path'] = displayable_path(path)
|
||||
return tags
|
||||
return emitter
|
||||
|
||||
|
||||
def library_data(lib, args):
|
||||
for item in lib.items(args):
|
||||
yield library_data_emitter(item)
|
||||
|
||||
|
||||
def library_data_emitter(item):
|
||||
def emitter():
|
||||
data = dict(item.formatted())
|
||||
data['path'] = displayable_path(item.path)
|
||||
return data
|
||||
return emitter
|
||||
|
||||
|
||||
def update_summary(summary, tags):
|
||||
for key, value in tags.iteritems():
|
||||
if key not in summary:
|
||||
summary[key] = value
|
||||
elif summary[key] != value:
|
||||
summary[key] = '[various]'
|
||||
return summary
|
||||
|
||||
|
||||
def print_data(data):
|
||||
path = data.pop('path')
|
||||
formatted = {}
|
||||
for key, value in data.iteritems():
|
||||
if isinstance(value, list):
|
||||
formatted[key] = u'; '.join(value)
|
||||
if value is not None:
|
||||
formatted[key] = value
|
||||
|
||||
maxwidth = max(len(key) for key in formatted)
|
||||
lineformat = u'{{0:>{0}}}: {{1}}'.format(maxwidth)
|
||||
|
||||
if path:
|
||||
ui.print_(displayable_path(path))
|
||||
|
||||
for field in sorted(formatted):
|
||||
value = formatted[field]
|
||||
if isinstance(value, list):
|
||||
value = u'; '.join(value)
|
||||
ui.print_(lineformat.format(field, value))
|
||||
|
||||
|
||||
class InfoPlugin(BeetsPlugin):
|
||||
|
||||
def commands(self):
|
||||
cmd = ui.Subcommand('info', help='show file metadata')
|
||||
|
||||
def func(lib, opts, args):
|
||||
if not args:
|
||||
raise ui.UserError('no file specified')
|
||||
info(args)
|
||||
cmd.func = func
|
||||
cmd.func = run
|
||||
cmd.parser.add_option('-l', '--library', action='store_true',
|
||||
help='show library fields instead of tags')
|
||||
cmd.parser.add_option('-s', '--summarize', action='store_true',
|
||||
help='summarize the tags of all files')
|
||||
return [cmd]
|
||||
@ -22,6 +22,7 @@ import urllib
|
|||
import json
|
||||
import unicodedata
|
||||
import difflib
|
||||
import itertools
|
||||
|
||||
from beets.plugins import BeetsPlugin
|
||||
from beets import ui
|
||||
|
|
@ -130,33 +131,51 @@ def strip_cruft(lyrics, wscollapse=True):
|
|||
return lyrics
|
||||
|
||||
|
||||
def split_multi_titles(s):
|
||||
"""Check for a dual song (e.g. Pink Floyd - Speak to Me / Breathe)
|
||||
and returns titles as a list or None if song is not dual."""
|
||||
if '/' not in s:
|
||||
return None
|
||||
return [x.strip() for x in s.split('/')]
|
||||
def search_pairs(item):
|
||||
"""Yield a pairs of artists and titles to search for.
|
||||
|
||||
The first item in the pair is the name of the artist, the second
|
||||
item is a list of song names.
|
||||
|
||||
def remove_ft_artist_suffix(s):
|
||||
"""Remove any featuring artists from an artist string.
|
||||
In addition to the artist and title obtained from the `item`, the
method tries to strip extra information like parenthesized suffixes
and featured artists from the strings and adds them as candidates.
The method also tries to split multiple titles separated with `/`.
|
||||
"""
|
||||
pattern = r"(.*?) (&|\b(and|feat(uring)?\b))"
|
||||
match = re.search(pattern, s, re.IGNORECASE)
|
||||
|
||||
title, artist = item.title, item.artist
|
||||
titles = [title]
|
||||
artists = [artist]
|
||||
|
||||
# Remove any featuring artists from the artists name
|
||||
pattern = r"(.*?) (&|\b(and|ft|feat(uring)?\b))"
|
||||
match = re.search(pattern, artist, re.IGNORECASE)
|
||||
if match:
|
||||
s = match.group(1)
|
||||
return s
|
||||
artists.append(match.group(1))
|
||||
|
||||
|
||||
def remove_parenthesized_suffix(s):
|
||||
"""Remove a parenthesized suffix from a title string. Common
|
||||
examples include (live), (remix), and (acoustic).
|
||||
"""
|
||||
# Remove a parenthesized suffix from a title string. Common
|
||||
# examples include (live), (remix), and (acoustic).
|
||||
pattern = r"(.+?)\s+[(].*[)]$"
|
||||
match = re.search(pattern, s, re.IGNORECASE)
|
||||
match = re.search(pattern, title, re.IGNORECASE)
|
||||
if match:
|
||||
s = match.group(1)
|
||||
return s
|
||||
titles.append(match.group(1))
|
||||
|
||||
# Remove any featuring artists from the title
|
||||
pattern = r"(.*?) \b(ft|feat(uring)?)\b"
|
||||
for title in titles:
|
||||
match = re.search(pattern, title, re.IGNORECASE)
|
||||
if match:
|
||||
titles.append(match.group(1))
|
||||
|
||||
# Check for a dual song (e.g. Pink Floyd - Speak to Me / Breathe)
# and, if found, search for each of its parts.
|
||||
multi_titles = []
|
||||
for title in titles:
|
||||
multi_titles.append([title])
|
||||
if '/' in title:
|
||||
multi_titles.append([x.strip() for x in title.split('/')])
|
||||
|
||||
return itertools.product(artists, multi_titles)
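A minimal usage sketch of the new `search_pairs` generator, mirroring the fixtures exercised in `test/test_lyrics.py` later in this diff (the item values are illustrative only):

from beets.library import Item
from beetsplug import lyrics

# Illustrative item; the same artist/title pair appears in the tests below.
item = Item(artist='Alice ft. Bob', title='song')

pairs = list(lyrics.search_pairs(item))
assert ('Alice ft. Bob', ['song']) in pairs  # original artist and title kept
assert ('Alice', ['song']) in pairs          # featured artist stripped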
|
||||
|
||||
|
||||
def _encode(s):
|
||||
|
|
@ -492,45 +511,31 @@ class LyricsPlugin(BeetsPlugin):
|
|||
parameter controls the visibility of the function's status log
|
||||
messages.
|
||||
"""
|
||||
fallback = self.config['fallback'].get()
|
||||
|
||||
# Skip if the item already has lyrics.
|
||||
if not force and item.lyrics:
|
||||
log.log(loglevel, u'lyrics already present: %s - %s' %
|
||||
(item.artist, item.title))
|
||||
return
|
||||
|
||||
artist = remove_ft_artist_suffix(item.artist)
|
||||
title = remove_parenthesized_suffix(
|
||||
remove_ft_artist_suffix(item.title)
|
||||
)
|
||||
lyrics = None
|
||||
for artist, titles in search_pairs(item):
|
||||
lyrics = [self.get_lyrics(artist, title) for title in titles]
|
||||
if any(lyrics):
|
||||
break
|
||||
|
||||
# Fetch lyrics.
|
||||
lyrics = self.get_lyrics(artist, title)
|
||||
lyrics = u"\n\n---\n\n".join([l for l in lyrics if l])
|
||||
|
||||
if not lyrics:
|
||||
# Check for combined title.
|
||||
# (e.g. Pink Floyd - Speak to Me / Breathe)
|
||||
titles = split_multi_titles(title)
|
||||
if titles:
|
||||
for t in titles:
|
||||
lyrics_title = self.get_lyrics(artist, t)
|
||||
if lyrics_title:
|
||||
if lyrics:
|
||||
lyrics += u"\n\n---\n\n%s" % lyrics_title
|
||||
else:
|
||||
lyrics = lyrics_title
|
||||
|
||||
if not lyrics:
|
||||
if lyrics:
|
||||
log.log(loglevel, u'fetched lyrics: %s - %s' %
|
||||
(item.artist, item.title))
|
||||
else:
|
||||
log.log(loglevel, u'lyrics not found: %s - %s' %
|
||||
(artist, title))
|
||||
(item.artist, item.title))
|
||||
fallback = self.config['fallback'].get()
|
||||
if fallback:
|
||||
lyrics = fallback
|
||||
else:
|
||||
return
|
||||
else:
|
||||
log.log(loglevel, u'fetched lyrics : %s - %s' %
|
||||
(artist, title))
|
||||
|
||||
item.lyrics = lyrics
|
||||
|
||||
|
|
@ -542,12 +547,6 @@ class LyricsPlugin(BeetsPlugin):
|
|||
"""Fetch lyrics, trying each source in turn. Return a string or
|
||||
None if no lyrics were found.
|
||||
"""
|
||||
# Remove featuring artists from search.
|
||||
pattern = u"(.*) feat(uring|\.)?\s\S+"
|
||||
match = re.search(pattern, artist, re.IGNORECASE)
|
||||
if match:
|
||||
artist = match.group(0)
|
||||
|
||||
for backend in self.backends:
|
||||
lyrics = backend(artist, title)
|
||||
if lyrics:
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ from beets.plugins import BeetsPlugin
|
|||
from beets import autotag, library, ui, util
|
||||
from beets.autotag import hooks
|
||||
from beets import config
|
||||
from collections import defaultdict
|
||||
|
||||
log = logging.getLogger('beets')
|
||||
|
||||
|
|
@ -64,14 +65,27 @@ def mbsync_albums(lib, query, move, pretend, write):
|
|||
log.info(u'Release ID not found: {0}'.format(a.mb_albumid))
|
||||
continue
|
||||
|
||||
# Map recording MBIDs to their information. Recordings can appear
|
||||
# multiple times on a release, so each MBID maps to a list of TrackInfo
|
||||
# objects.
|
||||
track_index = defaultdict(list)
|
||||
for track_info in album_info.tracks:
|
||||
track_index[track_info.track_id].append(track_info)
|
||||
|
||||
# Construct a track mapping according to MBIDs. This should work
|
||||
# for albums that have missing or extra tracks.
|
||||
# for albums that have missing or extra tracks. If there are multiple
|
||||
# copies of a recording, they are disambiguated using their disc and
|
||||
# track number.
|
||||
mapping = {}
|
||||
for item in items:
|
||||
for track_info in album_info.tracks:
|
||||
if item.mb_trackid == track_info.track_id:
|
||||
mapping[item] = track_info
|
||||
break
|
||||
candidates = track_index[item.mb_trackid]
|
||||
if len(candidates) == 1:
|
||||
mapping[item] = candidates[0]
|
||||
else:
|
||||
for c in candidates:
|
||||
if c.medium_index == item.track and c.medium == item.disc:
|
||||
mapping[item] = c
|
||||
break
|
||||
|
||||
# Apply.
|
||||
with lib.transaction():
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ def _items_for_query(lib, playlist, album=False):
|
|||
query_strings = [query_strings]
|
||||
model = library.Album if album else library.Item
|
||||
query = dbcore.OrQuery(
|
||||
[library.get_query(q, model) for q in query_strings]
|
||||
[library.get_query_sort(q, model)[0] for q in query_strings]
|
||||
)
|
||||
|
||||
# Execute query, depending on type.
|
||||
|
|
|
|||
|
|
@ -1,35 +1,84 @@
|
|||
Changelog
=========

1.3.7 (in development)
1.3.8 (in development)
----------------------

New stuff
This release adds **sorting** to beets queries. See :ref:`query-sort`.

Features:

* :doc:`/plugins/info`: Files can be specified through library queries
  and the ``--library`` option prints library fields instead of tags.
  Tags and library fields for multiple files can be summarized with the
  ``--summarize`` option.

Fixes:

* Invalid state files don't crash the importer.
* :doc:`/plugins/lyrics`: Only strip featured artists and
  parenthesized title suffixes if no lyrics for the original artist and
  title were found.
* Fix a crash when reading some files with missing tags.
* :doc:`/plugins/discogs`: Compatibility with the new 2.0 version of the
  `discogs_client`_ Python library. If you were using the old version, you will
  need to upgrade to the latest version of the library to use the
  correspondingly new version of the plugin (e.g., with
  ``pip install -U discogs-client``). Thanks to Andriy Kohut.
* Fix a crash when writing files that can't be read. Thanks to Jocelyn De La
  Rosa.
* The :ref:`stats-cmd` command now counts album artists. The album count also
  more accurately reflects the number of albums in the database.
* :doc:`/plugins/convert`: Avoid crashes when tags cannot be written to newly
  converted files.
* Formatting templates with item data no longer confusingly shows album-level
  data when the two are inconsistent.
* Resuming imports and beginning incremental imports should now be much faster
  when there is a lot of previously-imported music to skip.

.. _discogs_client: https://github.com/discogs/discogs_client
|
||||
|
||||
|
||||
1.3.7 (August 22, 2014)
|
||||
-----------------------
|
||||
|
||||
This release of beets fixes all the bugs, and you can be confident that you
|
||||
will never again find any bugs in beets, ever.
|
||||
It also adds support for plain old AIFF files and adds three more plugins,
|
||||
including a nifty one that lets you measure a song's tempo by tapping out the
|
||||
beat on your keyboard.
|
||||
The importer deals more elegantly with duplicates and you can broaden your
|
||||
cover art search to the entire web with Google Image Search.
|
||||
|
||||
The big new features are:
|
||||
|
||||
* Support for AIFF files. Tags are stored as ID3 frames in one of the file's
|
||||
IFF chunks. Thanks to Evan Purkhiser for contributing support to `Mutagen`_.
|
||||
* The new :doc:`/plugins/importadded` reads files' modification times to set
|
||||
their "added" date. Thanks to Stig Inge Lea Bjørnsen.
|
||||
* Support for AIFF files. Tags are stored as ID3 frames in one of the file's
|
||||
IFF chunks.
|
||||
* A new :ref:`required` configuration option for the importer skips matches
|
||||
that are missing certain data. Thanks to oprietop.
|
||||
* The new :doc:`/plugins/bpm` lets you manually measure the tempo of a playing
|
||||
song. Thanks to aroquen.
|
||||
* The new :doc:`/plugins/spotify` generates playlists for your `Spotify`_
|
||||
account. Thanks to Olin Gay.
|
||||
* A new :ref:`required` configuration option for the importer skips matches
|
||||
that are missing certain data. Thanks to oprietop.
|
||||
* When the importer detects duplicates, it now shows you some details about
|
||||
the potentially-replaced music so you can make an informed decision. Thanks
|
||||
to Howard Jones.
|
||||
* :doc:`/plugins/fetchart`: You can now optionally search for cover art on
|
||||
Google Image Search. Thanks to Lemutar.
|
||||
* A new :ref:`asciify-paths` configuration option replaces all non-ASCII
|
||||
characters in paths.
|
||||
|
||||
.. _Mutagen: https://bitbucket.org/lazka/mutagen
|
||||
.. _Spotify: https://www.spotify.com/
|
||||
|
||||
Little improvements and fixes:
|
||||
And the multitude of little improvements and fixes:
|
||||
|
||||
* Compatibility with the latest version of `Mutagen`_, 1.23.
|
||||
* :doc:`/plugins/web`: Lyrics now display readably with correct line breaks.
|
||||
Also, the detail view scrolls to reveal all of the lyrics. Thanks to Meet
|
||||
Udeshi.
|
||||
* Compatibility with the latest version of Mutagen, 1.23.
|
||||
* :doc:`/plugins/fetchart`: You can now optionally search for cover art on
|
||||
Google Image Search. Thanks to Lemutar.
|
||||
* :doc:`/plugins/play`: The ``command`` config option can now contain
|
||||
arguments (rather than just an executable). Thanks to Alessandro Ghedini.
|
||||
* Fix an error when using the :ref:`modify-cmd` command to remove a flexible
|
||||
|
|
@ -51,11 +100,11 @@ Little improvements and fixes:
|
|||
* Don't display changes for fields that are not in the restricted field set.
|
||||
This fixes :ref:`write-cmd` showing changes for fields that are not written
|
||||
to the file.
|
||||
* :ref:`write-cmd` command: Don't display the item name if there are no
|
||||
changes for it.
|
||||
* When using both :doc:`/plugins/convert` and :doc:`/plugins/scrub`, avoid
|
||||
scrubbing the source file of conversions. (Fix a regression introduced in
|
||||
the previous release.)
|
||||
* The :ref:`write-cmd` command avoids displaying the item name if there are
|
||||
no changes for it.
|
||||
* When using both the :doc:`/plugins/convert` and the :doc:`/plugins/scrub`,
|
||||
avoid scrubbing the source file of conversions. (Fix a regression introduced
|
||||
in the previous release.)
|
||||
* :doc:`/plugins/replaygain`: Logging is now quieter during import. Thanks to
|
||||
Yevgeny Bezman.
|
||||
* :doc:`/plugins/fetchart`: When loading art from the filesystem, we now
|
||||
|
|
@ -71,7 +120,7 @@ Little improvements and fixes:
|
|||
* :doc:`/plugins/bucket`: You can now customize the definition of alphanumeric
|
||||
"ranges" using regular expressions. And the heuristic for detecting years
|
||||
has been improved. Thanks to sotho.
|
||||
* Already imported singleton tracks are skipped when resuming an
|
||||
* Already-imported singleton tracks are skipped when resuming an
|
||||
import.
|
||||
* :doc:`/plugins/chroma`: A new ``auto`` configuration option disables
|
||||
fingerprinting on import. Thanks to ddettrittus.
|
||||
|
|
@ -79,8 +128,6 @@ Little improvements and fixes:
|
|||
transcoding preset from the command-line.
|
||||
* :doc:`/plugins/convert`: Transcoding presets can now omit their filename
|
||||
extensions (extensions default to the name of the preset).
|
||||
* A new :ref:`asciify-paths` configuration option replaces all non-ASCII
|
||||
characters in paths.
|
||||
* :doc:`/plugins/convert`: A new ``--pretend`` option lets you preview the
|
||||
commands the plugin will execute without actually taking any action. Thanks
|
||||
to Dietrich Daroch.
|
||||
|
|
@ -90,6 +137,8 @@ Little improvements and fixes:
|
|||
work in ``auto`` mode. Thanks to Harry Khanna.
|
||||
* The :ref:`write-cmd` command now has a ``--force`` flag. Thanks again to
|
||||
Harry Khanna.
|
||||
* :doc:`/plugins/mbsync`: Track alignment now works with albums that have
|
||||
multiple copies of the same recording. Thanks to Rui Gonçalves.
|
||||
|
||||
|
||||
1.3.6 (May 10, 2014)
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ project = u'beets'
|
|||
copyright = u'2012, Adrian Sampson'
|
||||
|
||||
version = '1.3'
|
||||
release = '1.3.7'
|
||||
release = '1.3.8'
|
||||
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
|
|
|
|||
|
|
@ -12,5 +12,18 @@ Enable the plugin and then type::
|
|||
and the plugin will enumerate all the tags in the specified file. It also
accepts multiple filenames on a single command line.

You can also enter a :doc:`query </reference/query>` to inspect music from
your library::

    $ beet info beatles

Command-line options include:

* ``--library`` or ``-l``: Show data from the library database instead of the
  files' tags.
* ``--summarize`` or ``-s``: Merge all the information from multiple files
  into a single list of values. If the tags differ across the files, print
  ``[various]`` (see the example below).
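For example, to print a single summarized listing for every file matching a query (the query here is only an illustration)::

    $ beet info --summarize artist:Beatles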
|
||||
|
||||
.. _id3v2: http://id3v2.sourceforge.net
|
||||
.. _mp3info: http://www.ibiblio.org/mp3info/
|
||||
|
|
|
|||
|
|
@ -174,8 +174,9 @@ list
|
|||
|
||||
Want to search for "Gronlandic Edit" by of Montreal? Try ``beet list
|
||||
gronlandic``. Maybe you want to see everything released in 2009 with
|
||||
"vegetables" in the title? Try ``beet list year:2009 title:vegetables``. (Read
|
||||
more in :doc:`query`.)
|
||||
"vegetables" in the title? Try ``beet list year:2009 title:vegetables``. You
|
||||
can also specify the order used when outputting the results (Read more in
|
||||
:doc:`query`.)
|
||||
|
||||
You can use the ``-a`` switch to search for albums instead of individual items.
|
||||
In this case, the queries you use are restricted to album-level fields: for
|
||||
|
|
|
|||
|
|
@ -188,6 +188,24 @@ Format to use when listing *albums* with :ref:`list-cmd` and other
|
|||
commands. Defaults to ``$albumartist - $album``. The ``-f`` command-line
|
||||
option overrides this setting.
|
||||
|
||||
.. _sort_item:

sort_item
~~~~~~~~~

Sort order to use when listing *individual items* with the :ref:`list-cmd`
command and other commands that need to print out items. Defaults to
``smartartist+``. Any command-line sort order overrides this setting.

.. _sort_album:

sort_album
~~~~~~~~~~

Sort order to use when listing *albums* with the :ref:`list-cmd`
command. Defaults to ``smartartist+``. Any command-line sort order overrides
this setting.
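A configuration sketch showing where these options live (the values are examples only; ``smartartist+`` is the documented default)::

    sort_item: smartartist+
    sort_album: year+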
|
||||
|
||||
.. _original_date:
|
||||
|
||||
original_date
|
||||
|
|
|
|||
|
|
@ -183,3 +183,32 @@ equivalent::
|
|||
Note that this only matches items that are *already in your library*, so a path
|
||||
query won't necessarily find *all* the audio files in a directory---just the
|
||||
ones you've already added to your beets library.
|
||||
|
||||
|
||||
.. _query-sort:

Sort Order
----------

You can also specify the order in which results are returned. Of course, this
is only useful when displaying the results, for example with the ``list``
command, and has no effect when the query is used as a filter for a command.
Use the name of the field you want to sort on, followed by a ``+`` or ``-``
sign for ascending or descending order. For example, this command::

    $ beet list -a year+

will list all albums in chronological order.

There is a special ``smartartist`` sort that uses the sort-specific fields
(``artist_sort`` for items and ``albumartist_sort`` for albums) but falls back
to the standard artist fields if these are empty. When no sort order is
specified, ``smartartist+`` is used (but this is configurable).

You can also specify several sort orders, which are applied in the same order
in which they appear in your query::

    $ beet list -a genre+ year+

This command will sort all albums by genre and, within each genre, in
chronological order.
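For plugin authors, the same ordering can be constructed programmatically with the new sort objects; this is a minimal sketch based on the ``dbcore`` API exercised in ``test/test_sort.py`` further down in this diff::

    import beets.library
    from beets import dbcore

    lib = beets.library.Library(':memory:')  # throwaway library for illustration

    # Equivalent of the query string 'genre+ year+' for albums.
    sort = dbcore.query.MultipleSort()
    sort.add_criteria(dbcore.query.FixedFieldSort('genre', True))  # True = ascending
    sort.add_criteria(dbcore.query.FixedFieldSort('year', True))

    for album in lib.albums(u'', sort):
        print(album.album)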
|
||||
|
|
|
|||
setup.py
|
|
@ -98,7 +98,7 @@ setup(
|
|||
'beatport': ['requests'],
|
||||
'fetchart': ['requests'],
|
||||
'chroma': ['pyacoustid'],
|
||||
'discogs': ['discogs-client'],
|
||||
'discogs': ['discogs-client>=2.0.0'],
|
||||
'echonest': ['pyechonest'],
|
||||
'echonest_tempo': ['pyechonest'],
|
||||
'lastgenre': ['pylast'],
|
||||
|
|
|
|||
|
|
@ -85,6 +85,30 @@ def item(lib=None):
|
|||
lib.add(i)
|
||||
return i
|
||||
|
||||
_album_ident = 0
|
||||
def album(lib=None):
|
||||
global _item_ident
|
||||
_item_ident += 1
|
||||
i = beets.library.Album(
|
||||
artpath= None,
|
||||
albumartist = 'some album artist',
|
||||
albumartist_sort = 'some sort album artist',
|
||||
albumartist_credit = 'some album artist credit',
|
||||
album = 'the album',
|
||||
genre = 'the genre',
|
||||
year = 2014,
|
||||
month = 2,
|
||||
day = 5,
|
||||
tracktotal = 0,
|
||||
disctotal = 1,
|
||||
comp = False,
|
||||
mb_albumid = 'someID-1',
|
||||
mb_albumartistid = 'someID-1'
|
||||
)
|
||||
if lib:
|
||||
lib.add(i)
|
||||
return i
|
||||
|
||||
# Dummy import session.
|
||||
def import_session(lib=None, logfile=None, paths=[], query=[], cli=False):
|
||||
cls = commands.TerminalImportSession if cli else importer.ImportSession
|
||||
|
|
|
|||
|
|
@ -80,12 +80,13 @@ def capture_stdout():
|
|||
'spam'
|
||||
"""
|
||||
org = sys.stdout
|
||||
sys.stdout = StringIO()
|
||||
sys.stdout = capture = StringIO()
|
||||
sys.stdout.encoding = 'utf8'
|
||||
try:
|
||||
yield sys.stdout
|
||||
finally:
|
||||
sys.stdout = org
|
||||
print(capture.getvalue())
|
||||
|
||||
|
||||
def has_program(cmd, args=['--version']):
|
||||
|
|
@ -236,8 +237,8 @@ class TestHelper(object):
|
|||
path = os.path.join(_common.RSRC, 'full.' + ext)
|
||||
for i in range(count):
|
||||
item = Item.from_path(str(path))
|
||||
item.album = u'\xc3\xa4lbum {0}'.format(i) # Check unicode paths
|
||||
item.title = u't\xc3\x8ftle {0}'.format(i)
|
||||
item.album = u'\u00e4lbum {0}'.format(i) # Check unicode paths
|
||||
item.title = u't\u00eftle {0}'.format(i)
|
||||
item.add(self.lib)
|
||||
item.move(copy=True)
|
||||
item.store()
|
||||
|
|
@ -289,6 +290,11 @@ class TestHelper(object):
|
|||
lib = Library(':memory:')
|
||||
beets.ui._raw_main(list(args), lib)
|
||||
|
||||
def run_with_output(self, *args):
|
||||
with capture_stdout() as out:
|
||||
self.run_command(*args)
|
||||
return out.getvalue()
|
||||
|
||||
def create_temp_dir(self):
|
||||
"""Create a temporary directory and assign it into
|
||||
`self.temp_dir`. Call `remove_temp_dir` later to delete it.
|
||||
|
|
|
|||
|
|
@ -14,8 +14,6 @@
|
|||
|
||||
"""Tests for autotagging functionality.
|
||||
"""
|
||||
import os
|
||||
import shutil
|
||||
import re
|
||||
import copy
|
||||
|
||||
|
|
@ -486,128 +484,6 @@ class AlbumDistanceTest(_common.TestCase):
|
|||
self.assertEqual(dist, 0)
|
||||
|
||||
|
||||
def _mkmp3(path):
|
||||
shutil.copyfile(os.path.join(_common.RSRC, 'min.mp3'), path)
|
||||
|
||||
|
||||
class AlbumsInDirTest(_common.TestCase):
|
||||
def setUp(self):
|
||||
super(AlbumsInDirTest, self).setUp()
|
||||
|
||||
# create a directory structure for testing
|
||||
self.base = os.path.abspath(os.path.join(self.temp_dir, 'tempdir'))
|
||||
os.mkdir(self.base)
|
||||
|
||||
os.mkdir(os.path.join(self.base, 'album1'))
|
||||
os.mkdir(os.path.join(self.base, 'album2'))
|
||||
os.mkdir(os.path.join(self.base, 'more'))
|
||||
os.mkdir(os.path.join(self.base, 'more', 'album3'))
|
||||
os.mkdir(os.path.join(self.base, 'more', 'album4'))
|
||||
|
||||
_mkmp3(os.path.join(self.base, 'album1', 'album1song1.mp3'))
|
||||
_mkmp3(os.path.join(self.base, 'album1', 'album1song2.mp3'))
|
||||
_mkmp3(os.path.join(self.base, 'album2', 'album2song.mp3'))
|
||||
_mkmp3(os.path.join(self.base, 'more', 'album3', 'album3song.mp3'))
|
||||
_mkmp3(os.path.join(self.base, 'more', 'album4', 'album4song.mp3'))
|
||||
|
||||
def test_finds_all_albums(self):
|
||||
albums = list(autotag.albums_in_dir(self.base))
|
||||
self.assertEqual(len(albums), 4)
|
||||
|
||||
def test_separates_contents(self):
|
||||
found = []
|
||||
for _, album in autotag.albums_in_dir(self.base):
|
||||
found.append(re.search(r'album(.)song', album[0].path).group(1))
|
||||
self.assertTrue('1' in found)
|
||||
self.assertTrue('2' in found)
|
||||
self.assertTrue('3' in found)
|
||||
self.assertTrue('4' in found)
|
||||
|
||||
def test_finds_multiple_songs(self):
|
||||
for _, album in autotag.albums_in_dir(self.base):
|
||||
n = re.search(r'album(.)song', album[0].path).group(1)
|
||||
if n == '1':
|
||||
self.assertEqual(len(album), 2)
|
||||
else:
|
||||
self.assertEqual(len(album), 1)
|
||||
|
||||
|
||||
class MultiDiscAlbumsInDirTest(_common.TestCase):
|
||||
def setUp(self):
|
||||
super(MultiDiscAlbumsInDirTest, self).setUp()
|
||||
|
||||
self.base = os.path.abspath(os.path.join(self.temp_dir, 'tempdir'))
|
||||
os.mkdir(self.base)
|
||||
|
||||
self.dirs = [
|
||||
# Nested album, multiple subdirs.
|
||||
# Also, false positive marker in root dir, and subtitle for disc 3.
|
||||
os.path.join(self.base, 'ABCD1234'),
|
||||
os.path.join(self.base, 'ABCD1234', 'cd 1'),
|
||||
os.path.join(self.base, 'ABCD1234', 'cd 3 - bonus'),
|
||||
|
||||
# Nested album, single subdir.
|
||||
# Also, punctuation between marker and disc number.
|
||||
os.path.join(self.base, 'album'),
|
||||
os.path.join(self.base, 'album', 'cd _ 1'),
|
||||
|
||||
# Flattened album, case typo.
|
||||
# Also, false positive marker in parent dir.
|
||||
os.path.join(self.base, 'artist [CD5]'),
|
||||
os.path.join(self.base, 'artist [CD5]', 'CAT disc 1'),
|
||||
os.path.join(self.base, 'artist [CD5]', 'CAt disc 2'),
|
||||
|
||||
# Single disc album, sorted between CAT discs.
|
||||
os.path.join(self.base, 'artist [CD5]', 'CATS'),
|
||||
]
|
||||
self.files = [
|
||||
os.path.join(self.base, 'ABCD1234', 'cd 1', 'song1.mp3'),
|
||||
os.path.join(self.base, 'ABCD1234', 'cd 3 - bonus', 'song2.mp3'),
|
||||
os.path.join(self.base, 'ABCD1234', 'cd 3 - bonus', 'song3.mp3'),
|
||||
os.path.join(self.base, 'album', 'cd _ 1', 'song4.mp3'),
|
||||
os.path.join(self.base, 'artist [CD5]', 'CAT disc 1', 'song5.mp3'),
|
||||
os.path.join(self.base, 'artist [CD5]', 'CAt disc 2', 'song6.mp3'),
|
||||
os.path.join(self.base, 'artist [CD5]', 'CATS', 'song7.mp3'),
|
||||
]
|
||||
|
||||
for path in self.dirs:
|
||||
os.mkdir(path)
|
||||
for path in self.files:
|
||||
_mkmp3(path)
|
||||
|
||||
def test_coalesce_nested_album_multiple_subdirs(self):
|
||||
albums = list(autotag.albums_in_dir(self.base))
|
||||
self.assertEquals(len(albums), 4)
|
||||
root, items = albums[0]
|
||||
self.assertEquals(root, self.dirs[0:3])
|
||||
self.assertEquals(len(items), 3)
|
||||
|
||||
def test_coalesce_nested_album_single_subdir(self):
|
||||
albums = list(autotag.albums_in_dir(self.base))
|
||||
root, items = albums[1]
|
||||
self.assertEquals(root, self.dirs[3:5])
|
||||
self.assertEquals(len(items), 1)
|
||||
|
||||
def test_coalesce_flattened_album_case_typo(self):
|
||||
albums = list(autotag.albums_in_dir(self.base))
|
||||
root, items = albums[2]
|
||||
self.assertEquals(root, self.dirs[6:8])
|
||||
self.assertEquals(len(items), 2)
|
||||
|
||||
def test_single_disc_album(self):
|
||||
albums = list(autotag.albums_in_dir(self.base))
|
||||
root, items = albums[3]
|
||||
self.assertEquals(root, self.dirs[8:])
|
||||
self.assertEquals(len(items), 1)
|
||||
|
||||
def test_do_not_yield_empty_album(self):
|
||||
# Remove all the MP3s.
|
||||
for path in self.files:
|
||||
os.remove(path)
|
||||
albums = list(autotag.albums_in_dir(self.base))
|
||||
self.assertEquals(len(albums), 0)
|
||||
|
||||
|
||||
class AssignmentTest(unittest.TestCase):
|
||||
def item(self, title, track):
|
||||
return Item(
|
||||
|
|
|
|||
|
|
@ -255,54 +255,54 @@ class FormatTest(_common.TestCase):
|
|||
def test_format_fixed_field(self):
|
||||
model = TestModel1()
|
||||
model.field_one = u'caf\xe9'
|
||||
value = model._get_formatted('field_one')
|
||||
value = model.formatted().get('field_one')
|
||||
self.assertEqual(value, u'caf\xe9')
|
||||
|
||||
def test_format_flex_field(self):
|
||||
model = TestModel1()
|
||||
model.other_field = u'caf\xe9'
|
||||
value = model._get_formatted('other_field')
|
||||
value = model.formatted().get('other_field')
|
||||
self.assertEqual(value, u'caf\xe9')
|
||||
|
||||
def test_format_flex_field_bytes(self):
|
||||
model = TestModel1()
|
||||
model.other_field = u'caf\xe9'.encode('utf8')
|
||||
value = model._get_formatted('other_field')
|
||||
value = model.formatted().get('other_field')
|
||||
self.assertTrue(isinstance(value, unicode))
|
||||
self.assertEqual(value, u'caf\xe9')
|
||||
|
||||
def test_format_unset_field(self):
|
||||
model = TestModel1()
|
||||
value = model._get_formatted('other_field')
|
||||
value = model.formatted().get('other_field')
|
||||
self.assertEqual(value, u'')
|
||||
|
||||
def test_format_typed_flex_field(self):
|
||||
model = TestModel1()
|
||||
model.some_float_field = 3.14159265358979
|
||||
value = model._get_formatted('some_float_field')
|
||||
value = model.formatted().get('some_float_field')
|
||||
self.assertEqual(value, u'3.1')
|
||||
|
||||
|
||||
class FormattedMappingTest(_common.TestCase):
|
||||
def test_keys_equal_model_keys(self):
|
||||
model = TestModel1()
|
||||
formatted = model._formatted_mapping()
|
||||
formatted = model.formatted()
|
||||
self.assertEqual(set(model.keys(True)), set(formatted.keys()))
|
||||
|
||||
def test_get_unset_field(self):
|
||||
model = TestModel1()
|
||||
formatted = model._formatted_mapping()
|
||||
formatted = model.formatted()
|
||||
with self.assertRaises(KeyError):
|
||||
formatted['other_field']
|
||||
|
||||
def test_get_method_with_none_default(self):
|
||||
def test_get_method_with_default(self):
|
||||
model = TestModel1()
|
||||
formatted = model._formatted_mapping()
|
||||
self.assertIsNone(formatted.get('other_field'))
|
||||
formatted = model.formatted()
|
||||
self.assertEqual(formatted.get('other_field'), u'')
|
||||
|
||||
def test_get_method_with_specified_default(self):
|
||||
model = TestModel1()
|
||||
formatted = model._formatted_mapping()
|
||||
formatted = model.formatted()
|
||||
self.assertEqual(formatted.get('other_field', 'default'), 'default')
|
||||
|
||||
|
||||
|
|
@ -412,6 +412,37 @@ class QueryFromStringsTest(_common.TestCase):
|
|||
self.assertIsInstance(q.subqueries[0], dbcore.query.NumericQuery)
|
||||
|
||||
|
||||
class SortFromStringsTest(_common.TestCase):
|
||||
def sfs(self, strings):
|
||||
return dbcore.queryparse.sort_from_strings(
|
||||
TestModel1,
|
||||
strings,
|
||||
)
|
||||
|
||||
def test_zero_parts(self):
|
||||
s = self.sfs([])
|
||||
self.assertIsNone(s)
|
||||
|
||||
def test_one_parts(self):
|
||||
s = self.sfs(['field+'])
|
||||
self.assertIsInstance(s, dbcore.query.Sort)
|
||||
|
||||
def test_two_parts(self):
|
||||
s = self.sfs(['field+', 'another_field-'])
|
||||
self.assertIsInstance(s, dbcore.query.MultipleSort)
|
||||
self.assertEqual(len(s.sorts), 2)
|
||||
|
||||
def test_fixed_field_sort(self):
|
||||
s = self.sfs(['field_one+'])
|
||||
self.assertIsInstance(s, dbcore.query.MultipleSort)
|
||||
self.assertIsInstance(s.sorts[0], dbcore.query.FixedFieldSort)
|
||||
|
||||
def test_flex_field_sort(self):
|
||||
s = self.sfs(['flex_field+'])
|
||||
self.assertIsInstance(s, dbcore.query.MultipleSort)
|
||||
self.assertIsInstance(s.sorts[0], dbcore.query.FlexFieldSort)
|
||||
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromName(__name__)
|
||||
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@
|
|||
"""Tests for the general importer functionality.
|
||||
"""
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import StringIO
|
||||
from tempfile import mkstemp
|
||||
|
|
@ -26,6 +27,7 @@ import _common
|
|||
from _common import unittest
|
||||
from helper import TestImportSession, TestHelper, has_program
|
||||
from beets import importer
|
||||
from beets.importer import albums_in_dir
|
||||
from beets.mediafile import MediaFile
|
||||
from beets import autotag
|
||||
from beets.autotag import AlbumInfo, TrackInfo, AlbumMatch
|
||||
|
|
@ -1197,6 +1199,135 @@ class IncrementalImportTest(unittest.TestCase, TestHelper):
|
|||
importer.run()
|
||||
self.assertEqual(len(self.lib.items()), 2)
|
||||
|
||||
def test_invalid_state_file(self):
|
||||
importer = self.create_importer()
|
||||
with open(self.config['statefile'].as_filename(), 'w') as f:
|
||||
f.write('000')
|
||||
importer.run()
|
||||
self.assertEqual(len(self.lib.albums()), 1)
|
||||
|
||||
|
||||
def _mkmp3(path):
|
||||
shutil.copyfile(os.path.join(_common.RSRC, 'min.mp3'), path)
|
||||
|
||||
|
||||
class AlbumsInDirTest(_common.TestCase):
|
||||
def setUp(self):
|
||||
super(AlbumsInDirTest, self).setUp()
|
||||
|
||||
# create a directory structure for testing
|
||||
self.base = os.path.abspath(os.path.join(self.temp_dir, 'tempdir'))
|
||||
os.mkdir(self.base)
|
||||
|
||||
os.mkdir(os.path.join(self.base, 'album1'))
|
||||
os.mkdir(os.path.join(self.base, 'album2'))
|
||||
os.mkdir(os.path.join(self.base, 'more'))
|
||||
os.mkdir(os.path.join(self.base, 'more', 'album3'))
|
||||
os.mkdir(os.path.join(self.base, 'more', 'album4'))
|
||||
|
||||
_mkmp3(os.path.join(self.base, 'album1', 'album1song1.mp3'))
|
||||
_mkmp3(os.path.join(self.base, 'album1', 'album1song2.mp3'))
|
||||
_mkmp3(os.path.join(self.base, 'album2', 'album2song.mp3'))
|
||||
_mkmp3(os.path.join(self.base, 'more', 'album3', 'album3song.mp3'))
|
||||
_mkmp3(os.path.join(self.base, 'more', 'album4', 'album4song.mp3'))
|
||||
|
||||
def test_finds_all_albums(self):
|
||||
albums = list(albums_in_dir(self.base))
|
||||
self.assertEqual(len(albums), 4)
|
||||
|
||||
def test_separates_contents(self):
|
||||
found = []
|
||||
for _, album in albums_in_dir(self.base):
|
||||
found.append(re.search(r'album(.)song', album[0]).group(1))
|
||||
self.assertTrue('1' in found)
|
||||
self.assertTrue('2' in found)
|
||||
self.assertTrue('3' in found)
|
||||
self.assertTrue('4' in found)
|
||||
|
||||
def test_finds_multiple_songs(self):
|
||||
for _, album in albums_in_dir(self.base):
|
||||
n = re.search(r'album(.)song', album[0]).group(1)
|
||||
if n == '1':
|
||||
self.assertEqual(len(album), 2)
|
||||
else:
|
||||
self.assertEqual(len(album), 1)
|
||||
|
||||
|
||||
class MultiDiscAlbumsInDirTest(_common.TestCase):
|
||||
def setUp(self):
|
||||
super(MultiDiscAlbumsInDirTest, self).setUp()
|
||||
|
||||
self.base = os.path.abspath(os.path.join(self.temp_dir, 'tempdir'))
|
||||
os.mkdir(self.base)
|
||||
|
||||
self.dirs = [
|
||||
# Nested album, multiple subdirs.
|
||||
# Also, false positive marker in root dir, and subtitle for disc 3.
|
||||
os.path.join(self.base, 'ABCD1234'),
|
||||
os.path.join(self.base, 'ABCD1234', 'cd 1'),
|
||||
os.path.join(self.base, 'ABCD1234', 'cd 3 - bonus'),
|
||||
|
||||
# Nested album, single subdir.
|
||||
# Also, punctuation between marker and disc number.
|
||||
os.path.join(self.base, 'album'),
|
||||
os.path.join(self.base, 'album', 'cd _ 1'),
|
||||
|
||||
# Flattened album, case typo.
|
||||
# Also, false positive marker in parent dir.
|
||||
os.path.join(self.base, 'artist [CD5]'),
|
||||
os.path.join(self.base, 'artist [CD5]', 'CAT disc 1'),
|
||||
os.path.join(self.base, 'artist [CD5]', 'CAt disc 2'),
|
||||
|
||||
# Single disc album, sorted between CAT discs.
|
||||
os.path.join(self.base, 'artist [CD5]', 'CATS'),
|
||||
]
|
||||
self.files = [
|
||||
os.path.join(self.base, 'ABCD1234', 'cd 1', 'song1.mp3'),
|
||||
os.path.join(self.base, 'ABCD1234', 'cd 3 - bonus', 'song2.mp3'),
|
||||
os.path.join(self.base, 'ABCD1234', 'cd 3 - bonus', 'song3.mp3'),
|
||||
os.path.join(self.base, 'album', 'cd _ 1', 'song4.mp3'),
|
||||
os.path.join(self.base, 'artist [CD5]', 'CAT disc 1', 'song5.mp3'),
|
||||
os.path.join(self.base, 'artist [CD5]', 'CAt disc 2', 'song6.mp3'),
|
||||
os.path.join(self.base, 'artist [CD5]', 'CATS', 'song7.mp3'),
|
||||
]
|
||||
|
||||
for path in self.dirs:
|
||||
os.mkdir(path)
|
||||
for path in self.files:
|
||||
_mkmp3(path)
|
||||
|
||||
def test_coalesce_nested_album_multiple_subdirs(self):
|
||||
albums = list(albums_in_dir(self.base))
|
||||
self.assertEquals(len(albums), 4)
|
||||
root, items = albums[0]
|
||||
self.assertEquals(root, self.dirs[0:3])
|
||||
self.assertEquals(len(items), 3)
|
||||
|
||||
def test_coalesce_nested_album_single_subdir(self):
|
||||
albums = list(albums_in_dir(self.base))
|
||||
root, items = albums[1]
|
||||
self.assertEquals(root, self.dirs[3:5])
|
||||
self.assertEquals(len(items), 1)
|
||||
|
||||
def test_coalesce_flattened_album_case_typo(self):
|
||||
albums = list(albums_in_dir(self.base))
|
||||
root, items = albums[2]
|
||||
self.assertEquals(root, self.dirs[6:8])
|
||||
self.assertEquals(len(items), 2)
|
||||
|
||||
def test_single_disc_album(self):
|
||||
albums = list(albums_in_dir(self.base))
|
||||
root, items = albums[3]
|
||||
self.assertEquals(root, self.dirs[8:])
|
||||
self.assertEquals(len(items), 1)
|
||||
|
||||
def test_do_not_yield_empty_album(self):
|
||||
# Remove all the MP3s.
|
||||
for path in self.files:
|
||||
os.remove(path)
|
||||
albums = list(albums_in_dir(self.base))
|
||||
self.assertEquals(len(albums), 0)
|
||||
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromName(__name__)
|
||||
|
|
|
|||
test/test_info.py (new file, 97 lines)
|
|
@ -0,0 +1,97 @@
|
|||
# This file is part of beets.
|
||||
# Copyright 2014, Thomas Scholtes.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
|
||||
from _common import unittest
|
||||
from helper import TestHelper
|
||||
|
||||
from beets.mediafile import MediaFile
|
||||
|
||||
|
||||
class InfoTest(unittest.TestCase, TestHelper):
|
||||
|
||||
def setUp(self):
|
||||
self.setup_beets()
|
||||
self.load_plugins('info')
|
||||
|
||||
def tearDown(self):
|
||||
self.unload_plugins()
|
||||
self.teardown_beets()
|
||||
|
||||
def run_command(self, *args):
|
||||
super(InfoTest, self).run_command('info', *args)
|
||||
|
||||
def test_path(self):
|
||||
path = self.create_mediafile_fixture()
|
||||
|
||||
mediafile = MediaFile(path)
|
||||
mediafile.albumartist = 'AAA'
|
||||
mediafile.disctitle = 'DDD'
|
||||
mediafile.genres = ['a', 'b', 'c']
|
||||
mediafile.composer = None
|
||||
mediafile.save()
|
||||
|
||||
out = self.run_with_output(path)
|
||||
self.assertIn(path, out)
|
||||
self.assertIn('albumartist: AAA', out)
|
||||
self.assertIn('disctitle: DDD', out)
|
||||
self.assertIn('genres: a; b; c', out)
|
||||
self.assertNotIn('composer:', out)
|
||||
|
||||
def test_item_query(self):
|
||||
items = self.add_item_fixtures(count=2)
|
||||
items[0].album = 'xxxx'
|
||||
items[0].write()
|
||||
items[0].album = 'yyyy'
|
||||
items[0].store()
|
||||
|
||||
out = self.run_with_output('album:yyyy')
|
||||
self.assertIn(items[0].path, out)
|
||||
self.assertIn('album: xxxx', out)
|
||||
|
||||
self.assertNotIn(items[1].path, out)
|
||||
|
||||
def test_item_library_query(self):
|
||||
item, = self.add_item_fixtures()
|
||||
item.album = 'xxxx'
|
||||
item.store()
|
||||
|
||||
out = self.run_with_output('--library', 'album:xxxx')
|
||||
self.assertIn(item.path, out)
|
||||
self.assertIn('album: xxxx', out)
|
||||
|
||||
def test_collect_item_and_path(self):
|
||||
path = self.create_mediafile_fixture()
|
||||
mediafile = MediaFile(path)
|
||||
item, = self.add_item_fixtures()
|
||||
|
||||
item.album = mediafile.album = 'AAA'
|
||||
item.tracktotal = mediafile.tracktotal = 5
|
||||
item.title = 'TTT'
|
||||
mediafile.title = 'SSS'
|
||||
|
||||
item.write()
|
||||
item.store()
|
||||
mediafile.save()
|
||||
|
||||
out = self.run_with_output('--summarize', 'album:AAA', path)
|
||||
self.assertIn('album: AAA', out)
|
||||
self.assertIn('tracktotal: 5', out)
|
||||
self.assertIn('title: [various]', out)
|
||||
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromName(__name__)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(defaultTest='suite')
|
||||
|
|
@ -340,37 +340,37 @@ class DestinationTest(_common.TestCase):
|
|||
with _common.platform_posix():
|
||||
name = os.path.join('a', 'b')
|
||||
self.i.title = name
|
||||
newname = self.i._get_formatted('title')
|
||||
newname = self.i.formatted().get('title')
|
||||
self.assertEqual(name, newname)
|
||||
|
||||
def test_get_formatted_pads_with_zero(self):
|
||||
with _common.platform_posix():
|
||||
self.i.track = 1
|
||||
name = self.i._get_formatted('track')
|
||||
name = self.i.formatted().get('track')
|
||||
self.assertTrue(name.startswith('0'))
|
||||
|
||||
def test_get_formatted_uses_kbps_bitrate(self):
|
||||
with _common.platform_posix():
|
||||
self.i.bitrate = 12345
|
||||
val = self.i._get_formatted('bitrate')
|
||||
val = self.i.formatted().get('bitrate')
|
||||
self.assertEqual(val, u'12kbps')
|
||||
|
||||
def test_get_formatted_uses_khz_samplerate(self):
|
||||
with _common.platform_posix():
|
||||
self.i.samplerate = 12345
|
||||
val = self.i._get_formatted('samplerate')
|
||||
val = self.i.formatted().get('samplerate')
|
||||
self.assertEqual(val, u'12kHz')
|
||||
|
||||
def test_get_formatted_datetime(self):
|
||||
with _common.platform_posix():
|
||||
self.i.added = 1368302461.210265
|
||||
val = self.i._get_formatted('added')
|
||||
val = self.i.formatted().get('added')
|
||||
self.assertTrue(val.startswith('2013'))
|
||||
|
||||
def test_get_formatted_none(self):
|
||||
with _common.platform_posix():
|
||||
self.i.some_other_field = None
|
||||
val = self.i._get_formatted('some_other_field')
|
||||
val = self.i.formatted().get('some_other_field')
|
||||
self.assertEqual(val, u'')
|
||||
|
||||
def test_artist_falls_back_to_albumartist(self):
|
||||
|
|
@ -462,23 +462,35 @@ class DestinationTest(_common.TestCase):
|
|||
|
||||
class ItemFormattedMappingTest(_common.LibTestCase):
|
||||
def test_formatted_item_value(self):
|
||||
formatted = self.i._formatted_mapping()
|
||||
formatted = self.i.formatted()
|
||||
self.assertEqual(formatted['artist'], 'the artist')
|
||||
|
||||
def test_get_unset_field(self):
|
||||
formatted = self.i._formatted_mapping()
|
||||
formatted = self.i.formatted()
|
||||
with self.assertRaises(KeyError):
|
||||
formatted['other_field']
|
||||
|
||||
def test_get_method_with_none_default(self):
|
||||
formatted = self.i._formatted_mapping()
|
||||
self.assertIsNone(formatted.get('other_field'))
|
||||
def test_get_method_with_default(self):
|
||||
formatted = self.i.formatted()
|
||||
self.assertEqual(formatted.get('other_field'), u'')
|
||||
|
||||
def test_get_method_with_specified_default(self):
|
||||
formatted = self.i._formatted_mapping()
|
||||
formatted = self.i.formatted()
|
||||
self.assertEqual(formatted.get('other_field', 'default'), 'default')
|
||||
|
||||
def test_album_field_overrides_item_field(self):
|
||||
def test_item_precedence(self):
|
||||
album = self.lib.add_album([self.i])
|
||||
album['artist'] = 'foo'
|
||||
album.store()
|
||||
self.assertNotEqual('foo', self.i.formatted().get('artist'))
|
||||
|
||||
def test_album_flex_field(self):
|
||||
album = self.lib.add_album([self.i])
|
||||
album['flex'] = 'foo'
|
||||
album.store()
|
||||
self.assertEqual('foo', self.i.formatted().get('flex'))
|
||||
|
||||
def test_album_field_overrides_item_field_for_path(self):
|
||||
# Make the album inconsistent with the item.
|
||||
album = self.lib.add_album([self.i])
|
||||
album.album = 'foo'
|
||||
|
|
@ -487,23 +499,23 @@ class ItemFormattedMappingTest(_common.LibTestCase):
|
|||
self.i.store()
|
||||
|
||||
# Ensure the album takes precedence.
|
||||
formatted = self.i._formatted_mapping()
|
||||
formatted = self.i.formatted(for_path=True)
|
||||
self.assertEqual(formatted['album'], 'foo')
|
||||
|
||||
def test_artist_falls_back_to_albumartist(self):
|
||||
self.i.artist = ''
|
||||
formatted = self.i._formatted_mapping()
|
||||
formatted = self.i.formatted()
|
||||
self.assertEqual(formatted['artist'], 'the album artist')
|
||||
|
||||
def test_albumartist_falls_back_to_artist(self):
|
||||
self.i.albumartist = ''
|
||||
formatted = self.i._formatted_mapping()
|
||||
formatted = self.i.formatted()
|
||||
self.assertEqual(formatted['albumartist'], 'the artist')
|
||||
|
||||
def test_both_artist_and_albumartist_empty(self):
|
||||
self.i.artist = ''
|
||||
self.i.albumartist = ''
|
||||
formatted = self.i._formatted_mapping()
|
||||
formatted = self.i.formatted()
|
||||
self.assertEqual(formatted['albumartist'], '')
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@
|
|||
|
||||
from _common import unittest
|
||||
from beetsplug import lyrics
|
||||
from beets.library import Item
|
||||
|
||||
|
||||
class LyricsPluginTest(unittest.TestCase):
|
||||
|
|
@ -23,53 +24,82 @@ class LyricsPluginTest(unittest.TestCase):
|
|||
"""Set up configuration"""
|
||||
lyrics.LyricsPlugin()
|
||||
|
||||
def test_split_multi_titles(self):
|
||||
self.assertEqual(lyrics.split_multi_titles('song1 / song2 / song3'),
|
||||
['song1', 'song2', 'song3'])
|
||||
self.assertEqual(lyrics.split_multi_titles('song1/song2 song3'),
|
||||
['song1', 'song2 song3'])
|
||||
self.assertEqual(lyrics.split_multi_titles('song1 song2'),
|
||||
None)
|
||||
def test_search_artist(self):
|
||||
item = Item(artist='Alice ft. Bob', title='song')
|
||||
self.assertIn(('Alice ft. Bob', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
self.assertIn(('Alice', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
|
||||
def test_remove_ft_artist_suffix(self):
|
||||
self.assertEqual(
|
||||
lyrics.remove_ft_artist_suffix('Bob featuring Marcia'),
|
||||
'Bob'
|
||||
)
|
||||
self.assertEqual(
|
||||
lyrics.remove_ft_artist_suffix('Bob feat Marcia'),
|
||||
'Bob'
|
||||
)
|
||||
self.assertEqual(
|
||||
lyrics.remove_ft_artist_suffix('Bob and Marcia'),
|
||||
'Bob'
|
||||
)
|
||||
self.assertEqual(
|
||||
lyrics.remove_ft_artist_suffix('Bob feat. Marcia'),
|
||||
'Bob'
|
||||
)
|
||||
self.assertEqual(
|
||||
lyrics.remove_ft_artist_suffix('Bob & Marcia'),
|
||||
'Bob'
|
||||
)
|
||||
self.assertEqual(
|
||||
lyrics.remove_ft_artist_suffix('Bob feats Marcia'),
|
||||
'Bob feats Marcia'
|
||||
)
|
||||
item = Item(artist='Alice feat Bob', title='song')
|
||||
self.assertIn(('Alice feat Bob', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
self.assertIn(('Alice', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
|
||||
def test_remove_parenthesized_suffix(self):
|
||||
self.assertEqual(
|
||||
lyrics.remove_parenthesized_suffix('Song (live)'),
|
||||
'Song'
|
||||
)
|
||||
self.assertEqual(
|
||||
lyrics.remove_parenthesized_suffix('Song (live) (new)'),
|
||||
'Song'
|
||||
)
|
||||
self.assertEqual(
|
||||
lyrics.remove_parenthesized_suffix('Song (live (new))'),
|
||||
'Song'
|
||||
)
|
||||
item = Item(artist='Alice feat. Bob', title='song')
|
||||
self.assertIn(('Alice feat. Bob', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
self.assertIn(('Alice', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
|
||||
item = Item(artist='Alice feats Bob', title='song')
|
||||
self.assertIn(('Alice feats Bob', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
self.assertNotIn(('Alice', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
|
||||
item = Item(artist='Alice featuring Bob', title='song')
|
||||
self.assertIn(('Alice featuring Bob', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
self.assertIn(('Alice', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
|
||||
item = Item(artist='Alice & Bob', title='song')
|
||||
self.assertIn(('Alice & Bob', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
self.assertIn(('Alice', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
|
||||
item = Item(artist='Alice and Bob', title='song')
|
||||
self.assertIn(('Alice and Bob', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
self.assertIn(('Alice', ['song']),
|
||||
lyrics.search_pairs(item))
|
||||
|
||||
def test_search_pairs_multi_titles(self):
|
||||
item = Item(title='1 / 2', artist='A')
|
||||
self.assertIn(('A', ['1 / 2']), lyrics.search_pairs(item))
|
||||
self.assertIn(('A', ['1', '2']), lyrics.search_pairs(item))
|
||||
|
||||
item = Item(title='1/2', artist='A')
|
||||
self.assertIn(('A', ['1/2']), lyrics.search_pairs(item))
|
||||
self.assertIn(('A', ['1', '2']), lyrics.search_pairs(item))
|
||||
|
||||
def test_search_pairs_titles(self):
|
||||
item = Item(title='Song (live)', artist='A')
|
||||
self.assertIn(('A', ['Song']), lyrics.search_pairs(item))
|
||||
self.assertIn(('A', ['Song (live)']), lyrics.search_pairs(item))
|
||||
|
||||
item = Item(title='Song (live) (new)', artist='A')
|
||||
self.assertIn(('A', ['Song']), lyrics.search_pairs(item))
|
||||
self.assertIn(('A', ['Song (live) (new)']), lyrics.search_pairs(item))
|
||||
|
||||
item = Item(title='Song (live (new))', artist='A')
|
||||
self.assertIn(('A', ['Song']), lyrics.search_pairs(item))
|
||||
self.assertIn(('A', ['Song (live (new))']), lyrics.search_pairs(item))
|
||||
|
||||
item = Item(title='Song ft. B', artist='A')
|
||||
self.assertIn(('A', ['Song']), lyrics.search_pairs(item))
|
||||
self.assertIn(('A', ['Song ft. B']), lyrics.search_pairs(item))
|
||||
|
||||
item = Item(title='Song featuring B', artist='A')
|
||||
self.assertIn(('A', ['Song']), lyrics.search_pairs(item))
|
||||
self.assertIn(('A', ['Song featuring B']), lyrics.search_pairs(item))
|
||||
|
||||
item = Item(title='Song and B', artist='A')
|
||||
self.assertNotIn(('A', ['Song']), lyrics.search_pairs(item))
|
||||
self.assertIn(('A', ['Song and B']), lyrics.search_pairs(item))
|
||||
|
||||
def test_remove_credits(self):
|
||||
self.assertEqual(
|
||||
|
|
|
|||
test/test_sort.py (new file, 334 lines)
|
|
@ -0,0 +1,334 @@
|
|||
# This file is part of beets.
|
||||
# Copyright 2013, Adrian Sampson.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
|
||||
"""Various tests for querying the library database.
|
||||
"""
|
||||
import _common
|
||||
from _common import unittest
|
||||
import beets.library
|
||||
from beets import dbcore
|
||||
|
||||
|
||||
# A test case class providing a library with some dummy data and some
|
||||
# assertions involving that data.
|
||||
class DummyDataTestCase(_common.TestCase):
|
||||
def setUp(self):
|
||||
super(DummyDataTestCase, self).setUp()
|
||||
self.lib = beets.library.Library(':memory:')
|
||||
|
||||
albums = [_common.album() for _ in range(3)]
|
||||
albums[0].album = "album A"
|
||||
albums[0].genre = "Rock"
|
||||
albums[0].year = "2001"
|
||||
albums[0].flex1 = "flex1-1"
|
||||
albums[0].flex2 = "flex2-A"
|
||||
albums[1].album = "album B"
|
||||
albums[1].genre = "Rock"
|
||||
albums[1].year = "2001"
|
||||
albums[1].flex1 = "flex1-2"
|
||||
albums[1].flex2 = "flex2-A"
|
||||
albums[2].album = "album C"
|
||||
albums[2].genre = "Jazz"
|
||||
albums[2].year = "2005"
|
||||
albums[2].flex1 = "flex1-1"
|
||||
albums[2].flex2 = "flex2-B"
|
||||
for album in albums:
|
||||
self.lib.add(album)
|
||||
|
||||
items = [_common.item() for _ in range(4)]
|
||||
items[0].title = 'foo bar'
|
||||
items[0].artist = 'one'
|
||||
items[0].album = 'baz'
|
||||
items[0].year = 2001
|
||||
items[0].comp = True
|
||||
items[0].flex1 = "flex1-0"
|
||||
items[0].flex2 = "flex2-A"
|
||||
items[0].album_id = albums[0].id
|
||||
items[1].title = 'baz qux'
|
||||
items[1].artist = 'two'
        items[1].album = 'baz'
        items[1].year = 2002
        items[1].comp = True
        items[1].flex1 = "flex1-1"
        items[1].flex2 = "flex2-A"
        items[1].album_id = albums[0].id
        items[2].title = 'beets 4 eva'
        items[2].artist = 'three'
        items[2].album = 'foo'
        items[2].year = 2003
        items[2].comp = False
        items[2].flex1 = "flex1-2"
        items[2].flex2 = "flex1-B"
        items[2].album_id = albums[1].id
        items[3].title = 'beets 4 eva'
        items[3].artist = 'three'
        items[3].album = 'foo2'
        items[3].year = 2004
        items[3].comp = False
        items[3].flex1 = "flex1-2"
        items[3].flex2 = "flex1-C"
        items[3].album_id = albums[2].id
        for item in items:
            self.lib.add(item)


class SortFixedFieldTest(DummyDataTestCase):
    def test_sort_asc(self):
        q = ''
        sort = dbcore.query.FixedFieldSort("year", True)
        results = self.lib.items(q, sort)
        self.assertLessEqual(results[0]['year'], results[1]['year'])
        self.assertEqual(results[0]['year'], 2001)
        # same thing with query string
        q = 'year+'
        results2 = self.lib.items(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)

    def test_sort_desc(self):
        q = ''
        sort = dbcore.query.FixedFieldSort("year", False)
        results = self.lib.items(q, sort)
        self.assertGreaterEqual(results[0]['year'], results[1]['year'])
        self.assertEqual(results[0]['year'], 2004)
        # same thing with query string
        q = 'year-'
        results2 = self.lib.items(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)

    def test_sort_two_field_asc(self):
        q = ''
        s1 = dbcore.query.FixedFieldSort("album", True)
        s2 = dbcore.query.FixedFieldSort("year", True)
        sort = dbcore.query.MultipleSort()
        sort.add_criteria(s1)
        sort.add_criteria(s2)
        results = self.lib.items(q, sort)
        self.assertLessEqual(results[0]['album'], results[1]['album'])
        self.assertLessEqual(results[1]['album'], results[2]['album'])
        self.assertEqual(results[0]['album'], 'baz')
        self.assertEqual(results[1]['album'], 'baz')
        self.assertLessEqual(results[0]['year'], results[1]['year'])
        # same thing with query string
        q = 'album+ year+'
        results2 = self.lib.items(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)


class SortFlexFieldTest(DummyDataTestCase):
    def test_sort_asc(self):
        q = ''
        sort = dbcore.query.FlexFieldSort(beets.library.Item, "flex1", True)
        results = self.lib.items(q, sort)
        self.assertLessEqual(results[0]['flex1'], results[1]['flex1'])
        self.assertEqual(results[0]['flex1'], 'flex1-0')
        # same thing with query string
        q = 'flex1+'
        results2 = self.lib.items(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)

    def test_sort_desc(self):
        q = ''
        sort = dbcore.query.FlexFieldSort(beets.library.Item, "flex1", False)
        results = self.lib.items(q, sort)
        self.assertGreaterEqual(results[0]['flex1'], results[1]['flex1'])
        self.assertGreaterEqual(results[1]['flex1'], results[2]['flex1'])
        self.assertGreaterEqual(results[2]['flex1'], results[3]['flex1'])
        self.assertEqual(results[0]['flex1'], 'flex1-2')
        # same thing with query string
        q = 'flex1-'
        results2 = self.lib.items(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)

    def test_sort_two_field(self):
        q = ''
        s1 = dbcore.query.FlexFieldSort(beets.library.Item, "flex2", False)
        s2 = dbcore.query.FlexFieldSort(beets.library.Item, "flex1", True)
        sort = dbcore.query.MultipleSort()
        sort.add_criteria(s1)
        sort.add_criteria(s2)
        results = self.lib.items(q, sort)
        self.assertGreaterEqual(results[0]['flex2'], results[1]['flex2'])
        self.assertGreaterEqual(results[1]['flex2'], results[2]['flex2'])
        self.assertEqual(results[0]['flex2'], 'flex2-A')
        self.assertEqual(results[1]['flex2'], 'flex2-A')
        self.assertLessEqual(results[0]['flex1'], results[1]['flex1'])
        # same thing with query string
        q = 'flex2- flex1+'
        results2 = self.lib.items(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)


class SortAlbumFixedFieldTest(DummyDataTestCase):
    def test_sort_asc(self):
        q = ''
        sort = dbcore.query.FixedFieldSort("year", True)
        results = self.lib.albums(q, sort)
        self.assertLessEqual(results[0]['year'], results[1]['year'])
        self.assertEqual(results[0]['year'], 2001)
        # same thing with query string
        q = 'year+'
        results2 = self.lib.albums(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)

    def test_sort_desc(self):
        q = ''
        sort = dbcore.query.FixedFieldSort("year", False)
        results = self.lib.albums(q, sort)
        self.assertGreaterEqual(results[0]['year'], results[1]['year'])
        self.assertEqual(results[0]['year'], 2005)
        # same thing with query string
        q = 'year-'
        results2 = self.lib.albums(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)

    def test_sort_two_field_asc(self):
        q = ''
        s1 = dbcore.query.FixedFieldSort("genre", True)
        s2 = dbcore.query.FixedFieldSort("album", True)
        sort = dbcore.query.MultipleSort()
        sort.add_criteria(s1)
        sort.add_criteria(s2)
        results = self.lib.albums(q, sort)
        self.assertLessEqual(results[0]['genre'], results[1]['genre'])
        self.assertLessEqual(results[1]['genre'], results[2]['genre'])
        self.assertEqual(results[1]['genre'], 'Rock')
        self.assertEqual(results[2]['genre'], 'Rock')
        self.assertLessEqual(results[1]['album'], results[2]['album'])
        # same thing with query string
        q = 'genre+ album+'
        results2 = self.lib.albums(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)


class SortAlbumFlexdFieldTest(DummyDataTestCase):
    def test_sort_asc(self):
        q = ''
        sort = dbcore.query.FlexFieldSort(beets.library.Album, "flex1", True)
        results = self.lib.albums(q, sort)
        self.assertLessEqual(results[0]['flex1'], results[1]['flex1'])
        self.assertLessEqual(results[1]['flex1'], results[2]['flex1'])
        # same thing with query string
        q = 'flex1+'
        results2 = self.lib.albums(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)

    def test_sort_desc(self):
        q = ''
        sort = dbcore.query.FlexFieldSort(beets.library.Album, "flex1", False)
        results = self.lib.albums(q, sort)
        self.assertGreaterEqual(results[0]['flex1'], results[1]['flex1'])
        self.assertGreaterEqual(results[1]['flex1'], results[2]['flex1'])
        # same thing with query string
        q = 'flex1-'
        results2 = self.lib.albums(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)

    def test_sort_two_field_asc(self):
        q = ''
        s1 = dbcore.query.FlexFieldSort(beets.library.Album, "flex2", True)
        s2 = dbcore.query.FlexFieldSort(beets.library.Album, "flex1", True)
        sort = dbcore.query.MultipleSort()
        sort.add_criteria(s1)
        sort.add_criteria(s2)
        results = self.lib.albums(q, sort)
        self.assertLessEqual(results[0]['flex2'], results[1]['flex2'])
        self.assertLessEqual(results[1]['flex2'], results[2]['flex2'])
        self.assertEqual(results[0]['flex2'], 'flex2-A')
        self.assertEqual(results[1]['flex2'], 'flex2-A')
        self.assertLessEqual(results[0]['flex1'], results[1]['flex1'])
        # same thing with query string
        q = 'flex2+ flex1+'
        results2 = self.lib.albums(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)


class SortAlbumComputedFieldTest(DummyDataTestCase):
    def test_sort_asc(self):
        q = ''
        sort = dbcore.query.ComputedFieldSort(beets.library.Album, "path",
                                              True)
        results = self.lib.albums(q, sort)
        self.assertLessEqual(results[0]['path'], results[1]['path'])
        self.assertLessEqual(results[1]['path'], results[2]['path'])
        # same thing with query string
        q = 'path+'
        results2 = self.lib.albums(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)

    def test_sort_desc(self):
        q = ''
        sort = dbcore.query.ComputedFieldSort(beets.library.Album, "path",
                                              False)
        results = self.lib.albums(q, sort)
        self.assertGreaterEqual(results[0]['path'], results[1]['path'])
        self.assertGreaterEqual(results[1]['path'], results[2]['path'])
        # same thing with query string
        q = 'path-'
        results2 = self.lib.albums(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)


class SortCombinedFieldTest(DummyDataTestCase):
    def test_computed_first(self):
        q = ''
        s1 = dbcore.query.ComputedFieldSort(beets.library.Album, "path", True)
        s2 = dbcore.query.FixedFieldSort("year", True)
        sort = dbcore.query.MultipleSort()
        sort.add_criteria(s1)
        sort.add_criteria(s2)
        results = self.lib.albums(q, sort)
        self.assertLessEqual(results[0]['path'], results[1]['path'])
        self.assertLessEqual(results[1]['path'], results[2]['path'])
        q = 'path+ year+'
        results2 = self.lib.albums(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)

    def test_computed_second(self):
        q = ''
        s1 = dbcore.query.FixedFieldSort("year", True)
        s2 = dbcore.query.ComputedFieldSort(beets.library.Album, "path", True)
        sort = dbcore.query.MultipleSort()
        sort.add_criteria(s1)
        sort.add_criteria(s2)
        results = self.lib.albums(q, sort)
        self.assertLessEqual(results[0]['year'], results[1]['year'])
        self.assertLessEqual(results[1]['year'], results[2]['year'])
        self.assertLessEqual(results[0]['path'], results[1]['path'])
        q = 'year+ path+'
        results2 = self.lib.albums(q)
        for r1, r2 in zip(results, results2):
            self.assertEqual(r1.id, r2.id)


def suite():
    return unittest.TestLoader().loadTestsFromName(__name__)


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
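The sort tests above exercise the same behaviour through two interfaces: explicit dbcore.query sort objects built in Python, and the field+ / field- suffixes embedded in a query string. A minimal sketch of how the two forms are expected to line up, assuming only a throwaway in-memory Library created for illustration:

from beets import dbcore, library

lib = library.Library(':memory:')  # empty in-memory library, illustration only

# Programmatic form: sort albums by ascending year, then ascending album name.
sort = dbcore.query.MultipleSort()
sort.add_criteria(dbcore.query.FixedFieldSort('year', True))
sort.add_criteria(dbcore.query.FixedFieldSort('album', True))
by_object = lib.albums('', sort)

# Query-string form: a trailing '+' or '-' marks ascending or descending
# sort on that field and should yield the same ordering.
by_string = lib.albums('year+ album+')

Both calls should return albums in the same order; the tests above assert exactly that by comparing the ids of the two result sequences pairwise.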
132	test/test_ui.py
@@ -22,7 +22,7 @@ import platform

import _common
from _common import unittest
from helper import capture_stdout, has_program, TestHelper
from helper import capture_stdout, has_program, TestHelper, control_stdin

from beets import library
from beets import ui
@@ -141,100 +141,104 @@ class RemoveTest(_common.TestCase):
        self.assertFalse(os.path.exists(self.i.path))


class ModifyTest(_common.TestCase):
class ModifyTest(unittest.TestCase, TestHelper):

    def setUp(self):
        super(ModifyTest, self).setUp()
        self.setup_beets()
        self.add_album_fixture()

        self.io.install()
    def tearDown(self):
        self.teardown_beets()

        self.libdir = os.path.join(self.temp_dir, 'testlibdir')
    def modify(self, *args):
        with control_stdin('y'):
            ui._raw_main(['modify'] + list(args), self.lib)

        # Copy a file into the library.
        self.lib = library.Library(':memory:', self.libdir)
        self.i = library.Item.from_path(os.path.join(_common.RSRC, 'full.mp3'))
        self.lib.add(self.i)
        self.i.move(copy=True)
        self.album = self.lib.add_album([self.i])
    # Item tests

    def _modify(self, mods=(), dels=(), query=(), write=False, move=False,
                album=False):
        self.io.addinput('y')
        commands.modify_items(self.lib, mods, dels, query,
                              write, move, album, True)

    def test_modify_item_dbdata(self):
        self._modify(["title=newTitle"])
    def test_modify_item(self):
        self.modify("title=newTitle")
        item = self.lib.items().get()
        self.assertEqual(item.title, 'newTitle')

    def test_modify_album_dbdata(self):
        self._modify(["album=newAlbum"], album=True)
        album = self.lib.albums()[0]
    def test_modify_write_tags(self):
        self.modify("title=newTitle")
        item = self.lib.items().get()
        item.read()
        self.assertEqual(item.title, 'newTitle')

    def test_modify_dont_write_tags(self):
        self.modify("--nowrite", "title=newTitle")
        item = self.lib.items().get()
        item.read()
        self.assertNotEqual(item.title, 'newTitle')

    def test_move(self):
        self.modify("title=newTitle")
        item = self.lib.items().get()
        self.assertIn('newTitle', item.path)

    def test_not_move(self):
        self.modify("--nomove", "title=newTitle")
        item = self.lib.items().get()
        self.assertNotIn('newTitle', item.path)

    # Album Tests

    def test_modify_album(self):
        self.modify("--album", "album=newAlbum")
        album = self.lib.albums().get()
        self.assertEqual(album.album, 'newAlbum')

    def test_modify_item_tag_unmodified(self):
        self._modify(["title=newTitle"], write=False)
        item = self.lib.items().get()
        item.read()
        self.assertEqual(item.title, 'full')

    def test_modify_album_tag_unmodified(self):
        self._modify(["album=newAlbum"], write=False, album=True)
        item = self.lib.items().get()
        item.read()
        self.assertEqual(item.album, 'the album')

    def test_modify_item_tag(self):
        self._modify(["title=newTitle"], write=True)
        item = self.lib.items().get()
        item.read()
        self.assertEqual(item.title, 'newTitle')

    def test_modify_album_tag(self):
        self._modify(["album=newAlbum"], write=True, album=True)
    def test_modify_album_write_tags(self):
        self.modify("--album", "album=newAlbum")
        item = self.lib.items().get()
        item.read()
        self.assertEqual(item.album, 'newAlbum')

    def test_item_move(self):
        self._modify(["title=newTitle"], move=True)
    def test_modify_album_dont_write_tags(self):
        self.modify("--album", "--nowrite", "album=newAlbum")
        item = self.lib.items().get()
        self.assertTrue('newTitle' in item.path)
        item.read()
        self.assertEqual(item.album, 'the album')

    def test_album_move(self):
        self._modify(["album=newAlbum"], move=True, album=True)
        self.modify("--album", "album=newAlbum")
        item = self.lib.items().get()
        item.read()
        self.assertTrue('newAlbum' in item.path)

    def test_item_not_move(self):
        self._modify(["title=newTitle"], move=False)
        item = self.lib.items().get()
        self.assertFalse('newTitle' in item.path)
        self.assertIn('newAlbum', item.path)

    def test_album_not_move(self):
        self._modify(["album=newAlbum"], move=False, album=True)
        self.modify("--nomove", "--album", "album=newAlbum")
        item = self.lib.items().get()
        item.read()
        self.assertFalse('newAlbum' in item.path)
        self.assertNotIn('newAlbum', item.path)

    # Misc

    def test_write_initial_key_tag(self):
        self._modify(["initial_key=C#m"], write=True)
        self.modify("initial_key=C#m")
        item = self.lib.items().get()
        mediafile = MediaFile(item.path)
        self.assertEqual(mediafile.initial_key, 'C#m')

    def test_remove_flexattr(self):
        self._modify(["flexattr=testAttr"], write=True)
    def test_set_flexattr(self):
        self.modify("flexattr=testAttr")
        item = self.lib.items().get()
        self.assertEqual(item.flexattr, 'testAttr')
        self._modify(dels=["flexattr"], write=True)

    def test_remove_flexattr(self):
        item = self.lib.items().get()
        self.assertTrue("flexattr" not in item)
        item.flexattr = 'testAttr'
        item.store()

        self.modify("flexattr!")
        item = self.lib.items().get()
        self.assertNotIn("flexattr", item)

    @unittest.skip('not yet implemented')
    def test_delete_initial_key_tag(self):
        item = self.i
        item = self.lib.items().get()
        item.initial_key = 'C#m'
        item.write()
        item.store()
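The rewritten ModifyTest in the hunk above drives the real command-line entry point instead of calling commands.modify_items directly. A minimal sketch of that pattern on its own, assuming an already-configured Library instance named lib; the query and title values are illustrative only:

from beets import ui
from helper import control_stdin  # test helper module from beets' test suite

# Answer the confirmation prompt with 'y' while the modify command runs
# against the assumed Library instance `lib`.
with control_stdin('y'):
    ui._raw_main(['modify', 'title:oldTitle', 'title=newTitle'], lib)

Routing the tests through ui._raw_main means prompt handling, flag parsing (--nowrite, --nomove, --album) and the resulting database writes are exercised together, which is what the assertions in the diff rely on.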
@@ -242,7 +246,7 @@ class ModifyTest(_common.TestCase):
        mediafile = MediaFile(item.path)
        self.assertEqual(mediafile.initial_key, 'C#m')

        self._modify(dels=["initial_key!"], write=True)
        self.modify("initial_key!")
        mediafile = MediaFile(item.path)
        self.assertIsNone(mediafile.initial_key)

@@ -250,7 +254,7 @@ class ModifyTest(_common.TestCase):
        (query, mods, dels) = commands.modify_parse_args(["title:oldTitle",
                                                           "title=newTitle"])
        self.assertEqual(query, ["title:oldTitle"])
        self.assertEqual(mods, ["title=newTitle"])
        self.assertEqual(mods, {"title": "newTitle"})

    def test_arg_parsing_delete(self):
        (query, mods, dels) = commands.modify_parse_args(["title:oldTitle",

@@ -262,13 +266,13 @@ class ModifyTest(_common.TestCase):
        (query, mods, dels) = commands.modify_parse_args(["title:oldTitle!",
                                                           "title=newTitle!"])
        self.assertEqual(query, ["title:oldTitle!"])
        self.assertEqual(mods, ["title=newTitle!"])
        self.assertEqual(mods, {"title": "newTitle!"})

    def test_arg_parsing_equals_in_value(self):
        (query, mods, dels) = commands.modify_parse_args(["title:foo=bar",
                                                           "title=newTitle"])
        self.assertEqual(query, ["title:foo=bar"])
        self.assertEqual(mods, ["title=newTitle"])
        self.assertEqual(mods, {"title": "newTitle"})


class MoveTest(_common.TestCase):