mirror of
git://github.com/kovidgoyal/calibre.git
synced 2026-05-09 11:14:02 +02:00
Greatly reduce the delay at the end of a bulk metadata edit operation that operates on a very large number (thousands) of books
This commit is contained in:
parent
7eb2914c67
commit
a78aa3e12a
1 changed file with 9 additions and 3 deletions
|
|
@ -5,8 +5,9 @@
|
|||
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
|
||||
import os
|
||||
import os, itertools, operator
|
||||
from functools import partial
|
||||
from future_builtins import map
|
||||
|
||||
from PyQt4.Qt import (QTableView, Qt, QAbstractItemView, QMenu, pyqtSignal,
|
||||
QModelIndex, QIcon, QItemSelection, QMimeData, QDrag, QApplication,
|
||||
|
|
@ -793,8 +794,13 @@ def select_rows(self, identifiers, using_ids=True, change_current=True,
|
|||
sel = QItemSelection()
|
||||
m = self.model()
|
||||
max_col = m.columnCount(QModelIndex()) - 1
|
||||
for row in rows:
|
||||
sel.select(m.index(row, 0), m.index(row, max_col))
|
||||
# Create a range based selector for each set of contiguous rows
|
||||
# as supplying selectors for each individual row causes very poor
|
||||
# performance if a large number of rows has to be selected.
|
||||
for k, g in itertools.groupby(enumerate(rows), lambda (i,x):i-x):
|
||||
group = list(map(operator.itemgetter(1), g))
|
||||
sel.merge(QItemSelection(m.index(min(group), 0),
|
||||
m.index(max(group), max_col)), sm.Select)
|
||||
sm.select(sel, sm.ClearAndSelect)
|
||||
|
||||
def get_selected_ids(self):
|
||||
|
|
|
|||
Loading…
Reference in a new issue