mirror of
https://github.com/gotson/komga.git
synced 2025-12-15 21:12:27 +01:00
fix(api): paging and sort could be incorrect when searching for books
This commit is contained in:
parent
b7cb89d4ac
commit
c4cdd7a60e
2 changed files with 56 additions and 12 deletions
|
|
@ -120,16 +120,9 @@ class BookDtoDao(
|
|||
else -> b.ID.`in`(dsl.selectTempStrings())
|
||||
}
|
||||
|
||||
// we can handle paging from the search results directly to reduce the sql query complexity
|
||||
val (bookIdsPaged, pagingBySearch) = when {
|
||||
bookIds.isNullOrEmpty() -> emptyList<String>() to false
|
||||
pageable.isPaged -> bookIds.drop(pageable.pageSize * pageable.pageNumber).take(pageable.pageSize) to true
|
||||
else -> bookIds to false
|
||||
}
|
||||
|
||||
val orderBy =
|
||||
pageable.sort.mapNotNull {
|
||||
if (it.property == "relevance" && !bookIds.isNullOrEmpty()) b.ID.sortByValues(bookIdsPaged, it.isAscending)
|
||||
if (it.property == "relevance" && !bookIds.isNullOrEmpty()) b.ID.sortByValues(bookIds, it.isAscending)
|
||||
else it.toSortField(sorts)
|
||||
}
|
||||
|
||||
|
|
@ -151,15 +144,12 @@ class BookDtoDao(
|
|||
.groupBy(b.ID),
|
||||
)
|
||||
|
||||
// adjust temp table if we are paging by search results
|
||||
if (pagingBySearch) dsl.insertTempStrings(batchSize, bookIdsPaged)
|
||||
|
||||
val dtos = selectBase(userId, selectReadListNumber)
|
||||
.where(conditions)
|
||||
.and(searchCondition)
|
||||
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(orderBy)
|
||||
.apply { if (!pagingBySearch && pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
|
||||
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
|
||||
.fetchAndMap()
|
||||
|
||||
count to dtos
|
||||
|
|
|
|||
|
|
@ -416,6 +416,60 @@ class BookDtoDaoTest(
|
|||
assertThat(found.map { it.name }).containsExactly("Batman", "Batman and Robin", "The incredible adventures of Batman, the man who is also a bat!")
|
||||
}
|
||||
|
||||
@Test
fun `given books when searching by term and sort order then results are ordered by sort order`() {
  // given: three books inserted out of order so that sorting is observable
  seriesLifecycle.addBooks(
    series,
    listOf(
      makeBook("Book 3", seriesId = series.id, libraryId = library.id),
      makeBook("Book 1", seriesId = series.id, libraryId = library.id),
      makeBook("Book 2", seriesId = series.id, libraryId = library.id),
    ),
  )

  searchIndexLifecycle.rebuildIndex()

  // when: the same search is run unpaged, one-per-page, and two-per-page
  val search = BookSearchWithReadProgress(searchTerm = "book")
  val byName = Sort.by("name")

  val unpagedResults = bookDtoDao.findAll(search, user.id, UnpagedSorted(byName)).content
  val singlePages = List(3) { pageIndex ->
    bookDtoDao.findAll(search, user.id, PageRequest.of(pageIndex, 1, byName))
  }
  val page0 = bookDtoDao.findAll(search, user.id, PageRequest.of(0, 2, byName))
  val page1 = bookDtoDao.findAll(search, user.id, PageRequest.of(1, 2, byName))

  // then: every paging strategy yields the same name-sorted ordering
  assertThat(unpagedResults).hasSize(3)
  assertThat(unpagedResults.map { it.name }).containsExactly("Book 1", "Book 2", "Book 3")

  assertThat(singlePages).hasSize(3)
  assertThat(singlePages.map { it.totalPages }).containsOnly(3)
  assertThat(singlePages.map { it.totalElements }).containsOnly(3)
  assertThat(singlePages.map { it.size }).containsOnly(1)
  assertThat(singlePages.flatMap { it.content }.map { it.name }).containsExactly("Book 1", "Book 2", "Book 3")

  assertThat(page0).hasSize(2)
  assertThat(page0.content.map { it.name }).containsExactly("Book 1", "Book 2")
  assertThat(page1).hasSize(1)
  assertThat(page1.content.map { it.name }).containsExactly("Book 3")
}
|
||||
|
||||
@Test
|
||||
fun `given books when searching by term with accent then results are matched accent insensitive`() {
|
||||
// given
|
||||
|
|
|
|||
Loading…
Reference in a new issue