feat(api): full text search

closes #24, closes #496
Gauthier Roebroeck 2021-07-29 15:17:43 +08:00
parent 15598f5d7a
commit 8f27faf417
20 changed files with 585 additions and 215 deletions

View file

@ -1,5 +1,6 @@
import org.apache.tools.ant.taskdefs.condition.Os
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
import org.jooq.meta.jaxb.ForcedType
plugins {
run {
@ -242,6 +243,13 @@ jooq {
generator.apply {
database.apply {
name = "org.jooq.meta.sqlite.SQLiteDatabase"
forcedTypes.addAll(
listOf(
ForcedType()
.withName("varchar")
.withIncludeExpression("fts_.*.(title|.*id|isbn|publisher|name)")
)
)
}
target.apply {
packageName = "org.gotson.komga.jooq"
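Note: the ForcedType above is needed because the columns of the FTS5 virtual tables created by the migration below carry no declared SQLite type, so the jOOQ generator is told to map every matched column to varchar. As a rough, hypothetical illustration of what the include expression covers (jOOQ's actual identifier matching is more involved than a plain full-match), a small Kotlin check that is not part of the commit:

// Hypothetical sketch: approximates which generated columns the forcedTypes
// includeExpression would cover, using a plain full-match regex as a stand-in
// for jOOQ's identifier matching.
fun main() {
    val includeExpression = Regex("fts_.*.(title|.*id|isbn|publisher|name)")
    listOf(
        "fts_book_metadata.title",        // covered -> generated as varchar
        "fts_book_metadata.book_id",      // covered
        "fts_series_metadata.publisher",  // covered
        "book_metadata.title",            // not covered: not an fts_ table
    ).forEach { println("$it -> ${includeExpression.matches(it)}") }
}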

View file

@ -0,0 +1,83 @@
-- FTS for BOOK_METADATA
create virtual table fts_book_metadata using fts5(title, isbn, book_id UNINDEXED, content=book_metadata, tokenize = 'porter unicode61 remove_diacritics 2');
INSERT INTO fts_book_metadata(fts_book_metadata) VALUES('rebuild');
-- Triggers to keep the FTS index up to date
CREATE TRIGGER book_metadata__after_insert AFTER INSERT ON book_metadata BEGIN
INSERT INTO fts_book_metadata(rowid, title, isbn, book_id) VALUES (new.rowid, new.title, new.isbn, new.book_id);
END;
CREATE TRIGGER book_metadata__after_delete AFTER DELETE ON book_metadata BEGIN
INSERT INTO fts_book_metadata(fts_book_metadata, rowid, title, isbn, book_id) VALUES('delete', old.rowid, old.title, old.isbn, old.book_id);
END;
CREATE TRIGGER book_metadata__after_update AFTER UPDATE ON book_metadata BEGIN
INSERT INTO fts_book_metadata(fts_book_metadata, rowid, title, isbn, book_id) VALUES('delete', old.rowid, old.title, old.isbn, old.book_id);
INSERT INTO fts_book_metadata(rowid, title, isbn, book_id) VALUES (new.rowid, new.title, new.isbn, new.book_id);
END;
-- FTS for SERIES_METADATA
create virtual table fts_series_metadata using fts5(title, publisher, series_id UNINDEXED, content=series_metadata, tokenize = 'porter unicode61 remove_diacritics 2');
INSERT INTO fts_series_metadata(fts_series_metadata) VALUES('rebuild');
-- Triggers to keep the FTS index up to date
CREATE TRIGGER series_metadata__after_insert AFTER INSERT ON series_metadata BEGIN
INSERT INTO fts_series_metadata(rowid, title, publisher, series_id) VALUES (new.rowid, new.title, new.publisher, new.series_id);
END;
CREATE TRIGGER series_metadata__after_delete AFTER DELETE ON series_metadata BEGIN
INSERT INTO fts_series_metadata(fts_series_metadata, rowid, title, publisher, series_id) VALUES('delete', old.rowid, old.title, old.publisher, old.series_id);
END;
CREATE TRIGGER series_metadata__after_update AFTER UPDATE ON series_metadata BEGIN
INSERT INTO fts_series_metadata(fts_series_metadata, rowid, title, publisher, series_id) VALUES('delete', old.rowid, old.title, old.publisher, old.series_id);
INSERT INTO fts_series_metadata(rowid, title, publisher, series_id) VALUES (new.rowid, new.title, new.publisher, new.series_id);
END;
-- FTS for COLLECTION
create virtual table fts_collection using fts5(name, id UNINDEXED, content=collection, tokenize = 'porter unicode61 remove_diacritics 2');
INSERT INTO fts_collection(fts_collection) VALUES('rebuild');
-- Triggers to keep the FTS index up to date
CREATE TRIGGER collection__after_insert AFTER INSERT ON collection BEGIN
INSERT INTO fts_collection(rowid, name, id) VALUES (new.rowid, new.name, new.id);
END;
CREATE TRIGGER collection__after_delete AFTER DELETE ON collection BEGIN
INSERT INTO fts_collection(fts_collection, rowid, name, id) VALUES('delete', old.rowid, old.name, old.id);
END;
CREATE TRIGGER collection__after_update AFTER UPDATE ON collection BEGIN
INSERT INTO fts_collection(fts_collection, rowid, name, id) VALUES('delete', old.rowid, old.name, old.id);
INSERT INTO fts_collection(rowid, name, id) VALUES (new.rowid, new.name, new.id);
END;
-- FTS for READLIST
create virtual table fts_readlist using fts5(name, id UNINDEXED, content=readlist, tokenize = 'porter unicode61 remove_diacritics 2');
INSERT INTO fts_readlist(fts_readlist) VALUES('rebuild');
-- Triggers to keep the FTS index up to date
CREATE TRIGGER readlist__after_insert AFTER INSERT ON readlist BEGIN
INSERT INTO fts_readlist(rowid, name, id) VALUES (new.rowid, new.name, new.id);
END;
CREATE TRIGGER readlist__after_delete AFTER DELETE ON readlist BEGIN
INSERT INTO fts_readlist(fts_readlist, rowid, name, id) VALUES('delete', old.rowid, old.name, old.id);
END;
CREATE TRIGGER readlist__after_update AFTER UPDATE ON readlist BEGIN
INSERT INTO fts_readlist(fts_readlist, rowid, name, id) VALUES('delete', old.rowid, old.name, old.id);
INSERT INTO fts_readlist(rowid, name, id) VALUES (new.rowid, new.name, new.id);
END;
-- FTS for BOOK_METADATA_AGGREGATION_AUTHORS
create virtual table fts_book_metadata_aggregation_author using fts5(name, series_id UNINDEXED, content=book_metadata_aggregation_author, tokenize = 'porter unicode61 remove_diacritics 2');
INSERT INTO fts_book_metadata_aggregation_author(fts_book_metadata_aggregation_author) VALUES('rebuild');
-- Triggers to keep the FTS index up to date
CREATE TRIGGER book_metadata_aggregation_author__after_insert AFTER INSERT ON book_metadata_aggregation_author BEGIN
INSERT INTO fts_book_metadata_aggregation_author(rowid, name, series_id) VALUES (new.rowid, new.name, new.series_id);
END;
CREATE TRIGGER book_metadata_aggregation_author__after_delete AFTER DELETE ON book_metadata_aggregation_author BEGIN
INSERT INTO fts_book_metadata_aggregation_author(fts_book_metadata_aggregation_author, rowid, name, series_id) VALUES('delete', old.rowid, old.name, old.series_id);
END;
CREATE TRIGGER book_metadata_aggregation_author__after_update AFTER UPDATE ON book_metadata_aggregation_author BEGIN
INSERT INTO fts_book_metadata_aggregation_author(fts_book_metadata_aggregation_author, rowid, name, series_id) VALUES('delete', old.rowid, old.name, old.series_id);
INSERT INTO fts_book_metadata_aggregation_author(rowid, name, series_id) VALUES (new.rowid, new.name, new.series_id);
END;
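Note: each fts_* table above is an external-content FTS5 index (content=) that shares rowids with its source table; the triggers keep it in sync on insert, update and delete, and the initial 'rebuild' command indexes the rows that already exist. A minimal, hypothetical sketch of how such an index is queried over plain JDBC, not part of the commit (best matches first, since FTS5's rank is a negative bm25 score and ascending order puts the most relevant rows on top):

import java.sql.DriverManager

// Hypothetical helper: search book titles through the fts_book_metadata index
// created by this migration, joining back to the content table via book_id.
fun searchBookTitles(dbPath: String, term: String): List<String> =
    DriverManager.getConnection("jdbc:sqlite:$dbPath").use { connection ->
        connection.prepareStatement(
            """
            SELECT book_metadata.title
            FROM fts_book_metadata
            JOIN book_metadata ON book_metadata.book_id = fts_book_metadata.book_id
            WHERE fts_book_metadata MATCH ?
            ORDER BY rank
            """
        ).use { statement ->
            statement.setString(1, term)
            statement.executeQuery().use { rs ->
                val titles = mutableListOf<String>()
                while (rs.next()) titles.add(rs.getString("title"))
                titles
            }
        }
    }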

View file

@ -7,7 +7,7 @@ import org.springframework.data.domain.Pageable
interface ReadListRepository {
fun findByIdOrNull(readListId: String): ReadList?
fun searchAll(search: String? = null, pageable: Pageable): Page<ReadList>
fun findAll(search: String? = null, pageable: Pageable): Page<ReadList>
/**
* Find one ReadList by readListId,

View file

@ -7,7 +7,7 @@ import org.springframework.data.domain.Pageable
interface SeriesCollectionRepository {
fun findByIdOrNull(collectionId: String): SeriesCollection?
fun searchAll(search: String? = null, pageable: Pageable): Page<SeriesCollection>
fun findAll(search: String? = null, pageable: Pageable): Page<SeriesCollection>
/**
* Find one SeriesCollection by collectionId,

View file

@ -12,7 +12,7 @@ private val log = KotlinLogging.logger {}
class SqliteUdfDataSource : SimpleDriverDataSource() {
companion object {
const val udfStripAccents = "UDF_UNIDECODE"
const val udfStripAccents = "UDF_STRIP_ACCENTS"
}
override fun getConnection(): Connection =

View file

@ -149,7 +149,7 @@ class BookDao(
.limit(1)
.fetchOne(b.ID)
override fun findFirstUnreadIdInSeriesOrNull(seriesId: String, userId: String): String? ? =
override fun findFirstUnreadIdInSeriesOrNull(seriesId: String, userId: String): String? =
dsl.select(b.ID)
.from(b)
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))

View file

@ -1,8 +1,8 @@
package org.gotson.komga.infrastructure.jooq
import mu.KotlinLogging
import org.gotson.komga.domain.model.BookSearchWithReadProgress
import org.gotson.komga.domain.model.ReadStatus
import org.gotson.komga.infrastructure.language.stripAccents
import org.gotson.komga.infrastructure.web.toFilePath
import org.gotson.komga.interfaces.rest.dto.AuthorDto
import org.gotson.komga.interfaces.rest.dto.BookDto
@ -20,6 +20,7 @@ import org.jooq.DSLContext
import org.jooq.Record
import org.jooq.ResultQuery
import org.jooq.impl.DSL
import org.jooq.impl.DSL.field
import org.jooq.impl.DSL.inline
import org.jooq.impl.DSL.lower
import org.springframework.data.domain.Page
@ -30,6 +31,8 @@ import org.springframework.data.domain.Sort
import org.springframework.stereotype.Component
import java.net.URL
private val logger = KotlinLogging.logger {}
@Component
class BookDtoDao(
private val dsl: DSLContext
@ -43,6 +46,7 @@ class BookDtoDao(
private val s = Tables.SERIES
private val rlb = Tables.READLIST_BOOK
private val bt = Tables.BOOK_METADATA_TAG
private val fts = Tables.FTS_BOOK_METADATA
private val sorts = mapOf(
"name" to lower(b.NAME.udfStripAccents()),
@ -60,7 +64,8 @@ class BookDtoDao(
"metadata.title" to lower(d.TITLE.udfStripAccents()),
"metadata.releaseDate" to d.RELEASE_DATE,
"readProgress.lastModified" to r.LAST_MODIFIED_DATE,
"readList.number" to rlb.NUMBER
"readList.number" to rlb.NUMBER,
"relevance" to field("rank"),
)
override fun findAll(search: BookSearchWithReadProgress, userId: String, pageable: Pageable): Page<BookDto> {
@ -88,36 +93,45 @@ class BookDtoDao(
joinConditions: JoinConditions = JoinConditions(),
filterOnLibraryIds: Collection<String>?,
): Page<BookDto> {
val count = dsl.selectDistinct(b.ID)
.from(b)
.leftJoin(m).on(b.ID.eq(m.BOOK_ID))
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
.apply { if (joinConditions.tag) leftJoin(bt).on(b.ID.eq(bt.BOOK_ID)) }
.apply { if (joinConditions.selectReadListNumber) leftJoin(rlb).on(b.ID.eq(rlb.BOOK_ID)) }
.apply { if (joinConditions.author) leftJoin(a).on(b.ID.eq(a.BOOK_ID)) }
.where(conditions)
.groupBy(b.ID)
.fetch()
.size
return try {
val count = dsl.selectDistinct(b.ID)
.from(b)
.apply { if (joinConditions.fullTextSearch) join(fts).on(b.ID.eq(fts.BOOK_ID)) }
.leftJoin(m).on(b.ID.eq(m.BOOK_ID))
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
.apply { if (joinConditions.tag) leftJoin(bt).on(b.ID.eq(bt.BOOK_ID)) }
.apply { if (joinConditions.selectReadListNumber) leftJoin(rlb).on(b.ID.eq(rlb.BOOK_ID)) }
.apply { if (joinConditions.author) leftJoin(a).on(b.ID.eq(a.BOOK_ID)) }
.where(conditions)
.groupBy(b.ID)
.fetch()
.size
val orderBy = pageable.sort.toOrderBy(sorts)
val orderBy = pageable.sort.toOrderBy(sorts)
val dtos = selectBase(userId, joinConditions)
.where(conditions)
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchAndMap()
val dtos = selectBase(userId, joinConditions)
.where(conditions)
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchAndMap()
val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
return PageImpl(
dtos,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count, 20), pageSort),
count.toLong()
)
val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
PageImpl(
dtos,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count, 20), pageSort),
count.toLong()
)
} catch (e: Exception) {
if (e.isFtsError()) PageImpl(emptyList())
else {
logger.error(e) { "Error while fetching data" }
throw e
}
}
}
override fun findByIdOrNull(bookId: String, userId: String): BookDto? =
@ -234,6 +248,7 @@ class BookDtoDao(
*r.fields()
).apply { if (joinConditions.selectReadListNumber) select(rlb.NUMBER) }
.from(b)
.apply { if (joinConditions.fullTextSearch) join(fts).on(b.ID.eq(fts.BOOK_ID)) }
.leftJoin(m).on(b.ID.eq(m.BOOK_ID))
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
@ -266,9 +281,9 @@ class BookDtoDao(
private fun BookSearchWithReadProgress.toCondition(): Condition {
var c: Condition = DSL.trueCondition()
searchTerm?.let { c = c.and(fts.match(it)) }
if (!libraryIds.isNullOrEmpty()) c = c.and(b.LIBRARY_ID.`in`(libraryIds))
if (!seriesIds.isNullOrEmpty()) c = c.and(b.SERIES_ID.`in`(seriesIds))
searchTerm?.let { c = c.and(d.TITLE.udfStripAccents().containsIgnoreCase(it.stripAccents())) }
if (!mediaStatus.isNullOrEmpty()) c = c.and(m.STATUS.`in`(mediaStatus))
if (deleted == true) c = c.and(b.DELETED_DATE.isNotNull)
if (deleted == false) c = c.and(b.DELETED_DATE.isNull)
@ -302,12 +317,14 @@ class BookDtoDao(
JoinConditions(
tag = !tags.isNullOrEmpty(),
author = !authors.isNullOrEmpty(),
fullTextSearch = !searchTerm.isNullOrBlank(),
)
private data class JoinConditions(
val selectReadListNumber: Boolean = false,
val tag: Boolean = false,
val author: Boolean = false,
val fullTextSearch: Boolean = false,
)
private fun BookRecord.toDto(media: MediaDto, metadata: BookMetadataDto, readProgress: ReadProgressDto?) =

View file

@ -1,8 +1,8 @@
package org.gotson.komga.infrastructure.jooq
import mu.KotlinLogging
import org.gotson.komga.domain.model.ReadList
import org.gotson.komga.domain.persistence.ReadListRepository
import org.gotson.komga.infrastructure.language.stripAccents
import org.gotson.komga.jooq.Tables
import org.gotson.komga.jooq.tables.records.ReadlistRecord
import org.jooq.DSLContext
@ -20,6 +20,8 @@ import java.time.LocalDateTime
import java.time.ZoneId
import java.util.SortedMap
private val logger = KotlinLogging.logger {}
@Component
class ReadListDao(
private val dsl: DSLContext
@ -28,9 +30,11 @@ class ReadListDao(
private val rl = Tables.READLIST
private val rlb = Tables.READLIST_BOOK
private val b = Tables.BOOK
private val fts = Tables.FTS_READLIST
private val sorts = mapOf(
"name" to DSL.lower(rl.NAME.udfStripAccents())
"name" to DSL.lower(rl.NAME.udfStripAccents()),
"relevance" to DSL.field("rank"),
)
override fun findByIdOrNull(readListId: String): ReadList? =
@ -46,60 +50,80 @@ class ReadListDao(
.fetchAndMap(filterOnLibraryIds)
.firstOrNull()
override fun searchAll(search: String?, pageable: Pageable): Page<ReadList> {
val conditions = search?.let { rl.NAME.udfStripAccents().containsIgnoreCase(it.stripAccents()) }
override fun findAll(search: String?, pageable: Pageable): Page<ReadList> {
val conditions = search?.let { searchCondition(it) }
?: DSL.trueCondition()
val count = dsl.selectCount()
.from(rl)
.where(conditions)
.fetchOne(0, Long::class.java) ?: 0
return try {
val count = dsl.selectCount()
.from(rl)
.apply { if (!search.isNullOrBlank()) join(fts).on(rl.ID.eq(fts.ID)) }
.where(conditions)
.fetchOne(0, Long::class.java) ?: 0
val orderBy = pageable.sort.toOrderBy(sorts)
val orderBy = pageable.sort.toOrderBy(sorts)
val items = selectBase()
.where(conditions)
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchAndMap(null)
val items = selectBase(!search.isNullOrBlank())
.where(conditions)
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchAndMap(null)
val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
return PageImpl(
items,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count.toInt(), 20), pageSort),
count
)
val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
PageImpl(
items,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count.toInt(), 20), pageSort),
count
)
} catch (e: Exception) {
if (e.isFtsError()) PageImpl(emptyList())
else {
logger.error(e) { "Error while fetching data" }
throw e
}
}
}
override fun findAllByLibraryIds(belongsToLibraryIds: Collection<String>, filterOnLibraryIds: Collection<String>?, search: String?, pageable: Pageable): Page<ReadList> {
val ids = dsl.selectDistinct(rl.ID)
.from(rl)
.leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
.leftJoin(b).on(rlb.BOOK_ID.eq(b.ID))
.where(b.LIBRARY_ID.`in`(belongsToLibraryIds))
.apply { search?.let { and(rl.NAME.udfStripAccents().containsIgnoreCase(it.stripAccents())) } }
.fetch(0, String::class.java)
val count = ids.size
val orderBy = pageable.sort.toOrderBy(sorts)
val items = selectBase()
.where(rl.ID.`in`(ids))
val conditions = b.LIBRARY_ID.`in`(belongsToLibraryIds)
.apply { search?.let { and(searchCondition(it)) } }
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
.apply { search?.let { and(rl.NAME.udfStripAccents().containsIgnoreCase(it.stripAccents())) } }
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchAndMap(filterOnLibraryIds)
val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
return PageImpl(
items,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count, 20), pageSort),
count.toLong()
)
return try {
val ids = dsl.selectDistinct(rl.ID)
.from(rl)
.apply { if (!search.isNullOrBlank()) join(fts).on(rl.ID.eq(fts.ID)) }
.leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
.leftJoin(b).on(rlb.BOOK_ID.eq(b.ID))
.where(conditions)
.fetch(0, String::class.java)
val count = ids.size
val orderBy = pageable.sort.toOrderBy(sorts)
val items = selectBase(!search.isNullOrBlank())
.where(rl.ID.`in`(ids))
.and(conditions)
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchAndMap(filterOnLibraryIds)
val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
PageImpl(
items,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count, 20), pageSort),
count.toLong()
)
} catch (e: Exception) {
if (e.isFtsError()) PageImpl(emptyList())
else {
logger.error(e) { "Error while fetching data" }
throw e
}
}
}
override fun findAllContainingBookId(containsBookId: String, filterOnLibraryIds: Collection<String>?): Collection<ReadList> {
@ -133,9 +157,13 @@ class ReadListDao(
.fetchAndMap(null)
.firstOrNull()
private fun selectBase() =
private fun searchCondition(search: String) =
fts.match(search)
private fun selectBase(joinFts: Boolean = false) =
dsl.selectDistinct(*rl.fields())
.from(rl)
.apply { if (joinFts) join(fts).on(rl.ID.eq(fts.ID)) }
.leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
.leftJoin(b).on(rlb.BOOK_ID.eq(b.ID))

View file

@ -1,5 +1,6 @@
package org.gotson.komga.infrastructure.jooq
import mu.KotlinLogging
import org.gotson.komga.domain.model.Author
import org.gotson.komga.domain.persistence.ReferentialRepository
import org.gotson.komga.infrastructure.language.stripAccents
@ -7,6 +8,7 @@ import org.gotson.komga.jooq.Tables
import org.gotson.komga.jooq.tables.records.BookMetadataAggregationAuthorRecord
import org.gotson.komga.jooq.tables.records.BookMetadataAuthorRecord
import org.jooq.DSLContext
import org.jooq.impl.DSL.field
import org.jooq.impl.DSL.lower
import org.jooq.impl.DSL.select
import org.springframework.data.domain.Page
@ -17,6 +19,8 @@ import org.springframework.data.domain.Sort
import org.springframework.stereotype.Component
import java.time.LocalDate
private val logger = KotlinLogging.logger {}
@Component
class ReferentialDao(
private val dsl: DSLContext
@ -32,6 +36,7 @@ class ReferentialDao(
private val bt = Tables.BOOK_METADATA_TAG
private val st = Tables.SERIES_METADATA_TAG
private val cs = Tables.COLLECTION_SERIES
private val ftsAuthors = Tables.FTS_BOOK_METADATA_AGGREGATION_AUTHOR
override fun findAllAuthorsByName(search: String, filterOnLibraryIds: Collection<String>?): List<Author> =
dsl.selectDistinct(a.NAME, a.ROLE)
@ -105,38 +110,47 @@ class ReferentialDao(
)
private fun findAuthorsByName(search: String, role: String?, filterOnLibraryIds: Collection<String>?, pageable: Pageable, filterBy: FilterBy?): Page<Author> {
val query = dsl.selectDistinct(bmaa.NAME, bmaa.ROLE)
.from(bmaa)
.apply { if (filterOnLibraryIds != null || filterBy?.type == FilterByType.LIBRARY) leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) }
.apply { if (filterBy?.type == FilterByType.COLLECTION) leftJoin(cs).on(bmaa.SERIES_ID.eq(cs.SERIES_ID)) }
.where(bmaa.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents()))
.apply { role?.let { and(bmaa.ROLE.eq(role)) } }
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
.apply {
filterBy?.let {
when (it.type) {
FilterByType.LIBRARY -> and(s.LIBRARY_ID.eq(it.id))
FilterByType.COLLECTION -> and(cs.COLLECTION_ID.eq(it.id))
FilterByType.SERIES -> and(bmaa.SERIES_ID.eq(it.id))
return try {
val query = dsl.selectDistinct(bmaa.NAME, bmaa.ROLE)
.from(bmaa)
.join(ftsAuthors).on(ftsAuthors.rowid().eq(bmaa.rowid()))
.apply { if (filterOnLibraryIds != null || filterBy?.type == FilterByType.LIBRARY) leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) }
.apply { if (filterBy?.type == FilterByType.COLLECTION) leftJoin(cs).on(bmaa.SERIES_ID.eq(cs.SERIES_ID)) }
.where(ftsAuthors.match(search))
.apply { role?.let { and(bmaa.ROLE.eq(role)) } }
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
.apply {
filterBy?.let {
when (it.type) {
FilterByType.LIBRARY -> and(s.LIBRARY_ID.eq(it.id))
FilterByType.COLLECTION -> and(cs.COLLECTION_ID.eq(it.id))
FilterByType.SERIES -> and(bmaa.SERIES_ID.eq(it.id))
}
}
}
val count = dsl.fetchCount(query)
val items = query
.orderBy(field("rank"))
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchInto(a)
.map { it.toDomain() }
val pageSort = Sort.by("relevance")
PageImpl(
items,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count, 20), pageSort),
count.toLong()
)
} catch (e: Exception) {
if (e.isFtsError()) PageImpl(emptyList())
else {
logger.error(e) { "Error while fetching data" }
throw e
}
val count = dsl.fetchCount(query)
val items = query
.orderBy(lower(bmaa.NAME.udfStripAccents()), bmaa.ROLE)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchInto(a)
.map { it.toDomain() }
val pageSort = Sort.by("name")
return PageImpl(
items,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count, 20), pageSort),
count.toLong()
)
}
}
override fun findAllAuthorsNamesByName(search: String, filterOnLibraryIds: Collection<String>?): List<String> =

View file

@ -1,8 +1,8 @@
package org.gotson.komga.infrastructure.jooq
import mu.KotlinLogging
import org.gotson.komga.domain.model.SeriesCollection
import org.gotson.komga.domain.persistence.SeriesCollectionRepository
import org.gotson.komga.infrastructure.language.stripAccents
import org.gotson.komga.jooq.Tables
import org.gotson.komga.jooq.tables.records.CollectionRecord
import org.jooq.DSLContext
@ -19,6 +19,8 @@ import org.springframework.transaction.annotation.Transactional
import java.time.LocalDateTime
import java.time.ZoneId
private val logger = KotlinLogging.logger {}
@Component
class SeriesCollectionDao(
private val dsl: DSLContext
@ -27,9 +29,11 @@ class SeriesCollectionDao(
private val c = Tables.COLLECTION
private val cs = Tables.COLLECTION_SERIES
private val s = Tables.SERIES
private val fts = Tables.FTS_COLLECTION
private val sorts = mapOf(
"name" to DSL.lower(c.NAME.udfStripAccents())
"name" to DSL.lower(c.NAME.udfStripAccents()),
"relevance" to DSL.field("rank"),
)
override fun findByIdOrNull(collectionId: String): SeriesCollection? =
@ -45,60 +49,79 @@ class SeriesCollectionDao(
.fetchAndMap(filterOnLibraryIds)
.firstOrNull()
override fun searchAll(search: String?, pageable: Pageable): Page<SeriesCollection> {
val conditions = search?.let { c.NAME.udfStripAccents().containsIgnoreCase(it.stripAccents()) }
override fun findAll(search: String?, pageable: Pageable): Page<SeriesCollection> {
val conditions = search?.let { searchCondition(search) }
?: DSL.trueCondition()
val count = dsl.selectCount()
.from(c)
.where(conditions)
.fetchOne(0, Long::class.java) ?: 0
return try {
val count = dsl.selectCount()
.from(c)
.apply { if (!search.isNullOrBlank()) join(fts).on(c.ID.eq(fts.ID)) }
.where(conditions)
.fetchOne(0, Long::class.java) ?: 0
val orderBy = pageable.sort.toOrderBy(sorts)
val orderBy = pageable.sort.toOrderBy(sorts)
val items = selectBase()
.where(conditions)
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchAndMap(null)
val items = selectBase(!search.isNullOrBlank())
.where(conditions)
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchAndMap(null)
val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
return PageImpl(
items,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count.toInt(), 20), pageSort),
count
)
val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
PageImpl(
items,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count.toInt(), 20), pageSort),
count
)
} catch (e: Exception) {
if (e.isFtsError()) PageImpl(emptyList())
else {
logger.error(e) { "Error while fetching data" }
throw e
}
}
}
override fun findAllByLibraryIds(belongsToLibraryIds: Collection<String>, filterOnLibraryIds: Collection<String>?, search: String?, pageable: Pageable): Page<SeriesCollection> {
val ids = dsl.selectDistinct(c.ID)
.from(c)
.leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
.leftJoin(s).on(cs.SERIES_ID.eq(s.ID))
.where(s.LIBRARY_ID.`in`(belongsToLibraryIds))
.apply { search?.let { and(c.NAME.udfStripAccents().containsIgnoreCase(it.stripAccents())) } }
.fetch(0, String::class.java)
val count = ids.size
val orderBy = pageable.sort.toOrderBy(sorts)
val items = selectBase()
.where(c.ID.`in`(ids))
val conditions = s.LIBRARY_ID.`in`(belongsToLibraryIds)
.apply { search?.let { and(searchCondition(it)) } }
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
.apply { search?.let { and(c.NAME.udfStripAccents().containsIgnoreCase(it.stripAccents())) } }
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchAndMap(filterOnLibraryIds)
val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
return PageImpl(
items,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count, 20), pageSort),
count.toLong()
)
return try {
val ids = dsl.selectDistinct(c.ID)
.from(c)
.leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
.leftJoin(s).on(cs.SERIES_ID.eq(s.ID))
.where(conditions)
.fetch(0, String::class.java)
val count = ids.size
val orderBy = pageable.sort.toOrderBy(sorts)
val items = selectBase(!search.isNullOrBlank())
.where(c.ID.`in`(ids))
.and(conditions)
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchAndMap(filterOnLibraryIds)
val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
PageImpl(
items,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count, 20), pageSort),
count.toLong()
)
} catch (e: Exception) {
if (e.isFtsError()) PageImpl(emptyList())
else {
logger.error(e) { "Error while fetching data" }
throw e
}
}
}
override fun findAllContainingSeriesId(containsSeriesId: String, filterOnLibraryIds: Collection<String>?): Collection<SeriesCollection> {
@ -132,9 +155,13 @@ class SeriesCollectionDao(
.fetchAndMap(null)
.firstOrNull()
private fun selectBase() =
private fun searchCondition(search: String) =
fts.match(search)
private fun selectBase(joinFts: Boolean = false) =
dsl.selectDistinct(*c.fields())
.from(c)
.apply { if (joinFts) join(fts).on(c.ID.eq(fts.ID)) }
.leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
.leftJoin(s).on(cs.SERIES_ID.eq(s.ID))

View file

@ -1,9 +1,9 @@
package org.gotson.komga.infrastructure.jooq
import mu.KotlinLogging
import org.gotson.komga.domain.model.ReadStatus
import org.gotson.komga.domain.model.SeriesSearch
import org.gotson.komga.domain.model.SeriesSearchWithReadProgress
import org.gotson.komga.infrastructure.language.stripAccents
import org.gotson.komga.infrastructure.web.toFilePath
import org.gotson.komga.interfaces.rest.dto.AuthorDto
import org.gotson.komga.interfaces.rest.dto.BookMetadataAggregationDto
@ -34,6 +34,8 @@ import org.springframework.stereotype.Component
import java.math.BigDecimal
import java.net.URL
private val logger = KotlinLogging.logger {}
const val BOOKS_COUNT = "booksCount"
const val BOOKS_UNREAD_COUNT = "booksUnreadCount"
const val BOOKS_IN_PROGRESS_COUNT = "booksInProgressCount"
@ -54,6 +56,7 @@ class SeriesDtoDao(
private val st = Tables.SERIES_METADATA_TAG
private val bma = Tables.BOOK_METADATA_AGGREGATION
private val bmaa = Tables.BOOK_METADATA_AGGREGATION_AUTHOR
private val fts = Tables.FTS_SERIES_METADATA
val countUnread: AggregateFunction<BigDecimal> = DSL.sum(DSL.`when`(r.COMPLETED.isNull, 1).otherwise(0))
val countRead: AggregateFunction<BigDecimal> = DSL.sum(DSL.`when`(r.COMPLETED.isTrue, 1).otherwise(0))
@ -76,6 +79,7 @@ class SeriesDtoDao(
"collection.number" to cs.NUMBER,
"name" to lower(s.NAME.udfStripAccents()),
"booksCount" to s.BOOK_COUNT,
"relevance" to DSL.field("rank"),
)
override fun findAll(search: SeriesSearchWithReadProgress, userId: String, pageable: Pageable): Page<SeriesDto> {
@ -112,20 +116,29 @@ class SeriesDtoDao(
val joinConditions = search.toJoinConditions()
val firstChar = lower(substring(d.TITLE_SORT, 1, 1))
return dsl.select(firstChar, count())
.from(s)
.leftJoin(d).on(s.ID.eq(d.SERIES_ID))
.leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
.leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
.apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
.apply { if (joinConditions.tag) leftJoin(st).on(s.ID.eq(st.SERIES_ID)) }
.apply { if (joinConditions.collection) leftJoin(cs).on(s.ID.eq(cs.SERIES_ID)) }
.apply { if (joinConditions.aggregationAuthor) leftJoin(bmaa).on(s.ID.eq(bmaa.SERIES_ID)) }
.where(conditions)
.groupBy(firstChar)
.map {
GroupCountDto(it.value1(), it.value2())
return try {
dsl.select(firstChar, count())
.from(s)
.apply { if (joinConditions.fullTextSearch) join(fts).on(s.ID.eq(fts.SERIES_ID)) }
.leftJoin(d).on(s.ID.eq(d.SERIES_ID))
.leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
.leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
.apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
.apply { if (joinConditions.tag) leftJoin(st).on(s.ID.eq(st.SERIES_ID)) }
.apply { if (joinConditions.collection) leftJoin(cs).on(s.ID.eq(cs.SERIES_ID)) }
.apply { if (joinConditions.aggregationAuthor) leftJoin(bmaa).on(s.ID.eq(bmaa.SERIES_ID)) }
.where(conditions)
.groupBy(firstChar)
.map {
GroupCountDto(it.value1(), it.value2())
}
} catch (e: Exception) {
if (e.isFtsError()) emptyList()
else {
logger.error(e) { "Error while fetching data" }
throw e
}
}
}
override fun findByIdOrNull(seriesId: String, userId: String): SeriesDto? =
@ -142,6 +155,7 @@ class SeriesDtoDao(
dsl.selectDistinct(*groupFields)
.apply { if (joinConditions.selectCollectionNumber) select(cs.NUMBER) }
.from(s)
.apply { if (joinConditions.fullTextSearch) join(fts).on(s.ID.eq(fts.SERIES_ID)) }
.leftJoin(d).on(s.ID.eq(d.SERIES_ID))
.leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
.leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
@ -156,34 +170,42 @@ class SeriesDtoDao(
pageable: Pageable,
joinConditions: JoinConditions = JoinConditions()
): Page<SeriesDto> {
val count = dsl.select(s.ID)
.from(s)
.leftJoin(d).on(s.ID.eq(d.SERIES_ID))
.leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
.leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
.apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
.apply { if (joinConditions.tag) leftJoin(st).on(s.ID.eq(st.SERIES_ID)) }
.apply { if (joinConditions.collection) leftJoin(cs).on(s.ID.eq(cs.SERIES_ID)) }
.apply { if (joinConditions.aggregationAuthor) leftJoin(bmaa).on(s.ID.eq(bmaa.SERIES_ID)) }
.where(conditions)
.fetch()
.size
return try {
val count = dsl.select(count(s.ID))
.from(s)
.apply { if (joinConditions.fullTextSearch) join(fts).on(s.ID.eq(fts.SERIES_ID)) }
.leftJoin(d).on(s.ID.eq(d.SERIES_ID))
.leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
.leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
.apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
.apply { if (joinConditions.tag) leftJoin(st).on(s.ID.eq(st.SERIES_ID)) }
.apply { if (joinConditions.collection) leftJoin(cs).on(s.ID.eq(cs.SERIES_ID)) }
.apply { if (joinConditions.aggregationAuthor) leftJoin(bmaa).on(s.ID.eq(bmaa.SERIES_ID)) }
.where(conditions)
.fetchOne(count(s.ID)) ?: 0
val orderBy = pageable.sort.toOrderBy(sorts)
val orderBy = pageable.sort.toOrderBy(sorts)
val dtos = selectBase(userId, joinConditions)
.where(conditions)
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchAndMap()
val dtos = selectBase(userId, joinConditions)
.where(conditions)
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.fetchAndMap()
val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
return PageImpl(
dtos,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count, 20), pageSort),
count.toLong()
)
val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
PageImpl(
dtos,
if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
else PageRequest.of(0, maxOf(count, 20), pageSort),
count.toLong()
)
} catch (e: Exception) {
if (e.isFtsError()) PageImpl(emptyList())
else {
logger.error(e) { "Error while fetching data" }
throw e
}
}
}
private fun readProgressConditionSeries(userId: String): Condition = rs.USER_ID.eq(userId).or(rs.USER_ID.isNull)
@ -228,9 +250,9 @@ class SeriesDtoDao(
private fun SeriesSearchWithReadProgress.toCondition(): Condition {
var c: Condition = DSL.trueCondition()
searchTerm?.let { c = c.and(fts.match(it)) }
if (!libraryIds.isNullOrEmpty()) c = c.and(s.LIBRARY_ID.`in`(libraryIds))
if (!collectionIds.isNullOrEmpty()) c = c.and(cs.COLLECTION_ID.`in`(collectionIds))
searchTerm?.let { c = c.and(d.TITLE.udfStripAccents().containsIgnoreCase(it.stripAccents())) }
searchRegex?.let { c = c.and((it.second.toColumn()).likeRegex(it.first)) }
if (!metadataStatus.isNullOrEmpty()) c = c.and(d.STATUS.`in`(metadataStatus))
if (!publishers.isNullOrEmpty()) c = c.and(lower(d.PUBLISHER).`in`(publishers.map { it.lowercase() }))
@ -282,6 +304,7 @@ class SeriesDtoDao(
tag = !tags.isNullOrEmpty(),
collection = !collectionIds.isNullOrEmpty(),
aggregationAuthor = !authors.isNullOrEmpty(),
fullTextSearch = !searchTerm.isNullOrBlank(),
)
private data class JoinConditions(
@ -290,6 +313,7 @@ class SeriesDtoDao(
val tag: Boolean = false,
val collection: Boolean = false,
val aggregationAuthor: Boolean = false,
val fullTextSearch: Boolean = false,
)
private fun SeriesRecord.toDto(

View file

@ -1,11 +1,14 @@
package org.gotson.komga.infrastructure.jooq
import org.gotson.komga.infrastructure.datasource.SqliteUdfDataSource
import org.jooq.Condition
import org.jooq.Field
import org.jooq.SortField
import org.jooq.Table
import org.jooq.TableField
import org.jooq.impl.DSL
import org.springframework.data.domain.Sort
import org.sqlite.SQLiteException
import java.time.LocalDateTime
import java.time.ZoneId
import java.time.ZoneOffset
@ -24,3 +27,21 @@ fun LocalDateTime.toCurrentTimeZone(): LocalDateTime =
fun TableField<*, String>.udfStripAccents() =
DSL.function(SqliteUdfDataSource.udfStripAccents, String::class.java, this)
fun Table<*>.match(term: String): Condition =
DSL.condition("{0} MATCH {1}", DSL.field(this.name), term.ftsSanitized())
fun String.ftsSanitized() = this
.replace("-", " ") // to better match queries like "x-men"
.replace("[^\\p{L}\\p{Z}\\p{N}\":+*^{}()]".toRegex(), "") // to avoid fts5 syntax error
.removePrefix("*") // to avoid unknown special query
private val ftsErrorMessages = listOf("no such column", "unknown special query", "fts5: syntax error near", "unterminated string")
/**
* FTS queries of the form field:term with a field name that doesn't exist will raise an exception
* given the same search string can be requested for different object type, this could happen quite often
*/
fun Exception.isFtsError() =
cause is SQLiteException &&
ftsErrorMessages.any { message?.contains(it) == true }
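Note: the match helper binds the sanitized term as a parameter of a raw MATCH condition; ftsSanitized strips characters that would trip FTS5's query parser while keeping the operators a search box may legitimately pass through (quotes, prefix *, field: prefixes, grouping); and isFtsError lets the DAOs above return an empty page when the term still parses as an invalid query, for example publisher:batman against an FTS table that has no publisher column. A standalone, hypothetical replica of the sanitizer, not part of the commit, with expected outputs as comments:

// Hypothetical sketch: a standalone copy of the sanitizing rules above, showing
// what actually reaches the FTS5 MATCH expression.
fun sanitize(term: String): String = term
    .replace("-", " ")                                         // so "x-men" matches the tokens "x" and "men"
    .replace("[^\\p{L}\\p{Z}\\p{N}\":+*^{}()]".toRegex(), "")  // drop characters FTS5 would reject
    .removePrefix("*")                                         // a leading * is an "unknown special query"

fun main() {
    println(sanitize("x-men"))                 // x men
    println(sanitize("batman!"))               // batman
    println(sanitize("*batman"))               // batman
    println(sanitize("title:\"dark knight\"")) // title:"dark knight"
}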

View file

@ -298,7 +298,7 @@ class OpdsController(
val pageRequest = UnpagedSorted(Sort.by(Sort.Order.asc("name")))
val collections =
if (principal.user.sharedAllLibraries) {
collectionRepository.searchAll(pageable = pageRequest)
collectionRepository.findAll(pageable = pageRequest)
} else {
collectionRepository.findAllByLibraryIds(principal.user.sharedLibrariesIds, principal.user.sharedLibrariesIds, pageable = pageRequest)
}
@ -322,7 +322,7 @@ class OpdsController(
val pageRequest = UnpagedSorted(Sort.by(Sort.Order.asc("name")))
val readLists =
if (principal.user.sharedAllLibraries) {
readListRepository.searchAll(pageable = pageRequest)
readListRepository.findAll(pageable = pageRequest)
} else {
readListRepository.findAllByLibraryIds(principal.user.sharedLibrariesIds, principal.user.sharedLibrariesIds, pageable = pageRequest)
}

View file

@ -107,8 +107,11 @@ class BookController(
@Parameter(hidden = true) page: Pageable
): Page<BookDto> {
val sort =
if (page.sort.isSorted) page.sort
else Sort.by(Sort.Order.asc("metadata.title"))
when {
page.sort.isSorted -> page.sort
!searchTerm.isNullOrBlank() -> Sort.by("relevance")
else -> Sort.by(Sort.Order.asc("metadata.title"))
}
val pageRequest =
if (unpaged) UnpagedSorted(sort)

View file

@ -75,16 +75,21 @@ class ReadListController(
@RequestParam(name = "unpaged", required = false) unpaged: Boolean = false,
@Parameter(hidden = true) page: Pageable
): Page<ReadListDto> {
val sort = when {
!searchTerm.isNullOrBlank() -> Sort.by("relevance")
else -> Sort.by(Sort.Order.asc("name"))
}
val pageRequest =
if (unpaged) UnpagedSorted(Sort.by(Sort.Order.asc("name")))
if (unpaged) UnpagedSorted(sort)
else PageRequest.of(
page.pageNumber,
page.pageSize,
Sort.by(Sort.Order.asc("name"))
sort
)
return when {
principal.user.sharedAllLibraries && libraryIds == null -> readListRepository.searchAll(
principal.user.sharedAllLibraries && libraryIds == null -> readListRepository.findAll(
searchTerm,
pageable = pageRequest
)

View file

@ -69,16 +69,21 @@ class SeriesCollectionController(
@RequestParam(name = "unpaged", required = false) unpaged: Boolean = false,
@Parameter(hidden = true) page: Pageable
): Page<CollectionDto> {
val sort = when {
!searchTerm.isNullOrBlank() -> Sort.by("relevance")
else -> Sort.by(Sort.Order.asc("name"))
}
val pageRequest =
if (unpaged) UnpagedSorted(Sort.by(Sort.Order.asc("name")))
if (unpaged) UnpagedSorted(sort)
else PageRequest.of(
page.pageNumber,
page.pageSize,
Sort.by(Sort.Order.asc("name"))
sort
)
return when {
principal.user.sharedAllLibraries && libraryIds == null -> collectionRepository.searchAll(searchTerm, pageable = pageRequest)
principal.user.sharedAllLibraries && libraryIds == null -> collectionRepository.findAll(searchTerm, pageable = pageRequest)
principal.user.sharedAllLibraries && libraryIds != null -> collectionRepository.findAllByLibraryIds(libraryIds, null, searchTerm, pageable = pageRequest)
!principal.user.sharedAllLibraries && libraryIds != null -> collectionRepository.findAllByLibraryIds(libraryIds, principal.user.sharedLibrariesIds, searchTerm, pageable = pageRequest)
else -> collectionRepository.findAllByLibraryIds(principal.user.sharedLibrariesIds, principal.user.sharedLibrariesIds, searchTerm, pageable = pageRequest)

View file

@ -125,8 +125,11 @@ class SeriesController(
@Parameter(hidden = true) page: Pageable
): Page<SeriesDto> {
val sort =
if (page.sort.isSorted) page.sort
else Sort.by(Sort.Order.asc("metadata.titleSort"))
when {
page.sort.isSorted -> page.sort
!searchTerm.isNullOrBlank() -> Sort.by("relevance")
else -> Sort.by(Sort.Order.asc("metadata.titleSort"))
}
val pageRequest =
if (unpaged) UnpagedSorted(sort)
@ -325,7 +328,8 @@ class SeriesController(
if (!principal.user.canAccessLibrary(it)) throw ResponseStatusException(HttpStatus.FORBIDDEN)
} ?: throw ResponseStatusException(HttpStatus.NOT_FOUND)
return seriesLifecycle.getThumbnailBytes(seriesId, principal.user.id) ?: throw ResponseStatusException(HttpStatus.NOT_FOUND)
return seriesLifecycle.getThumbnailBytes(seriesId, principal.user.id)
?: throw ResponseStatusException(HttpStatus.NOT_FOUND)
}
@PageableAsQueryParam

View file

@ -8,7 +8,7 @@ fun makeBook(name: String, fileLastModified: LocalDateTime = LocalDateTime.now()
Thread.sleep(5)
return Book(
name = name,
url = url ?: URL("file:/$name"),
url = url ?: URL("file:/${name.replace(" ", "_")}"),
fileLastModified = fileLastModified,
libraryId = libraryId,
seriesId = seriesId
@ -19,13 +19,13 @@ fun makeSeries(name: String, libraryId: String = "", url: URL? = null): Series {
Thread.sleep(5)
return Series(
name = name,
url = url ?: URL("file:/$name"),
url = url ?: URL("file:/${name.replace(" ", "_")}"),
fileLastModified = LocalDateTime.now(),
libraryId = libraryId
)
}
fun makeLibrary(name: String = "default", path: String = "file:/$name", id: String = TsidCreator.getTsid256().toString(), url: URL? = null): Library {
fun makeLibrary(name: String = "default", path: String = "file:/${name.replace(" ", "_")}", id: String = TsidCreator.getTsid256().toString(), url: URL? = null): Library {
return Library(
name = name,
root = url ?: URL(path),

View file

@ -1535,8 +1535,8 @@ class LibraryContentLifecycleTest(
libraryContentLifecycle.emptyTrash(library)
// then
val collections = collectionRepository.searchAll(null, Pageable.unpaged())
val readLists = readListRepository.searchAll(null, Pageable.unpaged())
val collections = collectionRepository.findAll(null, Pageable.unpaged())
val readLists = readListRepository.findAll(null, Pageable.unpaged())
assertThat(collections.content).isEmpty()
assertThat(readLists.content).isEmpty()

View file

@ -1,6 +1,7 @@
package org.gotson.komga.infrastructure.jooq
import org.assertj.core.api.Assertions.assertThat
import org.assertj.core.api.Assertions.assertThatCode
import org.gotson.komga.domain.model.BookSearchWithReadProgress
import org.gotson.komga.domain.model.KomgaUser
import org.gotson.komga.domain.model.ReadProgress
@ -8,6 +9,7 @@ import org.gotson.komga.domain.model.ReadStatus
import org.gotson.komga.domain.model.makeBook
import org.gotson.komga.domain.model.makeLibrary
import org.gotson.komga.domain.model.makeSeries
import org.gotson.komga.domain.persistence.BookMetadataRepository
import org.gotson.komga.domain.persistence.BookRepository
import org.gotson.komga.domain.persistence.KomgaUserRepository
import org.gotson.komga.domain.persistence.LibraryRepository
@ -25,6 +27,7 @@ import org.junit.jupiter.api.extension.ExtendWith
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.data.domain.PageRequest
import org.springframework.data.domain.Sort
import org.springframework.test.context.junit.jupiter.SpringExtension
@ExtendWith(SpringExtension::class)
@ -32,6 +35,7 @@ import org.springframework.test.context.junit.jupiter.SpringExtension
class BookDtoDaoTest(
@Autowired private val bookDtoDao: BookDtoDao,
@Autowired private val bookRepository: BookRepository,
@Autowired private val bookMetadataRepository: BookMetadataRepository,
@Autowired private val bookLifecycle: BookLifecycle,
@Autowired private val seriesLifecycle: SeriesLifecycle,
@Autowired private val libraryRepository: LibraryRepository,
@ -286,4 +290,131 @@ class BookDtoDaoTest(
assertThat(found.first().name).isEqualTo("2")
}
}
@Nested
inner class FullTextSearch {
@Test
fun `given books when searching by term then results are ordered by rank`() {
// given
seriesLifecycle.addBooks(
series,
listOf(
makeBook("The incredible adventures of Batman, the man who is also a bat!", seriesId = series.id, libraryId = library.id),
makeBook("Robin", seriesId = series.id, libraryId = library.id),
makeBook("Batman and Robin", seriesId = series.id, libraryId = library.id),
makeBook("Batman", seriesId = series.id, libraryId = library.id),
)
)
// when
val found = bookDtoDao.findAll(
BookSearchWithReadProgress(searchTerm = "batman"),
user.id,
UnpagedSorted(Sort.by("relevance")),
).content
// then
assertThat(found).hasSize(3)
assertThat(found.map { it.name }).containsExactly("Batman", "Batman and Robin", "The incredible adventures of Batman, the man who is also a bat!")
}
@Test
fun `given books when searching by term with accent then results are matched accent insensitive`() {
// given
val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
seriesLifecycle.addBooks(
series,
listOf(
book1,
makeBook("Robin", seriesId = series.id, libraryId = library.id),
makeBook("Batman and Robin", seriesId = series.id, libraryId = library.id),
makeBook("Batman", seriesId = series.id, libraryId = library.id),
)
)
bookMetadataRepository.findById(book1.id).let {
bookMetadataRepository.update(it.copy(title = "Éric le bleu"))
}
// when
val found = bookDtoDao.findAll(
BookSearchWithReadProgress(searchTerm = "eric"),
user.id,
UnpagedSorted(Sort.by("relevance")),
).content
// then
assertThat(found).hasSize(1)
assertThat(found.map { it.metadata.title }).containsExactly("Éric le bleu")
}
}
@Test
fun `given books when searching by ISBN then results are matched`() {
// given
val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
seriesLifecycle.addBooks(
series,
listOf(
book1,
makeBook("Robin", seriesId = series.id, libraryId = library.id),
makeBook("Batman and Robin", seriesId = series.id, libraryId = library.id),
makeBook("Batman", seriesId = series.id, libraryId = library.id),
)
)
bookMetadataRepository.findById(book1.id).let {
bookMetadataRepository.update(it.copy(isbn = "9782413016878"))
}
// when
val found = bookDtoDao.findAll(
BookSearchWithReadProgress(searchTerm = "9782413016878"),
user.id,
UnpagedSorted(Sort.by("relevance")),
).content
// then
assertThat(found).hasSize(1)
assertThat(found.map { it.name }).containsExactly("Éric le rouge")
}
@Test
fun `given books when searching by term containing hyphens then results are ordered by rank`() {
// given
seriesLifecycle.addBooks(
series,
listOf(
makeBook("Batman", seriesId = series.id, libraryId = library.id),
makeBook("Another X-Men adventure", seriesId = series.id, libraryId = library.id),
makeBook("X-Men", seriesId = series.id, libraryId = library.id),
)
)
// when
val found = bookDtoDao.findAll(
BookSearchWithReadProgress(searchTerm = "x-men"),
user.id,
UnpagedSorted(Sort.by("relevance")),
).content
// then
assertThat(found).hasSize(2)
assertThat(found.map { it.name }).containsExactly("X-Men", "Another X-Men adventure")
}
@Test
fun `when searching by unknown field then empty result are returned and no exception is thrown`() {
assertThatCode {
// when
val found = bookDtoDao.findAll(
BookSearchWithReadProgress(searchTerm = "publisher:batman"),
user.id,
UnpagedSorted(Sort.by("relevance")),
).content
// then
assertThat(found).hasSize(0)
}.doesNotThrowAnyException()
}
}