feat: better full text search

work with CJK
search by more fields

closes #592, closes #597
Gauthier Roebroeck 2021-08-09 14:58:57 +08:00
parent c73e2b11c8
commit 5aa9a95ca5
27 changed files with 2072 additions and 573 deletions

.gitignore

@@ -48,3 +48,4 @@ nbdist/
### Komga
/komga/src/main/resources/public/
/komga/artemis/
+/komga/lucene/

Dockerfile

@@ -8,6 +8,7 @@ COPY ${DEPENDENCY}/snapshot-dependencies/ ./
COPY ${DEPENDENCY}/application/ ./
ENV KOMGA_DATABASE_BACKUP_PATH="/config/database-backup.zip"
ENV KOMGA_DATABASE_FILE="/config/database.sqlite"
+ENV KOMGA_LUCENE_DATA_DIRECTORY="/config/lucene"
ENV SPRING_ARTEMIS_EMBEDDED_DATA_DIRECTORY="/config/artemis"
ENV LOGGING_FILE_NAME="/config/logs/komga.log"
ENTRYPOINT ["java", "org.springframework.boot.loader.JarLauncher"]

komga/build.gradle.kts

@@ -1,6 +1,5 @@
import org.apache.tools.ant.taskdefs.condition.Os
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
-import org.jooq.meta.jaxb.ForcedType

plugins {
  run {
@@ -58,6 +57,13 @@ dependencies {
  implementation("org.apache.commons:commons-lang3:3.12.0")
  implementation("commons-validator:commons-validator:1.7")

+  run {
+    val luceneVersion = "8.9.0"
+    implementation("org.apache.lucene:lucene-core:$luceneVersion")
+    implementation("org.apache.lucene:lucene-analyzers-common:$luceneVersion")
+    implementation("org.apache.lucene:lucene-queryparser:$luceneVersion")
+  }
+
  implementation("com.ibm.icu:icu4j:69.1")
  implementation("org.apache.tika:tika-core:1.26")
@@ -243,13 +249,6 @@ jooq {
  generator.apply {
    database.apply {
      name = "org.jooq.meta.sqlite.SQLiteDatabase"
-      forcedTypes.addAll(
-        listOf(
-          ForcedType()
-            .withName("varchar")
-            .withIncludeExpression("fts_.*.(title|.*id|isbn|publisher|name)")
-        )
-      )
    }
    target.apply {
      packageName = "org.gotson.komga.jooq"

New SQL migration: drop the FTS5 tables and triggers

@@ -0,0 +1,24 @@
+DROP TABLE fts_book_metadata;
+DROP TRIGGER book_metadata__after_insert;
+DROP TRIGGER book_metadata__after_delete;
+DROP TRIGGER book_metadata__after_update;
+
+DROP TABLE fts_series_metadata;
+DROP TRIGGER series_metadata__after_insert;
+DROP TRIGGER series_metadata__after_delete;
+DROP TRIGGER series_metadata__after_update;
+
+DROP TABLE fts_collection;
+DROP TRIGGER collection__after_insert;
+DROP TRIGGER collection__after_delete;
+DROP TRIGGER collection__after_update;
+
+DROP TABLE fts_readlist;
+DROP TRIGGER readlist__after_insert;
+DROP TRIGGER readlist__after_delete;
+DROP TRIGGER readlist__after_update;
+
+DROP TABLE fts_book_metadata_aggregation_author;
+DROP TRIGGER book_metadata_aggregation_author__after_insert;
+DROP TRIGGER book_metadata_aggregation_author__after_delete;
+DROP TRIGGER book_metadata_aggregation_author__after_update;

Task.kt

@@ -77,4 +77,9 @@ sealed class Task(priority: Int = DEFAULT_PRIORITY) : Serializable {
    override fun uniqueId(): String = "REPAIR_EXTENSION_$bookId"
    override fun toString(): String = "RepairExtension(bookId='$bookId', priority='$priority')"
  }
+
+  class RebuildIndex(priority: Int = DEFAULT_PRIORITY) : Task(priority) {
+    override fun uniqueId() = "REBUILD_INDEX"
+    override fun toString(): String = "RebuildIndex(priority='$priority')"
+  }
}

TaskHandler.kt

@@ -13,6 +13,7 @@ import org.gotson.komga.domain.service.LocalArtworkLifecycle
import org.gotson.komga.domain.service.SeriesMetadataLifecycle
import org.gotson.komga.infrastructure.jms.QUEUE_TASKS
import org.gotson.komga.infrastructure.jms.QUEUE_TASKS_SELECTOR
+import org.gotson.komga.infrastructure.search.SearchIndexLifecycle
import org.springframework.jms.annotation.JmsListener
import org.springframework.stereotype.Service
import java.nio.file.Paths
@@ -33,6 +34,7 @@ class TaskHandler(
  private val localArtworkLifecycle: LocalArtworkLifecycle,
  private val bookImporter: BookImporter,
  private val bookConverter: BookConverter,
+  private val searchIndexLifecycle: SearchIndexLifecycle,
) {

  @JmsListener(destination = QUEUE_TASKS, selector = QUEUE_TASKS_SELECTOR)
@@ -116,6 +118,8 @@ class TaskHandler(
          bookRepository.findByIdOrNull(task.bookId)?.let { book ->
            bookLifecycle.hashAndPersist(book)
          } ?: logger.warn { "Cannot execute task $task: Book does not exist" }
+
+        is Task.RebuildIndex -> searchIndexLifecycle.rebuildIndex()
      }
    }.also {
      logger.info { "Task $task executed in $it" }

TaskReceiver.kt

@@ -117,6 +117,10 @@ class TaskReceiver(
    submitTask(Task.ImportBook(sourceFile, seriesId, copyMode, destinationName, upgradeBookId, priority))
  }

+  fun rebuildIndex(priority: Int = DEFAULT_PRIORITY) {
+    submitTask(Task.RebuildIndex(priority))
+  }
+
  private fun submitTask(task: Task) {
    logger.info { "Sending task: $task" }
    jmsTemplates[task.priority]!!.convertAndSend(QUEUE_TASKS, task) {
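
End to end, the rebuild flow is: TaskReceiver.rebuildIndex() puts a Task.RebuildIndex on the JMS queue, and the TaskHandler branch above dispatches it to SearchIndexLifecycle.rebuildIndex(). A minimal sketch of a caller; this admin endpoint is hypothetical and not part of this diff:

    import org.springframework.http.HttpStatus
    import org.springframework.security.access.prepost.PreAuthorize
    import org.springframework.web.bind.annotation.PostMapping
    import org.springframework.web.bind.annotation.RequestMapping
    import org.springframework.web.bind.annotation.ResponseStatus
    import org.springframework.web.bind.annotation.RestController

    // Hypothetical admin endpoint: submits the task and returns immediately;
    // the JMS listener in TaskHandler executes the rebuild asynchronously.
    @RestController
    @RequestMapping("api/v1/tasks")
    class TaskController(private val taskReceiver: TaskReceiver) {

      @PostMapping("rebuild-index")
      @PreAuthorize("hasRole('ADMIN')")
      @ResponseStatus(HttpStatus.ACCEPTED)
      fun rebuildIndex() = taskReceiver.rebuildIndex()
    }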

KomgaProperties.kt

@@ -30,6 +30,8 @@ class KomgaProperties {
  var cors = Cors()

+  var lucene = Lucene()
+
  class RememberMe {
    @get:NotBlank
    var key: String? = null
@@ -49,4 +51,9 @@ class KomgaProperties {
    @Deprecated("Unused since 0.81.0")
    var batchSize: Int = 500
  }
+
+  class Lucene {
+    @get:NotBlank
+    var dataDirectory: String = ""
+  }
}

BookDtoDao.kt

@@ -1,8 +1,9 @@
package org.gotson.komga.infrastructure.jooq

-import mu.KotlinLogging
import org.gotson.komga.domain.model.BookSearchWithReadProgress
import org.gotson.komga.domain.model.ReadStatus
+import org.gotson.komga.infrastructure.search.LuceneEntity
+import org.gotson.komga.infrastructure.search.LuceneHelper
import org.gotson.komga.infrastructure.web.toFilePath
import org.gotson.komga.interfaces.rest.dto.AuthorDto
import org.gotson.komga.interfaces.rest.dto.BookDto
@@ -20,9 +21,9 @@ import org.jooq.DSLContext
import org.jooq.Record
import org.jooq.ResultQuery
import org.jooq.impl.DSL
-import org.jooq.impl.DSL.field
import org.jooq.impl.DSL.inline
import org.jooq.impl.DSL.lower
+import org.jooq.impl.DSL.noCondition
import org.springframework.data.domain.Page
import org.springframework.data.domain.PageImpl
import org.springframework.data.domain.PageRequest
@@ -31,11 +32,10 @@ import org.springframework.data.domain.Sort
import org.springframework.stereotype.Component
import java.net.URL

-private val logger = KotlinLogging.logger {}
-
@Component
class BookDtoDao(
-  private val dsl: DSLContext
+  private val dsl: DSLContext,
+  private val luceneHelper: LuceneHelper,
) : BookDtoRepository {

  private val b = Tables.BOOK
@@ -46,7 +46,6 @@ class BookDtoDao(
  private val s = Tables.SERIES
  private val rlb = Tables.READLIST_BOOK
  private val bt = Tables.BOOK_METADATA_TAG
-  private val fts = Tables.FTS_BOOK_METADATA

  private val sorts = mapOf(
    "name" to lower(b.NAME.udfStripAccents()),
@@ -65,13 +64,12 @@
    "metadata.releaseDate" to d.RELEASE_DATE,
    "readProgress.lastModified" to r.LAST_MODIFIED_DATE,
    "readList.number" to rlb.NUMBER,
-    "relevance" to field("rank"),
  )

  override fun findAll(search: BookSearchWithReadProgress, userId: String, pageable: Pageable): Page<BookDto> {
    val conditions = search.toCondition()

-    return findAll(conditions, userId, pageable, search.toJoinConditions(), null)
+    return findAll(conditions, userId, pageable, search.toJoinConditions(), null, search.searchTerm)
  }

  override fun findAllByReadListId(
@@ -83,7 +81,7 @@
  ): Page<BookDto> {
    val conditions = rlb.READLIST_ID.eq(readListId).and(search.toCondition())

-    return findAll(conditions, userId, pageable, search.toJoinConditions().copy(selectReadListNumber = true), filterOnLibraryIds)
+    return findAll(conditions, userId, pageable, search.toJoinConditions().copy(selectReadListNumber = true), filterOnLibraryIds, search.searchTerm)
  }

  private fun findAll(
@@ -92,46 +90,47 @@
    pageable: Pageable,
    joinConditions: JoinConditions = JoinConditions(),
    filterOnLibraryIds: Collection<String>?,
+    searchTerm: String?,
  ): Page<BookDto> {
-    return try {
-      val count = dsl.selectDistinct(b.ID)
-        .from(b)
-        .apply { if (joinConditions.fullTextSearch) join(fts).on(b.ID.eq(fts.BOOK_ID)) }
-        .leftJoin(m).on(b.ID.eq(m.BOOK_ID))
-        .leftJoin(d).on(b.ID.eq(d.BOOK_ID))
-        .leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
-        .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
-        .apply { if (joinConditions.tag) leftJoin(bt).on(b.ID.eq(bt.BOOK_ID)) }
-        .apply { if (joinConditions.selectReadListNumber) leftJoin(rlb).on(b.ID.eq(rlb.BOOK_ID)) }
-        .apply { if (joinConditions.author) leftJoin(a).on(b.ID.eq(a.BOOK_ID)) }
-        .where(conditions)
-        .groupBy(b.ID)
-        .fetch()
-        .size
-
-      val orderBy = pageable.sort.toOrderBy(sorts)
-
-      val dtos = selectBase(userId, joinConditions)
-        .where(conditions)
-        .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
-        .orderBy(orderBy)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchAndMap()
-
-      val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
-      PageImpl(
-        dtos,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count, 20), pageSort),
-        count.toLong()
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
-      }
-    }
+    val bookIds = luceneHelper.searchEntitiesIds(searchTerm, LuceneEntity.Book, if (pageable.isPaged) pageable.pageSize else 20)
+    val searchCondition = b.ID.inOrNoCondition(bookIds)
+
+    val count = dsl.selectDistinct(b.ID)
+      .from(b)
+      .leftJoin(m).on(b.ID.eq(m.BOOK_ID))
+      .leftJoin(d).on(b.ID.eq(d.BOOK_ID))
+      .leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
+      .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
+      .apply { if (joinConditions.tag) leftJoin(bt).on(b.ID.eq(bt.BOOK_ID)) }
+      .apply { if (joinConditions.selectReadListNumber) leftJoin(rlb).on(b.ID.eq(rlb.BOOK_ID)) }
+      .apply { if (joinConditions.author) leftJoin(a).on(b.ID.eq(a.BOOK_ID)) }
+      .where(conditions)
+      .and(searchCondition)
+      .groupBy(b.ID)
+      .fetch()
+      .size
+
+    val orderBy =
+      pageable.sort.mapNotNull {
+        if (it.property == "relevance" && !bookIds.isNullOrEmpty()) b.ID.sortByValues(bookIds, it.isAscending)
+        else it.toSortField(sorts)
+      }
+
+    val dtos = selectBase(userId, joinConditions)
+      .where(conditions)
+      .and(searchCondition)
+      .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
+      .orderBy(orderBy)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchAndMap()
+
+    val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
+    return PageImpl(
+      dtos,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count, 20), pageSort),
+      count.toLong()
+    )
  }

  override fun findByIdOrNull(bookId: String, userId: String): BookDto? =
@@ -248,7 +247,6 @@ class BookDtoDao(
      *r.fields()
    ).apply { if (joinConditions.selectReadListNumber) select(rlb.NUMBER) }
      .from(b)
-      .apply { if (joinConditions.fullTextSearch) join(fts).on(b.ID.eq(fts.BOOK_ID)) }
      .leftJoin(m).on(b.ID.eq(m.BOOK_ID))
      .leftJoin(d).on(b.ID.eq(d.BOOK_ID))
      .leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
@@ -279,9 +277,8 @@
  }

  private fun BookSearchWithReadProgress.toCondition(): Condition {
-    var c: Condition = DSL.trueCondition()
+    var c: Condition = noCondition()

-    if (!searchTerm.isNullOrBlank()) c = c.and(fts.match(searchTerm))
    if (!libraryIds.isNullOrEmpty()) c = c.and(b.LIBRARY_ID.`in`(libraryIds))
    if (!seriesIds.isNullOrEmpty()) c = c.and(b.SERIES_ID.`in`(seriesIds))
    if (!mediaStatus.isNullOrEmpty()) c = c.and(m.STATUS.`in`(mediaStatus))
@@ -303,7 +300,7 @@
    }

    if (!authors.isNullOrEmpty()) {
-      var ca: Condition = DSL.falseCondition()
+      var ca = noCondition()
      authors.forEach {
        ca = ca.or(a.NAME.equalIgnoreCase(it.name).and(a.ROLE.equalIgnoreCase(it.role)))
      }
@@ -317,14 +314,12 @@
      JoinConditions(
        tag = !tags.isNullOrEmpty(),
        author = !authors.isNullOrEmpty(),
-        fullTextSearch = !searchTerm.isNullOrBlank(),
      )

  private data class JoinConditions(
    val selectReadListNumber: Boolean = false,
    val tag: Boolean = false,
    val author: Boolean = false,
-    val fullTextSearch: Boolean = false,
  )

  private fun BookRecord.toDto(media: MediaDto, metadata: BookMetadataDto, readProgress: ReadProgressDto?) =
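
Every DAO converted in this commit follows the same two-phase shape: Lucene resolves the search term to a list of entity IDs, then jOOQ filters and sorts on those IDs, so SQL never parses the user's query again (which is what made the old fts5 MATCH calls throw on odd input). A condensed sketch of the pattern; searchEntitiesIds appears in this diff only through its call sites, so the nullability shown here is inferred from them:

    // Phase 1: Lucene. A null/blank search term is assumed to yield null
    // (no search requested); a term with no hits yields an empty list.
    val bookIds: List<String>? = luceneHelper.searchEntitiesIds(searchTerm, LuceneEntity.Book, 20)

    // Phase 2: jOOQ. null -> noCondition() (unfiltered), empty list ->
    // falseCondition() (no rows), else ID IN (...). A "relevance" sort is
    // served by sortByValues, which preserves Lucene's ranking order.
    val searchCondition = Tables.BOOK.ID.inOrNoCondition(bookIds)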

ReadListDao.kt

@@ -1,8 +1,9 @@
package org.gotson.komga.infrastructure.jooq

-import mu.KotlinLogging
import org.gotson.komga.domain.model.ReadList
import org.gotson.komga.domain.persistence.ReadListRepository
+import org.gotson.komga.infrastructure.search.LuceneEntity
+import org.gotson.komga.infrastructure.search.LuceneHelper
import org.gotson.komga.jooq.Tables
import org.gotson.komga.jooq.tables.records.ReadlistRecord
import org.jooq.DSLContext
@@ -20,21 +21,18 @@ import java.time.LocalDateTime
import java.time.ZoneId
import java.util.SortedMap

-private val logger = KotlinLogging.logger {}
-
@Component
class ReadListDao(
-  private val dsl: DSLContext
+  private val dsl: DSLContext,
+  private val luceneHelper: LuceneHelper,
) : ReadListRepository {

  private val rl = Tables.READLIST
  private val rlb = Tables.READLIST_BOOK
  private val b = Tables.BOOK
-  private val fts = Tables.FTS_READLIST

  private val sorts = mapOf(
    "name" to DSL.lower(rl.NAME.udfStripAccents()),
-    "relevance" to DSL.field("rank"),
  )

  override fun findByIdOrNull(readListId: String): ReadList? =
@@ -51,79 +49,72 @@
      .firstOrNull()

  override fun findAll(search: String?, pageable: Pageable): Page<ReadList> {
-    val conditions = if (!search.isNullOrBlank()) searchCondition(search)
-    else DSL.trueCondition()
-
-    return try {
-      val count = dsl.selectCount()
-        .from(rl)
-        .apply { if (!search.isNullOrBlank()) join(fts).on(rl.ID.eq(fts.ID)) }
-        .where(conditions)
-        .fetchOne(0, Long::class.java) ?: 0
-
-      val orderBy = pageable.sort.toOrderBy(sorts)
-
-      val items = selectBase(!search.isNullOrBlank())
-        .where(conditions)
-        .orderBy(orderBy)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchAndMap(null)
-
-      val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
-      PageImpl(
-        items,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count.toInt(), 20), pageSort),
-        count
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
-      }
-    }
+    val readListIds = luceneHelper.searchEntitiesIds(search, LuceneEntity.ReadList, if (pageable.isPaged) pageable.pageSize else 20)
+    val searchCondition = rl.ID.inOrNoCondition(readListIds)
+
+    val count = dsl.selectCount()
+      .from(rl)
+      .where(searchCondition)
+      .fetchOne(0, Long::class.java) ?: 0
+
+    val orderBy =
+      pageable.sort.mapNotNull {
+        if (it.property == "relevance" && !readListIds.isNullOrEmpty()) rl.ID.sortByValues(readListIds, it.isAscending)
+        else it.toSortField(sorts)
+      }
+
+    val items = selectBase()
+      .where(searchCondition)
+      .orderBy(orderBy)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchAndMap(null)
+
+    val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
+    return PageImpl(
+      items,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count.toInt(), 20), pageSort),
+      count
+    )
  }

  override fun findAllByLibraryIds(belongsToLibraryIds: Collection<String>, filterOnLibraryIds: Collection<String>?, search: String?, pageable: Pageable): Page<ReadList> {
+    val readListIds = luceneHelper.searchEntitiesIds(search, LuceneEntity.ReadList, if (pageable.isPaged) pageable.pageSize else 20)
+    val searchCondition = rl.ID.inOrNoCondition(readListIds)
+
    val conditions = b.LIBRARY_ID.`in`(belongsToLibraryIds)
-      .apply { if (!search.isNullOrBlank()) and(searchCondition(search)) }
+      .and(searchCondition)
      .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }

-    return try {
-      val ids = dsl.selectDistinct(rl.ID)
-        .from(rl)
-        .apply { if (!search.isNullOrBlank()) join(fts).on(rl.ID.eq(fts.ID)) }
-        .leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
-        .leftJoin(b).on(rlb.BOOK_ID.eq(b.ID))
-        .where(conditions)
-        .fetch(0, String::class.java)
-
-      val count = ids.size
-
-      val orderBy = pageable.sort.toOrderBy(sorts)
-
-      val items = selectBase(!search.isNullOrBlank())
-        .where(rl.ID.`in`(ids))
-        .and(conditions)
-        .orderBy(orderBy)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchAndMap(filterOnLibraryIds)
-
-      val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
-      PageImpl(
-        items,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count, 20), pageSort),
-        count.toLong()
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
-      }
-    }
+    val ids = dsl.selectDistinct(rl.ID)
+      .from(rl)
+      .leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
+      .leftJoin(b).on(rlb.BOOK_ID.eq(b.ID))
+      .where(conditions)
+      .fetch(0, String::class.java)
+
+    val count = ids.size
+
+    val orderBy =
+      pageable.sort.mapNotNull {
+        if (it.property == "relevance" && !readListIds.isNullOrEmpty()) rl.ID.sortByValues(readListIds, it.isAscending)
+        else it.toSortField(sorts)
+      }
+
+    val items = selectBase()
+      .where(rl.ID.`in`(ids))
+      .and(conditions)
+      .orderBy(orderBy)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchAndMap(filterOnLibraryIds)
+
+    val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
+    return PageImpl(
+      items,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count, 20), pageSort),
+      count.toLong()
+    )
  }

  override fun findAllContainingBookId(containsBookId: String, filterOnLibraryIds: Collection<String>?): Collection<ReadList> {
@@ -157,13 +148,9 @@
      .fetchAndMap(null)
      .firstOrNull()

-  private fun searchCondition(search: String) =
-    fts.match(search)
-
-  private fun selectBase(joinFts: Boolean = false) =
+  private fun selectBase() =
    dsl.selectDistinct(*rl.fields())
      .from(rl)
-      .apply { if (joinFts) join(fts).on(rl.ID.eq(fts.ID)) }
      .leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
      .leftJoin(b).on(rlb.BOOK_ID.eq(b.ID))

ReferentialDao.kt

@@ -1,6 +1,5 @@
package org.gotson.komga.infrastructure.jooq

-import mu.KotlinLogging
import org.gotson.komga.domain.model.Author
import org.gotson.komga.domain.persistence.ReferentialRepository
import org.gotson.komga.infrastructure.language.stripAccents
@@ -8,10 +7,9 @@ import org.gotson.komga.jooq.Tables
import org.gotson.komga.jooq.tables.records.BookMetadataAggregationAuthorRecord
import org.gotson.komga.jooq.tables.records.BookMetadataAuthorRecord
import org.jooq.DSLContext
-import org.jooq.impl.DSL.field
import org.jooq.impl.DSL.lower
+import org.jooq.impl.DSL.noCondition
import org.jooq.impl.DSL.select
-import org.jooq.impl.DSL.trueCondition
import org.springframework.data.domain.Page
import org.springframework.data.domain.PageImpl
import org.springframework.data.domain.PageRequest
@@ -20,8 +18,6 @@ import org.springframework.data.domain.Sort
import org.springframework.stereotype.Component
import java.time.LocalDate

-private val logger = KotlinLogging.logger {}
-
@Component
class ReferentialDao(
  private val dsl: DSLContext
@@ -39,7 +35,6 @@ class ReferentialDao(
  private val st = Tables.SERIES_METADATA_TAG
  private val cs = Tables.COLLECTION_SERIES
  private val rb = Tables.READLIST_BOOK
-  private val ftsAuthors = Tables.FTS_BOOK_METADATA_AGGREGATION_AUTHOR

  override fun findAllAuthorsByName(search: String, filterOnLibraryIds: Collection<String>?): List<Author> =
    dsl.selectDistinct(a.NAME, a.ROLE)
@@ -118,56 +113,46 @@ class ReferentialDao(
  )

  private fun findAuthorsByName(search: String?, role: String?, filterOnLibraryIds: Collection<String>?, pageable: Pageable, filterBy: FilterBy?): Page<Author> {
-    return try {
-      val query = dsl.selectDistinct(bmaa.NAME, bmaa.ROLE)
-        .from(bmaa)
-        .apply { if (!search.isNullOrBlank()) join(ftsAuthors).on(ftsAuthors.rowid().eq(bmaa.rowid())) }
-        .apply { if (filterOnLibraryIds != null || filterBy?.type == FilterByType.LIBRARY) leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) }
-        .apply { if (filterBy?.type == FilterByType.COLLECTION) leftJoin(cs).on(bmaa.SERIES_ID.eq(cs.SERIES_ID)) }
-        .apply {
-          if (filterBy?.type == FilterByType.READLIST)
-            leftJoin(b).on(bmaa.SERIES_ID.eq(b.SERIES_ID))
-              .leftJoin(rb).on(b.ID.eq(rb.BOOK_ID))
-        }
-        .where(trueCondition())
-        .apply { if (!search.isNullOrBlank()) and(ftsAuthors.match(search)) }
-        .apply { role?.let { and(bmaa.ROLE.eq(role)) } }
-        .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
-        .apply {
-          filterBy?.let {
-            when (it.type) {
-              FilterByType.LIBRARY -> and(s.LIBRARY_ID.eq(it.id))
-              FilterByType.COLLECTION -> and(cs.COLLECTION_ID.eq(it.id))
-              FilterByType.SERIES -> and(bmaa.SERIES_ID.eq(it.id))
-              FilterByType.READLIST -> and(rb.READLIST_ID.eq(it.id))
-            }
-          }
-        }
-
-      val count = dsl.fetchCount(query)
-
-      val sort = if (!search.isNullOrBlank()) field("rank")
-      else lower(bmaa.NAME.udfStripAccents())
-
-      val items = query
-        .orderBy(sort)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchInto(a)
-        .map { it.toDomain() }
-
-      val pageSort = Sort.by("relevance")
-      PageImpl(
-        items,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count, 20), pageSort),
-        count.toLong()
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
-      }
-    }
+    val query = dsl.selectDistinct(bmaa.NAME, bmaa.ROLE)
+      .from(bmaa)
+      .apply { if (filterOnLibraryIds != null || filterBy?.type == FilterByType.LIBRARY) leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) }
+      .apply { if (filterBy?.type == FilterByType.COLLECTION) leftJoin(cs).on(bmaa.SERIES_ID.eq(cs.SERIES_ID)) }
+      .apply {
+        if (filterBy?.type == FilterByType.READLIST)
+          leftJoin(b).on(bmaa.SERIES_ID.eq(b.SERIES_ID))
+            .leftJoin(rb).on(b.ID.eq(rb.BOOK_ID))
+      }
+      .where(noCondition())
+      .apply { search?.let { and(bmaa.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents())) } }
+      .apply { role?.let { and(bmaa.ROLE.eq(role)) } }
+      .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
+      .apply {
+        filterBy?.let {
+          when (it.type) {
+            FilterByType.LIBRARY -> and(s.LIBRARY_ID.eq(it.id))
+            FilterByType.COLLECTION -> and(cs.COLLECTION_ID.eq(it.id))
+            FilterByType.SERIES -> and(bmaa.SERIES_ID.eq(it.id))
+            FilterByType.READLIST -> and(rb.READLIST_ID.eq(it.id))
+          }
+        }
+      }
+
+    val count = dsl.fetchCount(query)
+
+    val sort = lower(bmaa.NAME.udfStripAccents())
+
+    val items = query
+      .orderBy(sort)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchInto(a)
+      .map { it.toDomain() }
+
+    val pageSort = Sort.by("relevance")
+    return PageImpl(
+      items,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count, 20), pageSort),
+      count.toLong()
+    )
  }

  override fun findAllAuthorsNamesByName(search: String, filterOnLibraryIds: Collection<String>?): List<String> =

SeriesCollectionDao.kt

@@ -1,8 +1,9 @@
package org.gotson.komga.infrastructure.jooq

-import mu.KotlinLogging
import org.gotson.komga.domain.model.SeriesCollection
import org.gotson.komga.domain.persistence.SeriesCollectionRepository
+import org.gotson.komga.infrastructure.search.LuceneEntity
+import org.gotson.komga.infrastructure.search.LuceneHelper
import org.gotson.komga.jooq.Tables
import org.gotson.komga.jooq.tables.records.CollectionRecord
import org.jooq.DSLContext
@@ -19,21 +20,18 @@ import org.springframework.transaction.annotation.Transactional
import java.time.LocalDateTime
import java.time.ZoneId

-private val logger = KotlinLogging.logger {}
-
@Component
class SeriesCollectionDao(
-  private val dsl: DSLContext
+  private val dsl: DSLContext,
+  private val luceneHelper: LuceneHelper,
) : SeriesCollectionRepository {

  private val c = Tables.COLLECTION
  private val cs = Tables.COLLECTION_SERIES
  private val s = Tables.SERIES
-  private val fts = Tables.FTS_COLLECTION

  private val sorts = mapOf(
    "name" to DSL.lower(c.NAME.udfStripAccents()),
-    "relevance" to DSL.field("rank"),
  )

  override fun findByIdOrNull(collectionId: String): SeriesCollection? =
@@ -50,78 +48,72 @@ class SeriesCollectionDao(
      .firstOrNull()

  override fun findAll(search: String?, pageable: Pageable): Page<SeriesCollection> {
-    val conditions = if (!search.isNullOrBlank()) searchCondition(search)
-    else DSL.trueCondition()
-
-    return try {
-      val count = dsl.selectCount()
-        .from(c)
-        .apply { if (!search.isNullOrBlank()) join(fts).on(c.ID.eq(fts.ID)) }
-        .where(conditions)
-        .fetchOne(0, Long::class.java) ?: 0
-
-      val orderBy = pageable.sort.toOrderBy(sorts)
-
-      val items = selectBase(!search.isNullOrBlank())
-        .where(conditions)
-        .orderBy(orderBy)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchAndMap(null)
-
-      val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
-      PageImpl(
-        items,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count.toInt(), 20), pageSort),
-        count
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
-      }
-    }
+    val collectionIds = luceneHelper.searchEntitiesIds(search, LuceneEntity.Collection, if (pageable.isPaged) pageable.pageSize else 20)
+    val searchCondition = c.ID.inOrNoCondition(collectionIds)
+
+    val count = dsl.selectCount()
+      .from(c)
+      .where(searchCondition)
+      .fetchOne(0, Long::class.java) ?: 0
+
+    val orderBy =
+      pageable.sort.mapNotNull {
+        if (it.property == "relevance" && !collectionIds.isNullOrEmpty()) c.ID.sortByValues(collectionIds, it.isAscending)
+        else it.toSortField(sorts)
+      }
+
+    val items = selectBase()
+      .where(searchCondition)
+      .orderBy(orderBy)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchAndMap(null)
+
+    val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
+    return PageImpl(
+      items,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count.toInt(), 20), pageSort),
+      count
+    )
  }

  override fun findAllByLibraryIds(belongsToLibraryIds: Collection<String>, filterOnLibraryIds: Collection<String>?, search: String?, pageable: Pageable): Page<SeriesCollection> {
+    val collectionIds = luceneHelper.searchEntitiesIds(search, LuceneEntity.Collection, if (pageable.isPaged) pageable.pageSize else 20)
+    val searchCondition = c.ID.inOrNoCondition(collectionIds)
+
    val conditions = s.LIBRARY_ID.`in`(belongsToLibraryIds)
-      .apply { if (!search.isNullOrBlank()) and(searchCondition(search)) }
+      .and(searchCondition)
      .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }

-    return try {
-      val ids = dsl.selectDistinct(c.ID)
-        .from(c)
-        .leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
-        .leftJoin(s).on(cs.SERIES_ID.eq(s.ID))
-        .where(conditions)
-        .fetch(0, String::class.java)
-
-      val count = ids.size
-
-      val orderBy = pageable.sort.toOrderBy(sorts)
-
-      val items = selectBase(!search.isNullOrBlank())
-        .where(c.ID.`in`(ids))
-        .and(conditions)
-        .orderBy(orderBy)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchAndMap(filterOnLibraryIds)
-
-      val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
-      PageImpl(
-        items,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count, 20), pageSort),
-        count.toLong()
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
-      }
-    }
+    val ids = dsl.selectDistinct(c.ID)
+      .from(c)
+      .leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
+      .leftJoin(s).on(cs.SERIES_ID.eq(s.ID))
+      .where(conditions)
+      .fetch(0, String::class.java)
+
+    val count = ids.size
+
+    val orderBy =
+      pageable.sort.mapNotNull {
+        if (it.property == "relevance" && !collectionIds.isNullOrEmpty()) c.ID.sortByValues(collectionIds, it.isAscending)
+        else it.toSortField(sorts)
+      }
+
+    val items = selectBase()
+      .where(c.ID.`in`(ids))
+      .and(conditions)
+      .orderBy(orderBy)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchAndMap(filterOnLibraryIds)
+
+    val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
+    return PageImpl(
+      items,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count, 20), pageSort),
+      count.toLong()
+    )
  }

  override fun findAllContainingSeriesId(containsSeriesId: String, filterOnLibraryIds: Collection<String>?): Collection<SeriesCollection> {
@@ -155,13 +147,9 @@ class SeriesCollectionDao(
      .fetchAndMap(null)
      .firstOrNull()

-  private fun searchCondition(search: String) =
-    fts.match(search)
-
-  private fun selectBase(joinFts: Boolean = false) =
+  private fun selectBase() =
    dsl.selectDistinct(*c.fields())
      .from(c)
-      .apply { if (joinFts) join(fts).on(c.ID.eq(fts.ID)) }
      .leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
      .leftJoin(s).on(cs.SERIES_ID.eq(s.ID))
@@ -232,6 +220,7 @@ class SeriesCollectionDao(
  @Transactional
  override fun delete(collectionId: String) {
    dsl.deleteFrom(cs).where(cs.COLLECTION_ID.eq(collectionId)).execute()
    dsl.deleteFrom(c).where(c.ID.eq(collectionId)).execute()
  }

SeriesDtoDao.kt

@@ -4,6 +4,8 @@ import mu.KotlinLogging
import org.gotson.komga.domain.model.ReadStatus
import org.gotson.komga.domain.model.SeriesSearch
import org.gotson.komga.domain.model.SeriesSearchWithReadProgress
+import org.gotson.komga.infrastructure.search.LuceneEntity
+import org.gotson.komga.infrastructure.search.LuceneHelper
import org.gotson.komga.infrastructure.web.toFilePath
import org.gotson.komga.interfaces.rest.dto.AuthorDto
import org.gotson.komga.interfaces.rest.dto.BookMetadataAggregationDto
@@ -43,7 +45,8 @@ const val BOOKS_READ_COUNT = "booksReadCount"
@Component
class SeriesDtoDao(
-  private val dsl: DSLContext
+  private val dsl: DSLContext,
+  private val luceneHelper: LuceneHelper,
) : SeriesDtoRepository {

  companion object {
@@ -57,7 +60,6 @@ class SeriesDtoDao(
  private val bma = Tables.BOOK_METADATA_AGGREGATION
  private val bmaa = Tables.BOOK_METADATA_AGGREGATION_AUTHOR
  private val bmat = Tables.BOOK_METADATA_AGGREGATION_TAG
-  private val fts = Tables.FTS_SERIES_METADATA

  val countUnread: AggregateFunction<BigDecimal> = DSL.sum(DSL.`when`(r.COMPLETED.isNull, 1).otherwise(0))
  val countRead: AggregateFunction<BigDecimal> = DSL.sum(DSL.`when`(r.COMPLETED.isTrue, 1).otherwise(0))
@@ -81,13 +83,12 @@ class SeriesDtoDao(
    "collection.number" to cs.NUMBER,
    "name" to lower(s.NAME.udfStripAccents()),
    "booksCount" to s.BOOK_COUNT,
-    "relevance" to DSL.field("rank"),
  )

  override fun findAll(search: SeriesSearchWithReadProgress, userId: String, pageable: Pageable): Page<SeriesDto> {
    val conditions = search.toCondition()

-    return findAll(conditions, userId, pageable, search.toJoinConditions())
+    return findAll(conditions, userId, pageable, search.toJoinConditions(), search.searchTerm)
  }

  override fun findAllByCollectionId(
@@ -99,7 +100,7 @@ class SeriesDtoDao(
    val conditions = search.toCondition().and(cs.COLLECTION_ID.eq(collectionId))
    val joinConditions = search.toJoinConditions().copy(selectCollectionNumber = true, collection = true)

-    return findAll(conditions, userId, pageable, joinConditions)
+    return findAll(conditions, userId, pageable, joinConditions, search.searchTerm)
  }

  override fun findAllRecentlyUpdated(
@@ -110,41 +111,35 @@ class SeriesDtoDao(
    val conditions = search.toCondition()
      .and(s.CREATED_DATE.ne(s.LAST_MODIFIED_DATE))

-    return findAll(conditions, userId, pageable, search.toJoinConditions())
+    return findAll(conditions, userId, pageable, search.toJoinConditions(), search.searchTerm)
  }

  override fun countByFirstCharacter(search: SeriesSearchWithReadProgress, userId: String): List<GroupCountDto> {
    val conditions = search.toCondition()
    val joinConditions = search.toJoinConditions()
+    val seriesIds = luceneHelper.searchEntitiesIds(search.searchTerm, LuceneEntity.Series, 20)
+    val searchCondition = s.ID.inOrNoCondition(seriesIds)

    val firstChar = lower(substring(d.TITLE_SORT, 1, 1))
-    return try {
-      dsl.select(firstChar, count())
-        .from(s)
-        .apply { if (joinConditions.fullTextSearch) join(fts).on(s.ID.eq(fts.SERIES_ID)) }
-        .leftJoin(d).on(s.ID.eq(d.SERIES_ID))
-        .leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
-        .leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
-        .apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
-        .apply {
-          if (joinConditions.tag)
-            leftJoin(st).on(s.ID.eq(st.SERIES_ID))
-              .leftJoin(bmat).on(s.ID.eq(bmat.SERIES_ID))
-        }
-        .apply { if (joinConditions.collection) leftJoin(cs).on(s.ID.eq(cs.SERIES_ID)) }
-        .apply { if (joinConditions.aggregationAuthor) leftJoin(bmaa).on(s.ID.eq(bmaa.SERIES_ID)) }
-        .where(conditions)
-        .groupBy(firstChar)
-        .map {
-          GroupCountDto(it.value1(), it.value2())
-        }
-    } catch (e: Exception) {
-      if (e.isFtsError()) emptyList()
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
-      }
-    }
+    return dsl.select(firstChar, count())
+      .from(s)
+      .leftJoin(d).on(s.ID.eq(d.SERIES_ID))
+      .leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
+      .leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
+      .apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
+      .apply {
+        if (joinConditions.tag)
+          leftJoin(st).on(s.ID.eq(st.SERIES_ID))
+            .leftJoin(bmat).on(s.ID.eq(bmat.SERIES_ID))
+      }
+      .apply { if (joinConditions.collection) leftJoin(cs).on(s.ID.eq(cs.SERIES_ID)) }
+      .apply { if (joinConditions.aggregationAuthor) leftJoin(bmaa).on(s.ID.eq(bmaa.SERIES_ID)) }
+      .where(conditions)
+      .and(searchCondition)
+      .groupBy(firstChar)
+      .map {
+        GroupCountDto(it.value1(), it.value2())
+      }
  }

  override fun findByIdOrNull(seriesId: String, userId: String): SeriesDto? =
@@ -161,7 +156,6 @@ class SeriesDtoDao(
    dsl.selectDistinct(*groupFields)
      .apply { if (joinConditions.selectCollectionNumber) select(cs.NUMBER) }
      .from(s)
-      .apply { if (joinConditions.fullTextSearch) join(fts).on(s.ID.eq(fts.SERIES_ID)) }
      .leftJoin(d).on(s.ID.eq(d.SERIES_ID))
      .leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
      .leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
@@ -178,48 +172,49 @@ class SeriesDtoDao(
    conditions: Condition,
    userId: String,
    pageable: Pageable,
-    joinConditions: JoinConditions = JoinConditions()
+    joinConditions: JoinConditions = JoinConditions(),
+    searchTerm: String?,
  ): Page<SeriesDto> {
-    return try {
-      val count = dsl.select(count(s.ID))
-        .from(s)
-        .apply { if (joinConditions.fullTextSearch) join(fts).on(s.ID.eq(fts.SERIES_ID)) }
-        .leftJoin(d).on(s.ID.eq(d.SERIES_ID))
-        .leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
-        .leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
-        .apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
-        .apply {
-          if (joinConditions.tag)
-            leftJoin(st).on(s.ID.eq(st.SERIES_ID))
-              .leftJoin(bmat).on(s.ID.eq(bmat.SERIES_ID))
-        }
-        .apply { if (joinConditions.collection) leftJoin(cs).on(s.ID.eq(cs.SERIES_ID)) }
-        .apply { if (joinConditions.aggregationAuthor) leftJoin(bmaa).on(s.ID.eq(bmaa.SERIES_ID)) }
-        .where(conditions)
-        .fetchOne(count(s.ID)) ?: 0
-
-      val orderBy = pageable.sort.toOrderBy(sorts)
-
-      val dtos = selectBase(userId, joinConditions)
-        .where(conditions)
-        .orderBy(orderBy)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchAndMap()
-
-      val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
-      PageImpl(
-        dtos,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count, 20), pageSort),
-        count.toLong()
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
-      }
-    }
+    val seriesIds = luceneHelper.searchEntitiesIds(searchTerm, LuceneEntity.Series, if (pageable.isPaged) pageable.pageSize else 20)
+    val searchCondition = s.ID.inOrNoCondition(seriesIds)
+
+    val count = dsl.select(count(s.ID))
+      .from(s)
+      .leftJoin(d).on(s.ID.eq(d.SERIES_ID))
+      .leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
+      .leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
+      .apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
+      .apply {
+        if (joinConditions.tag)
+          leftJoin(st).on(s.ID.eq(st.SERIES_ID))
+            .leftJoin(bmat).on(s.ID.eq(bmat.SERIES_ID))
+      }
+      .apply { if (joinConditions.collection) leftJoin(cs).on(s.ID.eq(cs.SERIES_ID)) }
+      .apply { if (joinConditions.aggregationAuthor) leftJoin(bmaa).on(s.ID.eq(bmaa.SERIES_ID)) }
+      .where(conditions)
+      .and(searchCondition)
+      .fetchOne(count(s.ID)) ?: 0
+
+    val orderBy =
+      pageable.sort.mapNotNull {
+        if (it.property == "relevance" && !seriesIds.isNullOrEmpty()) s.ID.sortByValues(seriesIds, it.isAscending)
+        else it.toSortField(sorts)
+      }
+
+    val dtos = selectBase(userId, joinConditions)
+      .where(conditions)
+      .and(searchCondition)
+      .orderBy(orderBy)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchAndMap()
+
+    val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
+    return PageImpl(
+      dtos,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count, 20), pageSort),
+      count.toLong()
    )
  }

  private fun readProgressConditionSeries(userId: String): Condition = rs.USER_ID.eq(userId).or(rs.USER_ID.isNull)
@@ -266,9 +261,8 @@ class SeriesDtoDao(
  }

  private fun SeriesSearchWithReadProgress.toCondition(): Condition {
-    var c: Condition = DSL.trueCondition()
+    var c = DSL.noCondition()

-    if (!searchTerm.isNullOrBlank()) c = c.and(fts.match(searchTerm))
    if (!libraryIds.isNullOrEmpty()) c = c.and(s.LIBRARY_ID.`in`(libraryIds))
    if (!collectionIds.isNullOrEmpty()) c = c.and(cs.COLLECTION_ID.`in`(collectionIds))
    searchRegex?.let { c = c.and((it.second.toColumn()).likeRegex(it.first)) }
@@ -280,14 +274,14 @@ class SeriesDtoDao(
    if (!genres.isNullOrEmpty()) c = c.and(lower(g.GENRE).`in`(genres.map { it.lowercase() }))
    if (!tags.isNullOrEmpty()) c = c.and(lower(st.TAG).`in`(tags.map { it.lowercase() }).or(lower(bmat.TAG).`in`(tags.map { it.lowercase() })))
    if (!ageRatings.isNullOrEmpty()) {
-      val c1 = if (ageRatings.contains(null)) d.AGE_RATING.isNull else DSL.falseCondition()
-      val c2 = if (ageRatings.filterNotNull().isNotEmpty()) d.AGE_RATING.`in`(ageRatings.filterNotNull()) else DSL.falseCondition()
+      val c1 = if (ageRatings.contains(null)) d.AGE_RATING.isNull else DSL.noCondition()
+      val c2 = if (ageRatings.filterNotNull().isNotEmpty()) d.AGE_RATING.`in`(ageRatings.filterNotNull()) else DSL.noCondition()
      c = c.and(c1.or(c2))
    }
    // cast to String is necessary for SQLite, else the years in the IN block are coerced to Int, even though YEAR for SQLite uses strftime (string)
    if (!releaseYears.isNullOrEmpty()) c = c.and(DSL.year(bma.RELEASE_DATE).cast(String::class.java).`in`(releaseYears))

    if (!authors.isNullOrEmpty()) {
-      var ca: Condition = DSL.falseCondition()
+      var ca = DSL.noCondition()
      authors.forEach {
        ca = ca.or(bmaa.NAME.equalIgnoreCase(it.name).and(bmaa.ROLE.equalIgnoreCase(it.role)))
      }
@@ -320,7 +314,6 @@ class SeriesDtoDao(
      tag = !tags.isNullOrEmpty(),
      collection = !collectionIds.isNullOrEmpty(),
      aggregationAuthor = !authors.isNullOrEmpty(),
-      fullTextSearch = !searchTerm.isNullOrBlank(),
    )

  private data class JoinConditions(
@@ -329,7 +322,6 @@ class SeriesDtoDao(
    val tag: Boolean = false,
    val collection: Boolean = false,
    val aggregationAuthor: Boolean = false,
-    val fullTextSearch: Boolean = false,
  )

  private fun SeriesRecord.toDto(

org.gotson.komga.infrastructure.jooq (extension functions)

@@ -4,10 +4,8 @@ import org.gotson.komga.infrastructure.datasource.SqliteUdfDataSource
import org.jooq.Condition
import org.jooq.Field
import org.jooq.SortField
-import org.jooq.Table
import org.jooq.impl.DSL
import org.springframework.data.domain.Sort
-import org.sqlite.SQLiteException
import java.time.LocalDateTime
import java.time.ZoneId
import java.time.ZoneOffset
@@ -17,8 +15,26 @@ fun LocalDateTime.toUTC(): LocalDateTime =

fun Sort.toOrderBy(sorts: Map<String, Field<out Any>>): List<SortField<out Any>> =
  this.mapNotNull {
-    val f = sorts[it.property]
-    if (it.isAscending) f?.asc() else f?.desc()
+    it.toSortField(sorts)
+  }
+
+fun Sort.Order.toSortField(sorts: Map<String, Field<out Any>>): SortField<out Any>? {
+  val f = sorts[property] ?: return null
+  return if (isAscending) f.asc() else f.desc()
+}
+
+fun Field<String>.sortByValues(values: List<String>, asc: Boolean = true): Field<Int> {
+  var c = DSL.choose(this).`when`("dummy dsl", Int.MAX_VALUE)
+  val multiplier = if (asc) 1 else -1
+  values.forEachIndexed { index, value -> c = c.`when`(value, index * multiplier) }
+  return c.otherwise(Int.MAX_VALUE)
+}
+
+fun Field<String>.inOrNoCondition(list: List<String>?): Condition =
+  when {
+    list == null -> DSL.noCondition()
+    list.isEmpty() -> DSL.falseCondition()
+    else -> this.`in`(list)
  }

fun LocalDateTime.toCurrentTimeZone(): LocalDateTime =
@@ -26,21 +42,3 @@ fun LocalDateTime.toCurrentTimeZone(): LocalDateTime =

fun Field<String>.udfStripAccents() =
  DSL.function(SqliteUdfDataSource.udfStripAccents, String::class.java, this)
-
-fun Table<*>.match(term: String): Condition =
-  DSL.condition("{0} MATCH {1}", DSL.field(this.name), term.ftsSanitized())
-
-fun String.ftsSanitized() = this
-  .replace("-", " ") // to better match queries like "x-men"
-  .replace("[^\\p{L}\\p{Z}\\p{N}\":+*^{}()]".toRegex(), "") // to avoid fts5 syntax error
-  .removePrefix("*") // to avoid unknown special query
-
-private val ftsErrorMessages = listOf("no such column", "unknown special query", "fts5: syntax error near", "unterminated string")
-
-/**
- * FTS queries of the form field:term with a field name that doesn't exist will raise an exception
- * given the same search string can be requested for different object type, this could happen quite often
- */
-fun Exception.isFtsError() =
-  cause is SQLiteException &&
-    ftsErrorMessages.any { message?.contains(it) == true }
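
sortByValues turns an ordered ID list into a CASE expression so the database returns rows in exactly the order Lucene ranked them; the initial `when("dummy dsl", ...)` clause only seeds jOOQ's typed choose() chain and never matches a real ID. Roughly, for an assumed input:

    // Tables.BOOK.ID.sortByValues(listOf("b", "a", "c"), asc = true)
    // orders by:
    //   CASE BOOK.ID WHEN 'dummy dsl' THEN 2147483647
    //                WHEN 'b' THEN 0
    //                WHEN 'a' THEN 1
    //                WHEN 'c' THEN 2
    //                ELSE 2147483647 END
    // so Lucene's ranking is preserved and IDs missing from the list sort
    // last; with asc = false the indices are negated, reversing the order.
    val orderField = Tables.BOOK.ID.sortByValues(listOf("b", "a", "c"), asc = true)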

org.gotson.komga.infrastructure.language (extension functions)

@@ -1,9 +1,12 @@
package org.gotson.komga.infrastructure.language

import org.apache.commons.lang3.StringUtils
+import java.time.LocalDate
import java.time.LocalDateTime
+import java.time.ZoneId
import java.time.temporal.ChronoUnit
import java.time.temporal.TemporalUnit
+import java.util.Date
import java.util.Enumeration
import java.util.SortedMap
@@ -39,3 +42,5 @@ fun LocalDateTime.notEquals(other: LocalDateTime, precision: TemporalUnit = Chro
  this.truncatedTo(precision) != other.truncatedTo(precision)

fun String.stripAccents(): String = StringUtils.stripAccents(this)
+
+fun LocalDate.toDate(): Date = Date.from(this.atStartOfDay(ZoneId.of("Z")).toInstant())

LuceneConfiguration.kt (new file)

@@ -0,0 +1,31 @@
+package org.gotson.komga.infrastructure.search
+
+import org.apache.lucene.store.ByteBuffersDirectory
+import org.apache.lucene.store.Directory
+import org.apache.lucene.store.FSDirectory
+import org.apache.lucene.util.Version
+import org.gotson.komga.infrastructure.configuration.KomgaProperties
+import org.springframework.context.annotation.Bean
+import org.springframework.context.annotation.Configuration
+import org.springframework.context.annotation.Profile
+import java.nio.file.Paths
+
+@Configuration
+class LuceneConfiguration(
+  private val komgaProperties: KomgaProperties,
+) {
+
+  @Bean
+  fun analyzer() =
+    MultiLingualAnalyzer().apply { version = Version.LUCENE_8_9_0 }
+
+  @Bean
+  @Profile("test")
+  fun memoryDirectory(): Directory =
+    ByteBuffersDirectory()
+
+  @Bean
+  @Profile("!test")
+  fun diskDirectory(): Directory =
+    FSDirectory.open(Paths.get(komgaProperties.lucene.dataDirectory))
+}
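
MultiLingualAnalyzer is added elsewhere in this commit and is not part of this excerpt; the "work with CJK" item in the commit message is typically achieved by bigramming CJK runs at analysis time. A hedged sketch of such an analyzer, with the class name and filter chain assumed rather than taken from the commit:

    import org.apache.lucene.analysis.Analyzer
    import org.apache.lucene.analysis.LowerCaseFilter
    import org.apache.lucene.analysis.TokenStream
    import org.apache.lucene.analysis.cjk.CJKBigramFilter
    import org.apache.lucene.analysis.cjk.CJKWidthFilter
    import org.apache.lucene.analysis.standard.StandardTokenizer

    // Sketch only: tokenize, normalize half/full-width forms, lowercase,
    // then emit overlapping bigrams for CJK runs so Chinese/Japanese/Korean
    // text is searchable even though it has no word separators.
    class CjkFriendlyAnalyzer : Analyzer() {
      override fun createComponents(fieldName: String): TokenStreamComponents {
        val source = StandardTokenizer()
        var stream: TokenStream = CJKWidthFilter(source)
        stream = LowerCaseFilter(stream)
        stream = CJKBigramFilter(stream)
        return TokenStreamComponents(source, stream)
      }
    }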

LuceneEntity.kt (new file)

@ -0,0 +1,88 @@
package org.gotson.komga.infrastructure.search
import org.apache.lucene.document.DateTools
import org.apache.lucene.document.Document
import org.apache.lucene.document.Field
import org.apache.lucene.document.StringField
import org.apache.lucene.document.TextField
import org.gotson.komga.domain.model.ReadList
import org.gotson.komga.domain.model.SeriesCollection
import org.gotson.komga.infrastructure.language.toDate
import org.gotson.komga.interfaces.rest.dto.BookDto
import org.gotson.komga.interfaces.rest.dto.SeriesDto
enum class LuceneEntity(val type: String, val id: String, val defaultFields: Array<String>) {
Book("book", "book_id", arrayOf("title", "isbn")),
Series("series", "series_id", arrayOf("title")),
Collection("collection", "collection_id", arrayOf("name")),
ReadList("readlist", "readlist_id", arrayOf("name"));
companion object {
const val TYPE = "type"
}
}
fun BookDto.toDocument() =
Document().apply {
add(TextField("title", metadata.title, Field.Store.NO))
add(TextField("isbn", metadata.isbn, Field.Store.NO))
metadata.tags.forEach {
add(TextField("tag", it, Field.Store.NO))
}
metadata.authors.forEach {
add(TextField("author", it.name, Field.Store.NO))
add(TextField(it.role, it.name, Field.Store.NO))
}
if (metadata.releaseDate != null) add(TextField("release_date", DateTools.dateToString(metadata.releaseDate.toDate(), DateTools.Resolution.YEAR), Field.Store.NO))
add(TextField("status", media.status, Field.Store.NO))
add(TextField("deleted", deleted.toString(), Field.Store.NO))
add(StringField(LuceneEntity.TYPE, LuceneEntity.Book.type, Field.Store.NO))
add(StringField(LuceneEntity.Book.id, id, Field.Store.YES))
}
fun SeriesDto.toDocument() =
Document().apply {
add(TextField("title", metadata.title, Field.Store.NO))
add(TextField("publisher", metadata.publisher, Field.Store.NO))
add(TextField("status", metadata.status, Field.Store.NO))
add(TextField("reading_direction", metadata.readingDirection, Field.Store.NO))
if (metadata.ageRating != null) add(TextField("age_rating", metadata.ageRating.toString(), Field.Store.NO))
if (metadata.language.isNotBlank()) add(TextField("language", metadata.language, Field.Store.NO))
metadata.tags.forEach {
add(TextField("series_tag", it, Field.Store.NO))
add(TextField("tag", it, Field.Store.NO))
}
booksMetadata.tags.forEach {
add(TextField("book_tag", it, Field.Store.NO))
add(TextField("tag", it, Field.Store.NO))
}
metadata.genres.forEach {
add(TextField("genre", it, Field.Store.NO))
}
if (metadata.totalBookCount != null) add(TextField("total_book_count", metadata.totalBookCount.toString(), Field.Store.NO))
add(TextField("book_count", booksCount.toString(), Field.Store.NO))
booksMetadata.authors.forEach {
add(TextField("author", it.name, Field.Store.NO))
add(TextField(it.role, it.name, Field.Store.NO))
}
if (booksMetadata.releaseDate != null) add(TextField("release_date", DateTools.dateToString(booksMetadata.releaseDate.toDate(), DateTools.Resolution.YEAR), Field.Store.NO))
add(TextField("deleted", deleted.toString(), Field.Store.NO))
add(StringField(LuceneEntity.TYPE, LuceneEntity.Series.type, Field.Store.NO))
add(StringField(LuceneEntity.Series.id, id, Field.Store.YES))
}
fun SeriesCollection.toDocument() =
Document().apply {
add(TextField("name", name, Field.Store.NO))
add(StringField(LuceneEntity.TYPE, LuceneEntity.Collection.type, Field.Store.NO))
add(StringField(LuceneEntity.Collection.id, id, Field.Store.YES))
}
fun ReadList.toDocument() =
Document().apply {
add(TextField("name", name, Field.Store.NO))
add(StringField(LuceneEntity.TYPE, LuceneEntity.ReadList.type, Field.Store.NO))
add(StringField(LuceneEntity.ReadList.id, id, Field.Store.YES))
}
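To see how the author-role-as-field trick plays out, here is a small self-contained sketch (in-memory index, hand-built document mirroring what toDocument() emits for one author named "bob" with role "writer"): the role name itself becomes a searchable field.
import org.apache.lucene.document.Document
import org.apache.lucene.document.Field
import org.apache.lucene.document.StringField
import org.apache.lucene.document.TextField
import org.apache.lucene.index.DirectoryReader
import org.apache.lucene.index.IndexWriter
import org.apache.lucene.index.IndexWriterConfig
import org.apache.lucene.queryparser.classic.QueryParser
import org.apache.lucene.search.IndexSearcher
import org.apache.lucene.store.ByteBuffersDirectory

fun main() {
  val analyzer = MultiLingualAnalyzer()
  val directory = ByteBuffersDirectory()
  IndexWriter(directory, IndexWriterConfig(analyzer)).use { writer ->
    writer.addDocument(
      Document().apply {
        add(TextField("author", "bob", Field.Store.NO)) // generic author field
        add(TextField("writer", "bob", Field.Store.NO)) // role-specific field
        add(StringField("type", "book", Field.Store.NO))
        add(StringField("book_id", "abc123", Field.Store.YES))
      }
    )
  }
  DirectoryReader.open(directory).use { reader ->
    val hits = IndexSearcher(reader).search(QueryParser("title", analyzer).parse("writer:bob"), 10)
    println(hits.scoreDocs.size) // 1: matched through the role-specific field
  }
}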

View file

@ -0,0 +1,60 @@
package org.gotson.komga.infrastructure.search
import mu.KotlinLogging
import org.apache.lucene.analysis.Analyzer
import org.apache.lucene.index.DirectoryReader
import org.apache.lucene.index.IndexWriter
import org.apache.lucene.index.IndexWriterConfig
import org.apache.lucene.index.Term
import org.apache.lucene.queryparser.classic.MultiFieldQueryParser
import org.apache.lucene.queryparser.classic.ParseException
import org.apache.lucene.queryparser.classic.QueryParser
import org.apache.lucene.search.BooleanClause
import org.apache.lucene.search.BooleanQuery
import org.apache.lucene.search.IndexSearcher
import org.apache.lucene.search.TermQuery
import org.apache.lucene.store.Directory
import org.springframework.stereotype.Component
private val logger = KotlinLogging.logger {}
@Component
class LuceneHelper(
private val directory: Directory,
private val analyzer: Analyzer,
) {
fun getIndexWriter() = IndexWriter(directory, IndexWriterConfig(analyzer))
fun getIndexReader(): DirectoryReader = DirectoryReader.open(directory)
fun indexExists(): Boolean = DirectoryReader.indexExists(directory)
fun searchEntitiesIds(searchTerm: String?, entity: LuceneEntity, size: Int): List<String>? {
  return if (!searchTerm.isNullOrBlank()) {
    try {
      // match the search term against the entity's default fields, requiring every term to match
      val fieldsQuery = MultiFieldQueryParser(entity.defaultFields, analyzer).apply {
        defaultOperator = QueryParser.Operator.AND
      }.parse(searchTerm)
      // restrict hits to the requested entity type, since all entity types share a single index
      val typeQuery = TermQuery(Term(LuceneEntity.TYPE, entity.type))
      val booleanQuery = BooleanQuery.Builder()
        .add(fieldsQuery, BooleanClause.Occur.MUST)
        .add(typeQuery, BooleanClause.Occur.MUST)
        .build()
      getIndexReader().use { index ->
        val searcher = IndexSearcher(index)
        val topDocs = searcher.search(booleanQuery, size)
        topDocs.scoreDocs.map { searcher.doc(it.doc)[entity.id] }
      }
    } catch (e: ParseException) {
      // unparseable user input (e.g. unbalanced quotes): treat as no results rather than failing
      emptyList()
    } catch (e: Exception) {
      logger.error(e) { "Error fetching entities from index" }
      emptyList()
    }
  } else null // no search term: callers should skip search filtering entirely
}
}
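The nullable return is deliberate, and callers need to treat the three outcomes differently. A sketch of the assumed call-site contract (the actual DAO wiring lives elsewhere in this commit):
fun resolveSearch(luceneHelper: LuceneHelper) {
  val bookIds = luceneHelper.searchEntitiesIds("batman robin", LuceneEntity.Book, 20)
  when {
    bookIds == null -> Unit // no search term given: leave the SQL query unfiltered
    bookIds.isEmpty() -> Unit // a term was given but nothing matched (or it was unparseable): return an empty page
    else -> Unit // filter the SQL query with WHERE id IN (bookIds), preserving Lucene's rank order
  }
}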

View file

@ -0,0 +1,29 @@
package org.gotson.komga.infrastructure.search
import org.apache.lucene.analysis.Analyzer
import org.apache.lucene.analysis.LowerCaseFilter
import org.apache.lucene.analysis.TokenStream
import org.apache.lucene.analysis.Tokenizer
import org.apache.lucene.analysis.cjk.CJKBigramFilter
import org.apache.lucene.analysis.cjk.CJKWidthFilter
import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter
import org.apache.lucene.analysis.standard.StandardTokenizer
class MultiLingualAnalyzer : Analyzer() {
override fun createComponents(fieldName: String): TokenStreamComponents {
val source: Tokenizer = StandardTokenizer()
// run the width filter first, before bigramming, as it sometimes combines characters.
var filter: TokenStream = CJKWidthFilter(source)
filter = LowerCaseFilter(filter)
filter = CJKBigramFilter(filter)
filter = ASCIIFoldingFilter(filter)
return TokenStreamComponents(source, filter)
}
override fun normalize(fieldName: String?, `in`: TokenStream): TokenStream {
var filter: TokenStream = CJKWidthFilter(`in`)
filter = LowerCaseFilter(filter)
filter = ASCIIFoldingFilter(filter)
return filter
}
}
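A quick way to see the bigram pipeline in action (this mirrors the MultilingualAnalyzerTest expectations further down): katakana input comes out as overlapping 2-grams, which is what makes partial CJK matches work.
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute

fun main() {
  MultiLingualAnalyzer().tokenStream("text", "ワンパンマン").use { ts ->
    val term = ts.addAttribute(CharTermAttribute::class.java)
    ts.reset()
    while (ts.incrementToken()) print("$term ") // ワン ンパ パン ンマ マン
    ts.end()
  }
}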

View file

@ -0,0 +1,116 @@
package org.gotson.komga.infrastructure.search
import mu.KotlinLogging
import org.apache.lucene.document.Document
import org.apache.lucene.index.Term
import org.gotson.komga.domain.model.BookSearchWithReadProgress
import org.gotson.komga.domain.model.DomainEvent
import org.gotson.komga.domain.model.ReadList
import org.gotson.komga.domain.model.SeriesCollection
import org.gotson.komga.domain.model.SeriesSearchWithReadProgress
import org.gotson.komga.domain.persistence.ReadListRepository
import org.gotson.komga.domain.persistence.SeriesCollectionRepository
import org.gotson.komga.infrastructure.jms.QUEUE_SSE
import org.gotson.komga.infrastructure.jms.QUEUE_SSE_SELECTOR
import org.gotson.komga.infrastructure.jms.TOPIC_FACTORY
import org.gotson.komga.interfaces.rest.dto.BookDto
import org.gotson.komga.interfaces.rest.dto.SeriesDto
import org.gotson.komga.interfaces.rest.persistence.BookDtoRepository
import org.gotson.komga.interfaces.rest.persistence.SeriesDtoRepository
import org.springframework.data.domain.Page
import org.springframework.data.domain.PageRequest
import org.springframework.data.domain.Pageable
import org.springframework.jms.annotation.JmsListener
import org.springframework.stereotype.Component
import kotlin.math.ceil
import kotlin.time.measureTime
private val logger = KotlinLogging.logger {}
@Component
class SearchIndexLifecycle(
private val collectionRepository: SeriesCollectionRepository,
private val readListRepository: ReadListRepository,
private val bookDtoRepository: BookDtoRepository,
private val seriesDtoRepository: SeriesDtoRepository,
private val luceneHelper: LuceneHelper,
) {
fun rebuildIndex() {
logger.info { "Rebuild all indexes" }
LuceneEntity.values().forEach {
when (it) {
LuceneEntity.Book -> rebuildIndex(it, { p: Pageable -> bookDtoRepository.findAll(BookSearchWithReadProgress(), "unused", p) }, { e: BookDto -> e.toDocument() })
LuceneEntity.Series -> rebuildIndex(it, { p: Pageable -> seriesDtoRepository.findAll(SeriesSearchWithReadProgress(), "unused", p) }, { e: SeriesDto -> e.toDocument() })
LuceneEntity.Collection -> rebuildIndex(it, { p: Pageable -> collectionRepository.findAll(pageable = p) }, { e: SeriesCollection -> e.toDocument() })
LuceneEntity.ReadList -> rebuildIndex(it, { p: Pageable -> readListRepository.findAll(pageable = p) }, { e: ReadList -> e.toDocument() })
}
}
}
private fun <T> rebuildIndex(entity: LuceneEntity, provider: (Pageable) -> Page<out T>, toDoc: (T) -> Document) {
logger.info { "Rebuilding index for ${entity.name}" }
val count = provider(Pageable.ofSize(1)).totalElements
val batchSize = 5_000
val pages = ceil(count.toDouble() / batchSize).toInt()
logger.info { "Number of entities: $count" }
luceneHelper.getIndexWriter().use { indexWriter ->
measureTime {
indexWriter.deleteDocuments(Term(LuceneEntity.TYPE, entity.type))
(0 until pages).forEach { page ->
logger.info { "Processing page $page of $batchSize elements" }
val entityDocs = provider(PageRequest.of(page, batchSize)).content
.map { toDoc(it) }
indexWriter.addDocuments(entityDocs)
}
}.also { duration ->
logger.info { "Wrote ${entity.name} index in $duration" }
}
}
}
@JmsListener(destination = QUEUE_SSE, selector = QUEUE_SSE_SELECTOR, containerFactory = TOPIC_FACTORY)
fun consumeEvents(event: DomainEvent) {
when (event) {
is DomainEvent.SeriesAdded -> seriesDtoRepository.findByIdOrNull(event.series.id, "unused")?.toDocument()?.let { addEntity(it) }
is DomainEvent.SeriesUpdated -> seriesDtoRepository.findByIdOrNull(event.series.id, "unused")?.toDocument()?.let { updateEntity(LuceneEntity.Series, event.series.id, it) }
is DomainEvent.SeriesDeleted -> deleteEntity(LuceneEntity.Series, event.series.id)
is DomainEvent.BookAdded -> bookDtoRepository.findByIdOrNull(event.book.id, "unused")?.toDocument()?.let { addEntity(it) }
is DomainEvent.BookUpdated -> bookDtoRepository.findByIdOrNull(event.book.id, "unused")?.toDocument()?.let { updateEntity(LuceneEntity.Book, event.book.id, it) }
is DomainEvent.BookDeleted -> deleteEntity(LuceneEntity.Book, event.book.id)
is DomainEvent.ReadListAdded -> readListRepository.findByIdOrNull(event.readList.id)?.toDocument()?.let { addEntity(it) }
is DomainEvent.ReadListUpdated -> readListRepository.findByIdOrNull(event.readList.id)?.toDocument()?.let { updateEntity(LuceneEntity.ReadList, event.readList.id, it) }
is DomainEvent.ReadListDeleted -> deleteEntity(LuceneEntity.ReadList, event.readList.id)
is DomainEvent.CollectionAdded -> collectionRepository.findByIdOrNull(event.collection.id)?.toDocument()?.let { addEntity(it) }
is DomainEvent.CollectionUpdated -> collectionRepository.findByIdOrNull(event.collection.id)?.toDocument()?.let { updateEntity(LuceneEntity.Collection, event.collection.id, it) }
is DomainEvent.CollectionDeleted -> deleteEntity(LuceneEntity.Collection, event.collection.id)
else -> Unit
}
}
private fun addEntity(doc: Document) {
luceneHelper.getIndexWriter().use { indexWriter ->
indexWriter.addDocument(doc)
}
}
private fun updateEntity(entity: LuceneEntity, entityId: String, newDoc: Document) {
luceneHelper.getIndexWriter().use { indexWriter ->
indexWriter.updateDocument(Term(entity.id, entityId), newDoc)
}
}
private fun deleteEntity(entity: LuceneEntity, entityId: String) {
luceneHelper.getIndexWriter().use { indexWriter ->
indexWriter.deleteDocuments(Term(entity.id, entityId))
}
}
}
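updateEntity and deleteEntity above only work because the ID fields are indexed with StringField, which keeps the value as a single untokenized term; a TextField ID would be run through the analyzer and the exact Term lookup would miss. A minimal sketch of that equivalence, using an in-memory index:
import org.apache.lucene.document.Document
import org.apache.lucene.document.Field
import org.apache.lucene.document.StringField
import org.apache.lucene.index.IndexWriter
import org.apache.lucene.index.IndexWriterConfig
import org.apache.lucene.index.Term
import org.apache.lucene.store.ByteBuffersDirectory

fun main() {
  val directory = ByteBuffersDirectory()
  IndexWriter(directory, IndexWriterConfig(MultiLingualAnalyzer())).use { writer ->
    writer.addDocument(Document().apply { add(StringField("book_id", "0B79XZ9SG1DVM", Field.Store.YES)) })
    writer.deleteDocuments(Term("book_id", "0B79XZ9SG1DVM")) // exact, untokenized match succeeds
  }
}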

View file

@ -0,0 +1,28 @@
package org.gotson.komga.interfaces.scheduler
import mu.KotlinLogging
import org.gotson.komga.application.tasks.HIGHEST_PRIORITY
import org.gotson.komga.application.tasks.TaskReceiver
import org.gotson.komga.infrastructure.search.LuceneHelper
import org.springframework.boot.context.event.ApplicationReadyEvent
import org.springframework.context.annotation.Profile
import org.springframework.context.event.EventListener
import org.springframework.stereotype.Component
private val logger = KotlinLogging.logger {}
@Profile("!test")
@Component
class SearchIndexController(
private val luceneHelper: LuceneHelper,
private val taskReceiver: TaskReceiver,
) {
@EventListener(ApplicationReadyEvent::class)
fun createIndexIfNoneExist() {
if (!luceneHelper.indexExists()) {
logger.info { "Lucene index not found, trigger rebuild" }
taskReceiver.rebuildIndex(HIGHEST_PRIORITY)
}
}
}

View file

@ -1,3 +1,5 @@
komga:
  database:
    file: ./localdb.sqlite
  lucene:
    data-directory: ./lucene/localdb

View file

@ -14,6 +14,8 @@ komga:
- "@eaDir" - "@eaDir"
database: database:
file: \${user.home}/.komga/database.sqlite file: \${user.home}/.komga/database.sqlite
lucene:
data-directory: \${user.home}/.komga/lucene
spring: spring:
flyway: flyway:

View file

@ -1,9 +1,16 @@
package org.gotson.komga.infrastructure.jooq

import com.ninjasquad.springmockk.MockkBean
import io.mockk.Runs
import io.mockk.every
import io.mockk.just
import org.assertj.core.api.Assertions.assertThat
import org.assertj.core.api.Assertions.assertThatCode
import org.gotson.komga.application.events.EventPublisher
import org.gotson.komga.domain.model.Author
import org.gotson.komga.domain.model.BookSearchWithReadProgress
import org.gotson.komga.domain.model.KomgaUser
import org.gotson.komga.domain.model.Media
import org.gotson.komga.domain.model.ReadProgress
import org.gotson.komga.domain.model.ReadStatus
import org.gotson.komga.domain.model.makeBook
@ -13,14 +20,17 @@ import org.gotson.komga.domain.persistence.BookMetadataRepository
import org.gotson.komga.domain.persistence.BookRepository
import org.gotson.komga.domain.persistence.KomgaUserRepository
import org.gotson.komga.domain.persistence.LibraryRepository
import org.gotson.komga.domain.persistence.MediaRepository
import org.gotson.komga.domain.persistence.ReadProgressRepository
import org.gotson.komga.domain.service.BookLifecycle
import org.gotson.komga.domain.service.KomgaUserLifecycle
import org.gotson.komga.domain.service.LibraryLifecycle
import org.gotson.komga.domain.service.SeriesLifecycle
import org.gotson.komga.infrastructure.search.SearchIndexLifecycle
import org.junit.jupiter.api.AfterAll
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.BeforeAll
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Nested
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.extension.ExtendWith
@ -30,6 +40,8 @@ import org.springframework.data.domain.PageRequest
import org.springframework.data.domain.Sort
import org.springframework.test.context.junit.jupiter.SpringExtension
import java.net.URL
import java.time.LocalDate
import java.time.LocalDateTime

@ExtendWith(SpringExtension::class)
@SpringBootTest
@ -37,33 +49,46 @@ class BookDtoDaoTest(
  @Autowired private val bookDtoDao: BookDtoDao,
  @Autowired private val bookRepository: BookRepository,
  @Autowired private val bookMetadataRepository: BookMetadataRepository,
  @Autowired private val mediaRepository: MediaRepository,
  @Autowired private val bookLifecycle: BookLifecycle,
  @Autowired private val seriesLifecycle: SeriesLifecycle,
  @Autowired private val libraryRepository: LibraryRepository,
  @Autowired private val libraryLifecycle: LibraryLifecycle,
  @Autowired private val readProgressRepository: ReadProgressRepository,
  @Autowired private val userRepository: KomgaUserRepository,
  @Autowired private val userLifecycle: KomgaUserLifecycle,
  @Autowired private val searchIndexLifecycle: SearchIndexLifecycle,
) {
  private val library = makeLibrary()
  private var series = makeSeries("Series")
  private val user = KomgaUser("user@example.org", "", false)

  @MockkBean
  private lateinit var mockEventPublisher: EventPublisher

  @BeforeAll
  fun setup() {
    every { mockEventPublisher.publishEvent(any()) } just Runs

    libraryRepository.insert(library)
    series = seriesLifecycle.createSeries(series.copy(libraryId = library.id))
    userRepository.insert(user)
  }

  @BeforeEach
  fun resetMocks() {
    every { mockEventPublisher.publishEvent(any()) } just Runs
  }

  @AfterEach
  fun deleteBooks() {
    bookLifecycle.deleteMany(bookRepository.findAll())
    searchIndexLifecycle.rebuildIndex()
  }

  @AfterAll
  fun tearDown() {
    every { mockEventPublisher.publishEvent(any()) } just Runs

    userRepository.findAll().forEach {
      userLifecycle.deleteUser(it)
    }
@ -101,7 +126,7 @@ class BookDtoDaoTest(
      // then
      assertThat(found).hasSize(1)
      assertThat(found.first().readProgress?.completed).isTrue
      assertThat(found.first().name).isEqualTo("2")
    }
@ -137,7 +162,7 @@ class BookDtoDaoTest(
      // then
      assertThat(found).hasSize(1)
      assertThat(found.first().readProgress?.completed).isFalse
      assertThat(found.first().name).isEqualTo("1")
    }
@ -307,6 +332,8 @@ class BookDtoDaoTest(
        )
      )

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "batman"),
@ -337,6 +364,8 @@ class BookDtoDaoTest(
        bookMetadataRepository.update(it.copy(title = "Éric le bleu"))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "eric"),
@ -348,119 +377,353 @@ class BookDtoDaoTest(
      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Éric le bleu")
    }

    @Test
    fun `given books when searching by ISBN then results are matched`() {
      // given
      val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(
        series,
        listOf(
          book1,
          makeBook("Robin", seriesId = series.id, libraryId = library.id),
          makeBook("Batman and Robin", seriesId = series.id, libraryId = library.id),
          makeBook("Batman", seriesId = series.id, libraryId = library.id),
        )
      )

      bookMetadataRepository.findById(book1.id).let {
        bookMetadataRepository.update(it.copy(isbn = "9782413016878"))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "9782413016878"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Éric le rouge")
    }

    @Test
    fun `given books when searching by tags then results are matched`() {
      // given
      val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(
        series,
        listOf(
          book1,
        )
      )

      bookMetadataRepository.findById(book1.id).let {
        bookMetadataRepository.update(it.copy(tags = setOf("tag1")))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "tag:tag1"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Éric le rouge")
    }

    @Test
    fun `given books when searching by authors then results are matched`() {
      // given
      val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(
        series,
        listOf(
          book1,
        )
      )

      bookMetadataRepository.findById(book1.id).let {
        bookMetadataRepository.update(it.copy(authors = listOf(Author("bob", "writer"))))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val foundGeneric = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "author:bob"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content
      val foundByRole = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "writer:bob"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content
      val notFound = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "penciller:bob"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(foundGeneric).hasSize(1)
      assertThat(foundGeneric.map { it.metadata.title }).containsExactly("Éric le rouge")
      assertThat(foundByRole).hasSize(1)
      assertThat(foundByRole.map { it.metadata.title }).containsExactly("Éric le rouge")
      assertThat(notFound).isEmpty()
    }

    @Test
    fun `given books when searching by release year then results are matched`() {
      // given
      val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(series, listOf(book1))

      bookMetadataRepository.findById(book1.id).let {
        bookMetadataRepository.update(it.copy(releaseDate = LocalDate.of(1999, 5, 12)))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "release_date:1999"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Éric le rouge")
    }

    @Test
    fun `given books when searching by release year range then results are matched`() {
      // given
      val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
      val book2 = makeBook("Éric le bleu", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(series, listOf(book1, book2))

      bookMetadataRepository.findById(book1.id).let {
        bookMetadataRepository.update(it.copy(releaseDate = LocalDate.of(1999, 5, 12)))
      }
      bookMetadataRepository.findById(book2.id).let {
        bookMetadataRepository.update(it.copy(releaseDate = LocalDate.of(2005, 5, 12)))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "release_date:[1990 TO 2010]"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(2)
      assertThat(found.map { it.metadata.title }).containsExactly("Éric le rouge", "Éric le bleu")
    }

    @Test
    fun `given books when searching by media status then results are matched`() {
      // given
      val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(series, listOf(book1))

      mediaRepository.findById(book1.id).let {
        mediaRepository.update(it.copy(status = Media.Status.ERROR))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "status:error"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Éric le rouge")
    }

    @Test
    fun `given books when searching by deleted then results are matched`() {
      // given
      val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
        .copy(deletedDate = LocalDateTime.now())
      seriesLifecycle.addBooks(
        series,
        listOf(
          book1,
          makeBook("Batman", seriesId = series.id, libraryId = library.id),
        )
      )

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "deleted:true"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Éric le rouge")
    }

    @Test
    fun `given books with dots in title when searching by title then results are matched`() {
      // given
      seriesLifecycle.addBooks(
        series,
        listOf(
          makeBook("S.W.O.R.D.", seriesId = series.id, libraryId = library.id),
          makeBook("Batman", seriesId = series.id, libraryId = library.id),
        )
      )

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "s.w.o.r.d."),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("S.W.O.R.D.")
    }

    @Test
    fun `given books when searching with multiple words then results are matched`() {
      // given
      seriesLifecycle.addBooks(
        series,
        listOf(
          makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id),
          makeBook("Robin and Batman", seriesId = series.id, libraryId = library.id),
          makeBook("Batman and Robin", seriesId = series.id, libraryId = library.id),
          makeBook("Batman", seriesId = series.id, libraryId = library.id),
        )
      )

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "batman robin"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(2)
      assertThat(found.map { it.metadata.title }).containsExactlyInAnyOrder("Batman and Robin", "Robin and Batman")
    }

    @Test
    fun `given books when searching by term containing hyphens then results are ordered by rank`() {
      // given
      seriesLifecycle.addBooks(
        series,
        listOf(
          makeBook("Batman", seriesId = series.id, libraryId = library.id),
          makeBook("Another X-Men adventure", seriesId = series.id, libraryId = library.id),
          makeBook("X-Men", seriesId = series.id, libraryId = library.id),
        )
      )

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "x-men"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(2)
      assertThat(found.map { it.name }).containsExactly("X-Men", "Another X-Men adventure")
    }

    @Test
    fun `given books when searching by single letter then results are ordered by rank`() {
      // given
      seriesLifecycle.addBooks(
        series,
        listOf(
          makeBook("J", seriesId = series.id, libraryId = library.id),
          makeBook("Adventures of J. J.", seriesId = series.id, libraryId = library.id),
          makeBook("Jackal", seriesId = series.id, libraryId = library.id),
        )
      )

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "j"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(2)
      assertThat(found.map { it.name }).containsExactly("J", "Adventures of J. J.")
    }

    @Test
    fun `when searching by unknown field then empty result are returned and no exception is thrown`() {
      assertThatCode {
        // when
        val found = bookDtoDao.findAll(
          BookSearchWithReadProgress(searchTerm = "publisher:batman"),
          user.id,
          UnpagedSorted(Sort.by("relevance")),
        ).content

        // then
        assertThat(found).hasSize(0)
      }.doesNotThrowAnyException()
    }

    @Test
    fun `given books in CJK when searching by CJK term then results are ordered by rank`() {
      // given
      seriesLifecycle.addBooks(
        series,
        listOf(
          makeBook("[不道德公會][河添太一 ][東立]Vol.04-搬运", seriesId = series.id, libraryId = library.id, url = URL("file:/file.cbz")),
        )
      )

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "不道德"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
    }
  }
}

View file

@ -1,30 +1,46 @@
package org.gotson.komga.infrastructure.jooq

import com.ninjasquad.springmockk.MockkBean
import io.mockk.Runs
import io.mockk.every
import io.mockk.just
import org.assertj.core.api.Assertions.assertThat
import org.gotson.komga.application.events.EventPublisher
import org.gotson.komga.domain.model.Author
import org.gotson.komga.domain.model.KomgaUser
import org.gotson.komga.domain.model.ReadProgress
import org.gotson.komga.domain.model.ReadStatus
import org.gotson.komga.domain.model.SeriesMetadata
import org.gotson.komga.domain.model.SeriesSearchWithReadProgress
import org.gotson.komga.domain.model.makeBook
import org.gotson.komga.domain.model.makeLibrary
import org.gotson.komga.domain.model.makeSeries
import org.gotson.komga.domain.persistence.BookMetadataRepository
import org.gotson.komga.domain.persistence.BookRepository
import org.gotson.komga.domain.persistence.KomgaUserRepository
import org.gotson.komga.domain.persistence.LibraryRepository
import org.gotson.komga.domain.persistence.ReadProgressRepository
import org.gotson.komga.domain.persistence.SeriesMetadataRepository
import org.gotson.komga.domain.persistence.SeriesRepository
import org.gotson.komga.domain.service.KomgaUserLifecycle
import org.gotson.komga.domain.service.LibraryLifecycle
import org.gotson.komga.domain.service.SeriesLifecycle
import org.gotson.komga.domain.service.SeriesMetadataLifecycle
import org.gotson.komga.infrastructure.search.SearchIndexLifecycle
import org.junit.jupiter.api.AfterAll
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.BeforeAll
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Nested
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.extension.ExtendWith
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.data.domain.PageRequest
import org.springframework.data.domain.Sort
import org.springframework.test.context.junit.jupiter.SpringExtension
import java.time.LocalDate
import java.time.LocalDateTime

@ExtendWith(SpringExtension::class)
@SpringBootTest
@ -32,30 +48,45 @@ class SeriesDtoDaoTest(
  @Autowired private val seriesDtoDao: SeriesDtoDao,
  @Autowired private val bookRepository: BookRepository,
  @Autowired private val seriesRepository: SeriesRepository,
  @Autowired private val seriesMetadataRepository: SeriesMetadataRepository,
  @Autowired private val bookMetadataRepository: BookMetadataRepository,
  @Autowired private val seriesLifecycle: SeriesLifecycle,
  @Autowired private val seriesMetadataLifecycle: SeriesMetadataLifecycle,
  @Autowired private val libraryRepository: LibraryRepository,
  @Autowired private val libraryLifecycle: LibraryLifecycle,
  @Autowired private val readProgressRepository: ReadProgressRepository,
  @Autowired private val userRepository: KomgaUserRepository,
  @Autowired private val userLifecycle: KomgaUserLifecycle,
  @Autowired private val searchIndexLifecycle: SearchIndexLifecycle,
) {
  private val library = makeLibrary()
  private val user = KomgaUser("user@example.org", "", false)

  @MockkBean
  private lateinit var mockEventPublisher: EventPublisher

  @BeforeAll
  fun setup() {
    every { mockEventPublisher.publishEvent(any()) } just Runs

    libraryRepository.insert(library)
    userRepository.insert(user)
  }

  @BeforeEach
  fun resetMocks() {
    every { mockEventPublisher.publishEvent(any()) } just Runs
  }

  @AfterEach
  fun deleteSeries() {
    seriesLifecycle.deleteMany(seriesRepository.findAll())
    searchIndexLifecycle.rebuildIndex()
  }

  @AfterAll
  fun tearDown() {
    every { mockEventPublisher.publishEvent(any()) } just Runs

    userRepository.findAll().forEach {
      userLifecycle.deleteUser(it)
    }
@ -95,148 +126,545 @@ class SeriesDtoDaoTest(
    }
  }

  @Nested
  inner class ReadProgress {
    @Test
    fun `given series in various read status when searching for read series then only read series are returned`() {
      // given
      setupSeries()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.READ)),
        user.id,
        PageRequest.of(0, 20)
      ).sortedBy { it.name }

      // then
      assertThat(found).hasSize(1)
      assertThat(found.first().booksReadCount).isEqualTo(3)
      assertThat(found.first().name).isEqualTo("2")
    }

    @Test
    fun `given series in various read status when searching for unread series then only unread series are returned`() {
      // given
      setupSeries()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.UNREAD)),
        user.id,
        PageRequest.of(0, 20)
      ).sortedBy { it.name }

      // then
      assertThat(found).hasSize(1)
      assertThat(found.first().booksUnreadCount).isEqualTo(3)
      assertThat(found.first().name).isEqualTo("3")
    }

    @Test
    fun `given series in various read status when searching for in progress series then only in progress series are returned`() {
      // given
      setupSeries()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.IN_PROGRESS)),
        user.id,
        PageRequest.of(0, 20)
      ).sortedBy { it.name }

      // then
      assertThat(found).hasSize(2)
      assertThat(found.first().booksInProgressCount).isEqualTo(3)
      assertThat(found.first().name).isEqualTo("1")
      assertThat(found.last().booksInProgressCount).isEqualTo(1)
      assertThat(found.last().name).isEqualTo("4")
    }

    @Test
    fun `given series in various read status when searching for read and unread series then only matching series are returned`() {
      // given
      setupSeries()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.READ, ReadStatus.UNREAD)),
        user.id,
        PageRequest.of(0, 20)
      ).sortedBy { it.name }

      // then
      assertThat(found).hasSize(2)
      assertThat(found.map { it.name }).containsExactlyInAnyOrder("2", "3")
    }

    @Test
    fun `given series in various read status when searching for read and in progress series then only matching series are returned`() {
      // given
      setupSeries()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.READ, ReadStatus.IN_PROGRESS)),
        user.id,
        PageRequest.of(0, 20)
      ).sortedBy { it.name }

      // then
      assertThat(found).hasSize(3)
      assertThat(found.map { it.name }).containsExactlyInAnyOrder("1", "2", "4")
    }

    @Test
    fun `given series in various read status when searching for unread and in progress series then only matching series are returned`() {
      // given
      setupSeries()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.UNREAD, ReadStatus.IN_PROGRESS)),
        user.id,
        PageRequest.of(0, 20)
      ).sortedBy { it.name }

      // then
      assertThat(found).hasSize(3)
      assertThat(found.map { it.name }).containsExactlyInAnyOrder("1", "3", "4")
    }

    @Test
    fun `given series in various read status when searching for read and unread and in progress series then only matching series are returned`() {
      // given
      setupSeries()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.READ, ReadStatus.IN_PROGRESS, ReadStatus.UNREAD)),
        user.id,
        PageRequest.of(0, 20)
      ).sortedBy { it.name }

      // then
      assertThat(found).hasSize(4)
      assertThat(found.map { it.name }).containsExactlyInAnyOrder("1", "2", "3", "4")
    }

    @Test
    fun `given series in various read status when searching without read progress then all series are returned`() {
      // given
      setupSeries()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(),
        user.id,
        PageRequest.of(0, 20)
      ).sortedBy { it.name }

      // then
      assertThat(found).hasSize(4)
      assertThat(found.map { it.name }).containsExactlyInAnyOrder("1", "2", "3", "4")
    }
  }

  @Nested
  inner class FullTextSearch {
    @Test
    fun `given series when searching by term then results are ordered by rank`() {
      // given
      seriesLifecycle.createSeries(makeSeries("The incredible adventures of Batman, the man who is also a bat!", library.id))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))
      seriesLifecycle.createSeries(makeSeries("Batman", library.id))

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "batman"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(3)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman", "Batman and Robin", "The incredible adventures of Batman, the man who is also a bat!")
    }

    @Test
    fun `given series when searching by publisher then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      seriesMetadataRepository.findById(series.id).let {
        seriesMetadataRepository.update(it.copy(publisher = "Vertigo"))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "publisher:vertigo"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }

    @Test
    fun `given series when searching by status then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      seriesMetadataRepository.findById(series.id).let {
        seriesMetadataRepository.update(it.copy(status = SeriesMetadata.Status.HIATUS))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "status:hiatus"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }

    @Test
    fun `given series when searching by reading direction then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      seriesMetadataRepository.findById(series.id).let {
        seriesMetadataRepository.update(it.copy(readingDirection = SeriesMetadata.ReadingDirection.LEFT_TO_RIGHT))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "reading_direction:left_to_right"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }

    @Test
    fun `given series when searching by age rating then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      seriesMetadataRepository.findById(series.id).let {
        seriesMetadataRepository.update(it.copy(ageRating = 12))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "age_rating:12"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }

    @Test
    fun `given series when searching by language then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      seriesMetadataRepository.findById(series.id).let {
        seriesMetadataRepository.update(it.copy(language = "en-us"))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "language:en-us"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }

    @Test
    fun `given series when searching by tags then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      val book = makeBook("Batman 01", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(series, listOf(book))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      seriesMetadataRepository.findById(series.id).let {
        seriesMetadataRepository.update(it.copy(tags = setOf("seriestag")))
      }
      bookMetadataRepository.findById(book.id).let {
        bookMetadataRepository.update(it.copy(tags = setOf("booktag")))
      }
      seriesMetadataLifecycle.aggregateMetadata(series)

      searchIndexLifecycle.rebuildIndex()

      // when
      val foundByBookTag = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "book_tag:booktag"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content
      val notFoundByBookTag = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "book_tag:seriestag"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content
      val foundBySeriesTag = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "series_tag:seriestag"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content
      val notFoundBySeriesTag = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "series_tag:booktag"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content
      val foundByTagFromBook = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "tag:booktag"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content
      val foundByTagFromSeries = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "tag:seriestag"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(foundByBookTag).hasSize(1)
      assertThat(foundByBookTag.map { it.metadata.title }).containsExactly("Batman")
      assertThat(notFoundByBookTag).isEmpty()
      assertThat(foundBySeriesTag).hasSize(1)
      assertThat(foundBySeriesTag.map { it.metadata.title }).containsExactly("Batman")
      assertThat(notFoundBySeriesTag).isEmpty()
      assertThat(foundByTagFromBook).hasSize(1)
      assertThat(foundByTagFromBook.map { it.metadata.title }).containsExactly("Batman")
      assertThat(foundByTagFromSeries).hasSize(1)
      assertThat(foundByTagFromSeries.map { it.metadata.title }).containsExactly("Batman")
    }

    @Test
    fun `given series when searching by genre then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      seriesMetadataRepository.findById(series.id).let {
        seriesMetadataRepository.update(it.copy(genres = setOf("action")))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "genre:action"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }

    @Test
    fun `given series when searching by total book count then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      seriesMetadataRepository.findById(series.id).let {
        seriesMetadataRepository.update(it.copy(totalBookCount = 5))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "total_book_count:5"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }

    @Test
    fun `given series when searching by book count then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      seriesLifecycle.addBooks(
        series,
        listOf(
          makeBook("Batman 01", seriesId = series.id, libraryId = library.id),
          makeBook("Batman 02", seriesId = series.id, libraryId = library.id)
        )
      )
      seriesLifecycle.sortBooks(series)
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      seriesMetadataRepository.findById(series.id).let {
        seriesMetadataRepository.update(it.copy(genres = setOf("action")))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "book_count:2"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }

    @Test
    fun `given series when searching by authors then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      val book = makeBook("Batman 01", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(series, listOf(book))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      bookMetadataRepository.findById(book.id).let {
        bookMetadataRepository.update(
          it.copy(
            authors = listOf(
              Author("David", "penciller")
            )
          )
        )
      }
      seriesMetadataLifecycle.aggregateMetadata(series)

      searchIndexLifecycle.rebuildIndex()

      // when
      val foundGeneric = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "author:david"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content
      val foundByRole = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "penciller:david"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content
      val notFoundByRole = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "writer:david"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(foundGeneric).hasSize(1)
      assertThat(foundGeneric.map { it.metadata.title }).containsExactly("Batman")
      assertThat(foundByRole).hasSize(1)
      assertThat(foundByRole.map { it.metadata.title }).containsExactly("Batman")
      assertThat(notFoundByRole).isEmpty()
    }

    @Test
    fun `given series when searching by release year then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      val book = makeBook("Batman 01", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(series, listOf(book))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      bookMetadataRepository.findById(book.id).let {
        bookMetadataRepository.update(it.copy(releaseDate = LocalDate.of(1999, 10, 10)))
      }
      seriesMetadataLifecycle.aggregateMetadata(series)

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "release_date:1999"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }

    @Test
    fun `given series when searching by deleted then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id).copy(deletedDate = LocalDateTime.now()))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "deleted:true"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }
  }
}

View file

@ -0,0 +1,133 @@
package org.gotson.komga.infrastructure.search
import org.apache.lucene.analysis.Analyzer
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
class MultilingualAnalyzerTest {
private val analyzer = MultiLingualAnalyzer()
private fun Analyzer.getTokens(text: String): List<String> {
  val tokens = mutableListOf<String>()
  tokenStream("text", text).use { ts ->
    ts.reset()
    while (ts.incrementToken()) {
      ts.reflectWith { _, key, value -> if (key == "term") tokens += value.toString() }
    }
    ts.end()
  }
  return tokens
}
@Test
fun `english text`() {
// given
val text = "The incredible adventures of Batman, the man who is also a bat!"
// when
val tokens = analyzer.getTokens(text)
// then
assertThat(tokens).containsExactly("the", "incredible", "adventures", "of", "batman", "the", "man", "who", "is", "also", "a", "bat")
}
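// Accented letters are folded to their unaccented ASCII form ("Éric" -> "eric"), presumably via an ASCII-folding filter.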
@Test
fun accents() {
// given
val text = "Éric èl rojo"
// when
val tokens = analyzer.getTokens(text)
// then
assertThat(tokens).containsExactlyInAnyOrder("eric", "el", "rojo")
}
@Test
fun isbn() {
// given
val text = "9782413016878"
// when
val tokens = analyzer.getTokens(text)
// then
assertThat(tokens).containsExactly("9782413016878")
}
@Test
fun `single letter`() {
// given
val text = "J"
// when
val tokens = analyzer.getTokens(text)
// then
assertThat(tokens).containsExactly("j")
}
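// The CJK tests below expect overlapping character bigrams: any two adjacent characters match, with no word segmentation required.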
@Test
fun `chinese mixed`() {
// given
val text = "[不道德公會][河添太一 ][東立]Vol.04-搬运"
// when
val tokens = analyzer.getTokens(text)
// then
assertThat(tokens).containsExactly("不道", "道德", "德公", "公會", "河添", "添太", "太一", "東立", "vol", "04", "搬运")
}
@Test
fun `chinese only`() {
// given
val text = "不道德公會河添太一東立搬运"
// when
val tokens = analyzer.getTokens(text)
// then
assertThat(tokens).containsExactly("不道", "道德", "德公", "公會", "會河", "河添", "添太", "太一", "一東", "東立", "立搬", "搬运")
}
@Test
fun `hiragana only`() {
// given
val text = "探偵はもう、死んでいる。"
// when
val tokens = analyzer.getTokens(text)
// then
assertThat(tokens).containsExactly("探偵", "偵は", "はも", "もう", "死ん", "んで", "でい", "いる")
}
@Test
fun `katakana only`() {
// given
val text = "ワンパンマン"
// when
val tokens = analyzer.getTokens(text)
// then
assertThat(tokens).containsExactly("ワン", "ンパ", "パン", "ンマ", "マン")
}
@Test
fun `korean only`() {
// given
val text = "고교생을 환불해 주세요"
// when
val tokens = analyzer.getTokens(text)
// then
assertThat(tokens).containsExactly("고교", "교생", "생을", "환불", "불해", "주세", "세요")
}
}
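The expected tokens above (lowercased, accent-folded, bigrams across Han, Hiragana, Katakana and Hangul) are consistent with a fairly small Lucene analysis chain. A minimal sketch of such an analyzer, assuming Lucene 8.9 and not necessarily the exact production implementation:

import org.apache.lucene.analysis.Analyzer
import org.apache.lucene.analysis.LowerCaseFilter
import org.apache.lucene.analysis.TokenStream
import org.apache.lucene.analysis.cjk.CJKBigramFilter
import org.apache.lucene.analysis.cjk.CJKWidthFilter
import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter
import org.apache.lucene.analysis.standard.StandardTokenizer

// A sketch matching the test expectations: standard tokenization, width
// normalization, lowercasing, accent folding, then CJK bigrams.
class MultiLingualAnalyzerSketch : Analyzer() {
  override fun createComponents(fieldName: String): TokenStreamComponents {
    val source = StandardTokenizer()
    // normalize full-width/half-width forms before bigramming
    var stream: TokenStream = CJKWidthFilter(source)
    stream = LowerCaseFilter(stream)
    stream = ASCIIFoldingFilter(stream)
    // by default bigrams Han, Hiragana, Katakana and Hangul without unigrams,
    // which is why two-character input like "東立" yields the single token "東立"
    stream = CJKBigramFilter(stream)
    return TokenStreamComponents(source, stream)
  }
}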

View file

@@ -0,0 +1,323 @@
package org.gotson.komga.infrastructure.search
import com.ninjasquad.springmockk.MockkBean
import io.mockk.every
import io.mockk.slot
import org.assertj.core.api.Assertions.assertThat
import org.gotson.komga.application.events.EventPublisher
import org.gotson.komga.domain.model.DomainEvent
import org.gotson.komga.domain.model.ReadList
import org.gotson.komga.domain.model.SeriesCollection
import org.gotson.komga.domain.model.makeBook
import org.gotson.komga.domain.model.makeLibrary
import org.gotson.komga.domain.model.makeSeries
import org.gotson.komga.domain.persistence.BookMetadataRepository
import org.gotson.komga.domain.persistence.LibraryRepository
import org.gotson.komga.domain.persistence.ReadListRepository
import org.gotson.komga.domain.persistence.SeriesCollectionRepository
import org.gotson.komga.domain.persistence.SeriesMetadataRepository
import org.gotson.komga.domain.persistence.SeriesRepository
import org.gotson.komga.domain.service.BookLifecycle
import org.gotson.komga.domain.service.LibraryLifecycle
import org.gotson.komga.domain.service.ReadListLifecycle
import org.gotson.komga.domain.service.SeriesCollectionLifecycle
import org.gotson.komga.domain.service.SeriesLifecycle
import org.junit.jupiter.api.AfterAll
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.BeforeAll
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Nested
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInstance
import org.junit.jupiter.api.extension.ExtendWith
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.data.domain.Pageable
import org.springframework.test.context.junit.jupiter.SpringExtension
@ExtendWith(SpringExtension::class)
@SpringBootTest
// PER_CLASS lifecycle allows @BeforeAll/@AfterAll on instance methods
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class SearchIndexLifecycleTest(
@Autowired private val libraryRepository: LibraryRepository,
@Autowired private val libraryLifecycle: LibraryLifecycle,
@Autowired private val seriesRepository: SeriesRepository,
@Autowired private val seriesLifecycle: SeriesLifecycle,
@Autowired private val seriesMetadataRepository: SeriesMetadataRepository,
@Autowired private val bookMetadataRepository: BookMetadataRepository,
@Autowired private val bookLifecycle: BookLifecycle,
@Autowired private val collectionLifecycle: SeriesCollectionLifecycle,
@Autowired private val collectionRepository: SeriesCollectionRepository,
@Autowired private val readListLifecycle: ReadListLifecycle,
@Autowired private val readListRepository: ReadListRepository,
@Autowired private val searchIndexLifecycle: SearchIndexLifecycle,
@Autowired private val luceneHelper: LuceneHelper,
) {
private val library = makeLibrary()
@MockkBean
private lateinit var mockEventPublisher: EventPublisher
@BeforeAll
fun setup() {
captureEvents()
libraryRepository.insert(library)
}
@BeforeEach
fun resetMocks() {
captureEvents()
}
@AfterEach
fun deleteEntities() {
seriesLifecycle.deleteMany(seriesRepository.findAll())
collectionRepository.findAll(pageable = Pageable.unpaged()).forEach {
collectionLifecycle.deleteCollection(it)
}
readListRepository.findAll(pageable = Pageable.unpaged()).forEach {
readListLifecycle.deleteReadList(it)
}
}
@AfterAll
fun tearDown() {
captureEvents()
libraryRepository.findAll().forEach {
libraryLifecycle.deleteLibrary(it)
}
}
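// Short-circuit the event pipeline: every published domain event is fed straight
// into SearchIndexLifecycle, so index updates happen synchronously within each test.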
private fun captureEvents() {
val eventSlot = slot<DomainEvent>()
every { mockEventPublisher.publishEvent(capture(eventSlot)) } answers {
searchIndexLifecycle.consumeEvents(eventSlot.captured)
}
}
@Nested
inner class Book {
@Test
fun `given empty index when adding an entity then it is added to the index`() {
val series = seriesLifecycle.createSeries(makeSeries("Series", libraryId = library.id))
seriesLifecycle.addBooks(series, listOf(makeBook("book", seriesId = series.id, libraryId = library.id)))
val found = luceneHelper.searchEntitiesIds("book", LuceneEntity.Book, 10)
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
@Test
fun `given an entity when updating then it is updated in the index`() {
val series = seriesLifecycle.createSeries(makeSeries("Series", libraryId = library.id))
val book = makeBook("book", seriesId = series.id, libraryId = library.id)
seriesLifecycle.addBooks(series, listOf(book))
luceneHelper.searchEntitiesIds("book", LuceneEntity.Book, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
bookMetadataRepository.findById(book.id).let {
bookMetadataRepository.update(it.copy(title = "updated"))
}
mockEventPublisher.publishEvent(DomainEvent.BookUpdated(book))
luceneHelper.searchEntitiesIds("book", LuceneEntity.Book, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).isEmpty()
}
luceneHelper.searchEntitiesIds("updated", LuceneEntity.Book, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
}
@Test
fun `given an entity when deleting then it is removed from the index`() {
val series = seriesLifecycle.createSeries(makeSeries("Series", libraryId = library.id))
val book = makeBook("book", seriesId = series.id, libraryId = library.id)
seriesLifecycle.addBooks(series, listOf(book))
luceneHelper.searchEntitiesIds("book", LuceneEntity.Book, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
bookLifecycle.deleteOne(book)
luceneHelper.searchEntitiesIds("book", LuceneEntity.Book, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).isEmpty()
}
}
}
@Nested
inner class Series {
@Test
fun `given empty index when adding an entity then it is added to the index`() {
seriesLifecycle.createSeries(makeSeries("Series", libraryId = library.id))
val found = luceneHelper.searchEntitiesIds("series", LuceneEntity.Series, 10)
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
@Test
fun `given an entity when updating then it is updated in the index`() {
val series = seriesLifecycle.createSeries(makeSeries("Series", libraryId = library.id))
luceneHelper.searchEntitiesIds("series", LuceneEntity.Series, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
seriesMetadataRepository.findById(series.id).let {
seriesMetadataRepository.update(it.copy(title = "updated"))
}
mockEventPublisher.publishEvent(DomainEvent.SeriesUpdated(series))
luceneHelper.searchEntitiesIds("series", LuceneEntity.Series, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).isEmpty()
}
luceneHelper.searchEntitiesIds("updated", LuceneEntity.Series, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
}
@Test
fun `given an entity when deleting then it is removed from the index`() {
val series = seriesLifecycle.createSeries(makeSeries("Series", libraryId = library.id))
luceneHelper.searchEntitiesIds("series", LuceneEntity.Series, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
seriesLifecycle.deleteMany(listOf(series))
luceneHelper.searchEntitiesIds("series", LuceneEntity.Series, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).isEmpty()
}
}
}
@Nested
inner class Collection {
@Test
fun `given empty index when adding an entity then it is added to the index`() {
val collection = SeriesCollection("collection")
collectionLifecycle.addCollection(collection)
val found = luceneHelper.searchEntitiesIds("collection", LuceneEntity.Collection, 10)
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
@Test
fun `given an entity when updating then it is updated in the index`() {
val collection = SeriesCollection("collection")
collectionLifecycle.addCollection(collection)
luceneHelper.searchEntitiesIds("collection", LuceneEntity.Collection, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
collectionRepository.findByIdOrNull(collection.id)?.let {
collectionRepository.update(it.copy(name = "updated"))
}
mockEventPublisher.publishEvent(DomainEvent.CollectionUpdated(collection))
luceneHelper.searchEntitiesIds("collection", LuceneEntity.Collection, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).isEmpty()
}
luceneHelper.searchEntitiesIds("updated", LuceneEntity.Collection, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
}
@Test
fun `given an entity when deleting then it is removed from the index`() {
val collection = SeriesCollection("collection")
collectionLifecycle.addCollection(collection)
luceneHelper.searchEntitiesIds("collection", LuceneEntity.Collection, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
collectionLifecycle.deleteCollection(collection)
luceneHelper.searchEntitiesIds("collection", LuceneEntity.Collection, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).isEmpty()
}
}
}
@Nested
inner class ReadList {
@Test
fun `given empty index when adding an entity then it is added to the index`() {
val readList = org.gotson.komga.domain.model.ReadList("readlist")
readListLifecycle.addReadList(readList)
val found = luceneHelper.searchEntitiesIds("readlist", LuceneEntity.ReadList, 10)
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
@Test
fun `given an entity when updating then it is updated in the index`() {
val readList = org.gotson.komga.domain.model.ReadList("readlist")
readListLifecycle.addReadList(readList)
luceneHelper.searchEntitiesIds("readlist", LuceneEntity.ReadList, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
readListRepository.findByIdOrNull(readList.id)?.let {
readListRepository.update(it.copy(name = "updated"))
}
mockEventPublisher.publishEvent(DomainEvent.ReadListUpdated(readList))
luceneHelper.searchEntitiesIds("readlist", LuceneEntity.ReadList, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).isEmpty()
}
luceneHelper.searchEntitiesIds("updated", LuceneEntity.ReadList, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
}
@Test
fun `given an entity when deleting then it is removed from the index`() {
val readList = org.gotson.komga.domain.model.ReadList("readlist")
readListLifecycle.addReadList(readList)
luceneHelper.searchEntitiesIds("readlist", LuceneEntity.ReadList, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).hasSize(1)
}
readListLifecycle.deleteReadList(readList)
luceneHelper.searchEntitiesIds("readlist", LuceneEntity.ReadList, 10).let { found ->
assertThat(found).isNotNull
assertThat(found).isEmpty()
}
}
}
}