mirror of
https://github.com/gotson/komga.git
synced 2026-05-07 03:51:02 +02:00
feat: Continue PostgreSQL support migration
- Update ReferentialDao to use JooqUdfHelper
- Update SeriesSearchHelper and BookSearchHelper to take a JooqUdfHelper parameter
- Update SeriesDtoDao and BookDtoDao to pass JooqUdfHelper to the helper classes
- Fix compilation errors in helper classes

Note: the migration is incomplete. Several DAO classes still need JooqUdfHelper injection:
- SeriesDtoDao needs jooqUdfHelper in its constructor
- ReadListDao needs jooqUdfHelper injection
- SeriesCollectionDao needs jooqUdfHelper injection
- The BookDtoDao sorts map still uses SqliteUdfDataSource
This commit is contained in:
parent
a38aa4024f
commit
ee45e4f0fb
9 changed files with 837 additions and 34 deletions
35
fix_book_search_helper.py
Normal file
35
fix_book_search_helper.py
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
#!/usr/bin/env python3
"""Rewrite BookSearchHelper.kt to use JooqUdfHelper instead of the SQLite-only collation.

Replaces every ``field.collate(SqliteUdfDataSource.COLLATION_UNICODE_3)`` call with
``jooqUdfHelper.run { field.collateUnicode3() }`` so the helper works on PostgreSQL too.
"""

import re

# Default target file; pass an explicit path to fix_book_search_helper() to override.
DEFAULT_FILE_PATH = "/Users/duong/Documents/GitHub/komga/komga/src/main/kotlin/org/gotson/komga/infrastructure/jooq/BookSearchHelper.kt"

# field.collate(SqliteUdfDataSource.COLLATION_UNICODE_3) -> jooqUdfHelper.run { field.collateUnicode3() }
# Hoisted and precompiled: the pattern never changes between lines.
_COLLATE_RE = re.compile(r"(\w+(?:\.\w+)*)\.collate\(SqliteUdfDataSource\.COLLATION_UNICODE_3\)")


def fix_book_search_helper(file_path=DEFAULT_FILE_PATH):
    """Rewrite SQLite collation calls in *file_path* in place.

    The rewrite is naturally idempotent: once replaced, the
    ``SqliteUdfDataSource`` reference is gone, so re-running is a no-op.

    :param file_path: Kotlin source file to rewrite (defaults to the local
        BookSearchHelper.kt checkout path).
    """
    with open(file_path, "r") as f:
        content = f.read()

    fixed_lines = []
    for line in content.split("\n"):
        # Cheap substring guard avoids running the regex on unaffected lines.
        if "SqliteUdfDataSource.COLLATION_UNICODE_3" in line:
            line = _COLLATE_RE.sub(r"jooqUdfHelper.run { \1.collateUnicode3() }", line)
        fixed_lines.append(line)

    with open(file_path, "w") as f:
        f.write("\n".join(fixed_lines))

    print(f"Fixed {file_path}")


if __name__ == "__main__":
    fix_book_search_helper()
|
||||
42
fix_referential_dao.py
Normal file
42
fix_referential_dao.py
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
#!/usr/bin/env python3
"""Rewrite ReferentialDao.kt to route SQLite-specific UDF calls through JooqUdfHelper.

Two rewrites are applied:
  * ``x.y.udfStripAccents()``                                   -> ``jooqUdfHelper.run { x.y.udfStripAccents() }``
  * ``x.y.collate(SqliteUdfDataSource.COLLATION_UNICODE_3)``    -> ``jooqUdfHelper.run { x.y.collateUnicode3() }``
"""

import re

# Default target file; pass an explicit path to fix_referential_dao() to override.
DEFAULT_FILE_PATH = "/Users/duong/Documents/GitHub/komga/komga/src/main/kotlin/org/gotson/komga/infrastructure/jooq/main/ReferentialDao.kt"

# BUG FIX: the negative lookbehind skips expressions that are already wrapped in
# `jooqUdfHelper.run { ... }`. Without it, re-running the script produced
# doubly-wrapped code such as
#   jooqUdfHelper.run { jooqUdfHelper.run { a.NAME.udfStripAccents() } }
_STRIP_ACCENTS_RE = re.compile(r"(?<!jooqUdfHelper\.run \{ )(\w+\.\w+\.udfStripAccents\(\))")

# The collate rewrite is naturally idempotent: the matched text disappears.
_COLLATE_RE = re.compile(r"(\w+\.\w+)\.collate\(SqliteUdfDataSource\.COLLATION_UNICODE_3\)")


def fix_referential_dao(file_path=DEFAULT_FILE_PATH):
    """Rewrite UDF and collation calls in *file_path* in place (idempotent).

    :param file_path: Kotlin source file to rewrite (defaults to the local
        ReferentialDao.kt checkout path).
    """
    with open(file_path, "r") as f:
        content = f.read()

    # First pass: wrap bare udfStripAccents() calls.
    content = _STRIP_ACCENTS_RE.sub(
        lambda m: f"jooqUdfHelper.run {{ {m.group(1)} }}",
        content,
    )

    # Second pass: replace the SQLite collation constant.
    content = _COLLATE_RE.sub(
        lambda m: f"jooqUdfHelper.run {{ {m.group(1)}.collateUnicode3() }}",
        content,
    )

    with open(file_path, "w") as f:
        f.write(content)

    print(f"Fixed {file_path}")


if __name__ == "__main__":
    fix_referential_dao()
|
||||
47
fix_series_search_helper.py
Normal file
47
fix_series_search_helper.py
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
#!/usr/bin/env python3
"""Rewrite SeriesSearchHelper.kt to use JooqUdfHelper instead of the SQLite-only collation.

Replaces every ``field.collate(SqliteUdfDataSource.COLLATION_UNICODE_3)`` call with
``jooqUdfHelper.run { field.collateUnicode3() }`` so the helper works on PostgreSQL too.
"""

import re

# Default target file; pass an explicit path to fix_series_search_helper() to override.
DEFAULT_FILE_PATH = "/Users/duong/Documents/GitHub/komga/komga/src/main/kotlin/org/gotson/komga/infrastructure/jooq/SeriesSearchHelper.kt"

# field.collate(SqliteUdfDataSource.COLLATION_UNICODE_3) -> jooqUdfHelper.run { field.collateUnicode3() }
# Hoisted and precompiled: the pattern never changes between lines.
_COLLATE_RE = re.compile(r"(\w+(?:\.\w+)*)\.collate\(SqliteUdfDataSource\.COLLATION_UNICODE_3\)")


def fix_series_search_helper(file_path=DEFAULT_FILE_PATH):
    """Rewrite SQLite collation calls in *file_path* in place.

    Naturally idempotent: once replaced, the ``SqliteUdfDataSource``
    reference is gone, so re-running is a no-op.

    :param file_path: Kotlin source file to rewrite (defaults to the local
        SeriesSearchHelper.kt checkout path).
    """
    with open(file_path, "r") as f:
        content = f.read()

    fixed_lines = []
    for line in content.split("\n"):
        # Cheap substring guard avoids running the regex on unaffected lines.
        if "SqliteUdfDataSource.COLLATION_UNICODE_3" in line:
            line = _COLLATE_RE.sub(r"jooqUdfHelper.run { \1.collateUnicode3() }", line)
        fixed_lines.append(line)

    with open(file_path, "w") as f:
        f.write("\n".join(fixed_lines))

    print(f"Fixed {file_path}")


if __name__ == "__main__":
    fix_series_search_helper()
|
||||
|
|
@ -7,7 +7,6 @@ import org.gotson.komga.domain.model.ReadStatus
|
|||
import org.gotson.komga.domain.model.SearchCondition
|
||||
import org.gotson.komga.domain.model.SearchContext
|
||||
import org.gotson.komga.domain.model.SearchOperator
|
||||
import org.gotson.komga.infrastructure.datasource.SqliteUdfDataSource
|
||||
import org.gotson.komga.infrastructure.jooq.RequiredJoin.ReadProgress
|
||||
import org.gotson.komga.jooq.main.Tables
|
||||
import org.jooq.Condition
|
||||
|
|
@ -20,6 +19,7 @@ private val logger = KotlinLogging.logger {}
|
|||
*/
|
||||
class BookSearchHelper(
|
||||
val context: SearchContext,
|
||||
private val jooqUdfHelper: JooqUdfHelper,
|
||||
) : ContentRestrictionsSearchHelper() {
|
||||
fun toCondition(searchCondition: SearchCondition.Book?): Pair<Condition, Set<RequiredJoin>> {
|
||||
val base = toCondition()
|
||||
|
|
|
|||
|
|
@ -6,7 +6,6 @@ import org.gotson.komga.domain.model.SearchCondition
|
|||
import org.gotson.komga.domain.model.SearchContext
|
||||
import org.gotson.komga.domain.model.SearchOperator
|
||||
import org.gotson.komga.domain.model.SeriesMetadata
|
||||
import org.gotson.komga.infrastructure.datasource.SqliteUdfDataSource
|
||||
import org.gotson.komga.jooq.main.Tables
|
||||
import org.jooq.Condition
|
||||
import org.jooq.impl.DSL
|
||||
|
|
@ -18,6 +17,7 @@ private val logger = KotlinLogging.logger {}
|
|||
*/
|
||||
class SeriesSearchHelper(
|
||||
val context: SearchContext,
|
||||
private val jooqUdfHelper: JooqUdfHelper,
|
||||
) : ContentRestrictionsSearchHelper() {
|
||||
fun toCondition(searchCondition: SearchCondition.Series?): Pair<Condition, Set<RequiredJoin>> {
|
||||
val base = toCondition()
|
||||
|
|
|
|||
|
|
@ -110,7 +110,7 @@ class BookDtoDao(
|
|||
): Page<BookDto> {
|
||||
requireNotNull(context.userId) { "Missing userId in search context" }
|
||||
|
||||
val (conditions, joins) = BookSearchHelper(context).toCondition(search.condition)
|
||||
val (conditions, joins) = BookSearchHelper(context, jooqUdfHelper).toCondition(search.condition)
|
||||
return findAll(conditions, context.userId, pageable, search.fullTextSearch, joins)
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -49,9 +49,9 @@ class ReferentialDao(
|
|||
.selectDistinct(a.NAME, a.ROLE)
|
||||
.from(a)
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(b).on(a.BOOK_ID.eq(b.ID)) } }
|
||||
.where(a.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents()))
|
||||
.where(jooqUdfHelper.run { jooqUdfHelper.run { a.NAME.udfStripAccents() } }.containsIgnoreCase(search.stripAccents()))
|
||||
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(a.NAME.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { a.NAME.collateUnicode3() })
|
||||
.fetchInto(a)
|
||||
.map { it.toDomain() }
|
||||
|
||||
|
|
@ -65,10 +65,10 @@ class ReferentialDao(
|
|||
.from(bmaa)
|
||||
.leftJoin(s)
|
||||
.on(bmaa.SERIES_ID.eq(s.ID))
|
||||
.where(bmaa.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents()))
|
||||
.where(jooqUdfHelper.run { bmaa.NAME.udfStripAccents() }.containsIgnoreCase(search.stripAccents()))
|
||||
.and(s.LIBRARY_ID.eq(libraryId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(bmaa.NAME.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { bmaa.NAME.collateUnicode3() })
|
||||
.fetchInto(bmaa)
|
||||
.map { it.toDomain() }
|
||||
|
||||
|
|
@ -83,10 +83,10 @@ class ReferentialDao(
|
|||
.leftJoin(cs)
|
||||
.on(bmaa.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) } }
|
||||
.where(bmaa.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents()))
|
||||
.where(jooqUdfHelper.run { bmaa.NAME.udfStripAccents() }.containsIgnoreCase(search.stripAccents()))
|
||||
.and(cs.COLLECTION_ID.eq(collectionId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(bmaa.NAME.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { bmaa.NAME.collateUnicode3() })
|
||||
.fetchInto(bmaa)
|
||||
.map { it.toDomain() }
|
||||
|
||||
|
|
@ -99,10 +99,10 @@ class ReferentialDao(
|
|||
.selectDistinct(bmaa.NAME, bmaa.ROLE)
|
||||
.from(bmaa)
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) } }
|
||||
.where(bmaa.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents()))
|
||||
.where(jooqUdfHelper.run { bmaa.NAME.udfStripAccents() }.containsIgnoreCase(search.stripAccents()))
|
||||
.and(bmaa.SERIES_ID.eq(seriesId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(bmaa.NAME.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { bmaa.NAME.collateUnicode3() })
|
||||
.fetchInto(bmaa)
|
||||
.map { it.toDomain() }
|
||||
|
||||
|
|
@ -177,7 +177,7 @@ class ReferentialDao(
|
|||
.leftJoin(rb)
|
||||
.on(b.ID.eq(rb.BOOK_ID))
|
||||
}.where(noCondition())
|
||||
.apply { search?.let { and(bmaa.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents())) } }
|
||||
.apply { search?.let { and(jooqUdfHelper.run { bmaa.NAME.udfStripAccents() }.containsIgnoreCase(search.stripAccents())) } }
|
||||
.apply { role?.let { and(bmaa.ROLE.eq(role)) } }
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.apply {
|
||||
|
|
@ -192,7 +192,7 @@ class ReferentialDao(
|
|||
}
|
||||
|
||||
val count = dslRO.fetchCount(query)
|
||||
val sort = bmaa.NAME.collate(SqliteUdfDataSource.COLLATION_UNICODE_3)
|
||||
val sort = jooqUdfHelper.run { bmaa.NAME.collateUnicode3() }
|
||||
|
||||
val items =
|
||||
query
|
||||
|
|
@ -220,9 +220,9 @@ class ReferentialDao(
|
|||
.selectDistinct(a.NAME)
|
||||
.from(a)
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(b).on(a.BOOK_ID.eq(b.ID)) } }
|
||||
.where(a.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents()))
|
||||
.where(jooqUdfHelper.run { a.NAME.udfStripAccents() }.containsIgnoreCase(search.stripAccents()))
|
||||
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(a.NAME.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { a.NAME.collateUnicode3() })
|
||||
.fetch(a.NAME)
|
||||
|
||||
override fun findAllAuthorsRoles(filterOnLibraryIds: Collection<String>?): List<String> =
|
||||
|
|
@ -248,7 +248,7 @@ class ReferentialDao(
|
|||
.on(g.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.`in`(it))
|
||||
}
|
||||
}.orderBy(g.GENRE.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
}.orderBy(jooqUdfHelper.run { g.GENRE.collateUnicode3() })
|
||||
.fetchSet(g.GENRE)
|
||||
|
||||
override fun findAllGenresByLibraries(
|
||||
|
|
@ -262,7 +262,7 @@ class ReferentialDao(
|
|||
.on(g.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.`in`(libraryIds))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(g.GENRE.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { g.GENRE.collateUnicode3() })
|
||||
.fetchSet(g.GENRE)
|
||||
|
||||
override fun findAllGenresByCollection(
|
||||
|
|
@ -277,7 +277,7 @@ class ReferentialDao(
|
|||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(g.SERIES_ID.eq(s.ID)) } }
|
||||
.where(cs.COLLECTION_ID.eq(collectionId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(g.GENRE.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { g.GENRE.collateUnicode3() })
|
||||
.fetchSet(g.GENRE)
|
||||
|
||||
override fun findAllSeriesAndBookTags(filterOnLibraryIds: Collection<String>?): Set<String> =
|
||||
|
|
@ -351,7 +351,7 @@ class ReferentialDao(
|
|||
.on(st.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.`in`(it))
|
||||
}
|
||||
}.orderBy(st.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
}.orderBy(jooqUdfHelper.run { st.TAG.collateUnicode3() })
|
||||
.fetchSet(st.TAG)
|
||||
|
||||
override fun findAllSeriesTagsByLibrary(
|
||||
|
|
@ -365,7 +365,7 @@ class ReferentialDao(
|
|||
.on(st.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.eq(libraryId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(st.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { st.TAG.collateUnicode3() })
|
||||
.fetchSet(st.TAG)
|
||||
|
||||
override fun findAllBookTagsBySeries(
|
||||
|
|
@ -379,7 +379,7 @@ class ReferentialDao(
|
|||
.on(bt.BOOK_ID.eq(b.ID))
|
||||
.where(b.SERIES_ID.eq(seriesId))
|
||||
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(bt.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { bt.TAG.collateUnicode3() })
|
||||
.fetchSet(bt.TAG)
|
||||
|
||||
override fun findAllBookTagsByReadList(
|
||||
|
|
@ -395,7 +395,7 @@ class ReferentialDao(
|
|||
.on(bt.BOOK_ID.eq(rb.BOOK_ID))
|
||||
.where(rb.READLIST_ID.eq(readListId))
|
||||
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(bt.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { bt.TAG.collateUnicode3() })
|
||||
.fetchSet(bt.TAG)
|
||||
|
||||
override fun findAllSeriesTagsByCollection(
|
||||
|
|
@ -410,7 +410,7 @@ class ReferentialDao(
|
|||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(st.SERIES_ID.eq(s.ID)) } }
|
||||
.where(cs.COLLECTION_ID.eq(collectionId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(st.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { st.TAG.collateUnicode3() })
|
||||
.fetchSet(st.TAG)
|
||||
|
||||
override fun findAllBookTags(filterOnLibraryIds: Collection<String>?): Set<String> =
|
||||
|
|
@ -423,7 +423,7 @@ class ReferentialDao(
|
|||
.on(bt.BOOK_ID.eq(b.ID))
|
||||
.where(b.LIBRARY_ID.`in`(it))
|
||||
}
|
||||
}.orderBy(bt.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
}.orderBy(jooqUdfHelper.run { bt.TAG.collateUnicode3() })
|
||||
.fetchSet(bt.TAG)
|
||||
|
||||
override fun findAllLanguages(filterOnLibraryIds: Collection<String>?): Set<String> =
|
||||
|
|
@ -474,7 +474,7 @@ class ReferentialDao(
|
|||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) } }
|
||||
.where(sd.PUBLISHER.ne(""))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(sd.PUBLISHER.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { sd.PUBLISHER.collateUnicode3() })
|
||||
.fetchSet(sd.PUBLISHER)
|
||||
|
||||
override fun findAllPublishers(
|
||||
|
|
@ -490,7 +490,7 @@ class ReferentialDao(
|
|||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
|
||||
val count = dslRO.fetchCount(query)
|
||||
val sort = sd.PUBLISHER.collate(SqliteUdfDataSource.COLLATION_UNICODE_3)
|
||||
val sort = jooqUdfHelper.run { sd.PUBLISHER.collateUnicode3() }
|
||||
|
||||
val items =
|
||||
query
|
||||
|
|
@ -521,7 +521,7 @@ class ReferentialDao(
|
|||
.where(sd.PUBLISHER.ne(""))
|
||||
.and(s.LIBRARY_ID.`in`(libraryIds))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(sd.PUBLISHER.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { sd.PUBLISHER.collateUnicode3() })
|
||||
.fetchSet(sd.PUBLISHER)
|
||||
|
||||
override fun findAllPublishersByCollection(
|
||||
|
|
@ -537,7 +537,7 @@ class ReferentialDao(
|
|||
.where(sd.PUBLISHER.ne(""))
|
||||
.and(cs.COLLECTION_ID.eq(collectionId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(sd.PUBLISHER.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { sd.PUBLISHER.collateUnicode3() })
|
||||
.fetchSet(sd.PUBLISHER)
|
||||
|
||||
override fun findAllAgeRatings(filterOnLibraryIds: Collection<String>?): Set<Int?> =
|
||||
|
|
@ -633,7 +633,7 @@ class ReferentialDao(
|
|||
.on(sl.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.`in`(it))
|
||||
}
|
||||
}.orderBy(sl.LABEL.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
}.orderBy(jooqUdfHelper.run { sl.LABEL.collateUnicode3() })
|
||||
.fetchSet(sl.LABEL)
|
||||
|
||||
override fun findAllSharingLabelsByLibraries(
|
||||
|
|
@ -647,7 +647,7 @@ class ReferentialDao(
|
|||
.on(sl.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.`in`(libraryIds))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(sl.LABEL.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { sl.LABEL.collateUnicode3() })
|
||||
.fetchSet(sl.LABEL)
|
||||
|
||||
override fun findAllSharingLabelsByCollection(
|
||||
|
|
@ -662,7 +662,7 @@ class ReferentialDao(
|
|||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(sl.SERIES_ID.eq(s.ID)) } }
|
||||
.where(cs.COLLECTION_ID.eq(collectionId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(sl.LABEL.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.orderBy(jooqUdfHelper.run { sl.LABEL.collateUnicode3() })
|
||||
.fetchSet(sl.LABEL)
|
||||
|
||||
private fun BookMetadataAuthorRecord.toDomain(): Author =
|
||||
|
|
|
|||
|
|
@ -0,0 +1,679 @@
|
|||
package org.gotson.komga.infrastructure.jooq.main
|
||||
|
||||
import org.gotson.komga.domain.model.Author
|
||||
import org.gotson.komga.domain.persistence.ReferentialRepository
|
||||
import org.gotson.komga.infrastructure.jooq.JooqUdfHelper
|
||||
import org.gotson.komga.infrastructure.jooq.SplitDslDaoBase
|
||||
import org.gotson.komga.jooq.main.Tables
|
||||
import org.gotson.komga.jooq.main.tables.records.BookMetadataAggregationAuthorRecord
|
||||
import org.gotson.komga.jooq.main.tables.records.BookMetadataAuthorRecord
|
||||
import org.gotson.komga.language.stripAccents
|
||||
import org.jooq.DSLContext
|
||||
import org.jooq.impl.DSL.noCondition
|
||||
import org.jooq.impl.DSL.select
|
||||
import org.springframework.beans.factory.annotation.Qualifier
|
||||
import org.springframework.data.domain.Page
|
||||
import org.springframework.data.domain.PageImpl
|
||||
import org.springframework.data.domain.PageRequest
|
||||
import org.springframework.data.domain.Pageable
|
||||
import org.springframework.data.domain.Sort
|
||||
import org.springframework.stereotype.Component
|
||||
import java.time.LocalDate
|
||||
|
||||
/**
 * jOOQ-backed implementation of [ReferentialRepository].
 *
 * Uses a read/write and a read-only [DSLContext] (via [SplitDslDaoBase]) and a
 * [JooqUdfHelper] that supplies dialect-specific accent stripping and unicode
 * collation (part of the SQLite -> PostgreSQL migration).
 */
@Component
class ReferentialDao(
  dslRW: DSLContext,
  @Qualifier("dslContextRO") dslRO: DSLContext,
  private val jooqUdfHelper: JooqUdfHelper,
) : SplitDslDaoBase(dslRW, dslRO),
  ReferentialRepository {
  // Short aliases for the generated jOOQ tables used throughout this DAO.
  private val a = Tables.BOOK_METADATA_AUTHOR
  private val sd = Tables.SERIES_METADATA
  private val bma = Tables.BOOK_METADATA_AGGREGATION
  private val bmaa = Tables.BOOK_METADATA_AGGREGATION_AUTHOR
  private val bmat = Tables.BOOK_METADATA_AGGREGATION_TAG
  private val s = Tables.SERIES
  private val b = Tables.BOOK
  private val g = Tables.SERIES_METADATA_GENRE
  private val bt = Tables.BOOK_METADATA_TAG
  private val st = Tables.SERIES_METADATA_TAG
  private val cs = Tables.COLLECTION_SERIES
  private val rb = Tables.READLIST_BOOK
  private val sl = Tables.SERIES_METADATA_SHARING
|
||||
|
||||
/**
 * Finds all distinct book authors whose name contains [search], accent-insensitively,
 * optionally restricted to books in [filterOnLibraryIds], ordered by name.
 */
override fun findAllAuthorsByName(
  search: String,
  filterOnLibraryIds: Collection<String>?,
): List<Author> =
  dslRO
    .selectDistinct(a.NAME, a.ROLE)
    .from(a)
    // Join to BOOK only when a library filter is requested.
    .apply { filterOnLibraryIds?.let { leftJoin(b).on(a.BOOK_ID.eq(b.ID)) } }
    // jooqUdfHelper supplies dialect-specific accent stripping.
    .where(jooqUdfHelper.run { a.NAME.udfStripAccents() }.containsIgnoreCase(search.stripAccents()))
    .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
    // Dialect-specific unicode collation for the sort.
    .orderBy(jooqUdfHelper.run { a.NAME.collateUnicode3() })
    .fetchInto(a)
    .map { it.toDomain() }
|
||||
|
||||
/**
 * Finds all distinct aggregated authors whose name contains [search]
 * (accent-insensitive) within series of [libraryId], ordered by name.
 *
 * @param filterOnLibraryIds optional additional restriction to a set of library ids.
 */
override fun findAllAuthorsByNameAndLibrary(
  search: String,
  libraryId: String,
  filterOnLibraryIds: Collection<String>?,
): List<Author> =
  dslRO
    .selectDistinct(bmaa.NAME, bmaa.ROLE)
    .from(bmaa)
    .leftJoin(s)
    .on(bmaa.SERIES_ID.eq(s.ID))
    // Route through jooqUdfHelper: SqliteUdfDataSource is not imported in this file,
    // and the helper provides the dialect-specific equivalents (see sibling methods).
    .where(jooqUdfHelper.run { bmaa.NAME.udfStripAccents() }.containsIgnoreCase(search.stripAccents()))
    .and(s.LIBRARY_ID.eq(libraryId))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    .orderBy(jooqUdfHelper.run { bmaa.NAME.collateUnicode3() })
    .fetchInto(bmaa)
    .map { it.toDomain() }
|
||||
|
||||
/**
 * Finds all distinct aggregated authors whose name contains [search]
 * (accent-insensitive) within series of collection [collectionId], ordered by name.
 *
 * @param filterOnLibraryIds optional additional restriction to a set of library ids.
 */
override fun findAllAuthorsByNameAndCollection(
  search: String,
  collectionId: String,
  filterOnLibraryIds: Collection<String>?,
): List<Author> =
  dslRO
    .selectDistinct(bmaa.NAME, bmaa.ROLE)
    .from(bmaa)
    .leftJoin(cs)
    .on(bmaa.SERIES_ID.eq(cs.SERIES_ID))
    .apply { filterOnLibraryIds?.let { leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) } }
    // Route through jooqUdfHelper for dialect-specific accent stripping/collation
    // (SqliteUdfDataSource is not imported in this file).
    .where(jooqUdfHelper.run { bmaa.NAME.udfStripAccents() }.containsIgnoreCase(search.stripAccents()))
    .and(cs.COLLECTION_ID.eq(collectionId))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    .orderBy(jooqUdfHelper.run { bmaa.NAME.collateUnicode3() })
    .fetchInto(bmaa)
    .map { it.toDomain() }
|
||||
|
||||
/**
 * Finds all distinct aggregated authors whose name contains [search]
 * (accent-insensitive) within series [seriesId], ordered by name.
 *
 * @param filterOnLibraryIds optional additional restriction to a set of library ids.
 */
override fun findAllAuthorsByNameAndSeries(
  search: String,
  seriesId: String,
  filterOnLibraryIds: Collection<String>?,
): List<Author> =
  dslRO
    .selectDistinct(bmaa.NAME, bmaa.ROLE)
    .from(bmaa)
    .apply { filterOnLibraryIds?.let { leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) } }
    // Route through jooqUdfHelper for dialect-specific accent stripping/collation
    // (SqliteUdfDataSource is not imported in this file).
    .where(jooqUdfHelper.run { bmaa.NAME.udfStripAccents() }.containsIgnoreCase(search.stripAccents()))
    .and(bmaa.SERIES_ID.eq(seriesId))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    .orderBy(jooqUdfHelper.run { bmaa.NAME.collateUnicode3() })
    .fetchInto(bmaa)
    .map { it.toDomain() }
|
||||
|
||||
// Paged author lookups: each overload delegates to findAuthorsByName with the
// appropriate FilterBy scope (none, library, collection, series, read list).

/** Paged author search with no scope restriction. */
override fun findAllAuthorsByName(
  search: String?,
  role: String?,
  filterOnLibraryIds: Collection<String>?,
  pageable: Pageable,
): Page<Author> = findAuthorsByName(search, role, filterOnLibraryIds, pageable, null)

/** Paged author search restricted to [libraryIds]. */
override fun findAllAuthorsByNameAndLibraries(
  search: String?,
  role: String?,
  libraryIds: Set<String>,
  filterOnLibraryIds: Collection<String>?,
  pageable: Pageable,
): Page<Author> = findAuthorsByName(search, role, filterOnLibraryIds, pageable, FilterBy(FilterByType.LIBRARY, libraryIds))

/** Paged author search restricted to collection [collectionId]. */
override fun findAllAuthorsByNameAndCollection(
  search: String?,
  role: String?,
  collectionId: String,
  filterOnLibraryIds: Collection<String>?,
  pageable: Pageable,
): Page<Author> = findAuthorsByName(search, role, filterOnLibraryIds, pageable, FilterBy(FilterByType.COLLECTION, setOf(collectionId)))

/** Paged author search restricted to series [seriesId]. */
override fun findAllAuthorsByNameAndSeries(
  search: String?,
  role: String?,
  seriesId: String,
  filterOnLibraryIds: Collection<String>?,
  pageable: Pageable,
): Page<Author> = findAuthorsByName(search, role, filterOnLibraryIds, pageable, FilterBy(FilterByType.SERIES, setOf(seriesId)))

/** Paged author search restricted to read list [readListId]. */
override fun findAllAuthorsByNameAndReadList(
  search: String?,
  role: String?,
  readListId: String,
  filterOnLibraryIds: Collection<String>?,
  pageable: Pageable,
): Page<Author> = findAuthorsByName(search, role, filterOnLibraryIds, pageable, FilterBy(FilterByType.READLIST, setOf(readListId)))
|
||||
|
||||
/** The kind of entity a paged author search is scoped to. */
private enum class FilterByType {
  LIBRARY,
  COLLECTION,
  SERIES,
  READLIST,
}
|
||||
|
||||
/** A scope restriction for [findAuthorsByName]: the entity [type] and the matching [ids]. */
private data class FilterBy(
  val type: FilterByType,
  val ids: Set<String>,
)
|
||||
|
||||
/**
 * Shared implementation for the paged author lookups.
 *
 * Builds a distinct (name, role) query over the aggregated authors table,
 * optionally joined to series/collection/read-list tables depending on
 * [filterBy], then pages and sorts by name.
 *
 * @param search optional accent-insensitive name filter.
 * @param role optional exact role filter.
 * @param filterOnLibraryIds optional restriction to a set of library ids.
 * @param filterBy optional scope restriction (library/collection/series/read list).
 */
private fun findAuthorsByName(
  search: String?,
  role: String?,
  filterOnLibraryIds: Collection<String>?,
  pageable: Pageable,
  filterBy: FilterBy?,
): Page<Author> {
  val query =
    dslRO
      .selectDistinct(bmaa.NAME, bmaa.ROLE)
      .from(bmaa)
      // SERIES is needed both for the library filter and the LIBRARY scope.
      .apply { if (filterOnLibraryIds != null || filterBy?.type == FilterByType.LIBRARY) leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) }
      .apply { if (filterBy?.type == FilterByType.COLLECTION) leftJoin(cs).on(bmaa.SERIES_ID.eq(cs.SERIES_ID)) }
      .apply {
        if (filterBy?.type == FilterByType.READLIST)
          leftJoin(b)
            .on(bmaa.SERIES_ID.eq(b.SERIES_ID))
            .leftJoin(rb)
            .on(b.ID.eq(rb.BOOK_ID))
      }.where(noCondition())
      // Route through jooqUdfHelper for dialect-specific accent stripping
      // (SqliteUdfDataSource is not imported in this file).
      .apply { search?.let { and(jooqUdfHelper.run { bmaa.NAME.udfStripAccents() }.containsIgnoreCase(search.stripAccents())) } }
      .apply { role?.let { and(bmaa.ROLE.eq(role)) } }
      .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
      .apply {
        filterBy?.let {
          when (it.type) {
            FilterByType.LIBRARY -> and(s.LIBRARY_ID.`in`(it.ids))
            FilterByType.COLLECTION -> and(cs.COLLECTION_ID.`in`(it.ids))
            FilterByType.SERIES -> and(bmaa.SERIES_ID.`in`(it.ids))
            FilterByType.READLIST -> and(rb.READLIST_ID.`in`(it.ids))
          }
        }
      }

  val count = dslRO.fetchCount(query)
  // Dialect-specific unicode collation for the name sort.
  val sort = jooqUdfHelper.run { bmaa.NAME.collateUnicode3() }

  val items =
    query
      .orderBy(sort)
      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
      .fetchInto(a)
      .map { it.toDomain() }

  val pageSort = Sort.by("relevance")
  return PageImpl(
    items,
    if (pageable.isPaged)
      PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
    else
      PageRequest.of(0, maxOf(count, 20), pageSort),
    count.toLong(),
  )
}
|
||||
|
||||
/**
 * Finds all distinct author names containing [search] (accent-insensitive),
 * optionally restricted to books in [filterOnLibraryIds], ordered by name.
 */
override fun findAllAuthorsNamesByName(
  search: String,
  filterOnLibraryIds: Collection<String>?,
): List<String> =
  dslRO
    .selectDistinct(a.NAME)
    .from(a)
    .apply { filterOnLibraryIds?.let { leftJoin(b).on(a.BOOK_ID.eq(b.ID)) } }
    // Route through jooqUdfHelper for dialect-specific accent stripping/collation
    // (SqliteUdfDataSource is not imported in this file).
    .where(jooqUdfHelper.run { a.NAME.udfStripAccents() }.containsIgnoreCase(search.stripAccents()))
    .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
    .orderBy(jooqUdfHelper.run { a.NAME.collateUnicode3() })
    .fetch(a.NAME)
|
||||
|
||||
/**
 * Finds all distinct author roles, optionally restricted to books in
 * [filterOnLibraryIds], ordered alphabetically.
 */
override fun findAllAuthorsRoles(filterOnLibraryIds: Collection<String>?): List<String> =
  dslRO
    .selectDistinct(a.ROLE)
    .from(a)
    .apply {
      filterOnLibraryIds?.let {
        leftJoin(b)
          .on(a.BOOK_ID.eq(b.ID))
          .where(b.LIBRARY_ID.`in`(it))
      }
    }.orderBy(a.ROLE)
    .fetch(a.ROLE)
|
||||
|
||||
/**
 * Finds all distinct series genres, optionally restricted to series in
 * [filterOnLibraryIds], ordered with unicode collation.
 */
override fun findAllGenres(filterOnLibraryIds: Collection<String>?): Set<String> =
  dslRO
    .selectDistinct(g.GENRE)
    .from(g)
    .apply {
      filterOnLibraryIds?.let {
        leftJoin(s)
          .on(g.SERIES_ID.eq(s.ID))
          .where(s.LIBRARY_ID.`in`(it))
      }
    // Route through jooqUdfHelper for dialect-specific collation
    // (SqliteUdfDataSource is not imported in this file).
    }.orderBy(jooqUdfHelper.run { g.GENRE.collateUnicode3() })
    .fetchSet(g.GENRE)
|
||||
|
||||
/**
 * Finds all distinct genres for series in [libraryIds], optionally further
 * restricted to [filterOnLibraryIds], ordered with unicode collation.
 */
override fun findAllGenresByLibraries(
  libraryIds: Set<String>,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .selectDistinct(g.GENRE)
    .from(g)
    .leftJoin(s)
    .on(g.SERIES_ID.eq(s.ID))
    .where(s.LIBRARY_ID.`in`(libraryIds))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    // Route through jooqUdfHelper for dialect-specific collation
    // (SqliteUdfDataSource is not imported in this file).
    .orderBy(jooqUdfHelper.run { g.GENRE.collateUnicode3() })
    .fetchSet(g.GENRE)
|
||||
|
||||
/**
 * Finds all distinct genres for series in collection [collectionId], optionally
 * restricted to [filterOnLibraryIds], ordered with unicode collation.
 */
override fun findAllGenresByCollection(
  collectionId: String,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .selectDistinct(g.GENRE)
    .from(g)
    .leftJoin(cs)
    .on(g.SERIES_ID.eq(cs.SERIES_ID))
    .apply { filterOnLibraryIds?.let { leftJoin(s).on(g.SERIES_ID.eq(s.ID)) } }
    .where(cs.COLLECTION_ID.eq(collectionId))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    // Route through jooqUdfHelper for dialect-specific collation
    // (SqliteUdfDataSource is not imported in this file).
    .orderBy(jooqUdfHelper.run { g.GENRE.collateUnicode3() })
    .fetchSet(g.GENRE)
|
||||
|
||||
/**
 * Finds the union of all book tags and series tags, optionally restricted to
 * [filterOnLibraryIds]. Sorting is done in Kotlin (accent-insensitive,
 * case-insensitive) because the UNION defeats SQL-side collation.
 */
override fun findAllSeriesAndBookTags(filterOnLibraryIds: Collection<String>?): Set<String> =
  dslRO
    .select(bt.TAG.`as`("tag"))
    .from(bt)
    .apply { filterOnLibraryIds?.let { leftJoin(b).on(bt.BOOK_ID.eq(b.ID)).where(b.LIBRARY_ID.`in`(it)) } }
    .union(
      select(st.TAG.`as`("tag"))
        .from(st)
        .apply { filterOnLibraryIds?.let { leftJoin(s).on(st.SERIES_ID.eq(s.ID)).where(s.LIBRARY_ID.`in`(it)) } },
    ).fetchSet(0, String::class.java)
    .sortedBy { it.stripAccents().lowercase() }
    .toSet()
|
||||
|
||||
/**
 * Finds the union of book tags and series tags for [libraryIds], optionally
 * further restricted to [filterOnLibraryIds]. Sorted in Kotlin
 * (accent-insensitive, case-insensitive).
 */
override fun findAllSeriesAndBookTagsByLibraries(
  libraryIds: Set<String>,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .select(bt.TAG.`as`("tag"))
    .from(bt)
    .leftJoin(b)
    .on(bt.BOOK_ID.eq(b.ID))
    .where(b.LIBRARY_ID.`in`(libraryIds))
    .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
    .union(
      select(st.TAG.`as`("tag"))
        .from(st)
        .leftJoin(s)
        .on(st.SERIES_ID.eq(s.ID))
        .where(s.LIBRARY_ID.`in`(libraryIds))
        .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } },
    ).fetchSet(0, String::class.java)
    .sortedBy { it.stripAccents().lowercase() }
    .toSet()
|
||||
|
||||
/**
 * Finds the union of aggregated book tags and series tags for series in
 * collection [collectionId], optionally restricted to [filterOnLibraryIds].
 * Sorted in Kotlin (accent-insensitive, case-insensitive).
 */
override fun findAllSeriesAndBookTagsByCollection(
  collectionId: String,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .select(bmat.TAG.`as`("tag"))
    .from(bmat)
    .leftJoin(s)
    .on(bmat.SERIES_ID.eq(s.ID))
    .leftJoin(cs)
    .on(bmat.SERIES_ID.eq(cs.SERIES_ID))
    .where(cs.COLLECTION_ID.eq(collectionId))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    .union(
      select(st.TAG.`as`("tag"))
        .from(st)
        .leftJoin(cs)
        .on(st.SERIES_ID.eq(cs.SERIES_ID))
        .leftJoin(s)
        .on(st.SERIES_ID.eq(s.ID))
        .where(cs.COLLECTION_ID.eq(collectionId))
        .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } },
    ).fetchSet(0, String::class.java)
    .sortedBy { it.stripAccents().lowercase() }
    .toSet()
|
||||
|
||||
/**
 * Returns all distinct series tags, optionally restricted to [filterOnLibraryIds],
 * ordered with the dialect-agnostic unicode collation.
 */
override fun findAllSeriesTags(filterOnLibraryIds: Collection<String>?): Set<String> =
  dslRO
    .select(st.TAG)
    .from(st)
    .apply {
      filterOnLibraryIds?.let {
        leftJoin(s)
          .on(st.SERIES_ID.eq(s.ID))
          .where(s.LIBRARY_ID.`in`(it))
      }
      // Use the database-specific UDF helper instead of the SQLite-only collation,
      // so ordering also works on PostgreSQL.
    }.orderBy(jooqUdfHelper.run { st.TAG.collateUnicode3() })
    .fetchSet(st.TAG)
|
||||
|
||||
/**
 * Returns all series tags for library [libraryId], optionally restricted to
 * [filterOnLibraryIds], ordered with the dialect-agnostic unicode collation.
 */
override fun findAllSeriesTagsByLibrary(
  libraryId: String,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .select(st.TAG)
    .from(st)
    .leftJoin(s)
    .on(st.SERIES_ID.eq(s.ID))
    .where(s.LIBRARY_ID.eq(libraryId))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    // Dialect-agnostic unicode collation (works on SQLite and PostgreSQL).
    .orderBy(jooqUdfHelper.run { st.TAG.collateUnicode3() })
    .fetchSet(st.TAG)
|
||||
|
||||
/**
 * Returns all book tags for books of series [seriesId], optionally restricted to
 * [filterOnLibraryIds], ordered with the dialect-agnostic unicode collation.
 */
override fun findAllBookTagsBySeries(
  seriesId: String,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .select(bt.TAG)
    .from(bt)
    .leftJoin(b)
    .on(bt.BOOK_ID.eq(b.ID))
    .where(b.SERIES_ID.eq(seriesId))
    .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
    // Dialect-agnostic unicode collation (works on SQLite and PostgreSQL).
    .orderBy(jooqUdfHelper.run { bt.TAG.collateUnicode3() })
    .fetchSet(bt.TAG)
|
||||
|
||||
/**
 * Returns all book tags for books in read list [readListId], optionally restricted
 * to [filterOnLibraryIds], ordered with the dialect-agnostic unicode collation.
 */
override fun findAllBookTagsByReadList(
  readListId: String,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .select(bt.TAG)
    .from(bt)
    .leftJoin(b)
    .on(bt.BOOK_ID.eq(b.ID))
    .leftJoin(rb)
    .on(bt.BOOK_ID.eq(rb.BOOK_ID))
    .where(rb.READLIST_ID.eq(readListId))
    .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
    // Dialect-agnostic unicode collation (works on SQLite and PostgreSQL).
    .orderBy(jooqUdfHelper.run { bt.TAG.collateUnicode3() })
    .fetchSet(bt.TAG)
|
||||
|
||||
/**
 * Returns all series tags for series in collection [collectionId], optionally
 * restricted to [filterOnLibraryIds], ordered with the dialect-agnostic unicode collation.
 */
override fun findAllSeriesTagsByCollection(
  collectionId: String,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .select(st.TAG)
    .from(st)
    .leftJoin(cs)
    .on(st.SERIES_ID.eq(cs.SERIES_ID))
    .apply { filterOnLibraryIds?.let { leftJoin(s).on(st.SERIES_ID.eq(s.ID)) } }
    .where(cs.COLLECTION_ID.eq(collectionId))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    // Dialect-agnostic unicode collation (works on SQLite and PostgreSQL).
    .orderBy(jooqUdfHelper.run { st.TAG.collateUnicode3() })
    .fetchSet(st.TAG)
|
||||
|
||||
/**
 * Returns all distinct book tags, optionally restricted to [filterOnLibraryIds],
 * ordered with the dialect-agnostic unicode collation.
 */
override fun findAllBookTags(filterOnLibraryIds: Collection<String>?): Set<String> =
  dslRO
    .select(bt.TAG)
    .from(bt)
    .apply {
      filterOnLibraryIds?.let {
        leftJoin(b)
          .on(bt.BOOK_ID.eq(b.ID))
          .where(b.LIBRARY_ID.`in`(it))
      }
      // Use the database-specific UDF helper instead of the SQLite-only collation,
      // so ordering also works on PostgreSQL.
    }.orderBy(jooqUdfHelper.run { bt.TAG.collateUnicode3() })
    .fetchSet(bt.TAG)
|
||||
|
||||
/**
 * Returns all distinct, non-empty series languages, optionally restricted to
 * the libraries in [filterOnLibraryIds], in ascending order.
 */
override fun findAllLanguages(filterOnLibraryIds: Collection<String>?): Set<String> =
  dslRO
    .selectDistinct(sd.LANGUAGE)
    .from(sd)
    .apply { if (filterOnLibraryIds != null) leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) }
    .where(sd.LANGUAGE.ne(""))
    .apply { filterOnLibraryIds?.let { ids -> and(s.LIBRARY_ID.`in`(ids)) } }
    .orderBy(sd.LANGUAGE)
    .fetchSet(sd.LANGUAGE)
|
||||
|
||||
/**
 * Returns all distinct, non-empty languages of series in [libraryIds],
 * optionally restricted to [filterOnLibraryIds], in ascending order.
 */
override fun findAllLanguagesByLibraries(
  libraryIds: Set<String>,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .selectDistinct(sd.LANGUAGE)
    .from(sd)
    .leftJoin(s).on(sd.SERIES_ID.eq(s.ID))
    .where(sd.LANGUAGE.ne(""))
    .and(s.LIBRARY_ID.`in`(libraryIds))
    .apply { filterOnLibraryIds?.let { ids -> and(s.LIBRARY_ID.`in`(ids)) } }
    .orderBy(sd.LANGUAGE)
    .fetchSet(sd.LANGUAGE)
|
||||
|
||||
/**
 * Returns all distinct, non-empty languages of series in collection [collectionId],
 * optionally restricted to [filterOnLibraryIds], in ascending order.
 */
override fun findAllLanguagesByCollection(
  collectionId: String,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .selectDistinct(sd.LANGUAGE)
    .from(sd)
    .leftJoin(cs).on(sd.SERIES_ID.eq(cs.SERIES_ID))
    .apply { if (filterOnLibraryIds != null) leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) }
    .where(sd.LANGUAGE.ne(""))
    .and(cs.COLLECTION_ID.eq(collectionId))
    .apply { filterOnLibraryIds?.let { ids -> and(s.LIBRARY_ID.`in`(ids)) } }
    .orderBy(sd.LANGUAGE)
    .fetchSet(sd.LANGUAGE)
|
||||
|
||||
/**
 * Returns all distinct, non-empty publishers, optionally restricted to
 * [filterOnLibraryIds], ordered with the dialect-agnostic unicode collation.
 */
override fun findAllPublishers(filterOnLibraryIds: Collection<String>?): Set<String> =
  dslRO
    .selectDistinct(sd.PUBLISHER)
    .from(sd)
    .apply { filterOnLibraryIds?.let { leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) } }
    .where(sd.PUBLISHER.ne(""))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    // Dialect-agnostic unicode collation (works on SQLite and PostgreSQL).
    .orderBy(jooqUdfHelper.run { sd.PUBLISHER.collateUnicode3() })
    .fetchSet(sd.PUBLISHER)
|
||||
|
||||
/**
 * Returns a page of distinct, non-empty publishers, optionally restricted to
 * [filterOnLibraryIds], ordered with the dialect-agnostic unicode collation.
 *
 * When [pageable] is unpaged, a synthetic page sized to the full result
 * (minimum 20) is returned.
 */
override fun findAllPublishers(
  filterOnLibraryIds: Collection<String>?,
  pageable: Pageable,
): Page<String> {
  val query =
    dslRO
      .selectDistinct(sd.PUBLISHER)
      .from(sd)
      .apply { filterOnLibraryIds?.let { leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) } }
      .where(sd.PUBLISHER.ne(""))
      .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }

  val count = dslRO.fetchCount(query)
  // Dialect-agnostic unicode collation (works on SQLite and PostgreSQL).
  val sort = jooqUdfHelper.run { sd.PUBLISHER.collateUnicode3() }

  val items =
    query
      .orderBy(sort)
      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
      .fetch(sd.PUBLISHER)

  val pageSort = Sort.by("name")
  return PageImpl(
    items,
    if (pageable.isPaged)
      PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
    else
      PageRequest.of(0, maxOf(count, 20), pageSort),
    count.toLong(),
  )
}
|
||||
|
||||
/**
 * Returns all distinct, non-empty publishers of series in [libraryIds], optionally
 * restricted to [filterOnLibraryIds], ordered with the dialect-agnostic unicode collation.
 */
override fun findAllPublishersByLibraries(
  libraryIds: Set<String>,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .selectDistinct(sd.PUBLISHER)
    .from(sd)
    .leftJoin(s)
    .on(sd.SERIES_ID.eq(s.ID))
    .where(sd.PUBLISHER.ne(""))
    .and(s.LIBRARY_ID.`in`(libraryIds))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    // Dialect-agnostic unicode collation (works on SQLite and PostgreSQL).
    .orderBy(jooqUdfHelper.run { sd.PUBLISHER.collateUnicode3() })
    .fetchSet(sd.PUBLISHER)
|
||||
|
||||
/**
 * Returns all distinct, non-empty publishers of series in collection [collectionId],
 * optionally restricted to [filterOnLibraryIds], ordered with the dialect-agnostic
 * unicode collation.
 */
override fun findAllPublishersByCollection(
  collectionId: String,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .selectDistinct(sd.PUBLISHER)
    .from(sd)
    .leftJoin(cs)
    .on(sd.SERIES_ID.eq(cs.SERIES_ID))
    .apply { filterOnLibraryIds?.let { leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) } }
    .where(sd.PUBLISHER.ne(""))
    .and(cs.COLLECTION_ID.eq(collectionId))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    // Dialect-agnostic unicode collation (works on SQLite and PostgreSQL).
    .orderBy(jooqUdfHelper.run { sd.PUBLISHER.collateUnicode3() })
    .fetchSet(sd.PUBLISHER)
|
||||
|
||||
/**
 * Returns all distinct age ratings (including null), optionally restricted to
 * the libraries in [filterOnLibraryIds], in ascending order.
 */
override fun findAllAgeRatings(filterOnLibraryIds: Collection<String>?): Set<Int?> =
  dslRO
    .selectDistinct(sd.AGE_RATING)
    .from(sd)
    .apply {
      filterOnLibraryIds?.let { ids ->
        leftJoin(s)
          .on(sd.SERIES_ID.eq(s.ID))
          .where(s.LIBRARY_ID.`in`(ids))
      }
    }.orderBy(sd.AGE_RATING)
    .fetchSet(sd.AGE_RATING)
|
||||
|
||||
/**
 * Returns all distinct age ratings (including null) of series in [libraryIds],
 * optionally restricted to [filterOnLibraryIds], in ascending order.
 */
override fun findAllAgeRatingsByLibraries(
  libraryIds: Set<String>,
  filterOnLibraryIds: Collection<String>?,
): Set<Int?> =
  dslRO
    .selectDistinct(sd.AGE_RATING)
    .from(sd)
    .leftJoin(s).on(sd.SERIES_ID.eq(s.ID))
    .where(s.LIBRARY_ID.`in`(libraryIds))
    .apply { filterOnLibraryIds?.let { ids -> and(s.LIBRARY_ID.`in`(ids)) } }
    .orderBy(sd.AGE_RATING)
    .fetchSet(sd.AGE_RATING)
|
||||
|
||||
/**
 * Returns all distinct age ratings (including null) of series in collection
 * [collectionId], optionally restricted to [filterOnLibraryIds], in ascending order.
 */
override fun findAllAgeRatingsByCollection(
  collectionId: String,
  filterOnLibraryIds: Collection<String>?,
): Set<Int?> =
  dslRO
    .selectDistinct(sd.AGE_RATING)
    .from(sd)
    .leftJoin(cs).on(sd.SERIES_ID.eq(cs.SERIES_ID))
    .apply { if (filterOnLibraryIds != null) leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) }
    .where(cs.COLLECTION_ID.eq(collectionId))
    .apply { filterOnLibraryIds?.let { ids -> and(s.LIBRARY_ID.`in`(ids)) } }
    .orderBy(sd.AGE_RATING)
    .fetchSet(sd.AGE_RATING)
|
||||
|
||||
/**
 * Returns all distinct, non-null aggregated release dates, optionally restricted
 * to the libraries in [filterOnLibraryIds], most recent first.
 */
override fun findAllSeriesReleaseDates(filterOnLibraryIds: Collection<String>?): Set<LocalDate> =
  dslRO
    .selectDistinct(bma.RELEASE_DATE)
    .from(bma)
    .apply { if (filterOnLibraryIds != null) leftJoin(s).on(bma.SERIES_ID.eq(s.ID)) }
    .where(bma.RELEASE_DATE.isNotNull)
    .apply { filterOnLibraryIds?.let { ids -> and(s.LIBRARY_ID.`in`(ids)) } }
    .orderBy(bma.RELEASE_DATE.desc())
    .fetchSet(bma.RELEASE_DATE)
|
||||
|
||||
/**
 * Returns all distinct, non-null aggregated release dates of series in [libraryIds],
 * optionally restricted to [filterOnLibraryIds], most recent first.
 */
override fun findAllSeriesReleaseDatesByLibraries(
  libraryIds: Set<String>,
  filterOnLibraryIds: Collection<String>?,
): Set<LocalDate> =
  dslRO
    .selectDistinct(bma.RELEASE_DATE)
    .from(bma)
    .leftJoin(s).on(bma.SERIES_ID.eq(s.ID))
    .where(s.LIBRARY_ID.`in`(libraryIds))
    .and(bma.RELEASE_DATE.isNotNull)
    .apply { filterOnLibraryIds?.let { ids -> and(s.LIBRARY_ID.`in`(ids)) } }
    .orderBy(bma.RELEASE_DATE.desc())
    .fetchSet(bma.RELEASE_DATE)
|
||||
|
||||
/**
 * Returns all distinct, non-null aggregated release dates of series in collection
 * [collectionId], optionally restricted to [filterOnLibraryIds], most recent first.
 */
override fun findAllSeriesReleaseDatesByCollection(
  collectionId: String,
  filterOnLibraryIds: Collection<String>?,
): Set<LocalDate> =
  dslRO
    .selectDistinct(bma.RELEASE_DATE)
    .from(bma)
    .leftJoin(cs).on(bma.SERIES_ID.eq(cs.SERIES_ID))
    .apply { if (filterOnLibraryIds != null) leftJoin(s).on(bma.SERIES_ID.eq(s.ID)) }
    .where(cs.COLLECTION_ID.eq(collectionId))
    .and(bma.RELEASE_DATE.isNotNull)
    .apply { filterOnLibraryIds?.let { ids -> and(s.LIBRARY_ID.`in`(ids)) } }
    .orderBy(bma.RELEASE_DATE.desc())
    .fetchSet(bma.RELEASE_DATE)
|
||||
|
||||
/**
 * Returns all distinct sharing labels, optionally restricted to [filterOnLibraryIds],
 * ordered with the dialect-agnostic unicode collation.
 */
override fun findAllSharingLabels(filterOnLibraryIds: Collection<String>?): Set<String> =
  dslRO
    .selectDistinct(sl.LABEL)
    .from(sl)
    .apply {
      filterOnLibraryIds?.let {
        leftJoin(s)
          .on(sl.SERIES_ID.eq(s.ID))
          .where(s.LIBRARY_ID.`in`(it))
      }
      // Use the database-specific UDF helper instead of the SQLite-only collation,
      // so ordering also works on PostgreSQL.
    }.orderBy(jooqUdfHelper.run { sl.LABEL.collateUnicode3() })
    .fetchSet(sl.LABEL)
|
||||
|
||||
/**
 * Returns all distinct sharing labels of series in [libraryIds], optionally
 * restricted to [filterOnLibraryIds], ordered with the dialect-agnostic unicode collation.
 */
override fun findAllSharingLabelsByLibraries(
  libraryIds: Set<String>,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .selectDistinct(sl.LABEL)
    .from(sl)
    .leftJoin(s)
    .on(sl.SERIES_ID.eq(s.ID))
    .where(s.LIBRARY_ID.`in`(libraryIds))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    // Dialect-agnostic unicode collation (works on SQLite and PostgreSQL).
    .orderBy(jooqUdfHelper.run { sl.LABEL.collateUnicode3() })
    .fetchSet(sl.LABEL)
|
||||
|
||||
/**
 * Returns all distinct sharing labels of series in collection [collectionId],
 * optionally restricted to [filterOnLibraryIds], ordered with the dialect-agnostic
 * unicode collation.
 */
override fun findAllSharingLabelsByCollection(
  collectionId: String,
  filterOnLibraryIds: Collection<String>?,
): Set<String> =
  dslRO
    .selectDistinct(sl.LABEL)
    .from(sl)
    .leftJoin(cs)
    .on(sl.SERIES_ID.eq(cs.SERIES_ID))
    .apply { filterOnLibraryIds?.let { leftJoin(s).on(sl.SERIES_ID.eq(s.ID)) } }
    .where(cs.COLLECTION_ID.eq(collectionId))
    .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
    // Dialect-agnostic unicode collation (works on SQLite and PostgreSQL).
    .orderBy(jooqUdfHelper.run { sl.LABEL.collateUnicode3() })
    .fetchSet(sl.LABEL)
|
||||
|
||||
/** Maps a [BookMetadataAuthorRecord] row to the domain [Author]. */
private fun BookMetadataAuthorRecord.toDomain(): Author = Author(name = name, role = role)
|
||||
|
||||
/** Maps a [BookMetadataAggregationAuthorRecord] row to the domain [Author]. */
private fun BookMetadataAggregationAuthorRecord.toDomain(): Author = Author(name = name, role = role)
|
||||
}
|
||||
|
|
@ -110,7 +110,7 @@ class SeriesDtoDao(
|
|||
): Page<SeriesDto> {
|
||||
requireNotNull(context.userId) { "Missing userId in search context" }
|
||||
|
||||
val (conditions, joins) = SeriesSearchHelper(context).toCondition(search.condition)
|
||||
val (conditions, joins) = SeriesSearchHelper(context, jooqUdfHelper).toCondition(search.condition)
|
||||
val conditionsRefined = conditions.and(search.regexSearch?.let { it.second.toColumn().likeRegex(it.first) } ?: DSL.noCondition())
|
||||
|
||||
return findAll(conditionsRefined, context.userId, pageable, joins, search.fullTextSearch)
|
||||
|
|
@ -123,7 +123,7 @@ class SeriesDtoDao(
|
|||
): Page<SeriesDto> {
|
||||
requireNotNull(context.userId) { "Missing userId in search context" }
|
||||
|
||||
val (conditions, joins) = SeriesSearchHelper(context).toCondition(search.condition)
|
||||
val (conditions, joins) = SeriesSearchHelper(context, jooqUdfHelper).toCondition(search.condition)
|
||||
val conditionsRefined = conditions.and(s.CREATED_DATE.notEqual(s.LAST_MODIFIED_DATE))
|
||||
|
||||
return findAll(conditionsRefined, context.userId, pageable, joins, search.fullTextSearch)
|
||||
|
|
@ -135,7 +135,7 @@ class SeriesDtoDao(
|
|||
): List<GroupCountDto> {
|
||||
requireNotNull(context.userId) { "Missing userId in search context" }
|
||||
|
||||
val (conditions, joins) = SeriesSearchHelper(context).toCondition(search.condition)
|
||||
val (conditions, joins) = SeriesSearchHelper(context, jooqUdfHelper).toCondition(search.condition)
|
||||
val conditionsRefined = conditions.and(search.regexSearch?.let { it.second.toColumn().likeRegex(it.first) } ?: DSL.noCondition())
|
||||
|
||||
val seriesIds = luceneHelper.searchEntitiesIds(search.fullTextSearch, LuceneEntity.Series)
|
||||
|
|
|
|||
Loading…
Reference in a new issue