Mirror of https://github.com/gotson/komga.git (synced 2026-05-08 21:00:16 +02:00)
style: ktlint format
parent 307c652a47
commit d659446194
191 changed files with 4823 additions and 3466 deletions
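The hunks below are a mechanical reformat from ktlint's default rule set: constructor and function signatures with several parameters are exploded to one parameter per line with a trailing comma, long call chains wrap with one call per line, single-expression functions drop their return blocks, and supertype lists wrap one supertype per line. A minimal sketch of the before/after shape, using a hypothetical class that is not part of Komga:

// Hypothetical example only; names and values are not taken from the Komga sources.
// Before ktlint formatting:
// class ScanRequest(val libraryId: String, val deep: Boolean = false, val priority: Int = 4)

// After ktlint formatting: one parameter per line, trailing comma, wrapped call chains.
class ScanRequest(
    val libraryId: String,
    val deep: Boolean = false,
    val priority: Int = 4,
) {
    override fun toString(): String = "ScanRequest(libraryId='$libraryId', deep=$deep, priority=$priority)"
}

fun main() {
    val firstId =
        listOf("lib-a", "lib-b")
            .map { ScanRequest(it) } // chained calls wrap onto their own lines
            .first()
            .libraryId
    println(firstId)
}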
@@ -24,8 +24,10 @@ class BrowseBenchmark : AbstractRestBenchmark() {
// find series with most books
biggestSeriesId =
seriesController.getAllSeries(principal, page = PageRequest.of(0, 1, Sort.by(Sort.Order.desc("booksCount"))))
.content.first()
seriesController
.getAllSeries(principal, page = PageRequest.of(0, 1, Sort.by(Sort.Order.desc("booksCount"))))
.content
.first()
.id
}

@@ -47,8 +47,12 @@ class DashboardBenchmark : AbstractRestBenchmark() {
}

// retrieve most recent book release date
bookLatestReleaseDate = bookController.getAllBooks(principal, page = PageRequest.of(0, 1, Sort.by(Sort.Order.desc("metadata.releaseDate"))))
.content.firstOrNull()?.metadata?.releaseDate ?: LocalDate.now()
bookLatestReleaseDate = bookController
.getAllBooks(principal, page = PageRequest.of(0, 1, Sort.by(Sort.Order.desc("metadata.releaseDate"))))
.content
.firstOrNull()
?.metadata
?.releaseDate ?: LocalDate.now()
}

@Benchmark

@@ -23,8 +23,10 @@ class UnsortedBenchmark : AbstractRestBenchmark() {
// find series with most books
biggestSeriesId =
seriesController.getAllSeries(principal, page = PageRequest.of(0, 1, Sort.by(Sort.Order.desc("booksCount"))))
.content.first()
seriesController
.getAllSeries(principal, page = PageRequest.of(0, 1, Sort.by(Sort.Order.desc("booksCount"))))
.content
.first()
.id
}

@@ -36,16 +36,17 @@ class LibraryScanScheduler(
fun scheduleScan(library: Library) {
registry.remove(library.id)?.cancel(false)
if (library.scanInterval != DISABLED) {
registrar.scheduleFixedRateTask(
FixedRateTask(
{
logger.info { "Periodic scan for library: ${library.name}" }
taskEmitter.scanLibrary(library.id)
},
library.scanInterval.toDuration(),
library.scanInterval.toDuration(),
),
)?.let { registry[library.id] = it }
registrar
.scheduleFixedRateTask(
FixedRateTask(
{
logger.info { "Periodic scan for library: ${library.name}" }
taskEmitter.scanLibrary(library.id)
},
library.scanInterval.toDuration(),
library.scanInterval.toDuration(),
),
)?.let { registry[library.id] = it }
}
}

@@ -12,149 +12,232 @@ const val DEFAULT_PRIORITY = 4
const val LOW_PRIORITY = 2
const val LOWEST_PRIORITY = 0

sealed class Task(val priority: Int = DEFAULT_PRIORITY, val groupId: String? = null) {
sealed class Task(
val priority: Int = DEFAULT_PRIORITY,
val groupId: String? = null,
) {
abstract val uniqueId: String

class ScanLibrary(val libraryId: String, val scanDeep: Boolean, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class ScanLibrary(
val libraryId: String,
val scanDeep: Boolean,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "SCAN_LIBRARY_${libraryId}_DEEP_$scanDeep"

override fun toString(): String = "ScanLibrary(libraryId='$libraryId', scanDeep='$scanDeep', priority='$priority')"
}

class FindBooksToConvert(val libraryId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class FindBooksToConvert(
val libraryId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "FIND_BOOKS_TO_CONVERT_$libraryId"

override fun toString(): String = "FindBooksToConvert(libraryId='$libraryId', priority='$priority')"
}

class FindBooksWithMissingPageHash(val libraryId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class FindBooksWithMissingPageHash(
val libraryId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "FIND_BOOKS_WITH_MISSING_PAGE_HASH_$libraryId"

override fun toString(): String = "FindBooksWithMissingPageHash(libraryId='$libraryId', priority='$priority')"
}

class FindDuplicatePagesToDelete(val libraryId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class FindDuplicatePagesToDelete(
val libraryId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "FIND_DUPLICATE_PAGES_TO_DELETE_$libraryId"

override fun toString(): String = "FindDuplicatePagesToDelete(libraryId='$libraryId', priority='$priority')"
}

class EmptyTrash(val libraryId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class EmptyTrash(
val libraryId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "EMPTY_TRASH_$libraryId"

override fun toString(): String = "EmptyTrash(libraryId='$libraryId', priority='$priority')"
}

class AnalyzeBook(val bookId: String, priority: Int = DEFAULT_PRIORITY, groupId: String) : Task(priority, groupId) {
class AnalyzeBook(
val bookId: String,
priority: Int = DEFAULT_PRIORITY,
groupId: String,
) : Task(priority, groupId) {
override val uniqueId = "ANALYZE_BOOK_$bookId"

override fun toString(): String = "AnalyzeBook(bookId='$bookId', priority='$priority')"
}

class GenerateBookThumbnail(val bookId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class GenerateBookThumbnail(
val bookId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "GENERATE_BOOK_THUMBNAIL_$bookId"

override fun toString(): String = "GenerateBookThumbnail(bookId='$bookId', priority='$priority')"
}

class RefreshBookMetadata(val bookId: String, val capabilities: Set<BookMetadataPatchCapability>, priority: Int = DEFAULT_PRIORITY, groupId: String) : Task(priority, groupId) {
class RefreshBookMetadata(
val bookId: String,
val capabilities: Set<BookMetadataPatchCapability>,
priority: Int = DEFAULT_PRIORITY,
groupId: String,
) : Task(priority, groupId) {
override val uniqueId = "REFRESH_BOOK_METADATA_$bookId"

override fun toString(): String = "RefreshBookMetadata(bookId='$bookId', capabilities=$capabilities, priority='$priority')"
}

class HashBook(val bookId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class HashBook(
val bookId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "HASH_BOOK_$bookId"

override fun toString(): String = "HashBook(bookId='$bookId', priority='$priority')"
}

class HashBookPages(val bookId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class HashBookPages(
val bookId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "HASH_BOOK_PAGES_$bookId"

override fun toString(): String = "HashBookPages(bookId='$bookId', priority='$priority')"
}

class RefreshSeriesMetadata(val seriesId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority, seriesId) {
class RefreshSeriesMetadata(
val seriesId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority, seriesId) {
override val uniqueId = "REFRESH_SERIES_METADATA_$seriesId"

override fun toString(): String = "RefreshSeriesMetadata(seriesId='$seriesId', priority='$priority')"
}

class AggregateSeriesMetadata(val seriesId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority, seriesId) {
class AggregateSeriesMetadata(
val seriesId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority, seriesId) {
override val uniqueId = "AGGREGATE_SERIES_METADATA_$seriesId"

override fun toString(): String = "AggregateSeriesMetadata(seriesId='$seriesId', priority='$priority')"
}

class RefreshBookLocalArtwork(val bookId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class RefreshBookLocalArtwork(
val bookId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId: String = "REFRESH_BOOK_LOCAL_ARTWORK_$bookId"

override fun toString(): String = "RefreshBookLocalArtwork(bookId='$bookId', priority='$priority')"
}

class RefreshSeriesLocalArtwork(val seriesId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class RefreshSeriesLocalArtwork(
val seriesId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId: String = "REFRESH_SERIES_LOCAL_ARTWORK_$seriesId"

override fun toString(): String = "RefreshSeriesLocalArtwork(seriesId=$seriesId, priority='$priority')"
}

class ImportBook(val sourceFile: String, val seriesId: String, val copyMode: CopyMode, val destinationName: String?, val upgradeBookId: String?, priority: Int = DEFAULT_PRIORITY) : Task(priority, seriesId) {
class ImportBook(
val sourceFile: String,
val seriesId: String,
val copyMode: CopyMode,
val destinationName: String?,
val upgradeBookId: String?,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority, seriesId) {
override val uniqueId: String = "IMPORT_BOOK_${seriesId}_$sourceFile"

override fun toString(): String =
"ImportBook(sourceFile='$sourceFile', seriesId='$seriesId', copyMode=$copyMode, destinationName=$destinationName, upgradeBookId=$upgradeBookId, priority='$priority')"
override fun toString(): String = "ImportBook(sourceFile='$sourceFile', seriesId='$seriesId', copyMode=$copyMode, destinationName=$destinationName, upgradeBookId=$upgradeBookId, priority='$priority')"
}

class ConvertBook(val bookId: String, priority: Int = DEFAULT_PRIORITY, groupId: String) : Task(priority, groupId) {
class ConvertBook(
val bookId: String,
priority: Int = DEFAULT_PRIORITY,
groupId: String,
) : Task(priority, groupId) {
override val uniqueId: String = "CONVERT_BOOK_$bookId"

override fun toString(): String = "ConvertBook(bookId='$bookId', priority='$priority')"
}

class RepairExtension(val bookId: String, priority: Int = DEFAULT_PRIORITY, groupId: String) : Task(priority, groupId) {
class RepairExtension(
val bookId: String,
priority: Int = DEFAULT_PRIORITY,
groupId: String,
) : Task(priority, groupId) {
override val uniqueId: String = "REPAIR_EXTENSION_$bookId"

override fun toString(): String = "RepairExtension(bookId='$bookId', priority='$priority')"
}

class RemoveHashedPages(val bookId: String, val pages: Collection<BookPageNumbered>, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class RemoveHashedPages(
val bookId: String,
val pages: Collection<BookPageNumbered>,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId: String = "REMOVE_HASHED_PAGES_$bookId"

override fun toString(): String = "RemoveHashedPages(bookId='$bookId', priority='$priority')"
}

class RebuildIndex(val entities: Set<LuceneEntity>?, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class RebuildIndex(
val entities: Set<LuceneEntity>?,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "REBUILD_INDEX"

override fun toString(): String = "RebuildIndex(priority='$priority',entities='${entities?.map { it.type }}')"
}

class UpgradeIndex(priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class UpgradeIndex(
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "UPGRADE_INDEX"

override fun toString(): String = "UpgradeIndex(priority='$priority')"
}

class DeleteBook(val bookId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class DeleteBook(
val bookId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "DELETE_BOOK_$bookId"

override fun toString(): String = "DeleteBook(bookId='$bookId', priority='$priority')"
}

class DeleteSeries(val seriesId: String, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class DeleteSeries(
val seriesId: String,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "DELETE_SERIES_$seriesId"

override fun toString(): String = "DeleteSeries(seriesId='$seriesId', priority='$priority')"
}

class FixThumbnailsWithoutMetadata(priority: Int = DEFAULT_PRIORITY) : Task(priority, "FixThumbnailsWithoutMetadata") {
class FixThumbnailsWithoutMetadata(
priority: Int = DEFAULT_PRIORITY,
) : Task(priority, "FixThumbnailsWithoutMetadata") {
override val uniqueId = "FIX_THUMBNAILS_WITHOUT_METADATA_${LocalDateTime.now()}"

override fun toString(): String = "FixThumbnailsWithoutMetadata(priority='$priority')"
}

class FindBookThumbnailsToRegenerate(val forBiggerResultOnly: Boolean, priority: Int = DEFAULT_PRIORITY) : Task(priority) {
class FindBookThumbnailsToRegenerate(
val forBiggerResultOnly: Boolean,
priority: Int = DEFAULT_PRIORITY,
) : Task(priority) {
override val uniqueId = "FIND_BOOK_THUMBNAILS_TO_REGENERATE"

override fun toString(): String = "FindBookThumbnailsToRegenerate(forBiggerResultOnly='$forBiggerResultOnly', priority='$priority')"

@@ -54,8 +54,7 @@ class TaskEmitter(
),
SearchContext.empty(),
UnpagedSorted(Sort.by(Sort.Order.asc("seriesId"), Sort.Order.asc("number"))),
)
.content
).content
.map { Task.AnalyzeBook(it.id, groupId = it.seriesId) }
.let { submitTasks(it) }
}

@@ -48,8 +48,9 @@ class TaskProcessor(
executor.execute { takeAndProcess() }
} else {
// fan out while threads are available
while (tasksRepository.hasAvailable() && executor.activeCount < executor.corePoolSize)
while (tasksRepository.hasAvailable() && executor.activeCount < executor.corePoolSize) {
executor.execute { takeAndProcess() }
}
}
} else {
logger.debug { "Not processing tasks" }

@@ -54,31 +54,29 @@ class BookMetadata(
bookId: String = this.bookId,
createdDate: LocalDateTime = this.createdDate,
lastModifiedDate: LocalDateTime = this.lastModifiedDate,
) =
BookMetadata(
title = title,
summary = summary,
number = number,
numberSort = numberSort,
releaseDate = releaseDate,
authors = authors,
tags = tags,
isbn = isbn,
links = links,
titleLock = titleLock,
summaryLock = summaryLock,
numberLock = numberLock,
numberSortLock = numberSortLock,
releaseDateLock = releaseDateLock,
authorsLock = authorsLock,
tagsLock = tagsLock,
isbnLock = isbnLock,
linksLock = linksLock,
bookId = bookId,
createdDate = createdDate,
lastModifiedDate = lastModifiedDate,
)
) = BookMetadata(
title = title,
summary = summary,
number = number,
numberSort = numberSort,
releaseDate = releaseDate,
authors = authors,
tags = tags,
isbn = isbn,
links = links,
titleLock = titleLock,
summaryLock = summaryLock,
numberLock = numberLock,
numberSortLock = numberSortLock,
releaseDateLock = releaseDateLock,
authorsLock = authorsLock,
tagsLock = tagsLock,
isbnLock = isbnLock,
linksLock = linksLock,
bookId = bookId,
createdDate = createdDate,
lastModifiedDate = lastModifiedDate,
)

override fun toString(): String =
"BookMetadata(numberSort=$numberSort, releaseDate=$releaseDate, authors=$authors, isbn='$isbn', links=$links, titleLock=$titleLock, summaryLock=$summaryLock, numberLock=$numberLock, numberSortLock=$numberSortLock, releaseDateLock=$releaseDateLock, authorsLock=$authorsLock, tagsLock=$tagsLock, isbnLock=$isbnLock, linksLock=$linksLock, bookId='$bookId', createdDate=$createdDate, lastModifiedDate=$lastModifiedDate, title='$title', summary='$summary', number='$number', tags=$tags)"
override fun toString(): String = "BookMetadata(numberSort=$numberSort, releaseDate=$releaseDate, authors=$authors, isbn='$isbn', links=$links, titleLock=$titleLock, summaryLock=$summaryLock, numberLock=$numberLock, numberSortLock=$numberSortLock, releaseDateLock=$releaseDateLock, authorsLock=$authorsLock, tagsLock=$tagsLock, isbnLock=$isbnLock, linksLock=$linksLock, bookId='$bookId', createdDate=$createdDate, lastModifiedDate=$lastModifiedDate, title='$title', summary='$summary', number='$number', tags=$tags)"
}

@@ -26,11 +26,12 @@ open class BookPage(

fun Collection<BookPage>.restoreHashFrom(restoreFrom: Collection<BookPage>): List<BookPage> =
this.map { newPage ->
restoreFrom.find {
it.fileSize == newPage.fileSize &&
it.mediaType == newPage.mediaType &&
it.fileName == newPage.fileName &&
it.fileHash.isNotBlank()
}?.let { newPage.copy(fileHash = it.fileHash) }
restoreFrom
.find {
it.fileSize == newPage.fileSize &&
it.mediaType == newPage.mediaType &&
it.fileName == newPage.fileName &&
it.fileHash.isNotBlank()
}?.let { newPage.copy(fileHash = it.fileHash) }
?: newPage
}

@@ -8,7 +8,9 @@ class ContentRestrictions(
labelsExclude: Set<String> = emptySet(),
) {
val labelsAllow =
labelsAllow.lowerNotBlank().toSet()
labelsAllow
.lowerNotBlank()
.toSet()
.minus(labelsExclude.lowerNotBlank().toSet())

val labelsExclude = labelsExclude.lowerNotBlank().toSet()

@@ -3,65 +3,133 @@ package org.gotson.komga.domain.model
import java.net.URL

sealed class DomainEvent {
data class LibraryAdded(val library: Library) : DomainEvent()
data class LibraryAdded(
val library: Library,
) : DomainEvent()

data class LibraryUpdated(val library: Library) : DomainEvent()
data class LibraryUpdated(
val library: Library,
) : DomainEvent()

data class LibraryDeleted(val library: Library) : DomainEvent()
data class LibraryDeleted(
val library: Library,
) : DomainEvent()

data class LibraryScanned(val library: Library) : DomainEvent()
data class LibraryScanned(
val library: Library,
) : DomainEvent()

data class SeriesAdded(val series: Series) : DomainEvent()
data class SeriesAdded(
val series: Series,
) : DomainEvent()

data class SeriesUpdated(val series: Series) : DomainEvent()
data class SeriesUpdated(
val series: Series,
) : DomainEvent()

data class SeriesDeleted(val series: Series) : DomainEvent()
data class SeriesDeleted(
val series: Series,
) : DomainEvent()

data class BookAdded(val book: Book) : DomainEvent()
data class BookAdded(
val book: Book,
) : DomainEvent()

data class BookUpdated(val book: Book) : DomainEvent()
data class BookUpdated(
val book: Book,
) : DomainEvent()

data class BookDeleted(val book: Book) : DomainEvent()
data class BookDeleted(
val book: Book,
) : DomainEvent()

data class BookImported(val book: Book?, val sourceFile: URL, val success: Boolean, val message: String? = null) : DomainEvent()
data class BookImported(
val book: Book?,
val sourceFile: URL,
val success: Boolean,
val message: String? = null,
) : DomainEvent()

data class CollectionAdded(val collection: SeriesCollection) : DomainEvent()
data class CollectionAdded(
val collection: SeriesCollection,
) : DomainEvent()

data class CollectionUpdated(val collection: SeriesCollection) : DomainEvent()
data class CollectionUpdated(
val collection: SeriesCollection,
) : DomainEvent()

data class CollectionDeleted(val collection: SeriesCollection) : DomainEvent()
data class CollectionDeleted(
val collection: SeriesCollection,
) : DomainEvent()

data class ReadListAdded(val readList: ReadList) : DomainEvent()
data class ReadListAdded(
val readList: ReadList,
) : DomainEvent()

data class ReadListUpdated(val readList: ReadList) : DomainEvent()
data class ReadListUpdated(
val readList: ReadList,
) : DomainEvent()

data class ReadListDeleted(val readList: ReadList) : DomainEvent()
data class ReadListDeleted(
val readList: ReadList,
) : DomainEvent()

data class ReadProgressChanged(val progress: ReadProgress) : DomainEvent()
data class ReadProgressChanged(
val progress: ReadProgress,
) : DomainEvent()

data class ReadProgressDeleted(val progress: ReadProgress) : DomainEvent()
data class ReadProgressDeleted(
val progress: ReadProgress,
) : DomainEvent()

data class ReadProgressSeriesChanged(val seriesId: String, val userId: String) : DomainEvent()
data class ReadProgressSeriesChanged(
val seriesId: String,
val userId: String,
) : DomainEvent()

data class ReadProgressSeriesDeleted(val seriesId: String, val userId: String) : DomainEvent()
data class ReadProgressSeriesDeleted(
val seriesId: String,
val userId: String,
) : DomainEvent()

data class ThumbnailBookAdded(val thumbnail: ThumbnailBook) : DomainEvent()
data class ThumbnailBookAdded(
val thumbnail: ThumbnailBook,
) : DomainEvent()

data class ThumbnailBookDeleted(val thumbnail: ThumbnailBook) : DomainEvent()
data class ThumbnailBookDeleted(
val thumbnail: ThumbnailBook,
) : DomainEvent()

data class ThumbnailSeriesAdded(val thumbnail: ThumbnailSeries) : DomainEvent()
data class ThumbnailSeriesAdded(
val thumbnail: ThumbnailSeries,
) : DomainEvent()

data class ThumbnailSeriesDeleted(val thumbnail: ThumbnailSeries) : DomainEvent()
data class ThumbnailSeriesDeleted(
val thumbnail: ThumbnailSeries,
) : DomainEvent()

data class ThumbnailSeriesCollectionAdded(val thumbnail: ThumbnailSeriesCollection) : DomainEvent()
data class ThumbnailSeriesCollectionAdded(
val thumbnail: ThumbnailSeriesCollection,
) : DomainEvent()

data class ThumbnailSeriesCollectionDeleted(val thumbnail: ThumbnailSeriesCollection) : DomainEvent()
data class ThumbnailSeriesCollectionDeleted(
val thumbnail: ThumbnailSeriesCollection,
) : DomainEvent()

data class ThumbnailReadListAdded(val thumbnail: ThumbnailReadList) : DomainEvent()
data class ThumbnailReadListAdded(
val thumbnail: ThumbnailReadList,
) : DomainEvent()

data class ThumbnailReadListDeleted(val thumbnail: ThumbnailReadList) : DomainEvent()
data class ThumbnailReadListDeleted(
val thumbnail: ThumbnailReadList,
) : DomainEvent()

data class UserUpdated(val user: KomgaUser, val expireSession: Boolean) : DomainEvent()
data class UserUpdated(
val user: KomgaUser,
val expireSession: Boolean,
) : DomainEvent()

data class UserDeleted(val user: KomgaUser) : DomainEvent()
data class UserDeleted(
val user: KomgaUser,
) : DomainEvent()
}

@@ -18,20 +18,45 @@ class MediaNotReadyException : Exception()

class NoThumbnailFoundException : Exception()

class MediaUnsupportedException(message: String, code: String = "") : CodedException(message, code)
class MediaUnsupportedException(
message: String,
code: String = "",
) : CodedException(message, code)

class ImageConversionException(message: String, code: String = "") : CodedException(message, code)
class ImageConversionException(
message: String,
code: String = "",
) : CodedException(message, code)

class DirectoryNotFoundException(message: String, code: String = "") : CodedException(message, code)
class DirectoryNotFoundException(
message: String,
code: String = "",
) : CodedException(message, code)

class DuplicateNameException(message: String, code: String = "") : CodedException(message, code)
class DuplicateNameException(
message: String,
code: String = "",
) : CodedException(message, code)

class PathContainedInPath(message: String, code: String = "") : CodedException(message, code)
class PathContainedInPath(
message: String,
code: String = "",
) : CodedException(message, code)

class UserEmailAlreadyExistsException(message: String, code: String = "") : CodedException(message, code)
class UserEmailAlreadyExistsException(
message: String,
code: String = "",
) : CodedException(message, code)

class BookConversionException(message: String) : Exception(message)
class BookConversionException(
message: String,
) : Exception(message)

class ComicRackListException(message: String, code: String = "") : CodedException(message, code)
class ComicRackListException(
message: String,
code: String = "",
) : CodedException(message, code)

class EntryNotFoundException(message: String) : Exception(message)
class EntryNotFoundException(
message: String,
) : Exception(message)

@@ -12,64 +12,82 @@ sealed class HistoricalEvent(
val timestamp: LocalDateTime = LocalDateTime.now(),
val id: String = TsidCreator.getTsid256().toString(),
) {
class BookFileDeleted(book: Book, reason: String) : HistoricalEvent(
type = "BookFileDeleted",
bookId = book.id,
seriesId = book.seriesId,
properties =
mapOf(
"reason" to reason,
"name" to book.path.toString(),
),
)
class BookFileDeleted(
book: Book,
reason: String,
) : HistoricalEvent(
type = "BookFileDeleted",
bookId = book.id,
seriesId = book.seriesId,
properties =
mapOf(
"reason" to reason,
"name" to book.path.toString(),
),
)

class SeriesFolderDeleted(seriesId: String, seriesPath: Path, reason: String) : HistoricalEvent(
type = "SeriesFolderDeleted",
seriesId = seriesId,
properties =
mapOf(
"reason" to reason,
"name" to seriesPath.toString(),
),
) {
class SeriesFolderDeleted(
seriesId: String,
seriesPath: Path,
reason: String,
) : HistoricalEvent(
type = "SeriesFolderDeleted",
seriesId = seriesId,
properties =
mapOf(
"reason" to reason,
"name" to seriesPath.toString(),
),
) {
constructor(series: Series, reason: String) : this(series.id, series.path, reason)
}

class BookConverted(book: Book, previous: Book) : HistoricalEvent(
type = "BookConverted",
bookId = book.id,
seriesId = book.seriesId,
properties =
mapOf(
"name" to book.path.toString(),
"former file" to previous.path.toString(),
),
)
class BookConverted(
book: Book,
previous: Book,
) : HistoricalEvent(
type = "BookConverted",
bookId = book.id,
seriesId = book.seriesId,
properties =
mapOf(
"name" to book.path.toString(),
"former file" to previous.path.toString(),
),
)

class BookImported(book: Book, series: Series, source: Path, upgrade: Boolean) : HistoricalEvent(
type = "BookImported",
bookId = book.id,
seriesId = series.id,
properties =
mapOf(
"name" to book.path.toString(),
"source" to source.toString(),
"upgrade" to if (upgrade) "Yes" else "No",
),
)
class BookImported(
book: Book,
series: Series,
source: Path,
upgrade: Boolean,
) : HistoricalEvent(
type = "BookImported",
bookId = book.id,
seriesId = series.id,
properties =
mapOf(
"name" to book.path.toString(),
"source" to source.toString(),
"upgrade" to if (upgrade) "Yes" else "No",
),
)

class DuplicatePageDeleted(book: Book, page: BookPageNumbered) : HistoricalEvent(
type = "DuplicatePageDeleted",
bookId = book.id,
seriesId = book.seriesId,
properties =
mapOf(
"name" to book.path.toString(),
"page number" to page.pageNumber.toString(),
"page file name" to page.fileName,
"page file hash" to page.fileHash,
"page file size" to page.fileSize.toString(),
"page media type" to page.mediaType,
),
)
class DuplicatePageDeleted(
book: Book,
page: BookPageNumbered,
) : HistoricalEvent(
type = "DuplicatePageDeleted",
bookId = book.id,
seriesId = book.seriesId,
properties =
mapOf(
"name" to book.path.toString(),
"page number" to page.pageNumber.toString(),
"page file name" to page.fileName,
"page file hash" to page.fileHash,
"page file size" to page.fileSize.toString(),
"page media type" to page.mediaType,
),
)
}

@@ -62,11 +62,9 @@ data class KomgaUser(

fun canAccessAllLibraries(): Boolean = sharedAllLibraries || roleAdmin

fun canAccessLibrary(libraryId: String): Boolean =
canAccessAllLibraries() || sharedLibrariesIds.any { it == libraryId }
fun canAccessLibrary(libraryId: String): Boolean = canAccessAllLibraries() || sharedLibrariesIds.any { it == libraryId }

fun canAccessLibrary(library: Library): Boolean =
canAccessAllLibraries() || sharedLibrariesIds.any { it == library.id }
fun canAccessLibrary(library: Library): Boolean = canAccessAllLibraries() || sharedLibrariesIds.any { it == library.id }

fun isContentAllowed(
ageRating: Int? = null,

@@ -109,6 +107,5 @@ data class KomgaUser(
return !ageDenied && !labelDenied
}

override fun toString(): String =
"KomgaUser(email='$email', roleAdmin=$roleAdmin, roleFileDownload=$roleFileDownload, rolePageStreaming=$rolePageStreaming, roleKoboSync=$roleKoboSync, sharedLibrariesIds=$sharedLibrariesIds, sharedAllLibraries=$sharedAllLibraries, restrictions=$restrictions, id='$id', createdDate=$createdDate, lastModifiedDate=$lastModifiedDate)"
override fun toString(): String = "KomgaUser(email='$email', roleAdmin=$roleAdmin, roleFileDownload=$roleFileDownload, rolePageStreaming=$rolePageStreaming, roleKoboSync=$roleKoboSync, sharedLibrariesIds=$sharedLibrariesIds, sharedAllLibraries=$sharedAllLibraries, restrictions=$restrictions, id='$id', createdDate=$createdDate, lastModifiedDate=$lastModifiedDate)"
}

@@ -27,7 +27,5 @@ data class Media(
OUTDATED,
}

override fun toString(): String {
return "Media(status=$status, mediaType=$mediaType, comment=$comment, bookId='$bookId', createdDate=$createdDate, lastModifiedDate=$lastModifiedDate)"
}
override fun toString(): String = "Media(status=$status, mediaType=$mediaType, comment=$comment, bookId='$bookId', createdDate=$createdDate, lastModifiedDate=$lastModifiedDate)"
}

@@ -1,6 +1,11 @@
package org.gotson.komga.domain.model

enum class MediaType(val type: String, val profile: MediaProfile, val fileExtension: String, val exportType: String = type) {
enum class MediaType(
val type: String,
val profile: MediaProfile,
val fileExtension: String,
val exportType: String = type,
) {
ZIP("application/zip", MediaProfile.DIVINA, "cbz", "application/vnd.comicbook+zip"),
RAR_GENERIC("application/x-rar-compressed", MediaProfile.DIVINA, "cbr", "application/vnd.comicbook-rar"),
RAR_4("application/x-rar-compressed; version=4", MediaProfile.DIVINA, "cbr", "application/vnd.comicbook-rar"),

@@ -10,7 +10,8 @@ class PageHashKnown(
val matchCount: Int = 0,
override val createdDate: LocalDateTime = LocalDateTime.now(),
override val lastModifiedDate: LocalDateTime = createdDate,
) : Auditable, PageHash(hash, size) {
) : PageHash(hash, size),
Auditable {
enum class Action {
DELETE_AUTO,
DELETE_MANUAL,

@@ -42,7 +42,8 @@ class SearchCondition {
data class LibraryId(
@JsonProperty("libraryId")
val operator: SearchOperator.Equality<String>,
) : Book, Series
) : Book,
Series

data class CollectionId(
@JsonProperty("collectionId")

@@ -62,7 +63,8 @@ class SearchCondition {
data class Deleted(
@JsonProperty("deleted")
val operator: SearchOperator.Boolean,
) : Book, Series
) : Book,
Series

data class Complete(
@JsonProperty("complete")

@@ -72,12 +74,14 @@ class SearchCondition {
data class OneShot(
@JsonProperty("oneShot")
val operator: SearchOperator.Boolean,
) : Book, Series
) : Book,
Series

data class Title(
@JsonProperty("title")
val operator: SearchOperator.StringOp,
) : Book, Series
) : Book,
Series

data class TitleSort(
@JsonProperty("titleSort")

@@ -87,7 +91,8 @@ class SearchCondition {
data class ReleaseDate(
@JsonProperty("releaseDate")
val operator: SearchOperator.Date,
) : Book, Series
) : Book,
Series

data class NumberSort(
@JsonProperty("numberSort")

@@ -97,7 +102,8 @@ class SearchCondition {
data class Tag(
@JsonProperty("tag")
val operator: SearchOperator.Equality<String>,
) : Book, Series
) : Book,
Series

data class SharingLabel(
@JsonProperty("sharingLabel")

@@ -127,7 +133,8 @@ class SearchCondition {
data class ReadStatus(
@JsonProperty("readStatus")
val operator: SearchOperator.Equality<org.gotson.komga.domain.model.ReadStatus>,
) : Book, Series
) : Book,
Series

data class MediaStatus(
@JsonProperty("mediaStatus")

@@ -147,7 +154,8 @@ class SearchCondition {
data class Author(
@JsonProperty("author")
val operator: SearchOperator.Equality<AuthorMatch>,
) : Book, Series
) : Book,
Series

@JsonInclude(JsonInclude.Include.NON_NULL)
data class AuthorMatch(

@@ -1,6 +1,10 @@
package org.gotson.komga.domain.model

class SearchContext private constructor(val userId: String?, val restrictions: ContentRestrictions, val libraryIds: Collection<String>?) {
class SearchContext private constructor(
val userId: String?,
val restrictions: ContentRestrictions,
val libraryIds: Collection<String>?,
) {
constructor(user: KomgaUser?) : this(user?.id, user?.restrictions ?: ContentRestrictions(), user?.getAuthorizedLibraryIds(null))

companion object {

@@ -49,46 +49,82 @@ class SearchOperator {
sealed interface Boolean

@JsonTypeName("is")
data class Is<T>(val value: T) : Equality<T>, StringOp, Numeric<T>, NumericNullable<T>
data class Is<T>(
val value: T,
) : Equality<T>,
StringOp,
Numeric<T>,
NumericNullable<T>

@JsonTypeName("isNot")
data class IsNot<T>(val value: T) : Equality<T>, StringOp, Numeric<T>, NumericNullable<T>
data class IsNot<T>(
val value: T,
) : Equality<T>,
StringOp,
Numeric<T>,
NumericNullable<T>

@JsonTypeName("contains")
data class Contains(val value: String) : StringOp
data class Contains(
val value: String,
) : StringOp

@JsonTypeName("doesNotContain")
data class DoesNotContain(val value: String) : StringOp
data class DoesNotContain(
val value: String,
) : StringOp

@JsonTypeName("beginsWith")
data class BeginsWith(val value: String) : StringOp
data class BeginsWith(
val value: String,
) : StringOp

@JsonTypeName("doesNotBeginWith")
data class DoesNotBeginWith(val value: String) : StringOp
data class DoesNotBeginWith(
val value: String,
) : StringOp

@JsonTypeName("endsWith")
data class EndsWith(val value: String) : StringOp
data class EndsWith(
val value: String,
) : StringOp

@JsonTypeName("doesNotEndWith")
data class DoesNotEndWith(val value: String) : StringOp
data class DoesNotEndWith(
val value: String,
) : StringOp

@JsonTypeName("greaterThan")
data class GreaterThan<T>(val value: T) : Numeric<T>, NumericNullable<T>
data class GreaterThan<T>(
val value: T,
) : Numeric<T>,
NumericNullable<T>

@JsonTypeName("lessThan")
data class LessThan<T>(val value: T) : Numeric<T>, NumericNullable<T>
data class LessThan<T>(
val value: T,
) : Numeric<T>,
NumericNullable<T>

@JsonTypeName("before")
data class Before(val dateTime: ZonedDateTime) : Date
data class Before(
val dateTime: ZonedDateTime,
) : Date

@JsonTypeName("after")
data class After(val dateTime: ZonedDateTime) : Date
data class After(
val dateTime: ZonedDateTime,
) : Date

@JsonTypeName("isInTheLast")
data class IsInTheLast(val duration: Duration) : Date
data class IsInTheLast(
val duration: Duration,
) : Date

@JsonTypeName("isNotInTheLast")
data class IsNotInTheLast(val duration: Duration) : Date
data class IsNotInTheLast(
val duration: Duration,
) : Date

@JsonTypeName("isTrue")
data object IsTrue : Boolean

@@ -110,9 +146,7 @@ class SearchOperator {
return true
}

override fun hashCode(): Int {
return javaClass.hashCode()
}
override fun hashCode(): Int = javaClass.hashCode()
}

@JsonTypeName("isNotNull")

@@ -123,8 +157,6 @@ class SearchOperator {
return true
}

override fun hashCode(): Int {
return javaClass.hashCode()
}
override fun hashCode(): Int = javaClass.hashCode()
}
}

@@ -77,40 +77,39 @@ class SeriesMetadata(
seriesId: String = this.seriesId,
createdDate: LocalDateTime = this.createdDate,
lastModifiedDate: LocalDateTime = this.lastModifiedDate,
) =
SeriesMetadata(
status = status,
title = title,
titleSort = titleSort,
summary = summary,
readingDirection = readingDirection,
publisher = publisher,
ageRating = ageRating,
language = language,
genres = genres,
tags = tags,
totalBookCount = totalBookCount,
sharingLabels = sharingLabels,
links = links,
alternateTitles = alternateTitles,
statusLock = statusLock,
titleLock = titleLock,
titleSortLock = titleSortLock,
summaryLock = summaryLock,
readingDirectionLock = readingDirectionLock,
publisherLock = publisherLock,
ageRatingLock = ageRatingLock,
languageLock = languageLock,
genresLock = genresLock,
tagsLock = tagsLock,
totalBookCountLock = totalBookCountLock,
sharingLabelsLock = sharingLabelsLock,
linksLock = linksLock,
alternateTitlesLock = alternateTitlesLock,
seriesId = seriesId,
createdDate = createdDate,
lastModifiedDate = lastModifiedDate,
)
) = SeriesMetadata(
status = status,
title = title,
titleSort = titleSort,
summary = summary,
readingDirection = readingDirection,
publisher = publisher,
ageRating = ageRating,
language = language,
genres = genres,
tags = tags,
totalBookCount = totalBookCount,
sharingLabels = sharingLabels,
links = links,
alternateTitles = alternateTitles,
statusLock = statusLock,
titleLock = titleLock,
titleSortLock = titleSortLock,
summaryLock = summaryLock,
readingDirectionLock = readingDirectionLock,
publisherLock = publisherLock,
ageRatingLock = ageRatingLock,
languageLock = languageLock,
genresLock = genresLock,
tagsLock = tagsLock,
totalBookCountLock = totalBookCountLock,
sharingLabelsLock = sharingLabelsLock,
linksLock = linksLock,
alternateTitlesLock = alternateTitlesLock,
seriesId = seriesId,
createdDate = createdDate,
lastModifiedDate = lastModifiedDate,
)

enum class Status {
ENDED,

@@ -126,6 +125,5 @@ class SeriesMetadata(
WEBTOON,
}

override fun toString(): String =
"SeriesMetadata(status=$status, readingDirection=$readingDirection, ageRating=$ageRating, totalBookCount=$totalBookCount, links=$links, alternateTitles=$alternateTitles, statusLock=$statusLock, titleLock=$titleLock, titleSortLock=$titleSortLock, summaryLock=$summaryLock, readingDirectionLock=$readingDirectionLock, publisherLock=$publisherLock, ageRatingLock=$ageRatingLock, languageLock=$languageLock, genresLock=$genresLock, tagsLock=$tagsLock, totalBookCountLock=$totalBookCountLock, sharingLabelsLock=$sharingLabelsLock, linksLock=$linksLock, alternateTitlesLock=$alternateTitlesLock, seriesId='$seriesId', createdDate=$createdDate, lastModifiedDate=$lastModifiedDate, title='$title', titleSort='$titleSort', summary='$summary', publisher='$publisher', language='$language', tags=$tags, genres=$genres, sharingLabels=$sharingLabels)"
override fun toString(): String = "SeriesMetadata(status=$status, readingDirection=$readingDirection, ageRating=$ageRating, totalBookCount=$totalBookCount, links=$links, alternateTitles=$alternateTitles, statusLock=$statusLock, titleLock=$titleLock, titleSortLock=$titleSortLock, summaryLock=$summaryLock, readingDirectionLock=$readingDirectionLock, publisherLock=$publisherLock, ageRatingLock=$ageRatingLock, languageLock=$languageLock, genresLock=$genresLock, tagsLock=$tagsLock, totalBookCountLock=$totalBookCountLock, sharingLabelsLock=$sharingLabelsLock, linksLock=$linksLock, alternateTitlesLock=$alternateTitlesLock, seriesId='$seriesId', createdDate=$createdDate, lastModifiedDate=$lastModifiedDate, title='$title', titleSort='$titleSort', summary='$summary', publisher='$publisher', language='$language', tags=$tags, genres=$genres, sharingLabels=$sharingLabels)"
}

@@ -1,6 +1,8 @@
package org.gotson.komga.domain.model

enum class ThumbnailSize(val maxEdge: Int) {
enum class ThumbnailSize(
val maxEdge: Int,
) {
DEFAULT(300),
MEDIUM(600),
LARGE(900),

@@ -206,12 +206,20 @@ class BookAnalyzer(
fun getPoster(book: BookWithMedia): TypedBytes? =
when (book.media.profile) {
MediaProfile.DIVINA ->
divinaExtractors[book.media.mediaType]?.getEntryStream(book.book.path, book.media.pages.first().fileName)?.let {
TypedBytes(
it,
book.media.pages.first().mediaType,
)
}
divinaExtractors[book.media.mediaType]
?.getEntryStream(
book.book.path,
book.media.pages
.first()
.fileName,
)?.let {
TypedBytes(
it,
book.media.pages
.first()
.mediaType,
)
}

MediaProfile.PDF -> pdfExtractor.getPageContentAsImage(book.book.path, 1)
MediaProfile.EPUB -> epubExtractor.getCover(book.book.path)

@@ -60,7 +60,8 @@ class BookConverter(

fun getConvertibleBooks(library: Library): Collection<Book> =
if (library.convertToCbz) {
bookRepository.findAllByLibraryIdAndMediaTypes(library.id, convertibleTypes)
bookRepository
.findAllByLibraryIdAndMediaTypes(library.id, convertibleTypes)
.also { logger.info { "Found ${it.size} books to convert" } }
} else {
logger.info { "CBZ conversion is not enabled, skipping" }

@@ -100,7 +101,8 @@ class BookConverter(
zipStream.setLevel(Deflater.NO_COMPRESSION)

media
.pages.map { it.fileName }
.pages
.map { it.fileName }
.union(media.files.map { it.fileName })
.forEach { entry ->
zipStream.putArchiveEntry(ZipArchiveEntry(entry))

@@ -111,7 +113,8 @@ class BookConverter(

// perform checks on new file
val convertedBook =
fileSystemScanner.scanFile(destinationPath)
fileSystemScanner
.scanFile(destinationPath)
?.copy(
id = book.id,
seriesId = book.seriesId,

@@ -129,11 +132,13 @@ class BookConverter(
convertedMedia.mediaType != MediaType.ZIP.type
-> throw BookConversionException("Converted file is not a zip file, aborting conversion")

!convertedMedia.pages.map { FilenameUtils.getName(it.fileName) to it.mediaType }
!convertedMedia.pages
.map { FilenameUtils.getName(it.fileName) to it.mediaType }
.containsAll(media.pages.map { FilenameUtils.getName(it.fileName) to it.mediaType })
-> throw BookConversionException("Converted file does not contain all pages from existing file, aborting conversion")

!convertedMedia.files.map { FilenameUtils.getName(it.fileName) }
!convertedMedia.files
.map { FilenameUtils.getName(it.fileName) }
.containsAll(media.files.map { FilenameUtils.getName(it.fileName) })
-> throw BookConversionException("Converted file does not contain all files from existing file, aborting conversion")
}

@@ -201,7 +206,8 @@ class BookConverter(
book.path.moveTo(destinationPath)

val repairedBook =
fileSystemScanner.scanFile(destinationPath)
fileSystemScanner
.scanFile(destinationPath)
?.copy(
id = book.id,
seriesId = book.seriesId,

@@ -89,9 +89,16 @@ class BookImporter(
fileSystemScanner.scanBookSidecars(sourceFile).associateWith {
series.path.resolve(
if (destinationName != null)
it.url.toURI().toPath().name.replace(sourceFile.nameWithoutExtension, destinationName, true)
it.url
.toURI()
.toPath()
.name
.replace(sourceFile.nameWithoutExtension, destinationName, true)
else
it.url.toURI().toPath().name,
it.url
.toURI()
.toPath()
.name,
)
}

@@ -166,13 +173,17 @@ class BookImporter(
logger.warn(e) { "Filesystem does not support hardlinks, copying instead" }
sourceFile.copyTo(destFile)
sidecars.forEach {
it.key.url.toURI().toPath().copyTo(it.value, true)
it.key.url
.toURI()
.toPath()
.copyTo(it.value, true)
}
}
}

val importedBook =
fileSystemScanner.scanFile(destFile)
fileSystemScanner
.scanFile(destFile)
?.copy(libraryId = series.libraryId)
?: throw IllegalStateException("Newly imported book could not be scanned: $destFile").withCode("ERR_1022")

@@ -200,16 +211,21 @@ class BookImporter(
}

// copy read progress
readProgressRepository.findAllByBookId(bookToUpgrade.id)
readProgressRepository
.findAllByBookId(bookToUpgrade.id)
.map { it.copy(bookId = importedBook.id) }
.forEach { readProgressRepository.save(it) }

// replace upgraded book by imported book in read lists
readListRepository.findAllContainingBookId(bookToUpgrade.id, filterOnLibraryIds = null)
readListRepository
.findAllContainingBookId(bookToUpgrade.id, filterOnLibraryIds = null)
.forEach { rl ->
readListRepository.update(
rl.copy(
bookIds = rl.bookIds.values.map { if (it == bookToUpgrade.id) importedBook.id else it }.toIndexedMap(),
bookIds =
rl.bookIds.values
.map { if (it == bookToUpgrade.id) importedBook.id else it }
.toIndexedMap(),
),
)
}

@@ -82,7 +82,8 @@ class BookLifecycle(
mediaRepository.findById(book.id).let { previous ->
if (previous.status == Media.Status.OUTDATED && previous.pageCount != media.pageCount) {
val adjustedProgress =
readProgressRepository.findAllByBookId(book.id)
readProgressRepository
.findAllByBookId(book.id)
.map { it.copy(page = if (it.completed) media.pageCount else 1) }
if (adjustedProgress.isNotEmpty()) {
logger.info { "Number of pages differ, adjust read progress for book" }

@@ -145,7 +146,8 @@ class BookLifecycle(

ThumbnailBook.Type.SIDECAR -> {
// delete existing thumbnail with the same url
thumbnailBookRepository.findAllByBookIdAndType(thumbnail.bookId, setOf(ThumbnailBook.Type.SIDECAR))
thumbnailBookRepository
.findAllByBookIdAndType(thumbnail.bookId, setOf(ThumbnailBook.Type.SIDECAR))
.filter { it.url == thumbnail.url }
.forEach {
thumbnailBookRepository.delete(it.id)

@@ -253,7 +255,8 @@ class BookLifecycle(
private fun thumbnailsHouseKeeping(bookId: String) {
logger.info { "House keeping thumbnails for book: $bookId" }
val all =
thumbnailBookRepository.findAllByBookId(bookId)
thumbnailBookRepository
.findAllByBookId(bookId)
.mapNotNull {
if (!it.exists()) {
logger.warn { "Thumbnail doesn't exist, removing entry" }

@@ -278,13 +281,12 @@ class BookLifecycle(
}
}

fun findBookThumbnailsToRegenerate(forBiggerResultOnly: Boolean): Collection<String> {
return if (forBiggerResultOnly) {
fun findBookThumbnailsToRegenerate(forBiggerResultOnly: Boolean): Collection<String> =
if (forBiggerResultOnly) {
thumbnailBookRepository.findAllBookIdsByThumbnailTypeAndDimensionSmallerThan(ThumbnailBook.Type.GENERATED, komgaSettingsProvider.thumbnailSize.maxEdge)
} else {
bookRepository.findAll(SearchCondition.Deleted(SearchOperator.IsFalse), SearchContext.empty(), Pageable.unpaged()).content.map { it.id }
}
}

@Throws(
ImageConversionException::class,

@@ -439,7 +441,12 @@ class BookLifecycle(
newProgression: R2Progression,
) {
readProgressRepository.findByBookIdAndUserIdOrNull(book.id, user.id)?.let { savedProgress ->
check(newProgression.modified.toLocalDateTime().toCurrentTimeZone().isAfter(savedProgress.readDate)) { "Progression is older than existing" }
check(
newProgression.modified
.toLocalDateTime()
.toCurrentTimeZone()
.isAfter(savedProgress.readDate),
) { "Progression is older than existing" }
}

val media = mediaRepository.findById(book.id)

@@ -465,7 +472,8 @@ class BookLifecycle(
MediaProfile.EPUB -> {
val href =
newProgression.locator.href
.replaceAfter("#", "").removeSuffix("#")
.replaceAfter("#", "")
.removeSuffix("#")
.let { UriUtils.decode(it, Charsets.UTF_8) }
require(href in media.files.map { it.fileName }) { "Resource does not exist in book: $href" }
requireNotNull(newProgression.locator.locations?.progression) { "location.progression is required" }

@@ -519,7 +527,8 @@ class BookLifecycle(
if (!book.path.isWritable()) return logger.info { "Cannot delete book file, path is not writable: ${book.path}" }

val thumbnails =
thumbnailBookRepository.findAllByBookIdAndType(book.id, setOf(ThumbnailBook.Type.SIDECAR))
thumbnailBookRepository
.findAllByBookIdAndType(book.id, setOf(ThumbnailBook.Type.SIDECAR))
.mapNotNull { it.url?.toURI()?.toPath() }
.filter { it.exists() && it.isWritable() }

@@ -531,7 +540,10 @@ class BookLifecycle(
if (it.deleteIfExists()) logger.info { "Deleted file: $it" }
}

if (book.path.parent.listDirectoryEntries().isEmpty())
if (book.path.parent
.listDirectoryEntries()
.isEmpty()
)
if (book.path.parent.deleteIfExists()) {
logger.info { "Deleted directory: ${book.path.parent}" }
historicalEventRepository.insert(HistoricalEvent.SeriesFolderDeleted(book.seriesId, book.path.parent, "Folder was deleted because it was empty"))

@@ -103,7 +103,8 @@ class BookPageEditor(
zipStream.setMethod(ZipArchiveOutputStream.DEFLATED)
zipStream.setLevel(Deflater.NO_COMPRESSION)

pagesToKeep.map { it.fileName }
pagesToKeep
.map { it.fileName }
.union(media.files.map { it.fileName })
.forEach { entry ->
zipStream.putArchiveEntry(ZipArchiveEntry(entry))

@@ -114,7 +115,8 @@ class BookPageEditor(

// perform checks on new file
val createdBook =
fileSystemScanner.scanFile(tempFile)
fileSystemScanner
.scanFile(tempFile)
?.copy(
id = book.id,
seriesId = book.seriesId,

@@ -132,11 +134,13 @@ class BookPageEditor(
createdMedia.mediaType != MediaType.ZIP.type
-> throw BookConversionException("Created file is not a zip file, aborting page removal")

!createdMedia.pages.map { FilenameUtils.getName(it.fileName) to it.mediaType }
!createdMedia.pages
.map { FilenameUtils.getName(it.fileName) to it.mediaType }
.containsAll(pagesToKeep.map { FilenameUtils.getName(it.fileName) to it.mediaType })
-> throw BookConversionException("Created file does not contain all pages to keep from existing file, aborting conversion")

!createdMedia.files.map { FilenameUtils.getName(it.fileName) }
!createdMedia.files
.map { FilenameUtils.getName(it.fileName) }
.containsAll(media.files.map { FilenameUtils.getName(it.fileName) })
-> throw BookConversionException("Created file does not contain all files from existing file, aborting page removal")
}

@ -148,7 +152,8 @@ class BookPageEditor(
|
|||
|
||||
tempFile.moveTo(book.path, true)
val newBook =
fileSystemScanner.scanFile(book.path)
fileSystemScanner
.scanFile(book.path)
?.copy(
id = book.id,
seriesId = book.seriesId,
|
||||
|
|
|
|||
|
|
@ -121,12 +121,13 @@ class FileSystemScanner(
|
|||
}
}

sidecarSeriesConsumers.firstOrNull { consumer ->
consumer.getSidecarSeriesFilenames().any { file.name.equals(it, ignoreCase = true) }
}?.let {
val sidecar = Sidecar(file.toUri().toURL(), file.parent.toUri().toURL(), attrs.getUpdatedTime(), it.getSidecarSeriesType(), Sidecar.Source.SERIES)
pathToSeriesSidecars.merge(file.parent, mutableListOf(sidecar)) { prev, one -> prev.union(one).toMutableList() }
}
sidecarSeriesConsumers
.firstOrNull { consumer ->
consumer.getSidecarSeriesFilenames().any { file.name.equals(it, ignoreCase = true) }
}?.let {
val sidecar = Sidecar(file.toUri().toURL(), file.parent.toUri().toURL(), attrs.getUpdatedTime(), it.getSidecarSeriesType(), Sidecar.Source.SERIES)
pathToSeriesSidecars.merge(file.parent, mutableListOf(sidecar)) { prev, one -> prev.union(one).toMutableList() }
}

// book sidecars can't be exactly matched during a file visit
// this prefilters files to reduce the candidates
|
||||
|
|
@ -217,7 +218,8 @@ class FileSystemScanner(
|
|||
fun scanBookSidecars(path: Path): List<Sidecar> {
val bookBaseName = path.nameWithoutExtension
val parent = path.parent
return parent.listDirectoryEntries()
return parent
.listDirectoryEntries()
.filter { candidate -> sidecarBookPrefilter.any { it.matches(candidate.name) } }
.mapNotNull { candidate ->
sidecarBookConsumers.firstOrNull { it.isSidecarBookMatch(bookBaseName, candidate.name) }?.let {
|
||||
|
|
@ -238,8 +240,6 @@ class FileSystemScanner(
|
|||
)
}

fun BasicFileAttributes.getUpdatedTime(): LocalDateTime =
maxOf(creationTime(), lastModifiedTime()).toLocalDateTime()
fun BasicFileAttributes.getUpdatedTime(): LocalDateTime = maxOf(creationTime(), lastModifiedTime()).toLocalDateTime()

fun FileTime.toLocalDateTime(): LocalDateTime =
LocalDateTime.ofInstant(this.toInstant(), ZoneId.systemDefault())
fun FileTime.toLocalDateTime(): LocalDateTime = LocalDateTime.ofInstant(this.toInstant(), ZoneId.systemDefault())
|
||||
|
|
|
|||
|
|
@ -127,7 +127,11 @@ class LibraryContentLifecycle(
|
|||
if (books.isNotEmpty()) {
logger.info { "Soft deleting books not on disk anymore: $books" }
bookLifecycle.softDeleteMany(books)
books.map { it.seriesId }.distinct().mapNotNull { seriesRepository.findByIdOrNull(it) }.toMutableList()
books
.map { it.seriesId }
.distinct()
.mapNotNull { seriesRepository.findByIdOrNull(it) }
.toMutableList()
} else {
mutableListOf()
}
|
||||
|
|
@ -281,7 +285,9 @@ class LibraryContentLifecycle(
|
|||
val bookSizes = newBooks.map { it.fileSize }

val deletedCandidates =
seriesRepository.findAll(SearchCondition.Deleted(SearchOperator.IsTrue), SearchContext.empty(), Pageable.unpaged()).content
seriesRepository
.findAll(SearchCondition.Deleted(SearchOperator.IsTrue), SearchContext.empty(), Pageable.unpaged())
.content
.mapNotNull { deletedCandidate ->
val deletedBooks = bookRepository.findAllBySeriesId(deletedCandidate.id)
val deletedBooksSizes = deletedBooks.map { it.fileSize }
|
||||
|
|
@ -324,7 +330,8 @@ class LibraryContentLifecycle(
|
|||
}

// replace deleted series by new series in collections
collectionRepository.findAllContainingSeriesId(match.first.id, filterOnLibraryIds = null)
collectionRepository
.findAllContainingSeriesId(match.first.id, filterOnLibraryIds = null)
.forEach { col ->
collectionRepository.update(
col.copy(
|
||||
|
|
@ -396,16 +403,21 @@ class LibraryContentLifecycle(
|
|||
}

// copy read progress
readProgressRepository.findAllByBookId(match.id)
readProgressRepository
.findAllByBookId(match.id)
.map { it.copy(bookId = bookToAdd.id) }
.forEach { readProgressRepository.save(it) }

// replace deleted book by new book in read lists
readListRepository.findAllContainingBookId(match.id, filterOnLibraryIds = null)
readListRepository
.findAllContainingBookId(match.id, filterOnLibraryIds = null)
.forEach { rl ->
readListRepository.update(
rl.copy(
bookIds = rl.bookIds.values.map { if (it == match.id) bookToAdd.id else it }.toIndexedMap(),
bookIds =
rl.bookIds.values
.map { if (it == match.id) bookToAdd.id else it }
.toIndexedMap(),
),
)
}
|
||||
|
|
@ -422,25 +434,27 @@ class LibraryContentLifecycle(
|
|||
logger.info { "Empty trash for library: $library" }
|
||||
|
||||
val seriesToDelete =
|
||||
seriesRepository.findAll(
|
||||
SearchCondition.AllOfSeries(
|
||||
SearchCondition.LibraryId(SearchOperator.Is(library.id)),
|
||||
SearchCondition.Deleted(SearchOperator.IsTrue),
|
||||
),
|
||||
SearchContext.empty(),
|
||||
Pageable.unpaged(),
|
||||
).content
|
||||
seriesRepository
|
||||
.findAll(
|
||||
SearchCondition.AllOfSeries(
|
||||
SearchCondition.LibraryId(SearchOperator.Is(library.id)),
|
||||
SearchCondition.Deleted(SearchOperator.IsTrue),
|
||||
),
|
||||
SearchContext.empty(),
|
||||
Pageable.unpaged(),
|
||||
).content
|
||||
seriesLifecycle.deleteMany(seriesToDelete)
|
||||
|
||||
val booksToDelete =
|
||||
bookRepository.findAll(
|
||||
SearchCondition.AllOfBook(
|
||||
SearchCondition.LibraryId(SearchOperator.Is(library.id)),
|
||||
SearchCondition.Deleted(SearchOperator.IsTrue),
|
||||
),
|
||||
SearchContext.empty(),
|
||||
Pageable.unpaged(),
|
||||
).content
|
||||
bookRepository
|
||||
.findAll(
|
||||
SearchCondition.AllOfBook(
|
||||
SearchCondition.LibraryId(SearchOperator.Is(library.id)),
|
||||
SearchCondition.Deleted(SearchOperator.IsTrue),
|
||||
),
|
||||
SearchContext.empty(),
|
||||
Pageable.unpaged(),
|
||||
).content
|
||||
bookLifecycle.deleteMany(booksToDelete)
|
||||
booksToDelete.map { it.seriesId }.distinct().forEach { seriesId ->
|
||||
seriesRepository.findByIdOrNull(seriesId)?.let { seriesLifecycle.sortBooks(it) }
|
||||
|
|
|
|||
|
|
@ -27,7 +27,8 @@ class PageHashLifecycle(
|
|||
|
||||
fun getBookIdsWithMissingPageHash(library: Library): Collection<String> =
if (library.hashPages) {
mediaRepository.findAllBookIdsByLibraryIdAndMediaTypeAndWithMissingPageHash(library.id, hashableMediaTypes, komgaProperties.pageHashing)
mediaRepository
.findAllBookIdsByLibraryIdAndMediaTypeAndWithMissingPageHash(library.id, hashableMediaTypes, komgaProperties.pageHashing)
.also { logger.info { "Found ${it.size} books with missing page hash" } }
} else {
logger.info { "Page hashing is not enabled, skipping" }
|
||||
|
|
@ -44,8 +45,7 @@ class PageHashLifecycle(
|
|||
return bookLifecycle.getBookPage(book, match.pageNumber, resizeTo = resizeTo)
}

fun getBookPagesToDeleteAutomatically(library: Library): Map<String, Collection<BookPageNumbered>> =
pageHashRepository.findMatchesByKnownHashAction(listOf(PageHashKnown.Action.DELETE_AUTO), library.id)
fun getBookPagesToDeleteAutomatically(library: Library): Map<String, Collection<BookPageNumbered>> = pageHashRepository.findMatchesByKnownHashAction(listOf(PageHashKnown.Action.DELETE_AUTO), library.id)

fun createOrUpdate(pageHash: PageHashKnown) {
val existing = pageHashRepository.findKnown(pageHash.hash)
|
||||
|
|
|
|||
|
|
@ -147,8 +147,7 @@ class ReadListLifecycle(
|
|||
eventPublisher.publishEvent(DomainEvent.ThumbnailReadListDeleted(thumbnail))
}

fun getThumbnailBytes(thumbnailId: String): ByteArray? =
thumbnailReadListRepository.findByIdOrNull(thumbnailId)?.thumbnail
fun getThumbnailBytes(thumbnailId: String): ByteArray? = thumbnailReadListRepository.findByIdOrNull(thumbnailId)?.thumbnail

fun getThumbnailBytes(readList: ReadList): ByteArray {
thumbnailReadListRepository.findSelectedByReadListIdOrNull(readList.id)?.let {
|
||||
|
|
|
|||
|
|
@ -133,8 +133,7 @@ class SeriesCollectionLifecycle(
|
|||
eventPublisher.publishEvent(DomainEvent.ThumbnailSeriesCollectionDeleted(thumbnail))
}

fun getThumbnailBytes(thumbnailId: String): ByteArray? =
thumbnailSeriesCollectionRepository.findByIdOrNull(thumbnailId)?.thumbnail
fun getThumbnailBytes(thumbnailId: String): ByteArray? = thumbnailSeriesCollectionRepository.findByIdOrNull(thumbnailId)?.thumbnail

fun getThumbnailBytes(
collection: SeriesCollection,
|
||||
|
|
|
|||
|
|
@ -81,8 +81,7 @@ class SeriesLifecycle(
|
|||
.stripAccents()
.replace(whitespacePattern, " ")
},
)
.map { book -> book to metadatas.first { it.bookId == book.id } }
).map { book -> book to metadatas.first { it.bookId == book.id } }
logger.debug { "Sorted books: $sorted" }

bookRepository.update(
|
||||
|
|
@ -135,14 +134,15 @@ class SeriesLifecycle(
|
|||
mediaRepository.insert(toAdd.map { Media(bookId = it.id) })

// create associated metadata
toAdd.map {
BookMetadata(
title = it.name,
number = it.number.toString(),
numberSort = it.number.toFloat(),
bookId = it.id,
)
}.let { bookMetadataRepository.insert(it) }
toAdd
.map {
BookMetadata(
title = it.name,
number = it.number.toString(),
numberSort = it.number.toFloat(),
bookId = it.id,
)
}.let { bookMetadataRepository.insert(it) }
}

toAdd.forEach { eventPublisher.publishEvent(DomainEvent.BookAdded(it)) }
|
||||
|
|
@ -208,13 +208,15 @@ class SeriesLifecycle(
|
|||
user: KomgaUser,
) {
val bookIds =
bookRepository.findAllIdsBySeriesId(seriesId)
bookRepository
.findAllIdsBySeriesId(seriesId)
.filter { bookId ->
val readProgress = readProgressRepository.findByBookIdAndUserIdOrNull(bookId, user.id)
readProgress == null || !readProgress.completed
}
val progresses =
mediaRepository.getPagesSizes(bookIds)
mediaRepository
.getPagesSizes(bookIds)
.map { (bookId, pageSize) -> ReadProgress(bookId, user.id, pageSize, true) }

readProgressRepository.save(progresses)
|
||||
|
|
@ -291,7 +293,8 @@ class SeriesLifecycle(
|
|||
): ThumbnailSeries {
// delete existing thumbnail with the same url
if (thumbnail.url != null) {
thumbnailsSeriesRepository.findAllBySeriesId(thumbnail.seriesId)
thumbnailsSeriesRepository
.findAllBySeriesId(thumbnail.seriesId)
.filter { it.url == thumbnail.url }
.forEach {
thumbnailsSeriesRepository.delete(it.id)
|
||||
|
|
@ -327,11 +330,13 @@ class SeriesLifecycle(
|
|||
if (!series.path.isWritable()) return logger.info { "Cannot delete series folder, path is not writable: ${series.path}" }

val thumbnails =
thumbnailsSeriesRepository.findAllBySeriesIdIdAndType(series.id, ThumbnailSeries.Type.SIDECAR)
thumbnailsSeriesRepository
.findAllBySeriesIdIdAndType(series.id, ThumbnailSeries.Type.SIDECAR)
.mapNotNull { it.url?.toURI()?.toPath() }
.filter { it.exists() && it.isWritable() }

bookRepository.findAllBySeriesId(series.id)
bookRepository
.findAllBySeriesId(series.id)
.forEach { bookLifecycle.deleteBookFiles(it) }
thumbnails.forEach {
if (it.deleteIfExists()) logger.info { "Deleted file: $it" }
|
||||
|
|
@ -349,7 +354,8 @@ class SeriesLifecycle(
|
|||
private fun thumbnailsHouseKeeping(seriesId: String) {
logger.info { "House keeping thumbnails for series: $seriesId" }
val all =
thumbnailsSeriesRepository.findAllBySeriesId(seriesId)
thumbnailsSeriesRepository
.findAllBySeriesId(seriesId)
.mapNotNull {
if (!it.exists()) {
logger.warn { "Thumbnail doesn't exist, removing entry" }
|
||||
|
|
|
|||
|
|
@ -49,7 +49,8 @@ class SeriesMetadataLifecycle(
|
|||
else -> {
logger.debug { "Provider: ${provider.javaClass.simpleName}" }
val patches =
bookRepository.findAllBySeriesId(series.id)
bookRepository
.findAllBySeriesId(series.id)
.mapNotNull { book ->
try {
provider.getSeriesMetadataFromBook(BookWithMedia(book, mediaRepository.findById(book.id)), library.importComicInfoSeriesAppendVolume)
|
||||
|
|
@ -107,7 +108,12 @@ class SeriesMetadataLifecycle(
|
|||
title = patches.mostFrequent { it.title },
titleSort = patches.mostFrequent { it.titleSort },
status = patches.mostFrequent { it.status },
genres = patches.mapNotNull { it.genres }.flatten().toSet().ifEmpty { null },
genres =
patches
.mapNotNull { it.genres }
.flatten()
.toSet()
.ifEmpty { null },
language = patches.mostFrequent { it.language },
summary = null,
readingDirection = patches.mostFrequent { it.readingDirection },
|
||||
|
|
|
|||
|
|
@ -59,7 +59,8 @@ class SyncPointLifecycle(
|
|||
toSyncPointId: String,
|
||||
pageable: Pageable,
|
||||
): Page<SyncPoint.Book> =
|
||||
syncPointRepository.findBooksById(toSyncPointId, true, pageable)
|
||||
syncPointRepository
|
||||
.findBooksById(toSyncPointId, true, pageable)
|
||||
.also { page -> syncPointRepository.markBooksSynced(toSyncPointId, false, page.content.map { it.bookId }) }
|
||||
|
||||
/**
|
||||
|
|
@ -70,7 +71,8 @@ class SyncPointLifecycle(
|
|||
toSyncPointId: String,
|
||||
pageable: Pageable,
|
||||
): Page<SyncPoint.Book> =
|
||||
syncPointRepository.findBooksAdded(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
syncPointRepository
|
||||
.findBooksAdded(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
.also { page -> syncPointRepository.markBooksSynced(toSyncPointId, false, page.content.map { it.bookId }) }
|
||||
|
||||
/**
|
||||
|
|
@ -81,7 +83,8 @@ class SyncPointLifecycle(
|
|||
toSyncPointId: String,
|
||||
pageable: Pageable,
|
||||
): Page<SyncPoint.Book> =
|
||||
syncPointRepository.findBooksChanged(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
syncPointRepository
|
||||
.findBooksChanged(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
.also { page -> syncPointRepository.markBooksSynced(toSyncPointId, false, page.content.map { it.bookId }) }
|
||||
|
||||
/**
|
||||
|
|
@ -92,7 +95,8 @@ class SyncPointLifecycle(
|
|||
toSyncPointId: String,
|
||||
pageable: Pageable,
|
||||
): Page<SyncPoint.Book> =
|
||||
syncPointRepository.findBooksRemoved(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
syncPointRepository
|
||||
.findBooksRemoved(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
.also { page -> syncPointRepository.markBooksSynced(toSyncPointId, true, page.content.map { it.bookId }) }
|
||||
|
||||
/**
|
||||
|
|
@ -103,14 +107,16 @@ class SyncPointLifecycle(
|
|||
toSyncPointId: String,
|
||||
pageable: Pageable,
|
||||
): Page<SyncPoint.Book> =
|
||||
syncPointRepository.findBooksReadProgressChanged(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
syncPointRepository
|
||||
.findBooksReadProgressChanged(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
.also { page -> syncPointRepository.markBooksSynced(toSyncPointId, false, page.content.map { it.bookId }) }
|
||||
|
||||
fun takeReadLists(
|
||||
toSyncPointId: String,
|
||||
pageable: Pageable,
|
||||
): Page<SyncPoint.ReadList> =
|
||||
syncPointRepository.findReadListsById(toSyncPointId, true, pageable)
|
||||
syncPointRepository
|
||||
.findReadListsById(toSyncPointId, true, pageable)
|
||||
.also { page -> syncPointRepository.markReadListsSynced(toSyncPointId, false, page.content.map { it.readListId }) }
|
||||
|
||||
fun takeReadListsAdded(
|
||||
|
|
@ -118,7 +124,8 @@ class SyncPointLifecycle(
|
|||
toSyncPointId: String,
|
||||
pageable: Pageable,
|
||||
): Page<SyncPoint.ReadList> =
|
||||
syncPointRepository.findReadListsAdded(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
syncPointRepository
|
||||
.findReadListsAdded(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
.also { page -> syncPointRepository.markReadListsSynced(toSyncPointId, false, page.content.map { it.readListId }) }
|
||||
|
||||
fun takeReadListsChanged(
|
||||
|
|
@ -126,7 +133,8 @@ class SyncPointLifecycle(
|
|||
toSyncPointId: String,
|
||||
pageable: Pageable,
|
||||
): Page<SyncPoint.ReadList> =
|
||||
syncPointRepository.findReadListsChanged(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
syncPointRepository
|
||||
.findReadListsChanged(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
.also { page -> syncPointRepository.markReadListsSynced(toSyncPointId, false, page.content.map { it.readListId }) }
|
||||
|
||||
fun takeReadListsRemoved(
|
||||
|
|
@ -134,6 +142,7 @@ class SyncPointLifecycle(
|
|||
toSyncPointId: String,
|
||||
pageable: Pageable,
|
||||
): Page<SyncPoint.ReadList> =
|
||||
syncPointRepository.findReadListsRemoved(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
syncPointRepository
|
||||
.findReadListsRemoved(fromSyncPointId, toSyncPointId, true, pageable)
|
||||
.also { page -> syncPointRepository.markReadListsSynced(toSyncPointId, true, page.content.map { it.readListId }) }
|
||||
}
|
||||
|
|
|
|||
|
|
@ -40,8 +40,7 @@ class ThumbnailLifecycle(
|
|||
*
* @return true if more thumbnails need fixing
*/
fun fixThumbnailsMetadata(): Boolean =
fixThumbnailMetadataBook() || fixThumbnailMetadataSeries() || fixThumbnailMetadataCollection() || fixThumbnailMetadataReadList()
fun fixThumbnailsMetadata(): Boolean = fixThumbnailMetadataBook() || fixThumbnailMetadataSeries() || fixThumbnailMetadataCollection() || fixThumbnailMetadataReadList()

private fun fixThumbnailMetadataBook(): Boolean =
fixThumbnailMetadata(
|
||||
|
|
@ -162,7 +161,14 @@ class ThumbnailLifecycle(
|
|||
dimension = imageAnalyzer.getDimension(url.toURI().toPath().inputStream()) ?: Dimension(0, 0),
)

private data class Result(val processed: Int, val hasMore: Boolean)
private data class Result(
val processed: Int,
val hasMore: Boolean,
)

private data class ThumbnailMetadata(val mediaType: String, val fileSize: Long, val dimension: Dimension)
private data class ThumbnailMetadata(
val mediaType: String,
val fileSize: Long,
val dimension: Dimension,
)
}
|
||||
|
|
|
|||
|
|
@ -39,7 +39,12 @@ class TransientBookLifecycle(
|
|||
if (folderToScan.startsWith(library.path)) throw PathContainedInPath("Cannot scan folder that is part of an existing library", "ERR_1017")
}

val books = fileSystemScanner.scanRootFolder(folderToScan).series.values.flatten().map { TransientBook(it, Media()) }
val books =
fileSystemScanner
.scanRootFolder(folderToScan)
.series.values
.flatten()
.map { TransientBook(it, Media()) }

transientBookRepository.save(books)
|
||||
|
||||
|
|
@ -66,8 +71,7 @@ class TransientBookLifecycle(
|
|||
if (it.supportsAppendVolume) add(it.getSeriesMetadataFromBook(bookWithMedia, true)?.title)
add(it.getSeriesMetadataFromBook(bookWithMedia, false)?.title)
}
}
.filterNotNull()
}.filterNotNull()
.firstNotNullOfOrNull { seriesRepository.findAllByTitleContaining(it).firstOrNull() }

return series?.id to number
|
||||
|
|
|
|||
|
|
@ -12,7 +12,8 @@ private val logger = KotlinLogging.logger {}
|
|||
@Service
class TransientBookCache : TransientBookRepository {
private val cache =
Caffeine.newBuilder()
Caffeine
.newBuilder()
.expireAfterAccess(1, TimeUnit.HOURS)
.build<String, TransientBook>()
|
||||
|
||||
|
|
|
|||
|
|
@ -16,8 +16,7 @@ class DataSourcesConfiguration(
|
|||
) {
@Bean("sqliteDataSource")
@Primary
fun sqliteDataSource(): DataSource =
buildDataSource("SqliteMainPool", SqliteUdfDataSource::class.java, komgaProperties.database)
fun sqliteDataSource(): DataSource = buildDataSource("SqliteMainPool", SqliteUdfDataSource::class.java, komgaProperties.database)

@Bean("tasksDataSource")
fun tasksDataSource(): DataSource =
|
||||
|
|
@ -41,7 +40,8 @@ class DataSourcesConfiguration(
|
|||
}

val dataSource =
DataSourceBuilder.create()
DataSourceBuilder
.create()
.driverClassName("org.sqlite.JDBC")
.url("jdbc:sqlite:${databaseProps.file}$extraPragmas")
.type(dataSourceClass)
|
||||
|
|
|
|||
|
|
@ -13,7 +13,8 @@ class FlywaySecondaryMigrationInitializer(
|
|||
) : InitializingBean {
// by default Spring Boot will perform migration only on the @Primary datasource
override fun afterPropertiesSet() {
Flyway.configure()
Flyway
.configure()
.locations("classpath:tasks/migration/sqlite")
.dataSource(tasksDataSource)
.load()
|
||||
|
|
|
|||
|
|
@ -17,14 +17,12 @@ class SqliteUdfDataSource : SQLiteDataSource() {
|
|||
const val COLLATION_UNICODE_3 = "COLLATION_UNICODE_3"
}

override fun getConnection(): Connection =
super.getConnection().also { addAllUdf(it as SQLiteConnection) }
override fun getConnection(): Connection = super.getConnection().also { addAllUdf(it as SQLiteConnection) }

override fun getConnection(
username: String?,
password: String?,
): SQLiteConnection =
super.getConnection(username, password).also { addAllUdf(it) }
): SQLiteConnection = super.getConnection(username, password).also { addAllUdf(it) }

private fun addAllUdf(connection: SQLiteConnection) {
createUdfRegexp(connection)
|
||||
|
|
|
|||
|
|
@ -38,11 +38,14 @@ class ImageConverter(
|
|||
|
||||
private fun chooseWebpReader() {
|
||||
val providers =
|
||||
IIORegistry.getDefaultInstance().getServiceProviders(
|
||||
ImageReaderSpi::class.java,
|
||||
{ it is ImageReaderSpi && it.mimeTypes.contains("image/webp") },
|
||||
false,
|
||||
).asSequence().toList()
|
||||
IIORegistry
|
||||
.getDefaultInstance()
|
||||
.getServiceProviders(
|
||||
ImageReaderSpi::class.java,
|
||||
{ it is ImageReaderSpi && it.mimeTypes.contains("image/webp") },
|
||||
false,
|
||||
).asSequence()
|
||||
.toList()
|
||||
|
||||
if (providers.size > 1) {
|
||||
logger.debug { "WebP reader providers: ${providers.map { it.javaClass.canonicalName }}" }
|
||||
|
|
@ -60,8 +63,7 @@ class ImageConverter(
|
|||
fun canConvertMediaType(
|
||||
from: String,
|
||||
to: String,
|
||||
) =
|
||||
supportedReadMediaTypes.contains(from) && supportedWriteMediaTypes.contains(to)
|
||||
) = supportedReadMediaTypes.contains(from) && supportedWriteMediaTypes.contains(to)
|
||||
|
||||
fun convertImage(
|
||||
imageBytes: ByteArray,
|
||||
|
|
@ -128,14 +130,14 @@ class ImageConverter(
|
|||
// prevent upscaling
|
||||
val resizeTo = if (longestEdge != null) min(longestEdge, size) else size
|
||||
|
||||
return Thumbnails.of(imageBytes.inputStream())
|
||||
return Thumbnails
|
||||
.of(imageBytes.inputStream())
|
||||
.size(resizeTo, resizeTo)
|
||||
.imageType(BufferedImage.TYPE_INT_ARGB)
|
||||
.outputFormat(format.imageIOFormat)
|
||||
}
|
||||
|
||||
private fun containsAlphaChannel(image: BufferedImage): Boolean =
|
||||
image.colorModel.hasAlpha()
|
||||
private fun containsAlphaChannel(image: BufferedImage): Boolean = image.colorModel.hasAlpha()
|
||||
|
||||
private fun containsTransparency(image: BufferedImage): Boolean {
|
||||
for (x in 0 until image.width) {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,9 @@
|
|||
package org.gotson.komga.infrastructure.image

enum class ImageType(val mediaType: String, val imageIOFormat: String) {
enum class ImageType(
val mediaType: String,
val imageIOFormat: String,
) {
PNG("image/png", "PNG"),
JPEG("image/jpeg", "JPEG"),
}
|
||||
|
|
|
|||
|
|
@ -39,8 +39,8 @@ class BookSearchHelper(
|
|||
return toConditionInternal(SearchCondition.AnyOfBook(libraryIds.map { SearchCondition.LibraryId(SearchOperator.Is(it)) }))
|
||||
}
|
||||
|
||||
private fun toConditionInternal(searchCondition: SearchCondition.Book?): Pair<Condition, Set<RequiredJoin>> {
|
||||
return when (searchCondition) {
|
||||
private fun toConditionInternal(searchCondition: SearchCondition.Book?): Pair<Condition, Set<RequiredJoin>> =
|
||||
when (searchCondition) {
|
||||
is SearchCondition.AllOfBook ->
|
||||
searchCondition.conditions.fold(DSL.noCondition() to emptySet()) { acc: Pair<Condition, Set<RequiredJoin>>, cond: SearchCondition.Book ->
|
||||
val bookCondition = toConditionInternal(cond)
|
||||
|
|
@ -60,7 +60,8 @@ class BookSearchHelper(
|
|||
is SearchCondition.ReadListId ->
|
||||
Tables.BOOK.ID.let { field ->
|
||||
val inner = { readListId: String ->
|
||||
DSL.select(Tables.READLIST_BOOK.BOOK_ID)
|
||||
DSL
|
||||
.select(Tables.READLIST_BOOK.BOOK_ID)
|
||||
.from(Tables.READLIST_BOOK)
|
||||
.where(Tables.READLIST_BOOK.READLIST_ID.eq(readListId))
|
||||
}
|
||||
|
|
@ -138,9 +139,14 @@ class BookSearchHelper(
|
|||
is SearchCondition.Tag ->
|
||||
Tables.BOOK.ID.let { field ->
|
||||
val inner = { tag: String ->
|
||||
DSL.select(Tables.BOOK_METADATA_TAG.BOOK_ID)
|
||||
DSL
|
||||
.select(Tables.BOOK_METADATA_TAG.BOOK_ID)
|
||||
.from(Tables.BOOK_METADATA_TAG)
|
||||
.where(Tables.BOOK_METADATA_TAG.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3).equalIgnoreCase(tag))
|
||||
.where(
|
||||
Tables.BOOK_METADATA_TAG.TAG
|
||||
.collate(SqliteUdfDataSource.COLLATION_UNICODE_3)
|
||||
.equalIgnoreCase(tag),
|
||||
)
|
||||
}
|
||||
when (searchCondition.operator) {
|
||||
is SearchOperator.Is -> field.`in`(inner(searchCondition.operator.value))
|
||||
|
|
@ -151,11 +157,25 @@ class BookSearchHelper(
|
|||
is SearchCondition.Author ->
|
||||
Tables.BOOK.ID.let { field ->
|
||||
val inner = { name: String?, role: String? ->
|
||||
DSL.select(Tables.BOOK_METADATA_AUTHOR.BOOK_ID)
|
||||
DSL
|
||||
.select(Tables.BOOK_METADATA_AUTHOR.BOOK_ID)
|
||||
.from(Tables.BOOK_METADATA_AUTHOR)
|
||||
.where(DSL.noCondition())
|
||||
.apply { if (name != null) and(Tables.BOOK_METADATA_AUTHOR.NAME.collate(SqliteUdfDataSource.COLLATION_UNICODE_3).equalIgnoreCase(name)) }
|
||||
.apply { if (role != null) and(Tables.BOOK_METADATA_AUTHOR.ROLE.collate(SqliteUdfDataSource.COLLATION_UNICODE_3).equalIgnoreCase(role)) }
|
||||
.apply {
|
||||
if (name != null)
|
||||
and(
|
||||
Tables.BOOK_METADATA_AUTHOR.NAME
|
||||
.collate(SqliteUdfDataSource.COLLATION_UNICODE_3)
|
||||
.equalIgnoreCase(name),
|
||||
)
|
||||
}.apply {
|
||||
if (role != null)
|
||||
and(
|
||||
Tables.BOOK_METADATA_AUTHOR.ROLE
|
||||
.collate(SqliteUdfDataSource.COLLATION_UNICODE_3)
|
||||
.equalIgnoreCase(role),
|
||||
)
|
||||
}
|
||||
}
|
||||
when (searchCondition.operator) {
|
||||
is SearchOperator.Is -> {
|
||||
|
|
@ -178,5 +198,4 @@ class BookSearchHelper(
|
|||
|
||||
null -> DSL.noCondition() to emptySet()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -10,7 +10,8 @@ abstract class ContentRestrictionsSearchHelper {
|
|||
protected fun toConditionInternal(restrictions: ContentRestrictions): Pair<Condition, Set<RequiredJoin>> {
|
||||
val ageAllowed =
|
||||
if (restrictions.ageRestriction?.restriction == AllowExclude.ALLOW_ONLY) {
|
||||
Tables.SERIES_METADATA.AGE_RATING.isNotNull.and(Tables.SERIES_METADATA.AGE_RATING.lessOrEqual(restrictions.ageRestriction.age)) to setOf(RequiredJoin.SeriesMetadata)
|
||||
Tables.SERIES_METADATA.AGE_RATING.isNotNull
|
||||
.and(Tables.SERIES_METADATA.AGE_RATING.lessOrEqual(restrictions.ageRestriction.age)) to setOf(RequiredJoin.SeriesMetadata)
|
||||
} else {
|
||||
DSL.noCondition() to emptySet()
|
||||
}
|
||||
|
|
@ -18,7 +19,8 @@ abstract class ContentRestrictionsSearchHelper {
|
|||
val labelAllowed =
|
||||
if (restrictions.labelsAllow.isNotEmpty())
|
||||
Tables.SERIES_METADATA.SERIES_ID.`in`(
|
||||
DSL.select(Tables.SERIES_METADATA_SHARING.SERIES_ID)
|
||||
DSL
|
||||
.select(Tables.SERIES_METADATA_SHARING.SERIES_ID)
|
||||
.from(Tables.SERIES_METADATA_SHARING)
|
||||
.where(Tables.SERIES_METADATA_SHARING.LABEL.`in`(restrictions.labelsAllow)),
|
||||
) to setOf(RequiredJoin.SeriesMetadata)
|
||||
|
|
@ -27,21 +29,24 @@ abstract class ContentRestrictionsSearchHelper {
|
|||
|
||||
val ageDenied =
|
||||
if (restrictions.ageRestriction?.restriction == AllowExclude.EXCLUDE)
|
||||
Tables.SERIES_METADATA.AGE_RATING.isNull.or(Tables.SERIES_METADATA.AGE_RATING.lessThan(restrictions.ageRestriction.age)) to setOf(RequiredJoin.SeriesMetadata)
|
||||
Tables.SERIES_METADATA.AGE_RATING.isNull
|
||||
.or(Tables.SERIES_METADATA.AGE_RATING.lessThan(restrictions.ageRestriction.age)) to setOf(RequiredJoin.SeriesMetadata)
|
||||
else
|
||||
DSL.noCondition() to emptySet()
|
||||
|
||||
val labelDenied =
|
||||
if (restrictions.labelsExclude.isNotEmpty())
|
||||
Tables.SERIES_METADATA.SERIES_ID.notIn(
|
||||
DSL.select(Tables.SERIES_METADATA_SHARING.SERIES_ID)
|
||||
DSL
|
||||
.select(Tables.SERIES_METADATA_SHARING.SERIES_ID)
|
||||
.from(Tables.SERIES_METADATA_SHARING)
|
||||
.where(Tables.SERIES_METADATA_SHARING.LABEL.`in`(restrictions.labelsExclude)),
|
||||
) to setOf(RequiredJoin.SeriesMetadata)
|
||||
else
|
||||
DSL.noCondition() to emptySet()
|
||||
|
||||
return ageAllowed.first.or(labelAllowed.first)
|
||||
return ageAllowed.first
|
||||
.or(labelAllowed.first)
|
||||
.and(ageDenied.first.and(labelDenied.first)) to (ageAllowed.second + labelAllowed.second + ageDenied.second + labelDenied.second)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -25,28 +25,25 @@ class KomgaJooqConfiguration {
|
|||
dataSource: DataSource,
|
||||
transactionProvider: ObjectProvider<TransactionProvider?>,
|
||||
executeListenerProviders: ObjectProvider<ExecuteListenerProvider?>,
|
||||
): DSLContext =
|
||||
createDslContext(dataSource, transactionProvider, executeListenerProviders)
|
||||
): DSLContext = createDslContext(dataSource, transactionProvider, executeListenerProviders)
|
||||
|
||||
@Bean("tasksDslContext")
|
||||
fun tasksDslContext(
|
||||
@Qualifier("tasksDataSource") dataSource: DataSource,
|
||||
transactionProvider: ObjectProvider<TransactionProvider?>,
|
||||
executeListenerProviders: ObjectProvider<ExecuteListenerProvider?>,
|
||||
): DSLContext =
|
||||
createDslContext(dataSource, transactionProvider, executeListenerProviders)
|
||||
): DSLContext = createDslContext(dataSource, transactionProvider, executeListenerProviders)
|
||||
|
||||
private fun createDslContext(
|
||||
dataSource: DataSource,
|
||||
transactionProvider: ObjectProvider<TransactionProvider?>,
|
||||
executeListenerProviders: ObjectProvider<ExecuteListenerProvider?>,
|
||||
) =
|
||||
DefaultDSLContext(
|
||||
DefaultConfiguration().also { configuration ->
|
||||
configuration.set(SQLDialect.SQLITE)
|
||||
configuration.set(DataSourceConnectionProvider(TransactionAwareDataSourceProxy(dataSource)))
|
||||
transactionProvider.ifAvailable { newTransactionProvider: TransactionProvider? -> configuration.set(newTransactionProvider) }
|
||||
configuration.set(*executeListenerProviders.orderedStream().toList().toTypedArray())
|
||||
},
|
||||
)
|
||||
) = DefaultDSLContext(
|
||||
DefaultConfiguration().also { configuration ->
|
||||
configuration.set(SQLDialect.SQLITE)
|
||||
configuration.set(DataSourceConnectionProvider(TransactionAwareDataSourceProxy(dataSource)))
|
||||
transactionProvider.ifAvailable { newTransactionProvider: TransactionProvider? -> configuration.set(newTransactionProvider) }
|
||||
configuration.set(*executeListenerProviders.orderedStream().toList().toTypedArray())
|
||||
},
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,7 +8,9 @@ sealed class RequiredJoin {
|
|||
|
||||
data object Media : RequiredJoin()

data class ReadProgress(val userId: String) : RequiredJoin()
data class ReadProgress(
val userId: String,
) : RequiredJoin()

data object BookMetadataAggregation : RequiredJoin()
|
||||
|
||||
|
|
|
|||
|
|
@ -9,11 +9,10 @@ import java.time.temporal.ChronoUnit
|
|||
fun SearchOperator.Equality<String>.toCondition(
|
||||
field: Field<String>,
|
||||
ignoreCase: Boolean = false,
|
||||
) =
|
||||
when (this) {
|
||||
is SearchOperator.Is -> if (ignoreCase) field.equalIgnoreCase(this.value) else field.eq(this.value)
|
||||
is SearchOperator.IsNot -> if (ignoreCase) field.notEqualIgnoreCase(this.value) else field.ne(this.value)
|
||||
}
|
||||
) = when (this) {
|
||||
is SearchOperator.Is -> if (ignoreCase) field.equalIgnoreCase(this.value) else field.eq(this.value)
|
||||
is SearchOperator.IsNot -> if (ignoreCase) field.notEqualIgnoreCase(this.value) else field.ne(this.value)
|
||||
}
|
||||
|
||||
fun <T> SearchOperator.Equality<T>.toCondition(field: Field<T>) =
|
||||
when (this) {
|
||||
|
|
@ -24,11 +23,10 @@ fun <T> SearchOperator.Equality<T>.toCondition(field: Field<T>) =
|
|||
fun <T> SearchOperator.Equality<T>.toCondition(
|
||||
field: Field<String>,
|
||||
converter: (T) -> String,
|
||||
) =
|
||||
when (this) {
|
||||
is SearchOperator.Is -> field.eq(converter(this.value))
|
||||
is SearchOperator.IsNot -> field.ne(converter(this.value))
|
||||
}
|
||||
) = when (this) {
|
||||
is SearchOperator.Is -> field.eq(converter(this.value))
|
||||
is SearchOperator.IsNot -> field.ne(converter(this.value))
|
||||
}
|
||||
|
||||
fun SearchOperator.StringOp.toCondition(field: Field<String>) =
|
||||
when (this) {
|
||||
|
|
|
|||
|
|
@ -37,8 +37,8 @@ class SeriesSearchHelper(
|
|||
return toConditionInternal(SearchCondition.AnyOfSeries(libraryIds.map { SearchCondition.LibraryId(SearchOperator.Is(it)) }))
|
||||
}
|
||||
|
||||
private fun toConditionInternal(searchCondition: SearchCondition.Series?): Pair<Condition, Set<RequiredJoin>> {
|
||||
return when (searchCondition) {
|
||||
private fun toConditionInternal(searchCondition: SearchCondition.Series?): Pair<Condition, Set<RequiredJoin>> =
|
||||
when (searchCondition) {
|
||||
is SearchCondition.AllOfSeries ->
|
||||
searchCondition.conditions.fold(DSL.noCondition() to emptySet()) { acc: Pair<Condition, Set<RequiredJoin>>, cond: SearchCondition.Series ->
|
||||
val seriesCondition = toConditionInternal(cond)
|
||||
|
|
@ -98,13 +98,22 @@ class SeriesSearchHelper(
|
|||
is SearchCondition.Tag ->
|
||||
Tables.SERIES.ID.let { field ->
|
||||
val inner = { tag: String ->
|
||||
DSL.select(Tables.SERIES_METADATA_TAG.SERIES_ID)
|
||||
DSL
|
||||
.select(Tables.SERIES_METADATA_TAG.SERIES_ID)
|
||||
.from(Tables.SERIES_METADATA_TAG)
|
||||
.where(Tables.SERIES_METADATA_TAG.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3).equalIgnoreCase(tag))
|
||||
.union(
|
||||
DSL.select(Tables.BOOK_METADATA_AGGREGATION_TAG.SERIES_ID)
|
||||
.where(
|
||||
Tables.SERIES_METADATA_TAG.TAG
|
||||
.collate(SqliteUdfDataSource.COLLATION_UNICODE_3)
|
||||
.equalIgnoreCase(tag),
|
||||
).union(
|
||||
DSL
|
||||
.select(Tables.BOOK_METADATA_AGGREGATION_TAG.SERIES_ID)
|
||||
.from(Tables.BOOK_METADATA_AGGREGATION_TAG)
|
||||
.where(Tables.BOOK_METADATA_AGGREGATION_TAG.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3).equalIgnoreCase(tag)),
|
||||
.where(
|
||||
Tables.BOOK_METADATA_AGGREGATION_TAG.TAG
|
||||
.collate(SqliteUdfDataSource.COLLATION_UNICODE_3)
|
||||
.equalIgnoreCase(tag),
|
||||
),
|
||||
)
|
||||
}
|
||||
when (searchCondition.operator) {
|
||||
|
|
@ -116,23 +125,25 @@ class SeriesSearchHelper(
|
|||
is SearchCondition.Author ->
|
||||
Tables.SERIES.ID.let { field ->
|
||||
val inner = { name: String?, role: String? ->
|
||||
DSL.select(Tables.BOOK_METADATA_AGGREGATION_AUTHOR.SERIES_ID)
|
||||
DSL
|
||||
.select(Tables.BOOK_METADATA_AGGREGATION_AUTHOR.SERIES_ID)
|
||||
.from(Tables.BOOK_METADATA_AGGREGATION_AUTHOR)
|
||||
.where(DSL.noCondition())
|
||||
.apply {
|
||||
if (name != null)
|
||||
and(
|
||||
Tables.BOOK_METADATA_AGGREGATION_AUTHOR.NAME.collate(
|
||||
SqliteUdfDataSource.COLLATION_UNICODE_3,
|
||||
).equalIgnoreCase(name),
|
||||
Tables.BOOK_METADATA_AGGREGATION_AUTHOR.NAME
|
||||
.collate(
|
||||
SqliteUdfDataSource.COLLATION_UNICODE_3,
|
||||
).equalIgnoreCase(name),
|
||||
)
|
||||
}
|
||||
.apply {
|
||||
}.apply {
|
||||
if (role != null)
|
||||
and(
|
||||
Tables.BOOK_METADATA_AGGREGATION_AUTHOR.ROLE.collate(
|
||||
SqliteUdfDataSource.COLLATION_UNICODE_3,
|
||||
).equalIgnoreCase(role),
|
||||
Tables.BOOK_METADATA_AGGREGATION_AUTHOR.ROLE
|
||||
.collate(
|
||||
SqliteUdfDataSource.COLLATION_UNICODE_3,
|
||||
).equalIgnoreCase(role),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
@ -160,7 +171,8 @@ class SeriesSearchHelper(
|
|||
is SearchCondition.CollectionId ->
|
||||
Tables.SERIES.ID.let { field ->
|
||||
val inner = { collectionId: String ->
|
||||
DSL.select(Tables.COLLECTION_SERIES.SERIES_ID)
|
||||
DSL
|
||||
.select(Tables.COLLECTION_SERIES.SERIES_ID)
|
||||
.from(Tables.COLLECTION_SERIES)
|
||||
.where(Tables.COLLECTION_SERIES.COLLECTION_ID.eq(collectionId))
|
||||
}
|
||||
|
|
@ -181,9 +193,14 @@ class SeriesSearchHelper(
|
|||
is SearchCondition.Genre ->
|
||||
Tables.SERIES.ID.let { field ->
|
||||
val inner = { genre: String ->
|
||||
DSL.select(Tables.SERIES_METADATA_GENRE.SERIES_ID)
|
||||
DSL
|
||||
.select(Tables.SERIES_METADATA_GENRE.SERIES_ID)
|
||||
.from(Tables.SERIES_METADATA_GENRE)
|
||||
.where(Tables.SERIES_METADATA_GENRE.GENRE.collate(SqliteUdfDataSource.COLLATION_UNICODE_3).equalIgnoreCase(genre))
|
||||
.where(
|
||||
Tables.SERIES_METADATA_GENRE.GENRE
|
||||
.collate(SqliteUdfDataSource.COLLATION_UNICODE_3)
|
||||
.equalIgnoreCase(genre),
|
||||
)
|
||||
}
|
||||
when (searchCondition.operator) {
|
||||
is SearchOperator.Is -> field.`in`(inner(searchCondition.operator.value))
|
||||
|
|
@ -198,9 +215,14 @@ class SeriesSearchHelper(
|
|||
is SearchCondition.SharingLabel ->
|
||||
Tables.SERIES.ID.let { field ->
|
||||
val inner = { label: String ->
|
||||
DSL.select(Tables.SERIES_METADATA_SHARING.SERIES_ID)
|
||||
DSL
|
||||
.select(Tables.SERIES_METADATA_SHARING.SERIES_ID)
|
||||
.from(Tables.SERIES_METADATA_SHARING)
|
||||
.where(Tables.SERIES_METADATA_SHARING.LABEL.collate(SqliteUdfDataSource.COLLATION_UNICODE_3).equalIgnoreCase(label))
|
||||
.where(
|
||||
Tables.SERIES_METADATA_SHARING.LABEL
|
||||
.collate(SqliteUdfDataSource.COLLATION_UNICODE_3)
|
||||
.equalIgnoreCase(label),
|
||||
)
|
||||
}
|
||||
when (searchCondition.operator) {
|
||||
is SearchOperator.Is -> field.`in`(inner(searchCondition.operator.value))
|
||||
|
|
@ -222,5 +244,4 @@ class SeriesSearchHelper(
|
|||
|
||||
null -> DSL.noCondition() to emptySet()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,9 +6,7 @@ import org.springframework.data.domain.Sort
|
|||
class UnpagedSorted(
private val sort: Sort,
) : Pageable {
override fun getPageNumber(): Int {
throw UnsupportedOperationException()
}
override fun getPageNumber(): Int = throw UnsupportedOperationException()

override fun hasPrevious(): Boolean = false
|
||||
|
||||
|
|
@ -18,13 +16,9 @@ class UnpagedSorted(
|
|||
|
||||
override fun next(): Pageable = this

override fun getPageSize(): Int {
throw UnsupportedOperationException()
}
override fun getPageSize(): Int = throw UnsupportedOperationException()

override fun getOffset(): Long {
throw UnsupportedOperationException()
}
override fun getOffset(): Long = throw UnsupportedOperationException()

override fun first(): Pageable = this
|
||||
|
||||
|
|
|
|||
|
|
@ -45,8 +45,7 @@ fun Field<String>.inOrNoCondition(list: Collection<String>?): Condition =
|
|||
else -> this.`in`(list)
|
||||
}
|
||||
|
||||
fun Field<String>.udfStripAccents() =
|
||||
DSL.function(SqliteUdfDataSource.UDF_STRIP_ACCENTS, String::class.java, this)
|
||||
fun Field<String>.udfStripAccents() = DSL.function(SqliteUdfDataSource.UDF_STRIP_ACCENTS, String::class.java, this)
|
||||
|
||||
fun DSLContext.insertTempStrings(
|
||||
batchSize: Int,
|
||||
|
|
@ -55,13 +54,14 @@ fun DSLContext.insertTempStrings(
|
|||
this.deleteFrom(Tables.TEMP_STRING_LIST).execute()
|
||||
if (collection.isNotEmpty()) {
|
||||
collection.chunked(batchSize).forEach { chunk ->
|
||||
this.batch(
|
||||
this.insertInto(Tables.TEMP_STRING_LIST, Tables.TEMP_STRING_LIST.STRING).values(null as String?),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(it)
|
||||
}
|
||||
}.execute()
|
||||
this
|
||||
.batch(
|
||||
this.insertInto(Tables.TEMP_STRING_LIST, Tables.TEMP_STRING_LIST.STRING).values(null as String?),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(it)
|
||||
}
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -71,7 +71,8 @@ fun DSLContext.selectTempStrings() = this.select(Tables.TEMP_STRING_LIST.STRING)
|
|||
fun ContentRestrictions.toCondition(): Condition {
|
||||
val ageAllowed =
|
||||
if (ageRestriction?.restriction == AllowExclude.ALLOW_ONLY) {
|
||||
Tables.SERIES_METADATA.AGE_RATING.isNotNull.and(Tables.SERIES_METADATA.AGE_RATING.lessOrEqual(ageRestriction.age))
|
||||
Tables.SERIES_METADATA.AGE_RATING.isNotNull
|
||||
.and(Tables.SERIES_METADATA.AGE_RATING.lessOrEqual(ageRestriction.age))
|
||||
} else {
|
||||
DSL.noCondition()
|
||||
}
|
||||
|
|
@ -79,7 +80,8 @@ fun ContentRestrictions.toCondition(): Condition {
|
|||
val labelAllowed =
|
||||
if (labelsAllow.isNotEmpty())
|
||||
Tables.SERIES_METADATA.SERIES_ID.`in`(
|
||||
DSL.select(Tables.SERIES_METADATA_SHARING.SERIES_ID)
|
||||
DSL
|
||||
.select(Tables.SERIES_METADATA_SHARING.SERIES_ID)
|
||||
.from(Tables.SERIES_METADATA_SHARING)
|
||||
.where(Tables.SERIES_METADATA_SHARING.LABEL.`in`(labelsAllow)),
|
||||
)
|
||||
|
|
@ -88,21 +90,24 @@ fun ContentRestrictions.toCondition(): Condition {
|
|||
|
||||
val ageDenied =
|
||||
if (ageRestriction?.restriction == AllowExclude.EXCLUDE)
|
||||
Tables.SERIES_METADATA.AGE_RATING.isNull.or(Tables.SERIES_METADATA.AGE_RATING.lessThan(ageRestriction.age))
|
||||
Tables.SERIES_METADATA.AGE_RATING.isNull
|
||||
.or(Tables.SERIES_METADATA.AGE_RATING.lessThan(ageRestriction.age))
|
||||
else
|
||||
DSL.noCondition()
|
||||
|
||||
val labelDenied =
|
||||
if (labelsExclude.isNotEmpty())
|
||||
Tables.SERIES_METADATA.SERIES_ID.notIn(
|
||||
DSL.select(Tables.SERIES_METADATA_SHARING.SERIES_ID)
|
||||
DSL
|
||||
.select(Tables.SERIES_METADATA_SHARING.SERIES_ID)
|
||||
.from(Tables.SERIES_METADATA_SHARING)
|
||||
.where(Tables.SERIES_METADATA_SHARING.LABEL.`in`(labelsExclude)),
|
||||
)
|
||||
else
|
||||
DSL.noCondition()
|
||||
|
||||
return ageAllowed.or(labelAllowed)
|
||||
return ageAllowed
|
||||
.or(labelAllowed)
|
||||
.and(ageDenied.and(labelDenied))
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -52,7 +52,8 @@ class AuthenticationActivityDao(
|
|||
user: KomgaUser,
|
||||
apiKeyId: String?,
|
||||
): AuthenticationActivity? =
|
||||
dsl.selectFrom(aa)
|
||||
dsl
|
||||
.selectFrom(aa)
|
||||
.where(aa.USER_ID.eq(user.id))
|
||||
.or(aa.EMAIL.eq(user.email))
|
||||
.apply { apiKeyId?.let { and(aa.API_KEY_ID.eq(it)) } }
|
||||
|
|
@ -70,7 +71,8 @@ class AuthenticationActivityDao(
|
|||
val orderBy = pageable.sort.toOrderBy(sorts)
|
||||
|
||||
val items =
|
||||
dsl.selectFrom(aa)
|
||||
dsl
|
||||
.selectFrom(aa)
|
||||
.where(conditions)
|
||||
.orderBy(orderBy)
|
||||
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
|
||||
|
|
@ -89,20 +91,23 @@ class AuthenticationActivityDao(
|
|||
}
|
||||
|
||||
override fun insert(activity: AuthenticationActivity) {
|
||||
dsl.insertInto(aa, aa.USER_ID, aa.EMAIL, aa.API_KEY_ID, aa.API_KEY_COMMENT, aa.IP, aa.USER_AGENT, aa.SUCCESS, aa.ERROR, aa.SOURCE)
|
||||
dsl
|
||||
.insertInto(aa, aa.USER_ID, aa.EMAIL, aa.API_KEY_ID, aa.API_KEY_COMMENT, aa.IP, aa.USER_AGENT, aa.SUCCESS, aa.ERROR, aa.SOURCE)
|
||||
.values(activity.userId, activity.email, activity.apiKeyId, activity.apiKeyComment, activity.ip, activity.userAgent, activity.success, activity.error, activity.source)
|
||||
.execute()
|
||||
}
|
||||
|
||||
override fun deleteByUser(user: KomgaUser) {
|
||||
dsl.deleteFrom(aa)
|
||||
dsl
|
||||
.deleteFrom(aa)
|
||||
.where(aa.USER_ID.eq(user.id))
|
||||
.or(aa.EMAIL.eq(user.email))
|
||||
.execute()
|
||||
}
|
||||
|
||||
override fun deleteOlderThan(dateTime: LocalDateTime) {
|
||||
dsl.deleteFrom(aa)
|
||||
dsl
|
||||
.deleteFrom(aa)
|
||||
.where(aa.DATE_TIME.lt(dateTime))
|
||||
.execute()
|
||||
}
|
||||
|
|
|
|||
|
|
@ -41,8 +41,10 @@ class BookCommonDao(
|
|||
.`as`(
|
||||
select(s.ID, rs.MOST_RECENT_READ_DATE)
|
||||
.from(s)
|
||||
.innerJoin(rs).on(s.ID.eq(rs.SERIES_ID).and(rs.USER_ID.eq(userId)))
|
||||
.innerJoin(sd).on(s.ID.eq(sd.SERIES_ID))
|
||||
.innerJoin(rs)
|
||||
.on(s.ID.eq(rs.SERIES_ID).and(rs.USER_ID.eq(userId)))
|
||||
.innerJoin(sd)
|
||||
.on(s.ID.eq(sd.SERIES_ID))
|
||||
.where(rs.IN_PROGRESS_COUNT.eq(0))
|
||||
.and(rs.READ_COUNT.ne(s.BOOK_COUNT))
|
||||
.and(restrictions.toCondition())
|
||||
|
|
@ -60,8 +62,11 @@ class BookCommonDao(
|
|||
cteBooksFieldSeriesId,
|
||||
cteBooksFieldNumberSort,
|
||||
).from(b)
|
||||
.innerJoin(d).on(b.ID.eq(d.BOOK_ID))
|
||||
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(r.USER_ID.eq(userId))
|
||||
.innerJoin(d)
|
||||
.on(b.ID.eq(d.BOOK_ID))
|
||||
.leftJoin(r)
|
||||
.on(b.ID.eq(r.BOOK_ID))
|
||||
.and(r.USER_ID.eq(userId))
|
||||
.where(r.COMPLETED.isNull)
|
||||
.and(
|
||||
b.SERIES_ID.`in`(select(cteSeries.field(s.ID)).from(cteSeries)),
|
||||
|
|
@ -77,28 +82,40 @@ class BookCommonDao(
|
|||
.with(cteBooks)
|
||||
.select(*selectFields)
|
||||
.from(cteSeries)
|
||||
.innerJoin(b1).on(cteSeries.field(s.ID)!!.eq(b1.field(cteBooksFieldSeriesId)))
|
||||
.innerJoin(b1)
|
||||
.on(cteSeries.field(s.ID)!!.eq(b1.field(cteBooksFieldSeriesId)))
|
||||
// we join the cteBooks table on itself, using the grouping ID (seriesId) using a left outer join
|
||||
// it returns the row b1 for which no other row b2 exists with the same seriesId and a smaller numberSort
|
||||
// when b2 is null, it means the left outer join fond no such match, and therefore b1 has the smaller value of numberSort
|
||||
.leftOuterJoin(b2).on(
|
||||
b1.field(cteBooksFieldSeriesId)!!.eq(b2.field(cteBooksFieldSeriesId))
|
||||
.leftOuterJoin(b2)
|
||||
.on(
|
||||
b1
|
||||
.field(cteBooksFieldSeriesId)!!
|
||||
.eq(b2.field(cteBooksFieldSeriesId))
|
||||
.and(
|
||||
b1.field(cteBooksFieldNumberSort)!!.gt(b2.field(cteBooksFieldNumberSort))
|
||||
b1
|
||||
.field(cteBooksFieldNumberSort)!!
|
||||
.gt(b2.field(cteBooksFieldNumberSort))
|
||||
.or(
|
||||
b1.field(cteBooksFieldNumberSort)!!.eq(b2.field(cteBooksFieldNumberSort))
|
||||
b1
|
||||
.field(cteBooksFieldNumberSort)!!
|
||||
.eq(b2.field(cteBooksFieldNumberSort))
|
||||
.and(b1.field(cteBooksFieldBookId)!!.gt(b2.field(cteBooksFieldBookId))),
|
||||
),
|
||||
),
|
||||
)
|
||||
.innerJoin(b).on(b1.field(cteBooksFieldBookId)!!.eq(b.ID))
|
||||
.innerJoin(m).on(b.ID.eq(m.BOOK_ID))
|
||||
.innerJoin(d).on(b.ID.eq(d.BOOK_ID))
|
||||
.innerJoin(sd).on(b.SERIES_ID.eq(sd.SERIES_ID))
|
||||
).innerJoin(b)
|
||||
.on(b1.field(cteBooksFieldBookId)!!.eq(b.ID))
|
||||
.innerJoin(m)
|
||||
.on(b.ID.eq(m.BOOK_ID))
|
||||
.innerJoin(d)
|
||||
.on(b.ID.eq(d.BOOK_ID))
|
||||
.innerJoin(sd)
|
||||
.on(b.SERIES_ID.eq(sd.SERIES_ID))
|
||||
// fetchAndMap expects some values for ReadProgress
|
||||
// On Deck books are by definition unread, thus don't have read progress
|
||||
// we join on the table to keep fetchAndMap, with a false condition to only get null values
|
||||
.leftOuterJoin(r).on(falseCondition())
|
||||
.leftOuterJoin(r)
|
||||
.on(falseCondition())
|
||||
.where(b2.field(cteBooksFieldBookId)!!.isNull)
|
||||
|
||||
val mostRecentReadDateQuery =
|
||||
|
|
|
|||
|
|
@ -47,14 +47,14 @@ class BookDao(
|
|||
"number" to b.NUMBER,
|
||||
)
|
||||
|
||||
override fun findByIdOrNull(bookId: String): Book? =
|
||||
findByIdOrNull(dsl, bookId)
|
||||
override fun findByIdOrNull(bookId: String): Book? = findByIdOrNull(dsl, bookId)
|
||||
|
||||
override fun findNotDeletedByLibraryIdAndUrlOrNull(
|
||||
libraryId: String,
|
||||
url: URL,
|
||||
): Book? =
|
||||
dsl.selectFrom(b)
|
||||
dsl
|
||||
.selectFrom(b)
|
||||
.where(b.LIBRARY_ID.eq(libraryId).and(b.URL.eq(url.toString())))
|
||||
.and(b.DELETED_DATE.isNull)
|
||||
.orderBy(b.LAST_MODIFIED_DATE.desc())
|
||||
|
|
@ -66,19 +66,22 @@ class BookDao(
|
|||
dsl: DSLContext,
|
||||
bookId: String,
|
||||
): Book? =
|
||||
dsl.selectFrom(b)
|
||||
dsl
|
||||
.selectFrom(b)
|
||||
.where(b.ID.eq(bookId))
|
||||
.fetchOneInto(b)
|
||||
?.toDomain()
|
||||
|
||||
override fun findAllBySeriesId(seriesId: String): Collection<Book> =
|
||||
dsl.selectFrom(b)
|
||||
dsl
|
||||
.selectFrom(b)
|
||||
.where(b.SERIES_ID.eq(seriesId))
|
||||
.fetchInto(b)
|
||||
.map { it.toDomain() }
|
||||
|
||||
override fun findAllBySeriesIds(seriesIds: Collection<String>): Collection<Book> =
|
||||
dsl.selectFrom(b)
|
||||
dsl
|
||||
.selectFrom(b)
|
||||
.where(b.SERIES_ID.`in`(seriesIds))
|
||||
.fetchInto(b)
|
||||
.map { it.toDomain() }
|
||||
|
|
@ -90,7 +93,8 @@ class BookDao(
|
|||
): Collection<Book> {
|
||||
dsl.insertTempStrings(batchSize, urls.map { it.toString() })
|
||||
|
||||
return dsl.selectFrom(b)
|
||||
return dsl
|
||||
.selectFrom(b)
|
||||
.where(b.LIBRARY_ID.eq(libraryId))
|
||||
.and(b.DELETED_DATE.isNull)
|
||||
.and(b.URL.notIn(dsl.selectTempStrings()))
|
||||
|
|
@ -99,13 +103,15 @@ class BookDao(
|
|||
}
|
||||
|
||||
override fun findAllDeletedByFileSize(fileSize: Long): Collection<Book> =
|
||||
dsl.selectFrom(b)
|
||||
dsl
|
||||
.selectFrom(b)
|
||||
.where(b.DELETED_DATE.isNotNull.and(b.FILE_SIZE.eq(fileSize)))
|
||||
.fetchInto(b)
|
||||
.map { it.toDomain() }
|
||||
|
||||
override fun findAll(): Collection<Book> =
|
||||
dsl.selectFrom(b)
|
||||
dsl
|
||||
.selectFrom(b)
|
||||
.fetchInto(b)
|
||||
.map { it.toDomain() }
|
||||
|
||||
|
|
@ -124,7 +130,8 @@ class BookDao(
|
|||
pageable: Pageable,
|
||||
): PageImpl<Book> {
|
||||
val count =
|
||||
dsl.selectCount()
|
||||
dsl
|
||||
.selectCount()
|
||||
.from(b)
|
||||
.apply {
|
||||
joins.forEach { join ->
|
||||
|
|
@ -137,14 +144,14 @@ class BookDao(
|
|||
RequiredJoin.BookMetadataAggregation -> Unit
|
||||
}
|
||||
}
|
||||
}
|
||||
.where(conditions)
|
||||
}.where(conditions)
|
||||
.fetchOne(0, Long::class.java) ?: 0
|
||||
|
||||
val orderBy = pageable.sort.toOrderBy(sorts)
|
||||
|
||||
val items =
|
||||
dsl.select(*b.fields())
|
||||
dsl
|
||||
.select(*b.fields())
|
||||
.from(b)
|
||||
.apply {
|
||||
joins.forEach { join ->
|
||||
|
|
@ -157,8 +164,7 @@ class BookDao(
|
|||
RequiredJoin.BookMetadataAggregation -> Unit
|
||||
}
|
||||
}
|
||||
}
|
||||
.where(conditions)
|
||||
}.where(conditions)
|
||||
.orderBy(orderBy)
|
||||
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
|
||||
.fetchInto(b)
|
||||
|
|
@ -176,30 +182,36 @@ class BookDao(
|
|||
}
|
||||
|
||||
override fun getLibraryIdOrNull(bookId: String): String? =
|
||||
dsl.select(b.LIBRARY_ID)
|
||||
dsl
|
||||
.select(b.LIBRARY_ID)
|
||||
.from(b)
|
||||
.where(b.ID.eq(bookId))
|
||||
.fetchOne(b.LIBRARY_ID)
|
||||
|
||||
override fun getSeriesIdOrNull(bookId: String): String? =
|
||||
dsl.select(b.SERIES_ID)
|
||||
dsl
|
||||
.select(b.SERIES_ID)
|
||||
.from(b)
|
||||
.where(b.ID.eq(bookId))
|
||||
.fetchOne(b.SERIES_ID)
|
||||
|
||||
override fun findFirstIdInSeriesOrNull(seriesId: String): String? =
|
||||
dsl.select(b.ID)
|
||||
dsl
|
||||
.select(b.ID)
|
||||
.from(b)
|
||||
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
|
||||
.leftJoin(d)
|
||||
.on(b.ID.eq(d.BOOK_ID))
|
||||
.where(b.SERIES_ID.eq(seriesId))
|
||||
.orderBy(d.NUMBER_SORT)
|
||||
.limit(1)
|
||||
.fetchOne(b.ID)
|
||||
|
||||
override fun findLastIdInSeriesOrNull(seriesId: String): String? =
|
||||
dsl.select(b.ID)
|
||||
dsl
|
||||
.select(b.ID)
|
||||
.from(b)
|
||||
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
|
||||
.leftJoin(d)
|
||||
.on(b.ID.eq(d.BOOK_ID))
|
||||
.where(b.SERIES_ID.eq(seriesId))
|
||||
.orderBy(d.NUMBER_SORT.desc())
|
||||
.limit(1)
|
||||
|
|
@ -209,10 +221,14 @@ class BookDao(
|
|||
seriesId: String,
|
||||
userId: String,
|
||||
): String? =
|
||||
dsl.select(b.ID)
|
||||
dsl
|
||||
.select(b.ID)
|
||||
.from(b)
|
||||
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
|
||||
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(r.USER_ID.eq(userId).or(r.USER_ID.isNull))
|
||||
.leftJoin(d)
|
||||
.on(b.ID.eq(d.BOOK_ID))
|
||||
.leftJoin(r)
|
||||
.on(b.ID.eq(r.BOOK_ID))
|
||||
.and(r.USER_ID.eq(userId).or(r.USER_ID.isNull))
|
||||
.where(b.SERIES_ID.eq(seriesId))
|
||||
.and(r.COMPLETED.isNull.or(r.COMPLETED.isFalse))
|
||||
.orderBy(d.NUMBER_SORT)
|
||||
|
|
@ -220,27 +236,30 @@ class BookDao(
|
|||
.fetchOne(b.ID)
|
||||
|
||||
override fun findAllIdsBySeriesId(seriesId: String): Collection<String> =
dsl.select(b.ID)
dsl
.select(b.ID)
.from(b)
.where(b.SERIES_ID.eq(seriesId))
.fetch(b.ID)

override fun findAllIdsByLibraryId(libraryId: String): Collection<String> =
dsl.select(b.ID)
dsl
.select(b.ID)
.from(b)
.where(b.LIBRARY_ID.eq(libraryId))
.fetch(b.ID)

override fun existsById(bookId: String): Boolean =
dsl.fetchExists(b, b.ID.eq(bookId))
override fun existsById(bookId: String): Boolean = dsl.fetchExists(b, b.ID.eq(bookId))

override fun findAllByLibraryIdAndMediaTypes(
libraryId: String,
mediaTypes: Collection<String>,
): Collection<Book> =
dsl.select(*b.fields())
dsl
.select(*b.fields())
.from(b)
.leftJoin(m).on(b.ID.eq(m.BOOK_ID))
.leftJoin(m)
.on(b.ID.eq(m.BOOK_ID))
.where(b.LIBRARY_ID.eq(libraryId))
.and(m.MEDIA_TYPE.`in`(mediaTypes))
.fetchInto(b)
@@ -251,9 +270,11 @@ class BookDao(
mediaType: String,
extension: String,
): Collection<Book> =
dsl.select(*b.fields())
dsl
.select(*b.fields())
.from(b)
.leftJoin(m).on(b.ID.eq(m.BOOK_ID))
.leftJoin(m)
.on(b.ID.eq(m.BOOK_ID))
.where(b.LIBRARY_ID.eq(libraryId))
.and(m.MEDIA_TYPE.eq(mediaType))
.and(b.URL.notLike("%.$extension"))
@@ -261,7 +282,8 @@ class BookDao(
.map { it.toDomain() }

override fun findAllByLibraryIdAndWithEmptyHash(libraryId: String): Collection<Book> =
dsl.selectFrom(b)
dsl
.selectFrom(b)
.where(b.LIBRARY_ID.eq(libraryId))
.and(b.FILE_HASH.eq(""))
.fetchInto(b)
@@ -276,38 +298,40 @@ class BookDao(
override fun insert(books: Collection<Book>) {
if (books.isNotEmpty()) {
books.chunked(batchSize).forEach { chunk ->
dsl.batch(
dsl.insertInto(
b,
b.ID,
b.NAME,
b.URL,
b.NUMBER,
b.FILE_LAST_MODIFIED,
b.FILE_SIZE,
b.FILE_HASH,
b.LIBRARY_ID,
b.SERIES_ID,
b.DELETED_DATE,
b.ONESHOT,
).values(null as String?, null, null, null, null, null, null, null, null, null, null),
).also { step ->
chunk.forEach {
step.bind(
it.id,
it.name,
it.url,
it.number,
it.fileLastModified,
it.fileSize,
it.fileHash,
it.libraryId,
it.seriesId,
it.deletedDate,
it.oneshot,
)
}
}.execute()
dsl
.batch(
dsl
.insertInto(
b,
b.ID,
b.NAME,
b.URL,
b.NUMBER,
b.FILE_LAST_MODIFIED,
b.FILE_SIZE,
b.FILE_HASH,
b.LIBRARY_ID,
b.SERIES_ID,
b.DELETED_DATE,
b.ONESHOT,
).values(null as String?, null, null, null, null, null, null, null, null, null, null),
).also { step ->
chunk.forEach {
step.bind(
it.id,
it.name,
it.url,
it.number,
it.fileLastModified,
it.fileSize,
it.fileHash,
it.libraryId,
it.seriesId,
it.deletedDate,
it.oneshot,
)
}
}.execute()
}
}
}
@@ -323,7 +347,8 @@ class BookDao(
}

private fun updateBook(book: Book) {
dsl.update(b)
dsl
.update(b)
.set(b.NAME, book.name)
.set(b.URL, book.url.toString())
.set(b.NUMBER, book.number)
@@ -357,13 +382,15 @@ class BookDao(
override fun count(): Long = dsl.fetchCount(b).toLong()

override fun countGroupedByLibraryId(): Map<String, Int> =
dsl.select(b.LIBRARY_ID, DSL.count(b.ID))
dsl
.select(b.LIBRARY_ID, DSL.count(b.ID))
.from(b)
.groupBy(b.LIBRARY_ID)
.fetchMap(b.LIBRARY_ID, DSL.count(b.ID))

override fun getFilesizeGroupedByLibraryId(): Map<String, BigDecimal> =
dsl.select(b.LIBRARY_ID, DSL.sum(b.FILE_SIZE))
dsl
.select(b.LIBRARY_ID, DSL.sum(b.FILE_SIZE))
.from(b)
.groupBy(b.LIBRARY_ID)
.fetchMap(b.LIBRARY_ID, DSL.sum(b.FILE_SIZE))

@@ -90,8 +90,7 @@ class BookDtoDao(
"readList.number" to rlb.NUMBER,
)

override fun findAll(pageable: Pageable): Page<BookDto> =
findAll(BookSearch(), SearchContext.ofAnonymousUser(), pageable)
override fun findAll(pageable: Pageable): Page<BookDto> = findAll(BookSearch(), SearchContext.ofAnonymousUser(), pageable)

override fun findAll(
context: SearchContext,
@@ -141,12 +140,18 @@ class BookDtoDao(

val count =
dsl.fetchCount(
dsl.select(b.ID)
dsl
.select(b.ID)
.from(b)
.leftJoin(m).on(b.ID.eq(m.BOOK_ID))
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
.leftJoin(sd).on(b.SERIES_ID.eq(sd.SERIES_ID))
.leftJoin(m)
.on(b.ID.eq(m.BOOK_ID))
.leftJoin(d)
.on(b.ID.eq(d.BOOK_ID))
.leftJoin(r)
.on(b.ID.eq(r.BOOK_ID))
.and(readProgressCondition(userId))
.leftJoin(sd)
.on(b.SERIES_ID.eq(sd.SERIES_ID))
.apply {
joins.forEach { join ->
when (join) {
@@ -159,8 +164,7 @@ class BookDtoDao(
RequiredJoin.SeriesMetadata -> Unit
}
}
}
.where(conditions)
}.where(conditions)
.and(searchCondition)
.groupBy(b.ID),
)
@@ -199,14 +203,12 @@ class BookDtoDao(
override fun findPreviousInSeriesOrNull(
bookId: String,
userId: String,
): BookDto? =
findSiblingSeries(bookId, userId, next = false)
): BookDto? = findSiblingSeries(bookId, userId, next = false)

override fun findNextInSeriesOrNull(
bookId: String,
userId: String,
): BookDto? =
findSiblingSeries(bookId, userId, next = true)
): BookDto? = findSiblingSeries(bookId, userId, next = true)

override fun findPreviousInReadListOrNull(
readList: ReadList,
@@ -214,8 +216,7 @@ class BookDtoDao(
userId: String,
filterOnLibraryIds: Collection<String>?,
restrictions: ContentRestrictions,
): BookDto? =
findSiblingReadList(readList, bookId, userId, filterOnLibraryIds, restrictions, next = false)
): BookDto? = findSiblingReadList(readList, bookId, userId, filterOnLibraryIds, restrictions, next = false)

override fun findNextInReadListOrNull(
readList: ReadList,
@@ -223,8 +224,7 @@ class BookDtoDao(
userId: String,
filterOnLibraryIds: Collection<String>?,
restrictions: ContentRestrictions,
): BookDto? =
findSiblingReadList(readList, bookId, userId, filterOnLibraryIds, restrictions, next = true)
): BookDto? = findSiblingReadList(readList, bookId, userId, filterOnLibraryIds, restrictions, next = true)

override fun findAllOnDeck(
userId: String,
@@ -256,7 +256,8 @@ class BookDtoDao(
pageable: Pageable,
): Page<BookDto> {
val hashes =
dsl.select(b.FILE_HASH, DSL.count(b.ID))
dsl
.select(b.FILE_HASH, DSL.count(b.ID))
.from(b)
.where(b.FILE_HASH.ne(""))
.groupBy(b.FILE_HASH, b.FILE_SIZE)
@@ -293,9 +294,11 @@ class BookDtoDao(
next: Boolean,
): BookDto? {
val record =
dsl.select(b.SERIES_ID, d.NUMBER_SORT)
dsl
.select(b.SERIES_ID, d.NUMBER_SORT)
.from(b)
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
.leftJoin(d)
.on(b.ID.eq(d.BOOK_ID))
.where(b.ID.eq(bookId))
.fetchOne()!!
val seriesId = record.get(0, String::class.java)
@@ -320,9 +323,11 @@ class BookDtoDao(
): BookDto? {
if (readList.ordered) {
val numberSort =
dsl.select(rlb.NUMBER)
dsl
.select(rlb.NUMBER)
.from(b)
.leftJoin(rlb).on(b.ID.eq(rlb.BOOK_ID))
.leftJoin(rlb)
.on(b.ID.eq(rlb.BOOK_ID))
.where(b.ID.eq(bookId))
.and(rlb.READLIST_ID.eq(readList.id))
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
@@ -341,10 +346,13 @@ class BookDtoDao(
// it is too complex to perform a seek by release date as it could be null and could also have multiple occurrences of the same value
// instead we pull the whole list of ids, and perform the seek on the list
val bookIds =
dsl.select(b.ID)
dsl
.select(b.ID)
.from(b)
.leftJoin(rlb).on(b.ID.eq(rlb.BOOK_ID))
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
.leftJoin(rlb)
.on(b.ID.eq(rlb.BOOK_ID))
.leftJoin(d)
.on(b.ID.eq(d.BOOK_ID))
.apply { if (restrictions.isRestricted) leftJoin(sd).on(sd.SERIES_ID.eq(b.SERIES_ID)) }
.where(rlb.READLIST_ID.eq(readList.id))
.apply { if (restrictions.isRestricted) and(restrictions.toCondition()) }
@@ -382,10 +390,15 @@ class BookDtoDao(
return dsl
.let { if (joinOnReadList) it.selectDistinct(selectFields) else it.select(selectFields) }
.from(b)
.leftJoin(m).on(b.ID.eq(m.BOOK_ID))
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
.leftJoin(sd).on(b.SERIES_ID.eq(sd.SERIES_ID))
.leftJoin(m)
.on(b.ID.eq(m.BOOK_ID))
.leftJoin(d)
.on(b.ID.eq(d.BOOK_ID))
.leftJoin(r)
.on(b.ID.eq(r.BOOK_ID))
.and(readProgressCondition(userId))
.leftJoin(sd)
.on(b.SERIES_ID.eq(sd.SERIES_ID))
.apply {
if (joinOnReadList) leftJoin(rlb).on(b.ID.eq(rlb.BOOK_ID))
joins.forEach { join ->
@@ -412,18 +425,21 @@ class BookDtoDao(
transactionTemplate.executeWithoutResult {
dsl.insertTempStrings(batchSize, bookIds)
authors =
dsl.selectFrom(a)
dsl
.selectFrom(a)
.where(a.BOOK_ID.`in`(dsl.selectTempStrings()))
.filter { it.name != null }
.groupBy({ it.bookId }, { AuthorDto(it.name, it.role) })

tags =
dsl.selectFrom(bt)
dsl
.selectFrom(bt)
.where(bt.BOOK_ID.`in`(dsl.selectTempStrings()))
.groupBy({ it.bookId }, { it.tag })

links =
dsl.selectFrom(bl)
dsl
.selectFrom(bl)
.where(bl.BOOK_ID.`in`(dsl.selectTempStrings()))
.groupBy({ it.bookId }, { WebLinkDto(it.label, it.url) })
}
@@ -445,26 +461,25 @@ class BookDtoDao(
metadata: BookMetadataDto,
readProgress: ReadProgressDto?,
seriesTitle: String,
) =
BookDto(
id = id,
seriesId = seriesId,
seriesTitle = seriesTitle,
libraryId = libraryId,
name = name,
url = URL(url).toFilePath(),
number = number,
created = createdDate,
lastModified = lastModifiedDate,
fileLastModified = fileLastModified.toUTC(),
sizeBytes = fileSize,
media = media,
metadata = metadata,
readProgress = readProgress,
deleted = deletedDate != null,
fileHash = fileHash,
oneshot = oneshot,
)
) = BookDto(
id = id,
seriesId = seriesId,
seriesTitle = seriesTitle,
libraryId = libraryId,
name = name,
url = URL(url).toFilePath(),
number = number,
created = createdDate,
lastModified = lastModifiedDate,
fileLastModified = fileLastModified.toUTC(),
sizeBytes = fileSize,
media = media,
metadata = metadata,
readProgress = readProgress,
deleted = deletedDate != null,
fileHash = fileHash,
oneshot = oneshot,
)

private fun MediaRecord.toDto() =
MediaDto(
@@ -480,29 +495,28 @@ class BookDtoDao(
authors: List<AuthorDto>,
tags: Set<String>,
links: List<WebLinkDto>,
) =
BookMetadataDto(
title = title,
titleLock = titleLock,
summary = summary,
summaryLock = summaryLock,
number = number,
numberLock = numberLock,
numberSort = numberSort,
numberSortLock = numberSortLock,
releaseDate = releaseDate,
releaseDateLock = releaseDateLock,
authors = authors,
authorsLock = authorsLock,
tags = tags,
tagsLock = tagsLock,
isbn = isbn,
isbnLock = isbnLock,
links = links,
linksLock = linksLock,
created = createdDate,
lastModified = lastModifiedDate,
)
) = BookMetadataDto(
title = title,
titleLock = titleLock,
summary = summary,
summaryLock = summaryLock,
number = number,
numberLock = numberLock,
numberSort = numberSort,
numberSortLock = numberSortLock,
releaseDate = releaseDate,
releaseDateLock = releaseDateLock,
authors = authors,
authorsLock = authorsLock,
tags = tags,
tagsLock = tagsLock,
isbn = isbn,
isbnLock = isbnLock,
links = links,
linksLock = linksLock,
created = createdDate,
lastModified = lastModifiedDate,
)

private fun ReadProgressRecord.toDto() =
ReadProgressDto(

@@ -25,16 +25,16 @@ class BookMetadataAggregationDao(
private val a = Tables.BOOK_METADATA_AGGREGATION_AUTHOR
private val t = Tables.BOOK_METADATA_AGGREGATION_TAG

override fun findById(seriesId: String): BookMetadataAggregation =
findOne(listOf(seriesId)).first()
override fun findById(seriesId: String): BookMetadataAggregation = findOne(listOf(seriesId)).first()

override fun findByIdOrNull(seriesId: String): BookMetadataAggregation? =
findOne(listOf(seriesId)).firstOrNull()
override fun findByIdOrNull(seriesId: String): BookMetadataAggregation? = findOne(listOf(seriesId)).firstOrNull()

private fun findOne(seriesIds: Collection<String>) =
dsl.select(*d.fields(), *a.fields())
dsl
.select(*d.fields(), *a.fields())
.from(d)
.leftJoin(a).on(d.SERIES_ID.eq(a.SERIES_ID))
.leftJoin(a)
.on(d.SERIES_ID.eq(a.SERIES_ID))
.where(d.SERIES_ID.`in`(seriesIds))
.fetchGroups(
{ it.into(d) },
@@ -44,14 +44,16 @@ class BookMetadataAggregationDao(
}

private fun findTags(seriesId: String) =
dsl.select(t.TAG)
dsl
.select(t.TAG)
.from(t)
.where(t.SERIES_ID.eq(seriesId))
.fetchSet(t.TAG)

@Transactional
override fun insert(metadata: BookMetadataAggregation) {
dsl.insertInto(d)
dsl
.insertInto(d)
.set(d.SERIES_ID, metadata.seriesId)
.set(d.RELEASE_DATE, metadata.releaseDate)
.set(d.SUMMARY, metadata.summary)
@@ -64,7 +66,8 @@ class BookMetadataAggregationDao(

@Transactional
override fun update(metadata: BookMetadataAggregation) {
dsl.update(d)
dsl
.update(d)
.set(d.SUMMARY, metadata.summary)
.set(d.SUMMARY_NUMBER, metadata.summaryNumber)
.set(d.RELEASE_DATE, metadata.releaseDate)
@@ -72,11 +75,13 @@ class BookMetadataAggregationDao(
.where(d.SERIES_ID.eq(metadata.seriesId))
.execute()

dsl.deleteFrom(a)
dsl
.deleteFrom(a)
.where(a.SERIES_ID.eq(metadata.seriesId))
.execute()

dsl.deleteFrom(t)
dsl
.deleteFrom(t)
.where(t.SERIES_ID.eq(metadata.seriesId))
.execute()

@@ -87,14 +92,16 @@ class BookMetadataAggregationDao(
private fun insertAuthors(metadata: BookMetadataAggregation) {
if (metadata.authors.isNotEmpty()) {
metadata.authors.chunked(batchSize).forEach { chunk ->
dsl.batch(
dsl.insertInto(a, a.SERIES_ID, a.NAME, a.ROLE)
.values(null as String?, null, null),
).also { step ->
chunk.forEach {
step.bind(metadata.seriesId, it.name, it.role)
}
}.execute()
dsl
.batch(
dsl
.insertInto(a, a.SERIES_ID, a.NAME, a.ROLE)
.values(null as String?, null, null),
).also { step ->
chunk.forEach {
step.bind(metadata.seriesId, it.name, it.role)
}
}.execute()
}
}
}
@@ -102,14 +109,16 @@ class BookMetadataAggregationDao(
private fun insertTags(metadata: BookMetadataAggregation) {
if (metadata.tags.isNotEmpty()) {
metadata.tags.chunked(batchSize).forEach { chunk ->
dsl.batch(
dsl.insertInto(t, t.SERIES_ID, t.TAG)
.values(null as String?, null),
).also { step ->
chunk.forEach {
step.bind(metadata.seriesId, it)
}
}.execute()
dsl
.batch(
dsl
.insertInto(t, t.SERIES_ID, t.TAG)
.values(null as String?, null),
).also { step ->
chunk.forEach {
step.bind(metadata.seriesId, it)
}
}.execute()
}
}
}
@@ -135,17 +144,16 @@ class BookMetadataAggregationDao(
private fun BookMetadataAggregationRecord.toDomain(
authors: List<Author>,
tags: Set<String>,
) =
BookMetadataAggregation(
authors = authors,
tags = tags,
releaseDate = releaseDate,
summary = summary,
summaryNumber = summaryNumber,
seriesId = seriesId,
createdDate = createdDate.toCurrentTimeZone(),
lastModifiedDate = lastModifiedDate.toCurrentTimeZone(),
)
) = BookMetadataAggregation(
authors = authors,
tags = tags,
releaseDate = releaseDate,
summary = summary,
summaryNumber = summaryNumber,
seriesId = seriesId,
createdDate = createdDate.toCurrentTimeZone(),
lastModifiedDate = lastModifiedDate.toCurrentTimeZone(),
)

private fun BookMetadataAggregationAuthorRecord.toDomain() =
Author(

@ -30,39 +30,39 @@ class BookMetadataDao(
|
|||
|
||||
private val groupFields = arrayOf(*d.fields(), *a.fields())
|
||||
|
||||
override fun findById(bookId: String): BookMetadata =
|
||||
find(dsl, listOf(bookId)).first()
|
||||
override fun findById(bookId: String): BookMetadata = find(dsl, listOf(bookId)).first()
|
||||
|
||||
override fun findByIdOrNull(bookId: String): BookMetadata? =
|
||||
find(dsl, listOf(bookId)).firstOrNull()
|
||||
override fun findByIdOrNull(bookId: String): BookMetadata? = find(dsl, listOf(bookId)).firstOrNull()
|
||||
|
||||
override fun findAllByIds(bookIds: Collection<String>): Collection<BookMetadata> =
|
||||
find(dsl, bookIds)
|
||||
override fun findAllByIds(bookIds: Collection<String>): Collection<BookMetadata> = find(dsl, bookIds)
|
||||
|
||||
private fun find(
|
||||
dsl: DSLContext,
|
||||
bookIds: Collection<String>,
|
||||
) =
|
||||
dsl.select(*groupFields)
|
||||
.from(d)
|
||||
.leftJoin(a).on(d.BOOK_ID.eq(a.BOOK_ID))
|
||||
.where(d.BOOK_ID.`in`(bookIds))
|
||||
.groupBy(*groupFields)
|
||||
.fetchGroups(
|
||||
{ it.into(d) },
|
||||
{ it.into(a) },
|
||||
).map { (dr, ar) ->
|
||||
dr.toDomain(ar.filterNot { it.name == null }.map { it.toDomain() }, findTags(dr.bookId), findLinks(dr.bookId))
|
||||
}
|
||||
) = dsl
|
||||
.select(*groupFields)
|
||||
.from(d)
|
||||
.leftJoin(a)
|
||||
.on(d.BOOK_ID.eq(a.BOOK_ID))
|
||||
.where(d.BOOK_ID.`in`(bookIds))
|
||||
.groupBy(*groupFields)
|
||||
.fetchGroups(
|
||||
{ it.into(d) },
|
||||
{ it.into(a) },
|
||||
).map { (dr, ar) ->
|
||||
dr.toDomain(ar.filterNot { it.name == null }.map { it.toDomain() }, findTags(dr.bookId), findLinks(dr.bookId))
|
||||
}
|
||||
|
||||
private fun findTags(bookId: String) =
|
||||
dsl.select(bt.TAG)
|
||||
dsl
|
||||
.select(bt.TAG)
|
||||
.from(bt)
|
||||
.where(bt.BOOK_ID.eq(bookId))
|
||||
.fetchSet(bt.TAG)
|
||||
|
||||
private fun findLinks(bookId: String) =
|
||||
dsl.select(bl.LABEL, bl.URL)
|
||||
dsl
|
||||
.select(bl.LABEL, bl.URL)
|
||||
.from(bl)
|
||||
.where(bl.BOOK_ID.eq(bookId))
|
||||
.fetchInto(bl)
|
||||
|
|
@ -77,48 +77,50 @@ class BookMetadataDao(
|
|||
override fun insert(metadatas: Collection<BookMetadata>) {
|
||||
if (metadatas.isNotEmpty()) {
|
||||
metadatas.chunked(batchSize).forEach { chunk ->
|
||||
dsl.batch(
|
||||
dsl.insertInto(
|
||||
d,
|
||||
d.BOOK_ID,
|
||||
d.TITLE,
|
||||
d.TITLE_LOCK,
|
||||
d.SUMMARY,
|
||||
d.SUMMARY_LOCK,
|
||||
d.NUMBER,
|
||||
d.NUMBER_LOCK,
|
||||
d.NUMBER_SORT,
|
||||
d.NUMBER_SORT_LOCK,
|
||||
d.RELEASE_DATE,
|
||||
d.RELEASE_DATE_LOCK,
|
||||
d.AUTHORS_LOCK,
|
||||
d.TAGS_LOCK,
|
||||
d.ISBN,
|
||||
d.ISBN_LOCK,
|
||||
d.LINKS_LOCK,
|
||||
).values(null as String?, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(
|
||||
it.bookId,
|
||||
it.title,
|
||||
it.titleLock,
|
||||
it.summary,
|
||||
it.summaryLock,
|
||||
it.number,
|
||||
it.numberLock,
|
||||
it.numberSort,
|
||||
it.numberSortLock,
|
||||
it.releaseDate,
|
||||
it.releaseDateLock,
|
||||
it.authorsLock,
|
||||
it.tagsLock,
|
||||
it.isbn,
|
||||
it.isbnLock,
|
||||
it.linksLock,
|
||||
)
|
||||
}
|
||||
}.execute()
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(
|
||||
d,
|
||||
d.BOOK_ID,
|
||||
d.TITLE,
|
||||
d.TITLE_LOCK,
|
||||
d.SUMMARY,
|
||||
d.SUMMARY_LOCK,
|
||||
d.NUMBER,
|
||||
d.NUMBER_LOCK,
|
||||
d.NUMBER_SORT,
|
||||
d.NUMBER_SORT_LOCK,
|
||||
d.RELEASE_DATE,
|
||||
d.RELEASE_DATE_LOCK,
|
||||
d.AUTHORS_LOCK,
|
||||
d.TAGS_LOCK,
|
||||
d.ISBN,
|
||||
d.ISBN_LOCK,
|
||||
d.LINKS_LOCK,
|
||||
).values(null as String?, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(
|
||||
it.bookId,
|
||||
it.title,
|
||||
it.titleLock,
|
||||
it.summary,
|
||||
it.summaryLock,
|
||||
it.number,
|
||||
it.numberLock,
|
||||
it.numberSort,
|
||||
it.numberSortLock,
|
||||
it.releaseDate,
|
||||
it.releaseDateLock,
|
||||
it.authorsLock,
|
||||
it.tagsLock,
|
||||
it.isbn,
|
||||
it.isbnLock,
|
||||
it.linksLock,
|
||||
)
|
||||
}
|
||||
}.execute()
|
||||
}
|
||||
|
||||
insertAuthors(metadatas)
|
||||
|
|
@ -138,7 +140,8 @@ class BookMetadataDao(
|
|||
}
|
||||
|
||||
private fun updateMetadata(metadata: BookMetadata) {
|
||||
dsl.update(d)
|
||||
dsl
|
||||
.update(d)
|
||||
.set(d.TITLE, metadata.title)
|
||||
.set(d.TITLE_LOCK, metadata.titleLock)
|
||||
.set(d.SUMMARY, metadata.summary)
|
||||
|
|
@ -158,13 +161,16 @@ class BookMetadataDao(
|
|||
.where(d.BOOK_ID.eq(metadata.bookId))
|
||||
.execute()
|
||||
|
||||
dsl.deleteFrom(a)
|
||||
dsl
|
||||
.deleteFrom(a)
|
||||
.where(a.BOOK_ID.eq(metadata.bookId))
|
||||
.execute()
|
||||
dsl.deleteFrom(bt)
|
||||
dsl
|
||||
.deleteFrom(bt)
|
||||
.where(bt.BOOK_ID.eq(metadata.bookId))
|
||||
.execute()
|
||||
dsl.deleteFrom(bl)
|
||||
dsl
|
||||
.deleteFrom(bl)
|
||||
.where(bl.BOOK_ID.eq(metadata.bookId))
|
||||
.execute()
|
||||
|
||||
|
|
@ -176,16 +182,18 @@ class BookMetadataDao(
|
|||
private fun insertAuthors(metadatas: Collection<BookMetadata>) {
|
||||
if (metadatas.any { it.authors.isNotEmpty() }) {
|
||||
metadatas.chunked(batchSize).forEach { chunk ->
|
||||
dsl.batch(
|
||||
dsl.insertInto(a, a.BOOK_ID, a.NAME, a.ROLE)
|
||||
.values(null as String?, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach { metadata ->
|
||||
metadata.authors.forEach {
|
||||
step.bind(metadata.bookId, it.name, it.role)
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(a, a.BOOK_ID, a.NAME, a.ROLE)
|
||||
.values(null as String?, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach { metadata ->
|
||||
metadata.authors.forEach {
|
||||
step.bind(metadata.bookId, it.name, it.role)
|
||||
}
|
||||
}
|
||||
}
|
||||
}.execute()
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -193,16 +201,18 @@ class BookMetadataDao(
|
|||
private fun insertTags(metadatas: Collection<BookMetadata>) {
|
||||
if (metadatas.any { it.tags.isNotEmpty() }) {
|
||||
metadatas.chunked(batchSize).forEach { chunk ->
|
||||
dsl.batch(
|
||||
dsl.insertInto(bt, bt.BOOK_ID, bt.TAG)
|
||||
.values(null as String?, null),
|
||||
).also { step ->
|
||||
chunk.forEach { metadata ->
|
||||
metadata.tags.forEach {
|
||||
step.bind(metadata.bookId, it)
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(bt, bt.BOOK_ID, bt.TAG)
|
||||
.values(null as String?, null),
|
||||
).also { step ->
|
||||
chunk.forEach { metadata ->
|
||||
metadata.tags.forEach {
|
||||
step.bind(metadata.bookId, it)
|
||||
}
|
||||
}
|
||||
}
|
||||
}.execute()
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -210,16 +220,18 @@ class BookMetadataDao(
|
|||
private fun insertLinks(metadatas: Collection<BookMetadata>) {
|
||||
if (metadatas.any { it.links.isNotEmpty() }) {
|
||||
metadatas.chunked(batchSize).forEach { chunk ->
|
||||
dsl.batch(
|
||||
dsl.insertInto(bl, bl.BOOK_ID, bl.LABEL, bl.URL)
|
||||
.values(null as String?, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach { metadata ->
|
||||
metadata.links.forEach {
|
||||
step.bind(metadata.bookId, it.label, it.url.toString())
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(bl, bl.BOOK_ID, bl.LABEL, bl.URL)
|
||||
.values(null as String?, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach { metadata ->
|
||||
metadata.links.forEach {
|
||||
step.bind(metadata.bookId, it.label, it.url.toString())
|
||||
}
|
||||
}
|
||||
}
|
||||
}.execute()
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -248,30 +260,29 @@ class BookMetadataDao(
|
|||
authors: List<Author>,
|
||||
tags: Set<String>,
|
||||
links: List<WebLink>,
|
||||
) =
|
||||
BookMetadata(
|
||||
title = title,
|
||||
summary = summary,
|
||||
number = number,
|
||||
numberSort = numberSort,
|
||||
releaseDate = releaseDate,
|
||||
authors = authors,
|
||||
tags = tags,
|
||||
isbn = isbn,
|
||||
links = links,
|
||||
bookId = bookId,
|
||||
createdDate = createdDate.toCurrentTimeZone(),
|
||||
lastModifiedDate = lastModifiedDate.toCurrentTimeZone(),
|
||||
titleLock = titleLock,
|
||||
summaryLock = summaryLock,
|
||||
numberLock = numberLock,
|
||||
numberSortLock = numberSortLock,
|
||||
releaseDateLock = releaseDateLock,
|
||||
authorsLock = authorsLock,
|
||||
tagsLock = tagsLock,
|
||||
isbnLock = isbnLock,
|
||||
linksLock = linksLock,
|
||||
)
|
||||
) = BookMetadata(
|
||||
title = title,
|
||||
summary = summary,
|
||||
number = number,
|
||||
numberSort = numberSort,
|
||||
releaseDate = releaseDate,
|
||||
authors = authors,
|
||||
tags = tags,
|
||||
isbn = isbn,
|
||||
links = links,
|
||||
bookId = bookId,
|
||||
createdDate = createdDate.toCurrentTimeZone(),
|
||||
lastModifiedDate = lastModifiedDate.toCurrentTimeZone(),
|
||||
titleLock = titleLock,
|
||||
summaryLock = summaryLock,
|
||||
numberLock = numberLock,
|
||||
numberSortLock = numberSortLock,
|
||||
releaseDateLock = releaseDateLock,
|
||||
authorsLock = authorsLock,
|
||||
tagsLock = tagsLock,
|
||||
isbnLock = isbnLock,
|
||||
linksLock = linksLock,
|
||||
)
|
||||
|
||||
private fun BookMetadataAuthorRecord.toDomain() =
|
||||
Author(
|
||||
|
|
|
|||
|
|
@@ -16,7 +16,8 @@ class HistoricalEventDao(

@Transactional
override fun insert(event: HistoricalEvent) {
dsl.insertInto(e)
dsl
.insertInto(e)
.set(e.ID, event.id)
.set(e.TYPE, event.type)
.set(e.BOOK_ID, event.bookId)
@@ -25,14 +26,16 @@ class HistoricalEventDao(
.execute()

if (event.properties.isNotEmpty()) {
dsl.batch(
dsl.insertInto(ep, ep.ID, ep.KEY, ep.VALUE)
.values(null as String?, null, null),
).also { step ->
event.properties.forEach { (key, value) ->
step.bind(event.id, key, value)
}
}.execute()
dsl
.batch(
dsl
.insertInto(ep, ep.ID, ep.KEY, ep.VALUE)
.values(null as String?, null, null),
).also { step ->
event.properties.forEach { (key, value) ->
step.bind(event.id, key, value)
}
}.execute()
}
}
}

@@ -33,7 +33,8 @@ class HistoricalEventDtoDao(
val orderBy = pageable.sort.toOrderBy(sorts)

val items =
dsl.selectFrom(e)
dsl
.selectFrom(e)
.orderBy(orderBy)
.apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
.map { er ->

@ -29,30 +29,36 @@ class KoboDtoDao(
|
|||
bookIds: Collection<String>,
|
||||
): Collection<KoboBookMetadataDto> {
|
||||
val records =
|
||||
dsl.select(
|
||||
d.BOOK_ID,
|
||||
d.TITLE,
|
||||
d.NUMBER,
|
||||
d.NUMBER_SORT,
|
||||
d.ISBN,
|
||||
d.SUMMARY,
|
||||
d.RELEASE_DATE,
|
||||
d.CREATED_DATE,
|
||||
sd.SERIES_ID,
|
||||
sd.TITLE,
|
||||
sd.PUBLISHER,
|
||||
sd.LANGUAGE,
|
||||
b.FILE_SIZE,
|
||||
b.ONESHOT,
|
||||
m.EPUB_IS_KEPUB,
|
||||
m.EXTENSION_CLASS,
|
||||
m.EXTENSION_VALUE_BLOB,
|
||||
bt.ID,
|
||||
).from(b)
|
||||
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
|
||||
.leftJoin(sd).on(b.SERIES_ID.eq(sd.SERIES_ID))
|
||||
.leftJoin(m).on(b.ID.eq(m.BOOK_ID))
|
||||
.leftJoin(bt).on(b.ID.eq(bt.BOOK_ID)).and(bt.SELECTED.isTrue)
|
||||
dsl
|
||||
.select(
|
||||
d.BOOK_ID,
|
||||
d.TITLE,
|
||||
d.NUMBER,
|
||||
d.NUMBER_SORT,
|
||||
d.ISBN,
|
||||
d.SUMMARY,
|
||||
d.RELEASE_DATE,
|
||||
d.CREATED_DATE,
|
||||
sd.SERIES_ID,
|
||||
sd.TITLE,
|
||||
sd.PUBLISHER,
|
||||
sd.LANGUAGE,
|
||||
b.FILE_SIZE,
|
||||
b.ONESHOT,
|
||||
m.EPUB_IS_KEPUB,
|
||||
m.EXTENSION_CLASS,
|
||||
m.EXTENSION_VALUE_BLOB,
|
||||
bt.ID,
|
||||
).from(b)
|
||||
.leftJoin(d)
|
||||
.on(b.ID.eq(d.BOOK_ID))
|
||||
.leftJoin(sd)
|
||||
.on(b.SERIES_ID.eq(sd.SERIES_ID))
|
||||
.leftJoin(m)
|
||||
.on(b.ID.eq(m.BOOK_ID))
|
||||
.leftJoin(bt)
|
||||
.on(b.ID.eq(bt.BOOK_ID))
|
||||
.and(bt.SELECTED.isTrue)
|
||||
.where(d.BOOK_ID.`in`(bookIds))
|
||||
.fetch()
|
||||
|
||||
|
|
@ -65,7 +71,8 @@ class KoboDtoDao(
|
|||
val mediaExtension = mapper.deserializeMediaExtension(mr.extensionClass, mr.extensionValueBlob) as? MediaExtensionEpub
|
||||
|
||||
val authors =
|
||||
dsl.selectFrom(a)
|
||||
dsl
|
||||
.selectFrom(a)
|
||||
.where(a.BOOK_ID.`in`(bookIds))
|
||||
.filter { it.name != null }
|
||||
.groupBy({ it.bookId }, { it })
|
||||
|
|
|
|||
|
|
@ -35,7 +35,8 @@ class KomgaUserDao(
|
|||
.fetchAndMap()
|
||||
|
||||
override fun findApiKeyByUserId(userId: String): Collection<ApiKey> =
|
||||
dsl.selectFrom(uak)
|
||||
dsl
|
||||
.selectFrom(uak)
|
||||
.where(uak.USER_ID.eq(userId))
|
||||
.fetchInto(uak)
|
||||
.map {
|
||||
|
|
@ -53,13 +54,16 @@ class KomgaUserDao(
|
|||
.select(*u.fields())
|
||||
.select(ul.LIBRARY_ID)
|
||||
.from(u)
|
||||
.leftJoin(ul).onKey()
|
||||
.leftJoin(ul)
|
||||
.onKey()
|
||||
|
||||
private fun ResultQuery<Record>.fetchAndMap() =
|
||||
this.fetchGroups({ it.into(u) }, { it.into(ul) })
|
||||
this
|
||||
.fetchGroups({ it.into(u) }, { it.into(ul) })
|
||||
.map { (ur, ulr) ->
|
||||
val usr =
|
||||
dsl.selectFrom(us)
|
||||
dsl
|
||||
.selectFrom(us)
|
||||
.where(us.USER_ID.eq(ur.id))
|
||||
.toList()
|
||||
KomgaUser(
|
||||
|
|
@ -89,7 +93,8 @@ class KomgaUserDao(
|
|||
|
||||
@Transactional
|
||||
override fun insert(user: KomgaUser) {
|
||||
dsl.insertInto(u)
|
||||
dsl
|
||||
.insertInto(u)
|
||||
.set(u.ID, user.id)
|
||||
.set(u.EMAIL, user.email)
|
||||
.set(u.PASSWORD, user.password)
|
||||
|
|
@ -106,15 +111,15 @@ class KomgaUserDao(
|
|||
AllowExclude.EXCLUDE -> false
|
||||
null -> null
|
||||
},
|
||||
)
|
||||
.execute()
|
||||
).execute()
|
||||
|
||||
insertSharedLibraries(user)
|
||||
insertSharingRestrictions(user)
|
||||
}
|
||||
|
||||
override fun insert(apiKey: ApiKey) {
|
||||
dsl.insertInto(uak)
|
||||
dsl
|
||||
.insertInto(uak)
|
||||
.set(uak.ID, apiKey.id)
|
||||
.set(uak.USER_ID, apiKey.userId)
|
||||
.set(uak.API_KEY, apiKey.key)
|
||||
|
|
@ -124,7 +129,8 @@ class KomgaUserDao(
|
|||
|
||||
@Transactional
|
||||
override fun update(user: KomgaUser) {
|
||||
dsl.update(u)
|
||||
dsl
|
||||
.update(u)
|
||||
.set(u.EMAIL, user.email)
|
||||
.set(u.PASSWORD, user.password)
|
||||
.set(u.ROLE_ADMIN, user.roleAdmin)
|
||||
|
|
@ -140,16 +146,17 @@ class KomgaUserDao(
|
|||
AllowExclude.EXCLUDE -> false
|
||||
null -> null
|
||||
},
|
||||
)
|
||||
.set(u.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z")))
|
||||
).set(u.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z")))
|
||||
.where(u.ID.eq(user.id))
|
||||
.execute()
|
||||
|
||||
dsl.deleteFrom(ul)
|
||||
dsl
|
||||
.deleteFrom(ul)
|
||||
.where(ul.USER_ID.eq(user.id))
|
||||
.execute()
|
||||
|
||||
dsl.deleteFrom(us)
|
||||
dsl
|
||||
.deleteFrom(us)
|
||||
.where(us.USER_ID.eq(user.id))
|
||||
.execute()
|
||||
|
||||
|
|
@ -166,7 +173,8 @@ class KomgaUserDao(
|
|||
|
||||
private fun insertSharedLibraries(user: KomgaUser) {
|
||||
user.sharedLibrariesIds.forEach {
|
||||
dsl.insertInto(ul)
|
||||
dsl
|
||||
.insertInto(ul)
|
||||
.columns(ul.USER_ID, ul.LIBRARY_ID)
|
||||
.values(user.id, it)
|
||||
.execute()
|
||||
|
|
@ -175,14 +183,16 @@ class KomgaUserDao(
|
|||
|
||||
private fun insertSharingRestrictions(user: KomgaUser) {
|
||||
user.restrictions.labelsAllow.forEach { label ->
|
||||
dsl.insertInto(us)
|
||||
dsl
|
||||
.insertInto(us)
|
||||
.columns(us.USER_ID, us.ALLOW, us.LABEL)
|
||||
.values(user.id, true, label)
|
||||
.execute()
|
||||
}
|
||||
|
||||
user.restrictions.labelsExclude.forEach { label ->
|
||||
dsl.insertInto(us)
|
||||
dsl
|
||||
.insertInto(us)
|
||||
.columns(us.USER_ID, us.ALLOW, us.LABEL)
|
||||
.values(user.id, false, label)
|
||||
.execute()
|
||||
|
|
@ -211,7 +221,8 @@ class KomgaUserDao(
|
|||
apiKeyId: String,
|
||||
userId: String,
|
||||
) {
|
||||
dsl.deleteFrom(uak)
|
||||
dsl
|
||||
.deleteFrom(uak)
|
||||
.where(uak.ID.eq(apiKeyId))
|
||||
.and(uak.USER_ID.eq(userId))
|
||||
.execute()
|
||||
|
|
@ -222,28 +233,28 @@ class KomgaUserDao(
|
|||
}
|
||||
|
||||
override fun findAnnouncementIdsReadByUserId(userId: String): Set<String> =
|
||||
dsl.select(ar.ANNOUNCEMENT_ID)
|
||||
dsl
|
||||
.select(ar.ANNOUNCEMENT_ID)
|
||||
.from(ar)
|
||||
.where(ar.USER_ID.eq(userId))
|
||||
.fetchSet(ar.ANNOUNCEMENT_ID)
|
||||
|
||||
override fun existsByEmailIgnoreCase(email: String): Boolean =
|
||||
dsl.fetchExists(
|
||||
dsl.selectFrom(u)
|
||||
dsl
|
||||
.selectFrom(u)
|
||||
.where(u.EMAIL.equalIgnoreCase(email)),
|
||||
)
|
||||
|
||||
override fun existsApiKeyByIdAndUserId(
|
||||
apiKeyId: String,
|
||||
userId: String,
|
||||
): Boolean =
|
||||
dsl.fetchExists(uak, uak.ID.eq(apiKeyId).and(uak.USER_ID.eq(userId)))
|
||||
): Boolean = dsl.fetchExists(uak, uak.ID.eq(apiKeyId).and(uak.USER_ID.eq(userId)))
|
||||
|
||||
override fun existsApiKeyByCommentAndUserId(
|
||||
comment: String,
|
||||
userId: String,
|
||||
): Boolean =
|
||||
dsl.fetchExists(uak, uak.COMMENT.equalIgnoreCase(comment).and(uak.USER_ID.eq(userId)))
|
||||
): Boolean = dsl.fetchExists(uak, uak.COMMENT.equalIgnoreCase(comment).and(uak.USER_ID.eq(userId)))
|
||||
|
||||
override fun findByEmailIgnoreCaseOrNull(email: String): KomgaUser? =
|
||||
selectBase()
|
||||
|
|
@ -254,13 +265,15 @@ class KomgaUserDao(
|
|||
override fun findByApiKeyOrNull(apiKey: String): Pair<KomgaUser, ApiKey>? {
|
||||
val user =
|
||||
selectBase()
|
||||
.leftJoin(uak).on(u.ID.eq(uak.USER_ID))
|
||||
.leftJoin(uak)
|
||||
.on(u.ID.eq(uak.USER_ID))
|
||||
.where(uak.API_KEY.eq(apiKey))
|
||||
.fetchAndMap()
|
||||
.firstOrNull() ?: return null
|
||||
|
||||
val key =
|
||||
dsl.selectFrom(uak)
|
||||
dsl
|
||||
.selectFrom(uak)
|
||||
.where(uak.API_KEY.eq(apiKey))
|
||||
.fetchInto(uak)
|
||||
.map { it.toDomain() }
|
||||
|
|
|
|||
|
|
@ -46,12 +46,15 @@ class LibraryDao(
|
|||
.fetchAndMap()
|
||||
|
||||
private fun selectBase() =
|
||||
dsl.select()
|
||||
dsl
|
||||
.select()
|
||||
.from(l)
|
||||
.leftJoin(le).onKey()
|
||||
.leftJoin(le)
|
||||
.onKey()
|
||||
|
||||
private fun ResultQuery<Record>.fetchAndMap(): Collection<Library> =
|
||||
this.fetchGroups({ it.into(l) }, { it.into(le) })
|
||||
this
|
||||
.fetchGroups({ it.into(l) }, { it.into(le) })
|
||||
.map { (lr, ler) ->
|
||||
lr.toDomain(ler.mapNotNull { it.exclusion }.toSet())
|
||||
}
|
||||
|
|
@ -72,7 +75,8 @@ class LibraryDao(
|
|||
|
||||
@Transactional
|
||||
override fun insert(library: Library) {
|
||||
dsl.insertInto(l)
|
||||
dsl
|
||||
.insertInto(l)
|
||||
.set(l.ID, library.id)
|
||||
.set(l.NAME, library.name)
|
||||
.set(l.ROOT, library.root.toString())
|
||||
|
|
@ -108,7 +112,8 @@ class LibraryDao(
|
|||
|
||||
@Transactional
|
||||
override fun update(library: Library) {
|
||||
dsl.update(l)
|
||||
dsl
|
||||
.update(l)
|
||||
.set(l.NAME, library.name)
|
||||
.set(l.ROOT, library.root.toString())
|
||||
.set(l.IMPORT_COMICINFO_BOOK, library.importComicInfoBook)
|
||||
|
|
@ -147,21 +152,24 @@ class LibraryDao(
|
|||
override fun count(): Long = dsl.fetchCount(l).toLong()
|
||||
|
||||
fun findDirectoryExclusions(libraryId: String): Set<String> =
|
||||
dsl.select(le.EXCLUSION)
|
||||
dsl
|
||||
.select(le.EXCLUSION)
|
||||
.from(le)
|
||||
.where(le.LIBRARY_ID.eq(libraryId))
|
||||
.fetchSet(le.EXCLUSION)
|
||||
|
||||
private fun insertDirectoryExclusions(library: Library) {
|
||||
if (library.scanDirectoryExclusions.isNotEmpty()) {
|
||||
dsl.batch(
|
||||
dsl.insertInto(le, le.LIBRARY_ID, le.EXCLUSION)
|
||||
.values(null as String?, null),
|
||||
).also { step ->
|
||||
library.scanDirectoryExclusions.forEach {
|
||||
step.bind(library.id, it)
|
||||
}
|
||||
}.execute()
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(le, le.LIBRARY_ID, le.EXCLUSION)
|
||||
.values(null as String?, null),
|
||||
).also { step ->
|
||||
library.scanDirectoryExclusions.forEach {
|
||||
step.bind(library.id, it)
|
||||
}
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -54,14 +54,13 @@ class MediaDao(
|
|||
*p.fields(),
|
||||
)
|
||||
|
||||
override fun findById(bookId: String): Media =
|
||||
find(dsl, bookId)!!
|
||||
override fun findById(bookId: String): Media = find(dsl, bookId)!!
|
||||
|
||||
override fun findByIdOrNull(bookId: String): Media? =
|
||||
find(dsl, bookId)
|
||||
override fun findByIdOrNull(bookId: String): Media? = find(dsl, bookId)
|
||||
|
||||
override fun findExtensionByIdOrNull(bookId: String): MediaExtension? =
|
||||
dsl.select(m.EXTENSION_CLASS, m.EXTENSION_VALUE_BLOB)
|
||||
dsl
|
||||
.select(m.EXTENSION_CLASS, m.EXTENSION_VALUE_BLOB)
|
||||
.from(m)
|
||||
.where(m.BOOK_ID.eq(bookId))
|
||||
.fetchOne()
|
||||
|
|
@ -77,10 +76,13 @@ class MediaDao(
|
|||
val neededHash = pageHashing * 2
|
||||
val neededHashForBook = DSL.`when`(pagesCount.lt(neededHash), pagesCount).otherwise(neededHash)
|
||||
|
||||
return dsl.select(b.ID)
|
||||
return dsl
|
||||
.select(b.ID)
|
||||
.from(b)
|
||||
.leftJoin(p).on(b.ID.eq(p.BOOK_ID))
|
||||
.leftJoin(m).on(b.ID.eq(m.BOOK_ID))
|
||||
.leftJoin(p)
|
||||
.on(b.ID.eq(p.BOOK_ID))
|
||||
.leftJoin(m)
|
||||
.on(b.ID.eq(m.BOOK_ID))
|
||||
.where(b.LIBRARY_ID.eq(libraryId))
|
||||
.and(m.STATUS.eq(Media.Status.READY.name))
|
||||
.and(m.MEDIA_TYPE.`in`(mediaTypes))
|
||||
|
|
@ -91,7 +93,8 @@ class MediaDao(
|
|||
}
|
||||
|
||||
override fun getPagesSizes(bookIds: Collection<String>): Collection<Pair<String, Int>> =
|
||||
dsl.select(m.BOOK_ID, m.PAGE_COUNT)
|
||||
dsl
|
||||
.select(m.BOOK_ID, m.PAGE_COUNT)
|
||||
.from(m)
|
||||
.where(m.BOOK_ID.`in`(bookIds))
|
||||
.fetch()
|
||||
|
|
@ -101,9 +104,11 @@ class MediaDao(
|
|||
dsl: DSLContext,
|
||||
bookId: String,
|
||||
): Media? =
|
||||
dsl.select(*groupFields)
|
||||
dsl
|
||||
.select(*groupFields)
|
||||
.from(m)
|
||||
.leftJoin(p).on(m.BOOK_ID.eq(p.BOOK_ID))
|
||||
.leftJoin(p)
|
||||
.on(m.BOOK_ID.eq(p.BOOK_ID))
|
||||
.where(m.BOOK_ID.eq(bookId))
|
||||
.groupBy(*groupFields)
|
||||
.orderBy(p.NUMBER.asc())
|
||||
|
|
@ -112,7 +117,8 @@ class MediaDao(
|
|||
{ it.into(p) },
|
||||
).map { (mr, pr) ->
|
||||
val files =
|
||||
dsl.selectFrom(f)
|
||||
dsl
|
||||
.selectFrom(f)
|
||||
.where(f.BOOK_ID.eq(bookId))
|
||||
.fetchInto(f)
|
||||
|
||||
|
|
@ -128,34 +134,36 @@ class MediaDao(
|
|||
override fun insert(medias: Collection<Media>) {
|
||||
if (medias.isNotEmpty()) {
|
||||
medias.chunked(batchSize).forEach { chunk ->
|
||||
dsl.batch(
|
||||
dsl.insertInto(
|
||||
m,
|
||||
m.BOOK_ID,
|
||||
m.STATUS,
|
||||
m.MEDIA_TYPE,
|
||||
m.COMMENT,
|
||||
m.PAGE_COUNT,
|
||||
m.EPUB_DIVINA_COMPATIBLE,
|
||||
m.EPUB_IS_KEPUB,
|
||||
m.EXTENSION_CLASS,
|
||||
m.EXTENSION_VALUE_BLOB,
|
||||
).values(null as String?, null, null, null, null, null, null, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach { media ->
|
||||
step.bind(
|
||||
media.bookId,
|
||||
media.status,
|
||||
media.mediaType,
|
||||
media.comment,
|
||||
media.pageCount,
|
||||
media.epubDivinaCompatible,
|
||||
media.epubIsKepub,
|
||||
media.extension?.let { if (it is ProxyExtension) null else it::class.qualifiedName },
|
||||
media.extension?.let { if (it is ProxyExtension) null else mapper.serializeJsonGz(it) },
|
||||
)
|
||||
}
|
||||
}.execute()
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(
|
||||
m,
|
||||
m.BOOK_ID,
|
||||
m.STATUS,
|
||||
m.MEDIA_TYPE,
|
||||
m.COMMENT,
|
||||
m.PAGE_COUNT,
|
||||
m.EPUB_DIVINA_COMPATIBLE,
|
||||
m.EPUB_IS_KEPUB,
|
||||
m.EXTENSION_CLASS,
|
||||
m.EXTENSION_VALUE_BLOB,
|
||||
).values(null as String?, null, null, null, null, null, null, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach { media ->
|
||||
step.bind(
|
||||
media.bookId,
|
||||
media.status,
|
||||
media.mediaType,
|
||||
media.comment,
|
||||
media.pageCount,
|
||||
media.epubDivinaCompatible,
|
||||
media.epubIsKepub,
|
||||
media.extension?.let { if (it is ProxyExtension) null else it::class.qualifiedName },
|
||||
media.extension?.let { if (it is ProxyExtension) null else mapper.serializeJsonGz(it) },
|
||||
)
|
||||
}
|
||||
}.execute()
|
||||
}
|
||||
|
||||
insertPages(medias)
|
||||
|
|
@ -166,34 +174,36 @@ class MediaDao(
|
|||
private fun insertPages(medias: Collection<Media>) {
|
||||
if (medias.any { it.pages.isNotEmpty() }) {
|
||||
medias.chunked(batchSize).forEach { chunk ->
|
||||
dsl.batch(
|
||||
dsl.insertInto(
|
||||
p,
|
||||
p.BOOK_ID,
|
||||
p.FILE_NAME,
|
||||
p.MEDIA_TYPE,
|
||||
p.NUMBER,
|
||||
p.WIDTH,
|
||||
p.HEIGHT,
|
||||
p.FILE_HASH,
|
||||
p.FILE_SIZE,
|
||||
).values(null as String?, null, null, null, null, null, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach { media ->
|
||||
media.pages.forEachIndexed { index, page ->
|
||||
step.bind(
|
||||
media.bookId,
|
||||
page.fileName,
|
||||
page.mediaType,
|
||||
index,
|
||||
page.dimension?.width,
|
||||
page.dimension?.height,
|
||||
page.fileHash,
|
||||
page.fileSize,
|
||||
)
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(
|
||||
p,
|
||||
p.BOOK_ID,
|
||||
p.FILE_NAME,
|
||||
p.MEDIA_TYPE,
|
||||
p.NUMBER,
|
||||
p.WIDTH,
|
||||
p.HEIGHT,
|
||||
p.FILE_HASH,
|
||||
p.FILE_SIZE,
|
||||
).values(null as String?, null, null, null, null, null, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach { media ->
|
||||
media.pages.forEachIndexed { index, page ->
|
||||
step.bind(
|
||||
media.bookId,
|
||||
page.fileName,
|
||||
page.mediaType,
|
||||
index,
|
||||
page.dimension?.width,
|
||||
page.dimension?.height,
|
||||
page.fileHash,
|
||||
page.fileSize,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}.execute()
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -201,35 +211,38 @@ class MediaDao(
|
|||
private fun insertFiles(medias: Collection<Media>) {
|
||||
if (medias.any { it.files.isNotEmpty() }) {
|
||||
medias.chunked(batchSize).forEach { chunk ->
|
||||
dsl.batch(
|
||||
dsl.insertInto(
|
||||
f,
|
||||
f.BOOK_ID,
|
||||
f.FILE_NAME,
|
||||
f.MEDIA_TYPE,
|
||||
f.SUB_TYPE,
|
||||
f.FILE_SIZE,
|
||||
).values(null as String?, null, null, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach { media ->
|
||||
media.files.forEach {
|
||||
step.bind(
|
||||
media.bookId,
|
||||
it.fileName,
|
||||
it.mediaType,
|
||||
it.subType,
|
||||
it.fileSize,
|
||||
)
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(
|
||||
f,
|
||||
f.BOOK_ID,
|
||||
f.FILE_NAME,
|
||||
f.MEDIA_TYPE,
|
||||
f.SUB_TYPE,
|
||||
f.FILE_SIZE,
|
||||
).values(null as String?, null, null, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach { media ->
|
||||
media.files.forEach {
|
||||
step.bind(
|
||||
media.bookId,
|
||||
it.fileName,
|
||||
it.mediaType,
|
||||
it.subType,
|
||||
it.fileSize,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}.execute()
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Transactional
|
||||
override fun update(media: Media) {
|
||||
dsl.update(m)
|
||||
dsl
|
||||
.update(m)
|
||||
.set(m.STATUS, media.status.toString())
|
||||
.set(m.MEDIA_TYPE, media.mediaType)
|
||||
.set(m.COMMENT, media.comment)
|
||||
|
|
@ -241,16 +254,17 @@ class MediaDao(
|
|||
set(m.EXTENSION_CLASS, media.extension::class.qualifiedName)
|
||||
set(m.EXTENSION_VALUE_BLOB, mapper.serializeJsonGz(media.extension))
|
||||
}
|
||||
}
|
||||
.set(m.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z")))
|
||||
}.set(m.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z")))
|
||||
.where(m.BOOK_ID.eq(media.bookId))
|
||||
.execute()
|
||||
|
||||
dsl.deleteFrom(p)
|
||||
dsl
|
||||
.deleteFrom(p)
|
||||
.where(p.BOOK_ID.eq(media.bookId))
|
||||
.execute()
|
||||
|
||||
dsl.deleteFrom(f)
|
||||
dsl
|
||||
.deleteFrom(f)
|
||||
.where(f.BOOK_ID.eq(media.bookId))
|
||||
.execute()
|
||||
|
||||
|
|
@ -279,21 +293,20 @@ class MediaDao(
|
|||
private fun MediaRecord.toDomain(
|
||||
pages: List<BookPage>,
|
||||
files: List<MediaFile>,
|
||||
) =
|
||||
Media(
|
||||
status = Media.Status.valueOf(status),
|
||||
mediaType = mediaType,
|
||||
pages = pages,
|
||||
pageCount = pageCount,
|
||||
files = files,
|
||||
extension = ProxyExtension.of(extensionClass),
|
||||
comment = comment,
|
||||
bookId = bookId,
|
||||
epubDivinaCompatible = epubDivinaCompatible,
|
||||
epubIsKepub = epubIsKepub,
|
||||
createdDate = createdDate.toCurrentTimeZone(),
|
||||
lastModifiedDate = lastModifiedDate.toCurrentTimeZone(),
|
||||
)
|
||||
) = Media(
|
||||
status = Media.Status.valueOf(status),
|
||||
mediaType = mediaType,
|
||||
pages = pages,
|
||||
pageCount = pageCount,
|
||||
files = files,
|
||||
extension = ProxyExtension.of(extensionClass),
|
||||
comment = comment,
|
||||
bookId = bookId,
|
||||
epubDivinaCompatible = epubDivinaCompatible,
|
||||
epubIsKepub = epubIsKepub,
|
||||
createdDate = createdDate.toCurrentTimeZone(),
|
||||
lastModifiedDate = lastModifiedDate.toCurrentTimeZone(),
|
||||
)
|
||||
|
||||
private fun MediaPageRecord.toDomain() =
|
||||
BookPage(
|
||||
|
|
|
|||
|
|
@ -54,7 +54,8 @@ class PageHashDao(
|
|||
)
|
||||
|
||||
override fun findKnown(pageHash: String): PageHashKnown? =
|
||||
dsl.selectFrom(ph)
|
||||
dsl
|
||||
.selectFrom(ph)
|
||||
.where(ph.HASH.eq(pageHash))
|
||||
.fetchOneInto(ph)
|
||||
?.toDomain()
|
||||
|
|
@ -64,9 +65,11 @@ class PageHashDao(
|
|||
pageable: Pageable,
|
||||
): Page<PageHashKnown> {
|
||||
val query =
|
||||
dsl.select(*ph.fields(), DSL.count(p.FILE_HASH).`as`("count"))
|
||||
dsl
|
||||
.select(*ph.fields(), DSL.count(p.FILE_HASH).`as`("count"))
|
||||
.from(ph)
|
||||
.leftJoin(p).on(ph.HASH.eq(p.FILE_HASH))
|
||||
.leftJoin(p)
|
||||
.on(ph.HASH.eq(p.FILE_HASH))
|
||||
.apply { actions?.let { where(ph.ACTION.`in`(actions)) } }
|
||||
.groupBy(*ph.fields())
|
||||
|
||||
|
|
@ -94,22 +97,22 @@ class PageHashDao(
|
|||
override fun findAllUnknown(pageable: Pageable): Page<PageHashUnknown> {
|
||||
val bookCount = DSL.count(p.BOOK_ID)
|
||||
val query =
|
||||
dsl.select(
|
||||
p.FILE_HASH,
|
||||
p.FILE_SIZE,
|
||||
bookCount.`as`("count"),
|
||||
(bookCount * p.FILE_SIZE).`as`("totalSize"),
|
||||
)
|
||||
.from(p)
|
||||
dsl
|
||||
.select(
|
||||
p.FILE_HASH,
|
||||
p.FILE_SIZE,
|
||||
bookCount.`as`("count"),
|
||||
(bookCount * p.FILE_SIZE).`as`("totalSize"),
|
||||
).from(p)
|
||||
.where(p.FILE_HASH.ne(""))
|
||||
.and(
|
||||
DSL.notExists(
|
||||
dsl.selectOne()
|
||||
dsl
|
||||
.selectOne()
|
||||
.from(ph)
|
||||
.where(ph.HASH.eq(p.FILE_HASH)),
|
||||
),
|
||||
)
|
||||
.groupBy(p.FILE_HASH)
|
||||
).groupBy(p.FILE_HASH)
|
||||
.having(DSL.count(p.BOOK_ID).gt(1))
|
||||
|
||||
val count = dsl.fetchCount(query)
|
||||
|
|
@ -139,9 +142,11 @@ class PageHashDao(
|
|||
pageable: Pageable,
|
||||
): Page<PageHashMatch> {
|
||||
val query =
|
||||
dsl.select(p.BOOK_ID, b.URL, p.NUMBER, p.FILE_NAME, p.FILE_SIZE, p.MEDIA_TYPE)
|
||||
dsl
|
||||
.select(p.BOOK_ID, b.URL, p.NUMBER, p.FILE_NAME, p.FILE_SIZE, p.MEDIA_TYPE)
|
||||
.from(p)
|
||||
.leftJoin(b).on(p.BOOK_ID.eq(b.ID))
|
||||
.leftJoin(b)
|
||||
.on(p.BOOK_ID.eq(b.ID))
|
||||
.where(p.FILE_HASH.eq(pageHash))
|
||||
|
||||
val count = dsl.fetchCount(query)
|
||||
|
|
@ -177,9 +182,11 @@ class PageHashDao(
|
|||
actions: List<PageHashKnown.Action>?,
|
||||
libraryId: String?,
|
||||
): Map<String, Collection<BookPageNumbered>> =
|
||||
dsl.select(p.BOOK_ID, p.FILE_NAME, p.NUMBER, p.FILE_HASH, p.MEDIA_TYPE, p.FILE_SIZE)
|
||||
dsl
|
||||
.select(p.BOOK_ID, p.FILE_NAME, p.NUMBER, p.FILE_HASH, p.MEDIA_TYPE, p.FILE_SIZE)
|
||||
.from(p)
|
||||
.innerJoin(ph).on(p.FILE_HASH.eq(ph.HASH))
|
||||
.innerJoin(ph)
|
||||
.on(p.FILE_HASH.eq(ph.HASH))
|
||||
.apply { libraryId?.let<String, Unit> { innerJoin(b).on(b.ID.eq(p.BOOK_ID)) } }
|
||||
.where(ph.ACTION.`in`(actions))
|
||||
.apply { libraryId?.let<String, Unit> { and(b.LIBRARY_ID.eq(it)) } }
|
||||
|
|
@ -196,24 +203,28 @@ class PageHashDao(
|
|||
.fold(emptyList()) { acc, (_, new) -> acc + new }
|
||||
|
||||
override fun getKnownThumbnail(pageHash: String): ByteArray? =
|
||||
dsl.select(pht.THUMBNAIL)
|
||||
dsl
|
||||
.select(pht.THUMBNAIL)
|
||||
.from(pht)
|
||||
.where(pht.HASH.eq(pageHash))
|
||||
.fetchOne()?.value1()
|
||||
.fetchOne()
|
||||
?.value1()
|
||||
|
||||
@Transactional
|
||||
override fun insert(
|
||||
pageHash: PageHashKnown,
|
||||
thumbnail: ByteArray?,
|
||||
) {
|
||||
dsl.insertInto(ph)
|
||||
dsl
|
||||
.insertInto(ph)
|
||||
.set(ph.HASH, pageHash.hash)
|
||||
.set(ph.SIZE, pageHash.size)
|
||||
.set(ph.ACTION, pageHash.action.name)
|
||||
.execute()
|
||||
|
||||
if (thumbnail != null) {
|
||||
dsl.insertInto(pht)
|
||||
dsl
|
||||
.insertInto(pht)
|
||||
.set(pht.HASH, pageHash.hash)
|
||||
.set(pht.THUMBNAIL, thumbnail)
|
||||
.execute()
|
||||
|
|
@ -221,7 +232,8 @@ class PageHashDao(
|
|||
}
|
||||
|
||||
override fun update(pageHash: PageHashKnown) {
|
||||
dsl.update(ph)
|
||||
dsl
|
||||
.update(ph)
|
||||
.set(ph.ACTION, pageHash.action.name)
|
||||
.set(ph.SIZE, pageHash.size)
|
||||
.set(ph.DELETE_COUNT, pageHash.deleteCount)
|
||||
|
|
|
|||
|
|
@ -80,10 +80,13 @@ class ReadListDao(
|
|||
if (belongsToLibraryIds == null && filterOnLibraryIds == null && !restrictions.isRestricted)
|
||||
null
|
||||
else
|
||||
dsl.selectDistinct(rl.ID)
|
||||
dsl
|
||||
.selectDistinct(rl.ID)
|
||||
.from(rl)
|
||||
.leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
|
||||
.leftJoin(b).on(rlb.BOOK_ID.eq(b.ID))
|
||||
.leftJoin(rlb)
|
||||
.on(rl.ID.eq(rlb.READLIST_ID))
|
||||
.leftJoin(b)
|
||||
.on(rlb.BOOK_ID.eq(b.ID))
|
||||
.apply { if (restrictions.isRestricted) leftJoin(sd).on(sd.SERIES_ID.eq(b.SERIES_ID)) }
|
||||
.where(conditions)
|
||||
|
||||
|
|
@ -126,9 +129,11 @@ class ReadListDao(
|
|||
restrictions: ContentRestrictions,
|
||||
): Collection<ReadList> {
|
||||
val queryIds =
|
||||
dsl.select(rl.ID)
|
||||
dsl
|
||||
.select(rl.ID)
|
||||
.from(rl)
|
||||
.leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
|
||||
.leftJoin(rlb)
|
||||
.on(rl.ID.eq(rlb.READLIST_ID))
|
||||
.apply { if (restrictions.isRestricted) leftJoin(b).on(rlb.BOOK_ID.eq(b.ID)).leftJoin(sd).on(sd.SERIES_ID.eq(b.SERIES_ID)) }
|
||||
.where(rlb.BOOK_ID.eq(containsBookId))
|
||||
.apply { if (restrictions.isRestricted) and(restrictions.toCondition()) }
|
||||
|
|
@ -141,12 +146,15 @@ class ReadListDao(
|
|||
}
|
||||
|
||||
override fun findAllEmpty(): Collection<ReadList> =
|
||||
dsl.selectFrom(rl)
|
||||
dsl
|
||||
.selectFrom(rl)
|
||||
.where(
|
||||
rl.ID.`in`(
|
||||
dsl.select(rl.ID)
|
||||
dsl
|
||||
.select(rl.ID)
|
||||
.from(rl)
|
||||
.leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
|
||||
.leftJoin(rlb)
|
||||
.on(rl.ID.eq(rlb.READLIST_ID))
|
||||
.where(rlb.READLIST_ID.isNull),
|
||||
),
|
||||
).fetchInto(rl)
|
||||
|
|
@ -159,10 +167,13 @@ class ReadListDao(
|
|||
.firstOrNull()
|
||||
|
||||
private fun selectBase(joinOnSeriesMetadata: Boolean = false) =
|
||||
dsl.selectDistinct(*rl.fields())
|
||||
dsl
|
||||
.selectDistinct(*rl.fields())
|
||||
.from(rl)
|
||||
.leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
|
||||
.leftJoin(b).on(rlb.BOOK_ID.eq(b.ID))
|
||||
.leftJoin(rlb)
|
||||
.on(rl.ID.eq(rlb.READLIST_ID))
|
||||
.leftJoin(b)
|
||||
.on(rlb.BOOK_ID.eq(b.ID))
|
||||
.apply { if (joinOnSeriesMetadata) leftJoin(sd).on(sd.SERIES_ID.eq(b.SERIES_ID)) }
|
||||
|
||||
private fun ResultQuery<Record>.fetchAndMap(
|
||||
|
|
@ -172,9 +183,11 @@ class ReadListDao(
|
|||
fetchInto(rl)
|
||||
.map { rr ->
|
||||
val bookIds =
|
||||
dsl.select(*rlb.fields())
|
||||
dsl
|
||||
.select(*rlb.fields())
|
||||
.from(rlb)
|
||||
.leftJoin(b).on(rlb.BOOK_ID.eq(b.ID))
|
||||
.leftJoin(b)
|
||||
.on(rlb.BOOK_ID.eq(b.ID))
|
||||
.apply { if (restrictions.isRestricted) leftJoin(sd).on(sd.SERIES_ID.eq(b.SERIES_ID)) }
|
||||
.where(rlb.READLIST_ID.eq(rr.id))
|
||||
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
|
||||
|
|
@ -182,13 +195,15 @@ class ReadListDao(
|
|||
.orderBy(rlb.NUMBER.asc())
|
||||
.fetchInto(rlb)
|
||||
.mapNotNull { it.number to it.bookId }
|
||||
.toMap().toSortedMap()
|
||||
.toMap()
|
||||
.toSortedMap()
|
||||
rr.toDomain(bookIds)
|
||||
}
|
||||
|
||||
@Transactional
|
||||
override fun insert(readList: ReadList) {
|
||||
dsl.insertInto(rl)
|
||||
dsl
|
||||
.insertInto(rl)
|
||||
.set(rl.ID, readList.id)
|
||||
.set(rl.NAME, readList.name)
|
||||
.set(rl.SUMMARY, readList.summary)
|
||||
|
|
@ -201,7 +216,8 @@ class ReadListDao(
|
|||
|
||||
private fun insertBooks(readList: ReadList) {
|
||||
readList.bookIds.map { (index, id) ->
|
||||
dsl.insertInto(rlb)
|
||||
dsl
|
||||
.insertInto(rlb)
|
||||
.set(rlb.READLIST_ID, readList.id)
|
||||
.set(rlb.BOOK_ID, id)
|
||||
.set(rlb.NUMBER, index)
|
||||
|
|
@ -211,7 +227,8 @@ class ReadListDao(
|
|||
|
||||
@Transactional
|
||||
override fun update(readList: ReadList) {
|
||||
dsl.update(rl)
|
||||
dsl
|
||||
.update(rl)
|
||||
.set(rl.NAME, readList.name)
|
||||
.set(rl.SUMMARY, readList.summary)
|
||||
.set(rl.ORDERED, readList.ordered)
|
||||
|
|
@ -226,7 +243,8 @@ class ReadListDao(
|
|||
}
|
||||
|
||||
override fun removeBookFromAll(bookId: String) {
|
||||
dsl.deleteFrom(rlb)
|
||||
dsl
|
||||
.deleteFrom(rlb)
|
||||
.where(rlb.BOOK_ID.eq(bookId))
|
||||
.execute()
|
||||
}
|
||||
|
|
@ -235,7 +253,8 @@ class ReadListDao(
|
|||
override fun removeBooksFromAll(bookIds: Collection<String>) {
|
||||
dsl.insertTempStrings(batchSize, bookIds)
|
||||
|
||||
dsl.deleteFrom(rlb)
|
||||
dsl
|
||||
.deleteFrom(rlb)
|
||||
.where(rlb.BOOK_ID.`in`(dsl.selectTempStrings()))
|
||||
.execute()
|
||||
}
|
||||
|
|
@ -260,7 +279,8 @@ class ReadListDao(
|
|||
|
||||
override fun existsByName(name: String): Boolean =
|
||||
dsl.fetchExists(
|
||||
dsl.selectFrom(rl)
|
||||
dsl
|
||||
.selectFrom(rl)
|
||||
.where(rl.NAME.equalIgnoreCase(name)),
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -32,21 +32,26 @@ class ReadListRequestDao(
|
|||
val numberField = "number"
|
||||
val requestsTable = values(*requestsAsRows.toTypedArray()).`as`("request", indexField, seriesField, numberField)
|
||||
val matchedRequests =
|
||||
dsl.select(
|
||||
requestsTable.field(indexField, Int::class.java),
|
||||
sd.SERIES_ID,
|
||||
sd.TITLE,
|
||||
bd.BOOK_ID,
|
||||
bd.NUMBER,
|
||||
bd.TITLE,
|
||||
bma.RELEASE_DATE,
|
||||
)
|
||||
.from(requestsTable)
|
||||
.innerJoin(sd).on(requestsTable.field(seriesField, String::class.java)?.eq(sd.TITLE.noCase()))
|
||||
.leftJoin(bma).on(sd.SERIES_ID.eq(bma.SERIES_ID))
|
||||
.innerJoin(b).on(sd.SERIES_ID.eq(b.SERIES_ID))
|
||||
.innerJoin(bd).on(
|
||||
b.ID.eq(bd.BOOK_ID)
|
||||
dsl
|
||||
.select(
|
||||
requestsTable.field(indexField, Int::class.java),
|
||||
sd.SERIES_ID,
|
||||
sd.TITLE,
|
||||
bd.BOOK_ID,
|
||||
bd.NUMBER,
|
||||
bd.TITLE,
|
||||
bma.RELEASE_DATE,
|
||||
).from(requestsTable)
|
||||
.innerJoin(sd)
|
||||
.on(requestsTable.field(seriesField, String::class.java)?.eq(sd.TITLE.noCase()))
|
||||
.leftJoin(bma)
|
||||
.on(sd.SERIES_ID.eq(bma.SERIES_ID))
|
||||
.innerJoin(b)
|
||||
.on(sd.SERIES_ID.eq(b.SERIES_ID))
|
||||
.innerJoin(bd)
|
||||
.on(
|
||||
b.ID
|
||||
.eq(bd.BOOK_ID)
|
||||
.and(ltrim(bd.NUMBER, value("0")).eq(ltrim(requestsTable.field(numberField, String::class.java), value("0")).noCase())),
|
||||
).fetchGroups(requestsTable.field(indexField, Int::class.java))
|
||||
.mapValues { (_, records) ->
|
||||
|
|
|
|||
|
|
@ -32,7 +32,8 @@ class ReadProgressDao(
|
|||
private val b = Tables.BOOK
|
||||
|
||||
override fun findAll(): Collection<ReadProgress> =
|
||||
dsl.selectFrom(r)
|
||||
dsl
|
||||
.selectFrom(r)
|
||||
.fetchInto(r)
|
||||
.map { it.toDomain() }
|
||||
|
||||
|
|
@ -40,19 +41,22 @@ class ReadProgressDao(
|
|||
bookId: String,
|
||||
userId: String,
|
||||
): ReadProgress? =
|
||||
dsl.selectFrom(r)
|
||||
dsl
|
||||
.selectFrom(r)
|
||||
.where(r.BOOK_ID.eq(bookId).and(r.USER_ID.eq(userId)))
|
||||
.fetchOneInto(r)
|
||||
?.toDomain()
|
||||
|
||||
override fun findAllByUserId(userId: String): Collection<ReadProgress> =
|
||||
dsl.selectFrom(r)
|
||||
dsl
|
||||
.selectFrom(r)
|
||||
.where(r.USER_ID.eq(userId))
|
||||
.fetchInto(r)
|
||||
.map { it.toDomain() }
|
||||
|
||||
override fun findAllByBookId(bookId: String): Collection<ReadProgress> =
|
||||
dsl.selectFrom(r)
|
||||
dsl
|
||||
.selectFrom(r)
|
||||
.where(r.BOOK_ID.eq(bookId))
|
||||
.fetchInto(r)
|
||||
.map { it.toDomain() }
|
||||
|
|
@ -61,7 +65,8 @@ class ReadProgressDao(
|
|||
bookIds: Collection<String>,
|
||||
userId: String,
|
||||
): Collection<ReadProgress> =
|
||||
dsl.selectFrom(r)
|
||||
dsl
|
||||
.selectFrom(r)
|
||||
.where(r.BOOK_ID.`in`(bookIds).and(r.USER_ID.eq(userId)))
|
||||
.fetchInto(r)
|
||||
.map { it.toDomain() }
|
||||
|
|
@ -87,18 +92,18 @@ class ReadProgressDao(
|
|||
}
|
||||
|
||||
private fun ReadProgress.toQuery(): Query =
|
||||
dsl.insertInto(
|
||||
r,
|
||||
r.BOOK_ID,
|
||||
r.USER_ID,
|
||||
r.PAGE,
|
||||
r.COMPLETED,
|
||||
r.READ_DATE,
|
||||
r.DEVICE_ID,
|
||||
r.DEVICE_NAME,
|
||||
r.LOCATOR,
|
||||
)
|
||||
.values(
|
||||
dsl
|
||||
.insertInto(
|
||||
r,
|
||||
r.BOOK_ID,
|
||||
r.USER_ID,
|
||||
r.PAGE,
|
||||
r.COMPLETED,
|
||||
r.READ_DATE,
|
||||
r.DEVICE_ID,
|
||||
r.DEVICE_NAME,
|
||||
r.LOCATOR,
|
||||
).values(
|
||||
bookId,
|
||||
userId,
|
||||
page,
|
||||
|
|
@ -107,8 +112,7 @@ class ReadProgressDao(
|
|||
deviceId,
|
||||
deviceName,
|
||||
locator?.let { mapper.serializeJsonGz(it) },
|
||||
)
|
||||
.onDuplicateKeyUpdate()
|
||||
).onDuplicateKeyUpdate()
|
||||
.set(r.PAGE, page)
|
||||
.set(r.COMPLETED, completed)
|
||||
.set(r.READ_DATE, readDate.toUTC())
|
||||
|
|
@ -160,7 +164,11 @@ class ReadProgressDao(
|
|||
) {
|
||||
dsl.insertTempStrings(batchSize, bookIds)
|
||||
|
||||
dsl.deleteFrom(r).where(r.BOOK_ID.`in`(dsl.selectTempStrings())).and(r.USER_ID.eq(userId)).execute()
|
||||
dsl
|
||||
.deleteFrom(r)
|
||||
.where(r.BOOK_ID.`in`(dsl.selectTempStrings()))
|
||||
.and(r.USER_ID.eq(userId))
|
||||
.execute()
|
||||
aggregateSeriesProgress(bookIds, userId)
|
||||
}
|
||||
|
||||
|
|
@ -177,24 +185,29 @@ class ReadProgressDao(
dsl.insertTempStrings(batchSize, bookIds)

val seriesIdsQuery =
dsl.select(b.SERIES_ID)
dsl
.select(b.SERIES_ID)
.from(b)
.where(b.ID.`in`(dsl.selectTempStrings()))

dsl.deleteFrom(rs)
dsl
.deleteFrom(rs)
.where(rs.SERIES_ID.`in`(seriesIdsQuery))
.apply { userId?.let { and(rs.USER_ID.eq(it)) } }
.execute()

dsl.insertInto(rs)
dsl
.insertInto(rs)
.select(
dsl.select(b.SERIES_ID, r.USER_ID)
dsl
.select(b.SERIES_ID, r.USER_ID)
.select(DSL.sum(DSL.`when`(r.COMPLETED.isTrue, 1).otherwise(0)))
.select(DSL.sum(DSL.`when`(r.COMPLETED.isFalse, 1).otherwise(0)))
.select(DSL.max(r.READ_DATE))
.select(DSL.currentTimestamp())
.from(b)
.innerJoin(r).on(b.ID.eq(r.BOOK_ID))
.innerJoin(r)
.on(b.ID.eq(r.BOOK_ID))
.where(b.SERIES_ID.`in`(seriesIdsQuery))
.apply { userId?.let { and(r.USER_ID.eq(it)) } }
.groupBy(b.SERIES_ID, r.USER_ID),
@ -31,22 +31,27 @@ class ReadProgressDtoDao(
userId: String,
): TachiyomiReadProgressV2Dto {
val numberSortReadProgress =
dsl.select(
d.NUMBER_SORT,
r.COMPLETED,
)
.from(b)
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
dsl
.select(
d.NUMBER_SORT,
r.COMPLETED,
).from(b)
.leftJoin(r)
.on(b.ID.eq(r.BOOK_ID))
.and(readProgressCondition(userId))
.leftJoin(d)
.on(b.ID.eq(d.BOOK_ID))
.where(b.SERIES_ID.eq(seriesId))
.orderBy(d.NUMBER_SORT)
.fetch()
.toList()

val maxNumberSort =
dsl.select(DSL.max(d.NUMBER_SORT))
dsl
.select(DSL.max(d.NUMBER_SORT))
.from(b)
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
.leftJoin(d)
.on(b.ID.eq(d.BOOK_ID))
.where(b.SERIES_ID.eq(seriesId))
.fetchOne(DSL.max(d.NUMBER_SORT)) ?: 0F

@ -58,36 +63,40 @@ class ReadProgressDtoDao(
private fun getSeriesBooksCount(
seriesId: String,
userId: String,
) =
dsl
.select(countUnread.`as`(BOOKS_UNREAD_COUNT))
.select(countRead.`as`(BOOKS_READ_COUNT))
.select(countInProgress.`as`(BOOKS_IN_PROGRESS_COUNT))
.from(b)
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
.where(b.SERIES_ID.eq(seriesId))
.fetch()
.first()
.map {
BooksCount(
unreadCount = it.get(BOOKS_UNREAD_COUNT, Int::class.java),
readCount = it.get(BOOKS_READ_COUNT, Int::class.java),
inProgressCount = it.get(BOOKS_IN_PROGRESS_COUNT, Int::class.java),
)
}
) = dsl
.select(countUnread.`as`(BOOKS_UNREAD_COUNT))
.select(countRead.`as`(BOOKS_READ_COUNT))
.select(countInProgress.`as`(BOOKS_IN_PROGRESS_COUNT))
.from(b)
.leftJoin(r)
.on(b.ID.eq(r.BOOK_ID))
.and(readProgressCondition(userId))
.where(b.SERIES_ID.eq(seriesId))
.fetch()
.first()
.map {
BooksCount(
unreadCount = it.get(BOOKS_UNREAD_COUNT, Int::class.java),
readCount = it.get(BOOKS_READ_COUNT, Int::class.java),
inProgressCount = it.get(BOOKS_IN_PROGRESS_COUNT, Int::class.java),
)
}

override fun findProgressByReadList(
readListId: String,
userId: String,
): TachiyomiReadProgressDto {
val indexedReadProgress =
dsl.select(
rowNumber().over().orderBy(rlb.NUMBER),
r.COMPLETED,
)
.from(b)
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
.leftJoin(rlb).on(b.ID.eq(rlb.BOOK_ID))
dsl
.select(
rowNumber().over().orderBy(rlb.NUMBER),
r.COMPLETED,
).from(b)
.leftJoin(r)
.on(b.ID.eq(r.BOOK_ID))
.and(readProgressCondition(userId))
.leftJoin(rlb)
.on(b.ID.eq(rlb.BOOK_ID))
.where(rlb.READLIST_ID.eq(readListId))
.orderBy(rlb.NUMBER)
.fetch()

@ -99,8 +108,11 @@ class ReadProgressDtoDao(
.select(countRead.`as`(BOOKS_READ_COUNT))
.select(countInProgress.`as`(BOOKS_IN_PROGRESS_COUNT))
.from(b)
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
.leftJoin(rlb).on(b.ID.eq(rlb.BOOK_ID))
.leftJoin(r)
.on(b.ID.eq(r.BOOK_ID))
.and(readProgressCondition(userId))
.leftJoin(rlb)
.on(b.ID.eq(rlb.BOOK_ID))
.where(rlb.READLIST_ID.eq(readListId))
.fetch()
.first()

@ -144,7 +156,8 @@ class ReadProgressDtoDao(
private fun readProgressCondition(userId: String): Condition = r.USER_ID.eq(userId).or(r.USER_ID.isNull)

private fun <T> List<Record2<T, Boolean>>.lastRead(): T? =
this.takeWhile { it.component2() == true }
this
.takeWhile { it.component2() == true }
.lastOrNull()
?.component1()
@ -41,7 +41,8 @@ class ReferentialDao(
search: String,
filterOnLibraryIds: Collection<String>?,
): List<Author> =
dsl.selectDistinct(a.NAME, a.ROLE)
dsl
.selectDistinct(a.NAME, a.ROLE)
.from(a)
.apply { filterOnLibraryIds?.let { leftJoin(b).on(a.BOOK_ID.eq(b.ID)) } }
.where(a.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents()))

@ -55,9 +56,11 @@ class ReferentialDao(
libraryId: String,
filterOnLibraryIds: Collection<String>?,
): List<Author> =
dsl.selectDistinct(bmaa.NAME, bmaa.ROLE)
dsl
.selectDistinct(bmaa.NAME, bmaa.ROLE)
.from(bmaa)
.leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID))
.leftJoin(s)
.on(bmaa.SERIES_ID.eq(s.ID))
.where(bmaa.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents()))
.and(s.LIBRARY_ID.eq(libraryId))
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
@ -70,9 +73,11 @@ class ReferentialDao(
|
|||
collectionId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): List<Author> =
|
||||
dsl.selectDistinct(bmaa.NAME, bmaa.ROLE)
|
||||
dsl
|
||||
.selectDistinct(bmaa.NAME, bmaa.ROLE)
|
||||
.from(bmaa)
|
||||
.leftJoin(cs).on(bmaa.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.leftJoin(cs)
|
||||
.on(bmaa.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) } }
|
||||
.where(bmaa.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents()))
|
||||
.and(cs.COLLECTION_ID.eq(collectionId))
|
||||
|
|
@ -86,7 +91,8 @@ class ReferentialDao(
|
|||
seriesId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): List<Author> =
|
||||
dsl.selectDistinct(bmaa.NAME, bmaa.ROLE)
|
||||
dsl
|
||||
.selectDistinct(bmaa.NAME, bmaa.ROLE)
|
||||
.from(bmaa)
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) } }
|
||||
.where(bmaa.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents()))
|
||||
|
|
@ -101,9 +107,7 @@ class ReferentialDao(
|
|||
role: String?,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
pageable: Pageable,
|
||||
): Page<Author> {
|
||||
return findAuthorsByName(search, role, filterOnLibraryIds, pageable, null)
|
||||
}
|
||||
): Page<Author> = findAuthorsByName(search, role, filterOnLibraryIds, pageable, null)
|
||||
|
||||
override fun findAllAuthorsByNameAndLibrary(
|
||||
search: String?,
|
||||
|
|
@ -111,9 +115,7 @@ class ReferentialDao(
|
|||
libraryId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
pageable: Pageable,
|
||||
): Page<Author> {
|
||||
return findAuthorsByName(search, role, filterOnLibraryIds, pageable, FilterBy(FilterByType.LIBRARY, libraryId))
|
||||
}
|
||||
): Page<Author> = findAuthorsByName(search, role, filterOnLibraryIds, pageable, FilterBy(FilterByType.LIBRARY, libraryId))
|
||||
|
||||
override fun findAllAuthorsByNameAndCollection(
|
||||
search: String?,
|
||||
|
|
@ -121,9 +123,7 @@ class ReferentialDao(
|
|||
collectionId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
pageable: Pageable,
|
||||
): Page<Author> {
|
||||
return findAuthorsByName(search, role, filterOnLibraryIds, pageable, FilterBy(FilterByType.COLLECTION, collectionId))
|
||||
}
|
||||
): Page<Author> = findAuthorsByName(search, role, filterOnLibraryIds, pageable, FilterBy(FilterByType.COLLECTION, collectionId))
|
||||
|
||||
override fun findAllAuthorsByNameAndSeries(
|
||||
search: String?,
|
||||
|
|
@ -131,9 +131,7 @@ class ReferentialDao(
|
|||
seriesId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
pageable: Pageable,
|
||||
): Page<Author> {
|
||||
return findAuthorsByName(search, role, filterOnLibraryIds, pageable, FilterBy(FilterByType.SERIES, seriesId))
|
||||
}
|
||||
): Page<Author> = findAuthorsByName(search, role, filterOnLibraryIds, pageable, FilterBy(FilterByType.SERIES, seriesId))
|
||||
|
||||
override fun findAllAuthorsByNameAndReadList(
|
||||
search: String?,
|
||||
|
|
@ -141,9 +139,7 @@ class ReferentialDao(
|
|||
readListId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
pageable: Pageable,
|
||||
): Page<Author> {
|
||||
return findAuthorsByName(search, role, filterOnLibraryIds, pageable, FilterBy(FilterByType.READLIST, readListId))
|
||||
}
|
||||
): Page<Author> = findAuthorsByName(search, role, filterOnLibraryIds, pageable, FilterBy(FilterByType.READLIST, readListId))
|
||||
|
||||
private enum class FilterByType {
|
||||
LIBRARY,
|
||||
|
|
@ -165,16 +161,18 @@ class ReferentialDao(
|
|||
filterBy: FilterBy?,
|
||||
): Page<Author> {
|
||||
val query =
|
||||
dsl.selectDistinct(bmaa.NAME, bmaa.ROLE)
|
||||
dsl
|
||||
.selectDistinct(bmaa.NAME, bmaa.ROLE)
|
||||
.from(bmaa)
|
||||
.apply { if (filterOnLibraryIds != null || filterBy?.type == FilterByType.LIBRARY) leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) }
|
||||
.apply { if (filterBy?.type == FilterByType.COLLECTION) leftJoin(cs).on(bmaa.SERIES_ID.eq(cs.SERIES_ID)) }
|
||||
.apply {
|
||||
if (filterBy?.type == FilterByType.READLIST)
|
||||
leftJoin(b).on(bmaa.SERIES_ID.eq(b.SERIES_ID))
|
||||
.leftJoin(rb).on(b.ID.eq(rb.BOOK_ID))
|
||||
}
|
||||
.where(noCondition())
|
||||
leftJoin(b)
|
||||
.on(bmaa.SERIES_ID.eq(b.SERIES_ID))
|
||||
.leftJoin(rb)
|
||||
.on(b.ID.eq(rb.BOOK_ID))
|
||||
}.where(noCondition())
|
||||
.apply { search?.let { and(bmaa.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents())) } }
|
||||
.apply { role?.let { and(bmaa.ROLE.eq(role)) } }
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
|
|
@ -214,7 +212,8 @@ class ReferentialDao(
|
|||
search: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): List<String> =
|
||||
dsl.selectDistinct(a.NAME)
|
||||
dsl
|
||||
.selectDistinct(a.NAME)
|
||||
.from(a)
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(b).on(a.BOOK_ID.eq(b.ID)) } }
|
||||
.where(a.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents()))
|
||||
|
|
@ -223,36 +222,40 @@ class ReferentialDao(
|
|||
.fetch(a.NAME)
|
||||
|
||||
override fun findAllAuthorsRoles(filterOnLibraryIds: Collection<String>?): List<String> =
|
||||
dsl.selectDistinct(a.ROLE)
|
||||
dsl
|
||||
.selectDistinct(a.ROLE)
|
||||
.from(a)
|
||||
.apply {
|
||||
filterOnLibraryIds?.let {
|
||||
leftJoin(b).on(a.BOOK_ID.eq(b.ID))
|
||||
leftJoin(b)
|
||||
.on(a.BOOK_ID.eq(b.ID))
|
||||
.where(b.LIBRARY_ID.`in`(it))
|
||||
}
|
||||
}
|
||||
.orderBy(a.ROLE)
|
||||
}.orderBy(a.ROLE)
|
||||
.fetch(a.ROLE)
|
||||
|
||||
override fun findAllGenres(filterOnLibraryIds: Collection<String>?): Set<String> =
|
||||
dsl.selectDistinct(g.GENRE)
|
||||
dsl
|
||||
.selectDistinct(g.GENRE)
|
||||
.from(g)
|
||||
.apply {
|
||||
filterOnLibraryIds?.let {
|
||||
leftJoin(s).on(g.SERIES_ID.eq(s.ID))
|
||||
leftJoin(s)
|
||||
.on(g.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.`in`(it))
|
||||
}
|
||||
}
|
||||
.orderBy(g.GENRE.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
}.orderBy(g.GENRE.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.fetchSet(g.GENRE)
|
||||
|
||||
override fun findAllGenresByLibrary(
|
||||
libraryId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.selectDistinct(g.GENRE)
|
||||
dsl
|
||||
.selectDistinct(g.GENRE)
|
||||
.from(g)
|
||||
.leftJoin(s).on(g.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(s)
|
||||
.on(g.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.eq(libraryId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(g.GENRE.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
|
|
@ -262,9 +265,11 @@ class ReferentialDao(
|
|||
collectionId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.selectDistinct(g.GENRE)
|
||||
dsl
|
||||
.selectDistinct(g.GENRE)
|
||||
.from(g)
|
||||
.leftJoin(cs).on(g.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.leftJoin(cs)
|
||||
.on(g.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(g.SERIES_ID.eq(s.ID)) } }
|
||||
.where(cs.COLLECTION_ID.eq(collectionId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
|
|
@ -272,15 +277,15 @@ class ReferentialDao(
|
|||
.fetchSet(g.GENRE)
|
||||
|
||||
override fun findAllSeriesAndBookTags(filterOnLibraryIds: Collection<String>?): Set<String> =
|
||||
dsl.select(bt.TAG.`as`("tag"))
|
||||
dsl
|
||||
.select(bt.TAG.`as`("tag"))
|
||||
.from(bt)
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(b).on(bt.BOOK_ID.eq(b.ID)).where(b.LIBRARY_ID.`in`(it)) } }
|
||||
.union(
|
||||
select(st.TAG.`as`("tag"))
|
||||
.from(st)
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(st.SERIES_ID.eq(s.ID)).where(s.LIBRARY_ID.`in`(it)) } },
|
||||
)
|
||||
.fetchSet(0, String::class.java)
|
||||
).fetchSet(0, String::class.java)
|
||||
.sortedBy { it.stripAccents().lowercase() }
|
||||
.toSet()
|
||||
|
||||
|
|
@ -288,19 +293,21 @@ class ReferentialDao(
|
|||
libraryId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.select(bt.TAG.`as`("tag"))
|
||||
dsl
|
||||
.select(bt.TAG.`as`("tag"))
|
||||
.from(bt)
|
||||
.leftJoin(b).on(bt.BOOK_ID.eq(b.ID))
|
||||
.leftJoin(b)
|
||||
.on(bt.BOOK_ID.eq(b.ID))
|
||||
.where(b.LIBRARY_ID.eq(libraryId))
|
||||
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
|
||||
.union(
|
||||
select(st.TAG.`as`("tag"))
|
||||
.from(st)
|
||||
.leftJoin(s).on(st.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(s)
|
||||
.on(st.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.eq(libraryId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } },
|
||||
)
|
||||
.fetchSet(0, String::class.java)
|
||||
).fetchSet(0, String::class.java)
|
||||
.sortedBy { it.stripAccents().lowercase() }
|
||||
.toSet()
|
||||
|
||||
|
|
@ -308,43 +315,50 @@ class ReferentialDao(
|
|||
collectionId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.select(bmat.TAG.`as`("tag"))
|
||||
dsl
|
||||
.select(bmat.TAG.`as`("tag"))
|
||||
.from(bmat)
|
||||
.leftJoin(s).on(bmat.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(cs).on(bmat.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.leftJoin(s)
|
||||
.on(bmat.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(cs)
|
||||
.on(bmat.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.where(cs.COLLECTION_ID.eq(collectionId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.union(
|
||||
select(st.TAG.`as`("tag"))
|
||||
.from(st)
|
||||
.leftJoin(cs).on(st.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.leftJoin(s).on(st.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(cs)
|
||||
.on(st.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.leftJoin(s)
|
||||
.on(st.SERIES_ID.eq(s.ID))
|
||||
.where(cs.COLLECTION_ID.eq(collectionId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } },
|
||||
)
|
||||
.fetchSet(0, String::class.java)
|
||||
).fetchSet(0, String::class.java)
|
||||
.sortedBy { it.stripAccents().lowercase() }
|
||||
.toSet()
|
||||
|
||||
override fun findAllSeriesTags(filterOnLibraryIds: Collection<String>?): Set<String> =
|
||||
dsl.select(st.TAG)
|
||||
dsl
|
||||
.select(st.TAG)
|
||||
.from(st)
|
||||
.apply {
|
||||
filterOnLibraryIds?.let {
|
||||
leftJoin(s).on(st.SERIES_ID.eq(s.ID))
|
||||
leftJoin(s)
|
||||
.on(st.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.`in`(it))
|
||||
}
|
||||
}
|
||||
.orderBy(st.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
}.orderBy(st.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.fetchSet(st.TAG)
|
||||
|
||||
override fun findAllSeriesTagsByLibrary(
|
||||
libraryId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.select(st.TAG)
|
||||
dsl
|
||||
.select(st.TAG)
|
||||
.from(st)
|
||||
.leftJoin(s).on(st.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(s)
|
||||
.on(st.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.eq(libraryId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(st.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
|
|
@ -354,9 +368,11 @@ class ReferentialDao(
|
|||
seriesId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.select(bt.TAG)
|
||||
dsl
|
||||
.select(bt.TAG)
|
||||
.from(bt)
|
||||
.leftJoin(b).on(bt.BOOK_ID.eq(b.ID))
|
||||
.leftJoin(b)
|
||||
.on(bt.BOOK_ID.eq(b.ID))
|
||||
.where(b.SERIES_ID.eq(seriesId))
|
||||
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(bt.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
|
|
@ -366,10 +382,13 @@ class ReferentialDao(
|
|||
readListId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.select(bt.TAG)
|
||||
dsl
|
||||
.select(bt.TAG)
|
||||
.from(bt)
|
||||
.leftJoin(b).on(bt.BOOK_ID.eq(b.ID))
|
||||
.leftJoin(rb).on(bt.BOOK_ID.eq(rb.BOOK_ID))
|
||||
.leftJoin(b)
|
||||
.on(bt.BOOK_ID.eq(b.ID))
|
||||
.leftJoin(rb)
|
||||
.on(bt.BOOK_ID.eq(rb.BOOK_ID))
|
||||
.where(rb.READLIST_ID.eq(readListId))
|
||||
.apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(bt.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
|
|
@ -379,9 +398,11 @@ class ReferentialDao(
|
|||
collectionId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.select(st.TAG)
|
||||
dsl
|
||||
.select(st.TAG)
|
||||
.from(st)
|
||||
.leftJoin(cs).on(st.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.leftJoin(cs)
|
||||
.on(st.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(st.SERIES_ID.eq(s.ID)) } }
|
||||
.where(cs.COLLECTION_ID.eq(collectionId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
|
|
@ -389,19 +410,21 @@ class ReferentialDao(
|
|||
.fetchSet(st.TAG)
|
||||
|
||||
override fun findAllBookTags(filterOnLibraryIds: Collection<String>?): Set<String> =
|
||||
dsl.select(bt.TAG)
|
||||
dsl
|
||||
.select(bt.TAG)
|
||||
.from(bt)
|
||||
.apply {
|
||||
filterOnLibraryIds?.let {
|
||||
leftJoin(b).on(bt.BOOK_ID.eq(b.ID))
|
||||
leftJoin(b)
|
||||
.on(bt.BOOK_ID.eq(b.ID))
|
||||
.where(b.LIBRARY_ID.`in`(it))
|
||||
}
|
||||
}
|
||||
.orderBy(st.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
}.orderBy(st.TAG.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.fetchSet(st.TAG)
|
||||
|
||||
override fun findAllLanguages(filterOnLibraryIds: Collection<String>?): Set<String> =
|
||||
dsl.selectDistinct(sd.LANGUAGE)
|
||||
dsl
|
||||
.selectDistinct(sd.LANGUAGE)
|
||||
.from(sd)
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) } }
|
||||
.where(sd.LANGUAGE.ne(""))
|
||||
|
|
@ -413,9 +436,11 @@ class ReferentialDao(
|
|||
libraryId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.selectDistinct(sd.LANGUAGE)
|
||||
dsl
|
||||
.selectDistinct(sd.LANGUAGE)
|
||||
.from(sd)
|
||||
.leftJoin(s).on(sd.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(s)
|
||||
.on(sd.SERIES_ID.eq(s.ID))
|
||||
.where(sd.LANGUAGE.ne(""))
|
||||
.and(s.LIBRARY_ID.eq(libraryId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
|
|
@ -426,9 +451,11 @@ class ReferentialDao(
|
|||
collectionId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.selectDistinct(sd.LANGUAGE)
|
||||
dsl
|
||||
.selectDistinct(sd.LANGUAGE)
|
||||
.from(sd)
|
||||
.leftJoin(cs).on(sd.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.leftJoin(cs)
|
||||
.on(sd.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) } }
|
||||
.where(sd.LANGUAGE.ne(""))
|
||||
.and(cs.COLLECTION_ID.eq(collectionId))
|
||||
|
|
@ -437,7 +464,8 @@ class ReferentialDao(
|
|||
.fetchSet(sd.LANGUAGE)
|
||||
|
||||
override fun findAllPublishers(filterOnLibraryIds: Collection<String>?): Set<String> =
|
||||
dsl.selectDistinct(sd.PUBLISHER)
|
||||
dsl
|
||||
.selectDistinct(sd.PUBLISHER)
|
||||
.from(sd)
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) } }
|
||||
.where(sd.PUBLISHER.ne(""))
|
||||
|
|
@ -450,7 +478,8 @@ class ReferentialDao(
|
|||
pageable: Pageable,
|
||||
): Page<String> {
|
||||
val query =
|
||||
dsl.selectDistinct(sd.PUBLISHER)
|
||||
dsl
|
||||
.selectDistinct(sd.PUBLISHER)
|
||||
.from(sd)
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) } }
|
||||
.where(sd.PUBLISHER.ne(""))
|
||||
|
|
@ -480,9 +509,11 @@ class ReferentialDao(
|
|||
libraryId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.selectDistinct(sd.PUBLISHER)
|
||||
dsl
|
||||
.selectDistinct(sd.PUBLISHER)
|
||||
.from(sd)
|
||||
.leftJoin(s).on(sd.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(s)
|
||||
.on(sd.SERIES_ID.eq(s.ID))
|
||||
.where(sd.PUBLISHER.ne(""))
|
||||
.and(s.LIBRARY_ID.eq(libraryId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
|
|
@ -493,9 +524,11 @@ class ReferentialDao(
|
|||
collectionId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.selectDistinct(sd.PUBLISHER)
|
||||
dsl
|
||||
.selectDistinct(sd.PUBLISHER)
|
||||
.from(sd)
|
||||
.leftJoin(cs).on(sd.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.leftJoin(cs)
|
||||
.on(sd.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) } }
|
||||
.where(sd.PUBLISHER.ne(""))
|
||||
.and(cs.COLLECTION_ID.eq(collectionId))
|
||||
|
|
@ -504,24 +537,27 @@ class ReferentialDao(
|
|||
.fetchSet(sd.PUBLISHER)
|
||||
|
||||
override fun findAllAgeRatings(filterOnLibraryIds: Collection<String>?): Set<Int?> =
|
||||
dsl.selectDistinct(sd.AGE_RATING)
|
||||
dsl
|
||||
.selectDistinct(sd.AGE_RATING)
|
||||
.from(sd)
|
||||
.apply {
|
||||
filterOnLibraryIds?.let {
|
||||
leftJoin(s).on(sd.SERIES_ID.eq(s.ID))
|
||||
leftJoin(s)
|
||||
.on(sd.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.`in`(it))
|
||||
}
|
||||
}
|
||||
.orderBy(sd.AGE_RATING)
|
||||
}.orderBy(sd.AGE_RATING)
|
||||
.fetchSet(sd.AGE_RATING)
|
||||
|
||||
override fun findAllAgeRatingsByLibrary(
|
||||
libraryId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<Int?> =
|
||||
dsl.selectDistinct(sd.AGE_RATING)
|
||||
dsl
|
||||
.selectDistinct(sd.AGE_RATING)
|
||||
.from(sd)
|
||||
.leftJoin(s).on(sd.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(s)
|
||||
.on(sd.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.eq(libraryId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(sd.AGE_RATING)
|
||||
|
|
@ -531,9 +567,11 @@ class ReferentialDao(
|
|||
collectionId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<Int?> =
|
||||
dsl.selectDistinct(sd.AGE_RATING)
|
||||
dsl
|
||||
.selectDistinct(sd.AGE_RATING)
|
||||
.from(sd)
|
||||
.leftJoin(cs).on(sd.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.leftJoin(cs)
|
||||
.on(sd.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(sd.SERIES_ID.eq(s.ID)) } }
|
||||
.where(cs.COLLECTION_ID.eq(collectionId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
|
|
@ -541,7 +579,8 @@ class ReferentialDao(
|
|||
.fetchSet(sd.AGE_RATING)
|
||||
|
||||
override fun findAllSeriesReleaseDates(filterOnLibraryIds: Collection<String>?): Set<LocalDate> =
|
||||
dsl.selectDistinct(bma.RELEASE_DATE)
|
||||
dsl
|
||||
.selectDistinct(bma.RELEASE_DATE)
|
||||
.from(bma)
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(bma.SERIES_ID.eq(s.ID)) } }
|
||||
.where(bma.RELEASE_DATE.isNotNull)
|
||||
|
|
@ -553,9 +592,11 @@ class ReferentialDao(
|
|||
libraryId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<LocalDate> =
|
||||
dsl.selectDistinct(bma.RELEASE_DATE)
|
||||
dsl
|
||||
.selectDistinct(bma.RELEASE_DATE)
|
||||
.from(bma)
|
||||
.leftJoin(s).on(bma.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(s)
|
||||
.on(bma.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.eq(libraryId))
|
||||
.and(bma.RELEASE_DATE.isNotNull)
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
|
|
@ -566,9 +607,11 @@ class ReferentialDao(
|
|||
collectionId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<LocalDate> =
|
||||
dsl.selectDistinct(bma.RELEASE_DATE)
|
||||
dsl
|
||||
.selectDistinct(bma.RELEASE_DATE)
|
||||
.from(bma)
|
||||
.leftJoin(cs).on(bma.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.leftJoin(cs)
|
||||
.on(bma.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(bma.SERIES_ID.eq(s.ID)) } }
|
||||
.where(cs.COLLECTION_ID.eq(collectionId))
|
||||
.and(bma.RELEASE_DATE.isNotNull)
|
||||
|
|
@ -577,24 +620,27 @@ class ReferentialDao(
|
|||
.fetchSet(bma.RELEASE_DATE)
|
||||
|
||||
override fun findAllSharingLabels(filterOnLibraryIds: Collection<String>?): Set<String> =
|
||||
dsl.selectDistinct(sl.LABEL)
|
||||
dsl
|
||||
.selectDistinct(sl.LABEL)
|
||||
.from(sl)
|
||||
.apply {
|
||||
filterOnLibraryIds?.let {
|
||||
leftJoin(s).on(sl.SERIES_ID.eq(s.ID))
|
||||
leftJoin(s)
|
||||
.on(sl.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.`in`(it))
|
||||
}
|
||||
}
|
||||
.orderBy(sl.LABEL.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
}.orderBy(sl.LABEL.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
.fetchSet(sl.LABEL)
|
||||
|
||||
override fun findAllSharingLabelsByLibrary(
|
||||
libraryId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.selectDistinct(sl.LABEL)
|
||||
dsl
|
||||
.selectDistinct(sl.LABEL)
|
||||
.from(sl)
|
||||
.leftJoin(s).on(sl.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(s)
|
||||
.on(sl.SERIES_ID.eq(s.ID))
|
||||
.where(s.LIBRARY_ID.eq(libraryId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
.orderBy(sl.LABEL.collate(SqliteUdfDataSource.COLLATION_UNICODE_3))
|
||||
|
|
@ -604,9 +650,11 @@ class ReferentialDao(
|
|||
collectionId: String,
|
||||
filterOnLibraryIds: Collection<String>?,
|
||||
): Set<String> =
|
||||
dsl.selectDistinct(sl.LABEL)
|
||||
dsl
|
||||
.selectDistinct(sl.LABEL)
|
||||
.from(sl)
|
||||
.leftJoin(cs).on(sl.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.leftJoin(cs)
|
||||
.on(sl.SERIES_ID.eq(cs.SERIES_ID))
|
||||
.apply { filterOnLibraryIds?.let { leftJoin(s).on(sl.SERIES_ID.eq(s.ID)) } }
|
||||
.where(cs.COLLECTION_ID.eq(collectionId))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
|
|
|
|||
|
|
@ -77,11 +77,15 @@ class SeriesCollectionDao(
|
|||
if (belongsToLibraryIds == null && filterOnLibraryIds == null && !restrictions.isRestricted)
|
||||
null
|
||||
else
|
||||
dsl.selectDistinct(c.ID)
|
||||
dsl
|
||||
.selectDistinct(c.ID)
|
||||
.from(c)
|
||||
.leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
|
||||
.leftJoin(s).on(cs.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(sd).on(cs.SERIES_ID.eq(sd.SERIES_ID))
|
||||
.leftJoin(cs)
|
||||
.on(c.ID.eq(cs.COLLECTION_ID))
|
||||
.leftJoin(s)
|
||||
.on(cs.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(sd)
|
||||
.on(cs.SERIES_ID.eq(sd.SERIES_ID))
|
||||
.where(conditions)
|
||||
|
||||
val count =
|
||||
|
|
@ -123,9 +127,11 @@ class SeriesCollectionDao(
|
|||
restrictions: ContentRestrictions,
|
||||
): Collection<SeriesCollection> {
|
||||
val queryIds =
|
||||
dsl.select(c.ID)
|
||||
dsl
|
||||
.select(c.ID)
|
||||
.from(c)
|
||||
.leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
|
||||
.leftJoin(cs)
|
||||
.on(c.ID.eq(cs.COLLECTION_ID))
|
||||
.apply { if (restrictions.isRestricted) leftJoin(sd).on(cs.SERIES_ID.eq(sd.SERIES_ID)) }
|
||||
.where(cs.SERIES_ID.eq(containsSeriesId))
|
||||
.apply { if (restrictions.isRestricted) and(restrictions.toCondition()) }
|
||||
|
|
@ -138,12 +144,15 @@ class SeriesCollectionDao(
|
|||
}
|
||||
|
||||
override fun findAllEmpty(): Collection<SeriesCollection> =
|
||||
dsl.selectFrom(c)
|
||||
dsl
|
||||
.selectFrom(c)
|
||||
.where(
|
||||
c.ID.`in`(
|
||||
dsl.select(c.ID)
|
||||
dsl
|
||||
.select(c.ID)
|
||||
.from(c)
|
||||
.leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
|
||||
.leftJoin(cs)
|
||||
.on(c.ID.eq(cs.COLLECTION_ID))
|
||||
.where(cs.COLLECTION_ID.isNull),
|
||||
),
|
||||
).fetchInto(c)
|
||||
|
|
@ -156,10 +165,13 @@ class SeriesCollectionDao(
|
|||
.firstOrNull()
|
||||
|
||||
private fun selectBase(joinOnSeriesMetadata: Boolean = false) =
|
||||
dsl.selectDistinct(*c.fields())
|
||||
dsl
|
||||
.selectDistinct(*c.fields())
|
||||
.from(c)
|
||||
.leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
|
||||
.leftJoin(s).on(cs.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(cs)
|
||||
.on(c.ID.eq(cs.COLLECTION_ID))
|
||||
.leftJoin(s)
|
||||
.on(cs.SERIES_ID.eq(s.ID))
|
||||
.apply { if (joinOnSeriesMetadata) leftJoin(sd).on(cs.SERIES_ID.eq(sd.SERIES_ID)) }
|
||||
|
||||
private fun ResultQuery<Record>.fetchAndMap(
|
||||
|
|
@ -169,9 +181,11 @@ class SeriesCollectionDao(
|
|||
fetchInto(c)
|
||||
.map { cr ->
|
||||
val seriesIds =
|
||||
dsl.select(*cs.fields())
|
||||
dsl
|
||||
.select(*cs.fields())
|
||||
.from(cs)
|
||||
.leftJoin(s).on(cs.SERIES_ID.eq(s.ID))
|
||||
.leftJoin(s)
|
||||
.on(cs.SERIES_ID.eq(s.ID))
|
||||
.apply { if (restrictions.isRestricted) leftJoin(sd).on(cs.SERIES_ID.eq(sd.SERIES_ID)) }
|
||||
.where(cs.COLLECTION_ID.eq(cr.id))
|
||||
.apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
|
||||
|
|
@ -184,7 +198,8 @@ class SeriesCollectionDao(
|
|||
|
||||
@Transactional
|
||||
override fun insert(collection: SeriesCollection) {
|
||||
dsl.insertInto(c)
|
||||
dsl
|
||||
.insertInto(c)
|
||||
.set(c.ID, collection.id)
|
||||
.set(c.NAME, collection.name)
|
||||
.set(c.ORDERED, collection.ordered)
|
||||
|
|
@ -196,7 +211,8 @@ class SeriesCollectionDao(
|
|||
|
||||
private fun insertSeries(collection: SeriesCollection) {
|
||||
collection.seriesIds.forEachIndexed { index, id ->
|
||||
dsl.insertInto(cs)
|
||||
dsl
|
||||
.insertInto(cs)
|
||||
.set(cs.COLLECTION_ID, collection.id)
|
||||
.set(cs.SERIES_ID, id)
|
||||
.set(cs.NUMBER, index)
|
||||
|
|
@ -206,7 +222,8 @@ class SeriesCollectionDao(
|
|||
|
||||
@Transactional
|
||||
override fun update(collection: SeriesCollection) {
|
||||
dsl.update(c)
|
||||
dsl
|
||||
.update(c)
|
||||
.set(c.NAME, collection.name)
|
||||
.set(c.ORDERED, collection.ordered)
|
||||
.set(c.SERIES_COUNT, collection.seriesIds.size)
|
||||
|
|
@ -221,7 +238,8 @@ class SeriesCollectionDao(
|
|||
|
||||
@Transactional
|
||||
override fun removeSeriesFromAll(seriesId: String) {
|
||||
dsl.deleteFrom(cs)
|
||||
dsl
|
||||
.deleteFrom(cs)
|
||||
.where(cs.SERIES_ID.eq(seriesId))
|
||||
.execute()
|
||||
}
|
||||
|
|
@ -230,7 +248,8 @@ class SeriesCollectionDao(
|
|||
override fun removeSeriesFromAll(seriesIds: Collection<String>) {
|
||||
dsl.insertTempStrings(batchSize, seriesIds)
|
||||
|
||||
dsl.deleteFrom(cs)
|
||||
dsl
|
||||
.deleteFrom(cs)
|
||||
.where(cs.SERIES_ID.`in`(dsl.selectTempStrings()))
|
||||
.execute()
|
||||
}
|
||||
|
|
@ -255,7 +274,8 @@ class SeriesCollectionDao(
|
|||
|
||||
override fun existsByName(name: String): Boolean =
|
||||
dsl.fetchExists(
|
||||
dsl.selectFrom(c)
|
||||
dsl
|
||||
.selectFrom(c)
|
||||
.where(c.NAME.equalIgnoreCase(name)),
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -38,18 +38,21 @@ class SeriesDao(
private val bma = Tables.BOOK_METADATA_AGGREGATION

override fun findAll(): Collection<Series> =
dsl.selectFrom(s)
dsl
.selectFrom(s)
.fetchInto(s)
.map { it.toDomain() }

override fun findByIdOrNull(seriesId: String): Series? =
dsl.selectFrom(s)
dsl
.selectFrom(s)
.where(s.ID.eq(seriesId))
.fetchOneInto(s)
?.toDomain()

override fun findAllByLibraryId(libraryId: String): List<Series> =
dsl.selectFrom(s)
dsl
.selectFrom(s)
.where(s.LIBRARY_ID.eq(libraryId))
.fetchInto(s)
.map { it.toDomain() }
@ -61,7 +64,8 @@ class SeriesDao(
|
|||
): List<Series> {
|
||||
dsl.insertTempStrings(batchSize, urls.map { it.toString() })
|
||||
|
||||
return dsl.selectFrom(s)
|
||||
return dsl
|
||||
.selectFrom(s)
|
||||
.where(s.LIBRARY_ID.eq(libraryId))
|
||||
.and(s.DELETED_DATE.isNull)
|
||||
.and(s.URL.notIn(dsl.selectTempStrings()))
|
||||
|
|
@ -73,7 +77,8 @@ class SeriesDao(
|
|||
libraryId: String,
|
||||
url: URL,
|
||||
): Series? =
|
||||
dsl.selectFrom(s)
|
||||
dsl
|
||||
.selectFrom(s)
|
||||
.where(s.LIBRARY_ID.eq(libraryId).and(s.URL.eq(url.toString())))
|
||||
.and(s.DELETED_DATE.isNull)
|
||||
.orderBy(s.LAST_MODIFIED_DATE.desc())
|
||||
|
|
@ -82,21 +87,25 @@ class SeriesDao(
|
|||
?.toDomain()
|
||||
|
||||
override fun findAllByTitleContaining(title: String): Collection<Series> =
|
||||
dsl.selectDistinct(*s.fields())
|
||||
dsl
|
||||
.selectDistinct(*s.fields())
|
||||
.from(s)
|
||||
.leftJoin(d).on(s.ID.eq(d.SERIES_ID))
|
||||
.leftJoin(d)
|
||||
.on(s.ID.eq(d.SERIES_ID))
|
||||
.where(d.TITLE.containsIgnoreCase(title))
|
||||
.fetchInto(s)
|
||||
.map { it.toDomain() }
|
||||
|
||||
override fun getLibraryId(seriesId: String): String? =
|
||||
dsl.select(s.LIBRARY_ID)
|
||||
dsl
|
||||
.select(s.LIBRARY_ID)
|
||||
.from(s)
|
||||
.where(s.ID.eq(seriesId))
|
||||
.fetchOne(0, String::class.java)
|
||||
|
||||
override fun findAllIdsByLibraryId(libraryId: String): Collection<String> =
|
||||
dsl.select(s.ID)
|
||||
dsl
|
||||
.select(s.ID)
|
||||
.from(s)
|
||||
.where(s.LIBRARY_ID.eq(libraryId))
|
||||
.fetch(s.ID)
|
||||
|
|
@ -116,7 +125,8 @@ class SeriesDao(
|
|||
pageable: Pageable,
|
||||
): Page<Series> {
|
||||
val query =
|
||||
dsl.selectDistinct(*s.fields())
|
||||
dsl
|
||||
.selectDistinct(*s.fields())
|
||||
.from(s)
|
||||
.apply {
|
||||
joins.forEach { join ->
|
||||
|
|
@ -129,8 +139,7 @@ class SeriesDao(
|
|||
RequiredJoin.Media -> Unit
|
||||
}
|
||||
}
|
||||
}
|
||||
.where(conditions)
|
||||
}.where(conditions)
|
||||
|
||||
val count = dsl.fetchCount(query)
|
||||
val items =
|
||||
|
|
@ -150,7 +159,8 @@ class SeriesDao(
|
|||
}
|
||||
|
||||
override fun insert(series: Series) {
|
||||
dsl.insertInto(s)
|
||||
dsl
|
||||
.insertInto(s)
|
||||
.set(s.ID, series.id)
|
||||
.set(s.NAME, series.name)
|
||||
.set(s.URL, series.url.toString())
|
||||
|
|
@ -165,7 +175,8 @@ class SeriesDao(
|
|||
series: Series,
|
||||
updateModifiedTime: Boolean,
|
||||
) {
|
||||
dsl.update(s)
|
||||
dsl
|
||||
.update(s)
|
||||
.set(s.NAME, series.name)
|
||||
.set(s.URL, series.url.toString())
|
||||
.set(s.FILE_LAST_MODIFIED, series.fileLastModified)
|
||||
|
|
@ -196,7 +207,8 @@ class SeriesDao(
|
|||
override fun count(): Long = dsl.fetchCount(s).toLong()
|
||||
|
||||
override fun countGroupedByLibraryId(): Map<String, Int> =
|
||||
dsl.select(s.LIBRARY_ID, DSL.count(s.ID))
|
||||
dsl
|
||||
.select(s.LIBRARY_ID, DSL.count(s.ID))
|
||||
.from(s)
|
||||
.groupBy(s.LIBRARY_ID)
|
||||
.fetchMap(s.LIBRARY_ID, DSL.count(s.ID))
|
||||
|
|
|
|||
|
|
@ -95,8 +95,7 @@ class SeriesDtoDao(
|
|||
"booksCount" to s.BOOK_COUNT,
|
||||
)
|
||||
|
||||
override fun findAll(pageable: Pageable): Page<SeriesDto> =
|
||||
findAll(SeriesSearch(), SearchContext.ofAnonymousUser(), pageable)
|
||||
override fun findAll(pageable: Pageable): Page<SeriesDto> = findAll(SeriesSearch(), SearchContext.ofAnonymousUser(), pageable)
|
||||
|
||||
override fun findAll(
|
||||
context: SearchContext,
|
||||
|
|
@ -142,11 +141,16 @@ class SeriesDtoDao(
|
|||
val searchCondition = s.ID.inOrNoCondition(seriesIds)
|
||||
|
||||
val firstChar = lower(substring(d.TITLE_SORT, 1, 1))
|
||||
return dsl.select(firstChar, count())
|
||||
return dsl
|
||||
.select(firstChar, count())
|
||||
.from(s)
|
||||
.leftJoin(d).on(s.ID.eq(d.SERIES_ID))
|
||||
.leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
|
||||
.leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(context.userId))
|
||||
.leftJoin(d)
|
||||
.on(s.ID.eq(d.SERIES_ID))
|
||||
.leftJoin(bma)
|
||||
.on(s.ID.eq(bma.SERIES_ID))
|
||||
.leftJoin(rs)
|
||||
.on(s.ID.eq(rs.SERIES_ID))
|
||||
.and(readProgressConditionSeries(context.userId))
|
||||
.apply {
|
||||
joins.forEach { join ->
|
||||
when (join) {
|
||||
|
|
@ -159,8 +163,7 @@ class SeriesDtoDao(
|
|||
RequiredJoin.BookMetadataAggregation -> Unit
|
||||
}
|
||||
}
|
||||
}
|
||||
.where(conditionsRefined)
|
||||
}.where(conditionsRefined)
|
||||
.and(searchCondition)
|
||||
.groupBy(firstChar)
|
||||
.map {
|
||||
|
|
@ -186,9 +189,13 @@ class SeriesDtoDao(
|
|||
dsl
|
||||
.let { if (joinOnCollection) it.selectDistinct(*groupFields) else it.select(*groupFields) }
|
||||
.from(s)
|
||||
.leftJoin(d).on(s.ID.eq(d.SERIES_ID))
|
||||
.leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
|
||||
.leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
|
||||
.leftJoin(d)
|
||||
.on(s.ID.eq(d.SERIES_ID))
|
||||
.leftJoin(bma)
|
||||
.on(s.ID.eq(bma.SERIES_ID))
|
||||
.leftJoin(rs)
|
||||
.on(s.ID.eq(rs.SERIES_ID))
|
||||
.and(readProgressConditionSeries(userId))
|
||||
.apply {
|
||||
if (joinOnCollection)leftJoin(cs).on(s.ID.eq(cs.SERIES_ID))
|
||||
joins.forEach { join ->
|
||||
|
|
@ -215,11 +222,16 @@ class SeriesDtoDao(
|
|||
val searchCondition = s.ID.inOrNoCondition(seriesIds)
|
||||
|
||||
val count =
|
||||
dsl.select(countDistinct(s.ID))
|
||||
dsl
|
||||
.select(countDistinct(s.ID))
|
||||
.from(s)
|
||||
.leftJoin(d).on(s.ID.eq(d.SERIES_ID))
|
||||
.leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
|
||||
.leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
|
||||
.leftJoin(d)
|
||||
.on(s.ID.eq(d.SERIES_ID))
|
||||
.leftJoin(bma)
|
||||
.on(s.ID.eq(bma.SERIES_ID))
|
||||
.leftJoin(rs)
|
||||
.on(s.ID.eq(rs.SERIES_ID))
|
||||
.and(readProgressConditionSeries(userId))
|
||||
.apply {
|
||||
joins.forEach { join ->
|
||||
when (join) {
|
||||
|
|
@ -232,8 +244,7 @@ class SeriesDtoDao(
|
|||
RequiredJoin.Media -> Unit
|
||||
}
|
||||
}
|
||||
}
|
||||
.where(conditions)
|
||||
}.where(conditions)
|
||||
.and(searchCondition)
|
||||
.fetchOne(countDistinct(s.ID)) ?: 0
|
||||
|
||||
|
|
@ -280,38 +291,45 @@ class SeriesDtoDao(
|
|||
transactionTemplate.executeWithoutResult {
|
||||
dsl.insertTempStrings(batchSize, seriesIds)
|
||||
genres =
|
||||
dsl.selectFrom(g)
|
||||
dsl
|
||||
.selectFrom(g)
|
||||
.where(g.SERIES_ID.`in`(dsl.selectTempStrings()))
|
||||
.groupBy({ it.seriesId }, { it.genre })
|
||||
|
||||
tags =
|
||||
dsl.selectFrom(st)
|
||||
dsl
|
||||
.selectFrom(st)
|
||||
.where(st.SERIES_ID.`in`(dsl.selectTempStrings()))
|
||||
.groupBy({ it.seriesId }, { it.tag })
|
||||
|
||||
sharingLabels =
|
||||
dsl.selectFrom(sl)
|
||||
dsl
|
||||
.selectFrom(sl)
|
||||
.where(sl.SERIES_ID.`in`(dsl.selectTempStrings()))
|
||||
.groupBy({ it.seriesId }, { it.label })
|
||||
|
||||
links =
|
||||
dsl.selectFrom(slk)
|
||||
dsl
|
||||
.selectFrom(slk)
|
||||
.where(slk.SERIES_ID.`in`(dsl.selectTempStrings()))
|
||||
.groupBy({ it.seriesId }, { WebLinkDto(it.label, it.url) })
|
||||
|
||||
alternateTitles =
|
||||
dsl.selectFrom(sat)
|
||||
dsl
|
||||
.selectFrom(sat)
|
||||
.where(sat.SERIES_ID.`in`(dsl.selectTempStrings()))
|
||||
.groupBy({ it.seriesId }, { AlternateTitleDto(it.label, it.title) })
|
||||
|
||||
aggregatedAuthors =
|
||||
dsl.selectFrom(bmaa)
|
||||
dsl
|
||||
.selectFrom(bmaa)
|
||||
.where(bmaa.SERIES_ID.`in`(dsl.selectTempStrings()))
|
||||
.filter { it.name != null }
|
||||
.groupBy({ it.seriesId }, { AuthorDto(it.name, it.role) })
|
||||
|
||||
aggregatedTags =
|
||||
dsl.selectFrom(bmat)
|
||||
dsl
|
||||
.selectFrom(bmat)
|
||||
.where(bmat.SERIES_ID.`in`(dsl.selectTempStrings()))
|
||||
.groupBy({ it.seriesId }, { it.tag })
|
||||
}
|
||||
|
|
@ -350,24 +368,23 @@ class SeriesDtoDao(
|
|||
booksInProgressCount: Int,
|
||||
metadata: SeriesMetadataDto,
|
||||
booksMetadata: BookMetadataAggregationDto,
|
||||
) =
|
||||
SeriesDto(
|
||||
id = id,
|
||||
libraryId = libraryId,
|
||||
name = name,
|
||||
url = URL(url).toFilePath(),
|
||||
created = createdDate,
|
||||
lastModified = lastModifiedDate,
|
||||
fileLastModified = fileLastModified,
|
||||
booksCount = booksCount,
|
||||
booksReadCount = booksReadCount,
|
||||
booksUnreadCount = booksUnreadCount,
|
||||
booksInProgressCount = booksInProgressCount,
|
||||
metadata = metadata,
|
||||
booksMetadata = booksMetadata,
|
||||
deleted = deletedDate != null,
|
||||
oneshot = oneshot,
|
||||
)
|
||||
) = SeriesDto(
|
||||
id = id,
|
||||
libraryId = libraryId,
|
||||
name = name,
|
||||
url = URL(url).toFilePath(),
|
||||
created = createdDate,
|
||||
lastModified = lastModifiedDate,
|
||||
fileLastModified = fileLastModified,
|
||||
booksCount = booksCount,
|
||||
booksReadCount = booksReadCount,
|
||||
booksUnreadCount = booksUnreadCount,
|
||||
booksInProgressCount = booksInProgressCount,
|
||||
metadata = metadata,
|
||||
booksMetadata = booksMetadata,
|
||||
deleted = deletedDate != null,
|
||||
oneshot = oneshot,
|
||||
)
|
||||
|
||||
private fun SeriesMetadataRecord.toDto(
|
||||
genres: Set<String>,
|
||||
|
|
@ -375,51 +392,49 @@ class SeriesDtoDao(
|
|||
sharingLabels: Set<String>,
|
||||
links: List<WebLinkDto>,
|
||||
alternateTitles: List<AlternateTitleDto>,
|
||||
) =
|
||||
SeriesMetadataDto(
|
||||
status = status,
|
||||
statusLock = statusLock,
|
||||
created = createdDate,
|
||||
lastModified = lastModifiedDate,
|
||||
title = title,
|
||||
titleLock = titleLock,
|
||||
titleSort = titleSort,
|
||||
titleSortLock = titleSortLock,
|
||||
summary = summary,
|
||||
summaryLock = summaryLock,
|
||||
readingDirection = readingDirection ?: "",
|
||||
readingDirectionLock = readingDirectionLock,
|
||||
publisher = publisher,
|
||||
publisherLock = publisherLock,
|
||||
ageRating = ageRating,
|
||||
ageRatingLock = ageRatingLock,
|
||||
language = language,
|
||||
languageLock = languageLock,
|
||||
genres = genres,
|
||||
genresLock = genresLock,
|
||||
tags = tags,
|
||||
tagsLock = tagsLock,
|
||||
totalBookCount = totalBookCount,
|
||||
totalBookCountLock = totalBookCountLock,
|
||||
sharingLabels = sharingLabels,
|
||||
sharingLabelsLock = sharingLabelsLock,
|
||||
links = links,
|
||||
linksLock = linksLock,
|
||||
alternateTitles = alternateTitles,
|
||||
alternateTitlesLock = alternateTitlesLock,
|
||||
)
|
||||
) = SeriesMetadataDto(
|
||||
status = status,
|
||||
statusLock = statusLock,
|
||||
created = createdDate,
|
||||
lastModified = lastModifiedDate,
|
||||
title = title,
|
||||
titleLock = titleLock,
|
||||
titleSort = titleSort,
|
||||
titleSortLock = titleSortLock,
|
||||
summary = summary,
|
||||
summaryLock = summaryLock,
|
||||
readingDirection = readingDirection ?: "",
|
||||
readingDirectionLock = readingDirectionLock,
|
||||
publisher = publisher,
|
||||
publisherLock = publisherLock,
|
||||
ageRating = ageRating,
|
||||
ageRatingLock = ageRatingLock,
|
||||
language = language,
|
||||
languageLock = languageLock,
|
||||
genres = genres,
|
||||
genresLock = genresLock,
|
||||
tags = tags,
|
||||
tagsLock = tagsLock,
|
||||
totalBookCount = totalBookCount,
|
||||
totalBookCountLock = totalBookCountLock,
|
||||
sharingLabels = sharingLabels,
|
||||
sharingLabelsLock = sharingLabelsLock,
|
||||
links = links,
|
||||
linksLock = linksLock,
|
||||
alternateTitles = alternateTitles,
|
||||
alternateTitlesLock = alternateTitlesLock,
|
||||
)
|
||||
|
||||
private fun BookMetadataAggregationRecord.toDto(
|
||||
authors: List<AuthorDto>,
|
||||
tags: Set<String>,
|
||||
) =
|
||||
BookMetadataAggregationDto(
|
||||
authors = authors,
|
||||
tags = tags,
|
||||
releaseDate = releaseDate,
|
||||
summary = summary,
|
||||
summaryNumber = summaryNumber,
|
||||
created = createdDate,
|
||||
lastModified = lastModifiedDate,
|
||||
)
|
||||
) = BookMetadataAggregationDto(
|
||||
authors = authors,
|
||||
tags = tags,
|
||||
releaseDate = releaseDate,
|
||||
summary = summary,
|
||||
summaryNumber = summaryNumber,
|
||||
created = createdDate,
|
||||
lastModified = lastModifiedDate,
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -29,44 +29,48 @@ class SeriesMetadataDao(
|
|||
private val slk = Tables.SERIES_METADATA_LINK
|
||||
private val sat = Tables.SERIES_METADATA_ALTERNATE_TITLE
|
||||
|
||||
override fun findById(seriesId: String): SeriesMetadata =
|
||||
findOne(seriesId)!!.toDomain(findGenres(seriesId), findTags(seriesId), findSharingLabels(seriesId), findLinks(seriesId), findAlternateTitles(seriesId))
|
||||
override fun findById(seriesId: String): SeriesMetadata = findOne(seriesId)!!.toDomain(findGenres(seriesId), findTags(seriesId), findSharingLabels(seriesId), findLinks(seriesId), findAlternateTitles(seriesId))
|
||||
|
||||
override fun findByIdOrNull(seriesId: String): SeriesMetadata? =
|
||||
findOne(seriesId)?.toDomain(findGenres(seriesId), findTags(seriesId), findSharingLabels(seriesId), findLinks(seriesId), findAlternateTitles(seriesId))
|
||||
override fun findByIdOrNull(seriesId: String): SeriesMetadata? = findOne(seriesId)?.toDomain(findGenres(seriesId), findTags(seriesId), findSharingLabels(seriesId), findLinks(seriesId), findAlternateTitles(seriesId))
|
||||
|
||||
private fun findOne(seriesId: String) =
|
||||
dsl.selectFrom(d)
|
||||
dsl
|
||||
.selectFrom(d)
|
||||
.where(d.SERIES_ID.eq(seriesId))
|
||||
.fetchOneInto(d)
|
||||
|
||||
private fun findGenres(seriesId: String) =
|
||||
dsl.select(g.GENRE)
|
||||
dsl
|
||||
.select(g.GENRE)
|
||||
.from(g)
|
||||
.where(g.SERIES_ID.eq(seriesId))
|
||||
.fetchSet(g.GENRE)
|
||||
|
||||
private fun findTags(seriesId: String) =
|
||||
dsl.select(st.TAG)
|
||||
dsl
|
||||
.select(st.TAG)
|
||||
.from(st)
|
||||
.where(st.SERIES_ID.eq(seriesId))
|
||||
.fetchSet(st.TAG)
|
||||
|
||||
private fun findSharingLabels(seriesId: String) =
|
||||
dsl.select(sl.LABEL)
|
||||
dsl
|
||||
.select(sl.LABEL)
|
||||
.from(sl)
|
||||
.where(sl.SERIES_ID.eq(seriesId))
|
||||
.fetchSet(sl.LABEL)
|
||||
|
||||
private fun findLinks(seriesId: String) =
|
||||
dsl.select(slk.LABEL, slk.URL)
|
||||
dsl
|
||||
.select(slk.LABEL, slk.URL)
|
||||
.from(slk)
|
||||
.where(slk.SERIES_ID.eq(seriesId))
|
||||
.fetchInto(slk)
|
||||
.map { WebLink(it.label, URI(it.url)) }
|
||||
|
||||
private fun findAlternateTitles(seriesId: String) =
|
||||
dsl.select(sat.LABEL, sat.TITLE)
|
||||
dsl
|
||||
.select(sat.LABEL, sat.TITLE)
|
||||
.from(sat)
|
||||
.where(sat.SERIES_ID.eq(seriesId))
|
||||
.fetchInto(sat)
|
||||
|
|
@ -74,7 +78,8 @@ class SeriesMetadataDao(
|
|||
|
||||
@Transactional
|
||||
override fun insert(metadata: SeriesMetadata) {
|
||||
dsl.insertInto(d)
|
||||
dsl
|
||||
.insertInto(d)
|
||||
.set(d.SERIES_ID, metadata.seriesId)
|
||||
.set(d.STATUS, metadata.status.toString())
|
||||
.set(d.TITLE, metadata.title)
|
||||
|
|
@ -110,7 +115,8 @@ class SeriesMetadataDao(
|
|||
|
||||
@Transactional
|
||||
override fun update(metadata: SeriesMetadata) {
|
||||
dsl.update(d)
|
||||
dsl
|
||||
.update(d)
|
||||
.set(d.STATUS, metadata.status.toString())
|
||||
.set(d.TITLE, metadata.title)
|
||||
.set(d.TITLE_SORT, metadata.titleSort)
|
||||
|
|
@ -138,23 +144,28 @@ class SeriesMetadataDao(
|
|||
.where(d.SERIES_ID.eq(metadata.seriesId))
|
||||
.execute()
|
||||
|
||||
dsl.deleteFrom(g)
|
||||
dsl
|
||||
.deleteFrom(g)
|
||||
.where(g.SERIES_ID.eq(metadata.seriesId))
|
||||
.execute()
|
||||
|
||||
dsl.deleteFrom(st)
|
||||
dsl
|
||||
.deleteFrom(st)
|
||||
.where(st.SERIES_ID.eq(metadata.seriesId))
|
||||
.execute()
|
||||
|
||||
dsl.deleteFrom(sl)
|
||||
dsl
|
||||
.deleteFrom(sl)
|
||||
.where(sl.SERIES_ID.eq(metadata.seriesId))
|
||||
.execute()
|
||||
|
||||
dsl.deleteFrom(slk)
|
||||
dsl
|
||||
.deleteFrom(slk)
|
||||
.where(slk.SERIES_ID.eq(metadata.seriesId))
|
||||
.execute()
|
||||
|
||||
dsl.deleteFrom(sat)
|
||||
dsl
|
||||
.deleteFrom(sat)
|
||||
.where(sat.SERIES_ID.eq(metadata.seriesId))
|
||||
.execute()
|
||||
|
||||
|
|
@ -168,14 +179,16 @@ class SeriesMetadataDao(
|
|||
private fun insertGenres(metadata: SeriesMetadata) {
|
||||
if (metadata.genres.isNotEmpty()) {
|
||||
metadata.genres.chunked(batchSize).forEach { chunk ->
|
||||
dsl.batch(
|
||||
dsl.insertInto(g, g.SERIES_ID, g.GENRE)
|
||||
.values(null as String?, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(metadata.seriesId, it)
|
||||
}
|
||||
}.execute()
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(g, g.SERIES_ID, g.GENRE)
|
||||
.values(null as String?, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(metadata.seriesId, it)
|
||||
}
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -183,14 +196,16 @@ class SeriesMetadataDao(
|
|||
private fun insertTags(metadata: SeriesMetadata) {
|
||||
if (metadata.tags.isNotEmpty()) {
|
||||
metadata.tags.chunked(batchSize).forEach { chunk ->
|
||||
dsl.batch(
|
||||
dsl.insertInto(st, st.SERIES_ID, st.TAG)
|
||||
.values(null as String?, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(metadata.seriesId, it)
|
||||
}
|
||||
}.execute()
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(st, st.SERIES_ID, st.TAG)
|
||||
.values(null as String?, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(metadata.seriesId, it)
|
||||
}
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -198,14 +213,16 @@ class SeriesMetadataDao(
|
|||
private fun insertSharingLabels(metadata: SeriesMetadata) {
|
||||
if (metadata.sharingLabels.isNotEmpty()) {
|
||||
metadata.sharingLabels.chunked(batchSize).forEach { chunk ->
|
||||
dsl.batch(
|
||||
dsl.insertInto(sl, sl.SERIES_ID, sl.LABEL)
|
||||
.values(null as String?, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(metadata.seriesId, it)
|
||||
}
|
||||
}.execute()
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(sl, sl.SERIES_ID, sl.LABEL)
|
||||
.values(null as String?, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(metadata.seriesId, it)
|
||||
}
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -213,14 +230,16 @@ class SeriesMetadataDao(
|
|||
private fun insertLinks(metadata: SeriesMetadata) {
|
||||
if (metadata.links.isNotEmpty()) {
|
||||
metadata.links.chunked(batchSize).forEach { chunk ->
|
||||
dsl.batch(
|
||||
dsl.insertInto(slk, slk.SERIES_ID, slk.LABEL, slk.URL)
|
||||
.values(null as String?, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(metadata.seriesId, it.label, it.url.toString())
|
||||
}
|
||||
}.execute()
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(slk, slk.SERIES_ID, slk.LABEL, slk.URL)
|
||||
.values(null as String?, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(metadata.seriesId, it.label, it.url.toString())
|
||||
}
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -228,14 +247,16 @@ class SeriesMetadataDao(
|
|||
private fun insertAlternateTitles(metadata: SeriesMetadata) {
|
||||
if (metadata.alternateTitles.isNotEmpty()) {
|
||||
metadata.alternateTitles.chunked(batchSize).forEach { chunk ->
|
||||
dsl.batch(
|
||||
dsl.insertInto(sat, sat.SERIES_ID, sat.LABEL, sat.TITLE)
|
||||
.values(null as String?, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(metadata.seriesId, it.label, it.title)
|
||||
}
|
||||
}.execute()
|
||||
dsl
|
||||
.batch(
|
||||
dsl
|
||||
.insertInto(sat, sat.SERIES_ID, sat.LABEL, sat.TITLE)
|
||||
.values(null as String?, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(metadata.seriesId, it.label, it.title)
|
||||
}
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -270,41 +291,40 @@ class SeriesMetadataDao(
|
|||
sharingLabels: Set<String>,
|
||||
links: List<WebLink>,
|
||||
alternateTitles: List<AlternateTitle>,
|
||||
) =
|
||||
SeriesMetadata(
|
||||
status = SeriesMetadata.Status.valueOf(status),
|
||||
title = title,
|
||||
titleSort = titleSort,
|
||||
summary = summary,
|
||||
readingDirection =
|
||||
readingDirection?.let {
|
||||
SeriesMetadata.ReadingDirection.valueOf(readingDirection)
|
||||
},
|
||||
publisher = publisher,
|
||||
ageRating = ageRating,
|
||||
language = language,
|
||||
genres = genres,
|
||||
tags = tags,
|
||||
totalBookCount = totalBookCount,
|
||||
sharingLabels = sharingLabels,
|
||||
links = links,
|
||||
alternateTitles = alternateTitles,
|
||||
statusLock = statusLock,
|
||||
titleLock = titleLock,
|
||||
titleSortLock = titleSortLock,
|
||||
summaryLock = summaryLock,
|
||||
readingDirectionLock = readingDirectionLock,
|
||||
publisherLock = publisherLock,
|
||||
ageRatingLock = ageRatingLock,
|
||||
languageLock = languageLock,
|
||||
genresLock = genresLock,
|
||||
tagsLock = tagsLock,
|
||||
totalBookCountLock = totalBookCountLock,
|
||||
sharingLabelsLock = sharingLabelsLock,
|
||||
linksLock = linksLock,
|
||||
alternateTitlesLock = alternateTitlesLock,
|
||||
seriesId = seriesId,
|
||||
createdDate = createdDate.toCurrentTimeZone(),
|
||||
lastModifiedDate = lastModifiedDate.toCurrentTimeZone(),
|
||||
)
|
||||
) = SeriesMetadata(
|
||||
status = SeriesMetadata.Status.valueOf(status),
|
||||
title = title,
|
||||
titleSort = titleSort,
|
||||
summary = summary,
|
||||
readingDirection =
|
||||
readingDirection?.let {
|
||||
SeriesMetadata.ReadingDirection.valueOf(readingDirection)
|
||||
},
|
||||
publisher = publisher,
|
||||
ageRating = ageRating,
|
||||
language = language,
|
||||
genres = genres,
|
||||
tags = tags,
|
||||
totalBookCount = totalBookCount,
|
||||
sharingLabels = sharingLabels,
|
||||
links = links,
|
||||
alternateTitles = alternateTitles,
|
||||
statusLock = statusLock,
|
||||
titleLock = titleLock,
|
||||
titleSortLock = titleSortLock,
|
||||
summaryLock = summaryLock,
|
||||
readingDirectionLock = readingDirectionLock,
|
||||
publisherLock = publisherLock,
|
||||
ageRatingLock = ageRatingLock,
|
||||
languageLock = languageLock,
|
||||
genresLock = genresLock,
|
||||
tagsLock = tagsLock,
|
||||
totalBookCountLock = totalBookCountLock,
|
||||
sharingLabelsLock = sharingLabelsLock,
|
||||
linksLock = linksLock,
|
||||
alternateTitlesLock = alternateTitlesLock,
|
||||
seriesId = seriesId,
|
||||
createdDate = createdDate.toCurrentTimeZone(),
|
||||
lastModifiedDate = lastModifiedDate.toCurrentTimeZone(),
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,7 +14,8 @@ class ServerSettingsDao(
key: String,
clazz: Class<T>,
): T? =
dsl.select(s.VALUE)
dsl
.select(s.VALUE)
.from(s)
.where(s.KEY.eq(key))
.fetchOneInto(clazz)

@ -23,7 +24,8 @@ class ServerSettingsDao(
key: String,
value: String,
) {
dsl.insertInto(s)
dsl
.insertInto(s)
.values(key, value)
.onDuplicateKeyUpdate()
.set(s.VALUE, value)
@ -22,21 +22,20 @@ class SidecarDao(
private val sc = Tables.SIDECAR
private val l = Tables.LIBRARY

override fun findAll(): Collection<SidecarStored> =
dsl.selectFrom(sc).fetch().map { it.toDomain() }
override fun findAll(): Collection<SidecarStored> = dsl.selectFrom(sc).fetch().map { it.toDomain() }

override fun save(
libraryId: String,
sidecar: Sidecar,
) {
dsl.insertInto(sc)
dsl
.insertInto(sc)
.values(
sidecar.url.toString(),
sidecar.parentUrl.toString(),
sidecar.lastModifiedTime,
libraryId,
)
.onDuplicateKeyUpdate()
).onDuplicateKeyUpdate()
.set(sc.LAST_MODIFIED_TIME, sidecar.lastModifiedTime)
.set(sc.PARENT_URL, sidecar.parentUrl.toString())
.set(sc.LIBRARY_ID, libraryId)

@ -50,20 +49,23 @@ class SidecarDao(
) {
dsl.insertTempStrings(batchSize, urls.map { it.toString() })

dsl.deleteFrom(sc)
dsl
.deleteFrom(sc)
.where(sc.LIBRARY_ID.eq(libraryId))
.and(sc.URL.`in`(dsl.selectTempStrings()))
.execute()
}

override fun deleteByLibraryId(libraryId: String) {
dsl.deleteFrom(sc)
dsl
.deleteFrom(sc)
.where(sc.LIBRARY_ID.eq(libraryId))
.execute()
}

override fun countGroupedByLibraryId(): Map<String, Int> =
dsl.select(sc.LIBRARY_ID, DSL.count(sc.URL))
dsl
.select(sc.LIBRARY_ID, DSL.count(sc.URL))
.from(sc)
.groupBy(sc.LIBRARY_ID)
.fetchMap(sc.LIBRARY_ID, DSL.count(sc.URL))
@@ -56,63 +56,72 @@ class SyncPointDao(
val syncPointId = TsidCreator.getTsid256().toString()
val createdAt = LocalDateTime.now(ZoneId.of("Z"))

dsl.insertInto(
sp,
sp.ID,
sp.USER_ID,
sp.API_KEY_ID,
sp.CREATED_DATE,
).values(
syncPointId,
context.userId,
apiKeyId,
createdAt,
).execute()
dsl
.insertInto(
sp,
sp.ID,
sp.USER_ID,
sp.API_KEY_ID,
sp.CREATED_DATE,
).values(
syncPointId,
context.userId,
apiKeyId,
createdAt,
).execute()

dsl.insertInto(
spb,
spb.SYNC_POINT_ID,
spb.BOOK_ID,
spb.BOOK_CREATED_DATE,
spb.BOOK_LAST_MODIFIED_DATE,
spb.BOOK_FILE_LAST_MODIFIED,
spb.BOOK_FILE_SIZE,
spb.BOOK_FILE_HASH,
spb.BOOK_METADATA_LAST_MODIFIED_DATE,
spb.BOOK_READ_PROGRESS_LAST_MODIFIED_DATE,
spb.BOOK_THUMBNAIL_ID,
).select(
dsl.select(
DSL.`val`(syncPointId),
b.ID,
b.CREATED_DATE,
b.LAST_MODIFIED_DATE,
b.FILE_LAST_MODIFIED,
b.FILE_SIZE,
b.FILE_HASH,
d.LAST_MODIFIED_DATE,
r.LAST_MODIFIED_DATE,
bt.ID,
).from(b)
.apply {
joins.forEach {
when (it) {
// we don't have to handle those since we already join on those tables anyway, the 'when' is here for future proofing
RequiredJoin.BookMetadata -> Unit
RequiredJoin.SeriesMetadata -> Unit
RequiredJoin.Media -> Unit
is RequiredJoin.ReadProgress -> Unit
RequiredJoin.BookMetadataAggregation -> Unit
dsl
.insertInto(
spb,
spb.SYNC_POINT_ID,
spb.BOOK_ID,
spb.BOOK_CREATED_DATE,
spb.BOOK_LAST_MODIFIED_DATE,
spb.BOOK_FILE_LAST_MODIFIED,
spb.BOOK_FILE_SIZE,
spb.BOOK_FILE_HASH,
spb.BOOK_METADATA_LAST_MODIFIED_DATE,
spb.BOOK_READ_PROGRESS_LAST_MODIFIED_DATE,
spb.BOOK_THUMBNAIL_ID,
).select(
dsl
.select(
DSL.`val`(syncPointId),
b.ID,
b.CREATED_DATE,
b.LAST_MODIFIED_DATE,
b.FILE_LAST_MODIFIED,
b.FILE_SIZE,
b.FILE_HASH,
d.LAST_MODIFIED_DATE,
r.LAST_MODIFIED_DATE,
bt.ID,
).from(b)
.apply {
joins.forEach {
when (it) {
// we don't have to handle those since we already join on those tables anyway, the 'when' is here for future proofing
RequiredJoin.BookMetadata -> Unit
RequiredJoin.SeriesMetadata -> Unit
RequiredJoin.Media -> Unit
is RequiredJoin.ReadProgress -> Unit
RequiredJoin.BookMetadataAggregation -> Unit
}
}
}
}
.join(m).on(b.ID.eq(m.BOOK_ID))
.join(d).on(b.ID.eq(d.BOOK_ID))
.join(sd).on(b.SERIES_ID.eq(sd.SERIES_ID))
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(r.USER_ID.eq(context.userId))
.leftJoin(bt).on(b.ID.eq(bt.BOOK_ID)).and(bt.SELECTED.isTrue)
.where(condition),
).execute()
}.join(m)
.on(b.ID.eq(m.BOOK_ID))
.join(d)
.on(b.ID.eq(d.BOOK_ID))
.join(sd)
.on(b.SERIES_ID.eq(sd.SERIES_ID))
.leftJoin(r)
.on(b.ID.eq(r.BOOK_ID))
.and(r.USER_ID.eq(context.userId))
.leftJoin(bt)
.on(b.ID.eq(bt.BOOK_ID))
.and(bt.SELECTED.isTrue)
.where(condition),
).execute()

return findByIdOrNull(syncPointId)!!
}
@@ -131,7 +140,8 @@ class SyncPointDao(
val (query, _, queryMostRecentDate) = bookCommonDao.getBooksOnDeckQuery(context.userId, context.restrictions, filterOnLibraryIds, onDeckFields)

val count =
dsl.insertInto(sprlb)
dsl
.insertInto(sprlb)
.select(query)
.execute()

@@ -139,25 +149,27 @@ class SyncPointDao(
if (count > 0) {
val mostRecentDate = dsl.fetch(queryMostRecentDate).into(LocalDateTime::class.java).firstOrNull() ?: createdAt

dsl.insertInto(
sprl,
sprl.SYNC_POINT_ID,
sprl.READLIST_ID,
sprl.READLIST_NAME,
sprl.READLIST_CREATED_DATE,
sprl.READLIST_LAST_MODIFIED_DATE,
).values(
syncPointId,
ON_DECK_ID,
"On Deck",
createdAt,
mostRecentDate,
).execute()
dsl
.insertInto(
sprl,
sprl.SYNC_POINT_ID,
sprl.READLIST_ID,
sprl.READLIST_NAME,
sprl.READLIST_CREATED_DATE,
sprl.READLIST_LAST_MODIFIED_DATE,
).values(
syncPointId,
ON_DECK_ID,
"On Deck",
createdAt,
mostRecentDate,
).execute()
}
}

override fun findByIdOrNull(syncPointId: String): SyncPoint? =
dsl.selectFrom(sp)
dsl
.selectFrom(sp)
.where(sp.ID.eq(syncPointId))
.fetchInto(sp)
.map {
@@ -175,7 +187,8 @@ class SyncPointDao(
pageable: Pageable,
): Page<SyncPoint.Book> {
val query =
dsl.selectFrom(spb)
dsl
.selectFrom(spb)
.where(spb.SYNC_POINT_ID.eq(syncPointId))
.apply {
if (onlyNotSynced) {
@@ -193,14 +206,14 @@ class SyncPointDao(
pageable: Pageable,
): Page<SyncPoint.Book> {
val query =
dsl.selectFrom(spb)
dsl
.selectFrom(spb)
.where(spb.SYNC_POINT_ID.eq(toSyncPointId))
.apply {
if (onlyNotSynced) {
and(spb.SYNCED.isFalse)
}
}
.and(
}.and(
spb.BOOK_ID.notIn(
dsl.select(spb.BOOK_ID).from(spb).where(spb.SYNC_POINT_ID.eq(fromSyncPointId)),
),
@@ -216,14 +229,14 @@ class SyncPointDao(
pageable: Pageable,
): Page<SyncPoint.Book> {
val query =
dsl.selectFrom(spb)
dsl
.selectFrom(spb)
.where(spb.SYNC_POINT_ID.eq(fromSyncPointId))
.and(
spb.BOOK_ID.notIn(
dsl.select(spb.BOOK_ID).from(spb).where(spb.SYNC_POINT_ID.eq(toSyncPointId)),
),
)
.apply {
).apply {
if (onlyNotSynced)
and(
spb.BOOK_ID.notIn(
@@ -243,18 +256,20 @@ class SyncPointDao(
): Page<SyncPoint.Book> {
val spbFrom = spb.`as`("spbFrom")
val query =
dsl.select(*spb.fields())
dsl
.select(*spb.fields())
.from(spb)
.join(spbFrom).on(spb.BOOK_ID.eq(spbFrom.BOOK_ID))
.join(spbFrom)
.on(spb.BOOK_ID.eq(spbFrom.BOOK_ID))
.where(spb.SYNC_POINT_ID.eq(toSyncPointId))
.and(spbFrom.SYNC_POINT_ID.eq(fromSyncPointId))
.apply {
if (onlyNotSynced) {
and(spb.SYNCED.isFalse)
}
}
.and(
spb.BOOK_FILE_LAST_MODIFIED.ne(spbFrom.BOOK_FILE_LAST_MODIFIED)
}.and(
spb.BOOK_FILE_LAST_MODIFIED
.ne(spbFrom.BOOK_FILE_LAST_MODIFIED)
.or(spb.BOOK_FILE_SIZE.ne(spbFrom.BOOK_FILE_SIZE))
.or(spb.BOOK_FILE_HASH.ne(spbFrom.BOOK_FILE_HASH).and(spbFrom.BOOK_FILE_HASH.isNotNull))
.or(spb.BOOK_METADATA_LAST_MODIFIED_DATE.ne(spbFrom.BOOK_METADATA_LAST_MODIFIED_DATE))
@@ -272,26 +287,29 @@ class SyncPointDao(
): Page<SyncPoint.Book> {
val spbFrom = spb.`as`("spbFrom")
val query =
dsl.select(*spb.fields())
dsl
.select(*spb.fields())
.from(spb)
.join(spbFrom).on(spb.BOOK_ID.eq(spbFrom.BOOK_ID))
.join(spbFrom)
.on(spb.BOOK_ID.eq(spbFrom.BOOK_ID))
.where(spb.SYNC_POINT_ID.eq(toSyncPointId))
.and(spbFrom.SYNC_POINT_ID.eq(fromSyncPointId))
.apply {
if (onlyNotSynced) {
and(spb.SYNCED.isFalse)
}
}
.and(
}.and(
// unchanged book
spb.BOOK_FILE_LAST_MODIFIED.eq(spbFrom.BOOK_FILE_LAST_MODIFIED)
spb.BOOK_FILE_LAST_MODIFIED
.eq(spbFrom.BOOK_FILE_LAST_MODIFIED)
.and(spb.BOOK_FILE_SIZE.eq(spbFrom.BOOK_FILE_SIZE))
.and(spb.BOOK_FILE_HASH.eq(spbFrom.BOOK_FILE_HASH).or(spbFrom.BOOK_FILE_HASH.isNull))
.and(spb.BOOK_METADATA_LAST_MODIFIED_DATE.eq(spbFrom.BOOK_METADATA_LAST_MODIFIED_DATE))
.and(spb.BOOK_THUMBNAIL_ID.eq(spbFrom.BOOK_THUMBNAIL_ID))
// with changed read progress
.and(
spb.BOOK_READ_PROGRESS_LAST_MODIFIED_DATE.ne(spbFrom.BOOK_READ_PROGRESS_LAST_MODIFIED_DATE)
spb.BOOK_READ_PROGRESS_LAST_MODIFIED_DATE
.ne(spbFrom.BOOK_READ_PROGRESS_LAST_MODIFIED_DATE)
.or(spb.BOOK_READ_PROGRESS_LAST_MODIFIED_DATE.isNull.and(spbFrom.BOOK_READ_PROGRESS_LAST_MODIFIED_DATE.isNotNull))
.or(spb.BOOK_READ_PROGRESS_LAST_MODIFIED_DATE.isNotNull.and(spbFrom.BOOK_READ_PROGRESS_LAST_MODIFIED_DATE.isNull)),
),
@@ -306,7 +324,8 @@ class SyncPointDao(
pageable: Pageable,
): Page<SyncPoint.ReadList> {
val query =
dsl.selectFrom(sprl)
dsl
.selectFrom(sprl)
.where(sprl.SYNC_POINT_ID.eq(syncPointId))
.apply {
if (onlyNotSynced) {
@@ -326,9 +345,11 @@ class SyncPointDao(
val to = sprl.`as`("to")
val from = sprl.`as`("from")
val query =
dsl.select(*to.fields())
dsl
.select(*to.fields())
.from(to)
.leftOuterJoin(from).on(to.READLIST_ID.eq(from.READLIST_ID).and(from.SYNC_POINT_ID.eq(fromSyncPointId)))
.leftOuterJoin(from)
.on(to.READLIST_ID.eq(from.READLIST_ID).and(from.SYNC_POINT_ID.eq(fromSyncPointId)))
.where(to.SYNC_POINT_ID.eq(toSyncPointId))
.apply { if (onlyNotSynced) and(to.SYNCED.isFalse) }
.and(from.READLIST_ID.isNull)
@@ -344,14 +365,17 @@ class SyncPointDao(
): Page<SyncPoint.ReadList> {
val from = sprl.`as`("from")
val query =
dsl.select(*sprl.fields())
dsl
.select(*sprl.fields())
.from(sprl)
.join(from).on(sprl.READLIST_ID.eq(from.READLIST_ID))
.join(from)
.on(sprl.READLIST_ID.eq(from.READLIST_ID))
.where(sprl.SYNC_POINT_ID.eq(toSyncPointId))
.and(from.SYNC_POINT_ID.eq(fromSyncPointId))
.apply { if (onlyNotSynced) and(sprl.SYNCED.isFalse) }
.and(
sprl.READLIST_LAST_MODIFIED_DATE.ne(from.READLIST_LAST_MODIFIED_DATE)
sprl.READLIST_LAST_MODIFIED_DATE
.ne(from.READLIST_LAST_MODIFIED_DATE)
.or(sprl.READLIST_NAME.ne(from.READLIST_NAME)),
)

@@ -367,9 +391,11 @@ class SyncPointDao(
val from = sprl.`as`("from")
val to = sprl.`as`("to")
val query =
dsl.select(*from.fields())
dsl
.select(*from.fields())
.from(from)
.leftOuterJoin(to).on(from.READLIST_ID.eq(to.READLIST_ID).and(to.SYNC_POINT_ID.eq(toSyncPointId)))
.leftOuterJoin(to)
.on(from.READLIST_ID.eq(to.READLIST_ID).and(to.SYNC_POINT_ID.eq(toSyncPointId)))
.where(from.SYNC_POINT_ID.eq(fromSyncPointId))
.apply {
if (onlyNotSynced)
@@ -378,8 +404,7 @@ class SyncPointDao(
dsl.select(sprls.READLIST_ID).from(sprls).where(sprls.SYNC_POINT_ID.eq(toSyncPointId)),
),
)
}
.and(to.READLIST_ID.isNull)
}.and(to.READLIST_ID.isNull)

return queryToPageReadList(query, pageable)
}
@@ -388,7 +413,8 @@ class SyncPointDao(
syncPointId: String,
readListIds: Collection<String>,
): List<SyncPoint.ReadList.Book> =
dsl.select(*sprlb.fields())
dsl
.select(*sprlb.fields())
.from(sprlb)
.where(sprlb.SYNC_POINT_ID.eq(syncPointId))
.and(sprlb.READLIST_ID.`in`(readListIds))
@@ -404,13 +430,15 @@ class SyncPointDao(
// we store status in a separate table
if (bookIds.isNotEmpty()) {
if (forRemovedBooks)
dsl.batch(
dsl.insertInto(spbs, spbs.SYNC_POINT_ID, spbs.BOOK_ID).values(null as String?, null).onDuplicateKeyIgnore(),
).also { step ->
bookIds.map { step.bind(syncPointId, it) }
}.execute()
dsl
.batch(
dsl.insertInto(spbs, spbs.SYNC_POINT_ID, spbs.BOOK_ID).values(null as String?, null).onDuplicateKeyIgnore(),
).also { step ->
bookIds.map { step.bind(syncPointId, it) }
}.execute()
else
dsl.update(spb)
dsl
.update(spb)
.set(spb.SYNCED, true)
.where(spb.SYNC_POINT_ID.eq(syncPointId))
.and(spb.BOOK_ID.`in`(bookIds))
@@ -427,13 +455,15 @@ class SyncPointDao(
// we store status in a separate table
if (readListIds.isNotEmpty()) {
if (forRemovedReadLists)
dsl.batch(
dsl.insertInto(sprls, sprls.SYNC_POINT_ID, sprls.READLIST_ID).values(null as String?, null).onDuplicateKeyIgnore(),
).also { step ->
readListIds.map { step.bind(syncPointId, it) }
}.execute()
dsl
.batch(
dsl.insertInto(sprls, sprls.SYNC_POINT_ID, sprls.READLIST_ID).values(null as String?, null).onDuplicateKeyIgnore(),
).also { step ->
readListIds.map { step.bind(syncPointId, it) }
}.execute()
else
dsl.update(sprl)
dsl
.update(sprl)
.set(sprl.SYNCED, true)
.where(sprl.SYNC_POINT_ID.eq(syncPointId))
.and(sprl.READLIST_ID.`in`(readListIds))

@@ -24,7 +24,8 @@ class ThumbnailBookDao(
private val tb = Tables.THUMBNAIL_BOOK

override fun findAllByBookId(bookId: String): Collection<ThumbnailBook> =
dsl.selectFrom(tb)
dsl
.selectFrom(tb)
.where(tb.BOOK_ID.eq(bookId))
.fetchInto(tb)
.map { it.toDomain() }
@@ -33,20 +34,23 @@ class ThumbnailBookDao(
bookId: String,
type: Set<ThumbnailBook.Type>,
): Collection<ThumbnailBook> =
dsl.selectFrom(tb)
dsl
.selectFrom(tb)
.where(tb.BOOK_ID.eq(bookId))
.and(tb.TYPE.`in`(type.map { it.name }))
.fetchInto(tb)
.map { it.toDomain() }

override fun findByIdOrNull(thumbnailId: String): ThumbnailBook? =
dsl.selectFrom(tb)
dsl
.selectFrom(tb)
.where(tb.ID.eq(thumbnailId))
.fetchOneInto(tb)
?.toDomain()

override fun findSelectedByBookIdOrNull(bookId: String): ThumbnailBook? =
dsl.selectFrom(tb)
dsl
.selectFrom(tb)
.where(tb.BOOK_ID.eq(bookId))
.and(tb.SELECTED.isTrue)
.limit(1)
@@ -56,7 +60,8 @@ class ThumbnailBookDao(

override fun findAllWithoutMetadata(pageable: Pageable): Page<ThumbnailBook> {
val query =
dsl.selectFrom(tb)
dsl
.selectFrom(tb)
.where(tb.FILE_SIZE.eq(0))
.or(tb.MEDIA_TYPE.eq(""))
.or(tb.WIDTH.eq(0))
@@ -76,7 +81,8 @@ class ThumbnailBookDao(
type: ThumbnailBook.Type,
size: Int,
): Collection<String> =
dsl.select(tb.BOOK_ID)
dsl
.select(tb.BOOK_ID)
.from(tb)
.where(tb.TYPE.eq(type.toString()))
.and(tb.WIDTH.lt(size))
@@ -86,7 +92,8 @@ class ThumbnailBookDao(
override fun existsById(thumbnailId: String): Boolean = dsl.fetchExists(tb, tb.ID.eq(thumbnailId))

override fun insert(thumbnail: ThumbnailBook) {
dsl.insertInto(tb)
dsl
.insertInto(tb)
.set(tb.ID, thumbnail.id)
.set(tb.BOOK_ID, thumbnail.bookId)
.set(tb.THUMBNAIL, thumbnail.thumbnail)
@@ -101,7 +108,8 @@ class ThumbnailBookDao(
}

override fun update(thumbnail: ThumbnailBook) {
dsl.update(tb)
dsl
.update(tb)
.set(tb.BOOK_ID, thumbnail.bookId)
.set(tb.THUMBNAIL, thumbnail.thumbnail)
.set(tb.URL, thumbnail.url?.toString())
@@ -118,7 +126,9 @@ class ThumbnailBookDao(
override fun updateMetadata(thumbnails: Collection<ThumbnailBook>) {
dsl.batched { c ->
thumbnails.forEach {
c.dsl().update(tb)
c
.dsl()
.update(tb)
.set(tb.MEDIA_TYPE, it.mediaType)
.set(tb.WIDTH, it.dimension.width)
.set(tb.HEIGHT, it.dimension.height)
@@ -131,13 +141,15 @@ class ThumbnailBookDao(

@Transactional
override fun markSelected(thumbnail: ThumbnailBook) {
dsl.update(tb)
dsl
.update(tb)
.set(tb.SELECTED, false)
.where(tb.BOOK_ID.eq(thumbnail.bookId))
.and(tb.ID.ne(thumbnail.id))
.execute()

dsl.update(tb)
dsl
.update(tb)
.set(tb.SELECTED, true)
.where(tb.BOOK_ID.eq(thumbnail.bookId))
.and(tb.ID.eq(thumbnail.id))
@@ -163,7 +175,8 @@ class ThumbnailBookDao(
bookId: String,
type: ThumbnailBook.Type,
) {
dsl.deleteFrom(tb)
dsl
.deleteFrom(tb)
.where(tb.BOOK_ID.eq(bookId))
.and(tb.TYPE.eq(type.toString()))
.execute()

@@ -19,19 +19,22 @@ class ThumbnailReadListDao(
private val tr = Tables.THUMBNAIL_READLIST

override fun findAllByReadListId(readListId: String): Collection<ThumbnailReadList> =
dsl.selectFrom(tr)
dsl
.selectFrom(tr)
.where(tr.READLIST_ID.eq(readListId))
.fetchInto(tr)
.map { it.toDomain() }

override fun findByIdOrNull(thumbnailId: String): ThumbnailReadList? =
dsl.selectFrom(tr)
dsl
.selectFrom(tr)
.where(tr.ID.eq(thumbnailId))
.fetchOneInto(tr)
?.toDomain()

override fun findSelectedByReadListIdOrNull(readListId: String): ThumbnailReadList? =
dsl.selectFrom(tr)
dsl
.selectFrom(tr)
.where(tr.READLIST_ID.eq(readListId))
.and(tr.SELECTED.isTrue)
.limit(1)
@@ -41,7 +44,8 @@ class ThumbnailReadListDao(

override fun findAllWithoutMetadata(pageable: Pageable): Page<ThumbnailReadList> {
val query =
dsl.selectFrom(tr)
dsl
.selectFrom(tr)
.where(tr.FILE_SIZE.eq(0))
.or(tr.MEDIA_TYPE.eq(""))
.or(tr.WIDTH.eq(0))
@@ -58,7 +62,8 @@ class ThumbnailReadListDao(
}

override fun insert(thumbnail: ThumbnailReadList) {
dsl.insertInto(tr)
dsl
.insertInto(tr)
.set(tr.ID, thumbnail.id)
.set(tr.READLIST_ID, thumbnail.readListId)
.set(tr.THUMBNAIL, thumbnail.thumbnail)
@@ -72,7 +77,8 @@ class ThumbnailReadListDao(
}

override fun update(thumbnail: ThumbnailReadList) {
dsl.update(tr)
dsl
.update(tr)
.set(tr.READLIST_ID, thumbnail.readListId)
.set(tr.THUMBNAIL, thumbnail.thumbnail)
.set(tr.SELECTED, thumbnail.selected)
@@ -88,7 +94,9 @@ class ThumbnailReadListDao(
override fun updateMetadata(thumbnails: Collection<ThumbnailReadList>) {
dsl.batched { c ->
thumbnails.forEach {
c.dsl().update(tr)
c
.dsl()
.update(tr)
.set(tr.MEDIA_TYPE, it.mediaType)
.set(tr.WIDTH, it.dimension.width)
.set(tr.HEIGHT, it.dimension.height)
@@ -101,13 +109,15 @@ class ThumbnailReadListDao(

@Transactional
override fun markSelected(thumbnail: ThumbnailReadList) {
dsl.update(tr)
dsl
.update(tr)
.set(tr.SELECTED, false)
.where(tr.READLIST_ID.eq(thumbnail.readListId))
.and(tr.ID.ne(thumbnail.id))
.execute()

dsl.update(tr)
dsl
.update(tr)
.set(tr.SELECTED, true)
.where(tr.READLIST_ID.eq(thumbnail.readListId))
.and(tr.ID.eq(thumbnail.id))

@@ -19,13 +19,15 @@ class ThumbnailSeriesCollectionDao(
private val tc = Tables.THUMBNAIL_COLLECTION

override fun findByIdOrNull(thumbnailId: String): ThumbnailSeriesCollection? =
dsl.selectFrom(tc)
dsl
.selectFrom(tc)
.where(tc.ID.eq(thumbnailId))
.fetchOneInto(tc)
?.toDomain()

override fun findSelectedByCollectionIdOrNull(collectionId: String): ThumbnailSeriesCollection? =
dsl.selectFrom(tc)
dsl
.selectFrom(tc)
.where(tc.COLLECTION_ID.eq(collectionId))
.and(tc.SELECTED.isTrue)
.limit(1)
@@ -34,14 +36,16 @@ class ThumbnailSeriesCollectionDao(
.firstOrNull()

override fun findAllByCollectionId(collectionId: String): Collection<ThumbnailSeriesCollection> =
dsl.selectFrom(tc)
dsl
.selectFrom(tc)
.where(tc.COLLECTION_ID.eq(collectionId))
.fetchInto(tc)
.map { it.toDomain() }

override fun findAllWithoutMetadata(pageable: Pageable): Page<ThumbnailSeriesCollection> {
val query =
dsl.selectFrom(tc)
dsl
.selectFrom(tc)
.where(tc.FILE_SIZE.eq(0))
.or(tc.MEDIA_TYPE.eq(""))
.or(tc.WIDTH.eq(0))
@@ -58,7 +62,8 @@ class ThumbnailSeriesCollectionDao(
}

override fun insert(thumbnail: ThumbnailSeriesCollection) {
dsl.insertInto(tc)
dsl
.insertInto(tc)
.set(tc.ID, thumbnail.id)
.set(tc.COLLECTION_ID, thumbnail.collectionId)
.set(tc.THUMBNAIL, thumbnail.thumbnail)
@@ -72,7 +77,8 @@ class ThumbnailSeriesCollectionDao(
}

override fun update(thumbnail: ThumbnailSeriesCollection) {
dsl.update(tc)
dsl
.update(tc)
.set(tc.COLLECTION_ID, thumbnail.collectionId)
.set(tc.THUMBNAIL, thumbnail.thumbnail)
.set(tc.SELECTED, thumbnail.selected)
@@ -88,7 +94,9 @@ class ThumbnailSeriesCollectionDao(
override fun updateMetadata(thumbnails: Collection<ThumbnailSeriesCollection>) {
dsl.batched { c ->
thumbnails.forEach {
c.dsl().update(tc)
c
.dsl()
.update(tc)
.set(tc.MEDIA_TYPE, it.mediaType)
.set(tc.WIDTH, it.dimension.width)
.set(tc.HEIGHT, it.dimension.height)
@@ -101,13 +109,15 @@ class ThumbnailSeriesCollectionDao(

@Transactional
override fun markSelected(thumbnail: ThumbnailSeriesCollection) {
dsl.update(tc)
dsl
.update(tc)
.set(tc.SELECTED, false)
.where(tc.COLLECTION_ID.eq(thumbnail.collectionId))
.and(tc.ID.ne(thumbnail.id))
.execute()

dsl.update(tc)
dsl
.update(tc)
.set(tc.SELECTED, true)
.where(tc.COLLECTION_ID.eq(thumbnail.collectionId))
.and(tc.ID.eq(thumbnail.id))

@@ -24,13 +24,15 @@ class ThumbnailSeriesDao(
private val ts = Tables.THUMBNAIL_SERIES

override fun findByIdOrNull(thumbnailId: String): ThumbnailSeries? =
dsl.selectFrom(ts)
dsl
.selectFrom(ts)
.where(ts.ID.eq(thumbnailId))
.fetchOneInto(ts)
?.toDomain()

override fun findAllBySeriesId(seriesId: String): Collection<ThumbnailSeries> =
dsl.selectFrom(ts)
dsl
.selectFrom(ts)
.where(ts.SERIES_ID.eq(seriesId))
.fetchInto(ts)
.map { it.toDomain() }
@@ -39,14 +41,16 @@ class ThumbnailSeriesDao(
seriesId: String,
type: ThumbnailSeries.Type,
): Collection<ThumbnailSeries> =
dsl.selectFrom(ts)
dsl
.selectFrom(ts)
.where(ts.SERIES_ID.eq(seriesId))
.and(ts.TYPE.eq(type.toString()))
.fetchInto(ts)
.map { it.toDomain() }

override fun findSelectedBySeriesIdOrNull(seriesId: String): ThumbnailSeries? =
dsl.selectFrom(ts)
dsl
.selectFrom(ts)
.where(ts.SERIES_ID.eq(seriesId))
.and(ts.SELECTED.isTrue)
.limit(1)
@@ -56,7 +60,8 @@ class ThumbnailSeriesDao(

override fun findAllWithoutMetadata(pageable: Pageable): Page<ThumbnailSeries> {
val query =
dsl.selectFrom(ts)
dsl
.selectFrom(ts)
.where(ts.FILE_SIZE.eq(0))
.or(ts.MEDIA_TYPE.eq(""))
.or(ts.WIDTH.eq(0))
@@ -73,7 +78,8 @@ class ThumbnailSeriesDao(
}

override fun insert(thumbnail: ThumbnailSeries) {
dsl.insertInto(ts)
dsl
.insertInto(ts)
.set(ts.ID, thumbnail.id)
.set(ts.SERIES_ID, thumbnail.seriesId)
.set(ts.URL, thumbnail.url?.toString())
@@ -88,7 +94,8 @@ class ThumbnailSeriesDao(
}

override fun update(thumbnail: ThumbnailSeries) {
dsl.update(ts)
dsl
.update(ts)
.set(ts.SERIES_ID, thumbnail.seriesId)
.set(ts.THUMBNAIL, thumbnail.thumbnail)
.set(ts.URL, thumbnail.url?.toString())
@@ -105,7 +112,9 @@ class ThumbnailSeriesDao(
override fun updateMetadata(thumbnails: Collection<ThumbnailSeries>) {
dsl.batched { c ->
thumbnails.forEach {
c.dsl().update(ts)
c
.dsl()
.update(ts)
.set(ts.MEDIA_TYPE, it.mediaType)
.set(ts.WIDTH, it.dimension.width)
.set(ts.HEIGHT, it.dimension.height)
@@ -118,13 +127,15 @@ class ThumbnailSeriesDao(

@Transactional
override fun markSelected(thumbnail: ThumbnailSeries) {
dsl.update(ts)
dsl
.update(ts)
.set(ts.SELECTED, false)
.where(ts.SERIES_ID.eq(thumbnail.seriesId))
.and(ts.ID.ne(thumbnail.id))
.execute()

dsl.update(ts)
dsl
.update(ts)
.set(ts.SELECTED, true)
.where(ts.SERIES_ID.eq(thumbnail.seriesId))
.and(ts.ID.eq(thumbnail.id))

@@ -32,18 +32,21 @@ class TasksDao(
private val tasksAvailableCondition =
t.OWNER.isNull
.and(
t.GROUP_ID.notIn(
dsl.select(t.GROUP_ID).from(t).where(t.OWNER.isNotNull).and(t.GROUP_ID.isNotNull),
)
.or(t.GROUP_ID.isNull),
t.GROUP_ID
.notIn(
dsl
.select(t.GROUP_ID)
.from(t)
.where(t.OWNER.isNotNull)
.and(t.GROUP_ID.isNotNull),
).or(t.GROUP_ID.isNull),
)

override fun hasAvailable(): Boolean {
return dsl.fetchExists(
override fun hasAvailable(): Boolean =
dsl.fetchExists(
t,
tasksAvailableCondition,
)
}

@Transactional
override fun takeFirst(owner: String): Task? {
@@ -62,7 +65,8 @@ class TasksDao(
}
} ?: return null

dsl.update(t)
dsl
.update(t)
.set(t.OWNER, owner)
.where(t.ID.eq(task.uniqueId))
.execute()
@@ -76,7 +80,8 @@ class TasksDao(
.mapNotNull { it.toDomain() }

override fun findAllGroupedByOwner(): Map<String?, List<Task>> =
dsl.select(t.OWNER, t.CLASS, t.PAYLOAD)
dsl
.select(t.OWNER, t.CLASS, t.PAYLOAD)
.from(t)
.fetch()
.mapNotNull {
@@ -84,7 +89,8 @@ class TasksDao(
}.groupBy({ it.first }, { it.second })

private fun selectBase() =
dsl.select(t.CLASS, t.PAYLOAD)
dsl
.select(t.CLASS, t.PAYLOAD)
.from(t)

private fun Record2<String, String>.toDomain(): Task? =
@@ -95,17 +101,15 @@ class TasksDao(
null
}

override fun count(): Int {
return dsl.fetchCount(t)
}
override fun count(): Int = dsl.fetchCount(t)

override fun countBySimpleType(): Map<String, Int> {
return dsl.select(t.SIMPLE_TYPE, DSL.count(t.SIMPLE_TYPE))
override fun countBySimpleType(): Map<String, Int> =
dsl
.select(t.SIMPLE_TYPE, DSL.count(t.SIMPLE_TYPE))
.from(t)
.groupBy(t.SIMPLE_TYPE)
.fetch()
.associate { it.value1() to it.value2() }
}

override fun save(task: Task) {
task.toQuery().execute()
@@ -119,7 +123,8 @@ class TasksDao(
}

override fun disown(): Int =
dsl.update(t)
dsl
.update(t)
.set(t.OWNER, null as String?)
.where(t.OWNER.isNotNull)
.execute()
@@ -132,28 +137,26 @@ class TasksDao(
dsl.deleteFrom(t).execute()
}

override fun deleteAllWithoutOwner(): Int =
dsl.deleteFrom(t).where(t.OWNER.isNull).execute()
override fun deleteAllWithoutOwner(): Int = dsl.deleteFrom(t).where(t.OWNER.isNull).execute()

private fun Task.toQuery(): Query =
dsl.insertInto(
t,
t.ID,
t.PRIORITY,
t.GROUP_ID,
t.CLASS,
t.SIMPLE_TYPE,
t.PAYLOAD,
)
.values(
dsl
.insertInto(
t,
t.ID,
t.PRIORITY,
t.GROUP_ID,
t.CLASS,
t.SIMPLE_TYPE,
t.PAYLOAD,
).values(
uniqueId,
priority,
groupId,
javaClass.typeName,
javaClass.simpleName,
objectMapper.writeValueAsString(this),
)
.onDuplicateKeyUpdate()
).onDuplicateKeyUpdate()
.set(t.GROUP_ID, groupId)
.set(t.PRIORITY, priority)
.set(t.CLASS, javaClass.typeName)

@@ -30,16 +30,17 @@ class KoboProxy(
private val komgaSettingsProvider: KomgaSettingsProvider,
) {
private val koboApiClient =
RestClient.builder()
RestClient
.builder()
.baseUrl("https://storeapi.kobo.com")
.requestFactory(
ClientHttpRequestFactoryBuilder.reactor().build(
ClientHttpRequestFactorySettings.defaults()
ClientHttpRequestFactorySettings
.defaults()
.withReadTimeout(1.minutes.toJavaDuration())
.withConnectTimeout(1.minutes.toJavaDuration()),
),
)
.build()
).build()

private val pathRegex = """\/kobo\/[-\w]*(.*)""".toRegex()

@@ -82,15 +83,17 @@ class KoboProxy(
null

val response =
koboApiClient.method(HttpMethod.valueOf(request.method))
koboApiClient
.method(HttpMethod.valueOf(request.method))
.uri { uriBuilder ->
uriBuilder.path(path)
uriBuilder
.path(path)
.queryParams(LinkedMultiValueMap(request.parameterMap.mapValues { it.value.toList() }))
.build()
.also { logger.debug { "Proxy URL: $it" } }
}
.headers { headersOut ->
request.headerNames.toList()
}.headers { headersOut ->
request.headerNames
.toList()
.filterNot { headersOutExclude.contains(it, true) }
.filter { headersOutInclude.contains(it, true) || isKoboHeader(it) }
.forEach {
@@ -104,13 +107,11 @@ class KoboProxy(
}
}
logger.debug { "Headers out: $headersOut" }
}
.apply { if (body != null) body(body) }
}.apply { if (body != null) body(body) }
.retrieve()
.onStatus(HttpStatusCode::isError) { _, response ->
throw ResponseStatusException(response.statusCode, response.statusText)
}
.toEntity<JsonNode>()
}.toEntity<JsonNode>()

logger.debug { "Kobo response: $response" }

@@ -56,8 +56,7 @@ class KomgaSyncTokenGenerator(
return KomgaSyncToken()
}

fun toBase64(token: KomgaSyncToken): String =
KOMGA_TOKEN_PREFIX + base64Encoder.encodeToString(objectMapper.writeValueAsString(token).toByteArray())
fun toBase64(token: KomgaSyncToken): String = KOMGA_TOKEN_PREFIX + base64Encoder.encodeToString(objectMapper.writeValueAsString(token).toByteArray())

fun fromRequestHeaders(request: HttpServletRequest): KomgaSyncToken? {
val syncTokenB64 = request.getHeader(X_KOBO_SYNCTOKEN)

@@ -31,11 +31,9 @@ class ContentDetector(
* Detects the media type of the content of the stream.
* The stream will not be closed.
*/
fun detectMediaType(stream: InputStream): String =
tika.detector.detect(stream, Metadata()).toString()
fun detectMediaType(stream: InputStream): String = tika.detector.detect(stream, Metadata()).toString()

fun isImage(mediaType: String): Boolean =
mediaType.startsWith("image/")
fun isImage(mediaType: String): Boolean = mediaType.startsWith("image/")

fun mediaTypeToExtension(mediaType: String): String? =
try {

@@ -62,14 +62,12 @@ class Rar5Extractor(
logger.warn(e) { "Could not analyze entry: ${entry.name}" }
MediaContainerEntry(name = entry.name, comment = e.message)
}
}
.sortedWith(compareBy(natSortComparator) { it.name })
}.sortedWith(compareBy(natSortComparator) { it.name })
.toList()
}

override fun getEntryStream(
path: Path,
entryName: String,
): ByteArray =
Archive.getInputStream(path, entryName).use { it?.readBytes() ?: ByteArray(0) }
): ByteArray = Archive.getInputStream(path, entryName).use { it?.readBytes() ?: ByteArray(0) }
}

@@ -47,8 +47,7 @@ class RarExtractor(
logger.warn(e) { "Could not analyze entry: ${entry.fileName}" }
MediaContainerEntry(name = entry.fileName, comment = e.message)
}
}
.sortedWith(compareBy(natSortComparator) { it.name })
}.sortedWith(compareBy(natSortComparator) { it.name })
}

override fun getEntryStream(

@@ -29,7 +29,8 @@ class ZipExtractor(
analyzeDimensions: Boolean,
): List<MediaContainerEntry> =
ZipFile.builder().setPath(path).use { zip ->
zip.entries.toList()
zip.entries
.toList()
.filter { !it.isDirectory }
.map { entry ->
try {
@@ -47,8 +48,7 @@ class ZipExtractor(
logger.warn(e) { "Could not analyze entry: ${entry.name}" }
MediaContainerEntry(name = entry.name, comment = e.message)
}
}
.sortedWith(compareBy(natSortComparator) { it.name })
}.sortedWith(compareBy(natSortComparator) { it.name })
}

override fun getEntryStream(

@@ -31,7 +31,9 @@ inline fun <R> Path.epub(block: (EpubPackage) -> R): R =
fun ZipFile.getPackagePath(): String =
getEntryInputStream("META-INF/container.xml")
.use { Jsoup.parse(it, null, "") }
.getElementsByTag("rootfile").first()?.attr("full-path") ?: throw MediaUnsupportedException("META-INF/container.xml does not contain rootfile tag")
.getElementsByTag("rootfile")
.first()
?.attr("full-path") ?: throw MediaUnsupportedException("META-INF/container.xml does not contain rootfile tag")

/**
* Returns the content of the Epub package file as a [String]

@@ -1,6 +1,9 @@
package org.gotson.komga.infrastructure.mediacontainer.epub

enum class Epub2Nav(val level1: String, val level2: String) {
enum class Epub2Nav(
val level1: String,
val level2: String,
) {
TOC("navMap", "navPoint"),
PAGELIST("pageList", "pageTarget"),
}

@@ -1,6 +1,8 @@
package org.gotson.komga.infrastructure.mediacontainer.epub

enum class Epub3Nav(val value: String) {
enum class Epub3Nav(
val value: String,
) {
TOC("toc"),
LANDMARKS("landmarks"),
PAGELIST("page-list"),

@@ -59,7 +59,11 @@ class EpubExtractor(
// EPUB 3 - try to get cover from manifest properties 'cover-image'
manifest.values.firstOrNull { it.properties.contains("cover-image") }
?: // EPUB 2 - get cover from meta element with name="cover"
opfDoc.selectFirst("metadata > meta[name=cover]")?.attr("content")?.ifBlank { null }?.let { manifest[it] }
opfDoc
.selectFirst("metadata > meta[name=cover]")
?.attr("content")
?.ifBlank { null }
?.let { manifest[it] }

if (coverManifestItem != null) {
val href = coverManifestItem.href
@@ -98,7 +102,11 @@ class EpubExtractor(
}

private fun getResources(epub: EpubPackage): List<MediaFile> {
val spine = epub.opfDoc.select("spine > itemref").map { it.attr("idref") }.mapNotNull { epub.manifest[it] }
val spine =
epub.opfDoc
.select("spine > itemref")
.map { it.attr("idref") }
.mapNotNull { epub.manifest[it] }

val pages =
spine.map { page ->
@@ -134,7 +142,8 @@ class EpubExtractor(

try {
val pagesWithImages =
epub.opfDoc.select("spine > itemref")
epub.opfDoc
.select("spine > itemref")
.map { it.attr("idref") }
.mapNotNull { idref -> epub.manifest[idref]?.href?.let { normalizeHref(epub.opfDir, it) } }
.map { pagePath ->
@@ -144,11 +153,13 @@ class EpubExtractor(
if (doc.body().text().length > letterCountThreshold) return emptyList()

val img =
doc.getElementsByTag("img")
doc
.getElementsByTag("img")
.map { it.attr("src") } // get the src, which can be a relative path

val svg =
doc.select("svg > image[xlink:href]")
doc
.select("svg > image[xlink:href]")
.map { it.attr("xlink:href") } // get the source, which can be a relative path

(img + svg).map { (Path(pagePath).parent ?: Path("")).resolve(it).normalize().invariantSeparatorsPathString } // resolve it against the page folder
@@ -160,7 +171,10 @@ class EpubExtractor(

val divinaPages =
imagesPath.mapNotNull { imagePath ->
val mediaType = epub.manifest.values.firstOrNull { normalizeHref(epub.opfDir, it.href) == imagePath }?.mediaType ?: return@mapNotNull null
val mediaType =
epub.manifest.values
.firstOrNull { normalizeHref(epub.opfDir, it.href) == imagePath }
?.mediaType ?: return@mapNotNull null
val zipEntry = epub.zip.getEntry(imagePath)
if (!contentDetector.isImage(mediaType)) return@mapNotNull null

@@ -200,11 +214,15 @@ class EpubExtractor(

private fun computePageCount(epub: EpubPackage): Int {
val spine =
epub.opfDoc.select("spine > itemref")
epub.opfDoc
.select("spine > itemref")
.map { it.attr("idref") }
.mapNotNull { idref -> epub.manifest[idref]?.href?.let { normalizeHref(epub.opfDir, it) } }

return epub.zip.entries.toList().filter { it.name in spine }.sumOf { ceil(it.compressedSize / 1024.0).toInt() }
return epub.zip.entries
.toList()
.filter { it.name in spine }
.sumOf { ceil(it.compressedSize / 1024.0).toInt() }
}

private fun isFixedLayout(epub: EpubPackage) =
@@ -229,7 +247,8 @@ class EpubExtractor(
kepubConverter.isAvailable -> {
try {
val kepub =
kepubConverter.convertEpubToKepubWithoutChecks(path)
kepubConverter
.convertEpubToKepubWithoutChecks(path)
?.also { it.toFile().deleteOnExit() }
// if the conversion failed, throw an exception that will be caught in the catch block
?: throw IllegalStateException()

@@ -20,7 +20,8 @@ fun processNav(
): List<EpubTocEntry> {
val doc = Jsoup.parse(document.content)
val nav =
doc.select("nav")
doc
.select("nav")
// Jsoup selectors cannot find an attribute with namespace
.firstOrNull { it.attributes().any { attr -> attr.key.endsWith("type") && attr.value == navElement.value } }
return nav?.select(":root > ol > li")?.toList()?.mapNotNull { navLiElementToTocEntry(it, document.path.parent) } ?: emptyList()

@@ -20,7 +20,8 @@ fun processNcx(
document: ResourceContent,
navType: Epub2Nav,
): List<EpubTocEntry> =
Jsoup.parse(document.content)
Jsoup
.parse(document.content)
.select("${navType.level1} > ${navType.level2}")
.toList()
.mapNotNull { ncxElementToTocEntry(navType, it, document.path.parent) }

@@ -33,7 +33,8 @@ class ComicInfoProvider(
@Autowired(required = false) private val mapper: XmlMapper = XmlMapper(),
private val bookAnalyzer: BookAnalyzer,
private val isbnValidator: ISBNValidator,
) : BookMetadataProvider, SeriesMetadataFromBookProvider {
) : BookMetadataProvider,
SeriesMetadataFromBookProvider {
override val capabilities =
setOf(
BookMetadataPatchCapability.TITLE,
@@ -150,7 +151,11 @@ class ComicInfoProvider(
language = if (comicInfo.languageISO != null && BCP47TagValidator.isValid(comicInfo.languageISO!!)) BCP47TagValidator.normalize(comicInfo.languageISO!!) else null,
genres = if (!genres.isNullOrEmpty()) genres.toSet() else null,
totalBookCount = comicInfo.count,
collections = comicInfo.seriesGroup?.split(',')?.mapNotNull { it.trim().ifBlank { null } }?.toSet() ?: emptySet(),
collections =
comicInfo.seriesGroup
?.split(',')
?.mapNotNull { it.trim().ifBlank { null } }
?.toSet() ?: emptySet(),
)
}
return null

@@ -2,7 +2,10 @@ package org.gotson.komga.infrastructure.metadata.comicrack.dto

import com.fasterxml.jackson.annotation.JsonCreator

enum class AgeRating(val value: String, val ageRating: Int? = null) {
enum class AgeRating(
val value: String,
val ageRating: Int? = null,
) {
UNKNOWN("Unknown"),
ADULTS_ONLY_18("Adults Only 18+", 18),
EARLY_CHILDHOOD("Early Childhood", 3),

@@ -20,7 +20,5 @@ class Book {
@JsonProperty(value = "FileName")
var fileName: String? = null

override fun toString(): String {
return "Book(series=$series, number=$number, volume=$volume, year=$year, fileName=$fileName)"
}
override fun toString(): String = "Book(series=$series, number=$number, volume=$volume, year=$year, fileName=$fileName)"
}

@@ -2,7 +2,9 @@ package org.gotson.komga.infrastructure.metadata.comicrack.dto

import com.fasterxml.jackson.annotation.JsonCreator

enum class Manga(private val value: String) {
enum class Manga(
private val value: String,
) {
UNKNOWN("Unknown"),
NO("No"),
YES("Yes"),

@@ -17,7 +17,5 @@ class ReadingList {
@JsonSetter(nulls = Nulls.AS_EMPTY)
var books: List<Book> = emptyList()

override fun toString(): String {
return "ReadingList(name=$name, books=$books)"
}
override fun toString(): String = "ReadingList(name=$name, books=$books)"
}

@@ -2,7 +2,9 @@ package org.gotson.komga.infrastructure.metadata.comicrack.dto

import com.fasterxml.jackson.annotation.JsonCreator

enum class YesNo(val value: String) {
enum class YesNo(
val value: String,
) {
UNKNOWN("Unknown"),
NO("No"),
YES("Yes"),

@@ -25,7 +25,8 @@ import java.time.format.DateTimeFormatter
@Service
class EpubMetadataProvider(
private val isbnValidator: ISBNValidator,
) : BookMetadataProvider, SeriesMetadataFromBookProvider {
) : BookMetadataProvider,
SeriesMetadataFromBookProvider {
private val relators =
mapOf(
"aut" to "writer",
@@ -52,14 +53,21 @@ class EpubMetadataProvider(
val opf = Jsoup.parse(packageFile, "", Parser.xmlParser())

val title = opf.selectFirst("metadata > dc|title")?.text()?.ifBlank { null }
val description = opf.selectFirst("metadata > dc|description")?.text()?.let { Jsoup.clean(it, Safelist.none()) }?.ifBlank { null }
val description =
opf
.selectFirst("metadata > dc|description")
?.text()
?.let { Jsoup.clean(it, Safelist.none()) }
?.ifBlank { null }
val date = opf.selectFirst("metadata > dc|date")?.text()?.let { parseDate(it) }

val authorRoles =
opf.select("metadata > *|meta[property=role][scheme=marc:relators]")
opf
.select("metadata > *|meta[property=role][scheme=marc:relators]")
.associate { it.attr("refines").removePrefix("#") to it.text() }
val authors =
opf.select("metadata > dc|creator")
opf
.select("metadata > dc|creator")
.mapNotNull { el ->
val name = el.text().trim()
if (name.isBlank()) {
@@ -73,14 +81,18 @@ class EpubMetadataProvider(
}.ifEmpty { null }

val isbn =
opf.select("metadata > dc|identifier")
opf
.select("metadata > dc|identifier")
.map { it.text().lowercase().removePrefix("isbn:") }
.firstNotNullOfOrNull { isbnValidator.validate(it) }

val seriesIndex =
opf.selectFirst("metadata > *|meta[property=belongs-to-collection]")?.attr("id")?.let { id ->
opf.selectFirst("metadata > *|meta[refines=#$id][property=group-position]")
}?.text()
opf
.selectFirst("metadata > *|meta[property=belongs-to-collection]")
?.attr("id")
?.let { id ->
opf.selectFirst("metadata > *|meta[refines=#$id][property=group-position]")
}?.text()

return BookMetadataPatch(
title = title,
@@ -109,7 +121,8 @@ class EpubMetadataProvider(
val publisher = opf.selectFirst("metadata > dc|publisher")?.text()?.ifBlank { null }
val language = opf.selectFirst("metadata > dc|language")?.text()?.ifBlank { null }
val genres =
opf.select("metadata > dc|subject")
opf
.select("metadata > dc|subject")
.mapNotNull { it.text().trim().ifBlank { null } }
.toSet()
.ifEmpty { null }
Some files were not shown because too many files have changed in this diff.