mirror of
https://github.com/gotson/komga.git
synced 2025-12-20 15:34:17 +01:00
parent
86c4d021f2
commit
bc25c11990
5 changed files with 1103 additions and 103 deletions
|
|
@ -14,8 +14,10 @@ interface BookRepository {
|
|||
fun findAll(): Collection<Book>
|
||||
fun findAllBySeriesId(seriesId: String): Collection<Book>
|
||||
fun findAllBySeriesIds(seriesIds: Collection<String>): Collection<Book>
|
||||
fun findAllByLibraryIdAndUrlNotIn(libraryId: String, urls: Collection<URL>): Collection<Book>
|
||||
fun findAll(bookSearch: BookSearch): Collection<Book>
|
||||
fun findAll(bookSearch: BookSearch, pageable: Pageable): Page<Book>
|
||||
fun findAllDeletedByFileSize(fileSize: Long): Collection<Book>
|
||||
|
||||
fun getLibraryIdOrNull(bookId: String): String?
|
||||
fun getSeriesIdOrNull(bookId: String): String?
|
||||
|
|
|
|||
|
|
@ -2,19 +2,31 @@ package org.gotson.komga.domain.service
|
|||
|
||||
import mu.KotlinLogging
|
||||
import org.gotson.komga.application.tasks.TaskReceiver
|
||||
import org.gotson.komga.domain.model.Book
|
||||
import org.gotson.komga.domain.model.BookMetadataPatchCapability
|
||||
import org.gotson.komga.domain.model.BookSearch
|
||||
import org.gotson.komga.domain.model.DirectoryNotFoundException
|
||||
import org.gotson.komga.domain.model.Library
|
||||
import org.gotson.komga.domain.model.Media
|
||||
import org.gotson.komga.domain.model.ScanResult
|
||||
import org.gotson.komga.domain.model.Series
|
||||
import org.gotson.komga.domain.model.SeriesSearch
|
||||
import org.gotson.komga.domain.model.Sidecar
|
||||
import org.gotson.komga.domain.model.ThumbnailBook
|
||||
import org.gotson.komga.domain.persistence.BookMetadataRepository
|
||||
import org.gotson.komga.domain.persistence.BookRepository
|
||||
import org.gotson.komga.domain.persistence.MediaRepository
|
||||
import org.gotson.komga.domain.persistence.ReadListRepository
|
||||
import org.gotson.komga.domain.persistence.ReadProgressRepository
|
||||
import org.gotson.komga.domain.persistence.SeriesCollectionRepository
|
||||
import org.gotson.komga.domain.persistence.SeriesMetadataRepository
|
||||
import org.gotson.komga.domain.persistence.SeriesRepository
|
||||
import org.gotson.komga.domain.persistence.SidecarRepository
|
||||
import org.gotson.komga.domain.persistence.ThumbnailBookRepository
|
||||
import org.gotson.komga.infrastructure.configuration.KomgaProperties
|
||||
import org.gotson.komga.infrastructure.hash.Hasher
|
||||
import org.gotson.komga.infrastructure.language.notEquals
|
||||
import org.gotson.komga.infrastructure.language.toIndexedMap
|
||||
import org.springframework.stereotype.Service
|
||||
import org.springframework.transaction.support.TransactionTemplate
|
||||
import java.nio.file.Paths
|
||||
|
|
@ -36,6 +48,13 @@ class LibraryContentLifecycle(
|
|||
private val komgaProperties: KomgaProperties,
|
||||
private val taskReceiver: TaskReceiver,
|
||||
private val transactionTemplate: TransactionTemplate,
|
||||
private val hasher: Hasher,
|
||||
private val bookMetadataRepository: BookMetadataRepository,
|
||||
private val seriesMetadataRepository: SeriesMetadataRepository,
|
||||
private val readListRepository: ReadListRepository,
|
||||
private val readProgressRepository: ReadProgressRepository,
|
||||
private val collectionRepository: SeriesCollectionRepository,
|
||||
private val thumbnailBookRepository: ThumbnailBookRepository,
|
||||
) {
|
||||
|
||||
fun scanRootFolder(library: Library) {
|
||||
|
|
@ -61,7 +80,7 @@ class LibraryContentLifecycle(
|
|||
seriesLifecycle.softDeleteMany(series)
|
||||
} else {
|
||||
scannedSeries.keys.map { it.url }.let { urls ->
|
||||
val series = seriesRepository.findAllByLibraryIdAndUrlNotIn(library.id, urls)
|
||||
val series = seriesRepository.findAllByLibraryIdAndUrlNotIn(library.id, urls).filterNot { it.deletedDate != null }
|
||||
if (series.isNotEmpty()) {
|
||||
logger.info { "Soft deleting series not on disk anymore: $series" }
|
||||
seriesLifecycle.softDeleteMany(series)
|
||||
|
|
@ -69,6 +88,16 @@ class LibraryContentLifecycle(
|
|||
}
|
||||
}
|
||||
|
||||
// delete books that don't exist anymore. We need to do this now, so trash bin can work
|
||||
val seriesToSortAndRefresh = scannedSeries.values.flatten().map { it.url }.let { urls ->
|
||||
val books = bookRepository.findAllByLibraryIdAndUrlNotIn(library.id, urls).filterNot { it.deletedDate != null }
|
||||
if (books.isNotEmpty()) {
|
||||
logger.info { "Soft deleting books not on disk anymore: $books" }
|
||||
bookLifecycle.softDeleteMany(books)
|
||||
books.map { it.seriesId }.distinct().mapNotNull { seriesRepository.findByIdOrNull(it) }.toMutableList()
|
||||
} else mutableListOf()
|
||||
}
|
||||
|
||||
scannedSeries.forEach { (newSeries, newBooks) ->
|
||||
val existingSeries = seriesRepository.findByLibraryIdAndUrlOrNull(library.id, newSeries.url)
|
||||
|
||||
|
|
@ -77,7 +106,9 @@ class LibraryContentLifecycle(
|
|||
logger.info { "Adding new series: $newSeries" }
|
||||
val createdSeries = seriesLifecycle.createSeries(newSeries)
|
||||
seriesLifecycle.addBooks(createdSeries, newBooks)
|
||||
seriesLifecycle.sortBooks(createdSeries)
|
||||
tryRestoreSeries(createdSeries, newBooks)
|
||||
tryRestoreBooks(newBooks)
|
||||
seriesToSortAndRefresh.add(createdSeries)
|
||||
} else {
|
||||
// if series already exists, update it
|
||||
logger.debug { "Scanned series already exists. Scanned: $newSeries, Existing: $existingSeries" }
|
||||
|
|
@ -90,18 +121,18 @@ class LibraryContentLifecycle(
|
|||
// update list of books with existing entities if they exist
|
||||
val existingBooks = bookRepository.findAllBySeriesId(existingSeries.id)
|
||||
logger.debug { "Existing books: $existingBooks" }
|
||||
|
||||
// update existing books
|
||||
newBooks.forEach { newBook ->
|
||||
logger.debug { "Trying to match scanned book by url: $newBook" }
|
||||
existingBooks.find { it.url == newBook.url }?.let { existingBook ->
|
||||
existingBooks.find { it.url == newBook.url && it.deletedDate == null }?.let { existingBook ->
|
||||
logger.debug { "Matched existing book: $existingBook" }
|
||||
if (newBook.fileLastModified.notEquals(existingBook.fileLastModified) || existingBook.deletedDate != null) {
|
||||
if (newBook.fileLastModified.notEquals(existingBook.fileLastModified)) {
|
||||
logger.info { "Book changed on disk, update and reset media status: $existingBook" }
|
||||
val updatedBook = existingBook.copy(
|
||||
fileLastModified = newBook.fileLastModified,
|
||||
fileSize = newBook.fileSize,
|
||||
fileHash = "",
|
||||
deletedDate = null,
|
||||
)
|
||||
transactionTemplate.executeWithoutResult {
|
||||
mediaRepository.findById(existingBook.id).let {
|
||||
|
|
@ -113,28 +144,23 @@ class LibraryContentLifecycle(
|
|||
}
|
||||
}
|
||||
|
||||
// remove books not present anymore
|
||||
val newBooksUrls = newBooks.map { it.url }
|
||||
existingBooks
|
||||
.filterNot { existingBook -> newBooksUrls.contains(existingBook.url) }
|
||||
.let { books ->
|
||||
logger.info { "Deleting books not on disk anymore: $books" }
|
||||
bookLifecycle.softDeleteMany(books)
|
||||
books.map { it.seriesId }.distinct().forEach { taskReceiver.refreshSeriesMetadata(it) }
|
||||
}
|
||||
|
||||
// add new books
|
||||
val existingBooksUrls = existingBooks.map { it.url }
|
||||
val existingBooksUrls = existingBooks.filterNot { it.deletedDate != null }.map { it.url }
|
||||
val booksToAdd = newBooks.filterNot { newBook -> existingBooksUrls.contains(newBook.url) }
|
||||
logger.info { "Adding new books: $booksToAdd" }
|
||||
seriesLifecycle.addBooks(existingSeries, booksToAdd)
|
||||
|
||||
// sort all books
|
||||
seriesLifecycle.sortBooks(existingSeries)
|
||||
tryRestoreBooks(booksToAdd)
|
||||
seriesToSortAndRefresh.add(existingSeries)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// for all series where books have been removed or added, trigger a sort and refresh metadata
|
||||
seriesToSortAndRefresh.distinctBy { it.id }.forEach {
|
||||
seriesLifecycle.sortBooks(it)
|
||||
taskReceiver.refreshSeriesMetadata(it.id)
|
||||
}
|
||||
|
||||
if (!rootFolderInaccessible) {
|
||||
val existingSidecars = sidecarRepository.findAll()
|
||||
scanResult.sidecars.forEach { newSidecar ->
|
||||
|
|
@ -175,6 +201,147 @@ class LibraryContentLifecycle(
|
|||
}.also { logger.info { "Library updated in $it" } }
|
||||
}
|
||||
|
||||
/**
 * This will try to match newSeries with a deleted series.
 * Series are matched if:
 * - they have the same number of books
 * - all the books are matched by file size and file hash
 *
 * If a series is matched, the following will be restored from the deleted series to the new series:
 * - Collections
 * - Metadata. The metadata title will only be copied if locked. If not locked, the folder name is used.
 * - all books, via #tryRestoreBooks
 *
 * The matched deleted series is then removed via seriesLifecycle.deleteMany.
 */
private fun tryRestoreSeries(newSeries: Series, newBooks: List<Book>) {
  logger.info { "Try to restore series: $newSeries" }
  val bookSizes = newBooks.map { it.fileSize }

  // Phase 1: cheap pre-filter. Keep only soft-deleted series whose book count and
  // multiset of file sizes match the scanned books, and whose books all have a
  // stored hash (a candidate without hashes could never be confirmed in phase 2).
  val deletedCandidates = seriesRepository.findAll(SeriesSearch(deleted = true))
    .mapNotNull { deletedCandidate ->
      val deletedBooks = bookRepository.findAllBySeriesId(deletedCandidate.id)
      val deletedBooksSizes = deletedBooks.map { it.fileSize }
      if (newBooks.size == deletedBooks.size && bookSizes.containsAll(deletedBooksSizes) && deletedBooksSizes.containsAll(bookSizes) && deletedBooks.all { it.fileHash.isNotBlank() }) {
        deletedCandidate to deletedBooks
      } else null
    }
  logger.debug { "Deleted series candidates: $deletedCandidates" }

  if (deletedCandidates.isNotEmpty()) {
    // Phase 2: expensive confirmation. Compute and persist a hash for each scanned book.
    // NOTE(review): findByIdOrNull(...)!! assumes every book in newBooks was already
    // persisted by the caller before this is invoked — confirm against call sites.
    val newBooksWithHash = newBooks.map { book -> bookRepository.findByIdOrNull(book.id)!!.copy(fileHash = hasher.computeHash(book.path)) }
    bookRepository.update(newBooksWithHash)

    // A candidate matches when its hash set and the scanned hash set contain each other
    // (set equality, order-independent). Only the first matching candidate is used.
    val match = deletedCandidates.find { (_, books) ->
      books.map { it.fileHash }.containsAll(newBooksWithHash.map { it.fileHash }) && newBooksWithHash.map { it.fileHash }.containsAll(books.map { it.fileHash })
    }

    if (match != null) {
      // restore series
      logger.info { "Match found, restore $match into $newSeries" }
      transactionTemplate.executeWithoutResult {
        // copy metadata: the deleted series' metadata is re-pointed at the new series.
        // The title is only carried over when locked; otherwise the newly scanned
        // title (derived from the folder name) wins.
        seriesMetadataRepository.findById(match.first.id).let { deleted ->
          val newlyAdded = seriesMetadataRepository.findById(newSeries.id)
          seriesMetadataRepository.update(
            deleted.copy(
              seriesId = newSeries.id,
              title = if (deleted.titleLock) deleted.title else newlyAdded.title,
            )
          )
        }

        // replace deleted series by new series in collections, preserving each
        // collection's ordering by mapping ids in place
        collectionRepository.findAllContainingSeriesId(match.first.id, filterOnLibraryIds = null)
          .forEach { col ->
            collectionRepository.update(
              col.copy(
                seriesIds = col.seriesIds.map { if (it == match.first.id) newSeries.id else it }
              )
            )
          }

        // restore per-book data (media, read progress, read lists, metadata, thumbnails)
        tryRestoreBooks(newBooksWithHash)

        // delete upgraded series
        seriesLifecycle.deleteMany(listOf(match.first))
      }
    }
  }
}
|
||||
|
||||
/**
 * This will try to match each book in newBooks with a deleted book.
 * Books are matched by file size, then by file hash.
 *
 * If a book is matched, the following will be restored from the deleted book to the new book:
 * - Media
 * - Generated thumbnails
 * - Read Progress
 * - Read Lists
 * - Metadata. The metadata title will only be copied if locked. If not locked, the filename is used, but a refresh for Title will be requested.
 *
 * The matched deleted book is then removed via bookLifecycle.deleteOne.
 */
private fun tryRestoreBooks(newBooks: List<Book>) {
  logger.info { "Try to restore books: $newBooks" }
  newBooks.forEach { bookToAdd ->
    // try to find a deleted book that matches the file size; candidates without a
    // stored hash are skipped since they could never be confirmed
    val deletedCandidates = bookRepository.findAllDeletedByFileSize(bookToAdd.fileSize).filter { it.fileHash.isNotBlank() }
    logger.debug { "Deleted candidates: $deletedCandidates" }

    if (deletedCandidates.isNotEmpty()) {
      // if the book has no hash, compute the hash and store it
      // NOTE(review): findByIdOrNull(...)!! assumes bookToAdd is already persisted — confirm
      val bookWithHash =
        if (bookToAdd.fileHash.isNotBlank()) bookToAdd
        else bookRepository.findByIdOrNull(bookToAdd.id)!!.copy(fileHash = hasher.computeHash(bookToAdd.path)).also { bookRepository.update(it) }

      // only the first candidate with an identical hash is used
      val match = deletedCandidates.find { it.fileHash == bookWithHash.fileHash }

      if (match != null) {
        // restore book
        logger.info { "Match found, restore $match into $bookToAdd" }
        transactionTemplate.executeWithoutResult {
          // copy media: re-point the deleted book's media row at the new book
          mediaRepository.findById(match.id).let { deleted ->
            mediaRepository.update(deleted.copy(bookId = bookToAdd.id))
          }

          // copy generated thumbnails (only Type.GENERATED; sidecar thumbnails are rescanned)
          thumbnailBookRepository.findAllByBookIdAndType(match.id, ThumbnailBook.Type.GENERATED).forEach { deleted ->
            thumbnailBookRepository.update(deleted.copy(bookId = bookToAdd.id))
          }

          // copy metadata: the title only survives when locked; otherwise the newly
          // scanned title (filename-derived) is kept and a metadata refresh for TITLE
          // is queued below
          bookMetadataRepository.findById(match.id).let { deleted ->
            val newlyAdded = bookMetadataRepository.findById(bookToAdd.id)
            bookMetadataRepository.update(
              deleted.copy(
                bookId = bookToAdd.id,
                title = if (deleted.titleLock) deleted.title else newlyAdded.title,
              )
            )
            if (!deleted.titleLock) taskReceiver.refreshBookMetadata(bookToAdd.id, listOf(BookMetadataPatchCapability.TITLE))
          }

          // copy read progress
          readProgressRepository.findAllByBookId(match.id)
            .map { it.copy(bookId = bookToAdd.id) }
            .forEach { readProgressRepository.save(it) }

          // replace deleted book by new book in read lists, preserving each read
          // list's ordering via the indexed map
          readListRepository.findAllContainingBookId(match.id, filterOnLibraryIds = null)
            .forEach { rl ->
              readListRepository.update(
                rl.copy(
                  bookIds = rl.bookIds.values.map { if (it == match.id) bookToAdd.id else it }.toIndexedMap()
                )
              )
            }

          // delete soft-deleted book
          bookLifecycle.deleteOne(match)
        }
      }
    }
  }
}
|
||||
|
||||
fun emptyTrash(library: Library) {
|
||||
logger.info { "Empty trash for library: $library" }
|
||||
|
||||
|
|
|
|||
|
|
@ -61,6 +61,18 @@ class BookDao(
|
|||
.fetchInto(b)
|
||||
.map { it.toDomain() }
|
||||
|
||||
override fun findAllByLibraryIdAndUrlNotIn(libraryId: String, urls: Collection<URL>): Collection<Book> {
  // URLs are persisted as strings, so serialize them before building the NOT IN clause
  val urlStrings = urls.map { url -> url.toString() }
  return dsl.selectFrom(b)
    .where(b.LIBRARY_ID.eq(libraryId))
    .and(b.URL.notIn(urlStrings))
    .fetchInto(b)
    .map { record -> record.toDomain() }
}
|
||||
|
||||
override fun findAllDeletedByFileSize(fileSize: Long): Collection<Book> {
  // a book is considered soft-deleted when its deleted_date column is set
  return dsl.selectFrom(b)
    .where(b.DELETED_DATE.isNotNull)
    .and(b.FILE_SIZE.eq(fileSize))
    .fetchInto(b)
    .map { record -> record.toDomain() }
}
|
||||
|
||||
override fun findAll(): Collection<Book> =
|
||||
dsl.selectFrom(b)
|
||||
.fetchInto(b)
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -13,4 +13,5 @@ spring:
|
|||
|
||||
logging:
|
||||
level:
|
||||
org.jooq: DEBUG
|
||||
org.gotson.komga: DEBUG
|
||||
# org.jooq: DEBUG
|
||||
|
|
|
|||
Loading…
Reference in a new issue