mirror of
https://github.com/gotson/komga.git
synced 2026-01-06 07:44:35 +01:00
feat(api): add support for oneshots directory in libraries
This commit is contained in:
parent
11f8aaf122
commit
739eecafc4
27 changed files with 262 additions and 67 deletions
|
|
@ -0,0 +1,8 @@
|
|||
-- Oneshot support migration.
-- A "oneshot" is a single standalone book exposed as its own one-book series.

-- Flag individual books as oneshots.
ALTER TABLE BOOK
    ADD COLUMN oneshot boolean NOT NULL DEFAULT 0;

-- Flag series that were generated around a single oneshot book.
ALTER TABLE SERIES
    ADD COLUMN oneshot boolean NOT NULL DEFAULT 0;

-- Optional per-library directory name; files found under it are scanned
-- as oneshots instead of regular series. NULL disables the feature.
ALTER TABLE LIBRARY
    ADD COLUMN ONESHOTS_DIRECTORY varchar NULL DEFAULT NULL;
|
||||
|
|
@ -20,6 +20,7 @@ data class Book(
|
|||
val libraryId: String = "",
|
||||
|
||||
val deletedDate: LocalDateTime? = null,
|
||||
val oneshot: Boolean = false,
|
||||
|
||||
override val createdDate: LocalDateTime = LocalDateTime.now(),
|
||||
override val lastModifiedDate: LocalDateTime = createdDate,
|
||||
|
|
|
|||
|
|
@ -28,6 +28,7 @@ data class Library(
|
|||
val hashFiles: Boolean = true,
|
||||
val hashPages: Boolean = false,
|
||||
val analyzeDimensions: Boolean = true,
|
||||
val oneshotsDirectory: String? = null,
|
||||
|
||||
val unavailableDate: LocalDateTime? = null,
|
||||
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@ data class Series(
|
|||
val bookCount: Int = 0,
|
||||
|
||||
val deletedDate: LocalDateTime? = null,
|
||||
val oneshot: Boolean = false,
|
||||
|
||||
override val createdDate: LocalDateTime = LocalDateTime.now(),
|
||||
override val lastModifiedDate: LocalDateTime = createdDate,
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ open class SeriesSearch(
|
|||
val publishers: Collection<String>? = null,
|
||||
val deleted: Boolean? = null,
|
||||
val complete: Boolean? = null,
|
||||
val oneshot: Boolean? = null,
|
||||
) {
|
||||
enum class SearchField {
|
||||
NAME, TITLE, TITLE_SORT
|
||||
|
|
@ -24,6 +25,7 @@ class SeriesSearchWithReadProgress(
|
|||
publishers: Collection<String>? = null,
|
||||
deleted: Boolean? = null,
|
||||
complete: Boolean? = null,
|
||||
oneshot: Boolean? = null,
|
||||
val languages: Collection<String>? = null,
|
||||
val genres: Collection<String>? = null,
|
||||
val tags: Collection<String>? = null,
|
||||
|
|
@ -41,4 +43,5 @@ class SeriesSearchWithReadProgress(
|
|||
publishers = publishers,
|
||||
deleted = deleted,
|
||||
complete = complete,
|
||||
oneshot = oneshot,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -64,6 +64,7 @@ class BookImporter(
|
|||
fun importBook(sourceFile: Path, series: Series, copyMode: CopyMode, destinationName: String? = null, upgradeBookId: String? = null): Book {
|
||||
try {
|
||||
if (sourceFile.notExists()) throw FileNotFoundException("File not found: $sourceFile").withCode("ERR_1018")
|
||||
if (series.oneshot) throw IllegalArgumentException("Destination series is oneshot")
|
||||
|
||||
libraryRepository.findAll().forEach { library ->
|
||||
if (sourceFile.startsWith(library.path)) throw PathContainedInPath("Cannot import file that is part of an existing library", "ERR_1019")
|
||||
|
|
@ -99,6 +100,7 @@ class BookImporter(
|
|||
logger.warn { "Could not delete upgraded book: ${bookToUpgrade.path}" }
|
||||
}
|
||||
}
|
||||
|
||||
destFile.exists() -> throw FileAlreadyExistsException("Destination file already exists: $destFile").withCode("ERR_1021")
|
||||
}
|
||||
// delete existing sidecars
|
||||
|
|
@ -121,6 +123,7 @@ class BookImporter(
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
CopyMode.COPY -> {
|
||||
logger.info { "Copying file $sourceFile to $destFile" }
|
||||
sourceFile.copyTo(destFile)
|
||||
|
|
@ -131,6 +134,7 @@ class BookImporter(
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
CopyMode.HARDLINK -> try {
|
||||
logger.info { "Hardlink file $sourceFile to $destFile" }
|
||||
Files.createLink(destFile, sourceFile)
|
||||
|
|
|
|||
|
|
@ -50,7 +50,7 @@ class FileSystemScanner(
|
|||
|
||||
private val sidecarBookPrefilter = sidecarBookConsumers.flatMap { it.getSidecarBookPrefilter() }
|
||||
|
||||
fun scanRootFolder(root: Path, forceDirectoryModifiedTime: Boolean = false): ScanResult {
|
||||
fun scanRootFolder(root: Path, forceDirectoryModifiedTime: Boolean = false, oneshotsDir: String? = null): ScanResult {
|
||||
logger.info { "Scanning folder: $root" }
|
||||
logger.info { "Supported extensions: $supportedExtensions" }
|
||||
logger.info { "Excluded patterns: ${komgaProperties.librariesScanDirectoryExclusions}" }
|
||||
|
|
@ -132,16 +132,28 @@ class FileSystemScanner(
|
|||
val books = pathToBooks[dir]
|
||||
val tempSeries = pathToSeries[dir]
|
||||
if (!books.isNullOrEmpty() && tempSeries !== null) {
|
||||
val series =
|
||||
if (forceDirectoryModifiedTime)
|
||||
tempSeries.copy(fileLastModified = maxOf(tempSeries.fileLastModified, books.maxOf { it.fileLastModified }))
|
||||
else
|
||||
tempSeries
|
||||
if (!oneshotsDir.isNullOrBlank() && dir.pathString.contains(oneshotsDir, true)) {
|
||||
books.forEach { book ->
|
||||
val series = Series(
|
||||
name = book.name,
|
||||
url = book.url,
|
||||
fileLastModified = book.fileLastModified,
|
||||
oneshot = true,
|
||||
)
|
||||
scannedSeries[series] = listOf(book.copy(oneshot = true))
|
||||
}
|
||||
} else {
|
||||
val series =
|
||||
if (forceDirectoryModifiedTime)
|
||||
tempSeries.copy(fileLastModified = maxOf(tempSeries.fileLastModified, books.maxOf { it.fileLastModified }))
|
||||
else
|
||||
tempSeries
|
||||
|
||||
scannedSeries[series] = books
|
||||
scannedSeries[series] = books
|
||||
|
||||
// only add series sidecars if series has books
|
||||
pathToSeriesSidecars[dir]?.let { scannedSidecars.addAll(it) }
|
||||
// only add series sidecars if series has books
|
||||
pathToSeriesSidecars[dir]?.let { scannedSidecars.addAll(it) }
|
||||
}
|
||||
|
||||
// book sidecars are matched here, with the actual list of books
|
||||
books.forEach { book ->
|
||||
|
|
|
|||
|
|
@ -66,7 +66,7 @@ class LibraryContentLifecycle(
|
|||
logger.info { "Updating library: $library" }
|
||||
measureTime {
|
||||
val scanResult = try {
|
||||
fileSystemScanner.scanRootFolder(Paths.get(library.root.toURI()), library.scanForceModifiedTime)
|
||||
fileSystemScanner.scanRootFolder(Paths.get(library.root.toURI()), library.scanForceModifiedTime, library.oneshotsDirectory)
|
||||
} catch (e: DirectoryNotFoundException) {
|
||||
library.copy(unavailableDate = LocalDateTime.now()).let {
|
||||
libraryRepository.update(it)
|
||||
|
|
|
|||
|
|
@ -252,7 +252,8 @@ class BookDao(
|
|||
b.LIBRARY_ID,
|
||||
b.SERIES_ID,
|
||||
b.DELETED_DATE,
|
||||
).values(null as String?, null, null, null, null, null, null, null, null, null),
|
||||
b.ONESHOT,
|
||||
).values(null as String?, null, null, null, null, null, null, null, null, null, null),
|
||||
).also { step ->
|
||||
chunk.forEach {
|
||||
step.bind(
|
||||
|
|
@ -266,6 +267,7 @@ class BookDao(
|
|||
it.libraryId,
|
||||
it.seriesId,
|
||||
it.deletedDate,
|
||||
it.oneshot,
|
||||
)
|
||||
}
|
||||
}.execute()
|
||||
|
|
@ -295,6 +297,7 @@ class BookDao(
|
|||
.set(b.SERIES_ID, book.seriesId)
|
||||
.set(b.DELETED_DATE, book.deletedDate)
|
||||
.set(b.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z")))
|
||||
.set(b.ONESHOT, book.oneshot)
|
||||
.where(b.ID.eq(book.id))
|
||||
.execute()
|
||||
}
|
||||
|
|
@ -353,6 +356,7 @@ class BookDao(
|
|||
libraryId = libraryId,
|
||||
seriesId = seriesId,
|
||||
deletedDate = deletedDate,
|
||||
oneshot = oneshot,
|
||||
createdDate = createdDate.toCurrentTimeZone(),
|
||||
lastModifiedDate = lastModifiedDate.toCurrentTimeZone(),
|
||||
number = number,
|
||||
|
|
|
|||
|
|
@ -431,6 +431,7 @@ class BookDtoDao(
|
|||
readProgress = readProgress,
|
||||
deleted = deletedDate != null,
|
||||
fileHash = fileHash,
|
||||
oneshot = oneshot,
|
||||
)
|
||||
|
||||
private fun MediaRecord.toDto() =
|
||||
|
|
|
|||
|
|
@ -79,6 +79,7 @@ class LibraryDao(
|
|||
.set(l.HASH_FILES, library.hashFiles)
|
||||
.set(l.HASH_PAGES, library.hashPages)
|
||||
.set(l.ANALYZE_DIMENSIONS, library.analyzeDimensions)
|
||||
.set(l.ONESHOTS_DIRECTORY, library.oneshotsDirectory)
|
||||
.set(l.UNAVAILABLE_DATE, library.unavailableDate)
|
||||
.execute()
|
||||
}
|
||||
|
|
@ -106,6 +107,7 @@ class LibraryDao(
|
|||
.set(l.HASH_FILES, library.hashFiles)
|
||||
.set(l.HASH_PAGES, library.hashPages)
|
||||
.set(l.ANALYZE_DIMENSIONS, library.analyzeDimensions)
|
||||
.set(l.ONESHOTS_DIRECTORY, library.oneshotsDirectory)
|
||||
.set(l.UNAVAILABLE_DATE, library.unavailableDate)
|
||||
.set(l.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z")))
|
||||
.where(l.ID.eq(library.id))
|
||||
|
|
@ -136,6 +138,7 @@ class LibraryDao(
|
|||
hashFiles = hashFiles,
|
||||
hashPages = hashPages,
|
||||
analyzeDimensions = analyzeDimensions,
|
||||
oneshotsDirectory = oneshotsDirectory,
|
||||
|
||||
unavailableDate = unavailableDate,
|
||||
id = id,
|
||||
|
|
|
|||
|
|
@ -104,6 +104,7 @@ class SeriesDao(
|
|||
.set(s.FILE_LAST_MODIFIED, series.fileLastModified)
|
||||
.set(s.LIBRARY_ID, series.libraryId)
|
||||
.set(s.DELETED_DATE, series.deletedDate)
|
||||
.set(s.ONESHOT, series.oneshot)
|
||||
.execute()
|
||||
}
|
||||
|
||||
|
|
@ -115,6 +116,7 @@ class SeriesDao(
|
|||
.set(s.LIBRARY_ID, series.libraryId)
|
||||
.set(s.BOOK_COUNT, series.bookCount)
|
||||
.set(s.DELETED_DATE, series.deletedDate)
|
||||
.set(s.ONESHOT, series.oneshot)
|
||||
.apply { if (updateModifiedTime) set(s.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z"))) }
|
||||
.where(s.ID.eq(series.id))
|
||||
.execute()
|
||||
|
|
@ -174,6 +176,7 @@ class SeriesDao(
|
|||
libraryId = libraryId,
|
||||
bookCount = bookCount,
|
||||
deletedDate = deletedDate,
|
||||
oneshot = oneshot,
|
||||
createdDate = createdDate.toCurrentTimeZone(),
|
||||
lastModifiedDate = lastModifiedDate.toCurrentTimeZone(),
|
||||
)
|
||||
|
|
|
|||
|
|
@ -303,6 +303,7 @@ class SeriesDtoDao(
|
|||
if (deleted == false) c = c.and(s.DELETED_DATE.isNull)
|
||||
if (complete == false) c = c.and(d.TOTAL_BOOK_COUNT.isNotNull.and(d.TOTAL_BOOK_COUNT.ne(s.BOOK_COUNT)))
|
||||
if (complete == true) c = c.and(d.TOTAL_BOOK_COUNT.isNotNull.and(d.TOTAL_BOOK_COUNT.eq(s.BOOK_COUNT)))
|
||||
if (oneshot != null) c = c.and(s.ONESHOT.eq(oneshot))
|
||||
if (!languages.isNullOrEmpty()) c = c.and(d.LANGUAGE.collate(SqliteUdfDataSource.collationUnicode3).`in`(languages))
|
||||
if (!genres.isNullOrEmpty()) c = c.and(g.GENRE.collate(SqliteUdfDataSource.collationUnicode3).`in`(genres))
|
||||
if (!tags.isNullOrEmpty()) c = c.and(st.TAG.collate(SqliteUdfDataSource.collationUnicode3).`in`(tags).or(bmat.TAG.collate(SqliteUdfDataSource.collationUnicode3).`in`(tags)))
|
||||
|
|
@ -383,6 +384,7 @@ class SeriesDtoDao(
|
|||
metadata = metadata,
|
||||
booksMetadata = booksMetadata,
|
||||
deleted = deletedDate != null,
|
||||
oneshot = oneshot,
|
||||
)
|
||||
|
||||
private fun SeriesMetadataRecord.toDto(genres: Set<String>, tags: Set<String>, sharingLabels: Set<String>, links: List<WebLinkDto>, alternateTitles: List<AlternateTitleDto>) =
|
||||
|
|
|
|||
|
|
@ -52,6 +52,11 @@ class LocalArtworkProvider(
|
|||
}
|
||||
|
||||
fun getSeriesThumbnails(series: Series): List<ThumbnailSeries> {
|
||||
if (series.oneshot) {
|
||||
logger.debug { "Disabled for oneshot series, skipping" }
|
||||
return emptyList()
|
||||
}
|
||||
|
||||
logger.info { "Looking for local thumbnails for series: $series" }
|
||||
|
||||
return Files.list(series.path).use { dirStream ->
|
||||
|
|
|
|||
|
|
@ -25,6 +25,11 @@ class MylarSeriesProvider(
|
|||
) : SeriesMetadataProvider, SidecarSeriesConsumer {
|
||||
|
||||
override fun getSeriesMetadata(series: Series): SeriesMetadataPatch? {
|
||||
if (series.oneshot) {
|
||||
logger.debug { "Disabled for oneshot series, skipping" }
|
||||
return null
|
||||
}
|
||||
|
||||
try {
|
||||
val seriesJsonPath = series.path.resolve(SERIES_JSON)
|
||||
if (seriesJsonPath.notExists()) {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,38 @@
|
|||
package org.gotson.komga.infrastructure.metadata.oneshot

import org.gotson.komga.domain.model.Library
import org.gotson.komga.domain.model.MetadataPatchTarget
import org.gotson.komga.domain.model.Series
import org.gotson.komga.domain.model.SeriesMetadata
import org.gotson.komga.domain.model.SeriesMetadataPatch
import org.gotson.komga.domain.persistence.BookMetadataRepository
import org.gotson.komga.domain.persistence.BookRepository
import org.gotson.komga.infrastructure.metadata.SeriesMetadataProvider
import org.springframework.stereotype.Service

/**
 * Derives series-level metadata for oneshot series from their single book.
 *
 * A oneshot series mirrors its book: title and sort title come from the book's
 * title, the summary from the book's summary, the status is always ENDED and
 * the total book count is always 1.
 */
@Service
class OneShotSeriesProvider(
  private val bookRepository: BookRepository,
  private val bookMetadataRepository: BookMetadataRepository,
) : SeriesMetadataProvider {
  /**
   * Builds a metadata patch for [series] from its single book's metadata.
   *
   * Returns null when [series] is not a oneshot, or — defensively — when the
   * series has no books yet (e.g. while a scan is still in flight), instead of
   * letting `first()` throw NoSuchElementException as before.
   */
  override fun getSeriesMetadata(series: Series): SeriesMetadataPatch? {
    if (!series.oneshot) return null
    val bookId = bookRepository.findAllIdsBySeriesId(series.id).firstOrNull() ?: return null
    val bookMetadata = bookMetadataRepository.findById(bookId)
    // Positional arguments: title, titleSort, status, summary, then the fields
    // a oneshot does not set (null), totalBookCount = 1, and empty collections.
    // NOTE(review): order assumed from the original call — confirm against the
    // SeriesMetadataPatch constructor.
    return SeriesMetadataPatch(
      bookMetadata.title,
      bookMetadata.title,
      SeriesMetadata.Status.ENDED,
      bookMetadata.summary,
      null,
      null,
      null,
      null,
      null,
      1,
      emptySet(),
    )
  }

  // Oneshot-derived patches only ever apply to series-level metadata.
  override fun shouldLibraryHandlePatch(library: Library, target: MetadataPatchTarget): Boolean =
    target == MetadataPatchTarget.SERIES
}
|
||||
|
|
@ -37,13 +37,15 @@ fun BookDto.toDocument() =
|
|||
if (metadata.releaseDate != null) add(TextField("release_date", DateTools.dateToString(metadata.releaseDate.toDate(), DateTools.Resolution.YEAR), Field.Store.NO))
|
||||
add(TextField("status", media.status, Field.Store.NO))
|
||||
add(TextField("deleted", deleted.toString(), Field.Store.NO))
|
||||
add(TextField("oneshot", oneshot.toString(), Field.Store.NO))
|
||||
|
||||
add(StringField(LuceneEntity.TYPE, LuceneEntity.Book.type, Field.Store.NO))
|
||||
add(StringField(LuceneEntity.Book.id, id, Field.Store.YES))
|
||||
}
|
||||
|
||||
fun SeriesDto.toDocument() =
|
||||
Document().apply {
|
||||
if (oneshot) null
|
||||
else Document().apply {
|
||||
add(TextField("title", metadata.title, Field.Store.NO))
|
||||
if (metadata.titleSort != metadata.title) add(TextField("title", metadata.titleSort, Field.Store.NO))
|
||||
metadata.alternateTitles.forEach { add(TextField("title", it.title, Field.Store.NO)) }
|
||||
|
|
@ -80,6 +82,22 @@ fun SeriesDto.toDocument() =
|
|||
add(StringField(LuceneEntity.Series.id, id, Field.Store.YES))
|
||||
}
|
||||
|
||||
/**
 * Augments an existing Lucene [document] (typically the book's own document)
 * with this oneshot series' searchable fields, so a oneshot book can also be
 * found by series-level criteria (publisher, status, language, genres, ...).
 *
 * Returns the same [document] instance, mutated in place.
 */
fun SeriesDto.oneshotDocument(document: Document) =
  document.apply {
    add(TextField("publisher", metadata.publisher, Field.Store.NO))
    add(TextField("status", metadata.status, Field.Store.NO))
    add(TextField("reading_direction", metadata.readingDirection, Field.Store.NO))
    // Optional fields are only indexed when present.
    if (metadata.ageRating != null) add(TextField("age_rating", metadata.ageRating.toString(), Field.Store.NO))
    if (metadata.language.isNotBlank()) add(TextField("language", metadata.language, Field.Store.NO))
    metadata.genres.forEach {
      add(TextField("genre", it, Field.Store.NO))
    }
    metadata.sharingLabels.forEach {
      add(TextField("sharing_label", it, Field.Store.NO))
    }
    // Hard-coded "true": a oneshot series presumably always counts as
    // complete (single book, total count 1) — confirm with product intent.
    add(TextField("complete", "true", Field.Store.NO))
  }
|
||||
|
||||
fun SeriesCollection.toDocument() =
|
||||
Document().apply {
|
||||
add(TextField("name", name, Field.Store.NO))
|
||||
|
|
|
|||
|
|
@ -48,7 +48,7 @@ class SearchIndexLifecycle(
|
|||
|
||||
targetEntities.forEach {
|
||||
when (it) {
|
||||
LuceneEntity.Book -> rebuildIndex(it, { p: Pageable -> bookDtoRepository.findAll(BookSearchWithReadProgress(), "unused", p) }, { e: BookDto -> e.toDocument() })
|
||||
LuceneEntity.Book -> rebuildIndex(it, { p: Pageable -> bookDtoRepository.findAll(BookSearchWithReadProgress(), "unused", p) }, { e: BookDto -> e.bookToDocument() })
|
||||
LuceneEntity.Series -> rebuildIndex(it, { p: Pageable -> seriesDtoRepository.findAll(SeriesSearchWithReadProgress(), "unused", p) }, { e: SeriesDto -> e.toDocument() })
|
||||
LuceneEntity.Collection -> rebuildIndex(it, { p: Pageable -> collectionRepository.findAll(pageable = p) }, { e: SeriesCollection -> e.toDocument() })
|
||||
LuceneEntity.ReadList -> rebuildIndex(it, { p: Pageable -> readListRepository.findAll(pageable = p) }, { e: ReadList -> e.toDocument() })
|
||||
|
|
@ -58,7 +58,7 @@ class SearchIndexLifecycle(
|
|||
luceneHelper.setIndexVersion(INDEX_VERSION)
|
||||
}
|
||||
|
||||
private fun <T> rebuildIndex(entity: LuceneEntity, provider: (Pageable) -> Page<out T>, toDoc: (T) -> Document) {
|
||||
private fun <T> rebuildIndex(entity: LuceneEntity, provider: (Pageable) -> Page<out T>, toDoc: (T) -> Document?) {
|
||||
logger.info { "Rebuilding index for ${entity.name}" }
|
||||
|
||||
val count = provider(Pageable.ofSize(1)).totalElements
|
||||
|
|
@ -73,7 +73,7 @@ class SearchIndexLifecycle(
|
|||
(0 until pages).forEach { page ->
|
||||
logger.info { "Processing page ${page + 1} of $pages ($batchSize elements)" }
|
||||
val entityDocs = provider(PageRequest.of(page, batchSize)).content
|
||||
.map { toDoc(it) }
|
||||
.mapNotNull { toDoc(it) }
|
||||
indexWriter.addDocuments(entityDocs)
|
||||
}
|
||||
}.also { duration ->
|
||||
|
|
@ -89,8 +89,8 @@ class SearchIndexLifecycle(
|
|||
is DomainEvent.SeriesUpdated -> seriesDtoRepository.findByIdOrNull(event.series.id, "unused")?.toDocument()?.let { updateEntity(LuceneEntity.Series, event.series.id, it) }
|
||||
is DomainEvent.SeriesDeleted -> deleteEntity(LuceneEntity.Series, event.series.id)
|
||||
|
||||
is DomainEvent.BookAdded -> bookDtoRepository.findByIdOrNull(event.book.id, "unused")?.toDocument()?.let { addEntity(it) }
|
||||
is DomainEvent.BookUpdated -> bookDtoRepository.findByIdOrNull(event.book.id, "unused")?.toDocument()?.let { updateEntity(LuceneEntity.Book, event.book.id, it) }
|
||||
is DomainEvent.BookAdded -> bookDtoRepository.findByIdOrNull(event.book.id, "unused")?.bookToDocument()?.let { addEntity(it) }
|
||||
is DomainEvent.BookUpdated -> bookDtoRepository.findByIdOrNull(event.book.id, "unused")?.bookToDocument()?.let { updateEntity(LuceneEntity.Book, event.book.id, it) }
|
||||
is DomainEvent.BookDeleted -> deleteEntity(LuceneEntity.Book, event.book.id)
|
||||
|
||||
is DomainEvent.ReadListAdded -> readListRepository.findByIdOrNull(event.readList.id)?.toDocument()?.let { addEntity(it) }
|
||||
|
|
@ -105,6 +105,11 @@ class SearchIndexLifecycle(
|
|||
}
|
||||
}
|
||||
|
||||
// Builds the Lucene document for a book. For a oneshot book, the series-level
// fields are merged into the book document so the oneshot is searchable by
// series criteria as well.
private fun BookDto.bookToDocument(): Document =
  if (this.oneshot) {
    // NOTE(review): `!!` assumes the backing series always exists for a
    // oneshot book — confirm this cannot race with series deletion.
    seriesDtoRepository.findByIdOrNull(seriesId, "unused")!!.oneshotDocument(toDocument())
  } else this.toDocument()
|
||||
|
||||
private fun addEntity(doc: Document) {
|
||||
luceneHelper.getIndexWriter().use { indexWriter ->
|
||||
indexWriter.addDocument(doc)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
package org.gotson.komga.interfaces.api.rest
|
||||
|
||||
import io.swagger.v3.oas.annotations.Parameter
|
||||
import jakarta.validation.Valid
|
||||
import org.gotson.komga.application.tasks.HIGHEST_PRIORITY
|
||||
import org.gotson.komga.application.tasks.HIGH_PRIORITY
|
||||
|
|
@ -27,6 +28,7 @@ import org.springframework.security.access.prepost.PreAuthorize
|
|||
import org.springframework.security.core.annotation.AuthenticationPrincipal
|
||||
import org.springframework.web.bind.annotation.DeleteMapping
|
||||
import org.springframework.web.bind.annotation.GetMapping
|
||||
import org.springframework.web.bind.annotation.PatchMapping
|
||||
import org.springframework.web.bind.annotation.PathVariable
|
||||
import org.springframework.web.bind.annotation.PostMapping
|
||||
import org.springframework.web.bind.annotation.PutMapping
|
||||
|
|
@ -98,6 +100,7 @@ class LibraryController(
|
|||
hashFiles = library.hashFiles,
|
||||
hashPages = library.hashPages,
|
||||
analyzeDimensions = library.analyzeDimensions,
|
||||
oneshotsDirectory = library.oneshotsDirectory?.ifBlank { null },
|
||||
),
|
||||
).toDto(includeRoot = principal.user.roleAdmin)
|
||||
} catch (e: Exception) {
|
||||
|
|
@ -108,6 +111,7 @@ class LibraryController(
|
|||
is PathContainedInPath,
|
||||
->
|
||||
throw ResponseStatusException(HttpStatus.BAD_REQUEST, e.message)
|
||||
|
||||
else -> throw ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR)
|
||||
}
|
||||
}
|
||||
|
|
@ -115,35 +119,52 @@ class LibraryController(
|
|||
@PutMapping("/{libraryId}")
|
||||
@PreAuthorize("hasRole('$ROLE_ADMIN')")
|
||||
@ResponseStatus(HttpStatus.NO_CONTENT)
|
||||
@Deprecated("Use PATCH /v1/library instead", ReplaceWith("patchOne"))
|
||||
fun updateOne(
|
||||
@PathVariable libraryId: String,
|
||||
@Valid @RequestBody
|
||||
library: LibraryUpdateDto,
|
||||
) {
|
||||
libraryRepository.findByIdOrNull(libraryId)?.let {
|
||||
val toUpdate = Library(
|
||||
id = libraryId,
|
||||
name = library.name,
|
||||
root = filePathToUrl(library.root),
|
||||
importComicInfoBook = library.importComicInfoBook,
|
||||
importComicInfoSeries = library.importComicInfoSeries,
|
||||
importComicInfoCollection = library.importComicInfoCollection,
|
||||
importComicInfoReadList = library.importComicInfoReadList,
|
||||
importComicInfoSeriesAppendVolume = library.importComicInfoSeriesAppendVolume,
|
||||
importEpubBook = library.importEpubBook,
|
||||
importEpubSeries = library.importEpubSeries,
|
||||
importMylarSeries = library.importMylarSeries,
|
||||
importLocalArtwork = library.importLocalArtwork,
|
||||
importBarcodeIsbn = library.importBarcodeIsbn,
|
||||
scanForceModifiedTime = library.scanForceModifiedTime,
|
||||
repairExtensions = library.repairExtensions,
|
||||
convertToCbz = library.convertToCbz,
|
||||
emptyTrashAfterScan = library.emptyTrashAfterScan,
|
||||
seriesCover = library.seriesCover.toDomain(),
|
||||
hashFiles = library.hashFiles,
|
||||
hashPages = library.hashPages,
|
||||
analyzeDimensions = library.analyzeDimensions,
|
||||
)
|
||||
patchOne(libraryId, library)
|
||||
}
|
||||
|
||||
@PatchMapping("/{libraryId}")
|
||||
@PreAuthorize("hasRole('$ROLE_ADMIN')")
|
||||
@ResponseStatus(HttpStatus.NO_CONTENT)
|
||||
fun patchOne(
|
||||
@PathVariable libraryId: String,
|
||||
@Parameter(description = "Fields to update. You can omit fields you don't want to update.")
|
||||
@Valid
|
||||
@RequestBody
|
||||
library: LibraryUpdateDto,
|
||||
) {
|
||||
libraryRepository.findByIdOrNull(libraryId)?.let { existing ->
|
||||
val toUpdate = with(library) {
|
||||
existing.copy(
|
||||
id = libraryId,
|
||||
name = name ?: existing.name,
|
||||
root = root?.let { filePathToUrl(root!!) } ?: existing.root,
|
||||
importComicInfoBook = importComicInfoBook ?: existing.importComicInfoBook,
|
||||
importComicInfoSeries = importComicInfoSeries ?: existing.importComicInfoSeries,
|
||||
importComicInfoCollection = importComicInfoCollection ?: existing.importComicInfoCollection,
|
||||
importComicInfoReadList = importComicInfoReadList ?: existing.importComicInfoReadList,
|
||||
importComicInfoSeriesAppendVolume = importComicInfoSeriesAppendVolume ?: existing.importComicInfoSeriesAppendVolume,
|
||||
importEpubBook = importEpubBook ?: existing.importEpubBook,
|
||||
importEpubSeries = importEpubSeries ?: existing.importEpubSeries,
|
||||
importMylarSeries = importMylarSeries ?: existing.importMylarSeries,
|
||||
importLocalArtwork = importLocalArtwork ?: existing.importLocalArtwork,
|
||||
importBarcodeIsbn = importBarcodeIsbn ?: existing.importBarcodeIsbn,
|
||||
scanForceModifiedTime = scanForceModifiedTime ?: existing.scanForceModifiedTime,
|
||||
repairExtensions = repairExtensions ?: existing.repairExtensions,
|
||||
convertToCbz = convertToCbz ?: existing.convertToCbz,
|
||||
emptyTrashAfterScan = emptyTrashAfterScan ?: existing.emptyTrashAfterScan,
|
||||
seriesCover = seriesCover?.toDomain() ?: existing.seriesCover,
|
||||
hashFiles = hashFiles ?: existing.hashFiles,
|
||||
hashPages = hashPages ?: existing.hashPages,
|
||||
analyzeDimensions = analyzeDimensions ?: existing.analyzeDimensions,
|
||||
oneshotsDirectory = if (isSet("oneshotsDirectory")) oneshotsDirectory?.ifBlank { null } else existing.oneshotsDirectory,
|
||||
)
|
||||
}
|
||||
try {
|
||||
libraryLifecycle.updateLibrary(toUpdate)
|
||||
} catch (e: Exception) {
|
||||
|
|
@ -154,6 +175,7 @@ class LibraryController(
|
|||
is PathContainedInPath,
|
||||
->
|
||||
throw ResponseStatusException(HttpStatus.BAD_REQUEST, e.message)
|
||||
|
||||
else -> throw ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -285,7 +285,6 @@ class SeriesCollectionController(
|
|||
val seriesSearch = SeriesSearchWithReadProgress(
|
||||
libraryIds = principal.user.getAuthorizedLibraryIds(libraryIds),
|
||||
metadataStatus = metadataStatus,
|
||||
readStatus = readStatus,
|
||||
publishers = publishers,
|
||||
deleted = deleted,
|
||||
complete = complete,
|
||||
|
|
@ -294,6 +293,7 @@ class SeriesCollectionController(
|
|||
tags = tags,
|
||||
ageRatings = ageRatings?.map { it.toIntOrNull() },
|
||||
releaseYears = releaseYears,
|
||||
readStatus = readStatus,
|
||||
authors = authors,
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -143,6 +143,7 @@ class SeriesController(
|
|||
@RequestParam(name = "sharing_label", required = false) sharingLabels: List<String>? = null,
|
||||
@RequestParam(name = "deleted", required = false) deleted: Boolean? = null,
|
||||
@RequestParam(name = "complete", required = false) complete: Boolean? = null,
|
||||
@RequestParam(name = "oneshot", required = false) oneshot: Boolean? = null,
|
||||
@RequestParam(name = "unpaged", required = false) unpaged: Boolean = false,
|
||||
@Parameter(hidden = true) @Authors authors: List<Author>? = null,
|
||||
@Parameter(hidden = true) page: Pageable,
|
||||
|
|
@ -174,15 +175,16 @@ class SeriesController(
|
|||
}
|
||||
},
|
||||
metadataStatus = metadataStatus,
|
||||
readStatus = readStatus,
|
||||
publishers = publishers,
|
||||
deleted = deleted,
|
||||
complete = complete,
|
||||
oneshot = oneshot,
|
||||
languages = languages,
|
||||
genres = genres,
|
||||
tags = tags,
|
||||
ageRatings = ageRatings?.map { it.toIntOrNull() },
|
||||
releaseYears = releaseYears,
|
||||
readStatus = readStatus,
|
||||
authors = authors,
|
||||
sharingLabels = sharingLabels,
|
||||
)
|
||||
|
|
@ -220,6 +222,7 @@ class SeriesController(
|
|||
@RequestParam(name = "sharing_label", required = false) sharingLabels: List<String>? = null,
|
||||
@RequestParam(name = "deleted", required = false) deleted: Boolean?,
|
||||
@RequestParam(name = "complete", required = false) complete: Boolean?,
|
||||
@RequestParam(name = "oneshot", required = false) oneshot: Boolean? = null,
|
||||
@Parameter(hidden = true) @Authors authors: List<Author>?,
|
||||
@Parameter(hidden = true) page: Pageable,
|
||||
): List<GroupCountDto> {
|
||||
|
|
@ -235,15 +238,16 @@ class SeriesController(
|
|||
}
|
||||
},
|
||||
metadataStatus = metadataStatus,
|
||||
readStatus = readStatus,
|
||||
publishers = publishers,
|
||||
deleted = deleted,
|
||||
complete = complete,
|
||||
oneshot = oneshot,
|
||||
languages = languages,
|
||||
genres = genres,
|
||||
tags = tags,
|
||||
ageRatings = ageRatings?.map { it.toIntOrNull() },
|
||||
releaseYears = releaseYears,
|
||||
readStatus = readStatus,
|
||||
authors = authors,
|
||||
sharingLabels = sharingLabels,
|
||||
)
|
||||
|
|
@ -258,6 +262,7 @@ class SeriesController(
|
|||
@AuthenticationPrincipal principal: KomgaPrincipal,
|
||||
@RequestParam(name = "library_id", required = false) libraryIds: List<String>?,
|
||||
@RequestParam(name = "deleted", required = false) deleted: Boolean?,
|
||||
@RequestParam(name = "oneshot", required = false) oneshot: Boolean? = null,
|
||||
@RequestParam(name = "unpaged", required = false) unpaged: Boolean = false,
|
||||
@Parameter(hidden = true) page: Pageable,
|
||||
): Page<SeriesDto> {
|
||||
|
|
@ -275,6 +280,7 @@ class SeriesController(
|
|||
SeriesSearchWithReadProgress(
|
||||
libraryIds = principal.user.getAuthorizedLibraryIds(libraryIds),
|
||||
deleted = deleted,
|
||||
oneshot = oneshot,
|
||||
),
|
||||
principal.user.id,
|
||||
pageRequest,
|
||||
|
|
@ -289,6 +295,7 @@ class SeriesController(
|
|||
@AuthenticationPrincipal principal: KomgaPrincipal,
|
||||
@RequestParam(name = "library_id", required = false) libraryIds: List<String>? = null,
|
||||
@RequestParam(name = "deleted", required = false) deleted: Boolean? = null,
|
||||
@RequestParam(name = "oneshot", required = false) oneshot: Boolean? = null,
|
||||
@RequestParam(name = "unpaged", required = false) unpaged: Boolean = false,
|
||||
@Parameter(hidden = true) page: Pageable,
|
||||
): Page<SeriesDto> {
|
||||
|
|
@ -306,6 +313,7 @@ class SeriesController(
|
|||
SeriesSearchWithReadProgress(
|
||||
libraryIds = principal.user.getAuthorizedLibraryIds(libraryIds),
|
||||
deleted = deleted,
|
||||
oneshot = oneshot,
|
||||
),
|
||||
principal.user.id,
|
||||
pageRequest,
|
||||
|
|
@ -320,6 +328,7 @@ class SeriesController(
|
|||
@AuthenticationPrincipal principal: KomgaPrincipal,
|
||||
@RequestParam(name = "library_id", required = false) libraryIds: List<String>? = null,
|
||||
@RequestParam(name = "deleted", required = false) deleted: Boolean? = null,
|
||||
@RequestParam(name = "oneshot", required = false) oneshot: Boolean? = null,
|
||||
@RequestParam(name = "unpaged", required = false) unpaged: Boolean = false,
|
||||
@Parameter(hidden = true) page: Pageable,
|
||||
): Page<SeriesDto> {
|
||||
|
|
@ -337,6 +346,7 @@ class SeriesController(
|
|||
SeriesSearchWithReadProgress(
|
||||
libraryIds = principal.user.getAuthorizedLibraryIds(libraryIds),
|
||||
deleted = deleted,
|
||||
oneshot = oneshot,
|
||||
),
|
||||
principal.user.id,
|
||||
principal.user.restrictions,
|
||||
|
|
|
|||
|
|
@ -27,6 +27,7 @@ data class BookDto(
|
|||
val readProgress: ReadProgressDto? = null,
|
||||
val deleted: Boolean,
|
||||
val fileHash: String,
|
||||
val oneshot: Boolean,
|
||||
)
|
||||
|
||||
fun BookDto.restrictUrl(restrict: Boolean) =
|
||||
|
|
|
|||
|
|
@ -23,4 +23,5 @@ data class LibraryCreationDto(
|
|||
val hashFiles: Boolean = true,
|
||||
val hashPages: Boolean = false,
|
||||
val analyzeDimensions: Boolean = true,
|
||||
val oneshotsDirectory: String? = null,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ data class LibraryDto(
|
|||
val hashFiles: Boolean,
|
||||
val hashPages: Boolean,
|
||||
val analyzeDimensions: Boolean,
|
||||
val oneshotsDirectory: String?,
|
||||
val unavailable: Boolean,
|
||||
)
|
||||
|
||||
|
|
@ -50,5 +51,6 @@ fun Library.toDto(includeRoot: Boolean) = LibraryDto(
|
|||
hashFiles = hashFiles,
|
||||
hashPages = hashPages,
|
||||
analyzeDimensions = analyzeDimensions,
|
||||
oneshotsDirectory = oneshotsDirectory,
|
||||
unavailable = unavailableDate != null,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,26 +1,39 @@
|
|||
package org.gotson.komga.interfaces.api.rest.dto
|
||||
|
||||
import jakarta.validation.constraints.NotBlank
|
||||
import org.gotson.komga.infrastructure.validation.NullOrNotBlank
|
||||
import kotlin.properties.Delegates
|
||||
|
||||
data class LibraryUpdateDto(
|
||||
@get:NotBlank val name: String,
|
||||
@get:NotBlank val root: String,
|
||||
val importComicInfoBook: Boolean,
|
||||
val importComicInfoSeries: Boolean,
|
||||
val importComicInfoCollection: Boolean,
|
||||
val importComicInfoReadList: Boolean,
|
||||
val importComicInfoSeriesAppendVolume: Boolean,
|
||||
val importEpubBook: Boolean,
|
||||
val importEpubSeries: Boolean,
|
||||
val importMylarSeries: Boolean,
|
||||
val importLocalArtwork: Boolean,
|
||||
val importBarcodeIsbn: Boolean,
|
||||
val scanForceModifiedTime: Boolean,
|
||||
val repairExtensions: Boolean,
|
||||
val convertToCbz: Boolean,
|
||||
val emptyTrashAfterScan: Boolean,
|
||||
val seriesCover: SeriesCoverDto,
|
||||
val hashFiles: Boolean,
|
||||
val hashPages: Boolean,
|
||||
val analyzeDimensions: Boolean,
|
||||
)
|
||||
/**
 * Partial-update payload for a library (PATCH semantics): every field is
 * nullable, and a null value means "not provided, keep the existing value".
 *
 * For [oneshotsDirectory] an explicit null is meaningful (it clears the
 * setting), so assignment is tracked via a property delegate and exposed
 * through [isSet] to distinguish "absent from the request" from "set to null".
 */
class LibraryUpdateDto {
  // Tracks which observed properties were actually assigned during deserialization.
  private val isSet = mutableMapOf<String, Boolean>()
  // Returns true if the named property was explicitly provided in the request.
  fun isSet(prop: String) = isSet.getOrDefault(prop, false)

  @get:NullOrNotBlank
  val name: String? = null

  @get:NullOrNotBlank
  val root: String? = null

  val importComicInfoBook: Boolean? = null
  val importComicInfoSeries: Boolean? = null
  val importComicInfoCollection: Boolean? = null
  val importComicInfoReadList: Boolean? = null
  val importComicInfoSeriesAppendVolume: Boolean? = null
  val importEpubBook: Boolean? = null
  val importEpubSeries: Boolean? = null
  val importMylarSeries: Boolean? = null
  val importLocalArtwork: Boolean? = null
  val importBarcodeIsbn: Boolean? = null

  val scanForceModifiedTime: Boolean? = null
  val repairExtensions: Boolean? = null
  val convertToCbz: Boolean? = null
  val emptyTrashAfterScan: Boolean? = null
  val seriesCover: SeriesCoverDto? = null
  val hashFiles: Boolean? = null
  val hashPages: Boolean? = null
  val analyzeDimensions: Boolean? = null
  // Observable delegate records any assignment (even null) in isSet, so the
  // controller can tell "clear the directory" apart from "field omitted".
  var oneshotsDirectory: String?
    by Delegates.observable(null) { prop, _, _ ->
      isSet[prop.name] = true
    }
}
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ data class SeriesDto(
|
|||
val metadata: SeriesMetadataDto,
|
||||
val booksMetadata: BookMetadataAggregationDto,
|
||||
val deleted: Boolean,
|
||||
val oneshot: Boolean,
|
||||
)
|
||||
|
||||
fun SeriesDto.restrictUrl(restrict: Boolean) =
|
||||
|
|
|
|||
|
|
@ -319,6 +319,37 @@ class FileSystemScannerTest {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
fun `given oneshot directory when scanning then return a series per file`() {
  Jimfs.newFileSystem(Configuration.unix()).use { fs ->
    // given
    val root = fs.getPath("/root")
    Files.createDirectory(root)

    val normal = makeSubDir(root, "normal", listOf("comic.cbz"))
    makeSubDir(normal, "_oneshots", listOf("single4.cbz", "single5.cbz"))
    makeSubDir(root, "_oneshots", listOf("single.cbz", "single2.cbz", "single3.cbz"))

    // when
    // Fix: oneshotsDir must be passed explicitly — without it the scanner's
    // oneshot branch never triggers and `_oneshots` would be scanned as a
    // regular series directory, making the assertions below impossible.
    val scan = scanner.scanRootFolder(root, oneshotsDir = "_oneshots").series

    // then
    // 1 regular series + 5 oneshot files, each promoted to its own series.
    assertThat(scan).hasSize(6)
    assertThat(scan.keys.map { it.name })
      .containsExactlyInAnyOrder("normal", "single", "single2", "single3", "single4", "single5")
      .doesNotContain("_oneshots")
    val (oneshots, regular) = scan.keys.partition { it.name.startsWith("single") }
    // Both the generated series and their books carry the oneshot flag.
    assertThat(oneshots.map { it.oneshot }).containsOnly(true)
    assertThat(oneshots.flatMap { scan[it] ?: emptyList() }.map { it.oneshot }).containsOnly(true)
    assertThat(regular.map { it.oneshot }).containsOnly(false)
    assertThat(regular.flatMap { scan[it] ?: emptyList() }.map { it.oneshot }).containsOnly(false)

    // Every series — oneshot or regular — holds exactly one book here.
    scan.forEach { (_, books) ->
      assertThat(books).hasSize(1)
    }
  }
}
|
||||
|
||||
private fun makeSubDir(root: Path, name: String, files: List<String>): Path {
|
||||
val dir = root.resolve(name)
|
||||
Files.createDirectory(dir)
|
||||
|
|
|
|||
Loading…
Reference in a new issue