feat: add library options for hashing and dimensions analysis

closes #645
This commit is contained in:
Gauthier Roebroeck 2022-01-05 16:29:48 +08:00
parent 3962e8ee6e
commit 5d4ec94e01
29 changed files with 211 additions and 46 deletions

View file

@ -92,12 +92,23 @@
hide-details
class="mx-4"
/>
<v-checkbox
v-model="form.scanForceModifiedTime"
:label="$t('dialog.edit_library.field_scanner_force_directory_modified_time')"
hide-details
class="mx-4"
/>
>
<template v-slot:append>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-icon v-on="on" color="info">mdi-help-circle-outline</v-icon>
</template>
{{ $t('dialog.edit_library.tooltip_scanner_force_modified_time') }}
</v-tooltip>
</template>
</v-checkbox>
<v-checkbox
v-model="form.scanDeep"
:label="$t('dialog.edit_library.field_scanner_deep_scan')"
@ -106,6 +117,58 @@
/>
</v-col>
</v-row>
<v-row>
<v-col cols="auto">
<span class="text-subtitle-1 text--primary">{{ $t('dialog.edit_library.label_analysis') }}</span>
<v-checkbox
v-model="form.hashFiles"
:label="$t('dialog.edit_library.field_analysis_hash_files')"
hide-details
class="mx-4 align-center"
>
<template v-slot:append>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-icon v-on="on" color="warning">mdi-alert-circle-outline</v-icon>
</template>
{{ $t('dialog.edit_library.tooltip_use_resources') }}
</v-tooltip>
</template>
</v-checkbox>
<v-checkbox
v-model="form.hashPages"
:label="$t('dialog.edit_library.field_analysis_hash_pages')"
hide-details
class="mx-4"
>
<template v-slot:append>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-icon v-on="on" color="warning">mdi-alert-circle-outline</v-icon>
</template>
{{ $t('dialog.edit_library.tooltip_use_resources') }}
</v-tooltip>
</template>
</v-checkbox>
<v-checkbox
v-model="form.analyzeDimensions"
:label="$t('dialog.edit_library.field_analysis_analyze_dimensions')"
hide-details
class="mx-4"
>
<template v-slot:append>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-icon v-on="on" color="warning">mdi-alert-circle-outline</v-icon>
</template>
{{ $t('dialog.edit_library.tooltip_use_resources') }}
</v-tooltip>
</template>
</v-checkbox>
</v-col>
</v-row>
<v-row>
<v-col>
<v-checkbox
@ -138,7 +201,9 @@
</v-row>
<v-row>
<v-col>
<span class="text-subtitle-1 text--primary">{{ $t('dialog.edit_library.label_series_cover') }}</span>
<span class="text-subtitle-1 text--primary">{{
$t('dialog.edit_library.label_series_cover')
}}</span>
<v-select :items="seriesCover"
v-model="form.seriesCover"
:label="$t('dialog.edit_library.field_series_cover')"
@ -224,7 +289,9 @@
</v-row>
<v-row>
<v-col>
<span class="text-subtitle-1 text--primary">{{ $t('dialog.edit_library.label_import_mylar') }}</span>
<span class="text-subtitle-1 text--primary">{{
$t('dialog.edit_library.label_import_mylar')
}}</span>
<v-checkbox
v-model="form.importMylarSeries"
:label="$t('dialog.edit_library.field_import_mylar_series')"
@ -256,7 +323,16 @@
:label="$t('dialog.edit_library.field_import_barcode_isbn')"
hide-details
class="mx-4"
/>
>
<template v-slot:append>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-icon v-on="on" color="warning">mdi-alert-circle-outline</v-icon>
</template>
{{ $t('dialog.edit_library.tooltip_use_resources') }}
</v-tooltip>
</template>
</v-checkbox>
</v-col>
</v-row>
@ -307,13 +383,16 @@ export default Vue.extend({
importEpubSeries: true,
importMylarSeries: true,
importLocalArtwork: true,
importBarcodeIsbn: true,
importBarcodeIsbn: false,
scanForceModifiedTime: false,
scanDeep: false,
repairExtensions: false,
convertToCbz: false,
emptyTrashAfterScan: false,
seriesCover: SeriesCoverDto.FIRST as SeriesCoverDto,
hashFiles: true,
hashPages: false,
analyzeDimensions: true,
},
validationFieldNames: new Map([]),
}
@ -435,13 +514,16 @@ export default Vue.extend({
this.form.importEpubSeries = library ? library.importEpubSeries : true
this.form.importMylarSeries = library ? library.importMylarSeries : true
this.form.importLocalArtwork = library ? library.importLocalArtwork : true
this.form.importBarcodeIsbn = library ? library.importBarcodeIsbn : true
this.form.importBarcodeIsbn = library ? library.importBarcodeIsbn : false
this.form.scanForceModifiedTime = library ? library.scanForceModifiedTime : false
this.form.scanDeep = library ? library.scanDeep : false
this.form.repairExtensions = library ? library.repairExtensions : false
this.form.convertToCbz = library ? library.convertToCbz : false
this.form.emptyTrashAfterScan = library ? library.emptyTrashAfterScan : false
this.form.seriesCover = library ? library.seriesCover : SeriesCoverDto.FIRST
this.form.hashFiles = library ? library.hashFiles : true
this.form.hashPages = library ? library.hashPages : false
this.form.analyzeDimensions = library ? library.analyzeDimensions : true
this.$v.$reset()
},
validateLibrary() {
@ -466,6 +548,9 @@ export default Vue.extend({
convertToCbz: this.form.convertToCbz,
emptyTrashAfterScan: this.form.emptyTrashAfterScan,
seriesCover: this.form.seriesCover,
hashFiles: this.form.hashFiles,
hashPages: this.form.hashPages,
analyzeDimensions: this.form.analyzeDimensions,
}
}
return null

View file

@ -363,6 +363,9 @@
"button_next": "Next",
"dialog_title_add": "Add Library",
"dialot_title_edit": "Edit Library",
"field_analysis_analyze_dimensions": "Analyze page dimensions",
"field_analysis_hash_files": "Compute hash for files",
"field_analysis_hash_pages": "Compute hash for pages",
"field_convert_to_cbz": "Automatically convert to CBZ",
"field_import_barcode_isbn": "ISBN barcode",
"field_import_comicinfo_book": "Book metadata",
@ -382,6 +385,7 @@
"field_series_cover": "Series cover",
"file_browser_dialog_button_confirm": "Choose",
"file_browser_dialog_title": "Library's root folder",
"label_analysis": "Analysis",
"label_file_management": "File management",
"label_import_barcode_isbn": "Import ISBN within barcode",
"label_import_comicinfo": "Import metadata for CBR/CBZ containing a ComicInfo.xml file",
@ -392,7 +396,9 @@
"label_series_cover": "Series cover",
"tab_general": "General",
"tab_metadata": "Metadata",
"tab_options": "Options"
"tab_options": "Options",
"tooltip_scanner_force_modified_time": "Enable if the library is on a Google Drive",
"tooltip_use_resources": "Can consume lots of resources on large libraries or slow hardware"
},
"edit_readlist": {
"button_cancel": "Cancel",

View file

@ -18,6 +18,9 @@ export interface LibraryCreationDto {
convertToCbz: boolean,
emptyTrashAfterScan: boolean,
seriesCover: SeriesCoverDto,
hashFiles: boolean,
hashPages: boolean,
analyzeDimensions: boolean,
}
export interface LibraryUpdateDto {
@ -38,6 +41,9 @@ export interface LibraryUpdateDto {
convertToCbz: boolean,
emptyTrashAfterScan: boolean,
seriesCover: SeriesCoverDto,
hashFiles: boolean,
hashPages: boolean,
analyzeDimensions: boolean,
}
export interface LibraryDto {
@ -59,5 +65,8 @@ export interface LibraryDto {
convertToCbz: boolean,
emptyTrashAfterScan: boolean,
seriesCover: SeriesCoverDto,
hashFiles: boolean,
hashPages: boolean,
analyzeDimensions: boolean,
unavailable: boolean,
}

View file

@ -228,6 +228,7 @@ val migrationDirsSqlite = listOf(
flyway {
url = dbSqlite["url"]
locations = arrayOf("classpath:db/migration/sqlite")
placeholders = mapOf("library-file-hashing" to "true")
}
tasks.flywayMigrate {
// in order to include the Java migrations, flywayClasses must be run before flywayMigrate

View file

@ -0,0 +1,6 @@
alter table library
add column HASH_FILES boolean NOT NULL DEFAULT ${library-file-hashing};
alter table library
add column HASH_PAGES boolean NOT NULL DEFAULT 0;
alter table library
add column ANALYZE_DIMENSIONS boolean NOT NULL DEFAULT 1;

View file

@ -66,16 +66,17 @@ class TaskReceiver(
}
fun hashBooksWithoutHash(library: Library) {
if (komgaProperties.fileHashing)
if (library.hashFiles)
bookRepository.findAllIdsByLibraryIdAndWithEmptyHash(library.id).forEach {
submitTask(Task.HashBook(it, LOWEST_PRIORITY))
}
}
fun hashBookPagesWithMissingHash(library: Library) {
mediaRepository.findAllBookIdsByLibraryIdAndWithMissingPageHash(library.id, komgaProperties.pageHashing).forEach {
submitTask(Task.HashBookPages(it, LOWEST_PRIORITY))
}
if (library.hashPages)
mediaRepository.findAllBookIdsByLibraryIdAndWithMissingPageHash(library.id, komgaProperties.pageHashing).forEach {
submitTask(Task.HashBookPages(it, LOWEST_PRIORITY))
}
}
fun convertBooksToCbz(library: Library, priority: Int = DEFAULT_PRIORITY) {

View file

@ -25,6 +25,9 @@ data class Library(
val convertToCbz: Boolean = false,
val emptyTrashAfterScan: Boolean = false,
val seriesCover: SeriesCover = SeriesCover.FIRST,
val hashFiles: Boolean = true,
val hashPages: Boolean = false,
val analyzeDimensions: Boolean = true,
val unavailableDate: LocalDateTime? = null,

View file

@ -38,7 +38,7 @@ class BookAnalyzer(
private val thumbnailSize = 300
private val thumbnailFormat = "jpeg"
fun analyze(book: Book): Media {
fun analyze(book: Book, analyzeDimensions: Boolean): Media {
logger.info { "Trying to analyze book: $book" }
try {
val mediaType = contentDetector.detectMediaType(book.path)
@ -47,7 +47,7 @@ class BookAnalyzer(
return Media(mediaType = mediaType, status = Media.Status.UNSUPPORTED, comment = "ERR_1001", bookId = book.id)
val entries = try {
supportedMediaTypes.getValue(mediaType).getEntries(book.path)
supportedMediaTypes.getValue(mediaType).getEntries(book.path, analyzeDimensions)
} catch (ex: MediaUnsupportedException) {
return Media(mediaType = mediaType, status = Media.Status.UNSUPPORTED, comment = ex.code, bookId = book.id)
} catch (ex: Exception) {

View file

@ -100,7 +100,7 @@ class BookConverter(
)
?: throw IllegalStateException("Newly converted book could not be scanned: $destinationFilename")
val convertedMedia = bookAnalyzer.analyze(convertedBook)
val convertedMedia = bookAnalyzer.analyze(convertedBook, libraryRepository.findById(book.libraryId).analyzeDimensions)
try {
when {

View file

@ -16,11 +16,11 @@ import org.gotson.komga.domain.model.ThumbnailBook
import org.gotson.komga.domain.model.withCode
import org.gotson.komga.domain.persistence.BookMetadataRepository
import org.gotson.komga.domain.persistence.BookRepository
import org.gotson.komga.domain.persistence.LibraryRepository
import org.gotson.komga.domain.persistence.MediaRepository
import org.gotson.komga.domain.persistence.ReadListRepository
import org.gotson.komga.domain.persistence.ReadProgressRepository
import org.gotson.komga.domain.persistence.ThumbnailBookRepository
import org.gotson.komga.infrastructure.configuration.KomgaProperties
import org.gotson.komga.infrastructure.hash.Hasher
import org.gotson.komga.infrastructure.image.ImageConverter
import org.gotson.komga.infrastructure.image.ImageType
@ -47,17 +47,17 @@ class BookLifecycle(
private val readProgressRepository: ReadProgressRepository,
private val thumbnailBookRepository: ThumbnailBookRepository,
private val readListRepository: ReadListRepository,
private val libraryRepository: LibraryRepository,
private val bookAnalyzer: BookAnalyzer,
private val imageConverter: ImageConverter,
private val eventPublisher: EventPublisher,
private val transactionTemplate: TransactionTemplate,
private val hasher: Hasher,
private val komgaProperties: KomgaProperties,
) {
fun analyzeAndPersist(book: Book): Boolean {
logger.info { "Analyze and persist book: $book" }
val media = bookAnalyzer.analyze(book)
val media = bookAnalyzer.analyze(book, libraryRepository.findById(book.libraryId).analyzeDimensions)
transactionTemplate.executeWithoutResult {
// if the number of pages has changed, delete all read progress for that book
@ -77,8 +77,8 @@ class BookLifecycle(
}
fun hashAndPersist(book: Book) {
if (!komgaProperties.fileHashing)
return logger.info { "File hashing is disabled, it may have changed since the task was submitted, skipping" }
if (!libraryRepository.findById(book.libraryId).hashFiles)
return logger.info { "File hashing is disabled for the library, it may have changed since the task was submitted, skipping" }
logger.info { "Hash and persist book: $book" }
if (book.fileHash.isBlank()) {
@ -90,6 +90,9 @@ class BookLifecycle(
}
fun hashPagesAndPersist(book: Book) {
if (!libraryRepository.findById(book.libraryId).hashPages)
return logger.info { "Page hashing is disabled for the library, it may have changed since the task was submitted, skipping" }
logger.info { "Hash and persist pages for book: $book" }
mediaRepository.update(bookAnalyzer.hashPages(BookWithMedia(book, mediaRepository.findById(book.id))))

View file

@ -33,7 +33,7 @@ class TransientBookLifecycle(
}
fun analyzeAndPersist(transientBook: BookWithMedia): BookWithMedia {
val media = bookAnalyzer.analyze(transientBook.book)
val media = bookAnalyzer.analyze(transientBook.book, true)
val updated = transientBook.copy(media = media)
transientBookRepository.save(updated)

View file

@ -23,6 +23,7 @@ class KomgaProperties {
var deleteEmptyCollections: Boolean = true
@Deprecated("Deprecated since 0.143.0, you can configure this in the library options directly")
var fileHashing: Boolean = true
@Positive

View file

@ -75,8 +75,11 @@ class LibraryDao(
.set(l.REPAIR_EXTENSIONS, library.repairExtensions)
.set(l.CONVERT_TO_CBZ, library.convertToCbz)
.set(l.EMPTY_TRASH_AFTER_SCAN, library.emptyTrashAfterScan)
.set(l.UNAVAILABLE_DATE, library.unavailableDate)
.set(l.SERIES_COVER, library.seriesCover.toString())
.set(l.HASH_FILES, library.hashFiles)
.set(l.HASH_PAGES, library.hashPages)
.set(l.ANALYZE_DIMENSIONS, library.analyzeDimensions)
.set(l.UNAVAILABLE_DATE, library.unavailableDate)
.execute()
}
@ -100,6 +103,9 @@ class LibraryDao(
.set(l.CONVERT_TO_CBZ, library.convertToCbz)
.set(l.EMPTY_TRASH_AFTER_SCAN, library.emptyTrashAfterScan)
.set(l.SERIES_COVER, library.seriesCover.toString())
.set(l.HASH_FILES, library.hashFiles)
.set(l.HASH_PAGES, library.hashPages)
.set(l.ANALYZE_DIMENSIONS, library.analyzeDimensions)
.set(l.UNAVAILABLE_DATE, library.unavailableDate)
.set(l.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z")))
.where(l.ID.eq(library.id))
@ -127,6 +133,10 @@ class LibraryDao(
convertToCbz = convertToCbz,
emptyTrashAfterScan = emptyTrashAfterScan,
seriesCover = Library.SeriesCover.valueOf(seriesCover),
hashFiles = hashFiles,
hashPages = hashPages,
analyzeDimensions = analyzeDimensions,
unavailableDate = unavailableDate,
id = id,
createdDate = createdDate.toCurrentTimeZone(),

View file

@ -22,7 +22,7 @@ class EpubExtractor(
override fun mediaTypes(): List<String> = listOf("application/epub+zip")
override fun getEntries(path: Path): List<MediaContainerEntry> {
override fun getEntries(path: Path, analyzeDimensions: Boolean): List<MediaContainerEntry> {
ZipFile(path.toFile()).use { zip ->
try {
val opfFile = getPackagePath(zip)
@ -56,7 +56,7 @@ class EpubExtractor(
val mediaType = manifest.values.first {
it.href == (opfDir?.relativize(image) ?: image).invariantSeparatorsPathString
}.mediaType
val dimension = if (contentDetector.isImage(mediaType))
val dimension = if (analyzeDimensions && contentDetector.isImage(mediaType))
zip.getInputStream(zip.getEntry(name)).use { imageAnalyzer.getDimension(it) }
else
null
@ -64,7 +64,7 @@ class EpubExtractor(
}
} catch (e: Exception) {
logger.error(e) { "File is not a proper Epub, treating it as a zip file" }
return zipExtractor.getEntries(path)
return zipExtractor.getEntries(path, analyzeDimensions)
}
}
}

View file

@ -8,7 +8,7 @@ interface MediaContainerExtractor {
fun mediaTypes(): List<String>
@Throws(MediaUnsupportedException::class)
fun getEntries(path: Path): List<MediaContainerEntry>
fun getEntries(path: Path, analyzeDimensions: Boolean): List<MediaContainerEntry>
fun getEntryStream(path: Path, entryName: String): ByteArray
}

View file

@ -35,12 +35,12 @@ class PdfExtractor(
override fun mediaTypes(): List<String> = listOf("application/pdf")
override fun getEntries(path: Path): List<MediaContainerEntry> =
override fun getEntries(path: Path, analyzeDimensions: Boolean): List<MediaContainerEntry> =
PDDocument.load(path.toFile()).use { pdf ->
(0 until pdf.numberOfPages).map { index ->
val page = pdf.getPage(index)
val scale = page.getScale()
val dimension = Dimension((page.cropBox.width * scale).roundToInt(), (page.cropBox.height * scale).roundToInt())
val dimension = if (analyzeDimensions) Dimension((page.cropBox.width * scale).roundToInt(), (page.cropBox.height * scale).roundToInt()) else null
MediaContainerEntry(name = index.toString(), mediaType = mediaType, dimension = dimension)
}
}

View file

@ -21,7 +21,7 @@ class RarExtractor(
override fun mediaTypes(): List<String> = listOf("application/x-rar-compressed", "application/x-rar-compressed; version=4")
override fun getEntries(path: Path): List<MediaContainerEntry> =
override fun getEntries(path: Path, analyzeDimensions: Boolean): List<MediaContainerEntry> =
Archive(path.toFile()).use { rar ->
if (rar.isPasswordProtected) throw MediaUnsupportedException("Encrypted RAR archives are not supported", "ERR_1002")
if (rar.mainHeader.isSolid) throw MediaUnsupportedException("Solid RAR archives are not supported", "ERR_1003")
@ -32,7 +32,7 @@ class RarExtractor(
try {
val buffer = rar.getInputStream(entry).use { it.readBytes() }
val mediaType = buffer.inputStream().use { contentDetector.detectMediaType(it) }
val dimension = if (contentDetector.isImage(mediaType))
val dimension = if (analyzeDimensions && contentDetector.isImage(mediaType))
buffer.inputStream().use { imageAnalyzer.getDimension(it) }
else
null

View file

@ -28,7 +28,7 @@ class ZipExtractor(
override fun mediaTypes(): List<String> = listOf("application/zip")
override fun getEntries(path: Path): List<MediaContainerEntry> =
override fun getEntries(path: Path, analyzeDimensions: Boolean): List<MediaContainerEntry> =
ZipFile(path.toFile()).use { zip ->
zip.entries.toList()
.filter { !it.isDirectory }
@ -36,7 +36,7 @@ class ZipExtractor(
try {
zip.getInputStream(entry).buffered().use { stream ->
val mediaType = contentDetector.detectMediaType(stream)
val dimension = if (contentDetector.isImage(mediaType))
val dimension = if (analyzeDimensions && contentDetector.isImage(mediaType))
imageAnalyzer.getDimension(stream)
else
null

View file

@ -92,6 +92,9 @@ class LibraryController(
convertToCbz = library.convertToCbz,
emptyTrashAfterScan = library.emptyTrashAfterScan,
seriesCover = library.seriesCover.toDomain(),
hashFiles = library.hashFiles,
hashPages = library.hashPages,
analyzeDimensions = library.analyzeDimensions,
),
).toDto(includeRoot = principal.user.roleAdmin)
} catch (e: Exception) {
@ -133,6 +136,9 @@ class LibraryController(
convertToCbz = library.convertToCbz,
emptyTrashAfterScan = library.emptyTrashAfterScan,
seriesCover = library.seriesCover.toDomain(),
hashFiles = library.hashFiles,
hashPages = library.hashPages,
analyzeDimensions = library.analyzeDimensions,
)
try {
libraryLifecycle.updateLibrary(toUpdate)

View file

@ -20,4 +20,7 @@ data class LibraryCreationDto(
val convertToCbz: Boolean = false,
val emptyTrashAfterScan: Boolean = false,
val seriesCover: SeriesCoverDto = SeriesCoverDto.FIRST,
val hashFiles: Boolean = true,
val hashPages: Boolean = false,
val analyzeDimensions: Boolean = true,
)

View file

@ -22,6 +22,9 @@ data class LibraryDto(
val convertToCbz: Boolean,
val emptyTrashAfterScan: Boolean,
val seriesCover: SeriesCoverDto,
val hashFiles: Boolean,
val hashPages: Boolean,
val analyzeDimensions: Boolean,
val unavailable: Boolean,
)
@ -44,5 +47,8 @@ fun Library.toDto(includeRoot: Boolean) = LibraryDto(
convertToCbz = convertToCbz,
emptyTrashAfterScan = emptyTrashAfterScan,
seriesCover = seriesCover.toDto(),
hashFiles = hashFiles,
hashPages = hashPages,
analyzeDimensions = analyzeDimensions,
unavailable = unavailableDate != null,
)

View file

@ -20,4 +20,7 @@ data class LibraryUpdateDto(
val convertToCbz: Boolean,
val emptyTrashAfterScan: Boolean,
val seriesCover: SeriesCoverDto,
val hashFiles: Boolean,
val hashPages: Boolean,
val analyzeDimensions: Boolean,
)

View file

@ -9,7 +9,6 @@ komga:
file: ":memory:"
cors.allowed-origins:
- http://localhost:8081
# file-hashing: false
# delete-empty-collections: true
# delete-empty-read-lists: true
oauth2-account-creation: false

View file

@ -28,6 +28,8 @@ spring:
enabled: true
locations: classpath:db/migration/{vendor}
mixed: true
placeholders:
library-file-hashing: \${komga.file-hashing:true}
thymeleaf:
prefix: classpath:/public/
mvc:

View file

@ -2,6 +2,7 @@ package org.gotson.komga.domain.service
import com.ninjasquad.springmockk.SpykBean
import io.mockk.every
import io.mockk.verify
import org.assertj.core.api.Assertions.assertThat
import org.gotson.komga.domain.model.Book
import org.gotson.komga.domain.model.BookPage
@ -40,7 +41,7 @@ class BookAnalyzerTest(
val file = ClassPathResource("archives/rar4.rar")
val book = Book("book", file.url, LocalDateTime.now())
val media = bookAnalyzer.analyze(book)
val media = bookAnalyzer.analyze(book, false)
assertThat(media.mediaType).isEqualTo("application/x-rar-compressed; version=4")
assertThat(media.status).isEqualTo(Media.Status.READY)
@ -57,7 +58,7 @@ class BookAnalyzerTest(
val file = ClassPathResource("archives/rar4-solid.rar")
val book = Book("book", file.url, LocalDateTime.now())
val media = bookAnalyzer.analyze(book)
val media = bookAnalyzer.analyze(book, false)
assertThat(media.mediaType).isEqualTo("application/x-rar-compressed; version=4")
assertThat(media.status).isEqualTo(Media.Status.UNSUPPORTED)
@ -73,7 +74,7 @@ class BookAnalyzerTest(
val file = ClassPathResource("archives/$fileName")
val book = Book("book", file.url, LocalDateTime.now())
val media = bookAnalyzer.analyze(book)
val media = bookAnalyzer.analyze(book, false)
assertThat(media.mediaType).isEqualTo("application/x-rar-compressed; version=5")
assertThat(media.status).isEqualTo(Media.Status.UNSUPPORTED)
@ -89,7 +90,7 @@ class BookAnalyzerTest(
val file = ClassPathResource("archives/$fileName")
val book = Book("book", file.url, LocalDateTime.now())
val media = bookAnalyzer.analyze(book)
val media = bookAnalyzer.analyze(book, false)
assertThat(media.mediaType).isEqualTo("application/x-7z-compressed")
assertThat(media.status).isEqualTo(Media.Status.UNSUPPORTED)
@ -105,7 +106,7 @@ class BookAnalyzerTest(
val file = ClassPathResource("archives/$fileName")
val book = Book("book", file.url, LocalDateTime.now())
val media = bookAnalyzer.analyze(book)
val media = bookAnalyzer.analyze(book, false)
assertThat(media.mediaType).isEqualTo("application/zip")
assertThat(media.status).isEqualTo(Media.Status.READY)
@ -117,7 +118,7 @@ class BookAnalyzerTest(
val file = ClassPathResource("archives/zip-encrypted.zip")
val book = Book("book", file.url, LocalDateTime.now())
val media = bookAnalyzer.analyze(book)
val media = bookAnalyzer.analyze(book, false)
assertThat(media.mediaType).isEqualTo("application/zip")
assertThat(media.status).isEqualTo(Media.Status.ERROR)
@ -128,7 +129,7 @@ class BookAnalyzerTest(
val file = ClassPathResource("archives/epub3.epub")
val book = Book("book", file.url, LocalDateTime.now())
val media = bookAnalyzer.analyze(book)
val media = bookAnalyzer.analyze(book, false)
assertThat(media.mediaType).isEqualTo("application/epub+zip")
assertThat(media.status).isEqualTo(Media.Status.READY)

View file

@ -100,7 +100,7 @@ class BookLifecycleTest(
assertThat(readProgressRepository.findAll()).hasSize(2)
// when
every { mockAnalyzer.analyze(any()) } returns Media(status = Media.Status.READY, mediaType = "application/zip", pages = mutableListOf(makeBookPage("1.jpg"), makeBookPage("2.jpg")), bookId = book.id)
every { mockAnalyzer.analyze(any(), any()) } returns Media(status = Media.Status.READY, mediaType = "application/zip", pages = mutableListOf(makeBookPage("1.jpg"), makeBookPage("2.jpg")), bookId = book.id)
bookLifecycle.analyzeAndPersist(book)
// then
@ -133,7 +133,7 @@ class BookLifecycleTest(
assertThat(readProgressRepository.findAll()).hasSize(2)
// when
every { mockAnalyzer.analyze(any()) } returns Media(status = Media.Status.READY, mediaType = "application/zip", pages = (1..10).map { BookPage("$it", "image/jpeg") }, bookId = book.id)
every { mockAnalyzer.analyze(any(), any()) } returns Media(status = Media.Status.READY, mediaType = "application/zip", pages = (1..10).map { BookPage("$it", "image/jpeg") }, bookId = book.id)
bookLifecycle.analyzeAndPersist(book)
// then

View file

@ -284,7 +284,7 @@ class LibraryContentLifecycleTest(
)
libraryContentLifecycle.scanRootFolder(library)
every { mockAnalyzer.analyze(any()) } returns Media(status = Media.Status.READY, mediaType = "application/zip", pages = mutableListOf(makeBookPage("1.jpg"), makeBookPage("2.jpg")), bookId = book1.id)
every { mockAnalyzer.analyze(any(), any()) } returns Media(status = Media.Status.READY, mediaType = "application/zip", pages = mutableListOf(makeBookPage("1.jpg"), makeBookPage("2.jpg")), bookId = book1.id)
bookRepository.findAll().map { bookLifecycle.analyzeAndPersist(it) }
// when
@ -292,7 +292,7 @@ class LibraryContentLifecycleTest(
// then
verify(exactly = 2) { mockScanner.scanRootFolder(any()) }
verify(exactly = 1) { mockAnalyzer.analyze(any()) }
verify(exactly = 1) { mockAnalyzer.analyze(any(), any()) }
bookRepository.findAll().first().let { book ->
assertThat(book.lastModifiedDate).isNotEqualTo(book.createdDate)
@ -320,7 +320,7 @@ class LibraryContentLifecycleTest(
)
libraryContentLifecycle.scanRootFolder(library)
every { mockAnalyzer.analyze(any()) } returns Media(status = Media.Status.READY, mediaType = "application/zip", pages = mutableListOf(makeBookPage("1.jpg"), makeBookPage("2.jpg")), bookId = book1.id)
every { mockAnalyzer.analyze(any(), any()) } returns Media(status = Media.Status.READY, mediaType = "application/zip", pages = mutableListOf(makeBookPage("1.jpg"), makeBookPage("2.jpg")), bookId = book1.id)
every { mockHasher.computeHash(any<Path>()) }.returnsMany("abc", "def")
bookRepository.findAll().map {
@ -333,7 +333,7 @@ class LibraryContentLifecycleTest(
// then
verify(exactly = 2) { mockScanner.scanRootFolder(any()) }
verify(exactly = 1) { mockAnalyzer.analyze(any()) }
verify(exactly = 1) { mockAnalyzer.analyze(any(), any()) }
verify(exactly = 1) { mockHasher.computeHash(any<Path>()) }
bookRepository.findAll().first().let { book ->

View file

@ -68,6 +68,9 @@ class LibraryDaoTest(
convertToCbz = true,
emptyTrashAfterScan = true,
seriesCover = Library.SeriesCover.LAST,
hashFiles = false,
hashPages = true,
analyzeDimensions = false,
)
}
@ -95,6 +98,9 @@ class LibraryDaoTest(
assertThat(modified.convertToCbz).isEqualTo(updated.convertToCbz)
assertThat(modified.emptyTrashAfterScan).isEqualTo(updated.emptyTrashAfterScan)
assertThat(modified.seriesCover).isEqualTo(updated.seriesCover)
assertThat(modified.hashFiles).isEqualTo(updated.hashFiles)
assertThat(modified.hashPages).isEqualTo(updated.hashPages)
assertThat(modified.analyzeDimensions).isEqualTo(updated.analyzeDimensions)
}
@Test

View file

@ -19,7 +19,7 @@ class EpubExtractorTest {
fun `given epub 3 file when parsing for entries then returns all images contained in pages`() {
val epubResource = ClassPathResource("epub/The Incomplete Theft - Ralph Burke.epub")
val entries = epubExtractor.getEntries(epubResource.file.toPath())
val entries = epubExtractor.getEntries(epubResource.file.toPath(), true)
assertThat(entries).hasSize(1)
with(entries.first()) {
@ -28,4 +28,18 @@ class EpubExtractorTest {
assertThat(dimension).isEqualTo(Dimension(461, 616))
}
}
@Test
fun `given epub 3 file when parsing for entries without analyzing dimensions then returns all images contained in pages without dimensions`() {
val epubResource = ClassPathResource("epub/The Incomplete Theft - Ralph Burke.epub")
val entries = epubExtractor.getEntries(epubResource.file.toPath(), false)
assertThat(entries).hasSize(1)
with(entries.first()) {
assertThat(name).isEqualTo("cover.jpeg")
assertThat(mediaType).isEqualTo("image/jpeg")
assertThat(dimension).isNull()
}
}
}