mirror of
https://github.com/gotson/komga.git
synced 2025-12-21 16:03:03 +01:00
feat: aggregate book information at series level
This commit is contained in:
parent
fc27ec86a3
commit
eb029d9bb5
15 changed files with 516 additions and 5 deletions
|
|
@ -0,0 +1,20 @@
|
|||
-- Series-level aggregation of book metadata: one row per series, kept in sync
-- by the application whenever book metadata changes within the series.
CREATE TABLE BOOK_METADATA_AGGREGATION
(
    CREATED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    LAST_MODIFIED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    -- earliest release date found among the series' books, if any
    RELEASE_DATE date NULL,
    -- summary of the first book (by sort number) that has a non-blank summary
    SUMMARY varchar NOT NULL DEFAULT '',
    -- number of the book the summary was taken from
    SUMMARY_NUMBER varchar NOT NULL DEFAULT '',
    SERIES_ID varchar NOT NULL PRIMARY KEY,
    FOREIGN KEY (SERIES_ID) REFERENCES SERIES (ID)
);

-- Authors aggregated from all books of a series; zero or more rows per series.
CREATE TABLE BOOK_METADATA_AGGREGATION_AUTHOR
(
    NAME varchar NOT NULL,
    ROLE varchar NOT NULL,
    SERIES_ID varchar NOT NULL,
    FOREIGN KEY (SERIES_ID) REFERENCES SERIES (ID)
);

-- Seed an empty aggregation row for every existing series, so every series has
-- exactly one aggregation row even before its books are (re-)aggregated.
INSERT INTO BOOK_METADATA_AGGREGATION(SERIES_ID)
SELECT ID
from SERIES;
|
||||
|
|
@ -24,4 +24,8 @@ sealed class Task : Serializable {
|
|||
data class RefreshSeriesMetadata(val seriesId: String) : Task() {
|
||||
override fun uniqueId() = "REFRESH_SERIES_METADATA_$seriesId"
|
||||
}
|
||||
|
||||
data class AggregateSeriesMetadata(val seriesId: String) : Task() {
|
||||
override fun uniqueId() = "AGGREGATE_SERIES_METADATA_$seriesId"
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -60,6 +60,12 @@ class TaskHandler(
|
|||
is Task.RefreshSeriesMetadata ->
|
||||
seriesRepository.findByIdOrNull(task.seriesId)?.let {
|
||||
metadataLifecycle.refreshMetadata(it)
|
||||
taskReceiver.aggregateSeriesMetadata(it.id)
|
||||
} ?: logger.warn { "Cannot execute task $task: Series does not exist" }
|
||||
|
||||
is Task.AggregateSeriesMetadata ->
|
||||
seriesRepository.findByIdOrNull(task.seriesId)?.let {
|
||||
metadataLifecycle.aggregateMetadata(it)
|
||||
} ?: logger.warn { "Cannot execute task $task: Series does not exist" }
|
||||
}
|
||||
}.also {
|
||||
|
|
|
|||
|
|
@ -66,6 +66,10 @@ class TaskReceiver(
|
|||
submitTask(Task.RefreshSeriesMetadata(seriesId))
|
||||
}
|
||||
|
||||
fun aggregateSeriesMetadata(seriesId: String) {
|
||||
submitTask(Task.AggregateSeriesMetadata(seriesId))
|
||||
}
|
||||
|
||||
private fun submitTask(task: Task) {
|
||||
logger.info { "Sending task: $task" }
|
||||
jmsTemplate.convertAndSend(QUEUE_TASKS, task) {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,16 @@
|
|||
package org.gotson.komga.domain.model
|
||||
|
||||
import java.time.LocalDate
|
||||
import java.time.LocalDateTime
|
||||
|
||||
/**
 * Book metadata aggregated at the series level.
 *
 * Produced by MetadataAggregator from the metadata of all books in a series:
 * authors are the de-duplicated union over the books, the release date is the
 * earliest one found, and the summary is taken from the first book (by sort
 * number) that has a non-blank summary.
 */
data class BookMetadataAggregation(
  // union of the books' authors, de-duplicated
  val authors: List<Author> = emptyList(),
  // earliest release date among the series' books, if any
  val releaseDate: LocalDate? = null,
  // summary of the first book (by sort number) with a non-blank summary
  val summary: String = "",
  // number of the book the summary was taken from
  val summaryNumber: String = "",

  // id of the series this aggregation belongs to (primary key in persistence)
  val seriesId: String = "",

  override val createdDate: LocalDateTime = LocalDateTime.now(),
  override val lastModifiedDate: LocalDateTime = LocalDateTime.now()
) : Auditable()
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
package org.gotson.komga.domain.persistence
|
||||
|
||||
import org.gotson.komga.domain.model.BookMetadataAggregation
|
||||
|
||||
/**
 * Persistence contract for [BookMetadataAggregation], keyed by series id
 * (one aggregation per series).
 */
interface BookMetadataAggregationRepository {
  /** Returns the aggregation for [seriesId]; throws if none exists. */
  fun findById(seriesId: String): BookMetadataAggregation
  /** Returns the aggregation for [seriesId], or null if none exists. */
  fun findByIdOrNull(seriesId: String): BookMetadataAggregation?

  /** Inserts a new aggregation; its seriesId must reference an existing series. */
  fun insert(metadata: BookMetadataAggregation)
  /** Replaces the stored aggregation (including its author rows) for the metadata's series. */
  fun update(metadata: BookMetadataAggregation)

  /** Deletes the aggregation (and its authors) for a single series. */
  fun delete(seriesId: String)
  /** Deletes the aggregations (and their authors) for multiple series. */
  fun delete(seriesIds: Collection<String>)

  /** Number of stored aggregation rows. */
  fun count(): Long
}
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
package org.gotson.komga.domain.service
|
||||
|
||||
import org.gotson.komga.domain.model.BookMetadata
|
||||
import org.gotson.komga.domain.model.BookMetadataAggregation
|
||||
import org.springframework.stereotype.Service
|
||||
|
||||
@Service
class MetadataAggregator {

  /**
   * Aggregates the metadata of a collection of books into a single
   * series-level [BookMetadataAggregation]:
   * - authors: union of all books' authors, de-duplicated by (name, role),
   * - summary / summaryNumber: summary and number of the first book (ordered
   *   by [BookMetadata.numberSort]) that has a non-blank summary, or empty
   *   strings if none has one,
   * - releaseDate: earliest release date among the books, or null if none.
   *
   * The returned aggregation carries the default (empty) seriesId; callers
   * set it afterwards (e.g. via copy()).
   */
  fun aggregate(metadatas: Collection<BookMetadata>): BookMetadataAggregation {
    // De-duplicate on the (name, role) pair rather than a string built with a
    // "__" separator: the concatenated form can collide when a name or role
    // itself contains the separator.
    val authors = metadatas.flatMap { it.authors }.distinctBy { it.name to it.role }

    val (summary, summaryNumber) = metadatas
      .sortedBy { it.numberSort }
      .find { it.summary.isNotBlank() }
      ?.let { it.summary to it.number }
      ?: ("" to "")

    val releaseDate = metadatas.mapNotNull { it.releaseDate }.minOrNull()

    return BookMetadataAggregation(
      authors = authors,
      releaseDate = releaseDate,
      summary = summary,
      summaryNumber = summaryNumber,
    )
  }
}
|
||||
|
|
@ -6,6 +6,7 @@ import org.gotson.komga.domain.model.ReadList
|
|||
import org.gotson.komga.domain.model.Series
|
||||
import org.gotson.komga.domain.model.SeriesCollection
|
||||
import org.gotson.komga.domain.model.SeriesMetadataPatch
|
||||
import org.gotson.komga.domain.persistence.BookMetadataAggregationRepository
|
||||
import org.gotson.komga.domain.persistence.BookMetadataRepository
|
||||
import org.gotson.komga.domain.persistence.BookRepository
|
||||
import org.gotson.komga.domain.persistence.LibraryRepository
|
||||
|
|
@ -27,9 +28,11 @@ class MetadataLifecycle(
|
|||
private val bookMetadataProviders: List<BookMetadataProvider>,
|
||||
private val seriesMetadataProviders: List<SeriesMetadataProvider>,
|
||||
private val metadataApplier: MetadataApplier,
|
||||
private val metadataAggregator: MetadataAggregator,
|
||||
private val mediaRepository: MediaRepository,
|
||||
private val bookMetadataRepository: BookMetadataRepository,
|
||||
private val seriesMetadataRepository: SeriesMetadataRepository,
|
||||
private val bookMetadataAggregationRepository: BookMetadataAggregationRepository,
|
||||
private val libraryRepository: LibraryRepository,
|
||||
private val bookRepository: BookRepository,
|
||||
private val bookLifecycle: BookLifecycle,
|
||||
|
|
@ -192,6 +195,15 @@ class MetadataLifecycle(
|
|||
}
|
||||
}
|
||||
|
||||
fun aggregateMetadata(series: Series){
|
||||
logger.info { "Aggregate book metadata for series: $series" }
|
||||
|
||||
val metadatas = bookMetadataRepository.findByIds(bookRepository.findAllIdBySeriesId(series.id))
|
||||
val aggregation = metadataAggregator.aggregate(metadatas).copy(seriesId = series.id)
|
||||
|
||||
bookMetadataAggregationRepository.update(aggregation)
|
||||
}
|
||||
|
||||
private fun <T, R : Any> Iterable<T>.mostFrequent(transform: (T) -> R?): R? {
|
||||
return this
|
||||
.mapNotNull(transform)
|
||||
|
|
|
|||
|
|
@ -6,10 +6,12 @@ import org.apache.commons.lang3.StringUtils
|
|||
import org.gotson.komga.application.tasks.TaskReceiver
|
||||
import org.gotson.komga.domain.model.Book
|
||||
import org.gotson.komga.domain.model.BookMetadata
|
||||
import org.gotson.komga.domain.model.BookMetadataAggregation
|
||||
import org.gotson.komga.domain.model.Media
|
||||
import org.gotson.komga.domain.model.Series
|
||||
import org.gotson.komga.domain.model.SeriesMetadata
|
||||
import org.gotson.komga.domain.model.ThumbnailSeries
|
||||
import org.gotson.komga.domain.persistence.BookMetadataAggregationRepository
|
||||
import org.gotson.komga.domain.persistence.BookMetadataRepository
|
||||
import org.gotson.komga.domain.persistence.BookRepository
|
||||
import org.gotson.komga.domain.persistence.MediaRepository
|
||||
|
|
@ -35,6 +37,7 @@ class SeriesLifecycle(
|
|||
private val seriesRepository: SeriesRepository,
|
||||
private val thumbnailsSeriesRepository: ThumbnailSeriesRepository,
|
||||
private val seriesMetadataRepository: SeriesMetadataRepository,
|
||||
private val bookMetadataAggregationRepository: BookMetadataAggregationRepository,
|
||||
private val collectionRepository: SeriesCollectionRepository,
|
||||
private val taskReceiver: TaskReceiver
|
||||
) {
|
||||
|
|
@ -107,6 +110,10 @@ class SeriesLifecycle(
|
|||
)
|
||||
)
|
||||
|
||||
bookMetadataAggregationRepository.insert(
|
||||
BookMetadataAggregation(seriesId = series.id)
|
||||
)
|
||||
|
||||
return seriesRepository.findByIdOrNull(series.id)!!
|
||||
}
|
||||
|
||||
|
|
@ -119,6 +126,7 @@ class SeriesLifecycle(
|
|||
collectionRepository.removeSeriesFromAll(seriesId)
|
||||
thumbnailsSeriesRepository.deleteBySeriesId(seriesId)
|
||||
seriesMetadataRepository.delete(seriesId)
|
||||
bookMetadataAggregationRepository.delete(seriesId)
|
||||
|
||||
seriesRepository.delete(seriesId)
|
||||
}
|
||||
|
|
@ -132,6 +140,7 @@ class SeriesLifecycle(
|
|||
collectionRepository.removeSeriesFromAll(seriesIds)
|
||||
thumbnailsSeriesRepository.deleteBySeriesIds(seriesIds)
|
||||
seriesMetadataRepository.delete(seriesIds)
|
||||
bookMetadataAggregationRepository.delete(seriesIds)
|
||||
|
||||
seriesRepository.deleteAll(seriesIds)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,124 @@
|
|||
package org.gotson.komga.infrastructure.jooq
|
||||
|
||||
import org.gotson.komga.domain.model.Author
|
||||
import org.gotson.komga.domain.model.BookMetadataAggregation
|
||||
import org.gotson.komga.domain.persistence.BookMetadataAggregationRepository
|
||||
import org.gotson.komga.jooq.Tables
|
||||
import org.gotson.komga.jooq.tables.records.BookMetadataAggregationAuthorRecord
|
||||
import org.gotson.komga.jooq.tables.records.BookMetadataAggregationRecord
|
||||
import org.jooq.DSLContext
|
||||
import org.springframework.stereotype.Component
|
||||
import java.time.LocalDateTime
|
||||
import java.time.ZoneId
|
||||
|
||||
/**
 * jOOQ-backed implementation of [BookMetadataAggregationRepository].
 *
 * An aggregation is stored across two tables:
 * - BOOK_METADATA_AGGREGATION: one row per series (summary, release date, audit dates),
 * - BOOK_METADATA_AGGREGATION_AUTHOR: zero or more author rows per series.
 */
@Component
class BookMetadataAggregationDao(
  private val dsl: DSLContext
) : BookMetadataAggregationRepository {

  // table shorthands: d = aggregation row, a = aggregated author rows
  private val d = Tables.BOOK_METADATA_AGGREGATION
  private val a = Tables.BOOK_METADATA_AGGREGATION_AUTHOR

  // all columns of both tables, used both for SELECT and GROUP BY
  private val groupFields = arrayOf(*d.fields(), *a.fields())

  /** Returns the aggregation for [seriesId]; throws if none exists (first() on an empty list). */
  override fun findById(seriesId: String): BookMetadataAggregation =
    findOne(listOf(seriesId)).first()

  /** Returns the aggregation for [seriesId], or null if none exists. */
  override fun findByIdOrNull(seriesId: String): BookMetadataAggregation? =
    findOne(listOf(seriesId)).firstOrNull()

  // Fetches aggregations together with their authors in a single left-joined
  // query, then regroups the flat rows into one domain object per series.
  private fun findOne(seriesIds: Collection<String>) =
    dsl.select(*groupFields)
      .from(d)
      .leftJoin(a).on(d.SERIES_ID.eq(a.SERIES_ID))
      .where(d.SERIES_ID.`in`(seriesIds))
      .groupBy(*groupFields)
      .fetchGroups(
        { it.into(d) }, { it.into(a) }
      ).map { (dr, ar) ->
        // the left join yields one all-null author record for a series without
        // authors: filter those out before mapping to domain
        dr.toDomain(ar.filterNot { it.name == null }.map { it.toDomain() })
      }

  override fun insert(metadata: BookMetadataAggregation) {
    // single transaction: aggregation row plus its author rows
    dsl.transaction { config ->
      config.dsl().insertInto(d)
        .set(d.SERIES_ID, metadata.seriesId)
        .set(d.RELEASE_DATE, metadata.releaseDate)
        .set(d.SUMMARY, metadata.summary)
        .set(d.SUMMARY_NUMBER, metadata.summaryNumber)
        .execute()

      insertAuthors(config.dsl(), metadata)
    }
  }

  override fun update(metadata: BookMetadataAggregation) {
    dsl.transaction { config ->
      config.dsl().update(d)
        .set(d.SUMMARY, metadata.summary)
        .set(d.SUMMARY_NUMBER, metadata.summaryNumber)
        .set(d.RELEASE_DATE, metadata.releaseDate)
        // the modification timestamp is written as UTC; toDomain() converts back on read
        .set(d.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z")))
        .where(d.SERIES_ID.eq(metadata.seriesId))
        .execute()

      // authors are replaced wholesale: delete the existing rows, re-insert the current set
      config.dsl().deleteFrom(a)
        .where(a.SERIES_ID.eq(metadata.seriesId))
        .execute()

      insertAuthors(config.dsl(), metadata)
    }
  }

  // Batch-inserts the author rows for the aggregation; no-op when there are no authors.
  private fun insertAuthors(dsl: DSLContext, metadata: BookMetadataAggregation) {
    if (metadata.authors.isNotEmpty()) {
      dsl.batch(
        // null placeholders define the statement shape; each bind() below supplies one row
        dsl.insertInto(a, a.SERIES_ID, a.NAME, a.ROLE)
          .values(null as String?, null, null)
      ).also { step ->
        metadata.authors.forEach {
          step.bind(metadata.seriesId, it.name, it.role)
        }
      }.execute()
    }
  }

  override fun delete(seriesId: String) {
    // delete child author rows before the parent aggregation row (FK order)
    dsl.transaction { config ->
      with(config.dsl()) {
        deleteFrom(a).where(a.SERIES_ID.eq(seriesId)).execute()
        deleteFrom(d).where(d.SERIES_ID.eq(seriesId)).execute()
      }
    }
  }

  override fun delete(seriesIds: Collection<String>) {
    // bulk variant of delete(seriesId); same child-before-parent ordering
    dsl.transaction { config ->
      with(config.dsl()) {
        deleteFrom(a).where(a.SERIES_ID.`in`(seriesIds)).execute()
        deleteFrom(d).where(d.SERIES_ID.`in`(seriesIds)).execute()
      }
    }
  }

  /** Number of aggregation rows, i.e. number of series having an aggregation. */
  override fun count(): Long = dsl.fetchCount(d).toLong()

  private fun BookMetadataAggregationRecord.toDomain(authors: List<Author>) =
    BookMetadataAggregation(
      authors = authors,
      releaseDate = releaseDate,
      summary = summary,
      summaryNumber = summaryNumber,

      seriesId = seriesId,

      // audit dates are converted to the current time zone on read
      // (update() writes LAST_MODIFIED_DATE as UTC)
      createdDate = createdDate.toCurrentTimeZone(),
      lastModifiedDate = lastModifiedDate.toCurrentTimeZone()
    )

  private fun BookMetadataAggregationAuthorRecord.toDomain() =
    Author(
      name = name,
      role = role
    )
}
|
||||
|
|
@ -3,10 +3,13 @@ package org.gotson.komga.infrastructure.jooq
|
|||
import org.gotson.komga.domain.model.ReadStatus
|
||||
import org.gotson.komga.domain.model.SeriesSearchWithReadProgress
|
||||
import org.gotson.komga.infrastructure.web.toFilePath
|
||||
import org.gotson.komga.interfaces.rest.dto.AuthorDto
|
||||
import org.gotson.komga.interfaces.rest.dto.BookMetadataAggregationDto
|
||||
import org.gotson.komga.interfaces.rest.dto.SeriesDto
|
||||
import org.gotson.komga.interfaces.rest.dto.SeriesMetadataDto
|
||||
import org.gotson.komga.interfaces.rest.persistence.SeriesDtoRepository
|
||||
import org.gotson.komga.jooq.Tables
|
||||
import org.gotson.komga.jooq.tables.records.BookMetadataAggregationRecord
|
||||
import org.gotson.komga.jooq.tables.records.SeriesMetadataRecord
|
||||
import org.gotson.komga.jooq.tables.records.SeriesRecord
|
||||
import org.jooq.AggregateFunction
|
||||
|
|
@ -45,6 +48,8 @@ class SeriesDtoDao(
|
|||
private val cs = Tables.COLLECTION_SERIES
|
||||
private val g = Tables.SERIES_METADATA_GENRE
|
||||
private val st = Tables.SERIES_METADATA_TAG
|
||||
private val bma = Tables.BOOK_METADATA_AGGREGATION
|
||||
private val bmaa = Tables.BOOK_METADATA_AGGREGATION_AUTHOR
|
||||
|
||||
val countUnread: AggregateFunction<BigDecimal> = DSL.sum(DSL.`when`(r.COMPLETED.isNull, 1).otherwise(0))
|
||||
val countRead: AggregateFunction<BigDecimal> = DSL.sum(DSL.`when`(r.COMPLETED.isTrue, 1).otherwise(0))
|
||||
|
|
@ -53,7 +58,8 @@ class SeriesDtoDao(
|
|||
|
||||
private val groupFields = arrayOf(
|
||||
*s.fields(),
|
||||
*d.fields()
|
||||
*d.fields(),
|
||||
*bma.fields(),
|
||||
)
|
||||
|
||||
private val sorts = mapOf(
|
||||
|
|
@ -106,6 +112,7 @@ class SeriesDtoDao(
|
|||
.from(s)
|
||||
.leftJoin(b).on(s.ID.eq(b.SERIES_ID))
|
||||
.leftJoin(d).on(s.ID.eq(d.SERIES_ID))
|
||||
.leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
|
||||
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
|
||||
.apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
|
||||
.apply { if (joinConditions.tag) leftJoin(st).on(s.ID.eq(st.SERIES_ID)) }
|
||||
|
|
@ -122,6 +129,7 @@ class SeriesDtoDao(
|
|||
.from(s)
|
||||
.leftJoin(b).on(s.ID.eq(b.SERIES_ID))
|
||||
.leftJoin(d).on(s.ID.eq(d.SERIES_ID))
|
||||
.leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
|
||||
.leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
|
||||
.apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
|
||||
.apply { if (joinConditions.tag) leftJoin(st).on(s.ID.eq(st.SERIES_ID)) }
|
||||
|
|
@ -158,6 +166,7 @@ class SeriesDtoDao(
|
|||
.map { rec ->
|
||||
val sr = rec.into(s)
|
||||
val dr = rec.into(d)
|
||||
val bmar = rec.into(bma)
|
||||
val booksCount = rec.get(BOOKS_COUNT, Int::class.java)
|
||||
|
||||
val booksCountRecord = dsl
|
||||
|
|
@ -184,7 +193,13 @@ class SeriesDtoDao(
|
|||
.where(st.SERIES_ID.eq(sr.id))
|
||||
.fetchSet(st.TAG)
|
||||
|
||||
sr.toDto(booksCount, booksReadCount, booksUnreadCount, booksInProgressCount, dr.toDto(genres, tags))
|
||||
val aggregatedAuthors = dsl.selectFrom(bmaa)
|
||||
.where(bmaa.SERIES_ID.eq(sr.id))
|
||||
.fetchInto(bmaa)
|
||||
.filter {it.name != null }
|
||||
.map { AuthorDto(it.name, it.role) }
|
||||
|
||||
sr.toDto(booksCount, booksReadCount, booksUnreadCount, booksInProgressCount, dr.toDto(genres, tags), bmar.toDto(aggregatedAuthors))
|
||||
}
|
||||
|
||||
private fun SeriesSearchWithReadProgress.toCondition(): Condition {
|
||||
|
|
@ -230,7 +245,7 @@ class SeriesDtoDao(
|
|||
}
|
||||
}.reduce { acc, condition -> acc.or(condition) }
|
||||
|
||||
private fun SeriesRecord.toDto(booksCount: Int, booksReadCount: Int, booksUnreadCount: Int, booksInProgressCount: Int, metadata: SeriesMetadataDto) =
|
||||
private fun SeriesRecord.toDto(booksCount: Int, booksReadCount: Int, booksUnreadCount: Int, booksInProgressCount: Int, metadata: SeriesMetadataDto, booksMetadata: BookMetadataAggregationDto) =
|
||||
SeriesDto(
|
||||
id = id,
|
||||
libraryId = libraryId,
|
||||
|
|
@ -243,7 +258,8 @@ class SeriesDtoDao(
|
|||
booksReadCount = booksReadCount,
|
||||
booksUnreadCount = booksUnreadCount,
|
||||
booksInProgressCount = booksInProgressCount,
|
||||
metadata = metadata
|
||||
metadata = metadata,
|
||||
booksMetadata = booksMetadata,
|
||||
)
|
||||
|
||||
private fun SeriesMetadataRecord.toDto(genres: Set<String>, tags: Set<String>) =
|
||||
|
|
@ -271,4 +287,15 @@ class SeriesDtoDao(
|
|||
tags = tags,
|
||||
tagsLock = tagsLock
|
||||
)
|
||||
|
||||
private fun BookMetadataAggregationRecord.toDto(authors: List<AuthorDto>) =
|
||||
BookMetadataAggregationDto(
|
||||
authors = authors,
|
||||
releaseDate = releaseDate,
|
||||
summary = summary,
|
||||
summaryNumber = summaryNumber,
|
||||
|
||||
created = createdDate.toCurrentTimeZone(),
|
||||
lastModified = lastModifiedDate.toCurrentTimeZone()
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -447,6 +447,7 @@ class BookController(
|
|||
)
|
||||
}
|
||||
bookMetadataRepository.update(updated)
|
||||
taskReceiver.aggregateSeriesMetadata(bookRepository.findByIdOrNull(bookId)!!.seriesId)
|
||||
} ?: throw ResponseStatusException(HttpStatus.NOT_FOUND)
|
||||
|
||||
@PatchMapping("api/v1/books/{bookId}/read-progress")
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
package org.gotson.komga.interfaces.rest.dto
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonFormat
|
||||
import org.gotson.komga.domain.model.Author
|
||||
import java.time.LocalDate
|
||||
import java.time.LocalDateTime
|
||||
|
||||
data class SeriesDto(
|
||||
|
|
@ -18,7 +20,8 @@ data class SeriesDto(
|
|||
val booksReadCount: Int,
|
||||
val booksUnreadCount: Int,
|
||||
val booksInProgressCount: Int,
|
||||
val metadata: SeriesMetadataDto
|
||||
val metadata: SeriesMetadataDto,
|
||||
val booksMetadata: BookMetadataAggregationDto,
|
||||
)
|
||||
|
||||
fun SeriesDto.restrictUrl(restrict: Boolean) =
|
||||
|
|
@ -51,3 +54,16 @@ data class SeriesMetadataDto(
|
|||
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss")
|
||||
val lastModified: LocalDateTime
|
||||
)
|
||||
|
||||
data class BookMetadataAggregationDto(
|
||||
val authors: List<AuthorDto> = emptyList(),
|
||||
@JsonFormat(pattern = "yyyy-MM-dd")
|
||||
val releaseDate: LocalDate?,
|
||||
val summary: String,
|
||||
val summaryNumber: String,
|
||||
|
||||
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss")
|
||||
val created: LocalDateTime,
|
||||
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss")
|
||||
val lastModified: LocalDateTime
|
||||
)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,63 @@
|
|||
package org.gotson.komga.domain.service
|
||||
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import org.gotson.komga.domain.model.Author
|
||||
import org.gotson.komga.domain.model.BookMetadata
|
||||
import org.junit.jupiter.api.Test
|
||||
import java.time.LocalDate
|
||||
|
||||
/**
 * Unit tests for [MetadataAggregator]: author de-duplication, summary
 * selection by sort number, and earliest release-date selection.
 */
class MetadataAggregatorTest {

  private val aggregator = MetadataAggregator()

  @Test
  fun `given metadatas when aggregating then aggregation is relevant`() {
    val books = listOf(
      BookMetadata(
        title = "ignored",
        summary = "summary 1",
        number = "1",
        numberSort = 1F,
        authors = listOf(Author("author1", "role1"), Author("author2", "role2")),
        releaseDate = LocalDate.of(2020, 1, 1),
      ),
      BookMetadata(
        title = "ignored",
        summary = "summary 2",
        number = "2",
        numberSort = 2F,
        authors = listOf(Author("author3", "role3"), Author("author2", "role3")),
        releaseDate = LocalDate.of(2021, 1, 1),
      ),
    )

    val result = aggregator.aggregate(books)

    // 4 distinct authors: author2 appears twice but with different roles
    assertThat(result.authors).hasSize(4)
    // earliest release date wins
    assertThat(result.releaseDate?.year).isEqualTo(2020)
    // summary comes from the first book by sort number
    assertThat(result.summary).isEqualTo("summary 1")
    assertThat(result.summaryNumber).isEqualTo("1")
  }

  @Test
  fun `given metadatas with summary only on second book when aggregating then aggregation has second book's summary`() {
    val books = listOf(
      BookMetadata(title = "ignored", number = "1", numberSort = 1F),
      BookMetadata(title = "ignored", summary = "summary 2", number = "2", numberSort = 2F),
    )

    val result = aggregator.aggregate(books)

    // first book has a blank summary, so the second book's summary is picked
    assertThat(result.summary).isEqualTo("summary 2")
    assertThat(result.summaryNumber).isEqualTo("2")
  }

  @Test
  fun `given metadatas with second book with earlier release date when aggregating then aggregation has release date from second book`() {
    val books = listOf(
      BookMetadata(title = "ignored", number = "1", numberSort = 1F, releaseDate = LocalDate.of(2020, 1, 1)),
      BookMetadata(title = "ignored", number = "2", numberSort = 2F, releaseDate = LocalDate.of(2019, 1, 1)),
    )

    val result = aggregator.aggregate(books)

    // the minimum release date is kept regardless of book order
    assertThat(result.releaseDate?.year).isEqualTo(2019)
  }

  @Test
  fun `given metadatas with duplicate authors when aggregating then aggregation has no duplicate authors`() {
    val books = listOf(
      BookMetadata(title = "ignored", number = "1", numberSort = 1F, authors = listOf(Author("author1", "role1"), Author("author2", "role2"))),
      BookMetadata(title = "ignored", number = "2", numberSort = 2F, authors = listOf(Author("author1", "role1"), Author("author2", "role2"))),
    )

    val result = aggregator.aggregate(books)

    // identical (name, role) pairs across books collapse to a single entry
    assertThat(result.authors).hasSize(2)
  }
}
|
||||
|
|
@ -0,0 +1,174 @@
|
|||
package org.gotson.komga.infrastructure.jooq
|
||||
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import org.assertj.core.api.Assertions.catchThrowable
|
||||
import org.gotson.komga.domain.model.Author
|
||||
import org.gotson.komga.domain.model.BookMetadataAggregation
|
||||
import org.gotson.komga.domain.model.makeLibrary
|
||||
import org.gotson.komga.domain.model.makeSeries
|
||||
import org.gotson.komga.domain.persistence.LibraryRepository
|
||||
import org.gotson.komga.domain.persistence.SeriesRepository
|
||||
import org.junit.jupiter.api.AfterAll
|
||||
import org.junit.jupiter.api.AfterEach
|
||||
import org.junit.jupiter.api.BeforeAll
|
||||
import org.junit.jupiter.api.Test
|
||||
import org.junit.jupiter.api.extension.ExtendWith
|
||||
import org.springframework.beans.factory.annotation.Autowired
|
||||
import org.springframework.boot.test.context.SpringBootTest
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension
|
||||
import java.time.LocalDate
|
||||
import java.time.LocalDateTime
|
||||
|
||||
/**
 * Integration tests for [BookMetadataAggregationDao] against a real database,
 * covering insert, find, update (including author replacement), and the
 * not-found behaviors of findById vs findByIdOrNull.
 */
@ExtendWith(SpringExtension::class)
@SpringBootTest
class BookMetadataAggregationDaoTest(
  @Autowired private val bookMetadataAggregationDao: BookMetadataAggregationDao,
  @Autowired private val seriesRepository: SeriesRepository,
  @Autowired private val libraryRepository: LibraryRepository
) {

  private val library = makeLibrary()

  // NOTE(review): @BeforeAll/@AfterAll on instance methods require the PER_CLASS
  // test lifecycle — presumably configured project-wide (junit-platform.properties); verify.
  @BeforeAll
  fun setup() {
    libraryRepository.insert(library)
  }

  // Delete aggregations before the series they reference (child rows first).
  @AfterEach
  fun deleteSeries() {
    seriesRepository.findAll().forEach {
      bookMetadataAggregationDao.delete(it.id)
    }
    seriesRepository.deleteAll()
  }

  @AfterAll
  fun tearDown() {
    libraryRepository.deleteAll()
  }

  @Test
  fun `given a bookMetadataAggregation when inserting then it is persisted`() {
    val series = makeSeries("Series", libraryId = library.id).also { seriesRepository.insert(it) }

    val now = LocalDateTime.now()
    val metadata = BookMetadataAggregation(
      authors = listOf(Author("author", "role")),
      releaseDate = LocalDate.now(),
      summary = "Summary",
      summaryNumber = "1",
      seriesId = series.id
    )

    bookMetadataAggregationDao.insert(metadata)
    val created = bookMetadataAggregationDao.findById(metadata.seriesId)

    assertThat(created.seriesId).isEqualTo(series.id)
    // `offset` is a shared temporal tolerance defined elsewhere in this package — TODO confirm
    assertThat(created.createdDate).isCloseTo(now, offset)
    assertThat(created.lastModifiedDate).isCloseTo(now, offset)

    assertThat(created.releaseDate).isEqualTo(metadata.releaseDate)
    assertThat(created.summary).isEqualTo(metadata.summary)
    assertThat(created.summaryNumber).isEqualTo(metadata.summaryNumber)
    with(created.authors.first()) {
      assertThat(name).isEqualTo(metadata.authors.first().name)
      assertThat(role).isEqualTo(metadata.authors.first().role)
    }
  }

  @Test
  fun `given a minimum bookMetadataAggregation when inserting then it is persisted`() {
    val series = makeSeries("Series", libraryId = library.id).also { seriesRepository.insert(it) }

    val now = LocalDateTime.now()
    // only seriesId is set; all other fields keep their defaults
    val metadata = BookMetadataAggregation(
      seriesId = series.id
    )

    bookMetadataAggregationDao.insert(metadata)
    val created = bookMetadataAggregationDao.findById(metadata.seriesId)

    assertThat(created.seriesId).isEqualTo(series.id)
    assertThat(created.createdDate).isCloseTo(now, offset)
    assertThat(created.lastModifiedDate).isCloseTo(now, offset)

    // defaults round-trip: null release date, blank summary fields, no authors
    assertThat(created.releaseDate).isNull()
    assertThat(created.summary).isBlank
    assertThat(created.summaryNumber).isBlank
    assertThat(created.authors).isEmpty()
  }

  @Test
  fun `given existing bookMetadataAggregation when finding by id then metadata is returned`() {
    val series = makeSeries("Series", libraryId = library.id).also { seriesRepository.insert(it) }

    val metadata = BookMetadataAggregation(
      authors = listOf(Author("author", "role")),
      releaseDate = LocalDate.now(),
      summary = "Summary",
      seriesId = series.id
    )

    bookMetadataAggregationDao.insert(metadata)

    val found = bookMetadataAggregationDao.findById(series.id)

    assertThat(found).isNotNull
    assertThat(found.summary).isEqualTo("Summary")
  }

  @Test
  fun `given non-existing bookMetadataAggregation when finding by id then exception is thrown`() {
    // findById uses first() on an empty result, so a missing id must throw
    val found = catchThrowable { bookMetadataAggregationDao.findById("128742") }

    assertThat(found).isInstanceOf(Exception::class.java)
  }

  @Test
  fun `given non-existing bookMetadataAggregation when findByIdOrNull then null is returned`() {
    val found = bookMetadataAggregationDao.findByIdOrNull("128742")

    assertThat(found).isNull()
  }

  @Test
  fun `given a bookMetadataAggregation when updating then it is persisted`() {
    val series = makeSeries("Series", libraryId = library.id).also { seriesRepository.insert(it) }

    val metadata = BookMetadataAggregation(
      authors = listOf(Author("author", "role")),
      releaseDate = LocalDate.now(),
      summary = "Summary",
      summaryNumber = "1",
      seriesId = series.id
    )
    bookMetadataAggregationDao.insert(metadata)
    val created = bookMetadataAggregationDao.findById(metadata.seriesId)

    val modificationDate = LocalDateTime.now()

    // change every mutable aggregation field, including the full author list
    val updated = with(created) {
      copy(
        releaseDate = LocalDate.now().plusYears(1),
        summary = "SummaryUpdated",
        summaryNumber = "2",
        authors = listOf(Author("authorUpdated", "roleUpdated"), Author("author2", "role2")),
      )
    }

    bookMetadataAggregationDao.update(updated)
    val modified = bookMetadataAggregationDao.findById(updated.seriesId)

    assertThat(modified.seriesId).isEqualTo(series.id)
    // creation date is untouched; only lastModifiedDate advances
    assertThat(modified.createdDate).isEqualTo(updated.createdDate)
    assertThat(modified.lastModifiedDate)
      .isCloseTo(modificationDate, offset)
      .isNotEqualTo(modified.createdDate)

    assertThat(modified.releaseDate).isEqualTo(updated.releaseDate)
    assertThat(modified.summary).isEqualTo(updated.summary)
    // authors were fully replaced (delete + re-insert), order not guaranteed
    assertThat(modified.authors).hasSize(2)
    assertThat(modified.authors.map { it.name }).containsExactlyInAnyOrderElementsOf(updated.authors.map { it.name })
    assertThat(modified.authors.map { it.role }).containsExactlyInAnyOrderElementsOf(updated.authors.map { it.role })
  }
}
|
||||
Loading…
Reference in a new issue