refactor: add param target for some annotations to fix archunit tests

This commit is contained in:
Gauthier Roebroeck 2025-07-15 13:34:57 +08:00
parent 5f00bd5e71
commit 1975ff2107
24 changed files with 26 additions and 26 deletions

View file

@@ -21,8 +21,8 @@ import java.io.File
@Profile("!test") @Profile("!test")
@Component @Component
class TrayIconRunner( class TrayIconRunner(
@Value("#{komgaProperties.configDir}") komgaConfigDir: String, @param:Value("#{komgaProperties.configDir}") komgaConfigDir: String,
@Value("\${logging.file.name}") logFileName: String, @param:Value("\${logging.file.name}") logFileName: String,
serverSettings: WebServerEffectiveSettings, serverSettings: WebServerEffectiveSettings,
env: Environment, env: Environment,
) : ApplicationRunner { ) : ApplicationRunner {

View file

@@ -32,7 +32,7 @@ import java.time.ZoneId
@Component @Component
class BookDao( class BookDao(
private val dsl: DSLContext, private val dsl: DSLContext,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
) : BookRepository { ) : BookRepository {
private val b = Tables.BOOK private val b = Tables.BOOK
private val m = Tables.MEDIA private val m = Tables.MEDIA

View file

@@ -53,7 +53,7 @@ import java.net.URL
class BookDtoDao( class BookDtoDao(
private val dsl: DSLContext, private val dsl: DSLContext,
private val luceneHelper: LuceneHelper, private val luceneHelper: LuceneHelper,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
private val transactionTemplate: TransactionTemplate, private val transactionTemplate: TransactionTemplate,
private val bookCommonDao: BookCommonDao, private val bookCommonDao: BookCommonDao,
) : BookDtoRepository { ) : BookDtoRepository {

View file

@@ -19,7 +19,7 @@ import java.time.ZoneId
@Component @Component
class BookMetadataAggregationDao( class BookMetadataAggregationDao(
private val dsl: DSLContext, private val dsl: DSLContext,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
) : BookMetadataAggregationRepository { ) : BookMetadataAggregationRepository {
private val d = Tables.BOOK_METADATA_AGGREGATION private val d = Tables.BOOK_METADATA_AGGREGATION
private val a = Tables.BOOK_METADATA_AGGREGATION_AUTHOR private val a = Tables.BOOK_METADATA_AGGREGATION_AUTHOR

View file

@@ -21,7 +21,7 @@ import java.time.ZoneId
@Component @Component
class BookMetadataDao( class BookMetadataDao(
private val dsl: DSLContext, private val dsl: DSLContext,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
) : BookMetadataRepository { ) : BookMetadataRepository {
private val d = Tables.BOOK_METADATA private val d = Tables.BOOK_METADATA
private val a = Tables.BOOK_METADATA_AUTHOR private val a = Tables.BOOK_METADATA_AUTHOR

View file

@@ -31,7 +31,7 @@ private val logger = KotlinLogging.logger {}
@Component @Component
class MediaDao( class MediaDao(
private val dsl: DSLContext, private val dsl: DSLContext,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
private val mapper: ObjectMapper, private val mapper: ObjectMapper,
) : MediaRepository { ) : MediaRepository {
private val m = Tables.MEDIA private val m = Tables.MEDIA

View file

@@ -34,7 +34,7 @@ import java.util.SortedMap
class ReadListDao( class ReadListDao(
private val dsl: DSLContext, private val dsl: DSLContext,
private val luceneHelper: LuceneHelper, private val luceneHelper: LuceneHelper,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
) : ReadListRepository { ) : ReadListRepository {
private val rl = Tables.READLIST private val rl = Tables.READLIST
private val rlb = Tables.READLIST_BOOK private val rlb = Tables.READLIST_BOOK

View file

@@ -24,7 +24,7 @@ import java.time.ZoneId
@Component @Component
class ReadProgressDao( class ReadProgressDao(
private val dsl: DSLContext, private val dsl: DSLContext,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
private val mapper: ObjectMapper, private val mapper: ObjectMapper,
) : ReadProgressRepository { ) : ReadProgressRepository {
private val r = Tables.READ_PROGRESS private val r = Tables.READ_PROGRESS

View file

@@ -33,7 +33,7 @@ import java.time.ZoneId
class SeriesCollectionDao( class SeriesCollectionDao(
private val dsl: DSLContext, private val dsl: DSLContext,
private val luceneHelper: LuceneHelper, private val luceneHelper: LuceneHelper,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
) : SeriesCollectionRepository { ) : SeriesCollectionRepository {
private val c = Tables.COLLECTION private val c = Tables.COLLECTION
private val cs = Tables.COLLECTION_SERIES private val cs = Tables.COLLECTION_SERIES

View file

@@ -30,7 +30,7 @@ import java.time.ZoneId
@Component @Component
class SeriesDao( class SeriesDao(
private val dsl: DSLContext, private val dsl: DSLContext,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
) : SeriesRepository { ) : SeriesRepository {
private val s = Tables.SERIES private val s = Tables.SERIES
private val d = Tables.SERIES_METADATA private val d = Tables.SERIES_METADATA

View file

@@ -58,7 +58,7 @@ const val BOOKS_READ_COUNT = "booksReadCount"
class SeriesDtoDao( class SeriesDtoDao(
private val dsl: DSLContext, private val dsl: DSLContext,
private val luceneHelper: LuceneHelper, private val luceneHelper: LuceneHelper,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
private val transactionTemplate: TransactionTemplate, private val transactionTemplate: TransactionTemplate,
) : SeriesDtoRepository { ) : SeriesDtoRepository {
private val s = Tables.SERIES private val s = Tables.SERIES

View file

@@ -20,7 +20,7 @@ import java.time.ZoneId
@Component @Component
class SeriesMetadataDao( class SeriesMetadataDao(
private val dsl: DSLContext, private val dsl: DSLContext,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
) : SeriesMetadataRepository { ) : SeriesMetadataRepository {
private val d = Tables.SERIES_METADATA private val d = Tables.SERIES_METADATA
private val g = Tables.SERIES_METADATA_GENRE private val g = Tables.SERIES_METADATA_GENRE

View file

@@ -17,7 +17,7 @@ import java.net.URL
@Component @Component
class SidecarDao( class SidecarDao(
private val dsl: DSLContext, private val dsl: DSLContext,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
) : SidecarRepository { ) : SidecarRepository {
private val sc = Tables.SIDECAR private val sc = Tables.SIDECAR
private val l = Tables.LIBRARY private val l = Tables.LIBRARY

View file

@@ -16,7 +16,7 @@ import java.net.URL
@Component @Component
class ThumbnailBookDao( class ThumbnailBookDao(
private val dsl: DSLContext, private val dsl: DSLContext,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
) : ThumbnailBookRepository { ) : ThumbnailBookRepository {
private val tb = Tables.THUMBNAIL_BOOK private val tb = Tables.THUMBNAIL_BOOK

View file

@@ -16,7 +16,7 @@ import java.net.URL
@Component @Component
class ThumbnailSeriesDao( class ThumbnailSeriesDao(
private val dsl: DSLContext, private val dsl: DSLContext,
@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
) : ThumbnailSeriesRepository { ) : ThumbnailSeriesRepository {
private val ts = Tables.THUMBNAIL_SERIES private val ts = Tables.THUMBNAIL_SERIES

View file

@@ -24,7 +24,7 @@ private val logger = KotlinLogging.logger {}
class TasksDao( class TasksDao(
@Qualifier("tasksDslContext") @Qualifier("tasksDslContext")
private val dsl: DSLContext, private val dsl: DSLContext,
@Value("#{@komgaProperties.tasksDb.batchChunkSize}") private val batchSize: Int, @param:Value("#{@komgaProperties.tasksDb.batchChunkSize}") private val batchSize: Int,
private val objectMapper: ObjectMapper, private val objectMapper: ObjectMapper,
) : TasksRepository { ) : TasksRepository {
private val t = Tables.TASK private val t = Tables.TASK

View file

@@ -23,7 +23,7 @@ private val logger = KotlinLogging.logger {}
@Component @Component
class KepubConverter( class KepubConverter(
private val settingsProvider: KomgaSettingsProvider, private val settingsProvider: KomgaSettingsProvider,
@Value("\${komga.kobo.kepubify-path:#{null}}") val kepubifyConfigurationPath: String?, @param:Value("\${komga.kobo.kepubify-path:#{null}}") val kepubifyConfigurationPath: String?,
) { ) {
final var kepubifyPath: Path? = null final var kepubifyPath: Path? = null
private set private set

View file

@@ -33,7 +33,7 @@ class EpubExtractor(
private val contentDetector: ContentDetector, private val contentDetector: ContentDetector,
private val imageAnalyzer: ImageAnalyzer, private val imageAnalyzer: ImageAnalyzer,
private val kepubConverter: KepubConverter, private val kepubConverter: KepubConverter,
@Value("#{@komgaProperties.epubDivinaLetterCountThreshold}") private val letterCountThreshold: Int, @param:Value("#{@komgaProperties.epubDivinaLetterCountThreshold}") private val letterCountThreshold: Int,
) { ) {
/** /**
* Retrieves a specific entry by name from the zip archive * Retrieves a specific entry by name from the zip archive

View file

@@ -29,7 +29,7 @@ private const val COMIC_INFO = "ComicInfo.xml"
@Service @Service
class ComicInfoProvider( class ComicInfoProvider(
@Autowired(required = false) private val mapper: XmlMapper = XmlMapper(), @param:Autowired(required = false) private val mapper: XmlMapper = XmlMapper(),
private val bookAnalyzer: BookAnalyzer, private val bookAnalyzer: BookAnalyzer,
private val isbnValidator: ISBNValidator, private val isbnValidator: ISBNValidator,
) : BookMetadataProvider, ) : BookMetadataProvider,

View file

@@ -13,7 +13,7 @@ private val logger = KotlinLogging.logger {}
@Service @Service
class ReadListProvider( class ReadListProvider(
@Autowired(required = false) private val mapper: XmlMapper = XmlMapper(), @param:Autowired(required = false) private val mapper: XmlMapper = XmlMapper(),
) { ) {
@Throws(ComicRackListException::class) @Throws(ComicRackListException::class)
fun importFromCbl(cbl: ByteArray): ReadListRequest { fun importFromCbl(cbl: ByteArray): ReadListRequest {

View file

@@ -62,7 +62,7 @@ import org.springframework.security.access.prepost.PreAuthorize
@Configuration @Configuration
class OpenApiConfiguration( class OpenApiConfiguration(
@Value("\${application.version}") private val appVersion: String, @param:Value("\${application.version}") private val appVersion: String,
env: Environment, env: Environment,
) { ) {
private val generateOpenApi = env.activeProfiles.contains("generate-openapi") private val generateOpenApi = env.activeProfiles.contains("generate-openapi")

View file

@@ -36,7 +36,7 @@ class LuceneHelper(
private val searchAnalyzer: Analyzer, private val searchAnalyzer: Analyzer,
private val taskScheduler: TaskScheduler, private val taskScheduler: TaskScheduler,
private val indexAnalyzer: Analyzer, private val indexAnalyzer: Analyzer,
@Value("#{@komgaProperties.lucene.commitDelay}") @param:Value("#{@komgaProperties.lucene.commitDelay}")
private val commitDelay: Duration, private val commitDelay: Duration,
) { ) {
private val indexWriter: IndexWriter = IndexWriter(directory, IndexWriterConfig(indexAnalyzer)) private val indexWriter: IndexWriter = IndexWriter(directory, IndexWriterConfig(indexAnalyzer))

View file

@@ -31,8 +31,8 @@ import kotlin.time.Duration.Companion.days
@Tag(name = OpenApiConfiguration.TagNames.SERVER_SETTINGS) @Tag(name = OpenApiConfiguration.TagNames.SERVER_SETTINGS)
class SettingsController( class SettingsController(
private val komgaSettingsProvider: KomgaSettingsProvider, private val komgaSettingsProvider: KomgaSettingsProvider,
@Value("\${server.port:#{null}}") private val configServerPort: Int?, @param:Value("\${server.port:#{null}}") private val configServerPort: Int?,
@Value("\${server.servlet.context-path:#{null}}") private val configServerContextPath: String?, @param:Value("\${server.servlet.context-path:#{null}}") private val configServerContextPath: String?,
private val serverSettings: WebServerEffectiveSettings, private val serverSettings: WebServerEffectiveSettings,
private val kepubConverter: KepubConverter, private val kepubConverter: KepubConverter,
) { ) {

View file

@@ -140,7 +140,7 @@ class KoboControllerTest(
@Nested @Nested
inner class HostHeader( inner class HostHeader(
@Value("\${server.port:#{null}}") private val configServerPort: Int?, @param:Value("\${server.port:#{null}}") private val configServerPort: Int?,
) { ) {
@ParameterizedTest @ParameterizedTest
@MethodSource("headers") @MethodSource("headers")