refactor: add param target for some annotations to fix archunit tests

Gauthier Roebroeck 2025-07-15 13:34:57 +08:00
parent 5f00bd5e71
commit 1975ff2107
24 changed files with 26 additions and 26 deletions
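
The whole commit applies one Kotlin idiom: the param: annotation use-site target. Depending on the compiler's defaulting rules, an annotation placed on a constructor property can end up on the generated property or backing field as well as (or instead of) the constructor parameter; the explicit param: target pins it to the parameter only. A minimal sketch of the pattern, assuming a hypothetical ExampleDao (the SpEL expression is copied from the diff below):

import org.springframework.beans.factory.annotation.Value
import org.springframework.stereotype.Component

// Hypothetical class illustrating the change made in each file of this commit.
@Component
class ExampleDao(
  // param: restricts the annotation to the constructor parameter, so the
  // generated backing field carries no Spring annotation.
  @param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
)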

@@ -21,8 +21,8 @@ import java.io.File
 @Profile("!test")
 @Component
 class TrayIconRunner(
-@Value("#{komgaProperties.configDir}") komgaConfigDir: String,
-@Value("\${logging.file.name}") logFileName: String,
+@param:Value("#{komgaProperties.configDir}") komgaConfigDir: String,
+@param:Value("\${logging.file.name}") logFileName: String,
 serverSettings: WebServerEffectiveSettings,
 env: Environment,
 ) : ApplicationRunner {

@@ -32,7 +32,7 @@ import java.time.ZoneId
 @Component
 class BookDao(
 private val dsl: DSLContext,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 ) : BookRepository {
 private val b = Tables.BOOK
 private val m = Tables.MEDIA

@@ -53,7 +53,7 @@ import java.net.URL
 class BookDtoDao(
 private val dsl: DSLContext,
 private val luceneHelper: LuceneHelper,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 private val transactionTemplate: TransactionTemplate,
 private val bookCommonDao: BookCommonDao,
 ) : BookDtoRepository {

@@ -19,7 +19,7 @@ import java.time.ZoneId
 @Component
 class BookMetadataAggregationDao(
 private val dsl: DSLContext,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 ) : BookMetadataAggregationRepository {
 private val d = Tables.BOOK_METADATA_AGGREGATION
 private val a = Tables.BOOK_METADATA_AGGREGATION_AUTHOR

@@ -21,7 +21,7 @@ import java.time.ZoneId
 @Component
 class BookMetadataDao(
 private val dsl: DSLContext,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 ) : BookMetadataRepository {
 private val d = Tables.BOOK_METADATA
 private val a = Tables.BOOK_METADATA_AUTHOR

@@ -31,7 +31,7 @@ private val logger = KotlinLogging.logger {}
 @Component
 class MediaDao(
 private val dsl: DSLContext,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 private val mapper: ObjectMapper,
 ) : MediaRepository {
 private val m = Tables.MEDIA

@@ -34,7 +34,7 @@ import java.util.SortedMap
 class ReadListDao(
 private val dsl: DSLContext,
 private val luceneHelper: LuceneHelper,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 ) : ReadListRepository {
 private val rl = Tables.READLIST
 private val rlb = Tables.READLIST_BOOK

@@ -24,7 +24,7 @@ import java.time.ZoneId
 @Component
 class ReadProgressDao(
 private val dsl: DSLContext,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 private val mapper: ObjectMapper,
 ) : ReadProgressRepository {
 private val r = Tables.READ_PROGRESS

@@ -33,7 +33,7 @@ import java.time.ZoneId
 class SeriesCollectionDao(
 private val dsl: DSLContext,
 private val luceneHelper: LuceneHelper,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 ) : SeriesCollectionRepository {
 private val c = Tables.COLLECTION
 private val cs = Tables.COLLECTION_SERIES

@@ -30,7 +30,7 @@ import java.time.ZoneId
 @Component
 class SeriesDao(
 private val dsl: DSLContext,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 ) : SeriesRepository {
 private val s = Tables.SERIES
 private val d = Tables.SERIES_METADATA

@@ -58,7 +58,7 @@ const val BOOKS_READ_COUNT = "booksReadCount"
 class SeriesDtoDao(
 private val dsl: DSLContext,
 private val luceneHelper: LuceneHelper,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 private val transactionTemplate: TransactionTemplate,
 ) : SeriesDtoRepository {
 private val s = Tables.SERIES

@@ -20,7 +20,7 @@ import java.time.ZoneId
 @Component
 class SeriesMetadataDao(
 private val dsl: DSLContext,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 ) : SeriesMetadataRepository {
 private val d = Tables.SERIES_METADATA
 private val g = Tables.SERIES_METADATA_GENRE

@@ -17,7 +17,7 @@ import java.net.URL
 @Component
 class SidecarDao(
 private val dsl: DSLContext,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 ) : SidecarRepository {
 private val sc = Tables.SIDECAR
 private val l = Tables.LIBRARY

@@ -16,7 +16,7 @@ import java.net.URL
 @Component
 class ThumbnailBookDao(
 private val dsl: DSLContext,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 ) : ThumbnailBookRepository {
 private val tb = Tables.THUMBNAIL_BOOK

@@ -16,7 +16,7 @@ import java.net.URL
 @Component
 class ThumbnailSeriesDao(
 private val dsl: DSLContext,
-@Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.database.batchChunkSize}") private val batchSize: Int,
 ) : ThumbnailSeriesRepository {
 private val ts = Tables.THUMBNAIL_SERIES

@@ -24,7 +24,7 @@ private val logger = KotlinLogging.logger {}
 class TasksDao(
 @Qualifier("tasksDslContext")
 private val dsl: DSLContext,
-@Value("#{@komgaProperties.tasksDb.batchChunkSize}") private val batchSize: Int,
+@param:Value("#{@komgaProperties.tasksDb.batchChunkSize}") private val batchSize: Int,
 private val objectMapper: ObjectMapper,
 ) : TasksRepository {
 private val t = Tables.TASK

@@ -23,7 +23,7 @@ private val logger = KotlinLogging.logger {}
 @Component
 class KepubConverter(
 private val settingsProvider: KomgaSettingsProvider,
-@Value("\${komga.kobo.kepubify-path:#{null}}") val kepubifyConfigurationPath: String?,
+@param:Value("\${komga.kobo.kepubify-path:#{null}}") val kepubifyConfigurationPath: String?,
 ) {
 final var kepubifyPath: Path? = null
 private set

@@ -33,7 +33,7 @@ class EpubExtractor(
 private val contentDetector: ContentDetector,
 private val imageAnalyzer: ImageAnalyzer,
 private val kepubConverter: KepubConverter,
-@Value("#{@komgaProperties.epubDivinaLetterCountThreshold}") private val letterCountThreshold: Int,
+@param:Value("#{@komgaProperties.epubDivinaLetterCountThreshold}") private val letterCountThreshold: Int,
 ) {
 /**
 * Retrieves a specific entry by name from the zip archive

@@ -29,7 +29,7 @@ private const val COMIC_INFO = "ComicInfo.xml"
 @Service
 class ComicInfoProvider(
-@Autowired(required = false) private val mapper: XmlMapper = XmlMapper(),
+@param:Autowired(required = false) private val mapper: XmlMapper = XmlMapper(),
 private val bookAnalyzer: BookAnalyzer,
 private val isbnValidator: ISBNValidator,
 ) : BookMetadataProvider,

@@ -13,7 +13,7 @@ private val logger = KotlinLogging.logger {}
 @Service
 class ReadListProvider(
-@Autowired(required = false) private val mapper: XmlMapper = XmlMapper(),
+@param:Autowired(required = false) private val mapper: XmlMapper = XmlMapper(),
 ) {
 @Throws(ComicRackListException::class)
 fun importFromCbl(cbl: ByteArray): ReadListRequest {

@@ -62,7 +62,7 @@ import org.springframework.security.access.prepost.PreAuthorize
 @Configuration
 class OpenApiConfiguration(
-@Value("\${application.version}") private val appVersion: String,
+@param:Value("\${application.version}") private val appVersion: String,
 env: Environment,
 ) {
 private val generateOpenApi = env.activeProfiles.contains("generate-openapi")

@@ -36,7 +36,7 @@ class LuceneHelper(
 private val searchAnalyzer: Analyzer,
 private val taskScheduler: TaskScheduler,
 private val indexAnalyzer: Analyzer,
-@Value("#{@komgaProperties.lucene.commitDelay}")
+@param:Value("#{@komgaProperties.lucene.commitDelay}")
 private val commitDelay: Duration,
 ) {
 private val indexWriter: IndexWriter = IndexWriter(directory, IndexWriterConfig(indexAnalyzer))

@@ -31,8 +31,8 @@ import kotlin.time.Duration.Companion.days
 @Tag(name = OpenApiConfiguration.TagNames.SERVER_SETTINGS)
 class SettingsController(
 private val komgaSettingsProvider: KomgaSettingsProvider,
-@Value("\${server.port:#{null}}") private val configServerPort: Int?,
-@Value("\${server.servlet.context-path:#{null}}") private val configServerContextPath: String?,
+@param:Value("\${server.port:#{null}}") private val configServerPort: Int?,
+@param:Value("\${server.servlet.context-path:#{null}}") private val configServerContextPath: String?,
 private val serverSettings: WebServerEffectiveSettings,
 private val kepubConverter: KepubConverter,
 ) {

@@ -140,7 +140,7 @@ class KoboControllerTest(
 @Nested
 inner class HostHeader(
-@Value("\${server.port:#{null}}") private val configServerPort: Int?,
+@param:Value("\${server.port:#{null}}") private val configServerPort: Int?,
 ) {
 @ParameterizedTest
 @MethodSource("headers")
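
The ArchUnit tests mentioned in the commit message are not part of this diff; a plausible reading is a rule that forbids injection annotations on fields, which an un-targeted @Value can violate once it lands on the generated backing field. A hedged sketch of such a rule follows (ArchitectureTest and the rule itself are assumptions, not the project's actual test):

import com.tngtech.archunit.core.importer.ClassFileImporter
import com.tngtech.archunit.lang.syntax.ArchRuleDefinition.noFields
import org.junit.jupiter.api.Test
import org.springframework.beans.factory.annotation.Value

class ArchitectureTest {
  // Hypothetical rule: fails if @Value ends up on a backing field; the
  // explicit param: target applied in this commit keeps fields unannotated.
  @Test
  fun `no fields are annotated with Value`() {
    val classes = ClassFileImporter().importPackages("org.gotson.komga")
    noFields().should().beAnnotatedWith(Value::class.java).check(classes)
  }
}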