feat: remove H2 dependencies

Komga will delete old H2 database files and backups on startup for cleanup

closes #455
Gauthier Roebroeck 2021-03-15 16:16:42 +08:00
parent 4bb0eda34a
commit 50248e7233
44 changed files with 11 additions and 988 deletions
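The cleanup itself is only a few lines. As a minimal sketch of the behavior described in the commit message (the function name is illustrative; the "*.mv.db*" glob comes from the DatabaseMigration diff below, and it also matches the timestamped backups because their names embed ".mv.db"):

import java.nio.file.Path
import kotlin.io.path.deleteIfExists
import kotlin.io.path.listDirectoryEntries

// Illustrative sketch only: delete leftover H2 data files and migration backups
// sitting next to the SQLite database file.
fun deleteOldH2Files(databaseDir: Path) {
  // "*.mv.db*" matches both the H2 data file (e.g. database.h2.mv.db) and the
  // backups written during the earlier H2-to-SQLite migration
  // (e.g. database.h2.mv.db.backup.2021-03-15.10-00-00.zip).
  databaseDir.listDirectoryEntries("*.mv.db*").forEach { file ->
    println("Deleting old H2 file: $file")
    file.deleteIfExists()
  }
}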

View file

@@ -8,7 +8,6 @@ COPY ${DEPENDENCY}/snapshot-dependencies/ ./
COPY ${DEPENDENCY}/application/ ./
ENV KOMGA_DATABASE_BACKUP_PATH="/config/database-backup.zip"
ENV KOMGA_DATABASE_FILE="/config/database.sqlite"
ENV SPRING_DATASOURCE_URL="jdbc:h2:/config/database.h2"
ENV SPRING_ARTEMIS_EMBEDDED_DATA_DIRECTORY="/config/artemis"
ENV LOGGING_FILE_NAME="/config/logs/komga.log"
ENTRYPOINT ["java", "org.springframework.boot.loader.JarLauncher"]

View file

@@ -27,7 +27,6 @@ dependencies {
implementation("org.springframework.boot:spring-boot-starter-web")
implementation("org.springframework.boot:spring-boot-starter-validation")
implementation("org.springframework.boot:spring-boot-starter-data-jdbc")
implementation("org.springframework.boot:spring-boot-starter-actuator")
implementation("org.springframework.boot:spring-boot-starter-security")
implementation("org.springframework.boot:spring-boot-starter-thymeleaf")
@@ -84,8 +83,6 @@ dependencies {
implementation("com.github.f4b6a3:tsid-creator:3.0.1")
runtimeOnly("com.h2database:h2:1.4.200")
// While waiting for https://github.com/xerial/sqlite-jdbc/pull/491 and https://github.com/xerial/sqlite-jdbc/pull/494
// runtimeOnly("org.xerial:sqlite-jdbc:3.32.3.2")
// jooqGenerator("org.xerial:sqlite-jdbc:3.32.3.2")

View file

@@ -1,47 +0,0 @@
package db.migration.h2
import org.flywaydb.core.api.migration.BaseJavaMigration
import org.flywaydb.core.api.migration.Context
import org.springframework.jdbc.core.JdbcTemplate
import org.springframework.jdbc.datasource.SingleConnectionDataSource
import java.net.URI
import java.nio.file.Paths
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
class V20190926114415__create_library_from_series : BaseJavaMigration() {
override fun migrate(context: Context) {
val jdbcTemplate = JdbcTemplate(SingleConnectionDataSource(context.connection, true))
val urls = jdbcTemplate.queryForList("SELECT url FROM serie", String::class.java)
if (urls.isNotEmpty()) {
val rootFolder = findCommonDirPath(urls, '/')
val libraryId = jdbcTemplate.queryForObject("SELECT NEXTVAL('hibernate_sequence')", Int::class.java)
val libraryName = Paths.get(URI(rootFolder)).fileName
val now = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd hh:mm:ss"))
jdbcTemplate.execute("INSERT INTO library (ID, CREATED_DATE, LAST_MODIFIED_DATE, NAME, ROOT) VALUES ($libraryId, '$now', '$now', '$libraryName', '$rootFolder')")
jdbcTemplate.execute("UPDATE serie SET library_id = $libraryId")
}
}
}
// version 1.1.51 - https://www.rosettacode.org/wiki/Find_common_directory_path#Kotlin
fun findCommonDirPath(paths: List<String>, separator: Char): String {
if (paths.isEmpty()) return ""
if (paths.size == 1) return paths[0]
val splits = paths[0].split(separator)
val n = splits.size
val paths2 = paths.drop(1)
var k = 0
var common = ""
while (true) {
val prevCommon = common
common += if (k == 0) splits[0] else separator + splits[k]
if (!paths2.all { it.startsWith(common + separator) || it == common }) return prevCommon
if (++k == n) return common
}
}
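For illustration (not part of the commit), findCommonDirPath returns the deepest directory shared by all paths, which the migration above uses as the root of the auto-created library:

fun main() {
  val urls = listOf(
    "file:/books/comics/SeriesA",
    "file:/books/comics/SeriesB"
  )
  // prints "file:/books/comics"
  println(findCommonDirPath(urls, '/'))
}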

View file

@@ -1,23 +0,0 @@
package db.migration.h2
import org.flywaydb.core.api.migration.BaseJavaMigration
import org.flywaydb.core.api.migration.Context
import org.springframework.jdbc.core.JdbcTemplate
import org.springframework.jdbc.datasource.SingleConnectionDataSource
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
class V20200121154334__create_series_metadata_from_series : BaseJavaMigration() {
override fun migrate(context: Context) {
val jdbcTemplate = JdbcTemplate(SingleConnectionDataSource(context.connection, true))
val seriesIds = jdbcTemplate.queryForList("SELECT id FROM series", Long::class.java)
val now = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd hh:mm:ss"))
seriesIds.forEach { seriesId ->
val metadataId = jdbcTemplate.queryForObject("SELECT NEXTVAL('hibernate_sequence')", Int::class.java)
jdbcTemplate.execute("INSERT INTO series_metadata (ID, CREATED_DATE, LAST_MODIFIED_DATE, STATUS) VALUES ($metadataId, '$now', '$now', 'ONGOING')")
jdbcTemplate.execute("UPDATE series SET metadata_id = $metadataId WHERE id = $seriesId")
}
}
}

View file

@@ -1,37 +0,0 @@
package db.migration.h2
import org.flywaydb.core.api.migration.BaseJavaMigration
import org.flywaydb.core.api.migration.Context
import org.springframework.jdbc.core.JdbcTemplate
import org.springframework.jdbc.datasource.SingleConnectionDataSource
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
class V20200306175848__create_book_metadata_from_book : BaseJavaMigration() {
override fun migrate(context: Context) {
val jdbcTemplate = JdbcTemplate(SingleConnectionDataSource(context.connection, true))
val books = jdbcTemplate.queryForList("SELECT id, name, number FROM book")
val now = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd hh:mm:ss"))
books.forEach { book ->
val metadataId = jdbcTemplate.queryForObject("SELECT NEXTVAL('hibernate_sequence')", Int::class.java)
jdbcTemplate.update(
"INSERT INTO book_metadata (ID, CREATED_DATE, LAST_MODIFIED_DATE, TITLE, NUMBER, NUMBER_SORT) VALUES (?, ?, ?, ?, ?, ?)",
metadataId,
now,
now,
book["name"],
book["number"],
book["number"]
)
jdbcTemplate.update(
"UPDATE book SET metadata_id = ? WHERE id = ?",
metadataId,
book["id"]
)
}
jdbcTemplate.execute("alter table book alter column metadata_id set not null")
}
}

View file

@@ -1,55 +0,0 @@
create sequence hibernate_sequence start with 1 increment by 1;
create table book
(
id bigint not null,
created_date timestamp not null,
last_modified_date timestamp not null,
file_last_modified timestamp not null,
name varchar not null,
url varchar not null,
book_metadata_id bigint not null,
serie_id bigint not null,
index integer,
primary key (id)
);
create table book_metadata
(
id bigint not null,
media_type varchar,
status varchar not null,
thumbnail blob,
primary key (id)
);
create table book_metadata_page
(
book_metadata_id bigint not null,
file_name varchar not null,
media_type varchar not null,
number integer
);
create table serie
(
id bigint not null,
created_date timestamp not null,
last_modified_date timestamp not null,
file_last_modified timestamp not null,
name varchar not null,
url varchar not null,
primary key (id)
);
alter table book
add constraint uk_book_book_metadata_id unique (book_metadata_id);
alter table book
add constraint fk_book_book_metadata_book_metadata_id foreign key (book_metadata_id) references book_metadata (id);
alter table book
add constraint fk_book_serie_serie_id foreign key (serie_id) references serie (id);
alter table book_metadata_page
add constraint fk_book_metadata_page_book_metadata_book_metadata_id foreign key (book_metadata_id) references book_metadata (id);

View file

@@ -1,10 +0,0 @@
update BOOK_METADATA
set STATUS = 'UNKNOWN'
where ID in (
select m.id
from BOOK_METADATA m,
BOOK_METADATA_PAGE p
where m.ID = p.BOOK_METADATA_ID
and m.THUMBNAIL is null
and p.NUMBER = 0
and p.MEDIA_TYPE = 'image/webp');

View file

@@ -1,5 +0,0 @@
update BOOK_METADATA
set STATUS = 'UNKNOWN'
where ID in (
SELECT ID FROM BOOK_METADATA where MEDIA_TYPE = 'application/pdf'
);

View file

@@ -1,5 +0,0 @@
update BOOK_METADATA
set STATUS = 'UNKNOWN'
where ID in (
SELECT ID FROM BOOK_METADATA where MEDIA_TYPE = 'application/pdf'
);

View file

@@ -1,5 +0,0 @@
update BOOK_METADATA
set STATUS = 'UNKNOWN'
where ID in (
SELECT ID FROM BOOK_METADATA where MEDIA_TYPE = 'application/pdf'
);

View file

@@ -1 +0,0 @@
update serie set file_last_modified = '1970-01-01 00:00:00';

View file

@@ -1,18 +0,0 @@
create table library
(
id bigint not null,
created_date timestamp not null,
last_modified_date timestamp not null,
name varchar not null,
root varchar not null,
primary key (id)
);
alter table library
add constraint uk_library_name unique (name);
alter table serie
add (library_id bigint);
alter table serie
add constraint fk_serie_library_library_id foreign key (library_id) references library (id);

View file

@@ -1,2 +0,0 @@
alter table serie
alter column library_id set not null;

View file

@@ -1,17 +0,0 @@
alter table serie
rename to series;
alter table series
rename constraint fk_serie_library_library_id to fk_series_library_library_id;
alter index if exists fk_serie_library_library_id_index_4 rename to fk_series_library_library_id_index_4;
alter table book
alter column serie_id
rename to series_id;
alter table book
rename constraint fk_book_serie_serie_id to fk_book_series_series_id;
alter index if exists fk_book_serie_serie_id_index_1 rename to fk_book_series_series_id_index_1;

View file

@@ -1,21 +0,0 @@
create table user
(
id bigint not null,
created_date timestamp not null,
last_modified_date timestamp not null,
email varchar not null,
password varchar not null,
primary key (id)
);
alter table user
add constraint uk_user_login unique (email);
create table user_role
(
user_id bigint not null,
roles varchar
);
alter table user_role
add constraint fk_user_role_user_user_id foreign key (user_id) references user (id);

View file

@@ -1,15 +0,0 @@
alter table user
add (shared_all_libraries boolean not null default true);
create table user_library_sharing
(
user_id bigint not null,
library_id bigint not null,
primary key (user_id, library_id)
);
alter table user_library_sharing
add constraint fk_user_library_sharing_library_id foreign key (library_id) references library (id);
alter table user_library_sharing
add constraint fk_user_library_sharing_user_id foreign key (user_id) references user (id);

View file

@@ -1,8 +0,0 @@
alter table book
add (file_size bigint default 0);
-- force rescan to update file size of all books
update series
set file_last_modified = '1970-01-01 00:00:00';
update book
set file_last_modified = '1970-01-01 00:00:00';

View file

@@ -1,6 +0,0 @@
alter table book
add (number float4 default 0);
update book
set number = (index + 1);
alter table book
drop column index;

View file

@@ -1,25 +0,0 @@
alter table book_metadata
rename to media;
alter table book
alter column book_metadata_id
rename to media_id;
alter table book
rename constraint uk_book_book_metadata_id to uk_book_media_id;
alter table book
rename constraint fk_book_book_metadata_book_metadata_id to fk_book_media_media_id;
alter table book_metadata_page
rename to media_page;
alter table media_page
alter column book_metadata_id
rename to media_id;
alter table media_page
rename constraint fk_book_metadata_page_book_metadata_book_metadata_id to fk_media_page_media_media_id;
alter index if exists uk_book_book_metadata_id_index_7 rename to uk_book_media_id_index_7;
alter index if exists fk_book_metadata_page_book_metadata_book_metadata_id_index_9 rename to fk_media_page_media_media_id_index_9;

View file

@@ -1,13 +0,0 @@
alter table media
add (created_date timestamp);
alter table media
add (last_modified_date timestamp);
update media
set created_date = CURRENT_TIMESTAMP(),
last_modified_date = CURRENT_TIMESTAMP();
alter table media
alter column created_date set not null;
alter table media
alter column last_modified_date set not null;

View file

@@ -1,2 +0,0 @@
alter table media
add (comment varchar);

View file

@@ -1,15 +0,0 @@
create table series_metadata
(
id bigint not null,
created_date timestamp not null,
last_modified_date timestamp not null,
status varchar not null,
primary key (id)
);
alter table series
add (metadata_id bigint);
alter table series
add constraint fk_series_series_metadata_metadata_id foreign key (metadata_id) references series_metadata (id);

View file

@@ -1,2 +0,0 @@
alter table series
alter column metadata_id set not null;

View file

@@ -1,18 +0,0 @@
alter table series_metadata
add (
status_lock boolean default false,
title varchar,
title_lock boolean default false,
title_sort varchar,
title_sort_lock boolean default false
);
update series_metadata m
set m.title = (select name from series where metadata_id = m.id),
m.title_sort = (select name from series where metadata_id = m.id);
alter table series_metadata
alter column title set not null;
alter table series_metadata
alter column title_sort set not null;

View file

@@ -1,2 +0,0 @@
alter table book
alter column number set data type int;

View file

@@ -1,40 +0,0 @@
create table book_metadata
(
id bigint not null,
created_date timestamp not null,
last_modified_date timestamp not null,
age_rating integer,
age_rating_lock boolean not null default false,
number varchar not null,
number_lock boolean not null default false,
number_sort float4 not null,
number_sort_lock boolean not null default false,
publisher varchar not null default '',
publisher_lock boolean not null default false,
reading_direction varchar,
reading_direction_lock boolean not null default false,
release_date date,
release_date_lock boolean not null default false,
summary varchar not null default '',
summary_lock boolean not null default false,
title varchar not null,
title_lock boolean not null default false,
authors_lock boolean not null default false,
primary key (id)
);
create table book_metadata_author
(
book_metadata_id bigint not null,
name varchar not null,
role varchar not null
);
alter table book
add (metadata_id bigint);
alter table book_metadata_author
add constraint fk_book_metadata_author_book_metadata_id foreign key (book_metadata_id) references book_metadata (id);
alter table book
add constraint fk_book_book__metadata_metadata_id foreign key (metadata_id) references book_metadata (id);

View file

@@ -1,8 +0,0 @@
create table media_file
(
media_id bigint not null,
files varchar
);
alter table media_file
add constraint fk_media_file_media_media_id foreign key (media_id) references media(id);

View file

@@ -1,2 +0,0 @@
update series
set file_last_modified = '1970-01-01 00:00:00';

View file

@@ -1,172 +0,0 @@
-- set default values for created_date / last_modified_date
alter table user
alter column CREATED_DATE set default now();
alter table user
alter column LAST_MODIFIED_DATE set default now();
alter table library
alter column CREATED_DATE set default now();
alter table library
alter column LAST_MODIFIED_DATE set default now();
alter table book
alter column CREATED_DATE set default now();
alter table book
alter column LAST_MODIFIED_DATE set default now();
alter table book_metadata
alter column CREATED_DATE set default now();
alter table book_metadata
alter column LAST_MODIFIED_DATE set default now();
alter table media
alter column CREATED_DATE set default now();
alter table media
alter column LAST_MODIFIED_DATE set default now();
alter table series
alter column CREATED_DATE set default now();
alter table series
alter column LAST_MODIFIED_DATE set default now();
alter table series_metadata
alter column CREATED_DATE set default now();
alter table series_metadata
alter column LAST_MODIFIED_DATE set default now();
-- replace USER_ROLE table by boolean value per role in USER table
alter table user
add role_admin boolean default false;
update user u
set role_admin = exists(select roles from user_role ur where ur.roles like 'ADMIN' and ur.user_id = u.id);
drop table user_role;
-- add LIBRARY_ID field to table BOOK
alter table book
add library_id bigint;
alter table book
add constraint fk_book_library_library_id foreign key (library_id) references library (id);
update book b
set library_id = (select s.library_id from series s where s.ID = b.series_id);
alter table book
alter column library_id set not null;
-- inverse relationship between series and series_metadata
alter table SERIES_METADATA
add column series_id bigint;
update SERIES_METADATA m
set m.series_id = (select s.id from series s where s.metadata_id = m.id);
alter table SERIES
drop constraint FK_SERIES_SERIES_METADATA_METADATA_ID;
alter table SERIES_METADATA
drop primary key;
alter table SERIES_METADATA
drop column id;
alter table SERIES_METADATA
alter column series_id set not null;
alter table SERIES_METADATA
add primary key (series_id);
alter table series
drop column METADATA_ID;
alter table SERIES_METADATA
add constraint FK_SERIES_METADATA_SERIES_SERIES_ID foreign key (series_id) references series (id);
-- inverse relationship between book and book_metadata
alter table BOOK_METADATA
add column book_id bigint;
update BOOK_METADATA m
set m.book_id = (select b.id from book b where b.metadata_id = m.id);
alter table BOOK_METADATA_AUTHOR
add column book_id bigint;
update BOOK_METADATA_AUTHOR a
set a.book_id = (select m.book_id from BOOK_METADATA m where m.id = a.BOOK_METADATA_ID);
alter table BOOK
drop constraint FK_BOOK_BOOK__METADATA_METADATA_ID;
alter table BOOK_METADATA_AUTHOR
drop constraint FK_BOOK_METADATA_AUTHOR_BOOK_METADATA_ID;
alter table BOOK_METADATA
drop primary key;
alter table BOOK_METADATA
drop column id;
alter table BOOK_METADATA
alter column book_id set not null;
alter table BOOK_METADATA
add primary key (book_id);
alter table BOOK_METADATA_AUTHOR
drop column BOOK_METADATA_ID;
alter table BOOK_METADATA_AUTHOR
alter column book_id set not null;
alter table BOOK
drop column METADATA_ID;
alter table BOOK_METADATA
add constraint FK_BOOK_METADATA_BOOK_BOOK_ID foreign key (book_id) references book (id);
alter table BOOK_METADATA_AUTHOR
add constraint FK_BOOK_METADATA_AUTHOR_BOOK_BOOK_ID foreign key (book_id) references book (id);
-- inverse relationship between book and media
alter table MEDIA
add column book_id bigint;
update MEDIA m
set m.book_id = (select b.id from book b where b.MEDIA_ID = m.id);
alter table MEDIA_PAGE
add column book_id bigint;
update MEDIA_PAGE p
set p.book_id = (select m.book_id from MEDIA m where m.id = p.MEDIA_ID);
alter table MEDIA_FILE
add column book_id bigint;
update MEDIA_FILE f
set f.book_id = (select m.book_id from MEDIA m where m.id = f.MEDIA_ID);
alter table BOOK
drop constraint FK_BOOK_MEDIA_MEDIA_ID;
alter table MEDIA_PAGE
drop constraint FK_MEDIA_PAGE_MEDIA_MEDIA_ID;
alter table MEDIA_FILE
drop constraint FK_MEDIA_FILE_MEDIA_MEDIA_ID;
alter table MEDIA
drop primary key;
alter table MEDIA
drop column id;
alter table MEDIA
alter column book_id set not null;
alter table MEDIA
add primary key (book_id);
alter table MEDIA_PAGE
drop column MEDIA_ID;
alter table MEDIA_PAGE
alter column book_id set not null;
alter table MEDIA_FILE
drop column MEDIA_ID;
alter table MEDIA_FILE
alter column book_id set not null;
alter table MEDIA_FILE
alter column FILES rename to FILE_NAME;
alter table BOOK
drop column MEDIA_ID;
alter table MEDIA
add constraint FK_MEDIA_BOOK_BOOK_ID foreign key (book_id) references book (id);
alter table MEDIA_PAGE
add constraint FK_MEDIA_PAGE_BOOK_BOOK_ID foreign key (book_id) references book (id);
alter table MEDIA_FILE
add constraint FK_MEDIA_FILE_BOOK_BOOK_ID foreign key (book_id) references book (id);
-- store media page count in DB
alter table media
add column page_count bigint default 0;
update media m
set page_count = (select count(p.BOOK_ID) from media_page p where p.BOOK_ID = m.BOOK_ID);

View file

@@ -1,15 +0,0 @@
create table read_progress
(
book_id bigint not null,
user_id bigint not null,
created_date timestamp not null default now(),
last_modified_date timestamp not null default now(),
page integer not null,
completed boolean not null
);
alter table read_progress
add constraint fk_read_progress_book_book_id foreign key (book_id) references book (id);
alter table read_progress
add constraint fk_read_progress_user_user_id foreign key (user_id) references user (id);

View file

@@ -1,4 +0,0 @@
alter table user
add column role_file_download boolean default true;
alter table user
add column role_page_streaming boolean default true;

View file

@@ -1,23 +0,0 @@
create table collection
(
id bigint not null,
name varchar not null,
ordered boolean not null default false,
series_count int not null,
created_date timestamp not null default now(),
last_modified_date timestamp not null default now(),
primary key (id)
);
create table collection_series
(
collection_id bigint not null,
series_id bigint not null,
number integer not null
);
alter table collection_series
add constraint fk_collection_series_collection_collection_id foreign key (collection_id) references collection (id);
alter table collection_series
add constraint fk_collection_series_series_series_id foreign key (series_id) references series (id);

View file

@@ -1,10 +0,0 @@
alter table library
add column import_comicinfo_book boolean default true;
alter table library
add column import_comicinfo_series boolean default true;
alter table library
add column import_comicinfo_collection boolean default true;
alter table library
add column import_epub_book boolean default true;
alter table library
add column import_epub_series boolean default true;

View file

@@ -2,22 +2,16 @@ package org.gotson.komga.infrastructure.datasource
import com.zaxxer.hikari.HikariDataSource
import org.gotson.komga.infrastructure.configuration.KomgaProperties
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties
import org.springframework.boot.context.properties.ConfigurationProperties
import org.springframework.boot.jdbc.DataSourceBuilder
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
import org.springframework.context.annotation.Primary
import org.springframework.data.jdbc.repository.config.AbstractJdbcConfiguration
import org.springframework.data.relational.core.dialect.Dialect
import org.springframework.data.relational.core.dialect.H2Dialect
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations
import javax.sql.DataSource
@Configuration
class DataSourcesConfiguration(
private val komgaProperties: KomgaProperties
-) : AbstractJdbcConfiguration() {
+) {
@Bean("sqliteDataSource")
@Primary
@@ -31,16 +25,4 @@ class DataSourcesConfiguration(
.build() as HikariDataSource
)
.apply { maximumPoolSize = 1 }
@Bean
@Primary
@ConfigurationProperties(prefix = "spring.datasource")
fun h2DataSourceProperties() = DataSourceProperties()
@Bean("h2DataSource")
fun h2DataSource(): DataSource =
h2DataSourceProperties().initializeDataSourceBuilder().type(HikariDataSource::class.java).build()
@Bean
override fun jdbcDialect(operations: NamedParameterJdbcOperations): Dialect = H2Dialect.INSTANCE
}

View file

@@ -1,287 +1,37 @@
package org.gotson.komga.infrastructure.datasource
import mu.KotlinLogging
import org.flywaydb.core.Flyway
import org.flywaydb.core.api.configuration.FluentConfiguration
import org.gotson.komga.infrastructure.configuration.KomgaProperties
import org.springframework.beans.factory.BeanInitializationException
import org.springframework.beans.factory.annotation.Qualifier
import org.springframework.beans.factory.annotation.Value
import org.springframework.context.annotation.Profile
import org.springframework.jdbc.core.JdbcTemplate
import org.springframework.jdbc.support.JdbcUtils
import org.springframework.jms.config.JmsListenerEndpointRegistry
import org.springframework.stereotype.Component
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths
import java.sql.PreparedStatement
import java.sql.ResultSet
import java.sql.ResultSetMetaData
import java.sql.Types
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import javax.annotation.PostConstruct
import javax.sql.DataSource
import kotlin.time.measureTime
import kotlin.io.path.ExperimentalPathApi
import kotlin.io.path.deleteIfExists
import kotlin.io.path.listDirectoryEntries
private val logger = KotlinLogging.logger {}
@Component
@Profile("!test")
class DatabaseMigration(
@Qualifier("h2DataSource") private val h2DataSource: DataSource,
@Qualifier("sqliteDataSource") private val sqliteDataSource: DataSource,
private val jmsListenerEndpointRegistry: JmsListenerEndpointRegistry,
@Value("\${spring.datasource.url}") private val h2Url: String,
private val komgaProperties: KomgaProperties
) {
// tables in order of creation, to ensure there is no missing foreign key
private val tables = listOf(
"LIBRARY",
"USER",
"USER_LIBRARY_SHARING",
"SERIES",
"SERIES_METADATA",
"BOOK",
"MEDIA",
"MEDIA_PAGE",
"MEDIA_FILE",
"BOOK_METADATA",
"BOOK_METADATA_AUTHOR",
"READ_PROGRESS",
"COLLECTION",
"COLLECTION_SERIES"
)
lateinit var h2MigratedFilePath: Path
lateinit var sqlitePath: Path
@ExperimentalPathApi
@PostConstruct
fun init() {
try {
logger.info { "Initiating database migration from H2 to SQLite" }
logger.info { "H2 url: $h2Url" }
var h2Filename = extractH2Path(h2Url)?.plus(".mv.db")
if (h2Filename == null) {
logger.warn { "The H2 URL ($h2Url) does not refer to a file database, skipping migration" }
return
}
val h2Path = convertHomeDir(h2Filename)
h2Filename = h2Path.toString()
logger.info { "H2 database file: $h2Filename" }
if (Files.notExists(h2Path)) {
logger.warn { "The H2 database file does not exists: $h2Path, skipping migration" }
return
}
h2MigratedFilePath = Paths.get("$h2Filename.migrated")
if (Files.exists(h2MigratedFilePath)) {
logger.info { "The H2 database has already been migrated, skipping migration" }
return
}
h2Backup(h2Filename)
// make sure H2 database is at the latest migration
flywayMigrateH2()
sqlitePath = convertHomeDir(komgaProperties.database.file)
// flyway Migrate must perform exactly one migration (target of one)
// if it performs 0, the database has already been migrated and probably has data in it
// it should never perform more than one with a target of 1 migration
if (flywayMigrateSqlite() != 1)
throw BeanInitializationException("The SQLite database ($sqlitePath) is not newly minted")
logger.info { "Stopping all JMS listeners" }
jmsListenerEndpointRegistry.stop()
fixH2Database()
var rows: Int
measureTime {
rows = transferH2DataToSqlite()
}.also {
val insertsPerSecond = rows / it.inSeconds
logger.info { "Migration performed in $it ($rows rows). $insertsPerSecond inserts per second." }
}
logger.info { "Creating H2 migrated file: $h2MigratedFilePath" }
Files.createFile(h2MigratedFilePath)
logger.info { "Starting all JMS listeners" }
jmsListenerEndpointRegistry.start()
logger.info { "Migration finished" }
} catch (e: Exception) {
logger.error(e) { "Migration failed" }
if (this::sqlitePath.isInitialized) {
logger.info { "Deleting Sqlite database if exists" }
Files.deleteIfExists(sqlitePath)
}
if (this::h2MigratedFilePath.isInitialized) {
logger.info { "Deleting H2 migrated file if exists" }
Files.deleteIfExists(h2MigratedFilePath)
}
throw BeanInitializationException("Migration failed")
}
}
private fun flywayMigrateSqlite(): Int {
logger.info { "Initialize SQLite database with initial migration: 20200706141854" }
return Flyway(
FluentConfiguration()
.dataSource(sqliteDataSource)
.locations("classpath:db/migration/sqlite")
.target("20200706141854")
).migrate().migrationsExecuted
}
private fun flywayMigrateH2(): Int {
logger.info { "Migrating H2 database to the latest migration" }
return Flyway(
FluentConfiguration()
.dataSource(h2DataSource)
.locations("classpath:db/migration/h2")
).migrate().migrationsExecuted
}
private fun h2Backup(h2Filename: String) {
val jdbcTemplate = JdbcTemplate(h2DataSource)
val timestamp = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd.HH-mm-ss"))
val backup = "$h2Filename.backup.$timestamp.zip"
logger.info { "Perform a specific backup of the H2 database to: $backup" }
jdbcTemplate.execute("BACKUP TO '$backup'")
logger.info { "Backup finished" }
}
private fun fixH2Database() {
logger.info { "Checking H2 database for inconsistent data" }
val jdbc = JdbcTemplate(h2DataSource)
val countBook = jdbc.queryForObject("select count(distinct BOOK_ID) from media_page where number is null", Integer::class.java)!!
if (countBook > 0) {
logger.info { "Found $countBook books with missing page numbers, marking them as to be re-analyzed" }
jdbc.update(
"""
update media set STATUS='UNKNOWN'
where BOOK_ID in (
select distinct BOOK_ID from media_page where number is null
)"""
)
jdbc.update("delete from media_page where number is null")
jdbc.update(
"""
delete from media_page
where BOOK_ID in (
select distinct BOOK_ID from media_page where number is null
)"""
)
}
val invalidReadProgress = jdbc.query(
"""
select b.id as BOOK_ID, u.id as USER_ID, count(p.BOOK_ID)
from read_progress p left join user u on p.user_id = u.id left join book b on p.book_id = b.id
group by b.id, b.name, u.id, u.email
having count(p.book_id) > 1
"""
) { rs, _ -> Triple(rs.getLong(1), rs.getLong(2), rs.getLong(3)) }
if (invalidReadProgress.isNotEmpty()) {
logger.info { "Found ${invalidReadProgress.size} invalid read progress, removing extra rows and keep one per (book,user)" }
invalidReadProgress.forEach {
jdbc.update(
"delete from read_progress where book_id = ? and user_id = ? and rownum() < ?",
it.first, it.second, it.third
)
}
}
}
private fun transferH2DataToSqlite(): Int {
val maxBatchSize = komgaProperties.database.batchSize
val sourceConnection = h2DataSource.connection
val destinationConnection = sqliteDataSource.connection
var resultSet: ResultSet? = null
var selectStatement: PreparedStatement? = null
var insertStatement: PreparedStatement? = null
var totalRows = 0
destinationConnection.autoCommit = false
destinationConnection.transactionIsolation = 1
try {
tables.forEach { table ->
logger.info { "Migrate table: $table" }
selectStatement = sourceConnection.prepareStatement("select * from $table")
resultSet = selectStatement!!.executeQuery()
insertStatement = destinationConnection.prepareStatement(createInsert(resultSet!!.metaData, table))
var batchSize = 0
var batchCount = 1
while (resultSet!!.next()) {
for (i in 1..resultSet!!.metaData.columnCount) {
if (resultSet!!.metaData.getColumnType(i) == Types.BLOB) {
val blob = resultSet!!.getBlob(i)
val byteArray = blob?.binaryStream?.readBytes()
insertStatement!!.setObject(i, byteArray)
} else
insertStatement!!.setObject(i, resultSet!!.getObject(i))
}
insertStatement!!.addBatch()
batchSize++
totalRows++
if (batchSize >= maxBatchSize) {
insertStatement!!.executeBatch()
logger.info { "Insert batch #$batchCount ($batchSize rows)" }
batchSize = 0
batchCount++
}
}
insertStatement!!.executeBatch()
logger.info { "Insert batch #$batchCount ($batchSize rows)" }
}
+ convertHomeDir(komgaProperties.database.file).parent.listDirectoryEntries("*.mv.db*").let { h2Files ->
+ if (h2Files.isNotEmpty()) {
+ logger.info { "Deleting old H2 database files" }
+ h2Files.forEach {
+ logger.info { "Delete: $it" }
+ it.deleteIfExists()
+ }
+ }
+ }
} catch (e: Exception) {
destinationConnection.rollback()
throw e
} finally {
destinationConnection.commit()
JdbcUtils.closeResultSet(resultSet)
JdbcUtils.closeStatement(selectStatement)
JdbcUtils.closeStatement(insertStatement)
JdbcUtils.closeConnection(sourceConnection)
JdbcUtils.closeConnection(destinationConnection)
}
return totalRows
}
private fun createInsert(metadata: ResultSetMetaData, table: String): String {
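// Builds a parameterized insert matching the source table's columns, e.g. "insert into BOOK (ID, NAME) values (?, ?)".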
val columns = (1..metadata.columnCount).map { metadata.getColumnName(it) }
val quids = MutableList(columns.size) { "?" }
return "insert into $table (${columns.joinToString()}) values (${quids.joinToString()})"
}
}
val excludeH2Url = listOf(":mem:", ":ssl:", ":tcp:", ":zip:")
fun extractH2Path(url: String): String? {
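// e.g. "jdbc:h2:~/.komga/database.h2" -> "~/.komga/database.h2"; returns null for in-memory, tcp, ssl or zip URLs.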
if (!url.startsWith("jdbc:h2:")) return null
if (excludeH2Url.any { url.contains(it, ignoreCase = true) }) return null
return url.split(":").last().split(";").first()
}
fun convertHomeDir(path: String): Path {

View file

@@ -1,10 +0,0 @@
package org.gotson.komga.infrastructure.development
import org.springframework.context.annotation.Configuration
import org.springframework.context.annotation.ImportResource
import org.springframework.context.annotation.Profile
@Profile("dev")
@Configuration
@ImportResource("classpath:h2server.xml")
class DevelopmentConfiguration

View file

@@ -5,7 +5,6 @@ import org.gotson.komga.domain.model.ROLE_ADMIN
import org.gotson.komga.domain.model.ROLE_USER
import org.gotson.komga.infrastructure.configuration.KomgaProperties
import org.springframework.boot.actuate.autoconfigure.security.servlet.EndpointRequest
import org.springframework.boot.autoconfigure.security.servlet.PathRequest
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Profile
import org.springframework.http.HttpMethod
@@ -41,9 +40,6 @@ class SecurityConfiguration(
// restrict all actuator endpoints to ADMIN only
.requestMatchers(EndpointRequest.toAnyEndpoint()).hasRole(ROLE_ADMIN)
// restrict H2 console to ADMIN only
.requestMatchers(PathRequest.toH2Console()).hasRole(ROLE_ADMIN)
// claim is unprotected
.antMatchers("/api/v1/claim").permitAll()
@@ -53,10 +49,8 @@
"/opds/**"
).hasRole(ROLE_USER)
// authorize frames for H2 console
.and()
.headers {
it.frameOptions().sameOrigin()
it.cacheControl().disable() // headers are set in WebMvcConfiguration
}

View file

@@ -9,8 +9,6 @@ komga:
file: ":memory:"
spring:
datasource:
url: jdbc:h2:mem:testdb
artemis:
embedded:
data-directory: ./artemis

View file

@@ -1,8 +1,3 @@
komga:
database:
file: ./localdb.sqlite
spring:
datasource:
url: jdbc:h2:./testdb

View file

@@ -16,15 +16,6 @@ komga:
file: \${user.home}/.komga/database.sqlite
spring:
# cache:
# caffeine-spec: maximumSize=500,expireAfterWrite=300s
datasource:
url: jdbc:h2:~/.komga/database.h2
username: sa
initialization-mode: always
h2:
console:
enabled: true
flyway:
enabled: true
locations: classpath:db/migration/{vendor}

View file

@@ -1,12 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd">
<bean id="org.h2.tools.Server"
class="org.h2.tools.Server"
factory-method="createTcpServer"
init-method="start"
destroy-method="stop">
<constructor-arg value="-tcp,-tcpAllowOthers,-tcpPort,8043"/>
</bean>
</beans>

View file

@@ -1,31 +0,0 @@
package org.gotson.komga.infrastructure.datasource
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.MethodSource
class DatabaseMigrationTest {
companion object {
@JvmStatic
fun h2Urls() =
listOf(
"not a jdbc url" to null,
"jdbc:h2:./testdb" to "./testdb",
"jdbc:h2:file:./testdb" to "./testdb",
"jdbc:h2:~/.komga/database.h2" to "~/.komga/database.h2",
"jdbc:h2:mem:testdb" to null,
"jdbc:h2:tcp://localhost/~/test" to null,
"jdbc:h2:ssl://localhost:8085/~/sample" to null,
"jdbc:h2:file:~/private;CIPHER=AES;FILE_LOCK=SOCKET" to "~/private",
"jdbc:h2:zip:~/db.zip!/test" to null
)
}
@ParameterizedTest
@MethodSource("h2Urls")
fun `given h2 url when extracting file name then file name is returned`(pair: Pair<String, String?>) {
val fileName = extractH2Path(pair.first)
assertThat(fileName).isEqualTo(pair.second)
}
}

View file

@@ -5,8 +5,6 @@ komga:
file: ":memory:"
spring:
datasource:
url: jdbc:h2:mem:testdb
flyway:
enabled: true
artemis: