Mirror of https://github.com/gotson/komga.git, synced 2025-12-06 08:32:25 +01:00
feat: change database from H2 to SQLite
This is a major change, but done transparently. At startup, a migration from H2 to SQLite will be triggered:

- if the H2 database is a file (not in memory)
- if the H2 database has not been migrated yet
- if the SQLite database is newly minted

All the data will be transferred from H2 to SQLite before the startup of the application (before the API can serve any requests). After the migration, an empty file will be stored next to the H2 database file (same name with ".imported" suffix). The H2 database files will be automatically removed in a later version.

A new configuration key is available to customize the file path of the SQLite database: `komga.database.file`.

The database backup feature has been removed. It might be re-added later on using a different logic.

The IDs of entities have been changed from number to string in the API.

closes #218
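As a rough illustration of the trigger conditions described above (a sketch with hypothetical names, not code from this commit), the startup decision amounts to:

```kotlin
import java.nio.file.Files
import java.nio.file.Path

// Hypothetical sketch of the migration trigger described in the commit message.
// The real implementation lives in Komga's startup path; the names here are made up.
fun shouldMigrateToSqlite(h2DatabaseFile: Path, sqliteDatabaseFile: Path): Boolean {
  // the H2 database is a file (not in memory)
  val h2IsAFile = Files.isRegularFile(h2DatabaseFile)
  // the H2 database has not been migrated yet (no "<name>.imported" marker next to it)
  val notMigratedYet = !Files.exists(h2DatabaseFile.resolveSibling("${h2DatabaseFile.fileName}.imported"))
  // the SQLite database (komga.database.file) is newly minted
  val sqliteIsNew = !Files.exists(sqliteDatabaseFile) || Files.size(sqliteDatabaseFile) == 0L
  return h2IsAFile && notMigratedYet && sqliteIsNew
}
```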
This commit is contained in:
parent 1965415c00
commit 20b2b39d36
152 changed files with 1712 additions and 1198 deletions
@@ -27,10 +27,10 @@ Komga is composed of 2 projects:
 Komga uses Spring Profiles extensively:
 - `dev`: add more logging, disable periodic scanning, in-memory database
-- `localdb`: a dev profile that stores the database in `./testdb`.
+- `localdb`: a dev profile that stores the database in `./localdb`.
 - `noclaim`: will create initial users at startup if none exist and output users and passwords in the standard output
   - if `dev` is active, will create `admin@example.org` with password `admin`, and `user@example.org` with password `user`
-  - if `dev` is not active, will create `admin@example.org` with a random password
+  - if `dev` is not active, will create `admin@example.org` with a random password that will be shown in the logs

 ### Gradle tasks
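For context on the profile list in the hunk above: a local developer run typically activates several of these profiles together. The snippet below is only an illustration, assuming the Spring Boot Gradle plugin's `bootRun` task; it is not part of this commit or of DEVELOPING.md.

```kotlin
// build.gradle.kts (illustrative only): run a local instance with extra logging,
// a ./localdb database, and auto-created admin/user accounts.
tasks.bootRun {
  systemProperty("spring.profiles.active", "dev,localdb,noclaim")
}
```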
@@ -133,13 +133,13 @@ export default Vue.extend({
       this.$router.push({ name: 'search', query: { q: s } }).catch(e => {
       })
     },
-    seriesThumbnailUrl (seriesId: number): string {
+    seriesThumbnailUrl (seriesId: string): string {
       return seriesThumbnailUrl(seriesId)
     },
-    bookThumbnailUrl (bookId: number): string {
+    bookThumbnailUrl (bookId: string): string {
       return bookThumbnailUrl(bookId)
     },
-    collectionThumbnailUrl (collectionId: number): string {
+    collectionThumbnailUrl (collectionId: string): string {
       return collectionThumbnailUrl(collectionId)
     },
   },

@@ -117,7 +117,7 @@ export default Vue.extend({
   },
   computed: {
-    seriesIds (): number[] {
+    seriesIds (): string[] {
       if (Array.isArray(this.series)) return this.series.map(s => s.id)
       else return [this.series.id]
     },

@@ -54,7 +54,7 @@ export default Vue.extend({
       type: Boolean,
     },
     bookId: {
-      type: Number,
+      type: String,
     },
   },
   data: () => {

@@ -78,7 +78,7 @@ export default Vue.extend({
       snackText: '',
       modal: false,
       allLibraries: true,
-      selectedLibraries: [] as number[],
+      selectedLibraries: [] as string[],
     }
   },
   props: {

@@ -12,15 +12,15 @@ const urls = {
 export default urls

-export function bookThumbnailUrl (bookId: number): string {
+export function bookThumbnailUrl (bookId: string): string {
   return `${urls.originNoSlash}/api/v1/books/${bookId}/thumbnail`
 }

-export function bookFileUrl (bookId: number): string {
+export function bookFileUrl (bookId: string): string {
   return `${urls.originNoSlash}/api/v1/books/${bookId}/file`
 }

-export function bookPageUrl (bookId: number, page: number, convertTo?: string): string {
+export function bookPageUrl (bookId: string, page: number, convertTo?: string): string {
   let url = `${urls.originNoSlash}/api/v1/books/${bookId}/pages/${page}`
   if (convertTo) {
     url += `?convert=${convertTo}`

@@ -28,14 +28,14 @@ export function bookPageUrl (bookId: number, page: number, convertTo?: string):
   return url
 }

-export function bookPageThumbnailUrl (bookId: number, page: number): string {
+export function bookPageThumbnailUrl (bookId: string, page: number): string {
   return `${urls.originNoSlash}/api/v1/books/${bookId}/pages/${page}/thumbnail`
 }

-export function seriesThumbnailUrl (seriesId: number): string {
+export function seriesThumbnailUrl (seriesId: string): string {
   return `${urls.originNoSlash}/api/v1/series/${seriesId}/thumbnail`
 }

-export function collectionThumbnailUrl (collectionId: number): string {
+export function collectionThumbnailUrl (collectionId: string): string {
   return `${urls.originNoSlash}/api/v1/collections/${collectionId}/thumbnail`
 }
@@ -49,7 +49,7 @@ const vuexModule: Module<any, any> = {
       await service.postUser(user)
       dispatch('getAllUsers')
     },
-    async updateUserRoles ({ dispatch }, { userId, roles }: { userId: number, roles: RolesUpdateDto }) {
+    async updateUserRoles ({ dispatch }, { userId, roles }: { userId: string, roles: RolesUpdateDto }) {
       await service.patchUserRoles(userId, roles)
       dispatch('getAllUsers')
     },

@@ -76,32 +76,32 @@ const router = new Router({
       name: 'browse-libraries',
       beforeEnter: noLibraryGuard,
       component: () => import(/* webpackChunkName: "browse-libraries" */ './views/BrowseLibraries.vue'),
-      props: (route) => ({ libraryId: Number(route.params.libraryId) }),
+      props: (route) => ({ libraryId: route.params.libraryId }),
     },
     {
       path: '/libraries/:libraryId/collections',
       name: 'browse-collections',
       beforeEnter: noLibraryGuard,
       component: () => import(/* webpackChunkName: "browse-collections" */ './views/BrowseCollections.vue'),
-      props: (route) => ({ libraryId: Number(route.params.libraryId) }),
+      props: (route) => ({ libraryId: route.params.libraryId }),
     },
     {
       path: '/collections/:collectionId',
       name: 'browse-collection',
       component: () => import(/* webpackChunkName: "browse-collection" */ './views/BrowseCollection.vue'),
-      props: (route) => ({ collectionId: Number(route.params.collectionId) }),
+      props: (route) => ({ collectionId: route.params.collectionId }),
     },
     {
       path: '/series/:seriesId',
       name: 'browse-series',
       component: () => import(/* webpackChunkName: "browse-series" */ './views/BrowseSeries.vue'),
-      props: (route) => ({ seriesId: Number(route.params.seriesId) }),
+      props: (route) => ({ seriesId: route.params.seriesId }),
     },
     {
       path: '/book/:bookId',
       name: 'browse-book',
       component: () => import(/* webpackChunkName: "browse-book" */ './views/BrowseBook.vue'),
-      props: (route) => ({ bookId: Number(route.params.bookId) }),
+      props: (route) => ({ bookId: route.params.bookId }),
     },
     {
       path: '/search',

@@ -124,7 +124,7 @@ const router = new Router({
       path: '/book/:bookId/read',
       name: 'read-book',
       component: () => import(/* webpackChunkName: "read-book" */ './views/BookReader.vue'),
-      props: (route) => ({ bookId: Number(route.params.bookId) }),
+      props: (route) => ({ bookId: route.params.bookId }),
     },
     {
       path: '*',

@@ -11,7 +11,7 @@ export default class KomgaBooksService {
     this.http = http
   }

-  async getBooks (libraryId?: number, pageRequest?: PageRequest, search?: string, mediaStatus?: string[], readStatus?: string[]): Promise<Page<BookDto>> {
+  async getBooks (libraryId?: string, pageRequest?: PageRequest, search?: string, mediaStatus?: string[], readStatus?: string[]): Promise<Page<BookDto>> {
     try {
       const params = { ...pageRequest } as any
       if (libraryId) {

@@ -53,7 +53,7 @@ export default class KomgaBooksService {
     }
   }

-  async getBook (bookId: number): Promise<BookDto> {
+  async getBook (bookId: string): Promise<BookDto> {
     try {
       return (await this.http.get(`${API_BOOKS}/${bookId}`)).data
     } catch (e) {

@@ -65,7 +65,7 @@ export default class KomgaBooksService {
     }
   }

-  async getBookSiblingNext (bookId: number): Promise<BookDto> {
+  async getBookSiblingNext (bookId: string): Promise<BookDto> {
     try {
       return (await this.http.get(`${API_BOOKS}/${bookId}/next`)).data
     } catch (e) {

@@ -77,7 +77,7 @@ export default class KomgaBooksService {
     }
   }

-  async getBookSiblingPrevious (bookId: number): Promise<BookDto> {
+  async getBookSiblingPrevious (bookId: string): Promise<BookDto> {
     try {
       return (await this.http.get(`${API_BOOKS}/${bookId}/previous`)).data
     } catch (e) {

@@ -89,7 +89,7 @@ export default class KomgaBooksService {
     }
   }

-  async getBookPages (bookId: number): Promise<PageDto[]> {
+  async getBookPages (bookId: string): Promise<PageDto[]> {
     try {
       return (await this.http.get(`${API_BOOKS}/${bookId}/pages`)).data
     } catch (e) {

@@ -125,7 +125,7 @@ export default class KomgaBooksService {
     }
   }

-  async updateMetadata (bookId: number, metadata: BookMetadataUpdateDto) {
+  async updateMetadata (bookId: string, metadata: BookMetadataUpdateDto) {
     try {
       await this.http.patch(`${API_BOOKS}/${bookId}/metadata`, metadata)
     } catch (e) {

@@ -137,7 +137,7 @@ export default class KomgaBooksService {
     }
   }

-  async updateReadProgress (bookId: number, readProgress: ReadProgressUpdateDto) {
+  async updateReadProgress (bookId: string, readProgress: ReadProgressUpdateDto) {
     try {
       await this.http.patch(`${API_BOOKS}/${bookId}/read-progress`, readProgress)
     } catch (e) {

@@ -149,7 +149,7 @@ export default class KomgaBooksService {
     }
   }

-  async deleteReadProgress (bookId: number) {
+  async deleteReadProgress (bookId: string) {
     try {
       await this.http.delete(`${API_BOOKS}/${bookId}/read-progress`)
     } catch (e) {

@@ -11,7 +11,7 @@ export default class KomgaCollectionsService {
     this.http = http
   }

-  async getCollections (libraryIds?: number[], pageRequest?: PageRequest, search?: string): Promise<Page<CollectionDto>> {
+  async getCollections (libraryIds?: string[], pageRequest?: PageRequest, search?: string): Promise<Page<CollectionDto>> {
     try {
       const params = { ...pageRequest } as any
       if (libraryIds) params.library_id = libraryIds

@@ -30,7 +30,7 @@ export default class KomgaCollectionsService {
     }
   }

-  async getOneCollection (collectionId: number): Promise<CollectionDto> {
+  async getOneCollection (collectionId: string): Promise<CollectionDto> {
     try {
       return (await this.http.get(`${API_COLLECTIONS}/${collectionId}`)).data
     } catch (e) {

@@ -54,7 +54,7 @@ export default class KomgaCollectionsService {
     }
   }

-  async patchCollection (collectionId: number, collection: CollectionUpdateDto) {
+  async patchCollection (collectionId: string, collection: CollectionUpdateDto) {
     try {
       await this.http.patch(`${API_COLLECTIONS}/${collectionId}`, collection)
     } catch (e) {

@@ -66,7 +66,7 @@ export default class KomgaCollectionsService {
     }
   }

-  async deleteCollection (collectionId: number) {
+  async deleteCollection (collectionId: string) {
     try {
       await this.http.delete(`${API_COLLECTIONS}/${collectionId}`)
     } catch (e) {

@@ -78,7 +78,7 @@ export default class KomgaCollectionsService {
     }
   }

-  async getSeries (collectionId: number, pageRequest?: PageRequest): Promise<Page<SeriesDto>> {
+  async getSeries (collectionId: string, pageRequest?: PageRequest): Promise<Page<SeriesDto>> {
     try {
       const params = { ...pageRequest }
       return (await this.http.get(`${API_COLLECTIONS}/${collectionId}/series`, {

@@ -21,7 +21,7 @@ export default class KomgaLibrariesService {
     }
   }

-  async getLibrary (libraryId: number): Promise<LibraryDto> {
+  async getLibrary (libraryId: string): Promise<LibraryDto> {
     try {
       return (await this.http.get(`${API_LIBRARIES}/${libraryId}`)).data
     } catch (e) {

@@ -45,7 +45,7 @@ export default class KomgaLibrariesService {
     }
   }

-  async updateLibrary (libraryId: number, library: LibraryUpdateDto) {
+  async updateLibrary (libraryId: string, library: LibraryUpdateDto) {
     try {
       await this.http.put(`${API_LIBRARIES}/${libraryId}`, library)
     } catch (e) {

@@ -11,7 +11,7 @@ export default class KomgaSeriesService {
     this.http = http
   }

-  async getSeries (libraryId?: number, pageRequest?: PageRequest, search?: string, status?: string[], readStatus?: string[]): Promise<Page<SeriesDto>> {
+  async getSeries (libraryId?: string, pageRequest?: PageRequest, search?: string, status?: string[], readStatus?: string[]): Promise<Page<SeriesDto>> {
     try {
       const params = { ...pageRequest } as any
       if (libraryId) {

@@ -69,7 +69,7 @@ export default class KomgaSeriesService {
     }
   }

-  async getOneSeries (seriesId: number): Promise<SeriesDto> {
+  async getOneSeries (seriesId: string): Promise<SeriesDto> {
     try {
       return (await this.http.get(`${API_SERIES}/${seriesId}`)).data
     } catch (e) {

@@ -81,7 +81,7 @@ export default class KomgaSeriesService {
     }
   }

-  async getBooks (seriesId: number, pageRequest?: PageRequest, readStatus?: string[]): Promise<Page<BookDto>> {
+  async getBooks (seriesId: string, pageRequest?: PageRequest, readStatus?: string[]): Promise<Page<BookDto>> {
     try {
       const params = { ...pageRequest } as any
       if (readStatus) {

@@ -100,7 +100,7 @@ export default class KomgaSeriesService {
     }
   }

-  async getCollections (seriesId: number): Promise<CollectionDto[]> {
+  async getCollections (seriesId: string): Promise<CollectionDto[]> {
     try {
       return (await this.http.get(`${API_SERIES}/${seriesId}/collections`)).data
     } catch (e) {

@@ -136,7 +136,7 @@ export default class KomgaSeriesService {
     }
   }

-  async updateMetadata (seriesId: number, metadata: SeriesMetadataUpdateDto) {
+  async updateMetadata (seriesId: string, metadata: SeriesMetadataUpdateDto) {
     try {
       await this.http.patch(`${API_SERIES}/${seriesId}/metadata`, metadata)
     } catch (e) {

@@ -148,7 +148,7 @@ export default class KomgaSeriesService {
     }
   }

-  async markAsRead (seriesId: number) {
+  async markAsRead (seriesId: string) {
     try {
       await this.http.post(`${API_SERIES}/${seriesId}/read-progress`)
     } catch (e) {

@@ -160,7 +160,7 @@ export default class KomgaSeriesService {
     }
   }

-  async markAsUnread (seriesId: number) {
+  async markAsUnread (seriesId: string) {
     try {
       await this.http.delete(`${API_SERIES}/${seriesId}/read-progress`)
     } catch (e) {

@@ -70,7 +70,7 @@ export default class KomgaUsersService {
     }
   }

-  async patchUserRoles (userId: number, roles: RolesUpdateDto): Promise<UserDto> {
+  async patchUserRoles (userId: string, roles: RolesUpdateDto): Promise<UserDto> {
     try {
       return (await this.http.patch(`${API_USERS}/${userId}`, roles)).data
     } catch (e) {
@@ -1,29 +1,29 @@
 interface EventBookChanged {
-  id: number,
-  seriesId: number
+  id: string,
+  seriesId: string
 }

 interface EventSeriesChanged {
-  id: number,
-  libraryId: number
+  id: string,
+  libraryId: string
 }

 interface EventCollectionChanged {
-  id: number
+  id: string
 }

 interface EventCollectionDeleted {
-  id: number
+  id: string
 }

 interface EventLibraryAdded {
-  id: number
+  id: string
 }

 interface EventLibraryChanged {
-  id: number
+  id: string
 }

 interface EventLibraryDeleted {
-  id: number
+  id: string
 }

@@ -1,7 +1,7 @@
 interface BookDto {
-  id: number,
-  seriesId: number,
-  libraryId: number,
+  id: string,
+  seriesId: string,
+  libraryId: string,
   name: string,
   url: string,
   number: number,

@@ -1,9 +1,9 @@
 interface CollectionDto {
-  id: number,
+  id: string,
   name: string,
   ordered: boolean,
   filtered: boolean,
-  seriesIds: number[],
+  seriesIds: string[],
   createdDate: string,
   lastModifiedDate: string
 }

@@ -11,11 +11,11 @@ interface CollectionDto {
 interface CollectionCreationDto {
   name: string,
   ordered: boolean,
-  seriesIds: number[]
+  seriesIds: string[]
 }

 interface CollectionUpdateDto {
   name?: string,
   ordered?: boolean,
-  seriesIds?: number[]
+  seriesIds?: string[]
 }

@@ -19,7 +19,7 @@ interface LibraryUpdateDto {
 }

 interface LibraryDto {
-  id: number,
+  id: string,
   name: string,
   root: string,
   importComicInfoBook: boolean,

@@ -1,6 +1,6 @@
 interface SeriesDto {
-  id: number,
-  libraryId: number,
+  id: string,
+  libraryId: string,
   name: string,
   url: string,
   lastModified: string,

@@ -1,11 +1,11 @@
 interface UserDto {
-  id: number,
+  id: string,
   email: string,
   roles: string[]
 }

 interface UserWithSharedLibrariesDto {
-  id: number,
+  id: string,
   email: string,
   roles: string[],
   sharedAllLibraries: boolean,

@@ -13,7 +13,7 @@ interface UserWithSharedLibrariesDto {
 }

 interface SharedLibraryDto {
-  id: number
+  id: string
 }

 interface UserCreationDto {

@@ -27,7 +27,7 @@ interface PasswordUpdateDto {

 interface SharedLibrariesUpdateDto {
   all: boolean,
-  libraryIds: number[]
+  libraryIds: string[]
 }

 interface RolesUpdateDto {
@@ -384,14 +384,14 @@ export default Vue.extend({
   },
   props: {
     bookId: {
-      type: Number,
+      type: String,
       required: true,
     },
   },
   async beforeRouteUpdate (to, from, next) {
     if (to.params.bookId !== from.params.bookId) {
       // route update means going to previous/next book, in this case we start from first page
-      this.setup(Number(to.params.bookId), 1)
+      this.setup(to.params.bookId, 1)
     }
     next()
   },

@@ -517,7 +517,7 @@ export default Vue.extend({
     keyPressed (e: KeyboardEvent) {
       executeShortcut(this, e)
     },
-    async setup (bookId: number, page: number) {
+    async setup (bookId: string, page: number) {
       this.book = await this.$komgaBooks.getBook(bookId)
       this.pages = await this.$komgaBooks.getBookPages(bookId)
       if (page >= 1 && page <= this.pagesCount) {

@@ -184,13 +184,13 @@ export default Vue.extend({
   },
   props: {
     bookId: {
-      type: Number,
+      type: String,
       required: true,
     },
   },
   async beforeRouteUpdate (to, from, next) {
     if (to.params.bookId !== from.params.bookId) {
-      this.loadBook(Number(to.params.bookId))
+      this.loadBook(to.params.bookId)
     }

     next()

@@ -239,7 +239,7 @@ export default Vue.extend({
     reloadBook (event: EventBookChanged) {
       if (event.id === this.bookId) this.loadBook(this.bookId)
     },
-    async loadBook (bookId: number) {
+    async loadBook (bookId: string) {
       this.book = await this.$komgaBooks.getBook(bookId)
     },
     analyze () {

@@ -103,7 +103,7 @@ export default Vue.extend({
   },
   props: {
     collectionId: {
-      type: Number,
+      type: String,
       required: true,
     },
   },

@@ -140,7 +140,7 @@ export default Vue.extend({
       this.series = []
       this.editElements = false

-      this.loadCollection(Number(to.params.collectionId))
+      this.loadCollection(to.params.collectionId)
     }

     next()

@@ -156,7 +156,7 @@ export default Vue.extend({
         this.loadCollection(this.collectionId)
       }
     },
-    async loadCollection (collectionId: number) {
+    async loadCollection (collectionId: string) {
       this.collection = await this.$komgaCollections.getOneCollection(collectionId)
       this.series = (await this.$komgaCollections.getSeries(collectionId, { unpaged: true } as PageRequest)).content
       this.seriesCopy = [...this.series]

@@ -47,6 +47,7 @@ import { COLLECTION_CHANGED, LIBRARY_CHANGED } from '@/types/events'
 import Vue from 'vue'

 const cookiePageSize = 'pagesize'
+const all = 'all'

 export default Vue.extend({
   name: 'BrowseCollections',

@@ -72,8 +73,8 @@ export default Vue.extend({
   },
   props: {
     libraryId: {
-      type: Number,
-      default: 0,
+      type: String,
+      default: all,
     },
   },
   created () {

@@ -105,7 +106,7 @@ export default Vue.extend({
       this.totalElements = null
       this.collections = []

-      this.loadLibrary(Number(to.params.libraryId))
+      this.loadLibrary(to.params.libraryId)
     }

     next()

@@ -173,7 +174,7 @@ export default Vue.extend({
         this.loadLibrary(this.libraryId)
       }
     },
-    async loadLibrary (libraryId: number) {
+    async loadLibrary (libraryId: string) {
       this.library = this.getLibraryLazy(libraryId)
       await this.loadPage(libraryId, this.page)

@@ -181,21 +182,21 @@ export default Vue.extend({
         await this.$router.push({ name: 'browse-libraries', params: { libraryId: libraryId.toString() } })
       }
     },
-    async loadPage (libraryId: number, page: number) {
+    async loadPage (libraryId: string, page: number) {
       const pageRequest = {
         page: page - 1,
         size: this.pageSize,
       } as PageRequest

-      const lib = libraryId !== 0 ? [libraryId] : undefined
+      const lib = libraryId !== all ? [libraryId] : undefined
       const collectionsPage = await this.$komgaCollections.getCollections(lib, pageRequest)

       this.totalPages = collectionsPage.totalPages
       this.totalElements = collectionsPage.totalElements
       this.collections = collectionsPage.content
     },
-    getLibraryLazy (libraryId: any): LibraryDto | undefined {
-      if (libraryId !== 0) {
+    getLibraryLazy (libraryId: string): LibraryDto | undefined {
+      if (libraryId !== all) {
        return this.$store.getters.getLibraryById(libraryId)
       } else {
        return undefined

@@ -86,6 +86,7 @@ import { COLLECTION_CHANGED, LIBRARY_CHANGED, LIBRARY_DELETED, SERIES_CHANGED }
 import Vue from 'vue'

 const cookiePageSize = 'pagesize'
+const all = 'all'

 export default Vue.extend({
   name: 'BrowseLibraries',

@@ -136,8 +137,8 @@ export default Vue.extend({
   },
   props: {
     libraryId: {
-      type: Number,
-      default: 0,
+      type: String,
+      default: all,
     },
   },
   watch: {

@@ -188,7 +189,7 @@ export default Vue.extend({
       this.series = []
       this.collectionsCount = 0

-      this.loadLibrary(Number(to.params.libraryId))
+      this.loadLibrary(to.params.libraryId)

       this.setWatches()
     }

@@ -214,10 +215,10 @@ export default Vue.extend({
     },
   },
   methods: {
-    cookieSort (libraryId: number): string {
+    cookieSort (libraryId: string): string {
       return `library.sort.${libraryId}`
     },
-    cookieFilter (libraryId: number): string {
+    cookieFilter (libraryId: string): string {
       return `library.filter.${libraryId}`
     },
     resetParams (route: any) {

@@ -236,7 +237,7 @@ export default Vue.extend({
     libraryDeleted (event: EventLibraryDeleted) {
       if (event.id === this.libraryId) {
         this.$router.push({ name: 'home' })
-      } else if (this.libraryId === 0) {
+      } else if (this.libraryId === all) {
         this.loadLibrary(this.libraryId)
       }
     },

@@ -280,19 +281,19 @@ export default Vue.extend({
       this.loadLibrary(this.libraryId)
     },
     reloadSeries (event: EventSeriesChanged) {
-      if (this.libraryId === 0 || event.libraryId === this.libraryId) {
+      if (this.libraryId === all || event.libraryId === this.libraryId) {
         this.loadPage(this.libraryId, this.page, this.sortActive)
       }
     },
     reloadLibrary (event: EventLibraryChanged) {
-      if (this.libraryId === 0 || event.id === this.libraryId) {
+      if (this.libraryId === all || event.id === this.libraryId) {
         this.loadLibrary(this.libraryId)
       }
     },
-    async loadLibrary (libraryId: number) {
+    async loadLibrary (libraryId: string) {
       this.library = this.getLibraryLazy(libraryId)

-      const lib = libraryId !== 0 ? [libraryId] : undefined
+      const lib = libraryId !== all ? [libraryId] : undefined
       this.collectionsCount = (await this.$komgaCollections.getCollections(lib, { size: 1 })).totalElements

       await this.loadPage(libraryId, this.page, this.sortActive)

@@ -311,7 +312,7 @@ export default Vue.extend({
       }).catch(_ => {
       })
     },
-    async loadPage (libraryId: number, page: number, sort: SortActive) {
+    async loadPage (libraryId: string, page: number, sort: SortActive) {
       const pageRequest = {
         page: page - 1,
         size: this.pageSize,

@@ -321,7 +322,7 @@ export default Vue.extend({
       pageRequest.sort = [`${sort.key},${sort.order}`]
     }

-    const requestLibraryId = libraryId !== 0 ? libraryId : undefined
+    const requestLibraryId = libraryId !== all ? libraryId : undefined
     const seriesPage = await this.$komgaSeries.getSeries(requestLibraryId, pageRequest, undefined, this.filters.status, this.filters.readStatus)

     this.totalPages = seriesPage.totalPages

@@ -204,7 +204,7 @@ export default Vue.extend({
   },
   props: {
     seriesId: {
-      type: Number,
+      type: String,
       required: true,
     },
   },

@@ -253,7 +253,7 @@ export default Vue.extend({
       this.books = []
       this.collections = []

-      this.loadSeries(Number(to.params.seriesId))
+      this.loadSeries(to.params.seriesId)

       this.setWatches()
     }

@@ -302,7 +302,7 @@ export default Vue.extend({
     reloadBooks (event: EventBookChanged) {
       if (event.seriesId === this.seriesId) this.loadSeries(this.seriesId)
     },
-    async loadSeries (seriesId: number) {
+    async loadSeries (seriesId: string) {
       this.series = await this.$komgaSeries.getOneSeries(seriesId)
       this.collections = await this.$komgaSeries.getCollections(seriesId)
       await this.loadPage(seriesId, this.page, this.sortActive)

@@ -326,7 +326,7 @@ export default Vue.extend({
       }).catch(_ => {
       })
     },
-    async loadPage (seriesId: number, page: number, sort: SortActive) {
+    async loadPage (seriesId: string, page: number, sort: SortActive) {
       const pageRequest = {
         page: page - 1,
         size: this.pageSize,
@@ -34,7 +34,7 @@
       </v-list-item-content>
     </v-list-item>

-    <v-list-item :to="{name:'browse-libraries', params: {libraryId: 0}}">
+    <v-list-item :to="{name:'browse-libraries', params: {libraryId: 'all'}}">
       <v-list-item-icon>
         <v-icon>mdi-book-multiple</v-icon>
       </v-list-item-icon>

@@ -130,7 +130,7 @@ export default Vue.extend({
         await this.$router.push({ name: 'home' })
       }
     } catch (e) {
-      this.showSnack(e.message)
+      this.showSnack(e?.message)
     }
   },
   showSnack (message: string) {
@@ -5,6 +5,7 @@ COPY ${DEPENDENCY}/BOOT-INF/lib /app/lib
 COPY ${DEPENDENCY}/META-INF /app/META-INF
 COPY ${DEPENDENCY}/BOOT-INF/classes /app
 ENV KOMGA_DATABASE_BACKUP_PATH="/config/database-backup.zip"
+ENV KOMGA_DATABASE_FILE="/config/database.sqlite"
 ENV SPRING_DATASOURCE_URL="jdbc:h2:/config/database.h2"
 ENV SPRING_ARTEMIS_EMBEDDED_DATA_DIRECTORY="/config/artemis"
 ENV LOGGING_FILE_NAME="/config/logs/komga.log"
@@ -92,8 +92,11 @@ dependencies {
   implementation("com.jakewharton.byteunits:byteunits:0.9.1")

+  implementation("com.github.f4b6a3:tsid-creator:2.2.4")
+
-  runtimeOnly("com.h2database:h2:1.4.200")
-  jooqGeneratorRuntime("com.h2database:h2:1.4.200")
+  runtimeOnly("org.xerial:sqlite-jdbc:3.32.3")
+  jooqGeneratorRuntime("org.xerial:sqlite-jdbc:3.32.3")

   testImplementation("org.springframework.boot:spring-boot-starter-test") {
     exclude(module = "mockito-core")

@@ -197,42 +200,38 @@ sourceSets {
   }
 }

-val jooqDb = mapOf(
-  "url" to "jdbc:h2:${project.buildDir}/generated/flyway/h2",
-  "schema" to "PUBLIC",
-  "user" to "sa",
-  "password" to ""
+val dbSqlite = mapOf(
+  "url" to "jdbc:sqlite:${project.buildDir}/generated/flyway/database.sqlite"
 )
-val migrationDirs = listOf(
-  "$projectDir/src/flyway/resources/db/migration",
-  "$projectDir/src/flyway/kotlin/db/migration"
+val migrationDirsSqlite = listOf(
+  "$projectDir/src/flyway/resources/db/migration/sqlite"
+  // "$projectDir/src/flyway/kotlin/db/migration/sqlite"
 )
 flyway {
-  url = jooqDb["url"]
-  user = jooqDb["user"]
-  password = jooqDb["password"]
-  schemas = arrayOf(jooqDb["schema"])
-  locations = arrayOf("classpath:db/migration")
+  url = dbSqlite["url"]
+  locations = arrayOf("classpath:db/migration/sqlite")
 }
-//in order to include the Java migrations, flywayClasses must be run before flywayMigrate
 tasks.flywayMigrate {
+  //in order to include the Java migrations, flywayClasses must be run before flywayMigrate
   dependsOn("flywayClasses")
-  migrationDirs.forEach { inputs.dir(it) }
+  migrationDirsSqlite.forEach { inputs.dir(it) }
   outputs.dir("${project.buildDir}/generated/flyway")
-  doFirst { delete(outputs.files) }
+  doFirst {
+    delete(outputs.files)
+    mkdir("${project.buildDir}/generated/flyway")
+  }
 }

 jooqGenerator {
   jooqVersion = "3.13.1"
   configuration("primary", project.sourceSets.getByName("main")) {
-    databaseSources = migrationDirs
+    databaseSources = migrationDirsSqlite

     configuration = jooqCodegenConfiguration {
       jdbc {
-        username = jooqDb["user"]
-        password = jooqDb["password"]
-        driver = "org.h2.Driver"
-        url = jooqDb["url"]
+        driver = "org.sqlite.JDBC"
+        url = dbSqlite["url"]
       }

       generator {

@@ -242,8 +241,7 @@ jooqGenerator {
       }

       database {
-        name = "org.jooq.meta.h2.H2Database"
-        inputSchema = jooqDb["schema"]
+        name = "org.jooq.meta.sqlite.SQLiteDatabase"
       }
     }
   }
@@ -1,4 +1,4 @@
-package db.migration
+package db.migration.h2

 import org.flywaydb.core.api.migration.BaseJavaMigration
 import org.flywaydb.core.api.migration.Context

@@ -44,4 +44,4 @@ fun findCommonDirPath(paths: List<String>, separator: Char): String {
     if (!paths2.all { it.startsWith(common + separator) || it == common }) return prevCommon
     if (++k == n) return common
   }
-}
+}

@@ -1,4 +1,4 @@
-package db.migration
+package db.migration.h2

 import org.flywaydb.core.api.migration.BaseJavaMigration
 import org.flywaydb.core.api.migration.Context

@@ -1,4 +1,4 @@
-package db.migration
+package db.migration.h2

 import org.flywaydb.core.api.migration.BaseJavaMigration
 import org.flywaydb.core.api.migration.Context
@@ -0,0 +1,160 @@
CREATE TABLE LIBRARY
(
    ID varchar NOT NULL PRIMARY KEY,
    CREATED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    LAST_MODIFIED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    NAME varchar NOT NULL,
    ROOT varchar NOT NULL,
    IMPORT_COMICINFO_BOOK boolean NOT NULL DEFAULT 1,
    IMPORT_COMICINFO_SERIES boolean NOT NULL DEFAULT 1,
    IMPORT_COMICINFO_COLLECTION boolean NOT NULL DEFAULT 1,
    IMPORT_EPUB_BOOK boolean NOT NULL DEFAULT 1,
    IMPORT_EPUB_SERIES boolean NOT NULL DEFAULT 1
);
CREATE TABLE USER
(
    ID varchar NOT NULL PRIMARY KEY,
    CREATED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    LAST_MODIFIED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    EMAIL varchar NOT NULL UNIQUE,
    PASSWORD varchar NOT NULL,
    SHARED_ALL_LIBRARIES boolean NOT NULL DEFAULT 1,
    ROLE_ADMIN boolean NOT NULL DEFAULT 0,
    ROLE_FILE_DOWNLOAD boolean NOT NULL DEFAULT 1,
    ROLE_PAGE_STREAMING boolean NOT NULL DEFAULT 1
);
CREATE TABLE USER_LIBRARY_SHARING
(
    USER_ID varchar NOT NULL,
    LIBRARY_ID varchar NOT NULL,
    PRIMARY KEY (USER_ID, LIBRARY_ID),
    FOREIGN KEY (USER_ID) REFERENCES USER (ID),
    FOREIGN KEY (LIBRARY_ID) REFERENCES LIBRARY (ID)
);
CREATE TABLE SERIES
(
    ID varchar NOT NULL PRIMARY KEY,
    CREATED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    LAST_MODIFIED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    FILE_LAST_MODIFIED datetime NOT NULL,
    NAME varchar NOT NULL,
    URL varchar NOT NULL,
    LIBRARY_ID varchar NOT NULL,
    FOREIGN KEY (LIBRARY_ID) REFERENCES LIBRARY (ID)
);
CREATE TABLE SERIES_METADATA
(
    CREATED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    LAST_MODIFIED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    STATUS varchar NOT NULL,
    STATUS_LOCK boolean NOT NULL DEFAULT 0,
    TITLE varchar NOT NULL,
    TITLE_LOCK boolean NOT NULL DEFAULT 0,
    TITLE_SORT varchar NOT NULL,
    TITLE_SORT_LOCK boolean NOT NULL DEFAULT 0,
    SERIES_ID varchar NOT NULL PRIMARY KEY,
    FOREIGN KEY (SERIES_ID) REFERENCES SERIES (ID)
);
CREATE TABLE BOOK
(
    ID varchar NOT NULL PRIMARY KEY,
    CREATED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    LAST_MODIFIED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    FILE_LAST_MODIFIED datetime NOT NULL,
    NAME varchar NOT NULL,
    URL varchar NOT NULL,
    SERIES_ID varchar NOT NULL,
    FILE_SIZE int8 NOT NULL DEFAULT 0,
    NUMBER int NOT NULL DEFAULT 0,
    LIBRARY_ID varchar NOT NULL,
    FOREIGN KEY (LIBRARY_ID) REFERENCES LIBRARY (ID),
    FOREIGN KEY (SERIES_ID) REFERENCES SERIES (ID)
);
CREATE TABLE MEDIA
(
    MEDIA_TYPE varchar NULL,
    STATUS varchar NOT NULL,
    THUMBNAIL blob NULL,
    CREATED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    LAST_MODIFIED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    COMMENT varchar NULL,
    BOOK_ID varchar NOT NULL PRIMARY KEY,
    PAGE_COUNT int NOT NULL DEFAULT 0,
    FOREIGN KEY (BOOK_ID) REFERENCES BOOK (ID)
);
CREATE TABLE MEDIA_PAGE
(
    FILE_NAME varchar NOT NULL,
    MEDIA_TYPE varchar NOT NULL,
    NUMBER int NOT NULL,
    BOOK_ID varchar NOT NULL,
    PRIMARY KEY (BOOK_ID, NUMBER),
    FOREIGN KEY (BOOK_ID) REFERENCES BOOK (ID)
);
CREATE TABLE MEDIA_FILE
(
    FILE_NAME varchar NOT NULL,
    BOOK_ID varchar NOT NULL,
    FOREIGN KEY (BOOK_ID) REFERENCES BOOK (ID)
);
CREATE TABLE BOOK_METADATA
(
    CREATED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    LAST_MODIFIED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    AGE_RATING int NULL,
    AGE_RATING_LOCK boolean NOT NULL DEFAULT 0,
    NUMBER varchar NOT NULL,
    NUMBER_LOCK boolean NOT NULL DEFAULT 0,
    NUMBER_SORT real NOT NULL,
    NUMBER_SORT_LOCK boolean NOT NULL DEFAULT 0,
    PUBLISHER varchar NOT NULL DEFAULT '',
    PUBLISHER_LOCK boolean NOT NULL DEFAULT 0,
    READING_DIRECTION varchar NULL,
    READING_DIRECTION_LOCK boolean NOT NULL DEFAULT 0,
    RELEASE_DATE date NULL,
    RELEASE_DATE_LOCK boolean NOT NULL DEFAULT 0,
    SUMMARY varchar NOT NULL DEFAULT '',
    SUMMARY_LOCK boolean NOT NULL DEFAULT 0,
    TITLE varchar NOT NULL,
    TITLE_LOCK boolean NOT NULL DEFAULT 0,
    AUTHORS_LOCK boolean NOT NULL DEFAULT 0,
    BOOK_ID varchar NOT NULL PRIMARY KEY,
    FOREIGN KEY (BOOK_ID) REFERENCES BOOK (ID)
);
CREATE TABLE BOOK_METADATA_AUTHOR
(
    NAME varchar NOT NULL,
    ROLE varchar NOT NULL,
    BOOK_ID varchar NOT NULL,
    FOREIGN KEY (BOOK_ID) REFERENCES BOOK (ID)
);
CREATE TABLE READ_PROGRESS
(
    BOOK_ID varchar NOT NULL,
    USER_ID varchar NOT NULL,
    CREATED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    LAST_MODIFIED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    PAGE int NOT NULL,
    COMPLETED boolean NOT NULL,
    PRIMARY KEY (BOOK_ID, USER_ID),
    FOREIGN KEY (BOOK_ID) REFERENCES BOOK (ID),
    FOREIGN KEY (USER_ID) REFERENCES USER (ID)
);
CREATE TABLE COLLECTION
(
    ID varchar NOT NULL PRIMARY KEY,
    NAME varchar NOT NULL,
    ORDERED boolean NOT NULL DEFAULT 0,
    SERIES_COUNT int NOT NULL,
    CREATED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    LAST_MODIFIED_DATE datetime NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE COLLECTION_SERIES
(
    COLLECTION_ID varchar NOT NULL,
    SERIES_ID varchar NOT NULL,
    NUMBER int NOT NULL,
    PRIMARY KEY (COLLECTION_ID, SERIES_ID),
    FOREIGN KEY (COLLECTION_ID) REFERENCES COLLECTION (ID),
    FOREIGN KEY (SERIES_ID) REFERENCES SERIES (ID)
);
@@ -5,28 +5,23 @@ import java.io.Serializable
 sealed class Task : Serializable {
   abstract fun uniqueId(): String

-  data class ScanLibrary(val libraryId: Long) : Task() {
+  data class ScanLibrary(val libraryId: String) : Task() {
     override fun uniqueId() = "SCAN_LIBRARY_$libraryId"
   }

-  data class AnalyzeBook(val bookId: Long) : Task() {
+  data class AnalyzeBook(val bookId: String) : Task() {
     override fun uniqueId() = "ANALYZE_BOOK_$bookId"
   }

-  data class GenerateBookThumbnail(val bookId: Long) : Task() {
+  data class GenerateBookThumbnail(val bookId: String) : Task() {
     override fun uniqueId() = "GENERATE_BOOK_THUMBNAIL_$bookId"
   }

-  data class RefreshBookMetadata(val bookId: Long) : Task() {
+  data class RefreshBookMetadata(val bookId: String) : Task() {
     override fun uniqueId() = "REFRESH_BOOK_METADATA_$bookId"
   }

-  data class RefreshSeriesMetadata(val seriesId: Long) : Task() {
+  data class RefreshSeriesMetadata(val seriesId: String) : Task() {
     override fun uniqueId() = "REFRESH_SERIES_METADATA_$seriesId"
   }

-  object BackupDatabase : Task() {
-    override fun uniqueId(): String = "BACKUP_DATABASE"
-    override fun toString(): String = "BackupDatabase"
-  }
 }
@@ -7,7 +7,6 @@ import org.gotson.komga.domain.persistence.SeriesRepository
 import org.gotson.komga.domain.service.BookLifecycle
 import org.gotson.komga.domain.service.LibraryScanner
 import org.gotson.komga.domain.service.MetadataLifecycle
-import org.gotson.komga.infrastructure.h2.DatabaseBackuper
 import org.gotson.komga.infrastructure.jms.QUEUE_TASKS
 import org.gotson.komga.infrastructure.jms.QUEUE_TASKS_SELECTOR
 import org.springframework.jms.annotation.JmsListener

@@ -24,8 +23,7 @@ class TaskHandler(
   private val seriesRepository: SeriesRepository,
   private val libraryScanner: LibraryScanner,
   private val bookLifecycle: BookLifecycle,
-  private val metadataLifecycle: MetadataLifecycle,
-  private val databaseBackuper: DatabaseBackuper
+  private val metadataLifecycle: MetadataLifecycle
 ) {

   @JmsListener(destination = QUEUE_TASKS, selector = QUEUE_TASKS_SELECTOR)

@@ -63,10 +61,6 @@ class TaskHandler(
             metadataLifecycle.refreshMetadata(it)
           } ?: logger.warn { "Cannot execute task $task: Series does not exist" }

-          is Task.BackupDatabase -> {
-            databaseBackuper.backupDatabase()
-          }
-
         }
       }.also {
         logger.info { "Task $task executed in $it" }
@@ -27,7 +27,7 @@ class TaskReceiver(
     libraryRepository.findAll().forEach { scanLibrary(it.id) }
   }

-  fun scanLibrary(libraryId: Long) {
+  fun scanLibrary(libraryId: String) {
     submitTask(Task.ScanLibrary(libraryId))
   }

@@ -40,7 +40,7 @@ class TaskReceiver(
     }
   }

-  fun analyzeBook(bookId: Long) {
+  fun analyzeBook(bookId: String) {
     submitTask(Task.AnalyzeBook(bookId))
   }

@@ -48,11 +48,11 @@ class TaskReceiver(
     submitTask(Task.AnalyzeBook(book.id))
   }

-  fun generateBookThumbnail(bookId: Long) {
+  fun generateBookThumbnail(bookId: String) {
     submitTask(Task.GenerateBookThumbnail(bookId))
   }

-  fun refreshBookMetadata(bookId: Long) {
+  fun refreshBookMetadata(bookId: String) {
     submitTask(Task.RefreshBookMetadata(bookId))
   }

@@ -60,14 +60,10 @@ class TaskReceiver(
     submitTask(Task.RefreshBookMetadata(book.id))
   }

-  fun refreshSeriesMetadata(seriesId: Long) {
+  fun refreshSeriesMetadata(seriesId: String) {
     submitTask(Task.RefreshSeriesMetadata(seriesId))
   }

-  fun databaseBackup() {
-    submitTask(Task.BackupDatabase)
-  }
-
   private fun submitTask(task: Task) {
     logger.info { "Sending task: $task" }
     jmsTemplate.convertAndSend(QUEUE_TASKS, task) {
@@ -1,5 +1,6 @@
 package org.gotson.komga.domain.model

+import com.github.f4b6a3.tsid.TsidCreator
 import com.jakewharton.byteunits.BinaryByteUnit
 import org.apache.commons.io.FilenameUtils
 import java.net.URL

@@ -14,9 +15,9 @@ data class Book(
   val fileSize: Long = 0,
   val number: Int = 0,

-  val id: Long = 0,
-  val seriesId: Long = 0,
-  val libraryId: Long = 0,
+  val id: String = TsidCreator.getTsidString256(),
+  val seriesId: String = "",
+  val libraryId: String = "",

   override val createdDate: LocalDateTime = LocalDateTime.now(),
   override val lastModifiedDate: LocalDateTime = LocalDateTime.now()
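The string IDs used above come from the tsid-creator library added to build.gradle.kts earlier in this commit. A minimal standalone sketch of the same call (the printed value is just an example of the general shape, not a fixed output):

```kotlin
import com.github.f4b6a3.tsid.TsidCreator

fun main() {
  // Same generator used as the default for Book.id above: a compact,
  // URL-safe string that sorts roughly by creation time.
  val id: String = TsidCreator.getTsidString256()
  println(id)
}
```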
@@ -24,7 +24,7 @@ class BookMetadata(
   val releaseDateLock: Boolean = false,
   val authorsLock: Boolean = false,

-  val bookId: Long = 0,
+  val bookId: String = "",

   override val createdDate: LocalDateTime = LocalDateTime.now(),
   override val lastModifiedDate: LocalDateTime = LocalDateTime.now()

@@ -54,7 +54,7 @@ class BookMetadata(
     ageRatingLock: Boolean = this.ageRatingLock,
     releaseDateLock: Boolean = this.releaseDateLock,
     authorsLock: Boolean = this.authorsLock,
-    bookId: Long = this.bookId,
+    bookId: String = this.bookId,
     createdDate: LocalDateTime = this.createdDate,
     lastModifiedDate: LocalDateTime = this.lastModifiedDate
   ) =

@@ -1,15 +1,15 @@
 package org.gotson.komga.domain.model

 open class BookSearch(
-  val libraryIds: Collection<Long>? = null,
-  val seriesIds: Collection<Long>? = null,
+  val libraryIds: Collection<String>? = null,
+  val seriesIds: Collection<String>? = null,
   val searchTerm: String? = null,
   val mediaStatus: Collection<Media.Status>? = null
 )

 class BookSearchWithReadProgress(
-  libraryIds: Collection<Long>? = null,
-  seriesIds: Collection<Long>? = null,
+  libraryIds: Collection<String>? = null,
+  seriesIds: Collection<String>? = null,
   searchTerm: String? = null,
   mediaStatus: Collection<Media.Status>? = null,
   val readStatus: Collection<ReadStatus>? = null
@@ -1,5 +1,6 @@
 package org.gotson.komga.domain.model

+import com.github.f4b6a3.tsid.TsidCreator
 import java.time.LocalDateTime
 import javax.validation.constraints.Email
 import javax.validation.constraints.NotBlank

@@ -18,9 +19,9 @@ data class KomgaUser(
   val roleAdmin: Boolean,
   val roleFileDownload: Boolean = true,
   val rolePageStreaming: Boolean = true,
-  val sharedLibrariesIds: Set<Long> = emptySet(),
+  val sharedLibrariesIds: Set<String> = emptySet(),
   val sharedAllLibraries: Boolean = true,
-  val id: Long = 0,
+  val id: String = TsidCreator.getTsidString256(),
   override val createdDate: LocalDateTime = LocalDateTime.now(),
   override val lastModifiedDate: LocalDateTime = LocalDateTime.now()
 ) : Auditable() {

@@ -38,7 +39,7 @@ data class KomgaUser(
   *
   * @return a list of authorised LibraryIds, or null if the user is authorized to see all libraries
   */
-  fun getAuthorizedLibraryIds(libraryIds: Collection<Long>?): Collection<Long>? =
+  fun getAuthorizedLibraryIds(libraryIds: Collection<String>?): Collection<String>? =
     when {
       // limited user & libraryIds are specified: filter on provided libraries intersecting user's authorized libraries
       !sharedAllLibraries && libraryIds != null -> libraryIds.intersect(sharedLibrariesIds)

@@ -61,7 +62,7 @@ data class KomgaUser(
     return sharedAllLibraries || sharedLibrariesIds.any { it == series.libraryId }
   }

-  fun canAccessLibrary(libraryId: Long): Boolean =
+  fun canAccessLibrary(libraryId: String): Boolean =
     sharedAllLibraries || sharedLibrariesIds.any { it == libraryId }

   fun canAccessLibrary(library: Library): Boolean {
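To make the filtering rule documented above concrete, here is a small hypothetical helper (not part of this commit) that combines `getAuthorizedLibraryIds` with the `BookSearch` model changed earlier in this diff:

```kotlin
// Hypothetical usage sketch; KomgaUser and BookSearch are the domain types from this change.
fun bookSearchForUser(user: KomgaUser, requestedLibraryIds: Collection<String>?): BookSearch {
  // null means "no restriction": the user is authorized to see all libraries
  val authorizedIds = user.getAuthorizedLibraryIds(requestedLibraryIds)
  return BookSearch(libraryIds = authorizedIds)
}
```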
@@ -1,5 +1,6 @@
 package org.gotson.komga.domain.model

+import com.github.f4b6a3.tsid.TsidCreator
 import java.net.URL
 import java.nio.file.Path
 import java.nio.file.Paths

@@ -14,7 +15,7 @@ data class Library(
   val importEpubBook: Boolean = true,
   val importEpubSeries: Boolean = true,

-  val id: Long = 0,
+  val id: String = TsidCreator.getTsidString256(),

   override val createdDate: LocalDateTime = LocalDateTime.now(),
   override val lastModifiedDate: LocalDateTime = LocalDateTime.now()

@@ -9,7 +9,7 @@ class Media(
   val pages: List<BookPage> = emptyList(),
   val files: List<String> = emptyList(),
   val comment: String? = null,
-  val bookId: Long = 0,
+  val bookId: String = "",
   override val createdDate: LocalDateTime = LocalDateTime.now(),
   override val lastModifiedDate: LocalDateTime = LocalDateTime.now()
 ) : Auditable() {

@@ -21,7 +21,7 @@ class Media(
     pages: List<BookPage> = this.pages.toList(),
     files: List<String> = this.files.toList(),
     comment: String? = this.comment,
-    bookId: Long = this.bookId,
+    bookId: String = this.bookId,
     createdDate: LocalDateTime = this.createdDate,
     lastModifiedDate: LocalDateTime = this.lastModifiedDate
   ) =

@@ -3,8 +3,8 @@ package org.gotson.komga.domain.model
 import java.time.LocalDateTime

 data class ReadProgress(
-  val bookId: Long,
-  val userId: Long,
+  val bookId: String,
+  val userId: String,
   val page: Int,
   val completed: Boolean,
@@ -1,15 +1,16 @@
 package org.gotson.komga.domain.model

+import com.github.f4b6a3.tsid.TsidCreator
 import java.net.URL
 import java.time.LocalDateTime

 data class Series(
   val name: String,
   val url: URL,
-  var fileLastModified: LocalDateTime,
+  val fileLastModified: LocalDateTime,

-  val id: Long = 0,
-  val libraryId: Long = 0,
+  val id: String = TsidCreator.getTsidString256(),
+  val libraryId: String = "",

   override val createdDate: LocalDateTime = LocalDateTime.now(),
   override val lastModifiedDate: LocalDateTime = LocalDateTime.now()

@@ -1,14 +1,15 @@
 package org.gotson.komga.domain.model

+import com.github.f4b6a3.tsid.TsidCreator
 import java.time.LocalDateTime

 data class SeriesCollection(
   val name: String,
   val ordered: Boolean = false,

-  val seriesIds: List<Long> = emptyList(),
+  val seriesIds: List<String> = emptyList(),

-  val id: Long = 0,
+  val id: String = TsidCreator.getTsidString256(),

   override val createdDate: LocalDateTime = LocalDateTime.now(),
   override val lastModifiedDate: LocalDateTime = LocalDateTime.now(),

@@ -11,7 +11,7 @@ class SeriesMetadata(
   val titleLock: Boolean = false,
   val titleSortLock: Boolean = false,

-  val seriesId: Long = 0,
+  val seriesId: String = "",

   override val createdDate: LocalDateTime = LocalDateTime.now(),
   override val lastModifiedDate: LocalDateTime = LocalDateTime.now()

@@ -26,7 +26,7 @@ class SeriesMetadata(
     statusLock: Boolean = this.statusLock,
     titleLock: Boolean = this.titleLock,
     titleSortLock: Boolean = this.titleSortLock,
-    seriesId: Long = this.seriesId,
+    seriesId: String = this.seriesId,
     createdDate: LocalDateTime = this.createdDate,
     lastModifiedDate: LocalDateTime = this.lastModifiedDate
   ) =

@@ -1,14 +1,14 @@
 package org.gotson.komga.domain.model

 open class SeriesSearch(
-  val libraryIds: Collection<Long>? = null,
+  val libraryIds: Collection<String>? = null,
   val collectionIds: Collection<Long>? = null,
   val searchTerm: String? = null,
   val metadataStatus: Collection<SeriesMetadata.Status>? = null
 )

 class SeriesSearchWithReadProgress(
-  libraryIds: Collection<Long>? = null,
+  libraryIds: Collection<String>? = null,
   collectionIds: Collection<Long>? = null,
   searchTerm: String? = null,
   metadataStatus: Collection<SeriesMetadata.Status>? = null,
@@ -3,13 +3,17 @@ package org.gotson.komga.domain.persistence
 import org.gotson.komga.domain.model.BookMetadata

 interface BookMetadataRepository {
-  fun findById(bookId: Long): BookMetadata
-  fun findByIdOrNull(bookId: Long): BookMetadata?
+  fun findById(bookId: String): BookMetadata
+  fun findByIdOrNull(bookId: String): BookMetadata?
+  fun findByIds(bookIds: Collection<String>): Collection<BookMetadata>

   fun findAuthorsByName(search: String): List<String>

-  fun insert(metadata: BookMetadata): BookMetadata
+  fun insert(metadata: BookMetadata)
+  fun insertMany(metadatas: Collection<BookMetadata>)
   fun update(metadata: BookMetadata)
+  fun updateMany(metadatas: Collection<BookMetadata>)

-  fun delete(bookId: Long)
+  fun delete(bookId: String)
+  fun deleteByBookIds(bookIds: Collection<String>)
 }

@@ -4,22 +4,25 @@ import org.gotson.komga.domain.model.Book
 import org.gotson.komga.domain.model.BookSearch

 interface BookRepository {
-  fun findByIdOrNull(bookId: Long): Book?
-  fun findBySeriesId(seriesId: Long): Collection<Book>
+  fun findByIdOrNull(bookId: String): Book?
+  fun findBySeriesId(seriesId: String): Collection<Book>
   fun findAll(): Collection<Book>
   fun findAll(bookSearch: BookSearch): Collection<Book>

-  fun getLibraryId(bookId: Long): Long?
-  fun findFirstIdInSeries(seriesId: Long): Long?
-  fun findAllIdBySeriesId(seriesId: Long): Collection<Long>
-  fun findAllIdByLibraryId(libraryId: Long): Collection<Long>
-  fun findAllId(bookSearch: BookSearch): Collection<Long>
+  fun getLibraryId(bookId: String): String?
+  fun findFirstIdInSeries(seriesId: String): String?
+  fun findAllIdBySeriesId(seriesId: String): Collection<String>
+  fun findAllIdBySeriesIds(seriesIds: Collection<String>): Collection<String>
+  fun findAllIdByLibraryId(libraryId: String): Collection<String>
+  fun findAllId(bookSearch: BookSearch): Collection<String>

-  fun insert(book: Book): Book
+  fun insert(book: Book)
+  fun insertMany(books: Collection<Book>)
   fun update(book: Book)
+  fun updateMany(books: Collection<Book>)

-  fun delete(bookId: Long)
-  fun deleteAll(bookIds: List<Long>)
+  fun delete(bookId: String)
+  fun deleteByBookIds(bookIds: Collection<String>)
   fun deleteAll()

   fun count(): Long

@@ -6,12 +6,12 @@ interface KomgaUserRepository {
   fun count(): Long

   fun findAll(): Collection<KomgaUser>
-  fun findByIdOrNull(id: Long): KomgaUser?
+  fun findByIdOrNull(id: String): KomgaUser?

-  fun save(user: KomgaUser): KomgaUser
-  fun saveAll(users: Iterable<KomgaUser>): Collection<KomgaUser>
+  fun insert(user: KomgaUser)
+  fun update(user: KomgaUser)

-  fun delete(user: KomgaUser)
+  fun delete(userId: String)
   fun deleteAll()

   fun existsByEmailIgnoreCase(email: String): Boolean

@@ -3,15 +3,15 @@ package org.gotson.komga.domain.persistence
 import org.gotson.komga.domain.model.Library

 interface LibraryRepository {
-  fun findByIdOrNull(libraryId: Long): Library?
-  fun findById(libraryId: Long): Library
+  fun findByIdOrNull(libraryId: String): Library?
+  fun findById(libraryId: String): Library
   fun findAll(): Collection<Library>
-  fun findAllById(libraryIds: Collection<Long>): Collection<Library>
+  fun findAllById(libraryIds: Collection<String>): Collection<Library>

-  fun delete(libraryId: Long)
+  fun delete(libraryId: String)
   fun deleteAll()

-  fun insert(library: Library): Library
+  fun insert(library: Library)
   fun update(library: Library)

   fun count(): Long

@@ -3,12 +3,14 @@ package org.gotson.komga.domain.persistence
 import org.gotson.komga.domain.model.Media

 interface MediaRepository {
-  fun findById(bookId: Long): Media
+  fun findById(bookId: String): Media

-  fun getThumbnail(bookId: Long): ByteArray?
+  fun getThumbnail(bookId: String): ByteArray?

-  fun insert(media: Media): Media
+  fun insert(media: Media)
+  fun insertMany(medias: Collection<Media>)
   fun update(media: Media)

-  fun delete(bookId: Long)
+  fun delete(bookId: String)
+  fun deleteByBookIds(bookIds: Collection<String>)
 }

@@ -5,14 +5,14 @@ import org.gotson.komga.domain.model.ReadProgress

 interface ReadProgressRepository {
   fun findAll(): Collection<ReadProgress>
-  fun findByBookIdAndUserId(bookId: Long, userId: Long): ReadProgress?
-  fun findByUserId(userId: Long): Collection<ReadProgress>
+  fun findByBookIdAndUserId(bookId: String, userId: String): ReadProgress?
+  fun findByUserId(userId: String): Collection<ReadProgress>

   fun save(readProgress: ReadProgress)

-  fun delete(bookId: Long, userId: Long)
-  fun deleteByUserId(userId: Long)
-  fun deleteByBookId(bookId: Long)
-  fun deleteByBookIds(bookIds: Collection<Long>)
+  fun delete(bookId: String, userId: String)
+  fun deleteByUserId(userId: String)
+  fun deleteByBookId(bookId: String)
+  fun deleteByBookIds(bookIds: Collection<String>)
   fun deleteAll()
 }
|
|
@ -5,36 +5,37 @@ import org.springframework.data.domain.Page
|
|||
import org.springframework.data.domain.Pageable
|
||||
|
||||
interface SeriesCollectionRepository {
|
||||
fun findByIdOrNull(collectionId: Long): SeriesCollection?
|
||||
fun findByIdOrNull(collectionId: String): SeriesCollection?
|
||||
fun findAll(search: String? = null, pageable: Pageable): Page<SeriesCollection>
|
||||
|
||||
/**
|
||||
* Find one SeriesCollection by collectionId,
|
||||
* optionally with only seriesId filtered by the provided filterOnLibraryIds.
|
||||
*/
|
||||
fun findByIdOrNull(collectionId: Long, filterOnLibraryIds: Collection<Long>?): SeriesCollection?
|
||||
fun findByIdOrNull(collectionId: String, filterOnLibraryIds: Collection<String>?): SeriesCollection?
|
||||
|
||||
/**
|
||||
* Find all SeriesCollection with at least one Series belonging to the provided belongsToLibraryIds,
|
||||
* optionally with only seriesId filtered by the provided filterOnLibraryIds.
|
||||
*/
|
||||
fun findAllByLibraries(belongsToLibraryIds: Collection<Long>, filterOnLibraryIds: Collection<Long>?, search: String? = null, pageable: Pageable): Page<SeriesCollection>
|
||||
fun findAllByLibraries(belongsToLibraryIds: Collection<String>, filterOnLibraryIds: Collection<String>?, search: String? = null, pageable: Pageable): Page<SeriesCollection>
|
||||
|
||||
/**
|
||||
* Find all SeriesCollection that contains the provided containsSeriesId,
|
||||
* optionally with only seriesId filtered by the provided filterOnLibraryIds.
|
||||
*/
|
||||
fun findAllBySeries(containsSeriesId: Long, filterOnLibraryIds: Collection<Long>?): Collection<SeriesCollection>
|
||||
fun findAllBySeries(containsSeriesId: String, filterOnLibraryIds: Collection<String>?): Collection<SeriesCollection>
|
||||
|
||||
fun findByNameOrNull(name: String): SeriesCollection?
|
||||
|
||||
fun insert(collection: SeriesCollection): SeriesCollection
|
||||
fun insert(collection: SeriesCollection)
|
||||
fun update(collection: SeriesCollection)
|
||||
|
||||
fun removeSeriesFromAll(seriesId: Long)
|
||||
fun removeSeriesFromAll(seriesId: String)
|
||||
fun removeSeriesFromAll(seriesIds: Collection<String>)
|
||||
|
||||
fun delete(collectionId: String)
|
||||
|
||||
fun delete(collectionId: Long)
|
||||
fun deleteAll()
|
||||
|
||||
fun existsByName(name: String): Boolean
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,13 +3,13 @@ package org.gotson.komga.domain.persistence
|
|||
import org.gotson.komga.domain.model.SeriesMetadata
|
||||
|
||||
interface SeriesMetadataRepository {
|
||||
fun findById(seriesId: Long): SeriesMetadata
|
||||
fun findByIdOrNull(seriesId: Long): SeriesMetadata?
|
||||
fun findById(seriesId: String): SeriesMetadata
|
||||
fun findByIdOrNull(seriesId: String): SeriesMetadata?
|
||||
|
||||
fun insert(metadata: SeriesMetadata): SeriesMetadata
|
||||
fun update(metadata: SeriesMetadata)
|
||||
|
||||
fun delete(seriesId: Long)
|
||||
fun delete(seriesId: String)
|
||||
|
||||
fun count(): Long
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,20 +6,20 @@ import java.net.URL
|
|||
|
||||
interface SeriesRepository {
|
||||
fun findAll(): Collection<Series>
|
||||
fun findByIdOrNull(seriesId: Long): Series?
|
||||
fun findByLibraryId(libraryId: Long): Collection<Series>
|
||||
fun findByLibraryIdAndUrlNotIn(libraryId: Long, urls: Collection<URL>): Collection<Series>
|
||||
fun findByLibraryIdAndUrl(libraryId: Long, url: URL): Series?
|
||||
fun findByIdOrNull(seriesId: String): Series?
|
||||
fun findByLibraryId(libraryId: String): Collection<Series>
|
||||
fun findByLibraryIdAndUrlNotIn(libraryId: String, urls: Collection<URL>): Collection<Series>
|
||||
fun findByLibraryIdAndUrl(libraryId: String, url: URL): Series?
|
||||
fun findAll(search: SeriesSearch): Collection<Series>
|
||||
|
||||
fun getLibraryId(seriesId: Long): Long?
|
||||
fun getLibraryId(seriesId: String): String?
|
||||
|
||||
fun insert(series: Series): Series
|
||||
fun insert(series: Series)
|
||||
fun update(series: Series)
|
||||
|
||||
fun delete(seriesId: Long)
|
||||
fun delete(seriesId: String)
|
||||
fun deleteAll()
|
||||
fun deleteAll(seriesIds: Collection<Long>)
|
||||
fun deleteAll(seriesIds: Collection<String>)
|
||||
|
||||
fun count(): Long
|
||||
}
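Across the repository interfaces above, entity identifiers switch from Long to String (book, series, library, user, collection and read-progress ids alike). The sketch below is purely illustrative of a client-generated string ID; the actual ID scheme used by these entities is not shown in this section.

```kotlin
import java.util.UUID

// Hypothetical helper, for illustration only: string IDs can be produced by the
// application before insert, instead of relying on a database sequence.
fun newEntityId(): String = UUID.randomUUID().toString().replace("-", "")

fun main() {
    println(newEntityId()) // e.g. "8f1c0b6a2d3e4f5a9b7c1d2e3f4a5b6c"
}
```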
@ -104,7 +104,7 @@ class BookLifecycle(
|
|||
}
|
||||
}
|
||||
|
||||
fun delete(bookId: Long) {
|
||||
fun deleteOne(bookId: String) {
|
||||
logger.info { "Delete book id: $bookId" }
|
||||
|
||||
readProgressRepository.deleteByBookId(bookId)
|
||||
|
|
@ -114,6 +114,16 @@ class BookLifecycle(
|
|||
bookRepository.delete(bookId)
|
||||
}
|
||||
|
||||
fun deleteMany(bookIds: Collection<String>) {
|
||||
logger.info { "Delete all books: $bookIds" }
|
||||
|
||||
readProgressRepository.deleteByBookIds(bookIds)
|
||||
mediaRepository.deleteByBookIds(bookIds)
|
||||
bookMetadataRepository.deleteByBookIds(bookIds)
|
||||
|
||||
bookRepository.deleteByBookIds(bookIds)
|
||||
}
|
||||
|
||||
fun markReadProgress(book: Book, user: KomgaUser, page: Int) {
|
||||
val media = mediaRepository.findById(book.id)
|
||||
require(page >= 1 && page <= media.pages.size) { "Page argument ($page) must be within 1 and book page count (${media.pages.size})" }
|
||||
|
|
@ -121,13 +131,13 @@ class BookLifecycle(
|
|||
readProgressRepository.save(ReadProgress(book.id, user.id, page, page == media.pages.size))
|
||||
}
|
||||
|
||||
fun markReadProgressCompleted(bookId: Long, user: KomgaUser) {
|
||||
fun markReadProgressCompleted(bookId: String, user: KomgaUser) {
|
||||
val media = mediaRepository.findById(bookId)
|
||||
|
||||
readProgressRepository.save(ReadProgress(bookId, user.id, media.pages.size, true))
|
||||
}
|
||||
|
||||
fun deleteReadProgress(bookId: Long, user: KomgaUser) {
|
||||
fun deleteReadProgress(bookId: String, user: KomgaUser) {
|
||||
readProgressRepository.delete(bookId, user.id)
|
||||
}
|
||||
}
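The `require` in `markReadProgress` keeps the page argument inside the 1..pageCount range. A standalone sketch of that check, assuming an illustrative 42-page book:

```kotlin
// Minimal reproduction of the bound check used by markReadProgress.
fun validatePage(page: Int, pageCount: Int) {
    require(page in 1..pageCount) { "Page argument ($page) must be within 1 and book page count ($pageCount)" }
}

fun main() {
    validatePage(10, 42)                                               // ok
    runCatching { validatePage(0, 42) }.onFailure { println(it.message) } // prints the error message
}
```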
@ -33,7 +33,7 @@ class KomgaUserLifecycle(
|
|||
userRepository.findByEmailIgnoreCase(user.username)?.let { komgaUser ->
|
||||
logger.info { "Changing password for user ${user.username}" }
|
||||
val updatedUser = komgaUser.copy(password = passwordEncoder.encode(newPassword))
|
||||
userRepository.save(updatedUser)
|
||||
userRepository.update(updatedUser)
|
||||
|
||||
if (expireSessions) expireSessions(updatedUser)
|
||||
|
||||
|
|
@ -47,7 +47,9 @@ class KomgaUserLifecycle(
|
|||
fun createUser(komgaUser: KomgaUser): KomgaUser {
|
||||
if (userRepository.existsByEmailIgnoreCase(komgaUser.email)) throw UserEmailAlreadyExistsException("A user with the same email already exists: ${komgaUser.email}")
|
||||
|
||||
val createdUser = userRepository.save(komgaUser.copy(password = passwordEncoder.encode(komgaUser.password)))
|
||||
userRepository.insert(komgaUser.copy(password = passwordEncoder.encode(komgaUser.password)))
|
||||
|
||||
val createdUser = userRepository.findByIdOrNull(komgaUser.id)!!
|
||||
logger.info { "User created: $createdUser" }
|
||||
return createdUser
|
||||
}
|
||||
|
|
@ -55,7 +57,7 @@ class KomgaUserLifecycle(
|
|||
fun deleteUser(user: KomgaUser) {
|
||||
logger.info { "Deleting user: $user" }
|
||||
readProgressRepository.deleteByUserId(user.id)
|
||||
userRepository.delete(user)
|
||||
userRepository.delete(user.id)
|
||||
expireSessions(user)
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -34,9 +34,10 @@ class LibraryLifecycle(
|
|||
val existing = libraryRepository.findAll()
|
||||
checkLibraryValidity(library, existing)
|
||||
|
||||
return libraryRepository.insert(library).also {
|
||||
taskReceiver.scanLibrary(it.id)
|
||||
}
|
||||
libraryRepository.insert(library)
|
||||
taskReceiver.scanLibrary(library.id)
|
||||
|
||||
return libraryRepository.findById(library.id)
|
||||
}
|
||||
|
||||
fun updateLibrary(toUpdate: Library) {
|
||||
|
|
@ -70,9 +71,8 @@ class LibraryLifecycle(
|
|||
fun deleteLibrary(library: Library) {
|
||||
logger.info { "Deleting library: $library" }
|
||||
|
||||
seriesRepository.findByLibraryId(library.id).forEach {
|
||||
seriesLifecycle.deleteSeries(it.id)
|
||||
}
|
||||
val seriesIds = seriesRepository.findByLibraryId(library.id).map { it.id }
|
||||
seriesLifecycle.deleteMany(seriesIds)
|
||||
|
||||
libraryRepository.delete(library.id)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import org.gotson.komga.domain.persistence.SeriesRepository
|
|||
import org.springframework.stereotype.Service
|
||||
import java.nio.file.Paths
|
||||
import java.time.temporal.ChronoUnit
|
||||
import kotlin.time.measureTime
|
||||
|
||||
private val logger = KotlinLogging.logger {}
|
||||
|
||||
|
|
@ -24,78 +25,81 @@ class LibraryScanner(
|
|||
|
||||
fun scanRootFolder(library: Library) {
|
||||
logger.info { "Updating library: $library" }
|
||||
val scannedSeries =
|
||||
fileSystemScanner.scanRootFolder(Paths.get(library.root.toURI()))
|
||||
.map { (series, books) ->
|
||||
series.copy(libraryId = library.id) to books.map { it.copy(libraryId = library.id) }
|
||||
}.toMap()
|
||||
measureTime {
|
||||
val scannedSeries =
|
||||
fileSystemScanner.scanRootFolder(Paths.get(library.root.toURI()))
|
||||
.map { (series, books) ->
|
||||
series.copy(libraryId = library.id) to books.map { it.copy(libraryId = library.id) }
|
||||
}.toMap()
|
||||
|
||||
// delete series that don't exist anymore
|
||||
if (scannedSeries.isEmpty()) {
|
||||
logger.info { "Scan returned no series, deleting all existing series" }
|
||||
seriesRepository.findByLibraryId(library.id).forEach {
|
||||
seriesLifecycle.deleteSeries(it.id)
|
||||
}
|
||||
} else {
|
||||
scannedSeries.keys.map { it.url }.let { urls ->
|
||||
seriesRepository.findByLibraryIdAndUrlNotIn(library.id, urls).forEach {
|
||||
logger.info { "Deleting series not on disk anymore: $it" }
|
||||
seriesLifecycle.deleteSeries(it.id)
|
||||
// delete series that don't exist anymore
|
||||
if (scannedSeries.isEmpty()) {
|
||||
logger.info { "Scan returned no series, deleting all existing series" }
|
||||
val seriesIds = seriesRepository.findByLibraryId(library.id).map { it.id }
|
||||
seriesLifecycle.deleteMany(seriesIds)
|
||||
} else {
|
||||
scannedSeries.keys.map { it.url }.let { urls ->
|
||||
val series = seriesRepository.findByLibraryIdAndUrlNotIn(library.id, urls)
|
||||
if (series.isNotEmpty()) {
|
||||
logger.info { "Deleting series not on disk anymore: $series" }
|
||||
seriesLifecycle.deleteMany(series.map { it.id })
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
scannedSeries.forEach { (newSeries, newBooks) ->
|
||||
val existingSeries = seriesRepository.findByLibraryIdAndUrl(library.id, newSeries.url)
|
||||
scannedSeries.forEach { (newSeries, newBooks) ->
|
||||
val existingSeries = seriesRepository.findByLibraryIdAndUrl(library.id, newSeries.url)
|
||||
|
||||
// if series does not exist, save it
|
||||
if (existingSeries == null) {
|
||||
logger.info { "Adding new series: $newSeries" }
|
||||
val createdSeries = seriesLifecycle.createSeries(newSeries)
|
||||
seriesLifecycle.addBooks(createdSeries, newBooks)
|
||||
seriesLifecycle.sortBooks(createdSeries)
|
||||
} else {
|
||||
// if series already exists, update it
|
||||
if (newSeries.fileLastModified.truncatedTo(ChronoUnit.MILLIS) != existingSeries.fileLastModified.truncatedTo(ChronoUnit.MILLIS)) {
|
||||
logger.info { "Series changed on disk, updating: $existingSeries" }
|
||||
existingSeries.fileLastModified = newSeries.fileLastModified
|
||||
// if series does not exist, save it
|
||||
if (existingSeries == null) {
|
||||
logger.info { "Adding new series: $newSeries" }
|
||||
val createdSeries = seriesLifecycle.createSeries(newSeries)
|
||||
seriesLifecycle.addBooks(createdSeries, newBooks)
|
||||
seriesLifecycle.sortBooks(createdSeries)
|
||||
} else {
|
||||
// if series already exists, update it
|
||||
if (newSeries.fileLastModified.truncatedTo(ChronoUnit.MILLIS) != existingSeries.fileLastModified.truncatedTo(ChronoUnit.MILLIS)) {
|
||||
logger.info { "Series changed on disk, updating: $existingSeries" }
|
||||
|
||||
seriesRepository.update(existingSeries)
|
||||
seriesRepository.update(existingSeries.copy(fileLastModified = newSeries.fileLastModified))
|
||||
|
||||
// update list of books with existing entities if they exist
|
||||
val existingBooks = bookRepository.findBySeriesId(existingSeries.id)
|
||||
// update list of books with existing entities if they exist
|
||||
val existingBooks = bookRepository.findBySeriesId(existingSeries.id)
|
||||
|
||||
// update existing books
|
||||
newBooks.forEach { newBook ->
|
||||
existingBooks.find { it.url == newBook.url }?.let { existingBook ->
|
||||
if (newBook.fileLastModified.truncatedTo(ChronoUnit.MILLIS) != existingBook.fileLastModified.truncatedTo(ChronoUnit.MILLIS)) {
|
||||
logger.info { "Book changed on disk, update and reset media status: $existingBook" }
|
||||
val updatedBook = existingBook.copy(
|
||||
fileLastModified = newBook.fileLastModified,
|
||||
fileSize = newBook.fileSize
|
||||
)
|
||||
mediaRepository.findById(existingBook.id).let {
|
||||
mediaRepository.update(it.copy(status = Media.Status.OUTDATED))
|
||||
// update existing books
|
||||
newBooks.forEach { newBook ->
|
||||
existingBooks.find { it.url == newBook.url }?.let { existingBook ->
|
||||
if (newBook.fileLastModified.truncatedTo(ChronoUnit.MILLIS) != existingBook.fileLastModified.truncatedTo(ChronoUnit.MILLIS)) {
|
||||
logger.info { "Book changed on disk, update and reset media status: $existingBook" }
|
||||
val updatedBook = existingBook.copy(
|
||||
fileLastModified = newBook.fileLastModified,
|
||||
fileSize = newBook.fileSize
|
||||
)
|
||||
mediaRepository.findById(existingBook.id).let {
|
||||
mediaRepository.update(it.copy(status = Media.Status.OUTDATED))
|
||||
}
|
||||
bookRepository.update(updatedBook)
|
||||
}
|
||||
bookRepository.update(updatedBook)
|
||||
}
|
||||
}
|
||||
|
||||
// remove books not present anymore
|
||||
val newBooksUrls = newBooks.map { it.url }
|
||||
existingBooks
|
||||
.filterNot { existingBook -> newBooksUrls.contains(existingBook.url) }
|
||||
.let { books -> bookLifecycle.deleteMany(books.map { it.id }) }
|
||||
|
||||
// add new books
|
||||
val existingBooksUrls = existingBooks.map { it.url }
|
||||
val booksToAdd = newBooks.filterNot { newBook -> existingBooksUrls.contains(newBook.url) }
|
||||
seriesLifecycle.addBooks(existingSeries, booksToAdd)
|
||||
|
||||
// sort all books
|
||||
seriesLifecycle.sortBooks(existingSeries)
|
||||
}
|
||||
|
||||
// remove books not present anymore
|
||||
existingBooks
|
||||
.filterNot { existingBook -> newBooks.map { it.url }.contains(existingBook.url) }
|
||||
.forEach { bookLifecycle.delete(it.id) }
|
||||
|
||||
// add new books
|
||||
val booksToAdd = newBooks.filterNot { newBook -> existingBooks.map { it.url }.contains(newBook.url) }
|
||||
seriesLifecycle.addBooks(existingSeries, booksToAdd)
|
||||
|
||||
// sort all books
|
||||
seriesLifecycle.sortBooks(existingSeries)
|
||||
}
|
||||
}
|
||||
}
|
||||
}.also { logger.info { "Library updated in $it" } }
|
||||
}
|
||||
|
||||
}
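The scan body is now wrapped in `measureTime`, so the total duration is logged once the whole library has been reconciled. A minimal sketch of the same timing pattern, with a sleep standing in for the scan work:

```kotlin
import kotlin.time.ExperimentalTime
import kotlin.time.measureTime

@OptIn(ExperimentalTime::class)
fun main() {
    measureTime {
        Thread.sleep(50) // stand-in for scanning and reconciling the library
    }.also { println("Library updated in $it") }
}
```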
@ -22,7 +22,9 @@ class SeriesCollectionLifecycle(
|
|||
if (collectionRepository.existsByName(collection.name))
|
||||
throw DuplicateNameException("Collection name already exists")
|
||||
|
||||
return collectionRepository.insert(collection)
|
||||
collectionRepository.insert(collection)
|
||||
|
||||
return collectionRepository.findByIdOrNull(collection.id)!!
|
||||
}
|
||||
|
||||
fun updateCollection(toUpdate: SeriesCollection) {
|
||||
|
|
@ -35,7 +37,7 @@ class SeriesCollectionLifecycle(
|
|||
collectionRepository.update(toUpdate)
|
||||
}
|
||||
|
||||
fun deleteCollection(collectionId: Long) {
|
||||
fun deleteCollection(collectionId: String) {
|
||||
collectionRepository.delete(collectionId)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ import org.gotson.komga.domain.persistence.SeriesCollectionRepository
|
|||
import org.gotson.komga.domain.persistence.SeriesMetadataRepository
|
||||
import org.gotson.komga.domain.persistence.SeriesRepository
|
||||
import org.springframework.stereotype.Service
|
||||
import java.util.Comparator
|
||||
import java.util.*
|
||||
|
||||
private val logger = KotlinLogging.logger {}
|
||||
private val natSortComparator: Comparator<String> = CaseInsensitiveSimpleNaturalComparator.getInstance()
|
||||
|
|
@ -33,21 +33,23 @@ class SeriesLifecycle(
|
|||
|
||||
fun sortBooks(series: Series) {
|
||||
val books = bookRepository.findBySeriesId(series.id)
|
||||
val metadatas = bookMetadataRepository.findByIds(books.map { it.id })
|
||||
|
||||
val sorted = books.sortedWith(compareBy(natSortComparator) { it.name })
|
||||
sorted.forEachIndexed { index, book ->
|
||||
val number = index + 1
|
||||
bookRepository.update(book.copy(number = number))
|
||||
val sorted = books
|
||||
.sortedWith(compareBy(natSortComparator) { it.name })
|
||||
.map { book -> book to metadatas.first { it.bookId == book.id } }
|
||||
|
||||
bookMetadataRepository.findById(book.id).let { metadata ->
|
||||
val renumbered = metadata.copy(
|
||||
number = if (!metadata.numberLock) number.toString() else metadata.number,
|
||||
numberSort = if (!metadata.numberSortLock) number.toFloat() else metadata.numberSort
|
||||
)
|
||||
if (!metadata.numberLock || !metadata.numberSortLock)
|
||||
bookMetadataRepository.update(renumbered)
|
||||
}
|
||||
}
|
||||
bookRepository.updateMany(
|
||||
sorted.mapIndexed { index, (book, _) -> book.copy(number = index + 1) }
|
||||
)
|
||||
|
||||
sorted.mapIndexedNotNull { index, (_, metadata) ->
|
||||
if (metadata.numberLock && metadata.numberSortLock) null
|
||||
else metadata.copy(
|
||||
number = if (!metadata.numberLock) (index + 1).toString() else metadata.number,
|
||||
numberSort = if (!metadata.numberSortLock) (index + 1).toFloat() else metadata.numberSort
|
||||
)
|
||||
}.let { bookMetadataRepository.updateMany(it) }
|
||||
}
|
||||
|
||||
fun addBooks(series: Series, booksToAdd: Collection<Book>) {
|
||||
|
|
@ -55,45 +57,57 @@ class SeriesLifecycle(
|
|||
check(it.libraryId == series.libraryId) { "Cannot add book to series if they don't share the same libraryId" }
|
||||
}
|
||||
|
||||
booksToAdd.forEach { book ->
|
||||
val createdBook = bookRepository.insert(book.copy(seriesId = series.id))
|
||||
bookRepository.insertMany(
|
||||
booksToAdd.map { it.copy(seriesId = series.id) }
|
||||
)
|
||||
|
||||
// create associated media
|
||||
mediaRepository.insert(Media(bookId = createdBook.id))
|
||||
// create associated media
|
||||
mediaRepository.insertMany(booksToAdd.map { Media(bookId = it.id) })
|
||||
|
||||
// create associated metadata
|
||||
bookMetadataRepository.insert(BookMetadata(
|
||||
title = createdBook.name,
|
||||
number = createdBook.number.toString(),
|
||||
numberSort = createdBook.number.toFloat(),
|
||||
bookId = createdBook.id
|
||||
))
|
||||
}
|
||||
// create associated metadata
|
||||
booksToAdd.map {
|
||||
BookMetadata(
|
||||
title = it.name,
|
||||
number = it.number.toString(),
|
||||
numberSort = it.number.toFloat(),
|
||||
bookId = it.id
|
||||
)
|
||||
}.let { bookMetadataRepository.insertMany(it) }
|
||||
}
|
||||
|
||||
fun createSeries(series: Series): Series {
|
||||
val createdSeries = seriesRepository.insert(series)
|
||||
seriesRepository.insert(series)
|
||||
|
||||
seriesMetadataRepository.insert(
|
||||
SeriesMetadata(
|
||||
title = createdSeries.name,
|
||||
titleSort = StringUtils.stripAccents(createdSeries.name),
|
||||
seriesId = createdSeries.id
|
||||
title = series.name,
|
||||
titleSort = StringUtils.stripAccents(series.name),
|
||||
seriesId = series.id
|
||||
)
|
||||
)
|
||||
|
||||
return createdSeries
|
||||
return seriesRepository.findByIdOrNull(series.id)!!
|
||||
}
|
||||
|
||||
fun deleteSeries(seriesId: Long) {
|
||||
fun deleteOne(seriesId: String) {
|
||||
logger.info { "Delete series id: $seriesId" }
|
||||
|
||||
bookRepository.findBySeriesId(seriesId).forEach {
|
||||
bookLifecycle.delete(it.id)
|
||||
}
|
||||
val bookIds = bookRepository.findAllIdBySeriesId(seriesId)
|
||||
bookLifecycle.deleteMany(bookIds)
|
||||
|
||||
collectionRepository.removeSeriesFromAll(seriesId)
|
||||
|
||||
seriesRepository.delete(seriesId)
|
||||
}
|
||||
|
||||
fun deleteMany(seriesIds: Collection<String>) {
|
||||
logger.info { "Delete series ids: $seriesIds" }
|
||||
|
||||
val bookIds = bookRepository.findAllIdBySeriesIds(seriesIds)
|
||||
bookLifecycle.deleteMany(bookIds)
|
||||
|
||||
collectionRepository.removeSeriesFromAll(seriesIds)
|
||||
|
||||
seriesRepository.deleteAll(seriesIds)
|
||||
}
|
||||
}
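`sortBooks` now renumbers in two bulk passes: books receive their position through `updateMany`, and metadata is renumbered only where neither lock is set. The self-contained sketch below reproduces that lock-aware renumbering rule on plain data classes (the `Meta` type is an illustrative stand-in for `BookMetadata`):

```kotlin
data class Meta(
    val bookId: String,
    val number: String,
    val numberSort: Float,
    val numberLock: Boolean = false,
    val numberSortLock: Boolean = false
)

// Locked fields are preserved; unlocked ones are overwritten with the 1-based
// position in the sorted list. Fully locked entries produce no update at all.
fun renumber(sorted: List<Meta>): List<Meta> =
    sorted.mapIndexedNotNull { index, metadata ->
        if (metadata.numberLock && metadata.numberSortLock) null
        else metadata.copy(
            number = if (!metadata.numberLock) (index + 1).toString() else metadata.number,
            numberSort = if (!metadata.numberSortLock) (index + 1).toFloat() else metadata.numberSort
        )
    }

fun main() {
    val metas = listOf(
        Meta("a", number = "5", numberSort = 5f),
        Meta("b", number = "9", numberSort = 9f, numberLock = true, numberSortLock = true)
    )
    println(renumber(metas)) // only "a" is renumbered to 1 / 1.0
}
```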
@ -20,20 +20,20 @@ class KomgaProperties {
|
|||
|
||||
var rememberMe = RememberMe()
|
||||
|
||||
var database = Database()
|
||||
|
||||
class RememberMe {
|
||||
@NotBlank
|
||||
@get:NotBlank
|
||||
var key: String? = null
|
||||
|
||||
@Positive
|
||||
@get:Positive
|
||||
var validity: Int = 1209600 // 2 weeks
|
||||
}
|
||||
|
||||
var databaseBackup = DatabaseBackup()
|
||||
class Database {
|
||||
@get:NotBlank
|
||||
var file: String = ""
|
||||
|
||||
class DatabaseBackup {
|
||||
var enabled: Boolean = true
|
||||
var path: String = ""
|
||||
var schedule: String = ""
|
||||
var startup: Boolean = true
|
||||
var batchSize: Int = 500
|
||||
}
|
||||
}
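Besides the new `Database` block (which backs the `komga.database.file` key) and the removal of `DatabaseBackup`, the validation annotations move from `@NotBlank`/`@Positive` to `@get:NotBlank`/`@get:Positive`. The sketch below shows what the `@get:` use-site target does, using a throwaway annotation so it runs without a validation framework on the classpath:

```kotlin
@Target(AnnotationTarget.FIELD, AnnotationTarget.PROPERTY_GETTER)
@Retention(AnnotationRetention.RUNTIME)
annotation class Checked

class Example {
    @get:Checked            // placed on the generated getter, which is where
    var key: String? = null // getter-based validators look for constraints
}

fun main() {
    // the annotation is visible on getKey(), not only on the backing field
    println(Example::class.java.getMethod("getKey").annotations.toList())
}
```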
@@ -0,0 +1,38 @@
package org.gotson.komga.infrastructure.datasource

import com.zaxxer.hikari.HikariDataSource
import org.gotson.komga.infrastructure.configuration.KomgaProperties
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties
import org.springframework.boot.context.properties.ConfigurationProperties
import org.springframework.boot.jdbc.DataSourceBuilder
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
import org.springframework.context.annotation.Primary
import javax.sql.DataSource

@Configuration
class DataSourcesConfiguration(
  private val komgaProperties: KomgaProperties
) {

  @Bean("sqliteDataSource")
  @Primary
  fun sqliteDataSource(): DataSource =
    (DataSourceBuilder.create()
      .apply {
        driverClassName("org.sqlite.JDBC")
        url("jdbc:sqlite:${komgaProperties.database.file}?foreign_keys=on;")
      }.type(HikariDataSource::class.java)
      .build() as HikariDataSource)
      .apply { maximumPoolSize = 1 }

  @Bean
  @Primary
  @ConfigurationProperties(prefix = "spring.datasource")
  fun h2DataSourceProperties() = DataSourceProperties()

  @Bean("h2DataSource")
  fun h2DataSource(): DataSource =
    h2DataSourceProperties().initializeDataSourceBuilder().type(HikariDataSource::class.java).build()

}
|
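The SQLite JDBC URL above enables foreign keys and is built from the `komga.database.file` property bound onto `KomgaProperties.Database`; the Hikari pool is capped at a single connection, presumably to avoid concurrent writers on the one SQLite file. A tiny sketch of the URL assembly (the path is illustrative):

```kotlin
// Minimal sketch of how the JDBC URL is assembled from komga.database.file.
fun sqliteJdbcUrl(databaseFile: String): String =
    "jdbc:sqlite:$databaseFile?foreign_keys=on;"

fun main() {
    println(sqliteJdbcUrl("/config/database.sqlite"))
    // jdbc:sqlite:/config/database.sqlite?foreign_keys=on;
}
```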
|
@ -0,0 +1,247 @@
|
|||
package org.gotson.komga.infrastructure.datasource
|
||||
|
||||
import mu.KotlinLogging
|
||||
import org.flywaydb.core.Flyway
|
||||
import org.flywaydb.core.api.configuration.FluentConfiguration
|
||||
import org.gotson.komga.infrastructure.configuration.KomgaProperties
|
||||
import org.springframework.beans.factory.BeanInitializationException
|
||||
import org.springframework.beans.factory.annotation.Qualifier
|
||||
import org.springframework.beans.factory.annotation.Value
|
||||
import org.springframework.context.annotation.Profile
|
||||
import org.springframework.jdbc.core.JdbcTemplate
|
||||
import org.springframework.jdbc.support.JdbcUtils
|
||||
import org.springframework.jms.config.JmsListenerEndpointRegistry
|
||||
import org.springframework.stereotype.Component
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.nio.file.Paths
|
||||
import java.sql.PreparedStatement
|
||||
import java.sql.ResultSet
|
||||
import java.sql.ResultSetMetaData
|
||||
import java.sql.Types
|
||||
import java.time.LocalDateTime
|
||||
import java.time.format.DateTimeFormatter
|
||||
import javax.annotation.PostConstruct
|
||||
import javax.sql.DataSource
|
||||
import kotlin.time.measureTime
|
||||
|
||||
private val logger = KotlinLogging.logger {}
|
||||
|
||||
@Component
|
||||
@Profile("!test")
|
||||
class DatabaseMigration(
|
||||
@Qualifier("h2DataSource") private val h2DataSource: DataSource,
|
||||
@Qualifier("sqliteDataSource") private val sqliteDataSource: DataSource,
|
||||
private val jmsListenerEndpointRegistry: JmsListenerEndpointRegistry,
|
||||
@Value("\${spring.datasource.url}") private val h2Url: String,
|
||||
private val komgaProperties: KomgaProperties
|
||||
) {
|
||||
|
||||
// tables in order of creation, to ensure there is no missing foreign key
|
||||
private val tables = listOf(
|
||||
"LIBRARY",
|
||||
"USER",
|
||||
"USER_LIBRARY_SHARING",
|
||||
"SERIES",
|
||||
"SERIES_METADATA",
|
||||
"BOOK",
|
||||
"MEDIA",
|
||||
"MEDIA_PAGE",
|
||||
"MEDIA_FILE",
|
||||
"BOOK_METADATA",
|
||||
"BOOK_METADATA_AUTHOR",
|
||||
"READ_PROGRESS",
|
||||
"COLLECTION",
|
||||
"COLLECTION_SERIES"
|
||||
)
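The table list is ordered so that every table is copied after the tables it references, which matters because the destination connection runs with `foreign_keys=on`. The self-contained check below illustrates that invariant on a small, illustrative subset of the schema:

```kotlin
fun main() {
    // Copy order (illustrative subset) and the tables each one references.
    val order = listOf("LIBRARY", "USER", "USER_LIBRARY_SHARING", "SERIES", "BOOK", "READ_PROGRESS")
    val references = mapOf(
        "USER_LIBRARY_SHARING" to setOf("USER", "LIBRARY"),
        "SERIES" to setOf("LIBRARY"),
        "BOOK" to setOf("SERIES", "LIBRARY"),
        "READ_PROGRESS" to setOf("BOOK", "USER")
    )
    order.forEachIndexed { index, table ->
        val alreadyCopied = order.take(index).toSet()
        check(references[table].orEmpty().all { it in alreadyCopied }) {
            "$table would be copied before one of its parent tables"
        }
    }
    println("Copy order respects every foreign key in the subset")
}
```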
|
||||
|
||||
lateinit var h2MigratedFilePath: Path
|
||||
lateinit var sqlitePath: Path
|
||||
|
||||
@PostConstruct
|
||||
fun init() {
|
||||
try {
|
||||
logger.info { "Initiating database migration from H2 to SQLite" }
|
||||
|
||||
logger.info { "H2 url: $h2Url" }
|
||||
var h2Filename = extractH2Path(h2Url)?.plus(".mv.db")
|
||||
if (h2Filename == null) {
|
||||
logger.warn { "The H2 URL ($h2Url) does not refer to a file database, skipping migration" }
|
||||
return
|
||||
}
|
||||
|
||||
val h2Path = convertHomeDir(h2Filename)
|
||||
h2Filename = h2Path.toString()
|
||||
logger.info { "H2 database file: $h2Filename" }
|
||||
|
||||
if (Files.notExists(h2Path)) {
|
||||
logger.warn { "The H2 database file does not exists: $h2Path, skipping migration" }
|
||||
return
|
||||
}
|
||||
|
||||
h2MigratedFilePath = Paths.get("$h2Filename.migrated")
|
||||
if (Files.exists(h2MigratedFilePath)) {
|
||||
logger.info { "The H2 database has already been migrated, skipping migration" }
|
||||
return
|
||||
}
|
||||
|
||||
h2Backup(h2Filename)
|
||||
|
||||
// make sure H2 database is at the latest migration
|
||||
flywayMigrateH2()
|
||||
|
||||
sqlitePath = convertHomeDir(komgaProperties.database.file)
|
||||
// flyway Migrate must perform exactly one migration (target of one)
|
||||
// if it performs 0, the database has already been migrated and probably has data in it
|
||||
// it should never perform more than one with a target of 1 migration
|
||||
if (flywayMigrateSqlite() != 1)
|
||||
throw BeanInitializationException("The SQLite database ($sqlitePath) is not newly minted")
|
||||
|
||||
logger.info { "Stopping all JMS listeners" }
|
||||
jmsListenerEndpointRegistry.stop()
|
||||
|
||||
var rows: Int
|
||||
measureTime {
|
||||
rows = transferH2DataToSqlite()
|
||||
}.also {
|
||||
val insertsPerSecond = rows / it.inSeconds
|
||||
logger.info { "Migration performed in $it ($rows rows). $insertsPerSecond inserts per second." }
|
||||
}
|
||||
|
||||
logger.info { "Creating H2 migrated file: $h2MigratedFilePath" }
|
||||
Files.createFile(h2MigratedFilePath)
|
||||
|
||||
logger.info { "Starting all JMS listeners" }
|
||||
jmsListenerEndpointRegistry.start()
|
||||
|
||||
logger.info { "Migration finished" }
|
||||
|
||||
} catch (e: Exception) {
|
||||
logger.error(e) { "Migration failed" }
|
||||
|
||||
if (this::sqlitePath.isInitialized) {
|
||||
logger.info { "Deleting Sqlite database if exists" }
|
||||
Files.deleteIfExists(sqlitePath)
|
||||
}
|
||||
|
||||
if (this::h2MigratedFilePath.isInitialized) {
|
||||
logger.info { "Deleting H2 migrated file if exists" }
|
||||
Files.deleteIfExists(h2MigratedFilePath)
|
||||
}
|
||||
|
||||
throw BeanInitializationException("Migration failed")
|
||||
}
|
||||
}
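The `flywayMigrateSqlite() != 1` guard is the "newly minted" check: with a fixed target, exactly one applied migration means the SQLite file was empty, while zero means it already holds data and the transfer must not run. A standalone sketch of that decision (the function name is illustrative; `appliedMigrations` stands for the count returned by `migrate()` in the Flyway version this commit uses):

```kotlin
fun assertFreshSqliteDatabase(appliedMigrations: Int) {
    check(appliedMigrations == 1) { "The SQLite database is not newly minted" }
}

fun main() {
    assertFreshSqliteDatabase(1)                                           // fresh file: transfer proceeds
    runCatching { assertFreshSqliteDatabase(0) }.onFailure { println(it.message) } // already migrated: abort
}
```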
|
||||
|
||||
private fun flywayMigrateSqlite(): Int {
|
||||
logger.info { "Initialize SQLite database with initial migration: 20200706141854" }
|
||||
return Flyway(FluentConfiguration()
|
||||
.dataSource(sqliteDataSource)
|
||||
.locations("classpath:db/migration/sqlite")
|
||||
.target("20200706141854")
|
||||
).migrate()
|
||||
}
|
||||
|
||||
private fun flywayMigrateH2(): Int {
|
||||
logger.info { "Migrating H2 database to the latest migration" }
|
||||
return Flyway(FluentConfiguration()
|
||||
.dataSource(h2DataSource)
|
||||
.locations("classpath:db/migration/h2")
|
||||
).migrate()
|
||||
}
|
||||
|
||||
private fun h2Backup(h2Filename: String) {
|
||||
val jdbcTemplate = JdbcTemplate(h2DataSource)
|
||||
val timestamp = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd.HH-mm-ss"))
|
||||
val backup = "$h2Filename.backup.$timestamp.zip"
|
||||
logger.info { "Perform a specific backup of the H2 database to: $backup" }
|
||||
jdbcTemplate.execute("BACKUP TO '$backup'")
|
||||
logger.info { "Backup finished" }
|
||||
}
|
||||
|
||||
private fun transferH2DataToSqlite(): Int {
|
||||
val maxBatchSize = komgaProperties.database.batchSize
|
||||
|
||||
val sourceConnection = h2DataSource.connection
|
||||
val destinationConnection = sqliteDataSource.connection
|
||||
var resultSet: ResultSet? = null
|
||||
var selectStatement: PreparedStatement? = null
|
||||
var insertStatement: PreparedStatement? = null
|
||||
|
||||
var totalRows = 0
|
||||
|
||||
destinationConnection.autoCommit = false
|
||||
destinationConnection.transactionIsolation = 1
|
||||
|
||||
try {
|
||||
tables.forEach { table ->
|
||||
logger.info { "Migrate table: $table" }
|
||||
selectStatement = sourceConnection.prepareStatement("select * from $table")
|
||||
resultSet = selectStatement!!.executeQuery()
|
||||
insertStatement = destinationConnection.prepareStatement(createInsert(resultSet!!.metaData, table))
|
||||
|
||||
var batchSize = 0
|
||||
var batchCount = 1
|
||||
while (resultSet!!.next()) {
|
||||
for (i in 1..resultSet!!.metaData.columnCount) {
|
||||
if (resultSet!!.metaData.getColumnType(i) == Types.BLOB) {
|
||||
val blob = resultSet!!.getBlob(i)
|
||||
val byteArray = blob?.binaryStream?.readBytes()
|
||||
insertStatement!!.setObject(i, byteArray)
|
||||
} else
|
||||
insertStatement!!.setObject(i, resultSet!!.getObject(i))
|
||||
}
|
||||
insertStatement!!.addBatch()
|
||||
batchSize++
|
||||
totalRows++
|
||||
|
||||
if (batchSize >= maxBatchSize) {
|
||||
insertStatement!!.executeBatch()
|
||||
logger.info { "Insert batch #$batchCount ($batchSize rows)" }
|
||||
batchSize = 0
|
||||
batchCount++
|
||||
}
|
||||
}
|
||||
insertStatement!!.executeBatch()
|
||||
logger.info { "Insert batch #$batchCount ($batchSize rows)" }
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
destinationConnection.rollback()
|
||||
throw e
|
||||
} finally {
|
||||
destinationConnection.commit()
|
||||
JdbcUtils.closeResultSet(resultSet)
|
||||
JdbcUtils.closeStatement(selectStatement)
|
||||
JdbcUtils.closeStatement(insertStatement)
|
||||
JdbcUtils.closeConnection(sourceConnection)
|
||||
JdbcUtils.closeConnection(destinationConnection)
|
||||
}
|
||||
|
||||
return totalRows
|
||||
}
|
||||
|
||||
private fun createInsert(metadata: ResultSetMetaData, table: String): String {
|
||||
val columns = (1..metadata.columnCount).map { metadata.getColumnName(it) }
|
||||
val quids = MutableList(columns.size) { "?" }
|
||||
|
||||
return "insert into $table (${columns.joinToString()}) values (${quids.joinToString()})"
|
||||
}
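`createInsert` derives a parameterised INSERT from the source table's metadata. The sketch below shows the generated SQL for a hypothetical three-column table, replacing `ResultSetMetaData` with a plain column list for the sake of a runnable example:

```kotlin
fun createInsertDemo(columns: List<String>, table: String): String {
    val placeholders = List(columns.size) { "?" }
    return "insert into $table (${columns.joinToString()}) values (${placeholders.joinToString()})"
}

fun main() {
    println(createInsertDemo(listOf("ID", "NAME", "URL"), "BOOK"))
    // insert into BOOK (ID, NAME, URL) values (?, ?, ?)
}
```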
|
||||
|
||||
}
|
||||
|
||||
val excludeH2Url = listOf(":mem:", ":ssl:", ":tcp:", ":zip:")
|
||||
|
||||
fun extractH2Path(url: String): String? {
|
||||
if (!url.startsWith("jdbc:h2:")) return null
|
||||
if (excludeH2Url.any { url.contains(it, ignoreCase = true) }) return null
|
||||
return url.split(":").last().split(";").first()
|
||||
}
|
||||
|
||||
fun convertHomeDir(path: String): Path {
|
||||
val aPath = Paths.get(path)
|
||||
val components = aPath.toList()
|
||||
|
||||
return if (components.first().toString() == "~") {
|
||||
Paths.get(System.getProperty("user.home"), *components.drop(1).map { it.toString() }.toTypedArray())
|
||||
} else aPath
|
||||
}
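`extractH2Path` only yields a path for file-backed H2 URLs, which is what gates the whole migration. The demo below duplicates its logic so the expected results can be shown for two illustrative URLs:

```kotlin
private val excludedH2Urls = listOf(":mem:", ":ssl:", ":tcp:", ":zip:")

fun extractH2PathDemo(url: String): String? {
    if (!url.startsWith("jdbc:h2:")) return null
    if (excludedH2Urls.any { url.contains(it, ignoreCase = true) }) return null
    return url.split(":").last().split(";").first()
}

fun main() {
    println(extractH2PathDemo("jdbc:h2:~/.komga/database.h2;DB_CLOSE_DELAY=-1")) // ~/.komga/database.h2
    println(extractH2PathDemo("jdbc:h2:mem:testdb"))                             // null: in-memory is skipped
}
```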
|
||||
|
|
@@ -1,28 +0,0 @@
package org.gotson.komga.infrastructure.h2

import mu.KotlinLogging
import org.gotson.komga.infrastructure.configuration.KomgaProperties
import org.springframework.jdbc.core.JdbcTemplate
import org.springframework.stereotype.Component
import java.nio.file.Files
import java.nio.file.Paths

private val logger = KotlinLogging.logger {}

@Component
class DatabaseBackuper(
  private val jdbcTemplate: JdbcTemplate,
  private val komgaProperties: KomgaProperties
) {

  fun backupDatabase() {
    val path = Paths.get(komgaProperties.databaseBackup.path)

    Files.deleteIfExists(path)

    val command = "BACKUP TO '$path'"

    logger.info { "Executing command: $command" }
    jdbcTemplate.execute(command)
  }
}
|
|
@ -3,7 +3,6 @@ package org.gotson.komga.infrastructure.jooq
|
|||
import org.gotson.komga.domain.model.Book
|
||||
import org.gotson.komga.domain.model.BookSearch
|
||||
import org.gotson.komga.domain.persistence.BookRepository
|
||||
import org.gotson.komga.jooq.Sequences
|
||||
import org.gotson.komga.jooq.Tables
|
||||
import org.gotson.komga.jooq.tables.records.BookRecord
|
||||
import org.jooq.Condition
|
||||
|
|
@ -12,6 +11,7 @@ import org.jooq.impl.DSL
|
|||
import org.springframework.stereotype.Component
|
||||
import java.net.URL
|
||||
import java.time.LocalDateTime
|
||||
import java.time.ZoneId
|
||||
|
||||
@Component
|
||||
class BookDao(
|
||||
|
|
@ -21,15 +21,17 @@ class BookDao(
|
|||
private val b = Tables.BOOK
|
||||
private val m = Tables.MEDIA
|
||||
private val d = Tables.BOOK_METADATA
|
||||
private val a = Tables.BOOK_METADATA_AUTHOR
|
||||
|
||||
override fun findByIdOrNull(bookId: Long): Book? =
|
||||
override fun findByIdOrNull(bookId: String): Book? =
|
||||
findByIdOrNull(dsl, bookId)
|
||||
|
||||
private fun findByIdOrNull(dsl: DSLContext, bookId: String): Book? =
|
||||
dsl.selectFrom(b)
|
||||
.where(b.ID.eq(bookId))
|
||||
.fetchOneInto(b)
|
||||
?.toDomain()
|
||||
|
||||
override fun findBySeriesId(seriesId: Long): Collection<Book> =
|
||||
override fun findBySeriesId(seriesId: String): Collection<Book> =
|
||||
dsl.selectFrom(b)
|
||||
.where(b.SERIES_ID.eq(seriesId))
|
||||
.fetchInto(b)
|
||||
|
|
@ -51,34 +53,40 @@ class BookDao(
|
|||
.map { it.toDomain() }
|
||||
|
||||
|
||||
override fun getLibraryId(bookId: Long): Long? =
|
||||
override fun getLibraryId(bookId: String): String? =
|
||||
dsl.select(b.LIBRARY_ID)
|
||||
.from(b)
|
||||
.where(b.ID.eq(bookId))
|
||||
.fetchOne(0, Long::class.java)
|
||||
.fetchOne(0, String::class.java)
|
||||
|
||||
override fun findFirstIdInSeries(seriesId: Long): Long? =
|
||||
override fun findFirstIdInSeries(seriesId: String): String? =
|
||||
dsl.select(b.ID)
|
||||
.from(b)
|
||||
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
|
||||
.where(b.SERIES_ID.eq(seriesId))
|
||||
.orderBy(d.NUMBER_SORT)
|
||||
.limit(1)
|
||||
.fetchOne(0, Long::class.java)
|
||||
.fetchOne(0, String::class.java)
|
||||
|
||||
override fun findAllIdBySeriesId(seriesId: Long): Collection<Long> =
|
||||
override fun findAllIdBySeriesId(seriesId: String): Collection<String> =
|
||||
dsl.select(b.ID)
|
||||
.from(b)
|
||||
.where(b.SERIES_ID.eq(seriesId))
|
||||
.fetch(0, Long::class.java)
|
||||
.fetch(0, String::class.java)
|
||||
|
||||
override fun findAllIdByLibraryId(libraryId: Long): Collection<Long> =
|
||||
override fun findAllIdBySeriesIds(seriesIds: Collection<String>): Collection<String> =
|
||||
dsl.select(b.ID)
|
||||
.from(b)
|
||||
.where(b.SERIES_ID.`in`(seriesIds))
|
||||
.fetch(0, String::class.java)
|
||||
|
||||
override fun findAllIdByLibraryId(libraryId: String): Collection<String> =
|
||||
dsl.select(b.ID)
|
||||
.from(b)
|
||||
.where(b.LIBRARY_ID.eq(libraryId))
|
||||
.fetch(0, Long::class.java)
|
||||
.fetch(0, String::class.java)
|
||||
|
||||
override fun findAllId(bookSearch: BookSearch): Collection<Long> {
|
||||
override fun findAllId(bookSearch: BookSearch): Collection<String> {
|
||||
val conditions = bookSearch.toCondition()
|
||||
|
||||
return dsl.select(b.ID)
|
||||
|
|
@ -86,27 +94,58 @@ class BookDao(
|
|||
.leftJoin(m).on(b.ID.eq(m.BOOK_ID))
|
||||
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
|
||||
.where(conditions)
|
||||
.fetch(0, Long::class.java)
|
||||
.fetch(0, String::class.java)
|
||||
}
|
||||
|
||||
|
||||
override fun insert(book: Book): Book {
|
||||
val id = dsl.nextval(Sequences.HIBERNATE_SEQUENCE)
|
||||
dsl.insertInto(b)
|
||||
.set(b.ID, id)
|
||||
.set(b.NAME, book.name)
|
||||
.set(b.URL, book.url.toString())
|
||||
.set(b.NUMBER, book.number)
|
||||
.set(b.FILE_LAST_MODIFIED, book.fileLastModified)
|
||||
.set(b.FILE_SIZE, book.fileSize)
|
||||
.set(b.LIBRARY_ID, book.libraryId)
|
||||
.set(b.SERIES_ID, book.seriesId)
|
||||
.execute()
|
||||
override fun insert(book: Book) {
|
||||
insertMany(listOf(book))
|
||||
}
|
||||
|
||||
return findByIdOrNull(id)!!
|
||||
override fun insertMany(books: Collection<Book>) {
|
||||
if (books.isNotEmpty()) {
|
||||
dsl.transaction { config ->
|
||||
config.dsl().batch(
|
||||
config.dsl().insertInto(
|
||||
b,
|
||||
b.ID,
|
||||
b.NAME,
|
||||
b.URL,
|
||||
b.NUMBER,
|
||||
b.FILE_LAST_MODIFIED,
|
||||
b.FILE_SIZE,
|
||||
b.LIBRARY_ID,
|
||||
b.SERIES_ID
|
||||
).values(null as String?, null, null, null, null, null, null, null)
|
||||
).also { step ->
|
||||
books.forEach {
|
||||
step.bind(
|
||||
it.id,
|
||||
it.name,
|
||||
it.url,
|
||||
it.number,
|
||||
it.fileLastModified,
|
||||
it.fileSize,
|
||||
it.libraryId,
|
||||
it.seriesId
|
||||
)
|
||||
}
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
}
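`insertMany` replaces the one-row-at-a-time insert (which also relied on the H2 `HIBERNATE_SEQUENCE` for ids) with a single jOOQ batch: one statement prepared with null placeholders, then one `bind` per book. The plain-JDBC sketch below shows the equivalent pattern with two illustrative columns:

```kotlin
import java.sql.Connection

// One prepared statement, one bind per row, one executeBatch round trip.
fun insertBooksBatch(conn: Connection, books: List<Pair<String, String>>) { // (id, name) pairs, illustrative
    conn.prepareStatement("insert into BOOK (ID, NAME) values (?, ?)").use { ps ->
        books.forEach { (id, name) ->
            ps.setString(1, id)
            ps.setString(2, name)
            ps.addBatch()
        }
        ps.executeBatch()
    }
}
```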
|
||||
|
||||
override fun update(book: Book) {
|
||||
update(dsl, book)
|
||||
}
|
||||
|
||||
override fun updateMany(books: Collection<Book>) {
|
||||
dsl.transaction { config ->
|
||||
books.map { update(config.dsl(), it) }
|
||||
}
|
||||
}
|
||||
|
||||
private fun update(dsl: DSLContext, book: Book) {
|
||||
dsl.update(b)
|
||||
.set(b.NAME, book.name)
|
||||
.set(b.URL, book.url.toString())
|
||||
|
|
@ -115,24 +154,22 @@ class BookDao(
|
|||
.set(b.FILE_SIZE, book.fileSize)
|
||||
.set(b.LIBRARY_ID, book.libraryId)
|
||||
.set(b.SERIES_ID, book.seriesId)
|
||||
.set(b.LAST_MODIFIED_DATE, LocalDateTime.now())
|
||||
.set(b.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z")))
|
||||
.where(b.ID.eq(book.id))
|
||||
.execute()
|
||||
}
|
||||
|
||||
override fun delete(bookId: Long) {
|
||||
override fun delete(bookId: String) {
|
||||
dsl.transaction { config ->
|
||||
with(config.dsl())
|
||||
{
|
||||
with(config.dsl()) {
|
||||
deleteFrom(b).where(b.ID.eq(bookId)).execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun deleteAll(bookIds: List<Long>) {
|
||||
override fun deleteByBookIds(bookIds: Collection<String>) {
|
||||
dsl.transaction { config ->
|
||||
with(config.dsl())
|
||||
{
|
||||
with(config.dsl()) {
|
||||
deleteFrom(b).where(b.ID.`in`(bookIds)).execute()
|
||||
}
|
||||
}
|
||||
|
|
@ -140,8 +177,7 @@ class BookDao(
|
|||
|
||||
override fun deleteAll() {
|
||||
dsl.transaction { config ->
|
||||
with(config.dsl())
|
||||
{
|
||||
with(config.dsl()) {
|
||||
deleteFrom(b).execute()
|
||||
}
|
||||
}
|
||||
|
|
@ -170,8 +206,8 @@ class BookDao(
|
|||
id = id,
|
||||
libraryId = libraryId,
|
||||
seriesId = seriesId,
|
||||
createdDate = createdDate,
|
||||
lastModifiedDate = lastModifiedDate,
|
||||
createdDate = createdDate.toCurrentTimeZone(),
|
||||
lastModifiedDate = lastModifiedDate.toCurrentTimeZone(),
|
||||
number = number
|
||||
)
|
||||
}
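Timestamps are now written with `LocalDateTime.now(ZoneId.of("Z"))` and read back through `toCurrentTimeZone()`, i.e. stored as UTC wall-clock values and shifted to the server zone when mapped to the domain. `toCurrentTimeZone` is not defined in this section, so the version below is a hedged stand-in consistent with that reading:

```kotlin
import java.time.LocalDateTime
import java.time.ZoneId
import java.time.ZoneOffset

// Stand-in for the extension used by the DAOs: interpret the stored value as UTC
// and convert it to the JVM's current time zone.
fun LocalDateTime.toCurrentTimeZone(): LocalDateTime =
    atZone(ZoneOffset.UTC).withZoneSameInstant(ZoneId.systemDefault()).toLocalDateTime()

fun main() {
    val storedUtc = LocalDateTime.now(ZoneId.of("Z")) // how the DAO writes LAST_MODIFIED_DATE
    println(storedUtc.toCurrentTimeZone())            // what toDomain() hands back to callers
}
```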
@ -18,6 +18,7 @@ import org.jooq.DSLContext
|
|||
import org.jooq.Record
|
||||
import org.jooq.ResultQuery
|
||||
import org.jooq.impl.DSL
|
||||
import org.jooq.impl.DSL.inline
|
||||
import org.springframework.data.domain.Page
|
||||
import org.springframework.data.domain.PageImpl
|
||||
import org.springframework.data.domain.PageRequest
|
||||
|
|
@ -55,7 +56,7 @@ class BookDtoDao(
|
|||
"readProgress.lastModified" to r.LAST_MODIFIED_DATE
|
||||
)
|
||||
|
||||
override fun findAll(search: BookSearchWithReadProgress, userId: Long, pageable: Pageable): Page<BookDto> {
|
||||
override fun findAll(search: BookSearchWithReadProgress, userId: String, pageable: Pageable): Page<BookDto> {
|
||||
val conditions = search.toCondition()
|
||||
|
||||
val count = dsl.selectCount()
|
||||
|
|
@ -83,18 +84,18 @@ class BookDtoDao(
|
|||
)
|
||||
}
|
||||
|
||||
override fun findByIdOrNull(bookId: Long, userId: Long): BookDto? =
|
||||
override fun findByIdOrNull(bookId: String, userId: String): BookDto? =
|
||||
selectBase(userId)
|
||||
.where(b.ID.eq(bookId))
|
||||
.fetchAndMap()
|
||||
.firstOrNull()
|
||||
|
||||
override fun findPreviousInSeries(bookId: Long, userId: Long): BookDto? = findSibling(bookId, userId, next = false)
|
||||
override fun findPreviousInSeries(bookId: String, userId: String): BookDto? = findSibling(bookId, userId, next = false)
|
||||
|
||||
override fun findNextInSeries(bookId: Long, userId: Long): BookDto? = findSibling(bookId, userId, next = true)
|
||||
override fun findNextInSeries(bookId: String, userId: String): BookDto? = findSibling(bookId, userId, next = true)
|
||||
|
||||
|
||||
override fun findOnDeck(libraryIds: Collection<Long>, userId: Long, pageable: Pageable): Page<BookDto> {
|
||||
override fun findOnDeck(libraryIds: Collection<String>, userId: String, pageable: Pageable): Page<BookDto> {
|
||||
val conditions = if (libraryIds.isEmpty()) DSL.trueCondition() else s.LIBRARY_ID.`in`(libraryIds)
|
||||
|
||||
val seriesIds = dsl.select(s.ID)
|
||||
|
|
@ -104,11 +105,11 @@ class BookDtoDao(
|
|||
.and(readProgressCondition(userId))
|
||||
.where(conditions)
|
||||
.groupBy(s.ID)
|
||||
.having(SeriesDtoDao.countUnread.ge(1.toBigDecimal()))
|
||||
.and(SeriesDtoDao.countRead.ge(1.toBigDecimal()))
|
||||
.and(SeriesDtoDao.countInProgress.eq(0.toBigDecimal()))
|
||||
.having(SeriesDtoDao.countUnread.ge(inline(1.toBigDecimal())))
|
||||
.and(SeriesDtoDao.countRead.ge(inline(1.toBigDecimal())))
|
||||
.and(SeriesDtoDao.countInProgress.eq(inline(0.toBigDecimal())))
|
||||
.orderBy(DSL.max(r.LAST_MODIFIED_DATE).desc())
|
||||
.fetchInto(Long::class.java)
|
||||
.fetchInto(String::class.java)
|
||||
|
||||
val dtos = seriesIds
|
||||
.drop(pageable.pageNumber * pageable.pageSize)
|
||||
|
|
@ -130,15 +131,15 @@ class BookDtoDao(
|
|||
)
|
||||
}
|
||||
|
||||
private fun readProgressCondition(userId: Long): Condition = r.USER_ID.eq(userId).or(r.USER_ID.isNull)
|
||||
private fun readProgressCondition(userId: String): Condition = r.USER_ID.eq(userId).or(r.USER_ID.isNull)
|
||||
|
||||
private fun findSibling(bookId: Long, userId: Long, next: Boolean): BookDto? {
|
||||
private fun findSibling(bookId: String, userId: String, next: Boolean): BookDto? {
|
||||
val record = dsl.select(b.SERIES_ID, d.NUMBER_SORT)
|
||||
.from(b)
|
||||
.leftJoin(d).on(b.ID.eq(d.BOOK_ID))
|
||||
.where(b.ID.eq(bookId))
|
||||
.fetchOne()
|
||||
val seriesId = record.get(0, Long::class.java)
|
||||
val seriesId = record.get(0, String::class.java)
|
||||
val numberSort = record.get(1, Float::class.java)
|
||||
|
||||
return selectBase(userId)
|
||||
|
|
@ -150,7 +151,7 @@ class BookDtoDao(
|
|||
.firstOrNull()
|
||||
}
|
||||
|
||||
private fun selectBase(userId: Long) =
|
||||
private fun selectBase(userId: String) =
|
||||
dsl.select(
|
||||
*b.fields(),
|
||||
*mediaFields,
|
||||
|
|
@ -210,9 +211,9 @@ class BookDtoDao(
|
|||
name = name,
|
||||
url = URL(url).toFilePath(),
|
||||
number = number,
|
||||
created = createdDate.toUTC(),
|
||||
lastModified = lastModifiedDate.toUTC(),
|
||||
fileLastModified = fileLastModified.toUTC(),
|
||||
created = createdDate,
|
||||
lastModified = lastModifiedDate,
|
||||
fileLastModified = fileLastModified,
|
||||
sizeBytes = fileSize,
|
||||
media = media,
|
||||
metadata = metadata,
|
||||
|
|
@ -253,7 +254,7 @@ class BookDtoDao(
|
|||
ReadProgressDto(
|
||||
page = page,
|
||||
completed = completed,
|
||||
created = createdDate.toUTC(),
|
||||
lastModified = lastModifiedDate.toUTC()
|
||||
created = createdDate,
|
||||
lastModified = lastModifiedDate
|
||||
)
|
||||
}
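Earlier in this file, the `having`/`and` clauses of the on-deck query now wrap their constants in `DSL.inline(...)`, so jOOQ renders them as SQL literals instead of bind parameters. A small rendering-only sketch of the difference (no database needed; the field name is illustrative):

```kotlin
import java.math.BigDecimal
import org.jooq.SQLDialect
import org.jooq.impl.DSL

fun main() {
    val ctx = DSL.using(SQLDialect.SQLITE)
    val count = DSL.field("unread_count", BigDecimal::class.java)

    println(ctx.render(count.ge(BigDecimal.ONE)))                    // bind marker: unread_count >= ?
    println(ctx.renderInlined(count.ge(DSL.inline(BigDecimal.ONE)))) // inlined literal: unread_count >= 1
}
```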
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import org.gotson.komga.jooq.tables.records.BookMetadataRecord
|
|||
import org.jooq.DSLContext
|
||||
import org.springframework.stereotype.Component
|
||||
import java.time.LocalDateTime
|
||||
import java.time.ZoneId
|
||||
|
||||
@Component
|
||||
class BookMetadataDao(
|
||||
|
|
@ -20,17 +21,20 @@ class BookMetadataDao(
|
|||
|
||||
private val groupFields = arrayOf(*d.fields(), *a.fields())
|
||||
|
||||
override fun findById(bookId: Long): BookMetadata =
|
||||
findOne(bookId).first()
|
||||
override fun findById(bookId: String): BookMetadata =
|
||||
find(dsl, listOf(bookId)).first()
|
||||
|
||||
override fun findByIdOrNull(bookId: Long): BookMetadata? =
|
||||
findOne(bookId).firstOrNull()
|
||||
override fun findByIdOrNull(bookId: String): BookMetadata? =
|
||||
find(dsl, listOf(bookId)).firstOrNull()
|
||||
|
||||
private fun findOne(bookId: Long) =
|
||||
override fun findByIds(bookIds: Collection<String>): Collection<BookMetadata> =
|
||||
find(dsl, bookIds)
|
||||
|
||||
private fun find(dsl: DSLContext, bookIds: Collection<String>) =
|
||||
dsl.select(*groupFields)
|
||||
.from(d)
|
||||
.leftJoin(a).on(d.BOOK_ID.eq(a.BOOK_ID))
|
||||
.where(d.BOOK_ID.eq(bookId))
|
||||
.where(d.BOOK_ID.`in`(bookIds))
|
||||
.groupBy(*groupFields)
|
||||
.fetchGroups(
|
||||
{ it.into(d) }, { it.into(a) }
|
||||
|
|
@ -46,89 +50,136 @@ class BookMetadataDao(
|
|||
.fetch(a.NAME)
|
||||
}
|
||||
|
||||
override fun insert(metadata: BookMetadata): BookMetadata {
|
||||
dsl.transaction { config ->
|
||||
with(config.dsl())
|
||||
{
|
||||
insertInto(d)
|
||||
.set(d.BOOK_ID, metadata.bookId)
|
||||
.set(d.TITLE, metadata.title)
|
||||
.set(d.TITLE_LOCK, metadata.titleLock)
|
||||
.set(d.SUMMARY, metadata.summary)
|
||||
.set(d.SUMMARY_LOCK, metadata.summaryLock)
|
||||
.set(d.NUMBER, metadata.number)
|
||||
.set(d.NUMBER_LOCK, metadata.numberLock)
|
||||
.set(d.NUMBER_SORT, metadata.numberSort)
|
||||
.set(d.NUMBER_SORT_LOCK, metadata.numberSortLock)
|
||||
.set(d.READING_DIRECTION, metadata.readingDirection?.toString())
|
||||
.set(d.READING_DIRECTION_LOCK, metadata.readingDirectionLock)
|
||||
.set(d.PUBLISHER, metadata.publisher)
|
||||
.set(d.PUBLISHER_LOCK, metadata.publisherLock)
|
||||
.set(d.AGE_RATING, metadata.ageRating)
|
||||
.set(d.AGE_RATING_LOCK, metadata.ageRatingLock)
|
||||
.set(d.RELEASE_DATE, metadata.releaseDate)
|
||||
.set(d.RELEASE_DATE_LOCK, metadata.releaseDateLock)
|
||||
.set(d.AUTHORS_LOCK, metadata.authorsLock)
|
||||
.execute()
|
||||
override fun insert(metadata: BookMetadata) {
|
||||
insertMany(listOf(metadata))
|
||||
}
|
||||
|
||||
insertAuthors(this, metadata)
|
||||
override fun insertMany(metadatas: Collection<BookMetadata>) {
|
||||
if (metadatas.isNotEmpty()) {
|
||||
dsl.transaction { config ->
|
||||
config.dsl().batch(
|
||||
config.dsl().insertInto(
|
||||
d,
|
||||
d.BOOK_ID,
|
||||
d.TITLE,
|
||||
d.TITLE_LOCK,
|
||||
d.SUMMARY,
|
||||
d.SUMMARY_LOCK,
|
||||
d.NUMBER,
|
||||
d.NUMBER_LOCK,
|
||||
d.NUMBER_SORT,
|
||||
d.NUMBER_SORT_LOCK,
|
||||
d.READING_DIRECTION,
|
||||
d.READING_DIRECTION_LOCK,
|
||||
d.PUBLISHER,
|
||||
d.PUBLISHER_LOCK,
|
||||
d.AGE_RATING,
|
||||
d.AGE_RATING_LOCK,
|
||||
d.RELEASE_DATE,
|
||||
d.RELEASE_DATE_LOCK,
|
||||
d.AUTHORS_LOCK
|
||||
).values(null as String?, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null)
|
||||
).also { step ->
|
||||
metadatas.forEach {
|
||||
step.bind(
|
||||
it.bookId,
|
||||
it.title,
|
||||
it.titleLock,
|
||||
it.summary,
|
||||
it.summaryLock,
|
||||
it.number,
|
||||
it.numberLock,
|
||||
it.numberSort,
|
||||
it.numberSortLock,
|
||||
it.readingDirection?.toString(),
|
||||
it.readingDirectionLock,
|
||||
it.publisher,
|
||||
it.publisherLock,
|
||||
it.ageRating,
|
||||
it.ageRatingLock,
|
||||
it.releaseDate,
|
||||
it.releaseDateLock,
|
||||
it.authorsLock
|
||||
)
|
||||
}
|
||||
}.execute()
|
||||
|
||||
insertAuthors(config.dsl(), metadatas)
|
||||
}
|
||||
}
|
||||
|
||||
return findById(metadata.bookId)
|
||||
}
|
||||
|
||||
override fun update(metadata: BookMetadata) {
|
||||
dsl.transaction { config ->
|
||||
with(config.dsl())
|
||||
{
|
||||
update(d)
|
||||
.set(d.TITLE, metadata.title)
|
||||
.set(d.TITLE_LOCK, metadata.titleLock)
|
||||
.set(d.SUMMARY, metadata.summary)
|
||||
.set(d.SUMMARY_LOCK, metadata.summaryLock)
|
||||
.set(d.NUMBER, metadata.number)
|
||||
.set(d.NUMBER_LOCK, metadata.numberLock)
|
||||
.set(d.NUMBER_SORT, metadata.numberSort)
|
||||
.set(d.NUMBER_SORT_LOCK, metadata.numberSortLock)
|
||||
.set(d.READING_DIRECTION, metadata.readingDirection?.toString())
|
||||
.set(d.READING_DIRECTION_LOCK, metadata.readingDirectionLock)
|
||||
.set(d.PUBLISHER, metadata.publisher)
|
||||
.set(d.PUBLISHER_LOCK, metadata.publisherLock)
|
||||
.set(d.AGE_RATING, metadata.ageRating)
|
||||
.set(d.AGE_RATING_LOCK, metadata.ageRatingLock)
|
||||
.set(d.RELEASE_DATE, metadata.releaseDate)
|
||||
.set(d.RELEASE_DATE_LOCK, metadata.releaseDateLock)
|
||||
.set(d.AUTHORS_LOCK, metadata.authorsLock)
|
||||
.set(d.LAST_MODIFIED_DATE, LocalDateTime.now())
|
||||
.where(d.BOOK_ID.eq(metadata.bookId))
|
||||
.execute()
|
||||
updateMetadata(config.dsl(), metadata)
|
||||
}
|
||||
}
|
||||
|
||||
deleteFrom(a)
|
||||
.where(a.BOOK_ID.eq(metadata.bookId))
|
||||
.execute()
|
||||
override fun updateMany(metadatas: Collection<BookMetadata>) {
|
||||
dsl.transaction { config ->
|
||||
metadatas.forEach { updateMetadata(config.dsl(), it) }
|
||||
}
|
||||
}
|
||||
|
||||
insertAuthors(this, metadata)
|
||||
private fun updateMetadata(dsl: DSLContext, metadata: BookMetadata) {
|
||||
dsl.update(d)
|
||||
.set(d.TITLE, metadata.title)
|
||||
.set(d.TITLE_LOCK, metadata.titleLock)
|
||||
.set(d.SUMMARY, metadata.summary)
|
||||
.set(d.SUMMARY_LOCK, metadata.summaryLock)
|
||||
.set(d.NUMBER, metadata.number)
|
||||
.set(d.NUMBER_LOCK, metadata.numberLock)
|
||||
.set(d.NUMBER_SORT, metadata.numberSort)
|
||||
.set(d.NUMBER_SORT_LOCK, metadata.numberSortLock)
|
||||
.set(d.READING_DIRECTION, metadata.readingDirection?.toString())
|
||||
.set(d.READING_DIRECTION_LOCK, metadata.readingDirectionLock)
|
||||
.set(d.PUBLISHER, metadata.publisher)
|
||||
.set(d.PUBLISHER_LOCK, metadata.publisherLock)
|
||||
.set(d.AGE_RATING, metadata.ageRating)
|
||||
.set(d.AGE_RATING_LOCK, metadata.ageRatingLock)
|
||||
.set(d.RELEASE_DATE, metadata.releaseDate)
|
||||
.set(d.RELEASE_DATE_LOCK, metadata.releaseDateLock)
|
||||
.set(d.AUTHORS_LOCK, metadata.authorsLock)
|
||||
.set(d.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z")))
|
||||
.where(d.BOOK_ID.eq(metadata.bookId))
|
||||
.execute()
|
||||
|
||||
dsl.deleteFrom(a)
|
||||
.where(a.BOOK_ID.eq(metadata.bookId))
|
||||
.execute()
|
||||
|
||||
insertAuthors(dsl, listOf(metadata))
|
||||
}
|
||||
|
||||
private fun insertAuthors(dsl: DSLContext, metadatas: Collection<BookMetadata>) {
|
||||
if (metadatas.any { it.authors.isNotEmpty() }) {
|
||||
dsl.batch(
|
||||
dsl.insertInto(a, a.BOOK_ID, a.NAME, a.ROLE)
|
||||
.values(null as String?, null, null)
|
||||
).also { step ->
|
||||
metadatas.forEach { metadata ->
|
||||
metadata.authors.forEach {
|
||||
step.bind(metadata.bookId, it.name, it.role)
|
||||
}
|
||||
}
|
||||
}.execute()
|
||||
}
|
||||
}
|
||||
|
||||
override fun delete(bookId: String) {
|
||||
dsl.transaction { config ->
|
||||
with(config.dsl()) {
|
||||
deleteFrom(a).where(a.BOOK_ID.eq(bookId)).execute()
|
||||
deleteFrom(d).where(d.BOOK_ID.eq(bookId)).execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun insertAuthors(dsl: DSLContext, metadata: BookMetadata) {
|
||||
metadata.authors.forEach {
|
||||
dsl.insertInto(a)
|
||||
.set(a.BOOK_ID, metadata.bookId)
|
||||
.set(a.NAME, it.name)
|
||||
.set(a.ROLE, it.role)
|
||||
.execute()
|
||||
}
|
||||
}
|
||||
|
||||
override fun delete(bookId: Long) {
|
||||
override fun deleteByBookIds(bookIds: Collection<String>) {
|
||||
dsl.transaction { config ->
|
||||
with(config.dsl())
|
||||
{
|
||||
deleteFrom(a).where(a.BOOK_ID.eq(bookId)).execute()
|
||||
deleteFrom(d).where(d.BOOK_ID.eq(bookId)).execute()
|
||||
with(config.dsl()) {
|
||||
deleteFrom(a).where(a.BOOK_ID.`in`(bookIds)).execute()
|
||||
deleteFrom(d).where(d.BOOK_ID.`in`(bookIds)).execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -149,8 +200,8 @@ class BookMetadataDao(
|
|||
|
||||
bookId = bookId,
|
||||
|
||||
createdDate = createdDate,
|
||||
lastModifiedDate = lastModifiedDate,
|
||||
createdDate = createdDate.toCurrentTimeZone(),
|
||||
lastModifiedDate = lastModifiedDate.toCurrentTimeZone(),
|
||||
|
||||
titleLock = titleLock,
|
||||
summaryLock = summaryLock,
|
||||
|
|
|
|||
|
|
@ -2,13 +2,13 @@ package org.gotson.komga.infrastructure.jooq
|
|||
|
||||
import org.gotson.komga.domain.model.KomgaUser
|
||||
import org.gotson.komga.domain.persistence.KomgaUserRepository
|
||||
import org.gotson.komga.jooq.Sequences.HIBERNATE_SEQUENCE
|
||||
import org.gotson.komga.jooq.Tables
|
||||
import org.jooq.DSLContext
|
||||
import org.jooq.Record
|
||||
import org.jooq.ResultQuery
|
||||
import org.springframework.stereotype.Component
|
||||
import java.time.LocalDateTime
|
||||
import java.time.ZoneId
|
||||
|
||||
@Component
|
||||
class KomgaUserDao(
|
||||
|
|
@ -24,7 +24,7 @@ class KomgaUserDao(
|
|||
selectBase()
|
||||
.fetchAndMap()
|
||||
|
||||
override fun findByIdOrNull(id: Long): KomgaUser? =
|
||||
override fun findByIdOrNull(id: String): KomgaUser? =
|
||||
selectBase()
|
||||
.where(u.ID.equal(id))
|
||||
.fetchAndMap()
|
||||
|
|
@@ -49,57 +49,70 @@ class KomgaUserDao(
       sharedLibrariesIds = ulr.mapNotNull { it.libraryId }.toSet(),
       sharedAllLibraries = ur.sharedAllLibraries,
       id = ur.id,
-      createdDate = ur.createdDate,
-      lastModifiedDate = ur.lastModifiedDate
+      createdDate = ur.createdDate.toCurrentTimeZone(),
+      lastModifiedDate = ur.lastModifiedDate.toCurrentTimeZone()
     )
   }

-  override fun save(user: KomgaUser): KomgaUser {
-    val id = if (user.id == 0L) dsl.nextval(HIBERNATE_SEQUENCE) else user.id
-
+  override fun insert(user: KomgaUser) {
     dsl.transaction { config ->
       with(config.dsl())
       {
-        mergeInto(u)
-          .using(dsl.selectOne())
-          .on(u.ID.eq(id))
-          .whenMatchedThenUpdate()
+        insertInto(u)
+          .set(u.ID, user.id)
           .set(u.EMAIL, user.email)
           .set(u.PASSWORD, user.password)
           .set(u.ROLE_ADMIN, user.roleAdmin)
           .set(u.ROLE_FILE_DOWNLOAD, user.roleFileDownload)
           .set(u.ROLE_PAGE_STREAMING, user.rolePageStreaming)
           .set(u.SHARED_ALL_LIBRARIES, user.sharedAllLibraries)
-          .set(u.LAST_MODIFIED_DATE, LocalDateTime.now())
-          .whenNotMatchedThenInsert(u.ID, u.EMAIL, u.PASSWORD, u.ROLE_ADMIN, u.ROLE_FILE_DOWNLOAD, u.ROLE_PAGE_STREAMING, u.SHARED_ALL_LIBRARIES)
-          .values(id, user.email, user.password, user.roleAdmin, user.roleFileDownload, user.rolePageStreaming, user.sharedAllLibraries)
           .execute()

-        deleteFrom(ul)
-          .where(ul.USER_ID.eq(id))
-          .execute()
-
         user.sharedLibrariesIds.forEach {
           insertInto(ul)
             .columns(ul.USER_ID, ul.LIBRARY_ID)
-            .values(id, it)
+            .values(user.id, it)
             .execute()
         }
       }
     }
-
-    return findByIdOrNull(id)!!
   }

-  override fun saveAll(users: Iterable<KomgaUser>): Collection<KomgaUser> =
-    users.map { save(it) }
-
-  override fun delete(user: KomgaUser) {
+  override fun update(user: KomgaUser) {
     dsl.transaction { config ->
       with(config.dsl())
       {
-        deleteFrom(ul).where(ul.USER_ID.equal(user.id)).execute()
-        deleteFrom(u).where(u.ID.equal(user.id)).execute()
+        update(u)
+          .set(u.EMAIL, user.email)
+          .set(u.PASSWORD, user.password)
+          .set(u.ROLE_ADMIN, user.roleAdmin)
+          .set(u.ROLE_FILE_DOWNLOAD, user.roleFileDownload)
+          .set(u.ROLE_PAGE_STREAMING, user.rolePageStreaming)
+          .set(u.SHARED_ALL_LIBRARIES, user.sharedAllLibraries)
+          .set(u.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z")))
+          .where(u.ID.eq(user.id))
+          .execute()
+
+        deleteFrom(ul)
+          .where(ul.USER_ID.eq(user.id))
+          .execute()
+
+        user.sharedLibrariesIds.forEach {
+          insertInto(ul)
+            .columns(ul.USER_ID, ul.LIBRARY_ID)
+            .values(user.id, it)
+            .execute()
+        }
       }
     }
   }
+
+  override fun delete(userId: String) {
+    dsl.transaction { config ->
+      with(config.dsl())
+      {
+        deleteFrom(ul).where(ul.USER_ID.equal(userId)).execute()
+        deleteFrom(u).where(u.ID.equal(userId)).execute()
+      }
+    }
+  }
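With the merge-based save/saveAll removed, the choice between creating and updating a user moves to the caller. A sketch of that routing, using only the repository methods visible in the hunk above (assuming they are declared on KomgaUserRepository, as the override keywords suggest):

import org.gotson.komga.domain.model.KomgaUser
import org.gotson.komga.domain.persistence.KomgaUserRepository

// Sketch only: the upsert decision now lives outside the DAO.
fun upsertUser(repository: KomgaUserRepository, user: KomgaUser) {
  if (repository.findByIdOrNull(user.id) == null) repository.insert(user)
  else repository.update(user)
}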
@@ -2,13 +2,13 @@ package org.gotson.komga.infrastructure.jooq
 import org.gotson.komga.domain.model.Library
 import org.gotson.komga.domain.persistence.LibraryRepository
-import org.gotson.komga.jooq.Sequences.HIBERNATE_SEQUENCE
 import org.gotson.komga.jooq.Tables
 import org.gotson.komga.jooq.tables.records.LibraryRecord
 import org.jooq.DSLContext
 import org.springframework.stereotype.Component
 import java.net.URL
 import java.time.LocalDateTime
+import java.time.ZoneId

 @Component
 class LibraryDao(
@@ -18,15 +18,15 @@ class LibraryDao(
   private val l = Tables.LIBRARY
   private val ul = Tables.USER_LIBRARY_SHARING

-  override fun findByIdOrNull(libraryId: Long): Library? =
+  override fun findByIdOrNull(libraryId: String): Library? =
     findOne(libraryId)
       ?.toDomain()

-  override fun findById(libraryId: Long): Library =
+  override fun findById(libraryId: String): Library =
     findOne(libraryId)
       .toDomain()

-  private fun findOne(libraryId: Long) =
+  private fun findOne(libraryId: String) =
     dsl.selectFrom(l)
       .where(l.ID.eq(libraryId))
       .fetchOneInto(l)
@@ -36,13 +36,13 @@ class LibraryDao(
       .fetchInto(l)
       .map { it.toDomain() }

-  override fun findAllById(libraryIds: Collection<Long>): Collection<Library> =
+  override fun findAllById(libraryIds: Collection<String>): Collection<Library> =
     dsl.selectFrom(l)
       .where(l.ID.`in`(libraryIds))
       .fetchInto(l)
       .map { it.toDomain() }

-  override fun delete(libraryId: Long) {
+  override fun delete(libraryId: String) {
     dsl.transaction { config ->
       with(config.dsl())
       {
@@ -62,11 +62,9 @@ class LibraryDao(
     }
   }

-  override fun insert(library: Library): Library {
-    val id = dsl.nextval(HIBERNATE_SEQUENCE)
-
+  override fun insert(library: Library) {
     dsl.insertInto(l)
-      .set(l.ID, id)
+      .set(l.ID, library.id)
       .set(l.NAME, library.name)
       .set(l.ROOT, library.root.toString())
       .set(l.IMPORT_COMICINFO_BOOK, library.importComicInfoBook)

@@ -75,8 +73,6 @@ class LibraryDao(
       .set(l.IMPORT_EPUB_BOOK, library.importEpubBook)
       .set(l.IMPORT_EPUB_SERIES, library.importEpubSeries)
       .execute()
-
-    return findById(id)
   }

   override fun update(library: Library) {
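insert used to return the freshly created Library (looked up by its sequence-generated id); it now returns nothing, since the caller already owns the string id. A sketch of a caller that still wants the persisted row back, using only the repository methods shown above:

import org.gotson.komga.domain.model.Library
import org.gotson.komga.domain.persistence.LibraryRepository

// Sketch only: insert, then re-read by the id the caller supplied.
fun addLibrary(repository: LibraryRepository, library: Library): Library {
  repository.insert(library)
  return repository.findById(library.id)
}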
@@ -88,7 +84,7 @@ class LibraryDao(
       .set(l.IMPORT_COMICINFO_COLLECTION, library.importComicInfoCollection)
       .set(l.IMPORT_EPUB_BOOK, library.importEpubBook)
       .set(l.IMPORT_EPUB_SERIES, library.importEpubSeries)
-      .set(l.LAST_MODIFIED_DATE, LocalDateTime.now())
+      .set(l.LAST_MODIFIED_DATE, LocalDateTime.now(ZoneId.of("Z")))
       .where(l.ID.eq(library.id))
       .execute()
   }
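LAST_MODIFIED_DATE is now written as UTC via ZoneId.of("Z"), the counterpart of the toCurrentTimeZone() conversion applied on read. A one-line helper, shown only to name the pattern (not part of the diff):

import java.time.LocalDateTime
import java.time.ZoneId

// Sketch only: "now" expressed in UTC, as written by the DAOs above.
fun nowUtc(): LocalDateTime = LocalDateTime.now(ZoneId.of("Z"))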
@@ -106,7 +102,7 @@ class LibraryDao(
       importEpubBook = importEpubBook,
       importEpubSeries = importEpubSeries,
       id = id,
-      createdDate = createdDate,
-      lastModifiedDate = lastModifiedDate
+      createdDate = createdDate.toCurrentTimeZone(),
+      lastModifiedDate = lastModifiedDate.toCurrentTimeZone()
     )
   }
Some files were not shown because too many files have changed in this diff.