Mirror of https://github.com/stashapp/stash.git
Add database optimise task (#3929)
* Add database optimise task
* Wrap errors
* US internationalisation

Co-authored-by: WithoutPants <53250216+WithoutPants@users.noreply.github.com>

Parent: 95a78de3aa
Commit: 4961c967ee

11 changed files with 146 additions and 10 deletions
@@ -45,3 +45,7 @@ mutation BackupDatabase($input: BackupDatabaseInput!) {
 mutation AnonymiseDatabase($input: AnonymiseDatabaseInput!) {
   anonymiseDatabase(input: $input)
 }
+
+mutation OptimiseDatabase {
+  optimiseDatabase
+}
@@ -379,6 +379,9 @@ type Mutation {
   "Anonymise the database in a separate file. Optionally returns a link to download the database file"
   anonymiseDatabase(input: AnonymiseDatabaseInput!): String
 
+  "Optimises the database. Returns the job ID"
+  optimiseDatabase: ID!
+
   "Reload scrapers"
   reloadScrapers: Boolean!
 
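For orientation only: the new mutation takes no arguments and returns the queued job's ID, which arrives as a JSON string. A minimal sketch (not part of this commit) of starting the task over plain HTTP in Go; the endpoint path and port are assumptions about a typical local setup, not something this diff defines.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Build a GraphQL request for the mutation added in this commit.
	payload, err := json.Marshal(map[string]string{
		"query": "mutation OptimiseDatabase { optimiseDatabase }",
	})
	if err != nil {
		log.Fatal(err)
	}

	// http://localhost:9999/graphql is an assumed local endpoint for this sketch.
	resp, err := http.Post("http://localhost:9999/graphql", "application/json", bytes.NewReader(payload))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// optimiseDatabase is declared as ID! in the schema above, so it decodes as a string.
	var out struct {
		Data struct {
			OptimiseDatabase string `json:"optimiseDatabase"`
		} `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		log.Fatal(err)
	}
	fmt.Println("queued job:", out.Data.OptimiseDatabase)
}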
@@ -208,3 +208,8 @@ func (r *mutationResolver) AnonymiseDatabase(ctx context.Context, input Anonymis
 
 	return nil, nil
 }
+
+func (r *mutationResolver) OptimiseDatabase(ctx context.Context) (string, error) {
+	jobID := manager.GetInstance().OptimiseDatabase(ctx)
+	return strconv.Itoa(jobID), nil
+}
@@ -265,6 +265,14 @@ func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int {
 	return s.JobManager.Add(ctx, "Cleaning...", &j)
 }
 
+func (s *Manager) OptimiseDatabase(ctx context.Context) int {
+	j := OptimiseDatabaseJob{
+		Optimiser: s.Database,
+	}
+
+	return s.JobManager.Add(ctx, "Optimising database...", &j)
+}
+
 func (s *Manager) MigrateHash(ctx context.Context) int {
 	j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) {
 		fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm()
internal/manager/task_optimise.go (new file, 56 lines)
@@ -0,0 +1,56 @@
+package manager
+
+import (
+	"context"
+	"time"
+
+	"github.com/stashapp/stash/pkg/job"
+	"github.com/stashapp/stash/pkg/logger"
+)
+
+type Optimiser interface {
+	Analyze(ctx context.Context) error
+	Vacuum(ctx context.Context) error
+}
+
+type OptimiseDatabaseJob struct {
+	Optimiser Optimiser
+}
+
+func (j *OptimiseDatabaseJob) Execute(ctx context.Context, progress *job.Progress) {
+	logger.Info("Optimising database")
+	progress.SetTotal(2)
+
+	start := time.Now()
+
+	var err error
+
+	progress.ExecuteTask("Analyzing database", func() {
+		err = j.Optimiser.Analyze(ctx)
+		progress.Increment()
+	})
+	if job.IsCancelled(ctx) {
+		logger.Info("Stopping due to user request")
+		return
+	}
+	if err != nil {
+		logger.Errorf("Error analyzing database: %v", err)
+		return
+	}
+
+	progress.ExecuteTask("Vacuuming database", func() {
+		err = j.Optimiser.Vacuum(ctx)
+		progress.Increment()
+	})
+	if job.IsCancelled(ctx) {
+		logger.Info("Stopping due to user request")
+		return
+	}
+	if err != nil {
+		logger.Errorf("Error vacuuming database: %v", err)
+		return
+	}
+
+	elapsed := time.Since(start)
+	logger.Infof("Finished optimising database after %s", elapsed)
+}
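Because the job only depends on the small Optimiser interface above, it can be exercised against a stand-in implementation. A sketch of such a stand-in, under the assumption it lives in the same package for a test; slowOptimiser and its field are hypothetical names, not part of the commit.

package manager

import (
	"context"
	"time"
)

// slowOptimiser is a hypothetical stand-in for the real database: each step
// just waits for a fixed delay, but still honours cancellation the way the job expects.
type slowOptimiser struct {
	delay time.Duration
}

func (o slowOptimiser) Analyze(ctx context.Context) error {
	select {
	case <-time.After(o.delay): // pretend ANALYZE takes a while
		return nil
	case <-ctx.Done():
		return ctx.Err()
	}
}

func (o slowOptimiser) Vacuum(ctx context.Context) error {
	select {
	case <-time.After(o.delay): // pretend VACUUM takes a while
		return nil
	case <-ctx.Done():
		return ctx.Err()
	}
}

// Compile-time check that the stand-in satisfies the interface used by OptimiseDatabaseJob.
var _ Optimiser = slowOptimiser{}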
@@ -58,7 +58,7 @@ func (db *Anonymiser) Anonymise(ctx context.Context) error {
 		func() error { return db.anonymiseStudios(ctx) },
 		func() error { return db.anonymiseTags(ctx) },
 		func() error { return db.anonymiseMovies(ctx) },
-		func() error { db.optimise(); return nil },
+		func() error { return db.Optimise(ctx) },
 	})
 }(); err != nil {
 	// delete the database
@@ -436,21 +436,28 @@ func (db *Database) RunMigrations() error {
 	}
 
 	// optimize database after migration
-	db.optimise()
+	err = db.Optimise(ctx)
+	if err != nil {
+		logger.Warnf("error while performing post-migration optimisation: %v", err)
+	}
 
 	return nil
 }
 
-func (db *Database) optimise() {
-	logger.Info("Optimizing database")
-	_, err := db.db.Exec("ANALYZE")
+func (db *Database) Optimise(ctx context.Context) error {
+	logger.Info("Optimising database")
+
+	err := db.Analyze(ctx)
 	if err != nil {
-		logger.Warnf("error while performing post-migration optimization: %v", err)
+		return fmt.Errorf("performing optimization: %w", err)
 	}
-	_, err = db.db.Exec("VACUUM")
+
+	err = db.Vacuum(ctx)
 	if err != nil {
-		logger.Warnf("error while performing post-migration vacuum: %v", err)
+		return fmt.Errorf("performing vacuum: %w", err)
 	}
+
+	return nil
 }
 
 // Vacuum runs a VACUUM on the database, rebuilding the database file into a minimal amount of disk space.
@@ -459,6 +466,12 @@ func (db *Database) Vacuum(ctx context.Context) error {
 	return err
 }
 
+// Analyze runs an ANALYZE on the database to improve query performance.
+func (db *Database) Analyze(ctx context.Context) error {
+	_, err := db.db.ExecContext(ctx, "ANALYZE")
+	return err
+}
+
 func (db *Database) ExecSQL(ctx context.Context, query string, args []interface{}) (*int64, *int64, error) {
 	wrapper := dbWrapper{}
 
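At the SQL level the optimisation boils down to an ANALYZE followed by a VACUUM, which, as the Vacuum doc comment above notes, rewrites the database file and therefore temporarily needs roughly as much free disk space again. A standalone sketch of the same two statements through database/sql; the driver import and file name are assumptions for illustration, not taken from this commit.

package main

import (
	"context"
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3" // assumed SQLite driver for this sketch
)

func optimise(ctx context.Context, db *sql.DB) error {
	// ANALYZE refreshes the statistics the SQLite query planner relies on.
	if _, err := db.ExecContext(ctx, "ANALYZE"); err != nil {
		return err
	}
	// VACUUM rebuilds the database file into minimal space; writes are blocked while it runs.
	_, err := db.ExecContext(ctx, "VACUUM")
	return err
}

func main() {
	db, err := sql.Open("sqlite3", "stash-go.sqlite") // hypothetical database path
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if err := optimise(context.Background(), db); err != nil {
		log.Fatal(err)
	}
}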
@@ -10,6 +10,7 @@ import {
   mutateAnonymiseDatabase,
   mutateMigrateSceneScreenshots,
   mutateMigrateBlobs,
+  mutateOptimiseDatabase,
 } from "src/core/StashService";
 import { useToast } from "src/hooks/Toast";
 import downloadFile from "src/utils/download";
@@ -338,6 +339,24 @@ export const DataManagementTasks: React.FC<IDataManagementTasks> = ({
     }
   }
 
+  async function onOptimiseDatabase() {
+    try {
+      await mutateOptimiseDatabase();
+      Toast.success({
+        content: intl.formatMessage(
+          { id: "config.tasks.added_job_to_queue" },
+          {
+            operation_name: intl.formatMessage({
+              id: "actions.optimise_database",
+            }),
+          }
+        ),
+      });
+    } catch (e) {
+      Toast.error(e);
+    }
+  }
+
   async function onAnonymise(download?: boolean) {
     try {
       setIsAnonymiseRunning(true);
@@ -419,6 +438,25 @@ export const DataManagementTasks: React.FC<IDataManagementTasks> = ({
           setOptions={(o) => setCleanOptions(o)}
         />
       </div>
+
+      <Setting
+        headingID="actions.optimise_database"
+        subHeading={
+          <>
+            <FormattedMessage id="config.tasks.optimise_database" />
+            <br />
+            <FormattedMessage id="config.tasks.optimise_database_warning" />
+          </>
+        }
+      >
+        <Button
+          id="optimiseDatabase"
+          variant="danger"
+          onClick={() => onOptimiseDatabase()}
+        >
+          <FormattedMessage id="actions.optimise_database" />
+        </Button>
+      </Setting>
     </SettingSection>
 
     <SettingSection headingID="metadata">
@@ -519,7 +557,7 @@ export const DataManagementTasks: React.FC<IDataManagementTasks> = ({
         )}
       >
         <Button
-          id="backup"
+          id="anonymise"
           variant="secondary"
           type="submit"
           onClick={() => onAnonymise()}
@@ -533,7 +571,7 @@ export const DataManagementTasks: React.FC<IDataManagementTasks> = ({
         subHeadingID="config.tasks.anonymise_and_download"
       >
         <Button
-          id="anonymousDownload"
+          id="anonymiseDownload"
           variant="secondary"
           type="submit"
           onClick={() => onAnonymise(true)}
@@ -2138,6 +2138,11 @@ export const mutateAnonymiseDatabase = (input: GQL.AnonymiseDatabaseInput) =>
     variables: { input },
   });
 
+export const mutateOptimiseDatabase = () =>
+  client.mutate<GQL.OptimiseDatabaseMutation>({
+    mutation: GQL.OptimiseDatabaseDocument,
+  });
+
 export const mutateMigrateHashNaming = () =>
   client.mutate<GQL.MigrateHashNamingMutation>({
     mutation: GQL.MigrateHashNamingDocument,
@@ -67,6 +67,7 @@
     "not_running": "not running",
     "open_in_external_player": "Open in external player",
     "open_random": "Open Random",
+    "optimise_database": "Optimise Database",
     "overwrite": "Overwrite",
     "play_random": "Play Random",
     "play_selected": "Play selected",
@@ -484,6 +485,8 @@
     },
     "migrations": "Migrations",
     "only_dry_run": "Only perform a dry run. Don't remove anything",
+    "optimise_database": "Attempt to improve performance by analysing and then rebuilding the entire database file.",
+    "optimise_database_warning": "Warning: while this task is running, any operations that modify the database will fail, and depending on your database size, it could take several minutes to complete. It also requires at the very minimum as much free disk space as your database is large, but 1.5x is recommended.",
     "plugin_tasks": "Plugin Tasks",
     "scan": {
       "scanning_all_paths": "Scanning all paths",
|
|
@ -19,5 +19,6 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"optimise_database": "Optimize Database",
|
||||||
"performer_favorite": "Performer Favorited"
|
"performer_favorite": "Performer Favorited"
|
||||||
}
|
}
|
||||||
|
|
|
||||||