Mirror of https://github.com/stashapp/stash.git
* Log 3 unchecked errors. Rather than ignore errors, log them at the WARNING log level. The server has been functioning without these, so assume they are not at the ERROR level. (A sketch of this pattern follows the list.)
* Log errors in concurrency test. If we can't initialize the configuration, treat the test as a failure.
* Undo the errcheck on configurations for now.
* Handle unchecked errors in pkg/manager.
* Resolve unchecked errors.
* Handle DLNA/DMS unchecked errors.
* Handle error checking in concurrency test. Generalize config initialization so we can initialize a configuration without writing it to disk. Use this in the test case, since otherwise the test fails to write.
* Handle the remaining unchecked errors.
* Heed gosimple in update test.
* Use one-line if-initializer statements. While here, fix a wrong variable capture error.
* testing.T doesn't support %w; use %v instead, which is supported.
* Remove unused query builder functions. The Int/String criterion handler functions are now generalized, so there is no need to keep these functions around anymore.
* Mark filterBuilder.addRecursiveWith nolint. The function is useful for the future and no alternative refactor looks nicer. Keep the function around, but tell the linter to ignore it.
* Remove utils.Btoi. There are no users of this utility function.
* Return error on scan failure. If we fail to scan the row when looking for the unique checksum index, report the error upwards.
* Fix comments on exported functions.
* Fix typos.
* Fix startup error.
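Two of the bullets above describe reusable Go patterns. The sketches below are hypothetical illustrations only, not code from the stash repository; the file names, helper names (writeReport, initConfig), and log wording are invented.

First, logging an otherwise-ignored error at the WARNING level using a one-line if-initializer statement:

```go
package main

import (
    "log"
    "os"
)

// writeReport writes data to path and surfaces the Close error at WARNING
// level instead of silently discarding it.
func writeReport(path string, data []byte) error {
    f, err := os.Create(path)
    if err != nil {
        return err
    }
    defer func() {
        // One-line if-initializer: cerr is scoped to the if statement.
        if cerr := f.Close(); cerr != nil {
            log.Printf("WARNING: closing %s: %v", path, cerr)
        }
    }()

    _, err = f.Write(data)
    return err
}

func main() {
    if err := writeReport("report.txt", []byte("hello\n")); err != nil {
        log.Fatalf("writeReport failed: %v", err)
    }
}
```

Second, the "%w vs %v" bullet: the formatting methods on testing.T behave like fmt.Sprintf, and the %w wrapping verb is only understood by fmt.Errorf, so test failures format errors with %v:

```go
package example

import "testing"

// initConfig is a stand-in for whatever setup the real concurrency test performs.
func initConfig() error {
    return nil
}

func TestInitConfig(t *testing.T) {
    if err := initConfig(); err != nil {
        // %v renders the error text; %w would not be interpreted here.
        t.Fatalf("initializing configuration: %v", err)
    }
}
```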
142 lines · 3.8 KiB · Go
package api

import (
    "context"
    "fmt"
    "io/ioutil"
    "path/filepath"
    "strconv"
    "sync"
    "time"

    "github.com/stashapp/stash/pkg/database"
    "github.com/stashapp/stash/pkg/logger"
    "github.com/stashapp/stash/pkg/manager"
    "github.com/stashapp/stash/pkg/manager/config"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/utils"
)

// MetadataScan starts a metadata scan job for the given input and returns its job ID.
func (r *mutationResolver) MetadataScan(ctx context.Context, input models.ScanMetadataInput) (string, error) {
    jobID, err := manager.GetInstance().Scan(ctx, input)

    if err != nil {
        return "", err
    }

    return strconv.Itoa(jobID), nil
}

// MetadataImport starts a metadata import job and returns its job ID.
func (r *mutationResolver) MetadataImport(ctx context.Context) (string, error) {
    jobID, err := manager.GetInstance().Import(ctx)
    if err != nil {
        return "", err
    }

    return strconv.Itoa(jobID), nil
}

// ImportObjects creates an import task for the given input, runs it as a single job and returns the job ID.
func (r *mutationResolver) ImportObjects(ctx context.Context, input models.ImportObjectsInput) (string, error) {
    t, err := manager.CreateImportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input)
    if err != nil {
        return "", err
    }

    jobID := manager.GetInstance().RunSingleTask(ctx, t)

    return strconv.Itoa(jobID), nil
}

// MetadataExport starts a metadata export job and returns its job ID.
func (r *mutationResolver) MetadataExport(ctx context.Context) (string, error) {
    jobID, err := manager.GetInstance().Export(ctx)
    if err != nil {
        return "", err
    }

    return strconv.Itoa(jobID), nil
}

// ExportObjects runs an export task synchronously and, if a download was produced,
// returns the URL of the generated zip file.
func (r *mutationResolver) ExportObjects(ctx context.Context, input models.ExportObjectsInput) (*string, error) {
    t := manager.CreateExportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input)

    var wg sync.WaitGroup
    wg.Add(1)
    t.Start(&wg)

    if t.DownloadHash != "" {
        baseURL, _ := ctx.Value(BaseURLCtxKey).(string)

        // generate timestamp
        suffix := time.Now().Format("20060102-150405")
        ret := baseURL + "/downloads/" + t.DownloadHash + "/export" + suffix + ".zip"
        return &ret, nil
    }

    return nil, nil
}

// MetadataGenerate starts a generate job for the given input and returns its job ID.
func (r *mutationResolver) MetadataGenerate(ctx context.Context, input models.GenerateMetadataInput) (string, error) {
    jobID, err := manager.GetInstance().Generate(ctx, input)

    if err != nil {
        return "", err
    }

    return strconv.Itoa(jobID), nil
}

// MetadataAutoTag starts an auto-tag job for the given input and returns its job ID.
func (r *mutationResolver) MetadataAutoTag(ctx context.Context, input models.AutoTagMetadataInput) (string, error) {
    jobID := manager.GetInstance().AutoTag(ctx, input)
    return strconv.Itoa(jobID), nil
}

// MetadataClean starts a clean job for the given input and returns its job ID.
func (r *mutationResolver) MetadataClean(ctx context.Context, input models.CleanMetadataInput) (string, error) {
    jobID := manager.GetInstance().Clean(ctx, input)
    return strconv.Itoa(jobID), nil
}

// MigrateHashNaming starts a hash naming migration job and returns its job ID.
func (r *mutationResolver) MigrateHashNaming(ctx context.Context) (string, error) {
    jobID := manager.GetInstance().MigrateHash(ctx)
    return strconv.Itoa(jobID), nil
}

// BackupDatabase backs up the database. With input.Download set, the backup is
// written to a temporary file and a download URL is returned; otherwise it is
// written to the standard backup path.
func (r *mutationResolver) BackupDatabase(ctx context.Context, input models.BackupDatabaseInput) (*string, error) {
    // if download is true, then backup to temporary file and return a link
    download := input.Download != nil && *input.Download
    mgr := manager.GetInstance()
    var backupPath string
    if download {
        if err := utils.EnsureDir(mgr.Paths.Generated.Downloads); err != nil {
            return nil, fmt.Errorf("could not create backup directory %v: %w", mgr.Paths.Generated.Downloads, err)
        }
        f, err := ioutil.TempFile(mgr.Paths.Generated.Downloads, "backup*.sqlite")
        if err != nil {
            return nil, err
        }

        // the temporary file only reserves a unique path; the backup below writes to it
        backupPath = f.Name()
        f.Close()
    } else {
        backupPath = database.DatabaseBackupPath()
    }

    err := database.Backup(database.DB, backupPath)
    if err != nil {
        return nil, err
    }

    if download {
        downloadHash := mgr.DownloadStore.RegisterFile(backupPath, "", false)
        logger.Debugf("Generated backup file %s with hash %s", backupPath, downloadHash)

        baseURL, _ := ctx.Value(BaseURLCtxKey).(string)

        fn := filepath.Base(database.DatabaseBackupPath())
        ret := baseURL + "/downloads/" + downloadHash + "/" + fn
        return &ret, nil
    } else {
        logger.Infof("Successfully backed up database to: %s", backupPath)
    }

    return nil, nil
}