Files refactor fixes (#2743)

* Fix destroy gallery not destroying file
* Re-add minModTime functionality
* Deprecate useFileMetadata and stripFileExtension
* Optimise files post migration
* Decorate moved files. Use first missing file in move
* Include path in thumbnail generation error log
* Fix stash-box draft submission
* Don't destroy files unless deleting
* Call handler for files with no associated objects
* Fix moved zips causing error on scan
This commit is contained in:
WithoutPants 2022-07-18 10:51:59 +10:00
parent 461068462c
commit abb574205a
28 changed files with 463 additions and 255 deletions

View file

@@ -71,10 +71,19 @@ input ScanMetaDataFilterInput {
input ScanMetadataInput { input ScanMetadataInput {
paths: [String!] paths: [String!]
# useFileMetadata is deprecated with the new file management system
# if this functionality is desired, then we can make a built in scraper instead.
"""Set name, date, details from metadata (if present)""" """Set name, date, details from metadata (if present)"""
useFileMetadata: Boolean useFileMetadata: Boolean @deprecated(reason: "Not implemented")
# stripFileExtension is deprecated since we no longer set the title from the
# filename - it is automatically returned if the object has no title. If this
# functionality is desired, then we could make this an option to not include
# the extension in the auto-generated title.
"""Strip file extension from title""" """Strip file extension from title"""
stripFileExtension: Boolean stripFileExtension: Boolean @deprecated(reason: "Not implemented")
"""Generate previews during scan""" """Generate previews during scan"""
scanGeneratePreviews: Boolean scanGeneratePreviews: Boolean
"""Generate image previews during scan""" """Generate image previews during scan"""

View file

@@ -59,7 +59,7 @@ func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input S
} }
filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())) filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
res, err = client.SubmitSceneDraft(ctx, id, boxes[input.StashBoxIndex].Endpoint, filepath) res, err = client.SubmitSceneDraft(ctx, scene, boxes[input.StashBoxIndex].Endpoint, filepath)
return err return err
}) })

View file

@@ -66,7 +66,7 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
if err != nil { if err != nil {
// don't log for unsupported image format // don't log for unsupported image format
if !errors.Is(err, image.ErrNotSupportedForThumbnail) { if !errors.Is(err, image.ErrNotSupportedForThumbnail) {
logger.Errorf("error generating thumbnail for image: %s", err.Error()) logger.Errorf("error generating thumbnail for %s: %v", f.Path, err)
var exitErr *exec.ExitError var exitErr *exec.ExitError
if errors.As(err, &exitErr) { if errors.As(err, &exitErr) {

View file

@@ -2,8 +2,10 @@ package config
type ScanMetadataOptions struct { type ScanMetadataOptions struct {
// Set name, date, details from metadata (if present) // Set name, date, details from metadata (if present)
// Deprecated: not implemented
UseFileMetadata bool `json:"useFileMetadata"` UseFileMetadata bool `json:"useFileMetadata"`
// Strip file extension from title // Strip file extension from title
// Deprecated: not implemented
StripFileExtension bool `json:"stripFileExtension"` StripFileExtension bool `json:"stripFileExtension"`
// Generate previews during scan // Generate previews during scan
ScanGeneratePreviews bool `json:"scanGeneratePreviews"` ScanGeneratePreviews bool `json:"scanGeneratePreviews"`

View file

@@ -197,6 +197,8 @@ func initialize() error {
Repository: db.Gallery, Repository: db.Gallery,
ImageFinder: db.Image, ImageFinder: db.Image,
ImageService: instance.ImageService, ImageService: instance.ImageService,
File: db.File,
Folder: db.Folder,
} }
instance.JobManager = initJobManager() instance.JobManager = initJobManager()
@@ -265,15 +267,15 @@ func initialize() error {
return nil return nil
} }
func videoFileFilter(f file.File) bool { func videoFileFilter(ctx context.Context, f file.File) bool {
return isVideo(f.Base().Basename) return isVideo(f.Base().Basename)
} }
func imageFileFilter(f file.File) bool { func imageFileFilter(ctx context.Context, f file.File) bool {
return isImage(f.Base().Basename) return isImage(f.Base().Basename)
} }
func galleryFileFilter(f file.File) bool { func galleryFileFilter(ctx context.Context, f file.File) bool {
return isZip(f.Base().Basename) return isZip(f.Base().Basename)
} }

View file

@@ -86,6 +86,7 @@ type SceneService interface {
type ImageService interface { type ImageService interface {
Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error
DestroyZipImages(ctx context.Context, zipFile file.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error)
} }
type GalleryService interface { type GalleryService interface {

View file

@@ -60,11 +60,9 @@ type cleanFilter struct {
func newCleanFilter(c *config.Instance) *cleanFilter { func newCleanFilter(c *config.Instance) *cleanFilter {
return &cleanFilter{ return &cleanFilter{
scanFilter: scanFilter{ scanFilter: scanFilter{
extensionConfig: newExtensionConfig(c),
stashPaths: c.GetStashPaths(), stashPaths: c.GetStashPaths(),
generatedPath: c.GetGeneratedPath(), generatedPath: c.GetGeneratedPath(),
vidExt: c.GetVideoExtensions(),
imgExt: c.GetImageExtensions(),
zipExt: c.GetGalleryExtensions(),
videoExcludeRegex: generateRegexps(c.GetExcludes()), videoExcludeRegex: generateRegexps(c.GetExcludes()),
imageExcludeRegex: generateRegexps(c.GetImageExcludes()), imageExcludeRegex: generateRegexps(c.GetImageExcludes()),
}, },

View file

@@ -51,11 +51,17 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) {
const taskQueueSize = 200000 const taskQueueSize = 200000
taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, instance.Config.GetParallelTasksWithAutoDetection()) taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, instance.Config.GetParallelTasksWithAutoDetection())
var minModTime time.Time
if j.input.Filter != nil && j.input.Filter.MinModTime != nil {
minModTime = *j.input.Filter.MinModTime
}
j.scanner.Scan(ctx, getScanHandlers(j.input, taskQueue, progress), file.ScanOptions{ j.scanner.Scan(ctx, getScanHandlers(j.input, taskQueue, progress), file.ScanOptions{
Paths: paths, Paths: paths,
ScanFilters: []file.PathFilter{newScanFilter(instance.Config)}, ScanFilters: []file.PathFilter{newScanFilter(instance.Config, minModTime)},
ZipFileExtensions: instance.Config.GetGalleryExtensions(), ZipFileExtensions: instance.Config.GetGalleryExtensions(),
ParallelTasks: instance.Config.GetParallelTasksWithAutoDetection(), ParallelTasks: instance.Config.GetParallelTasksWithAutoDetection(),
HandlerRequiredFilters: []file.Filter{newHandlerRequiredFilter(instance.Config)},
}, progress) }, progress)
taskQueue.Close() taskQueue.Close()
@@ -71,25 +77,92 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) {
j.subscriptions.notify() j.subscriptions.notify()
} }
type scanFilter struct { type extensionConfig struct {
stashPaths []*config.StashConfig vidExt []string
generatedPath string imgExt []string
vidExt []string zipExt []string
imgExt []string
zipExt []string
videoExcludeRegex []*regexp.Regexp
imageExcludeRegex []*regexp.Regexp
} }
func newScanFilter(c *config.Instance) *scanFilter { func newExtensionConfig(c *config.Instance) extensionConfig {
return extensionConfig{
vidExt: c.GetVideoExtensions(),
imgExt: c.GetImageExtensions(),
zipExt: c.GetGalleryExtensions(),
}
}
type fileCounter interface {
CountByFileID(ctx context.Context, fileID file.ID) (int, error)
}
// handlerRequiredFilter returns true if a File's handler needs to be executed despite the file not being updated.
type handlerRequiredFilter struct {
extensionConfig
SceneFinder fileCounter
ImageFinder fileCounter
GalleryFinder fileCounter
}
func newHandlerRequiredFilter(c *config.Instance) *handlerRequiredFilter {
db := instance.Database
return &handlerRequiredFilter{
extensionConfig: newExtensionConfig(c),
SceneFinder: db.Scene,
ImageFinder: db.Image,
GalleryFinder: db.Gallery,
}
}
func (f *handlerRequiredFilter) Accept(ctx context.Context, ff file.File) bool {
path := ff.Base().Path
isVideoFile := fsutil.MatchExtension(path, f.vidExt)
isImageFile := fsutil.MatchExtension(path, f.imgExt)
isZipFile := fsutil.MatchExtension(path, f.zipExt)
var counter fileCounter
switch {
case isVideoFile:
// return true if there are no scenes associated
counter = f.SceneFinder
case isImageFile:
counter = f.ImageFinder
case isZipFile:
counter = f.GalleryFinder
}
if counter == nil {
return false
}
n, err := counter.CountByFileID(ctx, ff.Base().ID)
if err != nil {
// just ignore
return false
}
// execute handler if there are no related objects
return n == 0
}
type scanFilter struct {
extensionConfig
stashPaths []*config.StashConfig
generatedPath string
videoExcludeRegex []*regexp.Regexp
imageExcludeRegex []*regexp.Regexp
minModTime time.Time
}
func newScanFilter(c *config.Instance, minModTime time.Time) *scanFilter {
return &scanFilter{ return &scanFilter{
extensionConfig: newExtensionConfig(c),
stashPaths: c.GetStashPaths(), stashPaths: c.GetStashPaths(),
generatedPath: c.GetGeneratedPath(), generatedPath: c.GetGeneratedPath(),
vidExt: c.GetVideoExtensions(),
imgExt: c.GetImageExtensions(),
zipExt: c.GetGalleryExtensions(),
videoExcludeRegex: generateRegexps(c.GetExcludes()), videoExcludeRegex: generateRegexps(c.GetExcludes()),
imageExcludeRegex: generateRegexps(c.GetImageExcludes()), imageExcludeRegex: generateRegexps(c.GetImageExcludes()),
minModTime: minModTime,
} }
} }
@@ -98,6 +171,11 @@ func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo)
return false return false
} }
// exit early on cutoff
if info.Mode().IsRegular() && info.ModTime().Before(f.minModTime) {
return false
}
isVideoFile := fsutil.MatchExtension(path, f.vidExt) isVideoFile := fsutil.MatchExtension(path, f.vidExt)
isImageFile := fsutil.MatchExtension(path, f.imgExt) isImageFile := fsutil.MatchExtension(path, f.imgExt)
isZipFile := fsutil.MatchExtension(path, f.zipExt) isZipFile := fsutil.MatchExtension(path, f.zipExt)

View file

@@ -180,6 +180,43 @@ func Destroy(ctx context.Context, destroyer Destroyer, f File, fileDeleter *Dele
return err return err
} }
// don't delete files in zip files
if deleteFile && f.Base().ZipFileID != nil {
if err := fileDeleter.Files([]string{f.Base().Path}); err != nil {
return err
}
}
return nil
}
type FolderGetterDestroyer interface {
FolderGetter
FolderDestroyer
}
type ZipDestroyer struct {
FileDestroyer Destroyer
FolderDestroyer FolderGetterDestroyer
}
func (d *ZipDestroyer) DestroyZip(ctx context.Context, f File, fileDeleter *Deleter, deleteFile bool) error {
// destroy contained folders
folders, err := d.FolderDestroyer.FindByZipFileID(ctx, f.Base().ID)
if err != nil {
return err
}
for _, ff := range folders {
if err := d.FolderDestroyer.Destroy(ctx, ff.ID); err != nil {
return err
}
}
if err := d.FileDestroyer.Destroy(ctx, f.Base().ID); err != nil {
return err
}
if deleteFile { if deleteFile {
if err := fileDeleter.Files([]string{f.Base().Path}); err != nil { if err := fileDeleter.Files([]string{f.Base().Path}); err != nil {
return err return err

View file

@@ -198,7 +198,7 @@ type FilteredDecorator struct {
// Decorate runs the decorator if the filter accepts the file. // Decorate runs the decorator if the filter accepts the file.
func (d *FilteredDecorator) Decorate(ctx context.Context, fs FS, f File) (File, error) { func (d *FilteredDecorator) Decorate(ctx context.Context, fs FS, f File) (File, error) {
if d.Accept(f) { if d.Accept(ctx, f) {
return d.Decorator.Decorate(ctx, fs, f) return d.Decorator.Decorate(ctx, fs, f)
} }
return f, nil return f, nil

View file

@@ -18,13 +18,13 @@ func (pff PathFilterFunc) Accept(path string) bool {
// Filter provides a filter function for Files. // Filter provides a filter function for Files.
type Filter interface { type Filter interface {
Accept(f File) bool Accept(ctx context.Context, f File) bool
} }
type FilterFunc func(f File) bool type FilterFunc func(ctx context.Context, f File) bool
func (ff FilterFunc) Accept(f File) bool { func (ff FilterFunc) Accept(ctx context.Context, f File) bool {
return ff(f) return ff(ctx, f)
} }
// Handler provides a handler for Files. // Handler provides a handler for Files.
@@ -40,7 +40,7 @@ type FilteredHandler struct {
// Handle runs the handler if the filter accepts the file. // Handle runs the handler if the filter accepts the file.
func (h *FilteredHandler) Handle(ctx context.Context, f File) error { func (h *FilteredHandler) Handle(ctx context.Context, f File) error {
if h.Accept(f) { if h.Accept(ctx, f) {
return h.Handler.Handle(ctx, f) return h.Handler.Handle(ctx, f)
} }
return nil return nil

View file

@@ -100,6 +100,9 @@ type ScanOptions struct {
// ScanFilters are used to determine if a file should be scanned. // ScanFilters are used to determine if a file should be scanned.
ScanFilters []PathFilter ScanFilters []PathFilter
// HandlerRequiredFilters are used to determine if an unchanged file needs to be handled
HandlerRequiredFilters []Filter
ParallelTasks int ParallelTasks int
} }
@@ -616,8 +619,15 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {
baseFile.SetFingerprints(fp) baseFile.SetFingerprints(fp)
file, err := s.fireDecorators(ctx, f.fs, baseFile)
if err != nil {
s.incrementProgress()
return nil, err
}
// determine if the file is renamed from an existing file in the store // determine if the file is renamed from an existing file in the store
renamed, err := s.handleRename(ctx, baseFile, fp) // do this after decoration so that missing fields can be populated
renamed, err := s.handleRename(ctx, file, fp)
if err != nil { if err != nil {
s.incrementProgress() s.incrementProgress()
return nil, err return nil, err
@@ -627,15 +637,8 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {
return renamed, nil return renamed, nil
} }
file, err := s.fireDecorators(ctx, f.fs, baseFile)
if err != nil {
s.incrementProgress()
return nil, err
}
// if not renamed, queue file for creation // if not renamed, queue file for creation
if err := s.queueDBOperation(ctx, path, func(ctx context.Context) error { if err := s.queueDBOperation(ctx, path, func(ctx context.Context) error {
logger.Infof("%s doesn't exist. Creating new file entry...", path)
if err := s.Repository.Create(ctx, file); err != nil { if err := s.Repository.Create(ctx, file); err != nil {
return fmt.Errorf("creating file %q: %w", path, err) return fmt.Errorf("creating file %q: %w", path, err)
} }
@@ -733,7 +736,7 @@ func (s *scanJob) getFileFS(f *BaseFile) (FS, error) {
return fs.OpenZip(zipPath) return fs.OpenZip(zipPath)
} }
func (s *scanJob) handleRename(ctx context.Context, f *BaseFile, fp []Fingerprint) (File, error) { func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (File, error) {
var others []File var others []File
for _, tfp := range fp { for _, tfp := range fp {
@@ -761,36 +764,48 @@ func (s *scanJob) handleRename(ctx context.Context, f *BaseFile, fp []Fingerprin
} }
n := len(missing) n := len(missing)
switch { if n == 0 {
case n == 1: // no missing files, not a rename
// assume does not exist, update existing file
other := missing[0]
otherBase := other.Base()
logger.Infof("%s moved to %s. Updating path...", otherBase.Path, f.Path)
f.ID = otherBase.ID
f.CreatedAt = otherBase.CreatedAt
f.Fingerprints = otherBase.Fingerprints
*otherBase = *f
if err := s.queueDBOperation(ctx, f.Path, func(ctx context.Context) error {
if err := s.Repository.Update(ctx, other); err != nil {
return fmt.Errorf("updating file for rename %q: %w", f.Path, err)
}
return nil
}); err != nil {
return nil, err
}
return other, nil
case n > 1:
// multiple candidates
// TODO - mark all as missing and just create a new file
return nil, nil return nil, nil
} }
return nil, nil // assume does not exist, update existing file
// it's possible that there may be multiple missing files.
// just use the first one to rename.
other := missing[0]
otherBase := other.Base()
fBase := f.Base()
logger.Infof("%s moved to %s. Updating path...", otherBase.Path, fBase.Path)
fBase.ID = otherBase.ID
fBase.CreatedAt = otherBase.CreatedAt
fBase.Fingerprints = otherBase.Fingerprints
if err := s.queueDBOperation(ctx, fBase.Path, func(ctx context.Context) error {
if err := s.Repository.Update(ctx, f); err != nil {
return fmt.Errorf("updating file for rename %q: %w", fBase.Path, err)
}
return nil
}); err != nil {
return nil, err
}
return f, nil
}
func (s *scanJob) isHandlerRequired(ctx context.Context, f File) bool {
accept := len(s.options.HandlerRequiredFilters) == 0
for _, filter := range s.options.HandlerRequiredFilters {
// accept if any filter accepts the file
if filter.Accept(ctx, f) {
accept = true
break
}
}
return accept
} }
// returns a file only if it was updated // returns a file only if it was updated
@@ -802,7 +817,31 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File)
updated := !fileModTime.Equal(base.ModTime) updated := !fileModTime.Equal(base.ModTime)
if !updated { if !updated {
s.incrementProgress() handlerRequired := false
if err := s.withDB(ctx, func(ctx context.Context) error {
// check if the handler needs to be run
handlerRequired = s.isHandlerRequired(ctx, existing)
return nil
}); err != nil {
return nil, err
}
if !handlerRequired {
s.incrementProgress()
return nil, nil
}
if err := s.queueDBOperation(ctx, path, func(ctx context.Context) error {
if err := s.fireHandlers(ctx, existing); err != nil {
return err
}
s.incrementProgress()
return nil
}); err != nil {
return nil, err
}
return nil, nil return nil, nil
} }

View file

@@ -3,6 +3,7 @@ package gallery
import ( import (
"context" "context"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
@@ -10,12 +11,8 @@ import (
func (s *Service) Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) { func (s *Service) Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) {
var imgsDestroyed []*models.Image var imgsDestroyed []*models.Image
// TODO - we currently destroy associated files so that they will be rescanned.
// A better way would be to keep the file entries in the database, and recreate
// associated objects during the scan process if there are none already.
// if this is a zip-based gallery, delete the images as well first // if this is a zip-based gallery, delete the images as well first
zipImgsDestroyed, err := s.destroyZipImages(ctx, i, fileDeleter, deleteGenerated, deleteFile) zipImgsDestroyed, err := s.destroyZipFileImages(ctx, i, fileDeleter, deleteGenerated, deleteFile)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -42,9 +39,14 @@ func (s *Service) Destroy(ctx context.Context, i *models.Gallery, fileDeleter *i
return imgsDestroyed, nil return imgsDestroyed, nil
} }
func (s *Service) destroyZipImages(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) { func (s *Service) destroyZipFileImages(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) {
var imgsDestroyed []*models.Image var imgsDestroyed []*models.Image
destroyer := &file.ZipDestroyer{
FileDestroyer: s.File,
FolderDestroyer: s.Folder,
}
// for zip-based galleries, delete the images as well first // for zip-based galleries, delete the images as well first
for _, f := range i.Files { for _, f := range i.Files {
// only do this where there are no other galleries related to the file // only do this where there are no other galleries related to the file
@@ -58,21 +60,15 @@ func (s *Service) destroyZipImages(ctx context.Context, i *models.Gallery, fileD
continue continue
} }
imgs, err := s.ImageFinder.FindByZipFileID(ctx, f.Base().ID) thisDestroyed, err := s.ImageService.DestroyZipImages(ctx, f, fileDeleter, deleteGenerated)
if err != nil { if err != nil {
return nil, err return nil, err
} }
for _, img := range imgs { imgsDestroyed = append(imgsDestroyed, thisDestroyed...)
if err := s.ImageService.Destroy(ctx, img, fileDeleter, deleteGenerated, false); err != nil {
return nil, err
}
imgsDestroyed = append(imgsDestroyed, img)
}
if deleteFile { if deleteFile {
if err := fileDeleter.Files([]string{f.Base().Path}); err != nil { if err := destroyer.DestroyZip(ctx, f, fileDeleter.Deleter, deleteFile); err != nil {
return nil, err return nil, err
} }
} }

View file

@@ -64,8 +64,10 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File) error {
UpdatedAt: now, UpdatedAt: now,
} }
logger.Infof("%s doesn't exist. Creating new gallery...", f.Base().Path)
if err := h.CreatorUpdater.Create(ctx, newGallery, []file.ID{baseFile.ID}); err != nil { if err := h.CreatorUpdater.Create(ctx, newGallery, []file.ID{baseFile.ID}); err != nil {
return fmt.Errorf("creating new image: %w", err) return fmt.Errorf("creating new gallery: %w", err)
} }
h.PluginCache.ExecutePostHooks(ctx, newGallery.ID, plugin.GalleryCreatePost, nil, nil) h.PluginCache.ExecutePostHooks(ctx, newGallery.ID, plugin.GalleryCreatePost, nil, nil)

View file

@@ -8,8 +8,12 @@ import (
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
type Repository interface { type FinderByFile interface {
FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error)
}
type Repository interface {
FinderByFile
Destroy(ctx context.Context, id int) error Destroy(ctx context.Context, id int) error
} }
@@ -20,10 +24,13 @@ type ImageFinder interface {
type ImageService interface { type ImageService interface {
Destroy(ctx context.Context, i *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error Destroy(ctx context.Context, i *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error
DestroyZipImages(ctx context.Context, zipFile file.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error)
} }
type Service struct { type Service struct {
Repository Repository Repository Repository
ImageFinder ImageFinder ImageFinder ImageFinder
ImageService ImageService ImageService ImageService
File file.Store
Folder file.FolderStore
} }

View file

@@ -33,12 +33,37 @@ func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error {
// Destroy destroys an image, optionally marking the file and generated files for deletion. // Destroy destroys an image, optionally marking the file and generated files for deletion.
func (s *Service) Destroy(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error { func (s *Service) Destroy(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error {
// TODO - we currently destroy associated files so that they will be rescanned. return s.destroyImage(ctx, i, fileDeleter, deleteGenerated, deleteFile)
// A better way would be to keep the file entries in the database, and recreate }
// associated objects during the scan process if there are none already.
if err := s.destroyFiles(ctx, i, fileDeleter, deleteFile); err != nil { // DestroyZipImages destroys all images in zip, optionally marking the files and generated files for deletion.
return err // Returns a slice of images that were destroyed.
func (s *Service) DestroyZipImages(ctx context.Context, zipFile file.File, fileDeleter *FileDeleter, deleteGenerated bool) ([]*models.Image, error) {
var imgsDestroyed []*models.Image
imgs, err := s.Repository.FindByZipFileID(ctx, zipFile.Base().ID)
if err != nil {
return nil, err
}
for _, img := range imgs {
const deleteFileInZip = false
if err := s.destroyImage(ctx, img, fileDeleter, deleteGenerated, deleteFileInZip); err != nil {
return nil, err
}
imgsDestroyed = append(imgsDestroyed, img)
}
return imgsDestroyed, nil
}
// Destroy destroys an image, optionally marking the file and generated files for deletion.
func (s *Service) destroyImage(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error {
if deleteFile {
if err := s.deleteFiles(ctx, i, fileDeleter); err != nil {
return err
}
} }
if deleteGenerated { if deleteGenerated {
@@ -50,7 +75,8 @@ func (s *Service) Destroy(ctx context.Context, i *models.Image, fileDeleter *Fil
return s.Repository.Destroy(ctx, i.ID) return s.Repository.Destroy(ctx, i.ID)
} }
func (s *Service) destroyFiles(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteFile bool) error { // deleteFiles deletes files for the image from the database and file system, if they are not in use by other images
func (s *Service) deleteFiles(ctx context.Context, i *models.Image, fileDeleter *FileDeleter) error {
for _, f := range i.Files { for _, f := range i.Files {
// only delete files where there is no other associated image // only delete files where there is no other associated image
otherImages, err := s.Repository.FindByFileID(ctx, f.ID) otherImages, err := s.Repository.FindByFileID(ctx, f.ID)
@@ -64,7 +90,8 @@ func (s *Service) destroyFiles(ctx context.Context, i *models.Image, fileDeleter
} }
// don't delete files in zip archives // don't delete files in zip archives
if deleteFile && f.ZipFileID == nil { const deleteFile = true
if f.ZipFileID == nil {
if err := file.Destroy(ctx, s.File, f, fileDeleter.Deleter, deleteFile); err != nil { if err := file.Destroy(ctx, s.File, f, fileDeleter.Deleter, deleteFile); err != nil {
return err return err
} }

View file

@@ -115,6 +115,8 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File) error {
} }
} }
logger.Infof("%s doesn't exist. Creating new image...", f.Base().Path)
if err := h.CreatorUpdater.Create(ctx, &models.ImageCreateInput{ if err := h.CreatorUpdater.Create(ctx, &models.ImageCreateInput{
Image: newImage, Image: newImage,
FileIDs: []file.ID{imageFile.ID}, FileIDs: []file.ID{imageFile.ID},

View file

@@ -9,6 +9,7 @@ import (
type FinderByFile interface { type FinderByFile interface {
FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Image, error) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Image, error)
FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error)
} }
type Repository interface { type Repository interface {

View file

@@ -140,12 +140,10 @@ func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter
} }
} }
// TODO - we currently destroy associated files so that they will be rescanned. if deleteFile {
// A better way would be to keep the file entries in the database, and recreate if err := s.deleteFiles(ctx, scene, fileDeleter); err != nil {
// associated objects during the scan process if there are none already. return err
}
if err := s.destroyFiles(ctx, scene, fileDeleter, deleteFile); err != nil {
return err
} }
if deleteGenerated { if deleteGenerated {
@@ -161,7 +159,8 @@ func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter
return nil return nil
} }
func (s *Service) destroyFiles(ctx context.Context, scene *models.Scene, fileDeleter *FileDeleter, deleteFile bool) error { // deleteFiles deletes files from the database and file system
func (s *Service) deleteFiles(ctx context.Context, scene *models.Scene, fileDeleter *FileDeleter) error {
for _, f := range scene.Files { for _, f := range scene.Files {
// only delete files where there is no other associated scene // only delete files where there is no other associated scene
otherScenes, err := s.Repository.FindByFileID(ctx, f.ID) otherScenes, err := s.Repository.FindByFileID(ctx, f.ID)
@@ -174,12 +173,13 @@ func (s *Service) destroyFiles(ctx context.Context, scene *models.Scene, fileDel
continue continue
} }
const deleteFile = true
if err := file.Destroy(ctx, s.File, f, fileDeleter.Deleter, deleteFile); err != nil { if err := file.Destroy(ctx, s.File, f, fileDeleter.Deleter, deleteFile); err != nil {
return err return err
} }
// don't delete files in zip archives // don't delete files in zip archives
if deleteFile && f.ZipFileID == nil { if f.ZipFileID == nil {
funscriptPath := video.GetFunscriptPath(f.Path) funscriptPath := video.GetFunscriptPath(f.Path)
funscriptExists, _ := fsutil.FileExists(funscriptPath) funscriptExists, _ := fsutil.FileExists(funscriptPath)
if funscriptExists { if funscriptExists {

View file

@@ -88,6 +88,8 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File) error {
UpdatedAt: now, UpdatedAt: now,
} }
logger.Infof("%s doesn't exist. Creating new scene...", f.Base().Path)
if err := h.CreatorUpdater.Create(ctx, newScene, []file.ID{videoFile.ID}); err != nil { if err := h.CreatorUpdater.Create(ctx, newScene, []file.ID{videoFile.ID}); err != nil {
return fmt.Errorf("creating new scene: %w", err) return fmt.Errorf("creating new scene: %w", err)
} }

View file

@@ -736,151 +736,139 @@ func (c Client) GetUser(ctx context.Context) (*graphql.Me, error) {
return c.client.Me(ctx) return c.client.Me(ctx)
} }
func (c Client) SubmitSceneDraft(ctx context.Context, sceneID int, endpoint string, imagePath string) (*string, error) { func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpoint string, imagePath string) (*string, error) {
draft := graphql.SceneDraftInput{} draft := graphql.SceneDraftInput{}
var image *os.File var image io.Reader
if err := txn.WithTxn(ctx, c.txnManager, func(ctx context.Context) error { r := c.repository
r := c.repository pqb := r.Performer
qb := r.Scene sqb := r.Studio
pqb := r.Performer
sqb := r.Studio
scene, err := qb.Find(ctx, sceneID) if scene.Title != "" {
draft.Title = &scene.Title
}
if scene.Details != "" {
draft.Details = &scene.Details
}
if scene.URL != "" && len(strings.TrimSpace(scene.URL)) > 0 {
url := strings.TrimSpace(scene.URL)
draft.URL = &url
}
if scene.Date != nil {
v := scene.Date.String()
draft.Date = &v
}
if scene.StudioID != nil {
studio, err := sqb.Find(ctx, int(*scene.StudioID))
if err != nil { if err != nil {
return err return nil, err
}
studioDraft := graphql.DraftEntityInput{
Name: studio.Name.String,
} }
if scene.Title != "" { stashIDs, err := sqb.GetStashIDs(ctx, studio.ID)
draft.Title = &scene.Title
}
if scene.Details != "" {
draft.Details = &scene.Details
}
if scene.URL != "" && len(strings.TrimSpace(scene.URL)) > 0 {
url := strings.TrimSpace(scene.URL)
draft.URL = &url
}
if scene.Date != nil {
v := scene.Date.String()
draft.Date = &v
}
if scene.StudioID != nil {
studio, err := sqb.Find(ctx, int(*scene.StudioID))
if err != nil {
return err
}
studioDraft := graphql.DraftEntityInput{
Name: studio.Name.String,
}
stashIDs, err := sqb.GetStashIDs(ctx, studio.ID)
if err != nil {
return err
}
for _, stashID := range stashIDs {
if stashID.Endpoint == endpoint {
studioDraft.ID = &stashID.StashID
break
}
}
draft.Studio = &studioDraft
}
fingerprints := []*graphql.FingerprintInput{}
duration := scene.Duration()
if oshash := scene.OSHash(); oshash != "" && duration != 0 {
fingerprint := graphql.FingerprintInput{
Hash: oshash,
Algorithm: graphql.FingerprintAlgorithmOshash,
Duration: int(duration),
}
fingerprints = append(fingerprints, &fingerprint)
}
if checksum := scene.Checksum(); checksum != "" && duration != 0 {
fingerprint := graphql.FingerprintInput{
Hash: checksum,
Algorithm: graphql.FingerprintAlgorithmMd5,
Duration: int(duration),
}
fingerprints = append(fingerprints, &fingerprint)
}
if phash := scene.Phash(); phash != 0 && duration != 0 {
fingerprint := graphql.FingerprintInput{
Hash: utils.PhashToString(phash),
Algorithm: graphql.FingerprintAlgorithmPhash,
Duration: int(duration),
}
fingerprints = append(fingerprints, &fingerprint)
}
draft.Fingerprints = fingerprints
scenePerformers, err := pqb.FindBySceneID(ctx, sceneID)
if err != nil { if err != nil {
return err return nil, err
} }
for _, stashID := range stashIDs {
performers := []*graphql.DraftEntityInput{} if stashID.Endpoint == endpoint {
for _, p := range scenePerformers { studioDraft.ID = &stashID.StashID
performerDraft := graphql.DraftEntityInput{
Name: p.Name.String,
}
stashIDs, err := pqb.GetStashIDs(ctx, p.ID)
if err != nil {
return err
}
for _, stashID := range stashIDs {
if stashID.Endpoint == endpoint {
performerDraft.ID = &stashID.StashID
break
}
}
performers = append(performers, &performerDraft)
}
draft.Performers = performers
var tags []*graphql.DraftEntityInput
sceneTags, err := r.Tag.FindBySceneID(ctx, scene.ID)
if err != nil {
return err
}
for _, tag := range sceneTags {
tags = append(tags, &graphql.DraftEntityInput{Name: tag.Name})
}
draft.Tags = tags
exists, _ := fsutil.FileExists(imagePath)
if exists {
file, err := os.Open(imagePath)
if err == nil {
image = file
}
}
stashIDs := scene.StashIDs
var stashID *string
for _, v := range stashIDs {
if v.Endpoint == endpoint {
vv := v.StashID
stashID = &vv
break break
} }
} }
draft.ID = stashID draft.Studio = &studioDraft
}
return nil fingerprints := []*graphql.FingerprintInput{}
}); err != nil { duration := scene.Duration()
if oshash := scene.OSHash(); oshash != "" && duration != 0 {
fingerprint := graphql.FingerprintInput{
Hash: oshash,
Algorithm: graphql.FingerprintAlgorithmOshash,
Duration: int(duration),
}
fingerprints = append(fingerprints, &fingerprint)
}
if checksum := scene.Checksum(); checksum != "" && duration != 0 {
fingerprint := graphql.FingerprintInput{
Hash: checksum,
Algorithm: graphql.FingerprintAlgorithmMd5,
Duration: int(duration),
}
fingerprints = append(fingerprints, &fingerprint)
}
if phash := scene.Phash(); phash != 0 && duration != 0 {
fingerprint := graphql.FingerprintInput{
Hash: utils.PhashToString(phash),
Algorithm: graphql.FingerprintAlgorithmPhash,
Duration: int(duration),
}
fingerprints = append(fingerprints, &fingerprint)
}
draft.Fingerprints = fingerprints
scenePerformers, err := pqb.FindBySceneID(ctx, scene.ID)
if err != nil {
return nil, err return nil, err
} }
performers := []*graphql.DraftEntityInput{}
for _, p := range scenePerformers {
performerDraft := graphql.DraftEntityInput{
Name: p.Name.String,
}
stashIDs, err := pqb.GetStashIDs(ctx, p.ID)
if err != nil {
return nil, err
}
for _, stashID := range stashIDs {
if stashID.Endpoint == endpoint {
performerDraft.ID = &stashID.StashID
break
}
}
performers = append(performers, &performerDraft)
}
draft.Performers = performers
var tags []*graphql.DraftEntityInput
sceneTags, err := r.Tag.FindBySceneID(ctx, scene.ID)
if err != nil {
return nil, err
}
for _, tag := range sceneTags {
tags = append(tags, &graphql.DraftEntityInput{Name: tag.Name})
}
draft.Tags = tags
exists, _ := fsutil.FileExists(imagePath)
if exists {
file, err := os.Open(imagePath)
if err == nil {
image = file
}
}
stashIDs := scene.StashIDs
var stashID *string
for _, v := range stashIDs {
if v.Endpoint == endpoint {
vv := v.StashID
stashID = &vv
break
}
}
draft.ID = stashID
var id *string var id *string
var ret graphql.SubmitSceneDraft var ret graphql.SubmitSceneDraft
err := c.submitDraft(ctx, graphql.SubmitSceneDraftDocument, draft, image, &ret) err = c.submitDraft(ctx, graphql.SubmitSceneDraftDocument, draft, image, &ret)
id = ret.SubmitSceneDraft.ID id = ret.SubmitSceneDraft.ID
return id, err return id, err

View file

@ -388,6 +388,13 @@ func (qb *GalleryStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*mo
return ret, nil return ret, nil
} }
func (qb *GalleryStore) CountByFileID(ctx context.Context, fileID file.ID) (int, error) {
joinTable := galleriesFilesJoinTable
q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(fileIDColumn).Eq(fileID))
return count(ctx, q)
}
func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error) { func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error) {
table := qb.queryTable() table := qb.queryTable()

View file

@ -379,6 +379,13 @@ func (qb *ImageStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*mode
return ret, nil return ret, nil
} }
func (qb *ImageStore) CountByFileID(ctx context.Context, fileID file.ID) (int, error) {
joinTable := imagesFilesJoinTable
q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(fileIDColumn).Eq(fileID))
return count(ctx, q)
}
func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Image, error) { func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Image, error) {
table := imagesQueryTable table := imagesQueryTable

View file

@ -134,7 +134,6 @@ func (m *schema32Migrator) migrateFiles(ctx context.Context) error {
limit = 1000 limit = 1000
logEvery = 10000 logEvery = 10000
) )
offset := 0
result := struct { result := struct {
Count int `db:"count"` Count int `db:"count"`
@ -146,10 +145,19 @@ func (m *schema32Migrator) migrateFiles(ctx context.Context) error {
logger.Infof("Migrating %d files...", result.Count) logger.Infof("Migrating %d files...", result.Count)
lastID := 0
count := 0
for { for {
gotSome := false gotSome := false
query := fmt.Sprintf("SELECT `id`, `basename` FROM `files` ORDER BY `id` LIMIT %d OFFSET %d", limit, offset) // using offset for this is slow. Save the last id and filter by that instead
query := "SELECT `id`, `basename` FROM `files` "
if lastID != 0 {
query += fmt.Sprintf("WHERE `id` > %d ", lastID)
}
query += fmt.Sprintf("ORDER BY `id` LIMIT %d", limit)
if err := m.withTxn(ctx, func(tx *sqlx.Tx) error { if err := m.withTxn(ctx, func(tx *sqlx.Tx) error {
rows, err := m.db.Query(query) rows, err := m.db.Query(query)
@ -188,6 +196,9 @@ func (m *schema32Migrator) migrateFiles(ctx context.Context) error {
return err return err
} }
} }
lastID = id
count++
} }
return rows.Err() return rows.Err()
@ -199,10 +210,8 @@ func (m *schema32Migrator) migrateFiles(ctx context.Context) error {
break break
} }
offset += limit if count%logEvery == 0 {
logger.Infof("Migrated %d files", count)
if offset%logEvery == 0 {
logger.Infof("Migrated %d files", offset)
} }
} }

View file

@ -470,6 +470,13 @@ func (qb *SceneStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*mode
return ret, nil return ret, nil
} }
func (qb *SceneStore) CountByFileID(ctx context.Context, fileID file.ID) (int, error) {
joinTable := scenesFilesJoinTable
q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(fileIDColumn).Eq(fileID))
return count(ctx, q)
}
func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Scene, error) { func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Scene, error) {
table := qb.queryTable() table := qb.queryTable()

View file

@ -12,8 +12,6 @@ export const ScanOptions: React.FC<IScanOptions> = ({
setOptions: setOptionsState, setOptions: setOptionsState,
}) => { }) => {
const { const {
useFileMetadata,
stripFileExtension,
scanGeneratePreviews, scanGeneratePreviews,
scanGenerateImagePreviews, scanGenerateImagePreviews,
scanGenerateSprites, scanGenerateSprites,
@ -63,18 +61,6 @@ export const ScanOptions: React.FC<IScanOptions> = ({
headingID="config.tasks.generate_thumbnails_during_scan" headingID="config.tasks.generate_thumbnails_during_scan"
onChange={(v) => setOptions({ scanGenerateThumbnails: v })} onChange={(v) => setOptions({ scanGenerateThumbnails: v })}
/> />
<BooleanSetting
id="strip-file-extension"
checked={stripFileExtension ?? false}
headingID="config.tasks.dont_include_file_extension_as_part_of_the_title"
onChange={(v) => setOptions({ stripFileExtension: v })}
/>
<BooleanSetting
id="use-file-metadata"
checked={useFileMetadata ?? false}
headingID="config.tasks.set_name_date_details_from_metadata_if_present"
onChange={(v) => setOptions({ useFileMetadata: v })}
/>
</> </>
); );
}; };

View file

@ -10,8 +10,6 @@ Please report all issues to the following Github issue: https://github.com/stash
* Import/export functionality is currently disabled. Needs further design. * Import/export functionality is currently disabled. Needs further design.
* Missing covers are not currently regenerated. Need to consider further, especially around scene cover redesign. * Missing covers are not currently regenerated. Need to consider further, especially around scene cover redesign.
* Deleting galleries is currently slow. * Deleting galleries is currently slow.
* Don't include file extension as part of the title scan flag is not supported.
* Set name, date, details from embedded file metadata scan flag is not supported.
### ✨ New Features ### ✨ New Features
* Added support for identical files. Identical files are assigned to the same scene/gallery/image and can be viewed in File Info. ([#2676](https://github.com/stashapp/stash/pull/2676)) * Added support for identical files. Identical files are assigned to the same scene/gallery/image and can be viewed in File Info. ([#2676](https://github.com/stashapp/stash/pull/2676))
@ -19,4 +17,6 @@ Please report all issues to the following Github issue: https://github.com/stash
* Added release notes dialog. ([#2726](https://github.com/stashapp/stash/pull/2726)) * Added release notes dialog. ([#2726](https://github.com/stashapp/stash/pull/2726))
### 🎨 Improvements ### 🎨 Improvements
* Object titles are now displayed as the file basename if the title is not explicitly set. The `Don't include file extension as part of the title` scan flag is no longer supported.
* `Set name, date, details from embedded file metadata` scan flag is no longer supported. This functionality may be implemented as a built-in scraper in the future.
* Moved Changelogs to Settings page. ([#2726](https://github.com/stashapp/stash/pull/2726)) * Moved Changelogs to Settings page. ([#2726](https://github.com/stashapp/stash/pull/2726))

View file

@ -10,10 +10,11 @@ Please report all issues to the following Github issue: https://github.com/stash
* Import/export functionality is currently disabled. Needs further design. * Import/export functionality is currently disabled. Needs further design.
* Missing covers are not currently regenerated. Need to consider further, especially around scene cover redesign. * Missing covers are not currently regenerated. Need to consider further, especially around scene cover redesign.
* Deleting galleries is currently slow. * Deleting galleries is currently slow.
* Don't include file extension as part of the title scan flag is not supported.
* Set name, date, details from embedded file metadata scan flag is not supported.
### Other changes: ### Other changes:
* Added support for filtering and sorting by file count. ([#2744](https://github.com/stashapp/stash/pull/2744)) * Added support for filtering and sorting by file count. ([#2744](https://github.com/stashapp/stash/pull/2744))
* Changelog has been moved from the stats page to a section in the Settings page. * Changelog has been moved from the stats page to a section in the Settings page.
* Object titles are now displayed as the file basename if the title is not explicitly set. The `Don't include file extension as part of the title` scan flag is no longer supported.
* `Set name, date, details from embedded file metadata` scan flag is no longer supported. This functionality may be implemented as a built-in scraper in the future.