From c364346a59e61d5378fecff5ce802711a3f7d8c6 Mon Sep 17 00:00:00 2001 From: DingDongSoLong4 <99329275+DingDongSoLong4@users.noreply.github.com> Date: Fri, 1 Sep 2023 02:39:29 +0200 Subject: [PATCH] Model refactor (#3915) * Add mockery config file * Move basic file/folder structs to models * Fix hack due to import loop * Move file interfaces to models * Move folder interfaces to models * Move scene interfaces to models * Move scene marker interfaces to models * Move image interfaces to models * Move gallery interfaces to models * Move gallery chapter interfaces to models * Move studio interfaces to models * Move movie interfaces to models * Move performer interfaces to models * Move tag interfaces to models * Move autotag interfaces to models * Regenerate mocks --- .mockery.yml | 4 + Makefile | 2 +- cmd/phasher/main.go | 6 +- gqlgen.yml | 30 ++ internal/api/loaders/dataloaders.go | 39 +- internal/api/loaders/fileloader_gen.go | 46 +-- .../api/loaders/galleryfileidsloader_gen.go | 56 +-- .../api/loaders/imagefileidsloader_gen.go | 56 +-- .../api/loaders/scenefileidsloader_gen.go | 56 +-- internal/api/models.go | 7 + internal/api/resolver_model_gallery.go | 46 +-- internal/api/resolver_model_image.go | 73 +--- internal/api/resolver_model_scene.go | 95 ++--- internal/api/resolver_mutation_file.go | 9 +- internal/api/resolver_mutation_gallery.go | 4 +- internal/api/resolver_mutation_image.go | 4 +- internal/api/resolver_mutation_scene.go | 10 +- internal/api/routes_image.go | 6 +- internal/api/routes_movie.go | 2 +- internal/api/routes_performer.go | 2 +- internal/api/routes_scene.go | 21 +- internal/api/routes_studio.go | 3 +- internal/api/routes_tag.go | 3 +- internal/api/server.go | 4 +- internal/api/types.go | 8 - internal/autotag/gallery.go | 15 +- internal/autotag/image.go | 15 +- internal/autotag/integration_test.go | 71 ++-- internal/autotag/performer.go | 12 +- internal/autotag/scene.go | 15 +- internal/autotag/studio.go | 24 +- internal/autotag/tag.go | 12 +- 
internal/autotag/tagger.go | 15 +- internal/dlna/cds.go | 6 +- internal/dlna/dms.go | 5 +- internal/dlna/paging.go | 5 +- internal/dlna/service.go | 3 +- internal/identify/identify.go | 6 +- internal/identify/identify_test.go | 2 +- internal/identify/performer.go | 2 +- internal/identify/scene.go | 21 +- internal/identify/scene_test.go | 6 +- internal/manager/fingerprint.go | 25 +- internal/manager/manager.go | 13 +- internal/manager/repository.go | 50 +-- internal/manager/scene.go | 5 +- internal/manager/task_clean.go | 18 +- internal/manager/task_export.go | 13 +- .../manager/task_generate_clip_preview.go | 3 +- internal/manager/task_generate_markers.go | 3 +- internal/manager/task_generate_phash.go | 11 +- internal/manager/task_identify.go | 2 +- internal/manager/task_import.go | 9 +- internal/manager/task_scan.go | 26 +- pkg/ffmpeg/stream_segmented.go | 15 +- pkg/ffmpeg/stream_transcode.go | 3 +- pkg/file/clean.go | 47 +-- pkg/file/delete.go | 9 +- pkg/file/file.go | 226 +---------- pkg/file/folder.go | 90 +---- pkg/file/folder_rename_detect.go | 25 +- pkg/file/frame.go | 20 - pkg/file/fs.go | 14 +- pkg/file/handler.go | 16 +- pkg/file/image/scan.go | 17 +- pkg/file/image_file.go | 21 -- .../import_file.go => pkg/file/import.go | 64 ++-- pkg/file/move.go | 9 +- pkg/file/scan.go | 130 ++++--- pkg/file/video/caption.go | 11 +- pkg/file/video/scan.go | 9 +- pkg/file/video_file.go | 29 -- pkg/file/walk.go | 12 +- pkg/file/zip.go | 23 +- pkg/gallery/chapter_import.go | 7 +- pkg/gallery/delete.go | 7 +- pkg/gallery/export.go | 9 +- pkg/gallery/export_test.go | 9 +- pkg/gallery/import.go | 32 +- pkg/gallery/import_test.go | 3 +- pkg/gallery/query.go | 23 +- pkg/gallery/scan.go | 33 +- pkg/gallery/service.go | 37 +- pkg/gallery/update.go | 4 +- pkg/hash/videophash/phash.go | 6 +- pkg/image/delete.go | 6 +- pkg/image/export.go | 3 +- pkg/image/export_test.go | 5 +- pkg/image/import.go | 28 +- pkg/image/query.go | 22 +- pkg/image/scan.go | 46 +-- pkg/image/service.go | 
18 +- pkg/image/thumbnail.go | 7 +- pkg/image/update.go | 8 +- pkg/match/cache.go | 6 +- pkg/match/path.go | 38 +- pkg/match/scraped.go | 4 +- pkg/models/file.go | 16 +- pkg/{file => models}/fingerprint.go | 21 +- pkg/{file => models}/fingerprint_test.go | 2 +- pkg/models/fs.go | 27 ++ pkg/models/gallery.go | 43 --- pkg/models/gallery_chapter.go | 21 -- pkg/models/image.go | 47 +-- pkg/models/mocks/FileReaderWriter.go | 350 ++++++++++++++++++ pkg/models/mocks/FolderReaderWriter.go | 193 ++++++++++ pkg/models/mocks/GalleryReaderWriter.go | 233 +++++++++++- pkg/models/mocks/ImageReaderWriter.go | 201 ++++++++++ pkg/models/mocks/SceneMarkerReaderWriter.go | 10 +- pkg/models/mocks/SceneReaderWriter.go | 191 +++++++++- pkg/models/mocks/StudioReaderWriter.go | 41 +- pkg/models/mocks/TagReaderWriter.go | 10 +- pkg/models/mocks/query.go | 4 + pkg/models/model_file.go | 246 ++++++++++++ pkg/models/model_folder.go | 51 +++ pkg/models/model_gallery.go | 16 +- pkg/models/model_image.go | 14 +- pkg/models/model_scene.go | 16 +- pkg/models/movie.go | 36 -- pkg/models/performer.go | 42 --- pkg/models/relationships.go | 50 +-- pkg/models/repository.go | 5 +- pkg/models/repository_file.go | 88 +++++ pkg/models/repository_folder.go | 64 ++++ pkg/models/repository_gallery.go | 91 +++++ pkg/models/repository_gallery_chapter.go | 55 +++ pkg/models/repository_image.go | 92 +++++ pkg/models/repository_movie.go | 86 +++++ pkg/models/repository_performer.go | 98 +++++ pkg/models/repository_scene.go | 115 ++++++ pkg/models/repository_scene_marker.go | 76 ++++ pkg/models/repository_studio.go | 94 +++++ pkg/models/repository_tag.go | 104 ++++++ pkg/models/scene.go | 81 +--- pkg/models/scene_marker.go | 29 -- pkg/models/studio.go | 38 -- pkg/models/tag.go | 48 --- pkg/movie/export.go | 3 +- pkg/movie/import.go | 17 +- pkg/movie/query.go | 10 +- pkg/movie/update.go | 12 - pkg/performer/import.go | 16 +- pkg/performer/query.go | 14 +- pkg/performer/update.go | 13 - pkg/scene/create.go | 3 +- 
pkg/scene/delete.go | 11 +- pkg/scene/export.go | 26 +- pkg/scene/export_test.go | 9 +- pkg/scene/filename_parser.go | 14 +- pkg/scene/hash.go | 7 +- pkg/scene/import.go | 37 +- pkg/scene/marker_import.go | 7 +- pkg/scene/marker_query.go | 10 +- pkg/scene/merge.go | 3 +- pkg/scene/migrate_screenshots.go | 3 +- pkg/scene/query.go | 23 +- pkg/scene/scan.go | 26 +- pkg/scene/service.go | 45 +-- pkg/scene/update.go | 24 +- pkg/scraper/autotag.go | 12 +- pkg/scraper/cache.go | 16 +- pkg/scraper/postprocessing.go | 3 +- pkg/scraper/stashbox/stash_box.go | 32 +- pkg/sqlite/file.go | 151 ++++---- pkg/sqlite/file_test.go | 122 +++--- pkg/sqlite/fingerprint.go | 12 +- pkg/sqlite/folder.go | 52 +-- pkg/sqlite/folder_test.go | 38 +- pkg/sqlite/gallery.go | 33 +- pkg/sqlite/gallery_test.go | 21 +- pkg/sqlite/image.go | 27 +- pkg/sqlite/image_test.go | 46 +-- pkg/sqlite/repository.go | 29 +- pkg/sqlite/scene.go | 39 +- pkg/sqlite/scene_test.go | 29 +- pkg/sqlite/setup_test.go | 79 ++-- pkg/sqlite/table.go | 17 +- pkg/sqlite/values.go | 16 +- pkg/studio/export.go | 8 +- pkg/studio/import.go | 9 +- pkg/studio/query.go | 18 +- pkg/studio/update.go | 19 +- pkg/tag/import.go | 10 +- pkg/tag/query.go | 12 +- pkg/tag/update.go | 15 +- 185 files changed, 3840 insertions(+), 2559 deletions(-) create mode 100644 .mockery.yml delete mode 100644 pkg/file/frame.go delete mode 100644 pkg/file/image_file.go rename internal/manager/import_file.go => pkg/file/import.go (68%) delete mode 100644 pkg/file/video_file.go rename pkg/{file => models}/fingerprint.go (90%) rename pkg/{file => models}/fingerprint_test.go (99%) create mode 100644 pkg/models/fs.go delete mode 100644 pkg/models/gallery_chapter.go create mode 100644 pkg/models/mocks/FileReaderWriter.go create mode 100644 pkg/models/mocks/FolderReaderWriter.go create mode 100644 pkg/models/model_folder.go create mode 100644 pkg/models/repository_file.go create mode 100644 pkg/models/repository_folder.go create mode 100644 
pkg/models/repository_gallery.go create mode 100644 pkg/models/repository_gallery_chapter.go create mode 100644 pkg/models/repository_image.go create mode 100644 pkg/models/repository_movie.go create mode 100644 pkg/models/repository_performer.go create mode 100644 pkg/models/repository_scene.go create mode 100644 pkg/models/repository_scene_marker.go create mode 100644 pkg/models/repository_studio.go create mode 100644 pkg/models/repository_tag.go delete mode 100644 pkg/movie/update.go delete mode 100644 pkg/performer/update.go diff --git a/.mockery.yml b/.mockery.yml new file mode 100644 index 000000000..0dcb289e3 --- /dev/null +++ b/.mockery.yml @@ -0,0 +1,4 @@ +dir: ./pkg/models +name: ".*ReaderWriter" +outpkg: mocks +output: ./pkg/models/mocks diff --git a/Makefile b/Makefile index 701542585..0f6a43cdf 100644 --- a/Makefile +++ b/Makefile @@ -319,7 +319,7 @@ it: # generates test mocks .PHONY: generate-test-mocks generate-test-mocks: - go run github.com/vektra/mockery/v2 --dir ./pkg/models --name '.*ReaderWriter' --outpkg mocks --output ./pkg/models/mocks + go run github.com/vektra/mockery/v2 # runs server # sets the config file to use the local dev config diff --git a/cmd/phasher/main.go b/cmd/phasher/main.go index f4648b74e..99a749192 100644 --- a/cmd/phasher/main.go +++ b/cmd/phasher/main.go @@ -8,8 +8,8 @@ import ( flag "github.com/spf13/pflag" "github.com/stashapp/stash/pkg/ffmpeg" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/hash/videophash" + "github.com/stashapp/stash/pkg/models" ) func customUsage() { @@ -28,8 +28,8 @@ func printPhash(ff *ffmpeg.FFMpeg, ffp ffmpeg.FFProbe, inputfile string, quiet * // videoFile.Path (from BaseFile) // videoFile.Duration // The rest of the struct isn't needed. 
- vf := &file.VideoFile{ - BaseFile: &file.BaseFile{Path: inputfile}, + vf := &models.VideoFile{ + BaseFile: &models.BaseFile{Path: inputfile}, Duration: ffvideoFile.FileDuration, } diff --git a/gqlgen.yml b/gqlgen.yml index d10316572..ec9feab24 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -23,6 +23,12 @@ autobind: models: # Scalars + ID: + model: + - github.com/99designs/gqlgen/graphql.ID + - github.com/99designs/gqlgen/graphql.IntID + - github.com/stashapp/stash/pkg/models.FileID + - github.com/stashapp/stash/pkg/models.FolderID Int64: model: github.com/99designs/gqlgen/graphql.Int64 Timestamp: @@ -33,6 +39,30 @@ models: fields: title: resolver: true + # override models, from internal/api/models.go + BaseFile: + model: github.com/stashapp/stash/internal/api.BaseFile + GalleryFile: + model: github.com/stashapp/stash/internal/api.GalleryFile + fields: + # override fingerprint field + fingerprints: + fieldName: FingerprintSlice + VideoFile: + fields: + # override fingerprint field + fingerprints: + fieldName: FingerprintSlice + # override float fields - #1572 + duration: + fieldName: DurationFinite + frame_rate: + fieldName: FrameRateFinite + ImageFile: + fields: + # override fingerprint field + fingerprints: + fieldName: FingerprintSlice # autobind on config causes generation issues BlobsStorageType: model: github.com/stashapp/stash/internal/manager/config.BlobsStorageType diff --git a/internal/api/loaders/dataloaders.go b/internal/api/loaders/dataloaders.go index 30b865632..d98c663a1 100644 --- a/internal/api/loaders/dataloaders.go +++ b/internal/api/loaders/dataloaders.go @@ -1,14 +1,14 @@ -//go:generate go run -mod=vendor github.com/vektah/dataloaden SceneLoader int *github.com/stashapp/stash/pkg/models.Scene -//go:generate go run -mod=vendor github.com/vektah/dataloaden GalleryLoader int *github.com/stashapp/stash/pkg/models.Gallery -//go:generate go run -mod=vendor github.com/vektah/dataloaden ImageLoader int *github.com/stashapp/stash/pkg/models.Image 
-//go:generate go run -mod=vendor github.com/vektah/dataloaden PerformerLoader int *github.com/stashapp/stash/pkg/models.Performer -//go:generate go run -mod=vendor github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio -//go:generate go run -mod=vendor github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag -//go:generate go run -mod=vendor github.com/vektah/dataloaden MovieLoader int *github.com/stashapp/stash/pkg/models.Movie -//go:generate go run -mod=vendor github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/file.ID github.com/stashapp/stash/pkg/file.File -//go:generate go run -mod=vendor github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID -//go:generate go run -mod=vendor github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID -//go:generate go run -mod=vendor github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID +//go:generate go run github.com/vektah/dataloaden SceneLoader int *github.com/stashapp/stash/pkg/models.Scene +//go:generate go run github.com/vektah/dataloaden GalleryLoader int *github.com/stashapp/stash/pkg/models.Gallery +//go:generate go run github.com/vektah/dataloaden ImageLoader int *github.com/stashapp/stash/pkg/models.Image +//go:generate go run github.com/vektah/dataloaden PerformerLoader int *github.com/stashapp/stash/pkg/models.Performer +//go:generate go run github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio +//go:generate go run github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag +//go:generate go run github.com/vektah/dataloaden MovieLoader int *github.com/stashapp/stash/pkg/models.Movie +//go:generate go run github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/models.FileID github.com/stashapp/stash/pkg/models.File +//go:generate go run 
github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID +//go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID +//go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID package loaders @@ -18,7 +18,6 @@ import ( "time" "github.com/stashapp/stash/internal/manager" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) @@ -216,8 +215,8 @@ func (m Middleware) fetchMovies(ctx context.Context) func(keys []int) ([]*models } } -func (m Middleware) fetchFiles(ctx context.Context) func(keys []file.ID) ([]file.File, []error) { - return func(keys []file.ID) (ret []file.File, errs []error) { +func (m Middleware) fetchFiles(ctx context.Context) func(keys []models.FileID) ([]models.File, []error) { + return func(keys []models.FileID) (ret []models.File, errs []error) { err := m.withTxn(ctx, func(ctx context.Context) error { var err error ret, err = m.Repository.File.Find(ctx, keys...) 
@@ -227,8 +226,8 @@ func (m Middleware) fetchFiles(ctx context.Context) func(keys []file.ID) ([]file } } -func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) { - return func(keys []int) (ret [][]file.ID, errs []error) { +func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) { + return func(keys []int) (ret [][]models.FileID, errs []error) { err := m.withTxn(ctx, func(ctx context.Context) error { var err error ret, err = m.Repository.Scene.GetManyFileIDs(ctx, keys) @@ -238,8 +237,8 @@ func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([] } } -func (m Middleware) fetchImagesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) { - return func(keys []int) (ret [][]file.ID, errs []error) { +func (m Middleware) fetchImagesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) { + return func(keys []int) (ret [][]models.FileID, errs []error) { err := m.withTxn(ctx, func(ctx context.Context) error { var err error ret, err = m.Repository.Image.GetManyFileIDs(ctx, keys) @@ -249,8 +248,8 @@ func (m Middleware) fetchImagesFileIDs(ctx context.Context) func(keys []int) ([] } } -func (m Middleware) fetchGalleriesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) { - return func(keys []int) (ret [][]file.ID, errs []error) { +func (m Middleware) fetchGalleriesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) { + return func(keys []int) (ret [][]models.FileID, errs []error) { err := m.withTxn(ctx, func(ctx context.Context) error { var err error ret, err = m.Repository.Gallery.GetManyFileIDs(ctx, keys) diff --git a/internal/api/loaders/fileloader_gen.go b/internal/api/loaders/fileloader_gen.go index 348dcbb7f..6289e7a50 100644 --- a/internal/api/loaders/fileloader_gen.go +++ b/internal/api/loaders/fileloader_gen.go @@ -6,13 +6,13 @@ import ( "sync" "time" - 
"github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" ) // FileLoaderConfig captures the config to create a new FileLoader type FileLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []file.ID) ([]file.File, []error) + Fetch func(keys []models.FileID) ([]models.File, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -33,7 +33,7 @@ func NewFileLoader(config FileLoaderConfig) *FileLoader { // FileLoader batches and caches requests type FileLoader struct { // this method provides the data for the loader - fetch func(keys []file.ID) ([]file.File, []error) + fetch func(keys []models.FileID) ([]models.File, []error) // how long to done before sending a batch wait time.Duration @@ -44,7 +44,7 @@ type FileLoader struct { // INTERNAL // lazily created cache - cache map[file.ID]file.File + cache map[models.FileID]models.File // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -55,26 +55,26 @@ type FileLoader struct { } type fileLoaderBatch struct { - keys []file.ID - data []file.File + keys []models.FileID + data []models.File error []error closing bool done chan struct{} } // Load a File by key, batching and caching will be applied automatically -func (l *FileLoader) Load(key file.ID) (file.File, error) { +func (l *FileLoader) Load(key models.FileID) (models.File, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a File. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *FileLoader) LoadThunk(key file.ID) func() (file.File, error) { +func (l *FileLoader) LoadThunk(key models.FileID) func() (models.File, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (file.File, error) { + return func() (models.File, error) { return it, nil } } @@ -85,10 +85,10 @@ func (l *FileLoader) LoadThunk(key file.ID) func() (file.File, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (file.File, error) { + return func() (models.File, error) { <-batch.done - var data file.File + var data models.File if pos < len(batch.data) { data = batch.data[pos] } @@ -113,14 +113,14 @@ func (l *FileLoader) LoadThunk(key file.ID) func() (file.File, error) { // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *FileLoader) LoadAll(keys []file.ID) ([]file.File, []error) { - results := make([]func() (file.File, error), len(keys)) +func (l *FileLoader) LoadAll(keys []models.FileID) ([]models.File, []error) { + results := make([]func() (models.File, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - files := make([]file.File, len(keys)) + files := make([]models.File, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { files[i], errors[i] = thunk() @@ -131,13 +131,13 @@ func (l *FileLoader) LoadAll(keys []file.ID) ([]file.File, []error) { // LoadAllThunk returns a function that when called will block waiting for a Files. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *FileLoader) LoadAllThunk(keys []file.ID) func() ([]file.File, []error) { - results := make([]func() (file.File, error), len(keys)) +func (l *FileLoader) LoadAllThunk(keys []models.FileID) func() ([]models.File, []error) { + results := make([]func() (models.File, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]file.File, []error) { - files := make([]file.File, len(keys)) + return func() ([]models.File, []error) { + files := make([]models.File, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { files[i], errors[i] = thunk() @@ -149,7 +149,7 @@ func (l *FileLoader) LoadAllThunk(keys []file.ID) func() ([]file.File, []error) // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *FileLoader) Prime(key file.ID, value file.File) bool { +func (l *FileLoader) Prime(key models.FileID, value models.File) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -160,22 +160,22 @@ func (l *FileLoader) Prime(key file.ID, value file.File) bool { } // Clear the value at key from the cache, if it exists -func (l *FileLoader) Clear(key file.ID) { +func (l *FileLoader) Clear(key models.FileID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *FileLoader) unsafeSet(key file.ID, value file.File) { +func (l *FileLoader) unsafeSet(key models.FileID, value models.File) { if l.cache == nil { - l.cache = map[file.ID]file.File{} + l.cache = map[models.FileID]models.File{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *fileLoaderBatch) keyIndex(l *FileLoader, key file.ID) int { +func (b *fileLoaderBatch) keyIndex(l *FileLoader, key models.FileID) int { for i, existingKey := range b.keys { if key == 
existingKey { return i diff --git a/internal/api/loaders/galleryfileidsloader_gen.go b/internal/api/loaders/galleryfileidsloader_gen.go index 808cfbf0f..e3c539036 100644 --- a/internal/api/loaders/galleryfileidsloader_gen.go +++ b/internal/api/loaders/galleryfileidsloader_gen.go @@ -6,13 +6,13 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" ) // GalleryFileIDsLoaderConfig captures the config to create a new GalleryFileIDsLoader type GalleryFileIDsLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []int) ([][]file.ID, []error) + Fetch func(keys []int) ([][]models.FileID, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -33,7 +33,7 @@ func NewGalleryFileIDsLoader(config GalleryFileIDsLoaderConfig) *GalleryFileIDsL // GalleryFileIDsLoader batches and caches requests type GalleryFileIDsLoader struct { // this method provides the data for the loader - fetch func(keys []int) ([][]file.ID, []error) + fetch func(keys []int) ([][]models.FileID, []error) // how long to done before sending a batch wait time.Duration @@ -44,7 +44,7 @@ type GalleryFileIDsLoader struct { // INTERNAL // lazily created cache - cache map[int][]file.ID + cache map[int][]models.FileID // the current batch. 
keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,25 +56,25 @@ type GalleryFileIDsLoader struct { type galleryFileIDsLoaderBatch struct { keys []int - data [][]file.ID + data [][]models.FileID error []error closing bool done chan struct{} } -// Load a ID by key, batching and caching will be applied automatically -func (l *GalleryFileIDsLoader) Load(key int) ([]file.ID, error) { +// Load a FileID by key, batching and caching will be applied automatically +func (l *GalleryFileIDsLoader) Load(key int) ([]models.FileID, error) { return l.LoadThunk(key)() } -// LoadThunk returns a function that when called will block waiting for a ID. +// LoadThunk returns a function that when called will block waiting for a FileID. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { +func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]models.FileID, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { return it, nil } } @@ -85,10 +85,10 @@ func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { <-batch.done - var data []file.ID + var data []models.FileID if pos < len(batch.data) { data = batch.data[pos] } @@ -113,49 +113,49 @@ func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *GalleryFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *GalleryFileIDsLoader) LoadAll(keys []int) ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - iDs := make([][]file.ID, len(keys)) + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } -// LoadAllThunk returns a function that when called will block waiting for a IDs. +// LoadAllThunk returns a function that when called will block waiting for a FileIDs. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *GalleryFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *GalleryFileIDsLoader) LoadAllThunk(keys []int) func() ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([][]file.ID, []error) { - iDs := make([][]file.ID, len(keys)) + return func() ([][]models.FileID, []error) { + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } } // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *GalleryFileIDsLoader) Prime(key int, value []file.ID) bool { +func (l *GalleryFileIDsLoader) Prime(key int, value []models.FileID) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { // make a copy when writing to the cache, its easy to pass a pointer in from a loop var // and end up with the whole cache pointing to the same value. - cpy := make([]file.ID, len(value)) + cpy := make([]models.FileID, len(value)) copy(cpy, value) l.unsafeSet(key, cpy) } @@ -170,9 +170,9 @@ func (l *GalleryFileIDsLoader) Clear(key int) { l.mu.Unlock() } -func (l *GalleryFileIDsLoader) unsafeSet(key int, value []file.ID) { +func (l *GalleryFileIDsLoader) unsafeSet(key int, value []models.FileID) { if l.cache == nil { - l.cache = map[int][]file.ID{} + l.cache = map[int][]models.FileID{} } l.cache[key] = value } diff --git a/internal/api/loaders/imagefileidsloader_gen.go b/internal/api/loaders/imagefileidsloader_gen.go index 7e633d8ef..e19d458ad 100644 --- a/internal/api/loaders/imagefileidsloader_gen.go +++ b/internal/api/loaders/imagefileidsloader_gen.go @@ -6,13 +6,13 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" ) // ImageFileIDsLoaderConfig captures the config to create a new ImageFileIDsLoader type ImageFileIDsLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []int) ([][]file.ID, []error) + Fetch func(keys []int) ([][]models.FileID, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -33,7 +33,7 @@ func NewImageFileIDsLoader(config ImageFileIDsLoaderConfig) *ImageFileIDsLoader // ImageFileIDsLoader batches and caches requests type ImageFileIDsLoader struct { // this method provides the data for the loader - fetch func(keys []int) ([][]file.ID, []error) + fetch func(keys []int) ([][]models.FileID, []error) // how long to done before sending a batch wait time.Duration @@ -44,7 +44,7 @@ type ImageFileIDsLoader struct { // 
INTERNAL // lazily created cache - cache map[int][]file.ID + cache map[int][]models.FileID // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,25 +56,25 @@ type ImageFileIDsLoader struct { type imageFileIDsLoaderBatch struct { keys []int - data [][]file.ID + data [][]models.FileID error []error closing bool done chan struct{} } -// Load a ID by key, batching and caching will be applied automatically -func (l *ImageFileIDsLoader) Load(key int) ([]file.ID, error) { +// Load a FileID by key, batching and caching will be applied automatically +func (l *ImageFileIDsLoader) Load(key int) ([]models.FileID, error) { return l.LoadThunk(key)() } -// LoadThunk returns a function that when called will block waiting for a ID. +// LoadThunk returns a function that when called will block waiting for a FileID. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { +func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]models.FileID, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { return it, nil } } @@ -85,10 +85,10 @@ func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { <-batch.done - var data []file.ID + var data []models.FileID if pos < len(batch.data) { data = batch.data[pos] } @@ -113,49 +113,49 @@ func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *ImageFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *ImageFileIDsLoader) LoadAll(keys []int) ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - iDs := make([][]file.ID, len(keys)) + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } -// LoadAllThunk returns a function that when called will block waiting for a IDs. +// LoadAllThunk returns a function that when called will block waiting for a FileIDs. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *ImageFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *ImageFileIDsLoader) LoadAllThunk(keys []int) func() ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([][]file.ID, []error) { - iDs := make([][]file.ID, len(keys)) + return func() ([][]models.FileID, []error) { + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } } // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *ImageFileIDsLoader) Prime(key int, value []file.ID) bool { +func (l *ImageFileIDsLoader) Prime(key int, value []models.FileID) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { // make a copy when writing to the cache, its easy to pass a pointer in from a loop var // and end up with the whole cache pointing to the same value. - cpy := make([]file.ID, len(value)) + cpy := make([]models.FileID, len(value)) copy(cpy, value) l.unsafeSet(key, cpy) } @@ -170,9 +170,9 @@ func (l *ImageFileIDsLoader) Clear(key int) { l.mu.Unlock() } -func (l *ImageFileIDsLoader) unsafeSet(key int, value []file.ID) { +func (l *ImageFileIDsLoader) unsafeSet(key int, value []models.FileID) { if l.cache == nil { - l.cache = map[int][]file.ID{} + l.cache = map[int][]models.FileID{} } l.cache[key] = value } diff --git a/internal/api/loaders/scenefileidsloader_gen.go b/internal/api/loaders/scenefileidsloader_gen.go index 663be2c6f..16e1690c4 100644 --- a/internal/api/loaders/scenefileidsloader_gen.go +++ b/internal/api/loaders/scenefileidsloader_gen.go @@ -6,13 +6,13 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" ) // SceneFileIDsLoaderConfig captures the config to create a new SceneFileIDsLoader type SceneFileIDsLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []int) ([][]file.ID, []error) + Fetch func(keys []int) ([][]models.FileID, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -33,7 +33,7 @@ func NewSceneFileIDsLoader(config SceneFileIDsLoaderConfig) *SceneFileIDsLoader // SceneFileIDsLoader batches and caches requests type SceneFileIDsLoader struct { // this method provides the data for the loader - fetch func(keys []int) ([][]file.ID, []error) + fetch func(keys []int) ([][]models.FileID, []error) // how long to done before sending a batch wait time.Duration @@ -44,7 +44,7 @@ type SceneFileIDsLoader struct { // INTERNAL // 
lazily created cache - cache map[int][]file.ID + cache map[int][]models.FileID // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,25 +56,25 @@ type SceneFileIDsLoader struct { type sceneFileIDsLoaderBatch struct { keys []int - data [][]file.ID + data [][]models.FileID error []error closing bool done chan struct{} } -// Load a ID by key, batching and caching will be applied automatically -func (l *SceneFileIDsLoader) Load(key int) ([]file.ID, error) { +// Load a FileID by key, batching and caching will be applied automatically +func (l *SceneFileIDsLoader) Load(key int) ([]models.FileID, error) { return l.LoadThunk(key)() } -// LoadThunk returns a function that when called will block waiting for a ID. +// LoadThunk returns a function that when called will block waiting for a FileID. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { +func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]models.FileID, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { return it, nil } } @@ -85,10 +85,10 @@ func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { <-batch.done - var data []file.ID + var data []models.FileID if pos < len(batch.data) { data = batch.data[pos] } @@ -113,49 +113,49 @@ func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *SceneFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *SceneFileIDsLoader) LoadAll(keys []int) ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - iDs := make([][]file.ID, len(keys)) + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } -// LoadAllThunk returns a function that when called will block waiting for a IDs. +// LoadAllThunk returns a function that when called will block waiting for a FileIDs. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *SceneFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *SceneFileIDsLoader) LoadAllThunk(keys []int) func() ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([][]file.ID, []error) { - iDs := make([][]file.ID, len(keys)) + return func() ([][]models.FileID, []error) { + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } } // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *SceneFileIDsLoader) Prime(key int, value []file.ID) bool { +func (l *SceneFileIDsLoader) Prime(key int, value []models.FileID) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { // make a copy when writing to the cache, its easy to pass a pointer in from a loop var // and end up with the whole cache pointing to the same value. - cpy := make([]file.ID, len(value)) + cpy := make([]models.FileID, len(value)) copy(cpy, value) l.unsafeSet(key, cpy) } @@ -170,9 +170,9 @@ func (l *SceneFileIDsLoader) Clear(key int) { l.mu.Unlock() } -func (l *SceneFileIDsLoader) unsafeSet(key int, value []file.ID) { +func (l *SceneFileIDsLoader) unsafeSet(key int, value []models.FileID) { if l.cache == nil { - l.cache = map[int][]file.ID{} + l.cache = map[int][]models.FileID{} } l.cache[key] = value } diff --git a/internal/api/models.go b/internal/api/models.go index 92713a56e..03c20ee43 100644 --- a/internal/api/models.go +++ b/internal/api/models.go @@ -9,9 +9,16 @@ import ( "github.com/99designs/gqlgen/graphql" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) +type BaseFile interface{} + +type GalleryFile struct { + *models.BaseFile +} + var ErrTimestamp = errors.New("cannot parse Timestamp") func MarshalTimestamp(t time.Time) graphql.Marshaler { diff --git a/internal/api/resolver_model_gallery.go b/internal/api/resolver_model_gallery.go index 8157404dc..e7c0cd6a0 100644 --- a/internal/api/resolver_model_gallery.go +++ b/internal/api/resolver_model_gallery.go @@ -2,18 +2,16 @@ package api import ( "context" - "strconv" "time" "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/manager/config" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" ) -func (r *galleryResolver) getPrimaryFile(ctx context.Context, obj *models.Gallery) (file.File, error) { +func (r *galleryResolver) 
getPrimaryFile(ctx context.Context, obj *models.Gallery) (models.File, error) { if obj.PrimaryFileID != nil { f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID) if err != nil { @@ -26,7 +24,7 @@ func (r *galleryResolver) getPrimaryFile(ctx context.Context, obj *models.Galler return nil, nil } -func (r *galleryResolver) getFiles(ctx context.Context, obj *models.Gallery) ([]file.File, error) { +func (r *galleryResolver) getFiles(ctx context.Context, obj *models.Gallery) ([]models.File, error) { fileIDs, err := loaders.From(ctx).GalleryFiles.Load(obj.ID) if err != nil { return nil, err @@ -45,34 +43,20 @@ func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]*Ga ret := make([]*GalleryFile, len(files)) for i, f := range files { - base := f.Base() ret[i] = &GalleryFile{ - ID: strconv.Itoa(int(base.ID)), - Path: base.Path, - Basename: base.Basename, - ParentFolderID: strconv.Itoa(int(base.ParentFolderID)), - ModTime: base.ModTime, - Size: base.Size, - CreatedAt: base.CreatedAt, - UpdatedAt: base.UpdatedAt, - Fingerprints: resolveFingerprints(base), - } - - if base.ZipFileID != nil { - zipFileID := strconv.Itoa(int(*base.ZipFileID)) - ret[i].ZipFileID = &zipFileID + BaseFile: f.Base(), } } return ret, nil } -func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*Folder, error) { +func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*models.Folder, error) { if obj.FolderID == nil { return nil, nil } - var ret *file.Folder + var ret *models.Folder if err := r.withReadTxn(ctx, func(ctx context.Context) error { var err error @@ -91,25 +75,7 @@ func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*Fol return nil, nil } - rr := &Folder{ - ID: ret.ID.String(), - Path: ret.Path, - ModTime: ret.ModTime, - CreatedAt: ret.CreatedAt, - UpdatedAt: ret.UpdatedAt, - } - - if ret.ParentFolderID != nil { - pfidStr := ret.ParentFolderID.String() - rr.ParentFolderID = &pfidStr - } - - if 
ret.ZipFileID != nil { - zfidStr := ret.ZipFileID.String() - rr.ZipFileID = &zfidStr - } - - return rr, nil + return ret, nil } func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) { diff --git a/internal/api/resolver_model_image.go b/internal/api/resolver_model_image.go index 9bfadafc7..246ff8b44 100644 --- a/internal/api/resolver_model_image.go +++ b/internal/api/resolver_model_image.go @@ -3,57 +3,35 @@ package api import ( "context" "fmt" - "strconv" "time" "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/api/urlbuilders" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) -func convertImageFile(f *file.ImageFile) *ImageFile { - ret := &ImageFile{ - ID: strconv.Itoa(int(f.ID)), - Path: f.Path, - Basename: f.Basename, - ParentFolderID: strconv.Itoa(int(f.ParentFolderID)), - ModTime: f.ModTime, - Size: f.Size, - Width: f.Width, - Height: f.Height, - CreatedAt: f.CreatedAt, - UpdatedAt: f.UpdatedAt, - Fingerprints: resolveFingerprints(f.Base()), +func convertVisualFile(f models.File) (models.VisualFile, error) { + vf, ok := f.(models.VisualFile) + if !ok { + return nil, fmt.Errorf("file %s is not a visual file", f.Base().Path) } - - if f.ZipFileID != nil { - zipFileID := strconv.Itoa(int(*f.ZipFileID)) - ret.ZipFileID = &zipFileID - } - - return ret + return vf, nil } -func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (file.VisualFile, error) { +func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (models.VisualFile, error) { if obj.PrimaryFileID != nil { f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID) if err != nil { return nil, err } - asFrame, ok := f.(file.VisualFile) - if !ok { - return nil, fmt.Errorf("file %T is not an frame", f) - } - - return asFrame, nil + return convertVisualFile(f) } return nil, nil } -func (r *imageResolver) getFiles(ctx context.Context, obj *models.Image) 
([]file.File, error) { +func (r *imageResolver) getFiles(ctx context.Context, obj *models.Image) ([]models.File, error) { fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID) if err != nil { return nil, err @@ -88,30 +66,21 @@ func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFile }, nil } -func convertVisualFile(f file.File) VisualFile { - switch f := f.(type) { - case *file.ImageFile: - return convertImageFile(f) - case *file.VideoFile: - return convertVideoFile(f) - default: - panic(fmt.Sprintf("unknown file type %T", f)) - } -} - -func (r *imageResolver) VisualFiles(ctx context.Context, obj *models.Image) ([]VisualFile, error) { - fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID) +func (r *imageResolver) VisualFiles(ctx context.Context, obj *models.Image) ([]models.VisualFile, error) { + files, err := r.getFiles(ctx, obj) if err != nil { return nil, err } - files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs) - ret := make([]VisualFile, len(files)) + ret := make([]models.VisualFile, len(files)) for i, f := range files { - ret[i] = convertVisualFile(f) + ret[i], err = convertVisualFile(f) + if err != nil { + return nil, err + } } - return ret, firstError(errs) + return ret, nil } func (r *imageResolver) Date(ctx context.Context, obj *models.Image) (*string, error) { @@ -122,24 +91,22 @@ func (r *imageResolver) Date(ctx context.Context, obj *models.Image) (*string, e return nil, nil } -func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageFile, error) { +func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*models.ImageFile, error) { files, err := r.getFiles(ctx, obj) if err != nil { return nil, err } - var ret []*ImageFile + var ret []*models.ImageFile for _, f := range files { // filter out non-image files - imageFile, ok := f.(*file.ImageFile) + imageFile, ok := f.(*models.ImageFile) if !ok { continue } - thisFile := convertImageFile(imageFile) - - ret = append(ret, 
thisFile) + ret = append(ret, imageFile) } return ret, nil diff --git a/internal/api/resolver_model_scene.go b/internal/api/resolver_model_scene.go index 9d5b41725..27ccaf33b 100644 --- a/internal/api/resolver_model_scene.go +++ b/internal/api/resolver_model_scene.go @@ -9,50 +9,28 @@ import ( "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/internal/manager" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) -func convertVideoFile(f *file.VideoFile) *VideoFile { - ret := &VideoFile{ - ID: strconv.Itoa(int(f.ID)), - Path: f.Path, - Basename: f.Basename, - ParentFolderID: strconv.Itoa(int(f.ParentFolderID)), - ModTime: f.ModTime, - Format: f.Format, - Size: f.Size, - Duration: handleFloat64Value(f.Duration), - VideoCodec: f.VideoCodec, - AudioCodec: f.AudioCodec, - Width: f.Width, - Height: f.Height, - FrameRate: handleFloat64Value(f.FrameRate), - BitRate: int(f.BitRate), - CreatedAt: f.CreatedAt, - UpdatedAt: f.UpdatedAt, - Fingerprints: resolveFingerprints(f.Base()), +func convertVideoFile(f models.File) (*models.VideoFile, error) { + vf, ok := f.(*models.VideoFile) + if !ok { + return nil, fmt.Errorf("file %T is not a video file", f) } - - if f.ZipFileID != nil { - zipFileID := strconv.Itoa(int(*f.ZipFileID)) - ret.ZipFileID = &zipFileID - } - - return ret + return vf, nil } -func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) (*file.VideoFile, error) { +func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) (*models.VideoFile, error) { if obj.PrimaryFileID != nil { f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID) if err != nil { return nil, err } - ret, ok := f.(*file.VideoFile) - if !ok { - return nil, fmt.Errorf("file %T is not an image file", f) + ret, err := convertVideoFile(f) + if err != nil { + return nil, err } obj.Files.SetPrimary(ret) @@ -65,26 
+43,29 @@ func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) ( return nil, nil } -func (r *sceneResolver) getFiles(ctx context.Context, obj *models.Scene) ([]*file.VideoFile, error) { +func (r *sceneResolver) getFiles(ctx context.Context, obj *models.Scene) ([]*models.VideoFile, error) { fileIDs, err := loaders.From(ctx).SceneFiles.Load(obj.ID) if err != nil { return nil, err } files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs) - ret := make([]*file.VideoFile, len(files)) - for i, bf := range files { - f, ok := bf.(*file.VideoFile) - if !ok { - return nil, fmt.Errorf("file %T is not a video file", f) - } + err = firstError(errs) + if err != nil { + return nil, err + } - ret[i] = f + ret := make([]*models.VideoFile, len(files)) + for i, f := range files { + ret[i], err = convertVideoFile(f) + if err != nil { + return nil, err + } } obj.Files.Set(ret) - return ret, firstError(errs) + return ret, nil } func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) { @@ -132,19 +113,13 @@ func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.Sc }, nil } -func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*VideoFile, error) { +func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*models.VideoFile, error) { files, err := r.getFiles(ctx, obj) if err != nil { return nil, err } - ret := make([]*VideoFile, len(files)) - - for i, f := range files { - ret[i] = convertVideoFile(f) - } - - return ret, nil + return files, nil } func (r *sceneResolver) Rating(ctx context.Context, obj *models.Scene) (*int, error) { @@ -159,28 +134,6 @@ func (r *sceneResolver) Rating100(ctx context.Context, obj *models.Scene) (*int, return obj.Rating, nil } -func resolveFingerprints(f *file.BaseFile) []*Fingerprint { - ret := make([]*Fingerprint, len(f.Fingerprints)) - - for i, fp := range f.Fingerprints { - ret[i] = &Fingerprint{ - Type: fp.Type, - Value: 
formatFingerprint(fp.Fingerprint), - } - } - - return ret -} - -func formatFingerprint(fp interface{}) string { - switch v := fp.(type) { - case int64: - return strconv.FormatUint(uint64(v), 16) - default: - return fmt.Sprintf("%v", fp) - } -} - func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*ScenePathsType, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) config := manager.GetInstance().Config @@ -352,7 +305,7 @@ func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, return nil, nil } - val := f.Fingerprints.Get(file.FingerprintTypePhash) + val := f.Fingerprints.Get(models.FingerprintTypePhash) if val == nil { return nil, nil } diff --git a/internal/api/resolver_mutation_file.go b/internal/api/resolver_mutation_file.go index 0b8b84ea0..2fcf66fcf 100644 --- a/internal/api/resolver_mutation_file.go +++ b/internal/api/resolver_mutation_file.go @@ -8,6 +8,7 @@ import ( "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) @@ -19,7 +20,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) mover.RegisterHooks(ctx, r.txnManager) var ( - folder *file.Folder + folder *models.Folder basename string ) @@ -37,7 +38,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) return fmt.Errorf("invalid folder id %s: %w", *input.DestinationFolderID, err) } - folder, err = folderStore.Find(ctx, file.FolderID(folderID)) + folder, err = folderStore.Find(ctx, models.FolderID(folderID)) if err != nil { return fmt.Errorf("finding destination folder: %w", err) } @@ -82,7 +83,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) } for _, fileIDInt := range fileIDs { - fileID := file.ID(fileIDInt) + fileID := models.FileID(fileIDInt) f, err := fileStore.Find(ctx, fileID) if err != nil 
{ return fmt.Errorf("finding file %d: %w", fileID, err) @@ -158,7 +159,7 @@ func (r *mutationResolver) DeleteFiles(ctx context.Context, ids []string) (ret b qb := r.repository.File for _, fileIDInt := range fileIDs { - fileID := file.ID(fileIDInt) + fileID := models.FileID(fileIDInt) f, err := qb.Find(ctx, fileID) if err != nil { return err diff --git a/internal/api/resolver_mutation_gallery.go b/internal/api/resolver_mutation_gallery.go index 368808d2c..ebdb94e64 100644 --- a/internal/api/resolver_mutation_gallery.go +++ b/internal/api/resolver_mutation_gallery.go @@ -199,7 +199,7 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle return nil, fmt.Errorf("converting primary file id: %w", err) } - converted := file.ID(primaryFileID) + converted := models.FileID(primaryFileID) updatedGallery.PrimaryFileID = &converted if err := originalGallery.LoadFiles(ctx, r.repository.Gallery); err != nil { @@ -207,7 +207,7 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle } // ensure that new primary file is associated with gallery - var f file.File + var f models.File for _, ff := range originalGallery.Files.List() { if ff.Base().ID == converted { f = ff diff --git a/internal/api/resolver_mutation_image.go b/internal/api/resolver_mutation_image.go index 6d5c3a88a..6ea58e211 100644 --- a/internal/api/resolver_mutation_image.go +++ b/internal/api/resolver_mutation_image.go @@ -123,7 +123,7 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp return nil, fmt.Errorf("converting primary file id: %w", err) } - converted := file.ID(primaryFileID) + converted := models.FileID(primaryFileID) updatedImage.PrimaryFileID = &converted if err := i.LoadFiles(ctx, r.repository.Image); err != nil { @@ -131,7 +131,7 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp } // ensure that new primary file is associated with image - var f file.File + var f models.File for _, 
ff := range i.Files.List() { if ff.Base().ID == converted { f = ff diff --git a/internal/api/resolver_mutation_scene.go b/internal/api/resolver_mutation_scene.go index 1846d554d..12d89c9d8 100644 --- a/internal/api/resolver_mutation_scene.go +++ b/internal/api/resolver_mutation_scene.go @@ -56,9 +56,9 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input SceneCreateInp return nil, fmt.Errorf("converting file ids: %w", err) } - fileIDs := make([]file.ID, len(fileIDsInt)) + fileIDs := make([]models.FileID, len(fileIDsInt)) for i, v := range fileIDsInt { - fileIDs[i] = file.ID(v) + fileIDs[i] = models.FileID(v) } // Populate a new scene from the input @@ -212,7 +212,7 @@ func scenePartialFromInput(input models.SceneUpdateInput, translator changesetTr return nil, fmt.Errorf("converting primary file id: %w", err) } - converted := file.ID(primaryFileID) + converted := models.FileID(primaryFileID) updatedScene.PrimaryFileID = &converted } @@ -300,7 +300,7 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp } // ensure that new primary file is associated with scene - var f *file.VideoFile + var f *models.VideoFile for _, ff := range originalScene.Files.List() { if ff.ID == newPrimaryFileID { f = ff @@ -575,7 +575,7 @@ func (r *mutationResolver) SceneAssignFile(ctx context.Context, input AssignScen return false, fmt.Errorf("converting file ID: %w", err) } - fileID := file.ID(fileIDInt) + fileID := models.FileID(fileIDInt) if err := r.withTxn(ctx, func(ctx context.Context) error { return r.Resolver.sceneService.AssignFile(ctx, sceneID, fileID) diff --git a/internal/api/routes_image.go b/internal/api/routes_image.go index 4ea612d3b..4cc257671 100644 --- a/internal/api/routes_image.go +++ b/internal/api/routes_image.go @@ -22,14 +22,14 @@ import ( ) type ImageFinder interface { - Find(ctx context.Context, id int) (*models.Image, error) + models.ImageGetter FindByChecksum(ctx context.Context, checksum string) ([]*models.Image, 
error) } type imageRoutes struct { txnManager txn.Manager imageFinder ImageFinder - fileFinder file.Finder + fileGetter models.FileGetter } func (rs imageRoutes) Routes() chi.Router { @@ -168,7 +168,7 @@ func (rs imageRoutes) ImageCtx(next http.Handler) http.Handler { } if image != nil { - if err := image.LoadPrimaryFile(ctx, rs.fileFinder); err != nil { + if err := image.LoadPrimaryFile(ctx, rs.fileGetter); err != nil { if !errors.Is(err, context.Canceled) { logger.Errorf("error loading primary file for image %d: %v", imageID, err) } diff --git a/internal/api/routes_movie.go b/internal/api/routes_movie.go index a64aae76c..400587763 100644 --- a/internal/api/routes_movie.go +++ b/internal/api/routes_movie.go @@ -14,9 +14,9 @@ import ( ) type MovieFinder interface { + models.MovieGetter GetFrontImage(ctx context.Context, movieID int) ([]byte, error) GetBackImage(ctx context.Context, movieID int) ([]byte, error) - Find(ctx context.Context, id int) (*models.Movie, error) } type movieRoutes struct { diff --git a/internal/api/routes_performer.go b/internal/api/routes_performer.go index e7631de5b..d05e53095 100644 --- a/internal/api/routes_performer.go +++ b/internal/api/routes_performer.go @@ -15,7 +15,7 @@ import ( ) type PerformerFinder interface { - Find(ctx context.Context, id int) (*models.Performer, error) + models.PerformerGetter GetImage(ctx context.Context, performerID int) ([]byte, error) } diff --git a/internal/api/routes_scene.go b/internal/api/routes_scene.go index 43d37da36..e0584d688 100644 --- a/internal/api/routes_scene.go +++ b/internal/api/routes_scene.go @@ -12,40 +12,43 @@ import ( "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/ffmpeg" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file/video" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" 
"github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) type SceneFinder interface { - manager.SceneCoverGetter + models.SceneGetter - scene.IDFinder FindByChecksum(ctx context.Context, checksum string) ([]*models.Scene, error) FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error) + GetCover(ctx context.Context, sceneID int) ([]byte, error) } type SceneMarkerFinder interface { - Find(ctx context.Context, id int) (*models.SceneMarker, error) + models.SceneMarkerGetter FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) } +type SceneMarkerTagFinder interface { + models.TagGetter + FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*models.Tag, error) +} + type CaptionFinder interface { - GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error) + GetCaptions(ctx context.Context, fileID models.FileID) ([]*models.VideoCaption, error) } type sceneRoutes struct { txnManager txn.Manager sceneFinder SceneFinder - fileFinder file.Finder + fileGetter models.FileGetter captionFinder CaptionFinder sceneMarkerFinder SceneMarkerFinder - tagFinder scene.MarkerTagFinder + tagFinder SceneMarkerTagFinder } func (rs sceneRoutes) Routes() chi.Router { @@ -574,7 +577,7 @@ func (rs sceneRoutes) SceneCtx(next http.Handler) http.Handler { scene, _ = qb.Find(ctx, sceneID) if scene != nil { - if err := scene.LoadPrimaryFile(ctx, rs.fileFinder); err != nil { + if err := scene.LoadPrimaryFile(ctx, rs.fileGetter); err != nil { if !errors.Is(err, context.Canceled) { logger.Errorf("error loading primary file for scene %d: %v", sceneID, err) } diff --git a/internal/api/routes_studio.go b/internal/api/routes_studio.go index ca4e580f6..1cce39385 100644 --- a/internal/api/routes_studio.go +++ b/internal/api/routes_studio.go @@ -11,13 +11,12 @@ import ( "github.com/stashapp/stash/internal/static" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - 
"github.com/stashapp/stash/pkg/studio" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) type StudioFinder interface { - studio.Finder + models.StudioGetter GetImage(ctx context.Context, studioID int) ([]byte, error) } diff --git a/internal/api/routes_tag.go b/internal/api/routes_tag.go index d8837da80..9ccf11a11 100644 --- a/internal/api/routes_tag.go +++ b/internal/api/routes_tag.go @@ -11,13 +11,12 @@ import ( "github.com/stashapp/stash/internal/static" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) type TagFinder interface { - tag.Finder + models.TagGetter GetImage(ctx context.Context, tagID int) ([]byte, error) } diff --git a/internal/api/server.go b/internal/api/server.go index 6eec5b524..b909914cd 100644 --- a/internal/api/server.go +++ b/internal/api/server.go @@ -151,7 +151,7 @@ func Start() error { r.Mount("/scene", sceneRoutes{ txnManager: txnManager, sceneFinder: txnManager.Scene, - fileFinder: txnManager.File, + fileGetter: txnManager.File, captionFinder: txnManager.File, sceneMarkerFinder: txnManager.SceneMarker, tagFinder: txnManager.Tag, @@ -159,7 +159,7 @@ func Start() error { r.Mount("/image", imageRoutes{ txnManager: txnManager, imageFinder: txnManager.Image, - fileFinder: txnManager.File, + fileGetter: txnManager.File, }.Routes()) r.Mount("/studio", studioRoutes{ txnManager: txnManager, diff --git a/internal/api/types.go b/internal/api/types.go index 13d86f975..79b4aa020 100644 --- a/internal/api/types.go +++ b/internal/api/types.go @@ -18,14 +18,6 @@ func handleFloat64(v float64) *float64 { return &v } -func handleFloat64Value(v float64) float64 { - if math.IsInf(v, 0) || math.IsNaN(v) { - return 0 - } - - return v -} - func translateUpdateIDs(strIDs []string, mode models.RelationshipUpdateMode) (*models.UpdateIDs, error) { ids, err := stringslice.StringSliceToIntSlice(strIDs) 
if err != nil { diff --git a/internal/autotag/gallery.go b/internal/autotag/gallery.go index d2a8c2c5d..f768a31dd 100644 --- a/internal/autotag/gallery.go +++ b/internal/autotag/gallery.go @@ -9,14 +9,19 @@ import ( "github.com/stashapp/stash/pkg/sliceutil/intslice" ) +type GalleryFinderUpdater interface { + models.GalleryQueryer + models.GalleryUpdater +} + type GalleryPerformerUpdater interface { models.PerformerIDLoader - gallery.PartialUpdater + models.GalleryUpdater } type GalleryTagUpdater interface { models.TagIDLoader - gallery.PartialUpdater + models.GalleryUpdater } func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger { @@ -39,7 +44,7 @@ func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger { } // GalleryPerformers tags the provided gallery with performers whose name matches the gallery's path. -func GalleryPerformers(ctx context.Context, s *models.Gallery, rw GalleryPerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error { +func GalleryPerformers(ctx context.Context, s *models.Gallery, rw GalleryPerformerUpdater, performerReader models.PerformerAutoTagQueryer, cache *match.Cache) error { t := getGalleryFileTagger(s, cache) return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) { @@ -63,7 +68,7 @@ func GalleryPerformers(ctx context.Context, s *models.Gallery, rw GalleryPerform // GalleryStudios tags the provided gallery with the first studio whose name matches the gallery's path. // // Gallerys will not be tagged if studio is already set. 
-func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error { +func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpdater, studioReader models.StudioAutoTagQueryer, cache *match.Cache) error { if s.StudioID != nil { // don't modify return nil @@ -77,7 +82,7 @@ func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpda } // GalleryTags tags the provided gallery with tags whose name matches the gallery's path. -func GalleryTags(ctx context.Context, s *models.Gallery, rw GalleryTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error { +func GalleryTags(ctx context.Context, s *models.Gallery, rw GalleryTagUpdater, tagReader models.TagAutoTagQueryer, cache *match.Cache) error { t := getGalleryFileTagger(s, cache) return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) { diff --git a/internal/autotag/image.go b/internal/autotag/image.go index 404640786..d28960f3c 100644 --- a/internal/autotag/image.go +++ b/internal/autotag/image.go @@ -9,14 +9,19 @@ import ( "github.com/stashapp/stash/pkg/sliceutil/intslice" ) +type ImageFinderUpdater interface { + models.ImageQueryer + models.ImageUpdater +} + type ImagePerformerUpdater interface { models.PerformerIDLoader - image.PartialUpdater + models.ImageUpdater } type ImageTagUpdater interface { models.TagIDLoader - image.PartialUpdater + models.ImageUpdater } func getImageFileTagger(s *models.Image, cache *match.Cache) tagger { @@ -30,7 +35,7 @@ func getImageFileTagger(s *models.Image, cache *match.Cache) tagger { } // ImagePerformers tags the provided image with performers whose name matches the image's path. 
-func ImagePerformers(ctx context.Context, s *models.Image, rw ImagePerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error { +func ImagePerformers(ctx context.Context, s *models.Image, rw ImagePerformerUpdater, performerReader models.PerformerAutoTagQueryer, cache *match.Cache) error { t := getImageFileTagger(s, cache) return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) { @@ -54,7 +59,7 @@ func ImagePerformers(ctx context.Context, s *models.Image, rw ImagePerformerUpda // ImageStudios tags the provided image with the first studio whose name matches the image's path. // // Images will not be tagged if studio is already set. -func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error { +func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, studioReader models.StudioAutoTagQueryer, cache *match.Cache) error { if s.StudioID != nil { // don't modify return nil @@ -68,7 +73,7 @@ func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, s } // ImageTags tags the provided image with tags whose name matches the image's path. 
-func ImageTags(ctx context.Context, s *models.Image, rw ImageTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error { +func ImageTags(ctx context.Context, s *models.Image, rw ImageTagUpdater, tagReader models.TagAutoTagQueryer, cache *match.Cache) error { t := getImageFileTagger(s, cache) return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) { diff --git a/internal/autotag/integration_test.go b/internal/autotag/integration_test.go index eb4b0a9ad..774a7738b 100644 --- a/internal/autotag/integration_test.go +++ b/internal/autotag/integration_test.go @@ -10,7 +10,6 @@ import ( "path/filepath" "testing" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sqlite" "github.com/stashapp/stash/pkg/txn" @@ -124,12 +123,12 @@ func createTag(ctx context.Context, qb models.TagWriter) error { return nil } -func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore file.FolderStore, fileStore file.Store) error { +func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) error { // create the scenes scenePatterns, falseScenePatterns := generateTestPaths(testName, sceneExt) for _, fn := range scenePatterns { - f, err := createSceneFile(ctx, fn, folderStore, fileStore) + f, err := createSceneFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -141,7 +140,7 @@ func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore } for _, fn := range falseScenePatterns { - f, err := createSceneFile(ctx, fn, folderStore, fileStore) + f, err := createSceneFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -154,7 +153,7 @@ func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore // add organized scenes for _, fn := range scenePatterns { - f, err := createSceneFile(ctx, "organized"+fn, folderStore, fileStore) + f, 
err := createSceneFile(ctx, "organized"+fn, folderStore, fileCreator) if err != nil { return err } @@ -168,7 +167,7 @@ func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore } // create scene with existing studio io - f, err := createSceneFile(ctx, existingStudioSceneName, folderStore, fileStore) + f, err := createSceneFile(ctx, existingStudioSceneName, folderStore, fileCreator) if err != nil { return err } @@ -196,7 +195,7 @@ func makeScene(expectedResult bool) *models.Scene { return s } -func createSceneFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.VideoFile, error) { +func createSceneFile(ctx context.Context, name string, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) (*models.VideoFile, error) { folderPath := filepath.Dir(name) basename := filepath.Base(name) @@ -207,21 +206,21 @@ func createSceneFile(ctx context.Context, name string, folderStore file.FolderSt folderID := folder.ID - f := &file.VideoFile{ - BaseFile: &file.BaseFile{ + f := &models.VideoFile{ + BaseFile: &models.BaseFile{ Basename: basename, ParentFolderID: folderID, }, } - if err := fileStore.Create(ctx, f); err != nil { + if err := fileCreator.Create(ctx, f); err != nil { return nil, fmt.Errorf("creating scene file %q: %w", name, err) } return f, nil } -func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folderPath string) (*file.Folder, error) { +func getOrCreateFolder(ctx context.Context, folderStore models.FolderFinderCreator, folderPath string) (*models.Folder, error) { f, err := folderStore.FindByPath(ctx, folderPath) if err != nil { return nil, fmt.Errorf("getting folder by path: %w", err) @@ -231,7 +230,7 @@ func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folder return f, nil } - var parentID file.FolderID + var parentID models.FolderID dir := filepath.Dir(folderPath) if dir != "." 
{ parent, err := getOrCreateFolder(ctx, folderStore, dir) @@ -242,7 +241,7 @@ func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folder parentID = parent.ID } - f = &file.Folder{ + f = &models.Folder{ Path: folderPath, } @@ -257,8 +256,8 @@ func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folder return f, nil } -func createScene(ctx context.Context, sqb models.SceneWriter, s *models.Scene, f *file.VideoFile) error { - err := sqb.Create(ctx, s, []file.ID{f.ID}) +func createScene(ctx context.Context, sqb models.SceneWriter, s *models.Scene, f *models.VideoFile) error { + err := sqb.Create(ctx, s, []models.FileID{f.ID}) if err != nil { return fmt.Errorf("Failed to create scene with path '%s': %s", f.Path, err.Error()) @@ -267,12 +266,12 @@ func createScene(ctx context.Context, sqb models.SceneWriter, s *models.Scene, f return nil } -func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore file.FolderStore, fileStore file.Store) error { +func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) error { // create the images imagePatterns, falseImagePatterns := generateTestPaths(testName, imageExt) for _, fn := range imagePatterns { - f, err := createImageFile(ctx, fn, folderStore, fileStore) + f, err := createImageFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -283,7 +282,7 @@ func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore f } } for _, fn := range falseImagePatterns { - f, err := createImageFile(ctx, fn, folderStore, fileStore) + f, err := createImageFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -296,7 +295,7 @@ func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore f // add organized images for _, fn := range imagePatterns { - f, err := createImageFile(ctx, "organized"+fn, folderStore, fileStore) + f, err := 
createImageFile(ctx, "organized"+fn, folderStore, fileCreator) if err != nil { return err } @@ -310,7 +309,7 @@ func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore f } // create image with existing studio io - f, err := createImageFile(ctx, existingStudioImageName, folderStore, fileStore) + f, err := createImageFile(ctx, existingStudioImageName, folderStore, fileCreator) if err != nil { return err } @@ -326,7 +325,7 @@ func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore f return nil } -func createImageFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.ImageFile, error) { +func createImageFile(ctx context.Context, name string, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) (*models.ImageFile, error) { folderPath := filepath.Dir(name) basename := filepath.Base(name) @@ -337,14 +336,14 @@ func createImageFile(ctx context.Context, name string, folderStore file.FolderSt folderID := folder.ID - f := &file.ImageFile{ - BaseFile: &file.BaseFile{ + f := &models.ImageFile{ + BaseFile: &models.BaseFile{ Basename: basename, ParentFolderID: folderID, }, } - if err := fileStore.Create(ctx, f); err != nil { + if err := fileCreator.Create(ctx, f); err != nil { return nil, err } @@ -362,10 +361,10 @@ func makeImage(expectedResult bool) *models.Image { return o } -func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f *file.ImageFile) error { +func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f *models.ImageFile) error { err := w.Create(ctx, &models.ImageCreateInput{ Image: o, - FileIDs: []file.ID{f.ID}, + FileIDs: []models.FileID{f.ID}, }) if err != nil { @@ -375,12 +374,12 @@ func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f * return nil } -func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderStore file.FolderStore, fileStore file.Store) error { +func 
createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) error { // create the galleries galleryPatterns, falseGalleryPatterns := generateTestPaths(testName, galleryExt) for _, fn := range galleryPatterns { - f, err := createGalleryFile(ctx, fn, folderStore, fileStore) + f, err := createGalleryFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -391,7 +390,7 @@ func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderSt } } for _, fn := range falseGalleryPatterns { - f, err := createGalleryFile(ctx, fn, folderStore, fileStore) + f, err := createGalleryFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -404,7 +403,7 @@ func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderSt // add organized galleries for _, fn := range galleryPatterns { - f, err := createGalleryFile(ctx, "organized"+fn, folderStore, fileStore) + f, err := createGalleryFile(ctx, "organized"+fn, folderStore, fileCreator) if err != nil { return err } @@ -418,7 +417,7 @@ func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderSt } // create gallery with existing studio io - f, err := createGalleryFile(ctx, existingStudioGalleryName, folderStore, fileStore) + f, err := createGalleryFile(ctx, existingStudioGalleryName, folderStore, fileCreator) if err != nil { return err } @@ -434,7 +433,7 @@ func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderSt return nil } -func createGalleryFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.BaseFile, error) { +func createGalleryFile(ctx context.Context, name string, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) (*models.BaseFile, error) { folderPath := filepath.Dir(name) basename := filepath.Base(name) @@ -445,12 +444,12 @@ func createGalleryFile(ctx context.Context, name string, 
folderStore file.Folder folderID := folder.ID - f := &file.BaseFile{ + f := &models.BaseFile{ Basename: basename, ParentFolderID: folderID, } - if err := fileStore.Create(ctx, f); err != nil { + if err := fileCreator.Create(ctx, f); err != nil { return nil, err } @@ -468,8 +467,8 @@ func makeGallery(expectedResult bool) *models.Gallery { return o } -func createGallery(ctx context.Context, w models.GalleryWriter, o *models.Gallery, f *file.BaseFile) error { - err := w.Create(ctx, o, []file.ID{f.ID}) +func createGallery(ctx context.Context, w models.GalleryWriter, o *models.Gallery, f *models.BaseFile) error { + err := w.Create(ctx, o, []models.FileID{f.ID}) if err != nil { return fmt.Errorf("Failed to create gallery with path '%s': %s", f.Path, err.Error()) } diff --git a/internal/autotag/performer.go b/internal/autotag/performer.go index 32364dc50..cc839f361 100644 --- a/internal/autotag/performer.go +++ b/internal/autotag/performer.go @@ -13,21 +13,21 @@ import ( ) type SceneQueryPerformerUpdater interface { - scene.Queryer + models.SceneQueryer models.PerformerIDLoader - scene.PartialUpdater + models.SceneUpdater } type ImageQueryPerformerUpdater interface { - image.Queryer + models.ImageQueryer models.PerformerIDLoader - image.PartialUpdater + models.ImageUpdater } type GalleryQueryPerformerUpdater interface { - gallery.Queryer + models.GalleryQueryer models.PerformerIDLoader - gallery.PartialUpdater + models.GalleryUpdater } func getPerformerTaggers(p *models.Performer, cache *match.Cache) []tagger { diff --git a/internal/autotag/scene.go b/internal/autotag/scene.go index 285ff7d7d..6095905e8 100644 --- a/internal/autotag/scene.go +++ b/internal/autotag/scene.go @@ -9,14 +9,19 @@ import ( "github.com/stashapp/stash/pkg/sliceutil/intslice" ) +type SceneFinderUpdater interface { + models.SceneQueryer + models.SceneUpdater +} + type ScenePerformerUpdater interface { models.PerformerIDLoader - scene.PartialUpdater + models.SceneUpdater } type SceneTagUpdater 
interface { models.TagIDLoader - scene.PartialUpdater + models.SceneUpdater } func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger { @@ -30,7 +35,7 @@ func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger { } // ScenePerformers tags the provided scene with performers whose name matches the scene's path. -func ScenePerformers(ctx context.Context, s *models.Scene, rw ScenePerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error { +func ScenePerformers(ctx context.Context, s *models.Scene, rw ScenePerformerUpdater, performerReader models.PerformerAutoTagQueryer, cache *match.Cache) error { t := getSceneFileTagger(s, cache) return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) { @@ -54,7 +59,7 @@ func ScenePerformers(ctx context.Context, s *models.Scene, rw ScenePerformerUpda // SceneStudios tags the provided scene with the first studio whose name matches the scene's path. // // Scenes will not be tagged if studio is already set. -func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error { +func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, studioReader models.StudioAutoTagQueryer, cache *match.Cache) error { if s.StudioID != nil { // don't modify return nil @@ -68,7 +73,7 @@ func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, s } // SceneTags tags the provided scene with tags whose name matches the scene's path. 
-func SceneTags(ctx context.Context, s *models.Scene, rw SceneTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error { +func SceneTags(ctx context.Context, s *models.Scene, rw SceneTagUpdater, tagReader models.TagAutoTagQueryer, cache *match.Cache) error { t := getSceneFileTagger(s, cache) return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) { diff --git a/internal/autotag/studio.go b/internal/autotag/studio.go index bfa6c941e..ef5a6f0da 100644 --- a/internal/autotag/studio.go +++ b/internal/autotag/studio.go @@ -3,18 +3,15 @@ package autotag import ( "context" - "github.com/stashapp/stash/pkg/gallery" - "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/txn" ) // the following functions aren't used in Tagger because they assume // use within a transaction -func addSceneStudio(ctx context.Context, sceneWriter scene.PartialUpdater, o *models.Scene, studioID int) (bool, error) { +func addSceneStudio(ctx context.Context, sceneWriter models.SceneUpdater, o *models.Scene, studioID int) (bool, error) { // don't set if already set if o.StudioID != nil { return false, nil @@ -31,7 +28,7 @@ func addSceneStudio(ctx context.Context, sceneWriter scene.PartialUpdater, o *mo return true, nil } -func addImageStudio(ctx context.Context, imageWriter image.PartialUpdater, i *models.Image, studioID int) (bool, error) { +func addImageStudio(ctx context.Context, imageWriter models.ImageUpdater, i *models.Image, studioID int) (bool, error) { // don't set if already set if i.StudioID != nil { return false, nil @@ -84,11 +81,6 @@ func getStudioTagger(p *models.Studio, aliases []string, cache *match.Cache) []t return ret } -type SceneFinderUpdater interface { - scene.Queryer - scene.PartialUpdater -} - // StudioScenes searches for scenes whose path matches the provided studio name and tags the scene with the studio, 
if studio is not already set on the scene. func (tagger *Tagger) StudioScenes(ctx context.Context, p *models.Studio, paths []string, aliases []string, rw SceneFinderUpdater) error { t := getStudioTagger(p, aliases, tagger.Cache) @@ -120,12 +112,6 @@ func (tagger *Tagger) StudioScenes(ctx context.Context, p *models.Studio, paths return nil } -type ImageFinderUpdater interface { - image.Queryer - Find(ctx context.Context, id int) (*models.Image, error) - UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error) -} - // StudioImages searches for images whose path matches the provided studio name and tags the image with the studio, if studio is not already set on the image. func (tagger *Tagger) StudioImages(ctx context.Context, p *models.Studio, paths []string, aliases []string, rw ImageFinderUpdater) error { t := getStudioTagger(p, aliases, tagger.Cache) @@ -157,12 +143,6 @@ func (tagger *Tagger) StudioImages(ctx context.Context, p *models.Studio, paths return nil } -type GalleryFinderUpdater interface { - gallery.Queryer - gallery.PartialUpdater - Find(ctx context.Context, id int) (*models.Gallery, error) -} - // StudioGalleries searches for galleries whose path matches the provided studio name and tags the gallery with the studio, if studio is not already set on the gallery. 
func (tagger *Tagger) StudioGalleries(ctx context.Context, p *models.Studio, paths []string, aliases []string, rw GalleryFinderUpdater) error { t := getStudioTagger(p, aliases, tagger.Cache) diff --git a/internal/autotag/tag.go b/internal/autotag/tag.go index 94c7c1bb3..8c404f62f 100644 --- a/internal/autotag/tag.go +++ b/internal/autotag/tag.go @@ -13,21 +13,21 @@ import ( ) type SceneQueryTagUpdater interface { - scene.Queryer + models.SceneQueryer models.TagIDLoader - scene.PartialUpdater + models.SceneUpdater } type ImageQueryTagUpdater interface { - image.Queryer + models.ImageQueryer models.TagIDLoader - image.PartialUpdater + models.ImageUpdater } type GalleryQueryTagUpdater interface { - gallery.Queryer + models.GalleryQueryer models.TagIDLoader - gallery.PartialUpdater + models.GalleryUpdater } func getTagTaggers(p *models.Tag, aliases []string, cache *match.Cache) []tagger { diff --git a/internal/autotag/tagger.go b/internal/autotag/tagger.go index 07cb1da87..b814bea60 100644 --- a/internal/autotag/tagger.go +++ b/internal/autotag/tagger.go @@ -17,12 +17,9 @@ import ( "context" "fmt" - "github.com/stashapp/stash/pkg/gallery" - "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/txn" ) @@ -54,7 +51,7 @@ func (t *tagger) addLog(otherType, otherName string) { logger.Infof("Added %s '%s' to %s '%s'", otherType, otherName, t.Type, t.Name) } -func (t *tagger) tagPerformers(ctx context.Context, performerReader match.PerformerAutoTagQueryer, addFunc addLinkFunc) error { +func (t *tagger) tagPerformers(ctx context.Context, performerReader models.PerformerAutoTagQueryer, addFunc addLinkFunc) error { others, err := match.PathToPerformers(ctx, t.Path, performerReader, t.cache, t.trimExt) if err != nil { return err @@ -75,7 +72,7 @@ func (t *tagger) tagPerformers(ctx context.Context, 
performerReader match.Perfor return nil } -func (t *tagger) tagStudios(ctx context.Context, studioReader match.StudioAutoTagQueryer, addFunc addLinkFunc) error { +func (t *tagger) tagStudios(ctx context.Context, studioReader models.StudioAutoTagQueryer, addFunc addLinkFunc) error { studio, err := match.PathToStudio(ctx, t.Path, studioReader, t.cache, t.trimExt) if err != nil { return err @@ -96,7 +93,7 @@ func (t *tagger) tagStudios(ctx context.Context, studioReader match.StudioAutoTa return nil } -func (t *tagger) tagTags(ctx context.Context, tagReader match.TagAutoTagQueryer, addFunc addLinkFunc) error { +func (t *tagger) tagTags(ctx context.Context, tagReader models.TagAutoTagQueryer, addFunc addLinkFunc) error { others, err := match.PathToTags(ctx, t.Path, tagReader, t.cache, t.trimExt) if err != nil { return err @@ -117,7 +114,7 @@ func (t *tagger) tagTags(ctx context.Context, tagReader match.TagAutoTagQueryer, return nil } -func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scene.Queryer, addFunc addSceneLinkFunc) error { +func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader models.SceneQueryer, addFunc addSceneLinkFunc) error { return match.PathToScenesFn(ctx, t.Name, paths, sceneReader, func(ctx context.Context, p *models.Scene) error { added, err := addFunc(p) @@ -133,7 +130,7 @@ func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scen }) } -func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader image.Queryer, addFunc addImageLinkFunc) error { +func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader models.ImageQueryer, addFunc addImageLinkFunc) error { return match.PathToImagesFn(ctx, t.Name, paths, imageReader, func(ctx context.Context, p *models.Image) error { added, err := addFunc(p) @@ -149,7 +146,7 @@ func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader imag }) } -func (t *tagger) tagGalleries(ctx context.Context, 
paths []string, galleryReader gallery.Queryer, addFunc addGalleryLinkFunc) error { +func (t *tagger) tagGalleries(ctx context.Context, paths []string, galleryReader models.GalleryQueryer, addFunc addGalleryLinkFunc) error { return match.PathToGalleriesFn(ctx, t.Name, paths, galleryReader, func(ctx context.Context, p *models.Gallery) error { added, err := addFunc(p) diff --git a/internal/dlna/cds.go b/internal/dlna/cds.go index 826b52acd..eba98ac48 100644 --- a/internal/dlna/cds.go +++ b/internal/dlna/cds.go @@ -363,7 +363,7 @@ func (me *contentDirectoryService) handleBrowseMetadata(obj object, host string) if err := txn.WithReadTxn(context.TODO(), me.txnManager, func(ctx context.Context) error { scene, err = me.repository.SceneFinder.Find(ctx, sceneID) if scene != nil { - err = scene.LoadPrimaryFile(ctx, me.repository.FileFinder) + err = scene.LoadPrimaryFile(ctx, me.repository.FileGetter) } if err != nil { @@ -478,7 +478,7 @@ func (me *contentDirectoryService) getVideos(sceneFilter *models.SceneFilterType } } else { for _, s := range scenes { - if err := s.LoadPrimaryFile(ctx, me.repository.FileFinder); err != nil { + if err := s.LoadPrimaryFile(ctx, me.repository.FileGetter); err != nil { return err } @@ -506,7 +506,7 @@ func (me *contentDirectoryService) getPageVideos(sceneFilter *models.SceneFilter sort := me.VideoSortOrder direction := getSortDirection(sceneFilter, sort) var err error - objs, err = pager.getPageVideos(ctx, me.repository.SceneFinder, me.repository.FileFinder, page, host, sort, direction) + objs, err = pager.getPageVideos(ctx, me.repository.SceneFinder, me.repository.FileGetter, page, host, sort, direction) if err != nil { return err } diff --git a/internal/dlna/dms.go b/internal/dlna/dms.go index 502dbe0e4..fe078aab0 100644 --- a/internal/dlna/dms.go +++ b/internal/dlna/dms.go @@ -48,13 +48,12 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" 
"github.com/stashapp/stash/pkg/txn" ) type SceneFinder interface { - scene.Queryer - scene.IDFinder + models.SceneGetter + models.SceneQueryer } type StudioFinder interface { diff --git a/internal/dlna/paging.go b/internal/dlna/paging.go index bd1b00283..fae6ebf13 100644 --- a/internal/dlna/paging.go +++ b/internal/dlna/paging.go @@ -6,7 +6,6 @@ import ( "math" "strconv" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scene" ) @@ -20,7 +19,7 @@ func (p *scenePager) getPageID(page int) string { return p.parentID + "/page/" + strconv.Itoa(page) } -func (p *scenePager) getPages(ctx context.Context, r scene.Queryer, total int) ([]interface{}, error) { +func (p *scenePager) getPages(ctx context.Context, r models.SceneQueryer, total int) ([]interface{}, error) { var objs []interface{} // get the first scene of each page to set an appropriate title @@ -60,7 +59,7 @@ func (p *scenePager) getPages(ctx context.Context, r scene.Queryer, total int) ( return objs, nil } -func (p *scenePager) getPageVideos(ctx context.Context, r SceneFinder, f file.Finder, page int, host string, sort string, direction models.SortDirectionEnum) ([]interface{}, error) { +func (p *scenePager) getPageVideos(ctx context.Context, r SceneFinder, f models.FileGetter, page int, host string, sort string, direction models.SortDirectionEnum) ([]interface{}, error) { var objs []interface{} findFilter := &models.FindFilterType{ diff --git a/internal/dlna/service.go b/internal/dlna/service.go index 0d8932e08..d5399e6a1 100644 --- a/internal/dlna/service.go +++ b/internal/dlna/service.go @@ -8,7 +8,6 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" @@ -16,7 +15,7 @@ import ( type Repository struct { SceneFinder SceneFinder - FileFinder file.Finder + FileGetter models.FileGetter StudioFinder StudioFinder TagFinder TagFinder 
PerformerFinder PerformerFinder diff --git a/internal/identify/identify.go b/internal/identify/identify.go index 3a9cea610..db8ca2f54 100644 --- a/internal/identify/identify.go +++ b/internal/identify/identify.go @@ -46,7 +46,7 @@ type SceneIdentifier struct { SceneReaderUpdater SceneReaderUpdater StudioReaderWriter models.StudioReaderWriter PerformerCreator PerformerCreator - TagCreatorFinder TagCreatorFinder + TagFinderCreator models.TagFinderCreator DefaultOptions *MetadataOptions Sources []ScraperSource @@ -176,7 +176,7 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene, sceneReader: t.SceneReaderUpdater, studioReaderWriter: t.StudioReaderWriter, performerCreator: t.PerformerCreator, - tagCreatorFinder: t.TagCreatorFinder, + tagCreator: t.TagFinderCreator, scene: s, result: result, fieldOptions: fieldOptions, @@ -332,7 +332,7 @@ func (t *SceneIdentifier) addTagToScene(ctx context.Context, txnManager txn.Mana return err } - ret, err := t.TagCreatorFinder.Find(ctx, tagID) + ret, err := t.TagFinderCreator.Find(ctx, tagID) if err != nil { logger.Infof("Added tag id %s to skipped scene %s", tagToAdd, s.Path) } else { diff --git a/internal/identify/identify_test.go b/internal/identify/identify_test.go index 30dd72803..04ff03607 100644 --- a/internal/identify/identify_test.go +++ b/internal/identify/identify_test.go @@ -186,7 +186,7 @@ func TestSceneIdentifier_Identify(t *testing.T) { t.Run(tt.name, func(t *testing.T) { identifier := SceneIdentifier{ SceneReaderUpdater: mockSceneReaderWriter, - TagCreatorFinder: mockTagFinderCreator, + TagFinderCreator: mockTagFinderCreator, DefaultOptions: defaultOptions, Sources: sources, SceneUpdatePostHookExecutor: mockHookExecutor{}, diff --git a/internal/identify/performer.go b/internal/identify/performer.go index f544473d2..947bb09d6 100644 --- a/internal/identify/performer.go +++ b/internal/identify/performer.go @@ -10,7 +10,7 @@ import ( ) type PerformerCreator interface { - Create(ctx 
context.Context, newPerformer *models.Performer) error + models.PerformerCreator UpdateImage(ctx context.Context, performerID int, image []byte) error } diff --git a/internal/identify/scene.go b/internal/identify/scene.go index 160a0a8b6..9a951c13b 100644 --- a/internal/identify/scene.go +++ b/internal/identify/scene.go @@ -11,32 +11,29 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/sliceutil" "github.com/stashapp/stash/pkg/sliceutil/intslice" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) -type SceneReaderUpdater interface { +type SceneCoverGetter interface { GetCover(ctx context.Context, sceneID int) ([]byte, error) - scene.Updater +} + +type SceneReaderUpdater interface { + SceneCoverGetter + models.SceneUpdater models.PerformerIDLoader models.TagIDLoader models.StashIDLoader models.URLLoader } -type TagCreatorFinder interface { - Create(ctx context.Context, newTag *models.Tag) error - tag.Finder -} - type sceneRelationships struct { - sceneReader SceneReaderUpdater + sceneReader SceneCoverGetter studioReaderWriter models.StudioReaderWriter performerCreator PerformerCreator - tagCreatorFinder TagCreatorFinder + tagCreator models.TagCreator scene *models.Scene result *scrapeResult fieldOptions map[string]*FieldOptions @@ -173,7 +170,7 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) { CreatedAt: now, UpdatedAt: now, } - err := g.tagCreatorFinder.Create(ctx, &newTag) + err := g.tagCreator.Create(ctx, &newTag) if err != nil { return nil, fmt.Errorf("error creating tag: %w", err) } diff --git a/internal/identify/scene_test.go b/internal/identify/scene_test.go index ae6963ee3..bb0598b06 100644 --- a/internal/identify/scene_test.go +++ b/internal/identify/scene_test.go @@ -377,9 +377,9 @@ func Test_sceneRelationships_tags(t *testing.T) { })).Return(errors.New("error creating tag")) tr := 
sceneRelationships{ - sceneReader: mockSceneReaderWriter, - tagCreatorFinder: mockTagReaderWriter, - fieldOptions: make(map[string]*FieldOptions), + sceneReader: mockSceneReaderWriter, + tagCreator: mockTagReaderWriter, + fieldOptions: make(map[string]*FieldOptions), } tests := []struct { diff --git a/internal/manager/fingerprint.go b/internal/manager/fingerprint.go index fc183cc6a..b30ac4532 100644 --- a/internal/manager/fingerprint.go +++ b/internal/manager/fingerprint.go @@ -10,13 +10,14 @@ import ( "github.com/stashapp/stash/pkg/hash/md5" "github.com/stashapp/stash/pkg/hash/oshash" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" ) type fingerprintCalculator struct { Config *config.Instance } -func (c *fingerprintCalculator) calculateOshash(f *file.BaseFile, o file.Opener) (*file.Fingerprint, error) { +func (c *fingerprintCalculator) calculateOshash(f *models.BaseFile, o file.Opener) (*models.Fingerprint, error) { r, err := o.Open() if err != nil { return nil, fmt.Errorf("opening file: %w", err) @@ -34,13 +35,13 @@ func (c *fingerprintCalculator) calculateOshash(f *file.BaseFile, o file.Opener) return nil, fmt.Errorf("calculating oshash: %w", err) } - return &file.Fingerprint{ - Type: file.FingerprintTypeOshash, + return &models.Fingerprint{ + Type: models.FingerprintTypeOshash, Fingerprint: hash, }, nil } -func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*file.Fingerprint, error) { +func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*models.Fingerprint, error) { r, err := o.Open() if err != nil { return nil, fmt.Errorf("opening file: %w", err) @@ -53,24 +54,24 @@ func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*file.Fingerprint, return nil, fmt.Errorf("calculating md5: %w", err) } - return &file.Fingerprint{ - Type: file.FingerprintTypeMD5, + return &models.Fingerprint{ + Type: models.FingerprintTypeMD5, Fingerprint: hash, }, nil } -func (c *fingerprintCalculator) CalculateFingerprints(f 
*file.BaseFile, o file.Opener, useExisting bool) ([]file.Fingerprint, error) { - var ret []file.Fingerprint +func (c *fingerprintCalculator) CalculateFingerprints(f *models.BaseFile, o file.Opener, useExisting bool) ([]models.Fingerprint, error) { + var ret []models.Fingerprint calculateMD5 := true if useAsVideo(f.Path) { var ( - fp *file.Fingerprint + fp *models.Fingerprint err error ) if useExisting { - fp = f.Fingerprints.For(file.FingerprintTypeOshash) + fp = f.Fingerprints.For(models.FingerprintTypeOshash) } if fp == nil { @@ -89,12 +90,12 @@ func (c *fingerprintCalculator) CalculateFingerprints(f *file.BaseFile, o file.O if calculateMD5 { var ( - fp *file.Fingerprint + fp *models.Fingerprint err error ) if useExisting { - fp = f.Fingerprints.For(file.FingerprintTypeMD5) + fp = f.Fingerprints.For(models.FingerprintTypeMD5) } if fp == nil { diff --git a/internal/manager/manager.go b/internal/manager/manager.go index 0b1c50abe..e199f9ce7 100644 --- a/internal/manager/manager.go +++ b/internal/manager/manager.go @@ -26,6 +26,7 @@ import ( "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/paths" "github.com/stashapp/stash/pkg/plugin" "github.com/stashapp/stash/pkg/scene" @@ -222,7 +223,7 @@ func initialize() error { instance.DLNAService = dlna.NewService(instance.Repository, dlna.Repository{ SceneFinder: instance.Repository.Scene, - FileFinder: instance.Repository.File, + FileGetter: instance.Repository.File, StudioFinder: instance.Repository.Studio, TagFinder: instance.Repository.Tag, PerformerFinder: instance.Repository.Performer, @@ -280,15 +281,15 @@ func initialize() error { return nil } -func videoFileFilter(ctx context.Context, f file.File) bool { +func videoFileFilter(ctx context.Context, f models.File) bool { return useAsVideo(f.Base().Path) } -func imageFileFilter(ctx context.Context, f file.File) bool { +func 
imageFileFilter(ctx context.Context, f models.File) bool { return useAsImage(f.Base().Path) } -func galleryFileFilter(ctx context.Context, f file.File) bool { +func galleryFileFilter(ctx context.Context, f models.File) bool { return isZip(f.Base().Basename) } @@ -297,7 +298,7 @@ func makeScanner(db *sqlite.Database, pluginCache *plugin.Cache) *file.Scanner { Repository: file.Repository{ Manager: db, DatabaseProvider: db, - Store: db.File, + FileStore: db.File, FolderStore: db.Folder, }, FileDecorators: []file.Decorator{ @@ -325,7 +326,7 @@ func makeCleaner(db *sqlite.Database, pluginCache *plugin.Cache) *file.Cleaner { Repository: file.Repository{ Manager: db, DatabaseProvider: db, - Store: db.File, + FileStore: db.File, FolderStore: db.Folder, }, Handlers: []file.CleanHandler{ diff --git a/internal/manager/repository.go b/internal/manager/repository.go index f6f8176aa..77859d06b 100644 --- a/internal/manager/repository.go +++ b/internal/manager/repository.go @@ -3,8 +3,6 @@ package manager import ( "context" - "github.com/stashapp/stash/pkg/file" - "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scene" @@ -12,49 +10,17 @@ import ( "github.com/stashapp/stash/pkg/txn" ) -type ImageReaderWriter interface { - models.ImageReaderWriter - image.FinderCreatorUpdater - GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) -} - -type GalleryReaderWriter interface { - models.GalleryReaderWriter - gallery.FinderCreatorUpdater - gallery.Finder - models.FileLoader - GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) -} - -type SceneReaderWriter interface { - models.SceneReaderWriter - scene.CreatorUpdater - models.URLLoader - GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) -} - -type FileReaderWriter interface { - file.Store - Query(ctx context.Context, options models.FileQueryOptions) (*models.FileQueryResult, error) - 
GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error) - IsPrimary(ctx context.Context, fileID file.ID) (bool, error) -} - -type FolderReaderWriter interface { - file.FolderStore -} - type Repository struct { models.TxnManager - File FileReaderWriter - Folder FolderReaderWriter - Gallery GalleryReaderWriter + File models.FileReaderWriter + Folder models.FolderReaderWriter + Gallery models.GalleryReaderWriter GalleryChapter models.GalleryChapterReaderWriter - Image ImageReaderWriter + Image models.ImageReaderWriter Movie models.MovieReaderWriter Performer models.PerformerReaderWriter - Scene SceneReaderWriter + Scene models.SceneReaderWriter SceneMarker models.SceneMarkerReaderWriter Studio models.StudioReaderWriter Tag models.TagReaderWriter @@ -94,15 +60,15 @@ func sqliteRepository(d *sqlite.Database) Repository { } type SceneService interface { - Create(ctx context.Context, input *models.Scene, fileIDs []file.ID, coverImage []byte) (*models.Scene, error) - AssignFile(ctx context.Context, sceneID int, fileID file.ID) error + Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error) + AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error Merge(ctx context.Context, sourceIDs []int, destinationID int, values models.ScenePartial) error Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error } type ImageService interface { Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error - DestroyZipImages(ctx context.Context, zipFile file.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) + DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) } type GalleryService interface { diff --git a/internal/manager/scene.go 
b/internal/manager/scene.go index 39b96fec7..ff551754e 100644 --- a/internal/manager/scene.go +++ b/internal/manager/scene.go @@ -6,7 +6,6 @@ import ( "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/ffmpeg" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/models" ) @@ -57,7 +56,7 @@ var ( } ) -func GetVideoFileContainer(file *file.VideoFile) (ffmpeg.Container, error) { +func GetVideoFileContainer(file *models.VideoFile) (ffmpeg.Container, error) { var container ffmpeg.Container format := file.Format if format != "" { @@ -88,7 +87,7 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL *url.URL, maxStrea // convert StreamingResolutionEnum to ResolutionEnum maxStreamingResolution := models.ResolutionEnum(maxStreamingTranscodeSize) - sceneResolution := file.GetMinResolution(pf) + sceneResolution := models.GetMinResolution(pf) includeSceneStreamPath := func(streamingResolution models.StreamingResolutionEnum) bool { var minResolution int if streamingResolution == models.StreamingResolutionEnumOriginal { diff --git a/internal/manager/task_clean.go b/internal/manager/task_clean.go index 43cbc92d9..f5c3e1d54 100644 --- a/internal/manager/task_clean.go +++ b/internal/manager/task_clean.go @@ -257,7 +257,7 @@ type cleanHandler struct { PluginCache *plugin.Cache } -func (h *cleanHandler) HandleFile(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error { +func (h *cleanHandler) HandleFile(ctx context.Context, fileDeleter *file.Deleter, fileID models.FileID) error { if err := h.handleRelatedScenes(ctx, fileDeleter, fileID); err != nil { return err } @@ -271,11 +271,11 @@ func (h *cleanHandler) HandleFile(ctx context.Context, fileDeleter *file.Deleter return nil } -func (h *cleanHandler) HandleFolder(ctx context.Context, fileDeleter *file.Deleter, folderID file.FolderID) error { +func (h *cleanHandler) HandleFolder(ctx context.Context, fileDeleter 
*file.Deleter, folderID models.FolderID) error { return h.deleteRelatedFolderGalleries(ctx, folderID) } -func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error { +func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *file.Deleter, fileID models.FileID) error { mgr := GetInstance() sceneQB := mgr.Database.Scene scenes, err := sceneQB.FindByFileID(ctx, fileID) @@ -313,7 +313,7 @@ func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *fil }, nil) } else { // set the primary file to a remaining file - var newPrimaryID file.ID + var newPrimaryID models.FileID for _, f := range scene.Files.List() { if f.ID != fileID { newPrimaryID = f.ID @@ -332,7 +332,7 @@ func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *fil return nil } -func (h *cleanHandler) handleRelatedGalleries(ctx context.Context, fileID file.ID) error { +func (h *cleanHandler) handleRelatedGalleries(ctx context.Context, fileID models.FileID) error { mgr := GetInstance() qb := mgr.Database.Gallery galleries, err := qb.FindByFileID(ctx, fileID) @@ -358,7 +358,7 @@ func (h *cleanHandler) handleRelatedGalleries(ctx context.Context, fileID file.I }, nil) } else { // set the primary file to a remaining file - var newPrimaryID file.ID + var newPrimaryID models.FileID for _, f := range g.Files.List() { if f.Base().ID != fileID { newPrimaryID = f.Base().ID @@ -377,7 +377,7 @@ func (h *cleanHandler) handleRelatedGalleries(ctx context.Context, fileID file.I return nil } -func (h *cleanHandler) deleteRelatedFolderGalleries(ctx context.Context, folderID file.FolderID) error { +func (h *cleanHandler) deleteRelatedFolderGalleries(ctx context.Context, folderID models.FolderID) error { mgr := GetInstance() qb := mgr.Database.Gallery galleries, err := qb.FindByFolderID(ctx, folderID) @@ -401,7 +401,7 @@ func (h *cleanHandler) deleteRelatedFolderGalleries(ctx context.Context, folderI return nil } -func 
(h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error { +func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *file.Deleter, fileID models.FileID) error { mgr := GetInstance() imageQB := mgr.Database.Image images, err := imageQB.FindByFileID(ctx, fileID) @@ -431,7 +431,7 @@ func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *fil }, nil) } else { // set the primary file to a remaining file - var newPrimaryID file.ID + var newPrimaryID models.FileID for _, f := range i.Files.List() { if f.Base().ID != fileID { newPrimaryID = f.Base().ID diff --git a/internal/manager/task_export.go b/internal/manager/task_export.go index f186d3eb4..98ae1918f 100644 --- a/internal/manager/task_export.go +++ b/internal/manager/task_export.go @@ -13,7 +13,6 @@ import ( "time" "github.com/stashapp/stash/internal/manager/config" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/image" @@ -386,7 +385,7 @@ func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo Reposit logger.Infof("[scenes] export complete in %s. 
%d workers used.", time.Since(startTime), workers) } -func exportFile(f file.File, t *ExportTask) { +func exportFile(f models.File, t *ExportTask) { newFileJSON := fileToJSON(f) fn := newFileJSON.Filename() @@ -396,7 +395,7 @@ func exportFile(f file.File, t *ExportTask) { } } -func fileToJSON(f file.File) jsonschema.DirEntry { +func fileToJSON(f models.File) jsonschema.DirEntry { bf := f.Base() base := jsonschema.BaseFile{ @@ -422,7 +421,7 @@ func fileToJSON(f file.File) jsonschema.DirEntry { } switch ff := f.(type) { - case *file.VideoFile: + case *models.VideoFile: base.Type = jsonschema.DirEntryTypeVideo return jsonschema.VideoFile{ BaseFile: &base, @@ -437,7 +436,7 @@ func fileToJSON(f file.File) jsonschema.DirEntry { Interactive: ff.Interactive, InteractiveSpeed: ff.InteractiveSpeed, } - case *file.ImageFile: + case *models.ImageFile: base.Type = jsonschema.DirEntryTypeImage return jsonschema.ImageFile{ BaseFile: &base, @@ -450,7 +449,7 @@ func fileToJSON(f file.File) jsonschema.DirEntry { return &base } -func exportFolder(f file.Folder, t *ExportTask) { +func exportFolder(f models.Folder, t *ExportTask) { newFileJSON := folderToJSON(f) fn := newFileJSON.Filename() @@ -460,7 +459,7 @@ func exportFolder(f file.Folder, t *ExportTask) { } } -func folderToJSON(f file.Folder) jsonschema.DirEntry { +func folderToJSON(f models.Folder) jsonschema.DirEntry { base := jsonschema.BaseDirEntry{ Type: jsonschema.DirEntryTypeFolder, ModTime: json.JSONTime{Time: f.ModTime}, diff --git a/internal/manager/task_generate_clip_preview.go b/internal/manager/task_generate_clip_preview.go index c0ecfeedf..e8f98cd17 100644 --- a/internal/manager/task_generate_clip_preview.go +++ b/internal/manager/task_generate_clip_preview.go @@ -4,7 +4,6 @@ import ( "context" "fmt" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/logger" @@ -44,7 +43,7 @@ func (t *GenerateClipPreviewTask) Start(ctx 
context.Context) { } func (t *GenerateClipPreviewTask) required() bool { - _, ok := t.Image.Files.Primary().(*file.VideoFile) + _, ok := t.Image.Files.Primary().(*models.VideoFile) if !ok { return false } diff --git a/internal/manager/task_generate_markers.go b/internal/manager/task_generate_markers.go index 5d709874f..fa5ac9022 100644 --- a/internal/manager/task_generate_markers.go +++ b/internal/manager/task_generate_markers.go @@ -5,7 +5,6 @@ import ( "fmt" "path/filepath" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -102,7 +101,7 @@ func (t *GenerateMarkersTask) generateSceneMarkers(ctx context.Context) { } } -func (t *GenerateMarkersTask) generateMarker(videoFile *file.VideoFile, scene *models.Scene, sceneMarker *models.SceneMarker) { +func (t *GenerateMarkersTask) generateMarker(videoFile *models.VideoFile, scene *models.Scene, sceneMarker *models.SceneMarker) { sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) seconds := int(sceneMarker.Seconds) diff --git a/internal/manager/task_generate_phash.go b/internal/manager/task_generate_phash.go index 8ae84b02e..9f3945da3 100644 --- a/internal/manager/task_generate_phash.go +++ b/internal/manager/task_generate_phash.go @@ -4,7 +4,6 @@ import ( "context" "fmt" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/hash/videophash" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -12,11 +11,11 @@ import ( ) type GeneratePhashTask struct { - File *file.VideoFile + File *models.VideoFile Overwrite bool fileNamingAlgorithm models.HashAlgorithm txnManager txn.Manager - fileUpdater file.Updater + fileUpdater models.FileUpdater } func (t *GeneratePhashTask) GetDescription() string { @@ -38,8 +37,8 @@ func (t *GeneratePhashTask) Start(ctx context.Context) { if err := txn.WithTxn(ctx, t.txnManager, func(ctx context.Context) error { qb := t.fileUpdater hashValue := 
int64(*hash) - t.File.Fingerprints = t.File.Fingerprints.AppendUnique(file.Fingerprint{ - Type: file.FingerprintTypePhash, + t.File.Fingerprints = t.File.Fingerprints.AppendUnique(models.Fingerprint{ + Type: models.FingerprintTypePhash, Fingerprint: hashValue, }) @@ -54,5 +53,5 @@ func (t *GeneratePhashTask) required() bool { return true } - return t.File.Fingerprints.Get(file.FingerprintTypePhash) == nil + return t.File.Fingerprints.Get(models.FingerprintTypePhash) == nil } diff --git a/internal/manager/task_identify.go b/internal/manager/task_identify.go index f7ee5784c..0022a69ca 100644 --- a/internal/manager/task_identify.go +++ b/internal/manager/task_identify.go @@ -136,7 +136,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source SceneReaderUpdater: instance.Repository.Scene, StudioReaderWriter: instance.Repository.Studio, PerformerCreator: instance.Repository.Performer, - TagCreatorFinder: instance.Repository.Tag, + TagFinderCreator: instance.Repository.Tag, DefaultOptions: j.input.Options, Sources: sources, diff --git a/internal/manager/task_import.go b/internal/manager/task_import.go index aa0e7ec63..c0f97e254 100644 --- a/internal/manager/task_import.go +++ b/internal/manager/task_import.go @@ -10,6 +10,7 @@ import ( "path/filepath" "github.com/99designs/gqlgen/graphql" + "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/image" @@ -281,7 +282,7 @@ func (t *ImportTask) ImportStudios(ctx context.Context) { logger.Info("[studios] import complete") } -func (t *ImportTask) ImportStudio(ctx context.Context, studioJSON *jsonschema.Studio, pendingParent map[string][]*jsonschema.Studio, readerWriter studio.NameFinderCreatorUpdater) error { +func (t *ImportTask) ImportStudio(ctx context.Context, studioJSON *jsonschema.Studio, pendingParent map[string][]*jsonschema.Studio, readerWriter studio.ImporterReaderWriter) error { importer := 
&studio.Importer{ ReaderWriter: readerWriter, Input: *studioJSON, @@ -385,7 +386,7 @@ func (t *ImportTask) ImportFiles(ctx context.Context) { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { return t.ImportFile(ctx, fileJSON, pendingParent) }); err != nil { - if errors.Is(err, errZipFileNotExist) { + if errors.Is(err, file.ErrZipFileNotExist) { // add to the pending parent list so that it is created after the parent s := pendingParent[fileJSON.DirEntry().ZipFile] s = append(s, fileJSON) @@ -421,7 +422,7 @@ func (t *ImportTask) ImportFile(ctx context.Context, fileJSON jsonschema.DirEntr r := t.txnManager readerWriter := r.File - fileImporter := &fileFolderImporter{ + fileImporter := &file.Importer{ ReaderWriter: readerWriter, FolderStore: r.Folder, Input: fileJSON, @@ -569,7 +570,7 @@ func (t *ImportTask) ImportTags(ctx context.Context) { logger.Info("[tags] import complete") } -func (t *ImportTask) ImportTag(ctx context.Context, tagJSON *jsonschema.Tag, pendingParent map[string][]*jsonschema.Tag, fail bool, readerWriter tag.NameFinderCreatorUpdater) error { +func (t *ImportTask) ImportTag(ctx context.Context, tagJSON *jsonschema.Tag, pendingParent map[string][]*jsonschema.Tag, fail bool, readerWriter tag.ImporterReaderWriter) error { importer := &tag.Importer{ ReaderWriter: readerWriter, Input: *tagJSON, diff --git a/internal/manager/task_scan.go b/internal/manager/task_scan.go index 7c5e20156..f1f3e3927 100644 --- a/internal/manager/task_scan.go +++ b/internal/manager/task_scan.go @@ -96,17 +96,17 @@ func newExtensionConfig(c *config.Instance) extensionConfig { } type fileCounter interface { - CountByFileID(ctx context.Context, fileID file.ID) (int, error) + CountByFileID(ctx context.Context, fileID models.FileID) (int, error) } type galleryFinder interface { fileCounter - FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) + FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, 
error) } type sceneFinder interface { fileCounter - FindByPrimaryFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) + FindByPrimaryFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) } // handlerRequiredFilter returns true if a File's handler needs to be executed despite the file not being updated. @@ -139,7 +139,7 @@ func newHandlerRequiredFilter(c *config.Instance) *handlerRequiredFilter { } } -func (f *handlerRequiredFilter) Accept(ctx context.Context, ff file.File) bool { +func (f *handlerRequiredFilter) Accept(ctx context.Context, ff models.File) bool { path := ff.Base().Path isVideoFile := useAsVideo(path) isImageFile := useAsImage(path) @@ -213,7 +213,7 @@ func (f *handlerRequiredFilter) Accept(ctx context.Context, ff file.File) bool { // clean captions - scene handler handles this as well, but // unchanged files aren't processed by the scene handler - videoFile, _ := ff.(*file.VideoFile) + videoFile, _ := ff.(*models.VideoFile) if videoFile != nil { if err := video.CleanCaptions(ctx, videoFile, f.txnManager, f.CaptionUpdater); err != nil { logger.Errorf("Error cleaning captions: %v", err) @@ -370,7 +370,7 @@ type imageGenerators struct { progress *job.Progress } -func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f file.File) error { +func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f models.File) error { const overwrite = false progress := g.progress @@ -387,12 +387,12 @@ func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f file. 
} // avoid adding a task if the file isn't a video file - _, isVideo := f.(*file.VideoFile) + _, isVideo := f.(*models.VideoFile) if isVideo && t.ScanGenerateClipPreviews { // this is a bit of a hack: the task requires files to be loaded, but // we don't really need to since we already have the file ii := *i - ii.Files = models.NewRelatedFiles([]file.File{f}) + ii.Files = models.NewRelatedFiles([]models.File{f}) progress.AddTotal(1) previewsFn := func(ctx context.Context) { @@ -415,7 +415,7 @@ func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f file. return nil } -func (g *imageGenerators) generateThumbnail(ctx context.Context, i *models.Image, f file.File) error { +func (g *imageGenerators) generateThumbnail(ctx context.Context, i *models.Image, f models.File) error { thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth) exists, _ := fsutil.FileExists(thumbPath) if exists { @@ -424,12 +424,12 @@ func (g *imageGenerators) generateThumbnail(ctx context.Context, i *models.Image path := f.Base().Path - asFrame, ok := f.(file.VisualFile) + vf, ok := f.(models.VisualFile) if !ok { - return fmt.Errorf("file %s does not implement Frame", path) + return fmt.Errorf("file %s is not a visual file", path) } - if asFrame.GetHeight() <= models.DefaultGthumbWidth && asFrame.GetWidth() <= models.DefaultGthumbWidth { + if vf.GetHeight() <= models.DefaultGthumbWidth && vf.GetWidth() <= models.DefaultGthumbWidth { return nil } @@ -466,7 +466,7 @@ type sceneGenerators struct { progress *job.Progress } -func (g *sceneGenerators) Generate(ctx context.Context, s *models.Scene, f *file.VideoFile) error { +func (g *sceneGenerators) Generate(ctx context.Context, s *models.Scene, f *models.VideoFile) error { const overwrite = false progress := g.progress diff --git a/pkg/ffmpeg/stream_segmented.go b/pkg/ffmpeg/stream_segmented.go index fa7347582..68e6f4282 100644 --- a/pkg/ffmpeg/stream_segmented.go +++ 
b/pkg/ffmpeg/stream_segmented.go @@ -16,7 +16,6 @@ import ( "sync/atomic" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -51,7 +50,7 @@ const ( type StreamType struct { Name string SegmentType *SegmentType - ServeManifest func(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *file.VideoFile, resolution string) + ServeManifest func(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *models.VideoFile, resolution string) Args func(codec VideoCodec, segment int, videoFilter VideoFilter, videoOnly bool, outputDir string) Args } @@ -250,7 +249,7 @@ var ErrInvalidSegment = errors.New("invalid segment") type StreamOptions struct { StreamType *StreamType - VideoFile *file.VideoFile + VideoFile *models.VideoFile Resolution string Hash string Segment string @@ -279,7 +278,7 @@ type waitingSegment struct { type runningStream struct { dir string streamType *StreamType - vf *file.VideoFile + vf *models.VideoFile maxTranscodeSize int outputDir string @@ -394,7 +393,7 @@ func (tp *transcodeProcess) checkSegments() { } } -func lastSegment(vf *file.VideoFile) int { +func lastSegment(vf *models.VideoFile) int { return int(math.Ceil(vf.Duration/segmentLength)) - 1 } @@ -405,7 +404,7 @@ func segmentExists(path string) bool { // serveHLSManifest serves a generated HLS playlist. The URLs for the segments // are of the form {r.URL}/%d.ts{?urlQuery} where %d is the segment index. 
-func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *file.VideoFile, resolution string) { +func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *models.VideoFile, resolution string) { if sm.cacheDir == "" { logger.Error("[transcode] cannot live transcode with HLS because cache dir is unset") http.Error(w, "cannot live transcode with HLS because cache dir is unset", http.StatusServiceUnavailable) @@ -460,7 +459,7 @@ func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, } // serveDASHManifest serves a generated DASH manifest. -func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *file.VideoFile, resolution string) { +func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *models.VideoFile, resolution string) { if sm.cacheDir == "" { logger.Error("[transcode] cannot live transcode with DASH because cache dir is unset") http.Error(w, "cannot live transcode files with DASH because cache dir is unset", http.StatusServiceUnavailable) @@ -550,7 +549,7 @@ func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request utils.ServeStaticContent(w, r, buf.Bytes()) } -func (sm *StreamManager) ServeManifest(w http.ResponseWriter, r *http.Request, streamType *StreamType, vf *file.VideoFile, resolution string) { +func (sm *StreamManager) ServeManifest(w http.ResponseWriter, r *http.Request, streamType *StreamType, vf *models.VideoFile, resolution string) { streamType.ServeManifest(sm, w, r, vf, resolution) } diff --git a/pkg/ffmpeg/stream_transcode.go b/pkg/ffmpeg/stream_transcode.go index cd123183f..c5593ab8b 100644 --- a/pkg/ffmpeg/stream_transcode.go +++ b/pkg/ffmpeg/stream_transcode.go @@ -8,7 +8,6 @@ import ( "strings" "syscall" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -134,7 +133,7 @@ var ( 
type TranscodeOptions struct { StreamType StreamFormat - VideoFile *file.VideoFile + VideoFile *models.VideoFile Resolution string StartTime float64 } diff --git a/pkg/file/clean.go b/pkg/file/clean.go index 44470c5a0..d3e27a774 100644 --- a/pkg/file/clean.go +++ b/pkg/file/clean.go @@ -10,12 +10,13 @@ import ( "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) // Cleaner scans through stored file and folder instances and removes those that are no longer present on disk. type Cleaner struct { - FS FS + FS models.FS Repository Repository Handlers []CleanHandler @@ -55,44 +56,44 @@ func (s *Cleaner) Clean(ctx context.Context, options CleanOptions, progress *job } type fileOrFolder struct { - fileID ID - folderID FolderID + fileID models.FileID + folderID models.FolderID } type deleteSet struct { orderedList []fileOrFolder - fileIDSet map[ID]string + fileIDSet map[models.FileID]string - folderIDSet map[FolderID]string + folderIDSet map[models.FolderID]string } func newDeleteSet() deleteSet { return deleteSet{ - fileIDSet: make(map[ID]string), - folderIDSet: make(map[FolderID]string), + fileIDSet: make(map[models.FileID]string), + folderIDSet: make(map[models.FolderID]string), } } -func (s *deleteSet) add(id ID, path string) { +func (s *deleteSet) add(id models.FileID, path string) { if _, ok := s.fileIDSet[id]; !ok { s.orderedList = append(s.orderedList, fileOrFolder{fileID: id}) s.fileIDSet[id] = path } } -func (s *deleteSet) has(id ID) bool { +func (s *deleteSet) has(id models.FileID) bool { _, ok := s.fileIDSet[id] return ok } -func (s *deleteSet) addFolder(id FolderID, path string) { +func (s *deleteSet) addFolder(id models.FolderID, path string) { if _, ok := s.folderIDSet[id]; !ok { s.orderedList = append(s.orderedList, fileOrFolder{folderID: id}) s.folderIDSet[id] = path } } -func (s *deleteSet) hasFolder(id FolderID) bool { +func (s *deleteSet) hasFolder(id 
models.FolderID) bool { _, ok := s.folderIDSet[id] return ok } @@ -113,7 +114,7 @@ func (j *cleanJob) execute(ctx context.Context) error { if err := txn.WithReadTxn(ctx, j.Repository, func(ctx context.Context) error { var err error - fileCount, err = j.Repository.CountAllInPaths(ctx, j.options.Paths) + fileCount, err = j.Repository.FileStore.CountAllInPaths(ctx, j.options.Paths) if err != nil { return err } @@ -177,7 +178,7 @@ func (j *cleanJob) assessFiles(ctx context.Context, toDelete *deleteSet) error { return nil } - files, err := j.Repository.FindAllInPaths(ctx, j.options.Paths, batchSize, offset) + files, err := j.Repository.FileStore.FindAllInPaths(ctx, j.options.Paths, batchSize, offset) if err != nil { return fmt.Errorf("error querying for files: %w", err) } @@ -221,9 +222,9 @@ func (j *cleanJob) assessFiles(ctx context.Context, toDelete *deleteSet) error { } // flagFolderForDelete adds folders to the toDelete set, with the leaf folders added first -func (j *cleanJob) flagFileForDelete(ctx context.Context, toDelete *deleteSet, f File) error { +func (j *cleanJob) flagFileForDelete(ctx context.Context, toDelete *deleteSet, f models.File) error { // add contained files first - containedFiles, err := j.Repository.FindByZipFileID(ctx, f.Base().ID) + containedFiles, err := j.Repository.FileStore.FindByZipFileID(ctx, f.Base().ID) if err != nil { return fmt.Errorf("error finding contained files for %q: %w", f.Base().Path, err) } @@ -306,7 +307,7 @@ func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error return nil } -func (j *cleanJob) flagFolderForDelete(ctx context.Context, toDelete *deleteSet, folder *Folder) error { +func (j *cleanJob) flagFolderForDelete(ctx context.Context, toDelete *deleteSet, folder *models.Folder) error { // it is possible that child folders may be included while parent folders are not // so we need to check child folders separately toDelete.addFolder(folder.ID, folder.Path) @@ -314,7 +315,7 @@ func (j *cleanJob) 
flagFolderForDelete(ctx context.Context, toDelete *deleteSet, return nil } -func (j *cleanJob) shouldClean(ctx context.Context, f File) bool { +func (j *cleanJob) shouldClean(ctx context.Context, f models.File) bool { path := f.Base().Path info, err := f.Base().Info(j.FS) @@ -336,7 +337,7 @@ func (j *cleanJob) shouldClean(ctx context.Context, f File) bool { return !filter.Accept(ctx, path, info) } -func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *Folder) bool { +func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool { path := f.Path info, err := f.Info(j.FS) @@ -376,7 +377,7 @@ func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *Folder) bool { return !filter.Accept(ctx, path, info) } -func (j *cleanJob) deleteFile(ctx context.Context, fileID ID, fn string) { +func (j *cleanJob) deleteFile(ctx context.Context, fileID models.FileID, fn string) { // delete associated objects fileDeleter := NewDeleter() if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error { @@ -386,14 +387,14 @@ func (j *cleanJob) deleteFile(ctx context.Context, fileID ID, fn string) { return err } - return j.Repository.Destroy(ctx, fileID) + return j.Repository.FileStore.Destroy(ctx, fileID) }); err != nil { logger.Errorf("Error deleting file %q from database: %s", fn, err.Error()) return } } -func (j *cleanJob) deleteFolder(ctx context.Context, folderID FolderID, fn string) { +func (j *cleanJob) deleteFolder(ctx context.Context, folderID models.FolderID, fn string) { // delete associated objects fileDeleter := NewDeleter() if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error { @@ -410,7 +411,7 @@ func (j *cleanJob) deleteFolder(ctx context.Context, folderID FolderID, fn strin } } -func (j *cleanJob) fireHandlers(ctx context.Context, fileDeleter *Deleter, fileID ID) error { +func (j *cleanJob) fireHandlers(ctx context.Context, fileDeleter *Deleter, fileID models.FileID) error { for _, h := range j.Handlers { if err 
:= h.HandleFile(ctx, fileDeleter, fileID); err != nil { return err @@ -420,7 +421,7 @@ func (j *cleanJob) fireHandlers(ctx context.Context, fileDeleter *Deleter, fileI return nil } -func (j *cleanJob) fireFolderHandlers(ctx context.Context, fileDeleter *Deleter, folderID FolderID) error { +func (j *cleanJob) fireFolderHandlers(ctx context.Context, fileDeleter *Deleter, folderID models.FolderID) error { for _, h := range j.Handlers { if err := h.HandleFolder(ctx, fileDeleter, folderID); err != nil { return err diff --git a/pkg/file/delete.go b/pkg/file/delete.go index 9ee27c176..88eb5169e 100644 --- a/pkg/file/delete.go +++ b/pkg/file/delete.go @@ -9,6 +9,7 @@ import ( "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) @@ -179,7 +180,7 @@ func (d *Deleter) renameForRestore(path string) error { return d.RenamerRemover.Rename(path+deleteFileSuffix, path) } -func Destroy(ctx context.Context, destroyer Destroyer, f File, fileDeleter *Deleter, deleteFile bool) error { +func Destroy(ctx context.Context, destroyer models.FileDestroyer, f models.File, fileDeleter *Deleter, deleteFile bool) error { if err := destroyer.Destroy(ctx, f.Base().ID); err != nil { return err } @@ -195,11 +196,11 @@ func Destroy(ctx context.Context, destroyer Destroyer, f File, fileDeleter *Dele } type ZipDestroyer struct { - FileDestroyer GetterDestroyer - FolderDestroyer FolderGetterDestroyer + FileDestroyer models.FileFinderDestroyer + FolderDestroyer models.FolderFinderDestroyer } -func (d *ZipDestroyer) DestroyZip(ctx context.Context, f File, fileDeleter *Deleter, deleteFile bool) error { +func (d *ZipDestroyer) DestroyZip(ctx context.Context, f models.File, fileDeleter *Deleter, deleteFile bool) error { // destroy contained files files, err := d.FileDestroyer.FindByZipFileID(ctx, f.Base().ID) if err != nil { diff --git a/pkg/file/file.go b/pkg/file/file.go index 50a2d6138..179e1e01a 100644 
--- a/pkg/file/file.go +++ b/pkg/file/file.go @@ -1,225 +1,15 @@ package file import ( - "bytes" - "context" - "io" - "io/fs" - "net/http" - "strconv" - "time" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/txn" ) -// ID represents an ID of a file. -type ID int32 +// Repository provides access to storage methods for files and folders. +type Repository struct { + txn.Manager + txn.DatabaseProvider -func (i ID) String() string { - return strconv.Itoa(int(i)) -} - -// DirEntry represents a file or directory in the file system. -type DirEntry struct { - ZipFileID *ID `json:"zip_file_id"` - - // transient - not persisted - // only guaranteed to have id, path and basename set - ZipFile File - - ModTime time.Time `json:"mod_time"` -} - -func (e *DirEntry) info(fs FS, path string) (fs.FileInfo, error) { - if e.ZipFile != nil { - zipPath := e.ZipFile.Base().Path - zfs, err := fs.OpenZip(zipPath) - if err != nil { - return nil, err - } - defer zfs.Close() - fs = zfs - } - // else assume os file - - ret, err := fs.Lstat(path) - return ret, err -} - -// File represents a file in the file system. -type File interface { - Base() *BaseFile - SetFingerprints(fp Fingerprints) - Open(fs FS) (io.ReadCloser, error) -} - -// BaseFile represents a file in the file system. -type BaseFile struct { - ID ID `json:"id"` - - DirEntry - - // resolved from parent folder and basename only - not stored in DB - Path string `json:"path"` - - Basename string `json:"basename"` - ParentFolderID FolderID `json:"parent_folder_id"` - - Fingerprints Fingerprints `json:"fingerprints"` - - Size int64 `json:"size"` - - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` -} - -// SetFingerprints sets the fingerprints of the file. -// If a fingerprint of the same type already exists, it is overwritten. 
-func (f *BaseFile) SetFingerprints(fp Fingerprints) { - for _, v := range fp { - f.SetFingerprint(v) - } -} - -// SetFingerprint sets the fingerprint of the file. -// If a fingerprint of the same type already exists, it is overwritten. -func (f *BaseFile) SetFingerprint(fp Fingerprint) { - for i, existing := range f.Fingerprints { - if existing.Type == fp.Type { - f.Fingerprints[i] = fp - return - } - } - - f.Fingerprints = append(f.Fingerprints, fp) -} - -// Base is used to fulfil the File interface. -func (f *BaseFile) Base() *BaseFile { - return f -} - -func (f *BaseFile) Open(fs FS) (io.ReadCloser, error) { - if f.ZipFile != nil { - zipPath := f.ZipFile.Base().Path - zfs, err := fs.OpenZip(zipPath) - if err != nil { - return nil, err - } - - return zfs.OpenOnly(f.Path) - } - - return fs.Open(f.Path) -} - -func (f *BaseFile) Info(fs FS) (fs.FileInfo, error) { - return f.info(fs, f.Path) -} - -func (f *BaseFile) Serve(fs FS, w http.ResponseWriter, r *http.Request) error { - reader, err := f.Open(fs) - if err != nil { - return err - } - - defer reader.Close() - - content, ok := reader.(io.ReadSeeker) - if !ok { - data, err := io.ReadAll(reader) - if err != nil { - return err - } - content = bytes.NewReader(data) - } - - if r.URL.Query().Has("t") { - w.Header().Set("Cache-Control", "private, max-age=31536000, immutable") - } else { - w.Header().Set("Cache-Control", "no-cache") - } - http.ServeContent(w, r, f.Basename, f.ModTime, content) - - return nil -} - -type Finder interface { - Find(ctx context.Context, id ...ID) ([]File, error) -} - -// Getter provides methods to find Files. 
-type Getter interface { - Finder - FindByPath(ctx context.Context, path string) (File, error) - FindAllByPath(ctx context.Context, path string) ([]File, error) - FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error) - FindByZipFileID(ctx context.Context, zipFileID ID) ([]File, error) - FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error) - FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]File, error) -} - -type Counter interface { - CountAllInPaths(ctx context.Context, p []string) (int, error) - CountByFolderID(ctx context.Context, folderID FolderID) (int, error) -} - -// Creator provides methods to create Files. -type Creator interface { - Create(ctx context.Context, f File) error -} - -// Updater provides methods to update Files. -type Updater interface { - Update(ctx context.Context, f File) error -} - -type Destroyer interface { - Destroy(ctx context.Context, id ID) error -} - -type GetterUpdater interface { - Getter - Updater -} - -type GetterDestroyer interface { - Getter - Destroyer -} - -// Store provides methods to find, create and update Files. -type Store interface { - Getter - Counter - Creator - Updater - Destroyer - - IsPrimary(ctx context.Context, fileID ID) (bool, error) -} - -// Decorator wraps the Decorate method to add additional functionality while scanning files. -type Decorator interface { - Decorate(ctx context.Context, fs FS, f File) (File, error) - IsMissingMetadata(ctx context.Context, fs FS, f File) bool -} - -type FilteredDecorator struct { - Decorator - Filter -} - -// Decorate runs the decorator if the filter accepts the file. 
-func (d *FilteredDecorator) Decorate(ctx context.Context, fs FS, f File) (File, error) { - if d.Accept(ctx, f) { - return d.Decorator.Decorate(ctx, fs, f) - } - return f, nil -} - -func (d *FilteredDecorator) IsMissingMetadata(ctx context.Context, fs FS, f File) bool { - if d.Accept(ctx, f) { - return d.Decorator.IsMissingMetadata(ctx, fs, f) - } - - return false + FileStore models.FileReaderWriter + FolderStore models.FolderReaderWriter } diff --git a/pkg/file/folder.go b/pkg/file/folder.go index 5ffd7f2b5..02087dd41 100644 --- a/pkg/file/folder.go +++ b/pkg/file/folder.go @@ -3,94 +3,16 @@ package file import ( "context" "fmt" - "io/fs" "path/filepath" - "strconv" "strings" "time" + + "github.com/stashapp/stash/pkg/models" ) -// FolderID represents an ID of a folder. -type FolderID int32 - -// String converts the ID to a string. -func (i FolderID) String() string { - return strconv.Itoa(int(i)) -} - -// Folder represents a folder in the file system. -type Folder struct { - ID FolderID `json:"id"` - DirEntry - Path string `json:"path"` - ParentFolderID *FolderID `json:"parent_folder_id"` - - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` -} - -func (f *Folder) Info(fs FS) (fs.FileInfo, error) { - return f.info(fs, f.Path) -} - -type FolderFinder interface { - Find(ctx context.Context, id FolderID) (*Folder, error) -} - -// FolderPathFinder finds Folders by their path. -type FolderPathFinder interface { - FindByPath(ctx context.Context, path string) (*Folder, error) -} - -// FolderGetter provides methods to find Folders. 
-type FolderGetter interface { - FolderFinder - FolderPathFinder - FindByZipFileID(ctx context.Context, zipFileID ID) ([]*Folder, error) - FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error) - FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error) -} - -type FolderCounter interface { - CountAllInPaths(ctx context.Context, p []string) (int, error) -} - -// FolderCreator provides methods to create Folders. -type FolderCreator interface { - Create(ctx context.Context, f *Folder) error -} - -type FolderFinderCreator interface { - FolderPathFinder - FolderCreator -} - -// FolderUpdater provides methods to update Folders. -type FolderUpdater interface { - Update(ctx context.Context, f *Folder) error -} - -type FolderDestroyer interface { - Destroy(ctx context.Context, id FolderID) error -} - -type FolderGetterDestroyer interface { - FolderGetter - FolderDestroyer -} - -// FolderStore provides methods to find, create and update Folders. -type FolderStore interface { - FolderGetter - FolderCounter - FolderCreator - FolderUpdater - FolderDestroyer -} - // GetOrCreateFolderHierarchy gets the folder for the given path, or creates a folder hierarchy for the given path if one if no existing folder is found. 
// Does not create any folders in the file system -func GetOrCreateFolderHierarchy(ctx context.Context, fc FolderFinderCreator, path string) (*Folder, error) { +func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreator, path string) (*models.Folder, error) { // get or create folder hierarchy folder, err := fc.FindByPath(ctx, path) if err != nil { @@ -106,10 +28,10 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc FolderFinderCreator, pat now := time.Now() - folder = &Folder{ + folder = &models.Folder{ Path: path, ParentFolderID: &parent.ID, - DirEntry: DirEntry{ + DirEntry: models.DirEntry{ // leave mod time empty for now - it will be updated when the folder is scanned }, CreatedAt: now, @@ -126,7 +48,7 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc FolderFinderCreator, pat // TransferZipFolderHierarchy creates the folder hierarchy for zipFileID under newPath, and removes // ZipFileID from folders under oldPath. -func TransferZipFolderHierarchy(ctx context.Context, folderStore FolderStore, zipFileID ID, oldPath string, newPath string) error { +func TransferZipFolderHierarchy(ctx context.Context, folderStore models.FolderReaderWriter, zipFileID models.FileID, oldPath string, newPath string) error { zipFolders, err := folderStore.FindByZipFileID(ctx, zipFileID) if err != nil { return err diff --git a/pkg/file/folder_rename_detect.go b/pkg/file/folder_rename_detect.go index 0e52eb785..0b57d9c08 100644 --- a/pkg/file/folder_rename_detect.go +++ b/pkg/file/folder_rename_detect.go @@ -7,27 +7,28 @@ import ( "io/fs" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" ) type folderRenameCandidate struct { - folder *Folder + folder *models.Folder found int files int } type folderRenameDetector struct { // candidates is a map of folder id to the number of files that match - candidates map[FolderID]folderRenameCandidate + candidates map[models.FolderID]folderRenameCandidate // rejects is a set of folder 
ids which were found to still exist - rejects map[FolderID]struct{} + rejects map[models.FolderID]struct{} } -func (d *folderRenameDetector) isReject(id FolderID) bool { +func (d *folderRenameDetector) isReject(id models.FolderID) bool { _, ok := d.rejects[id] return ok } -func (d *folderRenameDetector) getCandidate(id FolderID) *folderRenameCandidate { +func (d *folderRenameDetector) getCandidate(id models.FolderID) *folderRenameCandidate { c, ok := d.candidates[id] if !ok { return nil @@ -40,14 +41,14 @@ func (d *folderRenameDetector) setCandidate(c folderRenameCandidate) { d.candidates[c.folder.ID] = c } -func (d *folderRenameDetector) reject(id FolderID) { +func (d *folderRenameDetector) reject(id models.FolderID) { d.rejects[id] = struct{}{} } // bestCandidate returns the folder that is the best candidate for a rename. // This is the folder that has the largest number of its original files that // are still present in the new location. -func (d *folderRenameDetector) bestCandidate() *Folder { +func (d *folderRenameDetector) bestCandidate() *models.Folder { if len(d.candidates) == 0 { return nil } @@ -74,14 +75,14 @@ func (d *folderRenameDetector) bestCandidate() *Folder { return best.folder } -func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*Folder, error) { +func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models.Folder, error) { // in order for a folder to be considered moved, the existing folder must be // missing, and the majority of the old folder's files must be present, unchanged, // in the new folder. 
detector := folderRenameDetector{ - candidates: make(map[FolderID]folderRenameCandidate), - rejects: make(map[FolderID]struct{}), + candidates: make(map[models.FolderID]folderRenameCandidate), + rejects: make(map[models.FolderID]struct{}), } // rejects is a set of folder ids which were found to still exist @@ -117,7 +118,7 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*Folder, } // check if the file exists in the database based on basename, size and mod time - existing, err := s.Repository.Store.FindByFileInfo(ctx, info, size) + existing, err := s.Repository.FileStore.FindByFileInfo(ctx, info, size) if err != nil { return fmt.Errorf("checking for existing file %q: %w", path, err) } @@ -163,7 +164,7 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*Folder, // parent folder is missing, possible candidate // count the total number of files in the existing folder - count, err := s.Repository.Store.CountByFolderID(ctx, parentFolderID) + count, err := s.Repository.FileStore.CountByFolderID(ctx, parentFolderID) if err != nil { return fmt.Errorf("counting files in folder %d: %w", parentFolderID, err) } diff --git a/pkg/file/frame.go b/pkg/file/frame.go deleted file mode 100644 index de9f74662..000000000 --- a/pkg/file/frame.go +++ /dev/null @@ -1,20 +0,0 @@ -package file - -// VisualFile is an interface for files that have a width and height. -type VisualFile interface { - File - GetWidth() int - GetHeight() int - GetFormat() string -} - -func GetMinResolution(f VisualFile) int { - w := f.GetWidth() - h := f.GetHeight() - - if w < h { - return w - } - - return h -} diff --git a/pkg/file/fs.go b/pkg/file/fs.go index 09c7c7c8e..80148cfa1 100644 --- a/pkg/file/fs.go +++ b/pkg/file/fs.go @@ -6,6 +6,7 @@ import ( "os" "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/models" ) // Opener provides an interface to open a file. 
@@ -14,7 +15,7 @@ type Opener interface { } type fsOpener struct { - fs FS + fs models.FS name string } @@ -22,15 +23,6 @@ func (o *fsOpener) Open() (io.ReadCloser, error) { return o.fs.Open(o.name) } -// FS represents a file system. -type FS interface { - Stat(name string) (fs.FileInfo, error) - Lstat(name string) (fs.FileInfo, error) - Open(name string) (fs.ReadDirFile, error) - OpenZip(name string) (*ZipFS, error) - IsPathCaseSensitive(path string) (bool, error) -} - // OsFS is a file system backed by the OS. type OsFS struct{} @@ -66,7 +58,7 @@ func (f *OsFS) Open(name string) (fs.ReadDirFile, error) { return os.Open(name) } -func (f *OsFS) OpenZip(name string) (*ZipFS, error) { +func (f *OsFS) OpenZip(name string) (models.ZipFS, error) { info, err := f.Lstat(name) if err != nil { return nil, err diff --git a/pkg/file/handler.go b/pkg/file/handler.go index 5932968b6..10616eefa 100644 --- a/pkg/file/handler.go +++ b/pkg/file/handler.go @@ -3,6 +3,8 @@ package file import ( "context" "io/fs" + + "github.com/stashapp/stash/pkg/models" ) // PathFilter provides a filter function for paths. @@ -18,18 +20,18 @@ func (pff PathFilterFunc) Accept(path string) bool { // Filter provides a filter function for Files. type Filter interface { - Accept(ctx context.Context, f File) bool + Accept(ctx context.Context, f models.File) bool } -type FilterFunc func(ctx context.Context, f File) bool +type FilterFunc func(ctx context.Context, f models.File) bool -func (ff FilterFunc) Accept(ctx context.Context, f File) bool { +func (ff FilterFunc) Accept(ctx context.Context, f models.File) bool { return ff(ctx, f) } // Handler provides a handler for Files. type Handler interface { - Handle(ctx context.Context, f File, oldFile File) error + Handle(ctx context.Context, f models.File, oldFile models.File) error } // FilteredHandler is a Handler runs only if the filter accepts the file. 
@@ -39,7 +41,7 @@ type FilteredHandler struct { } // Handle runs the handler if the filter accepts the file. -func (h *FilteredHandler) Handle(ctx context.Context, f File, oldFile File) error { +func (h *FilteredHandler) Handle(ctx context.Context, f models.File, oldFile models.File) error { if h.Accept(ctx, f) { return h.Handler.Handle(ctx, f, oldFile) } @@ -48,6 +50,6 @@ func (h *FilteredHandler) Handle(ctx context.Context, f File, oldFile File) erro // CleanHandler provides a handler for cleaning Files and Folders. type CleanHandler interface { - HandleFile(ctx context.Context, fileDeleter *Deleter, fileID ID) error - HandleFolder(ctx context.Context, fileDeleter *Deleter, folderID FolderID) error + HandleFile(ctx context.Context, fileDeleter *Deleter, fileID models.FileID) error + HandleFolder(ctx context.Context, fileDeleter *Deleter, folderID models.FolderID) error } diff --git a/pkg/file/image/scan.go b/pkg/file/image/scan.go index 5203adba9..ba22bbee9 100644 --- a/pkg/file/image/scan.go +++ b/pkg/file/image/scan.go @@ -13,6 +13,7 @@ import ( "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file/video" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" _ "golang.org/x/image/webp" ) @@ -21,10 +22,10 @@ type Decorator struct { FFProbe ffmpeg.FFProbe } -func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file.File, error) { +func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) { base := f.Base() - decorateFallback := func() (file.File, error) { + decorateFallback := func() (models.File, error) { r, err := fs.Open(base.Path) if err != nil { return f, fmt.Errorf("reading image file %q: %w", base.Path, err) @@ -35,7 +36,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file if err != nil { return f, fmt.Errorf("decoding image file %q: %w", base.Path, err) } - return &file.ImageFile{ + return &models.ImageFile{ 
BaseFile: base, Format: format, Width: c.Width, @@ -58,7 +59,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file // Fallback to catch non-animated avif images that FFProbe detects as video files if probe.Bitrate == 0 && probe.VideoCodec == "av1" { - return &file.ImageFile{ + return &models.ImageFile{ BaseFile: base, Format: "avif", Width: probe.Width, @@ -78,7 +79,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file return videoFileDecorator.Decorate(ctx, fs, f) } - return &file.ImageFile{ + return &models.ImageFile{ BaseFile: base, Format: probe.VideoCodec, Width: probe.Width, @@ -86,14 +87,14 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file }, nil } -func (d *Decorator) IsMissingMetadata(ctx context.Context, fs file.FS, f file.File) bool { +func (d *Decorator) IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool { const ( unsetString = "unset" unsetNumber = -1 ) - imf, isImage := f.(*file.ImageFile) - vf, isVideo := f.(*file.VideoFile) + imf, isImage := f.(*models.ImageFile) + vf, isVideo := f.(*models.VideoFile) switch { case isImage: diff --git a/pkg/file/image_file.go b/pkg/file/image_file.go deleted file mode 100644 index 0de2d9b98..000000000 --- a/pkg/file/image_file.go +++ /dev/null @@ -1,21 +0,0 @@ -package file - -// ImageFile is an extension of BaseFile to represent image files. 
-type ImageFile struct { - *BaseFile - Format string `json:"format"` - Width int `json:"width"` - Height int `json:"height"` -} - -func (f ImageFile) GetWidth() int { - return f.Width -} - -func (f ImageFile) GetHeight() int { - return f.Height -} - -func (f ImageFile) GetFormat() string { - return f.Format -} diff --git a/internal/manager/import_file.go b/pkg/file/import.go similarity index 68% rename from internal/manager/import_file.go rename to pkg/file/import.go index bad9d5bce..0af94a4d2 100644 --- a/internal/manager/import_file.go +++ b/pkg/file/import.go @@ -1,4 +1,4 @@ -package manager +package file import ( "context" @@ -7,24 +7,22 @@ import ( "path/filepath" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" ) -// HACK: this is all here because of an import loop in jsonschema -> models -> file +var ErrZipFileNotExist = errors.New("zip file does not exist") -var errZipFileNotExist = errors.New("zip file does not exist") - -type fileFolderImporter struct { - ReaderWriter file.Store - FolderStore file.FolderStore +type Importer struct { + ReaderWriter models.FileFinderCreator + FolderStore models.FolderFinderCreator Input jsonschema.DirEntry - file file.File - folder *file.Folder + file models.File + folder *models.Folder } -func (i *fileFolderImporter) PreImport(ctx context.Context) error { +func (i *Importer) PreImport(ctx context.Context) error { var err error switch ff := i.Input.(type) { @@ -37,9 +35,9 @@ func (i *fileFolderImporter) PreImport(ctx context.Context) error { return err } -func (i *fileFolderImporter) folderJSONToFolder(ctx context.Context, baseJSON *jsonschema.BaseDirEntry) (*file.Folder, error) { - ret := file.Folder{ - DirEntry: file.DirEntry{ +func (i *Importer) folderJSONToFolder(ctx context.Context, baseJSON *jsonschema.BaseDirEntry) (*models.Folder, error) { + ret := models.Folder{ + DirEntry: models.DirEntry{ ModTime: baseJSON.ModTime.GetTime(), 
}, Path: baseJSON.Path, @@ -56,14 +54,14 @@ func (i *fileFolderImporter) folderJSONToFolder(ctx context.Context, baseJSON *j return &ret, nil } -func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonschema.DirEntry) (file.File, error) { +func (i *Importer) fileJSONToFile(ctx context.Context, fileJSON jsonschema.DirEntry) (models.File, error) { switch ff := fileJSON.(type) { case *jsonschema.VideoFile: baseFile, err := i.baseFileJSONToBaseFile(ctx, ff.BaseFile) if err != nil { return nil, err } - return &file.VideoFile{ + return &models.VideoFile{ BaseFile: baseFile, Format: ff.Format, Width: ff.Width, @@ -81,7 +79,7 @@ func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonsc if err != nil { return nil, err } - return &file.ImageFile{ + return &models.ImageFile{ BaseFile: baseFile, Format: ff.Format, Width: ff.Width, @@ -94,9 +92,9 @@ func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonsc return nil, fmt.Errorf("unknown file type") } -func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonschema.BaseFile) (*file.BaseFile, error) { - baseFile := file.BaseFile{ - DirEntry: file.DirEntry{ +func (i *Importer) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonschema.BaseFile) (*models.BaseFile, error) { + baseFile := models.BaseFile{ + DirEntry: models.DirEntry{ ModTime: baseJSON.ModTime.GetTime(), }, Basename: filepath.Base(baseJSON.Path), @@ -106,7 +104,7 @@ func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSO } for _, fp := range baseJSON.Fingerprints { - baseFile.Fingerprints = append(baseFile.Fingerprints, file.Fingerprint{ + baseFile.Fingerprints = append(baseFile.Fingerprints, models.Fingerprint{ Type: fp.Type, Fingerprint: fp.Fingerprint, }) @@ -119,7 +117,7 @@ func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSO return &baseFile, nil } -func (i *fileFolderImporter) populateZipFileID(ctx 
context.Context, f *file.DirEntry) error { +func (i *Importer) populateZipFileID(ctx context.Context, f *models.DirEntry) error { zipFilePath := i.Input.DirEntry().ZipFile if zipFilePath != "" { zf, err := i.ReaderWriter.FindByPath(ctx, zipFilePath) @@ -128,7 +126,7 @@ func (i *fileFolderImporter) populateZipFileID(ctx context.Context, f *file.DirE } if zf == nil { - return errZipFileNotExist + return ErrZipFileNotExist } id := zf.Base().ID @@ -138,15 +136,15 @@ func (i *fileFolderImporter) populateZipFileID(ctx context.Context, f *file.DirE return nil } -func (i *fileFolderImporter) PostImport(ctx context.Context, id int) error { +func (i *Importer) PostImport(ctx context.Context, id int) error { return nil } -func (i *fileFolderImporter) Name() string { +func (i *Importer) Name() string { return i.Input.DirEntry().Path } -func (i *fileFolderImporter) FindExistingID(ctx context.Context) (*int, error) { +func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { path := i.Input.DirEntry().Path existing, err := i.ReaderWriter.FindByPath(ctx, path) if err != nil { @@ -161,7 +159,7 @@ func (i *fileFolderImporter) FindExistingID(ctx context.Context) (*int, error) { return nil, nil } -func (i *fileFolderImporter) createFolderHierarchy(ctx context.Context, p string) (*file.Folder, error) { +func (i *Importer) createFolderHierarchy(ctx context.Context, p string) (*models.Folder, error) { parentPath := filepath.Dir(p) if parentPath == p { @@ -177,7 +175,7 @@ func (i *fileFolderImporter) createFolderHierarchy(ctx context.Context, p string return i.getOrCreateFolder(ctx, p, parent) } -func (i *fileFolderImporter) getOrCreateFolder(ctx context.Context, path string, parent *file.Folder) (*file.Folder, error) { +func (i *Importer) getOrCreateFolder(ctx context.Context, path string, parent *models.Folder) (*models.Folder, error) { folder, err := i.FolderStore.FindByPath(ctx, path) if err != nil { return nil, err @@ -189,7 +187,7 @@ func (i *fileFolderImporter) 
getOrCreateFolder(ctx context.Context, path string, now := time.Now() - folder = &file.Folder{ + folder = &models.Folder{ Path: path, CreatedAt: now, UpdatedAt: now, @@ -207,7 +205,7 @@ func (i *fileFolderImporter) getOrCreateFolder(ctx context.Context, path string, return folder, nil } -func (i *fileFolderImporter) Create(ctx context.Context) (*int, error) { +func (i *Importer) Create(ctx context.Context) (*int, error) { // create folder hierarchy and set parent folder id path := i.Input.DirEntry().Path path = filepath.Dir(path) @@ -223,7 +221,7 @@ func (i *fileFolderImporter) Create(ctx context.Context) (*int, error) { return i.createFile(ctx, folder) } -func (i *fileFolderImporter) createFile(ctx context.Context, parentFolder *file.Folder) (*int, error) { +func (i *Importer) createFile(ctx context.Context, parentFolder *models.Folder) (*int, error) { if parentFolder != nil { i.file.Base().ParentFolderID = parentFolder.ID } @@ -236,7 +234,7 @@ func (i *fileFolderImporter) createFile(ctx context.Context, parentFolder *file. 
return &id, nil } -func (i *fileFolderImporter) createFolder(ctx context.Context, parentFolder *file.Folder) (*int, error) { +func (i *Importer) createFolder(ctx context.Context, parentFolder *models.Folder) (*int, error) { if parentFolder != nil { i.folder.ParentFolderID = &parentFolder.ID } @@ -249,7 +247,7 @@ func (i *fileFolderImporter) createFolder(ctx context.Context, parentFolder *fil return &id, nil } -func (i *fileFolderImporter) Update(ctx context.Context, id int) error { +func (i *Importer) Update(ctx context.Context, id int) error { // update not supported return nil } diff --git a/pkg/file/move.go b/pkg/file/move.go index 3b3c66ec5..64a83fed6 100644 --- a/pkg/file/move.go +++ b/pkg/file/move.go @@ -11,6 +11,7 @@ import ( "time" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) @@ -40,14 +41,14 @@ func (r folderCreatorStatRenamerImpl) Mkdir(name string, perm os.FileMode) error type Mover struct { Renamer DirMakerStatRenamer - Files GetterUpdater - Folders FolderStore + Files models.FileFinderUpdater + Folders models.FolderReaderWriter moved map[string]string foldersCreated []string } -func NewMover(fileStore GetterUpdater, folderStore FolderStore) *Mover { +func NewMover(fileStore models.FileFinderUpdater, folderStore models.FolderReaderWriter) *Mover { return &Mover{ Files: fileStore, Folders: folderStore, @@ -60,7 +61,7 @@ func NewMover(fileStore GetterUpdater, folderStore FolderStore) *Mover { // Move moves the file to the given folder and basename. If basename is empty, then the existing basename is used. // Assumes that the parent folder exists in the filesystem. 
-func (m *Mover) Move(ctx context.Context, f File, folder *Folder, basename string) error { +func (m *Mover) Move(ctx context.Context, f models.File, folder *models.Folder, basename string) error { fBase := f.Base() // don't allow moving files in zip files diff --git a/pkg/file/scan.go b/pkg/file/scan.go index badb5ab23..a0d301e60 100644 --- a/pkg/file/scan.go +++ b/pkg/file/scan.go @@ -13,6 +13,7 @@ import ( "github.com/remeh/sizedwaitgroup" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) @@ -24,15 +25,6 @@ const ( maxRetries = -1 ) -// Repository provides access to storage methods for files and folders. -type Repository struct { - txn.Manager - txn.DatabaseProvider - Store - - FolderStore FolderStore -} - // Scanner scans files into the database. // // The scan process works using two goroutines. The first walks through the provided paths @@ -59,7 +51,7 @@ type Repository struct { // If the file is not a renamed file, then the decorators are fired and the file is created, then // the applicable handlers are fired. type Scanner struct { - FS FS + FS models.FS Repository Repository FingerprintCalculator FingerprintCalculator @@ -67,6 +59,38 @@ type Scanner struct { FileDecorators []Decorator } +// FingerprintCalculator calculates a fingerprint for the provided file. +type FingerprintCalculator interface { + CalculateFingerprints(f *models.BaseFile, o Opener, useExisting bool) ([]models.Fingerprint, error) +} + +// Decorator wraps the Decorate method to add additional functionality while scanning files. +type Decorator interface { + Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) + IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool +} + +type FilteredDecorator struct { + Decorator + Filter +} + +// Decorate runs the decorator if the filter accepts the file. 
+func (d *FilteredDecorator) Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) { + if d.Accept(ctx, f) { + return d.Decorator.Decorate(ctx, fs, f) + } + return f, nil +} + +func (d *FilteredDecorator) IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool { + if d.Accept(ctx, f) { + return d.Decorator.IsMissingMetadata(ctx, fs, f) + } + + return false +} + // ProgressReporter is used to report progress of the scan. type ProgressReporter interface { AddTotal(total int) @@ -129,8 +153,8 @@ func (s *Scanner) Scan(ctx context.Context, handlers []Handler, options ScanOpti } type scanFile struct { - *BaseFile - fs FS + *models.BaseFile + fs models.FS info fs.FileInfo } @@ -198,7 +222,7 @@ func (s *scanJob) queueFiles(ctx context.Context, paths []string) error { return err } -func (s *scanJob) queueFileFunc(ctx context.Context, f FS, zipFile *scanFile) fs.WalkDirFunc { +func (s *scanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *scanFile) fs.WalkDirFunc { return func(path string, d fs.DirEntry, err error) error { if err != nil { // don't let errors prevent scanning @@ -229,8 +253,8 @@ func (s *scanJob) queueFileFunc(ctx context.Context, f FS, zipFile *scanFile) fs } ff := scanFile{ - BaseFile: &BaseFile{ - DirEntry: DirEntry{ + BaseFile: &models.BaseFile{ + DirEntry: models.DirEntry{ ModTime: modTime(info), }, Path: path, @@ -286,7 +310,7 @@ func (s *scanJob) queueFileFunc(ctx context.Context, f FS, zipFile *scanFile) fs } } -func getFileSize(f FS, path string, info fs.FileInfo) (int64, error) { +func getFileSize(f models.FS, path string, info fs.FileInfo) (int64, error) { // #2196/#3042 - replace size with target size if file is a symlink if info.Mode()&os.ModeSymlink == os.ModeSymlink { targetInfo, err := f.Stat(path) @@ -408,10 +432,10 @@ func (s *scanJob) processQueueItem(ctx context.Context, f scanFile) { }) } -func (s *scanJob) getFolderID(ctx context.Context, path string) (*FolderID, error) { +func (s 
*scanJob) getFolderID(ctx context.Context, path string) (*models.FolderID, error) { // check the folder cache first if f, ok := s.folderPathToID.Load(path); ok { - v := f.(FolderID) + v := f.(models.FolderID) return &v, nil } @@ -428,7 +452,7 @@ func (s *scanJob) getFolderID(ctx context.Context, path string) (*FolderID, erro return &ret.ID, nil } -func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*ID, error) { +func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*models.FileID, error) { if zipFile == nil { return nil, nil } @@ -441,11 +465,11 @@ func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*ID, err // check the folder cache first if f, ok := s.zipPathToID.Load(path); ok { - v := f.(ID) + v := f.(models.FileID) return &v, nil } - ret, err := s.Repository.FindByPath(ctx, path) + ret, err := s.Repository.FileStore.FindByPath(ctx, path) if err != nil { return nil, fmt.Errorf("getting zip file ID for %q: %w", path, err) } @@ -489,7 +513,7 @@ func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error { }) } -func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, error) { +func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*models.Folder, error) { renamed, err := s.handleFolderRename(ctx, file) if err != nil { return nil, err @@ -501,7 +525,7 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, erro now := time.Now() - toCreate := &Folder{ + toCreate := &models.Folder{ DirEntry: file.DirEntry, Path: file.Path, CreatedAt: now, @@ -536,7 +560,7 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, erro return toCreate, nil } -func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*Folder, error) { +func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*models.Folder, error) { // ignore folders in zip files if file.ZipFileID != nil { return nil, nil @@ -572,7 +596,7 @@ 
func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*Folde return renamedFrom, nil } -func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *Folder) (*Folder, error) { +func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *models.Folder) (*models.Folder, error) { update := false // update if mod time is changed @@ -613,12 +637,12 @@ func modTime(info fs.FileInfo) time.Time { func (s *scanJob) handleFile(ctx context.Context, f scanFile) error { defer s.incrementProgress(f) - var ff File + var ff models.File // don't use a transaction to check if new or existing if err := s.withDB(ctx, func(ctx context.Context) error { // determine if file already exists in data store var err error - ff, err = s.Repository.FindByPath(ctx, f.Path) + ff, err = s.Repository.FileStore.FindByPath(ctx, f.Path) if err != nil { return fmt.Errorf("checking for existing file %q: %w", f.Path, err) } @@ -661,7 +685,7 @@ func (s *scanJob) isZipFile(path string) bool { return false } -func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) { +func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error) { now := time.Now() baseFile := f.BaseFile @@ -716,7 +740,7 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) { // if not renamed, queue file for creation if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Create(ctx, file); err != nil { + if err := s.Repository.FileStore.Create(ctx, file); err != nil { return fmt.Errorf("creating file %q: %w", path, err) } @@ -732,7 +756,7 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) { return file, nil } -func (s *scanJob) fireDecorators(ctx context.Context, fs FS, f File) (File, error) { +func (s *scanJob) fireDecorators(ctx context.Context, fs models.FS, f models.File) (models.File, error) { for _, h := range s.FileDecorators { var err error f, err = 
h.Decorate(ctx, fs, f) @@ -744,7 +768,7 @@ func (s *scanJob) fireDecorators(ctx context.Context, fs FS, f File) (File, erro return f, nil } -func (s *scanJob) fireHandlers(ctx context.Context, f File, oldFile File) error { +func (s *scanJob) fireHandlers(ctx context.Context, f models.File, oldFile models.File) error { for _, h := range s.handlers { if err := h.Handle(ctx, f, oldFile); err != nil { return err @@ -754,7 +778,7 @@ func (s *scanJob) fireHandlers(ctx context.Context, f File, oldFile File) error return nil } -func (s *scanJob) calculateFingerprints(fs FS, f *BaseFile, path string, useExisting bool) (Fingerprints, error) { +func (s *scanJob) calculateFingerprints(fs models.FS, f *models.BaseFile, path string, useExisting bool) (models.Fingerprints, error) { // only log if we're (re)calculating fingerprints if !useExisting { logger.Infof("Calculating fingerprints for %s ...", path) @@ -772,7 +796,7 @@ func (s *scanJob) calculateFingerprints(fs FS, f *BaseFile, path string, useExis return fp, nil } -func appendFileUnique(v []File, toAdd []File) []File { +func appendFileUnique(v []models.File, toAdd []models.File) []models.File { for _, f := range toAdd { found := false id := f.Base().ID @@ -791,7 +815,7 @@ func appendFileUnique(v []File, toAdd []File) []File { return v } -func (s *scanJob) getFileFS(f *BaseFile) (FS, error) { +func (s *scanJob) getFileFS(f *models.BaseFile) (models.FS, error) { if f.ZipFile == nil { return s.FS, nil } @@ -805,11 +829,11 @@ func (s *scanJob) getFileFS(f *BaseFile) (FS, error) { return fs.OpenZip(zipPath) } -func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (File, error) { - var others []File +func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint) (models.File, error) { + var others []models.File for _, tfp := range fp { - thisOthers, err := s.Repository.FindByFingerprint(ctx, tfp) + thisOthers, err := s.Repository.FileStore.FindByFingerprint(ctx, tfp) if err != 
nil { return nil, fmt.Errorf("getting files by fingerprint %v: %w", tfp, err) } @@ -817,7 +841,7 @@ func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (F others = appendFileUnique(others, thisOthers) } - var missing []File + var missing []models.File fZipID := f.Base().ZipFileID for _, other := range others { @@ -867,7 +891,7 @@ func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (F fBase.Fingerprints = otherBase.Fingerprints if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Update(ctx, f); err != nil { + if err := s.Repository.FileStore.Update(ctx, f); err != nil { return fmt.Errorf("updating file for rename %q: %w", fBase.Path, err) } @@ -889,7 +913,7 @@ func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (F return f, nil } -func (s *scanJob) isHandlerRequired(ctx context.Context, f File) bool { +func (s *scanJob) isHandlerRequired(ctx context.Context, f models.File) bool { accept := len(s.options.HandlerRequiredFilters) == 0 for _, filter := range s.options.HandlerRequiredFilters { // accept if any filter accepts the file @@ -910,7 +934,7 @@ func (s *scanJob) isHandlerRequired(ctx context.Context, f File) bool { // - file size // - image format, width or height // - video codec, audio codec, format, width, height, framerate or bitrate -func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing File) bool { +func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing models.File) bool { for _, h := range s.FileDecorators { if h.IsMissingMetadata(ctx, f.fs, existing) { return true @@ -920,7 +944,7 @@ func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing Fi return false } -func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing File) (File, error) { +func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing models.File) (models.File, error) { path := 
existing.Base().Path logger.Infof("Updating metadata for %s", path) @@ -934,7 +958,7 @@ func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing F // queue file for update if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Update(ctx, existing); err != nil { + if err := s.Repository.FileStore.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", path, err) } @@ -946,7 +970,7 @@ func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing F return existing, nil } -func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existing File) (File, error) { +func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existing models.File) (models.File, error) { const useExisting = true fp, err := s.calculateFingerprints(f.fs, existing.Base(), f.Path, useExisting) if err != nil { @@ -957,7 +981,7 @@ func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existi existing.SetFingerprints(fp) if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Update(ctx, existing); err != nil { + if err := s.Repository.FileStore.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", f.Path, err) } @@ -971,7 +995,7 @@ func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existi } // returns a file only if it was updated -func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) (File, error) { +func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing models.File) (models.File, error) { base := existing.Base() path := base.Path @@ -1006,7 +1030,7 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) // queue file for update if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Update(ctx, existing); err != nil { + if err := s.Repository.FileStore.Update(ctx, existing); err != 
nil { return fmt.Errorf("updating file %q: %w", path, err) } @@ -1022,21 +1046,21 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) return existing, nil } -func (s *scanJob) removeOutdatedFingerprints(existing File, fp Fingerprints) { +func (s *scanJob) removeOutdatedFingerprints(existing models.File, fp models.Fingerprints) { // HACK - if no MD5 fingerprint was returned, and the oshash is changed // then remove the MD5 fingerprint - oshash := fp.For(FingerprintTypeOshash) + oshash := fp.For(models.FingerprintTypeOshash) if oshash == nil { return } - existingOshash := existing.Base().Fingerprints.For(FingerprintTypeOshash) + existingOshash := existing.Base().Fingerprints.For(models.FingerprintTypeOshash) if existingOshash == nil || *existingOshash == *oshash { // missing oshash or same oshash - nothing to do return } - md5 := fp.For(FingerprintTypeMD5) + md5 := fp.For(models.FingerprintTypeMD5) if md5 != nil { // nothing to do @@ -1045,11 +1069,11 @@ func (s *scanJob) removeOutdatedFingerprints(existing File, fp Fingerprints) { // oshash has changed, MD5 is missing - remove MD5 from the existing fingerprints logger.Infof("Removing outdated checksum from %s", existing.Base().Path) - existing.Base().Fingerprints.Remove(FingerprintTypeMD5) + existing.Base().Fingerprints.Remove(models.FingerprintTypeMD5) } // returns a file only if it was updated -func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing File) (File, error) { +func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing models.File) (models.File, error) { var err error isMissingMetdata := s.isMissingMetadata(ctx, f, existing) diff --git a/pkg/file/video/caption.go b/pkg/file/video/caption.go index d2f8e79a5..bec3db6fd 100644 --- a/pkg/file/video/caption.go +++ b/pkg/file/video/caption.go @@ -9,7 +9,6 @@ import ( "strings" "github.com/asticode/go-astisub" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" 
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" @@ -87,12 +86,12 @@ func getCaptionsLangFromPath(captionPath string) string { } type CaptionUpdater interface { - GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error) - UpdateCaptions(ctx context.Context, fileID file.ID, captions []*models.VideoCaption) error + GetCaptions(ctx context.Context, fileID models.FileID) ([]*models.VideoCaption, error) + UpdateCaptions(ctx context.Context, fileID models.FileID, captions []*models.VideoCaption) error } // associates captions to scene/s with the same basename -func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb file.Getter, w CaptionUpdater) { +func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb models.FileFinder, w CaptionUpdater) { captionLang := getCaptionsLangFromPath(captionPath) captionPrefix := getCaptionPrefix(captionPath) @@ -108,7 +107,7 @@ func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manag // found some files // filter out non video files switch f.(type) { - case *file.VideoFile: + case *models.VideoFile: break default: continue @@ -143,7 +142,7 @@ func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manag } // CleanCaptions removes non existent/accessible language codes from captions -func CleanCaptions(ctx context.Context, f *file.VideoFile, txnMgr txn.Manager, w CaptionUpdater) error { +func CleanCaptions(ctx context.Context, f *models.VideoFile, txnMgr txn.Manager, w CaptionUpdater) error { captions, err := w.GetCaptions(ctx, f.ID) if err != nil { return fmt.Errorf("getting captions for file %s: %w", f.Path, err) diff --git a/pkg/file/video/scan.go b/pkg/file/video/scan.go index 1f3d7817f..ca7d0be96 100644 --- a/pkg/file/video/scan.go +++ b/pkg/file/video/scan.go @@ -7,6 +7,7 @@ import ( "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/file" + 
"github.com/stashapp/stash/pkg/models" ) // Decorator adds video specific fields to a File. @@ -14,7 +15,7 @@ type Decorator struct { FFProbe ffmpeg.FFProbe } -func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file.File, error) { +func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) { if d.FFProbe == "" { return f, errors.New("ffprobe not configured") } @@ -42,7 +43,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file interactive = true } - return &file.VideoFile{ + return &models.VideoFile{ BaseFile: base, Format: string(container), VideoCodec: videoFile.VideoCodec, @@ -56,13 +57,13 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file }, nil } -func (d *Decorator) IsMissingMetadata(ctx context.Context, fs file.FS, f file.File) bool { +func (d *Decorator) IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool { const ( unsetString = "unset" unsetNumber = -1 ) - vf, ok := f.(*file.VideoFile) + vf, ok := f.(*models.VideoFile) if !ok { return true } diff --git a/pkg/file/video_file.go b/pkg/file/video_file.go deleted file mode 100644 index 382c81e19..000000000 --- a/pkg/file/video_file.go +++ /dev/null @@ -1,29 +0,0 @@ -package file - -// VideoFile is an extension of BaseFile to represent video files. 
-type VideoFile struct { - *BaseFile - Format string `json:"format"` - Width int `json:"width"` - Height int `json:"height"` - Duration float64 `json:"duration"` - VideoCodec string `json:"video_codec"` - AudioCodec string `json:"audio_codec"` - FrameRate float64 `json:"frame_rate"` - BitRate int64 `json:"bitrate"` - - Interactive bool `json:"interactive"` - InteractiveSpeed *int `json:"interactive_speed"` -} - -func (f VideoFile) GetWidth() int { - return f.Width -} - -func (f VideoFile) GetHeight() int { - return f.Height -} - -func (f VideoFile) GetFormat() string { - return f.Format -} diff --git a/pkg/file/walk.go b/pkg/file/walk.go index a73781d45..3c6a157b7 100644 --- a/pkg/file/walk.go +++ b/pkg/file/walk.go @@ -6,6 +6,8 @@ import ( "os" "path/filepath" "sort" + + "github.com/stashapp/stash/pkg/models" ) // Modified from github.com/facebookgo/symwalk @@ -48,7 +50,7 @@ import ( // // Note that symwalk.Walk does not terminate if there are any non-terminating loops in // the file structure. 
-func walkSym(f FS, filename string, linkDirname string, walkFn fs.WalkDirFunc) error { +func walkSym(f models.FS, filename string, linkDirname string, walkFn fs.WalkDirFunc) error { symWalkFunc := func(path string, info fs.DirEntry, err error) error { if fname, err := filepath.Rel(filename, path); err == nil { @@ -80,7 +82,7 @@ func walkSym(f FS, filename string, linkDirname string, walkFn fs.WalkDirFunc) e } // symWalk extends filepath.Walk to also follow symlinks -func symWalk(fs FS, path string, walkFn fs.WalkDirFunc) error { +func symWalk(fs models.FS, path string, walkFn fs.WalkDirFunc) error { return walkSym(fs, path, path, walkFn) } @@ -93,7 +95,7 @@ func (d *statDirEntry) IsDir() bool { return d.info.IsDir() } func (d *statDirEntry) Type() fs.FileMode { return d.info.Mode().Type() } func (d *statDirEntry) Info() (fs.FileInfo, error) { return d.info, nil } -func fsWalk(f FS, root string, fn fs.WalkDirFunc) error { +func fsWalk(f models.FS, root string, fn fs.WalkDirFunc) error { info, err := f.Lstat(root) if err != nil { err = fn(root, nil, err) @@ -106,7 +108,7 @@ func fsWalk(f FS, root string, fn fs.WalkDirFunc) error { return err } -func walkDir(f FS, path string, d fs.DirEntry, walkDirFn fs.WalkDirFunc) error { +func walkDir(f models.FS, path string, d fs.DirEntry, walkDirFn fs.WalkDirFunc) error { if err := walkDirFn(path, d, nil); err != nil || !d.IsDir() { if errors.Is(err, fs.SkipDir) && d.IsDir() { // Successfully skipped directory. @@ -143,7 +145,7 @@ func walkDir(f FS, path string, d fs.DirEntry, walkDirFn fs.WalkDirFunc) error { // readDir reads the directory named by dirname and returns // a sorted list of directory entries. 
-func readDir(fs FS, dirname string) ([]fs.DirEntry, error) { +func readDir(fs models.FS, dirname string) ([]fs.DirEntry, error) { f, err := fs.Open(dirname) if err != nil { return nil, err diff --git a/pkg/file/zip.go b/pkg/file/zip.go index 5cef1184e..a17b59685 100644 --- a/pkg/file/zip.go +++ b/pkg/file/zip.go @@ -10,6 +10,7 @@ import ( "path/filepath" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/xWTF/chardet" "golang.org/x/net/html/charset" @@ -22,14 +23,14 @@ var ( ) // ZipFS is a file system backed by a zip file. -type ZipFS struct { +type zipFS struct { *zip.Reader zipFileCloser io.Closer zipInfo fs.FileInfo zipPath string } -func newZipFS(fs FS, path string, info fs.FileInfo) (*ZipFS, error) { +func newZipFS(fs models.FS, path string, info fs.FileInfo) (*zipFS, error) { reader, err := fs.Open(path) if err != nil { return nil, err @@ -85,7 +86,7 @@ func newZipFS(fs FS, path string, info fs.FileInfo) (*ZipFS, error) { } } - return &ZipFS{ + return &zipFS{ Reader: zipReader, zipFileCloser: reader, zipInfo: info, @@ -93,7 +94,7 @@ func newZipFS(fs FS, path string, info fs.FileInfo) (*ZipFS, error) { }, nil } -func (f *ZipFS) rel(name string) (string, error) { +func (f *zipFS) rel(name string) (string, error) { if f.zipPath == name { return ".", nil } @@ -110,7 +111,7 @@ func (f *ZipFS) rel(name string) (string, error) { return relName, nil } -func (f *ZipFS) Stat(name string) (fs.FileInfo, error) { +func (f *zipFS) Stat(name string) (fs.FileInfo, error) { reader, err := f.Open(name) if err != nil { return nil, err @@ -120,15 +121,15 @@ func (f *ZipFS) Stat(name string) (fs.FileInfo, error) { return reader.Stat() } -func (f *ZipFS) Lstat(name string) (fs.FileInfo, error) { +func (f *zipFS) Lstat(name string) (fs.FileInfo, error) { return f.Stat(name) } -func (f *ZipFS) OpenZip(name string) (*ZipFS, error) { +func (f *zipFS) OpenZip(name string) (models.ZipFS, error) { return nil, errZipFSOpenZip } -func (f *ZipFS) 
IsPathCaseSensitive(path string) (bool, error) { +func (f *zipFS) IsPathCaseSensitive(path string) (bool, error) { return true, nil } @@ -145,7 +146,7 @@ func (f *zipReadDirFile) ReadDir(n int) ([]fs.DirEntry, error) { return asReadDirFile.ReadDir(n) } -func (f *ZipFS) Open(name string) (fs.ReadDirFile, error) { +func (f *zipFS) Open(name string) (fs.ReadDirFile, error) { relName, err := f.rel(name) if err != nil { return nil, err @@ -161,12 +162,12 @@ func (f *ZipFS) Open(name string) (fs.ReadDirFile, error) { }, nil } -func (f *ZipFS) Close() error { +func (f *zipFS) Close() error { return f.zipFileCloser.Close() } // openOnly returns a ReadCloser where calling Close will close the zip fs as well. -func (f *ZipFS) OpenOnly(name string) (io.ReadCloser, error) { +func (f *zipFS) OpenOnly(name string) (io.ReadCloser, error) { r, err := f.Open(name) if err != nil { return nil, err diff --git a/pkg/gallery/chapter_import.go b/pkg/gallery/chapter_import.go index 91abe909d..ee223b1aa 100644 --- a/pkg/gallery/chapter_import.go +++ b/pkg/gallery/chapter_import.go @@ -8,15 +8,14 @@ import ( "github.com/stashapp/stash/pkg/models/jsonschema" ) -type ChapterCreatorUpdater interface { - Create(ctx context.Context, newGalleryChapter *models.GalleryChapter) error - Update(ctx context.Context, updatedGalleryChapter *models.GalleryChapter) error +type ChapterImporterReaderWriter interface { + models.GalleryChapterCreatorUpdater FindByGalleryID(ctx context.Context, galleryID int) ([]*models.GalleryChapter, error) } type ChapterImporter struct { GalleryID int - ReaderWriter ChapterCreatorUpdater + ReaderWriter ChapterImporterReaderWriter Input jsonschema.GalleryChapter MissingRefBehaviour models.ImportMissingRefEnum diff --git a/pkg/gallery/delete.go b/pkg/gallery/delete.go index 60aee0d28..5609b2f4b 100644 --- a/pkg/gallery/delete.go +++ b/pkg/gallery/delete.go @@ -41,12 +41,7 @@ func (s *Service) Destroy(ctx context.Context, i *models.Gallery, fileDeleter *i return imgsDestroyed, 
nil } -type ChapterDestroyer interface { - FindByGalleryID(ctx context.Context, galleryID int) ([]*models.GalleryChapter, error) - Destroy(ctx context.Context, id int) error -} - -func DestroyChapter(ctx context.Context, galleryChapter *models.GalleryChapter, qb ChapterDestroyer) error { +func DestroyChapter(ctx context.Context, galleryChapter *models.GalleryChapter, qb models.GalleryChapterDestroyer) error { return qb.Destroy(ctx, galleryChapter.ID) } diff --git a/pkg/gallery/export.go b/pkg/gallery/export.go index d53a2a8e5..83f3c31ce 100644 --- a/pkg/gallery/export.go +++ b/pkg/gallery/export.go @@ -7,13 +7,8 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/studio" ) -type ChapterFinder interface { - FindByGalleryID(ctx context.Context, galleryID int) ([]*models.GalleryChapter, error) -} - // ToBasicJSON converts a gallery object into its JSON object equivalent. It // does not convert the relationships to other objects. func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) { @@ -48,7 +43,7 @@ func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) { // GetStudioName returns the name of the provided gallery's studio. It returns an // empty string if there is no studio assigned to the gallery. -func GetStudioName(ctx context.Context, reader studio.Finder, gallery *models.Gallery) (string, error) { +func GetStudioName(ctx context.Context, reader models.StudioGetter, gallery *models.Gallery) (string, error) { if gallery.StudioID != nil { studio, err := reader.Find(ctx, *gallery.StudioID) if err != nil { @@ -65,7 +60,7 @@ func GetStudioName(ctx context.Context, reader studio.Finder, gallery *models.Ga // GetGalleryChaptersJSON returns a slice of GalleryChapter JSON representation // objects corresponding to the provided gallery's chapters. 
-func GetGalleryChaptersJSON(ctx context.Context, chapterReader ChapterFinder, gallery *models.Gallery) ([]jsonschema.GalleryChapter, error) { +func GetGalleryChaptersJSON(ctx context.Context, chapterReader models.GalleryChapterFinder, gallery *models.Gallery) ([]jsonschema.GalleryChapter, error) { galleryChapters, err := chapterReader.FindByGalleryID(ctx, gallery.ID) if err != nil { return nil, fmt.Errorf("error getting gallery chapters: %v", err) diff --git a/pkg/gallery/export_test.go b/pkg/gallery/export_test.go index fcd90b9e9..3a6ffa2ec 100644 --- a/pkg/gallery/export_test.go +++ b/pkg/gallery/export_test.go @@ -3,7 +3,6 @@ package gallery import ( "errors" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" @@ -50,8 +49,8 @@ var ( func createFullGallery(id int) models.Gallery { return models.Gallery{ ID: id, - Files: models.NewRelatedFiles([]file.File{ - &file.BaseFile{ + Files: models.NewRelatedFiles([]models.File{ + &models.BaseFile{ Path: path, }, }), @@ -69,8 +68,8 @@ func createFullGallery(id int) models.Gallery { func createEmptyGallery(id int) models.Gallery { return models.Gallery{ ID: id, - Files: models.NewRelatedFiles([]file.File{ - &file.BaseFile{ + Files: models.NewRelatedFiles([]models.File{ + &models.BaseFile{ Path: path, }, }), diff --git a/pkg/gallery/import.go b/pkg/gallery/import.go index ccb258eb0..57d151245 100644 --- a/pkg/gallery/import.go +++ b/pkg/gallery/import.go @@ -5,22 +5,25 @@ import ( "fmt" "strings" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" ) +type ImporterReaderWriter interface { + models.GalleryCreatorUpdater + FindByFileID(ctx context.Context, 
fileID models.FileID) ([]*models.Gallery, error) + FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) + FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) +} + type Importer struct { - ReaderWriter FullCreatorUpdater - StudioWriter studio.NameFinderCreator - PerformerWriter performer.NameFinderCreator - TagWriter tag.NameFinderCreator - FileFinder file.Getter - FolderFinder file.FolderGetter + ReaderWriter ImporterReaderWriter + StudioWriter models.StudioFinderCreator + PerformerWriter models.PerformerFinderCreator + TagWriter models.TagFinderCreator + FileFinder models.FileFinder + FolderFinder models.FolderFinder Input jsonschema.Gallery MissingRefBehaviour models.ImportMissingRefEnum @@ -28,11 +31,6 @@ type Importer struct { gallery models.Gallery } -type FullCreatorUpdater interface { - FinderCreatorUpdater - Update(ctx context.Context, updatedGallery *models.Gallery) error -} - func (i *Importer) PreImport(ctx context.Context) error { i.gallery = i.galleryJSONToGallery(i.Input) @@ -251,7 +249,7 @@ func (i *Importer) createTags(ctx context.Context, names []string) ([]*models.Ta } func (i *Importer) populateFilesFolder(ctx context.Context) error { - files := make([]file.File, 0) + files := make([]models.File, 0) for _, ref := range i.Input.ZipFiles { path := ref @@ -340,7 +338,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - var fileIDs []file.ID + var fileIDs []models.FileID for _, f := range i.gallery.Files.List() { fileIDs = append(fileIDs, f.Base().ID) } diff --git a/pkg/gallery/import_test.go b/pkg/gallery/import_test.go index bfbdefa9e..0997b4a57 100644 --- a/pkg/gallery/import_test.go +++ b/pkg/gallery/import_test.go @@ -6,7 +6,6 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" 
"github.com/stashapp/stash/pkg/models/jsonschema" @@ -68,7 +67,7 @@ func TestImporterPreImport(t *testing.T) { Rating: &rating, Organized: organized, URL: url, - Files: models.NewRelatedFiles([]file.File{}), + Files: models.NewRelatedFiles([]models.File{}), TagIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}), CreatedAt: createdAt, diff --git a/pkg/gallery/query.go b/pkg/gallery/query.go index cc2a043d7..da0b5f0c1 100644 --- a/pkg/gallery/query.go +++ b/pkg/gallery/query.go @@ -4,27 +4,10 @@ import ( "context" "strconv" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) ([]*models.Gallery, int, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) (int, error) -} - -type Finder interface { - FindByPath(ctx context.Context, p string) ([]*models.Gallery, error) - FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) - FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) - FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error) -} - -func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error) { +func CountByPerformerID(ctx context.Context, r models.GalleryQueryer, id int) (int, error) { filter := &models.GalleryFilterType{ Performers: &models.MultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -35,7 +18,7 @@ func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error return r.QueryCount(ctx, filter, nil) } -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r 
models.GalleryQueryer, id int, depth *int) (int, error) { filter := &models.GalleryFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -47,7 +30,7 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (i return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r models.GalleryQueryer, id int, depth *int) (int, error) { filter := &models.GalleryFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/gallery/scan.go b/pkg/gallery/scan.go index 8a35890ee..a8f52e89b 100644 --- a/pkg/gallery/scan.go +++ b/pkg/gallery/scan.go @@ -7,39 +7,40 @@ import ( "strings" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" ) -type FinderCreatorUpdater interface { - Finder - Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error +type ScanCreatorUpdater interface { + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Gallery, error) + FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Gallery, error) + GetFiles(ctx context.Context, relatedID int) ([]models.File, error) + + Create(ctx context.Context, newGallery *models.Gallery, fileIDs []models.FileID) error UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) - AddFileID(ctx context.Context, id int, fileID file.ID) error - models.FileLoader + AddFileID(ctx context.Context, id int, fileID models.FileID) error } -type SceneFinderUpdater interface { +type ScanSceneFinderUpdater interface { FindByPath(ctx context.Context, p string) ([]*models.Scene, error) Update(ctx context.Context, updatedScene *models.Scene) error AddGalleryIDs(ctx context.Context, sceneID int, 
galleryIDs []int) error } -type ImageFinderUpdater interface { - FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) +type ScanImageFinderUpdater interface { + FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error) } type ScanHandler struct { - CreatorUpdater FullCreatorUpdater - SceneFinderUpdater SceneFinderUpdater - ImageFinderUpdater ImageFinderUpdater + CreatorUpdater ScanCreatorUpdater + SceneFinderUpdater ScanSceneFinderUpdater + ImageFinderUpdater ScanImageFinderUpdater PluginCache *plugin.Cache } -func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File) error { +func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.File) error { baseFile := f.Base() // try to match the file to a gallery @@ -83,7 +84,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File logger.Infof("%s doesn't exist. Creating new gallery...", f.Base().Path) - if err := h.CreatorUpdater.Create(ctx, newGallery, []file.ID{baseFile.ID}); err != nil { + if err := h.CreatorUpdater.Create(ctx, newGallery, []models.FileID{baseFile.ID}); err != nil { return fmt.Errorf("creating new gallery: %w", err) } @@ -112,7 +113,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File return nil } -func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Gallery, f file.File, updateExisting bool) error { +func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Gallery, f models.File, updateExisting bool) error { for _, i := range existing { if err := i.LoadFiles(ctx, h.CreatorUpdater); err != nil { return err @@ -146,7 +147,7 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. 
return nil } -func (h *ScanHandler) associateScene(ctx context.Context, existing []*models.Gallery, f file.File) error { +func (h *ScanHandler) associateScene(ctx context.Context, existing []*models.Gallery, f models.File) error { galleryIDs := make([]int, len(existing)) for i, g := range existing { galleryIDs[i] = g.ID diff --git a/pkg/gallery/service.go b/pkg/gallery/service.go index 7dfc3857f..6db604fc4 100644 --- a/pkg/gallery/service.go +++ b/pkg/gallery/service.go @@ -3,50 +3,25 @@ package gallery import ( "context" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" ) -type FinderByFile interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) -} - -type Repository interface { - models.GalleryFinder - FinderByFile - Destroy(ctx context.Context, id int) error - models.FileLoader - ImageUpdater - PartialUpdater -} - -type PartialUpdater interface { - UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) -} - type ImageFinder interface { - FindByFolderID(ctx context.Context, folder file.FolderID) ([]*models.Image, error) - FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) + FindByFolderID(ctx context.Context, folder models.FolderID) ([]*models.Image, error) + FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) models.GalleryIDLoader } type ImageService interface { Destroy(ctx context.Context, i *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error - DestroyZipImages(ctx context.Context, zipFile file.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) -} - -type ChapterRepository interface { - ChapterFinder - ChapterDestroyer - - Update(ctx context.Context, updatedObject models.GalleryChapter) (*models.GalleryChapter, error) + DestroyZipImages(ctx context.Context, zipFile models.File, 
fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) } type Service struct { - Repository Repository + Repository models.GalleryReaderWriter ImageFinder ImageFinder ImageService ImageService - File file.Store - Folder file.FolderStore + File models.FileReaderWriter + Folder models.FolderReaderWriter } diff --git a/pkg/gallery/update.go b/pkg/gallery/update.go index 72f479bea..71d92c540 100644 --- a/pkg/gallery/update.go +++ b/pkg/gallery/update.go @@ -54,7 +54,7 @@ func (s *Service) RemoveImages(ctx context.Context, g *models.Gallery, toRemove return s.Updated(ctx, g.ID) } -func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Gallery, performerID int) error { +func AddPerformer(ctx context.Context, qb models.GalleryUpdater, o *models.Gallery, performerID int) error { _, err := qb.UpdatePartial(ctx, o.ID, models.GalleryPartial{ PerformerIDs: &models.UpdateIDs{ IDs: []int{performerID}, @@ -64,7 +64,7 @@ func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Gallery, per return err } -func AddTag(ctx context.Context, qb PartialUpdater, o *models.Gallery, tagID int) error { +func AddTag(ctx context.Context, qb models.GalleryUpdater, o *models.Gallery, tagID int) error { _, err := qb.UpdatePartial(ctx, o.ID, models.GalleryPartial{ TagIDs: &models.UpdateIDs{ IDs: []int{tagID}, diff --git a/pkg/hash/videophash/phash.go b/pkg/hash/videophash/phash.go index 0cbefc2ae..6e586b178 100644 --- a/pkg/hash/videophash/phash.go +++ b/pkg/hash/videophash/phash.go @@ -13,8 +13,8 @@ import ( "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/ffmpeg/transcoder" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" ) const ( @@ -23,7 +23,7 @@ const ( rows = 5 ) -func Generate(encoder *ffmpeg.FFMpeg, videoFile *file.VideoFile) (*uint64, error) { +func Generate(encoder *ffmpeg.FFMpeg, videoFile *models.VideoFile) (*uint64, error) { sprite, err := 
generateSprite(encoder, videoFile) if err != nil { return nil, err @@ -76,7 +76,7 @@ func combineImages(images []image.Image) image.Image { return montage } -func generateSprite(encoder *ffmpeg.FFMpeg, videoFile *file.VideoFile) (image.Image, error) { +func generateSprite(encoder *ffmpeg.FFMpeg, videoFile *models.VideoFile) (image.Image, error) { logger.Infof("[generator] generating phash sprite for %s", videoFile.Path) // Generate sprite image offset by 5% on each end to avoid intro/outros diff --git a/pkg/image/delete.go b/pkg/image/delete.go index 78ef4b09a..89f4c1811 100644 --- a/pkg/image/delete.go +++ b/pkg/image/delete.go @@ -10,10 +10,6 @@ import ( "github.com/stashapp/stash/pkg/models/paths" ) -type Destroyer interface { - Destroy(ctx context.Context, id int) error -} - // FileDeleter is an extension of file.Deleter that handles deletion of image files. type FileDeleter struct { *file.Deleter @@ -45,7 +41,7 @@ func (s *Service) Destroy(ctx context.Context, i *models.Image, fileDeleter *Fil // DestroyZipImages destroys all images in zip, optionally marking the files and generated files for deletion. // Returns a slice of images that were destroyed. -func (s *Service) DestroyZipImages(ctx context.Context, zipFile file.File, fileDeleter *FileDeleter, deleteGenerated bool) ([]*models.Image, error) { +func (s *Service) DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *FileDeleter, deleteGenerated bool) ([]*models.Image, error) { var imgsDestroyed []*models.Image imgs, err := s.Repository.FindByZipFileID(ctx, zipFile.Base().ID) diff --git a/pkg/image/export.go b/pkg/image/export.go index d67351e8d..a7c4d8575 100644 --- a/pkg/image/export.go +++ b/pkg/image/export.go @@ -6,7 +6,6 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/studio" ) // ToBasicJSON converts a image object into its JSON object equivalent. 
It @@ -53,7 +52,7 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image { // GetStudioName returns the name of the provided image's studio. It returns an // empty string if there is no studio assigned to the image. -func GetStudioName(ctx context.Context, reader studio.Finder, image *models.Image) (string, error) { +func GetStudioName(ctx context.Context, reader models.StudioGetter, image *models.Image) (string, error) { if image.StudioID != nil { studio, err := reader.Find(ctx, *image.StudioID) if err != nil { diff --git a/pkg/image/export_test.go b/pkg/image/export_test.go index 4c46aae95..3b64f40cb 100644 --- a/pkg/image/export_test.go +++ b/pkg/image/export_test.go @@ -3,7 +3,6 @@ package image import ( "errors" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" @@ -45,8 +44,8 @@ var ( func createFullImage(id int) models.Image { return models.Image{ ID: id, - Files: models.NewRelatedFiles([]file.File{ - &file.BaseFile{ + Files: models.NewRelatedFiles([]models.File{ + &models.BaseFile{ Path: path, }, }), diff --git a/pkg/image/import.go b/pkg/image/import.go index 3c1e7ac8b..4ce2287eb 100644 --- a/pkg/image/import.go +++ b/pkg/image/import.go @@ -5,13 +5,9 @@ import ( "fmt" "strings" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" ) type GalleryFinder interface { @@ -19,18 +15,18 @@ type GalleryFinder interface { FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) } -type FullCreatorUpdater interface { - FinderCreatorUpdater - Update(ctx context.Context, updatedImage *models.Image) error +type ImporterReaderWriter interface { + models.ImageCreatorUpdater 
+ FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) } type Importer struct { - ReaderWriter FullCreatorUpdater - FileFinder file.Getter - StudioWriter studio.NameFinderCreator + ReaderWriter ImporterReaderWriter + FileFinder models.FileFinder + StudioWriter models.StudioFinderCreator GalleryFinder GalleryFinder - PerformerWriter performer.NameFinderCreator - TagWriter tag.NameFinderCreator + PerformerWriter models.PerformerFinderCreator + TagWriter models.TagFinderCreator Input jsonschema.Image MissingRefBehaviour models.ImportMissingRefEnum @@ -99,7 +95,7 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image { } func (i *Importer) populateFiles(ctx context.Context) error { - files := make([]file.File, 0) + files := make([]models.File, 0) for _, ref := range i.Input.Files { path := ref @@ -330,7 +326,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - var fileIDs []file.ID + var fileIDs []models.FileID for _, f := range i.image.Files.List() { fileIDs = append(fileIDs, f.Base().ID) } @@ -360,7 +356,7 @@ func (i *Importer) Update(ctx context.Context, id int) error { return nil } -func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { +func importTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { tags, err := tagWriter.FindByNames(ctx, names, false) if err != nil { return nil, err @@ -395,7 +391,7 @@ func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []st return tags, nil } -func createTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string) ([]*models.Tag, error) { +func createTags(ctx context.Context, tagWriter models.TagCreator, names []string) ([]*models.Tag, error) { var ret 
[]*models.Tag for _, name := range names { newTag := models.NewTag(name) diff --git a/pkg/image/query.go b/pkg/image/query.go index 85d1df05c..a5c9a1732 100644 --- a/pkg/image/query.go +++ b/pkg/image/query.go @@ -7,14 +7,6 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, options models.ImageQueryOptions) (*models.ImageQueryResult, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) (int, error) -} - // QueryOptions returns a ImageQueryResult populated with the provided filters. func QueryOptions(imageFilter *models.ImageFilterType, findFilter *models.FindFilterType, count bool) models.ImageQueryOptions { return models.ImageQueryOptions{ @@ -27,7 +19,7 @@ func QueryOptions(imageFilter *models.ImageFilterType, findFilter *models.FindFi } // Query queries for images using the provided filters. -func Query(ctx context.Context, qb Queryer, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, error) { +func Query(ctx context.Context, qb models.ImageQueryer, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, error) { result, err := qb.Query(ctx, QueryOptions(imageFilter, findFilter, false)) if err != nil { return nil, err @@ -41,7 +33,7 @@ func Query(ctx context.Context, qb Queryer, imageFilter *models.ImageFilterType, return images, nil } -func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error) { +func CountByPerformerID(ctx context.Context, r models.ImageQueryer, id int) (int, error) { filter := &models.ImageFilterType{ Performers: &models.MultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -52,7 +44,7 @@ func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error return r.QueryCount(ctx, filter, nil) } -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, 
error) { +func CountByStudioID(ctx context.Context, r models.ImageQueryer, id int, depth *int) (int, error) { filter := &models.ImageFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -64,7 +56,7 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (i return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r models.ImageQueryer, id int, depth *int) (int, error) { filter := &models.ImageFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -76,7 +68,7 @@ func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, return r.QueryCount(ctx, filter, nil) } -func FindByGalleryID(ctx context.Context, r Queryer, galleryID int, sortBy string, sortDir models.SortDirectionEnum) ([]*models.Image, error) { +func FindByGalleryID(ctx context.Context, r models.ImageQueryer, galleryID int, sortBy string, sortDir models.SortDirectionEnum) ([]*models.Image, error) { perPage := -1 findFilter := models.FindFilterType{ @@ -99,7 +91,7 @@ func FindByGalleryID(ctx context.Context, r Queryer, galleryID int, sortBy strin }, &findFilter) } -func FindGalleryCover(ctx context.Context, r Queryer, galleryID int, galleryCoverRegex string) (*models.Image, error) { +func FindGalleryCover(ctx context.Context, r models.ImageQueryer, galleryID int, galleryCoverRegex string) (*models.Image, error) { const useCoverJpg = true img, err := findGalleryCover(ctx, r, galleryID, useCoverJpg, galleryCoverRegex) if err != nil { @@ -114,7 +106,7 @@ func FindGalleryCover(ctx context.Context, r Queryer, galleryID int, galleryCove return findGalleryCover(ctx, r, galleryID, !useCoverJpg, galleryCoverRegex) } -func findGalleryCover(ctx context.Context, r Queryer, galleryID int, useCoverJpg bool, galleryCoverRegex string) (*models.Image, error) { +func 
findGalleryCover(ctx context.Context, r models.ImageQueryer, galleryID int, useCoverJpg bool, galleryCoverRegex string) (*models.Image, error) { // try to find cover.jpg in the gallery perPage := 1 sortBy := "path" diff --git a/pkg/image/scan.go b/pkg/image/scan.go index d28d94a86..d584d0f55 100644 --- a/pkg/image/scan.go +++ b/pkg/image/scan.go @@ -8,7 +8,6 @@ import ( "path/filepath" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/paths" @@ -21,21 +20,22 @@ var ( ErrNotImageFile = errors.New("not an image file") ) -type FinderCreatorUpdater interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Image, error) - FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Image, error) - FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Image, error) +type ScanCreatorUpdater interface { + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) + FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Image, error) + FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Image, error) + GetFiles(ctx context.Context, relatedID int) ([]models.File, error) + GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) + Create(ctx context.Context, newImage *models.ImageCreateInput) error UpdatePartial(ctx context.Context, id int, updatedImage models.ImagePartial) (*models.Image, error) - AddFileID(ctx context.Context, id int, fileID file.ID) error - models.GalleryIDLoader - models.FileLoader + AddFileID(ctx context.Context, id int, fileID models.FileID) error } type GalleryFinderCreator interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) - FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) - Create(ctx context.Context, newObject *models.Gallery, fileIDs []file.ID) 
error + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Gallery, error) + FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) + Create(ctx context.Context, newObject *models.Gallery, fileIDs []models.FileID) error UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) } @@ -44,11 +44,11 @@ type ScanConfig interface { } type ScanGenerator interface { - Generate(ctx context.Context, i *models.Image, f file.File) error + Generate(ctx context.Context, i *models.Image, f models.File) error } type ScanHandler struct { - CreatorUpdater FinderCreatorUpdater + CreatorUpdater ScanCreatorUpdater GalleryFinder GalleryFinderCreator ScanGenerator ScanGenerator @@ -80,7 +80,7 @@ func (h *ScanHandler) validate() error { return nil } -func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File) error { +func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.File) error { if err := h.validate(); err != nil { return err } @@ -130,7 +130,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File if err := h.CreatorUpdater.Create(ctx, &models.ImageCreateInput{ Image: newImage, - FileIDs: []file.ID{imageFile.ID}, + FileIDs: []models.FileID{imageFile.ID}, }); err != nil { return fmt.Errorf("creating new image: %w", err) } @@ -151,8 +151,8 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File // remove the old thumbnail if the checksum changed - we'll regenerate it if oldFile != nil { - oldHash := oldFile.Base().Fingerprints.GetString(file.FingerprintTypeMD5) - newHash := f.Base().Fingerprints.GetString(file.FingerprintTypeMD5) + oldHash := oldFile.Base().Fingerprints.GetString(models.FingerprintTypeMD5) + newHash := f.Base().Fingerprints.GetString(models.FingerprintTypeMD5) if oldHash != "" && newHash != "" && oldHash != newHash { // remove cache dir of gallery @@ -173,7 +173,7 @@ 
func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File return nil } -func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Image, f *file.BaseFile, updateExisting bool) error { +func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Image, f *models.BaseFile, updateExisting bool) error { for _, i := range existing { if err := i.LoadFiles(ctx, h.CreatorUpdater); err != nil { return err @@ -239,7 +239,7 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. return nil } -func (h *ScanHandler) getOrCreateFolderBasedGallery(ctx context.Context, f file.File) (*models.Gallery, error) { +func (h *ScanHandler) getOrCreateFolderBasedGallery(ctx context.Context, f models.File) (*models.Gallery, error) { folderID := f.Base().ParentFolderID g, err := h.GalleryFinder.FindByFolderID(ctx, folderID) if err != nil { @@ -299,7 +299,7 @@ func (h *ScanHandler) associateFolderImages(ctx context.Context, g *models.Galle return nil } -func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile file.File) (*models.Gallery, error) { +func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile models.File) (*models.Gallery, error) { g, err := h.GalleryFinder.FindByFileID(ctx, zipFile.Base().ID) if err != nil { return nil, fmt.Errorf("finding zip based gallery: %w", err) @@ -319,7 +319,7 @@ func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile fi logger.Infof("%s doesn't exist. 
Creating new gallery...", zipFile.Base().Path) - if err := h.GalleryFinder.Create(ctx, newGallery, []file.ID{zipFile.Base().ID}); err != nil { + if err := h.GalleryFinder.Create(ctx, newGallery, []models.FileID{zipFile.Base().ID}); err != nil { return nil, fmt.Errorf("creating zip-based gallery: %w", err) } @@ -328,7 +328,7 @@ func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile fi return newGallery, nil } -func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f file.File) (*models.Gallery, error) { +func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f models.File) (*models.Gallery, error) { // don't create folder-based galleries for files in zip file if f.Base().ZipFile != nil { return h.getOrCreateZipBasedGallery(ctx, f.Base().ZipFile) @@ -357,7 +357,7 @@ func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f file.File) (*mod return nil, nil } -func (h *ScanHandler) getGalleryToAssociate(ctx context.Context, newImage *models.Image, f file.File) (*models.Gallery, error) { +func (h *ScanHandler) getGalleryToAssociate(ctx context.Context, newImage *models.Image, f models.File) (*models.Gallery, error) { g, err := h.getOrCreateGallery(ctx, f) if err != nil { return nil, err diff --git a/pkg/image/service.go b/pkg/image/service.go index 5aacc4e59..55dc7686d 100644 --- a/pkg/image/service.go +++ b/pkg/image/service.go @@ -1,24 +1,10 @@ package image import ( - "context" - - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) -type FinderByFile interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Image, error) - FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) -} - -type Repository interface { - FinderByFile - Destroyer - models.FileLoader -} - type Service struct { - File file.Store - Repository Repository + File models.FileReaderWriter + Repository models.ImageReaderWriter } diff --git a/pkg/image/thumbnail.go b/pkg/image/thumbnail.go 
index dc07b0f55..16191fa55 100644 --- a/pkg/image/thumbnail.go +++ b/pkg/image/thumbnail.go @@ -14,6 +14,7 @@ import ( "github.com/stashapp/stash/pkg/ffmpeg/transcoder" "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/models" ) const ffmpegImageQuality = 5 @@ -68,7 +69,7 @@ func NewThumbnailEncoder(ffmpegEncoder *ffmpeg.FFMpeg, ffProbe ffmpeg.FFProbe, c // the provided max size. It resizes based on the largest X/Y direction. // It returns nil and an error if an error occurs reading, decoding or encoding // the image, or if the image is not suitable for thumbnails. -func (e *ThumbnailEncoder) GetThumbnail(f file.File, maxSize int) ([]byte, error) { +func (e *ThumbnailEncoder) GetThumbnail(f models.File, maxSize int) ([]byte, error) { reader, err := f.Open(&file.OsFS{}) if err != nil { return nil, err @@ -82,7 +83,7 @@ func (e *ThumbnailEncoder) GetThumbnail(f file.File, maxSize int) ([]byte, error data := buf.Bytes() - if imageFile, ok := f.(*file.ImageFile); ok { + if imageFile, ok := f.(*models.ImageFile); ok { format := imageFile.Format animated := imageFile.Format == formatGif @@ -98,7 +99,7 @@ func (e *ThumbnailEncoder) GetThumbnail(f file.File, maxSize int) ([]byte, error } // Videofiles can only be thumbnailed with ffmpeg - if _, ok := f.(*file.VideoFile); ok { + if _, ok := f.(*models.VideoFile); ok { return e.ffmpegImageThumbnail(buf, maxSize) } diff --git a/pkg/image/update.go b/pkg/image/update.go index 3a173b7ad..e3a63b53d 100644 --- a/pkg/image/update.go +++ b/pkg/image/update.go @@ -6,11 +6,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type PartialUpdater interface { - UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error) -} - -func AddPerformer(ctx context.Context, qb PartialUpdater, i *models.Image, performerID int) error { +func AddPerformer(ctx context.Context, qb models.ImageUpdater, i *models.Image, performerID int) error { _, err := 
qb.UpdatePartial(ctx, i.ID, models.ImagePartial{ PerformerIDs: &models.UpdateIDs{ IDs: []int{performerID}, @@ -21,7 +17,7 @@ func AddPerformer(ctx context.Context, qb PartialUpdater, i *models.Image, perfo return err } -func AddTag(ctx context.Context, qb PartialUpdater, i *models.Image, tagID int) error { +func AddTag(ctx context.Context, qb models.ImageUpdater, i *models.Image, tagID int) error { _, err := qb.UpdatePartial(ctx, i.ID, models.ImagePartial{ TagIDs: &models.UpdateIDs{ IDs: []int{tagID}, diff --git a/pkg/match/cache.go b/pkg/match/cache.go index 06237c7f6..6946f65db 100644 --- a/pkg/match/cache.go +++ b/pkg/match/cache.go @@ -20,7 +20,7 @@ type Cache struct { // against. This means that performers with single-letter words in their names could potentially // be missed. // This query is expensive, so it's queried once and cached, if the cache if provided. -func getSingleLetterPerformers(ctx context.Context, c *Cache, reader PerformerAutoTagQueryer) ([]*models.Performer, error) { +func getSingleLetterPerformers(ctx context.Context, c *Cache, reader models.PerformerAutoTagQueryer) ([]*models.Performer, error) { if c == nil { c = &Cache{} } @@ -53,7 +53,7 @@ func getSingleLetterPerformers(ctx context.Context, c *Cache, reader PerformerAu // getSingleLetterStudios returns all studios with names that start with single character words. // See getSingleLetterPerformers for details. -func getSingleLetterStudios(ctx context.Context, c *Cache, reader StudioAutoTagQueryer) ([]*models.Studio, error) { +func getSingleLetterStudios(ctx context.Context, c *Cache, reader models.StudioAutoTagQueryer) ([]*models.Studio, error) { if c == nil { c = &Cache{} } @@ -86,7 +86,7 @@ func getSingleLetterStudios(ctx context.Context, c *Cache, reader StudioAutoTagQ // getSingleLetterTags returns all tags with names that start with single character words. // See getSingleLetterPerformers for details. 
-func getSingleLetterTags(ctx context.Context, c *Cache, reader TagAutoTagQueryer) ([]*models.Tag, error) { +func getSingleLetterTags(ctx context.Context, c *Cache, reader models.TagAutoTagQueryer) ([]*models.Tag, error) { if c == nil { c = &Cache{} } diff --git a/pkg/match/path.go b/pkg/match/path.go index 666d64374..0b5aaa6ba 100644 --- a/pkg/match/path.go +++ b/pkg/match/path.go @@ -14,8 +14,6 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" ) const ( @@ -28,24 +26,6 @@ const ( var separatorRE = regexp.MustCompile(separatorPattern) -type PerformerAutoTagQueryer interface { - Query(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) - QueryForAutoTag(ctx context.Context, words []string) ([]*models.Performer, error) - models.AliasLoader -} - -type StudioAutoTagQueryer interface { - QueryForAutoTag(ctx context.Context, words []string) ([]*models.Studio, error) - studio.Queryer - GetAliases(ctx context.Context, studioID int) ([]string, error) -} - -type TagAutoTagQueryer interface { - QueryForAutoTag(ctx context.Context, words []string) ([]*models.Tag, error) - tag.Queryer - GetAliases(ctx context.Context, tagID int) ([]string, error) -} - func getPathQueryRegex(name string) string { // escape specific regex characters name = regexp.QuoteMeta(name) @@ -146,7 +126,7 @@ func regexpMatchesPath(r *regexp.Regexp, path string) int { return found[len(found)-1][0] } -func getPerformers(ctx context.Context, words []string, performerReader PerformerAutoTagQueryer, cache *Cache) ([]*models.Performer, error) { +func getPerformers(ctx context.Context, words []string, performerReader models.PerformerAutoTagQueryer, cache *Cache) ([]*models.Performer, error) { performers, err := performerReader.QueryForAutoTag(ctx, words) if err 
!= nil { return nil, err @@ -160,7 +140,7 @@ func getPerformers(ctx context.Context, words []string, performerReader Performe return append(performers, swPerformers...), nil } -func PathToPerformers(ctx context.Context, path string, reader PerformerAutoTagQueryer, cache *Cache, trimExt bool) ([]*models.Performer, error) { +func PathToPerformers(ctx context.Context, path string, reader models.PerformerAutoTagQueryer, cache *Cache, trimExt bool) ([]*models.Performer, error) { words := getPathWords(path, trimExt) performers, err := getPerformers(ctx, words, reader, cache) @@ -198,7 +178,7 @@ func PathToPerformers(ctx context.Context, path string, reader PerformerAutoTagQ return ret, nil } -func getStudios(ctx context.Context, words []string, reader StudioAutoTagQueryer, cache *Cache) ([]*models.Studio, error) { +func getStudios(ctx context.Context, words []string, reader models.StudioAutoTagQueryer, cache *Cache) ([]*models.Studio, error) { studios, err := reader.QueryForAutoTag(ctx, words) if err != nil { return nil, err @@ -215,7 +195,7 @@ func getStudios(ctx context.Context, words []string, reader StudioAutoTagQueryer // PathToStudio returns the Studio that matches the given path. // Where multiple matching studios are found, the one that matches the latest // position in the path is returned. 
-func PathToStudio(ctx context.Context, path string, reader StudioAutoTagQueryer, cache *Cache, trimExt bool) (*models.Studio, error) { +func PathToStudio(ctx context.Context, path string, reader models.StudioAutoTagQueryer, cache *Cache, trimExt bool) (*models.Studio, error) { words := getPathWords(path, trimExt) candidates, err := getStudios(ctx, words, reader, cache) @@ -249,7 +229,7 @@ func PathToStudio(ctx context.Context, path string, reader StudioAutoTagQueryer, return ret, nil } -func getTags(ctx context.Context, words []string, reader TagAutoTagQueryer, cache *Cache) ([]*models.Tag, error) { +func getTags(ctx context.Context, words []string, reader models.TagAutoTagQueryer, cache *Cache) ([]*models.Tag, error) { tags, err := reader.QueryForAutoTag(ctx, words) if err != nil { return nil, err @@ -263,7 +243,7 @@ func getTags(ctx context.Context, words []string, reader TagAutoTagQueryer, cach return append(tags, swTags...), nil } -func PathToTags(ctx context.Context, path string, reader TagAutoTagQueryer, cache *Cache, trimExt bool) ([]*models.Tag, error) { +func PathToTags(ctx context.Context, path string, reader models.TagAutoTagQueryer, cache *Cache, trimExt bool) ([]*models.Tag, error) { words := getPathWords(path, trimExt) tags, err := getTags(ctx, words, reader, cache) @@ -299,7 +279,7 @@ func PathToTags(ctx context.Context, path string, reader TagAutoTagQueryer, cach return ret, nil } -func PathToScenesFn(ctx context.Context, name string, paths []string, sceneReader scene.Queryer, fn func(ctx context.Context, scene *models.Scene) error) error { +func PathToScenesFn(ctx context.Context, name string, paths []string, sceneReader models.SceneQueryer, fn func(ctx context.Context, scene *models.Scene) error) error { regex := getPathQueryRegex(name) organized := false filter := models.SceneFilterType{ @@ -358,7 +338,7 @@ func PathToScenesFn(ctx context.Context, name string, paths []string, sceneReade return nil } -func PathToImagesFn(ctx context.Context, name 
string, paths []string, imageReader image.Queryer, fn func(ctx context.Context, scene *models.Image) error) error { +func PathToImagesFn(ctx context.Context, name string, paths []string, imageReader models.ImageQueryer, fn func(ctx context.Context, scene *models.Image) error) error { regex := getPathQueryRegex(name) organized := false filter := models.ImageFilterType{ @@ -417,7 +397,7 @@ func PathToImagesFn(ctx context.Context, name string, paths []string, imageReade return nil } -func PathToGalleriesFn(ctx context.Context, name string, paths []string, galleryReader gallery.Queryer, fn func(ctx context.Context, scene *models.Gallery) error) error { +func PathToGalleriesFn(ctx context.Context, name string, paths []string, galleryReader models.GalleryQueryer, fn func(ctx context.Context, scene *models.Gallery) error) error { regex := getPathQueryRegex(name) organized := false filter := models.GalleryFilterType{ diff --git a/pkg/match/scraped.go b/pkg/match/scraped.go index d1182a329..675a8d7fc 100644 --- a/pkg/match/scraped.go +++ b/pkg/match/scraped.go @@ -58,7 +58,7 @@ func ScrapedPerformer(ctx context.Context, qb PerformerFinder, p *models.Scraped } type StudioFinder interface { - studio.Queryer + models.StudioQueryer FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Studio, error) } @@ -134,7 +134,7 @@ func ScrapedMovie(ctx context.Context, qb MovieNamesFinder, m *models.ScrapedMov // ScrapedTag matches the provided tag with the tags // in the database and sets the ID field if one is found. 
-func ScrapedTag(ctx context.Context, qb tag.Queryer, s *models.ScrapedTag) error { +func ScrapedTag(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag) error { if s.StoredID != nil { return nil } diff --git a/pkg/models/file.go b/pkg/models/file.go index 827a55d5c..e6ce41d1e 100644 --- a/pkg/models/file.go +++ b/pkg/models/file.go @@ -4,8 +4,6 @@ import ( "context" "path/filepath" "strings" - - "github.com/stashapp/stash/pkg/file" ) type FileQueryOptions struct { @@ -57,24 +55,24 @@ func PathsFileFilter(paths []string) *FileFilterType { type FileQueryResult struct { // can't use QueryResult because id type is wrong - IDs []file.ID + IDs []FileID Count int - finder file.Finder - files []file.File + getter FileGetter + files []File resolveErr error } -func NewFileQueryResult(finder file.Finder) *FileQueryResult { +func NewFileQueryResult(fileGetter FileGetter) *FileQueryResult { return &FileQueryResult{ - finder: finder, + getter: fileGetter, } } -func (r *FileQueryResult) Resolve(ctx context.Context) ([]file.File, error) { +func (r *FileQueryResult) Resolve(ctx context.Context) ([]File, error) { // cache results if r.files == nil && r.resolveErr == nil { - r.files, r.resolveErr = r.finder.Find(ctx, r.IDs...) + r.files, r.resolveErr = r.getter.Find(ctx, r.IDs...) 
} return r.files, r.resolveErr } diff --git a/pkg/file/fingerprint.go b/pkg/models/fingerprint.go similarity index 90% rename from pkg/file/fingerprint.go rename to pkg/models/fingerprint.go index 3155276c5..0123f289d 100644 --- a/pkg/file/fingerprint.go +++ b/pkg/models/fingerprint.go @@ -1,4 +1,9 @@ -package file +package models + +import ( + "fmt" + "strconv" +) var ( FingerprintTypeOshash = "oshash" @@ -12,6 +17,15 @@ type Fingerprint struct { Fingerprint interface{} } +func (f *Fingerprint) Value() string { + switch v := f.Fingerprint.(type) { + case int64: + return strconv.FormatUint(uint64(v), 16) + default: + return fmt.Sprintf("%v", f.Fingerprint) + } +} + type Fingerprints []Fingerprint func (f *Fingerprints) Remove(type_ string) { @@ -114,8 +128,3 @@ func (f Fingerprints) AppendUnique(o Fingerprint) Fingerprints { return append(f, o) } - -// FingerprintCalculator calculates a fingerprint for the provided file. -type FingerprintCalculator interface { - CalculateFingerprints(f *BaseFile, o Opener, useExisting bool) ([]Fingerprint, error) -} diff --git a/pkg/file/fingerprint_test.go b/pkg/models/fingerprint_test.go similarity index 99% rename from pkg/file/fingerprint_test.go rename to pkg/models/fingerprint_test.go index f13ce2254..d35f4b082 100644 --- a/pkg/file/fingerprint_test.go +++ b/pkg/models/fingerprint_test.go @@ -1,4 +1,4 @@ -package file +package models import "testing" diff --git a/pkg/models/fs.go b/pkg/models/fs.go new file mode 100644 index 000000000..bdbf60349 --- /dev/null +++ b/pkg/models/fs.go @@ -0,0 +1,27 @@ +package models + +import ( + "io" + "io/fs" +) + +// FileOpener provides an interface to open a file. +type FileOpener interface { + Open() (io.ReadCloser, error) +} + +// FS represents a file system. 
+type FS interface { + Stat(name string) (fs.FileInfo, error) + Lstat(name string) (fs.FileInfo, error) + Open(name string) (fs.ReadDirFile, error) + OpenZip(name string) (ZipFS, error) + IsPathCaseSensitive(path string) (bool, error) +} + +// ZipFS represents a zip file system. +type ZipFS interface { + FS + io.Closer + OpenOnly(name string) (io.ReadCloser, error) +} diff --git a/pkg/models/gallery.go b/pkg/models/gallery.go index 61ee2a72d..d3644d3fd 100644 --- a/pkg/models/gallery.go +++ b/pkg/models/gallery.go @@ -1,11 +1,5 @@ package models -import ( - "context" - - "github.com/stashapp/stash/pkg/file" -) - type GalleryFilterType struct { And *GalleryFilterType `json:"AND"` Or *GalleryFilterType `json:"OR"` @@ -86,40 +80,3 @@ type GalleryDestroyInput struct { DeleteFile *bool `json:"delete_file"` DeleteGenerated *bool `json:"delete_generated"` } - -type GalleryFinder interface { - FindMany(ctx context.Context, ids []int) ([]*Gallery, error) -} - -type GalleryReader interface { - Find(ctx context.Context, id int) (*Gallery, error) - GalleryFinder - FindByChecksum(ctx context.Context, checksum string) ([]*Gallery, error) - FindByChecksums(ctx context.Context, checksums []string) ([]*Gallery, error) - FindByPath(ctx context.Context, path string) ([]*Gallery, error) - FindBySceneID(ctx context.Context, sceneID int) ([]*Gallery, error) - FindByImageID(ctx context.Context, imageID int) ([]*Gallery, error) - - SceneIDLoader - PerformerIDLoader - TagIDLoader - - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*Gallery, error) - Query(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) ([]*Gallery, int, error) - QueryCount(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) (int, error) - GetImageIDs(ctx context.Context, galleryID int) ([]int, error) -} - -type GalleryWriter interface { - Create(ctx context.Context, newGallery *Gallery, fileIDs []file.ID) error - Update(ctx 
context.Context, updatedGallery *Gallery) error - UpdatePartial(ctx context.Context, id int, updatedGallery GalleryPartial) (*Gallery, error) - Destroy(ctx context.Context, id int) error - UpdateImages(ctx context.Context, galleryID int, imageIDs []int) error -} - -type GalleryReaderWriter interface { - GalleryReader - GalleryWriter -} diff --git a/pkg/models/gallery_chapter.go b/pkg/models/gallery_chapter.go deleted file mode 100644 index 005780982..000000000 --- a/pkg/models/gallery_chapter.go +++ /dev/null @@ -1,21 +0,0 @@ -package models - -import "context" - -type GalleryChapterReader interface { - Find(ctx context.Context, id int) (*GalleryChapter, error) - FindMany(ctx context.Context, ids []int) ([]*GalleryChapter, error) - FindByGalleryID(ctx context.Context, galleryID int) ([]*GalleryChapter, error) -} - -type GalleryChapterWriter interface { - Create(ctx context.Context, newGalleryChapter *GalleryChapter) error - Update(ctx context.Context, updatedGalleryChapter *GalleryChapter) error - UpdatePartial(ctx context.Context, id int, updatedGalleryChapter GalleryChapterPartial) (*GalleryChapter, error) - Destroy(ctx context.Context, id int) error -} - -type GalleryChapterReaderWriter interface { - GalleryChapterReader - GalleryChapterWriter -} diff --git a/pkg/models/image.go b/pkg/models/image.go index 288f69976..ef90dfd7d 100644 --- a/pkg/models/image.go +++ b/pkg/models/image.go @@ -77,60 +77,21 @@ type ImageQueryResult struct { Megapixels float64 TotalSize float64 - finder ImageFinder + getter ImageGetter images []*Image resolveErr error } -func NewImageQueryResult(finder ImageFinder) *ImageQueryResult { +func NewImageQueryResult(getter ImageGetter) *ImageQueryResult { return &ImageQueryResult{ - finder: finder, + getter: getter, } } func (r *ImageQueryResult) Resolve(ctx context.Context) ([]*Image, error) { // cache results if r.images == nil && r.resolveErr == nil { - r.images, r.resolveErr = r.finder.FindMany(ctx, r.IDs) + r.images, r.resolveErr = 
r.getter.FindMany(ctx, r.IDs) } return r.images, r.resolveErr } - -type ImageFinder interface { - // TODO - rename to Find and remove existing method - FindMany(ctx context.Context, ids []int) ([]*Image, error) -} - -type ImageReader interface { - ImageFinder - // TODO - remove this in another PR - Find(ctx context.Context, id int) (*Image, error) - FindByChecksum(ctx context.Context, checksum string) ([]*Image, error) - FindByGalleryID(ctx context.Context, galleryID int) ([]*Image, error) - CountByGalleryID(ctx context.Context, galleryID int) (int, error) - OCountByPerformerID(ctx context.Context, performerID int) (int, error) - Count(ctx context.Context) (int, error) - Size(ctx context.Context) (float64, error) - All(ctx context.Context) ([]*Image, error) - Query(ctx context.Context, options ImageQueryOptions) (*ImageQueryResult, error) - QueryCount(ctx context.Context, imageFilter *ImageFilterType, findFilter *FindFilterType) (int, error) - - GalleryIDLoader - PerformerIDLoader - TagIDLoader -} - -type ImageWriter interface { - Create(ctx context.Context, newImage *ImageCreateInput) error - Update(ctx context.Context, updatedImage *Image) error - UpdatePartial(ctx context.Context, id int, partial ImagePartial) (*Image, error) - IncrementOCounter(ctx context.Context, id int) (int, error) - DecrementOCounter(ctx context.Context, id int) (int, error) - ResetOCounter(ctx context.Context, id int) (int, error) - Destroy(ctx context.Context, id int) error -} - -type ImageReaderWriter interface { - ImageReader - ImageWriter -} diff --git a/pkg/models/mocks/FileReaderWriter.go b/pkg/models/mocks/FileReaderWriter.go new file mode 100644 index 000000000..8e7982b47 --- /dev/null +++ b/pkg/models/mocks/FileReaderWriter.go @@ -0,0 +1,350 @@ +// Code generated by mockery v2.10.0. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + fs "io/fs" + + mock "github.com/stretchr/testify/mock" + + models "github.com/stashapp/stash/pkg/models" +) + +// FileReaderWriter is an autogenerated mock type for the FileReaderWriter type +type FileReaderWriter struct { + mock.Mock +} + +// CountAllInPaths provides a mock function with given fields: ctx, p +func (_m *FileReaderWriter) CountAllInPaths(ctx context.Context, p []string) (int, error) { + ret := _m.Called(ctx, p) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, []string) int); ok { + r0 = rf(ctx, p) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string) error); ok { + r1 = rf(ctx, p) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CountByFolderID provides a mock function with given fields: ctx, folderID +func (_m *FileReaderWriter) CountByFolderID(ctx context.Context, folderID models.FolderID) (int, error) { + ret := _m.Called(ctx, folderID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) int); ok { + r0 = rf(ctx, folderID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, folderID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Create provides a mock function with given fields: ctx, f +func (_m *FileReaderWriter) Create(ctx context.Context, f models.File) error { + ret := _m.Called(ctx, f) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.File) error); ok { + r0 = rf(ctx, f) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Destroy provides a mock function with given fields: ctx, id +func (_m *FileReaderWriter) Destroy(ctx context.Context, id models.FileID) error { + ret := _m.Called(ctx, id) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) error); ok { + r0 = rf(ctx, id) + } else { + r0 = ret.Error(0) + } 
+ + return r0 +} + +// Find provides a mock function with given fields: ctx, id +func (_m *FileReaderWriter) Find(ctx context.Context, id ...models.FileID) ([]models.File, error) { + _va := make([]interface{}, len(id)) + for _i := range id { + _va[_i] = id[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, ...models.FileID) []models.File); ok { + r0 = rf(ctx, id...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, ...models.FileID) error); ok { + r1 = rf(ctx, id...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindAllByPath provides a mock function with given fields: ctx, path +func (_m *FileReaderWriter) FindAllByPath(ctx context.Context, path string) ([]models.File, error) { + ret := _m.Called(ctx, path) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, string) []models.File); ok { + r0 = rf(ctx, path) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, path) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset +func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]models.File, error) { + ret := _m.Called(ctx, p, limit, offset) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []models.File); ok { + r0 = rf(ctx, p, limit, offset) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok { + r1 = rf(ctx, p, limit, offset) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + 
+// FindByFileInfo provides a mock function with given fields: ctx, info, size +func (_m *FileReaderWriter) FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]models.File, error) { + ret := _m.Called(ctx, info, size) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, fs.FileInfo, int64) []models.File); ok { + r0 = rf(ctx, info, size) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, fs.FileInfo, int64) error); ok { + r1 = rf(ctx, info, size) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFingerprint provides a mock function with given fields: ctx, fp +func (_m *FileReaderWriter) FindByFingerprint(ctx context.Context, fp models.Fingerprint) ([]models.File, error) { + ret := _m.Called(ctx, fp) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, models.Fingerprint) []models.File); ok { + r0 = rf(ctx, fp) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.Fingerprint) error); ok { + r1 = rf(ctx, fp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByPath provides a mock function with given fields: ctx, path +func (_m *FileReaderWriter) FindByPath(ctx context.Context, path string) (models.File, error) { + ret := _m.Called(ctx, path) + + var r0 models.File + if rf, ok := ret.Get(0).(func(context.Context, string) models.File); ok { + r0 = rf(ctx, path) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, path) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByZipFileID provides a mock function with given fields: ctx, zipFileID +func (_m *FileReaderWriter) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]models.File, error) { + 
ret := _m.Called(ctx, zipFileID) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []models.File); ok { + r0 = rf(ctx, zipFileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, zipFileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCaptions provides a mock function with given fields: ctx, fileID +func (_m *FileReaderWriter) GetCaptions(ctx context.Context, fileID models.FileID) ([]*models.VideoCaption, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.VideoCaption + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.VideoCaption); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.VideoCaption) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// IsPrimary provides a mock function with given fields: ctx, fileID +func (_m *FileReaderWriter) IsPrimary(ctx context.Context, fileID models.FileID) (bool, error) { + ret := _m.Called(ctx, fileID) + + var r0 bool + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) bool); ok { + r0 = rf(ctx, fileID) + } else { + r0 = ret.Get(0).(bool) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Query provides a mock function with given fields: ctx, options +func (_m *FileReaderWriter) Query(ctx context.Context, options models.FileQueryOptions) (*models.FileQueryResult, error) { + ret := _m.Called(ctx, options) + + var r0 *models.FileQueryResult + if rf, ok := ret.Get(0).(func(context.Context, models.FileQueryOptions) *models.FileQueryResult); ok { + r0 = rf(ctx, options) + } else { + if 
ret.Get(0) != nil { + r0 = ret.Get(0).(*models.FileQueryResult) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileQueryOptions) error); ok { + r1 = rf(ctx, options) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Update provides a mock function with given fields: ctx, f +func (_m *FileReaderWriter) Update(ctx context.Context, f models.File) error { + ret := _m.Called(ctx, f) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.File) error); ok { + r0 = rf(ctx, f) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateCaptions provides a mock function with given fields: ctx, fileID, captions +func (_m *FileReaderWriter) UpdateCaptions(ctx context.Context, fileID models.FileID, captions []*models.VideoCaption) error { + ret := _m.Called(ctx, fileID, captions) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.FileID, []*models.VideoCaption) error); ok { + r0 = rf(ctx, fileID, captions) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/FolderReaderWriter.go b/pkg/models/mocks/FolderReaderWriter.go new file mode 100644 index 000000000..968bed4ad --- /dev/null +++ b/pkg/models/mocks/FolderReaderWriter.go @@ -0,0 +1,193 @@ +// Code generated by mockery v2.10.0. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + models "github.com/stashapp/stash/pkg/models" + mock "github.com/stretchr/testify/mock" +) + +// FolderReaderWriter is an autogenerated mock type for the FolderReaderWriter type +type FolderReaderWriter struct { + mock.Mock +} + +// CountAllInPaths provides a mock function with given fields: ctx, p +func (_m *FolderReaderWriter) CountAllInPaths(ctx context.Context, p []string) (int, error) { + ret := _m.Called(ctx, p) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, []string) int); ok { + r0 = rf(ctx, p) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string) error); ok { + r1 = rf(ctx, p) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Create provides a mock function with given fields: ctx, f +func (_m *FolderReaderWriter) Create(ctx context.Context, f *models.Folder) error { + ret := _m.Called(ctx, f) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.Folder) error); ok { + r0 = rf(ctx, f) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Destroy provides a mock function with given fields: ctx, id +func (_m *FolderReaderWriter) Destroy(ctx context.Context, id models.FolderID) error { + ret := _m.Called(ctx, id) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) error); ok { + r0 = rf(ctx, id) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Find provides a mock function with given fields: ctx, id +func (_m *FolderReaderWriter) Find(ctx context.Context, id models.FolderID) (*models.Folder, error) { + ret := _m.Called(ctx, id) + + var r0 *models.Folder + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) *models.Folder); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, id) + } else 
{ + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset +func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]*models.Folder, error) { + ret := _m.Called(ctx, p, limit, offset) + + var r0 []*models.Folder + if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []*models.Folder); ok { + r0 = rf(ctx, p, limit, offset) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok { + r1 = rf(ctx, p, limit, offset) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByParentFolderID provides a mock function with given fields: ctx, parentFolderID +func (_m *FolderReaderWriter) FindByParentFolderID(ctx context.Context, parentFolderID models.FolderID) ([]*models.Folder, error) { + ret := _m.Called(ctx, parentFolderID) + + var r0 []*models.Folder + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) []*models.Folder); ok { + r0 = rf(ctx, parentFolderID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, parentFolderID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByPath provides a mock function with given fields: ctx, path +func (_m *FolderReaderWriter) FindByPath(ctx context.Context, path string) (*models.Folder, error) { + ret := _m.Called(ctx, path) + + var r0 *models.Folder + if rf, ok := ret.Get(0).(func(context.Context, string) *models.Folder); ok { + r0 = rf(ctx, path) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, path) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByZipFileID 
provides a mock function with given fields: ctx, zipFileID +func (_m *FolderReaderWriter) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Folder, error) { + ret := _m.Called(ctx, zipFileID) + + var r0 []*models.Folder + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Folder); ok { + r0 = rf(ctx, zipFileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, zipFileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Update provides a mock function with given fields: ctx, f +func (_m *FolderReaderWriter) Update(ctx context.Context, f *models.Folder) error { + ret := _m.Called(ctx, f) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.Folder) error); ok { + r0 = rf(ctx, f) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/GalleryReaderWriter.go b/pkg/models/mocks/GalleryReaderWriter.go index 1c0ddf957..2b901466b 100644 --- a/pkg/models/mocks/GalleryReaderWriter.go +++ b/pkg/models/mocks/GalleryReaderWriter.go @@ -5,10 +5,8 @@ package mocks import ( context "context" - file "github.com/stashapp/stash/pkg/file" - mock "github.com/stretchr/testify/mock" - models "github.com/stashapp/stash/pkg/models" + mock "github.com/stretchr/testify/mock" ) // GalleryReaderWriter is an autogenerated mock type for the GalleryReaderWriter type @@ -16,6 +14,41 @@ type GalleryReaderWriter struct { mock.Mock } +// AddFileID provides a mock function with given fields: ctx, id, fileID +func (_m *GalleryReaderWriter) AddFileID(ctx context.Context, id int, fileID models.FileID) error { + ret := _m.Called(ctx, id, fileID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.FileID) error); ok { + r0 = rf(ctx, id, fileID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// AddImages provides a mock function with 
given fields: ctx, galleryID, imageIDs +func (_m *GalleryReaderWriter) AddImages(ctx context.Context, galleryID int, imageIDs ...int) error { + _va := make([]interface{}, len(imageIDs)) + for _i := range imageIDs { + _va[_i] = imageIDs[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, galleryID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, ...int) error); ok { + r0 = rf(ctx, galleryID, imageIDs...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // All provides a mock function with given fields: ctx func (_m *GalleryReaderWriter) All(ctx context.Context) ([]*models.Gallery, error) { ret := _m.Called(ctx) @@ -60,12 +93,33 @@ func (_m *GalleryReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } +// CountByFileID provides a mock function with given fields: ctx, fileID +func (_m *GalleryReaderWriter) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { + ret := _m.Called(ctx, fileID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) int); ok { + r0 = rf(ctx, fileID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Create provides a mock function with given fields: ctx, newGallery, fileIDs -func (_m *GalleryReaderWriter) Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error { +func (_m *GalleryReaderWriter) Create(ctx context.Context, newGallery *models.Gallery, fileIDs []models.FileID) error { ret := _m.Called(ctx, newGallery, fileIDs) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Gallery, []file.ID) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.Gallery, []models.FileID) error); ok { r0 = rf(ctx, newGallery, fileIDs) } else { r0 = ret.Error(0) @@ -157,6 +211,75 @@ func (_m 
*GalleryReaderWriter) FindByChecksums(ctx context.Context, checksums [] return r0, r1 } +// FindByFileID provides a mock function with given fields: ctx, fileID +func (_m *GalleryReaderWriter) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Gallery, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Gallery); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFingerprints provides a mock function with given fields: ctx, fp +func (_m *GalleryReaderWriter) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Gallery, error) { + ret := _m.Called(ctx, fp) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(context.Context, []models.Fingerprint) []*models.Gallery); ok { + r0 = rf(ctx, fp) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.Fingerprint) error); ok { + r1 = rf(ctx, fp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFolderID provides a mock function with given fields: ctx, folderID +func (_m *GalleryReaderWriter) FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) { + ret := _m.Called(ctx, folderID) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) []*models.Gallery); ok { + r0 = rf(ctx, folderID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, folderID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByImageID 
provides a mock function with given fields: ctx, imageID func (_m *GalleryReaderWriter) FindByImageID(ctx context.Context, imageID int) ([]*models.Gallery, error) { ret := _m.Called(ctx, imageID) @@ -249,13 +372,59 @@ func (_m *GalleryReaderWriter) FindMany(ctx context.Context, ids []int) ([]*mode return r0, r1 } -// GetImageIDs provides a mock function with given fields: ctx, galleryID -func (_m *GalleryReaderWriter) GetImageIDs(ctx context.Context, galleryID int) ([]int, error) { - ret := _m.Called(ctx, galleryID) +// FindUserGalleryByTitle provides a mock function with given fields: ctx, title +func (_m *GalleryReaderWriter) FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) { + ret := _m.Called(ctx, title) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(context.Context, string) []*models.Gallery); ok { + r0 = rf(ctx, title) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, title) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetFiles provides a mock function with given fields: ctx, relatedID +func (_m *GalleryReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, int) []models.File); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetImageIDs provides a mock function with given fields: ctx, relatedID +func (_m *GalleryReaderWriter) GetImageIDs(ctx context.Context, relatedID int) ([]int, error) { + ret := _m.Called(ctx, relatedID) var r0 []int if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok 
{ - r0 = rf(ctx, galleryID) + r0 = rf(ctx, relatedID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]int) @@ -264,7 +433,30 @@ func (_m *GalleryReaderWriter) GetImageIDs(ctx context.Context, galleryID int) ( var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, galleryID) + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetManyFileIDs provides a mock function with given fields: ctx, ids +func (_m *GalleryReaderWriter) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { + ret := _m.Called(ctx, ids) + + var r0 [][]models.FileID + if rf, ok := ret.Get(0).(func(context.Context, []int) [][]models.FileID); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FileID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) } else { r1 = ret.Error(1) } @@ -392,6 +584,27 @@ func (_m *GalleryReaderWriter) QueryCount(ctx context.Context, galleryFilter *mo return r0, r1 } +// RemoveImages provides a mock function with given fields: ctx, galleryID, imageIDs +func (_m *GalleryReaderWriter) RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error { + _va := make([]interface{}, len(imageIDs)) + for _i := range imageIDs { + _va[_i] = imageIDs[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, galleryID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, ...int) error); ok { + r0 = rf(ctx, galleryID, imageIDs...) 
+ } else { + r0 = ret.Error(0) + } + + return r0 +} + // Update provides a mock function with given fields: ctx, updatedGallery func (_m *GalleryReaderWriter) Update(ctx context.Context, updatedGallery *models.Gallery) error { ret := _m.Called(ctx, updatedGallery) diff --git a/pkg/models/mocks/ImageReaderWriter.go b/pkg/models/mocks/ImageReaderWriter.go index f745f8afe..4924fd51d 100644 --- a/pkg/models/mocks/ImageReaderWriter.go +++ b/pkg/models/mocks/ImageReaderWriter.go @@ -14,6 +14,20 @@ type ImageReaderWriter struct { mock.Mock } +// AddFileID provides a mock function with given fields: ctx, id, fileID +func (_m *ImageReaderWriter) AddFileID(ctx context.Context, id int, fileID models.FileID) error { + ret := _m.Called(ctx, id, fileID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.FileID) error); ok { + r0 = rf(ctx, id, fileID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // All provides a mock function with given fields: ctx func (_m *ImageReaderWriter) All(ctx context.Context) ([]*models.Image, error) { ret := _m.Called(ctx) @@ -58,6 +72,27 @@ func (_m *ImageReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } +// CountByFileID provides a mock function with given fields: ctx, fileID +func (_m *ImageReaderWriter) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { + ret := _m.Called(ctx, fileID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) int); ok { + r0 = rf(ctx, fileID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // CountByGalleryID provides a mock function with given fields: ctx, galleryID func (_m *ImageReaderWriter) CountByGalleryID(ctx context.Context, galleryID int) (int, error) { ret := _m.Called(ctx, galleryID) @@ -174,6 +209,75 @@ func (_m *ImageReaderWriter) 
FindByChecksum(ctx context.Context, checksum string return r0, r1 } +// FindByFileID provides a mock function with given fields: ctx, fileID +func (_m *ImageReaderWriter) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Image); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFingerprints provides a mock function with given fields: ctx, fp +func (_m *ImageReaderWriter) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Image, error) { + ret := _m.Called(ctx, fp) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func(context.Context, []models.Fingerprint) []*models.Image); ok { + r0 = rf(ctx, fp) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.Fingerprint) error); ok { + r1 = rf(ctx, fp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFolderID provides a mock function with given fields: ctx, fileID +func (_m *ImageReaderWriter) FindByFolderID(ctx context.Context, fileID models.FolderID) ([]*models.Image, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) []*models.Image); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByGalleryID provides a mock function with given fields: ctx, galleryID 
func (_m *ImageReaderWriter) FindByGalleryID(ctx context.Context, galleryID int) ([]*models.Image, error) { ret := _m.Called(ctx, galleryID) @@ -197,6 +301,29 @@ func (_m *ImageReaderWriter) FindByGalleryID(ctx context.Context, galleryID int) return r0, r1 } +// FindByZipFileID provides a mock function with given fields: ctx, zipFileID +func (_m *ImageReaderWriter) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) { + ret := _m.Called(ctx, zipFileID) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Image); ok { + r0 = rf(ctx, zipFileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, zipFileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindMany provides a mock function with given fields: ctx, ids func (_m *ImageReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models.Image, error) { ret := _m.Called(ctx, ids) @@ -220,6 +347,29 @@ func (_m *ImageReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models return r0, r1 } +// GetFiles provides a mock function with given fields: ctx, relatedID +func (_m *ImageReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, int) []models.File); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetGalleryIDs provides a mock function with given fields: ctx, relatedID func (_m *ImageReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) { ret := _m.Called(ctx, relatedID) @@ -243,6 +393,29 
@@ func (_m *ImageReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ( return r0, r1 } +// GetManyFileIDs provides a mock function with given fields: ctx, ids +func (_m *ImageReaderWriter) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { + ret := _m.Called(ctx, ids) + + var r0 [][]models.FileID + if rf, ok := ret.Get(0).(func(context.Context, []int) [][]models.FileID); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FileID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetPerformerIDs provides a mock function with given fields: ctx, relatedID func (_m *ImageReaderWriter) GetPerformerIDs(ctx context.Context, relatedID int) ([]int, error) { ret := _m.Called(ctx, relatedID) @@ -453,3 +626,31 @@ func (_m *ImageReaderWriter) UpdatePartial(ctx context.Context, id int, partial return r0, r1 } + +// UpdatePerformers provides a mock function with given fields: ctx, imageID, performerIDs +func (_m *ImageReaderWriter) UpdatePerformers(ctx context.Context, imageID int, performerIDs []int) error { + ret := _m.Called(ctx, imageID, performerIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok { + r0 = rf(ctx, imageID, performerIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateTags provides a mock function with given fields: ctx, imageID, tagIDs +func (_m *ImageReaderWriter) UpdateTags(ctx context.Context, imageID int, tagIDs []int) error { + ret := _m.Called(ctx, imageID, tagIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok { + r0 = rf(ctx, imageID, tagIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/SceneMarkerReaderWriter.go b/pkg/models/mocks/SceneMarkerReaderWriter.go index 2be3b1184..4b4fa6d2c 100644 --- 
a/pkg/models/mocks/SceneMarkerReaderWriter.go +++ b/pkg/models/mocks/SceneMarkerReaderWriter.go @@ -199,13 +199,13 @@ func (_m *SceneMarkerReaderWriter) GetMarkerStrings(ctx context.Context, q *stri return r0, r1 } -// GetTagIDs provides a mock function with given fields: ctx, imageID -func (_m *SceneMarkerReaderWriter) GetTagIDs(ctx context.Context, imageID int) ([]int, error) { - ret := _m.Called(ctx, imageID) +// GetTagIDs provides a mock function with given fields: ctx, relatedID +func (_m *SceneMarkerReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]int, error) { + ret := _m.Called(ctx, relatedID) var r0 []int if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok { - r0 = rf(ctx, imageID) + r0 = rf(ctx, relatedID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]int) @@ -214,7 +214,7 @@ func (_m *SceneMarkerReaderWriter) GetTagIDs(ctx context.Context, imageID int) ( var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, imageID) + r1 = rf(ctx, relatedID) } else { r1 = ret.Error(1) } diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index 8d7245ee9..933745871 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -5,10 +5,8 @@ package mocks import ( context "context" - file "github.com/stashapp/stash/pkg/file" - mock "github.com/stretchr/testify/mock" - models "github.com/stashapp/stash/pkg/models" + mock "github.com/stretchr/testify/mock" ) // SceneReaderWriter is an autogenerated mock type for the SceneReaderWriter type @@ -16,6 +14,34 @@ type SceneReaderWriter struct { mock.Mock } +// AddFileID provides a mock function with given fields: ctx, id, fileID +func (_m *SceneReaderWriter) AddFileID(ctx context.Context, id int, fileID models.FileID) error { + ret := _m.Called(ctx, id, fileID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.FileID) error); ok { + r0 = rf(ctx, id, fileID) + } 
else { + r0 = ret.Error(0) + } + + return r0 +} + +// AddGalleryIDs provides a mock function with given fields: ctx, sceneID, galleryIDs +func (_m *SceneReaderWriter) AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error { + ret := _m.Called(ctx, sceneID, galleryIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok { + r0 = rf(ctx, sceneID, galleryIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // All provides a mock function with given fields: ctx func (_m *SceneReaderWriter) All(ctx context.Context) ([]*models.Scene, error) { ret := _m.Called(ctx) @@ -39,6 +65,20 @@ func (_m *SceneReaderWriter) All(ctx context.Context) ([]*models.Scene, error) { return r0, r1 } +// AssignFiles provides a mock function with given fields: ctx, sceneID, fileID +func (_m *SceneReaderWriter) AssignFiles(ctx context.Context, sceneID int, fileID []models.FileID) error { + ret := _m.Called(ctx, sceneID, fileID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []models.FileID) error); ok { + r0 = rf(ctx, sceneID, fileID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Count provides a mock function with given fields: ctx func (_m *SceneReaderWriter) Count(ctx context.Context) (int, error) { ret := _m.Called(ctx) @@ -60,6 +100,27 @@ func (_m *SceneReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } +// CountByFileID provides a mock function with given fields: ctx, fileID +func (_m *SceneReaderWriter) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { + ret := _m.Called(ctx, fileID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) int); ok { + r0 = rf(ctx, fileID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // CountByMovieID provides a mock function with 
given fields: ctx, movieID func (_m *SceneReaderWriter) CountByMovieID(ctx context.Context, movieID int) (int, error) { ret := _m.Called(ctx, movieID) @@ -187,11 +248,11 @@ func (_m *SceneReaderWriter) CountMissingOSHash(ctx context.Context) (int, error } // Create provides a mock function with given fields: ctx, newScene, fileIDs -func (_m *SceneReaderWriter) Create(ctx context.Context, newScene *models.Scene, fileIDs []file.ID) error { +func (_m *SceneReaderWriter) Create(ctx context.Context, newScene *models.Scene, fileIDs []models.FileID) error { ret := _m.Called(ctx, newScene, fileIDs) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Scene, []file.ID) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.Scene, []models.FileID) error); ok { r0 = rf(ctx, newScene, fileIDs) } else { r0 = ret.Error(0) @@ -302,6 +363,52 @@ func (_m *SceneReaderWriter) FindByChecksum(ctx context.Context, checksum string return r0, r1 } +// FindByFileID provides a mock function with given fields: ctx, fileID +func (_m *SceneReaderWriter) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Scene + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Scene); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFingerprints provides a mock function with given fields: ctx, fp +func (_m *SceneReaderWriter) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Scene, error) { + ret := _m.Called(ctx, fp) + + var r0 []*models.Scene + if rf, ok := ret.Get(0).(func(context.Context, []models.Fingerprint) []*models.Scene); ok { + r0 = rf(ctx, fp) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).([]*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.Fingerprint) error); ok { + r1 = rf(ctx, fp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByGalleryID provides a mock function with given fields: ctx, performerID func (_m *SceneReaderWriter) FindByGalleryID(ctx context.Context, performerID int) ([]*models.Scene, error) { ret := _m.Called(ctx, performerID) @@ -417,6 +524,29 @@ func (_m *SceneReaderWriter) FindByPerformerID(ctx context.Context, performerID return r0, r1 } +// FindByPrimaryFileID provides a mock function with given fields: ctx, fileID +func (_m *SceneReaderWriter) FindByPrimaryFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Scene + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Scene); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindDuplicates provides a mock function with given fields: ctx, distance, durationDiff func (_m *SceneReaderWriter) FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*models.Scene, error) { ret := _m.Called(ctx, distance, durationDiff) @@ -487,15 +617,15 @@ func (_m *SceneReaderWriter) GetCover(ctx context.Context, sceneID int) ([]byte, } // GetFiles provides a mock function with given fields: ctx, relatedID -func (_m *SceneReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]*file.VideoFile, error) { +func (_m *SceneReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]*models.VideoFile, error) { ret := _m.Called(ctx, relatedID) - var r0 []*file.VideoFile - if rf, ok := ret.Get(0).(func(context.Context, int) []*file.VideoFile); ok { + var r0 
[]*models.VideoFile + if rf, ok := ret.Get(0).(func(context.Context, int) []*models.VideoFile); ok { r0 = rf(ctx, relatedID) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]*file.VideoFile) + r0 = ret.Get(0).([]*models.VideoFile) } } @@ -532,6 +662,29 @@ func (_m *SceneReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ( return r0, r1 } +// GetManyFileIDs provides a mock function with given fields: ctx, ids +func (_m *SceneReaderWriter) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { + ret := _m.Called(ctx, ids) + + var r0 [][]models.FileID + if rf, ok := ret.Get(0).(func(context.Context, []int) [][]models.FileID); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FileID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetMovies provides a mock function with given fields: ctx, id func (_m *SceneReaderWriter) GetMovies(ctx context.Context, id int) ([]models.MoviesScenes, error) { ret := _m.Called(ctx, id) @@ -689,20 +842,20 @@ func (_m *SceneReaderWriter) IncrementOCounter(ctx context.Context, id int) (int return r0, r1 } -// IncrementWatchCount provides a mock function with given fields: ctx, id -func (_m *SceneReaderWriter) IncrementWatchCount(ctx context.Context, id int) (int, error) { - ret := _m.Called(ctx, id) +// IncrementWatchCount provides a mock function with given fields: ctx, sceneID +func (_m *SceneReaderWriter) IncrementWatchCount(ctx context.Context, sceneID int) (int, error) { + ret := _m.Called(ctx, sceneID) var r0 int if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { - r0 = rf(ctx, id) + r0 = rf(ctx, sceneID) } else { r0 = ret.Get(0).(int) } var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, id) + r1 = rf(ctx, sceneID) } else { r1 = ret.Error(1) } @@ -859,20 +1012,20 @@ func (_m 
*SceneReaderWriter) ResetOCounter(ctx context.Context, id int) (int, er return r0, r1 } -// SaveActivity provides a mock function with given fields: ctx, id, resumeTime, playDuration -func (_m *SceneReaderWriter) SaveActivity(ctx context.Context, id int, resumeTime *float64, playDuration *float64) (bool, error) { - ret := _m.Called(ctx, id, resumeTime, playDuration) +// SaveActivity provides a mock function with given fields: ctx, sceneID, resumeTime, playDuration +func (_m *SceneReaderWriter) SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) { + ret := _m.Called(ctx, sceneID, resumeTime, playDuration) var r0 bool if rf, ok := ret.Get(0).(func(context.Context, int, *float64, *float64) bool); ok { - r0 = rf(ctx, id, resumeTime, playDuration) + r0 = rf(ctx, sceneID, resumeTime, playDuration) } else { r0 = ret.Get(0).(bool) } var r1 error if rf, ok := ret.Get(1).(func(context.Context, int, *float64, *float64) error); ok { - r1 = rf(ctx, id, resumeTime, playDuration) + r1 = rf(ctx, sceneID, resumeTime, playDuration) } else { r1 = ret.Error(1) } diff --git a/pkg/models/mocks/StudioReaderWriter.go b/pkg/models/mocks/StudioReaderWriter.go index 56fd6200d..c46e45d4c 100644 --- a/pkg/models/mocks/StudioReaderWriter.go +++ b/pkg/models/mocks/StudioReaderWriter.go @@ -58,13 +58,13 @@ func (_m *StudioReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } -// Create provides a mock function with given fields: ctx, input -func (_m *StudioReaderWriter) Create(ctx context.Context, input *models.Studio) error { - ret := _m.Called(ctx, input) +// Create provides a mock function with given fields: ctx, newStudio +func (_m *StudioReaderWriter) Create(ctx context.Context, newStudio *models.Studio) error { + ret := _m.Called(ctx, newStudio) var r0 error if rf, ok := ret.Get(0).(func(context.Context, *models.Studio) error); ok { - r0 = rf(ctx, input) + r0 = rf(ctx, newStudio) } else { r0 = ret.Error(0) } @@ -132,6 
+132,29 @@ func (_m *StudioReaderWriter) FindByName(ctx context.Context, name string, nocas return r0, r1 } +// FindBySceneID provides a mock function with given fields: ctx, sceneID +func (_m *StudioReaderWriter) FindBySceneID(ctx context.Context, sceneID int) (*models.Studio, error) { + ret := _m.Called(ctx, sceneID) + + var r0 *models.Studio + if rf, ok := ret.Get(0).(func(context.Context, int) *models.Studio); ok { + r0 = rf(ctx, sceneID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Studio) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, sceneID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByStashID provides a mock function with given fields: ctx, stashID func (_m *StudioReaderWriter) FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Studio, error) { ret := _m.Called(ctx, stashID) @@ -395,13 +418,13 @@ func (_m *StudioReaderWriter) UpdateImage(ctx context.Context, studioID int, ima return r0 } -// UpdatePartial provides a mock function with given fields: ctx, input -func (_m *StudioReaderWriter) UpdatePartial(ctx context.Context, input models.StudioPartial) (*models.Studio, error) { - ret := _m.Called(ctx, input) +// UpdatePartial provides a mock function with given fields: ctx, updatedStudio +func (_m *StudioReaderWriter) UpdatePartial(ctx context.Context, updatedStudio models.StudioPartial) (*models.Studio, error) { + ret := _m.Called(ctx, updatedStudio) var r0 *models.Studio if rf, ok := ret.Get(0).(func(context.Context, models.StudioPartial) *models.Studio); ok { - r0 = rf(ctx, input) + r0 = rf(ctx, updatedStudio) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Studio) @@ -410,7 +433,7 @@ func (_m *StudioReaderWriter) UpdatePartial(ctx context.Context, input models.St var r1 error if rf, ok := ret.Get(1).(func(context.Context, models.StudioPartial) error); ok { - r1 = rf(ctx, input) + r1 = rf(ctx, updatedStudio) } else { r1 = 
ret.Error(1) } diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index b4553c3d7..a061b79b2 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -385,13 +385,13 @@ func (_m *TagReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models.T return r0, r1 } -// GetAliases provides a mock function with given fields: ctx, tagID -func (_m *TagReaderWriter) GetAliases(ctx context.Context, tagID int) ([]string, error) { - ret := _m.Called(ctx, tagID) +// GetAliases provides a mock function with given fields: ctx, relatedID +func (_m *TagReaderWriter) GetAliases(ctx context.Context, relatedID int) ([]string, error) { + ret := _m.Called(ctx, relatedID) var r0 []string if rf, ok := ret.Get(0).(func(context.Context, int) []string); ok { - r0 = rf(ctx, tagID) + r0 = rf(ctx, relatedID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]string) @@ -400,7 +400,7 @@ func (_m *TagReaderWriter) GetAliases(ctx context.Context, tagID int) ([]string, var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, tagID) + r1 = rf(ctx, relatedID) } else { r1 = ret.Error(1) } diff --git a/pkg/models/mocks/query.go b/pkg/models/mocks/query.go index 346bd1e55..dd35d0f86 100644 --- a/pkg/models/mocks/query.go +++ b/pkg/models/mocks/query.go @@ -31,6 +31,10 @@ type imageResolver struct { images []*models.Image } +func (s *imageResolver) Find(ctx context.Context, id int) (*models.Image, error) { + panic("not implemented") +} + func (s *imageResolver) FindMany(ctx context.Context, ids []int) ([]*models.Image, error) { return s.images, nil } diff --git a/pkg/models/model_file.go b/pkg/models/model_file.go index 4e8ddbef8..b4ca2c3c1 100644 --- a/pkg/models/model_file.go +++ b/pkg/models/model_file.go @@ -1,9 +1,14 @@ package models import ( + "bytes" "fmt" "io" + "io/fs" + "math" + "net/http" "strconv" + "time" ) type HashAlgorithm string @@ -47,3 +52,244 @@ func (e *HashAlgorithm) 
UnmarshalGQL(v interface{}) error { func (e HashAlgorithm) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } + +// ID represents an ID of a file. +type FileID int32 + +func (i FileID) String() string { + return strconv.Itoa(int(i)) +} + +func (i *FileID) UnmarshalGQL(v interface{}) (err error) { + switch v := v.(type) { + case string: + var id int + id, err = strconv.Atoi(v) + *i = FileID(id) + return err + case int: + *i = FileID(v) + return nil + default: + return fmt.Errorf("%T is not an int", v) + } +} + +func (i FileID) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(i.String())) +} + +// DirEntry represents a file or directory in the file system. +type DirEntry struct { + ZipFileID *FileID `json:"zip_file_id"` + + // transient - not persisted + // only guaranteed to have id, path and basename set + ZipFile File + + ModTime time.Time `json:"mod_time"` +} + +func (e *DirEntry) info(fs FS, path string) (fs.FileInfo, error) { + if e.ZipFile != nil { + zipPath := e.ZipFile.Base().Path + zfs, err := fs.OpenZip(zipPath) + if err != nil { + return nil, err + } + defer zfs.Close() + fs = zfs + } + // else assume os file + + ret, err := fs.Lstat(path) + return ret, err +} + +// File represents a file in the file system. +type File interface { + Base() *BaseFile + SetFingerprints(fp Fingerprints) + Open(fs FS) (io.ReadCloser, error) +} + +// BaseFile represents a file in the file system. +type BaseFile struct { + ID FileID `json:"id"` + + DirEntry + + // resolved from parent folder and basename only - not stored in DB + Path string `json:"path"` + + Basename string `json:"basename"` + ParentFolderID FolderID `json:"parent_folder_id"` + + Fingerprints Fingerprints `json:"fingerprints"` + + Size int64 `json:"size"` + + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` +} + +func (f *BaseFile) FingerprintSlice() []Fingerprint { + return f.Fingerprints +} + +// SetFingerprints sets the fingerprints of the file. 
+// If a fingerprint of the same type already exists, it is overwritten. +func (f *BaseFile) SetFingerprints(fp Fingerprints) { + for _, v := range fp { + f.SetFingerprint(v) + } +} + +// SetFingerprint sets the fingerprint of the file. +// If a fingerprint of the same type already exists, it is overwritten. +func (f *BaseFile) SetFingerprint(fp Fingerprint) { + for i, existing := range f.Fingerprints { + if existing.Type == fp.Type { + f.Fingerprints[i] = fp + return + } + } + + f.Fingerprints = append(f.Fingerprints, fp) +} + +// Base is used to fulfil the File interface. +func (f *BaseFile) Base() *BaseFile { + return f +} + +func (f *BaseFile) Open(fs FS) (io.ReadCloser, error) { + if f.ZipFile != nil { + zipPath := f.ZipFile.Base().Path + zfs, err := fs.OpenZip(zipPath) + if err != nil { + return nil, err + } + + return zfs.OpenOnly(f.Path) + } + + return fs.Open(f.Path) +} + +func (f *BaseFile) Info(fs FS) (fs.FileInfo, error) { + return f.info(fs, f.Path) +} + +func (f *BaseFile) Serve(fs FS, w http.ResponseWriter, r *http.Request) error { + reader, err := f.Open(fs) + if err != nil { + return err + } + + defer reader.Close() + + content, ok := reader.(io.ReadSeeker) + if !ok { + data, err := io.ReadAll(reader) + if err != nil { + return err + } + content = bytes.NewReader(data) + } + + if r.URL.Query().Has("t") { + w.Header().Set("Cache-Control", "private, max-age=31536000, immutable") + } else { + w.Header().Set("Cache-Control", "no-cache") + } + http.ServeContent(w, r, f.Basename, f.ModTime, content) + + return nil +} + +// VisualFile is an interface for files that have a width and height. +type VisualFile interface { + File + GetWidth() int + GetHeight() int + GetFormat() string +} + +func GetMinResolution(f VisualFile) int { + w := f.GetWidth() + h := f.GetHeight() + + if w < h { + return w + } + + return h +} + +// ImageFile is an extension of BaseFile to represent image files. 
+type ImageFile struct { + *BaseFile + Format string `json:"format"` + Width int `json:"width"` + Height int `json:"height"` +} + +func (f ImageFile) GetWidth() int { + return f.Width +} + +func (f ImageFile) GetHeight() int { + return f.Height +} + +func (f ImageFile) GetFormat() string { + return f.Format +} + +// VideoFile is an extension of BaseFile to represent video files. +type VideoFile struct { + *BaseFile + Format string `json:"format"` + Width int `json:"width"` + Height int `json:"height"` + Duration float64 `json:"duration"` + VideoCodec string `json:"video_codec"` + AudioCodec string `json:"audio_codec"` + FrameRate float64 `json:"frame_rate"` + BitRate int64 `json:"bitrate"` + + Interactive bool `json:"interactive"` + InteractiveSpeed *int `json:"interactive_speed"` +} + +func (f VideoFile) GetWidth() int { + return f.Width +} + +func (f VideoFile) GetHeight() int { + return f.Height +} + +func (f VideoFile) GetFormat() string { + return f.Format +} + +// #1572 - Inf and NaN values cause the JSON marshaller to fail +// Replace these values with 0 rather than erroring + +func (f VideoFile) DurationFinite() float64 { + ret := f.Duration + if math.IsInf(ret, 0) || math.IsNaN(ret) { + return 0 + } + return ret +} + +func (f VideoFile) FrameRateFinite() float64 { + ret := f.FrameRate + if math.IsInf(ret, 0) || math.IsNaN(ret) { + return 0 + } + return ret +} diff --git a/pkg/models/model_folder.go b/pkg/models/model_folder.go new file mode 100644 index 000000000..590cdd7bd --- /dev/null +++ b/pkg/models/model_folder.go @@ -0,0 +1,51 @@ +package models + +import ( + "fmt" + "io" + "io/fs" + "strconv" + "time" +) + +// FolderID represents an ID of a folder. +type FolderID int32 + +// String converts the ID to a string. 
+func (i FolderID) String() string { + return strconv.Itoa(int(i)) +} + +func (i *FolderID) UnmarshalGQL(v interface{}) (err error) { + switch v := v.(type) { + case string: + var id int + id, err = strconv.Atoi(v) + *i = FolderID(id) + return err + case int: + *i = FolderID(v) + return nil + default: + return fmt.Errorf("%T is not an int", v) + } +} + +func (i FolderID) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(i.String())) +} + +// Folder represents a folder in the file system. +type Folder struct { + ID FolderID `json:"id"` + DirEntry + Path string `json:"path"` + ParentFolderID *FolderID `json:"parent_folder_id"` + + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` +} + +func (f *Folder) Info(fs FS) (fs.FileInfo, error) { + return f.info(fs, f.Path) +} diff --git a/pkg/models/model_gallery.go b/pkg/models/model_gallery.go index 932d5cd17..8f563f06f 100644 --- a/pkg/models/model_gallery.go +++ b/pkg/models/model_gallery.go @@ -5,8 +5,6 @@ import ( "path/filepath" "strconv" "time" - - "github.com/stashapp/stash/pkg/file" ) type Gallery struct { @@ -24,11 +22,11 @@ type Gallery struct { // transient - not persisted Files RelatedFiles // transient - not persisted - PrimaryFileID *file.ID + PrimaryFileID *FileID // transient - path of primary file or folder Path string - FolderID *file.FolderID `json:"folder_id"` + FolderID *FolderID `json:"folder_id"` CreatedAt time.Time `json:"created_at"` UpdatedAt time.Time `json:"updated_at"` @@ -45,13 +43,13 @@ func (g *Gallery) IsUserCreated() bool { } func (g *Gallery) LoadFiles(ctx context.Context, l FileLoader) error { - return g.Files.load(func() ([]file.File, error) { + return g.Files.load(func() ([]File, error) { return l.GetFiles(ctx, g.ID) }) } -func (g *Gallery) LoadPrimaryFile(ctx context.Context, l file.Finder) error { - return g.Files.loadPrimary(func() (file.File, error) { +func (g *Gallery) LoadPrimaryFile(ctx context.Context, l FileGetter) error { + return 
g.Files.loadPrimary(func() (File, error) { if g.PrimaryFileID == nil { return nil, nil } @@ -89,7 +87,7 @@ func (g *Gallery) LoadTagIDs(ctx context.Context, l TagIDLoader) error { func (g Gallery) PrimaryChecksum() string { // renamed from Checksum to prevent gqlgen from using it in the resolver if p := g.Files.Primary(); p != nil { - v := p.Base().Fingerprints.Get(file.FingerprintTypeMD5) + v := p.Base().Fingerprints.Get(FingerprintTypeMD5) if v == nil { return "" } @@ -120,7 +118,7 @@ type GalleryPartial struct { SceneIDs *UpdateIDs TagIDs *UpdateIDs PerformerIDs *UpdateIDs - PrimaryFileID *file.ID + PrimaryFileID *FileID } func NewGalleryPartial() GalleryPartial { diff --git a/pkg/models/model_image.go b/pkg/models/model_image.go index e025ba0b1..9e0a0389a 100644 --- a/pkg/models/model_image.go +++ b/pkg/models/model_image.go @@ -5,8 +5,6 @@ import ( "path/filepath" "strconv" "time" - - "github.com/stashapp/stash/pkg/file" ) // Image stores the metadata for a single image. @@ -24,7 +22,7 @@ type Image struct { // transient - not persisted Files RelatedFiles - PrimaryFileID *file.ID + PrimaryFileID *FileID // transient - path of primary file - empty if no files Path string // transient - checksum of primary file - empty if no files @@ -39,13 +37,13 @@ type Image struct { } func (i *Image) LoadFiles(ctx context.Context, l FileLoader) error { - return i.Files.load(func() ([]file.File, error) { + return i.Files.load(func() ([]File, error) { return l.GetFiles(ctx, i.ID) }) } -func (i *Image) LoadPrimaryFile(ctx context.Context, l file.Finder) error { - return i.Files.loadPrimary(func() (file.File, error) { +func (i *Image) LoadPrimaryFile(ctx context.Context, l FileGetter) error { + return i.Files.loadPrimary(func() (File, error) { if i.PrimaryFileID == nil { return nil, nil } @@ -107,7 +105,7 @@ func (i Image) DisplayName() string { type ImageCreateInput struct { *Image - FileIDs []file.ID + FileIDs []FileID } type ImagePartial struct { @@ -125,7 +123,7 @@ type 
ImagePartial struct { GalleryIDs *UpdateIDs TagIDs *UpdateIDs PerformerIDs *UpdateIDs - PrimaryFileID *file.ID + PrimaryFileID *FileID } func NewImagePartial() ImagePartial { diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go index f19113f49..eadbaab3a 100644 --- a/pkg/models/model_scene.go +++ b/pkg/models/model_scene.go @@ -6,8 +6,6 @@ import ( "path/filepath" "strconv" "time" - - "github.com/stashapp/stash/pkg/file" ) // Scene stores the metadata for a single video scene. @@ -26,7 +24,7 @@ type Scene struct { // transient - not persisted Files RelatedVideoFiles - PrimaryFileID *file.ID + PrimaryFileID *FileID // transient - path of primary file - empty if no files Path string // transient - oshash of primary file - empty if no files @@ -57,13 +55,13 @@ func (s *Scene) LoadURLs(ctx context.Context, l URLLoader) error { } func (s *Scene) LoadFiles(ctx context.Context, l VideoFileLoader) error { - return s.Files.load(func() ([]*file.VideoFile, error) { + return s.Files.load(func() ([]*VideoFile, error) { return l.GetFiles(ctx, s.ID) }) } -func (s *Scene) LoadPrimaryFile(ctx context.Context, l file.Finder) error { - return s.Files.loadPrimary(func() (*file.VideoFile, error) { +func (s *Scene) LoadPrimaryFile(ctx context.Context, l FileGetter) error { + return s.Files.loadPrimary(func() (*VideoFile, error) { if s.PrimaryFileID == nil { return nil, nil } @@ -73,10 +71,10 @@ func (s *Scene) LoadPrimaryFile(ctx context.Context, l file.Finder) error { return nil, err } - var vf *file.VideoFile + var vf *VideoFile if len(f) > 0 { var ok bool - vf, ok = f[0].(*file.VideoFile) + vf, ok = f[0].(*VideoFile) if !ok { return nil, errors.New("not a video file") } @@ -173,7 +171,7 @@ type ScenePartial struct { PerformerIDs *UpdateIDs MovieIDs *UpdateMovieIDs StashIDs *UpdateStashIDs - PrimaryFileID *file.ID + PrimaryFileID *FileID } func NewScenePartial() ScenePartial { diff --git a/pkg/models/movie.go b/pkg/models/movie.go index d00b3f491..be7effad3 100644 --- 
a/pkg/models/movie.go +++ b/pkg/models/movie.go @@ -1,7 +1,5 @@ package models -import "context" - type MovieFilterType struct { Name *StringCriterionInput `json:"name"` Director *StringCriterionInput `json:"director"` @@ -27,37 +25,3 @@ type MovieFilterType struct { // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` } - -type MovieReader interface { - Find(ctx context.Context, id int) (*Movie, error) - FindMany(ctx context.Context, ids []int) ([]*Movie, error) - // FindBySceneID(sceneID int) ([]*Movie, error) - FindByName(ctx context.Context, name string, nocase bool) (*Movie, error) - FindByNames(ctx context.Context, names []string, nocase bool) ([]*Movie, error) - All(ctx context.Context) ([]*Movie, error) - Count(ctx context.Context) (int, error) - Query(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int, error) - QueryCount(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) (int, error) - GetFrontImage(ctx context.Context, movieID int) ([]byte, error) - HasFrontImage(ctx context.Context, movieID int) (bool, error) - GetBackImage(ctx context.Context, movieID int) ([]byte, error) - HasBackImage(ctx context.Context, movieID int) (bool, error) - FindByPerformerID(ctx context.Context, performerID int) ([]*Movie, error) - CountByPerformerID(ctx context.Context, performerID int) (int, error) - FindByStudioID(ctx context.Context, studioID int) ([]*Movie, error) - CountByStudioID(ctx context.Context, studioID int) (int, error) -} - -type MovieWriter interface { - Create(ctx context.Context, newMovie *Movie) error - UpdatePartial(ctx context.Context, id int, updatedMovie MoviePartial) (*Movie, error) - Update(ctx context.Context, updatedMovie *Movie) error - Destroy(ctx context.Context, id int) error - UpdateFrontImage(ctx context.Context, movieID int, frontImage []byte) error - UpdateBackImage(ctx context.Context, movieID int, backImage []byte) error -} - -type 
MovieReaderWriter interface { - MovieReader - MovieWriter -} diff --git a/pkg/models/performer.go b/pkg/models/performer.go index 78d0a8995..752f1ce08 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -1,7 +1,6 @@ package models import ( - "context" "fmt" "io" "strconv" @@ -193,44 +192,3 @@ type PerformerFilterType struct { // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` } - -type PerformerFinder interface { - FindMany(ctx context.Context, ids []int) ([]*Performer, error) -} - -type PerformerReader interface { - Find(ctx context.Context, id int) (*Performer, error) - PerformerFinder - FindBySceneID(ctx context.Context, sceneID int) ([]*Performer, error) - FindByImageID(ctx context.Context, imageID int) ([]*Performer, error) - FindByGalleryID(ctx context.Context, galleryID int) ([]*Performer, error) - FindByNames(ctx context.Context, names []string, nocase bool) ([]*Performer, error) - FindByStashID(ctx context.Context, stashID StashID) ([]*Performer, error) - FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Performer, error) - CountByTagID(ctx context.Context, tagID int) (int, error) - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*Performer, error) - // TODO - this interface is temporary until the filter schema can fully - // support the query needed - QueryForAutoTag(ctx context.Context, words []string) ([]*Performer, error) - Query(ctx context.Context, performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int, error) - QueryCount(ctx context.Context, galleryFilter *PerformerFilterType, findFilter *FindFilterType) (int, error) - AliasLoader - GetImage(ctx context.Context, performerID int) ([]byte, error) - HasImage(ctx context.Context, performerID int) (bool, error) - StashIDLoader - TagIDLoader -} - -type PerformerWriter interface { - Create(ctx context.Context, newPerformer *Performer) error - UpdatePartial(ctx 
context.Context, id int, updatedPerformer PerformerPartial) (*Performer, error) - Update(ctx context.Context, updatedPerformer *Performer) error - Destroy(ctx context.Context, id int) error - UpdateImage(ctx context.Context, performerID int, image []byte) error -} - -type PerformerReaderWriter interface { - PerformerReader - PerformerWriter -} diff --git a/pkg/models/relationships.go b/pkg/models/relationships.go index f59e7d92e..2e6f07708 100644 --- a/pkg/models/relationships.go +++ b/pkg/models/relationships.go @@ -1,15 +1,15 @@ package models -import ( - "context" - - "github.com/stashapp/stash/pkg/file" -) +import "context" type SceneIDLoader interface { GetSceneIDs(ctx context.Context, relatedID int) ([]int, error) } +type ImageIDLoader interface { + GetImageIDs(ctx context.Context, relatedID int) ([]int, error) +} + type GalleryIDLoader interface { GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) } @@ -22,6 +22,10 @@ type TagIDLoader interface { GetTagIDs(ctx context.Context, relatedID int) ([]int, error) } +type FileIDLoader interface { + GetManyFileIDs(ctx context.Context, ids []int) ([][]FileID, error) +} + type SceneMovieLoader interface { GetMovies(ctx context.Context, id int) ([]MoviesScenes, error) } @@ -31,11 +35,11 @@ type StashIDLoader interface { } type VideoFileLoader interface { - GetFiles(ctx context.Context, relatedID int) ([]*file.VideoFile, error) + GetFiles(ctx context.Context, relatedID int) ([]*VideoFile, error) } type FileLoader interface { - GetFiles(ctx context.Context, relatedID int) ([]file.File, error) + GetFiles(ctx context.Context, relatedID int) ([]File, error) } type AliasLoader interface { @@ -224,12 +228,12 @@ func (r *RelatedStashIDs) load(fn func() ([]StashID, error)) error { } type RelatedVideoFiles struct { - primaryFile *file.VideoFile - files []*file.VideoFile + primaryFile *VideoFile + files []*VideoFile primaryLoaded bool } -func NewRelatedVideoFiles(files []*file.VideoFile) RelatedVideoFiles { +func 
NewRelatedVideoFiles(files []*VideoFile) RelatedVideoFiles { ret := RelatedVideoFiles{ files: files, primaryLoaded: true, @@ -242,12 +246,12 @@ func NewRelatedVideoFiles(files []*file.VideoFile) RelatedVideoFiles { return ret } -func (r *RelatedVideoFiles) SetPrimary(f *file.VideoFile) { +func (r *RelatedVideoFiles) SetPrimary(f *VideoFile) { r.primaryFile = f r.primaryLoaded = true } -func (r *RelatedVideoFiles) Set(f []*file.VideoFile) { +func (r *RelatedVideoFiles) Set(f []*VideoFile) { r.files = f if len(r.files) > 0 { r.primaryFile = r.files[0] @@ -267,7 +271,7 @@ func (r RelatedVideoFiles) PrimaryLoaded() bool { } // List returns the related files. Panics if the relationship has not been loaded. -func (r RelatedVideoFiles) List() []*file.VideoFile { +func (r RelatedVideoFiles) List() []*VideoFile { if !r.Loaded() { panic("relationship has not been loaded") } @@ -276,7 +280,7 @@ func (r RelatedVideoFiles) List() []*file.VideoFile { } // Primary returns the primary file. Panics if the relationship has not been loaded. 
-func (r RelatedVideoFiles) Primary() *file.VideoFile { +func (r RelatedVideoFiles) Primary() *VideoFile { if !r.PrimaryLoaded() { panic("relationship has not been loaded") } @@ -284,7 +288,7 @@ func (r RelatedVideoFiles) Primary() *file.VideoFile { return r.primaryFile } -func (r *RelatedVideoFiles) load(fn func() ([]*file.VideoFile, error)) error { +func (r *RelatedVideoFiles) load(fn func() ([]*VideoFile, error)) error { if r.Loaded() { return nil } @@ -304,7 +308,7 @@ func (r *RelatedVideoFiles) load(fn func() ([]*file.VideoFile, error)) error { return nil } -func (r *RelatedVideoFiles) loadPrimary(fn func() (*file.VideoFile, error)) error { +func (r *RelatedVideoFiles) loadPrimary(fn func() (*VideoFile, error)) error { if r.PrimaryLoaded() { return nil } @@ -321,12 +325,12 @@ func (r *RelatedVideoFiles) loadPrimary(fn func() (*file.VideoFile, error)) erro } type RelatedFiles struct { - primaryFile file.File - files []file.File + primaryFile File + files []File primaryLoaded bool } -func NewRelatedFiles(files []file.File) RelatedFiles { +func NewRelatedFiles(files []File) RelatedFiles { ret := RelatedFiles{ files: files, primaryLoaded: true, @@ -350,7 +354,7 @@ func (r RelatedFiles) PrimaryLoaded() bool { } // List returns the related files. Panics if the relationship has not been loaded. -func (r RelatedFiles) List() []file.File { +func (r RelatedFiles) List() []File { if !r.Loaded() { panic("relationship has not been loaded") } @@ -359,7 +363,7 @@ func (r RelatedFiles) List() []file.File { } // Primary returns the primary file. Panics if the relationship has not been loaded. 
-func (r RelatedFiles) Primary() file.File { +func (r RelatedFiles) Primary() File { if !r.PrimaryLoaded() { panic("relationship has not been loaded") } @@ -367,7 +371,7 @@ func (r RelatedFiles) Primary() file.File { return r.primaryFile } -func (r *RelatedFiles) load(fn func() ([]file.File, error)) error { +func (r *RelatedFiles) load(fn func() ([]File, error)) error { if r.Loaded() { return nil } @@ -387,7 +391,7 @@ func (r *RelatedFiles) load(fn func() ([]file.File, error)) error { return nil } -func (r *RelatedFiles) loadPrimary(fn func() (file.File, error)) error { +func (r *RelatedFiles) loadPrimary(fn func() (File, error)) error { if r.PrimaryLoaded() { return nil } diff --git a/pkg/models/repository.go b/pkg/models/repository.go index fe0e21dc0..9ba4eead1 100644 --- a/pkg/models/repository.go +++ b/pkg/models/repository.go @@ -1,7 +1,6 @@ package models import ( - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/txn" ) @@ -14,8 +13,8 @@ type TxnManager interface { type Repository struct { TxnManager - File file.Store - Folder file.FolderStore + File FileReaderWriter + Folder FolderReaderWriter Gallery GalleryReaderWriter GalleryChapter GalleryChapterReaderWriter Image ImageReaderWriter diff --git a/pkg/models/repository_file.go b/pkg/models/repository_file.go new file mode 100644 index 000000000..8ea9709db --- /dev/null +++ b/pkg/models/repository_file.go @@ -0,0 +1,88 @@ +package models + +import ( + "context" + "io/fs" +) + +// FileGetter provides methods to get files by ID. +type FileGetter interface { + Find(ctx context.Context, id ...FileID) ([]File, error) +} + +// FileFinder provides methods to find files. 
+type FileFinder interface { + FileGetter + FindAllByPath(ctx context.Context, path string) ([]File, error) + FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error) + FindByPath(ctx context.Context, path string) (File, error) + FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error) + FindByZipFileID(ctx context.Context, zipFileID FileID) ([]File, error) + FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]File, error) +} + +// FileQueryer provides methods to query files. +type FileQueryer interface { + Query(ctx context.Context, options FileQueryOptions) (*FileQueryResult, error) +} + +// FileCounter provides methods to count files. +type FileCounter interface { + CountAllInPaths(ctx context.Context, p []string) (int, error) + CountByFolderID(ctx context.Context, folderID FolderID) (int, error) +} + +// FileCreator provides methods to create files. +type FileCreator interface { + Create(ctx context.Context, f File) error +} + +// FileUpdater provides methods to update files. +type FileUpdater interface { + Update(ctx context.Context, f File) error +} + +// FileDestroyer provides methods to destroy files. +type FileDestroyer interface { + Destroy(ctx context.Context, id FileID) error +} + +type FileFinderCreator interface { + FileFinder + FileCreator +} + +type FileFinderUpdater interface { + FileFinder + FileUpdater +} + +type FileFinderDestroyer interface { + FileFinder + FileDestroyer +} + +// FileReader provides all methods to read files. +type FileReader interface { + FileFinder + FileQueryer + FileCounter + + GetCaptions(ctx context.Context, fileID FileID) ([]*VideoCaption, error) + IsPrimary(ctx context.Context, fileID FileID) (bool, error) +} + +// FileWriter provides all methods to modify files. 
+type FileWriter interface { + FileCreator + FileUpdater + FileDestroyer + + UpdateCaptions(ctx context.Context, fileID FileID, captions []*VideoCaption) error +} + +// FileReaderWriter provides all file methods. +type FileReaderWriter interface { + FileReader + FileWriter +} diff --git a/pkg/models/repository_folder.go b/pkg/models/repository_folder.go new file mode 100644 index 000000000..c3f82f529 --- /dev/null +++ b/pkg/models/repository_folder.go @@ -0,0 +1,64 @@ +package models + +import "context" + +// FolderGetter provides methods to get folders by ID. +type FolderGetter interface { + Find(ctx context.Context, id FolderID) (*Folder, error) +} + +// FolderFinder provides methods to find folders. +type FolderFinder interface { + FolderGetter + FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error) + FindByPath(ctx context.Context, path string) (*Folder, error) + FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Folder, error) + FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error) +} + +type FolderCounter interface { + CountAllInPaths(ctx context.Context, p []string) (int, error) +} + +// FolderCreator provides methods to create folders. +type FolderCreator interface { + Create(ctx context.Context, f *Folder) error +} + +// FolderUpdater provides methods to update folders. +type FolderUpdater interface { + Update(ctx context.Context, f *Folder) error +} + +type FolderDestroyer interface { + Destroy(ctx context.Context, id FolderID) error +} + +type FolderFinderCreator interface { + FolderFinder + FolderCreator +} + +type FolderFinderDestroyer interface { + FolderFinder + FolderDestroyer +} + +// FolderReader provides all methods to read folders. +type FolderReader interface { + FolderFinder + FolderCounter +} + +// FolderWriter provides all methods to modify folders. 
+type FolderWriter interface { + FolderCreator + FolderUpdater + FolderDestroyer +} + +// FolderReaderWriter provides all folder methods. +type FolderReaderWriter interface { + FolderReader + FolderWriter +} diff --git a/pkg/models/repository_gallery.go b/pkg/models/repository_gallery.go new file mode 100644 index 000000000..64019886c --- /dev/null +++ b/pkg/models/repository_gallery.go @@ -0,0 +1,91 @@ +package models + +import "context" + +// GalleryGetter provides methods to get galleries by ID. +type GalleryGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Gallery, error) + Find(ctx context.Context, id int) (*Gallery, error) +} + +// GalleryFinder provides methods to find galleries. +type GalleryFinder interface { + GalleryGetter + FindByFingerprints(ctx context.Context, fp []Fingerprint) ([]*Gallery, error) + FindByChecksum(ctx context.Context, checksum string) ([]*Gallery, error) + FindByChecksums(ctx context.Context, checksums []string) ([]*Gallery, error) + FindByPath(ctx context.Context, path string) ([]*Gallery, error) + FindByFileID(ctx context.Context, fileID FileID) ([]*Gallery, error) + FindByFolderID(ctx context.Context, folderID FolderID) ([]*Gallery, error) + FindBySceneID(ctx context.Context, sceneID int) ([]*Gallery, error) + FindByImageID(ctx context.Context, imageID int) ([]*Gallery, error) + FindUserGalleryByTitle(ctx context.Context, title string) ([]*Gallery, error) +} + +// GalleryQueryer provides methods to query galleries. +type GalleryQueryer interface { + Query(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) ([]*Gallery, int, error) + QueryCount(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) (int, error) +} + +// GalleryCounter provides methods to count galleries. 
+type GalleryCounter interface { + Count(ctx context.Context) (int, error) + CountByFileID(ctx context.Context, fileID FileID) (int, error) +} + +// GalleryCreator provides methods to create galleries. +type GalleryCreator interface { + Create(ctx context.Context, newGallery *Gallery, fileIDs []FileID) error +} + +// GalleryUpdater provides methods to update galleries. +type GalleryUpdater interface { + Update(ctx context.Context, updatedGallery *Gallery) error + UpdatePartial(ctx context.Context, id int, updatedGallery GalleryPartial) (*Gallery, error) + UpdateImages(ctx context.Context, galleryID int, imageIDs []int) error +} + +// GalleryDestroyer provides methods to destroy galleries. +type GalleryDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type GalleryCreatorUpdater interface { + GalleryCreator + GalleryUpdater +} + +// GalleryReader provides all methods to read galleries. +type GalleryReader interface { + GalleryFinder + GalleryQueryer + GalleryCounter + + FileIDLoader + ImageIDLoader + SceneIDLoader + PerformerIDLoader + TagIDLoader + FileLoader + + All(ctx context.Context) ([]*Gallery, error) +} + +// GalleryWriter provides all methods to modify galleries. +type GalleryWriter interface { + GalleryCreator + GalleryUpdater + GalleryDestroyer + + AddFileID(ctx context.Context, id int, fileID FileID) error + AddImages(ctx context.Context, galleryID int, imageIDs ...int) error + RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error +} + +// GalleryReaderWriter provides all gallery methods. +type GalleryReaderWriter interface { + GalleryReader + GalleryWriter +} diff --git a/pkg/models/repository_gallery_chapter.go b/pkg/models/repository_gallery_chapter.go new file mode 100644 index 000000000..5a926a000 --- /dev/null +++ b/pkg/models/repository_gallery_chapter.go @@ -0,0 +1,55 @@ +package models + +import "context" + +// GalleryChapterGetter provides methods to get gallery chapters by ID. 
+type GalleryChapterGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*GalleryChapter, error) + Find(ctx context.Context, id int) (*GalleryChapter, error) +} + +// GalleryChapterFinder provides methods to find gallery chapters. +type GalleryChapterFinder interface { + GalleryChapterGetter + FindByGalleryID(ctx context.Context, galleryID int) ([]*GalleryChapter, error) +} + +// GalleryChapterCreator provides methods to create gallery chapters. +type GalleryChapterCreator interface { + Create(ctx context.Context, newGalleryChapter *GalleryChapter) error +} + +// GalleryChapterUpdater provides methods to update gallery chapters. +type GalleryChapterUpdater interface { + Update(ctx context.Context, updatedGalleryChapter *GalleryChapter) error + UpdatePartial(ctx context.Context, id int, updatedGalleryChapter GalleryChapterPartial) (*GalleryChapter, error) +} + +// GalleryChapterDestroyer provides methods to destroy gallery chapters. +type GalleryChapterDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type GalleryChapterCreatorUpdater interface { + GalleryChapterCreator + GalleryChapterUpdater +} + +// GalleryChapterReader provides all methods to read gallery chapters. +type GalleryChapterReader interface { + GalleryChapterFinder +} + +// GalleryChapterWriter provides all methods to modify gallery chapters. +type GalleryChapterWriter interface { + GalleryChapterCreator + GalleryChapterUpdater + GalleryChapterDestroyer +} + +// GalleryChapterReaderWriter provides all gallery chapter methods. +type GalleryChapterReaderWriter interface { + GalleryChapterReader + GalleryChapterWriter +} diff --git a/pkg/models/repository_image.go b/pkg/models/repository_image.go new file mode 100644 index 000000000..5b191b2ab --- /dev/null +++ b/pkg/models/repository_image.go @@ -0,0 +1,92 @@ +package models + +import "context" + +// ImageGetter provides methods to get images by ID. 
+type ImageGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Image, error) + Find(ctx context.Context, id int) (*Image, error) +} + +// ImageFinder provides methods to find images. +type ImageFinder interface { + ImageGetter + FindByFingerprints(ctx context.Context, fp []Fingerprint) ([]*Image, error) + FindByChecksum(ctx context.Context, checksum string) ([]*Image, error) + FindByFileID(ctx context.Context, fileID FileID) ([]*Image, error) + FindByFolderID(ctx context.Context, fileID FolderID) ([]*Image, error) + FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Image, error) + FindByGalleryID(ctx context.Context, galleryID int) ([]*Image, error) +} + +// ImageQueryer provides methods to query images. +type ImageQueryer interface { + Query(ctx context.Context, options ImageQueryOptions) (*ImageQueryResult, error) + QueryCount(ctx context.Context, imageFilter *ImageFilterType, findFilter *FindFilterType) (int, error) +} + +// ImageCounter provides methods to count images. +type ImageCounter interface { + Count(ctx context.Context) (int, error) + CountByFileID(ctx context.Context, fileID FileID) (int, error) + CountByGalleryID(ctx context.Context, galleryID int) (int, error) + OCountByPerformerID(ctx context.Context, performerID int) (int, error) +} + +// ImageCreator provides methods to create images. +type ImageCreator interface { + Create(ctx context.Context, newImage *ImageCreateInput) error +} + +// ImageUpdater provides methods to update images. +type ImageUpdater interface { + Update(ctx context.Context, updatedImage *Image) error + UpdatePartial(ctx context.Context, id int, partial ImagePartial) (*Image, error) + UpdatePerformers(ctx context.Context, imageID int, performerIDs []int) error + UpdateTags(ctx context.Context, imageID int, tagIDs []int) error +} + +// ImageDestroyer provides methods to destroy images. 
+type ImageDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type ImageCreatorUpdater interface { + ImageCreator + ImageUpdater +} + +// ImageReader provides all methods to read images. +type ImageReader interface { + ImageFinder + ImageQueryer + ImageCounter + + FileIDLoader + GalleryIDLoader + PerformerIDLoader + TagIDLoader + FileLoader + + All(ctx context.Context) ([]*Image, error) + Size(ctx context.Context) (float64, error) +} + +// ImageWriter provides all methods to modify images. +type ImageWriter interface { + ImageCreator + ImageUpdater + ImageDestroyer + + AddFileID(ctx context.Context, id int, fileID FileID) error + IncrementOCounter(ctx context.Context, id int) (int, error) + DecrementOCounter(ctx context.Context, id int) (int, error) + ResetOCounter(ctx context.Context, id int) (int, error) +} + +// ImageReaderWriter provides all image methods. +type ImageReaderWriter interface { + ImageReader + ImageWriter +} diff --git a/pkg/models/repository_movie.go b/pkg/models/repository_movie.go new file mode 100644 index 000000000..9234ea7a5 --- /dev/null +++ b/pkg/models/repository_movie.go @@ -0,0 +1,86 @@ +package models + +import "context" + +// MovieGetter provides methods to get movies by ID. +type MovieGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Movie, error) + Find(ctx context.Context, id int) (*Movie, error) +} + +// MovieFinder provides methods to find movies. +type MovieFinder interface { + MovieGetter + FindByPerformerID(ctx context.Context, performerID int) ([]*Movie, error) + FindByStudioID(ctx context.Context, studioID int) ([]*Movie, error) + FindByName(ctx context.Context, name string, nocase bool) (*Movie, error) + FindByNames(ctx context.Context, names []string, nocase bool) ([]*Movie, error) +} + +// MovieQueryer provides methods to query movies. 
+type MovieQueryer interface { + Query(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int, error) + QueryCount(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) (int, error) +} + +// MovieCounter provides methods to count movies. +type MovieCounter interface { + Count(ctx context.Context) (int, error) + CountByPerformerID(ctx context.Context, performerID int) (int, error) + CountByStudioID(ctx context.Context, studioID int) (int, error) +} + +// MovieCreator provides methods to create movies. +type MovieCreator interface { + Create(ctx context.Context, newMovie *Movie) error +} + +// MovieUpdater provides methods to update movies. +type MovieUpdater interface { + Update(ctx context.Context, updatedMovie *Movie) error + UpdatePartial(ctx context.Context, id int, updatedMovie MoviePartial) (*Movie, error) + UpdateFrontImage(ctx context.Context, movieID int, frontImage []byte) error + UpdateBackImage(ctx context.Context, movieID int, backImage []byte) error +} + +// MovieDestroyer provides methods to destroy movies. +type MovieDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type MovieCreatorUpdater interface { + MovieCreator + MovieUpdater +} + +type MovieFinderCreator interface { + MovieFinder + MovieCreator +} + +// MovieReader provides all methods to read movies. +type MovieReader interface { + MovieFinder + MovieQueryer + MovieCounter + + All(ctx context.Context) ([]*Movie, error) + GetFrontImage(ctx context.Context, movieID int) ([]byte, error) + HasFrontImage(ctx context.Context, movieID int) (bool, error) + GetBackImage(ctx context.Context, movieID int) ([]byte, error) + HasBackImage(ctx context.Context, movieID int) (bool, error) +} + +// MovieWriter provides all methods to modify movies. +type MovieWriter interface { + MovieCreator + MovieUpdater + MovieDestroyer +} + +// MovieReaderWriter provides all movie methods. 
+type MovieReaderWriter interface { + MovieReader + MovieWriter +} diff --git a/pkg/models/repository_performer.go b/pkg/models/repository_performer.go new file mode 100644 index 000000000..aac7e0488 --- /dev/null +++ b/pkg/models/repository_performer.go @@ -0,0 +1,98 @@ +package models + +import "context" + +// PerformerGetter provides methods to get performers by ID. +type PerformerGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Performer, error) + Find(ctx context.Context, id int) (*Performer, error) +} + +// PerformerFinder provides methods to find performers. +type PerformerFinder interface { + PerformerGetter + FindBySceneID(ctx context.Context, sceneID int) ([]*Performer, error) + FindByImageID(ctx context.Context, imageID int) ([]*Performer, error) + FindByGalleryID(ctx context.Context, galleryID int) ([]*Performer, error) + FindByStashID(ctx context.Context, stashID StashID) ([]*Performer, error) + FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Performer, error) + FindByNames(ctx context.Context, names []string, nocase bool) ([]*Performer, error) +} + +// PerformerQueryer provides methods to query performers. +type PerformerQueryer interface { + Query(ctx context.Context, performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int, error) + QueryCount(ctx context.Context, performerFilter *PerformerFilterType, findFilter *FindFilterType) (int, error) +} + +type PerformerAutoTagQueryer interface { + PerformerQueryer + AliasLoader + + // TODO - this interface is temporary until the filter schema can fully + // support the query needed + QueryForAutoTag(ctx context.Context, words []string) ([]*Performer, error) +} + +// PerformerCounter provides methods to count performers. 
+type PerformerCounter interface { + Count(ctx context.Context) (int, error) + CountByTagID(ctx context.Context, tagID int) (int, error) +} + +// PerformerCreator provides methods to create performers. +type PerformerCreator interface { + Create(ctx context.Context, newPerformer *Performer) error +} + +// PerformerUpdater provides methods to update performers. +type PerformerUpdater interface { + Update(ctx context.Context, updatedPerformer *Performer) error + UpdatePartial(ctx context.Context, id int, updatedPerformer PerformerPartial) (*Performer, error) + UpdateImage(ctx context.Context, performerID int, image []byte) error +} + +// PerformerDestroyer provides methods to destroy performers. +type PerformerDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type PerformerFinderCreator interface { + PerformerFinder + PerformerCreator +} + +type PerformerCreatorUpdater interface { + PerformerCreator + PerformerUpdater +} + +// PerformerReader provides all methods to read performers. +type PerformerReader interface { + PerformerFinder + PerformerQueryer + PerformerAutoTagQueryer + PerformerCounter + + AliasLoader + StashIDLoader + TagIDLoader + + All(ctx context.Context) ([]*Performer, error) + GetImage(ctx context.Context, performerID int) ([]byte, error) + HasImage(ctx context.Context, performerID int) (bool, error) +} + +// PerformerWriter provides all methods to modify performers. +type PerformerWriter interface { + PerformerCreator + PerformerUpdater + PerformerDestroyer +} + +// PerformerReaderWriter provides all performer methods. +type PerformerReaderWriter interface { + PerformerReader + PerformerWriter +} diff --git a/pkg/models/repository_scene.go b/pkg/models/repository_scene.go new file mode 100644 index 000000000..fdd839ed6 --- /dev/null +++ b/pkg/models/repository_scene.go @@ -0,0 +1,115 @@ +package models + +import "context" + +// SceneGetter provides methods to get scenes by ID. 
+type SceneGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Scene, error) + Find(ctx context.Context, id int) (*Scene, error) +} + +// SceneFinder provides methods to find scenes. +type SceneFinder interface { + SceneGetter + FindByFingerprints(ctx context.Context, fp []Fingerprint) ([]*Scene, error) + FindByChecksum(ctx context.Context, checksum string) ([]*Scene, error) + FindByOSHash(ctx context.Context, oshash string) ([]*Scene, error) + FindByPath(ctx context.Context, path string) ([]*Scene, error) + FindByFileID(ctx context.Context, fileID FileID) ([]*Scene, error) + FindByPrimaryFileID(ctx context.Context, fileID FileID) ([]*Scene, error) + FindByPerformerID(ctx context.Context, performerID int) ([]*Scene, error) + FindByGalleryID(ctx context.Context, galleryID int) ([]*Scene, error) + FindByMovieID(ctx context.Context, movieID int) ([]*Scene, error) + FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*Scene, error) +} + +// SceneQueryer provides methods to query scenes. +type SceneQueryer interface { + Query(ctx context.Context, options SceneQueryOptions) (*SceneQueryResult, error) + QueryCount(ctx context.Context, sceneFilter *SceneFilterType, findFilter *FindFilterType) (int, error) +} + +// SceneCounter provides methods to count scenes. 
+type SceneCounter interface { + Count(ctx context.Context) (int, error) + CountByPerformerID(ctx context.Context, performerID int) (int, error) + CountByMovieID(ctx context.Context, movieID int) (int, error) + CountByFileID(ctx context.Context, fileID FileID) (int, error) + CountByStudioID(ctx context.Context, studioID int) (int, error) + CountByTagID(ctx context.Context, tagID int) (int, error) + CountMissingChecksum(ctx context.Context) (int, error) + CountMissingOSHash(ctx context.Context) (int, error) + OCount(ctx context.Context) (int, error) + OCountByPerformerID(ctx context.Context, performerID int) (int, error) + PlayCount(ctx context.Context) (int, error) + UniqueScenePlayCount(ctx context.Context) (int, error) +} + +// SceneCreator provides methods to create scenes. +type SceneCreator interface { + Create(ctx context.Context, newScene *Scene, fileIDs []FileID) error +} + +// SceneUpdater provides methods to update scenes. +type SceneUpdater interface { + Update(ctx context.Context, updatedScene *Scene) error + UpdatePartial(ctx context.Context, id int, updatedScene ScenePartial) (*Scene, error) + UpdateCover(ctx context.Context, sceneID int, cover []byte) error +} + +// SceneDestroyer provides methods to destroy scenes. +type SceneDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type SceneCreatorUpdater interface { + SceneCreator + SceneUpdater +} + +// SceneReader provides all methods to read scenes. 
+type SceneReader interface { + SceneFinder + SceneQueryer + SceneCounter + + URLLoader + FileIDLoader + GalleryIDLoader + PerformerIDLoader + TagIDLoader + SceneMovieLoader + StashIDLoader + VideoFileLoader + + All(ctx context.Context) ([]*Scene, error) + Wall(ctx context.Context, q *string) ([]*Scene, error) + Size(ctx context.Context) (float64, error) + Duration(ctx context.Context) (float64, error) + PlayDuration(ctx context.Context) (float64, error) + GetCover(ctx context.Context, sceneID int) ([]byte, error) + HasCover(ctx context.Context, sceneID int) (bool, error) +} + +// SceneWriter provides all methods to modify scenes. +type SceneWriter interface { + SceneCreator + SceneUpdater + SceneDestroyer + + AddFileID(ctx context.Context, id int, fileID FileID) error + AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error + AssignFiles(ctx context.Context, sceneID int, fileIDs []FileID) error + IncrementOCounter(ctx context.Context, id int) (int, error) + DecrementOCounter(ctx context.Context, id int) (int, error) + ResetOCounter(ctx context.Context, id int) (int, error) + SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) + IncrementWatchCount(ctx context.Context, sceneID int) (int, error) +} + +// SceneReaderWriter provides all scene methods. +type SceneReaderWriter interface { + SceneReader + SceneWriter +} diff --git a/pkg/models/repository_scene_marker.go b/pkg/models/repository_scene_marker.go new file mode 100644 index 000000000..d35ec762f --- /dev/null +++ b/pkg/models/repository_scene_marker.go @@ -0,0 +1,76 @@ +package models + +import "context" + +// SceneMarkerGetter provides methods to get scene markers by ID. 
+type SceneMarkerGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*SceneMarker, error) + Find(ctx context.Context, id int) (*SceneMarker, error) +} + +// SceneMarkerFinder provides methods to find scene markers. +type SceneMarkerFinder interface { + SceneMarkerGetter + FindBySceneID(ctx context.Context, sceneID int) ([]*SceneMarker, error) +} + +// SceneMarkerQueryer provides methods to query scene markers. +type SceneMarkerQueryer interface { + Query(ctx context.Context, sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) ([]*SceneMarker, int, error) + QueryCount(ctx context.Context, sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) (int, error) +} + +// SceneMarkerCounter provides methods to count scene markers. +type SceneMarkerCounter interface { + Count(ctx context.Context) (int, error) + CountByTagID(ctx context.Context, tagID int) (int, error) +} + +// SceneMarkerCreator provides methods to create scene markers. +type SceneMarkerCreator interface { + Create(ctx context.Context, newSceneMarker *SceneMarker) error +} + +// SceneMarkerUpdater provides methods to update scene markers. +type SceneMarkerUpdater interface { + Update(ctx context.Context, updatedSceneMarker *SceneMarker) error + UpdatePartial(ctx context.Context, id int, updatedSceneMarker SceneMarkerPartial) (*SceneMarker, error) + UpdateTags(ctx context.Context, markerID int, tagIDs []int) error +} + +// SceneMarkerDestroyer provides methods to destroy scene markers. +type SceneMarkerDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type SceneMarkerCreatorUpdater interface { + SceneMarkerCreator + SceneMarkerUpdater +} + +// SceneMarkerReader provides all methods to read scene markers. 
+type SceneMarkerReader interface { + SceneMarkerFinder + SceneMarkerQueryer + SceneMarkerCounter + + TagIDLoader + + All(ctx context.Context) ([]*SceneMarker, error) + Wall(ctx context.Context, q *string) ([]*SceneMarker, error) + GetMarkerStrings(ctx context.Context, q *string, sort *string) ([]*MarkerStringsResultType, error) +} + +// SceneMarkerWriter provides all methods to modify scene markers. +type SceneMarkerWriter interface { + SceneMarkerCreator + SceneMarkerUpdater + SceneMarkerDestroyer +} + +// SceneMarkerReaderWriter provides all scene marker methods. +type SceneMarkerReaderWriter interface { + SceneMarkerReader + SceneMarkerWriter +} diff --git a/pkg/models/repository_studio.go b/pkg/models/repository_studio.go new file mode 100644 index 000000000..272bf8fed --- /dev/null +++ b/pkg/models/repository_studio.go @@ -0,0 +1,94 @@ +package models + +import "context" + +// StudioGetter provides methods to get studios by ID. +type StudioGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Studio, error) + Find(ctx context.Context, id int) (*Studio, error) +} + +// StudioFinder provides methods to find studios. +type StudioFinder interface { + StudioGetter + FindChildren(ctx context.Context, id int) ([]*Studio, error) + FindBySceneID(ctx context.Context, sceneID int) (*Studio, error) + FindByStashID(ctx context.Context, stashID StashID) ([]*Studio, error) + FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Studio, error) + FindByName(ctx context.Context, name string, nocase bool) (*Studio, error) +} + +// StudioQueryer provides methods to query studios. 
+type StudioQueryer interface { + Query(ctx context.Context, studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int, error) +} + +type StudioAutoTagQueryer interface { + StudioQueryer + AliasLoader + + // TODO - this interface is temporary until the filter schema can fully + // support the query needed + QueryForAutoTag(ctx context.Context, words []string) ([]*Studio, error) +} + +// StudioCounter provides methods to count studios. +type StudioCounter interface { + Count(ctx context.Context) (int, error) +} + +// StudioCreator provides methods to create studios. +type StudioCreator interface { + Create(ctx context.Context, newStudio *Studio) error +} + +// StudioUpdater provides methods to update studios. +type StudioUpdater interface { + Update(ctx context.Context, updatedStudio *Studio) error + UpdatePartial(ctx context.Context, updatedStudio StudioPartial) (*Studio, error) + UpdateImage(ctx context.Context, studioID int, image []byte) error +} + +// StudioDestroyer provides methods to destroy studios. +type StudioDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type StudioFinderCreator interface { + StudioFinder + StudioCreator +} + +type StudioCreatorUpdater interface { + StudioCreator + StudioUpdater +} + +// StudioReader provides all methods to read studios. +type StudioReader interface { + StudioFinder + StudioQueryer + StudioAutoTagQueryer + StudioCounter + + AliasLoader + StashIDLoader + + All(ctx context.Context) ([]*Studio, error) + GetImage(ctx context.Context, studioID int) ([]byte, error) + HasImage(ctx context.Context, studioID int) (bool, error) +} + +// StudioWriter provides all methods to modify studios. +type StudioWriter interface { + StudioCreator + StudioUpdater + StudioDestroyer +} + +// StudioReaderWriter provides all studio methods. 
+type StudioReaderWriter interface { + StudioReader + StudioWriter +} diff --git a/pkg/models/repository_tag.go b/pkg/models/repository_tag.go new file mode 100644 index 000000000..6351c2bdf --- /dev/null +++ b/pkg/models/repository_tag.go @@ -0,0 +1,104 @@ +package models + +import "context" + +// TagGetter provides methods to get tags by ID. +type TagGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Tag, error) + Find(ctx context.Context, id int) (*Tag, error) +} + +// TagFinder provides methods to find tags. +type TagFinder interface { + TagGetter + FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) + FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) + FindByParentTagID(ctx context.Context, parentID int) ([]*Tag, error) + FindByChildTagID(ctx context.Context, childID int) ([]*Tag, error) + FindBySceneID(ctx context.Context, sceneID int) ([]*Tag, error) + FindByImageID(ctx context.Context, imageID int) ([]*Tag, error) + FindByGalleryID(ctx context.Context, galleryID int) ([]*Tag, error) + FindByPerformerID(ctx context.Context, performerID int) ([]*Tag, error) + FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*Tag, error) + FindByName(ctx context.Context, name string, nocase bool) (*Tag, error) + FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error) +} + +// TagQueryer provides methods to query tags. +type TagQueryer interface { + Query(ctx context.Context, tagFilter *TagFilterType, findFilter *FindFilterType) ([]*Tag, int, error) +} + +type TagAutoTagQueryer interface { + TagQueryer + AliasLoader + + // TODO - this interface is temporary until the filter schema can fully + // support the query needed + QueryForAutoTag(ctx context.Context, words []string) ([]*Tag, error) +} + +// TagCounter provides methods to count tags. 
+type TagCounter interface { + Count(ctx context.Context) (int, error) +} + +// TagCreator provides methods to create tags. +type TagCreator interface { + Create(ctx context.Context, newTag *Tag) error +} + +// TagUpdater provides methods to update tags. +type TagUpdater interface { + Update(ctx context.Context, updatedTag *Tag) error + UpdatePartial(ctx context.Context, id int, updateTag TagPartial) (*Tag, error) + UpdateAliases(ctx context.Context, tagID int, aliases []string) error + UpdateImage(ctx context.Context, tagID int, image []byte) error + UpdateParentTags(ctx context.Context, tagID int, parentIDs []int) error + UpdateChildTags(ctx context.Context, tagID int, childIDs []int) error +} + +// TagDestroyer provides methods to destroy tags. +type TagDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type TagFinderCreator interface { + TagFinder + TagCreator +} + +type TagCreatorUpdater interface { + TagCreator + TagUpdater +} + +// TagReader provides all methods to read tags. +type TagReader interface { + TagFinder + TagQueryer + TagAutoTagQueryer + TagCounter + + AliasLoader + + All(ctx context.Context) ([]*Tag, error) + GetImage(ctx context.Context, tagID int) ([]byte, error) + HasImage(ctx context.Context, tagID int) (bool, error) +} + +// TagWriter provides all methods to modify tags. +type TagWriter interface { + TagCreator + TagUpdater + TagDestroyer + + Merge(ctx context.Context, source []int, destination int) error +} + +// TagReaderWriter provides all tag methods. 
+type TagReaderWriter interface { + TagReader + TagWriter +} diff --git a/pkg/models/scene.go b/pkg/models/scene.go index 8f8d2eaf4..e66576f35 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -1,10 +1,6 @@ package models -import ( - "context" - - "github.com/stashapp/stash/pkg/file" -) +import "context" type PHashDuplicationCriterionInput struct { Duplicated *bool `json:"duplicated"` @@ -112,7 +108,7 @@ type SceneQueryResult struct { TotalDuration float64 TotalSize float64 - finder SceneFinder + getter SceneGetter scenes []*Scene resolveErr error } @@ -129,83 +125,16 @@ type ScenesDestroyInput struct { DeleteGenerated *bool `json:"delete_generated"` } -func NewSceneQueryResult(finder SceneFinder) *SceneQueryResult { +func NewSceneQueryResult(getter SceneGetter) *SceneQueryResult { return &SceneQueryResult{ - finder: finder, + getter: getter, } } func (r *SceneQueryResult) Resolve(ctx context.Context) ([]*Scene, error) { // cache results if r.scenes == nil && r.resolveErr == nil { - r.scenes, r.resolveErr = r.finder.FindMany(ctx, r.IDs) + r.scenes, r.resolveErr = r.getter.FindMany(ctx, r.IDs) } return r.scenes, r.resolveErr } - -type SceneFinder interface { - // TODO - rename this to Find and remove existing method - FindMany(ctx context.Context, ids []int) ([]*Scene, error) -} - -type SceneReader interface { - SceneFinder - // TODO - remove this in another PR - Find(ctx context.Context, id int) (*Scene, error) - FindByChecksum(ctx context.Context, checksum string) ([]*Scene, error) - FindByOSHash(ctx context.Context, oshash string) ([]*Scene, error) - FindByPath(ctx context.Context, path string) ([]*Scene, error) - FindByPerformerID(ctx context.Context, performerID int) ([]*Scene, error) - FindByGalleryID(ctx context.Context, performerID int) ([]*Scene, error) - FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*Scene, error) - - URLLoader - GalleryIDLoader - PerformerIDLoader - TagIDLoader - SceneMovieLoader - StashIDLoader 
- VideoFileLoader - - CountByPerformerID(ctx context.Context, performerID int) (int, error) - OCountByPerformerID(ctx context.Context, performerID int) (int, error) - OCount(ctx context.Context) (int, error) - // FindByStudioID(studioID int) ([]*Scene, error) - FindByMovieID(ctx context.Context, movieID int) ([]*Scene, error) - CountByMovieID(ctx context.Context, movieID int) (int, error) - Count(ctx context.Context) (int, error) - PlayCount(ctx context.Context) (int, error) - UniqueScenePlayCount(ctx context.Context) (int, error) - Size(ctx context.Context) (float64, error) - Duration(ctx context.Context) (float64, error) - PlayDuration(ctx context.Context) (float64, error) - // SizeCount() (string, error) - CountByStudioID(ctx context.Context, studioID int) (int, error) - CountByTagID(ctx context.Context, tagID int) (int, error) - CountMissingChecksum(ctx context.Context) (int, error) - CountMissingOSHash(ctx context.Context) (int, error) - Wall(ctx context.Context, q *string) ([]*Scene, error) - All(ctx context.Context) ([]*Scene, error) - Query(ctx context.Context, options SceneQueryOptions) (*SceneQueryResult, error) - QueryCount(ctx context.Context, sceneFilter *SceneFilterType, findFilter *FindFilterType) (int, error) - GetCover(ctx context.Context, sceneID int) ([]byte, error) - HasCover(ctx context.Context, sceneID int) (bool, error) -} - -type SceneWriter interface { - Create(ctx context.Context, newScene *Scene, fileIDs []file.ID) error - Update(ctx context.Context, updatedScene *Scene) error - UpdatePartial(ctx context.Context, id int, updatedScene ScenePartial) (*Scene, error) - IncrementOCounter(ctx context.Context, id int) (int, error) - DecrementOCounter(ctx context.Context, id int) (int, error) - ResetOCounter(ctx context.Context, id int) (int, error) - SaveActivity(ctx context.Context, id int, resumeTime *float64, playDuration *float64) (bool, error) - IncrementWatchCount(ctx context.Context, id int) (int, error) - Destroy(ctx context.Context, id 
int) error - UpdateCover(ctx context.Context, sceneID int, cover []byte) error -} - -type SceneReaderWriter interface { - SceneReader - SceneWriter -} diff --git a/pkg/models/scene_marker.go b/pkg/models/scene_marker.go index 673a547e9..4a10c0e21 100644 --- a/pkg/models/scene_marker.go +++ b/pkg/models/scene_marker.go @@ -1,7 +1,5 @@ package models -import "context" - type SceneMarkerFilterType struct { // Filter to only include scene markers with this tag TagID *string `json:"tag_id"` @@ -28,30 +26,3 @@ type MarkerStringsResultType struct { ID string `json:"id"` Title string `json:"title"` } - -type SceneMarkerReader interface { - Find(ctx context.Context, id int) (*SceneMarker, error) - FindMany(ctx context.Context, ids []int) ([]*SceneMarker, error) - FindBySceneID(ctx context.Context, sceneID int) ([]*SceneMarker, error) - CountByTagID(ctx context.Context, tagID int) (int, error) - GetMarkerStrings(ctx context.Context, q *string, sort *string) ([]*MarkerStringsResultType, error) - Wall(ctx context.Context, q *string) ([]*SceneMarker, error) - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*SceneMarker, error) - Query(ctx context.Context, sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) ([]*SceneMarker, int, error) - QueryCount(ctx context.Context, sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) (int, error) - GetTagIDs(ctx context.Context, imageID int) ([]int, error) -} - -type SceneMarkerWriter interface { - Create(ctx context.Context, newSceneMarker *SceneMarker) error - Update(ctx context.Context, updatedSceneMarker *SceneMarker) error - UpdatePartial(ctx context.Context, id int, updatedSceneMarker SceneMarkerPartial) (*SceneMarker, error) - Destroy(ctx context.Context, id int) error - UpdateTags(ctx context.Context, markerID int, tagIDs []int) error -} - -type SceneMarkerReaderWriter interface { - SceneMarkerReader - SceneMarkerWriter -} diff --git a/pkg/models/studio.go 
b/pkg/models/studio.go index f98173d2a..0973df4e3 100644 --- a/pkg/models/studio.go +++ b/pkg/models/studio.go @@ -1,7 +1,5 @@ package models -import "context" - type StudioFilterType struct { And *StudioFilterType `json:"AND"` Or *StudioFilterType `json:"OR"` @@ -37,39 +35,3 @@ type StudioFilterType struct { // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` } - -type StudioFinder interface { - FindMany(ctx context.Context, ids []int) ([]*Studio, error) -} - -type StudioReader interface { - Find(ctx context.Context, id int) (*Studio, error) - StudioFinder - FindChildren(ctx context.Context, id int) ([]*Studio, error) - FindByName(ctx context.Context, name string, nocase bool) (*Studio, error) - FindByStashID(ctx context.Context, stashID StashID) ([]*Studio, error) - FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Studio, error) - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*Studio, error) - // TODO - this interface is temporary until the filter schema can fully - // support the query needed - QueryForAutoTag(ctx context.Context, words []string) ([]*Studio, error) - Query(ctx context.Context, studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int, error) - GetImage(ctx context.Context, studioID int) ([]byte, error) - HasImage(ctx context.Context, studioID int) (bool, error) - AliasLoader - StashIDLoader -} - -type StudioWriter interface { - Create(ctx context.Context, newStudio *Studio) error - UpdatePartial(ctx context.Context, input StudioPartial) (*Studio, error) - Update(ctx context.Context, updatedStudio *Studio) error - Destroy(ctx context.Context, id int) error - UpdateImage(ctx context.Context, studioID int, image []byte) error -} - -type StudioReaderWriter interface { - StudioReader - StudioWriter -} diff --git a/pkg/models/tag.go b/pkg/models/tag.go index 0ddcc1d86..b2cff5a0e 100644 --- a/pkg/models/tag.go +++ b/pkg/models/tag.go @@ -1,7 +1,5 
@@ package models -import "context" - type TagFilterType struct { And *TagFilterType `json:"AND"` Or *TagFilterType `json:"OR"` @@ -39,49 +37,3 @@ type TagFilterType struct { // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` } - -type TagFinder interface { - FindMany(ctx context.Context, ids []int) ([]*Tag, error) -} - -type TagReader interface { - Find(ctx context.Context, id int) (*Tag, error) - TagFinder - FindBySceneID(ctx context.Context, sceneID int) ([]*Tag, error) - FindByPerformerID(ctx context.Context, performerID int) ([]*Tag, error) - FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*Tag, error) - FindByImageID(ctx context.Context, imageID int) ([]*Tag, error) - FindByGalleryID(ctx context.Context, galleryID int) ([]*Tag, error) - FindByName(ctx context.Context, name string, nocase bool) (*Tag, error) - FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error) - FindByParentTagID(ctx context.Context, parentID int) ([]*Tag, error) - FindByChildTagID(ctx context.Context, childID int) ([]*Tag, error) - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*Tag, error) - // TODO - this interface is temporary until the filter schema can fully - // support the query needed - QueryForAutoTag(ctx context.Context, words []string) ([]*Tag, error) - Query(ctx context.Context, tagFilter *TagFilterType, findFilter *FindFilterType) ([]*Tag, int, error) - GetImage(ctx context.Context, tagID int) ([]byte, error) - HasImage(ctx context.Context, tagID int) (bool, error) - GetAliases(ctx context.Context, tagID int) ([]string, error) - FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) - FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) -} - -type TagWriter interface { - Create(ctx context.Context, newTag *Tag) error - UpdatePartial(ctx context.Context, id int, updateTag TagPartial) (*Tag, error) - Update(ctx 
context.Context, updatedTag *Tag) error - Destroy(ctx context.Context, id int) error - UpdateImage(ctx context.Context, tagID int, image []byte) error - UpdateAliases(ctx context.Context, tagID int, aliases []string) error - Merge(ctx context.Context, source []int, destination int) error - UpdateParentTags(ctx context.Context, tagID int, parentIDs []int) error - UpdateChildTags(ctx context.Context, tagID int, parentIDs []int) error -} - -type TagReaderWriter interface { - TagReader - TagWriter -} diff --git a/pkg/movie/export.go b/pkg/movie/export.go index 09963ce5e..5a6c49aa3 100644 --- a/pkg/movie/export.go +++ b/pkg/movie/export.go @@ -8,7 +8,6 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/studio" "github.com/stashapp/stash/pkg/utils" ) @@ -18,7 +17,7 @@ type ImageGetter interface { } // ToJSON converts a Movie into its JSON equivalent. -func ToJSON(ctx context.Context, reader ImageGetter, studioReader studio.Finder, movie *models.Movie) (*jsonschema.Movie, error) { +func ToJSON(ctx context.Context, reader ImageGetter, studioReader models.StudioGetter, movie *models.Movie) (*jsonschema.Movie, error) { newMovieJSON := jsonschema.Movie{ Name: movie.Name, Aliases: movie.Aliases, diff --git a/pkg/movie/import.go b/pkg/movie/import.go index 75e08b0bb..e231031e8 100644 --- a/pkg/movie/import.go +++ b/pkg/movie/import.go @@ -6,24 +6,17 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/studio" "github.com/stashapp/stash/pkg/utils" ) -type ImageUpdater interface { - UpdateFrontImage(ctx context.Context, movieID int, frontImage []byte) error - UpdateBackImage(ctx context.Context, movieID int, backImage []byte) error -} - -type NameFinderCreatorUpdater interface { - NameFinderCreator - Update(ctx context.Context, updatedMovie *models.Movie) error - ImageUpdater 
+type ImporterReaderWriter interface { + models.MovieCreatorUpdater + FindByName(ctx context.Context, name string, nocase bool) (*models.Movie, error) } type Importer struct { - ReaderWriter NameFinderCreatorUpdater - StudioWriter studio.NameFinderCreator + ReaderWriter ImporterReaderWriter + StudioWriter models.StudioFinderCreator Input jsonschema.Movie MissingRefBehaviour models.ImportMissingRefEnum diff --git a/pkg/movie/query.go b/pkg/movie/query.go index 3736f9437..3fac932a0 100644 --- a/pkg/movie/query.go +++ b/pkg/movie/query.go @@ -7,15 +7,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) ([]*models.Movie, int, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) (int, error) -} - -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r models.MovieQueryer, id int, depth *int) (int, error) { filter := &models.MovieFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/movie/update.go b/pkg/movie/update.go deleted file mode 100644 index 4111215e2..000000000 --- a/pkg/movie/update.go +++ /dev/null @@ -1,12 +0,0 @@ -package movie - -import ( - "context" - - "github.com/stashapp/stash/pkg/models" -) - -type NameFinderCreator interface { - FindByName(ctx context.Context, name string, nocase bool) (*models.Movie, error) - Create(ctx context.Context, newMovie *models.Movie) error -} diff --git a/pkg/performer/import.go b/pkg/performer/import.go index f84030a6e..1c3c075a4 100644 --- a/pkg/performer/import.go +++ b/pkg/performer/import.go @@ -10,19 +10,17 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" 
"github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) -type NameFinderCreatorUpdater interface { - NameFinderCreator - Update(ctx context.Context, updatedPerformer *models.Performer) error - UpdateImage(ctx context.Context, performerID int, image []byte) error +type ImporterReaderWriter interface { + models.PerformerCreatorUpdater + models.PerformerQueryer } type Importer struct { - ReaderWriter NameFinderCreatorUpdater - TagWriter tag.NameFinderCreator + ReaderWriter ImporterReaderWriter + TagWriter models.TagFinderCreator Input jsonschema.Performer MissingRefBehaviour models.ImportMissingRefEnum @@ -65,7 +63,7 @@ func (i *Importer) populateTags(ctx context.Context) error { return nil } -func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { +func importTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { tags, err := tagWriter.FindByNames(ctx, names, false) if err != nil { return nil, err @@ -100,7 +98,7 @@ func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []st return tags, nil } -func createTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string) ([]*models.Tag, error) { +func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string) ([]*models.Tag, error) { var ret []*models.Tag for _, name := range names { newTag := models.NewTag(name) diff --git a/pkg/performer/query.go b/pkg/performer/query.go index b8df03a1c..d85fa5148 100644 --- a/pkg/performer/query.go +++ b/pkg/performer/query.go @@ -7,15 +7,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) 
-} - -type CountQueryer interface { - QueryCount(ctx context.Context, galleryFilter *models.PerformerFilterType, findFilter *models.FindFilterType) (int, error) -} - -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r models.PerformerQueryer, id int, depth *int) (int, error) { filter := &models.PerformerFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -27,7 +19,7 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (i return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r models.PerformerQueryer, id int, depth *int) (int, error) { filter := &models.PerformerFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -39,7 +31,7 @@ func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, return r.QueryCount(ctx, filter, nil) } -func CountByAppearsWith(ctx context.Context, r CountQueryer, id int) (int, error) { +func CountByAppearsWith(ctx context.Context, r models.PerformerQueryer, id int) (int, error) { filter := &models.PerformerFilterType{ Performers: &models.MultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/performer/update.go b/pkg/performer/update.go deleted file mode 100644 index d846eb6ce..000000000 --- a/pkg/performer/update.go +++ /dev/null @@ -1,13 +0,0 @@ -package performer - -import ( - "context" - - "github.com/stashapp/stash/pkg/models" -) - -type NameFinderCreator interface { - FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Performer, error) - Query(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) - Create(ctx context.Context, newPerformer *models.Performer) error -} diff --git 
a/pkg/scene/create.go b/pkg/scene/create.go index c2345d2ef..428c636a7 100644 --- a/pkg/scene/create.go +++ b/pkg/scene/create.go @@ -6,12 +6,11 @@ import ( "fmt" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" ) -func (s *Service) Create(ctx context.Context, input *models.Scene, fileIDs []file.ID, coverImage []byte) (*models.Scene, error) { +func (s *Service) Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error) { // title must be set if no files are provided if input.Title == "" && len(fileIDs) == 0 { return nil, errors.New("title must be set if scene has no files") diff --git a/pkg/scene/delete.go b/pkg/scene/delete.go index c7e8fdcc4..7426c390b 100644 --- a/pkg/scene/delete.go +++ b/pkg/scene/delete.go @@ -105,15 +105,6 @@ func (d *FileDeleter) MarkMarkerFiles(scene *models.Scene, seconds int) error { return d.Files(files) } -type Destroyer interface { - Destroy(ctx context.Context, id int) error -} - -type MarkerDestroyer interface { - FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) - Destroy(ctx context.Context, id int) error -} - // Destroy deletes a scene and its associated relationships from the // database. func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error { @@ -190,7 +181,7 @@ func (s *Service) deleteFiles(ctx context.Context, scene *models.Scene, fileDele // DestroyMarker deletes the scene marker from the database and returns a // function that removes the generated files, to be executed after the // transaction is successfully committed. 
-func DestroyMarker(ctx context.Context, scene *models.Scene, sceneMarker *models.SceneMarker, qb MarkerDestroyer, fileDeleter *FileDeleter) error { +func DestroyMarker(ctx context.Context, scene *models.Scene, sceneMarker *models.SceneMarker, qb models.SceneMarkerDestroyer, fileDeleter *FileDeleter) error { if err := qb.Destroy(ctx, sceneMarker.ID); err != nil { return err } diff --git a/pkg/scene/export.go b/pkg/scene/export.go index 5fa3b8b2d..90419e2c4 100644 --- a/pkg/scene/export.go +++ b/pkg/scene/export.go @@ -11,8 +11,6 @@ import ( "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/sliceutil/intslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) @@ -20,18 +18,10 @@ type CoverGetter interface { GetCover(ctx context.Context, sceneID int) ([]byte, error) } -type MarkerTagFinder interface { - tag.Finder - TagFinder - FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*models.Tag, error) -} - -type MarkerFinder interface { - FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) -} - type TagFinder interface { + models.TagGetter FindBySceneID(ctx context.Context, sceneID int) ([]*models.Tag, error) + FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*models.Tag, error) } // ToBasicJSON converts a scene object into its JSON object equivalent. It @@ -88,7 +78,7 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) ( // GetStudioName returns the name of the provided scene's studio. It returns an // empty string if there is no studio assigned to the scene. 
-func GetStudioName(ctx context.Context, reader studio.Finder, scene *models.Scene) (string, error) { +func GetStudioName(ctx context.Context, reader models.StudioGetter, scene *models.Scene) (string, error) { if scene.StudioID != nil { studio, err := reader.Find(ctx, *scene.StudioID) if err != nil { @@ -126,7 +116,7 @@ func getTagNames(tags []*models.Tag) []string { } // GetDependentTagIDs returns a slice of unique tag IDs that this scene references. -func GetDependentTagIDs(ctx context.Context, tags MarkerTagFinder, markerReader MarkerFinder, scene *models.Scene) ([]int, error) { +func GetDependentTagIDs(ctx context.Context, tags TagFinder, markerReader models.SceneMarkerFinder, scene *models.Scene) ([]int, error) { var ret []int t, err := tags.FindBySceneID(ctx, scene.ID) @@ -158,13 +148,9 @@ func GetDependentTagIDs(ctx context.Context, tags MarkerTagFinder, markerReader return ret, nil } -type MovieFinder interface { - Find(ctx context.Context, id int) (*models.Movie, error) -} - // GetSceneMoviesJSON returns a slice of SceneMovie JSON representation objects // corresponding to the provided scene's scene movie relationships. -func GetSceneMoviesJSON(ctx context.Context, movieReader MovieFinder, scene *models.Scene) ([]jsonschema.SceneMovie, error) { +func GetSceneMoviesJSON(ctx context.Context, movieReader models.MovieGetter, scene *models.Scene) ([]jsonschema.SceneMovie, error) { sceneMovies := scene.Movies.List() var results []jsonschema.SceneMovie @@ -202,7 +188,7 @@ func GetDependentMovieIDs(ctx context.Context, scene *models.Scene) ([]int, erro // GetSceneMarkersJSON returns a slice of SceneMarker JSON representation // objects corresponding to the provided scene's markers. 
-func GetSceneMarkersJSON(ctx context.Context, markerReader MarkerFinder, tagReader MarkerTagFinder, scene *models.Scene) ([]jsonschema.SceneMarker, error) { +func GetSceneMarkersJSON(ctx context.Context, markerReader models.SceneMarkerFinder, tagReader TagFinder, scene *models.Scene) ([]jsonschema.SceneMarker, error) { sceneMarkers, err := markerReader.FindBySceneID(ctx, scene.ID) if err != nil { return nil, fmt.Errorf("error getting scene markers: %v", err) diff --git a/pkg/scene/export_test.go b/pkg/scene/export_test.go index 85a63aa55..19e12ecea 100644 --- a/pkg/scene/export_test.go +++ b/pkg/scene/export_test.go @@ -3,7 +3,6 @@ package scene import ( "errors" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" @@ -93,9 +92,9 @@ func createFullScene(id int) models.Scene { Rating: &rating, Organized: organized, URLs: models.NewRelatedStrings([]string{url}), - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ { - BaseFile: &file.BaseFile{ + BaseFile: &models.BaseFile{ Path: path, }, }, @@ -111,9 +110,9 @@ func createFullScene(id int) models.Scene { func createEmptyScene(id int) models.Scene { return models.Scene{ ID: id, - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ { - BaseFile: &file.BaseFile{ + BaseFile: &models.BaseFile{ Path: path, }, }, diff --git a/pkg/scene/filename_parser.go b/pkg/scene/filename_parser.go index 3dfab3538..b7c38863e 100644 --- a/pkg/scene/filename_parser.go +++ b/pkg/scene/filename_parser.go @@ -450,11 +450,11 @@ func (p *FilenameParser) initWhiteSpaceRegex() { } type FilenameParserRepository struct { - Scene Queryer + Scene models.SceneQueryer Performer PerformerNamesFinder - Studio studio.Queryer + Studio models.StudioQueryer Movie MovieNameFinder - Tag tag.Queryer + Tag models.TagQueryer } 
func (p *FilenameParser) Parse(ctx context.Context, repo FilenameParserRepository) ([]*models.SceneParserResult, int, error) { @@ -544,7 +544,7 @@ func (p *FilenameParser) queryPerformer(ctx context.Context, qb PerformerNamesFi return ret } -func (p *FilenameParser) queryStudio(ctx context.Context, qb studio.Queryer, studioName string) *models.Studio { +func (p *FilenameParser) queryStudio(ctx context.Context, qb models.StudioQueryer, studioName string) *models.Studio { // massage the performer name studioName = delimiterRE.ReplaceAllString(studioName, " ") @@ -587,7 +587,7 @@ func (p *FilenameParser) queryMovie(ctx context.Context, qb MovieNameFinder, mov return ret } -func (p *FilenameParser) queryTag(ctx context.Context, qb tag.Queryer, tagName string) *models.Tag { +func (p *FilenameParser) queryTag(ctx context.Context, qb models.TagQueryer, tagName string) *models.Tag { // massage the tag name tagName = delimiterRE.ReplaceAllString(tagName, " ") @@ -626,7 +626,7 @@ func (p *FilenameParser) setPerformers(ctx context.Context, qb PerformerNamesFin } } -func (p *FilenameParser) setTags(ctx context.Context, qb tag.Queryer, h sceneHolder, result *models.SceneParserResult) { +func (p *FilenameParser) setTags(ctx context.Context, qb models.TagQueryer, h sceneHolder, result *models.SceneParserResult) { // query for each performer tagsSet := make(map[int]bool) for _, tagName := range h.tags { @@ -642,7 +642,7 @@ func (p *FilenameParser) setTags(ctx context.Context, qb tag.Queryer, h sceneHol } } -func (p *FilenameParser) setStudio(ctx context.Context, qb studio.Queryer, h sceneHolder, result *models.SceneParserResult) { +func (p *FilenameParser) setStudio(ctx context.Context, qb models.StudioQueryer, h sceneHolder, result *models.SceneParserResult) { // query for each performer if h.studio != "" { studio := p.queryStudio(ctx, qb, h.studio) diff --git a/pkg/scene/hash.go b/pkg/scene/hash.go index 4b06a73ef..efa9c0fd3 100644 --- a/pkg/scene/hash.go +++ b/pkg/scene/hash.go 
@@ -1,18 +1,17 @@ package scene import ( - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) // GetHash returns the hash of the file, based on the hash algorithm provided. If // hash algorithm is MD5, then Checksum is returned. Otherwise, OSHash is returned. -func GetHash(f file.File, hashAlgorithm models.HashAlgorithm) string { +func GetHash(f models.File, hashAlgorithm models.HashAlgorithm) string { switch hashAlgorithm { case models.HashAlgorithmMd5: - return f.Base().Fingerprints.GetString(file.FingerprintTypeMD5) + return f.Base().Fingerprints.GetString(models.FingerprintTypeMD5) case models.HashAlgorithmOshash: - return f.Base().Fingerprints.GetString(file.FingerprintTypeOshash) + return f.Base().Fingerprints.GetString(models.FingerprintTypeOshash) default: panic("unknown hash algorithm") } diff --git a/pkg/scene/import.go b/pkg/scene/import.go index 2d73c0f2c..e2cfe8aba 100644 --- a/pkg/scene/import.go +++ b/pkg/scene/import.go @@ -5,32 +5,25 @@ import ( "fmt" "strings" - "github.com/stashapp/stash/pkg/file" - "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/movie" - "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) -type FullCreatorUpdater interface { - CreatorUpdater - Update(ctx context.Context, updatedScene *models.Scene) error - Updater +type ImporterReaderWriter interface { + models.SceneCreatorUpdater + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) } type Importer struct { - ReaderWriter FullCreatorUpdater - FileFinder file.Getter - StudioWriter studio.NameFinderCreator - GalleryFinder gallery.Finder - PerformerWriter performer.NameFinderCreator - MovieWriter movie.NameFinderCreator - TagWriter tag.NameFinderCreator 
+ ReaderWriter ImporterReaderWriter + FileFinder models.FileFinder + StudioWriter models.StudioFinderCreator + GalleryFinder models.GalleryFinder + PerformerWriter models.PerformerFinderCreator + MovieWriter models.MovieFinderCreator + TagWriter models.TagFinderCreator Input jsonschema.Scene MissingRefBehaviour models.ImportMissingRefEnum FileNamingAlgorithm models.HashAlgorithm @@ -123,7 +116,7 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene { } func (i *Importer) populateFiles(ctx context.Context) error { - files := make([]*file.VideoFile, 0) + files := make([]*models.VideoFile, 0) for _, ref := range i.Input.Files { path := ref @@ -135,7 +128,7 @@ func (i *Importer) populateFiles(ctx context.Context) error { if f == nil { return fmt.Errorf("scene file '%s' not found", path) } else { - files = append(files, f.(*file.VideoFile)) + files = append(files, f.(*models.VideoFile)) } } @@ -413,7 +406,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - var fileIDs []file.ID + var fileIDs []models.FileID for _, f := range i.scene.Files.List() { fileIDs = append(fileIDs, f.Base().ID) } @@ -437,7 +430,7 @@ func (i *Importer) Update(ctx context.Context, id int) error { return nil } -func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { +func importTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { tags, err := tagWriter.FindByNames(ctx, names, false) if err != nil { return nil, err @@ -472,7 +465,7 @@ func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []st return tags, nil } -func createTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string) ([]*models.Tag, error) { +func createTags(ctx context.Context, 
tagWriter models.TagCreator, names []string) ([]*models.Tag, error) { var ret []*models.Tag for _, name := range names { newTag := models.NewTag(name) diff --git a/pkg/scene/marker_import.go b/pkg/scene/marker_import.go index 20127cbf8..33937af7e 100644 --- a/pkg/scene/marker_import.go +++ b/pkg/scene/marker_import.go @@ -7,20 +7,17 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/tag" ) type MarkerCreatorUpdater interface { - Create(ctx context.Context, newSceneMarker *models.SceneMarker) error - Update(ctx context.Context, updatedSceneMarker *models.SceneMarker) error + models.SceneMarkerCreatorUpdater FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) - UpdateTags(ctx context.Context, markerID int, tagIDs []int) error } type MarkerImporter struct { SceneID int ReaderWriter MarkerCreatorUpdater - TagWriter tag.NameFinderCreator + TagWriter models.TagFinderCreator Input jsonschema.SceneMarker MissingRefBehaviour models.ImportMissingRefEnum diff --git a/pkg/scene/marker_query.go b/pkg/scene/marker_query.go index e4ae5b6df..d9cd311a7 100644 --- a/pkg/scene/marker_query.go +++ b/pkg/scene/marker_query.go @@ -7,15 +7,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type MarkerQueryer interface { - Query(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, findFilter *models.FindFilterType) ([]*models.SceneMarker, int, error) -} - -type MarkerCountQueryer interface { - QueryCount(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, findFilter *models.FindFilterType) (int, error) -} - -func MarkerCountByTagID(ctx context.Context, r MarkerCountQueryer, id int, depth *int) (int, error) { +func MarkerCountByTagID(ctx context.Context, r models.SceneMarkerQueryer, id int, depth *int) (int, error) { filter := &models.SceneMarkerFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff 
--git a/pkg/scene/merge.go b/pkg/scene/merge.go index ed660d83e..8934f5515 100644 --- a/pkg/scene/merge.go +++ b/pkg/scene/merge.go @@ -6,7 +6,6 @@ import ( "fmt" "os" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -33,7 +32,7 @@ func (s *Service) Merge(ctx context.Context, sourceIDs []int, destinationID int, return fmt.Errorf("finding source scenes: %w", err) } - var fileIDs []file.ID + var fileIDs []models.FileID for _, src := range sources { // TODO - delete generated files as needed diff --git a/pkg/scene/migrate_screenshots.go b/pkg/scene/migrate_screenshots.go index 94d73643f..59eade299 100644 --- a/pkg/scene/migrate_screenshots.go +++ b/pkg/scene/migrate_screenshots.go @@ -20,7 +20,8 @@ type MigrateSceneScreenshotsInput struct { type HashFinderCoverUpdater interface { FindByChecksum(ctx context.Context, checksum string) ([]*models.Scene, error) FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error) - CoverUpdater + HasCover(ctx context.Context, sceneID int) (bool, error) + UpdateCover(ctx context.Context, sceneID int, cover []byte) error } type ScreenshotMigrator struct { diff --git a/pkg/scene/query.go b/pkg/scene/query.go index 3dc7524ed..a8b1993a6 100644 --- a/pkg/scene/query.go +++ b/pkg/scene/query.go @@ -11,19 +11,6 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, options models.SceneQueryOptions) (*models.SceneQueryResult, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) (int, error) -} - -type IDFinder interface { - Find(ctx context.Context, id int) (*models.Scene, error) - FindMany(ctx context.Context, ids []int) ([]*models.Scene, error) -} - // QueryOptions returns a SceneQueryOptions populated with the provided filters. 
func QueryOptions(sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType, count bool) models.SceneQueryOptions { return models.SceneQueryOptions{ @@ -36,7 +23,7 @@ func QueryOptions(sceneFilter *models.SceneFilterType, findFilter *models.FindFi } // QueryWithCount queries for scenes, returning the scene objects and the total count. -func QueryWithCount(ctx context.Context, qb Queryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, int, error) { +func QueryWithCount(ctx context.Context, qb models.SceneQueryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, int, error) { // this was moved from the queryBuilder code // left here so that calling functions can reference this instead result, err := qb.Query(ctx, QueryOptions(sceneFilter, findFilter, true)) @@ -53,7 +40,7 @@ func QueryWithCount(ctx context.Context, qb Queryer, sceneFilter *models.SceneFi } // Query queries for scenes using the provided filters. 
-func Query(ctx context.Context, qb Queryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, error) { +func Query(ctx context.Context, qb models.SceneQueryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, error) { result, err := qb.Query(ctx, QueryOptions(sceneFilter, findFilter, false)) if err != nil { return nil, err @@ -67,7 +54,7 @@ func Query(ctx context.Context, qb Queryer, sceneFilter *models.SceneFilterType, return scenes, nil } -func BatchProcess(ctx context.Context, reader Queryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType, fn func(scene *models.Scene) error) error { +func BatchProcess(ctx context.Context, reader models.SceneQueryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType, fn func(scene *models.Scene) error) error { const batchSize = 1000 if findFilter == nil { @@ -134,7 +121,7 @@ func FilterFromPaths(paths []string) *models.SceneFilterType { return ret } -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r models.SceneQueryer, id int, depth *int) (int, error) { filter := &models.SceneFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -146,7 +133,7 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (i return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r models.SceneQueryer, id int, depth *int) (int, error) { filter := &models.SceneFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/scene/scan.go b/pkg/scene/scan.go index 5ccdee256..f16d0d5c6 100644 --- a/pkg/scene/scan.go +++ b/pkg/scene/scan.go @@ -6,7 +6,6 @@ import ( "fmt" "time" - "github.com/stashapp/stash/pkg/file" 
"github.com/stashapp/stash/pkg/file/video" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -19,21 +18,22 @@ var ( ErrNotVideoFile = errors.New("not a video file") ) -type CreatorUpdater interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) - FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Scene, error) - Creator +type ScanCreatorUpdater interface { + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) + FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Scene, error) + GetFiles(ctx context.Context, relatedID int) ([]*models.VideoFile, error) + + Create(ctx context.Context, newScene *models.Scene, fileIDs []models.FileID) error UpdatePartial(ctx context.Context, id int, updatedScene models.ScenePartial) (*models.Scene, error) - AddFileID(ctx context.Context, id int, fileID file.ID) error - models.VideoFileLoader + AddFileID(ctx context.Context, id int, fileID models.FileID) error } type ScanGenerator interface { - Generate(ctx context.Context, s *models.Scene, f *file.VideoFile) error + Generate(ctx context.Context, s *models.Scene, f *models.VideoFile) error } type ScanHandler struct { - CreatorUpdater CreatorUpdater + CreatorUpdater ScanCreatorUpdater ScanGenerator ScanGenerator CaptionUpdater video.CaptionUpdater @@ -63,12 +63,12 @@ func (h *ScanHandler) validate() error { return nil } -func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File) error { +func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.File) error { if err := h.validate(); err != nil { return err } - videoFile, ok := f.(*file.VideoFile) + videoFile, ok := f.(*models.VideoFile) if !ok { return ErrNotVideoFile } @@ -108,7 +108,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File logger.Infof("%s doesn't exist. 
Creating new scene...", f.Base().Path) - if err := h.CreatorUpdater.Create(ctx, newScene, []file.ID{videoFile.ID}); err != nil { + if err := h.CreatorUpdater.Create(ctx, newScene, []models.FileID{videoFile.ID}); err != nil { return fmt.Errorf("creating new scene: %w", err) } @@ -140,7 +140,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File return nil } -func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Scene, f *file.VideoFile, updateExisting bool) error { +func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Scene, f *models.VideoFile, updateExisting bool) error { for _, s := range existing { if err := s.LoadFiles(ctx, h.CreatorUpdater); err != nil { return err diff --git a/pkg/scene/service.go b/pkg/scene/service.go index f7b51ce1e..05fa9f532 100644 --- a/pkg/scene/service.go +++ b/pkg/scene/service.go @@ -1,58 +1,19 @@ package scene import ( - "context" - - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/paths" "github.com/stashapp/stash/pkg/plugin" ) -type FinderByFile interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) -} - -type FileAssigner interface { - AssignFiles(ctx context.Context, sceneID int, fileID []file.ID) error -} - -type Creator interface { - Create(ctx context.Context, newScene *models.Scene, fileIDs []file.ID) error -} - -type CoverUpdater interface { - HasCover(ctx context.Context, sceneID int) (bool, error) - UpdateCover(ctx context.Context, sceneID int, cover []byte) error -} - type Config interface { GetVideoFileNamingAlgorithm() models.HashAlgorithm } -type Repository interface { - IDFinder - FinderByFile - Creator - PartialUpdater - Destroyer - models.VideoFileLoader - FileAssigner - CoverUpdater - models.SceneReader -} - -type MarkerRepository interface { - MarkerFinder - MarkerDestroyer - - Update(ctx context.Context, updatedObject 
*models.SceneMarker) error -} - type Service struct { - File file.Store - Repository Repository - MarkerRepository MarkerRepository + File models.FileReaderWriter + Repository models.SceneReaderWriter + MarkerRepository models.SceneMarkerReaderWriter PluginCache *plugin.Cache Paths *paths.Paths diff --git a/pkg/scene/update.go b/pkg/scene/update.go index e3f3e252b..f0a1a030f 100644 --- a/pkg/scene/update.go +++ b/pkg/scene/update.go @@ -6,20 +6,10 @@ import ( "fmt" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) -type Updater interface { - PartialUpdater - UpdateCover(ctx context.Context, sceneID int, cover []byte) error -} - -type PartialUpdater interface { - UpdatePartial(ctx context.Context, id int, updatedScene models.ScenePartial) (*models.Scene, error) -} - var ErrEmptyUpdater = errors.New("no fields have been set") // UpdateSet is used to update a scene and its relationships. @@ -46,7 +36,7 @@ func (u *UpdateSet) IsEmpty() bool { // Update updates a scene by updating the fields in the Partial field, then // updates non-nil relationships. Returns an error if there is no work to // be done. 
-func (u *UpdateSet) Update(ctx context.Context, qb Updater) (*models.Scene, error) { +func (u *UpdateSet) Update(ctx context.Context, qb models.SceneUpdater) (*models.Scene, error) { if u.IsEmpty() { return nil, ErrEmptyUpdater } @@ -83,7 +73,7 @@ func (u UpdateSet) UpdateInput() models.SceneUpdateInput { return ret } -func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Scene, performerID int) error { +func AddPerformer(ctx context.Context, qb models.SceneUpdater, o *models.Scene, performerID int) error { _, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{ PerformerIDs: &models.UpdateIDs{ IDs: []int{performerID}, @@ -93,7 +83,7 @@ func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Scene, perfo return err } -func AddTag(ctx context.Context, qb PartialUpdater, o *models.Scene, tagID int) error { +func AddTag(ctx context.Context, qb models.SceneUpdater, o *models.Scene, tagID int) error { _, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{ TagIDs: &models.UpdateIDs{ IDs: []int{tagID}, @@ -103,7 +93,7 @@ func AddTag(ctx context.Context, qb PartialUpdater, o *models.Scene, tagID int) return err } -func AddGallery(ctx context.Context, qb PartialUpdater, o *models.Scene, galleryID int) error { +func AddGallery(ctx context.Context, qb models.SceneUpdater, o *models.Scene, galleryID int) error { _, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{ TagIDs: &models.UpdateIDs{ IDs: []int{galleryID}, @@ -113,7 +103,7 @@ func AddGallery(ctx context.Context, qb PartialUpdater, o *models.Scene, gallery return err } -func (s *Service) AssignFile(ctx context.Context, sceneID int, fileID file.ID) error { +func (s *Service) AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error { // ensure file isn't a primary file and that it is a video file f, err := s.File.Find(ctx, fileID) if err != nil { @@ -121,7 +111,7 @@ func (s *Service) AssignFile(ctx context.Context, sceneID int, fileID file.ID) e } ff := f[0] - if _, 
ok := ff.(*file.VideoFile); !ok { + if _, ok := ff.(*models.VideoFile); !ok { return fmt.Errorf("%s is not a video file", ff.Base().Path) } @@ -134,5 +124,5 @@ func (s *Service) AssignFile(ctx context.Context, sceneID int, fileID file.ID) e return errors.New("cannot reassign primary file") } - return s.Repository.AssignFiles(ctx, sceneID, []file.ID{fileID}) + return s.Repository.AssignFiles(ctx, sceneID, []models.FileID{fileID}) } diff --git a/pkg/scraper/autotag.go b/pkg/scraper/autotag.go index 6ba8b371d..5eb3922a8 100644 --- a/pkg/scraper/autotag.go +++ b/pkg/scraper/autotag.go @@ -20,14 +20,14 @@ const ( type autotagScraper struct { // repository models.Repository txnManager txn.Manager - performerReader match.PerformerAutoTagQueryer - studioReader match.StudioAutoTagQueryer - tagReader match.TagAutoTagQueryer + performerReader models.PerformerAutoTagQueryer + studioReader models.StudioAutoTagQueryer + tagReader models.TagAutoTagQueryer globalConfig GlobalConfig } -func autotagMatchPerformers(ctx context.Context, path string, performerReader match.PerformerAutoTagQueryer, trimExt bool) ([]*models.ScrapedPerformer, error) { +func autotagMatchPerformers(ctx context.Context, path string, performerReader models.PerformerAutoTagQueryer, trimExt bool) ([]*models.ScrapedPerformer, error) { p, err := match.PathToPerformers(ctx, path, performerReader, nil, trimExt) if err != nil { return nil, fmt.Errorf("error matching performers: %w", err) @@ -52,7 +52,7 @@ func autotagMatchPerformers(ctx context.Context, path string, performerReader ma return ret, nil } -func autotagMatchStudio(ctx context.Context, path string, studioReader match.StudioAutoTagQueryer, trimExt bool) (*models.ScrapedStudio, error) { +func autotagMatchStudio(ctx context.Context, path string, studioReader models.StudioAutoTagQueryer, trimExt bool) (*models.ScrapedStudio, error) { studio, err := match.PathToStudio(ctx, path, studioReader, nil, trimExt) if err != nil { return nil, fmt.Errorf("error matching 
studios: %w", err) @@ -69,7 +69,7 @@ func autotagMatchStudio(ctx context.Context, path string, studioReader match.Stu return nil, nil } -func autotagMatchTags(ctx context.Context, path string, tagReader match.TagAutoTagQueryer, trimExt bool) ([]*models.ScrapedTag, error) { +func autotagMatchTags(ctx context.Context, path string, tagReader models.TagAutoTagQueryer, trimExt bool) ([]*models.ScrapedTag, error) { t, err := match.PathToTags(ctx, path, tagReader, nil, trimExt) if err != nil { return nil, fmt.Errorf("error matching tags: %w", err) diff --git a/pkg/scraper/cache.go b/pkg/scraper/cache.go index d526ecb0a..c110944f6 100644 --- a/pkg/scraper/cache.go +++ b/pkg/scraper/cache.go @@ -15,8 +15,6 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/txn" ) @@ -53,27 +51,27 @@ func isCDPPathWS(c GlobalConfig) bool { } type SceneFinder interface { - scene.IDFinder + models.SceneGetter models.URLLoader } type PerformerFinder interface { - match.PerformerAutoTagQueryer + models.PerformerAutoTagQueryer match.PerformerFinder } type StudioFinder interface { - match.StudioAutoTagQueryer - match.StudioFinder + models.StudioAutoTagQueryer + FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Studio, error) } type TagFinder interface { - match.TagAutoTagQueryer - tag.Queryer + models.TagGetter + models.TagAutoTagQueryer } type GalleryFinder interface { - Find(ctx context.Context, id int) (*models.Gallery, error) + models.GalleryGetter models.FileLoader } diff --git a/pkg/scraper/postprocessing.go b/pkg/scraper/postprocessing.go index e2d404d7c..e504e4d1c 100644 --- a/pkg/scraper/postprocessing.go +++ b/pkg/scraper/postprocessing.go @@ -6,7 +6,6 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" - 
"github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/txn" ) @@ -201,7 +200,7 @@ func (c Cache) postScrapeGallery(ctx context.Context, g ScrapedGallery) (Scraped return g, nil } -func postProcessTags(ctx context.Context, tqb tag.Queryer, scrapedTags []*models.ScrapedTag) ([]*models.ScrapedTag, error) { +func postProcessTags(ctx context.Context, tqb models.TagQueryer, scrapedTags []*models.ScrapedTag) ([]*models.ScrapedTag, error) { var ret []*models.ScrapedTag for _, t := range scrapedTags { diff --git a/pkg/scraper/stashbox/stash_box.go b/pkg/scraper/stashbox/stash_box.go index 6a5df09e9..7abff7032 100644 --- a/pkg/scraper/stashbox/stash_box.go +++ b/pkg/scraper/stashbox/stash_box.go @@ -19,41 +19,39 @@ import ( "github.com/Yamashou/gqlgenc/graphqljson" "github.com/gofrs/uuid" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scraper" "github.com/stashapp/stash/pkg/scraper/stashbox/graphql" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) type SceneReader interface { - Find(ctx context.Context, id int) (*models.Scene, error) + models.SceneGetter models.StashIDLoader models.VideoFileLoader } type PerformerReader interface { + models.PerformerGetter match.PerformerFinder - Find(ctx context.Context, id int) (*models.Performer, error) - FindBySceneID(ctx context.Context, sceneID int) ([]*models.Performer, error) models.AliasLoader models.StashIDLoader + FindBySceneID(ctx context.Context, sceneID int) ([]*models.Performer, error) GetImage(ctx context.Context, performerID int) ([]byte, error) } type StudioReader interface { + models.StudioGetter match.StudioFinder - studio.Finder models.StashIDLoader } + type TagFinder interface { - tag.Queryer + models.TagQueryer 
FindBySceneID(ctx context.Context, sceneID int) ([]*models.Tag, error) } @@ -151,7 +149,7 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int) var sceneFPs []*graphql.FingerprintQueryInput for _, f := range scene.Files.List() { - checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5) + checksum := f.Fingerprints.GetString(models.FingerprintTypeMD5) if checksum != "" { sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{ Hash: checksum, @@ -159,7 +157,7 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int) }) } - oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash) + oshash := f.Fingerprints.GetString(models.FingerprintTypeOshash) if oshash != "" { sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{ Hash: oshash, @@ -167,7 +165,7 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int) }) } - phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash) + phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash) if phash != 0 { phashStr := utils.PhashToString(phash) sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{ @@ -279,7 +277,7 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin duration := f.Duration if duration != 0 { - if checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5); checksum != "" { + if checksum := f.Fingerprints.GetString(models.FingerprintTypeMD5); checksum != "" { fingerprint := graphql.FingerprintInput{ Hash: checksum, Algorithm: graphql.FingerprintAlgorithmMd5, @@ -291,7 +289,7 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin }) } - if oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash); oshash != "" { + if oshash := f.Fingerprints.GetString(models.FingerprintTypeOshash); oshash != "" { fingerprint := graphql.FingerprintInput{ Hash: oshash, Algorithm: graphql.FingerprintAlgorithmOshash, @@ -303,7 +301,7 @@ func (c Client) 
SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin }) } - if phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash); phash != 0 { + if phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash); phash != 0 { fingerprint := graphql.FingerprintInput{ Hash: utils.PhashToString(phash), Algorithm: graphql.FingerprintAlgorithmPhash, @@ -979,7 +977,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo duration := f.Duration if duration != 0 { - if oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash); oshash != "" { + if oshash := f.Fingerprints.GetString(models.FingerprintTypeOshash); oshash != "" { fingerprint := graphql.FingerprintInput{ Hash: oshash, Algorithm: graphql.FingerprintAlgorithmOshash, @@ -988,7 +986,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo fingerprints = appendFingerprintUnique(fingerprints, &fingerprint) } - if checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5); checksum != "" { + if checksum := f.Fingerprints.GetString(models.FingerprintTypeMD5); checksum != "" { fingerprint := graphql.FingerprintInput{ Hash: checksum, Algorithm: graphql.FingerprintAlgorithmMd5, @@ -997,7 +995,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo fingerprints = appendFingerprintUnique(fingerprints, &fingerprint) } - if phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash); phash != 0 { + if phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash); phash != 0 { fingerprint := graphql.FingerprintInput{ Hash: utils.PhashToString(phash), Algorithm: graphql.FingerprintAlgorithmPhash, diff --git a/pkg/sqlite/file.go b/pkg/sqlite/file.go index 760a77465..2113aad13 100644 --- a/pkg/sqlite/file.go +++ b/pkg/sqlite/file.go @@ -13,7 +13,6 @@ import ( "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" "github.com/jmoiron/sqlx" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" 
"gopkg.in/guregu/null.v4" ) @@ -31,17 +30,17 @@ const ( ) type basicFileRow struct { - ID file.ID `db:"id" goqu:"skipinsert"` - Basename string `db:"basename"` - ZipFileID null.Int `db:"zip_file_id"` - ParentFolderID file.FolderID `db:"parent_folder_id"` - Size int64 `db:"size"` - ModTime Timestamp `db:"mod_time"` - CreatedAt Timestamp `db:"created_at"` - UpdatedAt Timestamp `db:"updated_at"` + ID models.FileID `db:"id" goqu:"skipinsert"` + Basename string `db:"basename"` + ZipFileID null.Int `db:"zip_file_id"` + ParentFolderID models.FolderID `db:"parent_folder_id"` + Size int64 `db:"size"` + ModTime Timestamp `db:"mod_time"` + CreatedAt Timestamp `db:"created_at"` + UpdatedAt Timestamp `db:"updated_at"` } -func (r *basicFileRow) fromBasicFile(o file.BaseFile) { +func (r *basicFileRow) fromBasicFile(o models.BaseFile) { r.ID = o.ID r.Basename = o.Basename r.ZipFileID = nullIntFromFileIDPtr(o.ZipFileID) @@ -53,20 +52,20 @@ func (r *basicFileRow) fromBasicFile(o file.BaseFile) { } type videoFileRow struct { - FileID file.ID `db:"file_id"` - Format string `db:"format"` - Width int `db:"width"` - Height int `db:"height"` - Duration float64 `db:"duration"` - VideoCodec string `db:"video_codec"` - AudioCodec string `db:"audio_codec"` - FrameRate float64 `db:"frame_rate"` - BitRate int64 `db:"bit_rate"` - Interactive bool `db:"interactive"` - InteractiveSpeed null.Int `db:"interactive_speed"` + FileID models.FileID `db:"file_id"` + Format string `db:"format"` + Width int `db:"width"` + Height int `db:"height"` + Duration float64 `db:"duration"` + VideoCodec string `db:"video_codec"` + AudioCodec string `db:"audio_codec"` + FrameRate float64 `db:"frame_rate"` + BitRate int64 `db:"bit_rate"` + Interactive bool `db:"interactive"` + InteractiveSpeed null.Int `db:"interactive_speed"` } -func (f *videoFileRow) fromVideoFile(ff file.VideoFile) { +func (f *videoFileRow) fromVideoFile(ff models.VideoFile) { f.FileID = ff.ID f.Format = ff.Format f.Width = ff.Width @@ -81,13 +80,13 
@@ func (f *videoFileRow) fromVideoFile(ff file.VideoFile) { } type imageFileRow struct { - FileID file.ID `db:"file_id"` - Format string `db:"format"` - Width int `db:"width"` - Height int `db:"height"` + FileID models.FileID `db:"file_id"` + Format string `db:"format"` + Width int `db:"width"` + Height int `db:"height"` } -func (f *imageFileRow) fromImageFile(ff file.ImageFile) { +func (f *imageFileRow) fromImageFile(ff models.ImageFile) { f.FileID = ff.ID f.Format = ff.Format f.Width = ff.Width @@ -110,8 +109,8 @@ type videoFileQueryRow struct { InteractiveSpeed null.Int `db:"interactive_speed"` } -func (f *videoFileQueryRow) resolve() *file.VideoFile { - return &file.VideoFile{ +func (f *videoFileQueryRow) resolve() *models.VideoFile { + return &models.VideoFile{ Format: f.Format.String, Width: int(f.Width.Int64), Height: int(f.Height.Int64), @@ -159,8 +158,8 @@ func (imageFileQueryRow) columns(table *table) []interface{} { } } -func (f *imageFileQueryRow) resolve() *file.ImageFile { - return &file.ImageFile{ +func (f *imageFileQueryRow) resolve() *models.ImageFile { + return &models.ImageFile{ Format: f.Format.String, Width: int(f.Width.Int64), Height: int(f.Height.Int64), @@ -186,15 +185,15 @@ type fileQueryRow struct { imageFileQueryRow } -func (r *fileQueryRow) resolve() file.File { - basic := &file.BaseFile{ - ID: file.ID(r.FileID.Int64), - DirEntry: file.DirEntry{ +func (r *fileQueryRow) resolve() models.File { + basic := &models.BaseFile{ + ID: models.FileID(r.FileID.Int64), + DirEntry: models.DirEntry{ ZipFileID: nullIntFileIDPtr(r.ZipFileID), ModTime: r.ModTime.Timestamp, }, Path: filepath.Join(r.FolderPath.String, r.Basename.String), - ParentFolderID: file.FolderID(r.ParentFolderID.Int64), + ParentFolderID: models.FolderID(r.ParentFolderID.Int64), Basename: r.Basename.String, Size: r.Size.Int64, CreatedAt: r.CreatedAt.Timestamp, @@ -202,14 +201,14 @@ func (r *fileQueryRow) resolve() file.File { } if basic.ZipFileID != nil && r.ZipFolderPath.Valid && 
r.ZipBasename.Valid { - basic.ZipFile = &file.BaseFile{ + basic.ZipFile = &models.BaseFile{ ID: *basic.ZipFileID, Path: filepath.Join(r.ZipFolderPath.String, r.ZipBasename.String), Basename: r.ZipBasename.String, } } - var ret file.File = basic + var ret models.File = basic if r.videoFileQueryRow.Format.Valid { vf := r.videoFileQueryRow.resolve() @@ -228,7 +227,7 @@ func (r *fileQueryRow) resolve() file.File { return ret } -func appendFingerprintsUnique(vs []file.Fingerprint, v ...file.Fingerprint) []file.Fingerprint { +func appendFingerprintsUnique(vs []models.Fingerprint, v ...models.Fingerprint) []models.Fingerprint { for _, vv := range v { found := false for _, vsv := range vs { @@ -245,7 +244,7 @@ func appendFingerprintsUnique(vs []file.Fingerprint, v ...file.Fingerprint) []fi return vs } -func (r *fileQueryRow) appendRelationships(i *file.BaseFile) { +func (r *fileQueryRow) appendRelationships(i *models.BaseFile) { if r.fingerprintQueryRow.valid() { i.Fingerprints = appendFingerprintsUnique(i.Fingerprints, r.fingerprintQueryRow.resolve()) } @@ -253,16 +252,16 @@ func (r *fileQueryRow) appendRelationships(i *file.BaseFile) { type fileQueryRows []fileQueryRow -func (r fileQueryRows) resolve() []file.File { - var ret []file.File - var last file.File - var lastID file.ID +func (r fileQueryRows) resolve() []models.File { + var ret []models.File + var last models.File + var lastID models.FileID for _, row := range r { - if last == nil || lastID != file.ID(row.FileID.Int64) { + if last == nil || lastID != models.FileID(row.FileID.Int64) { f := row.resolve() last = f - lastID = file.ID(row.FileID.Int64) + lastID = models.FileID(row.FileID.Int64) ret = append(ret, last) continue } @@ -295,7 +294,7 @@ func (qb *FileStore) table() exp.IdentifierExpression { return qb.tableMgr.table } -func (qb *FileStore) Create(ctx context.Context, f file.File) error { +func (qb *FileStore) Create(ctx context.Context, f models.File) error { var r basicFileRow r.fromBasicFile(*f.Base()) 
@@ -304,15 +303,15 @@ func (qb *FileStore) Create(ctx context.Context, f file.File) error { return err } - fileID := file.ID(id) + fileID := models.FileID(id) // create extended stuff here switch ef := f.(type) { - case *file.VideoFile: + case *models.VideoFile: if err := qb.createVideoFile(ctx, fileID, *ef); err != nil { return err } - case *file.ImageFile: + case *models.ImageFile: if err := qb.createImageFile(ctx, fileID, *ef); err != nil { return err } @@ -333,7 +332,7 @@ func (qb *FileStore) Create(ctx context.Context, f file.File) error { return nil } -func (qb *FileStore) Update(ctx context.Context, f file.File) error { +func (qb *FileStore) Update(ctx context.Context, f models.File) error { var r basicFileRow r.fromBasicFile(*f.Base()) @@ -345,11 +344,11 @@ func (qb *FileStore) Update(ctx context.Context, f file.File) error { // create extended stuff here switch ef := f.(type) { - case *file.VideoFile: + case *models.VideoFile: if err := qb.updateOrCreateVideoFile(ctx, id, *ef); err != nil { return err } - case *file.ImageFile: + case *models.ImageFile: if err := qb.updateOrCreateImageFile(ctx, id, *ef); err != nil { return err } @@ -362,11 +361,11 @@ func (qb *FileStore) Update(ctx context.Context, f file.File) error { return nil } -func (qb *FileStore) Destroy(ctx context.Context, id file.ID) error { +func (qb *FileStore) Destroy(ctx context.Context, id models.FileID) error { return qb.tableMgr.destroyExisting(ctx, []int{int(id)}) } -func (qb *FileStore) createVideoFile(ctx context.Context, id file.ID, f file.VideoFile) error { +func (qb *FileStore) createVideoFile(ctx context.Context, id models.FileID, f models.VideoFile) error { var r videoFileRow r.fromVideoFile(f) r.FileID = id @@ -377,7 +376,7 @@ func (qb *FileStore) createVideoFile(ctx context.Context, id file.ID, f file.Vid return nil } -func (qb *FileStore) updateOrCreateVideoFile(ctx context.Context, id file.ID, f file.VideoFile) error { +func (qb *FileStore) updateOrCreateVideoFile(ctx 
context.Context, id models.FileID, f models.VideoFile) error { exists, err := videoFileTableMgr.idExists(ctx, id) if err != nil { return err @@ -397,7 +396,7 @@ func (qb *FileStore) updateOrCreateVideoFile(ctx context.Context, id file.ID, f return nil } -func (qb *FileStore) createImageFile(ctx context.Context, id file.ID, f file.ImageFile) error { +func (qb *FileStore) createImageFile(ctx context.Context, id models.FileID, f models.ImageFile) error { var r imageFileRow r.fromImageFile(f) r.FileID = id @@ -408,7 +407,7 @@ func (qb *FileStore) createImageFile(ctx context.Context, id file.ID, f file.Ima return nil } -func (qb *FileStore) updateOrCreateImageFile(ctx context.Context, id file.ID, f file.ImageFile) error { +func (qb *FileStore) updateOrCreateImageFile(ctx context.Context, id models.FileID, f models.ImageFile) error { exists, err := imageFileTableMgr.idExists(ctx, id) if err != nil { return err @@ -515,7 +514,7 @@ func (qb *FileStore) countDataset() *goqu.SelectDataset { ) } -func (qb *FileStore) get(ctx context.Context, q *goqu.SelectDataset) (file.File, error) { +func (qb *FileStore) get(ctx context.Context, q *goqu.SelectDataset) (models.File, error) { ret, err := qb.getMany(ctx, q) if err != nil { return nil, err @@ -528,7 +527,7 @@ func (qb *FileStore) get(ctx context.Context, q *goqu.SelectDataset) (file.File, return ret[0], nil } -func (qb *FileStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]file.File, error) { +func (qb *FileStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]models.File, error) { const single = false var rows fileQueryRows if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { @@ -546,8 +545,8 @@ func (qb *FileStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]file return rows.resolve(), nil } -func (qb *FileStore) Find(ctx context.Context, ids ...file.ID) ([]file.File, error) { - var files []file.File +func (qb *FileStore) Find(ctx context.Context, ids ...models.FileID) ([]models.File, 
error) { + var files []models.File for _, id := range ids { file, err := qb.find(ctx, id) if err != nil { @@ -564,7 +563,7 @@ func (qb *FileStore) Find(ctx context.Context, ids ...file.ID) ([]file.File, err return files, nil } -func (qb *FileStore) find(ctx context.Context, id file.ID) (file.File, error) { +func (qb *FileStore) find(ctx context.Context, id models.FileID) (models.File, error) { q := qb.selectDataset().Where(qb.tableMgr.byID(id)) ret, err := qb.get(ctx, q) @@ -576,7 +575,7 @@ func (qb *FileStore) find(ctx context.Context, id file.ID) (file.File, error) { } // FindByPath returns the first file that matches the given path. Wildcard characters are supported. -func (qb *FileStore) FindByPath(ctx context.Context, p string) (file.File, error) { +func (qb *FileStore) FindByPath(ctx context.Context, p string) (models.File, error) { ret, err := qb.FindAllByPath(ctx, p) @@ -593,7 +592,7 @@ func (qb *FileStore) FindByPath(ctx context.Context, p string) (file.File, error // FindAllByPath returns all the files that match the given path. // Wildcard characters are supported. -func (qb *FileStore) FindAllByPath(ctx context.Context, p string) ([]file.File, error) { +func (qb *FileStore) FindAllByPath(ctx context.Context, p string) ([]models.File, error) { // separate basename from path basename := filepath.Base(p) dirName := filepath.Dir(p) @@ -646,7 +645,7 @@ func (qb *FileStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.SelectD // FindAllByPaths returns the all files that are within any of the given paths. // Returns all if limit is < 0. // Returns all files if p is empty. 
-func (qb *FileStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]file.File, error) { +func (qb *FileStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]models.File, error) { table := qb.table() folderTable := folderTableMgr.table @@ -680,7 +679,7 @@ func (qb *FileStore) CountAllInPaths(ctx context.Context, p []string) (int, erro return count(ctx, q) } -func (qb *FileStore) findBySubquery(ctx context.Context, sq *goqu.SelectDataset) ([]file.File, error) { +func (qb *FileStore) findBySubquery(ctx context.Context, sq *goqu.SelectDataset) ([]models.File, error) { table := qb.table() q := qb.selectDataset().Prepared(true).Where( @@ -692,7 +691,7 @@ func (qb *FileStore) findBySubquery(ctx context.Context, sq *goqu.SelectDataset) return qb.getMany(ctx, q) } -func (qb *FileStore) FindByFingerprint(ctx context.Context, fp file.Fingerprint) ([]file.File, error) { +func (qb *FileStore) FindByFingerprint(ctx context.Context, fp models.Fingerprint) ([]models.File, error) { fingerprintTable := fingerprintTableMgr.table fingerprints := fingerprintTable.As("fp") @@ -705,7 +704,7 @@ func (qb *FileStore) FindByFingerprint(ctx context.Context, fp file.Fingerprint) return qb.findBySubquery(ctx, sq) } -func (qb *FileStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]file.File, error) { +func (qb *FileStore) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]models.File, error) { table := qb.table() q := qb.selectDataset().Prepared(true).Where( @@ -716,7 +715,7 @@ func (qb *FileStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([] } // FindByFileInfo finds files that match the base name, size, and mod time of the given file. 
-func (qb *FileStore) FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]file.File, error) { +func (qb *FileStore) FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]models.File, error) { table := qb.table() modTime := info.ModTime().Format(time.RFC3339) @@ -730,7 +729,7 @@ func (qb *FileStore) FindByFileInfo(ctx context.Context, info fs.FileInfo, size return qb.getMany(ctx, q) } -func (qb *FileStore) CountByFolderID(ctx context.Context, folderID file.FolderID) (int, error) { +func (qb *FileStore) CountByFolderID(ctx context.Context, folderID models.FolderID) (int, error) { table := qb.table() q := qb.countDataset().Prepared(true).Where( @@ -740,7 +739,7 @@ func (qb *FileStore) CountByFolderID(ctx context.Context, folderID file.FolderID return count(ctx, q) } -func (qb *FileStore) IsPrimary(ctx context.Context, fileID file.ID) (bool, error) { +func (qb *FileStore) IsPrimary(ctx context.Context, fileID models.FileID) (bool, error) { joinTables := []exp.IdentifierExpression{ scenesFilesJoinTable, galleriesFilesJoinTable, @@ -867,9 +866,9 @@ func (qb *FileStore) Query(ctx context.Context, options models.FileQueryOptions) return nil, fmt.Errorf("error finding IDs: %w", err) } - result.IDs = make([]file.ID, len(idsResult)) + result.IDs = make([]models.FileID, len(idsResult)) for i, id := range idsResult { - result.IDs[i] = file.ID(id) + result.IDs[i] = models.FileID(id) } return result, nil @@ -929,10 +928,10 @@ func (qb *FileStore) captionRepository() *captionRepository { } } -func (qb *FileStore) GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error) { +func (qb *FileStore) GetCaptions(ctx context.Context, fileID models.FileID) ([]*models.VideoCaption, error) { return qb.captionRepository().get(ctx, fileID) } -func (qb *FileStore) UpdateCaptions(ctx context.Context, fileID file.ID, captions []*models.VideoCaption) error { +func (qb *FileStore) UpdateCaptions(ctx context.Context, fileID models.FileID, 
captions []*models.VideoCaption) error { return qb.captionRepository().replace(ctx, fileID, captions) } diff --git a/pkg/sqlite/file_test.go b/pkg/sqlite/file_test.go index 2bcbe42e9..766ffcc70 100644 --- a/pkg/sqlite/file_test.go +++ b/pkg/sqlite/file_test.go @@ -9,7 +9,7 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" ) @@ -17,10 +17,10 @@ func getFilePath(folderIdx int, basename string) string { return filepath.Join(folderPaths[folderIdx], basename) } -func makeZipFileWithID(index int) file.File { +func makeZipFileWithID(index int) models.File { f := makeFile(index) - return &file.BaseFile{ + return &models.BaseFile{ ID: fileIDs[index], Basename: f.Base().Basename, Path: getFilePath(fileFolders[index], getFileBaseName(index)), @@ -49,13 +49,13 @@ func Test_fileFileStore_Create(t *testing.T) { tests := []struct { name string - newObject file.File + newObject models.File wantErr bool }{ { "full", - &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.BaseFile{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -64,7 +64,7 @@ func Test_fileFileStore_Create(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -77,9 +77,9 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "video file", - &file.VideoFile{ - BaseFile: &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.VideoFile{ + BaseFile: &models.BaseFile{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -88,7 +88,7 @@ func Test_fileFileStore_Create(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + 
Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -110,9 +110,9 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "image file", - &file.ImageFile{ - BaseFile: &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.ImageFile{ + BaseFile: &models.BaseFile{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -121,7 +121,7 @@ func Test_fileFileStore_Create(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -138,15 +138,15 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "duplicate path", - &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.BaseFile{ + DirEntry: models.DirEntry{ ModTime: fileModTime, }, Path: getFilePath(folderIdxWithFiles, getFileBaseName(fileIdxZip)), ParentFolderID: folderIDs[folderIdxWithFiles], Basename: getFileBaseName(fileIdxZip), Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -159,22 +159,22 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "empty basename", - &file.BaseFile{ + &models.BaseFile{ ParentFolderID: folderIDs[folderIdxWithFiles], }, true, }, { "missing folder id", - &file.BaseFile{ + &models.BaseFile{ Basename: basename, }, true, }, { "invalid folder id", - &file.BaseFile{ - DirEntry: file.DirEntry{}, + &models.BaseFile{ + DirEntry: models.DirEntry{}, ParentFolderID: invalidFolderID, Basename: basename, }, @@ -182,8 +182,8 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "invalid zip file id", - &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.BaseFile{ + DirEntry: models.DirEntry{ ZipFileID: &invalidFileID, }, Basename: basename, @@ -210,15 +210,15 @@ func Test_fileFileStore_Create(t *testing.T) { 
assert.NotZero(s.Base().ID) - var copy file.File + var copy models.File switch t := s.(type) { - case *file.BaseFile: + case *models.BaseFile: v := *t copy = &v - case *file.VideoFile: + case *models.VideoFile: v := *t copy = &v - case *file.ImageFile: + case *models.ImageFile: v := *t copy = &v } @@ -266,14 +266,14 @@ func Test_fileStore_Update(t *testing.T) { tests := []struct { name string - updatedObject file.File + updatedObject models.File wantErr bool }{ { "full", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxInZip], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -282,7 +282,7 @@ func Test_fileStore_Update(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -295,10 +295,10 @@ func Test_fileStore_Update(t *testing.T) { }, { "video file", - &file.VideoFile{ - BaseFile: &file.BaseFile{ + &models.VideoFile{ + BaseFile: &models.BaseFile{ ID: fileIDs[fileIdxStartVideoFiles], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -307,7 +307,7 @@ func Test_fileStore_Update(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -329,10 +329,10 @@ func Test_fileStore_Update(t *testing.T) { }, { "image file", - &file.ImageFile{ - BaseFile: &file.BaseFile{ + &models.ImageFile{ + BaseFile: &models.BaseFile{ ID: fileIDs[fileIdxStartImageFiles], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -341,7 +341,7 @@ 
func Test_fileStore_Update(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -358,16 +358,16 @@ func Test_fileStore_Update(t *testing.T) { }, { "duplicate path", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxInZip], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ModTime: fileModTime, }, Path: getFilePath(folderIdxWithFiles, getFileBaseName(fileIdxZip)), ParentFolderID: folderIDs[folderIdxWithFiles], Basename: getFileBaseName(fileIdxZip), Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -380,7 +380,7 @@ func Test_fileStore_Update(t *testing.T) { }, { "clear zip", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxInZip], Path: getFilePath(folderIdxWithFiles, getFileBaseName(fileIdxZip)+".renamed"), Basename: getFileBaseName(fileIdxZip) + ".renamed", @@ -390,7 +390,7 @@ func Test_fileStore_Update(t *testing.T) { }, { "clear folder", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxZip], Path: basename, }, @@ -398,7 +398,7 @@ func Test_fileStore_Update(t *testing.T) { }, { "invalid parent folder id", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxZip], Path: basename, ParentFolderID: invalidFolderID, @@ -407,10 +407,10 @@ func Test_fileStore_Update(t *testing.T) { }, { "invalid zip file id", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxZip], Path: basename, - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &invalidFileID, }, ParentFolderID: folderIDs[folderIdxWithFiles], @@ -450,7 +450,7 @@ func Test_fileStore_Update(t *testing.T) { } } -func makeFileWithID(index int) file.File { +func makeFileWithID(index int) models.File { ret := makeFile(index) ret.Base().Path = getFilePath(fileFolders[index], getFileBaseName(index)) 
ret.Base().ID = fileIDs[index] @@ -461,8 +461,8 @@ func makeFileWithID(index int) file.File { func Test_fileStore_Find(t *testing.T) { tests := []struct { name string - id file.ID - want file.File + id models.FileID + want models.File wantErr bool }{ { @@ -473,7 +473,7 @@ func Test_fileStore_Find(t *testing.T) { }, { "invalid", - file.ID(invalidID), + models.FileID(invalidID), nil, true, }, @@ -529,7 +529,7 @@ func Test_FileStore_FindByPath(t *testing.T) { tests := []struct { name string path string - want file.File + want models.File wantErr bool }{ { @@ -565,31 +565,31 @@ func Test_FileStore_FindByPath(t *testing.T) { func TestFileStore_FindByFingerprint(t *testing.T) { tests := []struct { name string - fp file.Fingerprint - want []file.File + fp models.Fingerprint + want []models.File wantErr bool }{ { "by MD5", - file.Fingerprint{ + models.Fingerprint{ Type: "MD5", Fingerprint: getPrefixedStringValue("file", fileIdxZip, "md5"), }, - []file.File{makeFileWithID(fileIdxZip)}, + []models.File{makeFileWithID(fileIdxZip)}, false, }, { "by OSHASH", - file.Fingerprint{ + models.Fingerprint{ Type: "OSHASH", Fingerprint: getPrefixedStringValue("file", fileIdxZip, "oshash"), }, - []file.File{makeFileWithID(fileIdxZip)}, + []models.File{makeFileWithID(fileIdxZip)}, false, }, { "non-existing", - file.Fingerprint{ + models.Fingerprint{ Type: "OSHASH", Fingerprint: "foo", }, @@ -617,7 +617,7 @@ func TestFileStore_FindByFingerprint(t *testing.T) { func TestFileStore_IsPrimary(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID want bool }{ { diff --git a/pkg/sqlite/fingerprint.go b/pkg/sqlite/fingerprint.go index 0f7c36d12..49bae54ca 100644 --- a/pkg/sqlite/fingerprint.go +++ b/pkg/sqlite/fingerprint.go @@ -6,7 +6,7 @@ import ( "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "gopkg.in/guregu/null.v4" ) @@ -23,8 +23,8 @@ func (r 
fingerprintQueryRow) valid() bool { return r.Type.Valid } -func (r *fingerprintQueryRow) resolve() file.Fingerprint { - return file.Fingerprint{ +func (r *fingerprintQueryRow) resolve() models.Fingerprint { + return models.Fingerprint{ Type: r.Type.String, Fingerprint: r.Fingerprint, } @@ -45,7 +45,7 @@ var FingerprintReaderWriter = &fingerprintQueryBuilder{ tableMgr: fingerprintTableMgr, } -func (qb *fingerprintQueryBuilder) insert(ctx context.Context, fileID file.ID, f file.Fingerprint) error { +func (qb *fingerprintQueryBuilder) insert(ctx context.Context, fileID models.FileID, f models.Fingerprint) error { table := qb.table() q := dialect.Insert(table).Cols(fileIDColumn, "type", "fingerprint").Vals( goqu.Vals{fileID, f.Type, f.Fingerprint}, @@ -58,7 +58,7 @@ func (qb *fingerprintQueryBuilder) insert(ctx context.Context, fileID file.ID, f return nil } -func (qb *fingerprintQueryBuilder) insertJoins(ctx context.Context, fileID file.ID, f []file.Fingerprint) error { +func (qb *fingerprintQueryBuilder) insertJoins(ctx context.Context, fileID models.FileID, f []models.Fingerprint) error { for _, ff := range f { if err := qb.insert(ctx, fileID, ff); err != nil { return err @@ -68,7 +68,7 @@ func (qb *fingerprintQueryBuilder) insertJoins(ctx context.Context, fileID file. 
return nil } -func (qb *fingerprintQueryBuilder) replaceJoins(ctx context.Context, fileID file.ID, f []file.Fingerprint) error { +func (qb *fingerprintQueryBuilder) replaceJoins(ctx context.Context, fileID models.FileID, f []models.Fingerprint) error { if err := qb.destroy(ctx, []int{int(fileID)}); err != nil { return err } diff --git a/pkg/sqlite/folder.go b/pkg/sqlite/folder.go index ff1e8a2c5..26cbf8962 100644 --- a/pkg/sqlite/folder.go +++ b/pkg/sqlite/folder.go @@ -10,23 +10,23 @@ import ( "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" "github.com/jmoiron/sqlx" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "gopkg.in/guregu/null.v4" ) const folderTable = "folders" type folderRow struct { - ID file.FolderID `db:"id" goqu:"skipinsert"` - Path string `db:"path"` - ZipFileID null.Int `db:"zip_file_id"` - ParentFolderID null.Int `db:"parent_folder_id"` - ModTime Timestamp `db:"mod_time"` - CreatedAt Timestamp `db:"created_at"` - UpdatedAt Timestamp `db:"updated_at"` + ID models.FolderID `db:"id" goqu:"skipinsert"` + Path string `db:"path"` + ZipFileID null.Int `db:"zip_file_id"` + ParentFolderID null.Int `db:"parent_folder_id"` + ModTime Timestamp `db:"mod_time"` + CreatedAt Timestamp `db:"created_at"` + UpdatedAt Timestamp `db:"updated_at"` } -func (r *folderRow) fromFolder(o file.Folder) { +func (r *folderRow) fromFolder(o models.Folder) { r.ID = o.ID r.Path = o.Path r.ZipFileID = nullIntFromFileIDPtr(o.ZipFileID) @@ -43,10 +43,10 @@ type folderQueryRow struct { ZipFolderPath null.String `db:"zip_folder_path"` } -func (r *folderQueryRow) resolve() *file.Folder { - ret := &file.Folder{ +func (r *folderQueryRow) resolve() *models.Folder { + ret := &models.Folder{ ID: r.ID, - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: nullIntFileIDPtr(r.ZipFileID), ModTime: r.ModTime.Timestamp, }, @@ -57,7 +57,7 @@ func (r *folderQueryRow) resolve() *file.Folder { } if ret.ZipFileID != nil && 
r.ZipFolderPath.Valid && r.ZipBasename.Valid { - ret.ZipFile = &file.BaseFile{ + ret.ZipFile = &models.BaseFile{ ID: *ret.ZipFileID, Path: filepath.Join(r.ZipFolderPath.String, r.ZipBasename.String), Basename: r.ZipBasename.String, @@ -69,8 +69,8 @@ func (r *folderQueryRow) resolve() *file.Folder { type folderQueryRows []folderQueryRow -func (r folderQueryRows) resolve() []*file.Folder { - var ret []*file.Folder +func (r folderQueryRows) resolve() []*models.Folder { + var ret []*models.Folder for _, row := range r { f := row.resolve() @@ -97,7 +97,7 @@ func NewFolderStore() *FolderStore { } } -func (qb *FolderStore) Create(ctx context.Context, f *file.Folder) error { +func (qb *FolderStore) Create(ctx context.Context, f *models.Folder) error { var r folderRow r.fromFolder(*f) @@ -107,12 +107,12 @@ func (qb *FolderStore) Create(ctx context.Context, f *file.Folder) error { } // only assign id once we are successful - f.ID = file.FolderID(id) + f.ID = models.FolderID(id) return nil } -func (qb *FolderStore) Update(ctx context.Context, updatedObject *file.Folder) error { +func (qb *FolderStore) Update(ctx context.Context, updatedObject *models.Folder) error { var r folderRow r.fromFolder(*updatedObject) @@ -123,7 +123,7 @@ func (qb *FolderStore) Update(ctx context.Context, updatedObject *file.Folder) e return nil } -func (qb *FolderStore) Destroy(ctx context.Context, id file.FolderID) error { +func (qb *FolderStore) Destroy(ctx context.Context, id models.FolderID) error { return qb.tableMgr.destroyExisting(ctx, []int{int(id)}) } @@ -179,7 +179,7 @@ func (qb *FolderStore) countDataset() *goqu.SelectDataset { ) } -func (qb *FolderStore) get(ctx context.Context, q *goqu.SelectDataset) (*file.Folder, error) { +func (qb *FolderStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Folder, error) { ret, err := qb.getMany(ctx, q) if err != nil { return nil, err @@ -192,7 +192,7 @@ func (qb *FolderStore) get(ctx context.Context, q *goqu.SelectDataset) (*file.Fo return 
ret[0], nil } -func (qb *FolderStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*file.Folder, error) { +func (qb *FolderStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*models.Folder, error) { const single = false var rows folderQueryRows if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { @@ -210,7 +210,7 @@ func (qb *FolderStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*f return rows.resolve(), nil } -func (qb *FolderStore) Find(ctx context.Context, id file.FolderID) (*file.Folder, error) { +func (qb *FolderStore) Find(ctx context.Context, id models.FolderID) (*models.Folder, error) { q := qb.selectDataset().Where(qb.tableMgr.byID(id)) ret, err := qb.get(ctx, q) @@ -221,7 +221,7 @@ func (qb *FolderStore) Find(ctx context.Context, id file.FolderID) (*file.Folder return ret, nil } -func (qb *FolderStore) FindByPath(ctx context.Context, p string) (*file.Folder, error) { +func (qb *FolderStore) FindByPath(ctx context.Context, p string) (*models.Folder, error) { q := qb.selectDataset().Prepared(true).Where(qb.table().Col("path").Eq(p)) ret, err := qb.get(ctx, q) @@ -232,7 +232,7 @@ func (qb *FolderStore) FindByPath(ctx context.Context, p string) (*file.Folder, return ret, nil } -func (qb *FolderStore) FindByParentFolderID(ctx context.Context, parentFolderID file.FolderID) ([]*file.Folder, error) { +func (qb *FolderStore) FindByParentFolderID(ctx context.Context, parentFolderID models.FolderID) ([]*models.Folder, error) { q := qb.selectDataset().Where(qb.table().Col("parent_folder_id").Eq(int(parentFolderID))) ret, err := qb.getMany(ctx, q) @@ -261,7 +261,7 @@ func (qb *FolderStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.Selec // FindAllInPaths returns the all folders that are or are within any of the given paths. // Returns all if limit is < 0. // Returns all folders if p is empty. 
-func (qb *FolderStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*file.Folder, error) { +func (qb *FolderStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*models.Folder, error) { q := qb.selectDataset().Prepared(true) q = qb.allInPaths(q, p) @@ -300,7 +300,7 @@ func (qb *FolderStore) CountAllInPaths(ctx context.Context, p []string) (int, er // return qb.getMany(ctx, q) // } -func (qb *FolderStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*file.Folder, error) { +func (qb *FolderStore) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Folder, error) { table := qb.table() q := qb.selectDataset().Prepared(true).Where( diff --git a/pkg/sqlite/folder_test.go b/pkg/sqlite/folder_test.go index 71e45305a..1d948d063 100644 --- a/pkg/sqlite/folder_test.go +++ b/pkg/sqlite/folder_test.go @@ -9,13 +9,13 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" ) var ( - invalidFolderID = file.FolderID(invalidID) - invalidFileID = file.ID(invalidID) + invalidFolderID = models.FolderID(invalidID) + invalidFileID = models.FileID(invalidID) ) func Test_FolderStore_Create(t *testing.T) { @@ -28,13 +28,13 @@ func Test_FolderStore_Create(t *testing.T) { tests := []struct { name string - newObject file.Folder + newObject models.Folder wantErr bool }{ { "full", - file.Folder{ - DirEntry: file.DirEntry{ + models.Folder{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -47,7 +47,7 @@ func Test_FolderStore_Create(t *testing.T) { }, { "invalid parent folder id", - file.Folder{ + models.Folder{ Path: path, ParentFolderID: &invalidFolderID, }, @@ -55,8 +55,8 @@ func Test_FolderStore_Create(t *testing.T) { }, { "invalid zip file id", - file.Folder{ - DirEntry: file.DirEntry{ + models.Folder{ + DirEntry: models.DirEntry{ ZipFileID: 
&invalidFileID, }, Path: path, @@ -109,14 +109,14 @@ func Test_FolderStore_Update(t *testing.T) { tests := []struct { name string - updatedObject *file.Folder + updatedObject *models.Folder wantErr bool }{ { "full", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxWithParentFolder], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -129,7 +129,7 @@ func Test_FolderStore_Update(t *testing.T) { }, { "clear zip", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxInZip], Path: path, }, @@ -137,7 +137,7 @@ func Test_FolderStore_Update(t *testing.T) { }, { "clear folder", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxWithParentFolder], Path: path, }, @@ -145,7 +145,7 @@ func Test_FolderStore_Update(t *testing.T) { }, { "invalid parent folder id", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxWithParentFolder], Path: path, ParentFolderID: &invalidFolderID, @@ -154,9 +154,9 @@ func Test_FolderStore_Update(t *testing.T) { }, { "invalid zip file id", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxWithParentFolder], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &invalidFileID, }, Path: path, @@ -192,7 +192,7 @@ func Test_FolderStore_Update(t *testing.T) { } } -func makeFolderWithID(index int) *file.Folder { +func makeFolderWithID(index int) *models.Folder { ret := makeFolder(index) ret.ID = folderIDs[index] @@ -207,7 +207,7 @@ func Test_FolderStore_FindByPath(t *testing.T) { tests := []struct { name string path string - want *file.Folder + want *models.Folder wantErr bool }{ { diff --git a/pkg/sqlite/gallery.go b/pkg/sqlite/gallery.go index b7ece948d..7bdf98bd3 100644 --- a/pkg/sqlite/gallery.go +++ b/pkg/sqlite/gallery.go @@ -11,7 +11,6 @@ import ( "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" "github.com/jmoiron/sqlx" - "github.com/stashapp/stash/pkg/file" 
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "gopkg.in/guregu/null.v4" @@ -163,7 +162,7 @@ func (qb *GalleryStore) selectDataset() *goqu.SelectDataset { ) } -func (qb *GalleryStore) Create(ctx context.Context, newObject *models.Gallery, fileIDs []file.ID) error { +func (qb *GalleryStore) Create(ctx context.Context, newObject *models.Gallery, fileIDs []models.FileID) error { var r galleryRow r.fromGallery(*newObject) @@ -230,7 +229,7 @@ func (qb *GalleryStore) Update(ctx context.Context, updatedObject *models.Galler } if updatedObject.Files.Loaded() { - fileIDs := make([]file.ID, len(updatedObject.Files.List())) + fileIDs := make([]models.FileID, len(updatedObject.Files.List())) for i, f := range updatedObject.Files.List() { fileIDs[i] = f.Base().ID } @@ -287,7 +286,7 @@ func (qb *GalleryStore) Destroy(ctx context.Context, id int) error { return qb.tableMgr.destroyExisting(ctx, []int{id}) } -func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]file.File, error) { +func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]models.File, error) { fileIDs, err := qb.filesRepository().get(ctx, id) if err != nil { return nil, err @@ -299,13 +298,13 @@ func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]file.File, erro return nil, err } - ret := make([]file.File, len(files)) + ret := make([]models.File, len(files)) copy(ret, files) return ret, nil } -func (qb *GalleryStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) { +func (qb *GalleryStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { const primaryOnly = false return qb.filesRepository().getMany(ctx, ids, primaryOnly) } @@ -412,7 +411,7 @@ func (qb *GalleryStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]* return ret, nil } -func (qb *GalleryStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) { +func (qb *GalleryStore) FindByFileID(ctx 
context.Context, fileID models.FileID) ([]*models.Gallery, error) { sq := dialect.From(galleriesFilesJoinTable).Select(galleriesFilesJoinTable.Col(galleryIDColumn)).Where( galleriesFilesJoinTable.Col(fileIDColumn).Eq(fileID), ) @@ -425,14 +424,14 @@ func (qb *GalleryStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*mo return ret, nil } -func (qb *GalleryStore) CountByFileID(ctx context.Context, fileID file.ID) (int, error) { +func (qb *GalleryStore) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { joinTable := galleriesFilesJoinTable q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(fileIDColumn).Eq(fileID)) return count(ctx, q) } -func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error) { +func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Gallery, error) { fingerprintTable := fingerprintTableMgr.table var ex []exp.Expression @@ -460,20 +459,20 @@ func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []file.Finger } func (qb *GalleryStore) FindByChecksum(ctx context.Context, checksum string) ([]*models.Gallery, error) { - return qb.FindByFingerprints(ctx, []file.Fingerprint{ + return qb.FindByFingerprints(ctx, []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: checksum, }, }) } func (qb *GalleryStore) FindByChecksums(ctx context.Context, checksums []string) ([]*models.Gallery, error) { - fingerprints := make([]file.Fingerprint, len(checksums)) + fingerprints := make([]models.Fingerprint, len(checksums)) for i, c := range checksums { - fingerprints[i] = file.Fingerprint{ - Type: file.FingerprintTypeMD5, + fingerprints[i] = models.Fingerprint{ + Type: models.FingerprintTypeMD5, Fingerprint: c, } } @@ -519,7 +518,7 @@ func (qb *GalleryStore) FindByPath(ctx context.Context, p string) ([]*models.Gal return ret, nil } -func (qb *GalleryStore) 
FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) { +func (qb *GalleryStore) FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) { table := qb.table() sq := dialect.From(table).Select(table.Col(idColumn)).Where( @@ -1118,9 +1117,9 @@ func (qb *GalleryStore) filesRepository() *filesRepository { } } -func (qb *GalleryStore) AddFileID(ctx context.Context, id int, fileID file.ID) error { +func (qb *GalleryStore) AddFileID(ctx context.Context, id int, fileID models.FileID) error { const firstPrimary = false - return galleriesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []file.ID{fileID}) + return galleriesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []models.FileID{fileID}) } func (qb *GalleryStore) performersRepository() *joinRepository { diff --git a/pkg/sqlite/gallery_test.go b/pkg/sqlite/gallery_test.go index d33d5ba2a..c8dbe0276 100644 --- a/pkg/sqlite/gallery_test.go +++ b/pkg/sqlite/gallery_test.go @@ -10,7 +10,6 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" ) @@ -97,7 +96,7 @@ func Test_galleryQueryBuilder_Create(t *testing.T) { Rating: &rating, Organized: true, StudioID: &studioIDs[studioIdxWithScene], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ galleryFile, }), CreatedAt: createdAt, @@ -145,9 +144,9 @@ func Test_galleryQueryBuilder_Create(t *testing.T) { assert := assert.New(t) s := tt.newObject - var fileIDs []file.ID + var fileIDs []models.FileID if s.Files.Loaded() { - fileIDs = []file.ID{s.Files.List()[0].Base().ID} + fileIDs = []models.FileID{s.Files.List()[0].Base().ID} } if err := qb.Create(ctx, &s, fileIDs); (err != nil) != tt.wantErr { @@ -195,7 +194,7 @@ func Test_galleryQueryBuilder_Create(t *testing.T) { } } -func makeGalleryFileWithID(i int) *file.BaseFile { +func makeGalleryFileWithID(i int) *models.BaseFile { ret := 
makeGalleryFile(i) ret.ID = galleryFileIDs[i] return ret @@ -229,7 +228,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) { Rating: &rating, Organized: true, StudioID: &studioIDs[studioIdxWithScene], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeGalleryFileWithID(galleryIdxWithScene), }), CreatedAt: createdAt, @@ -449,7 +448,7 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) { Rating: &rating, Organized: true, StudioID: &studioIDs[studioIdxWithGallery], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeGalleryFile(galleryIdxWithImage), }), CreatedAt: createdAt, @@ -466,7 +465,7 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) { clearGalleryPartial(), models.Gallery{ ID: galleryIDs[galleryIdxWithImage], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeGalleryFile(galleryIdxWithImage), }), SceneIDs: models.NewRelatedIDs([]int{}), @@ -844,7 +843,7 @@ func makeGalleryWithID(index int) *models.Gallery { ret := makeGallery(index, includeScenes) ret.ID = galleryIDs[index] - ret.Files = models.NewRelatedFiles([]file.File{makeGalleryFile(index)}) + ret.Files = models.NewRelatedFiles([]models.File{makeGalleryFile(index)}) return ret } @@ -1281,7 +1280,7 @@ func galleriesToIDs(i []*models.Gallery) []int { func Test_galleryStore_FindByFileID(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID include []int exclude []int }{ @@ -1330,7 +1329,7 @@ func Test_galleryStore_FindByFileID(t *testing.T) { func Test_galleryStore_FindByFolderID(t *testing.T) { tests := []struct { name string - folderID file.FolderID + folderID models.FolderID include []int exclude []int }{ diff --git a/pkg/sqlite/image.go b/pkg/sqlite/image.go index 20e7801d8..359826426 100644 --- a/pkg/sqlite/image.go +++ b/pkg/sqlite/image.go @@ -8,7 +8,6 @@ import ( "path/filepath" "github.com/jmoiron/sqlx" - 
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "gopkg.in/guregu/null.v4" @@ -150,7 +149,7 @@ func (qb *ImageStore) selectDataset() *goqu.SelectDataset { checksum, goqu.On( checksum.Col(fileIDColumn).Eq(imagesFilesJoinTable.Col(fileIDColumn)), - checksum.Col("type").Eq(file.FingerprintTypeMD5), + checksum.Col("type").Eq(models.FingerprintTypeMD5), ), ).Select( qb.table().All(), @@ -271,7 +270,7 @@ func (qb *ImageStore) Update(ctx context.Context, updatedObject *models.Image) e } if updatedObject.Files.Loaded() { - fileIDs := make([]file.ID, len(updatedObject.Files.List())) + fileIDs := make([]models.FileID, len(updatedObject.Files.List())) for i, f := range updatedObject.Files.List() { fileIDs[i] = f.Base().ID } @@ -389,7 +388,7 @@ func (qb *ImageStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo return ret, nil } -func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]file.File, error) { +func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]models.File, error) { fileIDs, err := qb.filesRepository().get(ctx, id) if err != nil { return nil, err @@ -404,12 +403,12 @@ func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]file.File, error) return files, nil } -func (qb *ImageStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) { +func (qb *ImageStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { const primaryOnly = false return qb.filesRepository().getMany(ctx, ids, primaryOnly) } -func (qb *ImageStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Image, error) { +func (qb *ImageStore) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) { table := qb.table() sq := dialect.From(table). 
@@ -427,14 +426,14 @@ func (qb *ImageStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*mode return ret, nil } -func (qb *ImageStore) CountByFileID(ctx context.Context, fileID file.ID) (int, error) { +func (qb *ImageStore) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { joinTable := imagesFilesJoinTable q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(fileIDColumn).Eq(fileID)) return count(ctx, q) } -func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Image, error) { +func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Image, error) { table := qb.table() fingerprintTable := fingerprintTableMgr.table @@ -467,9 +466,9 @@ func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []file.Fingerpr } func (qb *ImageStore) FindByChecksum(ctx context.Context, checksum string) ([]*models.Image, error) { - return qb.FindByFingerprints(ctx, []file.Fingerprint{ + return qb.FindByFingerprints(ctx, []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: checksum, }, }) @@ -523,7 +522,7 @@ func (qb *ImageStore) OCountByPerformerID(ctx context.Context, performerID int) return ret, nil } -func (qb *ImageStore) FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Image, error) { +func (qb *ImageStore) FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Image, error) { table := qb.table() fileTable := goqu.T(fileTable) @@ -548,7 +547,7 @@ func (qb *ImageStore) FindByFolderID(ctx context.Context, folderID file.FolderID return ret, nil } -func (qb *ImageStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) { +func (qb *ImageStore) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) { table := qb.table() fileTable := goqu.T(fileTable) @@ -1043,9 +1042,9 @@ func (qb *ImageStore) 
filesRepository() *filesRepository { } } -func (qb *ImageStore) AddFileID(ctx context.Context, id int, fileID file.ID) error { +func (qb *ImageStore) AddFileID(ctx context.Context, id int, fileID models.FileID) error { const firstPrimary = false - return imagesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []file.ID{fileID}) + return imagesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []models.FileID{fileID}) } func (qb *ImageStore) GetGalleryIDs(ctx context.Context, imageID int) ([]int, error) { diff --git a/pkg/sqlite/image_test.go b/pkg/sqlite/image_test.go index 4f3ebcc22..900c0b794 100644 --- a/pkg/sqlite/image_test.go +++ b/pkg/sqlite/image_test.go @@ -10,7 +10,6 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" ) @@ -97,8 +96,8 @@ func Test_imageQueryBuilder_Create(t *testing.T) { Organized: true, OCounter: ocounter, StudioID: &studioIDs[studioIdxWithImage], - Files: models.NewRelatedFiles([]file.File{ - imageFile.(*file.ImageFile), + Files: models.NewRelatedFiles([]models.File{ + imageFile.(*models.ImageFile), }), PrimaryFileID: &imageFile.Base().ID, Path: imageFile.Base().Path, @@ -146,7 +145,7 @@ func Test_imageQueryBuilder_Create(t *testing.T) { runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { assert := assert.New(t) - var fileIDs []file.ID + var fileIDs []models.FileID if tt.newObject.Files.Loaded() { for _, f := range tt.newObject.Files.List() { fileIDs = append(fileIDs, f.Base().ID) @@ -205,7 +204,7 @@ func clearImageFileIDs(image *models.Image) { } } -func makeImageFileWithID(i int) *file.ImageFile { +func makeImageFileWithID(i int) *models.ImageFile { ret := makeImageFile(i) ret.ID = imageFileIDs[i] return ret @@ -444,7 +443,7 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) { Organized: true, OCounter: ocounter, StudioID: &studioIDs[studioIdxWithImage], - Files: models.NewRelatedFiles([]file.File{ + Files: 
models.NewRelatedFiles([]models.File{ makeImageFile(imageIdx1WithGallery), }), CreatedAt: createdAt, @@ -462,7 +461,7 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) { models.Image{ ID: imageIDs[imageIdx1WithGallery], OCounter: getOCounter(imageIdx1WithGallery), - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeImageFile(imageIdx1WithGallery), }), GalleryIDs: models.NewRelatedIDs([]int{}), @@ -965,7 +964,7 @@ func makeImageWithID(index int) *models.Image { ret := makeImage(index) ret.ID = imageIDs[index] - ret.Files = models.NewRelatedFiles([]file.File{makeImageFile(index)}) + ret.Files = models.NewRelatedFiles([]models.File{makeImageFile(index)}) return ret } @@ -1153,15 +1152,15 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) { tests := []struct { name string - fingerprints []file.Fingerprint + fingerprints []models.Fingerprint want []*models.Image wantErr bool }{ { "valid", - []file.Fingerprint{ + []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getChecksum(imageIdxWithGallery), }, }, @@ -1170,9 +1169,9 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) { }, { "invalid", - []file.Fingerprint{ + []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: "invalid checksum", }, }, @@ -1181,9 +1180,9 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) { }, { "with performers", - []file.Fingerprint{ + []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getChecksum(imageIdxWithTwoPerformers), }, }, @@ -1192,9 +1191,9 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) { }, { "with tags", - []file.Fingerprint{ + []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getChecksum(imageIdxWithTwoTags), }, }, @@ -1316,7 +1315,7 @@ func imagesToIDs(i 
[]*models.Image) []int { func Test_imageStore_FindByFileID(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID include []int exclude []int }{ @@ -1365,7 +1364,7 @@ func Test_imageStore_FindByFileID(t *testing.T) { func Test_imageStore_FindByFolderID(t *testing.T) { tests := []struct { name string - folderID file.FolderID + folderID models.FolderID include []int exclude []int }{ @@ -1420,7 +1419,7 @@ func Test_imageStore_FindByFolderID(t *testing.T) { func Test_imageStore_FindByZipFileID(t *testing.T) { tests := []struct { name string - zipFileID file.ID + zipFileID models.FileID include []int exclude []int }{ @@ -1868,11 +1867,12 @@ func verifyImagesResolution(t *testing.T, resolution models.ResolutionEnum) { t.Errorf("Error loading primary file: %s", err.Error()) return nil } - asFrame, ok := image.Files.Primary().(file.VisualFile) + f := image.Files.Primary() + vf, ok := f.(models.VisualFile) if !ok { - t.Errorf("Error: Associated primary file of image is not of type VisualFile") + t.Errorf("Error: image primary file is not a visual file (is type %T)", f) } - verifyImageResolution(t, asFrame.GetHeight(), resolution) + verifyImageResolution(t, vf.GetHeight(), resolution) } return nil diff --git a/pkg/sqlite/repository.go b/pkg/sqlite/repository.go index 2292e868a..c65965fe7 100644 --- a/pkg/sqlite/repository.go +++ b/pkg/sqlite/repository.go @@ -9,7 +9,6 @@ import ( "github.com/jmoiron/sqlx" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) @@ -336,7 +335,7 @@ type captionRepository struct { repository } -func (r *captionRepository) get(ctx context.Context, id file.ID) ([]*models.VideoCaption, error) { +func (r *captionRepository) get(ctx context.Context, id models.FileID) ([]*models.VideoCaption, error) { query := fmt.Sprintf("SELECT %s, %s, %s from %s WHERE %s = ?", captionCodeColumn, captionFilenameColumn, captionTypeColumn, r.tableName, r.idColumn) var ret []*models.VideoCaption err := 
r.queryFunc(ctx, query, []interface{}{id}, false, func(rows *sqlx.Rows) error { @@ -359,12 +358,12 @@ func (r *captionRepository) get(ctx context.Context, id file.ID) ([]*models.Vide return ret, err } -func (r *captionRepository) insert(ctx context.Context, id file.ID, caption *models.VideoCaption) (sql.Result, error) { +func (r *captionRepository) insert(ctx context.Context, id models.FileID, caption *models.VideoCaption) (sql.Result, error) { stmt := fmt.Sprintf("INSERT INTO %s (%s, %s, %s, %s) VALUES (?, ?, ?, ?)", r.tableName, r.idColumn, captionCodeColumn, captionFilenameColumn, captionTypeColumn) return r.tx.Exec(ctx, stmt, id, caption.LanguageCode, caption.Filename, caption.CaptionType) } -func (r *captionRepository) replace(ctx context.Context, id file.ID, captions []*models.VideoCaption) error { +func (r *captionRepository) replace(ctx context.Context, id models.FileID, captions []*models.VideoCaption) error { if err := r.destroy(ctx, []int{int(id)}); err != nil { return err } @@ -443,12 +442,12 @@ type filesRepository struct { } type relatedFileRow struct { - ID int `db:"id"` - FileID file.ID `db:"file_id"` - Primary bool `db:"primary"` + ID int `db:"id"` + FileID models.FileID `db:"file_id"` + Primary bool `db:"primary"` } -func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bool) ([][]file.ID, error) { +func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bool) ([][]models.FileID, error) { var primaryClause string if primaryOnly { primaryClause = " AND `primary` = 1" @@ -476,7 +475,7 @@ func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bo return nil, err } - ret := make([][]file.ID, len(ids)) + ret := make([][]models.FileID, len(ids)) idToIndex := make(map[int]int) for i, id := range ids { idToIndex[id] = i @@ -488,7 +487,7 @@ func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bo if row.Primary { // prepend to list - ret[idToIndex[id]] = 
append([]file.ID{fileID}, ret[idToIndex[id]]...) + ret[idToIndex[id]] = append([]models.FileID{fileID}, ret[idToIndex[id]]...) } else { ret[idToIndex[id]] = append(ret[idToIndex[id]], row.FileID) } @@ -497,15 +496,15 @@ func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bo return ret, nil } -func (r *filesRepository) get(ctx context.Context, id int) ([]file.ID, error) { +func (r *filesRepository) get(ctx context.Context, id int) ([]models.FileID, error) { query := fmt.Sprintf("SELECT file_id, `primary` from %s WHERE %s = ?", r.tableName, r.idColumn) type relatedFile struct { - FileID file.ID `db:"file_id"` - Primary bool `db:"primary"` + FileID models.FileID `db:"file_id"` + Primary bool `db:"primary"` } - var ret []file.ID + var ret []models.FileID if err := r.queryFunc(ctx, query, []interface{}{id}, false, func(rows *sqlx.Rows) error { var f relatedFile @@ -515,7 +514,7 @@ func (r *filesRepository) get(ctx context.Context, id int) ([]file.ID, error) { if f.Primary { // prepend to list - ret = append([]file.ID{f.FileID}, ret...) + ret = append([]models.FileID{f.FileID}, ret...) 
} else { ret = append(ret, f.FileID) } diff --git a/pkg/sqlite/scene.go b/pkg/sqlite/scene.go index 8fc37937b..215c17409 100644 --- a/pkg/sqlite/scene.go +++ b/pkg/sqlite/scene.go @@ -17,7 +17,6 @@ import ( "gopkg.in/guregu/null.v4" "gopkg.in/guregu/null.v4/zero" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "github.com/stashapp/stash/pkg/utils" @@ -232,13 +231,13 @@ func (qb *SceneStore) selectDataset() *goqu.SelectDataset { checksum, goqu.On( checksum.Col(fileIDColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn)), - checksum.Col("type").Eq(file.FingerprintTypeMD5), + checksum.Col("type").Eq(models.FingerprintTypeMD5), ), ).LeftJoin( oshash, goqu.On( oshash.Col(fileIDColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn)), - oshash.Col("type").Eq(file.FingerprintTypeOshash), + oshash.Col("type").Eq(models.FingerprintTypeOshash), ), ).Select( qb.table().All(), @@ -250,7 +249,7 @@ func (qb *SceneStore) selectDataset() *goqu.SelectDataset { ) } -func (qb *SceneStore) Create(ctx context.Context, newObject *models.Scene, fileIDs []file.ID) error { +func (qb *SceneStore) Create(ctx context.Context, newObject *models.Scene, fileIDs []models.FileID) error { var r sceneRow r.fromScene(*newObject) @@ -411,7 +410,7 @@ func (qb *SceneStore) Update(ctx context.Context, updatedObject *models.Scene) e } if updatedObject.Files.Loaded() { - fileIDs := make([]file.ID, len(updatedObject.Files.List())) + fileIDs := make([]models.FileID, len(updatedObject.Files.List())) for i, f := range updatedObject.Files.List() { fileIDs[i] = f.ID } @@ -538,7 +537,7 @@ func (qb *SceneStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo return ret, nil } -func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*file.VideoFile, error) { +func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*models.VideoFile, error) { fileIDs, err := qb.filesRepository().get(ctx, id) if err != nil { return nil, 
err @@ -550,10 +549,10 @@ func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*file.VideoFile, return nil, err } - ret := make([]*file.VideoFile, len(files)) + ret := make([]*models.VideoFile, len(files)) for i, f := range files { var ok bool - ret[i], ok = f.(*file.VideoFile) + ret[i], ok = f.(*models.VideoFile) if !ok { return nil, fmt.Errorf("expected file to be *file.VideoFile not %T", f) } @@ -562,12 +561,12 @@ func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*file.VideoFile, return ret, nil } -func (qb *SceneStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) { +func (qb *SceneStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { const primaryOnly = false return qb.filesRepository().getMany(ctx, ids, primaryOnly) } -func (qb *SceneStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) { +func (qb *SceneStore) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { sq := dialect.From(scenesFilesJoinTable).Select(scenesFilesJoinTable.Col(sceneIDColumn)).Where( scenesFilesJoinTable.Col(fileIDColumn).Eq(fileID), ) @@ -580,7 +579,7 @@ func (qb *SceneStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*mode return ret, nil } -func (qb *SceneStore) FindByPrimaryFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) { +func (qb *SceneStore) FindByPrimaryFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { sq := dialect.From(scenesFilesJoinTable).Select(scenesFilesJoinTable.Col(sceneIDColumn)).Where( scenesFilesJoinTable.Col(fileIDColumn).Eq(fileID), scenesFilesJoinTable.Col("primary").Eq(1), @@ -594,14 +593,14 @@ func (qb *SceneStore) FindByPrimaryFileID(ctx context.Context, fileID file.ID) ( return ret, nil } -func (qb *SceneStore) CountByFileID(ctx context.Context, fileID file.ID) (int, error) { +func (qb *SceneStore) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { 
joinTable := scenesFilesJoinTable q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(fileIDColumn).Eq(fileID)) return count(ctx, q) } -func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Scene, error) { +func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Scene, error) { fingerprintTable := fingerprintTableMgr.table var ex []exp.Expression @@ -629,18 +628,18 @@ func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []file.Fingerpr } func (qb *SceneStore) FindByChecksum(ctx context.Context, checksum string) ([]*models.Scene, error) { - return qb.FindByFingerprints(ctx, []file.Fingerprint{ + return qb.FindByFingerprints(ctx, []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: checksum, }, }) } func (qb *SceneStore) FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error) { - return qb.FindByFingerprints(ctx, []file.Fingerprint{ + return qb.FindByFingerprints(ctx, []models.Fingerprint{ { - Type: file.FingerprintTypeOshash, + Type: models.FingerprintTypeOshash, Fingerprint: oshash, }, }) @@ -1684,7 +1683,7 @@ func (qb *SceneStore) destroyCover(ctx context.Context, sceneID int) error { return qb.DestroyImage(ctx, sceneID, sceneCoverBlobColumn) } -func (qb *SceneStore) AssignFiles(ctx context.Context, sceneID int, fileIDs []file.ID) error { +func (qb *SceneStore) AssignFiles(ctx context.Context, sceneID int, fileIDs []models.FileID) error { // assuming a file can only be assigned to a single scene if err := scenesFilesTableMgr.destroyJoins(ctx, fileIDs); err != nil { return err @@ -1736,9 +1735,9 @@ func (qb *SceneStore) filesRepository() *filesRepository { } } -func (qb *SceneStore) AddFileID(ctx context.Context, id int, fileID file.ID) error { +func (qb *SceneStore) AddFileID(ctx context.Context, id int, fileID models.FileID) error { const firstPrimary = false - return 
scenesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []file.ID{fileID}) + return scenesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []models.FileID{fileID}) } func (qb *SceneStore) performersRepository() *joinRepository { diff --git a/pkg/sqlite/scene_test.go b/pkg/sqlite/scene_test.go index 8ab34a112..0da236f4d 100644 --- a/pkg/sqlite/scene_test.go +++ b/pkg/sqlite/scene_test.go @@ -14,7 +14,6 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "github.com/stretchr/testify/assert" @@ -165,8 +164,8 @@ func Test_sceneQueryBuilder_Create(t *testing.T) { Organized: true, OCounter: ocounter, StudioID: &studioIDs[studioIdxWithScene], - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ - videoFile.(*file.VideoFile), + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ + videoFile.(*models.VideoFile), }), CreatedAt: createdAt, UpdatedAt: updatedAt, @@ -248,7 +247,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) { runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { assert := assert.New(t) - var fileIDs []file.ID + var fileIDs []models.FileID if tt.newObject.Files.Loaded() { for _, f := range tt.newObject.Files.List() { fileIDs = append(fileIDs, f.ID) @@ -308,7 +307,7 @@ func clearSceneFileIDs(scene *models.Scene) { } } -func makeSceneFileWithID(i int) *file.VideoFile { +func makeSceneFileWithID(i int) *models.VideoFile { ret := makeSceneFile(i) ret.ID = sceneFileIDs[i] return ret @@ -626,7 +625,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) { }, models.Scene{ ID: sceneIDs[sceneIdxWithSpacedName], - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ makeSceneFile(sceneIdxWithSpacedName), }), Title: title, @@ -678,7 +677,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) { models.Scene{ ID: sceneIDs[sceneIdxWithSpacedName], OCounter: 
getOCounter(sceneIdxWithSpacedName), - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ makeSceneFile(sceneIdxWithSpacedName), }), GalleryIDs: models.NewRelatedIDs([]int{}), @@ -1460,7 +1459,7 @@ func makeSceneWithID(index int) *models.Scene { ret := makeScene(index) ret.ID = sceneIDs[index] - ret.Files = models.NewRelatedVideoFiles([]*file.VideoFile{makeSceneFile(index)}) + ret.Files = models.NewRelatedVideoFiles([]*models.VideoFile{makeSceneFile(index)}) return ret } @@ -1891,7 +1890,7 @@ func scenesToIDs(i []*models.Scene) []int { func Test_sceneStore_FindByFileID(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID include []int exclude []int }{ @@ -1940,7 +1939,7 @@ func Test_sceneStore_FindByFileID(t *testing.T) { func Test_sceneStore_CountByFileID(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID want int }{ { @@ -3053,8 +3052,8 @@ func queryScenes(ctx context.Context, t *testing.T, queryBuilder models.SceneRea func createScene(ctx context.Context, width int, height int) (*models.Scene, error) { name := fmt.Sprintf("TestSceneQueryResolutionModifiers %d %d", width, height) - sceneFile := &file.VideoFile{ - BaseFile: &file.BaseFile{ + sceneFile := &models.VideoFile{ + BaseFile: &models.BaseFile{ Basename: name, ParentFolderID: folderIDs[folderIdxWithSceneFiles], }, @@ -3068,7 +3067,7 @@ func createScene(ctx context.Context, width int, height int) (*models.Scene, err scene := &models.Scene{} - if err := db.Scene.Create(ctx, scene, []file.ID{sceneFile.ID}); err != nil { + if err := db.Scene.Create(ctx, scene, []models.FileID{sceneFile.ID}); err != nil { return nil, err } @@ -4559,7 +4558,7 @@ func TestSceneStore_AssignFiles(t *testing.T) { tests := []struct { name string sceneID int - fileID file.ID + fileID models.FileID wantErr bool }{ { @@ -4587,7 +4586,7 @@ func TestSceneStore_AssignFiles(t *testing.T) { for _, tt := range 
tests { t.Run(tt.name, func(t *testing.T) { withRollbackTxn(func(ctx context.Context) error { - if err := qb.AssignFiles(ctx, tt.sceneID, []file.ID{tt.fileID}); (err != nil) != tt.wantErr { + if err := qb.AssignFiles(ctx, tt.sceneID, []models.FileID{tt.fileID}); (err != nil) != tt.wantErr { t.Errorf("SceneStore.AssignFiles() error = %v, wantErr %v", err, tt.wantErr) } diff --git a/pkg/sqlite/setup_test.go b/pkg/sqlite/setup_test.go index e89c9302c..737a28e72 100644 --- a/pkg/sqlite/setup_test.go +++ b/pkg/sqlite/setup_test.go @@ -14,7 +14,6 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "github.com/stashapp/stash/pkg/sqlite" @@ -283,11 +282,11 @@ const ( ) var ( - folderIDs []file.FolderID - fileIDs []file.ID - sceneFileIDs []file.ID - imageFileIDs []file.ID - galleryFileIDs []file.ID + folderIDs []models.FolderID + fileIDs []models.FileID + sceneFileIDs []models.FileID + imageFileIDs []models.FileID + galleryFileIDs []models.FileID chapterIDs []int sceneIDs []int @@ -700,8 +699,8 @@ func getFolderModTime(index int) time.Time { return time.Date(2000, 1, (index%10)+1, 0, 0, 0, 0, time.UTC) } -func makeFolder(i int) file.Folder { - var folderID *file.FolderID +func makeFolder(i int) models.Folder { + var folderID *models.FolderID var folderIdx *int if pidx, ok := folderParentFolders[i]; ok { folderIdx = &pidx @@ -709,9 +708,9 @@ func makeFolder(i int) file.Folder { folderID = &v } - return file.Folder{ + return models.Folder{ ParentFolderID: folderID, - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ // zip files have to be added after creating files ModTime: getFolderModTime(i), }, @@ -748,8 +747,8 @@ func getFileModTime(index int) time.Time { return getFolderModTime(index) } -func getFileFingerprints(index int) []file.Fingerprint { - return []file.Fingerprint{ +func getFileFingerprints(index int) []models.Fingerprint { + return []models.Fingerprint{ 
{ Type: "MD5", Fingerprint: getPrefixedStringValue("file", index, "md5"), @@ -772,22 +771,22 @@ func getFileDuration(index int) float64 { return float64(duration) + 0.432 } -func makeFile(i int) file.File { +func makeFile(i int) models.File { folderID := folderIDs[fileFolders[i]] if folderID == 0 { folderID = folderIDs[folderIdxWithFiles] } - var zipFileID *file.ID + var zipFileID *models.FileID if zipFileIndex, found := fileZipFiles[i]; found { zipFileID = &fileIDs[zipFileIndex] } - var ret file.File - baseFile := &file.BaseFile{ + var ret models.File + baseFile := &models.BaseFile{ Basename: getFileBaseName(i), ParentFolderID: folderID, - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ // zip files have to be added after creating files ModTime: getFileModTime(i), ZipFileID: zipFileID, @@ -799,7 +798,7 @@ func makeFile(i int) file.File { ret = baseFile if i >= fileIdxStartVideoFiles && i < fileIdxStartImageFiles { - ret = &file.VideoFile{ + ret = &models.VideoFile{ BaseFile: baseFile, Format: getFileStringValue(i, "format"), Width: getWidth(i), @@ -811,7 +810,7 @@ func makeFile(i int) file.File { BitRate: int64(getFileDuration(i)) * 3, } } else if i >= fileIdxStartImageFiles && i < fileIdxStartGalleryFiles { - ret = &file.ImageFile{ + ret = &models.ImageFile{ BaseFile: baseFile, Format: getFileStringValue(i, "format"), Width: getWidth(i), @@ -977,27 +976,27 @@ func getSceneBasename(index int) string { return getSceneStringValue(index, pathField) } -func makeSceneFile(i int) *file.VideoFile { - fp := []file.Fingerprint{ +func makeSceneFile(i int) *models.VideoFile { + fp := []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getSceneStringValue(i, checksumField), }, { - Type: file.FingerprintTypeOshash, + Type: models.FingerprintTypeOshash, Fingerprint: getSceneStringValue(i, "oshash"), }, } if i != sceneIdxMissingPhash { - fp = append(fp, file.Fingerprint{ - Type: file.FingerprintTypePhash, + fp = 
append(fp, models.Fingerprint{ + Type: models.FingerprintTypePhash, Fingerprint: getScenePhash(i, "phash"), }) } - return &file.VideoFile{ - BaseFile: &file.BaseFile{ + return &models.VideoFile{ + BaseFile: &models.BaseFile{ Path: getFilePath(folderIdxWithSceneFiles, getSceneBasename(i)), Basename: getSceneBasename(i), ParentFolderID: folderIDs[folderIdxWithSceneFiles], @@ -1100,7 +1099,7 @@ func createScenes(ctx context.Context, n int) error { scene := makeScene(i) - if err := sqb.Create(ctx, scene, []file.ID{f.ID}); err != nil { + if err := sqb.Create(ctx, scene, []models.FileID{f.ID}); err != nil { return fmt.Errorf("Error creating scene %v+: %s", scene, err.Error()) } @@ -1118,15 +1117,15 @@ func getImageBasename(index int) string { return getImageStringValue(index, pathField) } -func makeImageFile(i int) *file.ImageFile { - return &file.ImageFile{ - BaseFile: &file.BaseFile{ +func makeImageFile(i int) *models.ImageFile { + return &models.ImageFile{ + BaseFile: &models.BaseFile{ Path: getFilePath(folderIdxWithImageFiles, getImageBasename(i)), Basename: getImageBasename(i), ParentFolderID: folderIDs[folderIdxWithImageFiles], - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getImageStringValue(i, checksumField), }, }, @@ -1180,7 +1179,7 @@ func createImages(ctx context.Context, n int) error { err := qb.Create(ctx, &models.ImageCreateInput{ Image: image, - FileIDs: []file.ID{f.ID}, + FileIDs: []models.FileID{f.ID}, }) if err != nil { @@ -1209,14 +1208,14 @@ func getGalleryBasename(index int) string { return getGalleryStringValue(index, pathField) } -func makeGalleryFile(i int) *file.BaseFile { - return &file.BaseFile{ +func makeGalleryFile(i int) *models.BaseFile { + return &models.BaseFile{ Path: getFilePath(folderIdxWithGalleryFiles, getGalleryBasename(i)), Basename: getGalleryBasename(i), ParentFolderID: folderIDs[folderIdxWithGalleryFiles], - 
Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getGalleryStringValue(i, checksumField), }, }, @@ -1255,14 +1254,14 @@ func createGalleries(ctx context.Context, n int) error { fqb := db.File for i := 0; i < n; i++ { - var fileIDs []file.ID + var fileIDs []models.FileID if i != galleryIdxWithoutFile { f := makeGalleryFile(i) if err := fqb.Create(ctx, f); err != nil { return fmt.Errorf("creating gallery file: %w", err) } galleryFileIDs = append(galleryFileIDs, f.ID) - fileIDs = []file.ID{f.ID} + fileIDs = []models.FileID{f.ID} } else { galleryFileIDs = append(galleryFileIDs, 0) } diff --git a/pkg/sqlite/table.go b/pkg/sqlite/table.go index e3cedce37..510b5877c 100644 --- a/pkg/sqlite/table.go +++ b/pkg/sqlite/table.go @@ -11,7 +11,6 @@ import ( "github.com/jmoiron/sqlx" "gopkg.in/guregu/null.v4" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil" @@ -707,12 +706,12 @@ type relatedFilesTable struct { } // type scenesFilesRow struct { -// SceneID int `db:"scene_id"` -// Primary bool `db:"primary"` -// FileID file.ID `db:"file_id"` +// SceneID int `db:"scene_id"` +// Primary bool `db:"primary"` +// FileID models.FileID `db:"file_id"` // } -func (t *relatedFilesTable) insertJoin(ctx context.Context, id int, primary bool, fileID file.ID) error { +func (t *relatedFilesTable) insertJoin(ctx context.Context, id int, primary bool, fileID models.FileID) error { q := dialect.Insert(t.table.table).Cols(t.idColumn.GetCol(), "primary", "file_id").Vals( goqu.Vals{id, primary, fileID}, ) @@ -724,7 +723,7 @@ func (t *relatedFilesTable) insertJoin(ctx context.Context, id int, primary bool return nil } -func (t *relatedFilesTable) insertJoins(ctx context.Context, id int, firstPrimary bool, fileIDs []file.ID) error { +func (t *relatedFilesTable) insertJoins(ctx 
context.Context, id int, firstPrimary bool, fileIDs []models.FileID) error { for i, fk := range fileIDs { if err := t.insertJoin(ctx, id, firstPrimary && i == 0, fk); err != nil { return err @@ -734,7 +733,7 @@ func (t *relatedFilesTable) insertJoins(ctx context.Context, id int, firstPrimar return nil } -func (t *relatedFilesTable) replaceJoins(ctx context.Context, id int, fileIDs []file.ID) error { +func (t *relatedFilesTable) replaceJoins(ctx context.Context, id int, fileIDs []models.FileID) error { if err := t.destroy(ctx, []int{id}); err != nil { return err } @@ -744,7 +743,7 @@ func (t *relatedFilesTable) replaceJoins(ctx context.Context, id int, fileIDs [] } // destroyJoins destroys all entries in the table with the provided fileIDs -func (t *relatedFilesTable) destroyJoins(ctx context.Context, fileIDs []file.ID) error { +func (t *relatedFilesTable) destroyJoins(ctx context.Context, fileIDs []models.FileID) error { q := dialect.Delete(t.table.table).Where(t.table.table.Col("file_id").In(fileIDs)) if _, err := exec(ctx, q); err != nil { @@ -754,7 +753,7 @@ func (t *relatedFilesTable) destroyJoins(ctx context.Context, fileIDs []file.ID) return nil } -func (t *relatedFilesTable) setPrimary(ctx context.Context, id int, fileID file.ID) error { +func (t *relatedFilesTable) setPrimary(ctx context.Context, id int, fileID models.FileID) error { table := t.table.table q := dialect.Update(table).Prepared(true).Set(goqu.Record{ diff --git a/pkg/sqlite/values.go b/pkg/sqlite/values.go index be812275f..5e196051b 100644 --- a/pkg/sqlite/values.go +++ b/pkg/sqlite/values.go @@ -1,9 +1,9 @@ package sqlite import ( - "github.com/stashapp/stash/pkg/file" - "gopkg.in/guregu/null.v4" + + "github.com/stashapp/stash/pkg/models" ) // null package does not provide methods to convert null.Int to int pointer @@ -33,27 +33,27 @@ func nullFloatPtr(i null.Float) *float64 { return &v } -func nullIntFolderIDPtr(i null.Int) *file.FolderID { +func nullIntFolderIDPtr(i null.Int) 
*models.FolderID { if !i.Valid { return nil } - v := file.FolderID(i.Int64) + v := models.FolderID(i.Int64) return &v } -func nullIntFileIDPtr(i null.Int) *file.ID { +func nullIntFileIDPtr(i null.Int) *models.FileID { if !i.Valid { return nil } - v := file.ID(i.Int64) + v := models.FileID(i.Int64) return &v } -func nullIntFromFileIDPtr(i *file.ID) null.Int { +func nullIntFromFileIDPtr(i *models.FileID) null.Int { if i == nil { return null.NewInt(0, false) } @@ -61,7 +61,7 @@ func nullIntFromFileIDPtr(i *file.ID) null.Int { return null.IntFrom(int64(*i)) } -func nullIntFromFolderIDPtr(i *file.FolderID) null.Int { +func nullIntFromFolderIDPtr(i *models.FolderID) null.Int { if i == nil { return null.NewInt(0, false) } diff --git a/pkg/studio/export.go b/pkg/studio/export.go index 2ad158c17..9d6d79299 100644 --- a/pkg/studio/export.go +++ b/pkg/studio/export.go @@ -11,15 +11,15 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -type FinderImageAliasStashIDGetter interface { - Finder - GetImage(ctx context.Context, studioID int) ([]byte, error) +type FinderImageStashIDGetter interface { + models.StudioGetter models.AliasLoader models.StashIDLoader + GetImage(ctx context.Context, studioID int) ([]byte, error) } // ToJSON converts a Studio object into its JSON equivalent. 
-func ToJSON(ctx context.Context, reader FinderImageAliasStashIDGetter, studio *models.Studio) (*jsonschema.Studio, error) { +func ToJSON(ctx context.Context, reader FinderImageStashIDGetter, studio *models.Studio) (*jsonschema.Studio, error) { newStudioJSON := jsonschema.Studio{ Name: studio.Name, URL: studio.URL, diff --git a/pkg/studio/import.go b/pkg/studio/import.go index 653dfce61..df712daab 100644 --- a/pkg/studio/import.go +++ b/pkg/studio/import.go @@ -10,16 +10,15 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -type NameFinderCreatorUpdater interface { - NameFinderCreator - Update(ctx context.Context, updatedStudio *models.Studio) error - UpdateImage(ctx context.Context, studioID int, image []byte) error +type ImporterReaderWriter interface { + models.StudioCreatorUpdater + FindByName(ctx context.Context, name string, nocase bool) (*models.Studio, error) } var ErrParentStudioNotExist = errors.New("parent studio does not exist") type Importer struct { - ReaderWriter NameFinderCreatorUpdater + ReaderWriter ImporterReaderWriter Input jsonschema.Studio MissingRefBehaviour models.ImportMissingRefEnum diff --git a/pkg/studio/query.go b/pkg/studio/query.go index ce3594eb1..b20cec331 100644 --- a/pkg/studio/query.go +++ b/pkg/studio/query.go @@ -6,21 +6,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Finder interface { - Find(ctx context.Context, id int) (*models.Studio, error) -} - -type Queryer interface { - Query(ctx context.Context, studioFilter *models.StudioFilterType, findFilter *models.FindFilterType) ([]*models.Studio, int, error) -} - -type FinderQueryer interface { - Finder - Queryer - models.AliasLoader -} - -func ByName(ctx context.Context, qb Queryer, name string) (*models.Studio, error) { +func ByName(ctx context.Context, qb models.StudioQueryer, name string) (*models.Studio, error) { f := &models.StudioFilterType{ Name: &models.StringCriterionInput{ Value: name, @@ -44,7 +30,7 @@ func ByName(ctx context.Context, qb Queryer, name 
string) (*models.Studio, error return nil, nil } -func ByAlias(ctx context.Context, qb Queryer, alias string) (*models.Studio, error) { +func ByAlias(ctx context.Context, qb models.StudioQueryer, alias string) (*models.Studio, error) { f := &models.StudioFilterType{ Aliases: &models.StringCriterionInput{ Value: alias, diff --git a/pkg/studio/update.go b/pkg/studio/update.go index 0b159edcd..a1a16a0c4 100644 --- a/pkg/studio/update.go +++ b/pkg/studio/update.go @@ -12,11 +12,6 @@ var ( ErrStudioOwnAncestor = errors.New("studio cannot be an ancestor of itself") ) -type NameFinderCreator interface { - FindByName(ctx context.Context, name string, nocase bool) (*models.Studio, error) - Create(ctx context.Context, newStudio *models.Studio) error -} - type NameExistsError struct { Name string } @@ -36,7 +31,7 @@ func (e *NameUsedByAliasError) Error() string { // EnsureStudioNameUnique returns an error if the studio name provided // is used as a name or alias of another existing tag. -func EnsureStudioNameUnique(ctx context.Context, id int, name string, qb Queryer) error { +func EnsureStudioNameUnique(ctx context.Context, id int, name string, qb models.StudioQueryer) error { // ensure name is unique sameNameStudio, err := ByName(ctx, qb, name) if err != nil { @@ -65,7 +60,7 @@ func EnsureStudioNameUnique(ctx context.Context, id int, name string, qb Queryer return nil } -func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb Queryer) error { +func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb models.StudioQueryer) error { for _, a := range aliases { if err := EnsureStudioNameUnique(ctx, id, a, qb); err != nil { return err @@ -75,11 +70,17 @@ func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb Query return nil } +type ValidateModifyReader interface { + models.StudioGetter + models.StudioQueryer + models.AliasLoader +} + // Checks to make sure that: // 1. The studio exists locally // 2. 
The studio is not its own ancestor // 3. The studio's aliases are unique -func ValidateModify(ctx context.Context, s models.StudioPartial, qb FinderQueryer) error { +func ValidateModify(ctx context.Context, s models.StudioPartial, qb ValidateModifyReader) error { existing, err := qb.Find(ctx, s.ID) if err != nil { return err @@ -110,7 +111,7 @@ func ValidateModify(ctx context.Context, s models.StudioPartial, qb FinderQuerye return nil } -func validateParent(ctx context.Context, studioID int, newParentID int, qb FinderQueryer) error { +func validateParent(ctx context.Context, studioID int, newParentID int, qb models.StudioGetter) error { if newParentID == studioID { return ErrStudioOwnAncestor } diff --git a/pkg/tag/import.go b/pkg/tag/import.go index 67bdbc460..368815bbe 100644 --- a/pkg/tag/import.go +++ b/pkg/tag/import.go @@ -9,13 +9,9 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -type NameFinderCreatorUpdater interface { +type ImporterReaderWriter interface { + models.TagCreatorUpdater FindByName(ctx context.Context, name string, nocase bool) (*models.Tag, error) - Create(ctx context.Context, newTag *models.Tag) error - Update(ctx context.Context, updatedTag *models.Tag) error - UpdateImage(ctx context.Context, tagID int, image []byte) error - UpdateAliases(ctx context.Context, tagID int, aliases []string) error - UpdateParentTags(ctx context.Context, tagID int, parentIDs []int) error } type ParentTagNotExistError struct { @@ -31,7 +27,7 @@ func (e ParentTagNotExistError) MissingParent() string { } type Importer struct { - ReaderWriter NameFinderCreatorUpdater + ReaderWriter ImporterReaderWriter Input jsonschema.Tag MissingRefBehaviour models.ImportMissingRefEnum diff --git a/pkg/tag/query.go b/pkg/tag/query.go index a048054d7..76567434d 100644 --- a/pkg/tag/query.go +++ b/pkg/tag/query.go @@ -6,15 +6,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Finder interface { - Find(ctx context.Context, id int) (*models.Tag, error) -} - -type Queryer 
interface { - Query(ctx context.Context, tagFilter *models.TagFilterType, findFilter *models.FindFilterType) ([]*models.Tag, int, error) -} - -func ByName(ctx context.Context, qb Queryer, name string) (*models.Tag, error) { +func ByName(ctx context.Context, qb models.TagQueryer, name string) (*models.Tag, error) { f := &models.TagFilterType{ Name: &models.StringCriterionInput{ Value: name, @@ -38,7 +30,7 @@ func ByName(ctx context.Context, qb Queryer, name string) (*models.Tag, error) { return nil, nil } -func ByAlias(ctx context.Context, qb Queryer, alias string) (*models.Tag, error) { +func ByAlias(ctx context.Context, qb models.TagQueryer, alias string) (*models.Tag, error) { f := &models.TagFilterType{ Aliases: &models.StringCriterionInput{ Value: alias, diff --git a/pkg/tag/update.go b/pkg/tag/update.go index 3b0dbd414..dcb78bf9c 100644 --- a/pkg/tag/update.go +++ b/pkg/tag/update.go @@ -7,11 +7,6 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type NameFinderCreator interface { - FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Tag, error) - Create(ctx context.Context, newTag *models.Tag) error -} - type NameExistsError struct { Name string } @@ -43,7 +38,7 @@ func (e *InvalidTagHierarchyError) Error() string { // EnsureTagNameUnique returns an error if the tag name provided // is used as a name or alias of another existing tag. 
-func EnsureTagNameUnique(ctx context.Context, id int, name string, qb Queryer) error { +func EnsureTagNameUnique(ctx context.Context, id int, name string, qb models.TagQueryer) error { // ensure name is unique sameNameTag, err := ByName(ctx, qb, name) if err != nil { @@ -72,7 +67,7 @@ func EnsureTagNameUnique(ctx context.Context, id int, name string, qb Queryer) e return nil } -func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb Queryer) error { +func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb models.TagQueryer) error { for _, a := range aliases { if err := EnsureTagNameUnique(ctx, id, a, qb); err != nil { return err @@ -82,14 +77,14 @@ func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb Query return nil } -type RelationshipGetter interface { +type RelationshipFinder interface { FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*models.TagPath, error) FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*models.TagPath, error) FindByChildTagID(ctx context.Context, childID int) ([]*models.Tag, error) FindByParentTagID(ctx context.Context, parentID int) ([]*models.Tag, error) } -func ValidateHierarchy(ctx context.Context, tag *models.Tag, parentIDs, childIDs []int, qb RelationshipGetter) error { +func ValidateHierarchy(ctx context.Context, tag *models.Tag, parentIDs, childIDs []int, qb RelationshipFinder) error { id := tag.ID allAncestors := make(map[int]*models.TagPath) allDescendants := make(map[int]*models.TagPath) @@ -177,7 +172,7 @@ func ValidateHierarchy(ctx context.Context, tag *models.Tag, parentIDs, childIDs return nil } -func MergeHierarchy(ctx context.Context, destination int, sources []int, qb RelationshipGetter) ([]int, []int, error) { +func MergeHierarchy(ctx context.Context, destination int, sources []int, qb RelationshipFinder) ([]int, []int, error) { var mergedParents, mergedChildren []int allIds := append([]int{destination}, sources...)