diff --git a/cmd/phasher/main.go b/cmd/phasher/main.go index 864195631..be2053784 100644 --- a/cmd/phasher/main.go +++ b/cmd/phasher/main.go @@ -5,20 +5,39 @@ import ( "fmt" "os" "os/exec" + "path/filepath" flag "github.com/spf13/pflag" "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/hash/imagephash" "github.com/stashapp/stash/pkg/hash/videophash" "github.com/stashapp/stash/pkg/models" ) func customUsage() { fmt.Fprintf(os.Stderr, "Usage:\n") - fmt.Fprintf(os.Stderr, "%s [OPTIONS] VIDEOFILE...\n\nOptions:\n", os.Args[0]) + fmt.Fprintf(os.Stderr, "%s [OPTIONS] FILE...\n\nOptions:\n", os.Args[0]) flag.PrintDefaults() } func printPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet *bool) error { + // Determine if this is a video or image file based on extension + ext := filepath.Ext(inputfile) + if ext != "" { ext = ext[1:] } // remove the leading dot; Ext returns "" for files with no extension + + // Common image extensions + imageExts := map[string]bool{ + "jpg": true, "jpeg": true, "png": true, "gif": true, "webp": true, "bmp": true, "avif": true, + } + + if imageExts[ext] { + return printImagePhash(ff, inputfile, quiet) + } + + return printVideoPhash(ff, ffp, inputfile, quiet) +} + +func printVideoPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet *bool) error { ffvideoFile, err := ffp.NewVideoFile(inputfile) if err != nil { return err } @@ -46,6 +65,24 @@ func printPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet return nil } +func printImagePhash(ff *ffmpeg.FFMpeg, inputfile string, quiet *bool) error { + imgFile := &models.ImageFile{ + BaseFile: &models.BaseFile{Path: inputfile}, + } + + phash, err := imagephash.Generate(ff, imgFile) + if err != nil { + return err + } + + if *quiet { + fmt.Printf("%x\n", *phash) + } else { + fmt.Printf("%x %v\n", *phash, imgFile.Path) + } + return nil +} + func getPaths() (string, string) { ffmpegPath, _ := exec.LookPath("ffmpeg") ffprobePath, _ := exec.LookPath("ffprobe") @@ -67,7 +104,7 @@ func main() { 
args := flag.Args() if len(args) < 1 { - fmt.Fprintf(os.Stderr, "Missing VIDEOFILE argument.\n") + fmt.Fprintf(os.Stderr, "Missing FILE argument.\n") flag.Usage() os.Exit(2) } @@ -87,4 +124,5 @@ func main() { fmt.Fprintln(os.Stderr, err) } } + } diff --git a/gqlgen.yml b/gqlgen.yml index b949d44dc..4a3d73d51 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -140,4 +140,8 @@ models: fields: plugins: resolver: true + Performer: + fields: + career_length: + resolver: true diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index edfdecaac..7fda85b24 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -422,6 +422,8 @@ type Mutation { """ moveFiles(input: MoveFilesInput!): Boolean! deleteFiles(ids: [ID!]!): Boolean! + "Deletes file entries from the database without deleting the files from the filesystem" + destroyFiles(ids: [ID!]!): Boolean! fileSetFingerprints(input: FileSetFingerprintsInput!): Boolean! diff --git a/graphql/schema/types/config.graphql b/graphql/schema/types/config.graphql index 87bf3ae54..2e0bba5b2 100644 --- a/graphql/schema/types/config.graphql +++ b/graphql/schema/types/config.graphql @@ -184,6 +184,18 @@ input ConfigGeneralInput { scraperPackageSources: [PackageSourceInput!] "Source of plugin packages" pluginPackageSources: [PackageSourceInput!] 
+ + "Size of the longest dimension for each sprite in pixels" + spriteScreenshotSize: Int + + "True if sprite generation should use the sprite interval and min/max sprites settings instead of the default" + useCustomSpriteInterval: Boolean + "Time between two different scrubber sprites in seconds - only used if useCustomSpriteInterval is true" + spriteInterval: Float + "Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true" + minimumSprites: Int + "Maximum number of sprites to be generated - only used if useCustomSpriteInterval is true" + maximumSprites: Int } type ConfigGeneralResult { @@ -287,6 +299,16 @@ type ConfigGeneralResult { logAccess: Boolean! "Maximum log size" logFileMaxSize: Int! + "True if sprite generation should use the sprite interval and min/max sprites settings instead of the default" + useCustomSpriteInterval: Boolean! + "Time between two different scrubber sprites in seconds - only used if useCustomSpriteInterval is true" + spriteInterval: Float! + "Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true" + minimumSprites: Int! + "Maximum number of sprites to be generated - only used if useCustomSpriteInterval is true" + maximumSprites: Int! + "Size of the longest dimension for each sprite in pixels" + spriteScreenshotSize: Int! "Array of video file extensions" videoExtensions: [String!]! 
"Array of image file extensions" @@ -395,6 +417,9 @@ input ConfigInterfaceInput { customLocales: String customLocalesEnabled: Boolean + "When true, disables all customizations (plugins, CSS, JavaScript, locales) for troubleshooting" + disableCustomizations: Boolean + "Interface language" language: String @@ -472,6 +497,9 @@ type ConfigInterfaceResult { customLocales: String customLocalesEnabled: Boolean + "When true, disables all customizations (plugins, CSS, JavaScript, locales) for troubleshooting" + disableCustomizations: Boolean + "Interface language" language: String diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index 4cf25d840..075e40372 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -75,10 +75,26 @@ input OrientationCriterionInput { value: [OrientationEnum!]! } -input PHashDuplicationCriterionInput { - duplicated: Boolean - "Currently unimplemented" +input DuplicationCriterionInput { + duplicated: Boolean @deprecated(reason: "Use phash field instead") + "Currently unimplemented. Intended for phash distance matching." distance: Int + "Filter by phash duplication" + phash: Boolean + "Filter by URL duplication" + url: Boolean + "Filter by Stash ID duplication" + stash_id: Boolean + "Filter by title duplication" + title: Boolean +} + +input FileDuplicationCriterionInput { + duplicated: Boolean @deprecated(reason: "Use phash field instead") + "Currently unimplemented. Intended for phash distance matching." + distance: Int + "Filter by phash duplication" + phash: Boolean } input StashIDCriterionInput { @@ -138,8 +154,13 @@ input PerformerFilterType { penis_length: FloatCriterionInput "Filter by ciricumcision" circumcised: CircumcisionCriterionInput - "Filter by career length" + "Deprecated: use career_start and career_end. This filter is non-functional." 
career_length: StringCriterionInput + @deprecated(reason: "Use career_start and career_end") + "Filter by career start year" + career_start: IntCriterionInput + "Filter by career end year" + career_end: IntCriterionInput "Filter by tattoos" tattoos: StringCriterionInput "Filter by piercings" @@ -261,8 +282,8 @@ input SceneFilterType { organized: Boolean "Filter by o-counter" o_counter: IntCriterionInput - "Filter Scenes that have an exact phash match available" - duplicated: PHashDuplicationCriterionInput + "Filter Scenes by duplication criteria" + duplicated: DuplicationCriterionInput "Filter by resolution" resolution: ResolutionCriterionInput "Filter by orientation" @@ -308,6 +329,8 @@ input SceneFilterType { @deprecated(reason: "use stash_ids_endpoint instead") "Filter by StashIDs" stash_ids_endpoint: StashIDsCriterionInput + "Filter by StashID count" + stash_id_count: IntCriterionInput "Filter by url" url: StringCriterionInput "Filter by interactive" @@ -348,6 +371,8 @@ input SceneFilterType { markers_filter: SceneMarkerFilterType "Filter by related files that meet this criteria" files_filter: FileFilterType + + custom_fields: [CustomFieldCriterionInput!] 
} input MovieFilterType { @@ -430,6 +455,8 @@ input GroupFilterType { containing_group_count: IntCriterionInput "Filter by number of sub-groups the group has" sub_group_count: IntCriterionInput + "Filter by number of scenes the group has" + scene_count: IntCriterionInput "Filter by related scenes that meet this criteria" scenes_filter: SceneFilterType @@ -465,6 +492,8 @@ input StudioFilterType { image_count: IntCriterionInput "Filter by gallery count" gallery_count: IntCriterionInput + "Filter by group count" + group_count: IntCriterionInput "Filter by tag count" tag_count: IntCriterionInput "Filter by url" @@ -475,16 +504,22 @@ input StudioFilterType { child_count: IntCriterionInput "Filter by autotag ignore value" ignore_auto_tag: Boolean + "Filter by organized" + organized: Boolean "Filter by related scenes that meet this criteria" scenes_filter: SceneFilterType "Filter by related images that meet this criteria" images_filter: ImageFilterType "Filter by related galleries that meet this criteria" galleries_filter: GalleryFilterType + "Filter by related groups that meet this criteria" + groups_filter: GroupFilterType "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" updated_at: TimestampCriterionInput + + custom_fields: [CustomFieldCriterionInput!] } input GalleryFilterType { @@ -638,12 +673,20 @@ input TagFilterType { images_filter: ImageFilterType "Filter by related galleries that meet this criteria" galleries_filter: GalleryFilterType + "Filter by related groups that meet this criteria" + groups_filter: GroupFilterType + "Filter by related performers that meet this criteria" + performers_filter: PerformerFilterType + "Filter by related studios that meet this criteria" + studios_filter: StudioFilterType "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" updated_at: TimestampCriterionInput + + custom_fields: [CustomFieldCriterionInput!] 
} input ImageFilterType { @@ -658,6 +701,8 @@ input ImageFilterType { id: IntCriterionInput "Filter by file checksum" checksum: StringCriterionInput + "Filter by file phash distance" + phash_distance: PhashDistanceCriterionInput "Filter by path" path: StringCriterionInput "Filter by file count" @@ -732,8 +777,8 @@ input FileFilterType { "Filter by modification time" mod_time: TimestampCriterionInput - "Filter files that have an exact match available" - duplicated: PHashDuplicationCriterionInput + "Filter files by duplication criteria (only phash applies to files)" + duplicated: FileDuplicationCriterionInput "find files based on hash" hashes: [FingerprintFilterInput!] diff --git a/graphql/schema/types/gallery.graphql b/graphql/schema/types/gallery.graphql index 999a743f7..f456157a7 100644 --- a/graphql/schema/types/gallery.graphql +++ b/graphql/schema/types/gallery.graphql @@ -100,6 +100,8 @@ input GalleryDestroyInput { """ delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } type FindGalleriesResultType { diff --git a/graphql/schema/types/image.graphql b/graphql/schema/types/image.graphql index fb95556f5..b7ec1a9f5 100644 --- a/graphql/schema/types/image.graphql +++ b/graphql/schema/types/image.graphql @@ -82,12 +82,16 @@ input ImageDestroyInput { id: ID! delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } input ImagesDestroyInput { ids: [ID!]! 
delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } type FindImagesResultType { diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql index c01858f64..27cbb86fb 100644 --- a/graphql/schema/types/metadata.graphql +++ b/graphql/schema/types/metadata.graphql @@ -10,8 +10,11 @@ input GenerateMetadataInput { transcodes: Boolean "Generate transcodes even if not required" forceTranscodes: Boolean + "Generate video phashes" phashes: Boolean interactiveHeatmapsSpeeds: Boolean + "Generate image phashes" + imagePhashes: Boolean imageThumbnails: Boolean clipPreviews: Boolean @@ -19,6 +22,10 @@ input GenerateMetadataInput { sceneIDs: [ID!] "marker ids to generate for" markerIDs: [ID!] + "image ids to generate for" + imageIDs: [ID!] + "gallery ids to generate for" + galleryIDs: [ID!] "overwrite existing media" overwrite: Boolean @@ -85,8 +92,10 @@ input ScanMetadataInput { scanGenerateImagePreviews: Boolean "Generate sprites during scan" scanGenerateSprites: Boolean - "Generate phashes during scan" + "Generate video phashes during scan" scanGeneratePhashes: Boolean + "Generate image phashes during scan" + scanGenerateImagePhashes: Boolean "Generate image thumbnails during scan" scanGenerateThumbnails: Boolean "Generate image clip previews during scan" @@ -107,8 +116,10 @@ type ScanMetadataOptions { scanGenerateImagePreviews: Boolean! "Generate sprites during scan" scanGenerateSprites: Boolean! - "Generate phashes during scan" + "Generate video phashes during scan" scanGeneratePhashes: Boolean! + "Generate image phashes during scan" + scanGenerateImagePhashes: Boolean! "Generate image thumbnails during scan" scanGenerateThumbnails: Boolean! 
"Generate image clip previews during scan" @@ -204,7 +215,9 @@ input IdentifyMetadataOptionsInput { setCoverImage: Boolean setOrganized: Boolean "defaults to true if not provided" - includeMalePerformers: Boolean + includeMalePerformers: Boolean @deprecated(reason: "Use performerGenders") + "Filter to only include performers with these genders. If not provided, all genders are included." + performerGenders: [GenderEnum!] "defaults to true if not provided" skipMultipleMatches: Boolean "tag to tag skipped multiple matches with" @@ -249,7 +262,9 @@ type IdentifyMetadataOptions { setCoverImage: Boolean setOrganized: Boolean "defaults to true if not provided" - includeMalePerformers: Boolean + includeMalePerformers: Boolean @deprecated(reason: "Use performerGenders") + "Filter to only include performers with these genders. If not provided, all genders are included." + performerGenders: [GenderEnum!] "defaults to true if not provided" skipMultipleMatches: Boolean "tag to tag skipped multiple matches with" @@ -310,6 +325,8 @@ input ImportObjectsInput { input BackupDatabaseInput { download: Boolean + "If true, blob files will be included in the backup. This can significantly increase the size of the backup and the time it takes to create it, but allows for a complete backup of the system that can be restored without needing access to the original media files." + includeBlobs: Boolean } input AnonymiseDatabaseInput { diff --git a/graphql/schema/types/performer.graphql b/graphql/schema/types/performer.graphql index e788b91a8..97a80b94f 100644 --- a/graphql/schema/types/performer.graphql +++ b/graphql/schema/types/performer.graphql @@ -30,7 +30,9 @@ type Performer { fake_tits: String penis_length: Float circumcised: CircumisedEnum - career_length: String + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: Int + career_end: Int tattoos: String piercings: String alias_list: [String!]! 
@@ -77,9 +79,12 @@ input PerformerCreateInput { fake_tits: String penis_length: Float circumcised: CircumisedEnum - career_length: String + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: Int + career_end: Int tattoos: String piercings: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" alias_list: [String!] twitter: String @deprecated(reason: "Use urls") instagram: String @deprecated(reason: "Use urls") @@ -115,9 +120,12 @@ input PerformerUpdateInput { fake_tits: String penis_length: Float circumcised: CircumisedEnum - career_length: String + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: Int + career_end: Int tattoos: String piercings: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" alias_list: [String!] twitter: String @deprecated(reason: "Use urls") instagram: String @deprecated(reason: "Use urls") @@ -158,9 +166,12 @@ input BulkPerformerUpdateInput { fake_tits: String penis_length: Float circumcised: CircumisedEnum - career_length: String + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: Int + career_end: Int tattoos: String piercings: String + "Duplicate aliases and those equal to name will result in an error (case-insensitive)" alias_list: BulkUpdateStrings twitter: String @deprecated(reason: "Use urls") instagram: String @deprecated(reason: "Use urls") diff --git a/graphql/schema/types/scene.graphql b/graphql/schema/types/scene.graphql index eca01d15e..4d99e0a21 100644 --- a/graphql/schema/types/scene.graphql +++ b/graphql/schema/types/scene.graphql @@ -79,6 +79,8 @@ type Scene { performers: [Performer!]! stash_ids: [StashID!]! + custom_fields: Map! + "Return valid stream paths" sceneStreams: [SceneStreamEndpoint!]! } @@ -120,6 +122,8 @@ input SceneCreateInput { Files must not already be primary for another scene. """ file_ids: [ID!] 
+ + custom_fields: Map } input SceneUpdateInput { @@ -158,6 +162,8 @@ input SceneUpdateInput { ) primary_file_id: ID + + custom_fields: CustomFieldsInput } enum BulkUpdateIdMode { @@ -190,18 +196,24 @@ input BulkSceneUpdateInput { tag_ids: BulkUpdateIds group_ids: BulkUpdateIds movie_ids: BulkUpdateIds @deprecated(reason: "Use group_ids") + + custom_fields: CustomFieldsInput } input SceneDestroyInput { id: ID! delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } input ScenesDestroyInput { ids: [ID!]! delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } type FindScenesResultType { diff --git a/graphql/schema/types/scraped-performer.graphql b/graphql/schema/types/scraped-performer.graphql index 487c89516..0818e61c2 100644 --- a/graphql/schema/types/scraped-performer.graphql +++ b/graphql/schema/types/scraped-performer.graphql @@ -18,7 +18,9 @@ type ScrapedPerformer { fake_tits: String penis_length: String circumcised: String - career_length: String + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: Int + career_end: Int tattoos: String piercings: String # aliases must be comma-delimited to be parsed correctly @@ -54,7 +56,9 @@ input ScrapedPerformerInput { fake_tits: String penis_length: String circumcised: String - career_length: String + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: Int + career_end: Int tattoos: String piercings: String aliases: String diff --git a/graphql/schema/types/studio.graphql b/graphql/schema/types/studio.graphql index 4c5778c5b..51a87bf4f 100644 --- a/graphql/schema/types/studio.graphql +++ b/graphql/schema/types/studio.graphql @@ -8,6 +8,7 @@ type Studio { aliases: [String!]! tags: [Tag!]! 
ignore_auto_tag: Boolean! + organized: Boolean! image_path: String # Resolver scene_count(depth: Int): Int! # Resolver @@ -26,6 +27,8 @@ type Studio { groups: [Group!]! movies: [Movie!]! @deprecated(reason: "use groups instead") o_counter: Int + + custom_fields: Map! } input StudioCreateInput { @@ -40,9 +43,13 @@ input StudioCreateInput { rating100: Int favorite: Boolean details: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" aliases: [String!] tag_ids: [ID!] ignore_auto_tag: Boolean + organized: Boolean + + custom_fields: Map } input StudioUpdateInput { @@ -58,9 +65,13 @@ input StudioUpdateInput { rating100: Int favorite: Boolean details: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" aliases: [String!] tag_ids: [ID!] ignore_auto_tag: Boolean + organized: Boolean + + custom_fields: CustomFieldsInput } input BulkStudioUpdateInput { @@ -74,6 +85,7 @@ input BulkStudioUpdateInput { details: String tag_ids: BulkUpdateIds ignore_auto_tag: Boolean + organized: Boolean } input StudioDestroyInput { diff --git a/graphql/schema/types/tag.graphql b/graphql/schema/types/tag.graphql index 8424ab92a..0acbc927f 100644 --- a/graphql/schema/types/tag.graphql +++ b/graphql/schema/types/tag.graphql @@ -24,6 +24,7 @@ type Tag { parent_count: Int! # Resolver child_count: Int! # Resolver + custom_fields: Map! } input TagCreateInput { @@ -31,6 +32,7 @@ input TagCreateInput { "Value that does not appear in the UI but overrides name for sorting" sort_name: String description: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" aliases: [String!] ignore_auto_tag: Boolean favorite: Boolean @@ -40,6 +42,8 @@ input TagCreateInput { parent_ids: [ID!] child_ids: [ID!] 
+ + custom_fields: Map } input TagUpdateInput { @@ -48,6 +52,7 @@ input TagUpdateInput { "Value that does not appear in the UI but overrides name for sorting" sort_name: String description: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" aliases: [String!] ignore_auto_tag: Boolean favorite: Boolean @@ -57,6 +62,8 @@ input TagUpdateInput { parent_ids: [ID!] child_ids: [ID!] + + custom_fields: CustomFieldsInput } input TagDestroyInput { @@ -71,11 +78,14 @@ type FindTagsResultType { input TagsMergeInput { source: [ID!]! destination: ID! + # values defined here will override values in the destination + values: TagUpdateInput } input BulkTagUpdateInput { ids: [ID!] description: String + "Duplicate aliases and those equal to name will result in an error (case-insensitive)" aliases: BulkUpdateStrings ignore_auto_tag: Boolean favorite: Boolean diff --git a/graphql/stash-box/query.graphql b/graphql/stash-box/query.graphql index 2367e85cf..e2686ac4d 100644 --- a/graphql/stash-box/query.graphql +++ b/graphql/stash-box/query.graphql @@ -120,18 +120,6 @@ fragment SceneFragment on Scene { } } -query FindSceneByFingerprint($fingerprint: FingerprintQueryInput!) { - findSceneByFingerprint(fingerprint: $fingerprint) { - ...SceneFragment - } -} - -query FindScenesByFullFingerprints($fingerprints: [FingerprintQueryInput!]!) { - findScenesByFullFingerprints(fingerprints: $fingerprints) { - ...SceneFragment - } -} - query FindScenesBySceneFingerprints( $fingerprints: [[FingerprintQueryInput!]!]! 
) { diff --git a/internal/api/loaders/dataloaders.go b/internal/api/loaders/dataloaders.go index 38f72b0a1..520714432 100644 --- a/internal/api/loaders/dataloaders.go +++ b/internal/api/loaders/dataloaders.go @@ -42,13 +42,14 @@ const ( ) type Loaders struct { - SceneByID *SceneLoader - SceneFiles *SceneFileIDsLoader - ScenePlayCount *ScenePlayCountLoader - SceneOCount *SceneOCountLoader - ScenePlayHistory *ScenePlayHistoryLoader - SceneOHistory *SceneOHistoryLoader - SceneLastPlayed *SceneLastPlayedLoader + SceneByID *SceneLoader + SceneFiles *SceneFileIDsLoader + ScenePlayCount *ScenePlayCountLoader + SceneOCount *SceneOCountLoader + ScenePlayHistory *ScenePlayHistoryLoader + SceneOHistory *SceneOHistoryLoader + SceneLastPlayed *SceneLastPlayedLoader + SceneCustomFields *CustomFieldsLoader ImageFiles *ImageFileIDsLoader GalleryFiles *GalleryFileIDsLoader @@ -59,11 +60,14 @@ type Loaders struct { PerformerByID *PerformerLoader PerformerCustomFields *CustomFieldsLoader - StudioByID *StudioLoader - TagByID *TagLoader - GroupByID *GroupLoader - FileByID *FileLoader - FolderByID *FolderLoader + StudioByID *StudioLoader + StudioCustomFields *CustomFieldsLoader + + TagByID *TagLoader + TagCustomFields *CustomFieldsLoader + GroupByID *GroupLoader + FileByID *FileLoader + FolderByID *FolderLoader } type Middleware struct { @@ -99,6 +103,16 @@ func (m Middleware) Middleware(next http.Handler) http.Handler { maxBatch: maxBatch, fetch: m.fetchPerformerCustomFields(ctx), }, + StudioCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchStudioCustomFields(ctx), + }, + SceneCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchSceneCustomFields(ctx), + }, StudioByID: &StudioLoader{ wait: wait, maxBatch: maxBatch, @@ -109,6 +123,11 @@ func (m Middleware) Middleware(next http.Handler) http.Handler { maxBatch: maxBatch, fetch: m.fetchTags(ctx), }, + TagCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: 
maxBatch, + fetch: m.fetchTagCustomFields(ctx), + }, GroupByID: &GroupLoader{ wait: wait, maxBatch: maxBatch, @@ -194,6 +213,18 @@ func (m Middleware) fetchScenes(ctx context.Context) func(keys []int) ([]*models } } +func (m Middleware) fetchSceneCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Scene.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchImages(ctx context.Context) func(keys []int) ([]*models.Image, []error) { return func(keys []int) (ret []*models.Image, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { @@ -253,6 +284,18 @@ func (m Middleware) fetchStudios(ctx context.Context) func(keys []int) ([]*model } } +func (m Middleware) fetchStudioCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Studio.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.Tag, []error) { return func(keys []int) (ret []*models.Tag, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { @@ -264,6 +307,18 @@ func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.T } } +func (m Middleware) fetchTagCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = 
m.Repository.Tag.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchGroups(ctx context.Context) func(keys []int) ([]*models.Group, []error) { return func(keys []int) (ret []*models.Group, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { diff --git a/internal/api/resolver_model_performer.go b/internal/api/resolver_model_performer.go index 94da62932..b770f5801 100644 --- a/internal/api/resolver_model_performer.go +++ b/internal/api/resolver_model_performer.go @@ -10,6 +10,7 @@ import ( "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/performer" + "github.com/stashapp/stash/pkg/utils" ) func (r *performerResolver) AliasList(ctx context.Context, obj *models.Performer) ([]string, error) { @@ -109,6 +110,15 @@ func (r *performerResolver) HeightCm(ctx context.Context, obj *models.Performer) return obj.Height, nil } +func (r *performerResolver) CareerLength(ctx context.Context, obj *models.Performer) (*string, error) { + if obj.CareerStart == nil && obj.CareerEnd == nil { + return nil, nil + } + + ret := utils.FormatYearRange(obj.CareerStart, obj.CareerEnd) + return &ret, nil +} + func (r *performerResolver) Birthdate(ctx context.Context, obj *models.Performer) (*string, error) { if obj.Birthdate != nil { ret := obj.Birthdate.String() diff --git a/internal/api/resolver_model_scene.go b/internal/api/resolver_model_scene.go index 2600c9538..81113d858 100644 --- a/internal/api/resolver_model_scene.go +++ b/internal/api/resolver_model_scene.go @@ -410,3 +410,16 @@ func (r *sceneResolver) OHistory(ctx context.Context, obj *models.Scene) ([]*tim return ptrRet, nil } + +func (r *sceneResolver) CustomFields(ctx context.Context, obj *models.Scene) (map[string]interface{}, error) { + m, err := loaders.From(ctx).SceneCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return 
make(map[string]interface{}), nil + } + + return m, nil +} diff --git a/internal/api/resolver_model_studio.go b/internal/api/resolver_model_studio.go index fabcf38bd..b54455920 100644 --- a/internal/api/resolver_model_studio.go +++ b/internal/api/resolver_model_studio.go @@ -207,6 +207,19 @@ func (r *studioResolver) Groups(ctx context.Context, obj *models.Studio) (ret [] return ret, nil } +func (r *studioResolver) CustomFields(ctx context.Context, obj *models.Studio) (map[string]interface{}, error) { + m, err := loaders.From(ctx).StudioCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} + // deprecated func (r *studioResolver) Movies(ctx context.Context, obj *models.Studio) (ret []*models.Group, err error) { return r.Groups(ctx, obj) diff --git a/internal/api/resolver_model_tag.go b/internal/api/resolver_model_tag.go index deae41f21..7518036b0 100644 --- a/internal/api/resolver_model_tag.go +++ b/internal/api/resolver_model_tag.go @@ -181,3 +181,16 @@ func (r *tagResolver) ChildCount(ctx context.Context, obj *models.Tag) (ret int, return ret, nil } + +func (r *tagResolver) CustomFields(ctx context.Context, obj *models.Tag) (map[string]interface{}, error) { + m, err := loaders.From(ctx).TagCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} diff --git a/internal/api/resolver_mutation_configure.go b/internal/api/resolver_mutation_configure.go index daed0b5b7..718d24998 100644 --- a/internal/api/resolver_mutation_configure.go +++ b/internal/api/resolver_mutation_configure.go @@ -287,6 +287,11 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen if input.PreviewPreset != nil { c.SetString(config.PreviewPreset, input.PreviewPreset.String()) } + r.setConfigBool(config.UseCustomSpriteInterval, input.UseCustomSpriteInterval) + 
r.setConfigFloat(config.SpriteInterval, input.SpriteInterval) + r.setConfigInt(config.MinimumSprites, input.MinimumSprites) + r.setConfigInt(config.MaximumSprites, input.MaximumSprites) + r.setConfigInt(config.SpriteScreenshotSize, input.SpriteScreenshotSize) r.setConfigBool(config.TranscodeHardwareAcceleration, input.TranscodeHardwareAcceleration) if input.MaxTranscodeSize != nil { @@ -515,6 +520,8 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigI r.setConfigBool(config.CustomLocalesEnabled, input.CustomLocalesEnabled) + r.setConfigBool(config.DisableCustomizations, input.DisableCustomizations) + if input.DisableDropdownCreate != nil { ddc := input.DisableDropdownCreate r.setConfigBool(config.DisableDropdownCreatePerformer, ddc.Performer) diff --git a/internal/api/resolver_mutation_file.go b/internal/api/resolver_mutation_file.go index c5e5e3530..afbefe554 100644 --- a/internal/api/resolver_mutation_file.go +++ b/internal/api/resolver_mutation_file.go @@ -210,6 +210,58 @@ func (r *mutationResolver) DeleteFiles(ctx context.Context, ids []string) (ret b return true, nil } +func (r *mutationResolver) DestroyFiles(ctx context.Context, ids []string) (ret bool, err error) { + fileIDs, err := stringslice.StringSliceToIntSlice(ids) + if err != nil { + return false, fmt.Errorf("converting ids: %w", err) + } + + destroyer := &file.ZipDestroyer{ + FileDestroyer: r.repository.File, + FolderDestroyer: r.repository.Folder, + } + + if err := r.withTxn(ctx, func(ctx context.Context) error { + qb := r.repository.File + + for _, fileIDInt := range fileIDs { + fileID := models.FileID(fileIDInt) + f, err := qb.Find(ctx, fileID) + if err != nil { + return err + } + + if len(f) == 0 { + return fmt.Errorf("file with id %d not found", fileID) + } + + path := f[0].Base().Path + + // ensure not a primary file + isPrimary, err := qb.IsPrimary(ctx, fileID) + if err != nil { + return fmt.Errorf("checking if file %s is primary: %w", path, err) + } + + if 
isPrimary { + return fmt.Errorf("cannot destroy primary file entry %s", path) + } + + // destroy DB entries only (no filesystem deletion) + const deleteFile = false + if err := destroyer.DestroyZip(ctx, f[0], nil, deleteFile); err != nil { + return fmt.Errorf("destroying file entry %s: %w", path, err) + } + } + + return nil + }); err != nil { + return false, err + } + + return true, nil +} + func (r *mutationResolver) FileSetFingerprints(ctx context.Context, input FileSetFingerprintsInput) (bool, error) { fileIDInt, err := strconv.Atoi(input.ID) if err != nil { diff --git a/internal/api/resolver_mutation_gallery.go b/internal/api/resolver_mutation_gallery.go index 8f4863c6d..e7f853922 100644 --- a/internal/api/resolver_mutation_gallery.go +++ b/internal/api/resolver_mutation_gallery.go @@ -346,6 +346,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall deleteGenerated := utils.IsTrue(input.DeleteGenerated) deleteFile := utils.IsTrue(input.DeleteFile) + destroyFileEntry := utils.IsTrue(input.DestroyFileEntry) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Gallery @@ -366,7 +367,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall galleries = append(galleries, gallery) - imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile) + imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry) if err != nil { return err } diff --git a/internal/api/resolver_mutation_image.go b/internal/api/resolver_mutation_image.go index 82d9be4cd..230d48358 100644 --- a/internal/api/resolver_mutation_image.go +++ b/internal/api/resolver_mutation_image.go @@ -325,7 +325,7 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD return fmt.Errorf("image with id %d not found", imageID) } - return r.imageService.Destroy(ctx, i, fileDeleter, 
utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)) + return r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile), utils.IsTrue(input.DestroyFileEntry)) }); err != nil { fileDeleter.Rollback() return false, err @@ -372,7 +372,7 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image images = append(images, i) - if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil { + if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile), utils.IsTrue(input.DestroyFileEntry)); err != nil { return err } } diff --git a/internal/api/resolver_mutation_metadata.go b/internal/api/resolver_mutation_metadata.go index 8120e2d31..ea6496800 100644 --- a/internal/api/resolver_mutation_metadata.go +++ b/internal/api/resolver_mutation_metadata.go @@ -122,9 +122,10 @@ func (r *mutationResolver) MigrateHashNaming(ctx context.Context) (string, error func (r *mutationResolver) BackupDatabase(ctx context.Context, input BackupDatabaseInput) (*string, error) { // if download is true, then backup to temporary file and return a link download := input.Download != nil && *input.Download + includeBlobs := input.IncludeBlobs != nil && *input.IncludeBlobs mgr := manager.GetInstance() - backupPath, backupName, err := mgr.BackupDatabase(download) + backupPath, backupName, err := mgr.BackupDatabase(download, includeBlobs) if err != nil { logger.Errorf("Error backing up database: %v", err) return nil, err diff --git a/internal/api/resolver_mutation_performer.go b/internal/api/resolver_mutation_performer.go index ab9abf6cf..653348304 100644 --- a/internal/api/resolver_mutation_performer.go +++ b/internal/api/resolver_mutation_performer.go @@ -43,7 +43,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per newPerformer.Name = 
strings.TrimSpace(input.Name) newPerformer.Disambiguation = translator.string(input.Disambiguation) - newPerformer.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.AliasList)) + newPerformer.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.AliasList), newPerformer.Name)) newPerformer.Gender = input.Gender newPerformer.Ethnicity = translator.string(input.Ethnicity) newPerformer.Country = translator.string(input.Country) @@ -52,7 +52,17 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per newPerformer.FakeTits = translator.string(input.FakeTits) newPerformer.PenisLength = input.PenisLength newPerformer.Circumcised = input.Circumcised - newPerformer.CareerLength = translator.string(input.CareerLength) + newPerformer.CareerStart = input.CareerStart + newPerformer.CareerEnd = input.CareerEnd + // if career_start/career_end not provided, parse deprecated career_length + if newPerformer.CareerStart == nil && newPerformer.CareerEnd == nil && input.CareerLength != nil { + start, end, err := utils.ParseYearRangeString(*input.CareerLength) + if err != nil { + return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err) + } + newPerformer.CareerStart = start + newPerformer.CareerEnd = end + } newPerformer.Tattoos = translator.string(input.Tattoos) newPerformer.Piercings = translator.string(input.Piercings) newPerformer.Favorite = translator.bool(input.Favorite) @@ -261,7 +271,22 @@ func performerPartialFromInput(input models.PerformerUpdateInput, translator cha updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits") updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length") updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised") - updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length") + // prefer 
career_start/career_end over deprecated career_length + if translator.hasField("career_start") || translator.hasField("career_end") { + updatedPerformer.CareerStart = translator.optionalInt(input.CareerStart, "career_start") + updatedPerformer.CareerEnd = translator.optionalInt(input.CareerEnd, "career_end") + } else if translator.hasField("career_length") && input.CareerLength != nil { + start, end, err := utils.ParseYearRangeString(*input.CareerLength) + if err != nil { + return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err) + } + if start != nil { + updatedPerformer.CareerStart = models.NewOptionalInt(*start) + } + if end != nil { + updatedPerformer.CareerEnd = models.NewOptionalInt(*end) + } + } updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos") updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings") updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite") @@ -348,6 +373,27 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per } } + if updatedPerformer.Aliases != nil { + p, err := qb.Find(ctx, performerID) + if err != nil { + return err + } + if p != nil { + if err := p.LoadAliases(ctx, qb); err != nil { + return err + } + + effectiveAliases := updatedPerformer.Aliases.Apply(p.Aliases.List()) + name := p.Name + if updatedPerformer.Name.Set { + name = updatedPerformer.Name.Value + } + + sanitized := stringslice.UniqueExcludeFold(effectiveAliases, name) + updatedPerformer.Aliases.Values = sanitized + updatedPerformer.Aliases.Mode = models.RelationshipUpdateModeSet + } + } if err := performer.ValidateUpdate(ctx, performerID, *updatedPerformer, qb); err != nil { return err } @@ -396,7 +442,22 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits") updatedPerformer.PenisLength = 
translator.optionalFloat64(input.PenisLength, "penis_length") updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised") - updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length") + // prefer career_start/career_end over deprecated career_length + if translator.hasField("career_start") || translator.hasField("career_end") { + updatedPerformer.CareerStart = translator.optionalInt(input.CareerStart, "career_start") + updatedPerformer.CareerEnd = translator.optionalInt(input.CareerEnd, "career_end") + } else if translator.hasField("career_length") && input.CareerLength != nil { + start, end, err := utils.ParseYearRangeString(*input.CareerLength) + if err != nil { + return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err) + } + if start != nil { + updatedPerformer.CareerStart = models.NewOptionalInt(*start) + } + if end != nil { + updatedPerformer.CareerEnd = models.NewOptionalInt(*end) + } + } updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos") updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings") diff --git a/internal/api/resolver_mutation_scene.go b/internal/api/resolver_mutation_scene.go index cb2aa7d24..70158fc6f 100644 --- a/internal/api/resolver_mutation_scene.go +++ b/internal/api/resolver_mutation_scene.go @@ -103,8 +103,15 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input models.SceneCr } } + customFields := convertMapJSONNumbers(input.CustomFields) + if err := r.withTxn(ctx, func(ctx context.Context) error { - ret, err = r.Resolver.sceneService.Create(ctx, &newScene, fileIDs, coverImageData) + ret, err = r.Resolver.sceneService.Create(ctx, models.CreateSceneInput{ + Scene: &newScene, + FileIDs: fileIDs, + CoverImage: coverImageData, + CustomFields: customFields, + }) return err }); err != nil { return nil, err @@ -306,6 +313,15 @@ func (r *mutationResolver) 
sceneUpdate(ctx context.Context, input models.SceneUp } } + var customFields *models.CustomFieldsInput + if input.CustomFields != nil { + cfCopy := *input.CustomFields + customFields = &cfCopy + // convert json.Numbers to int/float + customFields.Full = convertMapJSONNumbers(customFields.Full) + customFields.Partial = convertMapJSONNumbers(customFields.Partial) + } + scene, err := qb.UpdatePartial(ctx, sceneID, *updatedScene) if err != nil { return nil, err @@ -317,6 +333,12 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp } } + if customFields != nil { + if err := qb.SetCustomFields(ctx, scene.ID, *customFields); err != nil { + return nil, err + } + } + return scene, nil } @@ -387,6 +409,12 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU } } + var customFields *models.CustomFieldsInput + if input.CustomFields != nil { + cf := handleUpdateCustomFields(*input.CustomFields) + customFields = &cf + } + ret := []*models.Scene{} // Start the transaction and save the scenes @@ -399,6 +427,12 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU return err } + if customFields != nil { + if err := qb.SetCustomFields(ctx, scene.ID, *customFields); err != nil { + return err + } + } + ret = append(ret, scene) } @@ -441,6 +475,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD deleteGenerated := utils.IsTrue(input.DeleteGenerated) deleteFile := utils.IsTrue(input.DeleteFile) + destroyFileEntry := utils.IsTrue(input.DestroyFileEntry) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Scene @@ -457,7 +492,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD // kill any running encoders manager.KillRunningStreams(s, fileNamingAlgo) - return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile) + return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, 
deleteFile, destroyFileEntry) }); err != nil { fileDeleter.Rollback() return false, err @@ -495,6 +530,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene deleteGenerated := utils.IsTrue(input.DeleteGenerated) deleteFile := utils.IsTrue(input.DeleteFile) + destroyFileEntry := utils.IsTrue(input.DestroyFileEntry) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Scene @@ -513,7 +549,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene // kill any running encoders manager.KillRunningStreams(scene, fileNamingAlgo) - if err := r.sceneService.Destroy(ctx, scene, fileDeleter, deleteGenerated, deleteFile); err != nil { + if err := r.sceneService.Destroy(ctx, scene, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return err } } @@ -573,6 +609,7 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput var values *models.ScenePartial var coverImageData []byte + var customFields *models.CustomFieldsInput if input.Values != nil { translator := changesetTranslator{ @@ -591,6 +628,11 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput return nil, fmt.Errorf("processing cover image: %w", err) } } + + if input.Values.CustomFields != nil { + cf := handleUpdateCustomFields(*input.Values.CustomFields) + customFields = &cf + } } else { v := models.NewScenePartial() values = &v @@ -622,7 +664,20 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput return fmt.Errorf("scene with id %d not found", destID) } - return r.sceneUpdateCoverImage(ctx, ret, coverImageData) + // only update cover image if one was provided + if len(coverImageData) > 0 { + if err := r.sceneUpdateCoverImage(ctx, ret, coverImageData); err != nil { + return err + } + } + + if customFields != nil { + if err := r.Resolver.repository.Scene.SetCustomFields(ctx, ret.ID, *customFields); err != nil { + return err + } + 
} + + return nil }); err != nil { return nil, err } diff --git a/internal/api/resolver_mutation_studio.go b/internal/api/resolver_mutation_studio.go index da3aa1983..c7af918a1 100644 --- a/internal/api/resolver_mutation_studio.go +++ b/internal/api/resolver_mutation_studio.go @@ -31,14 +31,15 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio } // Populate a new studio from the input - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = strings.TrimSpace(input.Name) newStudio.Rating = input.Rating100 newStudio.Favorite = translator.bool(input.Favorite) newStudio.Details = translator.string(input.Details) newStudio.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag) - newStudio.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.Aliases)) + newStudio.Organized = translator.bool(input.Organized) + newStudio.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.Aliases), newStudio.Name)) newStudio.StashIDs = models.NewRelatedStashIDs(models.StashIDInputs(input.StashIds).ToStashIDs()) var err error @@ -61,6 +62,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio if err != nil { return nil, fmt.Errorf("converting tag ids: %w", err) } + newStudio.CustomFields = convertMapJSONNumbers(input.CustomFields) // Process the base 64 encoded image string var imageData []byte @@ -119,6 +121,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio updatedStudio.Rating = translator.optionalInt(input.Rating100, "rating100") updatedStudio.Favorite = translator.optionalBool(input.Favorite, "favorite") updatedStudio.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") + updatedStudio.Organized = translator.optionalBool(input.Organized, "organized") updatedStudio.Aliases = translator.updateStrings(input.Aliases, "aliases") updatedStudio.StashIDs = 
translator.updateStashIDs(input.StashIds, "stash_ids") @@ -152,6 +155,11 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio } } + updatedStudio.CustomFields = input.CustomFields + // convert json.Numbers to int/float + updatedStudio.CustomFields.Full = convertMapJSONNumbers(updatedStudio.CustomFields.Full) + updatedStudio.CustomFields.Partial = convertMapJSONNumbers(updatedStudio.CustomFields.Partial) + // Process the base 64 encoded image string var imageData []byte imageIncluded := translator.hasField("image") @@ -167,6 +175,28 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Studio + if updatedStudio.Aliases != nil { + s, err := qb.Find(ctx, studioID) + if err != nil { + return err + } + if s != nil { + if err := s.LoadAliases(ctx, qb); err != nil { + return err + } + + effectiveAliases := updatedStudio.Aliases.Apply(s.Aliases.List()) + name := s.Name + if updatedStudio.Name.Set { + name = updatedStudio.Name.Value + } + + sanitized := stringslice.UniqueExcludeFold(effectiveAliases, name) + updatedStudio.Aliases.Values = sanitized + updatedStudio.Aliases.Mode = models.RelationshipUpdateModeSet + } + } + if err := studio.ValidateModify(ctx, updatedStudio, qb); err != nil { return err } @@ -233,6 +263,7 @@ func (r *mutationResolver) BulkStudioUpdate(ctx context.Context, input BulkStudi partial.Rating = translator.optionalInt(input.Rating100, "rating100") partial.Details = translator.optionalString(input.Details, "details") partial.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") + partial.Organized = translator.optionalBool(input.Organized, "organized") partial.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids") if err != nil { diff --git a/internal/api/resolver_mutation_tag.go b/internal/api/resolver_mutation_tag.go index f8d4943be..ac0183b74 100644 --- 
a/internal/api/resolver_mutation_tag.go +++ b/internal/api/resolver_mutation_tag.go @@ -6,7 +6,6 @@ import ( "strconv" "strings" - "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin/hook" "github.com/stashapp/stash/pkg/sliceutil/stringslice" @@ -31,11 +30,14 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) } // Populate a new tag from the input - newTag := models.NewTag() + newTag := models.CreateTagInput{ + Tag: &models.Tag{}, + } + *newTag.Tag = models.NewTag() newTag.Name = strings.TrimSpace(input.Name) newTag.SortName = translator.string(input.SortName) - newTag.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.Aliases)) + newTag.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.Aliases), newTag.Name)) newTag.Favorite = translator.bool(input.Favorite) newTag.Description = translator.string(input.Description) newTag.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag) @@ -60,6 +62,8 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) return nil, fmt.Errorf("converting child tag ids: %w", err) } + newTag.CustomFields = convertMapJSONNumbers(input.CustomFields) + // Process the base 64 encoded image string var imageData []byte if input.Image != nil { @@ -73,7 +77,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Tag - if err := tag.ValidateCreate(ctx, newTag, qb); err != nil { + if err := tag.ValidateCreate(ctx, *newTag.Tag, qb); err != nil { return err } @@ -98,17 +102,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) return r.getTag(ctx, newTag.ID) } -func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) (*models.Tag, error) { - tagID, err := strconv.Atoi(input.ID) - if err != nil { - return nil, 
fmt.Errorf("converting id: %w", err) - } - - translator := changesetTranslator{ - inputMap: getUpdateInputMap(ctx), - } - - // Populate tag from the input +func tagPartialFromInput(input TagUpdateInput, translator changesetTranslator) (*models.TagPartial, error) { updatedTag := models.NewTagPartial() updatedTag.Name = translator.optionalString(input.Name, "name") @@ -127,6 +121,7 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) } updatedTag.StashIDs = translator.updateStashIDs(updateStashIDInputs, "stash_ids") + var err error updatedTag.ParentIDs, err = translator.updateIds(input.ParentIds, "parent_ids") if err != nil { return nil, fmt.Errorf("converting parent tag ids: %w", err) @@ -137,6 +132,32 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) return nil, fmt.Errorf("converting child tag ids: %w", err) } + if input.CustomFields != nil { + updatedTag.CustomFields = *input.CustomFields + // convert json.Numbers to int/float + updatedTag.CustomFields.Full = convertMapJSONNumbers(updatedTag.CustomFields.Full) + updatedTag.CustomFields.Partial = convertMapJSONNumbers(updatedTag.CustomFields.Partial) + } + + return &updatedTag, nil +} + +func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) (*models.Tag, error) { + tagID, err := strconv.Atoi(input.ID) + if err != nil { + return nil, fmt.Errorf("converting id: %w", err) + } + + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), + } + + // Populate tag from the input + updatedTag, err := tagPartialFromInput(input, translator) + if err != nil { + return nil, err + } + var imageData []byte imageIncluded := translator.hasField("image") if input.Image != nil { @@ -151,11 +172,33 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Tag - if err := tag.ValidateUpdate(ctx, tagID, updatedTag, qb); err != nil { 
+ if updatedTag.Aliases != nil { + t, err := qb.Find(ctx, tagID) + if err != nil { + return err + } + if t != nil { + if err := t.LoadAliases(ctx, qb); err != nil { + return err + } + + newAliases := updatedTag.Aliases.Apply(t.Aliases.List()) + name := t.Name + if updatedTag.Name.Set { + name = updatedTag.Name.Value + } + + sanitized := stringslice.UniqueExcludeFold(newAliases, name) + updatedTag.Aliases.Values = sanitized + updatedTag.Aliases.Mode = models.RelationshipUpdateModeSet + } + } + + if err := tag.ValidateUpdate(ctx, tagID, *updatedTag, qb); err != nil { return err } - t, err = qb.UpdatePartial(ctx, tagID, updatedTag) + t, err = qb.UpdatePartial(ctx, tagID, *updatedTag) if err != nil { return err } @@ -303,6 +346,31 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput) return nil, nil } + var values *models.TagPartial + var imageData []byte + + if input.Values != nil { + translator := changesetTranslator{ + inputMap: getNamedUpdateInputMap(ctx, "input.values"), + } + + values, err = tagPartialFromInput(*input.Values, translator) + if err != nil { + return nil, err + } + + if input.Values.Image != nil { + var err error + imageData, err = utils.ProcessImageInput(ctx, *input.Values.Image) + if err != nil { + return nil, fmt.Errorf("processing cover image: %w", err) + } + } + } else { + v := models.NewTagPartial() + values = &v + } + var t *models.Tag if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Tag @@ -317,28 +385,22 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput) return fmt.Errorf("tag with id %d not found", destination) } - parents, children, err := tag.MergeHierarchy(ctx, destination, source, qb) - if err != nil { - return err - } - if err = qb.Merge(ctx, source, destination); err != nil { return err } - err = qb.UpdateParentTags(ctx, destination, parents) - if err != nil { - return err - } - err = qb.UpdateChildTags(ctx, destination, children) - if err != 
nil { + if err := tag.ValidateUpdate(ctx, destination, *values, qb); err != nil { return err } - err = tag.ValidateHierarchyExisting(ctx, t, parents, children, qb) - if err != nil { - logger.Errorf("Error merging tag: %s", err) - return err + if _, err := qb.UpdatePartial(ctx, destination, *values); err != nil { + return fmt.Errorf("updating tag: %w", err) + } + + if len(imageData) > 0 { + if err := qb.UpdateImage(ctx, destination, imageData); err != nil { + return err + } } return nil diff --git a/internal/api/resolver_query_configuration.go b/internal/api/resolver_query_configuration.go index 8a20fcad1..cf2c0e3cc 100644 --- a/internal/api/resolver_query_configuration.go +++ b/internal/api/resolver_query_configuration.go @@ -96,6 +96,11 @@ func makeConfigGeneralResult() *ConfigGeneralResult { CalculateMd5: config.IsCalculateMD5(), VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(), ParallelTasks: config.GetParallelTasks(), + UseCustomSpriteInterval: config.GetUseCustomSpriteInterval(), + SpriteInterval: config.GetSpriteInterval(), + SpriteScreenshotSize: config.GetSpriteScreenshotSize(), + MinimumSprites: config.GetMinimumSprites(), + MaximumSprites: config.GetMaximumSprites(), PreviewAudio: config.GetPreviewAudio(), PreviewSegments: config.GetPreviewSegments(), PreviewSegmentDuration: config.GetPreviewSegmentDuration(), @@ -156,6 +161,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult { javascriptEnabled := config.GetJavascriptEnabled() customLocales := config.GetCustomLocales() customLocalesEnabled := config.GetCustomLocalesEnabled() + disableCustomizations := config.GetDisableCustomizations() language := config.GetLanguage() handyKey := config.GetHandyKey() scriptOffset := config.GetFunscriptOffset() @@ -183,6 +189,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult { JavascriptEnabled: &javascriptEnabled, CustomLocales: &customLocales, CustomLocalesEnabled: &customLocalesEnabled, + DisableCustomizations: &disableCustomizations, 
Language: &language, ImageLightbox: &imageLightboxOptions, diff --git a/internal/api/server.go b/internal/api/server.go index ed11a99a5..a7516da52 100644 --- a/internal/api/server.go +++ b/internal/api/server.go @@ -450,7 +450,7 @@ func cssHandler(c *config.Config) func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) { var paths []string - if c.GetCSSEnabled() { + if c.GetCSSEnabled() && !c.GetDisableCustomizations() { // search for custom.css in current directory, then $HOME/.stash fn := c.GetCSSPath() exists, _ := fsutil.FileExists(fn) @@ -468,7 +468,7 @@ func javascriptHandler(c *config.Config) func(w http.ResponseWriter, r *http.Req return func(w http.ResponseWriter, r *http.Request) { var paths []string - if c.GetJavascriptEnabled() { + if c.GetJavascriptEnabled() && !c.GetDisableCustomizations() { // search for custom.js in current directory, then $HOME/.stash fn := c.GetJavascriptPath() exists, _ := fsutil.FileExists(fn) @@ -486,7 +486,7 @@ func customLocalesHandler(c *config.Config) func(w http.ResponseWriter, r *http. 
return func(w http.ResponseWriter, r *http.Request) { buffer := bytes.Buffer{} - if c.GetCustomLocalesEnabled() { + if c.GetCustomLocalesEnabled() && !c.GetDisableCustomizations() { // search for custom-locales.json in current directory, then $HOME/.stash path := c.GetCustomLocalesPath() exists, _ := fsutil.FileExists(path) diff --git a/internal/autotag/integration_test.go b/internal/autotag/integration_test.go index fc83df848..27cce014e 100644 --- a/internal/autotag/integration_test.go +++ b/internal/autotag/integration_test.go @@ -101,16 +101,15 @@ func createPerformer(ctx context.Context, pqb models.PerformerWriter) error { func createStudio(ctx context.Context, qb models.StudioWriter, name string) (*models.Studio, error) { // create the studio - studio := models.Studio{ - Name: name, - } + studio := models.NewCreateStudioInput() + studio.Name = name err := qb.Create(ctx, &studio) if err != nil { return nil, err } - return &studio, nil + return studio.Studio, nil } func createTag(ctx context.Context, qb models.TagWriter) error { @@ -119,7 +118,7 @@ func createTag(ctx context.Context, qb models.TagWriter) error { Name: testName, } - err := qb.Create(ctx, &tag) + err := qb.Create(ctx, &models.CreateTagInput{Tag: &tag}) if err != nil { return err } diff --git a/internal/identify/identify.go b/internal/identify/identify.go index 3d4c94467..6dc67dac3 100644 --- a/internal/identify/identify.go +++ b/internal/identify/identify.go @@ -147,6 +147,9 @@ func (t *SceneIdentifier) getOptions(source ScraperSource) MetadataOptions { if source.Options.IncludeMalePerformers != nil { options.IncludeMalePerformers = source.Options.IncludeMalePerformers } + if source.Options.PerformerGenders != nil { + options.PerformerGenders = source.Options.PerformerGenders + } if source.Options.SkipMultipleMatches != nil { options.SkipMultipleMatches = source.Options.SkipMultipleMatches } @@ -204,13 +207,23 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene, 
ret.Partial.StudioID = models.NewOptionalInt(*studioID) } - includeMalePerformers := true - if options.IncludeMalePerformers != nil { - includeMalePerformers = *options.IncludeMalePerformers + // Determine allowed genders for performer filtering + var allowedGenders []models.GenderEnum + if options.PerformerGenders != nil { + // New field takes precedence + allowedGenders = options.PerformerGenders + } else if options.IncludeMalePerformers != nil && !*options.IncludeMalePerformers { + // Legacy: if includeMalePerformers is false, include all genders except male + for _, g := range models.AllGenderEnum { + if g != models.GenderEnumMale { + allowedGenders = append(allowedGenders, g) + } + } } + // nil allowedGenders means include all performers addSkipSingleNamePerformerTag := false - performerIDs, err := rel.performers(ctx, !includeMalePerformers) + performerIDs, err := rel.performers(ctx, allowedGenders) if err != nil { if errors.Is(err, ErrSkipSingleNamePerformer) { addSkipSingleNamePerformerTag = true diff --git a/internal/identify/identify_test.go b/internal/identify/identify_test.go index eb646c305..35ad2006d 100644 --- a/internal/identify/identify_test.go +++ b/internal/identify/identify_test.go @@ -60,9 +60,15 @@ func TestSceneIdentifier_Identify(t *testing.T) { ) defaultOptions := &MetadataOptions{ - SetOrganized: &boolFalse, - SetCoverImage: &boolFalse, - IncludeMalePerformers: &boolFalse, + SetOrganized: &boolFalse, + SetCoverImage: &boolFalse, + PerformerGenders: []models.GenderEnum{ + models.GenderEnumFemale, + models.GenderEnumTransgenderFemale, + models.GenderEnumTransgenderMale, + models.GenderEnumIntersex, + models.GenderEnumNonBinary, + }, SkipSingleNamePerformers: &boolFalse, } sources := []ScraperSource{ @@ -216,9 +222,15 @@ func TestSceneIdentifier_modifyScene(t *testing.T) { boolFalse := false defaultOptions := &MetadataOptions{ - SetOrganized: &boolFalse, - SetCoverImage: &boolFalse, - IncludeMalePerformers: &boolFalse, + SetOrganized: 
&boolFalse, + SetCoverImage: &boolFalse, + PerformerGenders: []models.GenderEnum{ + models.GenderEnumFemale, + models.GenderEnumTransgenderFemale, + models.GenderEnumTransgenderMale, + models.GenderEnumIntersex, + models.GenderEnumNonBinary, + }, SkipSingleNamePerformers: &boolFalse, } tr := &SceneIdentifier{ diff --git a/internal/identify/options.go b/internal/identify/options.go index b4954a1f1..9e27a3e39 100644 --- a/internal/identify/options.go +++ b/internal/identify/options.go @@ -5,6 +5,7 @@ import ( "io" "strconv" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scraper" ) @@ -32,7 +33,10 @@ type MetadataOptions struct { SetCoverImage *bool `json:"setCoverImage"` SetOrganized *bool `json:"setOrganized"` // defaults to true if not provided + // Deprecated: use PerformerGenders instead IncludeMalePerformers *bool `json:"includeMalePerformers"` + // Filter to only include performers with these genders. If not provided, all genders are included. + PerformerGenders []models.GenderEnum `json:"performerGenders"` // defaults to true if not provided SkipMultipleMatches *bool `json:"skipMultipleMatches"` // ID of tag to tag skipped multiple matches with diff --git a/internal/identify/scene.go b/internal/identify/scene.go index 789674693..00d387c41 100644 --- a/internal/identify/scene.go +++ b/internal/identify/scene.go @@ -5,6 +5,7 @@ import ( "context" "errors" "fmt" + "slices" "strconv" "strings" "time" @@ -69,7 +70,7 @@ func (g sceneRelationships) studio(ctx context.Context) (*int, error) { return nil, nil } -func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([]int, error) { +func (g sceneRelationships) performers(ctx context.Context, allowedGenders []models.GenderEnum) ([]int, error) { fieldStrategy := g.fieldOptions["performers"] scraped := g.result.result.Performers @@ -97,8 +98,11 @@ func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([] singleNamePerformerSkipped := false for _, p := 
range scraped { - if ignoreMale && p.Gender != nil && strings.EqualFold(*p.Gender, models.GenderEnumMale.String()) { - continue + if allowedGenders != nil && p.Gender != nil { + gender := models.GenderEnum(strings.ToUpper(*p.Gender)) + if !slices.Contains(allowedGenders, gender) { + continue + } } performerID, err := getPerformerID(ctx, endpoint, g.performerCreator, p, createMissing, g.skipSingleNamePerformers) @@ -167,7 +171,9 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) { } else if createMissing { newTag := t.ToTag(endpoint, nil) - err := g.tagCreator.Create(ctx, newTag) + err := g.tagCreator.Create(ctx, &models.CreateTagInput{ + Tag: newTag, + }) if err != nil { return nil, fmt.Errorf("error creating tag: %w", err) } diff --git a/internal/identify/scene_test.go b/internal/identify/scene_test.go index a76aef516..9a3fcf025 100644 --- a/internal/identify/scene_test.go +++ b/internal/identify/scene_test.go @@ -27,7 +27,7 @@ func Test_sceneRelationships_studio(t *testing.T) { db := mocks.NewDatabase() db.Studio.On("Create", testCtx, mock.Anything).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) + s := args.Get(1).(*models.CreateStudioInput) s.ID = validStoredIDInt }).Return(nil) @@ -183,13 +183,13 @@ func Test_sceneRelationships_performers(t *testing.T) { } tests := []struct { - name string - scene *models.Scene - fieldOptions *FieldOptions - scraped []*models.ScrapedPerformer - ignoreMale bool - want []int - wantErr bool + name string + scene *models.Scene + fieldOptions *FieldOptions + scraped []*models.ScrapedPerformer + allowedGenders []models.GenderEnum + want []int + wantErr bool }{ { "ignore", @@ -202,7 +202,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &validStoredID, }, }, - false, + nil, nil, false, }, @@ -211,7 +211,7 @@ func Test_sceneRelationships_performers(t *testing.T) { emptyScene, defaultOptions, []*models.ScrapedPerformer{}, - false, + nil, nil, false, }, @@ -225,7 +225,7 @@ 
func Test_sceneRelationships_performers(t *testing.T) { StoredID: &existingPerformerStr, }, }, - false, + nil, nil, false, }, @@ -239,7 +239,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &validStoredID, }, }, - false, + nil, []int{existingPerformerID, validStoredIDInt}, false, }, @@ -254,7 +254,7 @@ func Test_sceneRelationships_performers(t *testing.T) { Gender: &male, }, }, - true, + []models.GenderEnum{models.GenderEnumFemale, models.GenderEnumTransgenderMale, models.GenderEnumTransgenderFemale, models.GenderEnumIntersex, models.GenderEnumNonBinary}, nil, false, }, @@ -270,7 +270,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &validStoredID, }, }, - false, + nil, []int{validStoredIDInt}, false, }, @@ -287,7 +287,7 @@ func Test_sceneRelationships_performers(t *testing.T) { Gender: &female, }, }, - true, + []models.GenderEnum{models.GenderEnumFemale, models.GenderEnumTransgenderMale, models.GenderEnumTransgenderFemale, models.GenderEnumIntersex, models.GenderEnumNonBinary}, []int{validStoredIDInt}, false, }, @@ -304,7 +304,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &invalidStoredID, }, }, - false, + nil, nil, true, }, @@ -319,7 +319,7 @@ func Test_sceneRelationships_performers(t *testing.T) { }, } - got, err := tr.performers(testCtx, tt.ignoreMale) + got, err := tr.performers(testCtx, tt.allowedGenders) if (err != nil) != tt.wantErr { t.Errorf("sceneRelationships.performers() error = %v, wantErr %v", err, tt.wantErr) return @@ -368,14 +368,14 @@ func Test_sceneRelationships_tags(t *testing.T) { db := mocks.NewDatabase() - db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.Tag) bool { - return p.Name == validName + db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateTagInput) bool { + return p.Tag.Name == validName })).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = validStoredIDInt + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = 
validStoredIDInt }).Return(nil) - db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.Tag) bool { - return p.Name == invalidName + db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateTagInput) bool { + return p.Tag.Name == invalidName })).Return(errors.New("error creating tag")) tr := sceneRelationships{ diff --git a/internal/identify/studio_test.go b/internal/identify/studio_test.go index 5424a6a93..083675650 100644 --- a/internal/identify/studio_test.go +++ b/internal/identify/studio_test.go @@ -21,13 +21,13 @@ func Test_createMissingStudio(t *testing.T) { db := mocks.NewDatabase() - db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.Studio) bool { + db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateStudioInput) bool { return p.Name == validName })).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) + s := args.Get(1).(*models.CreateStudioInput) s.ID = createdID }).Return(nil) - db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.Studio) bool { + db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateStudioInput) bool { return p.Name == invalidName })).Return(errors.New("error creating studio")) diff --git a/internal/manager/backup.go b/internal/manager/backup.go new file mode 100644 index 000000000..4a41b263b --- /dev/null +++ b/internal/manager/backup.go @@ -0,0 +1,185 @@ +package manager + +import ( + "archive/zip" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "strings" + + "github.com/stashapp/stash/internal/manager/config" + "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/logger" +) + +type databaseBackupZip struct { + *zip.Writer +} + +func (z *databaseBackupZip) zipFileRename(fn, outDir, outFn string) error { + p := filepath.Join(outDir, outFn) + p = filepath.ToSlash(p) + + f, err := z.Create(p) + if err != nil { + return fmt.Errorf("error creating zip entry for %s: %v", fn, err) + } + + i, err := os.Open(fn) + if err != nil { + return 
fmt.Errorf("error opening %s: %v", fn, err) + } + + defer i.Close() + + if _, err := io.Copy(f, i); err != nil { + return fmt.Errorf("error writing %s to zip: %v", fn, err) + } + + return nil +} + +func (z *databaseBackupZip) zipFile(fn, outDir string) error { + return z.zipFileRename(fn, outDir, filepath.Base(fn)) +} + +func (s *Manager) BackupDatabase(download bool, includeBlobs bool) (string, string, error) { + var backupPath string + var backupName string + + // if we include blobs, then the output is a zip file + // if not, using the same backup logic as before, which creates a sqlite file + if !includeBlobs || s.Config.GetBlobsStorage() != config.BlobStorageTypeFilesystem { + return s.backupDatabaseOnly(download) + } + + // use tmp directory for the backup + backupDir := s.Paths.Generated.Tmp + if err := fsutil.EnsureDir(backupDir); err != nil { + return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) + } + f, err := os.CreateTemp(backupDir, "backup*.sqlite") + if err != nil { + return "", "", err + } + + backupPath = f.Name() + backupName = s.Database.DatabaseBackupPath("") + f.Close() + + // delete the temp file so that the backup operation can create it + if err := os.Remove(backupPath); err != nil { + return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err) + } + + if err := s.Database.Backup(backupPath); err != nil { + return "", "", err + } + + // create a zip file + zipFileDir := s.Paths.Generated.Downloads + if !download { + zipFileDir = s.Config.GetBackupDirectoryPathOrDefault() + if zipFileDir != "" { + if err := fsutil.EnsureDir(zipFileDir); err != nil { + return "", "", fmt.Errorf("could not create backup directory %v: %w", zipFileDir, err) + } + } + } + + zipFileName := backupName + ".zip" + zipFilePath := filepath.Join(zipFileDir, zipFileName) + + logger.Debugf("Preparing zip file for database backup at %v", zipFilePath) + + zf, err := os.Create(zipFilePath) + if err != nil { + 
return "", "", fmt.Errorf("could not create zip file %v: %w", zipFilePath, err) + } + defer zf.Close() + + z := databaseBackupZip{ + Writer: zip.NewWriter(zf), + } + + defer z.Close() + + // move the database file into the zip + dbFn := filepath.Base(s.Config.GetDatabasePath()) + if err := z.zipFileRename(backupPath, "", dbFn); err != nil { + return "", "", fmt.Errorf("could not add database backup to zip file: %w", err) + } + + if err := os.Remove(backupPath); err != nil { + return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err) + } + + // walk the blobs directory and add files to the zip + blobsDir := s.Config.GetBlobsPath() + err = filepath.WalkDir(blobsDir, func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + if d.IsDir() { + return nil + } + + // calculate out dir by removing the blobsDir prefix from the path + outDir := filepath.Join("blobs", strings.TrimPrefix(filepath.Dir(path), blobsDir)) + if err := z.zipFile(path, outDir); err != nil { + return fmt.Errorf("could not add blob %v to zip file: %w", path, err) + } + + return nil + }) + + if err != nil { + return "", "", fmt.Errorf("error walking blobs directory: %w", err) + } + + return zipFilePath, zipFileName, nil +} + +func (s *Manager) backupDatabaseOnly(download bool) (string, string, error) { + var backupPath string + var backupName string + + if download { + backupDir := s.Paths.Generated.Downloads + if err := fsutil.EnsureDir(backupDir); err != nil { + return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) + } + f, err := os.CreateTemp(backupDir, "backup*.sqlite") + if err != nil { + return "", "", err + } + + backupPath = f.Name() + backupName = s.Database.DatabaseBackupPath("") + f.Close() + + // delete the temp file so that the backup operation can create it + if err := os.Remove(backupPath); err != nil { + return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, 
err) + } + } else { + backupDir := s.Config.GetBackupDirectoryPathOrDefault() + if backupDir != "" { + if err := fsutil.EnsureDir(backupDir); err != nil { + return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) + } + } + backupPath = s.Database.DatabaseBackupPath(backupDir) + backupName = filepath.Base(backupPath) + } + + err := s.Database.Backup(backupPath) + if err != nil { + return "", "", err + } + + return backupPath, backupName, nil +} diff --git a/internal/manager/config/config.go b/internal/manager/config/config.go index 35534f119..19e263810 100644 --- a/internal/manager/config/config.go +++ b/internal/manager/config/config.go @@ -83,6 +83,21 @@ const ( ParallelTasks = "parallel_tasks" parallelTasksDefault = 1 + UseCustomSpriteInterval = "use_custom_sprite_interval" + UseCustomSpriteIntervalDefault = false + + SpriteInterval = "sprite_interval" + SpriteIntervalDefault = 30 + + MinimumSprites = "minimum_sprites" + MinimumSpritesDefault = 10 + + MaximumSprites = "maximum_sprites" + MaximumSpritesDefault = 500 + + SpriteScreenshotSize = "sprite_screenshot_width" + spriteScreenshotSizeDefault = 160 + PreviewPreset = "preview_preset" TranscodeHardwareAcceleration = "ffmpeg.hardware_acceleration" @@ -194,6 +209,7 @@ const ( CSSEnabled = "cssenabled" JavascriptEnabled = "javascriptenabled" CustomLocalesEnabled = "customlocalesenabled" + DisableCustomizations = "disable_customizations" ShowScrubber = "show_scrubber" showScrubberDefault = true @@ -974,6 +990,50 @@ func (i *Config) GetParallelTasksWithAutoDetection() int { return parallelTasks } +// GetUseCustomSpriteInterval returns true if the sprite minimum, maximum, and interval settings +// should be used instead of the default +func (i *Config) GetUseCustomSpriteInterval() bool { + value := i.getBool(UseCustomSpriteInterval) + return value +} + +// GetSpriteInterval returns the time (in seconds) to be between each scrubber sprite +// A value of 0 indicates that the sprite 
interval should be automatically determined +// based on the minimum sprite setting. +func (i *Config) GetSpriteInterval() float64 { + value := i.getFloat64(SpriteInterval) + return value +} + +// GetMinimumSprites returns the minimum number of sprites that have to be generated +// A value of 0 will be overridden with the default of 10. +func (i *Config) GetMinimumSprites() int { + value := i.getInt(MinimumSprites) + if value <= 0 { + return MinimumSpritesDefault + } + return value +} + +// GetMaximumSprites returns the maximum number of sprites that can be generated +// A value of 0 indicates no maximum. +func (i *Config) GetMaximumSprites() int { + value := i.getInt(MaximumSprites) + return value +} + +// GetSpriteScreenshotSize returns the required size of the screenshots to be taken +// during sprite generation in pixels. This will be the width for landscape scenes +// and the height for portrait scenes, with the other dimension being scaled to maintain +// the aspect ratio. If the value is less than or equal to 0, the default will be used. +func (i *Config) GetSpriteScreenshotSize() int { + value := i.getInt(SpriteScreenshotSize) + if value <= 0 { + return spriteScreenshotSizeDefault + } + return value +} + func (i *Config) GetPreviewAudio() bool { return i.getBool(PreviewAudio) } @@ -1479,6 +1539,13 @@ func (i *Config) GetCustomLocalesEnabled() bool { return i.getBool(CustomLocalesEnabled) } +// GetDisableCustomizations returns true if all customizations (plugins, custom CSS, +// custom JavaScript, and custom locales) should be disabled. This is useful for +// troubleshooting issues without permanently disabling individual customizations. 
+func (i *Config) GetDisableCustomizations() bool { + return i.getBool(DisableCustomizations) +} + func (i *Config) GetHandyKey() string { return i.getString(HandyKey) } @@ -1853,6 +1920,12 @@ func (i *Config) setDefaultValues() { i.setDefault(PreviewAudio, previewAudioDefault) i.setDefault(SoundOnPreview, false) + i.setDefault(UseCustomSpriteInterval, UseCustomSpriteIntervalDefault) + i.setDefault(SpriteInterval, SpriteIntervalDefault) + i.setDefault(MinimumSprites, MinimumSpritesDefault) + i.setDefault(MaximumSprites, MaximumSpritesDefault) + i.setDefault(SpriteScreenshotSize, spriteScreenshotSizeDefault) + i.setDefault(ThemeColor, DefaultThemeColor) i.setDefault(WriteImageThumbnails, writeImageThumbnailsDefault) diff --git a/internal/manager/config/tasks.go b/internal/manager/config/tasks.go index 0cfabef30..af7d5f674 100644 --- a/internal/manager/config/tasks.go +++ b/internal/manager/config/tasks.go @@ -11,8 +11,10 @@ type ScanMetadataOptions struct { ScanGenerateImagePreviews bool `json:"scanGenerateImagePreviews"` // Generate sprites during scan ScanGenerateSprites bool `json:"scanGenerateSprites"` - // Generate phashes during scan + // Generate video phashes during scan ScanGeneratePhashes bool `json:"scanGeneratePhashes"` + // Generate image phashes during scan + ScanGenerateImagePhashes bool `json:"scanGenerateImagePhashes"` // Generate image thumbnails during scan ScanGenerateThumbnails bool `json:"scanGenerateThumbnails"` // Generate image thumbnails during scan diff --git a/internal/manager/generator_sprite.go b/internal/manager/generator_sprite.go index c28d28674..dc56fde88 100644 --- a/internal/manager/generator_sprite.go +++ b/internal/manager/generator_sprite.go @@ -21,8 +21,7 @@ type SpriteGenerator struct { VideoChecksum string ImageOutputPath string VTTOutputPath string - Rows int - Columns int + Config SpriteGeneratorConfig SlowSeek bool // use alternate seek function, very slow! 
Overwrite bool @@ -30,13 +29,81 @@ type SpriteGenerator struct { g *generate.Generator } -func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageOutputPath string, vttOutputPath string, rows int, cols int) (*SpriteGenerator, error) { +// SpriteGeneratorConfig holds configuration for the SpriteGenerator +type SpriteGeneratorConfig struct { + // MinimumSprites is the minimum number of sprites to generate, even if the video duration is short + // SpriteInterval will be adjusted accordingly to ensure at least this many sprites are generated. + // A value of 0 means no minimum, and the generator will use the provided SpriteInterval or + // calculate it based on the video duration and MaximumSprites + MinimumSprites int + + // MaximumSprites is the maximum number of sprites to generate, even if the video duration is long + // SpriteInterval will be adjusted accordingly to ensure no more than this many sprites are generated + // A value of 0 means no maximum, and the generator will use the provided SpriteInterval or + // calculate it based on the video duration and MinimumSprites + MaximumSprites int + + // SpriteInterval is the default interval in seconds between each sprite. + // If MinimumSprites or MaximumSprites are set, this value will be adjusted accordingly + // to ensure the desired number of sprites are generated + // A value of 0 means the generator will calculate the interval based on the video duration and + // the provided MinimumSprites and MaximumSprites + SpriteInterval float64 + + // SpriteSize is the size in pixels of the longest dimension of each sprite image. 
+ // The other dimension will be automatically calculated to maintain the aspect ratio of the video + SpriteSize int +} + +const ( + // DefaultSpriteAmount is the default number of sprites to generate if no configuration is provided + // This corresponds to the legacy behavior of the generator, which generates 81 sprites at equal + // intervals across the video duration + DefaultSpriteAmount = 81 + + // DefaultSpriteSize is the default size in pixels of the longest dimension of each sprite image + // if no configuration is provided. This corresponds to the legacy behavior of the generator. + DefaultSpriteSize = 160 +) + +var DefaultSpriteGeneratorConfig = SpriteGeneratorConfig{ + MinimumSprites: DefaultSpriteAmount, + MaximumSprites: DefaultSpriteAmount, + SpriteInterval: 0, + SpriteSize: DefaultSpriteSize, +} + +// NewSpriteGenerator creates a new SpriteGenerator for the given video file and configuration +// It calculates the appropriate sprite interval and count based on the video duration and the provided configuration +func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageOutputPath string, vttOutputPath string, config SpriteGeneratorConfig) (*SpriteGenerator, error) { exists, err := fsutil.FileExists(videoFile.Path) if !exists { return nil, err } + + if videoFile.VideoStreamDuration <= 0 { + s := fmt.Sprintf("video %s: duration(%.3f)/frame count(%d) invalid, skipping sprite creation", videoFile.Path, videoFile.VideoStreamDuration, videoFile.FrameCount) + return nil, errors.New(s) + } + + config.SpriteInterval = calculateSpriteInterval(videoFile, config) + chunkCount := int(math.Ceil(videoFile.VideoStreamDuration / config.SpriteInterval)) + + // adjust the chunk count to the next highest perfect square, to ensure the sprite image + // is completely filled (no empty space in the grid) and the grid is as square as possible (minimizing the number of rows/columns) + gridSize := generate.GetSpriteGridSize(chunkCount) + newChunkCount := 
gridSize * gridSize + + if newChunkCount != chunkCount { + logger.Debugf("[generator] adjusting chunk count from %d to %d to fit a %dx%d grid", chunkCount, newChunkCount, gridSize, gridSize) + chunkCount = newChunkCount + } + + if config.SpriteSize <= 0 { + config.SpriteSize = DefaultSpriteSize + } + slowSeek := false - chunkCount := rows * cols // For files with small duration / low frame count try to seek using frame number intead of seconds if videoFile.VideoStreamDuration < 5 || (0 < videoFile.FrameCount && videoFile.FrameCount <= int64(chunkCount)) { // some files can have FrameCount == 0, only use SlowSeek if duration < 5 @@ -71,9 +138,8 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO VideoChecksum: videoChecksum, ImageOutputPath: imageOutputPath, VTTOutputPath: vttOutputPath, - Rows: rows, + Config: config, SlowSeek: slowSeek, - Columns: cols, g: &generate.Generator{ Encoder: instance.FFMpeg, FFMpegConfig: instance.Config, @@ -83,6 +149,40 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO }, nil } +func calculateSpriteInterval(videoFile ffmpeg.VideoFile, config SpriteGeneratorConfig) float64 { + // If a custom sprite interval is provided, start with that + spriteInterval := config.SpriteInterval + + // If no custom interval is provided, calculate the interval based on the + // video duration and minimum sprite count + if spriteInterval <= 0 { + minSprites := config.MinimumSprites + if minSprites <= 0 { + panic("invalid configuration: MinimumSprites must be greater than 0 if SpriteInterval is not set") + } + + logger.Debugf("[generator] calculating sprite interval for video duration %.3fs with minimum sprites %d", videoFile.VideoStreamDuration, minSprites) + return videoFile.VideoStreamDuration / float64(minSprites) + } + + // Calculate the number of sprites that would be generated with the provided interval + spriteCount := int(math.Ceil(videoFile.VideoStreamDuration / spriteInterval)) + + 
// If the calculated sprite count is greater than the maximum, adjust the interval to meet the maximum + if config.MaximumSprites > 0 && spriteCount > int(config.MaximumSprites) { + spriteInterval = videoFile.VideoStreamDuration / float64(config.MaximumSprites) + logger.Debugf("[generator] provided sprite interval %.1fs results in %d sprites, which exceeds the maximum of %d, adjusting interval to %.1fs", config.SpriteInterval, spriteCount, config.MaximumSprites, spriteInterval) + } + + // If the calculated sprite count is less than the minimum, adjust the interval to meet the minimum + if config.MinimumSprites > 0 && spriteCount < int(config.MinimumSprites) { + spriteInterval = videoFile.VideoStreamDuration / float64(config.MinimumSprites) + logger.Debugf("[generator] provided sprite interval %.1fs results in %d sprites, which is less than the minimum of %d, adjusting interval to %.1fs", config.SpriteInterval, spriteCount, config.MinimumSprites, spriteInterval) + } + + return spriteInterval +} + func (g *SpriteGenerator) Generate() error { if err := g.generateSpriteImage(); err != nil { return err @@ -100,6 +200,8 @@ func (g *SpriteGenerator) generateSpriteImage() error { var images []image.Image + isPortrait := g.Info.VideoFile.Height > g.Info.VideoFile.Width + if !g.SlowSeek { logger.Infof("[generator] generating sprite image for %s", g.Info.VideoFile.Path) // generate `ChunkCount` thumbnails @@ -107,8 +209,7 @@ func (g *SpriteGenerator) generateSpriteImage() error { for i := 0; i < g.Info.ChunkCount; i++ { time := float64(i) * stepSize - - img, err := g.g.SpriteScreenshot(context.TODO(), g.Info.VideoFile.Path, time) + img, err := g.g.SpriteScreenshot(context.TODO(), g.Info.VideoFile.Path, time, g.Config.SpriteSize, isPortrait) if err != nil { return err } @@ -126,7 +227,7 @@ func (g *SpriteGenerator) generateSpriteImage() error { return errors.New("invalid frame number conversion") } - img, err := g.g.SpriteScreenshotSlow(context.TODO(), g.Info.VideoFile.Path, 
int(frame)) + img, err := g.g.SpriteScreenshotSlow(context.TODO(), g.Info.VideoFile.Path, int(frame), g.Config.SpriteSize) if err != nil { return err } @@ -158,7 +259,7 @@ func (g *SpriteGenerator) generateSpriteVTT() error { stepSize /= g.Info.FrameRate } - return g.g.SpriteVTT(context.TODO(), g.VTTOutputPath, g.ImageOutputPath, stepSize) + return g.g.SpriteVTT(context.TODO(), g.VTTOutputPath, g.ImageOutputPath, stepSize, g.Info.ChunkCount) } func (g *SpriteGenerator) imageExists() bool { diff --git a/internal/manager/manager.go b/internal/manager/manager.go index f4f3fa636..d3b91ec29 100644 --- a/internal/manager/manager.go +++ b/internal/manager/manager.go @@ -313,46 +313,6 @@ func (s *Manager) validateFFmpeg() error { return nil } -func (s *Manager) BackupDatabase(download bool) (string, string, error) { - var backupPath string - var backupName string - if download { - backupDir := s.Paths.Generated.Downloads - if err := fsutil.EnsureDir(backupDir); err != nil { - return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) - } - f, err := os.CreateTemp(backupDir, "backup*.sqlite") - if err != nil { - return "", "", err - } - - backupPath = f.Name() - backupName = s.Database.DatabaseBackupPath("") - f.Close() - - // delete the temp file so that the backup operation can create it - if err := os.Remove(backupPath); err != nil { - return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err) - } - } else { - backupDir := s.Config.GetBackupDirectoryPathOrDefault() - if backupDir != "" { - if err := fsutil.EnsureDir(backupDir); err != nil { - return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) - } - } - backupPath = s.Database.DatabaseBackupPath(backupDir) - backupName = filepath.Base(backupPath) - } - - err := s.Database.Backup(backupPath) - if err != nil { - return "", "", err - } - - return backupPath, backupName, nil -} - func (s *Manager) AnonymiseDatabase(download bool) 
(string, string, error) { var outPath string var outName string diff --git a/internal/manager/manager_tasks.go b/internal/manager/manager_tasks.go index 1e66433be..bac726c1b 100644 --- a/internal/manager/manager_tasks.go +++ b/internal/manager/manager_tasks.go @@ -100,6 +100,8 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error return 0, err } + cfg := config.GetInstance() + scanner := &file.Scanner{ Repository: file.NewRepository(s.Repository), FileDecorators: []file.Decorator{ @@ -118,6 +120,10 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error }, FingerprintCalculator: &fingerprintCalculator{s.Config}, FS: &file.OsFS{}, + ZipFileExtensions: cfg.GetGalleryExtensions(), + // ScanFilters is set in ScanJob.Execute + // HandlerRequiredFilters is set in ScanJob.Execute + Rescan: input.Rescan, } scanJob := ScanJob{ diff --git a/internal/manager/repository.go b/internal/manager/repository.go index 8d4ef1137..afbf0b963 100644 --- a/internal/manager/repository.go +++ b/internal/manager/repository.go @@ -10,17 +10,17 @@ import ( ) type SceneService interface { - Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error) + Create(ctx context.Context, input models.CreateSceneInput) (*models.Scene, error) AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error Merge(ctx context.Context, sourceIDs []int, destinationID int, fileDeleter *scene.FileDeleter, options scene.MergeOptions) error - Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error + Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error FindByIDs(ctx context.Context, ids []int, load ...scene.LoadRelationshipOption) ([]*models.Scene, error) sceneFingerprintGetter } type ImageService interface { - Destroy(ctx context.Context, 
image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error + Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) } @@ -31,7 +31,7 @@ type GalleryService interface { SetCover(ctx context.Context, g *models.Gallery, coverImageId int) error ResetCover(ctx context.Context, g *models.Gallery) error - Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) + Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) ([]*models.Image, error) ValidateImageGalleryChange(ctx context.Context, i *models.Image, updateIDs models.UpdateIDs) error diff --git a/internal/manager/task/clean_generated.go b/internal/manager/task/clean_generated.go index 902989046..a59bda6d1 100644 --- a/internal/manager/task/clean_generated.go +++ b/internal/manager/task/clean_generated.go @@ -565,6 +565,7 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job. j.setProgressFromFilename(sceneHash[0:2], progress) // check if the scene exists + var walkErr error if err := j.Repository.WithReadTxn(ctx, func(ctx context.Context) error { var err error scenes, err = j.getScenesWithHash(ctx, sceneHash) @@ -575,15 +576,18 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job. if len(scenes) == 0 { j.logDelete("deleting unused marker directory: %s", sceneHash) j.deleteDir(path) - } else { - // get the markers now - for _, scene := range scenes { - thisMarkers, err := j.Repository.SceneMarker.FindBySceneID(ctx, scene.ID) - if err != nil { - return fmt.Errorf("error getting markers for scene: %v", err) - } - markers = append(markers, thisMarkers...) 
+ // #5911 - we've just deleted the directory, so skip it in the walk to avoid errors + walkErr = fs.SkipDir + return nil + } + + // get the markers now + for _, scene := range scenes { + thisMarkers, err := j.Repository.SceneMarker.FindBySceneID(ctx, scene.ID) + if err != nil { + return fmt.Errorf("error getting markers for scene: %v", err) } + markers = append(markers, thisMarkers...) } return nil @@ -591,7 +595,7 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job. logger.Error(err.Error()) } - return nil + return walkErr } filename := info.Name() diff --git a/internal/manager/task_clean.go b/internal/manager/task_clean.go index 9690cf4c8..ddd86e2f2 100644 --- a/internal/manager/task_clean.go +++ b/internal/manager/task_clean.go @@ -300,7 +300,10 @@ func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *fil // only delete if the scene has no other files if len(scene.Files.List()) <= 1 { logger.Infof("Deleting scene %q since it has no other related files", scene.DisplayName()) - if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, true, false); err != nil { + const deleteGenerated = true + const deleteFile = false + const destroyFileEntry = false + if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return err } @@ -421,7 +424,10 @@ func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *fil if len(i.Files.List()) <= 1 { logger.Infof("Deleting image %q since it has no other related files", i.DisplayName()) - if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, true, false); err != nil { + const deleteGenerated = true + const deleteFile = false + const destroyFileEntry = false + if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return err } diff --git a/internal/manager/task_generate.go b/internal/manager/task_generate.go index 
30ecd08bf..cc991d5d6 100644 --- a/internal/manager/task_generate.go +++ b/internal/manager/task_generate.go @@ -29,6 +29,7 @@ type GenerateMetadataInput struct { // Generate transcodes even if not required ForceTranscodes bool `json:"forceTranscodes"` Phashes bool `json:"phashes"` + ImagePhashes bool `json:"imagePhashes"` InteractiveHeatmapsSpeeds bool `json:"interactiveHeatmapsSpeeds"` ClipPreviews bool `json:"clipPreviews"` ImageThumbnails bool `json:"imageThumbnails"` @@ -36,6 +37,10 @@ type GenerateMetadataInput struct { SceneIDs []string `json:"sceneIDs"` // marker ids to generate for MarkerIDs []string `json:"markerIDs"` + // image ids to generate for + ImageIDs []string `json:"imageIDs"` + // gallery ids to generate for + GalleryIDs []string `json:"galleryIDs"` // overwrite existing media Overwrite bool `json:"overwrite"` } @@ -73,6 +78,7 @@ type totalsGenerate struct { markers int64 transcodes int64 phashes int64 + imagePhashes int64 interactiveHeatmapSpeeds int64 clipPreviews int64 imageThumbnails int64 @@ -82,8 +88,9 @@ type totalsGenerate struct { func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error { var scenes []*models.Scene - var err error var markers []*models.SceneMarker + var images []*models.Image + var err error j.overwrite = j.input.Overwrite j.fileNamingAlgo = config.GetInstance().GetVideoFileNamingAlgorithm() @@ -105,6 +112,14 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error if err != nil { logger.Error(err.Error()) } + imageIDs, err := stringslice.StringSliceToIntSlice(j.input.ImageIDs) + if err != nil { + logger.Error(err.Error()) + } + galleryIDs, err := stringslice.StringSliceToIntSlice(j.input.GalleryIDs) + if err != nil { + logger.Error(err.Error()) + } g := &generate.Generator{ Encoder: instance.FFMpeg, @@ -118,7 +133,7 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error r := j.repository if err := r.WithReadTxn(ctx, func(ctx context.Context) 
error { qb := r.Scene - if len(j.input.SceneIDs) == 0 && len(j.input.MarkerIDs) == 0 { + if len(j.input.SceneIDs) == 0 && len(j.input.MarkerIDs) == 0 && len(j.input.ImageIDs) == 0 && len(j.input.GalleryIDs) == 0 { j.queueTasks(ctx, g, queue) } else { if len(j.input.SceneIDs) > 0 { @@ -141,6 +156,33 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error j.queueMarkerJob(g, m, queue) } } + + if len(j.input.ImageIDs) > 0 { + images, err = r.Image.FindMany(ctx, imageIDs) + for _, i := range images { + if err := i.LoadFiles(ctx, r.Image); err != nil { + return err + } + + j.queueImageJob(g, i, queue) + } + } + + if len(j.input.GalleryIDs) > 0 { + for _, galleryID := range galleryIDs { + imgs, err := r.Image.FindByGalleryID(ctx, galleryID) + if err != nil { + return err + } + for _, img := range imgs { + if err := img.LoadFiles(ctx, r.Image); err != nil { + return err + } + + j.queueImageJob(g, img, queue) + } + } + } } return nil @@ -172,14 +214,17 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error if j.input.Phashes { logMsg += fmt.Sprintf(" %d phashes", totals.phashes) } + if j.input.ImagePhashes { + logMsg += fmt.Sprintf(" %d image phashes", totals.imagePhashes) + } if j.input.InteractiveHeatmapsSpeeds { logMsg += fmt.Sprintf(" %d heatmaps & speeds", totals.interactiveHeatmapSpeeds) } if j.input.ClipPreviews { - logMsg += fmt.Sprintf(" %d Image Clip Previews", totals.clipPreviews) + logMsg += fmt.Sprintf(" %d image clip previews", totals.clipPreviews) } if j.input.ImageThumbnails { - logMsg += fmt.Sprintf(" %d Image Thumbnails", totals.imageThumbnails) + logMsg += fmt.Sprintf(" %d image thumbnails", totals.imageThumbnails) } if logMsg == "Generating" { logMsg = "Nothing selected to generate" @@ -284,7 +329,7 @@ func (j *GenerateJob) queueImagesTasks(ctx context.Context, g *generate.Generato r := j.repository - for more := j.input.ClipPreviews || j.input.ImageThumbnails; more; { + for more := 
j.input.ClipPreviews || j.input.ImageThumbnails || j.input.ImagePhashes; more; { if job.IsCancelled(ctx) { return } @@ -525,4 +570,23 @@ func (j *GenerateJob) queueImageJob(g *generate.Generator, image *models.Image, queue <- task } } + + if j.input.ImagePhashes { + // generate for all files in image + for _, f := range image.Files.List() { + if imageFile, ok := f.(*models.ImageFile); ok { + task := &GenerateImagePhashTask{ + repository: j.repository, + File: imageFile, + Overwrite: j.overwrite, + } + + if task.required() { + j.totals.imagePhashes++ + j.totals.tasks++ + queue <- task + } + } + } + } } diff --git a/internal/manager/task_generate_image_phash.go b/internal/manager/task_generate_image_phash.go new file mode 100644 index 000000000..a5c764df0 --- /dev/null +++ b/internal/manager/task_generate_image_phash.go @@ -0,0 +1,103 @@ +package manager + +import ( + "context" + "fmt" + + "github.com/stashapp/stash/pkg/hash/imagephash" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +type GenerateImagePhashTask struct { + repository models.Repository + File *models.ImageFile + Overwrite bool +} + +func (t *GenerateImagePhashTask) GetDescription() string { + return fmt.Sprintf("Generating phash for %s", t.File.Path) +} + +func (t *GenerateImagePhashTask) Start(ctx context.Context) { + if !t.required() { + return + } + + var hash int64 + set := false + + // #4393 - if there is a file with the same md5, we can use the same phash + // only use this if we're not overwriting + if !t.Overwrite { + existing, err := t.findExistingPhash(ctx) + if err != nil { + logger.Warnf("Error finding existing phash: %v", err) + } else if existing != nil { + logger.Infof("Using existing phash for %s", t.File.Path) + hash = existing.(int64) + set = true + } + } + + if !set { + generated, err := imagephash.Generate(instance.FFMpeg, t.File) + if err != nil { + logger.Errorf("Error generating phash for %q: %v", t.File.Path, err) + logErrorOutput(err) + 
return + } + + hash = int64(*generated) + } + + r := t.repository + if err := r.WithTxn(ctx, func(ctx context.Context) error { + t.File.Fingerprints = t.File.Fingerprints.AppendUnique(models.Fingerprint{ + Type: models.FingerprintTypePhash, + Fingerprint: hash, + }) + + return r.File.Update(ctx, t.File) + }); err != nil && ctx.Err() == nil { + logger.Errorf("Error setting phash: %v", err) + } +} + +func (t *GenerateImagePhashTask) findExistingPhash(ctx context.Context) (interface{}, error) { + r := t.repository + var ret interface{} + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { + md5 := t.File.Fingerprints.Get(models.FingerprintTypeMD5) + + // find other files with the same md5 + files, err := r.File.FindByFingerprint(ctx, models.Fingerprint{ + Type: models.FingerprintTypeMD5, + Fingerprint: md5, + }) + if err != nil { + return fmt.Errorf("finding files by md5: %w", err) + } + + // find the first file with a phash + for _, file := range files { + if phash := file.Base().Fingerprints.Get(models.FingerprintTypePhash); phash != nil { + ret = phash + return nil + } + } + return nil + }); err != nil { + return nil, err + } + + return ret, nil +} + +func (t *GenerateImagePhashTask) required() bool { + if t.Overwrite { + return true + } + + return t.File.Fingerprints.Get(models.FingerprintTypePhash) == nil +} diff --git a/internal/manager/task_generate_phash.go b/internal/manager/task_generate_phash.go index 54dc1a10b..5d35a8738 100644 --- a/internal/manager/task_generate_phash.go +++ b/internal/manager/task_generate_phash.go @@ -44,7 +44,7 @@ func (t *GeneratePhashTask) Start(ctx context.Context) { if !set { generated, err := videophash.Generate(instance.FFMpeg, t.File) if err != nil { - logger.Errorf("Error generating phash: %v", err) + logger.Errorf("Error generating phash for %q: %v", t.File.Path, err) logErrorOutput(err) return } diff --git a/internal/manager/task_generate_sprite.go b/internal/manager/task_generate_sprite.go index 
0275830ab..c173147cd 100644 --- a/internal/manager/task_generate_sprite.go +++ b/internal/manager/task_generate_sprite.go @@ -34,7 +34,17 @@ func (t *GenerateSpriteTask) Start(ctx context.Context) { sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) imagePath := instance.Paths.Scene.GetSpriteImageFilePath(sceneHash) vttPath := instance.Paths.Scene.GetSpriteVttFilePath(sceneHash) - generator, err := NewSpriteGenerator(*videoFile, sceneHash, imagePath, vttPath, 9, 9) + + cfg := DefaultSpriteGeneratorConfig + cfg.SpriteSize = instance.Config.GetSpriteScreenshotSize() + + if instance.Config.GetUseCustomSpriteInterval() { + cfg.MinimumSprites = instance.Config.GetMinimumSprites() + cfg.MaximumSprites = instance.Config.GetMaximumSprites() + cfg.SpriteInterval = instance.Config.GetSpriteInterval() + } + + generator, err := NewSpriteGenerator(*videoFile, sceneHash, imagePath, vttPath, cfg) if err != nil { logger.Errorf("error creating sprite generator: %s", err.Error()) diff --git a/internal/manager/task_scan.go b/internal/manager/task_scan.go index 6f7f34b3c..d09765577 100644 --- a/internal/manager/task_scan.go +++ b/internal/manager/task_scan.go @@ -2,13 +2,17 @@ package manager import ( "context" + "errors" "fmt" "io/fs" "path/filepath" "regexp" + "runtime/debug" + "sync" "time" "github.com/99designs/gqlgen/graphql/handler/lru" + "github.com/remeh/sizedwaitgroup" "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file/video" @@ -24,14 +28,13 @@ import ( "github.com/stashapp/stash/pkg/txn" ) -type scanner interface { - Scan(ctx context.Context, handlers []file.Handler, options file.ScanOptions, progressReporter file.ProgressReporter) -} - type ScanJob struct { - scanner scanner + scanner *file.Scanner input ScanMetadataInput subscriptions *subscriptionManager + + fileQueue chan file.ScannedFile + count int } func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error { @@ -55,22 +58,22 
@@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error { start := time.Now() + nTasks := cfg.GetParallelTasksWithAutoDetection() + const taskQueueSize = 200000 - taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, cfg.GetParallelTasksWithAutoDetection()) + taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, nTasks) var minModTime time.Time if j.input.Filter != nil && j.input.Filter.MinModTime != nil { minModTime = *j.input.Filter.MinModTime } - j.scanner.Scan(ctx, getScanHandlers(j.input, taskQueue, progress), file.ScanOptions{ - Paths: paths, - ScanFilters: []file.PathFilter{newScanFilter(c, repo, minModTime)}, - ZipFileExtensions: cfg.GetGalleryExtensions(), - ParallelTasks: cfg.GetParallelTasksWithAutoDetection(), - HandlerRequiredFilters: []file.Filter{newHandlerRequiredFilter(cfg, repo)}, - Rescan: j.input.Rescan, - }, progress) + // HACK - these should really be set in the scanner initialization + j.scanner.FileHandlers = getScanHandlers(j.input, taskQueue, progress) + j.scanner.ScanFilters = []file.PathFilter{newScanFilter(c, repo, minModTime)} + j.scanner.HandlerRequiredFilters = []file.Filter{newHandlerRequiredFilter(cfg, repo)} + + j.runJob(ctx, paths, nTasks, progress) taskQueue.Close() @@ -86,6 +89,264 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error { return nil } +func (j *ScanJob) runJob(ctx context.Context, paths []string, nTasks int, progress *job.Progress) { + var wg sync.WaitGroup + wg.Add(1) + + j.fileQueue = make(chan file.ScannedFile, scanQueueSize) + + go func() { + defer func() { + wg.Done() + + // handle panics in goroutine + if p := recover(); p != nil { + logger.Errorf("panic while queuing files for scan: %v", p) + logger.Errorf(string(debug.Stack())) + } + }() + + if err := j.queueFiles(ctx, paths, progress); err != nil { + if errors.Is(err, context.Canceled) { + return + } + + logger.Errorf("error queuing files for scan: %v", err) + return + } + + 
logger.Infof("Finished adding files to queue. %d files queued", j.count) + }() + + defer wg.Wait() + + j.processQueue(ctx, nTasks, progress) +} + +const scanQueueSize = 200000 + +func (j *ScanJob) queueFiles(ctx context.Context, paths []string, progress *job.Progress) error { + fs := &file.OsFS{} + + defer func() { + close(j.fileQueue) + + progress.AddTotal(j.count) + progress.Definite() + }() + + var err error + progress.ExecuteTask("Walking directory tree", func() { + for _, p := range paths { + err = file.SymWalk(fs, p, j.queueFileFunc(ctx, fs, nil, progress)) + if err != nil { + return + } + } + }) + + return err +} + +func (j *ScanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *file.ScannedFile, progress *job.Progress) fs.WalkDirFunc { + return func(path string, d fs.DirEntry, err error) error { + if err != nil { + // don't let errors prevent scanning + logger.Errorf("error scanning %s: %v", path, err) + return nil + } + + if err = ctx.Err(); err != nil { + return err + } + + info, err := d.Info() + if err != nil { + logger.Errorf("reading info for %q: %v", path, err) + return nil + } + + if !j.scanner.AcceptEntry(ctx, path, info) { + if info.IsDir() { + logger.Debugf("Skipping directory %s", path) + return fs.SkipDir + } + + logger.Debugf("Skipping file %s", path) + return nil + } + + size, err := file.GetFileSize(f, path, info) + if err != nil { + return err + } + + ff := file.ScannedFile{ + BaseFile: &models.BaseFile{ + DirEntry: models.DirEntry{ + ModTime: file.ModTime(info), + }, + Path: path, + Basename: filepath.Base(path), + Size: size, + }, + FS: f, + Info: info, + } + + if zipFile != nil { + ff.ZipFileID = &zipFile.ID + ff.ZipFile = zipFile + } + + if info.IsDir() { + // handle folders immediately + if err := j.handleFolder(ctx, ff, progress); err != nil { + if !errors.Is(err, context.Canceled) { + logger.Errorf("error processing %q: %v", path, err) + } + + // skip the directory since we won't be able to process the files anyway + 
return fs.SkipDir + } + + return nil + } + + // if zip file is present, we handle immediately + if zipFile != nil { + progress.ExecuteTask("Scanning "+path, func() { + // don't increment progress in zip files + if err := j.handleFile(ctx, ff, nil); err != nil { + if !errors.Is(err, context.Canceled) { + logger.Errorf("error processing %q: %v", path, err) + } + // don't return an error, just skip the file + } + }) + + return nil + } + + logger.Tracef("Queueing file %s for scanning", path) + j.fileQueue <- ff + + j.count++ + + return nil + } +} + +func (j *ScanJob) processQueue(ctx context.Context, parallelTasks int, progress *job.Progress) { + if parallelTasks < 1 { + parallelTasks = 1 + } + + wg := sizedwaitgroup.New(parallelTasks) + + func() { + defer func() { + wg.Wait() + + // handle panics in goroutine + if p := recover(); p != nil { + logger.Errorf("panic while scanning files: %v", p) + logger.Errorf(string(debug.Stack())) + } + }() + + for f := range j.fileQueue { + logger.Tracef("Processing queued file %s", f.Path) + if err := ctx.Err(); err != nil { + return + } + + wg.Add() + ff := f + go func() { + defer wg.Done() + j.processQueueItem(ctx, ff, progress) + }() + } + }() +} + +func (j *ScanJob) processQueueItem(ctx context.Context, f file.ScannedFile, progress *job.Progress) { + progress.ExecuteTask("Scanning "+f.Path, func() { + var err error + if f.Info.IsDir() { + err = j.handleFolder(ctx, f, progress) + } else { + err = j.handleFile(ctx, f, progress) + } + + if err != nil && !errors.Is(err, context.Canceled) { + logger.Errorf("error processing %q: %v", f.Path, err) + } + }) +} + +func (j *ScanJob) handleFolder(ctx context.Context, f file.ScannedFile, progress *job.Progress) error { + if progress != nil { + defer progress.Increment() + } + + _, err := j.scanner.ScanFolder(ctx, f) + if err != nil { + return err + } + + return nil +} + +func (j *ScanJob) handleFile(ctx context.Context, f file.ScannedFile, progress *job.Progress) error { + if progress != 
nil { + defer progress.Increment() + } + + r, err := j.scanner.ScanFile(ctx, f) + if err != nil { + return err + } + + // handle rename should have already handled the contents of the zip file + // so shouldn't need to scan it again + + if (r.New || r.Updated) && j.scanner.IsZipFile(f.Info.Name()) { + ff := r.File + f.BaseFile = ff.Base() + + // scan zip files with a different context that is not cancellable + // cancelling while scanning zip file contents results in the scan + // contents being partially completed + zipCtx := context.WithoutCancel(ctx) + + if err := j.scanZipFile(zipCtx, f, progress); err != nil { + logger.Errorf("Error scanning zip file %q: %v", f.Path, err) + } + } + + return nil +} + +func (j *ScanJob) scanZipFile(ctx context.Context, f file.ScannedFile, progress *job.Progress) error { + zipFS, err := f.FS.OpenZip(f.Path, f.Size) + if err != nil { + if errors.Is(err, file.ErrNotReaderAt) { + // can't walk the zip file + // just return + logger.Debugf("Skipping zip file %q as it cannot be opened for walking", f.Path) + return nil + } + + return err + } + + defer zipFS.Close() + + return file.SymWalk(zipFS, f.Path, j.queueFileFunc(ctx, zipFS, &f, progress)) +} + type extensionConfig struct { vidExt []string imgExt []string @@ -463,6 +724,29 @@ func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f model } } + if t.ScanGenerateImagePhashes { + progress.AddTotal(1) + phashFn := func(ctx context.Context) { + mgr := GetInstance() + // Only generate phash for image files, not video files + if imageFile, ok := f.(*models.ImageFile); ok { + taskPhash := GenerateImagePhashTask{ + repository: mgr.Repository, + File: imageFile, + Overwrite: overwrite, + } + taskPhash.Start(ctx) + } + progress.Increment() + } + + if g.sequentialScanning { + phashFn(ctx) + } else { + g.taskQueue.Add(fmt.Sprintf("Generating phash for %s", path), phashFn) + } + } + return nil } diff --git a/internal/manager/task_stash_box_tag.go 
b/internal/manager/task_stash_box_tag.go index 37859ba61..4848b46ad 100644 --- a/internal/manager/task_stash_box_tag.go +++ b/internal/manager/task_stash_box_tag.go @@ -275,6 +275,12 @@ func (t *stashBoxBatchStudioTagTask) getName() string { } func (t *stashBoxBatchStudioTagTask) Start(ctx context.Context) { + // Skip organized studios + if t.studio != nil && t.studio.Organized { + logger.Infof("Skipping organized studio %s", t.studio.Name) + return + } + studio, err := t.findStashBoxStudio(ctx) if err != nil { logger.Errorf("Error fetching studio data from stash-box: %v", err) diff --git a/internal/static/performer/NoName01.png b/internal/static/performer/NoName01.png deleted file mode 100644 index cdcba1db9..000000000 Binary files a/internal/static/performer/NoName01.png and /dev/null differ diff --git a/internal/static/performer/NoName02.png b/internal/static/performer/NoName02.png deleted file mode 100644 index 4687adc08..000000000 Binary files a/internal/static/performer/NoName02.png and /dev/null differ diff --git a/internal/static/performer/NoName02.svg b/internal/static/performer/NoName02.svg new file mode 100644 index 000000000..b5dbaf2b9 --- /dev/null +++ b/internal/static/performer/NoName02.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName03.png b/internal/static/performer/NoName03.png deleted file mode 100644 index 8ac0d13b7..000000000 Binary files a/internal/static/performer/NoName03.png and /dev/null differ diff --git a/internal/static/performer/NoName04.png b/internal/static/performer/NoName04.png deleted file mode 100644 index 41b55b816..000000000 Binary files a/internal/static/performer/NoName04.png and /dev/null differ diff --git a/internal/static/performer/NoName05.png b/internal/static/performer/NoName05.png deleted file mode 100644 index 8a49ba6d3..000000000 Binary files a/internal/static/performer/NoName05.png and /dev/null differ diff --git a/internal/static/performer/NoName05.svg 
b/internal/static/performer/NoName05.svg new file mode 100644 index 000000000..5a26d98d8 --- /dev/null +++ b/internal/static/performer/NoName05.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName06.png b/internal/static/performer/NoName06.png index 4359911ae..f2a8016e2 100644 Binary files a/internal/static/performer/NoName06.png and b/internal/static/performer/NoName06.png differ diff --git a/internal/static/performer/NoName07.png b/internal/static/performer/NoName07.png deleted file mode 100644 index 1bb5f6f82..000000000 Binary files a/internal/static/performer/NoName07.png and /dev/null differ diff --git a/internal/static/performer/NoName07.svg b/internal/static/performer/NoName07.svg new file mode 100644 index 000000000..ac90cf6d1 --- /dev/null +++ b/internal/static/performer/NoName07.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName08.png b/internal/static/performer/NoName08.png deleted file mode 100644 index 8ff7ff734..000000000 Binary files a/internal/static/performer/NoName08.png and /dev/null differ diff --git a/internal/static/performer/NoName09.png b/internal/static/performer/NoName09.png deleted file mode 100644 index 49b54b725..000000000 Binary files a/internal/static/performer/NoName09.png and /dev/null differ diff --git a/internal/static/performer/NoName09.svg b/internal/static/performer/NoName09.svg new file mode 100644 index 000000000..6009133a4 --- /dev/null +++ b/internal/static/performer/NoName09.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName10.png b/internal/static/performer/NoName10.png deleted file mode 100644 index a2b72043a..000000000 Binary files a/internal/static/performer/NoName10.png and /dev/null differ diff --git a/internal/static/performer/NoName11.png b/internal/static/performer/NoName11.png index 01034c2b0..45158b094 100644 Binary files a/internal/static/performer/NoName11.png and 
b/internal/static/performer/NoName11.png differ diff --git a/internal/static/performer/NoName12.png b/internal/static/performer/NoName12.png deleted file mode 100644 index 7f48ba39a..000000000 Binary files a/internal/static/performer/NoName12.png and /dev/null differ diff --git a/internal/static/performer/NoName12.svg b/internal/static/performer/NoName12.svg new file mode 100644 index 000000000..89843a774 --- /dev/null +++ b/internal/static/performer/NoName12.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName13.png b/internal/static/performer/NoName13.png deleted file mode 100644 index fdefafb59..000000000 Binary files a/internal/static/performer/NoName13.png and /dev/null differ diff --git a/internal/static/performer/NoName13.svg b/internal/static/performer/NoName13.svg new file mode 100644 index 000000000..fbbacaacf --- /dev/null +++ b/internal/static/performer/NoName13.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName14.png b/internal/static/performer/NoName14.png deleted file mode 100644 index 20a20a209..000000000 Binary files a/internal/static/performer/NoName14.png and /dev/null differ diff --git a/internal/static/performer/NoName14.svg b/internal/static/performer/NoName14.svg new file mode 100644 index 000000000..1d0231ab3 --- /dev/null +++ b/internal/static/performer/NoName14.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName15.png b/internal/static/performer/NoName15.png deleted file mode 100644 index cfc9d3a8c..000000000 Binary files a/internal/static/performer/NoName15.png and /dev/null differ diff --git a/internal/static/performer/NoName16.png b/internal/static/performer/NoName16.png deleted file mode 100644 index f54744280..000000000 Binary files a/internal/static/performer/NoName16.png and /dev/null differ diff --git a/internal/static/performer/NoName17.png b/internal/static/performer/NoName17.png deleted file mode 100644 
index 068d1cf73..000000000 Binary files a/internal/static/performer/NoName17.png and /dev/null differ diff --git a/internal/static/performer/NoName17.svg b/internal/static/performer/NoName17.svg new file mode 100644 index 000000000..8df98d6c4 --- /dev/null +++ b/internal/static/performer/NoName17.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName18.png b/internal/static/performer/NoName18.png deleted file mode 100644 index 179d1d323..000000000 Binary files a/internal/static/performer/NoName18.png and /dev/null differ diff --git a/internal/static/performer/NoName19.png b/internal/static/performer/NoName19.png deleted file mode 100644 index 7349c26b2..000000000 Binary files a/internal/static/performer/NoName19.png and /dev/null differ diff --git a/internal/static/performer/NoName19.svg b/internal/static/performer/NoName19.svg new file mode 100644 index 000000000..a35c979d6 --- /dev/null +++ b/internal/static/performer/NoName19.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName20.png b/internal/static/performer/NoName20.png deleted file mode 100644 index 86dd404bc..000000000 Binary files a/internal/static/performer/NoName20.png and /dev/null differ diff --git a/internal/static/performer/NoName21.png b/internal/static/performer/NoName21.png deleted file mode 100644 index 7bee5cdb6..000000000 Binary files a/internal/static/performer/NoName21.png and /dev/null differ diff --git a/internal/static/performer/NoName21.svg b/internal/static/performer/NoName21.svg new file mode 100644 index 000000000..2d7647c1d --- /dev/null +++ b/internal/static/performer/NoName21.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName22.png b/internal/static/performer/NoName22.png deleted file mode 100644 index d92384f93..000000000 Binary files a/internal/static/performer/NoName22.png and /dev/null differ diff --git a/internal/static/performer/NoName22.svg 
b/internal/static/performer/NoName22.svg new file mode 100644 index 000000000..c81400587 --- /dev/null +++ b/internal/static/performer/NoName22.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName23.png b/internal/static/performer/NoName23.png deleted file mode 100644 index f28ca89c8..000000000 Binary files a/internal/static/performer/NoName23.png and /dev/null differ diff --git a/internal/static/performer/NoName23.svg b/internal/static/performer/NoName23.svg new file mode 100644 index 000000000..3156c267f --- /dev/null +++ b/internal/static/performer/NoName23.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName24.png b/internal/static/performer/NoName24.png deleted file mode 100644 index 7b9bb42a2..000000000 Binary files a/internal/static/performer/NoName24.png and /dev/null differ diff --git a/internal/static/performer/NoName24.svg b/internal/static/performer/NoName24.svg new file mode 100644 index 000000000..3afd26f25 --- /dev/null +++ b/internal/static/performer/NoName24.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName25.png b/internal/static/performer/NoName25.png deleted file mode 100644 index 1f4864eed..000000000 Binary files a/internal/static/performer/NoName25.png and /dev/null differ diff --git a/internal/static/performer/NoName25.svg b/internal/static/performer/NoName25.svg new file mode 100644 index 000000000..ab040b917 --- /dev/null +++ b/internal/static/performer/NoName25.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName26.png b/internal/static/performer/NoName26.png deleted file mode 100644 index b63c47ab5..000000000 Binary files a/internal/static/performer/NoName26.png and /dev/null differ diff --git a/internal/static/performer/NoName26.svg b/internal/static/performer/NoName26.svg new file mode 100644 index 000000000..0c1679e16 --- /dev/null +++ 
b/internal/static/performer/NoName26.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName27.png b/internal/static/performer/NoName27.png deleted file mode 100644 index eb57d9cf4..000000000 Binary files a/internal/static/performer/NoName27.png and /dev/null differ diff --git a/internal/static/performer/NoName27.svg b/internal/static/performer/NoName27.svg new file mode 100644 index 000000000..4bf73d04a --- /dev/null +++ b/internal/static/performer/NoName27.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName28.png b/internal/static/performer/NoName28.png deleted file mode 100644 index c00fb15b5..000000000 Binary files a/internal/static/performer/NoName28.png and /dev/null differ diff --git a/internal/static/performer/NoName28.svg b/internal/static/performer/NoName28.svg new file mode 100644 index 000000000..5af3dbc38 --- /dev/null +++ b/internal/static/performer/NoName28.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName29.png b/internal/static/performer/NoName29.png index 21e9e27fa..8a53967a5 100644 Binary files a/internal/static/performer/NoName29.png and b/internal/static/performer/NoName29.png differ diff --git a/internal/static/performer/NoName30.png b/internal/static/performer/NoName30.png deleted file mode 100644 index ba968026d..000000000 Binary files a/internal/static/performer/NoName30.png and /dev/null differ diff --git a/internal/static/performer/NoName30.svg b/internal/static/performer/NoName30.svg new file mode 100644 index 000000000..c77b1163f --- /dev/null +++ b/internal/static/performer/NoName30.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName31.png b/internal/static/performer/NoName31.png deleted file mode 100644 index a4003fa75..000000000 Binary files a/internal/static/performer/NoName31.png and /dev/null differ diff --git a/internal/static/performer/NoName31.svg 
b/internal/static/performer/NoName31.svg new file mode 100644 index 000000000..5504136d2 --- /dev/null +++ b/internal/static/performer/NoName31.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName32.png b/internal/static/performer/NoName32.png deleted file mode 100644 index 0ca4aca17..000000000 Binary files a/internal/static/performer/NoName32.png and /dev/null differ diff --git a/internal/static/performer/NoName32.svg b/internal/static/performer/NoName32.svg new file mode 100644 index 000000000..ec72d0836 --- /dev/null +++ b/internal/static/performer/NoName32.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName33.png b/internal/static/performer/NoName33.png index 38ae2116c..025a1ff7f 100644 Binary files a/internal/static/performer/NoName33.png and b/internal/static/performer/NoName33.png differ diff --git a/internal/static/performer/NoName34.png b/internal/static/performer/NoName34.png deleted file mode 100644 index c40683098..000000000 Binary files a/internal/static/performer/NoName34.png and /dev/null differ diff --git a/internal/static/performer/NoName34.svg b/internal/static/performer/NoName34.svg new file mode 100644 index 000000000..49086ca8a --- /dev/null +++ b/internal/static/performer/NoName34.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName35.png b/internal/static/performer/NoName35.png index 92d9ad784..70dc81443 100644 Binary files a/internal/static/performer/NoName35.png and b/internal/static/performer/NoName35.png differ diff --git a/internal/static/performer/NoName36.png b/internal/static/performer/NoName36.png deleted file mode 100644 index 7796c8b63..000000000 Binary files a/internal/static/performer/NoName36.png and /dev/null differ diff --git a/internal/static/performer/NoName36.svg b/internal/static/performer/NoName36.svg new file mode 100644 index 000000000..b69ce0aa3 --- /dev/null +++ 
b/internal/static/performer/NoName36.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName37.png b/internal/static/performer/NoName37.png deleted file mode 100644 index c47f0abac..000000000 Binary files a/internal/static/performer/NoName37.png and /dev/null differ diff --git a/internal/static/performer/NoName37.svg b/internal/static/performer/NoName37.svg new file mode 100644 index 000000000..d0053cb58 --- /dev/null +++ b/internal/static/performer/NoName37.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName38.png b/internal/static/performer/NoName38.png deleted file mode 100644 index da9fa37c9..000000000 Binary files a/internal/static/performer/NoName38.png and /dev/null differ diff --git a/internal/static/performer/NoName38.svg b/internal/static/performer/NoName38.svg new file mode 100644 index 000000000..0131c7efe --- /dev/null +++ b/internal/static/performer/NoName38.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName39.png b/internal/static/performer/NoName39.png deleted file mode 100644 index a7921d01d..000000000 Binary files a/internal/static/performer/NoName39.png and /dev/null differ diff --git a/internal/static/performer/NoName39.svg b/internal/static/performer/NoName39.svg new file mode 100644 index 000000000..6cc5080ac --- /dev/null +++ b/internal/static/performer/NoName39.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName40.png b/internal/static/performer/NoName40.png deleted file mode 100644 index 0214efad4..000000000 Binary files a/internal/static/performer/NoName40.png and /dev/null differ diff --git a/internal/static/performer/attribution.md b/internal/static/performer/attribution.md new file mode 100644 index 000000000..3cb40ca04 --- /dev/null +++ b/internal/static/performer/attribution.md @@ -0,0 +1,34 @@ +NoName02.svg - "[Exotic dancer 
silhouette](https://freesvg.org/exotic-dancer-silhouette)" by OpenClipart-Vectors under CC0 License +NoName05.svg - "[Fashion girl silhouette](https://creazilla.com/media/silhouette/76433/fashion-girl)" by Creazilla under CC0 License +NoName06.png - "[Woman, Female, Girl](https://pixabay.com/illustrations/woman-female-girl-lady-silhouette-163525/)" by No-longer-here under Pixabay License +NoName07.svg - "[Woman Silhouette 11](https://openclipart.org/detail/14083/woman-silhouette-11)" by nicubunu under CC0 License +NoName09.svg - "[Girl, Pose, Posing](https://pixabay.com/vectors/girl-pose-posing-female-woman-311535/)" by Clker-Free-Vector-Images under CC0 License +NoName11.png - "[Alpha Mask, Silhouette, Woman](https://pixabay.com/illustrations/alpha-mask-silhouette-woman-girl-3072470/)" by Wolfgang Eckert under Pixabay License +NoName12.svg - "[Dance, Dancer, Dancing](https://pixabay.com/vectors/dance-dancer-dancing-female-girl-2023863/)" by OpenClipart-Vectors under CC0 License +NoName13.svg - "[Dress, Silhouette, Woman](https://pixabay.com/vectors/dress-silhouette-woman-female-148745/)" by OpenClipart-Vectors under CC0 License +NoName14.svg - "[Woman in long dress silhouette](https://freesvg.org/woman-in-long-dress-silhouette)" by OpenClipart-Vectors under CC0 License +NoName17.svg - "[Female Model silhouette](https://creazilla.com/media/silhouette/2495/female-model)" by Natasha Sinegina under CC-BY-4.0 +NoName19.svg - "[Female, Girl, Heel](https://pixabay.com/vectors/female-girl-heel-silhouette-woman-2023898/)" by OpenClipart-Vectors under CC0 License +NoName21.svg - "[Lady, Silhouette, Woman](https://pixabay.com/vectors/lady-silhouette-woman-pink-296698/)" by Clker-Free-Vector-Images under CC0 License +NoName22.svg - "[Female, Girl, Heel](https://pixabay.com/vectors/female-girl-heel-silhouette-woman-2023856/)" by OpenClipart-Vectors under CC0 License +NoName23.svg - "[Woman, Female, Figure](https://pixabay.com/vectors/woman-female-figure-slender-slim-149723/)" 
by OpenClipart-Vectors under CC0 License +NoName24.svg - "[Silhouette, Woman, Bunny](https://pixabay.com/illustrations/silhouette-woman-bunny-girl-female-3196716/)" by Wolfgang Eckert under Pixabay License +NoName25.svg - "[Female, Girl, Silhouette](https://pixabay.com/vectors/female-girl-silhouette-woman-2023857/)" by OpenClipart-Vectors under CC0 License +NoName26.svg - "[Female, Girl, Silhouette](https://pixabay.com/vectors/female-girl-silhouette-woman-2024047/)" by OpenClipart-Vectors under CC0 License +NoName27.svg - "[Woman, School Clothes, Uniform](https://pixabay.com/illustrations/woman-school-clothes-uniform-644569/)" by Silvia under Pixabay License +NoName28.svg - "[Girl, Woman, Feminine](https://pixabay.com/illustrations/girl-woman-feminine-sensual-1369733/)" by Calzas under Pixabay License +NoName29.png - "[Alpha Mask, Silhouette, Woman](https://pixabay.com/illustrations/alpha-mask-silhouette-woman-girl-3066005/)" by Wolfgang Eckert under Pixabay License +NoName30.svg - "[Architetto](https://openclipart.org/detail/68047)" by Emilie Rollandin under CC0 License +NoName31.svg - "[Model silhouette](https://creazilla.com/media/silhouette/1785/model)" by Bob Comix under CC-BY-4.0 License +NoName32.svg - "[Fashion, Female, Girl](https://pixabay.com/vectors/fashion-female-girl-heel-model-2023859/)" by OpenClipart-Vectors under CC0 License +NoName33.png - "[Silhouette Donna 6](https://www.publicdomainpictures.net/view-image.php?image=82268)" by Tammy Sue under CC0 License +NoName34.svg - "[Donna in piedi 01](https://openclipart.org/detail/33139)" by Emilie Rollandin under CC0 License +NoName35.png - "[Silhouette, Woman, Young](https://pixabay.com/illustrations/silhouette-woman-young-move-female-3104942/)" by Wolfgang Eckert under Pixabay License +NoName36.svg - "[Fashion Model silhouette](https://creazilla.com/media/silhouette/2506/fashion-model)" by Natasha Sinegina under CC-BY-4.0 License +NoName37.svg - "[Female, Woman, 
Standing](https://pixabay.com/vectors/female-woman-standing-confident-2816234/)" by Mohamed Hassan under Pixabay License +NoName38.svg - "[Dress, Silhouette, Women](https://pixabay.com/vectors/dress-silhouette-women-dance-lady-3360422/)" by Mohamed Hassan under Pixabay License +NoName39.svg - "[Woman, Female, Lady](https://pixabay.com/illustrations/woman-female-lady-business-woman-220260/)" by No-longer-here under Pixabay License + +CC0 License: https://creativecommons.org/publicdomain/zero/1.0/ +CC-BY-4.0 License: https://creativecommons.org/licenses/by/4.0/ +Pixabay License: https://pixabay.com/service/license-summary/ \ No newline at end of file diff --git a/internal/static/performer_male/Male01.png b/internal/static/performer_male/Male01.png deleted file mode 100644 index 8a486299a..000000000 Binary files a/internal/static/performer_male/Male01.png and /dev/null differ diff --git a/internal/static/performer_male/Male01.svg b/internal/static/performer_male/Male01.svg new file mode 100644 index 000000000..72599423a --- /dev/null +++ b/internal/static/performer_male/Male01.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male02.png b/internal/static/performer_male/Male02.png deleted file mode 100644 index 673b120eb..000000000 Binary files a/internal/static/performer_male/Male02.png and /dev/null differ diff --git a/internal/static/performer_male/Male02.svg b/internal/static/performer_male/Male02.svg new file mode 100644 index 000000000..1f7f4072e --- /dev/null +++ b/internal/static/performer_male/Male02.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male03.png b/internal/static/performer_male/Male03.png deleted file mode 100644 index 1814d05bb..000000000 Binary files a/internal/static/performer_male/Male03.png and /dev/null differ diff --git a/internal/static/performer_male/Male03.svg b/internal/static/performer_male/Male03.svg new file mode 100644 index 000000000..60e0857ce 
--- /dev/null +++ b/internal/static/performer_male/Male03.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male04.png b/internal/static/performer_male/Male04.png deleted file mode 100644 index 9dd1f0bcc..000000000 Binary files a/internal/static/performer_male/Male04.png and /dev/null differ diff --git a/internal/static/performer_male/Male04.svg b/internal/static/performer_male/Male04.svg new file mode 100644 index 000000000..7e7e29fae --- /dev/null +++ b/internal/static/performer_male/Male04.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male05.png b/internal/static/performer_male/Male05.png deleted file mode 100644 index 35231f914..000000000 Binary files a/internal/static/performer_male/Male05.png and /dev/null differ diff --git a/internal/static/performer_male/Male05.svg b/internal/static/performer_male/Male05.svg new file mode 100644 index 000000000..b41f8d1cd --- /dev/null +++ b/internal/static/performer_male/Male05.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male06.png b/internal/static/performer_male/Male06.png deleted file mode 100644 index 9530d274a..000000000 Binary files a/internal/static/performer_male/Male06.png and /dev/null differ diff --git a/internal/static/performer_male/Male06.svg b/internal/static/performer_male/Male06.svg new file mode 100644 index 000000000..14578c380 --- /dev/null +++ b/internal/static/performer_male/Male06.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/attribution.md b/internal/static/performer_male/attribution.md new file mode 100644 index 000000000..119d73757 --- /dev/null +++ b/internal/static/performer_male/attribution.md @@ -0,0 +1,8 @@ +Male01.svg - "[Man Silhouette](https://freesvg.org/1528398040)" by "OpenClipart" under CC0 License +Male02.svg - "[Male pose silhouette](https://freesvg.org/male-pose-silhouette)" by OpenClipart under CC0 
License +Male03.svg - "[Bald man walking in a suit silhouette vector image](https://freesvg.org/bald-man-walking-in-a-suit-silhouette-vector-image)" by OpenClipart under CC0 License +Male04.svg - "[Man silhouette vector clip art](https://freesvg.org/man-silhouette-vector-clip-art)" by OpenClipart under CC0 License +Male05.svg - "[Man, Walking, Confident](https://pixabay.com/vectors/man-walking-confident-silhouette-2759950/)" by Mohamed Hassan under Pixabay License + +CC0 License: https://creativecommons.org/public-domain/cc0/ +Pixabay License: https://pixabay.com/service/license-summary/ \ No newline at end of file diff --git a/pkg/ffmpeg/transcoder/screenshot.go b/pkg/ffmpeg/transcoder/screenshot.go index c3343d594..c65f23941 100644 --- a/pkg/ffmpeg/transcoder/screenshot.go +++ b/pkg/ffmpeg/transcoder/screenshot.go @@ -9,7 +9,11 @@ type ScreenshotOptions struct { // Quality is the quality scale. See https://ffmpeg.org/ffmpeg.html#Main-options Quality int + // Width is the width to scale the screenshot to. If 0, no scaling will be applied. Width int + // Height is the height to scale the screenshot to. If 0, no scaling will be applied. + // Not used if Width is set. + Height int // Verbosity is the logging verbosity. Defaults to LogLevelError if not set.
Verbosity ffmpeg.LogLevel @@ -70,6 +74,9 @@ func ScreenshotTime(input string, t float64, options ScreenshotOptions) ffmpeg.A if options.Width > 0 { vf = vf.ScaleWidth(options.Width) args = args.VideoFilter(vf) + } else if options.Height > 0 { + vf = vf.ScaleHeight(options.Height) + args = args.VideoFilter(vf) } args = args.AppendArgs(options.OutputType) diff --git a/pkg/file/file.go b/pkg/file/file.go index 407949ba1..b93083b35 100644 --- a/pkg/file/file.go +++ b/pkg/file/file.go @@ -3,6 +3,10 @@ package file import ( "context" + "fmt" + "io/fs" + "os" + "time" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" @@ -35,3 +39,23 @@ func (r *Repository) WithReadTxn(ctx context.Context, fn txn.TxnFunc) error { func (r *Repository) WithDB(ctx context.Context, fn txn.TxnFunc) error { return txn.WithDatabase(ctx, r.TxnManager, fn) } + +// ModTime returns the modification time truncated to seconds. +func ModTime(info fs.FileInfo) time.Time { + // truncate to seconds, since we don't store beyond that in the database + return info.ModTime().Truncate(time.Second) +} + +// GetFileSize gets the size of the file, taking into account symlinks. 
+func GetFileSize(f models.FS, path string, info fs.FileInfo) (int64, error) { + // #2196/#3042 - replace size with target size if file is a symlink + if info.Mode()&os.ModeSymlink == os.ModeSymlink { + targetInfo, err := f.Stat(path) + if err != nil { + return 0, fmt.Errorf("reading info for symlink %q: %w", path, err) + } + return targetInfo.Size(), nil + } + + return info.Size(), nil +} diff --git a/pkg/file/folder_rename_detect.go b/pkg/file/folder_rename_detect.go index 4c057461b..cfae7e4fb 100644 --- a/pkg/file/folder_rename_detect.go +++ b/pkg/file/folder_rename_detect.go @@ -75,7 +75,7 @@ func (d *folderRenameDetector) bestCandidate() *models.Folder { return best.folder } -func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models.Folder, error) { +func (s *Scanner) detectFolderMove(ctx context.Context, file ScannedFile) (*models.Folder, error) { // in order for a folder to be considered moved, the existing folder must be // missing, and the majority of the old folder's files must be present, unchanged, // in the new folder. @@ -88,7 +88,7 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models. r := s.Repository - if err := symWalk(file.fs, file.Path, func(path string, d fs.DirEntry, err error) error { + if err := SymWalk(file.FS, file.Path, func(path string, d fs.DirEntry, err error) error { if err != nil { // don't let errors prevent scanning logger.Errorf("error scanning %s: %v", path, err) @@ -111,11 +111,11 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models. return nil } - if !s.acceptEntry(ctx, path, info) { + if !s.AcceptEntry(ctx, path, info) { return nil } - size, err := getFileSize(file.fs, path, info) + size, err := GetFileSize(file.FS, path, info) if err != nil { return fmt.Errorf("getting file size for %q: %w", path, err) } @@ -154,7 +154,7 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models. 
} // parent folder must be missing - _, err = file.fs.Lstat(pf.Path) + _, err = file.FS.Lstat(pf.Path) if err == nil { // parent folder exists, not a candidate detector.reject(parentFolderID) diff --git a/pkg/file/scan.go b/pkg/file/scan.go index 36b409c89..d9a58ad44 100644 --- a/pkg/file/scan.go +++ b/pkg/file/scan.go @@ -2,29 +2,18 @@ package file import ( "context" - "errors" "fmt" "io/fs" - "os" "path/filepath" - "runtime/debug" "strings" "sync" "time" - "github.com/remeh/sizedwaitgroup" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) -const ( - scanQueueSize = 200000 - // maximum number of times to retry in the event of a locked database - // use -1 to retry forever - maxRetries = -1 -) - // Scanner scans files into the database. // // The scan process works using two goroutines. The first walks through the provided paths @@ -55,8 +44,26 @@ type Scanner struct { Repository Repository FingerprintCalculator FingerprintCalculator + // ZipFileExtensions is a list of file extensions that are considered zip files. + // Extension does not include the . character. + ZipFileExtensions []string + + // ScanFilters are used to determine if a file should be scanned. + ScanFilters []PathFilter + + // HandlerRequiredFilters are used to determine if an unchanged file needs to be handled + HandlerRequiredFilters []Filter + // FileDecorators are applied to files as they are scanned. FileDecorators []Decorator + + // handlers are called after a file has been scanned. + FileHandlers []Handler + + // Rescan indicates whether files should be rescanned even if they haven't changed. + Rescan bool + + folderPathToID sync.Map } // FingerprintCalculator calculates a fingerprint for the provided file. @@ -91,257 +98,18 @@ func (d *FilteredDecorator) IsMissingMetadata(ctx context.Context, fs models.FS, return false } -// ProgressReporter is used to report progress of the scan. 
-type ProgressReporter interface { - AddTotal(total int) - Increment() - Definite() - ExecuteTask(description string, fn func()) -} - -type scanJob struct { - *Scanner - - // handlers are called after a file has been scanned. - handlers []Handler - - ProgressReports ProgressReporter - options ScanOptions - - startTime time.Time - fileQueue chan scanFile - retryList []scanFile - retrying bool - folderPathToID sync.Map - zipPathToID sync.Map - count int - - txnRetryer txn.Retryer -} - -// ScanOptions provides options for scanning files. -type ScanOptions struct { - Paths []string - - // ZipFileExtensions is a list of file extensions that are considered zip files. - // Extension does not include the . character. - ZipFileExtensions []string - - // ScanFilters are used to determine if a file should be scanned. - ScanFilters []PathFilter - - // HandlerRequiredFilters are used to determine if an unchanged file needs to be handled - HandlerRequiredFilters []Filter - - ParallelTasks int - - // When true files in path will be rescanned even if they haven't changed - Rescan bool -} - -// Scan starts the scanning process. -func (s *Scanner) Scan(ctx context.Context, handlers []Handler, options ScanOptions, progressReporter ProgressReporter) { - job := &scanJob{ - Scanner: s, - handlers: handlers, - ProgressReports: progressReporter, - options: options, - txnRetryer: txn.Retryer{ - Manager: s.Repository.TxnManager, - Retries: maxRetries, - }, - } - - job.execute(ctx) -} - -type scanFile struct { +// ScannedFile represents a file being scanned. 
+type ScannedFile struct { *models.BaseFile - fs models.FS - info fs.FileInfo + FS models.FS + Info fs.FileInfo } -func (s *scanJob) withTxn(ctx context.Context, fn func(ctx context.Context) error) error { - return s.txnRetryer.WithTxn(ctx, fn) -} - -func (s *scanJob) withDB(ctx context.Context, fn func(ctx context.Context) error) error { - return s.Repository.WithDB(ctx, fn) -} - -func (s *scanJob) execute(ctx context.Context) { - paths := s.options.Paths - logger.Infof("scanning %d paths", len(paths)) - s.startTime = time.Now() - - s.fileQueue = make(chan scanFile, scanQueueSize) - var wg sync.WaitGroup - wg.Add(1) - - go func() { - defer func() { - wg.Done() - - // handle panics in goroutine - if p := recover(); p != nil { - logger.Errorf("panic while queuing files for scan: %v", p) - logger.Errorf(string(debug.Stack())) - } - }() - - if err := s.queueFiles(ctx, paths); err != nil { - if errors.Is(err, context.Canceled) { - return - } - - logger.Errorf("error queuing files for scan: %v", err) - return - } - - logger.Infof("Finished adding files to queue. 
%d files queued", s.count) - }() - - defer wg.Wait() - - if err := s.processQueue(ctx); err != nil { - if errors.Is(err, context.Canceled) { - return - } - - logger.Errorf("error scanning files: %v", err) - return - } -} - -func (s *scanJob) queueFiles(ctx context.Context, paths []string) error { - defer func() { - close(s.fileQueue) - - if s.ProgressReports != nil { - s.ProgressReports.AddTotal(s.count) - s.ProgressReports.Definite() - } - }() - - var err error - s.ProgressReports.ExecuteTask("Walking directory tree", func() { - for _, p := range paths { - err = symWalk(s.FS, p, s.queueFileFunc(ctx, s.FS, nil)) - if err != nil { - return - } - } - }) - - return err -} - -func (s *scanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *scanFile) fs.WalkDirFunc { - return func(path string, d fs.DirEntry, err error) error { - if err != nil { - // don't let errors prevent scanning - logger.Errorf("error scanning %s: %v", path, err) - return nil - } - - if err = ctx.Err(); err != nil { - return err - } - - info, err := d.Info() - if err != nil { - logger.Errorf("reading info for %q: %v", path, err) - return nil - } - - if !s.acceptEntry(ctx, path, info) { - if info.IsDir() { - return fs.SkipDir - } - - return nil - } - - size, err := getFileSize(f, path, info) - if err != nil { - return err - } - - ff := scanFile{ - BaseFile: &models.BaseFile{ - DirEntry: models.DirEntry{ - ModTime: modTime(info), - }, - Path: path, - Basename: filepath.Base(path), - Size: size, - }, - fs: f, - info: info, - } - - if zipFile != nil { - zipFileID, err := s.getZipFileID(ctx, zipFile) - if err != nil { - return err - } - ff.ZipFileID = zipFileID - ff.ZipFile = zipFile - } - - if info.IsDir() { - // handle folders immediately - if err := s.handleFolder(ctx, ff); err != nil { - if !errors.Is(err, context.Canceled) { - logger.Errorf("error processing %q: %v", path, err) - } - - // skip the directory since we won't be able to process the files anyway - return fs.SkipDir - } - - 
return nil - } - - // if zip file is present, we handle immediately - if zipFile != nil { - s.ProgressReports.ExecuteTask("Scanning "+path, func() { - if err := s.handleFile(ctx, ff); err != nil { - if !errors.Is(err, context.Canceled) { - logger.Errorf("error processing %q: %v", path, err) - } - // don't return an error, just skip the file - } - }) - - return nil - } - - s.fileQueue <- ff - - s.count++ - - return nil - } -} - -func getFileSize(f models.FS, path string, info fs.FileInfo) (int64, error) { - // #2196/#3042 - replace size with target size if file is a symlink - if info.Mode()&os.ModeSymlink == os.ModeSymlink { - targetInfo, err := f.Stat(path) - if err != nil { - return 0, fmt.Errorf("reading info for symlink %q: %w", path, err) - } - return targetInfo.Size(), nil - } - - return info.Size(), nil -} - -func (s *scanJob) acceptEntry(ctx context.Context, path string, info fs.FileInfo) bool { +// AcceptEntry determines if the file entry should be accepted for scanning +func (s *Scanner) AcceptEntry(ctx context.Context, path string, info fs.FileInfo) bool { // always accept if there's no filters - accept := len(s.options.ScanFilters) == 0 - for _, filter := range s.options.ScanFilters { + accept := len(s.ScanFilters) == 0 + for _, filter := range s.ScanFilters { // accept if any filter accepts the file if filter.Accept(ctx, path, info) { accept = true @@ -352,102 +120,7 @@ func (s *scanJob) acceptEntry(ctx context.Context, path string, info fs.FileInfo return accept } -func (s *scanJob) scanZipFile(ctx context.Context, f scanFile) error { - zipFS, err := f.fs.OpenZip(f.Path, f.Size) - if err != nil { - if errors.Is(err, errNotReaderAt) { - // can't walk the zip file - // just return - return nil - } - - return err - } - - defer zipFS.Close() - - return symWalk(zipFS, f.Path, s.queueFileFunc(ctx, zipFS, &f)) -} - -func (s *scanJob) processQueue(ctx context.Context) error { - parallelTasks := s.options.ParallelTasks - if parallelTasks < 1 { - parallelTasks = 
1 - } - - wg := sizedwaitgroup.New(parallelTasks) - - if err := func() error { - defer wg.Wait() - - for f := range s.fileQueue { - if err := ctx.Err(); err != nil { - return err - } - - wg.Add() - ff := f - go func() { - defer wg.Done() - s.processQueueItem(ctx, ff) - }() - } - - return nil - }(); err != nil { - return err - } - - s.retrying = true - - if err := func() error { - defer wg.Wait() - - for _, f := range s.retryList { - if err := ctx.Err(); err != nil { - return err - } - - wg.Add() - ff := f - go func() { - defer wg.Done() - s.processQueueItem(ctx, ff) - }() - } - - return nil - }(); err != nil { - return err - } - - return nil -} - -func (s *scanJob) incrementProgress(f scanFile) { - // don't increment for files inside zip files since these aren't - // counted during the initial walking - if s.ProgressReports != nil && f.ZipFile == nil { - s.ProgressReports.Increment() - } -} - -func (s *scanJob) processQueueItem(ctx context.Context, f scanFile) { - s.ProgressReports.ExecuteTask("Scanning "+f.Path, func() { - var err error - if f.info.IsDir() { - err = s.handleFolder(ctx, f) - } else { - err = s.handleFile(ctx, f) - } - - if err != nil && !errors.Is(err, context.Canceled) { - logger.Errorf("error processing %q: %v", f.Path, err) - } - }) -} - -func (s *scanJob) getFolderID(ctx context.Context, path string) (*models.FolderID, error) { +func (s *Scanner) getFolderID(ctx context.Context, path string) (*models.FolderID, error) { // check the folder cache first if f, ok := s.folderPathToID.Load(path); ok { v := f.(models.FolderID) @@ -470,48 +143,17 @@ func (s *scanJob) getFolderID(ctx context.Context, path string) (*models.FolderI return &ret.ID, nil } -func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*models.FileID, error) { - if zipFile == nil { - return nil, nil - } - - if zipFile.ID != 0 { - return &zipFile.ID, nil - } - - path := zipFile.Path - - // check the folder cache first - if f, ok := s.zipPathToID.Load(path); ok { - v 
:= f.(models.FileID) - return &v, nil - } - - // assume case sensitive when searching for the zip file - const caseSensitive = true - - ret, err := s.Repository.File.FindByPath(ctx, path, caseSensitive) - if err != nil { - return nil, fmt.Errorf("getting zip file ID for %q: %w", path, err) - } - - if ret == nil { - return nil, fmt.Errorf("zip file %q doesn't exist in database", zipFile.Path) - } - - s.zipPathToID.Store(path, ret.Base().ID) - return &ret.Base().ID, nil -} - -func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error { +// ScanFolder scans the provided folder into the database, returning the folder entry. +// If the folder already exists, it is updated if necessary. +func (s *Scanner) ScanFolder(ctx context.Context, file ScannedFile) (*models.Folder, error) { + var f *models.Folder + var err error path := file.Path - return s.withTxn(ctx, func(ctx context.Context) error { - defer s.incrementProgress(file) - + err = s.Repository.WithTxn(ctx, func(ctx context.Context) error { // determine if folder already exists in data store (by path) // assume case sensitive by default - f, err := s.Repository.Folder.FindByPath(ctx, path, true) + f, err = s.Repository.Folder.FindByPath(ctx, path, true) if err != nil { return fmt.Errorf("checking for existing folder %q: %w", path, err) } @@ -520,7 +162,7 @@ func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error { // case insensitive searching // assume case sensitive if in zip if f == nil && file.ZipFileID == nil { - caseSensitive, _ := file.fs.IsPathCaseSensitive(file.Path) + caseSensitive, _ := file.FS.IsPathCaseSensitive(file.Path) if !caseSensitive { f, err = s.Repository.Folder.FindByPath(ctx, path, false) @@ -547,9 +189,11 @@ func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error { return nil }) + + return f, err } -func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*models.Folder, error) { +func (s *Scanner) onNewFolder(ctx context.Context, file 
ScannedFile) (*models.Folder, error) { renamed, err := s.handleFolderRename(ctx, file) if err != nil { return nil, err @@ -596,7 +240,7 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*models.Folde return toCreate, nil } -func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*models.Folder, error) { +func (s *Scanner) handleFolderRename(ctx context.Context, file ScannedFile) (*models.Folder, error) { // ignore folders in zip files if file.ZipFileID != nil { return nil, nil @@ -637,7 +281,7 @@ func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*model return renamedFrom, nil } -func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *models.Folder) (*models.Folder, error) { +func (s *Scanner) onExistingFolder(ctx context.Context, f ScannedFile, existing *models.Folder) (*models.Folder, error) { update := false // update if mod time is changed @@ -678,22 +322,22 @@ func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *mo return existing, nil } -func modTime(info fs.FileInfo) time.Time { - // truncate to seconds, since we don't store beyond that in the database - return info.ModTime().Truncate(time.Second) +type ScanFileResult struct { + File models.File + New bool + Renamed bool + Updated bool } -func (s *scanJob) handleFile(ctx context.Context, f scanFile) error { - defer s.incrementProgress(f) - - var ff models.File +// ScanFile scans the provided file into the database, returning the scan result. 
+func (s *Scanner) ScanFile(ctx context.Context, f ScannedFile) (*ScanFileResult, error) { + var r *ScanFileResult // don't use a transaction to check if new or existing - if err := s.withDB(ctx, func(ctx context.Context) error { + if err := s.Repository.WithDB(ctx, func(ctx context.Context) error { // determine if file already exists in data store // assume case sensitive when searching for the file to begin with - var err error - ff, err = s.Repository.File.FindByPath(ctx, f.Path, true) + ff, err := s.Repository.File.FindByPath(ctx, f.Path, true) if err != nil { return fmt.Errorf("checking for existing file %q: %w", f.Path, err) } @@ -702,7 +346,7 @@ func (s *scanJob) handleFile(ctx context.Context, f scanFile) error { // case insensitive search // assume case sensitive if in zip if ff == nil && f.ZipFileID != nil { - caseSensitive, _ := f.fs.IsPathCaseSensitive(f.Path) + caseSensitive, _ := f.FS.IsPathCaseSensitive(f.Path) if !caseSensitive { ff, err = s.Repository.File.FindByPath(ctx, f.Path, false) @@ -714,35 +358,23 @@ func (s *scanJob) handleFile(ctx context.Context, f scanFile) error { if ff == nil { // returns a file only if it is actually new - ff, err = s.onNewFile(ctx, f) + r, err = s.onNewFile(ctx, f) return err } - ff, err = s.onExistingFile(ctx, f, ff) + r, err = s.onExistingFile(ctx, f, ff) return err }); err != nil { - return err + return nil, err } - if ff != nil && s.isZipFile(f.info.Name()) { - f.BaseFile = ff.Base() - - // scan zip files with a different context that is not cancellable - // cancelling while scanning zip file contents results in the scan - // contents being partially completed - zipCtx := context.WithoutCancel(ctx) - - if err := s.scanZipFile(zipCtx, f); err != nil { - logger.Errorf("Error scanning zip file %q: %v", f.Path, err) - } - } - - return nil + return r, nil } -func (s *scanJob) isZipFile(path string) bool { +// IsZipFile determines if the provided path is a zip file based on its extension. 
+func (s *Scanner) IsZipFile(path string) bool { fExt := filepath.Ext(path) - for _, ext := range s.options.ZipFileExtensions { + for _, ext := range s.ZipFileExtensions { if strings.EqualFold(fExt, "."+ext) { return true } @@ -751,7 +383,7 @@ func (s *scanJob) isZipFile(path string) bool { return false } -func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error) { +func (s *Scanner) onNewFile(ctx context.Context, f ScannedFile) (*ScanFileResult, error) { now := time.Now() baseFile := f.BaseFile @@ -767,28 +399,20 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error } if parentFolderID == nil { - // if parent folder doesn't exist, assume it's not yet created - // add this file to the queue to be created later - if s.retrying { - // if we're retrying and the folder still doesn't exist, then it's a problem - return nil, fmt.Errorf("parent folder for %q doesn't exist", path) - } - - s.retryList = append(s.retryList, f) - return nil, nil + return nil, fmt.Errorf("parent folder for %q doesn't exist", path) } baseFile.ParentFolderID = *parentFolderID const useExisting = false - fp, err := s.calculateFingerprints(f.fs, baseFile, path, useExisting) + fp, err := s.calculateFingerprints(f.FS, baseFile, path, useExisting) if err != nil { return nil, err } baseFile.SetFingerprints(fp) - file, err := s.fireDecorators(ctx, f.fs, baseFile) + file, err := s.fireDecorators(ctx, f.FS, baseFile) if err != nil { return nil, err } @@ -801,14 +425,17 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error } if renamed != nil { + return &ScanFileResult{ + File: renamed, + Renamed: true, + }, nil // handle rename should have already handled the contents of the zip file // so shouldn't need to scan it again // return nil so it doesn't - return nil, nil } // if not renamed, queue file for creation - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx 
context.Context) error { if err := s.Repository.File.Create(ctx, file); err != nil { return fmt.Errorf("creating file %q: %w", path, err) } @@ -822,10 +449,13 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error return nil, err } - return file, nil + return &ScanFileResult{ + File: file, + New: true, + }, nil } -func (s *scanJob) fireDecorators(ctx context.Context, fs models.FS, f models.File) (models.File, error) { +func (s *Scanner) fireDecorators(ctx context.Context, fs models.FS, f models.File) (models.File, error) { for _, h := range s.FileDecorators { var err error f, err = h.Decorate(ctx, fs, f) @@ -837,8 +467,8 @@ func (s *scanJob) fireDecorators(ctx context.Context, fs models.FS, f models.Fil return f, nil } -func (s *scanJob) fireHandlers(ctx context.Context, f models.File, oldFile models.File) error { - for _, h := range s.handlers { +func (s *Scanner) fireHandlers(ctx context.Context, f models.File, oldFile models.File) error { + for _, h := range s.FileHandlers { if err := h.Handle(ctx, f, oldFile); err != nil { return err } @@ -847,7 +477,7 @@ func (s *scanJob) fireHandlers(ctx context.Context, f models.File, oldFile model return nil } -func (s *scanJob) calculateFingerprints(fs models.FS, f *models.BaseFile, path string, useExisting bool) (models.Fingerprints, error) { +func (s *Scanner) calculateFingerprints(fs models.FS, f *models.BaseFile, path string, useExisting bool) (models.Fingerprints, error) { // only log if we're (re)calculating fingerprints if !useExisting { logger.Infof("Calculating fingerprints for %s ...", path) @@ -884,7 +514,7 @@ func appendFileUnique(v []models.File, toAdd []models.File) []models.File { return v } -func (s *scanJob) getFileFS(f *models.BaseFile) (models.FS, error) { +func (s *Scanner) getFileFS(f *models.BaseFile) (models.FS, error) { if f.ZipFile == nil { return s.FS, nil } @@ -899,7 +529,7 @@ func (s *scanJob) getFileFS(f *models.BaseFile) (models.FS, error) { return 
fs.OpenZip(zipPath, zipSize) } -func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint) (models.File, error) { +func (s *Scanner) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint) (models.File, error) { var others []models.File for _, tfp := range fp { @@ -941,7 +571,7 @@ func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.F // treat as a move missing = append(missing, other) } - case !s.acceptEntry(ctx, other.Base().Path, info): + case !s.AcceptEntry(ctx, other.Base().Path, info): // #4393 - if the file is no longer in the configured library paths, treat it as a move logger.Debugf("File %q no longer in library paths. Treating as a move.", other.Base().Path) missing = append(missing, other) @@ -974,12 +604,12 @@ func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.F fBaseCopy.Fingerprints = updatedBase.Fingerprints *updatedBase = fBaseCopy - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Update(ctx, updated); err != nil { return fmt.Errorf("updating file for rename %q: %w", newPath, err) } - if s.isZipFile(updatedBase.Basename) { + if s.IsZipFile(updatedBase.Basename) { if err := transferZipHierarchy(ctx, s.Repository.Folder, s.Repository.File, updatedBase.ID, oldPath, newPath); err != nil { return fmt.Errorf("moving zip hierarchy for renamed zip file %q: %w", newPath, err) } @@ -997,9 +627,9 @@ func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.F return updated, nil } -func (s *scanJob) isHandlerRequired(ctx context.Context, f models.File) bool { - accept := len(s.options.HandlerRequiredFilters) == 0 - for _, filter := range s.options.HandlerRequiredFilters { +func (s *Scanner) isHandlerRequired(ctx context.Context, f models.File) bool { + accept := len(s.HandlerRequiredFilters) == 0 + for _, filter := range 
s.HandlerRequiredFilters { // accept if any filter accepts the file if filter.Accept(ctx, f) { accept = true @@ -1018,9 +648,9 @@ func (s *scanJob) isHandlerRequired(ctx context.Context, f models.File) bool { // - file size // - image format, width or height // - video codec, audio codec, format, width, height, framerate or bitrate -func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing models.File) bool { +func (s *Scanner) isMissingMetadata(ctx context.Context, f ScannedFile, existing models.File) bool { for _, h := range s.FileDecorators { - if h.IsMissingMetadata(ctx, f.fs, existing) { + if h.IsMissingMetadata(ctx, f.FS, existing) { return true } } @@ -1028,20 +658,20 @@ func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing mo return false } -func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing models.File) (models.File, error) { +func (s *Scanner) setMissingMetadata(ctx context.Context, f ScannedFile, existing models.File) (models.File, error) { path := existing.Base().Path logger.Infof("Updating metadata for %s", path) existing.Base().Size = f.Size var err error - existing, err = s.fireDecorators(ctx, f.fs, existing) + existing, err = s.fireDecorators(ctx, f.FS, existing) if err != nil { return nil, err } // queue file for update - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", path, err) } @@ -1054,9 +684,9 @@ func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing m return existing, nil } -func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existing models.File) (models.File, error) { +func (s *Scanner) setMissingFingerprints(ctx context.Context, f ScannedFile, existing models.File) (models.File, error) { const useExisting = true - fp, err := 
s.calculateFingerprints(f.fs, existing.Base(), f.Path, useExisting) + fp, err := s.calculateFingerprints(f.FS, existing.Base(), f.Path, useExisting) if err != nil { return nil, err } @@ -1064,7 +694,7 @@ func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existi if fp.ContentsChanged(existing.Base().Fingerprints) { existing.SetFingerprints(fp) - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", f.Path, err) } @@ -1079,14 +709,14 @@ func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existi } // returns a file only if it was updated -func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing models.File) (models.File, error) { +func (s *Scanner) onExistingFile(ctx context.Context, f ScannedFile, existing models.File) (*ScanFileResult, error) { base := existing.Base() path := base.Path fileModTime := f.ModTime // #6326 - also force a rescan if the basename changed updated := !fileModTime.Equal(base.ModTime) || base.Basename != f.Basename - forceRescan := s.options.Rescan + forceRescan := s.Rescan if !updated && !forceRescan { return s.onUnchangedFile(ctx, f, existing) @@ -1108,7 +738,7 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing model // calculate and update fingerprints for the file const useExisting = false - fp, err := s.calculateFingerprints(f.fs, base, path, useExisting) + fp, err := s.calculateFingerprints(f.FS, base, path, useExisting) if err != nil { return nil, err } @@ -1116,13 +746,13 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing model s.removeOutdatedFingerprints(existing, fp) existing.SetFingerprints(fp) - existing, err = s.fireDecorators(ctx, f.fs, existing) + existing, err = s.fireDecorators(ctx, f.FS, existing) if err != nil { return nil, err } 
// queue file for update - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", path, err) } @@ -1135,11 +765,13 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing model }); err != nil { return nil, err } - - return existing, nil + return &ScanFileResult{ + File: existing, + Updated: true, + }, nil } -func (s *scanJob) removeOutdatedFingerprints(existing models.File, fp models.Fingerprints) { +func (s *Scanner) removeOutdatedFingerprints(existing models.File, fp models.Fingerprints) { // HACK - if no MD5 fingerprint was returned, and the oshash is changed // then remove the MD5 fingerprint oshash := fp.For(models.FingerprintTypeOshash) @@ -1167,7 +799,7 @@ func (s *scanJob) removeOutdatedFingerprints(existing models.File, fp models.Fin } // returns a file only if it was updated -func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing models.File) (models.File, error) { +func (s *Scanner) onUnchangedFile(ctx context.Context, f ScannedFile, existing models.File) (*ScanFileResult, error) { var err error isMissingMetdata := s.isMissingMetadata(ctx, f, existing) @@ -1186,7 +818,7 @@ func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing mode } handlerRequired := false - if err := s.withDB(ctx, func(ctx context.Context) error { + if err := s.Repository.WithDB(ctx, func(ctx context.Context) error { // check if the handler needs to be run handlerRequired = s.isHandlerRequired(ctx, existing) return nil @@ -1196,15 +828,20 @@ func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing mode if !handlerRequired { // if this file is a zip file, then we need to rescan the contents - // as well. We do this by returning the file, instead of nil. + // as well. We do this by indicating that the file is updated. 
if isMissingMetdata { - return existing, nil + return &ScanFileResult{ + File: existing, + Updated: true, + }, nil } - return nil, nil + return &ScanFileResult{ + File: existing, + }, nil } - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.fireHandlers(ctx, existing, nil); err != nil { return err } @@ -1215,6 +852,9 @@ func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing mode } // if this file is a zip file, then we need to rescan the contents - // as well. We do this by returning the file, instead of nil. - return existing, nil + // as well. We do this by indicating that the file is updated. + return &ScanFileResult{ + File: existing, + Updated: true, + }, nil } diff --git a/pkg/file/walk.go b/pkg/file/walk.go index 3c6a157b7..bd33f42c3 100644 --- a/pkg/file/walk.go +++ b/pkg/file/walk.go @@ -81,8 +81,8 @@ func walkSym(f models.FS, filename string, linkDirname string, walkFn fs.WalkDir return fsWalk(f, filename, symWalkFunc) } -// symWalk extends filepath.Walk to also follow symlinks -func symWalk(fs models.FS, path string, walkFn fs.WalkDirFunc) error { +// SymWalk extends filepath.Walk to also follow symlinks +func SymWalk(fs models.FS, path string, walkFn fs.WalkDirFunc) error { return walkSym(fs, path, path, walkFn) } diff --git a/pkg/file/zip.go b/pkg/file/zip.go index 4df2453dc..5afcd5329 100644 --- a/pkg/file/zip.go +++ b/pkg/file/zip.go @@ -18,7 +18,7 @@ import ( ) var ( - errNotReaderAt = errors.New("not a ReaderAt") + ErrNotReaderAt = errors.New("invalid reader: does not implement io.ReaderAt") errZipFSOpenZip = errors.New("cannot open zip file inside zip file") ) @@ -38,7 +38,7 @@ func newZipFS(fs models.FS, path string, size int64) (*zipFS, error) { asReaderAt, _ := reader.(io.ReaderAt) if asReaderAt == nil { reader.Close() - return nil, errNotReaderAt + return nil, ErrNotReaderAt } zipReader, err := zip.NewReader(asReaderAt, size) diff 
--git a/pkg/fsutil/fs.go b/pkg/fsutil/fs.go index 2b5c37f62..10666bb63 100644 --- a/pkg/fsutil/fs.go +++ b/pkg/fsutil/fs.go @@ -32,8 +32,8 @@ func IsFsPathCaseSensitive(path string) (bool, error) { return false, fmt.Errorf("could not case flip path %s", path) } - flipped := []byte(path) - for _, c := range []byte(fBase) { // replace base of path with the flipped one ( we need to flip the base or last dir part ) + flipped := []rune(path) + for _, c := range fBase { // replace base of path with the flipped one ( we need to flip the base or last dir part ) flipped[i] = c i++ } @@ -43,7 +43,7 @@ func IsFsPathCaseSensitive(path string) (bool, error) { return true, nil // fs of path should be case sensitive } - if fiCase.ModTime() == fi.ModTime() { // file path exists and is the same + if fiCase.ModTime().Equal(fi.ModTime()) { // file path exists and is the same return false, nil // fs of path is not case sensitive } return false, fmt.Errorf("can not determine case sensitivity of path %s", path) diff --git a/pkg/fsutil/fs_test.go b/pkg/fsutil/fs_test.go new file mode 100644 index 000000000..522e95fa6 --- /dev/null +++ b/pkg/fsutil/fs_test.go @@ -0,0 +1,44 @@ +package fsutil + +import ( + "os" + "path/filepath" + "testing" +) + +func TestIsFsPathCaseSensitive_UnicodeByteLength(t *testing.T) { + // Ⱥ (U+023A) is 2 bytes in UTF-8 + // Its lowercase ⱥ (U+2C65) is 3 bytes in UTF-8 + + dir := t.TempDir() + makeDir := func(path string) { + // Create the directory so os.Stat succeeds + if err := os.Mkdir(path, 0755); err != nil { + t.Fatal(err) + } + } + + path := filepath.Join(dir, "Ⱥtest") + makeDir(path) + + // ensure the test does not panic due to byte length differences in the case flipped path + _, err := IsFsPathCaseSensitive(path) + if err != nil { + t.Fatal(err) + } + + // no guarantee about case sensitivity of the fs running the tests, + // so we just want to ensure the function works and does not panic + // assert.True(t, r, "expected fs to be case sensitive") + + // 
test regular ASCII paths still work + path2 := filepath.Join(dir, "Test") + makeDir(path2) + + _, err = IsFsPathCaseSensitive(path2) + if err != nil { + t.Fatal(err) + } + + // assert.True(t, r, "expected fs to be case sensitive") +} diff --git a/pkg/gallery/delete.go b/pkg/gallery/delete.go index f5186f948..4bc2e2492 100644 --- a/pkg/gallery/delete.go +++ b/pkg/gallery/delete.go @@ -8,13 +8,13 @@ import ( "github.com/stashapp/stash/pkg/models" ) -func (s *Service) Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) { +func (s *Service) Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) ([]*models.Image, error) { var imgsDestroyed []*models.Image // chapter deletion is done via delete cascade, so we don't need to do anything here // if this is a zip-based gallery, delete the images as well first - zipImgsDestroyed, err := s.destroyZipFileImages(ctx, i, fileDeleter, deleteGenerated, deleteFile) + zipImgsDestroyed, err := s.destroyZipFileImages(ctx, i, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry) if err != nil { return nil, err } @@ -45,7 +45,7 @@ func DestroyChapter(ctx context.Context, galleryChapter *models.GalleryChapter, return qb.Destroy(ctx, galleryChapter.ID) } -func (s *Service) destroyZipFileImages(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) { +func (s *Service) destroyZipFileImages(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) ([]*models.Image, error) { if err := i.LoadFiles(ctx, s.Repository); err != nil { return nil, err } @@ -81,6 +81,12 @@ func (s *Service) destroyZipFileImages(ctx context.Context, i *models.Gallery, f if err := destroyer.DestroyZip(ctx, f, fileDeleter.Deleter, deleteFile); err != nil { return 
nil, err } + } else if destroyFileEntry { + // destroy file DB entry without deleting filesystem file + const deleteFileFromFS = false + if err := destroyer.DestroyZip(ctx, f, nil, deleteFileFromFS); err != nil { + return nil, err + } } } diff --git a/pkg/gallery/import.go b/pkg/gallery/import.go index 0068b3f1c..22f3e6c44 100644 --- a/pkg/gallery/import.go +++ b/pkg/gallery/import.go @@ -126,7 +126,7 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.StudioWriter.Create(ctx, &newStudio) @@ -249,7 +249,9 @@ func (i *Importer) createTags(ctx context.Context, names []string) ([]*models.Ta newTag := models.NewTag() newTag.Name = name - err := i.TagWriter.Create(ctx, &newTag) + err := i.TagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } diff --git a/pkg/gallery/import_test.go b/pkg/gallery/import_test.go index b64f80d8f..932f84d48 100644 --- a/pkg/gallery/import_test.go +++ b/pkg/gallery/import_test.go @@ -115,9 +115,9 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -147,7 +147,7 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, 
mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -289,9 +289,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -323,7 +323,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) diff --git a/pkg/gallery/service.go b/pkg/gallery/service.go index 62604e0c5..5b2678480 100644 --- a/pkg/gallery/service.go +++ b/pkg/gallery/service.go @@ -16,7 +16,7 @@ type ImageFinder interface { } type ImageService interface { - Destroy(ctx context.Context, i *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error + Destroy(ctx context.Context, i *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) DestroyFolderImages(ctx context.Context, folderID models.FolderID, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) 
([]*models.Image, error) } diff --git a/pkg/group/import.go b/pkg/group/import.go index 3fc7db8f1..d7acad47c 100644 --- a/pkg/group/import.go +++ b/pkg/group/import.go @@ -126,7 +126,9 @@ func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names [] newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } @@ -203,7 +205,7 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.StudioWriter.Create(ctx, &newStudio) diff --git a/pkg/group/import_test.go b/pkg/group/import_test.go index c4ca47442..387ceb87e 100644 --- a/pkg/group/import_test.go +++ b/pkg/group/import_test.go @@ -121,9 +121,9 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -156,7 +156,7 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -212,9 +212,9 @@ func 
TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -247,7 +247,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) diff --git a/pkg/hash/imagephash/phash.go b/pkg/hash/imagephash/phash.go new file mode 100644 index 000000000..73e8e3667 --- /dev/null +++ b/pkg/hash/imagephash/phash.go @@ -0,0 +1,84 @@ +package imagephash + +import ( + "bytes" + "context" + "fmt" + "image" + "path/filepath" + "strings" + + "github.com/corona10/goimagehash" + "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/ffmpeg/transcoder" + "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" +) + +// Generate computes a perceptual hash for an image file. +func Generate(encoder *ffmpeg.FFMpeg, imageFile *models.ImageFile) (*uint64, error) { + img, err := loadImage(encoder, imageFile) + if err != nil { + return nil, fmt.Errorf("loading image: %w", err) + } + + hash, err := goimagehash.PerceptionHash(img) + if err != nil { + return nil, fmt.Errorf("computing phash from image: %w", err) + } + + hashValue := hash.GetHash() + return &hashValue, nil +} + +// loadImage loads an image from disk and decodes it. 
+// For AVIF files, ffmpeg is used to convert to BMP first since Go has no built-in AVIF decoder. +func loadImage(encoder *ffmpeg.FFMpeg, imageFile *models.ImageFile) (image.Image, error) { + ext := strings.ToLower(filepath.Ext(imageFile.Path)) + if ext == ".avif" { + // AVIF in zip files is not supported - ffmpeg cannot read files inside zips + if imageFile.Base().ZipFileID != nil { + return nil, fmt.Errorf("AVIF images in zip files are not supported for phash generation") + } + return loadImageFFmpeg(encoder, imageFile.Path) + } + + reader, err := imageFile.Open(&file.OsFS{}) + if err != nil { + return nil, err + } + defer reader.Close() + + buf := new(bytes.Buffer) + if _, err := buf.ReadFrom(reader); err != nil { + return nil, err + } + + img, _, err := image.Decode(buf) + if err != nil { + return nil, fmt.Errorf("decoding image: %w", err) + } + + return img, nil +} + +// loadImageFFmpeg uses ffmpeg to convert an image to BMP and then decodes it. +func loadImageFFmpeg(encoder *ffmpeg.FFMpeg, path string) (image.Image, error) { + options := transcoder.ScreenshotOptions{ + OutputPath: "-", + OutputType: transcoder.ScreenshotOutputTypeBMP, + } + + args := transcoder.ScreenshotTime(path, 0, options) + data, err := encoder.GenerateOutput(context.Background(), args, nil) + if err != nil { + return nil, fmt.Errorf("converting image with ffmpeg: %w", err) + } + + img, _, err := image.Decode(bytes.NewReader(data)) + if err != nil { + return nil, fmt.Errorf("decoding ffmpeg output: %w", err) + } + + return img, nil +} diff --git a/pkg/image/delete.go b/pkg/image/delete.go index aa3a9c1c8..28bb54a59 100644 --- a/pkg/image/delete.go +++ b/pkg/image/delete.go @@ -37,8 +37,8 @@ func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error { } // Destroy destroys an image, optionally marking the file and generated files for deletion. 
-func (s *Service) Destroy(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error { - return s.destroyImage(ctx, i, fileDeleter, deleteGenerated, deleteFile) +func (s *Service) Destroy(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error { + return s.destroyImage(ctx, i, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry) } // DestroyZipImages destroys all images in zip, optionally marking the files and generated files for deletion. @@ -75,7 +75,8 @@ func (s *Service) DestroyZipImages(ctx context.Context, zipFile models.File, fil } const deleteFileInZip = false - if err := s.destroyImage(ctx, img, fileDeleter, deleteGenerated, deleteFileInZip); err != nil { + const destroyFileEntry = false + if err := s.destroyImage(ctx, img, fileDeleter, deleteGenerated, deleteFileInZip, destroyFileEntry); err != nil { return nil, err } @@ -135,7 +136,8 @@ func (s *Service) DestroyFolderImages(ctx context.Context, folderID models.Folde continue } - if err := s.Destroy(ctx, img, fileDeleter, deleteGenerated, deleteFile); err != nil { + const destroyFileEntry = false + if err := s.Destroy(ctx, img, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return nil, err } @@ -146,11 +148,15 @@ func (s *Service) DestroyFolderImages(ctx context.Context, folderID models.Folde } // Destroy destroys an image, optionally marking the file and generated files for deletion. 
-func (s *Service) destroyImage(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error { +func (s *Service) destroyImage(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error { if deleteFile { if err := s.deleteFiles(ctx, i, fileDeleter); err != nil { return err } + } else if destroyFileEntry { + if err := s.destroyFileEntries(ctx, i); err != nil { + return err + } } if deleteGenerated { @@ -192,3 +198,35 @@ func (s *Service) deleteFiles(ctx context.Context, i *models.Image, fileDeleter return nil } + +// destroyFileEntries destroys file entries from the database without deleting +// the files from the filesystem +func (s *Service) destroyFileEntries(ctx context.Context, i *models.Image) error { + if err := i.LoadFiles(ctx, s.Repository); err != nil { + return err + } + + for _, f := range i.Files.List() { + // only destroy file entries where there is no other associated image + otherImages, err := s.Repository.FindByFileID(ctx, f.Base().ID) + if err != nil { + return err + } + + if len(otherImages) > 1 { + // other image associated, don't remove + continue + } + + // don't destroy files in zip archives + if f.Base().ZipFileID == nil { + const deleteFile = false + logger.Info("Destroying image file entry: ", f.Base().Path) + if err := file.Destroy(ctx, s.File, f, nil, deleteFile); err != nil { + return err + } + } + } + + return nil +} diff --git a/pkg/image/import.go b/pkg/image/import.go index bf92a6ae8..c7ef7f00c 100644 --- a/pkg/image/import.go +++ b/pkg/image/import.go @@ -159,7 +159,7 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.StudioWriter.Create(ctx, &newStudio) @@ -407,7 +407,9 @@ func createTags(ctx context.Context, tagWriter 
models.TagCreator, names []string newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } diff --git a/pkg/image/import_test.go b/pkg/image/import_test.go index 286e51fe3..5d01d4b97 100644 --- a/pkg/image/import_test.go +++ b/pkg/image/import_test.go @@ -77,9 +77,9 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -109,7 +109,7 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -251,9 +251,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -285,7 +285,7 @@ func 
TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) diff --git a/pkg/models/custom_fields.go b/pkg/models/custom_fields.go index 5c3acd18b..3212d676f 100644 --- a/pkg/models/custom_fields.go +++ b/pkg/models/custom_fields.go @@ -17,3 +17,7 @@ type CustomFieldsReader interface { GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]CustomFieldMap, error) } + +type CustomFieldsWriter interface { + SetCustomFields(ctx context.Context, id int, fields CustomFieldsInput) error +} diff --git a/pkg/models/file.go b/pkg/models/file.go index 63c30ba4d..32263319c 100644 --- a/pkg/models/file.go +++ b/pkg/models/file.go @@ -26,7 +26,7 @@ type FileFilterType struct { ParentFolder *HierarchicalMultiCriterionInput `json:"parent_folder"` ZipFile *MultiCriterionInput `json:"zip_file"` ModTime *TimestampCriterionInput `json:"mod_time"` - Duplicated *PHashDuplicationCriterionInput `json:"duplicated"` + Duplicated *FileDuplicationCriterionInput `json:"duplicated"` Hashes []*FingerprintFilterInput `json:"hashes"` VideoFileFilter *VideoFileFilterInput `json:"video_file_filter"` ImageFileFilter *ImageFileFilterInput `json:"image_file_filter"` diff --git a/pkg/models/gallery.go b/pkg/models/gallery.go index 5b75febc5..dfc776afe 100644 --- a/pkg/models/gallery.go +++ b/pkg/models/gallery.go @@ -95,6 +95,7 @@ type GalleryDestroyInput struct { // If true, then the zip file will be deleted if the gallery is zip-file-based. 
// If gallery is folder-based, then any files not associated with other // galleries will be deleted, along with the folder, if it is not empty. - DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } diff --git a/pkg/models/group.go b/pkg/models/group.go index 6943b1055..ec550eea8 100644 --- a/pkg/models/group.go +++ b/pkg/models/group.go @@ -33,6 +33,8 @@ type GroupFilterType struct { ContainingGroupCount *IntCriterionInput `json:"containing_group_count"` // Filter by number of sub-groups the group has SubGroupCount *IntCriterionInput `json:"sub_group_count"` + // Filter by number of scenes the group has + SceneCount *IntCriterionInput `json:"scene_count"` // Filter by related scenes that meet this criteria ScenesFilter *SceneFilterType `json:"scenes_filter"` // Filter by related studios that meet this criteria diff --git a/pkg/models/image.go b/pkg/models/image.go index 4ab10eabf..84be79360 100644 --- a/pkg/models/image.go +++ b/pkg/models/image.go @@ -11,6 +11,8 @@ type ImageFilterType struct { Photographer *StringCriterionInput `json:"photographer"` // Filter by file checksum Checksum *StringCriterionInput `json:"checksum"` + // Filter by phash distance + PhashDistance *PhashDistanceCriterionInput `json:"phash_distance"` // Filter by path Path *StringCriterionInput `json:"path"` // Filter by file count @@ -88,15 +90,17 @@ type ImageUpdateInput struct { } type ImageDestroyInput struct { - ID string `json:"id"` - DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + ID string `json:"id"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } type ImagesDestroyInput struct { - Ids []string `json:"ids"` - DeleteFile *bool `json:"delete_file"` - DeleteGenerated 
*bool `json:"delete_generated"` + Ids []string `json:"ids"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } type ImageQueryOptions struct { diff --git a/pkg/models/jsonschema/performer.go b/pkg/models/jsonschema/performer.go index 5edd5724c..b738fbfac 100644 --- a/pkg/models/jsonschema/performer.go +++ b/pkg/models/jsonschema/performer.go @@ -48,7 +48,9 @@ type Performer struct { FakeTits string `json:"fake_tits,omitempty"` PenisLength float64 `json:"penis_length,omitempty"` Circumcised string `json:"circumcised,omitempty"` - CareerLength string `json:"career_length,omitempty"` + CareerLength string `json:"career_length,omitempty"` // deprecated - for import only + CareerStart *int `json:"career_start,omitempty"` + CareerEnd *int `json:"career_end,omitempty"` Tattoos string `json:"tattoos,omitempty"` Piercings string `json:"piercings,omitempty"` Aliases StringOrStringList `json:"aliases,omitempty"` diff --git a/pkg/models/jsonschema/scene.go b/pkg/models/jsonschema/scene.go index c2f266d5c..8f15b9c5d 100644 --- a/pkg/models/jsonschema/scene.go +++ b/pkg/models/jsonschema/scene.go @@ -80,6 +80,8 @@ type Scene struct { PlayDuration float64 `json:"play_duration,omitempty"` StashIDs []models.StashID `json:"stash_ids,omitempty"` + + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` } func (s Scene) Filename(id int, basename string, hash string) string { diff --git a/pkg/models/jsonschema/studio.go b/pkg/models/jsonschema/studio.go index a3706df66..12a797c13 100644 --- a/pkg/models/jsonschema/studio.go +++ b/pkg/models/jsonschema/studio.go @@ -24,6 +24,9 @@ type Studio struct { StashIDs []models.StashID `json:"stash_ids,omitempty"` Tags []string `json:"tags,omitempty"` IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"` + Organized bool `json:"organized,omitempty"` + + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` // deprecated - 
for import only URL string `json:"url,omitempty"` diff --git a/pkg/models/jsonschema/tag.go b/pkg/models/jsonschema/tag.go index faab1bfb2..e7b16b13f 100644 --- a/pkg/models/jsonschema/tag.go +++ b/pkg/models/jsonschema/tag.go @@ -11,17 +11,18 @@ import ( ) type Tag struct { - Name string `json:"name,omitempty"` - SortName string `json:"sort_name,omitempty"` - Description string `json:"description,omitempty"` - Favorite bool `json:"favorite,omitempty"` - Aliases []string `json:"aliases,omitempty"` - Image string `json:"image,omitempty"` - Parents []string `json:"parents,omitempty"` - IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"` - StashIDs []models.StashID `json:"stash_ids,omitempty"` - CreatedAt json.JSONTime `json:"created_at,omitempty"` - UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + Name string `json:"name,omitempty"` + SortName string `json:"sort_name,omitempty"` + Description string `json:"description,omitempty"` + Favorite bool `json:"favorite,omitempty"` + Aliases []string `json:"aliases,omitempty"` + Image string `json:"image,omitempty"` + Parents []string `json:"parents,omitempty"` + IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"` + StashIDs []models.StashID `json:"stash_ids,omitempty"` + CreatedAt json.JSONTime `json:"created_at,omitempty"` + UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` } func (s Tag) Filename() string { diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index ef10c890d..0053ad6f8 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -754,6 +754,52 @@ func (_m *SceneReaderWriter) GetCover(ctx context.Context, sceneID int) ([]byte, return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *SceneReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + 
+ var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *SceneReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetFiles provides a mock function with given fields: ctx, relatedID func (_m *SceneReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]*models.VideoFile, error) { ret := _m.Called(ctx, relatedID) @@ -1332,6 +1378,20 @@ func (_m *SceneReaderWriter) SaveActivity(ctx context.Context, sceneID int, resu return r0, r1 } +// SetCustomFields provides a mock function with given fields: ctx, id, fields +func (_m *SceneReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error { + ret := _m.Called(ctx, id, fields) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok { + r0 = rf(ctx, id, fields) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Size provides a mock function with given fields: ctx func (_m *SceneReaderWriter) Size(ctx context.Context) (float64, error) { ret := _m.Called(ctx) diff --git a/pkg/models/mocks/StudioReaderWriter.go b/pkg/models/mocks/StudioReaderWriter.go 
index 481565d6f..f57a73aa1 100644 --- a/pkg/models/mocks/StudioReaderWriter.go +++ b/pkg/models/mocks/StudioReaderWriter.go @@ -80,11 +80,11 @@ func (_m *StudioReaderWriter) CountByTagID(ctx context.Context, tagID int) (int, } // Create provides a mock function with given fields: ctx, newStudio -func (_m *StudioReaderWriter) Create(ctx context.Context, newStudio *models.Studio) error { +func (_m *StudioReaderWriter) Create(ctx context.Context, newStudio *models.CreateStudioInput) error { ret := _m.Called(ctx, newStudio) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Studio) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.CreateStudioInput) error); ok { r0 = rf(ctx, newStudio) } else { r0 = ret.Error(0) @@ -291,6 +291,52 @@ func (_m *StudioReaderWriter) GetAliases(ctx context.Context, relatedID int) ([] return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *StudioReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *StudioReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) 
error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetImage provides a mock function with given fields: ctx, studioID func (_m *StudioReaderWriter) GetImage(ctx context.Context, studioID int) ([]byte, error) { ret := _m.Called(ctx, studioID) @@ -479,11 +525,11 @@ func (_m *StudioReaderWriter) QueryForAutoTag(ctx context.Context, words []strin } // Update provides a mock function with given fields: ctx, updatedStudio -func (_m *StudioReaderWriter) Update(ctx context.Context, updatedStudio *models.Studio) error { +func (_m *StudioReaderWriter) Update(ctx context.Context, updatedStudio *models.UpdateStudioInput) error { ret := _m.Called(ctx, updatedStudio) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Studio) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.UpdateStudioInput) error); ok { r0 = rf(ctx, updatedStudio) } else { r0 = ret.Error(0) diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index ac6b10584..95a3b7a87 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -101,11 +101,11 @@ func (_m *TagReaderWriter) CountByParentTagID(ctx context.Context, parentID int) } // Create provides a mock function with given fields: ctx, newTag -func (_m *TagReaderWriter) Create(ctx context.Context, newTag *models.Tag) error { +func (_m *TagReaderWriter) Create(ctx context.Context, newTag *models.CreateTagInput) error { ret := _m.Called(ctx, newTag) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Tag) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.CreateTagInput) error); ok { r0 = rf(ctx, newTag) } else { r0 = ret.Error(0) @@ -542,6 +542,52 @@ func (_m *TagReaderWriter) GetChildIDs(ctx context.Context, relatedID int) ([]in return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *TagReaderWriter) GetCustomFields(ctx 
context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *TagReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetImage provides a mock function with given fields: ctx, tagID func (_m *TagReaderWriter) GetImage(ctx context.Context, tagID int) ([]byte, error) { ret := _m.Called(ctx, tagID) @@ -699,12 +745,26 @@ func (_m *TagReaderWriter) QueryForAutoTag(ctx context.Context, words []string) return r0, r1 } +// SetCustomFields provides a mock function with given fields: ctx, id, fields +func (_m *TagReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error { + ret := _m.Called(ctx, id, fields) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok { + r0 = rf(ctx, id, fields) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Update provides a mock function with given fields: ctx, updatedTag -func (_m *TagReaderWriter) Update(ctx context.Context, updatedTag *models.Tag) error { +func (_m *TagReaderWriter) 
Update(ctx context.Context, updatedTag *models.UpdateTagInput) error { ret := _m.Called(ctx, updatedTag) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Tag) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.UpdateTagInput) error); ok { r0 = rf(ctx, updatedTag) } else { r0 = ret.Error(0) diff --git a/pkg/models/model_performer.go b/pkg/models/model_performer.go index 566dcae1e..a30eafa0a 100644 --- a/pkg/models/model_performer.go +++ b/pkg/models/model_performer.go @@ -19,7 +19,8 @@ type Performer struct { FakeTits string `json:"fake_tits"` PenisLength *float64 `json:"penis_length"` Circumcised *CircumisedEnum `json:"circumcised"` - CareerLength string `json:"career_length"` + CareerStart *int `json:"career_start"` + CareerEnd *int `json:"career_end"` Tattoos string `json:"tattoos"` Piercings string `json:"piercings"` Favorite bool `json:"favorite"` @@ -75,7 +76,8 @@ type PerformerPartial struct { FakeTits OptionalString PenisLength OptionalFloat64 Circumcised OptionalString - CareerLength OptionalString + CareerStart OptionalInt + CareerEnd OptionalInt Tattoos OptionalString Piercings OptionalString Favorite OptionalBool diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go index cf0499388..64ad34b9c 100644 --- a/pkg/models/model_scene.go +++ b/pkg/models/model_scene.go @@ -53,6 +53,20 @@ func NewScene() Scene { } } +type CreateSceneInput struct { + *Scene + + FileIDs []FileID + CoverImage []byte + CustomFields CustomFieldMap `json:"custom_fields"` +} + +type UpdateSceneInput struct { + *Scene + + CustomFields CustomFieldsInput `json:"custom_fields"` +} + // ScenePartial represents part of a Scene object. It is used to update // the database entry. 
type ScenePartial struct { diff --git a/pkg/models/model_scraped_item.go b/pkg/models/model_scraped_item.go index 4254a9876..3c0e083c1 100644 --- a/pkg/models/model_scraped_item.go +++ b/pkg/models/model_scraped_item.go @@ -27,9 +27,9 @@ type ScrapedStudio struct { func (ScrapedStudio) IsScrapedContent() {} -func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *Studio { +func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *CreateStudioInput { // Populate a new studio from the input - ret := NewStudio() + ret := NewCreateStudioInput() ret.Name = strings.TrimSpace(s.Name) if s.RemoteSiteID != nil && endpoint != "" && *s.RemoteSiteID != "" { @@ -176,7 +176,9 @@ type ScrapedPerformer struct { FakeTits *string `json:"fake_tits"` PenisLength *string `json:"penis_length"` Circumcised *string `json:"circumcised"` - CareerLength *string `json:"career_length"` + CareerLength *string `json:"career_length"` // deprecated: use CareerStart/CareerEnd + CareerStart *int `json:"career_start"` + CareerEnd *int `json:"career_end"` Tattoos *string `json:"tattoos"` Piercings *string `json:"piercings"` Aliases *string `json:"aliases"` @@ -219,8 +221,16 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool ret.DeathDate = &date } } - if p.CareerLength != nil && !excluded["career_length"] { - ret.CareerLength = *p.CareerLength + + // assume that career length is _not_ populated in favour of start/end + + if p.CareerStart != nil && !excluded["career_start"] { + cs := *p.CareerStart + ret.CareerStart = &cs + } + if p.CareerEnd != nil && !excluded["career_end"] { + ce := *p.CareerEnd + ret.CareerEnd = &ce } if p.Country != nil && !excluded["country"] { ret.Country = *p.Country @@ -356,7 +366,16 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool, } } if p.CareerLength != nil && !excluded["career_length"] { - ret.CareerLength = NewOptionalString(*p.CareerLength) + // parse career_length 
into career_start/career_end + start, end, err := utils.ParseYearRangeString(*p.CareerLength) + if err == nil { + if start != nil { + ret.CareerStart = NewOptionalInt(*start) + } + if end != nil { + ret.CareerEnd = NewOptionalInt(*end) + } + } } if p.Country != nil && !excluded["country"] { ret.Country = NewOptionalString(*p.Country) diff --git a/pkg/models/model_scraped_item_test.go b/pkg/models/model_scraped_item_test.go index b6b44025f..09d8fbb32 100644 --- a/pkg/models/model_scraped_item_test.go +++ b/pkg/models/model_scraped_item_test.go @@ -8,6 +8,8 @@ import ( "github.com/stretchr/testify/assert" ) +func intPtr(i int) *int { return &i } + func Test_scrapedToStudioInput(t *testing.T) { const name = "name" url := "url" @@ -113,7 +115,7 @@ func Test_scrapedToStudioInput(t *testing.T) { got.StashIDs.List()[stid].UpdatedAt = time.Time{} } } - assert.Equal(t, tt.want, got) + assert.Equal(t, tt.want, got.Studio) }) } } @@ -124,9 +126,10 @@ func Test_scrapedToPerformerInput(t *testing.T) { endpoint := "endpoint" remoteSiteID := "remoteSiteID" - var stringValues []string - for i := 0; i < 20; i++ { - stringValues = append(stringValues, strconv.Itoa(i)) + const nValues = 19 + stringValues := make([]string, nValues) + for i := 0; i < nValues; i++ { + stringValues[i] = strconv.Itoa(i) } upTo := 0 @@ -183,7 +186,8 @@ func Test_scrapedToPerformerInput(t *testing.T) { Weight: nextVal(), Measurements: nextVal(), FakeTits: nextVal(), - CareerLength: nextVal(), + CareerStart: intPtr(2005), + CareerEnd: intPtr(2015), Tattoos: nextVal(), Piercings: nextVal(), Aliases: nextVal(), @@ -208,8 +212,9 @@ func Test_scrapedToPerformerInput(t *testing.T) { Weight: nextIntVal(), Measurements: *nextVal(), FakeTits: *nextVal(), - CareerLength: *nextVal(), - Tattoos: *nextVal(), + CareerStart: intPtr(2005), + CareerEnd: intPtr(2015), + Tattoos: *nextVal(), // skip CareerLength counter slot Piercings: *nextVal(), Aliases: NewRelatedStrings([]string{*nextVal()}), URLs: 
NewRelatedStrings([]string{*nextVal(), *nextVal(), *nextVal()}), diff --git a/pkg/models/model_studio.go b/pkg/models/model_studio.go index 8c7a687af..ec81aac0e 100644 --- a/pkg/models/model_studio.go +++ b/pkg/models/model_studio.go @@ -16,6 +16,7 @@ type Studio struct { Favorite bool `json:"favorite"` Details string `json:"details"` IgnoreAutoTag bool `json:"ignore_auto_tag"` + Organized bool `json:"organized"` Aliases RelatedStrings `json:"aliases"` URLs RelatedStrings `json:"urls"` @@ -23,6 +24,18 @@ type Studio struct { StashIDs RelatedStashIDs `json:"stash_ids"` } +type CreateStudioInput struct { + *Studio + + CustomFields map[string]interface{} `json:"custom_fields"` +} + +type UpdateStudioInput struct { + *Studio + + CustomFields CustomFieldsInput `json:"custom_fields"` +} + func NewStudio() Studio { currentTime := time.Now() return Studio{ @@ -31,6 +44,13 @@ func NewStudio() Studio { } } +func NewCreateStudioInput() CreateStudioInput { + s := NewStudio() + return CreateStudioInput{ + Studio: &s, + } +} + // StudioPartial represents part of a Studio object. It is used to update the database entry. 
type StudioPartial struct { ID int @@ -43,11 +63,14 @@ type StudioPartial struct { CreatedAt OptionalTime UpdatedAt OptionalTime IgnoreAutoTag OptionalBool + Organized OptionalBool Aliases *UpdateStrings URLs *UpdateStrings TagIDs *UpdateIDs StashIDs *UpdateStashIDs + + CustomFields CustomFieldsInput } func NewStudioPartial() StudioPartial { diff --git a/pkg/models/model_tag.go b/pkg/models/model_tag.go index 4cd038f7e..aee468639 100644 --- a/pkg/models/model_tag.go +++ b/pkg/models/model_tag.go @@ -29,6 +29,18 @@ func NewTag() Tag { } } +type CreateTagInput struct { + *Tag + + CustomFields map[string]interface{} `json:"custom_fields"` +} + +type UpdateTagInput struct { + *Tag + + CustomFields CustomFieldsInput `json:"custom_fields"` +} + func (s *Tag) LoadAliases(ctx context.Context, l AliasLoader) error { return s.Aliases.load(func() ([]string, error) { return l.GetAliases(ctx, s.ID) @@ -66,6 +78,8 @@ type TagPartial struct { ParentIDs *UpdateIDs ChildIDs *UpdateIDs StashIDs *UpdateStashIDs + + CustomFields CustomFieldsInput } func NewTagPartial() TagPartial { diff --git a/pkg/models/performer.go b/pkg/models/performer.go index 63a08b30c..e4fb8dd98 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -137,7 +137,11 @@ type PerformerFilterType struct { // Filter by circumcision Circumcised *CircumcisionCriterionInput `json:"circumcised"` // Filter by career length - CareerLength *StringCriterionInput `json:"career_length"` + CareerLength *StringCriterionInput `json:"career_length"` // deprecated + // Filter by career start year + CareerStart *IntCriterionInput `json:"career_start"` + // Filter by career end year + CareerEnd *IntCriterionInput `json:"career_end"` // Filter by tattoos Tattoos *StringCriterionInput `json:"tattoos"` // Filter by piercings @@ -224,6 +228,8 @@ type PerformerCreateInput struct { PenisLength *float64 `json:"penis_length"` Circumcised *CircumisedEnum `json:"circumcised"` CareerLength *string `json:"career_length"` + 
CareerStart *int `json:"career_start"` + CareerEnd *int `json:"career_end"` Tattoos *string `json:"tattoos"` Piercings *string `json:"piercings"` Aliases *string `json:"aliases"` @@ -263,6 +269,8 @@ type PerformerUpdateInput struct { PenisLength *float64 `json:"penis_length"` Circumcised *CircumisedEnum `json:"circumcised"` CareerLength *string `json:"career_length"` + CareerStart *int `json:"career_start"` + CareerEnd *int `json:"career_end"` Tattoos *string `json:"tattoos"` Piercings *string `json:"piercings"` Aliases *string `json:"aliases"` diff --git a/pkg/models/repository_scene.go b/pkg/models/repository_scene.go index 8c2833470..6b795c3af 100644 --- a/pkg/models/repository_scene.go +++ b/pkg/models/repository_scene.go @@ -104,6 +104,7 @@ type SceneReader interface { SceneGroupLoader StashIDLoader VideoFileLoader + CustomFieldsReader All(ctx context.Context) ([]*Scene, error) Wall(ctx context.Context, q *string) ([]*Scene, error) @@ -140,6 +141,7 @@ type SceneWriter interface { ViewHistoryWriter SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) ResetActivity(ctx context.Context, sceneID int, resetResume bool, resetDuration bool) (bool, error) + CustomFieldsWriter } // SceneReaderWriter provides all scene methods. diff --git a/pkg/models/repository_studio.go b/pkg/models/repository_studio.go index 99f98bffc..54fb6ed47 100644 --- a/pkg/models/repository_studio.go +++ b/pkg/models/repository_studio.go @@ -42,12 +42,12 @@ type StudioCounter interface { // StudioCreator provides methods to create studios. type StudioCreator interface { - Create(ctx context.Context, newStudio *Studio) error + Create(ctx context.Context, newStudio *CreateStudioInput) error } // StudioUpdater provides methods to update studios. 
type StudioUpdater interface { - Update(ctx context.Context, updatedStudio *Studio) error + Update(ctx context.Context, updatedStudio *UpdateStudioInput) error UpdatePartial(ctx context.Context, updatedStudio StudioPartial) (*Studio, error) UpdateImage(ctx context.Context, studioID int, image []byte) error } @@ -79,6 +79,8 @@ type StudioReader interface { TagIDLoader URLLoader + CustomFieldsReader + All(ctx context.Context) ([]*Studio, error) GetImage(ctx context.Context, studioID int) ([]byte, error) HasImage(ctx context.Context, studioID int) (bool, error) diff --git a/pkg/models/repository_tag.go b/pkg/models/repository_tag.go index a7f828f0b..ba403cf2d 100644 --- a/pkg/models/repository_tag.go +++ b/pkg/models/repository_tag.go @@ -51,12 +51,12 @@ type TagCounter interface { // TagCreator provides methods to create tags. type TagCreator interface { - Create(ctx context.Context, newTag *Tag) error + Create(ctx context.Context, newTag *CreateTagInput) error } // TagUpdater provides methods to update tags. type TagUpdater interface { - Update(ctx context.Context, updatedTag *Tag) error + Update(ctx context.Context, updatedTag *UpdateTagInput) error UpdatePartial(ctx context.Context, id int, updateTag TagPartial) (*Tag, error) UpdateAliases(ctx context.Context, tagID int, aliases []string) error UpdateImage(ctx context.Context, tagID int, image []byte) error @@ -77,6 +77,7 @@ type TagFinderCreator interface { type TagCreatorUpdater interface { TagCreator TagUpdater + CustomFieldsWriter } // TagReader provides all methods to read tags. 
@@ -89,6 +90,7 @@ type TagReader interface { AliasLoader TagRelationLoader StashIDLoader + CustomFieldsReader All(ctx context.Context) ([]*Tag, error) GetImage(ctx context.Context, tagID int) ([]byte, error) @@ -100,6 +102,7 @@ type TagWriter interface { TagCreator TagUpdater TagDestroyer + CustomFieldsWriter Merge(ctx context.Context, source []int, destination int) error } diff --git a/pkg/models/scene.go b/pkg/models/scene.go index 1c34967c6..839452501 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -2,10 +2,28 @@ package models import "context" -type PHashDuplicationCriterionInput struct { +type DuplicationCriterionInput struct { + // Deprecated: Use Phash field instead. Kept for backwards compatibility. Duplicated *bool `json:"duplicated"` - // Currently unimplemented + // Currently unimplemented. Intended for phash distance matching. Distance *int `json:"distance"` + // Filter by phash duplication + Phash *bool `json:"phash"` + // Filter by URL duplication + URL *bool `json:"url"` + // Filter by Stash ID duplication + StashID *bool `json:"stash_id"` + // Filter by title duplication + Title *bool `json:"title"` +} + +type FileDuplicationCriterionInput struct { + // Deprecated: Use Phash field instead. Kept for backwards compatibility. + Duplicated *bool `json:"duplicated"` + // Currently unimplemented. Intended for phash distance matching. 
+ Distance *int `json:"distance"` + // Filter by phash duplication + Phash *bool `json:"phash"` } type SceneFilterType struct { @@ -33,8 +51,8 @@ type SceneFilterType struct { Organized *bool `json:"organized"` // Filter by o-counter OCounter *IntCriterionInput `json:"o_counter"` - // Filter Scenes that have an exact phash match available - Duplicated *PHashDuplicationCriterionInput `json:"duplicated"` + // Filter Scenes by duplication criteria + Duplicated *DuplicationCriterionInput `json:"duplicated"` // Filter by resolution Resolution *ResolutionCriterionInput `json:"resolution"` // Filter by orientation @@ -81,6 +99,8 @@ type SceneFilterType struct { StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"` // Filter by StashIDs Endpoint StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"` + // Filter by StashID count + StashIDCount *IntCriterionInput `json:"stash_id_count"` // Filter by url URL *StringCriterionInput `json:"url"` // Filter by interactive @@ -119,6 +139,9 @@ type SceneFilterType struct { CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + + // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } type SceneQueryOptions struct { @@ -172,7 +195,8 @@ type SceneCreateInput struct { // The first id will be assigned as primary. // Files will be reassigned from existing scenes if applicable. // Files must not already be primary for another scene. 
- FileIds []string `json:"file_ids"` + FileIds []string `json:"file_ids"` + CustomFields map[string]any `json:"custom_fields,omitempty"` } type SceneUpdateInput struct { @@ -201,18 +225,21 @@ type SceneUpdateInput struct { PlayDuration *float64 `json:"play_duration"` PlayCount *int `json:"play_count"` PrimaryFileID *string `json:"primary_file_id"` + CustomFields *CustomFieldsInput } type SceneDestroyInput struct { - ID string `json:"id"` - DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + ID string `json:"id"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } type ScenesDestroyInput struct { - Ids []string `json:"ids"` - DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + Ids []string `json:"ids"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } func NewSceneQueryResult(getter SceneGetter) *SceneQueryResult { diff --git a/pkg/models/studio.go b/pkg/models/studio.go index fd306b16c..7ad8719ac 100644 --- a/pkg/models/studio.go +++ b/pkg/models/studio.go @@ -28,6 +28,8 @@ type StudioFilterType struct { ImageCount *IntCriterionInput `json:"image_count"` // Filter by gallery count GalleryCount *IntCriterionInput `json:"gallery_count"` + // Filter by group count + GroupCount *IntCriterionInput `json:"group_count"` // Filter by url URL *StringCriterionInput `json:"url"` // Filter by studio aliases @@ -36,16 +38,23 @@ type StudioFilterType struct { ChildCount *IntCriterionInput `json:"child_count"` // Filter by autotag ignore value IgnoreAutoTag *bool `json:"ignore_auto_tag"` + // Filter by organized + Organized *bool `json:"organized"` // Filter by related scenes that meet this criteria ScenesFilter *SceneFilterType `json:"scenes_filter"` // Filter by related images that meet this criteria ImagesFilter 
*ImageFilterType `json:"images_filter"` // Filter by related galleries that meet this criteria GalleriesFilter *GalleryFilterType `json:"galleries_filter"` + // Filter by related groups that meet this criteria + GroupsFilter *GroupFilterType `json:"groups_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + + // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } type StudioCreateInput struct { @@ -62,6 +71,9 @@ type StudioCreateInput struct { Aliases []string `json:"aliases"` TagIds []string `json:"tag_ids"` IgnoreAutoTag *bool `json:"ignore_auto_tag"` + Organized *bool `json:"organized"` + + CustomFields map[string]interface{} `json:"custom_fields"` } type StudioUpdateInput struct { @@ -79,4 +91,7 @@ type StudioUpdateInput struct { Aliases []string `json:"aliases"` TagIds []string `json:"tag_ids"` IgnoreAutoTag *bool `json:"ignore_auto_tag"` + Organized *bool `json:"organized"` + + CustomFields CustomFieldsInput `json:"custom_fields"` } diff --git a/pkg/models/tag.go b/pkg/models/tag.go index 69d4f9e3c..3a133dcad 100644 --- a/pkg/models/tag.go +++ b/pkg/models/tag.go @@ -50,8 +50,17 @@ type TagFilterType struct { ImagesFilter *ImageFilterType `json:"images_filter"` // Filter by related galleries that meet this criteria GalleriesFilter *GalleryFilterType `json:"galleries_filter"` + // Filter by related groups that meet this criteria + GroupsFilter *GroupFilterType `json:"groups_filter"` + // Filter by related performers that meet this criteria + PerformersFilter *PerformerFilterType `json:"performers_filter"` + // Filter by related studios that meet this criteria + StudiosFilter *StudioFilterType `json:"studios_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + + // Filter by custom fields + 
CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } diff --git a/pkg/performer/export.go b/pkg/performer/export.go index 1455fb7bf..691175b1f 100644 --- a/pkg/performer/export.go +++ b/pkg/performer/export.go @@ -30,7 +30,6 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode EyeColor: performer.EyeColor, Measurements: performer.Measurements, FakeTits: performer.FakeTits, - CareerLength: performer.CareerLength, Tattoos: performer.Tattoos, Piercings: performer.Piercings, Favorite: performer.Favorite, @@ -71,6 +70,13 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode newPerformerJSON.PenisLength = *performer.PenisLength } + if performer.CareerStart != nil { + newPerformerJSON.CareerStart = performer.CareerStart + } + if performer.CareerEnd != nil { + newPerformerJSON.CareerEnd = performer.CareerEnd + } + if err := performer.LoadAliases(ctx, reader); err != nil { return nil, fmt.Errorf("loading performer aliases: %w", err) } diff --git a/pkg/performer/export_test.go b/pkg/performer/export_test.go index e51049e14..1a87bc2b1 100644 --- a/pkg/performer/export_test.go +++ b/pkg/performer/export_test.go @@ -26,7 +26,6 @@ const ( performerName = "testPerformer" disambiguation = "disambiguation" url = "url" - careerLength = "careerLength" country = "country" ethnicity = "ethnicity" eyeColor = "eyeColor" @@ -49,6 +48,8 @@ var ( rating = 5 height = 123 weight = 60 + careerStart = 2005 + careerEnd = 2015 penisLength = 1.23 circumcisedEnum = models.CircumisedEnumCut circumcised = circumcisedEnum.String() @@ -87,7 +88,8 @@ func createFullPerformer(id int, name string) *models.Performer { URLs: models.NewRelatedStrings([]string{url, twitter, instagram}), Aliases: models.NewRelatedStrings(aliases), Birthdate: &birthDate, - CareerLength: careerLength, + CareerStart: &careerStart, + CareerEnd: &careerEnd, Country: country, Ethnicity: ethnicity, EyeColor: eyeColor, @@ -132,7 +134,8 @@ func 
createFullJSONPerformer(name string, image string, withCustomFields bool) * URLs: []string{url, twitter, instagram}, Aliases: aliases, Birthdate: birthDate.String(), - CareerLength: careerLength, + CareerStart: &careerStart, + CareerEnd: &careerEnd, Country: country, Ethnicity: ethnicity, EyeColor: eyeColor, diff --git a/pkg/performer/import.go b/pkg/performer/import.go index 622af2b1a..1df69521a 100644 --- a/pkg/performer/import.go +++ b/pkg/performer/import.go @@ -32,14 +32,17 @@ type Importer struct { } func (i *Importer) PreImport(ctx context.Context) error { - i.performer = performerJSONToPerformer(i.Input) + var err error + i.performer, err = performerJSONToPerformer(i.Input) + if err != nil { + return err + } i.customFields = i.Input.CustomFields if err := i.populateTags(ctx); err != nil { return err } - var err error if len(i.Input.Image) > 0 { i.imageData, err = utils.ProcessBase64Image(i.Input.Image) if err != nil { @@ -107,7 +110,9 @@ func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names [] newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } @@ -194,7 +199,7 @@ func (i *Importer) Update(ctx context.Context, id int) error { return nil } -func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Performer { +func performerJSONToPerformer(performerJSON jsonschema.Performer) (models.Performer, error) { newPerformer := models.Performer{ Name: performerJSON.Name, Disambiguation: performerJSON.Disambiguation, @@ -203,7 +208,6 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform EyeColor: performerJSON.EyeColor, Measurements: performerJSON.Measurements, FakeTits: performerJSON.FakeTits, - CareerLength: performerJSON.CareerLength, Tattoos: performerJSON.Tattoos, Piercings: performerJSON.Piercings, Aliases: models.NewRelatedStrings(performerJSON.Aliases), 
@@ -280,5 +284,18 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform } } - return newPerformer + // prefer explicit career_start/career_end, fall back to parsing legacy career_length + if performerJSON.CareerStart != nil || performerJSON.CareerEnd != nil { + newPerformer.CareerStart = performerJSON.CareerStart + newPerformer.CareerEnd = performerJSON.CareerEnd + } else if performerJSON.CareerLength != "" { + start, end, err := utils.ParseYearRangeString(performerJSON.CareerLength) + if err != nil { + return models.Performer{}, fmt.Errorf("invalid career_length %q: %w", performerJSON.CareerLength, err) + } + newPerformer.CareerStart = start + newPerformer.CareerEnd = end + } + + return newPerformer, nil } diff --git a/pkg/performer/import_test.go b/pkg/performer/import_test.go index 0a3f86291..ca28c1990 100644 --- a/pkg/performer/import_test.go +++ b/pkg/performer/import_test.go @@ -111,9 +111,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -146,7 +146,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -315,3 +315,86 @@ func TestUpdate(t *testing.T) { db.AssertExpectations(t) } 
+ +func TestImportCareerFields(t *testing.T) { + startYear := 2005 + endYear := 2015 + + // explicit career_start/career_end should be used directly + t.Run("explicit fields", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerStart: &startYear, + CareerEnd: &endYear, + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Equal(t, &startYear, p.CareerStart) + assert.Equal(t, &endYear, p.CareerEnd) + }) + + // explicit fields take priority over legacy career_length + t.Run("explicit fields override legacy", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerStart: &startYear, + CareerEnd: &endYear, + CareerLength: "1990 - 1995", + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Equal(t, &startYear, p.CareerStart) + assert.Equal(t, &endYear, p.CareerEnd) + }) + + // legacy career_length should be parsed when explicit fields are absent + t.Run("legacy career_length fallback", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerLength: "2005 - 2015", + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Equal(t, &startYear, p.CareerStart) + assert.Equal(t, &endYear, p.CareerEnd) + }) + + // legacy career_length with only start year + t.Run("legacy career_length start only", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerLength: "2005 -", + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Equal(t, &startYear, p.CareerStart) + assert.Nil(t, p.CareerEnd) + }) + + // unparseable career_length should return an error + t.Run("legacy career_length unparseable", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerLength: "not a year range", + } + + _, err := performerJSONToPerformer(input) + assert.NotNil(t, err) + }) + + // no career fields at all + t.Run("no career fields", func(t *testing.T) { + input := jsonschema.Performer{ + 
Name: "test", + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Nil(t, p.CareerStart) + assert.Nil(t, p.CareerEnd) + }) +} diff --git a/pkg/performer/validate.go b/pkg/performer/validate.go index 68f7a8ef5..3baaa182b 100644 --- a/pkg/performer/validate.go +++ b/pkg/performer/validate.go @@ -225,6 +225,11 @@ func ValidateUpdateAliases(existing models.Performer, name models.OptionalString newName = name.Value } + // If aliases is nil, we're only changing the name - check existing aliases against new name + if aliases == nil { + return ValidateAliases(newName, existing.Aliases) + } + newAliases := aliases.Apply(existing.Aliases.List()) return ValidateAliases(newName, models.NewRelatedStrings(newAliases)) diff --git a/pkg/performer/validate_test.go b/pkg/performer/validate_test.go index 33f4b1cec..afd9c01c5 100644 --- a/pkg/performer/validate_test.go +++ b/pkg/performer/validate_test.go @@ -213,12 +213,12 @@ func TestValidateUpdateAliases(t *testing.T) { want error }{ {"both unset", osUnset, nil, nil}, - {"invalid name set", os2, nil, &DuplicateAliasError{name2}}, + {"name conflicts with alias", os2, nil, &DuplicateAliasError{name2}}, {"valid name set", os3, nil, nil}, {"valid aliases empty", os1, []string{}, nil}, - {"invalid aliases set", osUnset, []string{name1U}, &DuplicateAliasError{name1U}}, + {"alias matches name", osUnset, []string{name1U}, &DuplicateAliasError{name1U}}, {"valid aliases set", osUnset, []string{name3, name2}, nil}, - {"invalid both set", os4, []string{name4}, &DuplicateAliasError{name4}}, + {"alias matches new name", os4, []string{name4}, &DuplicateAliasError{name4}}, {"valid both set", os2, []string{name1}, nil}, } diff --git a/pkg/scene/create.go b/pkg/scene/create.go index cd9234b5d..248906295 100644 --- a/pkg/scene/create.go +++ b/pkg/scene/create.go @@ -10,14 +10,14 @@ import ( "github.com/stashapp/stash/pkg/plugin/hook" ) -func (s *Service) Create(ctx context.Context, input *models.Scene, fileIDs 
[]models.FileID, coverImage []byte) (*models.Scene, error) { +func (s *Service) Create(ctx context.Context, input models.CreateSceneInput) (*models.Scene, error) { // title must be set if no files are provided - if input.Title == "" && len(fileIDs) == 0 { + if input.Scene.Title == "" && len(input.FileIDs) == 0 { return nil, errors.New("title must be set if scene has no files") } now := time.Now() - newScene := *input + newScene := *input.Scene newScene.CreatedAt = now newScene.UpdatedAt = now @@ -27,16 +27,24 @@ func (s *Service) Create(ctx context.Context, input *models.Scene, fileIDs []mod return nil, fmt.Errorf("creating new scene: %w", err) } - for _, f := range fileIDs { + if len(input.CustomFields) > 0 { + if err := s.Repository.SetCustomFields(ctx, newScene.ID, models.CustomFieldsInput{ + Full: input.CustomFields, + }); err != nil { + return nil, fmt.Errorf("setting custom fields on new scene: %w", err) + } + } + + for _, f := range input.FileIDs { if err := s.AssignFile(ctx, newScene.ID, f); err != nil { return nil, fmt.Errorf("assigning file %d to new scene: %w", f, err) } } - if len(fileIDs) > 0 { + if len(input.FileIDs) > 0 { // assign the primary to the first if _, err := s.Repository.UpdatePartial(ctx, newScene.ID, models.ScenePartial{ - PrimaryFileID: &fileIDs[0], + PrimaryFileID: &input.FileIDs[0], }); err != nil { return nil, fmt.Errorf("setting primary file on new scene: %w", err) } @@ -48,8 +56,8 @@ func (s *Service) Create(ctx context.Context, input *models.Scene, fileIDs []mod return nil, err } - if len(coverImage) > 0 { - if err := s.Repository.UpdateCover(ctx, ret.ID, coverImage); err != nil { + if len(input.CoverImage) > 0 { + if err := s.Repository.UpdateCover(ctx, ret.ID, input.CoverImage); err != nil { return nil, fmt.Errorf("setting cover on new scene: %w", err) } } diff --git a/pkg/scene/delete.go b/pkg/scene/delete.go index c34bbdf14..8ca3d6e11 100644 --- a/pkg/scene/delete.go +++ b/pkg/scene/delete.go @@ -109,7 +109,7 @@ func (d 
*FileDeleter) MarkMarkerFiles(scene *models.Scene, seconds int) error { // Destroy deletes a scene and its associated relationships from the // database. -func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error { +func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter *FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error { mqb := s.MarkerRepository markers, err := mqb.FindBySceneID(ctx, scene.ID) if err != nil { @@ -126,6 +126,10 @@ func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter if err := s.deleteFiles(ctx, scene, fileDeleter); err != nil { return err } + } else if destroyFileEntry { + if err := s.destroyFileEntries(ctx, scene); err != nil { + return err + } } if deleteGenerated { @@ -180,6 +184,35 @@ func (s *Service) deleteFiles(ctx context.Context, scene *models.Scene, fileDele return nil } +// destroyFileEntries destroys file entries from the database without deleting +// the files from the filesystem +func (s *Service) destroyFileEntries(ctx context.Context, scene *models.Scene) error { + if err := scene.LoadFiles(ctx, s.Repository); err != nil { + return err + } + + for _, f := range scene.Files.List() { + // only destroy file entries where there is no other associated scene + otherScenes, err := s.Repository.FindByFileID(ctx, f.ID) + if err != nil { + return err + } + + if len(otherScenes) > 1 { + // other scenes associated, don't remove + continue + } + + const deleteFile = false + logger.Info("Destroying scene file entry: ", f.Path) + if err := file.Destroy(ctx, s.File, f, nil, deleteFile); err != nil { + return err + } + } + + return nil +} + // DestroyMarker deletes the scene marker from the database and returns a // function that removes the generated files, to be executed after the // transaction is successfully committed. 
diff --git a/pkg/scene/export.go b/pkg/scene/export.go index a012d1850..069bd587f 100644 --- a/pkg/scene/export.go +++ b/pkg/scene/export.go @@ -17,6 +17,7 @@ import ( type ExportGetter interface { models.ViewDateReader models.ODateReader + models.CustomFieldsReader GetCover(ctx context.Context, sceneID int) ([]byte, error) } @@ -92,6 +93,11 @@ func ToBasicJSON(ctx context.Context, reader ExportGetter, scene *models.Scene) newSceneJSON.OHistory = append(newSceneJSON.OHistory, json.JSONTime{Time: date}) } + newSceneJSON.CustomFields, err = reader.GetCustomFields(ctx, scene.ID) + if err != nil { + return nil, fmt.Errorf("getting scene custom fields: %v", err) + } + return &newSceneJSON, nil } diff --git a/pkg/scene/export_test.go b/pkg/scene/export_test.go index cde421bd8..9547ab5e7 100644 --- a/pkg/scene/export_test.go +++ b/pkg/scene/export_test.go @@ -22,6 +22,7 @@ const ( studioID = 4 missingStudioID = 5 errStudioID = 6 + customFieldsID = 7 noTagsID = 11 errTagsID = 12 @@ -33,6 +34,7 @@ const ( errMarkersID = 17 errFindPrimaryTagID = 18 errFindByMarkerID = 19 + errCustomFieldsID = 20 ) var ( @@ -82,6 +84,13 @@ var ( updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC) ) +var ( + emptyCustomFields = make(map[string]interface{}) + customFields = map[string]interface{}{ + "customField1": "customValue1", + } +) + func createFullScene(id int) models.Scene { return models.Scene{ ID: id, @@ -123,7 +132,7 @@ func createEmptyScene(id int) models.Scene { } } -func createFullJSONScene(image string) *jsonschema.Scene { +func createFullJSONScene(image string, customFields map[string]interface{}) *jsonschema.Scene { return &jsonschema.Scene{ Title: title, Files: []string{path}, @@ -142,6 +151,7 @@ func createFullJSONScene(image string) *jsonschema.Scene { StashIDs: []models.StashID{ stashID, }, + CustomFields: customFields, } } @@ -155,32 +165,49 @@ func createEmptyJSONScene() *jsonschema.Scene { UpdatedAt: json.JSONTime{ Time: updateTime, }, + CustomFields: 
emptyCustomFields, } } type basicTestScenario struct { - input models.Scene - expected *jsonschema.Scene - err bool + input models.Scene + customFields map[string]interface{} + expected *jsonschema.Scene + err bool } var scenarios = []basicTestScenario{ { createFullScene(sceneID), - createFullJSONScene(imageBase64), + emptyCustomFields, + createFullJSONScene(imageBase64, emptyCustomFields), + false, + }, + { + createFullScene(customFieldsID), + customFields, + createFullJSONScene("", customFields), false, }, { createEmptyScene(noImageID), + emptyCustomFields, createEmptyJSONScene(), false, }, { createFullScene(errImageID), - createFullJSONScene(""), + emptyCustomFields, + createFullJSONScene("", emptyCustomFields), // failure to get image should not cause an error false, }, + { + createFullScene(errCustomFieldsID), + customFields, + createFullJSONScene("", customFields), + true, + }, } func TestToJSON(t *testing.T) { @@ -191,8 +218,12 @@ func TestToJSON(t *testing.T) { db.Scene.On("GetCover", testCtx, sceneID).Return(imageBytes, nil).Once() db.Scene.On("GetCover", testCtx, noImageID).Return(nil, nil).Once() db.Scene.On("GetCover", testCtx, errImageID).Return(nil, imageErr).Once() + db.Scene.On("GetCover", testCtx, mock.Anything).Return(nil, nil) db.Scene.On("GetViewDates", testCtx, mock.Anything).Return(nil, nil) db.Scene.On("GetODates", testCtx, mock.Anything).Return(nil, nil) + db.Scene.On("GetCustomFields", testCtx, customFieldsID).Return(customFields, nil).Once() + db.Scene.On("GetCustomFields", testCtx, errCustomFieldsID).Return(nil, errors.New("error getting custom fields")).Once() + db.Scene.On("GetCustomFields", testCtx, mock.Anything).Return(emptyCustomFields, nil) for i, s := range scenarios { scene := s.input @@ -203,6 +234,8 @@ func TestToJSON(t *testing.T) { t.Errorf("[%d] unexpected error: %s", i, err.Error()) case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) + case err != nil: + // error case already handled, no need for 
assertion default: assert.Equal(t, s.expected, json, "[%d]", i) } diff --git a/pkg/scene/generate/sprite.go b/pkg/scene/generate/sprite.go index c3b10f680..e0dea9659 100644 --- a/pkg/scene/generate/sprite.go +++ b/pkg/scene/generate/sprite.go @@ -18,22 +18,19 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -const ( - spriteScreenshotWidth = 160 - - spriteRows = 9 - spriteCols = 9 - spriteChunks = spriteRows * spriteCols -) - -func (g Generator) SpriteScreenshot(ctx context.Context, input string, seconds float64) (image.Image, error) { +func (g Generator) SpriteScreenshot(ctx context.Context, input string, seconds float64, size int, isPortrait bool) (image.Image, error) { lockCtx := g.LockManager.ReadLock(ctx, input) defer lockCtx.Cancel() ssOptions := transcoder.ScreenshotOptions{ OutputPath: "-", OutputType: transcoder.ScreenshotOutputTypeBMP, - Width: spriteScreenshotWidth, + } + + if !isPortrait { + ssOptions.Width = size + } else { + ssOptions.Height = size } args := transcoder.ScreenshotTime(input, seconds, ssOptions) @@ -41,14 +38,14 @@ func (g Generator) SpriteScreenshot(ctx context.Context, input string, seconds f return g.generateImage(lockCtx, args) } -func (g Generator) SpriteScreenshotSlow(ctx context.Context, input string, frame int) (image.Image, error) { +func (g Generator) SpriteScreenshotSlow(ctx context.Context, input string, frame int, width int) (image.Image, error) { lockCtx := g.LockManager.ReadLock(ctx, input) defer lockCtx.Cancel() ssOptions := transcoder.ScreenshotOptions{ OutputPath: "-", OutputType: transcoder.ScreenshotOutputTypeBMP, - Width: spriteScreenshotWidth, + Width: width, } args := transcoder.ScreenshotFrame(input, frame, ssOptions) @@ -74,12 +71,13 @@ func (g Generator) CombineSpriteImages(images []image.Image) image.Image { // Combine all of the thumbnails into a sprite image width := images[0].Bounds().Size().X height := images[0].Bounds().Size().Y - canvasWidth := width * spriteCols - canvasHeight := height * spriteRows 
+ gridSize := GetSpriteGridSize(len(images)) + canvasWidth := width * gridSize + canvasHeight := height * gridSize montage := imaging.New(canvasWidth, canvasHeight, color.NRGBA{}) for index := 0; index < len(images); index++ { - x := width * (index % spriteCols) - y := height * int(math.Floor(float64(index)/float64(spriteRows))) + x := width * (index % gridSize) + y := height * int(math.Floor(float64(index)/float64(gridSize))) img := images[index] montage = imaging.Paste(montage, img, image.Pt(x, y)) } @@ -87,14 +85,19 @@ func (g Generator) CombineSpriteImages(images []image.Image) image.Image { return montage } -func (g Generator) SpriteVTT(ctx context.Context, output string, spritePath string, stepSize float64) error { - lockCtx := g.LockManager.ReadLock(ctx, spritePath) - defer lockCtx.Cancel() - - return g.generateFile(lockCtx, g.ScenePaths, vttPattern, output, g.spriteVTT(spritePath, stepSize)) +// GetSpriteGridSize return the required size of a grid, where the number of images in width +// equals the number of images in height, to hold 'imageCount' images +func GetSpriteGridSize(imageCount int) int { + return int(math.Ceil(math.Sqrt(float64(imageCount)))) } -func (g Generator) spriteVTT(spritePath string, stepSize float64) generateFn { +func (g Generator) SpriteVTT(ctx context.Context, output string, spritePath string, stepSize float64, spriteChunks int) error { + lockCtx := g.LockManager.ReadLock(ctx, spritePath) + defer lockCtx.Cancel() + return g.generateFile(lockCtx, g.ScenePaths, vttPattern, output, g.spriteVTT(spritePath, stepSize, spriteChunks)) +} + +func (g Generator) spriteVTT(spritePath string, stepSize float64, spriteChunks int) generateFn { return func(lockCtx *fsutil.LockContext, tmpFn string) error { spriteImage, err := os.Open(spritePath) if err != nil { @@ -106,16 +109,17 @@ func (g Generator) spriteVTT(spritePath string, stepSize float64) generateFn { if err != nil { return err } - width := image.Width / spriteCols - height := image.Height / 
spriteRows + + gridSize := GetSpriteGridSize(spriteChunks) + width := image.Width / gridSize + height := image.Height / gridSize vttLines := []string{"WEBVTT", ""} for index := 0; index < spriteChunks; index++ { - x := width * (index % spriteCols) - y := height * int(math.Floor(float64(index)/float64(spriteRows))) + x := width * (index % gridSize) + y := height * int(math.Floor(float64(index)/float64(gridSize))) startTime := utils.GetVTTTime(float64(index) * stepSize) endTime := utils.GetVTTTime(float64(index+1) * stepSize) - vttLines = append(vttLines, startTime+" --> "+endTime) vttLines = append(vttLines, fmt.Sprintf("%s#xywh=%d,%d,%d,%d", spriteImageName, x, y, width, height)) vttLines = append(vttLines, "") diff --git a/pkg/scene/import.go b/pkg/scene/import.go index efffd380d..24dbf1cc0 100644 --- a/pkg/scene/import.go +++ b/pkg/scene/import.go @@ -18,6 +18,7 @@ type ImporterReaderWriter interface { models.SceneCreatorUpdater models.ViewHistoryWriter models.OHistoryWriter + models.CustomFieldsWriter FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) } @@ -35,6 +36,7 @@ type Importer struct { ID int scene models.Scene + customFields map[string]interface{} coverImageData []byte viewHistory []time.Time oHistory []time.Time @@ -75,6 +77,8 @@ func (i *Importer) PreImport(ctx context.Context) error { } } + i.customFields = i.Input.CustomFields + i.populateViewHistory() i.populateOHistory() @@ -213,7 +217,7 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.StudioWriter.Create(ctx, &newStudio) @@ -449,6 +453,14 @@ func (i *Importer) PostImport(ctx context.Context, id int) error { return err } + if len(i.customFields) > 0 { + if err := i.ReaderWriter.SetCustomFields(ctx, id, models.CustomFieldsInput{ + Full: i.customFields, + }); err 
!= nil { + return fmt.Errorf("error setting scene custom fields: %v", err) + } + } + return nil } @@ -549,7 +561,9 @@ func createTags(ctx context.Context, tagWriter models.TagCreator, names []string newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } diff --git a/pkg/scene/import_test.go b/pkg/scene/import_test.go index a6e3edcdf..98924e20d 100644 --- a/pkg/scene/import_test.go +++ b/pkg/scene/import_test.go @@ -241,9 +241,9 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -273,7 +273,7 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -508,9 +508,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, 
mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -542,10 +542,110 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) db.AssertExpectations(t) } + +func TestImporterPostImport(t *testing.T) { + db := mocks.NewDatabase() + + vt := time.Now() + ot := vt.Add(time.Minute) + + var ( + okID = 1 + errViewHistoryID = 2 + errOHistoryID = 3 + errImageID = 4 + errCustomFieldsID = 5 + ) + + var ( + errImage = errors.New("error updating cover image") + errViewHistory = errors.New("error updating view history") + errOHistory = errors.New("error updating o history") + errCustomFields = errors.New("error updating custom fields") + ) + + table := []struct { + name string + importer Importer + err bool + }{ + { + name: "all set successfully", + importer: Importer{ + ID: okID, + coverImageData: []byte(imageBase64), + viewHistory: []time.Time{vt}, + oHistory: []time.Time{ot}, + customFields: customFields, + }, + err: false, + }, + { + name: "cover image set with error", + importer: Importer{ + ID: errImageID, + coverImageData: []byte(invalidImage), + }, + err: true, + }, + { + name: "view history set with error", + importer: Importer{ + ID: errViewHistoryID, + viewHistory: []time.Time{vt}, + }, + err: true, + }, + { + name: "o history set with error", + importer: Importer{ + ID: errOHistoryID, + oHistory: []time.Time{ot}, + }, + err: true, + }, + { + name: "custom fields set with error", + importer: Importer{ + ID: errCustomFieldsID, + customFields: customFields, + }, + 
err: true, + }, + } + + db.Scene.On("UpdateCover", testCtx, okID, []byte(imageBase64)).Return(nil).Once() + db.Scene.On("UpdateCover", testCtx, errImageID, []byte(invalidImage)).Return(errImage).Once() + db.Scene.On("AddViews", testCtx, okID, []time.Time{vt}).Return([]time.Time{vt}, nil).Once() + db.Scene.On("AddViews", testCtx, errViewHistoryID, []time.Time{vt}).Return(nil, errViewHistory).Once() + db.Scene.On("AddO", testCtx, okID, []time.Time{ot}).Return([]time.Time{ot}, nil).Once() + db.Scene.On("AddO", testCtx, errOHistoryID, []time.Time{ot}).Return(nil, errOHistory).Once() + db.Scene.On("SetCustomFields", testCtx, okID, models.CustomFieldsInput{ + Full: customFields, + }).Return(nil).Once() + db.Scene.On("SetCustomFields", testCtx, errCustomFieldsID, models.CustomFieldsInput{ + Full: customFields, + }).Return(errCustomFields).Once() + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + i := tt.importer + i.ReaderWriter = db.Scene + + err := i.PostImport(testCtx, i.ID) + + if tt.err { + assert.NotNil(t, err, "expected error but got nil") + } else { + assert.Nil(t, err, "unexpected error: %v", err) + } + }) + } +} diff --git a/pkg/scene/merge.go b/pkg/scene/merge.go index 77b551ab2..b2650ca92 100644 --- a/pkg/scene/merge.go +++ b/pkg/scene/merge.go @@ -120,7 +120,8 @@ func (s *Service) Merge(ctx context.Context, sourceIDs []int, destinationID int, for _, src := range sources { const deleteGenerated = true const deleteFile = false - if err := s.Destroy(ctx, src, fileDeleter, deleteGenerated, deleteFile); err != nil { + const destroyFileEntry = false + if err := s.Destroy(ctx, src, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return fmt.Errorf("deleting scene %d: %w", src.ID, err) } } diff --git a/pkg/scraper/action.go b/pkg/scraper/action.go index 74bbca415..cd31fbe72 100644 --- a/pkg/scraper/action.go +++ b/pkg/scraper/action.go @@ -24,9 +24,85 @@ func (e scraperAction) IsValid() bool { return false } -type 
scraperActionImpl interface { +type urlScraperActionImpl interface { scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) +} + +func (c Definition) getURLScraper(def ByURLDefinition, client *http.Client, globalConfig GlobalConfig) urlScraperActionImpl { + switch def.Action { + case scraperActionScript: + return &scriptURLScraper{ + scriptScraper: scriptScraper{ + definition: c, + globalConfig: globalConfig, + }, + definition: def, + } + case scraperActionStash: + return newStashScraper(client, c, globalConfig) + case scraperActionXPath: + return &xpathURLScraper{ + xpathScraper: xpathScraper{ + definition: c, + globalConfig: globalConfig, + client: client, + }, + definition: def, + } + case scraperActionJson: + return &jsonURLScraper{ + jsonScraper: jsonScraper{ + definition: c, + globalConfig: globalConfig, + client: client, + }, + definition: def, + } + } + + panic("unknown scraper action: " + def.Action) +} + +type nameScraperActionImpl interface { scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) +} + +func (c Definition) getNameScraper(def ByNameDefinition, client *http.Client, globalConfig GlobalConfig) nameScraperActionImpl { + switch def.Action { + case scraperActionScript: + return &scriptNameScraper{ + scriptScraper: scriptScraper{ + definition: c, + globalConfig: globalConfig, + }, + definition: def, + } + case scraperActionStash: + return newStashScraper(client, c, globalConfig) + case scraperActionXPath: + return &xpathNameScraper{ + xpathScraper: xpathScraper{ + definition: c, + globalConfig: globalConfig, + client: client, + }, + definition: def, + } + case scraperActionJson: + return &jsonNameScraper{ + jsonScraper: jsonScraper{ + definition: c, + globalConfig: globalConfig, + client: client, + }, + definition: def, + } + } + + panic("unknown scraper action: " + def.Action) +} + +type fragmentScraperActionImpl interface { scrapeByFragment(ctx context.Context, input 
Input) (ScrapedContent, error) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) @@ -34,17 +110,37 @@ type scraperActionImpl interface { scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) } -func (c config) getScraper(scraper scraperTypeConfig, client *http.Client, globalConfig GlobalConfig) scraperActionImpl { - switch scraper.Action { +func (c Definition) getFragmentScraper(actionDef ByFragmentDefinition, client *http.Client, globalConfig GlobalConfig) fragmentScraperActionImpl { + switch actionDef.Action { case scraperActionScript: - return newScriptScraper(scraper, c, globalConfig) + return &scriptFragmentScraper{ + scriptScraper: scriptScraper{ + definition: c, + globalConfig: globalConfig, + }, + definition: actionDef, + } case scraperActionStash: - return newStashScraper(scraper, client, c, globalConfig) + return newStashScraper(client, c, globalConfig) case scraperActionXPath: - return newXpathScraper(scraper, client, c, globalConfig) + return &xpathFragmentScraper{ + xpathScraper: xpathScraper{ + definition: c, + globalConfig: globalConfig, + client: client, + }, + definition: actionDef, + } case scraperActionJson: - return newJsonScraper(scraper, client, c, globalConfig) + return &jsonFragmentScraper{ + jsonScraper: jsonScraper{ + definition: c, + globalConfig: globalConfig, + client: client, + }, + definition: actionDef, + } } - panic("unknown scraper action: " + scraper.Action) + panic("unknown scraper action: " + actionDef.Action) } diff --git a/pkg/scraper/cache.go b/pkg/scraper/cache.go index 5cc51ac54..6aeb95fcf 100644 --- a/pkg/scraper/cache.go +++ b/pkg/scraper/cache.go @@ -182,7 +182,7 @@ func (c *Cache) ReloadScrapers() { if err != nil { logger.Errorf("Error loading scraper %s: %v", fp, err) } else { - scraper := newGroupScraper(*conf, c.globalConfig) + scraper := scraperFromDefinition(*conf, c.globalConfig) scrapers[scraper.spec().ID] = scraper } } diff --git 
a/pkg/scraper/cookies.go b/pkg/scraper/cookies.go index 0a2877b7b..c76dae037 100644 --- a/pkg/scraper/cookies.go +++ b/pkg/scraper/cookies.go @@ -18,7 +18,7 @@ import ( ) // jar constructs a cookie jar from a configuration -func (c config) jar() (*cookiejar.Jar, error) { +func (c Definition) jar() (*cookiejar.Jar, error) { opts := c.DriverOptions jar, err := cookiejar.New(&cookiejar.Options{ PublicSuffixList: publicsuffix.List, @@ -77,7 +77,7 @@ func randomSequence(n int) string { } // printCookies prints all cookies from the given cookie jar -func printCookies(jar *cookiejar.Jar, scraperConfig config, msg string) { +func printCookies(jar *cookiejar.Jar, scraperConfig Definition, msg string) { driverOptions := scraperConfig.DriverOptions if driverOptions != nil && !driverOptions.UseCDP { var foundURLs []*url.URL diff --git a/pkg/scraper/group.go b/pkg/scraper/defined_scraper.go similarity index 56% rename from pkg/scraper/group.go rename to pkg/scraper/defined_scraper.go index 43fd2a37b..0287101d0 100644 --- a/pkg/scraper/group.go +++ b/pkg/scraper/defined_scraper.go @@ -8,25 +8,26 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type group struct { - config config +// definedScraper implements the scraper interface using a Definition object. +type definedScraper struct { + config Definition globalConf GlobalConfig } -func newGroupScraper(c config, globalConfig GlobalConfig) scraper { - return group{ +func scraperFromDefinition(c Definition, globalConfig GlobalConfig) definedScraper { + return definedScraper{ config: c, globalConf: globalConfig, } } -func (g group) spec() Scraper { +func (g definedScraper) spec() Scraper { return g.config.spec() } // fragmentScraper finds an appropriate fragment scraper based on input. 
-func (g group) fragmentScraper(input Input) *scraperTypeConfig { +func (g definedScraper) fragmentScraper(input Input) *ByFragmentDefinition { switch { case input.Performer != nil: return g.config.PerformerByFragment @@ -43,7 +44,7 @@ func (g group) fragmentScraper(input Input) *scraperTypeConfig { return nil } -func (g group) viaFragment(ctx context.Context, client *http.Client, input Input) (ScrapedContent, error) { +func (g definedScraper) viaFragment(ctx context.Context, client *http.Client, input Input) (ScrapedContent, error) { stc := g.fragmentScraper(input) if stc == nil { // If there's no performer fragment scraper in the group, we try to use @@ -56,38 +57,38 @@ func (g group) viaFragment(ctx context.Context, client *http.Client, input Input return nil, ErrNotSupported } - s := g.config.getScraper(*stc, client, g.globalConf) + s := g.config.getFragmentScraper(*stc, client, g.globalConf) return s.scrapeByFragment(ctx, input) } -func (g group) viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*models.ScrapedScene, error) { +func (g definedScraper) viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*models.ScrapedScene, error) { if g.config.SceneByFragment == nil { return nil, ErrNotSupported } - s := g.config.getScraper(*g.config.SceneByFragment, client, g.globalConf) + s := g.config.getFragmentScraper(*g.config.SceneByFragment, client, g.globalConf) return s.scrapeSceneByScene(ctx, scene) } -func (g group) viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error) { +func (g definedScraper) viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error) { if g.config.GalleryByFragment == nil { return nil, ErrNotSupported } - s := g.config.getScraper(*g.config.GalleryByFragment, client, g.globalConf) + s := g.config.getFragmentScraper(*g.config.GalleryByFragment, client, g.globalConf) return 
s.scrapeGalleryByGallery(ctx, gallery) } -func (g group) viaImage(ctx context.Context, client *http.Client, gallery *models.Image) (*models.ScrapedImage, error) { +func (g definedScraper) viaImage(ctx context.Context, client *http.Client, gallery *models.Image) (*models.ScrapedImage, error) { if g.config.ImageByFragment == nil { return nil, ErrNotSupported } - s := g.config.getScraper(*g.config.ImageByFragment, client, g.globalConf) + s := g.config.getFragmentScraper(*g.config.ImageByFragment, client, g.globalConf) return s.scrapeImageByImage(ctx, gallery) } -func loadUrlCandidates(c config, ty ScrapeContentType) []*scrapeByURLConfig { +func loadUrlCandidates(c Definition, ty ScrapeContentType) []*ByURLDefinition { switch ty { case ScrapeContentTypePerformer: return c.PerformerByURL @@ -104,12 +105,13 @@ func loadUrlCandidates(c config, ty ScrapeContentType) []*scrapeByURLConfig { panic("loadUrlCandidates: unreachable") } -func (g group) viaURL(ctx context.Context, client *http.Client, url string, ty ScrapeContentType) (ScrapedContent, error) { +func (g definedScraper) viaURL(ctx context.Context, client *http.Client, url string, ty ScrapeContentType) (ScrapedContent, error) { candidates := loadUrlCandidates(g.config, ty) for _, scraper := range candidates { if scraper.matchesURL(url) { - s := g.config.getScraper(scraper.scraperTypeConfig, client, g.globalConf) - ret, err := s.scrapeByURL(ctx, url, ty) + u := replaceURL(url, *scraper) // allow a URL Replace for url-queries + s := g.config.getURLScraper(*scraper, client, g.globalConf) + ret, err := s.scrapeByURL(ctx, u, ty) if err != nil { return nil, err } @@ -123,31 +125,31 @@ func (g group) viaURL(ctx context.Context, client *http.Client, url string, ty S return nil, nil } -func (g group) viaName(ctx context.Context, client *http.Client, name string, ty ScrapeContentType) ([]ScrapedContent, error) { +func (g definedScraper) viaName(ctx context.Context, client *http.Client, name string, ty ScrapeContentType) 
([]ScrapedContent, error) { switch ty { case ScrapeContentTypePerformer: if g.config.PerformerByName == nil { break } - s := g.config.getScraper(*g.config.PerformerByName, client, g.globalConf) + s := g.config.getNameScraper(*g.config.PerformerByName, client, g.globalConf) return s.scrapeByName(ctx, name, ty) case ScrapeContentTypeScene: if g.config.SceneByName == nil { break } - s := g.config.getScraper(*g.config.SceneByName, client, g.globalConf) + s := g.config.getNameScraper(*g.config.SceneByName, client, g.globalConf) return s.scrapeByName(ctx, name, ty) } return nil, fmt.Errorf("%w: cannot load %v by name", ErrNotSupported, ty) } -func (g group) supports(ty ScrapeContentType) bool { +func (g definedScraper) supports(ty ScrapeContentType) bool { return g.config.supports(ty) } -func (g group) supportsURL(url string, ty ScrapeContentType) bool { +func (g definedScraper) supportsURL(url string, ty ScrapeContentType) bool { return g.config.matchesURL(url, ty) } diff --git a/pkg/scraper/config.go b/pkg/scraper/definition.go similarity index 80% rename from pkg/scraper/config.go rename to pkg/scraper/definition.go index 5775dc97c..03ba4d75b 100644 --- a/pkg/scraper/config.go +++ b/pkg/scraper/definition.go @@ -11,7 +11,8 @@ import ( "gopkg.in/yaml.v2" ) -type config struct { +// Definition represents a scraper definition (typically) loaded from a YAML configuration file. 
+type Definition struct { ID string path string @@ -19,43 +20,43 @@ type config struct { Name string `yaml:"name"` // Configuration for querying performers by name - PerformerByName *scraperTypeConfig `yaml:"performerByName"` + PerformerByName *ByNameDefinition `yaml:"performerByName"` // Configuration for querying performers by a Performer fragment - PerformerByFragment *scraperTypeConfig `yaml:"performerByFragment"` + PerformerByFragment *ByFragmentDefinition `yaml:"performerByFragment"` // Configuration for querying a performer by a URL - PerformerByURL []*scrapeByURLConfig `yaml:"performerByURL"` + PerformerByURL []*ByURLDefinition `yaml:"performerByURL"` // Configuration for querying scenes by a Scene fragment - SceneByFragment *scraperTypeConfig `yaml:"sceneByFragment"` + SceneByFragment *ByFragmentDefinition `yaml:"sceneByFragment"` // Configuration for querying gallery by a Gallery fragment - GalleryByFragment *scraperTypeConfig `yaml:"galleryByFragment"` + GalleryByFragment *ByFragmentDefinition `yaml:"galleryByFragment"` // Configuration for querying scenes by name - SceneByName *scraperTypeConfig `yaml:"sceneByName"` + SceneByName *ByNameDefinition `yaml:"sceneByName"` // Configuration for querying scenes by query fragment - SceneByQueryFragment *scraperTypeConfig `yaml:"sceneByQueryFragment"` + SceneByQueryFragment *ByFragmentDefinition `yaml:"sceneByQueryFragment"` // Configuration for querying a scene by a URL - SceneByURL []*scrapeByURLConfig `yaml:"sceneByURL"` + SceneByURL []*ByURLDefinition `yaml:"sceneByURL"` // Configuration for querying a gallery by a URL - GalleryByURL []*scrapeByURLConfig `yaml:"galleryByURL"` + GalleryByURL []*ByURLDefinition `yaml:"galleryByURL"` // Configuration for querying an image by a URL - ImageByURL []*scrapeByURLConfig `yaml:"imageByURL"` + ImageByURL []*ByURLDefinition `yaml:"imageByURL"` // Configuration for querying image by an Image fragment - ImageByFragment *scraperTypeConfig `yaml:"imageByFragment"` + 
ImageByFragment *ByFragmentDefinition `yaml:"imageByFragment"` // Configuration for querying a movie by a URL - deprecated, use GroupByURL - MovieByURL []*scrapeByURLConfig `yaml:"movieByURL"` + MovieByURL []*ByURLDefinition `yaml:"movieByURL"` // Configuration for querying a group by a URL - GroupByURL []*scrapeByURLConfig `yaml:"groupByURL"` + GroupByURL []*ByURLDefinition `yaml:"groupByURL"` // Scraper debugging options DebugOptions *scraperDebugOptions `yaml:"debug"` @@ -73,7 +74,7 @@ type config struct { DriverOptions *scraperDriverOptions `yaml:"driver"` } -func (c config) validate() error { +func (c Definition) validate() error { if strings.TrimSpace(c.Name) == "" { return errors.New("name must not be empty") } @@ -126,17 +127,13 @@ type stashServer struct { ApiKey string `yaml:"apiKey"` } -type scraperTypeConfig struct { +type ActionDefinition struct { Action scraperAction `yaml:"action"` Script []string `yaml:"script,flow"` Scraper string `yaml:"scraper"` - - // for xpath name scraper only - QueryURL string `yaml:"queryURL"` - QueryURLReplacements queryURLReplacements `yaml:"queryURLReplace"` } -func (c scraperTypeConfig) validate() error { +func (c ActionDefinition) validate() error { if !c.Action.IsValid() { return fmt.Errorf("%s is not a valid scraper action", c.Action) } @@ -148,20 +145,22 @@ func (c scraperTypeConfig) validate() error { return nil } -type scrapeByURLConfig struct { - scraperTypeConfig `yaml:",inline"` - URL []string `yaml:"url,flow"` +type ByURLDefinition struct { + ActionDefinition `yaml:",inline"` + URL []string `yaml:"url,flow"` + QueryURL string `yaml:"queryURL"` + QueryURLReplacements queryURLReplacements `yaml:"queryURLReplace"` } -func (c scrapeByURLConfig) validate() error { +func (c ByURLDefinition) validate() error { if len(c.URL) == 0 { return errors.New("url is mandatory for scrape by url scrapers") } - return c.scraperTypeConfig.validate() + return c.ActionDefinition.validate() } -func (c scrapeByURLConfig) matchesURL(url 
string) bool { +func (c ByURLDefinition) matchesURL(url string) bool { for _, thisURL := range c.URL { if strings.Contains(url, thisURL) { return true @@ -171,6 +170,18 @@ func (c scrapeByURLConfig) matchesURL(url string) bool { return false } +type ByFragmentDefinition struct { + ActionDefinition `yaml:",inline"` + + QueryURL string `yaml:"queryURL"` + QueryURLReplacements queryURLReplacements `yaml:"queryURLReplace"` +} + +type ByNameDefinition struct { + ActionDefinition `yaml:",inline"` + QueryURL string `yaml:"queryURL"` +} + type scraperDebugOptions struct { PrintHTML bool `yaml:"printHTML"` } @@ -206,8 +217,8 @@ type scraperDriverOptions struct { Headers []*header `yaml:"headers"` } -func loadConfigFromYAML(id string, reader io.Reader) (*config, error) { - ret := &config{} +func loadConfigFromYAML(id string, reader io.Reader) (*Definition, error) { + ret := &Definition{} parser := yaml.NewDecoder(reader) parser.SetStrict(true) @@ -225,7 +236,7 @@ func loadConfigFromYAML(id string, reader io.Reader) (*config, error) { return ret, nil } -func loadConfigFromYAMLFile(path string) (*config, error) { +func loadConfigFromYAMLFile(path string) (*Definition, error) { file, err := os.Open(path) if err != nil { return nil, err @@ -246,7 +257,7 @@ func loadConfigFromYAMLFile(path string) (*config, error) { return ret, nil } -func (c config) spec() Scraper { +func (c Definition) spec() Scraper { ret := Scraper{ ID: c.ID, Name: c.Name, @@ -334,7 +345,7 @@ func (c config) spec() Scraper { return ret } -func (c config) supports(ty ScrapeContentType) bool { +func (c Definition) supports(ty ScrapeContentType) bool { switch ty { case ScrapeContentTypePerformer: return c.PerformerByName != nil || c.PerformerByFragment != nil || len(c.PerformerByURL) > 0 @@ -351,7 +362,7 @@ func (c config) supports(ty ScrapeContentType) bool { panic("Unhandled ScrapeContentType") } -func (c config) matchesURL(url string, ty ScrapeContentType) bool { +func (c Definition) matchesURL(url string, ty 
ScrapeContentType) bool { switch ty { case ScrapeContentTypePerformer: for _, scraper := range c.PerformerByURL { diff --git a/pkg/scraper/freeones.go b/pkg/scraper/freeones.go index 96caf2fec..e78488b24 100644 --- a/pkg/scraper/freeones.go +++ b/pkg/scraper/freeones.go @@ -139,5 +139,5 @@ func getFreeonesScraper(globalConfig GlobalConfig) scraper { logger.Fatalf("Error loading builtin freeones scraper: %s", err.Error()) } - return newGroupScraper(*c, globalConfig) + return scraperFromDefinition(*c, globalConfig) } diff --git a/pkg/scraper/json.go b/pkg/scraper/json.go index 9f479f1c2..1dcb887da 100644 --- a/pkg/scraper/json.go +++ b/pkg/scraper/json.go @@ -15,43 +15,22 @@ import ( ) type jsonScraper struct { - scraper scraperTypeConfig - config config + definition Definition globalConfig GlobalConfig client *http.Client } -func newJsonScraper(scraper scraperTypeConfig, client *http.Client, config config, globalConfig GlobalConfig) *jsonScraper { - return &jsonScraper{ - scraper: scraper, - config: config, - client: client, - globalConfig: globalConfig, - } -} - -func (s *jsonScraper) getJsonScraper() *mappedScraper { - return s.config.JsonScrapers[s.scraper.Scraper] -} - -func (s *jsonScraper) scrapeURL(ctx context.Context, url string) (string, *mappedScraper, error) { - scraper := s.getJsonScraper() - - if scraper == nil { - return "", nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") +func (s *jsonScraper) getJsonScraper(name string) (*mappedScraper, error) { + ret, ok := s.definition.JsonScrapers[name] + if !ok { + return nil, fmt.Errorf("json scraper with name %s not found in config", name) } - doc, err := s.loadURL(ctx, url) - - if err != nil { - return "", nil, err - } - - return doc, scraper, nil + return &ret, nil } func (s *jsonScraper) loadURL(ctx context.Context, url string) (string, error) { - r, err := loadURL(ctx, url, s.client, s.config, s.globalConfig) + r, err := loadURL(ctx, url, s.client, s.definition, 
s.globalConfig) if err != nil { return "", err } @@ -66,21 +45,30 @@ func (s *jsonScraper) loadURL(ctx context.Context, url string) (string, error) { return "", errors.New("not valid json") } - if s.config.DebugOptions != nil && s.config.DebugOptions.PrintHTML { + if s.definition.DebugOptions != nil && s.definition.DebugOptions.PrintHTML { logger.Infof("loadURL (%s) response: \n%s", url, docStr) } return docStr, err } -func (s *jsonScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) { - u := replaceURL(url, s.scraper) // allow a URL Replace for url-queries - doc, scraper, err := s.scrapeURL(ctx, u) +type jsonURLScraper struct { + jsonScraper + definition ByURLDefinition +} + +func (s *jsonURLScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) { + scraper, err := s.getJsonScraper(s.definition.Scraper) if err != nil { return nil, err } - q := s.getJsonQuery(doc, u) + doc, err := s.loadURL(ctx, url) + if err != nil { + return nil, err + } + + q := s.getJsonQuery(doc, url) // if these just return the return values from scraper.scrape* functions then // it ends up returning ScrapedContent(nil) rather than nil switch ty { @@ -119,11 +107,15 @@ func (s *jsonScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeCont return nil, ErrNotSupported } -func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) { - scraper := s.getJsonScraper() +type jsonNameScraper struct { + jsonScraper + definition ByNameDefinition +} - if scraper == nil { - return nil, fmt.Errorf("%w: name %v", ErrNotFound, s.scraper.Scraper) +func (s *jsonNameScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) { + scraper, err := s.getJsonScraper(s.definition.Scraper) + if err != nil { + return nil, err } const placeholder = "{}" @@ -131,7 +123,7 @@ func (s *jsonScraper) scrapeByName(ctx 
context.Context, name string, ty ScrapeCo // replace the placeholder string with the URL-escaped name escapedName := url.QueryEscape(name) - url := s.scraper.QueryURL + url := s.definition.QueryURL url = strings.ReplaceAll(url, placeholder, escapedName) doc, err := s.loadURL(ctx, url) @@ -172,18 +164,22 @@ func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty ScrapeCo return nil, ErrNotSupported } -func (s *jsonScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) { +type jsonFragmentScraper struct { + jsonScraper + definition ByFragmentDefinition +} + +func (s *jsonFragmentScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) { // construct the URL queryURL := queryURLParametersFromScene(scene) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getJsonScraper() - - if scraper == nil { - return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getJsonScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -196,7 +192,7 @@ func (s *jsonScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scen return scraper.scrapeScene(ctx, q) } -func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { +func (s *jsonFragmentScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { switch { case input.Gallery != nil: return nil, fmt.Errorf("%w: cannot use a json scraper as a gallery fragment scraper", ErrNotSupported) @@ -210,15 +206,14 @@ func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) 
(Scrape // construct the URL queryURL := queryURLParametersFromScrapedScene(scene) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getJsonScraper() - - if scraper == nil { - return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getJsonScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -231,18 +226,17 @@ func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) (Scrape return scraper.scrapeScene(ctx, q) } -func (s *jsonScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) { +func (s *jsonFragmentScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) { // construct the URL queryURL := queryURLParametersFromImage(image) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getJsonScraper() - - if scraper == nil { - return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getJsonScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -255,18 +249,17 @@ func (s *jsonScraper) scrapeImageByImage(ctx context.Context, image *models.Imag return scraper.scrapeImage(ctx, q) } -func (s *jsonScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, 
error) { +func (s *jsonFragmentScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) { // construct the URL queryURL := queryURLParametersFromGallery(gallery) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getJsonScraper() - - if scraper == nil { - return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getJsonScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) diff --git a/pkg/scraper/json_test.go b/pkg/scraper/json_test.go index 249f17ad6..285c15489 100644 --- a/pkg/scraper/json_test.go +++ b/pkg/scraper/json_test.go @@ -68,7 +68,7 @@ jsonScrapers: } ` - c := &config{} + c := &Definition{} err := yaml.Unmarshal([]byte(yamlStr), &c) if err != nil { diff --git a/pkg/scraper/mapped.go b/pkg/scraper/mapped.go index 3fac22ec3..d92415c61 100644 --- a/pkg/scraper/mapped.go +++ b/pkg/scraper/mapped.go @@ -2,22 +2,9 @@ package scraper import ( "context" - "errors" - "fmt" - "math" - "net/url" - "reflect" - "regexp" - "strconv" - "strings" - "time" - "gopkg.in/yaml.v2" - - "github.com/stashapp/stash/pkg/javascript" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sliceutil" ) type mappedQuery interface { @@ -28,850 +15,7 @@ type mappedQuery interface { getURL() string } -type commonMappedConfig map[string]string - -type mappedConfig map[string]mappedScraperAttrConfig - -func (s mappedConfig) applyCommon(c commonMappedConfig, src string) string { - if c == nil { - return src - } - - ret := src - for commonKey, commonVal := range c { - ret = strings.ReplaceAll(ret, 
commonKey, commonVal) - } - - return ret -} - -// extractHostname parses a URL string and returns the hostname. -// Returns empty string if the URL cannot be parsed. -func extractHostname(urlStr string) string { - if urlStr == "" { - return "" - } - - u, err := url.Parse(urlStr) - if err != nil { - logger.Warnf("Error parsing URL '%s': %s", urlStr, err.Error()) - return "" - } - - return u.Hostname() -} - -type isMultiFunc func(key string) bool - -func (s mappedConfig) process(ctx context.Context, q mappedQuery, common commonMappedConfig, isMulti isMultiFunc) mappedResults { - var ret mappedResults - - for k, attrConfig := range s { - - if attrConfig.Fixed != "" { - // TODO - not sure if this needs to set _all_ indexes for the key - const i = 0 - // Support {inputURL} and {inputHostname} placeholders in fixed values - value := strings.ReplaceAll(attrConfig.Fixed, "{inputURL}", q.getURL()) - value = strings.ReplaceAll(value, "{inputHostname}", extractHostname(q.getURL())) - ret = ret.setSingleValue(i, k, value) - } else { - selector := attrConfig.Selector - selector = s.applyCommon(common, selector) - // Support {inputURL} and {inputHostname} placeholders in selectors - selector = strings.ReplaceAll(selector, "{inputURL}", q.getURL()) - selector = strings.ReplaceAll(selector, "{inputHostname}", extractHostname(q.getURL())) - - found, err := q.runQuery(selector) - if err != nil { - logger.Warnf("key '%v': %v", k, err) - } - - if len(found) > 0 { - result := s.postProcess(ctx, q, attrConfig, found) - - // HACK - if the key is URLs, then we need to set the value as a multi-value - isMulti := isMulti != nil && isMulti(k) - if isMulti { - ret = ret.setMultiValue(0, k, result) - } else { - for i, text := range result { - ret = ret.setSingleValue(i, k, text) - } - } - } - } - } - - return ret -} - -func (s mappedConfig) postProcess(ctx context.Context, q mappedQuery, attrConfig mappedScraperAttrConfig, found []string) []string { - // check if we're concatenating the 
results into a single result - var ret []string - if attrConfig.hasConcat() { - result := attrConfig.concatenateResults(found) - result = attrConfig.postProcess(ctx, result, q) - if attrConfig.hasSplit() { - results := attrConfig.splitString(result) - // skip cleaning when the query is used for searching - if q.getType() == SearchQuery { - return results - } - results = attrConfig.cleanResults(results) - return results - } - - ret = []string{result} - } else { - for _, text := range found { - text = attrConfig.postProcess(ctx, text, q) - if attrConfig.hasSplit() { - return attrConfig.splitString(text) - } - - ret = append(ret, text) - } - // skip cleaning when the query is used for searching - if q.getType() == SearchQuery { - return ret - } - ret = attrConfig.cleanResults(ret) - - } - - return ret -} - -type mappedSceneScraperConfig struct { - mappedConfig - - Tags mappedConfig `yaml:"Tags"` - Performers mappedPerformerScraperConfig `yaml:"Performers"` - Studio mappedConfig `yaml:"Studio"` - Movies mappedConfig `yaml:"Movies"` - Groups mappedConfig `yaml:"Groups"` -} -type _mappedSceneScraperConfig mappedSceneScraperConfig - -const ( - mappedScraperConfigSceneTags = "Tags" - mappedScraperConfigScenePerformers = "Performers" - mappedScraperConfigSceneStudio = "Studio" - mappedScraperConfigSceneMovies = "Movies" - mappedScraperConfigSceneGroups = "Groups" -) - -func (s *mappedSceneScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - // HACK - unmarshal to map first, then remove known scene sub-fields, then - // remarshal to yaml and pass that down to the base map - parentMap := make(map[string]interface{}) - if err := unmarshal(parentMap); err != nil { - return err - } - - // move the known sub-fields to a separate map - thisMap := make(map[string]interface{}) - - thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags] - thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers] - 
thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio] - thisMap[mappedScraperConfigSceneMovies] = parentMap[mappedScraperConfigSceneMovies] - thisMap[mappedScraperConfigSceneGroups] = parentMap[mappedScraperConfigSceneGroups] - - delete(parentMap, mappedScraperConfigSceneTags) - delete(parentMap, mappedScraperConfigScenePerformers) - delete(parentMap, mappedScraperConfigSceneStudio) - delete(parentMap, mappedScraperConfigSceneMovies) - delete(parentMap, mappedScraperConfigSceneGroups) - - // re-unmarshal the sub-fields - yml, err := yaml.Marshal(thisMap) - if err != nil { - return err - } - - // needs to be a different type to prevent infinite recursion - c := _mappedSceneScraperConfig{} - if err := yaml.Unmarshal(yml, &c); err != nil { - return err - } - - *s = mappedSceneScraperConfig(c) - - yml, err = yaml.Marshal(parentMap) - if err != nil { - return err - } - - if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { - return err - } - - return nil -} - -type mappedGalleryScraperConfig struct { - mappedConfig - - Tags mappedConfig `yaml:"Tags"` - Performers mappedConfig `yaml:"Performers"` - Studio mappedConfig `yaml:"Studio"` -} - -type _mappedGalleryScraperConfig mappedGalleryScraperConfig - -func (s *mappedGalleryScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - // HACK - unmarshal to map first, then remove known scene sub-fields, then - // remarshal to yaml and pass that down to the base map - parentMap := make(map[string]interface{}) - if err := unmarshal(parentMap); err != nil { - return err - } - - // move the known sub-fields to a separate map - thisMap := make(map[string]interface{}) - - thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags] - thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers] - thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio] - - delete(parentMap, mappedScraperConfigSceneTags) 
- delete(parentMap, mappedScraperConfigScenePerformers) - delete(parentMap, mappedScraperConfigSceneStudio) - - // re-unmarshal the sub-fields - yml, err := yaml.Marshal(thisMap) - if err != nil { - return err - } - - // needs to be a different type to prevent infinite recursion - c := _mappedGalleryScraperConfig{} - if err := yaml.Unmarshal(yml, &c); err != nil { - return err - } - - *s = mappedGalleryScraperConfig(c) - - yml, err = yaml.Marshal(parentMap) - if err != nil { - return err - } - - if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { - return err - } - - return nil -} - -type mappedImageScraperConfig struct { - mappedConfig - - Tags mappedConfig `yaml:"Tags"` - Performers mappedConfig `yaml:"Performers"` - Studio mappedConfig `yaml:"Studio"` -} -type _mappedImageScraperConfig mappedImageScraperConfig - -func (s *mappedImageScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - // HACK - unmarshal to map first, then remove known scene sub-fields, then - // remarshal to yaml and pass that down to the base map - parentMap := make(map[string]interface{}) - if err := unmarshal(parentMap); err != nil { - return err - } - - // move the known sub-fields to a separate map - thisMap := make(map[string]interface{}) - - thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags] - thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers] - thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio] - - delete(parentMap, mappedScraperConfigSceneTags) - delete(parentMap, mappedScraperConfigScenePerformers) - delete(parentMap, mappedScraperConfigSceneStudio) - - // re-unmarshal the sub-fields - yml, err := yaml.Marshal(thisMap) - if err != nil { - return err - } - - // needs to be a different type to prevent infinite recursion - c := _mappedImageScraperConfig{} - if err := yaml.Unmarshal(yml, &c); err != nil { - return err - } - - *s = mappedImageScraperConfig(c) 
- - yml, err = yaml.Marshal(parentMap) - if err != nil { - return err - } - - if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { - return err - } - - return nil -} - -type mappedPerformerScraperConfig struct { - mappedConfig - - Tags mappedConfig `yaml:"Tags"` -} -type _mappedPerformerScraperConfig mappedPerformerScraperConfig - -const ( - mappedScraperConfigPerformerTags = "Tags" -) - -func (s *mappedPerformerScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - // HACK - unmarshal to map first, then remove known scene sub-fields, then - // remarshal to yaml and pass that down to the base map - parentMap := make(map[string]interface{}) - if err := unmarshal(parentMap); err != nil { - return err - } - - // move the known sub-fields to a separate map - thisMap := make(map[string]interface{}) - - thisMap[mappedScraperConfigPerformerTags] = parentMap[mappedScraperConfigPerformerTags] - - delete(parentMap, mappedScraperConfigPerformerTags) - - // re-unmarshal the sub-fields - yml, err := yaml.Marshal(thisMap) - if err != nil { - return err - } - - // needs to be a different type to prevent infinite recursion - c := _mappedPerformerScraperConfig{} - if err := yaml.Unmarshal(yml, &c); err != nil { - return err - } - - *s = mappedPerformerScraperConfig(c) - - yml, err = yaml.Marshal(parentMap) - if err != nil { - return err - } - - if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { - return err - } - - return nil -} - -type mappedMovieScraperConfig struct { - mappedConfig - - Studio mappedConfig `yaml:"Studio"` - Tags mappedConfig `yaml:"Tags"` -} -type _mappedMovieScraperConfig mappedMovieScraperConfig - -const ( - mappedScraperConfigMovieStudio = "Studio" - mappedScraperConfigMovieTags = "Tags" -) - -func (s *mappedMovieScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - // HACK - unmarshal to map first, then remove known movie sub-fields, then - // remarshal to yaml and pass that down to the base map - 
parentMap := make(map[string]interface{}) - if err := unmarshal(parentMap); err != nil { - return err - } - - // move the known sub-fields to a separate map - thisMap := make(map[string]interface{}) - - thisMap[mappedScraperConfigMovieStudio] = parentMap[mappedScraperConfigMovieStudio] - delete(parentMap, mappedScraperConfigMovieStudio) - - thisMap[mappedScraperConfigMovieTags] = parentMap[mappedScraperConfigMovieTags] - delete(parentMap, mappedScraperConfigMovieTags) - - // re-unmarshal the sub-fields - yml, err := yaml.Marshal(thisMap) - if err != nil { - return err - } - - // needs to be a different type to prevent infinite recursion - c := _mappedMovieScraperConfig{} - if err := yaml.Unmarshal(yml, &c); err != nil { - return err - } - - *s = mappedMovieScraperConfig(c) - - yml, err = yaml.Marshal(parentMap) - if err != nil { - return err - } - - if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { - return err - } - - return nil -} - -type mappedRegexConfig struct { - Regex string `yaml:"regex"` - With string `yaml:"with"` -} - -type mappedRegexConfigs []mappedRegexConfig - -func (c mappedRegexConfig) apply(value string) string { - if c.Regex != "" { - re, err := regexp.Compile(c.Regex) - if err != nil { - logger.Warnf("Error compiling regex '%s': %s", c.Regex, err.Error()) - return value - } - - ret := re.ReplaceAllString(value, c.With) - - // trim leading and trailing whitespace - // this is done to maintain backwards compatibility with existing - // scrapers - ret = strings.TrimSpace(ret) - - logger.Debugf(`Replace: '%s' with '%s'`, c.Regex, c.With) - logger.Debugf("Before: %s", value) - logger.Debugf("After: %s", ret) - return ret - } - - return value -} - -func (c mappedRegexConfigs) apply(value string) string { - // apply regex in order - for _, config := range c { - value = config.apply(value) - } - - return value -} - -type postProcessAction interface { - Apply(ctx context.Context, value string, q mappedQuery) string -} - -type 
postProcessParseDate string - -func (p *postProcessParseDate) Apply(ctx context.Context, value string, q mappedQuery) string { - parseDate := string(*p) - - const internalDateFormat = "2006-01-02" - - valueLower := strings.ToLower(value) - if valueLower == "today" || valueLower == "yesterday" { // handle today, yesterday - dt := time.Now() - if valueLower == "yesterday" { // subtract 1 day from now - dt = dt.AddDate(0, 0, -1) - } - return dt.Format(internalDateFormat) - } - - if parseDate == "" { - return value - } - - if parseDate == "unix" { - // try to parse the date using unix timestamp format - // if it fails, then just fall back to the original value - timeAsInt, err := strconv.ParseInt(value, 10, 64) - if err != nil { - logger.Warnf("Error parsing date string '%s' using unix timestamp format : %s", value, err.Error()) - return value - } - parsedValue := time.Unix(timeAsInt, 0) - - return parsedValue.Format(internalDateFormat) - } - - // try to parse the date using the pattern - // if it fails, then just fall back to the original value - parsedValue, err := time.Parse(parseDate, value) - if err != nil { - logger.Warnf("Error parsing date string '%s' using format '%s': %s", value, parseDate, err.Error()) - return value - } - - // convert it into our date format - return parsedValue.Format(internalDateFormat) -} - -type postProcessSubtractDays bool - -func (p *postProcessSubtractDays) Apply(ctx context.Context, value string, q mappedQuery) string { - const internalDateFormat = "2006-01-02" - - i, err := strconv.Atoi(value) - if err != nil { - logger.Warnf("Error parsing day string %s: %s", value, err) - return value - } - - dt := time.Now() - dt = dt.AddDate(0, 0, -i) - return dt.Format(internalDateFormat) -} - -type postProcessReplace mappedRegexConfigs - -func (c *postProcessReplace) Apply(ctx context.Context, value string, q mappedQuery) string { - replace := mappedRegexConfigs(*c) - return replace.apply(value) -} - -type postProcessSubScraper 
mappedScraperAttrConfig - -func (p *postProcessSubScraper) Apply(ctx context.Context, value string, q mappedQuery) string { - subScrapeConfig := mappedScraperAttrConfig(*p) - - logger.Debugf("Sub-scraping for: %s", value) - ss := q.subScrape(ctx, value) - - if ss != nil { - found, err := ss.runQuery(subScrapeConfig.Selector) - if err != nil { - logger.Warnf("subscrape for '%v': %v", value, err) - } - - if len(found) > 0 { - // check if we're concatenating the results into a single result - var result string - if subScrapeConfig.hasConcat() { - result = subScrapeConfig.concatenateResults(found) - } else { - result = found[0] - } - - result = subScrapeConfig.postProcess(ctx, result, ss) - return result - } - } - - return "" -} - -type postProcessMap map[string]string - -func (p *postProcessMap) Apply(ctx context.Context, value string, q mappedQuery) string { - // return the mapped value if present - m := *p - mapped, ok := m[value] - - if ok { - return mapped - } - - return value -} - -type postProcessFeetToCm bool - -func (p *postProcessFeetToCm) Apply(ctx context.Context, value string, q mappedQuery) string { - const foot_in_cm = 30.48 - const inch_in_cm = 2.54 - - reg := regexp.MustCompile("[0-9]+") - filtered := reg.FindAllString(value, -1) - - var feet float64 - var inches float64 - if len(filtered) > 0 { - feet, _ = strconv.ParseFloat(filtered[0], 64) - } - if len(filtered) > 1 { - inches, _ = strconv.ParseFloat(filtered[1], 64) - } - - var centimeters = feet*foot_in_cm + inches*inch_in_cm - - // Return rounded integer string - return strconv.Itoa(int(math.Round(centimeters))) -} - -type postProcessLbToKg bool - -func (p *postProcessLbToKg) Apply(ctx context.Context, value string, q mappedQuery) string { - const lb_in_kg = 0.45359237 - w, err := strconv.ParseFloat(value, 64) - if err == nil { - w *= lb_in_kg - value = strconv.Itoa(int(math.Round(w))) - } - return value -} - -type postProcessJavascript string - -func (p *postProcessJavascript) Apply(ctx 
context.Context, value string, q mappedQuery) string { - vm := javascript.NewVM() - if err := vm.Set("value", value); err != nil { - logger.Warnf("javascript failed to set value: %v", err) - return value - } - - log := &javascript.Log{ - Logger: logger.Logger, - Prefix: "", - ProgressChan: make(chan float64), - } - - if err := log.AddToVM("log", vm); err != nil { - logger.Logger.Errorf("error adding log API: %w", err) - } - - util := &javascript.Util{} - if err := util.AddToVM("util", vm); err != nil { - logger.Logger.Errorf("error adding util API: %w", err) - } - - script, err := javascript.CompileScript("", "(function() { "+string(*p)+"})()") - if err != nil { - logger.Warnf("javascript failed to compile: %v", err) - return value - } - - output, err := vm.RunProgram(script) - if err != nil { - logger.Warnf("javascript failed to run: %v", err) - return value - } - - // assume output is string - return output.String() -} - -type mappedPostProcessAction struct { - ParseDate string `yaml:"parseDate"` - SubtractDays bool `yaml:"subtractDays"` - Replace mappedRegexConfigs `yaml:"replace"` - SubScraper *mappedScraperAttrConfig `yaml:"subScraper"` - Map map[string]string `yaml:"map"` - FeetToCm bool `yaml:"feetToCm"` - LbToKg bool `yaml:"lbToKg"` - Javascript string `yaml:"javascript"` -} - -func (a mappedPostProcessAction) ToPostProcessAction() (postProcessAction, error) { - var found string - var ret postProcessAction - - ensureOnly := func(field string) error { - if found != "" { - return fmt.Errorf("post-process actions must have a single field, found %s and %s", found, field) - } - found = field - return nil - } - - if a.ParseDate != "" { - found = "parseDate" - action := postProcessParseDate(a.ParseDate) - ret = &action - } - if len(a.Replace) > 0 { - if err := ensureOnly("replace"); err != nil { - return nil, err - } - action := postProcessReplace(a.Replace) - ret = &action - } - if a.SubScraper != nil { - if err := ensureOnly("subScraper"); err != nil { - return 
nil, err - } - action := postProcessSubScraper(*a.SubScraper) - ret = &action - } - if a.Map != nil { - if err := ensureOnly("map"); err != nil { - return nil, err - } - action := postProcessMap(a.Map) - ret = &action - } - if a.FeetToCm { - if err := ensureOnly("feetToCm"); err != nil { - return nil, err - } - action := postProcessFeetToCm(a.FeetToCm) - ret = &action - } - if a.LbToKg { - if err := ensureOnly("lbToKg"); err != nil { - return nil, err - } - action := postProcessLbToKg(a.LbToKg) - ret = &action - } - if a.SubtractDays { - if err := ensureOnly("subtractDays"); err != nil { - return nil, err - } - action := postProcessSubtractDays(a.SubtractDays) - ret = &action - } - if a.Javascript != "" { - if err := ensureOnly("javascript"); err != nil { - return nil, err - } - action := postProcessJavascript(a.Javascript) - ret = &action - } - - if ret == nil { - return nil, errors.New("invalid post-process action") - } - - return ret, nil -} - -type mappedScraperAttrConfig struct { - Selector string `yaml:"selector"` - Fixed string `yaml:"fixed"` - PostProcess []mappedPostProcessAction `yaml:"postProcess"` - Concat string `yaml:"concat"` - Split string `yaml:"split"` - - postProcessActions []postProcessAction - - // Deprecated: use PostProcess instead - ParseDate string `yaml:"parseDate"` - Replace mappedRegexConfigs `yaml:"replace"` - SubScraper *mappedScraperAttrConfig `yaml:"subScraper"` -} - -type _mappedScraperAttrConfig mappedScraperAttrConfig - -func (c *mappedScraperAttrConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - // try unmarshalling into a string first - if err := unmarshal(&c.Selector); err != nil { - // if it's a type error then we try to unmarshall to the full object - var typeErr *yaml.TypeError - if !errors.As(err, &typeErr) { - return err - } - - // unmarshall to full object - // need it as a separate object - t := _mappedScraperAttrConfig{} - if err = unmarshal(&t); err != nil { - return err - } - - *c = 
mappedScraperAttrConfig(t) - } - - return c.convertPostProcessActions() -} - -func (c *mappedScraperAttrConfig) convertPostProcessActions() error { - // ensure we don't have the old deprecated fields and the new post process field - if len(c.PostProcess) > 0 { - if c.ParseDate != "" || len(c.Replace) > 0 || c.SubScraper != nil { - return errors.New("cannot include postProcess and (parseDate, replace, subScraper) deprecated fields") - } - - // convert xpathPostProcessAction actions to postProcessActions - for _, a := range c.PostProcess { - action, err := a.ToPostProcessAction() - if err != nil { - return err - } - c.postProcessActions = append(c.postProcessActions, action) - } - - c.PostProcess = nil - } else { - // convert old deprecated fields if present - // in same order as they used to be executed - if len(c.Replace) > 0 { - action := postProcessReplace(c.Replace) - c.postProcessActions = append(c.postProcessActions, &action) - c.Replace = nil - } - - if c.SubScraper != nil { - action := postProcessSubScraper(*c.SubScraper) - c.postProcessActions = append(c.postProcessActions, &action) - c.SubScraper = nil - } - - if c.ParseDate != "" { - action := postProcessParseDate(c.ParseDate) - c.postProcessActions = append(c.postProcessActions, &action) - c.ParseDate = "" - } - } - - return nil -} - -func (c mappedScraperAttrConfig) hasConcat() bool { - return c.Concat != "" -} - -func (c mappedScraperAttrConfig) hasSplit() bool { - return c.Split != "" -} - -func (c mappedScraperAttrConfig) concatenateResults(nodes []string) string { - separator := c.Concat - return strings.Join(nodes, separator) -} - -func (c mappedScraperAttrConfig) cleanResults(nodes []string) []string { - cleaned := sliceutil.Unique(nodes) // remove duplicate values - cleaned = sliceutil.Delete(cleaned, "") // remove empty values - return cleaned -} - -func (c mappedScraperAttrConfig) splitString(value string) []string { - separator := c.Split - var res []string - - if separator == "" { - return 
[]string{value} - } - - for _, str := range strings.Split(value, separator) { - if str != "" { - res = append(res, str) - } - } - - return res -} - -func (c mappedScraperAttrConfig) postProcess(ctx context.Context, value string, q mappedQuery) string { - for _, action := range c.postProcessActions { - value = action.Apply(ctx, value, q) - } - - return value -} - -type mappedScrapers map[string]*mappedScraper +type mappedScrapers map[string]mappedScraper type mappedScraper struct { Common commonMappedConfig `yaml:"common"` @@ -885,102 +29,12 @@ type mappedScraper struct { Movie *mappedMovieScraperConfig `yaml:"movie"` } -type mappedResult map[string]interface{} -type mappedResults []mappedResult - -func (r mappedResult) apply(dest interface{}) { - destVal := reflect.ValueOf(dest).Elem() - - // all fields are either string pointers or string slices - for key, value := range r { - if err := mapFieldValue(destVal, key, value); err != nil { - logger.Errorf("Error mapping field %s in %T: %v", key, dest, err) - } - } -} - -func mapFieldValue(destVal reflect.Value, key string, value interface{}) error { - field := destVal.FieldByName(key) - - if !field.IsValid() { - return fmt.Errorf("field %s does not exist on %s", key, destVal.Type().Name()) - } - - if !field.CanSet() { - return fmt.Errorf("field %s cannot be set on %s", key, destVal.Type().Name()) - } - - fieldType := field.Type() - - switch v := value.(type) { - case string: - // if the field is a pointer to a string, then we need to convert the string to a pointer - // if the field is a string slice, then we need to convert the string to a slice - switch { - case fieldType.Kind() == reflect.String: - field.SetString(v) - case fieldType.Kind() == reflect.Ptr && fieldType.Elem().Kind() == reflect.String: - ptr := reflect.New(fieldType.Elem()) - ptr.Elem().SetString(v) - field.Set(ptr) - case fieldType.Kind() == reflect.Slice && fieldType.Elem().Kind() == reflect.String: - field.Set(reflect.ValueOf([]string{v})) - 
default: - return fmt.Errorf("cannot convert %T to %s", value, fieldType) - } - case []string: - // expect the field to be a string slice - if fieldType.Kind() == reflect.Slice && fieldType.Elem().Kind() == reflect.String { - field.Set(reflect.ValueOf(v)) - } else { - return fmt.Errorf("cannot convert %T to %s", value, fieldType) - } - default: - // fallback to reflection - reflectValue := reflect.ValueOf(value) - reflectValueType := reflectValue.Type() - - switch { - case reflectValueType.ConvertibleTo(fieldType): - field.Set(reflectValue.Convert(fieldType)) - case fieldType.Kind() == reflect.Pointer && reflectValueType.ConvertibleTo(fieldType.Elem()): - ptr := reflect.New(fieldType.Elem()) - ptr.Elem().Set(reflectValue.Convert(fieldType.Elem())) - field.Set(ptr) - default: - return fmt.Errorf("cannot convert %T to %s", value, fieldType) - } - } - - return nil -} - -func (r mappedResults) setSingleValue(index int, key string, value string) mappedResults { - if index >= len(r) { - r = append(r, make(mappedResult)) - } - - logger.Debugf(`[%d][%s] = %s`, index, key, value) - r[index][key] = value - return r -} - -func (r mappedResults) setMultiValue(index int, key string, value []string) mappedResults { - if index >= len(r) { - r = append(r, make(mappedResult)) - } - - logger.Debugf(`[%d][%s] = %s`, index, key, value) - r[index][key] = value - return r -} - func urlsIsMulti(key string) bool { return key == "URLs" } func (s mappedScraper) scrapePerformer(ctx context.Context, q mappedQuery) (*models.ScrapedPerformer, error) { - var ret models.ScrapedPerformer + var ret *models.ScrapedPerformer performerMap := s.Performer if performerMap == nil { @@ -992,31 +46,26 @@ func (s mappedScraper) scrapePerformer(ctx context.Context, q mappedQuery) (*mod results := performerMap.process(ctx, q, s.Common, urlsIsMulti) // now apply the tags + var tagResults mappedResults + if performerTagsMap != nil { logger.Debug(`Processing performer tags:`) - tagResults := 
performerTagsMap.process(ctx, q, s.Common, nil) - - for _, p := range tagResults { - tag := &models.ScrapedTag{} - p.apply(tag) - ret.Tags = append(ret.Tags, tag) - } + tagResults = performerTagsMap.process(ctx, q, s.Common, nil) } - if len(results) == 0 && len(ret.Tags) == 0 { + if len(results) == 0 { return nil, nil } if len(results) > 0 { - results[0].apply(&ret) + ret = results[0].scrapedPerformer() + ret.Tags = tagResults.scrapedTags() } - return &ret, nil + return ret, nil } func (s mappedScraper) scrapePerformers(ctx context.Context, q mappedQuery) ([]*models.ScrapedPerformer, error) { - var ret []*models.ScrapedPerformer - performerMap := s.Performer if performerMap == nil { return nil, nil @@ -1024,13 +73,7 @@ func (s mappedScraper) scrapePerformers(ctx context.Context, q mappedQuery) ([]* // isMulti is nil because it will behave incorrect when scraping multiple performers results := performerMap.process(ctx, q, s.Common, nil) - for _, r := range results { - var p models.ScrapedPerformer - r.apply(&p) - ret = append(ret, &p) - } - - return ret, nil + return results.scrapedPerformers(), nil } // processSceneRelationships sets the relationships on the models.ScrapedScene. It returns true if any relationships were set. 
@@ -1048,7 +91,7 @@ func (s mappedScraper) processSceneRelationships(ctx context.Context, q mappedQu if sceneTagsMap != nil { logger.Debug(`Processing scene tags:`) - ret.Tags = processRelationships[models.ScrapedTag](ctx, s, sceneTagsMap, q) + ret.Tags = sceneTagsMap.process(ctx, q, s.Common, nil).scrapedTags() } if sceneStudioMap != nil { @@ -1056,21 +99,20 @@ func (s mappedScraper) processSceneRelationships(ctx context.Context, q mappedQu studioResults := sceneStudioMap.process(ctx, q, s.Common, nil) if len(studioResults) > 0 && resultIndex < len(studioResults) { - studio := &models.ScrapedStudio{} // when doing a `search` scrape get the related studio - studioResults[resultIndex].apply(studio) + studio := studioResults[resultIndex].scrapedStudio() ret.Studio = studio } } if sceneMoviesMap != nil { logger.Debug(`Processing scene movies:`) - ret.Movies = processRelationships[models.ScrapedMovie](ctx, s, sceneMoviesMap, q) + ret.Movies = sceneMoviesMap.process(ctx, q, s.Common, nil).scrapedMovies() } if sceneGroupsMap != nil { logger.Debug(`Processing scene groups:`) - ret.Groups = processRelationships[models.ScrapedGroup](ctx, s, sceneGroupsMap, q) + ret.Groups = sceneGroupsMap.process(ctx, q, s.Common, nil).scrapedGroups() } return len(ret.Performers) > 0 || len(ret.Tags) > 0 || ret.Studio != nil || len(ret.Movies) > 0 || len(ret.Groups) > 0 @@ -1094,12 +136,10 @@ func (s mappedScraper) processPerformers(ctx context.Context, performersMap mapp } for _, p := range performerResults { - performer := &models.ScrapedPerformer{} - p.apply(performer) + performer := p.scrapedPerformer() for _, p := range performerTagResults { - tag := &models.ScrapedTag{} - p.apply(tag) + tag := p.scrapedTag() performer.Tags = append(performer.Tags, tag) } @@ -1110,20 +150,6 @@ func (s mappedScraper) processPerformers(ctx context.Context, performersMap mapp return ret } -func processRelationships[T any](ctx context.Context, s mappedScraper, relationshipMap mappedConfig, q mappedQuery) 
[]*T { - var ret []*T - - results := relationshipMap.process(ctx, q, s.Common, nil) - - for _, p := range results { - var value T - p.apply(&value) - ret = append(ret, &value) - } - - return ret -} - func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*models.ScrapedScene, error) { var ret []*models.ScrapedScene @@ -1139,10 +165,9 @@ func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*mode for i, r := range results { logger.Debug(`Processing scene:`) - var thisScene models.ScrapedScene - r.apply(&thisScene) - s.processSceneRelationships(ctx, q, i, &thisScene) - ret = append(ret, &thisScene) + thisScene := r.scrapedScene() + s.processSceneRelationships(ctx, q, i, thisScene) + ret = append(ret, thisScene) } return ret, nil @@ -1159,17 +184,17 @@ func (s mappedScraper) scrapeScene(ctx context.Context, q mappedQuery) (*models. logger.Debug(`Processing scene:`) results := sceneMap.process(ctx, q, s.Common, urlsIsMulti) - var ret models.ScrapedScene + var ret *models.ScrapedScene if len(results) > 0 { - results[0].apply(&ret) + ret = results[0].scrapedScene() } - hasRelationships := s.processSceneRelationships(ctx, q, 0, &ret) + hasRelationships := s.processSceneRelationships(ctx, q, 0, ret) // #3953 - process only returns results if the non-relationship fields are // populated // only return if we have results or relationships if len(results) > 0 || hasRelationships { - return &ret, nil + return ret, nil } return nil, nil @@ -1192,15 +217,19 @@ func (s mappedScraper) scrapeImage(ctx context.Context, q mappedQuery) (*models. 
logger.Debug(`Processing image:`) results := imageMap.process(ctx, q, s.Common, urlsIsMulti) + if len(results) > 0 { + ret = *results[0].scrapedImage() + } + // now apply the performers and tags if imagePerformersMap != nil { logger.Debug(`Processing image performers:`) - ret.Performers = processRelationships[models.ScrapedPerformer](ctx, s, imagePerformersMap, q) + ret.Performers = imagePerformersMap.process(ctx, q, s.Common, nil).scrapedPerformers() } if imageTagsMap != nil { logger.Debug(`Processing image tags:`) - ret.Tags = processRelationships[models.ScrapedTag](ctx, s, imageTagsMap, q) + ret.Tags = imageTagsMap.process(ctx, q, s.Common, nil).scrapedTags() } if imageStudioMap != nil { @@ -1208,9 +237,7 @@ func (s mappedScraper) scrapeImage(ctx context.Context, q mappedQuery) (*models. studioResults := imageStudioMap.process(ctx, q, s.Common, nil) if len(studioResults) > 0 { - studio := &models.ScrapedStudio{} - studioResults[0].apply(studio) - ret.Studio = studio + ret.Studio = studioResults[0].scrapedStudio() } } @@ -1219,10 +246,6 @@ func (s mappedScraper) scrapeImage(ctx context.Context, q mappedQuery) (*models. 
return nil, nil } - if len(results) > 0 { - results[0].apply(&ret) - } - return &ret, nil } @@ -1243,27 +266,22 @@ func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*model logger.Debug(`Processing gallery:`) results := galleryMap.process(ctx, q, s.Common, urlsIsMulti) + if len(results) > 0 { + ret = *results[0].scrapedGallery() + } + // now apply the performers and tags if galleryPerformersMap != nil { logger.Debug(`Processing gallery performers:`) performerResults := galleryPerformersMap.process(ctx, q, s.Common, urlsIsMulti) - for _, p := range performerResults { - performer := &models.ScrapedPerformer{} - p.apply(performer) - ret.Performers = append(ret.Performers, performer) - } + ret.Performers = performerResults.scrapedPerformers() } if galleryTagsMap != nil { logger.Debug(`Processing gallery tags:`) tagResults := galleryTagsMap.process(ctx, q, s.Common, nil) - - for _, p := range tagResults { - tag := &models.ScrapedTag{} - p.apply(tag) - ret.Tags = append(ret.Tags, tag) - } + ret.Tags = tagResults.scrapedTags() } if galleryStudioMap != nil { @@ -1271,9 +289,7 @@ func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*model studioResults := galleryStudioMap.process(ctx, q, s.Common, nil) if len(studioResults) > 0 { - studio := &models.ScrapedStudio{} - studioResults[0].apply(studio) - ret.Studio = studio + ret.Studio = studioResults[0].scrapedStudio() } } @@ -1282,10 +298,6 @@ func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*model return nil, nil } - if len(results) > 0 { - results[0].apply(&ret) - } - return &ret, nil } @@ -1309,14 +321,16 @@ func (s mappedScraper) scrapeGroup(ctx context.Context, q mappedQuery) (*models. 
results := groupMap.process(ctx, q, s.Common, urlsIsMulti) + if len(results) > 0 { + ret = *results[0].scrapedGroup() + } + if groupStudioMap != nil { logger.Debug(`Processing group studio:`) studioResults := groupStudioMap.process(ctx, q, s.Common, nil) if len(studioResults) > 0 { - studio := &models.ScrapedStudio{} - studioResults[0].apply(studio) - ret.Studio = studio + ret.Studio = studioResults[0].scrapedStudio() } } @@ -1325,20 +339,12 @@ func (s mappedScraper) scrapeGroup(ctx context.Context, q mappedQuery) (*models. logger.Debug(`Processing group tags:`) tagResults := groupTagsMap.process(ctx, q, s.Common, nil) - for _, p := range tagResults { - tag := &models.ScrapedTag{} - p.apply(tag) - ret.Tags = append(ret.Tags, tag) - } + ret.Tags = tagResults.scrapedTags() } if len(results) == 0 && ret.Studio == nil && len(ret.Tags) == 0 { return nil, nil } - if len(results) > 0 { - results[0].apply(&ret) - } - return &ret, nil } diff --git a/pkg/scraper/mapped_config.go b/pkg/scraper/mapped_config.go new file mode 100644 index 000000000..920bf74b4 --- /dev/null +++ b/pkg/scraper/mapped_config.go @@ -0,0 +1,537 @@ +package scraper + +import ( + "context" + "errors" + "net/url" + "strings" + + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/sliceutil" + "gopkg.in/yaml.v2" +) + +type commonMappedConfig map[string]string + +type mappedConfig map[string]mappedScraperAttrConfig + +func (s mappedConfig) applyCommon(c commonMappedConfig, src string) string { + if c == nil { + return src + } + + ret := src + for commonKey, commonVal := range c { + ret = strings.ReplaceAll(ret, commonKey, commonVal) + } + + return ret +} + +// extractHostname parses a URL string and returns the hostname. +// Returns empty string if the URL cannot be parsed. 
+func extractHostname(urlStr string) string { + if urlStr == "" { + return "" + } + + u, err := url.Parse(urlStr) + if err != nil { + logger.Warnf("Error parsing URL '%s': %s", urlStr, err.Error()) + return "" + } + + return u.Hostname() +} + +type isMultiFunc func(key string) bool + +func (s mappedConfig) process(ctx context.Context, q mappedQuery, common commonMappedConfig, isMulti isMultiFunc) mappedResults { + var ret mappedResults + + for k, attrConfig := range s { + + if attrConfig.Fixed != "" { + // TODO - not sure if this needs to set _all_ indexes for the key + const i = 0 + // Support {inputURL} and {inputHostname} placeholders in fixed values + value := strings.ReplaceAll(attrConfig.Fixed, "{inputURL}", q.getURL()) + value = strings.ReplaceAll(value, "{inputHostname}", extractHostname(q.getURL())) + ret = ret.setSingleValue(i, k, value) + } else { + selector := attrConfig.Selector + selector = s.applyCommon(common, selector) + // Support {inputURL} and {inputHostname} placeholders in selectors + selector = strings.ReplaceAll(selector, "{inputURL}", q.getURL()) + selector = strings.ReplaceAll(selector, "{inputHostname}", extractHostname(q.getURL())) + + found, err := q.runQuery(selector) + if err != nil { + logger.Warnf("key '%v': %v", k, err) + } + + if len(found) > 0 { + result := s.postProcess(ctx, q, attrConfig, found) + + // HACK - if the key is URLs, then we need to set the value as a multi-value + isMulti := isMulti != nil && isMulti(k) + if isMulti { + ret = ret.setMultiValue(0, k, result) + } else { + for i, text := range result { + ret = ret.setSingleValue(i, k, text) + } + } + } + } + } + + return ret +} + +func (s mappedConfig) postProcess(ctx context.Context, q mappedQuery, attrConfig mappedScraperAttrConfig, found []string) []string { + // check if we're concatenating the results into a single result + var ret []string + if attrConfig.hasConcat() { + result := attrConfig.concatenateResults(found) + result = attrConfig.postProcess(ctx, 
result, q) + if attrConfig.hasSplit() { + results := attrConfig.splitString(result) + // skip cleaning when the query is used for searching + if q.getType() == SearchQuery { + return results + } + results = attrConfig.cleanResults(results) + return results + } + + ret = []string{result} + } else { + for _, text := range found { + text = attrConfig.postProcess(ctx, text, q) + if attrConfig.hasSplit() { + return attrConfig.splitString(text) + } + + ret = append(ret, text) + } + // skip cleaning when the query is used for searching + if q.getType() == SearchQuery { + return ret + } + ret = attrConfig.cleanResults(ret) + + } + + return ret +} + +type mappedSceneScraperConfig struct { + mappedConfig + + Tags mappedConfig `yaml:"Tags"` + Performers mappedPerformerScraperConfig `yaml:"Performers"` + Studio mappedConfig `yaml:"Studio"` + Movies mappedConfig `yaml:"Movies"` + Groups mappedConfig `yaml:"Groups"` +} +type _mappedSceneScraperConfig mappedSceneScraperConfig + +const ( + mappedScraperConfigSceneTags = "Tags" + mappedScraperConfigScenePerformers = "Performers" + mappedScraperConfigSceneStudio = "Studio" + mappedScraperConfigSceneMovies = "Movies" + mappedScraperConfigSceneGroups = "Groups" +) + +func (s *mappedSceneScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + // HACK - unmarshal to map first, then remove known scene sub-fields, then + // remarshal to yaml and pass that down to the base map + parentMap := make(map[string]interface{}) + if err := unmarshal(parentMap); err != nil { + return err + } + + // move the known sub-fields to a separate map + thisMap := make(map[string]interface{}) + + thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags] + thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers] + thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio] + thisMap[mappedScraperConfigSceneMovies] = parentMap[mappedScraperConfigSceneMovies] + 
thisMap[mappedScraperConfigSceneGroups] = parentMap[mappedScraperConfigSceneGroups] + + delete(parentMap, mappedScraperConfigSceneTags) + delete(parentMap, mappedScraperConfigScenePerformers) + delete(parentMap, mappedScraperConfigSceneStudio) + delete(parentMap, mappedScraperConfigSceneMovies) + delete(parentMap, mappedScraperConfigSceneGroups) + + // re-unmarshal the sub-fields + yml, err := yaml.Marshal(thisMap) + if err != nil { + return err + } + + // needs to be a different type to prevent infinite recursion + c := _mappedSceneScraperConfig{} + if err := yaml.Unmarshal(yml, &c); err != nil { + return err + } + + *s = mappedSceneScraperConfig(c) + + yml, err = yaml.Marshal(parentMap) + if err != nil { + return err + } + + if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { + return err + } + + return nil +} + +type mappedGalleryScraperConfig struct { + mappedConfig + + Tags mappedConfig `yaml:"Tags"` + Performers mappedConfig `yaml:"Performers"` + Studio mappedConfig `yaml:"Studio"` +} + +type _mappedGalleryScraperConfig mappedGalleryScraperConfig + +func (s *mappedGalleryScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + // HACK - unmarshal to map first, then remove known scene sub-fields, then + // remarshal to yaml and pass that down to the base map + parentMap := make(map[string]interface{}) + if err := unmarshal(parentMap); err != nil { + return err + } + + // move the known sub-fields to a separate map + thisMap := make(map[string]interface{}) + + thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags] + thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers] + thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio] + + delete(parentMap, mappedScraperConfigSceneTags) + delete(parentMap, mappedScraperConfigScenePerformers) + delete(parentMap, mappedScraperConfigSceneStudio) + + // re-unmarshal the sub-fields + yml, err := 
yaml.Marshal(thisMap) + if err != nil { + return err + } + + // needs to be a different type to prevent infinite recursion + c := _mappedGalleryScraperConfig{} + if err := yaml.Unmarshal(yml, &c); err != nil { + return err + } + + *s = mappedGalleryScraperConfig(c) + + yml, err = yaml.Marshal(parentMap) + if err != nil { + return err + } + + if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { + return err + } + + return nil +} + +type mappedImageScraperConfig struct { + mappedConfig + + Tags mappedConfig `yaml:"Tags"` + Performers mappedConfig `yaml:"Performers"` + Studio mappedConfig `yaml:"Studio"` +} +type _mappedImageScraperConfig mappedImageScraperConfig + +func (s *mappedImageScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + // HACK - unmarshal to map first, then remove known scene sub-fields, then + // remarshal to yaml and pass that down to the base map + parentMap := make(map[string]interface{}) + if err := unmarshal(parentMap); err != nil { + return err + } + + // move the known sub-fields to a separate map + thisMap := make(map[string]interface{}) + + thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags] + thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers] + thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio] + + delete(parentMap, mappedScraperConfigSceneTags) + delete(parentMap, mappedScraperConfigScenePerformers) + delete(parentMap, mappedScraperConfigSceneStudio) + + // re-unmarshal the sub-fields + yml, err := yaml.Marshal(thisMap) + if err != nil { + return err + } + + // needs to be a different type to prevent infinite recursion + c := _mappedImageScraperConfig{} + if err := yaml.Unmarshal(yml, &c); err != nil { + return err + } + + *s = mappedImageScraperConfig(c) + + yml, err = yaml.Marshal(parentMap) + if err != nil { + return err + } + + if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { + return err + } 
+ + return nil +} + +type mappedPerformerScraperConfig struct { + mappedConfig + + Tags mappedConfig `yaml:"Tags"` +} +type _mappedPerformerScraperConfig mappedPerformerScraperConfig + +const ( + mappedScraperConfigPerformerTags = "Tags" +) + +func (s *mappedPerformerScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + // HACK - unmarshal to map first, then remove known scene sub-fields, then + // remarshal to yaml and pass that down to the base map + parentMap := make(map[string]interface{}) + if err := unmarshal(parentMap); err != nil { + return err + } + + // move the known sub-fields to a separate map + thisMap := make(map[string]interface{}) + + thisMap[mappedScraperConfigPerformerTags] = parentMap[mappedScraperConfigPerformerTags] + + delete(parentMap, mappedScraperConfigPerformerTags) + + // re-unmarshal the sub-fields + yml, err := yaml.Marshal(thisMap) + if err != nil { + return err + } + + // needs to be a different type to prevent infinite recursion + c := _mappedPerformerScraperConfig{} + if err := yaml.Unmarshal(yml, &c); err != nil { + return err + } + + *s = mappedPerformerScraperConfig(c) + + yml, err = yaml.Marshal(parentMap) + if err != nil { + return err + } + + if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { + return err + } + + return nil +} + +type mappedMovieScraperConfig struct { + mappedConfig + + Studio mappedConfig `yaml:"Studio"` + Tags mappedConfig `yaml:"Tags"` +} +type _mappedMovieScraperConfig mappedMovieScraperConfig + +const ( + mappedScraperConfigMovieStudio = "Studio" + mappedScraperConfigMovieTags = "Tags" +) + +func (s *mappedMovieScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + // HACK - unmarshal to map first, then remove known movie sub-fields, then + // remarshal to yaml and pass that down to the base map + parentMap := make(map[string]interface{}) + if err := unmarshal(parentMap); err != nil { + return err + } + + // move the known sub-fields to a separate map + 
thisMap := make(map[string]interface{}) + + thisMap[mappedScraperConfigMovieStudio] = parentMap[mappedScraperConfigMovieStudio] + delete(parentMap, mappedScraperConfigMovieStudio) + + thisMap[mappedScraperConfigMovieTags] = parentMap[mappedScraperConfigMovieTags] + delete(parentMap, mappedScraperConfigMovieTags) + + // re-unmarshal the sub-fields + yml, err := yaml.Marshal(thisMap) + if err != nil { + return err + } + + // needs to be a different type to prevent infinite recursion + c := _mappedMovieScraperConfig{} + if err := yaml.Unmarshal(yml, &c); err != nil { + return err + } + + *s = mappedMovieScraperConfig(c) + + yml, err = yaml.Marshal(parentMap) + if err != nil { + return err + } + + if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { + return err + } + + return nil +} + +type mappedScraperAttrConfig struct { + Selector string `yaml:"selector"` + Fixed string `yaml:"fixed"` + PostProcess []mappedPostProcessAction `yaml:"postProcess"` + Concat string `yaml:"concat"` + Split string `yaml:"split"` + + postProcessActions []postProcessAction + + // Deprecated: use PostProcess instead + ParseDate string `yaml:"parseDate"` + Replace mappedRegexConfigs `yaml:"replace"` + SubScraper *mappedScraperAttrConfig `yaml:"subScraper"` +} + +type _mappedScraperAttrConfig mappedScraperAttrConfig + +func (c *mappedScraperAttrConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + // try unmarshalling into a string first + if err := unmarshal(&c.Selector); err != nil { + // if it's a type error then we try to unmarshall to the full object + var typeErr *yaml.TypeError + if !errors.As(err, &typeErr) { + return err + } + + // unmarshall to full object + // need it as a separate object + t := _mappedScraperAttrConfig{} + if err = unmarshal(&t); err != nil { + return err + } + + *c = mappedScraperAttrConfig(t) + } + + return c.convertPostProcessActions() +} + +func (c *mappedScraperAttrConfig) convertPostProcessActions() error { + // ensure we don't have the 
old deprecated fields and the new post process field + if len(c.PostProcess) > 0 { + if c.ParseDate != "" || len(c.Replace) > 0 || c.SubScraper != nil { + return errors.New("cannot include postProcess and (parseDate, replace, subScraper) deprecated fields") + } + + // convert xpathPostProcessAction actions to postProcessActions + for _, a := range c.PostProcess { + action, err := a.ToPostProcessAction() + if err != nil { + return err + } + c.postProcessActions = append(c.postProcessActions, action) + } + + c.PostProcess = nil + } else { + // convert old deprecated fields if present + // in same order as they used to be executed + if len(c.Replace) > 0 { + action := postProcessReplace(c.Replace) + c.postProcessActions = append(c.postProcessActions, &action) + c.Replace = nil + } + + if c.SubScraper != nil { + action := postProcessSubScraper(*c.SubScraper) + c.postProcessActions = append(c.postProcessActions, &action) + c.SubScraper = nil + } + + if c.ParseDate != "" { + action := postProcessParseDate(c.ParseDate) + c.postProcessActions = append(c.postProcessActions, &action) + c.ParseDate = "" + } + } + + return nil +} + +func (c mappedScraperAttrConfig) hasConcat() bool { + return c.Concat != "" +} + +func (c mappedScraperAttrConfig) hasSplit() bool { + return c.Split != "" +} + +func (c mappedScraperAttrConfig) concatenateResults(nodes []string) string { + separator := c.Concat + return strings.Join(nodes, separator) +} + +func (c mappedScraperAttrConfig) cleanResults(nodes []string) []string { + cleaned := sliceutil.Unique(nodes) // remove duplicate values + cleaned = sliceutil.Delete(cleaned, "") // remove empty values + return cleaned +} + +func (c mappedScraperAttrConfig) splitString(value string) []string { + separator := c.Split + var res []string + + if separator == "" { + return []string{value} + } + + for _, str := range strings.Split(value, separator) { + if str != "" { + res = append(res, str) + } + } + + return res +} + +func (c 
mappedScraperAttrConfig) postProcess(ctx context.Context, value string, q mappedQuery) string { + for _, action := range c.postProcessActions { + value = action.Apply(ctx, value, q) + } + + return value +} diff --git a/pkg/scraper/mapped_postprocessing.go b/pkg/scraper/mapped_postprocessing.go new file mode 100644 index 000000000..22a8b748a --- /dev/null +++ b/pkg/scraper/mapped_postprocessing.go @@ -0,0 +1,333 @@ +package scraper + +import ( + "context" + "errors" + "fmt" + "math" + "regexp" + "strconv" + "strings" + "time" + + "github.com/stashapp/stash/pkg/javascript" + "github.com/stashapp/stash/pkg/logger" +) + +type mappedRegexConfig struct { + Regex string `yaml:"regex"` + With string `yaml:"with"` +} + +type mappedRegexConfigs []mappedRegexConfig + +func (c mappedRegexConfig) apply(value string) string { + if c.Regex != "" { + re, err := regexp.Compile(c.Regex) + if err != nil { + logger.Warnf("Error compiling regex '%s': %s", c.Regex, err.Error()) + return value + } + + ret := re.ReplaceAllString(value, c.With) + + // trim leading and trailing whitespace + // this is done to maintain backwards compatibility with existing + // scrapers + ret = strings.TrimSpace(ret) + + logger.Debugf(`Replace: '%s' with '%s'`, c.Regex, c.With) + logger.Debugf("Before: %s", value) + logger.Debugf("After: %s", ret) + return ret + } + + return value +} + +func (c mappedRegexConfigs) apply(value string) string { + // apply regex in order + for _, config := range c { + value = config.apply(value) + } + + return value +} + +type postProcessAction interface { + Apply(ctx context.Context, value string, q mappedQuery) string +} + +type postProcessParseDate string + +func (p *postProcessParseDate) Apply(ctx context.Context, value string, q mappedQuery) string { + parseDate := string(*p) + + const internalDateFormat = "2006-01-02" + + valueLower := strings.ToLower(value) + if valueLower == "today" || valueLower == "yesterday" { // handle today, yesterday + dt := time.Now() + if 
valueLower == "yesterday" { // subtract 1 day from now + dt = dt.AddDate(0, 0, -1) + } + return dt.Format(internalDateFormat) + } + + if parseDate == "" { + return value + } + + if parseDate == "unix" { + // try to parse the date using unix timestamp format + // if it fails, then just fall back to the original value + timeAsInt, err := strconv.ParseInt(value, 10, 64) + if err != nil { + logger.Warnf("Error parsing date string '%s' using unix timestamp format : %s", value, err.Error()) + return value + } + parsedValue := time.Unix(timeAsInt, 0) + + return parsedValue.Format(internalDateFormat) + } + + // try to parse the date using the pattern + // if it fails, then just fall back to the original value + parsedValue, err := time.Parse(parseDate, value) + if err != nil { + logger.Warnf("Error parsing date string '%s' using format '%s': %s", value, parseDate, err.Error()) + return value + } + + // convert it into our date format + return parsedValue.Format(internalDateFormat) +} + +type postProcessSubtractDays bool + +func (p *postProcessSubtractDays) Apply(ctx context.Context, value string, q mappedQuery) string { + const internalDateFormat = "2006-01-02" + + i, err := strconv.Atoi(value) + if err != nil { + logger.Warnf("Error parsing day string %s: %s", value, err) + return value + } + + dt := time.Now() + dt = dt.AddDate(0, 0, -i) + return dt.Format(internalDateFormat) +} + +type postProcessReplace mappedRegexConfigs + +func (c *postProcessReplace) Apply(ctx context.Context, value string, q mappedQuery) string { + replace := mappedRegexConfigs(*c) + return replace.apply(value) +} + +type postProcessSubScraper mappedScraperAttrConfig + +func (p *postProcessSubScraper) Apply(ctx context.Context, value string, q mappedQuery) string { + subScrapeConfig := mappedScraperAttrConfig(*p) + + logger.Debugf("Sub-scraping for: %s", value) + ss := q.subScrape(ctx, value) + + if ss != nil { + found, err := ss.runQuery(subScrapeConfig.Selector) + if err != nil { + 
logger.Warnf("subscrape for '%v': %v", value, err) + } + + if len(found) > 0 { + // check if we're concatenating the results into a single result + var result string + if subScrapeConfig.hasConcat() { + result = subScrapeConfig.concatenateResults(found) + } else { + result = found[0] + } + + result = subScrapeConfig.postProcess(ctx, result, ss) + return result + } + } + + return "" +} + +type postProcessMap map[string]string + +func (p *postProcessMap) Apply(ctx context.Context, value string, q mappedQuery) string { + // return the mapped value if present + m := *p + mapped, ok := m[value] + + if ok { + return mapped + } + + return value +} + +type postProcessFeetToCm bool + +func (p *postProcessFeetToCm) Apply(ctx context.Context, value string, q mappedQuery) string { + const foot_in_cm = 30.48 + const inch_in_cm = 2.54 + + reg := regexp.MustCompile("[0-9]+") + filtered := reg.FindAllString(value, -1) + + var feet float64 + var inches float64 + if len(filtered) > 0 { + feet, _ = strconv.ParseFloat(filtered[0], 64) + } + if len(filtered) > 1 { + inches, _ = strconv.ParseFloat(filtered[1], 64) + } + + var centimeters = feet*foot_in_cm + inches*inch_in_cm + + // Return rounded integer string + return strconv.Itoa(int(math.Round(centimeters))) +} + +type postProcessLbToKg bool + +func (p *postProcessLbToKg) Apply(ctx context.Context, value string, q mappedQuery) string { + const lb_in_kg = 0.45359237 + w, err := strconv.ParseFloat(value, 64) + if err == nil { + w *= lb_in_kg + value = strconv.Itoa(int(math.Round(w))) + } + return value +} + +type postProcessJavascript string + +func (p *postProcessJavascript) Apply(ctx context.Context, value string, q mappedQuery) string { + vm := javascript.NewVM() + if err := vm.Set("value", value); err != nil { + logger.Warnf("javascript failed to set value: %v", err) + return value + } + + log := &javascript.Log{ + Logger: logger.Logger, + Prefix: "", + ProgressChan: make(chan float64), + } + + if err := log.AddToVM("log", vm); 
err != nil { + logger.Logger.Errorf("error adding log API: %w", err) + } + + util := &javascript.Util{} + if err := util.AddToVM("util", vm); err != nil { + logger.Logger.Errorf("error adding util API: %w", err) + } + + script, err := javascript.CompileScript("", "(function() { "+string(*p)+"})()") + if err != nil { + logger.Warnf("javascript failed to compile: %v", err) + return value + } + + output, err := vm.RunProgram(script) + if err != nil { + logger.Warnf("javascript failed to run: %v", err) + return value + } + + // assume output is string + return output.String() +} + +type mappedPostProcessAction struct { + ParseDate string `yaml:"parseDate"` + SubtractDays bool `yaml:"subtractDays"` + Replace mappedRegexConfigs `yaml:"replace"` + SubScraper *mappedScraperAttrConfig `yaml:"subScraper"` + Map map[string]string `yaml:"map"` + FeetToCm bool `yaml:"feetToCm"` + LbToKg bool `yaml:"lbToKg"` + Javascript string `yaml:"javascript"` +} + +func (a mappedPostProcessAction) ToPostProcessAction() (postProcessAction, error) { + var found string + var ret postProcessAction + + ensureOnly := func(field string) error { + if found != "" { + return fmt.Errorf("post-process actions must have a single field, found %s and %s", found, field) + } + found = field + return nil + } + + if a.ParseDate != "" { + found = "parseDate" + action := postProcessParseDate(a.ParseDate) + ret = &action + } + if len(a.Replace) > 0 { + if err := ensureOnly("replace"); err != nil { + return nil, err + } + action := postProcessReplace(a.Replace) + ret = &action + } + if a.SubScraper != nil { + if err := ensureOnly("subScraper"); err != nil { + return nil, err + } + action := postProcessSubScraper(*a.SubScraper) + ret = &action + } + if a.Map != nil { + if err := ensureOnly("map"); err != nil { + return nil, err + } + action := postProcessMap(a.Map) + ret = &action + } + if a.FeetToCm { + if err := ensureOnly("feetToCm"); err != nil { + return nil, err + } + action := 
postProcessFeetToCm(a.FeetToCm) + ret = &action + } + if a.LbToKg { + if err := ensureOnly("lbToKg"); err != nil { + return nil, err + } + action := postProcessLbToKg(a.LbToKg) + ret = &action + } + if a.SubtractDays { + if err := ensureOnly("subtractDays"); err != nil { + return nil, err + } + action := postProcessSubtractDays(a.SubtractDays) + ret = &action + } + if a.Javascript != "" { + if err := ensureOnly("javascript"); err != nil { + return nil, err + } + action := postProcessJavascript(a.Javascript) + ret = &action + } + + if ret == nil { + return nil, errors.New("invalid post-process action") + } + + return ret, nil +} diff --git a/pkg/scraper/mapped_result.go b/pkg/scraper/mapped_result.go new file mode 100644 index 000000000..1260f3082 --- /dev/null +++ b/pkg/scraper/mapped_result.go @@ -0,0 +1,278 @@ +package scraper + +import ( + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +type mappedResult map[string]interface{} +type mappedResults []mappedResult + +func (r mappedResult) string(key string) (string, bool) { + v, ok := r[key] + if !ok { + return "", false + } + + val, ok := v.(string) + if !ok { + logger.Errorf("String field %s is %T in mappedResult", key, r[key]) + } + + return val, true +} + +func (r mappedResult) mustString(key string) string { + v, ok := r[key] + if !ok { + logger.Errorf("Missing required string field %s in mappedResult", key) + return "" + } + + val, ok := v.(string) + if !ok { + logger.Errorf("String field %s is %T in mappedResult", key, r[key]) + } + + return val +} + +func (r mappedResult) stringPtr(key string) *string { + val, ok := r.string(key) + if !ok { + return nil + } + return &val +} + +func (r mappedResult) stringSlice(key string) []string { + v, ok := r[key] + if !ok { + return nil + } + + // need to try both []string and string + val, ok := v.([]string) + + if ok { + return val + } + + // try single string + singleVal, ok := v.(string) + if !ok { + logger.Errorf("String slice 
field %s is %T in mappedResult", key, r[key]) + return nil + } + + return []string{singleVal} +} + +func (r mappedResult) IntPtr(key string) *int { + v, ok := r[key] + if !ok { + return nil + } + + val, ok := v.(int) + if !ok { + logger.Errorf("Int field %s is %T in mappedResult", key, r[key]) + return nil + } + + return &val +} + +func (r mappedResults) setSingleValue(index int, key string, value string) mappedResults { + if index >= len(r) { + r = append(r, make(mappedResult)) + } + + logger.Debugf(`[%d][%s] = %s`, index, key, value) + r[index][key] = value + return r +} + +func (r mappedResults) setMultiValue(index int, key string, value []string) mappedResults { + if index >= len(r) { + r = append(r, make(mappedResult)) + } + + logger.Debugf(`[%d][%s] = %s`, index, key, value) + r[index][key] = value + return r +} + +func (r mappedResults) scrapedTags() []*models.ScrapedTag { + if len(r) == 0 { + return nil + } + + ret := make([]*models.ScrapedTag, len(r)) + for i, result := range r { + ret[i] = result.scrapedTag() + } + + return ret +} + +func (r mappedResult) scrapedTag() *models.ScrapedTag { + return &models.ScrapedTag{ + Name: r.mustString("Name"), + } +} + +func (r mappedResult) scrapedPerformer() *models.ScrapedPerformer { + ret := &models.ScrapedPerformer{ + Name: r.stringPtr("Name"), + Disambiguation: r.stringPtr("Disambiguation"), + Gender: r.stringPtr("Gender"), + URL: r.stringPtr("URL"), + URLs: r.stringSlice("URLs"), + Twitter: r.stringPtr("Twitter"), + Birthdate: r.stringPtr("Birthdate"), + Ethnicity: r.stringPtr("Ethnicity"), + Country: r.stringPtr("Country"), + EyeColor: r.stringPtr("EyeColor"), + Height: r.stringPtr("Height"), + Measurements: r.stringPtr("Measurements"), + FakeTits: r.stringPtr("FakeTits"), + PenisLength: r.stringPtr("PenisLength"), + Circumcised: r.stringPtr("Circumcised"), + CareerLength: r.stringPtr("CareerLength"), + CareerStart: r.IntPtr("CareerStart"), + CareerEnd: r.IntPtr("CareerEnd"), + Tattoos: r.stringPtr("Tattoos"), 
+ Piercings: r.stringPtr("Piercings"), + Aliases: r.stringPtr("Aliases"), + Image: r.stringPtr("Image"), + Images: r.stringSlice("Images"), + Details: r.stringPtr("Details"), + DeathDate: r.stringPtr("DeathDate"), + HairColor: r.stringPtr("HairColor"), + Weight: r.stringPtr("Weight"), + } + return ret +} + +func (r mappedResults) scrapedPerformers() []*models.ScrapedPerformer { + if len(r) == 0 { + return nil + } + + ret := make([]*models.ScrapedPerformer, len(r)) + for i, result := range r { + ret[i] = result.scrapedPerformer() + } + + return ret +} + +func (r mappedResult) scrapedScene() *models.ScrapedScene { + ret := &models.ScrapedScene{ + Title: r.stringPtr("Title"), + Code: r.stringPtr("Code"), + Details: r.stringPtr("Details"), + Director: r.stringPtr("Director"), + URL: r.stringPtr("URL"), + URLs: r.stringSlice("URLs"), + Date: r.stringPtr("Date"), + Image: r.stringPtr("Image"), + Duration: r.IntPtr("Duration"), + } + return ret +} + +func (r mappedResult) scrapedImage() *models.ScrapedImage { + ret := &models.ScrapedImage{ + Title: r.stringPtr("Title"), + Code: r.stringPtr("Code"), + Details: r.stringPtr("Details"), + Photographer: r.stringPtr("Photographer"), + URLs: r.stringSlice("URLs"), + Date: r.stringPtr("Date"), + } + return ret +} + +func (r mappedResult) scrapedGallery() *models.ScrapedGallery { + ret := &models.ScrapedGallery{ + Title: r.stringPtr("Title"), + Code: r.stringPtr("Code"), + Details: r.stringPtr("Details"), + Photographer: r.stringPtr("Photographer"), + URL: r.stringPtr("URL"), + URLs: r.stringSlice("URLs"), + Date: r.stringPtr("Date"), + } + return ret +} + +func (r mappedResult) scrapedStudio() *models.ScrapedStudio { + ret := &models.ScrapedStudio{ + Name: r.mustString("Name"), + URL: r.stringPtr("URL"), + URLs: r.stringSlice("URLs"), + Image: r.stringPtr("Image"), + Details: r.stringPtr("Details"), + Aliases: r.stringPtr("Aliases"), + } + return ret +} + +func (r mappedResult) scrapedMovie() *models.ScrapedMovie { + ret := 
&models.ScrapedMovie{ + Name: r.stringPtr("Name"), + Aliases: r.stringPtr("Aliases"), + URLs: r.stringSlice("URLs"), + Duration: r.stringPtr("Duration"), + Date: r.stringPtr("Date"), + Director: r.stringPtr("Director"), + Synopsis: r.stringPtr("Synopsis"), + FrontImage: r.stringPtr("FrontImage"), + BackImage: r.stringPtr("BackImage"), + } + + return ret +} + +func (r mappedResult) scrapedGroup() *models.ScrapedGroup { + ret := &models.ScrapedGroup{ + Name: r.stringPtr("Name"), + Aliases: r.stringPtr("Aliases"), + URL: r.stringPtr("URL"), + URLs: r.stringSlice("URLs"), + Duration: r.stringPtr("Duration"), + Date: r.stringPtr("Date"), + Director: r.stringPtr("Director"), + Synopsis: r.stringPtr("Synopsis"), + FrontImage: r.stringPtr("FrontImage"), + BackImage: r.stringPtr("BackImage"), + } + + return ret +} + +func (r mappedResults) scrapedMovies() []*models.ScrapedMovie { + if len(r) == 0 { + return nil + } + ret := make([]*models.ScrapedMovie, len(r)) + for i, result := range r { + ret[i] = result.scrapedMovie() + } + + return ret +} + +func (r mappedResults) scrapedGroups() []*models.ScrapedGroup { + if len(r) == 0 { + return nil + } + ret := make([]*models.ScrapedGroup, len(r)) + for i, result := range r { + ret[i] = result.scrapedGroup() + } + + return ret +} diff --git a/pkg/scraper/mapped_result_test.go b/pkg/scraper/mapped_result_test.go new file mode 100644 index 000000000..db6d921bf --- /dev/null +++ b/pkg/scraper/mapped_result_test.go @@ -0,0 +1,908 @@ +package scraper + +import ( + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stretchr/testify/assert" +) + +// Test string method +func TestMappedResultString(t *testing.T) { + tests := []struct { + name string + data mappedResult + key string + expectedValue string + expectedOk bool + }{ + { + name: "valid string", + data: mappedResult{"name": "test"}, + key: "name", + expectedValue: "test", + expectedOk: true, + }, + { + name: "missing key", + data: mappedResult{}, + key: "missing", + 
expectedValue: "", + expectedOk: false, + }, + { + name: "wrong type still returns ok true but empty value", + data: mappedResult{"num": 123}, + key: "num", + expectedValue: "", + expectedOk: true, // logs error but returns ok=true + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + val, ok := test.data.string(test.key) + assert.Equal(t, test.expectedValue, val) + assert.Equal(t, test.expectedOk, ok) + }) + } +} + +// Test mustString method +func TestMappedResultMustString(t *testing.T) { + tests := []struct { + name string + data mappedResult + key string + expectedValue string + }{ + { + name: "valid string", + data: mappedResult{"name": "test"}, + key: "name", + expectedValue: "test", + }, + { + name: "missing key returns empty string", + data: mappedResult{}, + key: "missing", + expectedValue: "", + }, + { + name: "wrong type returns empty string", + data: mappedResult{"num": 123}, + key: "num", + expectedValue: "", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + val := test.data.mustString(test.key) + assert.Equal(t, test.expectedValue, val) + }) + } +} + +// Test stringPtr method +func TestMappedResultStringPtr(t *testing.T) { + tests := []struct { + name string + data mappedResult + key string + expectedValue *string + }{ + { + name: "valid string", + data: mappedResult{"name": "test"}, + key: "name", + expectedValue: strPtr("test"), + }, + { + name: "missing key returns nil", + data: mappedResult{}, + key: "missing", + expectedValue: nil, + }, + { + name: "wrong type returns non-nil pointer to empty string", + data: mappedResult{"num": 123}, + key: "num", + expectedValue: strPtr(""), // string() returns empty string but ok=true + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + val := test.data.stringPtr(test.key) + if test.expectedValue == nil { + assert.Nil(t, val) + } else { + assert.NotNil(t, val) + assert.Equal(t, *test.expectedValue, *val) + } + }) + } 
+} + +// Test stringSlice method +func TestMappedResultStringSlice(t *testing.T) { + tests := []struct { + name string + data mappedResult + key string + expectedValue []string + }{ + { + name: "valid slice", + data: mappedResult{"tags": []string{"a", "b", "c"}}, + key: "tags", + expectedValue: []string{"a", "b", "c"}, + }, + { + name: "missing key returns nil", + data: mappedResult{}, + key: "missing", + expectedValue: nil, + }, + { + name: "single value converted to slice", + data: mappedResult{"tags": "not a slice"}, + key: "tags", + expectedValue: []string{"not a slice"}, + }, + { + name: "wrong type returns nil", + data: mappedResult{"tags": 123}, + key: "tags", + expectedValue: nil, + }, + { + name: "empty slice", + data: mappedResult{"tags": []string{}}, + key: "tags", + expectedValue: []string{}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + val := test.data.stringSlice(test.key) + assert.Equal(t, test.expectedValue, val) + }) + } +} + +// Test IntPtr method +func TestMappedResultIntPtr(t *testing.T) { + tests := []struct { + name string + data mappedResult + key string + expectedValue *int + }{ + { + name: "valid int", + data: mappedResult{"duration": 120}, + key: "duration", + expectedValue: intPtr(120), + }, + { + name: "missing key returns nil", + data: mappedResult{}, + key: "missing", + expectedValue: nil, + }, + { + name: "wrong type returns nil", + data: mappedResult{"duration": "120"}, + key: "duration", + expectedValue: nil, + }, + { + name: "zero value", + data: mappedResult{"duration": 0}, + key: "duration", + expectedValue: intPtr(0), + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + val := test.data.IntPtr(test.key) + assert.Equal(t, test.expectedValue, val) + }) + } +} + +// Test setSingleValue method +func TestMappedResultsSetSingleValue(t *testing.T) { + tests := []struct { + name string + initialResults mappedResults + index int + key string + value string + 
expectedLen int + shouldPanic bool + }{ + { + name: "append to empty", + initialResults: mappedResults{}, + index: 0, + key: "name", + value: "test", + expectedLen: 1, + shouldPanic: false, + }, + { + name: "set in existing", + initialResults: mappedResults{mappedResult{}}, + index: 0, + key: "name", + value: "test", + expectedLen: 1, + shouldPanic: false, + }, + { + name: "append to existing", + initialResults: mappedResults{mappedResult{}}, + index: 1, + key: "name", + value: "test", + expectedLen: 2, + shouldPanic: false, + }, + { + name: "sparse index causes panic", + initialResults: mappedResults{mappedResult{}}, + index: 5, + key: "name", + value: "test", + expectedLen: 6, + shouldPanic: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + if test.shouldPanic { + assert.Panics(t, func() { + test.initialResults.setSingleValue(test.index, test.key, test.value) + }) + } else { + results := test.initialResults.setSingleValue(test.index, test.key, test.value) + assert.Equal(t, test.expectedLen, len(results)) + assert.Equal(t, test.value, results[test.index][test.key]) + } + }) + } +} + +// Test setMultiValue method +func TestMappedResultsSetMultiValue(t *testing.T) { + tests := []struct { + name string + initialResults mappedResults + index int + key string + value []string + expectedLen int + }{ + { + name: "append to empty", + initialResults: mappedResults{}, + index: 0, + key: "tags", + value: []string{"a", "b"}, + expectedLen: 1, + }, + { + name: "set in existing", + initialResults: mappedResults{mappedResult{}}, + index: 0, + key: "tags", + value: []string{"a", "b"}, + expectedLen: 1, + }, + { + name: "append to existing", + initialResults: mappedResults{mappedResult{}}, + index: 1, + key: "tags", + value: []string{"x", "y"}, + expectedLen: 2, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + results := test.initialResults.setMultiValue(test.index, test.key, test.value) + assert.Equal(t, 
test.expectedLen, len(results)) + assert.Equal(t, test.value, results[test.index][test.key]) + }) + } +} + +// Test scrapedTag method +func TestMappedResultScrapedTag(t *testing.T) { + tests := []struct { + name string + data mappedResult + expectedName string + }{ + { + name: "valid tag", + data: mappedResult{"Name": "Action"}, + expectedName: "Action", + }, + { + name: "missing name", + data: mappedResult{}, + expectedName: "", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + tag := test.data.scrapedTag() + assert.NotNil(t, tag) + assert.Equal(t, test.expectedName, tag.Name) + }) + } +} + +// Test scrapedTags method +func TestMappedResultsScrapedTags(t *testing.T) { + tests := []struct { + name string + data mappedResults + expectedCount int + expectedNames []string + }{ + { + name: "empty results", + data: mappedResults{}, + expectedCount: 0, + }, + { + name: "single tag", + data: mappedResults{ + mappedResult{"Name": "Action"}, + }, + expectedCount: 1, + expectedNames: []string{"Action"}, + }, + { + name: "multiple tags", + data: mappedResults{ + mappedResult{"Name": "Action"}, + mappedResult{"Name": "Drama"}, + mappedResult{"Name": "Comedy"}, + }, + expectedCount: 3, + expectedNames: []string{"Action", "Drama", "Comedy"}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + tags := test.data.scrapedTags() + if test.expectedCount == 0 { + assert.Nil(t, tags) + } else { + assert.NotNil(t, tags) + assert.Equal(t, test.expectedCount, len(tags)) + for i, expectedName := range test.expectedNames { + assert.Equal(t, expectedName, tags[i].Name) + } + } + }) + } +} + +// Test scrapedPerformer method +func TestMappedResultScrapedPerformer(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, p *models.ScrapedPerformer) + }{ + { + name: "full performer", + data: mappedResult{ + "Name": "Jane Doe", + "Disambiguation": "Actress", + "Gender": "Female", + "URL": 
"https://example.com/jane", + "URLs": []string{"url1", "url2"}, + "Twitter": "@jane", + "Birthdate": "1990-01-01", + "Ethnicity": "Caucasian", + "Country": "USA", + "EyeColor": "Blue", + "Height": "5'6\"", + "Measurements": "36-24-36", + "FakeTits": "No", + "PenisLength": "N/A", + "Circumcised": "N/A", + "CareerLength": "10 years", + "Tattoos": "Yes", + "Piercings": "Yes", + "Aliases": "Jane Smith", + "Image": "image.jpg", + "Images": []string{"img1", "img2"}, + "Details": "Some details", + "DeathDate": "N/A", + "HairColor": "Blonde", + "Weight": "130 lbs", + }, + validate: func(t *testing.T, p *models.ScrapedPerformer) { + assert.NotNil(t, p) + assert.Equal(t, "Jane Doe", *p.Name) + assert.Equal(t, "Actress", *p.Disambiguation) + assert.Equal(t, "Female", *p.Gender) + assert.Equal(t, "https://example.com/jane", *p.URL) + assert.Equal(t, []string{"url1", "url2"}, p.URLs) + assert.Equal(t, "@jane", *p.Twitter) + assert.Equal(t, "Blonde", *p.HairColor) + assert.Equal(t, "130 lbs", *p.Weight) + }, + }, + { + name: "minimal performer", + data: mappedResult{}, + validate: func(t *testing.T, p *models.ScrapedPerformer) { + assert.NotNil(t, p) + assert.Nil(t, p.Name) + assert.Nil(t, p.Gender) + assert.Empty(t, p.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + performer := test.data.scrapedPerformer() + test.validate(t, performer) + }) + } +} + +// Test scrapedPerformers method +func TestMappedResultsScrapedPerformers(t *testing.T) { + tests := []struct { + name string + data mappedResults + expectedCount int + }{ + { + name: "empty results", + data: mappedResults{}, + expectedCount: 0, + }, + { + name: "single performer", + data: mappedResults{ + mappedResult{"Name": "Jane Doe"}, + }, + expectedCount: 1, + }, + { + name: "multiple performers", + data: mappedResults{ + mappedResult{"Name": "Jane Doe"}, + mappedResult{"Name": "John Doe"}, + mappedResult{"Name": "Alice"}, + }, + expectedCount: 3, + }, + } + + for _, test := 
range tests { + t.Run(test.name, func(t *testing.T) { + performers := test.data.scrapedPerformers() + if test.expectedCount == 0 { + assert.Nil(t, performers) + } else { + assert.NotNil(t, performers) + assert.Equal(t, test.expectedCount, len(performers)) + } + }) + } +} + +// Test scrapedScene method +func TestMappedResultScrapedScene(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, s *models.ScrapedScene) + }{ + { + name: "full scene", + data: mappedResult{ + "Title": "Scene Title", + "Code": "CODE123", + "Details": "Scene details", + "Director": "John Smith", + "URL": "https://example.com/scene", + "URLs": []string{"url1", "url2"}, + "Date": "2020-01-01", + "Image": "scene.jpg", + "Duration": 3600, + }, + validate: func(t *testing.T, s *models.ScrapedScene) { + assert.NotNil(t, s) + assert.Equal(t, "Scene Title", *s.Title) + assert.Equal(t, "CODE123", *s.Code) + assert.Equal(t, "Scene details", *s.Details) + assert.Equal(t, "John Smith", *s.Director) + assert.Equal(t, "https://example.com/scene", *s.URL) + assert.Equal(t, []string{"url1", "url2"}, s.URLs) + assert.Equal(t, "2020-01-01", *s.Date) + assert.Equal(t, "scene.jpg", *s.Image) + assert.Equal(t, 3600, *s.Duration) + }, + }, + { + name: "minimal scene", + data: mappedResult{}, + validate: func(t *testing.T, s *models.ScrapedScene) { + assert.NotNil(t, s) + assert.Nil(t, s.Title) + assert.Nil(t, s.Duration) + assert.Empty(t, s.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + scene := test.data.scrapedScene() + test.validate(t, scene) + }) + } +} + +// Test scrapedImage method +func TestMappedResultScrapedImage(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, i *models.ScrapedImage) + }{ + { + name: "full image", + data: mappedResult{ + "Title": "Image Title", + "Code": "IMG123", + "Details": "Image details", + "Photographer": "Jane Photographer", + "URLs": 
[]string{"url1", "url2"}, + "Date": "2020-06-15", + }, + validate: func(t *testing.T, i *models.ScrapedImage) { + assert.NotNil(t, i) + assert.Equal(t, "Image Title", *i.Title) + assert.Equal(t, "IMG123", *i.Code) + assert.Equal(t, "Image details", *i.Details) + assert.Equal(t, "Jane Photographer", *i.Photographer) + assert.Equal(t, []string{"url1", "url2"}, i.URLs) + assert.Equal(t, "2020-06-15", *i.Date) + }, + }, + { + name: "minimal image", + data: mappedResult{}, + validate: func(t *testing.T, i *models.ScrapedImage) { + assert.NotNil(t, i) + assert.Nil(t, i.Title) + assert.Empty(t, i.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + image := test.data.scrapedImage() + test.validate(t, image) + }) + } +} + +// Test scrapedGallery method +func TestMappedResultScrapedGallery(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, g *models.ScrapedGallery) + }{ + { + name: "full gallery", + data: mappedResult{ + "Title": "Gallery Title", + "Code": "GAL123", + "Details": "Gallery details", + "Photographer": "Jane Photographer", + "URL": "https://example.com/gallery", + "URLs": []string{"url1", "url2"}, + "Date": "2020-07-20", + }, + validate: func(t *testing.T, g *models.ScrapedGallery) { + assert.NotNil(t, g) + assert.Equal(t, "Gallery Title", *g.Title) + assert.Equal(t, "GAL123", *g.Code) + assert.Equal(t, "Gallery details", *g.Details) + assert.Equal(t, "Jane Photographer", *g.Photographer) + assert.Equal(t, "https://example.com/gallery", *g.URL) + assert.Equal(t, []string{"url1", "url2"}, g.URLs) + assert.Equal(t, "2020-07-20", *g.Date) + }, + }, + { + name: "minimal gallery", + data: mappedResult{}, + validate: func(t *testing.T, g *models.ScrapedGallery) { + assert.NotNil(t, g) + assert.Nil(t, g.Title) + assert.Empty(t, g.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + gallery := test.data.scrapedGallery() + 
test.validate(t, gallery) + }) + } +} + +// Test scrapedStudio method +func TestMappedResultScrapedStudio(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, st *models.ScrapedStudio) + }{ + { + name: "full studio", + data: mappedResult{ + "Name": "Studio Name", + "URL": "https://example.com/studio", + "URLs": []string{"url1", "url2"}, + "Image": "studio.jpg", + "Details": "Studio details", + "Aliases": "Studio Alias", + }, + validate: func(t *testing.T, st *models.ScrapedStudio) { + assert.NotNil(t, st) + assert.Equal(t, "Studio Name", st.Name) + assert.Equal(t, "https://example.com/studio", *st.URL) + assert.Equal(t, []string{"url1", "url2"}, st.URLs) + assert.Equal(t, "studio.jpg", *st.Image) + assert.Equal(t, "Studio details", *st.Details) + assert.Equal(t, "Studio Alias", *st.Aliases) + }, + }, + { + name: "minimal studio", + data: mappedResult{}, + validate: func(t *testing.T, st *models.ScrapedStudio) { + assert.NotNil(t, st) + assert.Equal(t, "", st.Name) // mustString returns empty string + assert.Nil(t, st.URL) + assert.Empty(t, st.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + studio := test.data.scrapedStudio() + test.validate(t, studio) + }) + } +} + +// Test scrapedMovie method +func TestMappedResultScrapedMovie(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, m *models.ScrapedMovie) + }{ + { + name: "full movie", + data: mappedResult{ + "Name": "Movie Title", + "Aliases": "Movie Alias", + "URLs": []string{"url1", "url2"}, + "Duration": "120 minutes", + "Date": "2020-05-10", + "Director": "John Director", + "Synopsis": "Movie synopsis", + "FrontImage": "front.jpg", + "BackImage": "back.jpg", + }, + validate: func(t *testing.T, m *models.ScrapedMovie) { + assert.NotNil(t, m) + assert.Equal(t, "Movie Title", *m.Name) + assert.Equal(t, "Movie Alias", *m.Aliases) + assert.Equal(t, []string{"url1", "url2"}, 
m.URLs) + assert.Equal(t, "120 minutes", *m.Duration) + assert.Equal(t, "2020-05-10", *m.Date) + assert.Equal(t, "John Director", *m.Director) + assert.Equal(t, "Movie synopsis", *m.Synopsis) + assert.Equal(t, "front.jpg", *m.FrontImage) + assert.Equal(t, "back.jpg", *m.BackImage) + }, + }, + { + name: "minimal movie", + data: mappedResult{}, + validate: func(t *testing.T, m *models.ScrapedMovie) { + assert.NotNil(t, m) + assert.Nil(t, m.Name) + assert.Empty(t, m.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + movie := test.data.scrapedMovie() + test.validate(t, movie) + }) + } +} + +// Test scrapedMovies method +func TestMappedResultsScrapedMovies(t *testing.T) { + tests := []struct { + name string + data mappedResults + expectedCount int + }{ + { + name: "empty results", + data: mappedResults{}, + expectedCount: 0, + }, + { + name: "single movie", + data: mappedResults{ + mappedResult{"Name": "Movie 1"}, + }, + expectedCount: 1, + }, + { + name: "multiple movies", + data: mappedResults{ + mappedResult{"Name": "Movie 1"}, + mappedResult{"Name": "Movie 2"}, + mappedResult{"Name": "Movie 3"}, + }, + expectedCount: 3, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + movies := test.data.scrapedMovies() + if test.expectedCount == 0 { + assert.Nil(t, movies) + } else { + assert.NotNil(t, movies) + assert.Equal(t, test.expectedCount, len(movies)) + } + }) + } +} + +// Test scrapedGroup method +func TestMappedResultScrapedGroup(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, g *models.ScrapedGroup) + }{ + { + name: "full group", + data: mappedResult{ + "Name": "Group Title", + "Aliases": "Group Alias", + "URL": "https://example.com/group", + "URLs": []string{"url1", "url2"}, + "Duration": "240 minutes", + "Date": "2020-08-15", + "Director": "Jane Director", + "Synopsis": "Group synopsis", + "FrontImage": "front.jpg", + "BackImage": 
"back.jpg", + }, + validate: func(t *testing.T, g *models.ScrapedGroup) { + assert.NotNil(t, g) + assert.Equal(t, "Group Title", *g.Name) + assert.Equal(t, "Group Alias", *g.Aliases) + assert.Equal(t, "https://example.com/group", *g.URL) + assert.Equal(t, []string{"url1", "url2"}, g.URLs) + assert.Equal(t, "240 minutes", *g.Duration) + assert.Equal(t, "2020-08-15", *g.Date) + assert.Equal(t, "Jane Director", *g.Director) + assert.Equal(t, "Group synopsis", *g.Synopsis) + assert.Equal(t, "front.jpg", *g.FrontImage) + assert.Equal(t, "back.jpg", *g.BackImage) + }, + }, + { + name: "minimal group", + data: mappedResult{}, + validate: func(t *testing.T, g *models.ScrapedGroup) { + assert.NotNil(t, g) + assert.Nil(t, g.Name) + assert.Empty(t, g.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + group := test.data.scrapedGroup() + test.validate(t, group) + }) + } +} + +// Test scrapedGroups method +func TestMappedResultsScrapedGroups(t *testing.T) { + tests := []struct { + name string + data mappedResults + expectedCount int + }{ + { + name: "empty results", + data: mappedResults{}, + expectedCount: 0, + }, + { + name: "single group", + data: mappedResults{ + mappedResult{"Name": "Group 1"}, + }, + expectedCount: 1, + }, + { + name: "multiple groups", + data: mappedResults{ + mappedResult{"Name": "Group 1"}, + mappedResult{"Name": "Group 2"}, + mappedResult{"Name": "Group 3"}, + }, + expectedCount: 3, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + groups := test.data.scrapedGroups() + if test.expectedCount == 0 { + assert.Nil(t, groups) + } else { + assert.NotNil(t, groups) + assert.Equal(t, test.expectedCount, len(groups)) + } + }) + } +} + +// Helper functions +func strPtr(s string) *string { + return &s +} + +func intPtr(i int) *int { + return &i +} diff --git a/pkg/scraper/mapped_test.go b/pkg/scraper/mapped_test.go index 5f44e17af..667bb8385 100644 --- a/pkg/scraper/mapped_test.go +++ 
b/pkg/scraper/mapped_test.go @@ -25,7 +25,7 @@ xPathScrapers: - anything ` - c := &config{} + c := &Definition{} err := yaml.Unmarshal([]byte(yamlStr), &c) if err == nil { diff --git a/pkg/scraper/performer.go b/pkg/scraper/performer.go index 98e931762..4684a6683 100644 --- a/pkg/scraper/performer.go +++ b/pkg/scraper/performer.go @@ -20,6 +20,8 @@ type ScrapedPerformerInput struct { PenisLength *string `json:"penis_length"` Circumcised *string `json:"circumcised"` CareerLength *string `json:"career_length"` + CareerStart *int `json:"career_start"` + CareerEnd *int `json:"career_end"` Tattoos *string `json:"tattoos"` Piercings *string `json:"piercings"` Aliases *string `json:"aliases"` diff --git a/pkg/scraper/postprocessing.go b/pkg/scraper/postprocessing.go index c2653743a..8a4d4de7d 100644 --- a/pkg/scraper/postprocessing.go +++ b/pkg/scraper/postprocessing.go @@ -125,6 +125,20 @@ func (c *postScraper) postScrapePerformer(ctx context.Context, p models.ScrapedP } } + isEmptyStr := func(s *string) bool { return s == nil || *s == "" } + isEmptyInt := func(s *int) bool { return s == nil || *s == 0 } + + // populate career start/end from career length and vice versa + if !isEmptyStr(p.CareerLength) && isEmptyInt(p.CareerStart) && isEmptyInt(p.CareerEnd) { + p.CareerStart, p.CareerEnd, err = utils.ParseYearRangeString(*p.CareerLength) + if err != nil { + logger.Warnf("Could not parse career length %s: %v", *p.CareerLength, err) + } + } else if isEmptyStr(p.CareerLength) && (!isEmptyInt(p.CareerStart) || !isEmptyInt(p.CareerEnd)) { + v := utils.FormatYearRange(p.CareerStart, p.CareerEnd) + p.CareerLength = &v + } + return p, nil } diff --git a/pkg/scraper/query_url.go b/pkg/scraper/query_url.go index 91adb7d67..2cd9f683e 100644 --- a/pkg/scraper/query_url.go +++ b/pkg/scraper/query_url.go @@ -110,7 +110,7 @@ func (p queryURLParameters) constructURL(url string) string { } // replaceURL does a partial URL Replace ( only url parameter is used) -func replaceURL(url string, 
scraperConfig scraperTypeConfig) string { +func replaceURL(url string, scraperConfig ByURLDefinition) string { u := url queryURL := queryURLParameterFromURL(u) if scraperConfig.QueryURLReplacements != nil { diff --git a/pkg/scraper/script.go b/pkg/scraper/script.go index 866c92365..f8e47b5d8 100644 --- a/pkg/scraper/script.go +++ b/pkg/scraper/script.go @@ -208,22 +208,11 @@ func galleryInputFromGallery(gallery *models.Gallery) galleryInput { var ErrScraperScript = errors.New("scraper script error") type scriptScraper struct { - scraper scraperTypeConfig - config config + definition Definition globalConfig GlobalConfig } -func newScriptScraper(scraper scraperTypeConfig, config config, globalConfig GlobalConfig) *scriptScraper { - return &scriptScraper{ - scraper: scraper, - config: config, - globalConfig: globalConfig, - } -} - -func (s *scriptScraper) runScraperScript(ctx context.Context, inString string, out interface{}) error { - command := s.scraper.Script - +func (s *scriptScraper) runScraperScript(ctx context.Context, command []string, inString string, out interface{}) error { var cmd *exec.Cmd if python.IsPythonCommand(command[0]) { pythonPath := s.globalConfig.GetPythonPath() @@ -233,7 +222,7 @@ func (s *scriptScraper) runScraperScript(ctx context.Context, inString string, o logger.Warnf("%s", err) } else { cmd = p.Command(ctx, command[1:]) - envVariable, _ := filepath.Abs(filepath.Dir(filepath.Dir(s.config.path))) + envVariable, _ := filepath.Abs(filepath.Dir(filepath.Dir(s.definition.path))) python.AppendPythonPath(cmd, envVariable) } } @@ -243,7 +232,7 @@ func (s *scriptScraper) runScraperScript(ctx context.Context, inString string, o cmd = stashExec.CommandContext(ctx, command[0], command[1:]...) 
} - cmd.Dir = filepath.Dir(s.config.path) + cmd.Dir = filepath.Dir(s.definition.path) stdin, err := cmd.StdinPipe() if err != nil { @@ -273,7 +262,7 @@ func (s *scriptScraper) runScraperScript(ctx context.Context, inString string, o return errors.New("error running scraper script") } - go handleScraperStderr(s.config.Name, stderr) + go handleScraperStderr(s.definition.Name, stderr) logger.Debugf("Scraper script <%s> started", strings.Join(cmd.Args, " ")) @@ -312,7 +301,39 @@ func (s *scriptScraper) runScraperScript(ctx context.Context, inString string, o return nil } -func (s *scriptScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) { +func (s *scriptScraper) scrape(ctx context.Context, command []string, input string, ty ScrapeContentType) (ScrapedContent, error) { + switch ty { + case ScrapeContentTypePerformer: + var performer *models.ScrapedPerformer + err := s.runScraperScript(ctx, command, input, &performer) + return performer, err + case ScrapeContentTypeGallery: + var gallery *models.ScrapedGallery + err := s.runScraperScript(ctx, command, input, &gallery) + return gallery, err + case ScrapeContentTypeScene: + var scene *models.ScrapedScene + err := s.runScraperScript(ctx, command, input, &scene) + return scene, err + case ScrapeContentTypeMovie, ScrapeContentTypeGroup: + var movie *models.ScrapedMovie + err := s.runScraperScript(ctx, command, input, &movie) + return movie, err + case ScrapeContentTypeImage: + var image *models.ScrapedImage + err := s.runScraperScript(ctx, command, input, &image) + return image, err + } + + return nil, ErrNotSupported +} + +type scriptNameScraper struct { + scriptScraper + definition ByNameDefinition +} + +func (s *scriptNameScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) { input := `{"name": "` + name + `"}` var ret []ScrapedContent @@ -320,7 +341,7 @@ func (s *scriptScraper) scrapeByName(ctx context.Context, name 
string, ty Scrape switch ty { case ScrapeContentTypePerformer: var performers []models.ScrapedPerformer - err = s.runScraperScript(ctx, input, &performers) + err = s.runScraperScript(ctx, s.definition.Script, input, &performers) if err == nil { for _, p := range performers { v := p @@ -329,7 +350,7 @@ func (s *scriptScraper) scrapeByName(ctx context.Context, name string, ty Scrape } case ScrapeContentTypeScene: var scenes []models.ScrapedScene - err = s.runScraperScript(ctx, input, &scenes) + err = s.runScraperScript(ctx, s.definition.Script, input, &scenes) if err == nil { for _, s := range scenes { v := s @@ -343,7 +364,21 @@ func (s *scriptScraper) scrapeByName(ctx context.Context, name string, ty Scrape return ret, err } -func (s *scriptScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { +type scriptURLScraper struct { + scriptScraper + definition ByURLDefinition +} + +func (s *scriptURLScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) { + return s.scrape(ctx, s.definition.Script, `{"url": "`+url+`"}`, ty) +} + +type scriptFragmentScraper struct { + scriptScraper + definition ByFragmentDefinition +} + +func (s *scriptFragmentScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { var inString []byte var err error var ty ScrapeContentType @@ -363,41 +398,10 @@ func (s *scriptScraper) scrapeByFragment(ctx context.Context, input Input) (Scra return nil, err } - return s.scrape(ctx, string(inString), ty) + return s.scrape(ctx, s.definition.Script, string(inString), ty) } -func (s *scriptScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) { - return s.scrape(ctx, `{"url": "`+url+`"}`, ty) -} - -func (s *scriptScraper) scrape(ctx context.Context, input string, ty ScrapeContentType) (ScrapedContent, error) { - switch ty { - case ScrapeContentTypePerformer: - var performer *models.ScrapedPerformer - err := 
s.runScraperScript(ctx, input, &performer) - return performer, err - case ScrapeContentTypeGallery: - var gallery *models.ScrapedGallery - err := s.runScraperScript(ctx, input, &gallery) - return gallery, err - case ScrapeContentTypeScene: - var scene *models.ScrapedScene - err := s.runScraperScript(ctx, input, &scene) - return scene, err - case ScrapeContentTypeMovie, ScrapeContentTypeGroup: - var movie *models.ScrapedMovie - err := s.runScraperScript(ctx, input, &movie) - return movie, err - case ScrapeContentTypeImage: - var image *models.ScrapedImage - err := s.runScraperScript(ctx, input, &image) - return image, err - } - - return nil, ErrNotSupported -} - -func (s *scriptScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) { +func (s *scriptFragmentScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) { inString, err := json.Marshal(sceneInputFromScene(scene)) if err != nil { @@ -406,12 +410,12 @@ func (s *scriptScraper) scrapeSceneByScene(ctx context.Context, scene *models.Sc var ret *models.ScrapedScene - err = s.runScraperScript(ctx, string(inString), &ret) + err = s.runScraperScript(ctx, s.definition.Script, string(inString), &ret) return ret, err } -func (s *scriptScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) { +func (s *scriptFragmentScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) { inString, err := json.Marshal(galleryInputFromGallery(gallery)) if err != nil { @@ -420,12 +424,12 @@ func (s *scriptScraper) scrapeGalleryByGallery(ctx context.Context, gallery *mod var ret *models.ScrapedGallery - err = s.runScraperScript(ctx, string(inString), &ret) + err = s.runScraperScript(ctx, s.definition.Script, string(inString), &ret) return ret, err } -func (s *scriptScraper) scrapeImageByImage(ctx context.Context, image *models.Image) 
(*models.ScrapedImage, error) { +func (s *scriptFragmentScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) { inString, err := json.Marshal(imageToUpdateInput(image)) if err != nil { @@ -434,7 +438,7 @@ func (s *scriptScraper) scrapeImageByImage(ctx context.Context, image *models.Im var ret *models.ScrapedImage - err = s.runScraperScript(ctx, string(inString), &ret) + err = s.runScraperScript(ctx, s.definition.Script, string(inString), &ret) return ret, err } diff --git a/pkg/scraper/stash.go b/pkg/scraper/stash.go index 5c5cab9fc..23c4b9063 100644 --- a/pkg/scraper/stash.go +++ b/pkg/scraper/stash.go @@ -14,15 +14,13 @@ import ( ) type stashScraper struct { - scraper scraperTypeConfig - config config + config Definition globalConfig GlobalConfig client *http.Client } -func newStashScraper(scraper scraperTypeConfig, client *http.Client, config config, globalConfig GlobalConfig) *stashScraper { +func newStashScraper(client *http.Client, config Definition, globalConfig GlobalConfig) *stashScraper { return &stashScraper{ - scraper: scraper, config: config, client: client, globalConfig: globalConfig, diff --git a/pkg/scraper/url.go b/pkg/scraper/url.go index b53d7b27f..d036ae68e 100644 --- a/pkg/scraper/url.go +++ b/pkg/scraper/url.go @@ -25,8 +25,8 @@ import ( const scrapeDefaultSleep = time.Second * 2 -func loadURL(ctx context.Context, loadURL string, client *http.Client, scraperConfig config, globalConfig GlobalConfig) (io.Reader, error) { - driverOptions := scraperConfig.DriverOptions +func loadURL(ctx context.Context, loadURL string, client *http.Client, def Definition, globalConfig GlobalConfig) (io.Reader, error) { + driverOptions := def.DriverOptions if driverOptions != nil && driverOptions.UseCDP { // get the page using chrome dp return urlFromCDP(ctx, loadURL, *driverOptions, globalConfig) @@ -37,7 +37,7 @@ func loadURL(ctx context.Context, loadURL string, client *http.Client, scraperCo return nil, err } - jar, 
err := scraperConfig.jar() + jar, err := def.jar() if err != nil { return nil, fmt.Errorf("error creating cookie jar: %w", err) } @@ -83,7 +83,7 @@ func loadURL(ctx context.Context, loadURL string, client *http.Client, scraperCo } bodyReader := bytes.NewReader(body) - printCookies(jar, scraperConfig, "Jar cookies found for scraper urls") + printCookies(jar, def, "Jar cookies found for scraper urls") return charset.NewReader(bodyReader, resp.Header.Get("Content-Type")) } diff --git a/pkg/scraper/xpath.go b/pkg/scraper/xpath.go index 5f7b76372..bf70869e8 100644 --- a/pkg/scraper/xpath.go +++ b/pkg/scraper/xpath.go @@ -3,7 +3,6 @@ package scraper import ( "bytes" "context" - "errors" "fmt" "net/http" "net/url" @@ -19,49 +18,36 @@ import ( ) type xpathScraper struct { - scraper scraperTypeConfig - config config + definition Definition globalConfig GlobalConfig client *http.Client } -func newXpathScraper(scraper scraperTypeConfig, client *http.Client, config config, globalConfig GlobalConfig) *xpathScraper { - return &xpathScraper{ - scraper: scraper, - config: config, - globalConfig: globalConfig, - client: client, +func (s *xpathScraper) getXpathScraper(name string) (*mappedScraper, error) { + ret, ok := s.definition.XPathScrapers[name] + if !ok { + return nil, fmt.Errorf("xpath scraper with name %s not found in config", name) } + return &ret, nil } -func (s *xpathScraper) getXpathScraper() *mappedScraper { - return s.config.XPathScrapers[s.scraper.Scraper] +type xpathURLScraper struct { + xpathScraper + definition ByURLDefinition } -func (s *xpathScraper) scrapeURL(ctx context.Context, url string) (*html.Node, *mappedScraper, error) { - scraper := s.getXpathScraper() - - if scraper == nil { - return nil, nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") - } - - doc, err := s.loadURL(ctx, url) - - if err != nil { - return nil, nil, err - } - - return doc, scraper, nil -} - -func (s *xpathScraper) scrapeByURL(ctx context.Context, 
url string, ty ScrapeContentType) (ScrapedContent, error) { - u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries - doc, scraper, err := s.scrapeURL(ctx, u) +func (s *xpathURLScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) { + scraper, err := s.getXpathScraper(s.definition.Scraper) if err != nil { return nil, err } - q := s.getXPathQuery(doc, u) + doc, err := s.loadURL(ctx, url) + if err != nil { + return nil, err + } + + q := s.getXPathQuery(doc, url) // if these just return the return values from scraper.scrape* functions then // it ends up returning ScrapedContent(nil) rather than nil switch ty { @@ -100,11 +86,15 @@ func (s *xpathScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeCon return nil, ErrNotSupported } -func (s *xpathScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) { - scraper := s.getXpathScraper() +type xpathNameScraper struct { + xpathScraper + definition ByNameDefinition +} - if scraper == nil { - return nil, fmt.Errorf("%w: name %v", ErrNotFound, s.scraper.Scraper) +func (s *xpathNameScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) { + scraper, err := s.getXpathScraper(s.definition.Scraper) + if err != nil { + return nil, err } const placeholder = "{}" @@ -112,7 +102,7 @@ func (s *xpathScraper) scrapeByName(ctx context.Context, name string, ty ScrapeC // replace the placeholder string with the URL-escaped name escapedName := url.QueryEscape(name) - url := s.scraper.QueryURL + url := s.definition.QueryURL url = strings.ReplaceAll(url, placeholder, escapedName) doc, err := s.loadURL(ctx, url) @@ -151,18 +141,22 @@ func (s *xpathScraper) scrapeByName(ctx context.Context, name string, ty ScrapeC return nil, ErrNotSupported } -func (s *xpathScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) { +type 
xpathFragmentScraper struct { + xpathScraper + definition ByFragmentDefinition +} + +func (s *xpathFragmentScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) { // construct the URL queryURL := queryURLParametersFromScene(scene) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getXpathScraper() - - if scraper == nil { - return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getXpathScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -175,7 +169,7 @@ func (s *xpathScraper) scrapeSceneByScene(ctx context.Context, scene *models.Sce return scraper.scrapeScene(ctx, q) } -func (s *xpathScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { +func (s *xpathFragmentScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { switch { case input.Gallery != nil: return nil, fmt.Errorf("%w: cannot use an xpath scraper as a gallery fragment scraper", ErrNotSupported) @@ -189,15 +183,14 @@ func (s *xpathScraper) scrapeByFragment(ctx context.Context, input Input) (Scrap // construct the URL queryURL := queryURLParametersFromScrapedScene(scene) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getXpathScraper() - - if scraper == nil { - return nil, errors.New("xpath scraper with name " + 
s.scraper.Scraper + " not found in config") + scraper, err := s.getXpathScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -210,18 +203,17 @@ func (s *xpathScraper) scrapeByFragment(ctx context.Context, input Input) (Scrap return scraper.scrapeScene(ctx, q) } -func (s *xpathScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) { +func (s *xpathFragmentScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) { // construct the URL queryURL := queryURLParametersFromGallery(gallery) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getXpathScraper() - - if scraper == nil { - return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getXpathScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -234,18 +226,17 @@ func (s *xpathScraper) scrapeGalleryByGallery(ctx context.Context, gallery *mode return scraper.scrapeGallery(ctx, q) } -func (s *xpathScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) { +func (s *xpathFragmentScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) { // construct the URL queryURL := queryURLParametersFromImage(image) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := 
queryURL.constructURL(s.definition.QueryURL) - scraper := s.getXpathScraper() - - if scraper == nil { - return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getXpathScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -259,14 +250,14 @@ func (s *xpathScraper) scrapeImageByImage(ctx context.Context, image *models.Ima } func (s *xpathScraper) loadURL(ctx context.Context, url string) (*html.Node, error) { - r, err := loadURL(ctx, url, s.client, s.config, s.globalConfig) + r, err := loadURL(ctx, url, s.client, s.definition, s.globalConfig) if err != nil { return nil, fmt.Errorf("failed to load URL %q: %w", url, err) } ret, err := html.Parse(r) - if err == nil && s.config.DebugOptions != nil && s.config.DebugOptions.PrintHTML { + if err == nil && s.definition.DebugOptions != nil && s.definition.DebugOptions.PrintHTML { var b bytes.Buffer if err := html.Render(&b, ret); err != nil { logger.Warnf("could not render HTML: %v", err) diff --git a/pkg/scraper/xpath_test.go b/pkg/scraper/xpath_test.go index 391f60728..42ee2227b 100644 --- a/pkg/scraper/xpath_test.go +++ b/pkg/scraper/xpath_test.go @@ -674,10 +674,10 @@ func verifyPerformers(t *testing.T, expectedNames []string, expectedURLs []strin } if expectedName != actualName { - t.Errorf("Expected performer name %s, got %s", expectedName, actualName) + t.Errorf("Expected performer name %q, got %q", expectedName, actualName) } if expectedURL != actualURL { - t.Errorf("Expected performer URL %s, got %s", expectedName, actualName) + t.Errorf("Expected performer URL %q, got %q", expectedURL, actualURL) } i++ } @@ -780,7 +780,7 @@ xPathScrapers: Name: //studio ` - c := &config{} + c := &Definition{} err := yaml.Unmarshal([]byte(yamlStr), &c) if err != nil { @@ -892,7 +892,7 @@ xPathScrapers: selector: //span ` - c := &config{} + c := &Definition{} err := yaml.Unmarshal([]byte(yamlStr), &c) if err != nil { @@ -904,12 
+904,8 @@ xPathScrapers: client := &http.Client{} ctx := context.Background() - s := newGroupScraper(*c, globalConfig) - us, ok := s.(urlScraper) - if !ok { - t.Error("couldn't convert scraper into url scraper") - } - content, err := us.viaURL(ctx, client, ts.URL, ScrapeContentTypePerformer) + s := scraperFromDefinition(*c, globalConfig) + content, err := s.viaURL(ctx, client, ts.URL, ScrapeContentTypePerformer) if err != nil { t.Errorf("Error scraping performer: %s", err.Error()) diff --git a/pkg/sliceutil/stringslice/string_collections.go b/pkg/sliceutil/stringslice/string_collections.go index f5251de5f..eff3409e2 100644 --- a/pkg/sliceutil/stringslice/string_collections.go +++ b/pkg/sliceutil/stringslice/string_collections.go @@ -45,6 +45,23 @@ func UniqueFold(s []string) []string { return ret } +// UniqueExcludeFold returns a deduplicated slice of strings with the excluded string removed. +// The comparison is case-insensitive. +func UniqueExcludeFold(values []string, exclude string) []string { + seen := make(map[string]struct{}, len(values)) + seen[strings.ToLower(exclude)] = struct{}{} + ret := make([]string, 0, len(values)) + for _, v := range values { + vLower := strings.ToLower(v) + if _, exists := seen[vLower]; exists { + continue + } + seen[vLower] = struct{}{} + ret = append(ret, v) + } + return ret +} + // TrimSpace trims whitespace from each string in a slice. 
func TrimSpace(s []string) []string { for i, v := range s { diff --git a/pkg/sqlite/anonymise.go b/pkg/sqlite/anonymise.go index 764f569c0..e0a354980 100644 --- a/pkg/sqlite/anonymise.go +++ b/pkg/sqlite/anonymise.go @@ -332,6 +332,10 @@ func (db *Anonymiser) anonymiseScenes(ctx context.Context) error { return err } + if err := db.anonymiseCustomFields(ctx, goqu.T(scenesCustomFieldsTable.GetTable()), "scene_id"); err != nil { + return err + } + return nil } @@ -678,6 +682,10 @@ func (db *Anonymiser) anonymiseStudios(ctx context.Context) error { return err } + if err := db.anonymiseCustomFields(ctx, goqu.T(studiosCustomFieldsTable.GetTable()), "studio_id"); err != nil { + return err + } + return nil } @@ -873,6 +881,10 @@ func (db *Anonymiser) anonymiseTags(ctx context.Context) error { return err } + if err := db.anonymiseCustomFields(ctx, goqu.T(tagsCustomFieldsTable.GetTable()), "tag_id"); err != nil { + return err + } + return nil } diff --git a/pkg/sqlite/criterion_handlers.go b/pkg/sqlite/criterion_handlers.go index 6fe9c7ce9..1496df71d 100644 --- a/pkg/sqlite/criterion_handlers.go +++ b/pkg/sqlite/criterion_handlers.go @@ -1126,3 +1126,40 @@ func (h *relatedFilterHandler) handle(ctx context.Context, f *filterBuilder) { f.addWhere(fmt.Sprintf("%s IN ("+subQuery.toSQL(false)+")", h.relatedIDCol), subQuery.args...) 
} + +type phashDistanceCriterionHandler struct { + // assumes that applicable fingerprints table is joined as fingerprints_phash + joinFn func(f *filterBuilder) + criterion *models.PhashDistanceCriterionInput +} + +func (h *phashDistanceCriterionHandler) handle(ctx context.Context, f *filterBuilder) { + phashDistance := h.criterion + if phashDistance == nil { + return + } + + h.joinFn(f) + + value, _ := utils.StringToPhash(phashDistance.Value) + distance := 0 + if phashDistance.Distance != nil { + distance = *phashDistance.Distance + } + + switch { + case phashDistance.Modifier == models.CriterionModifierEquals && distance > 0: + // needed to avoid a type mismatch + f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") + f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) < ?", value, distance) + case phashDistance.Modifier == models.CriterionModifierNotEquals && distance > 0: + // needed to avoid a type mismatch + f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") + f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) 
> ?", value, distance) + default: + intCriterionHandler(&models.IntCriterionInput{ + Value: int(value), + Modifier: phashDistance.Modifier, + }, "fingerprints_phash.fingerprint", nil)(ctx, f) + } +} diff --git a/pkg/sqlite/custom_fields_test.go b/pkg/sqlite/custom_fields_test.go index 8ee154aec..a2c045851 100644 --- a/pkg/sqlite/custom_fields_test.go +++ b/pkg/sqlite/custom_fields_test.go @@ -11,11 +11,23 @@ import ( "github.com/stretchr/testify/assert" ) -func TestSetCustomFields(t *testing.T) { - performerIdx := performerIdx1WithScene +type customFieldsReaderWriter interface { + models.CustomFieldsReader + models.CustomFieldsWriter +} + +func testSetCustomFields(t *testing.T, namePrefix string, store customFieldsReaderWriter, id int, origCustomFields map[string]interface{}) { + getCustomFields := func() map[string]interface{} { + m := make(map[string]interface{}) + for k, v := range origCustomFields { + m[k] = v + } + return m + } mergeCustomFields := func(i map[string]interface{}) map[string]interface{} { - m := getPerformerCustomFields(performerIdx) + m := getCustomFields() + for k, v := range i { m[k] = v } @@ -70,7 +82,7 @@ func TestSetCustomFields(t *testing.T) { Remove: []string{"real"}, }, func() map[string]interface{} { - m := getPerformerCustomFields(performerIdx) + m := getCustomFields() delete(m, "real") return m }(), @@ -180,12 +192,8 @@ func TestSetCustomFields(t *testing.T) { }, } - // use performer custom fields store - store := db.Performer - id := performerIDs[performerIdx] - for _, tt := range tests { - runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + runWithRollbackTxn(t, namePrefix+" "+tt.name, func(t *testing.T, ctx context.Context) { assert := assert.New(t) err := store.SetCustomFields(ctx, id, tt.input) @@ -208,3 +216,27 @@ func TestSetCustomFields(t *testing.T) { }) } } + +func TestPerformerSetCustomFields(t *testing.T) { + performerIdx := performerIdx1WithScene + + testSetCustomFields(t, "Performer", 
db.Performer, performerIDs[performerIdx], getPerformerCustomFields(performerIdx)) +} + +func TestTagSetCustomFields(t *testing.T) { + tagIdx := tagIdx1WithScene + + testSetCustomFields(t, "Tag", db.Tag, tagIDs[tagIdx], getTagCustomFields(tagIdx)) +} + +func TestStudioSetCustomFields(t *testing.T) { + studioIdx := studioIdxWithScene + + testSetCustomFields(t, "Studio", db.Studio, studioIDs[studioIdx], getStudioCustomFields(studioIdx)) +} + +func TestSceneSetCustomFields(t *testing.T) { + sceneIdx := sceneIdxWithPerformer + + testSetCustomFields(t, "Scene", db.Scene, sceneIDs[sceneIdx], getSceneCustomFields(sceneIdx)) +} diff --git a/pkg/sqlite/database.go b/pkg/sqlite/database.go index 0ea3d7170..5b67e5602 100644 --- a/pkg/sqlite/database.go +++ b/pkg/sqlite/database.go @@ -34,7 +34,7 @@ const ( cacheSizeEnv = "STASH_SQLITE_CACHE_SIZE" ) -var appSchemaVersion uint = 75 +var appSchemaVersion uint = 80 //go:embed migrations/*.sql var migrationsBox embed.FS diff --git a/pkg/sqlite/file_filter.go b/pkg/sqlite/file_filter.go index 12c7ba3d5..157efb1d8 100644 --- a/pkg/sqlite/file_filter.go +++ b/pkg/sqlite/file_filter.go @@ -82,7 +82,7 @@ func (qb *fileFilterHandler) criterionHandler() criterionHandler { qb.hashesCriterionHandler(fileFilter.Hashes), - qb.phashDuplicatedCriterionHandler(fileFilter.Duplicated), + qb.duplicatedCriterionHandler(fileFilter.Duplicated), ×tampCriterionHandler{fileFilter.CreatedAt, "files.created_at", nil}, ×tampCriterionHandler{fileFilter.UpdatedAt, "files.updated_at", nil}, @@ -205,17 +205,27 @@ func (qb *fileFilterHandler) galleryCountCriterionHandler(c *models.IntCriterion return h.handler(c) } -func (qb *fileFilterHandler) phashDuplicatedCriterionHandler(duplicatedFilter *models.PHashDuplicationCriterionInput) criterionHandlerFunc { +func (qb *fileFilterHandler) duplicatedCriterionHandler(duplicatedFilter *models.FileDuplicationCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { // TODO: Wishlist 
item: Implement Distance matching - if duplicatedFilter != nil { - var v string - if *duplicatedFilter.Duplicated { - v = ">" - } else { - v = "=" - } + // For files, only phash duplication applies + if duplicatedFilter == nil { + return + } + var phashValue *bool + + // Handle legacy 'duplicated' field for backwards compatibility + //nolint:staticcheck + if duplicatedFilter.Duplicated != nil && duplicatedFilter.Phash == nil { + //nolint:staticcheck + phashValue = duplicatedFilter.Duplicated + } else if duplicatedFilter.Phash != nil { + phashValue = duplicatedFilter.Phash + } + + if phashValue != nil { + v := getCountOperator(*phashValue) f.addInnerJoin("(SELECT file_id FROM files_fingerprints INNER JOIN (SELECT fingerprint FROM files_fingerprints WHERE type = 'phash' GROUP BY fingerprint HAVING COUNT (fingerprint) "+v+" 1) dupes on files_fingerprints.fingerprint = dupes.fingerprint)", "scph", "files.id = scph.file_id") } } diff --git a/pkg/sqlite/group_filter.go b/pkg/sqlite/group_filter.go index f29023785..f81783374 100644 --- a/pkg/sqlite/group_filter.go +++ b/pkg/sqlite/group_filter.go @@ -75,6 +75,7 @@ func (qb *groupFilterHandler) criterionHandler() criterionHandler { qb.tagsCriterionHandler(groupFilter.Tags), qb.tagCountCriterionHandler(groupFilter.TagCount), qb.groupOCounterCriterionHandler(groupFilter.OCounter), + qb.sceneCountCriterionHandler(groupFilter.SceneCount), &dateCriterionHandler{groupFilter.Date, "groups.date", nil}, groupHierarchyHandler.ParentsCriterionHandler(groupFilter.ContainingGroups), groupHierarchyHandler.ChildrenCriterionHandler(groupFilter.SubGroups), @@ -204,6 +205,16 @@ func (qb *groupFilterHandler) tagCountCriterionHandler(count *models.IntCriterio return h.handler(count) } +func (qb *groupFilterHandler) sceneCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: groupTable, + joinTable: groupsScenesTable, + primaryFK: groupIDColumn, + } + + return 
h.handler(count) +} + // used for sorting and filtering on group o-count var selectGroupOCountSQL = utils.StrFormat( "SELECT SUM(o_counter) "+ diff --git a/pkg/sqlite/group_test.go b/pkg/sqlite/group_test.go index d4a177e86..db293dd92 100644 --- a/pkg/sqlite/group_test.go +++ b/pkg/sqlite/group_test.go @@ -669,6 +669,32 @@ func TestGroupQuery(t *testing.T) { nil, false, }, + { + "scene count equals 1", + nil, + &models.GroupFilterType{ + SceneCount: &models.IntCriterionInput{ + Value: 1, + Modifier: models.CriterionModifierEquals, + }, + }, + []int{groupIdxWithScene}, + []int{groupIdxWithParentAndChild}, + false, + }, + { + "scene count less than 1", + nil, + &models.GroupFilterType{ + SceneCount: &models.IntCriterionInput{ + Value: 1, + Modifier: models.CriterionModifierLessThan, + }, + }, + []int{groupIdxWithParentAndChild}, + []int{groupIdxWithScene}, + false, + }, } for _, tt := range tests { diff --git a/pkg/sqlite/image_filter.go b/pkg/sqlite/image_filter.go index 1d119bfde..b56ade26d 100644 --- a/pkg/sqlite/image_filter.go +++ b/pkg/sqlite/image_filter.go @@ -62,6 +62,15 @@ func (qb *imageFilterHandler) criterionHandler() criterionHandler { stringCriterionHandler(imageFilter.Checksum, "fingerprints_md5.fingerprint")(ctx, f) }), + + &phashDistanceCriterionHandler{ + joinFn: func(f *filterBuilder) { + imageRepository.addImagesFilesTable(f) + f.addLeftJoin(fingerprintTable, "fingerprints_phash", "images_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") + }, + criterion: imageFilter.PhashDistance, + }, + stringCriterionHandler(imageFilter.Title, "images.title"), stringCriterionHandler(imageFilter.Code, "images.code"), stringCriterionHandler(imageFilter.Details, "images.details"), diff --git a/pkg/sqlite/migrations/76_studio_custom_fields.up.sql b/pkg/sqlite/migrations/76_studio_custom_fields.up.sql new file mode 100644 index 000000000..81a72d4d4 --- /dev/null +++ b/pkg/sqlite/migrations/76_studio_custom_fields.up.sql @@ -0,0 +1,9 
@@ +CREATE TABLE `studio_custom_fields` ( + `studio_id` integer NOT NULL, + `field` varchar(64) NOT NULL, + `value` BLOB NOT NULL, + PRIMARY KEY (`studio_id`, `field`), + foreign key(`studio_id`) references `studios`(`id`) on delete CASCADE +); + +CREATE INDEX `index_studio_custom_fields_field_value` ON `studio_custom_fields` (`field`, `value`); diff --git a/pkg/sqlite/migrations/77_tag_custom_fields.up.sql b/pkg/sqlite/migrations/77_tag_custom_fields.up.sql new file mode 100644 index 000000000..b34a5f794 --- /dev/null +++ b/pkg/sqlite/migrations/77_tag_custom_fields.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `tag_custom_fields` ( + `tag_id` integer NOT NULL, + `field` varchar(64) NOT NULL, + `value` BLOB NOT NULL, + PRIMARY KEY (`tag_id`, `field`), + foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE +); + +CREATE INDEX `index_tag_custom_fields_field_value` ON `tag_custom_fields` (`field`, `value`); \ No newline at end of file diff --git a/pkg/sqlite/migrations/78_performer_career_dates.up.sql b/pkg/sqlite/migrations/78_performer_career_dates.up.sql new file mode 100644 index 000000000..006d9fae7 --- /dev/null +++ b/pkg/sqlite/migrations/78_performer_career_dates.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE "performers" ADD COLUMN "career_start" integer; +ALTER TABLE "performers" ADD COLUMN "career_end" integer; diff --git a/pkg/sqlite/migrations/78_postmigrate.go b/pkg/sqlite/migrations/78_postmigrate.go new file mode 100644 index 000000000..15d040457 --- /dev/null +++ b/pkg/sqlite/migrations/78_postmigrate.go @@ -0,0 +1,143 @@ +package migrations + +import ( + "context" + "database/sql" + "fmt" + + "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/sqlite" + "github.com/stashapp/stash/pkg/utils" +) + +type schema78Migrator struct { + migrator +} + +func post78(ctx context.Context, db *sqlx.DB) error { + logger.Info("Running post-migration for schema version 78") + + m := schema78Migrator{ + migrator: migrator{ + db: 
db, + }, + } + + if err := m.migrateCareerLength(ctx); err != nil { + return fmt.Errorf("migrating career_length: %w", err) + } + + if err := m.dropCareerLength(); err != nil { + return fmt.Errorf("dropping career_length column: %w", err) + } + + return nil +} + +func (m *schema78Migrator) migrateCareerLength(ctx context.Context) error { + logger.Info("Migrating career_length to career_start/career_end") + + const limit = 1000 + + lastID := 0 + parsed := 0 + unparseable := 0 + + for { + gotSome := false + + if err := m.withTxn(ctx, func(tx *sqlx.Tx) error { + query := `SELECT id, career_length FROM performers + WHERE career_length IS NOT NULL AND career_length != ''` + + if lastID != 0 { + query += fmt.Sprintf(" AND id > %d", lastID) + } + + query += fmt.Sprintf(" ORDER BY id LIMIT %d", limit) + + rows, err := tx.Query(query) + if err != nil { + return err + } + defer rows.Close() + + for rows.Next() { + var ( + id int + careerLength string + ) + + if err := rows.Scan(&id, &careerLength); err != nil { + return err + } + + lastID = id + gotSome = true + + start, end, err := utils.ParseYearRangeString(careerLength) + if err != nil { + logger.Warnf("Could not parse career_length %q for performer %d: %v — preserving as custom field", careerLength, id, err) + + if err := m.preserveAsCustomField(tx, id, careerLength); err != nil { + return fmt.Errorf("preserving career_length for performer %d: %w", id, err) + } + unparseable++ + continue + } + + if err := m.updateCareerFields(tx, id, start, end); err != nil { + return fmt.Errorf("updating career fields for performer %d: %w", id, err) + } + parsed++ + } + + return rows.Err() + }); err != nil { + return err + } + + if !gotSome { + break + } + } + + logger.Infof("Career length migration complete: %d parsed, %d unparseable (preserved as custom fields)", parsed, unparseable) + return nil +} + +func (m *schema78Migrator) updateCareerFields(tx *sqlx.Tx, id int, start *int, end *int) error { + _, err := tx.Exec( + "UPDATE 
performers SET career_start = ?, career_end = ? WHERE id = ?", + start, end, id, + ) + return err +} + +func (m *schema78Migrator) preserveAsCustomField(tx *sqlx.Tx, id int, value string) error { + // check if a career_length custom field already exists + var existing sql.NullString + err := tx.Get(&existing, "SELECT value FROM performer_custom_fields WHERE performer_id = ? AND field = 'career_length'", id) + if err == nil { + logger.Debugf("career_length custom field already exists for performer %d, skipping", id) + return nil + } + + _, err = tx.Exec( + "INSERT INTO performer_custom_fields (performer_id, field, value) VALUES (?, 'career_length', ?)", + id, value, + ) + return err +} + +func (m *schema78Migrator) dropCareerLength() error { + logger.Info("Dropping career_length column from performers table") + return m.execAll([]string{ + "ALTER TABLE performers DROP COLUMN career_length", + }) +} + +func init() { + sqlite.RegisterPostMigration(78, post78) +} diff --git a/pkg/sqlite/migrations/79_scene_custom_fields.up.sql b/pkg/sqlite/migrations/79_scene_custom_fields.up.sql new file mode 100644 index 000000000..a56b34e3a --- /dev/null +++ b/pkg/sqlite/migrations/79_scene_custom_fields.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `scene_custom_fields` ( + `scene_id` integer NOT NULL, + `field` varchar(64) NOT NULL, + `value` BLOB NOT NULL, + PRIMARY KEY (`scene_id`, `field`), + foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE +); + +CREATE INDEX `index_scene_custom_fields_field_value` ON `scene_custom_fields` (`field`, `value`); \ No newline at end of file diff --git a/pkg/sqlite/migrations/80_studio_organized.up.sql b/pkg/sqlite/migrations/80_studio_organized.up.sql new file mode 100644 index 000000000..3aa9c4656 --- /dev/null +++ b/pkg/sqlite/migrations/80_studio_organized.up.sql @@ -0,0 +1 @@ +ALTER TABLE `studios` ADD COLUMN `organized` boolean not null default '0'; \ No newline at end of file diff --git a/pkg/sqlite/performer.go 
b/pkg/sqlite/performer.go index 4e06b5b29..298a681fd 100644 --- a/pkg/sqlite/performer.go +++ b/pkg/sqlite/performer.go @@ -44,7 +44,8 @@ type performerRow struct { FakeTits zero.String `db:"fake_tits"` PenisLength null.Float `db:"penis_length"` Circumcised zero.String `db:"circumcised"` - CareerLength zero.String `db:"career_length"` + CareerStart null.Int `db:"career_start"` + CareerEnd null.Int `db:"career_end"` Tattoos zero.String `db:"tattoos"` Piercings zero.String `db:"piercings"` Favorite bool `db:"favorite"` @@ -82,7 +83,8 @@ func (r *performerRow) fromPerformer(o models.Performer) { if o.Circumcised != nil && o.Circumcised.IsValid() { r.Circumcised = zero.StringFrom(o.Circumcised.String()) } - r.CareerLength = zero.StringFrom(o.CareerLength) + r.CareerStart = intFromPtr(o.CareerStart) + r.CareerEnd = intFromPtr(o.CareerEnd) r.Tattoos = zero.StringFrom(o.Tattoos) r.Piercings = zero.StringFrom(o.Piercings) r.Favorite = o.Favorite @@ -110,7 +112,8 @@ func (r *performerRow) resolve() *models.Performer { Measurements: r.Measurements.String, FakeTits: r.FakeTits.String, PenisLength: nullFloatPtr(r.PenisLength), - CareerLength: r.CareerLength.String, + CareerStart: nullIntPtr(r.CareerStart), + CareerEnd: nullIntPtr(r.CareerEnd), Tattoos: r.Tattoos.String, Piercings: r.Piercings.String, Favorite: r.Favorite, @@ -155,7 +158,8 @@ func (r *performerRowRecord) fromPartial(o models.PerformerPartial) { r.setNullString("fake_tits", o.FakeTits) r.setNullFloat64("penis_length", o.PenisLength) r.setNullString("circumcised", o.Circumcised) - r.setNullString("career_length", o.CareerLength) + r.setNullInt("career_start", o.CareerStart) + r.setNullInt("career_end", o.CareerEnd) r.setNullString("tattoos", o.Tattoos) r.setNullString("piercings", o.Piercings) r.setBool("favorite", o.Favorite) @@ -706,6 +710,28 @@ func (qb *PerformerStore) sortByLastOAt(direction string) string { return " ORDER BY (" + selectPerformerLastOAtSQL + ") " + direction } +// used for sorting on 
performer latest scene +var selectPerformerLatestSceneSQL = utils.StrFormat( + "SELECT MAX(date) FROM ("+ + "SELECT {date} FROM {performers_scenes} s "+ + "LEFT JOIN {scenes} ON {scenes}.id = s.{scene_id} "+ + "WHERE s.{performer_id} = {performers}.id"+ + ")", + map[string]interface{}{ + "performer_id": performerIDColumn, + "performers": performerTable, + "performers_scenes": performersScenesTable, + "scenes": sceneTable, + "scene_id": sceneIDColumn, + "date": sceneDateColumn, + }, +) + +func (qb *PerformerStore) sortByLatestScene(direction string) string { + // need to get the latest date from scenes + return " ORDER BY (" + selectPerformerLatestSceneSQL + ") " + direction +} + // used for sorting on performer last view_date var selectPerformerLastPlayedAtSQL = utils.StrFormat( "SELECT MAX(view_date) FROM ("+ @@ -754,7 +780,8 @@ func (qb *PerformerStore) sortByScenesDuration(direction string) string { var performerSortOptions = sortOptions{ "birthdate", - "career_length", + "career_start", + "career_end", "created_at", "galleries_count", "height", @@ -762,6 +789,7 @@ var performerSortOptions = sortOptions{ "images_count", "last_o_at", "last_played_at", + "latest_scene", "measurements", "name", "o_counter", @@ -812,6 +840,8 @@ func (qb *PerformerStore) getPerformerSort(findFilter *models.FindFilterType) (s sortQuery += qb.sortByLastPlayedAt(direction) case "last_o_at": sortQuery += qb.sortByLastOAt(direction) + case "latest_scene": + sortQuery += qb.sortByLatestScene(direction) default: sortQuery += getSort(sort, direction, "performers") } diff --git a/pkg/sqlite/performer_filter.go b/pkg/sqlite/performer_filter.go index 401664e33..5296d5a25 100644 --- a/pkg/sqlite/performer_filter.go +++ b/pkg/sqlite/performer_filter.go @@ -47,6 +47,29 @@ func (qb *performerFilterHandler) validate() error { } } + // if legacy career length filter used, ensure only supported modifiers are used and value is valid + if filter.CareerLength != nil { + careerLength := 
filter.CareerLength + switch careerLength.Modifier { + case models.CriterionModifierEquals: + start, end, err := utils.ParseYearRangeString(careerLength.Value) + if err != nil { + return fmt.Errorf("invalid career length value: %s", careerLength.Value) + } + // ensure career start/end is not set + if start != nil && filter.CareerStart != nil { + return fmt.Errorf("cannot use legacy CareerLength filter with CareerStart filter") + } + if end != nil && filter.CareerEnd != nil { + return fmt.Errorf("cannot use legacy CareerLength filter with CareerEnd filter") + } + case models.CriterionModifierIsNull, models.CriterionModifierNotNull: + // valid modifiers, no value parsing needed + default: + return fmt.Errorf("invalid career length modifier: %s", careerLength.Modifier) + } + } + return nil } @@ -71,10 +94,13 @@ func (qb *performerFilterHandler) handle(ctx context.Context, f *filterBuilder) } func (qb *performerFilterHandler) criterionHandler() criterionHandler { - filter := qb.performerFilter + // make a copy of the filter to modify with legacy conversions without affecting original filter used for subfilters + filter := *qb.performerFilter const tableName = performerTable heightCmCrit := filter.HeightCm + convertLegacyCareerLengthFilter(&filter) + return compoundHandler{ stringCriterionHandler(filter.Name, tableName+".name"), stringCriterionHandler(filter.Disambiguation, tableName+".disambiguation"), @@ -129,7 +155,9 @@ func (qb *performerFilterHandler) criterionHandler() criterionHandler { } }), - stringCriterionHandler(filter.CareerLength, tableName+".career_length"), + // CareerLength filter is deprecated and non-functional (column removed in schema 78) + intCriterionHandler(filter.CareerStart, tableName+".career_start", nil), + intCriterionHandler(filter.CareerEnd, tableName+".career_end", nil), stringCriterionHandler(filter.Tattoos, tableName+".tattoos"), stringCriterionHandler(filter.Piercings, tableName+".piercings"), intCriterionHandler(filter.Rating100, 
tableName+".rating", nil), @@ -221,6 +249,43 @@ func (qb *performerFilterHandler) criterionHandler() criterionHandler { } } +func convertLegacyCareerLengthFilter(filter *models.PerformerFilterType) { + // convert legacy career length filter to career start/end filters + if filter.CareerLength != nil { + careerLength := filter.CareerLength + switch careerLength.Modifier { + case models.CriterionModifierEquals: + start, end, _ := utils.ParseYearRangeString(careerLength.Value) + if start != nil { + filter.CareerStart = &models.IntCriterionInput{ + Value: (*start) - 1, // minus one to make it exclusive + Modifier: models.CriterionModifierGreaterThan, + } + } + if end != nil { + filter.CareerEnd = &models.IntCriterionInput{ + Value: (*end) + 1, // plus one to make it exclusive + Modifier: models.CriterionModifierLessThan, + } + } + case models.CriterionModifierIsNull: + filter.CareerStart = &models.IntCriterionInput{ + Modifier: models.CriterionModifierIsNull, + } + filter.CareerEnd = &models.IntCriterionInput{ + Modifier: models.CriterionModifierIsNull, + } + case models.CriterionModifierNotNull: + filter.CareerStart = &models.IntCriterionInput{ + Modifier: models.CriterionModifierNotNull, + } + filter.CareerEnd = &models.IntCriterionInput{ + Modifier: models.CriterionModifierNotNull, + } + } + } +} + // TODO - we need to provide a whitelist of possible values func (qb *performerFilterHandler) performerIsMissingCriterionHandler(isMissing *string) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { diff --git a/pkg/sqlite/performer_test.go b/pkg/sqlite/performer_test.go index 8d53ca0db..46a5febee 100644 --- a/pkg/sqlite/performer_test.go +++ b/pkg/sqlite/performer_test.go @@ -66,7 +66,8 @@ func Test_PerformerStore_Create(t *testing.T) { fakeTits = "fakeTits" penisLength = 1.23 circumcised = models.CircumisedEnumCut - careerLength = "careerLength" + careerStart = 2005 + careerEnd = 2015 tattoos = "tattoos" piercings = "piercings" aliases = 
[]string{"alias1", "alias2"} @@ -107,7 +108,8 @@ func Test_PerformerStore_Create(t *testing.T) { FakeTits: fakeTits, PenisLength: &penisLength, Circumcised: &circumcised, - CareerLength: careerLength, + CareerStart: &careerStart, + CareerEnd: &careerEnd, Tattoos: tattoos, Piercings: piercings, Favorite: favorite, @@ -204,8 +206,6 @@ func Test_PerformerStore_Create(t *testing.T) { } assert.Equal(tt.newObject.CustomFields, cf) - - return }) } } @@ -229,7 +229,8 @@ func Test_PerformerStore_Update(t *testing.T) { fakeTits = "fakeTits" penisLength = 1.23 circumcised = models.CircumisedEnumCut - careerLength = "careerLength" + careerStart = 2005 + careerEnd = 2015 tattoos = "tattoos" piercings = "piercings" aliases = []string{"alias1", "alias2"} @@ -271,7 +272,8 @@ func Test_PerformerStore_Update(t *testing.T) { FakeTits: fakeTits, PenisLength: &penisLength, Circumcised: &circumcised, - CareerLength: careerLength, + CareerStart: &careerStart, + CareerEnd: &careerEnd, Tattoos: tattoos, Piercings: piercings, Favorite: favorite, @@ -422,7 +424,8 @@ func clearPerformerPartial() models.PerformerPartial { FakeTits: nullString, PenisLength: nullFloat, Circumcised: nullString, - CareerLength: nullString, + CareerStart: nullInt, + CareerEnd: nullInt, Tattoos: nullString, Piercings: nullString, Aliases: &models.UpdateStrings{Mode: models.RelationshipUpdateModeSet}, @@ -455,7 +458,8 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { fakeTits = "fakeTits" penisLength = 1.23 circumcised = models.CircumisedEnumCut - careerLength = "careerLength" + careerStart = 2005 + careerEnd = 2015 tattoos = "tattoos" piercings = "piercings" aliases = []string{"alias1", "alias2"} @@ -501,7 +505,8 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { FakeTits: models.NewOptionalString(fakeTits), PenisLength: models.NewOptionalFloat64(penisLength), Circumcised: models.NewOptionalString(circumcised.String()), - CareerLength: models.NewOptionalString(careerLength), + CareerStart: 
models.NewOptionalInt(careerStart), + CareerEnd: models.NewOptionalInt(careerEnd), Tattoos: models.NewOptionalString(tattoos), Piercings: models.NewOptionalString(piercings), Aliases: &models.UpdateStrings{ @@ -552,7 +557,8 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { FakeTits: fakeTits, PenisLength: &penisLength, Circumcised: &circumcised, - CareerLength: careerLength, + CareerStart: &careerStart, + CareerEnd: &careerEnd, Tattoos: tattoos, Piercings: piercings, Aliases: models.NewRelatedStrings(aliases), @@ -1766,30 +1772,117 @@ func verifyPerformerAge(t *testing.T, ageCriterion models.IntCriterionInput) { }) } -func TestPerformerQueryCareerLength(t *testing.T) { - const value = "2005" - careerLengthCriterion := models.StringCriterionInput{ +func TestPerformerQueryLegacyCareerLength(t *testing.T) { + const value = "2002 - 2012" + + tests := []struct { + name string + c models.StringCriterionInput + careerStartCrit *models.IntCriterionInput + careerEndCrit *models.IntCriterionInput + err bool + }{ + { + name: "valid format", + c: models.StringCriterionInput{ + Value: value, + Modifier: models.CriterionModifierEquals, + }, + careerStartCrit: &models.IntCriterionInput{ + Value: 2002, + Modifier: models.CriterionModifierEquals, + }, + careerEndCrit: &models.IntCriterionInput{ + Value: 2012, + Modifier: models.CriterionModifierEquals, + }, + err: false, + }, + { + name: "invalid format", + c: models.StringCriterionInput{ + Value: "invalid format", + Modifier: models.CriterionModifierEquals, + }, + err: true, + }, + { + name: "is null", + c: models.StringCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + careerStartCrit: &models.IntCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + careerEndCrit: &models.IntCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + err: false, + }, + { + name: "not null", + c: models.StringCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + careerStartCrit: 
&models.IntCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + careerEndCrit: &models.IntCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + err: false, + }, + { + name: "invalid modifier", + c: models.StringCriterionInput{ + Value: value, + Modifier: models.CriterionModifierMatchesRegex, + }, + err: true, + }, + } + + qb := db.Performer + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + performers, _, err := qb.Query(ctx, &models.PerformerFilterType{ + CareerLength: &tt.c, + }, nil) + + if err != nil && !tt.err { + t.Errorf("Error querying performer: %s", err.Error()) + } else if err == nil && tt.err { + t.Errorf("Expected error but got none") + } + + if err != nil || tt.err { + return + } + + if len(performers) == 0 { + t.Errorf("Expected to find performers but found none") + } + + for _, performer := range performers { + verifyIntPtr(t, performer.CareerStart, *tt.careerStartCrit) + verifyIntPtr(t, performer.CareerEnd, *tt.careerEndCrit) + } + }) + } +} + +func TestPerformerQueryCareerStart(t *testing.T) { + const value = 2002 + criterion := models.IntCriterionInput{ Value: value, Modifier: models.CriterionModifierEquals, } - verifyPerformerCareerLength(t, careerLengthCriterion) - - careerLengthCriterion.Modifier = models.CriterionModifierNotEquals - verifyPerformerCareerLength(t, careerLengthCriterion) - - careerLengthCriterion.Modifier = models.CriterionModifierMatchesRegex - verifyPerformerCareerLength(t, careerLengthCriterion) - - careerLengthCriterion.Modifier = models.CriterionModifierNotMatchesRegex - verifyPerformerCareerLength(t, careerLengthCriterion) -} - -func verifyPerformerCareerLength(t *testing.T, criterion models.StringCriterionInput) { withTxn(func(ctx context.Context) error { qb := db.Performer performerFilter := models.PerformerFilterType{ - CareerLength: &criterion, + CareerStart: &criterion, } performers, _, err := qb.Query(ctx, &performerFilter, nil) @@ 
-1798,8 +1891,33 @@ func verifyPerformerCareerLength(t *testing.T, criterion models.StringCriterionI } for _, performer := range performers { - cl := performer.CareerLength - verifyString(t, cl, criterion) + verifyIntPtr(t, performer.CareerStart, criterion) + } + + return nil + }) +} + +func TestPerformerQueryCareerEnd(t *testing.T) { + const value = 2012 + criterion := models.IntCriterionInput{ + Value: value, + Modifier: models.CriterionModifierEquals, + } + + withTxn(func(ctx context.Context) error { + qb := db.Performer + performerFilter := models.PerformerFilterType{ + CareerEnd: &criterion, + } + + performers, _, err := qb.Query(ctx, &performerFilter, nil) + if err != nil { + t.Errorf("Error querying performer: %s", err.Error()) + } + + for _, performer := range performers { + verifyIntPtr(t, performer.CareerEnd, criterion) } return nil diff --git a/pkg/sqlite/scene.go b/pkg/sqlite/scene.go index a0b9005a5..3049681b2 100644 --- a/pkg/sqlite/scene.go +++ b/pkg/sqlite/scene.go @@ -26,6 +26,7 @@ const ( sceneTable = "scenes" scenesFilesTable = "scenes_files" sceneIDColumn = "scene_id" + sceneDateColumn = "date" performersScenesTable = "performers_scenes" scenesTagsTable = "scenes_tags" scenesGalleriesTable = "scenes_galleries" @@ -233,6 +234,7 @@ var ( type SceneStore struct { blobJoinQueryBuilder + customFieldsStore tableMgr *table oDateManager @@ -247,6 +249,10 @@ func NewSceneStore(r *storeRepository, blobStore *BlobStore) *SceneStore { blobStore: blobStore, joinTable: sceneTable, }, + customFieldsStore: customFieldsStore{ + table: scenesCustomFieldsTable, + fk: scenesCustomFieldsTable.Col(sceneIDColumn), + }, tableMgr: sceneTableMgr, viewDateManager: viewDateManager{scenesViewTableMgr}, diff --git a/pkg/sqlite/scene_filter.go b/pkg/sqlite/scene_filter.go index 72c75eca5..a9eb6b0ae 100644 --- a/pkg/sqlite/scene_filter.go +++ b/pkg/sqlite/scene_filter.go @@ -5,7 +5,6 @@ import ( "fmt" "github.com/stashapp/stash/pkg/models" - 
"github.com/stashapp/stash/pkg/utils" ) type sceneFilterHandler struct { @@ -83,14 +82,27 @@ func (qb *sceneFilterHandler) criterionHandler() criterionHandler { criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { if sceneFilter.Phash != nil { // backwards compatibility - qb.phashDistanceCriterionHandler(&models.PhashDistanceCriterionInput{ - Value: sceneFilter.Phash.Value, - Modifier: sceneFilter.Phash.Modifier, - })(ctx, f) + h := phashDistanceCriterionHandler{ + joinFn: func(f *filterBuilder) { + qb.addSceneFilesTable(f) + f.addLeftJoin(fingerprintTable, "fingerprints_phash", "scenes_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") + }, + criterion: &models.PhashDistanceCriterionInput{ + Value: sceneFilter.Phash.Value, + Modifier: sceneFilter.Phash.Modifier, + }, + } + h.handle(ctx, f) } }), - qb.phashDistanceCriterionHandler(sceneFilter.PhashDistance), + &phashDistanceCriterionHandler{ + joinFn: func(f *filterBuilder) { + qb.addSceneFilesTable(f) + f.addLeftJoin(fingerprintTable, "fingerprints_phash", "scenes_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") + }, + criterion: sceneFilter.PhashDistance, + }, intCriterionHandler(sceneFilter.Rating100, "scenes.rating", nil), qb.oCountCriterionHandler(sceneFilter.OCounter), @@ -127,6 +139,8 @@ func (qb *sceneFilterHandler) criterionHandler() criterionHandler { parentIDCol: "scenes.id", }, + qb.stashIDCountCriterionHandler(sceneFilter.StashIDCount), + boolCriterionHandler(sceneFilter.Interactive, "video_files.interactive", qb.addVideoFilesTable), intCriterionHandler(sceneFilter.InteractiveSpeed, "video_files.interactive_speed", qb.addVideoFilesTable), @@ -160,11 +174,18 @@ func (qb *sceneFilterHandler) criterionHandler() criterionHandler { qb.performerTagsCriterionHandler(sceneFilter.PerformerTags), qb.performerFavoriteCriterionHandler(sceneFilter.PerformerFavorite), qb.performerAgeCriterionHandler(sceneFilter.PerformerAge), - 
qb.phashDuplicatedCriterionHandler(sceneFilter.Duplicated, qb.addSceneFilesTable), + qb.duplicatedCriterionHandler(sceneFilter.Duplicated), &dateCriterionHandler{sceneFilter.Date, "scenes.date", nil}, ×tampCriterionHandler{sceneFilter.CreatedAt, "scenes.created_at", nil}, ×tampCriterionHandler{sceneFilter.UpdatedAt, "scenes.updated_at", nil}, + &customFieldsFilterHandler{ + table: scenesCustomFieldsTable.GetTable(), + fkCol: sceneIDColumn, + c: sceneFilter.CustomFields, + idCol: "scenes.id", + }, + &relatedFilterHandler{ relatedIDCol: "scenes_galleries.gallery_id", relatedRepo: galleryRepository.repository, @@ -282,26 +303,71 @@ func (qb *sceneFilterHandler) fileCountCriterionHandler(fileCount *models.IntCri return h.handler(fileCount) } -func (qb *sceneFilterHandler) phashDuplicatedCriterionHandler(duplicatedFilter *models.PHashDuplicationCriterionInput, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { +func (qb *sceneFilterHandler) duplicatedCriterionHandler(duplicatedFilter *models.DuplicationCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { - // TODO: Wishlist item: Implement Distance matching - if duplicatedFilter != nil { - if addJoinFn != nil { - addJoinFn(f) - } + if duplicatedFilter == nil { + return + } - var v string - if *duplicatedFilter.Duplicated { - v = ">" - } else { - v = "=" - } + // Handle legacy 'duplicated' field - treat as phash if phash not explicitly set + //nolint:staticcheck + if duplicatedFilter.Duplicated != nil && duplicatedFilter.Phash == nil { + //nolint:staticcheck + duplicatedFilter.Phash = duplicatedFilter.Duplicated + } - f.addInnerJoin("(SELECT file_id FROM files_fingerprints INNER JOIN (SELECT fingerprint FROM files_fingerprints WHERE type = 'phash' GROUP BY fingerprint HAVING COUNT (fingerprint) "+v+" 1) dupes on files_fingerprints.fingerprint = dupes.fingerprint)", "scph", "scenes_files.file_id = scph.file_id") + // Handle explicit fields + if duplicatedFilter.Phash != nil { + 
qb.addSceneFilesTable(f) + qb.applyPhashDuplication(f, *duplicatedFilter.Phash) + } + + if duplicatedFilter.StashID != nil { + qb.applyStashIDDuplication(f, *duplicatedFilter.StashID) + } + + if duplicatedFilter.Title != nil { + qb.applyTitleDuplication(f, *duplicatedFilter.Title) + } + + if duplicatedFilter.URL != nil { + qb.applyURLDuplication(f, *duplicatedFilter.URL) } } } +// getCountOperator returns ">" for duplicated items (count > 1) or "=" for unique items (count = 1) +func getCountOperator(duplicated bool) string { + if duplicated { + return ">" + } + return "=" +} + +func (qb *sceneFilterHandler) applyPhashDuplication(f *filterBuilder, duplicated bool) { + // TODO: Wishlist item: Implement Distance matching + v := getCountOperator(duplicated) + f.addInnerJoin("(SELECT file_id FROM files_fingerprints INNER JOIN (SELECT fingerprint FROM files_fingerprints WHERE type = 'phash' GROUP BY fingerprint HAVING COUNT (fingerprint) "+v+" 1) dupes on files_fingerprints.fingerprint = dupes.fingerprint)", "scph", "scenes_files.file_id = scph.file_id") +} + +func (qb *sceneFilterHandler) applyStashIDDuplication(f *filterBuilder, duplicated bool) { + v := getCountOperator(duplicated) + // Find stash_ids that appear on more than one scene + f.addInnerJoin("(SELECT scene_id FROM scene_stash_ids INNER JOIN (SELECT stash_id FROM scene_stash_ids GROUP BY stash_id HAVING COUNT(DISTINCT scene_id) "+v+" 1) dupes ON scene_stash_ids.stash_id = dupes.stash_id)", "scsi", "scenes.id = scsi.scene_id") +} + +func (qb *sceneFilterHandler) applyTitleDuplication(f *filterBuilder, duplicated bool) { + v := getCountOperator(duplicated) + // Find titles that appear on more than one scene (excluding empty titles) + f.addInnerJoin("(SELECT id FROM scenes WHERE title != '' AND title IS NOT NULL AND title IN (SELECT title FROM scenes WHERE title != '' AND title IS NOT NULL GROUP BY title HAVING COUNT(*) "+v+" 1))", "sctitle", "scenes.id = sctitle.id") +} + +func (qb *sceneFilterHandler) 
applyURLDuplication(f *filterBuilder, duplicated bool) { + v := getCountOperator(duplicated) + // Find URLs that appear on more than one scene + f.addInnerJoin("(SELECT scene_id FROM scene_urls INNER JOIN (SELECT url FROM scene_urls GROUP BY url HAVING COUNT(DISTINCT scene_id) "+v+" 1) dupes ON scene_urls.url = dupes.url)", "scurl", "scenes.id = scurl.scene_id") +} + func (qb *sceneFilterHandler) codecCriterionHandler(codec *models.StringCriterionInput, codecColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { if codec != nil { @@ -441,6 +507,16 @@ func (qb *sceneFilterHandler) tagCountCriterionHandler(tagCount *models.IntCrite return h.handler(tagCount) } +func (qb *sceneFilterHandler) stashIDCountCriterionHandler(stashIDCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: sceneTable, + joinTable: "scene_stash_ids", + primaryFK: sceneIDColumn, + } + + return h.handler(stashIDCount) +} + func (qb *sceneFilterHandler) performersCriterionHandler(performers *models.MultiCriterionInput) criterionHandlerFunc { h := joinedMultiCriterionHandlerBuilder{ primaryTable: sceneTable, @@ -547,42 +623,3 @@ func (qb *sceneFilterHandler) performerTagsCriterionHandler(tags *models.Hierarc joinPrimaryKey: sceneIDColumn, } } - -func (qb *sceneFilterHandler) phashDistanceCriterionHandler(phashDistance *models.PhashDistanceCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if phashDistance != nil { - qb.addSceneFilesTable(f) - f.addLeftJoin(fingerprintTable, "fingerprints_phash", "scenes_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") - - value, _ := utils.StringToPhash(phashDistance.Value) - distance := 0 - if phashDistance.Distance != nil { - distance = *phashDistance.Distance - } - - if distance == 0 { - // use the default handler - intCriterionHandler(&models.IntCriterionInput{ 
- Value: int(value), - Modifier: phashDistance.Modifier, - }, "fingerprints_phash.fingerprint", nil)(ctx, f) - } - - switch { - case phashDistance.Modifier == models.CriterionModifierEquals && distance > 0: - // needed to avoid a type mismatch - f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") - f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) < ?", value, distance) - case phashDistance.Modifier == models.CriterionModifierNotEquals && distance > 0: - // needed to avoid a type mismatch - f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") - f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) > ?", value, distance) - default: - intCriterionHandler(&models.IntCriterionInput{ - Value: int(value), - Modifier: phashDistance.Modifier, - }, "fingerprints_phash.fingerprint", nil)(ctx, f) - } - } - } -} diff --git a/pkg/sqlite/scene_test.go b/pkg/sqlite/scene_test.go index df6676a0f..d386175c7 100644 --- a/pkg/sqlite/scene_test.go +++ b/pkg/sqlite/scene_test.go @@ -2273,6 +2273,32 @@ func TestSceneQuery(t *testing.T) { nil, false, }, + { + "single stash id", + nil, + &models.SceneFilterType{ + StashIDCount: &models.IntCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: 1, + }, + }, + []int{sceneIdxWithGallery, sceneIdxWithPerformer}, + []int{sceneIdxWithGroup}, + false, + }, + { + "less than one stash id", + nil, + &models.SceneFilterType{ + StashIDCount: &models.IntCriterionInput{ + Modifier: models.CriterionModifierLessThan, + Value: 1, + }, + }, + []int{sceneIdxWithGroup}, + []int{sceneIdxWithGallery, sceneIdxWithPerformer}, + false, + }, } for _, tt := range tests { @@ -4095,7 +4121,7 @@ func TestSceneQueryPhashDuplicated(t *testing.T) { withTxn(func(ctx context.Context) error { sqb := db.Scene duplicated := true - phashCriterion := models.PHashDuplicationCriterionInput{ + phashCriterion := models.DuplicationCriterionInput{ Duplicated: &duplicated, } @@ -4800,6 +4826,253 @@ func 
TestSceneStore_SaveActivity(t *testing.T) { } } +func TestSceneQueryCustomFields(t *testing.T) { + tests := []struct { + name string + filter *models.SceneFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "equals", + &models.SceneFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierEquals, + Value: []any{getSceneStringValue(sceneIdxWithGallery, "custom")}, + }, + }, + }, + []int{sceneIdxWithGallery}, + nil, + false, + }, + { + "not equals", + &models.SceneFilterType{ + Title: &models.StringCriterionInput{ + Value: getSceneTitle(sceneIdxWithGallery), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotEquals, + Value: []any{getSceneStringValue(sceneIdxWithGallery, "custom")}, + }, + }, + }, + nil, + []int{sceneIdxWithGallery}, + false, + }, + { + "includes", + &models.SceneFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierIncludes, + Value: []any{getSceneStringValue(sceneIdxWithGallery, "custom")[9:]}, + }, + }, + }, + []int{sceneIdxWithGallery}, + nil, + false, + }, + { + "excludes", + &models.SceneFilterType{ + Title: &models.StringCriterionInput{ + Value: getSceneTitle(sceneIdxWithGallery), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierExcludes, + Value: []any{getSceneStringValue(sceneIdxWithGallery, "custom")[9:]}, + }, + }, + }, + nil, + []int{sceneIdxWithGallery}, + false, + }, + { + "regex", + &models.SceneFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{".*17_custom"}, + }, + }, + }, + []int{sceneIdxWithTwoPerformerTag}, + nil, + false, + }, + { + "invalid regex", + 
&models.SceneFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "not matches regex", + &models.SceneFilterType{ + Title: &models.StringCriterionInput{ + Value: getSceneTitle(sceneIdxWithTwoPerformerTag), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{".*17_custom"}, + }, + }, + }, + nil, + []int{sceneIdxWithTwoPerformerTag}, + false, + }, + { + "invalid not matches regex", + &models.SceneFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "null", + &models.SceneFilterType{ + Title: &models.StringCriterionInput{ + Value: getSceneTitle(sceneIdxWithGallery), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "not existing", + Modifier: models.CriterionModifierIsNull, + }, + }, + }, + []int{sceneIdxWithGallery}, + nil, + false, + }, + { + "not null", + &models.SceneFilterType{ + Title: &models.StringCriterionInput{ + Value: getSceneTitle(sceneIdxWithGallery), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotNull, + }, + }, + }, + []int{sceneIdxWithGallery}, + nil, + false, + }, + { + "between", + &models.SceneFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + []int{sceneIdxWithPerformer}, + nil, + false, + }, + { + "not between", + &models.SceneFilterType{ + Title: &models.StringCriterionInput{ + Value: 
getSceneTitle(sceneIdxWithPerformer), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierNotBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + nil, + []int{sceneIdxWithPerformer}, + false, + }, + } + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + result, err := db.Scene.Query(ctx, models.SceneQueryOptions{ + SceneFilter: tt.filter, + }) + if (err != nil) != tt.wantErr { + t.Errorf("SceneStore.Query() error = %v, wantErr %v", err, tt.wantErr) + } + + if err != nil { + return + } + + scenes, err := result.Resolve(ctx) + if err != nil { + t.Errorf("SceneStore.Query().Resolve() error = %v", err) + return + } + + ids := scenesToIDs(scenes) + include := indexesToIDs(sceneIDs, tt.includeIdxs) + exclude := indexesToIDs(sceneIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } +} + // TODO Count // TODO SizeCount diff --git a/pkg/sqlite/setup_test.go b/pkg/sqlite/setup_test.go index 7e6f821d1..91f9f127b 100644 --- a/pkg/sqlite/setup_test.go +++ b/pkg/sqlite/setup_test.go @@ -1076,6 +1076,13 @@ func getObjectDate(index int) *models.Date { return &ret } +func sceneStashIDs(i int) []models.StashID { + if i%5 == 0 { + return nil + } + return []models.StashID{sceneStashID(i)} +} + func sceneStashID(i int) models.StashID { return models.StashID{ StashID: getSceneStringValue(i, "stashid"), @@ -1174,14 +1181,24 @@ func makeScene(i int) *models.Scene { PerformerIDs: models.NewRelatedIDs(pids), TagIDs: models.NewRelatedIDs(tids), Groups: models.NewRelatedGroups(groups), - StashIDs: models.NewRelatedStashIDs([]models.StashID{ - sceneStashID(i), - }), + StashIDs: models.NewRelatedStashIDs(sceneStashIDs(i)), PlayDuration: getScenePlayDuration(i), ResumeTime: getSceneResumeTime(i), } } +func 
getSceneCustomFields(index int) map[string]interface{} { + if index%5 == 0 { + return nil + } + + return map[string]interface{}{ + "string": getSceneStringValue(index, "custom"), + "int": int64(index % 5), + "real": float64(index) / 10, + } +} + func createScenes(ctx context.Context, n int) error { sqb := db.Scene fqb := db.File @@ -1199,6 +1216,10 @@ func createScenes(ctx context.Context, n int) error { return fmt.Errorf("Error creating scene %v+: %s", scene, err.Error()) } + if err := sqb.SetCustomFields(ctx, scene.ID, models.CustomFieldsInput{Full: getSceneCustomFields(i)}); err != nil { + return fmt.Errorf("Error setting custom fields for scene %d: %s", scene.ID, err.Error()) + } + sceneIDs = append(sceneIDs, scene.ID) } @@ -1508,15 +1529,28 @@ func getPerformerDeathDate(index int) *models.Date { return &ret } -func getPerformerCareerLength(index int) *string { +func getPerformerCareerStart(index int) *int { if index%5 == 0 { return nil } - ret := fmt.Sprintf("20%2d", index) + ret := 2000 + index return &ret } +func getPerformerCareerEnd(index int) *int { + if index%5 == 0 { + return nil + } + + // only set career_end for even indices + if index%2 == 0 { + ret := 2010 + index + return &ret + } + return nil +} + func getPerformerPenisLength(index int) *float64 { if index%5 == 0 { return nil @@ -1610,10 +1644,8 @@ func createPerformers(ctx context.Context, n int, o int) error { TagIDs: models.NewRelatedIDs(tids), } - careerLength := getPerformerCareerLength(i) - if careerLength != nil { - performer.CareerLength = *careerLength - } + performer.CareerStart = getPerformerCareerStart(i) + performer.CareerEnd = getPerformerCareerEnd(i) if (index+1)%5 != 0 { performer.StashIDs = models.NewRelatedStashIDs([]models.StashID{ @@ -1704,6 +1736,18 @@ func tagStashID(i int) models.StashID { } } +func getTagCustomFields(index int) map[string]interface{} { + if index%5 == 0 { + return nil + } + + return map[string]interface{}{ + "string": getTagStringValue(index, "custom"), + 
"int": int64(index % 5), + "real": float64(index) / 10, + } +} + // createTags creates n tags with plain Name and o tags with camel cased NaMe included func createTags(ctx context.Context, tqb models.TagReaderWriter, n int, o int) error { const namePlain = "Name" @@ -1731,7 +1775,10 @@ func createTags(ctx context.Context, tqb models.TagReaderWriter, n int, o int) e }) } - err := tqb.Create(ctx, &tag) + err := tqb.Create(ctx, &models.CreateTagInput{ + Tag: &tag, + CustomFields: getTagCustomFields(i), + }) if err != nil { return fmt.Errorf("Error creating tag %v+: %s", tag, err.Error()) @@ -1760,7 +1807,19 @@ func getStudioNullStringValue(index int, field string) string { return ret.String } -func createStudio(ctx context.Context, sqb *sqlite.StudioStore, name string, parentID *int) (*models.Studio, error) { +func getStudioCustomFields(index int) map[string]interface{} { + if index%5 == 0 { + return nil + } + + return map[string]interface{}{ + "string": getStudioStringValue(index, "custom"), + "int": int64(index % 5), + "real": float64(index) / 10, + } +} + +func createStudio(ctx context.Context, sqb *sqlite.StudioStore, name string, parentID *int, customFields map[string]interface{}) (*models.Studio, error) { studio := models.Studio{ Name: name, } @@ -1769,7 +1828,7 @@ func createStudio(ctx context.Context, sqb *sqlite.StudioStore, name string, par studio.ParentID = parentID } - err := createStudioFromModel(ctx, sqb, &studio) + err := createStudioFromModel(ctx, sqb, &studio, customFields) if err != nil { return nil, err } @@ -1777,8 +1836,11 @@ func createStudio(ctx context.Context, sqb *sqlite.StudioStore, name string, par return &studio, nil } -func createStudioFromModel(ctx context.Context, sqb *sqlite.StudioStore, studio *models.Studio) error { - err := sqb.Create(ctx, studio) +func createStudioFromModel(ctx context.Context, sqb *sqlite.StudioStore, studio *models.Studio, customFields map[string]interface{}) error { + err := sqb.Create(ctx, 
&models.CreateStudioInput{ + Studio: studio, + CustomFields: customFields, + }) if err != nil { return fmt.Errorf("Error creating studio %v+: %s", studio, err.Error()) @@ -1840,7 +1902,7 @@ func createStudios(ctx context.Context, n int, o int) error { alias := getStudioStringValue(i, "Alias") studio.Aliases = models.NewRelatedStrings([]string{alias}) } - err := createStudioFromModel(ctx, sqb, &studio) + err := createStudioFromModel(ctx, sqb, &studio, getStudioCustomFields(i)) if err != nil { return err diff --git a/pkg/sqlite/studio.go b/pkg/sqlite/studio.go index 1a05be6f3..a866a94ab 100644 --- a/pkg/sqlite/studio.go +++ b/pkg/sqlite/studio.go @@ -15,6 +15,7 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/studio" + "github.com/stashapp/stash/pkg/utils" ) const ( @@ -43,6 +44,7 @@ type studioRow struct { Favorite bool `db:"favorite"` Details zero.String `db:"details"` IgnoreAutoTag bool `db:"ignore_auto_tag"` + Organized bool `db:"organized"` // not used in resolutions or updates ImageBlob zero.String `db:"image_blob"` @@ -58,6 +60,7 @@ func (r *studioRow) fromStudio(o models.Studio) { r.Favorite = o.Favorite r.Details = zero.StringFrom(o.Details) r.IgnoreAutoTag = o.IgnoreAutoTag + r.Organized = o.Organized } func (r *studioRow) resolve() *models.Studio { @@ -71,6 +74,7 @@ func (r *studioRow) resolve() *models.Studio { Favorite: r.Favorite, Details: r.Details.String, IgnoreAutoTag: r.IgnoreAutoTag, + Organized: r.Organized, } return ret @@ -89,6 +93,7 @@ func (r *studioRowRecord) fromPartial(o models.StudioPartial) { r.setBool("favorite", o.Favorite) r.setNullString("details", o.Details) r.setBool("ignore_auto_tag", o.IgnoreAutoTag) + r.setBool("organized", o.Organized) } type studioRepositoryType struct { @@ -100,6 +105,7 @@ type studioRepositoryType struct { scenes repository images repository galleries repository + groups repository } var ( @@ -126,6 +132,10 @@ var ( tableName: galleryTable, idColumn: studioIDColumn, }, + 
groups: repository{ + tableName: groupTable, + idColumn: studioIDColumn, + }, tags: joinRepository{ repository: repository{ tableName: studiosTagsTable, @@ -140,6 +150,7 @@ var ( type StudioStore struct { blobJoinQueryBuilder + customFieldsStore tagRelationshipStore tableMgr *table @@ -151,6 +162,10 @@ func NewStudioStore(blobStore *BlobStore) *StudioStore { blobStore: blobStore, joinTable: studioTable, }, + customFieldsStore: customFieldsStore{ + table: studiosCustomFieldsTable, + fk: studiosCustomFieldsTable.Col(studioIDColumn), + }, tagRelationshipStore: tagRelationshipStore{ idRelationshipStore: idRelationshipStore{ joinTable: studiosTagsTableMgr, @@ -169,11 +184,11 @@ func (qb *StudioStore) selectDataset() *goqu.SelectDataset { return dialect.From(qb.table()).Select(qb.table().All()) } -func (qb *StudioStore) Create(ctx context.Context, newObject *models.Studio) error { +func (qb *StudioStore) Create(ctx context.Context, newObject *models.CreateStudioInput) error { var err error var r studioRow - r.fromStudio(*newObject) + r.fromStudio(*newObject.Studio) id, err := qb.tableMgr.insertID(ctx, r) if err != nil { @@ -207,12 +222,17 @@ func (qb *StudioStore) Create(ctx context.Context, newObject *models.Studio) err } } + const partial = false + if err := qb.setCustomFields(ctx, id, newObject.CustomFields, partial); err != nil { + return err + } + updated, err := qb.find(ctx, id) if err != nil { return fmt.Errorf("finding after create: %w", err) } - *newObject = *updated + *newObject.Studio = *updated return nil } @@ -253,13 +273,17 @@ func (qb *StudioStore) UpdatePartial(ctx context.Context, input models.StudioPar } } - return qb.Find(ctx, input.ID) + if err := qb.SetCustomFields(ctx, input.ID, input.CustomFields); err != nil { + return nil, err + } + + return qb.find(ctx, input.ID) } // This is only used by the Import/Export functionality -func (qb *StudioStore) Update(ctx context.Context, updatedObject *models.Studio) error { +func (qb *StudioStore) Update(ctx 
context.Context, updatedObject *models.UpdateStudioInput) error { var r studioRow - r.fromStudio(*updatedObject) + r.fromStudio(*updatedObject.Studio) if err := qb.tableMgr.updateByID(ctx, updatedObject.ID, r); err != nil { return err @@ -287,6 +311,10 @@ func (qb *StudioStore) Update(ctx context.Context, updatedObject *models.Studio) } } + if err := qb.SetCustomFields(ctx, updatedObject.ID, updatedObject.CustomFields); err != nil { + return err + } + return nil } @@ -601,12 +629,32 @@ func (qb *StudioStore) sortByScenesDuration(direction string) string { ) %s`, sceneTable, scenesFilesTable, scenesFilesTable, sceneIDColumn, sceneTable, scenesFilesTable, sceneTable, studioIDColumn, studioTable, getSortDirection(direction)) } +// used for sorting on studio latest scene +var selectStudioLatestSceneSQL = utils.StrFormat( + "SELECT MAX(date) FROM ("+ + "SELECT {date} FROM {scenes} s "+ + "WHERE s.{studio_id} = {studios}.id"+ + ")", + map[string]interface{}{ + "scenes": sceneTable, + "studios": studioTable, + "studio_id": studioIDColumn, + "date": sceneDateColumn, + }, +) + +func (qb *StudioStore) sortByLatestScene(direction string) string { + // need to get the latest date from scenes + return " ORDER BY (" + selectStudioLatestSceneSQL + ") " + direction +} + var studioSortOptions = sortOptions{ "child_count", "created_at", "galleries_count", "id", "images_count", + "latest_scene", "name", "scenes_count", "scenes_duration", @@ -646,6 +694,8 @@ func (qb *StudioStore) getStudioSort(findFilter *models.FindFilterType) (string, sortQuery += getCountSort(studioTable, galleryTable, studioIDColumn, direction) case "child_count": sortQuery += getCountSort(studioTable, studioTable, studioParentIDColumn, direction) + case "latest_scene": + sortQuery += qb.sortByLatestScene(direction) default: sortQuery += getSort(sort, direction, "studios") } diff --git a/pkg/sqlite/studio_filter.go b/pkg/sqlite/studio_filter.go index 83a917701..cfe3c59b6 100644 --- 
a/pkg/sqlite/studio_filter.go +++ b/pkg/sqlite/studio_filter.go @@ -59,6 +59,7 @@ func (qb *studioFilterHandler) criterionHandler() criterionHandler { intCriterionHandler(studioFilter.Rating100, studioTable+".rating", nil), boolCriterionHandler(studioFilter.Favorite, studioTable+".favorite", nil), boolCriterionHandler(studioFilter.IgnoreAutoTag, studioTable+".ignore_auto_tag", nil), + boolCriterionHandler(studioFilter.Organized, studioTable+".organized", nil), criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { if studioFilter.StashID != nil { @@ -84,6 +85,7 @@ func (qb *studioFilterHandler) criterionHandler() criterionHandler { qb.sceneCountCriterionHandler(studioFilter.SceneCount), qb.imageCountCriterionHandler(studioFilter.ImageCount), qb.galleryCountCriterionHandler(studioFilter.GalleryCount), + qb.groupCountCriterionHandler(studioFilter.GroupCount), qb.parentCriterionHandler(studioFilter.Parents), qb.aliasCriterionHandler(studioFilter.Aliases), qb.tagsCriterionHandler(studioFilter.Tags), @@ -117,6 +119,22 @@ func (qb *studioFilterHandler) criterionHandler() criterionHandler { studioRepository.galleries.innerJoin(f, "", "studios.id") }, }, + + &relatedFilterHandler{ + relatedIDCol: "groups.id", + relatedRepo: groupRepository.repository, + relatedHandler: &groupFilterHandler{studioFilter.GroupsFilter}, + joinFn: func(f *filterBuilder) { + studioRepository.groups.innerJoin(f, "", "studios.id") + }, + }, + + &customFieldsFilterHandler{ + table: studiosCustomFieldsTable.GetTable(), + fkCol: studioIDColumn, + c: studioFilter.CustomFields, + idCol: "studios.id", + }, } } @@ -172,6 +190,17 @@ func (qb *studioFilterHandler) galleryCountCriterionHandler(galleryCount *models } } +func (qb *studioFilterHandler) groupCountCriterionHandler(groupCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if groupCount != nil { + f.addLeftJoin("groups", "", "groups.studio_id = studios.id") + clause, args := 
getIntCriterionWhereClause("count(distinct groups.id)", *groupCount) + + f.addHaving(clause, args...) + } + } +} + func (qb *studioFilterHandler) tagCountCriterionHandler(tagCount *models.IntCriterionInput) criterionHandlerFunc { h := countCriterionHandlerBuilder{ primaryTable: studioTable, diff --git a/pkg/sqlite/studio_test.go b/pkg/sqlite/studio_test.go index 003877c77..eebc677c3 100644 --- a/pkg/sqlite/studio_test.go +++ b/pkg/sqlite/studio_test.go @@ -11,6 +11,7 @@ import ( "strconv" "strings" "testing" + "time" "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" @@ -47,6 +48,566 @@ func TestStudioFindByName(t *testing.T) { }) } +func loadStudioRelationships(ctx context.Context, expected models.Studio, actual *models.Studio) error { + if expected.Aliases.Loaded() { + if err := actual.LoadAliases(ctx, db.Studio); err != nil { + return err + } + } + if expected.URLs.Loaded() { + if err := actual.LoadURLs(ctx, db.Studio); err != nil { + return err + } + } + if expected.TagIDs.Loaded() { + if err := actual.LoadTagIDs(ctx, db.Studio); err != nil { + return err + } + } + if expected.StashIDs.Loaded() { + if err := actual.LoadStashIDs(ctx, db.Studio); err != nil { + return err + } + } + + return nil +} + +func Test_StudioStore_Create(t *testing.T) { + var ( + name = "name" + details = "details" + url = "url" + rating = 3 + aliases = []string{"alias1", "alias2"} + ignoreAutoTag = true + organized = true + favorite = true + endpoint1 = "endpoint1" + endpoint2 = "endpoint2" + stashID1 = "stashid1" + stashID2 = "stashid2" + createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + ) + + tests := []struct { + name string + newObject models.CreateStudioInput + wantErr bool + }{ + { + "full", + models.CreateStudioInput{ + Studio: &models.Studio{ + Name: name, + URLs: models.NewRelatedStrings([]string{url}), + Favorite: favorite, + Rating: &rating, + Details: details, + IgnoreAutoTag: 
ignoreAutoTag, + Organized: organized, + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithStudio], tagIDs[tagIdx1WithDupName]}), + Aliases: models.NewRelatedStrings(aliases), + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + UpdatedAt: epochTime, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + UpdatedAt: epochTime, + }, + }), + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + CustomFields: testCustomFields, + }, + false, + }, + { + "invalid tag id", + models.CreateStudioInput{ + Studio: &models.Studio{ + Name: name, + TagIDs: models.NewRelatedIDs([]int{invalidID}), + }, + }, + true, + }, + } + + qb := db.Studio + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + p := tt.newObject + if err := qb.Create(ctx, &p); (err != nil) != tt.wantErr { + t.Errorf("StudioStore.Create() error = %v, wantErr = %v", err, tt.wantErr) + } + + if tt.wantErr { + assert.Zero(p.ID) + return + } + + assert.NotZero(p.ID) + + copy := *tt.newObject.Studio + copy.ID = p.ID + + // load relationships + if err := loadStudioRelationships(ctx, copy, p.Studio); err != nil { + t.Errorf("loadStudioRelationships() error = %v", err) + return + } + + assert.Equal(copy, *p.Studio) + + // ensure can find the Studio + found, err := qb.Find(ctx, p.ID) + if err != nil { + t.Errorf("StudioStore.Find() error = %v", err) + } + + if !assert.NotNil(found) { + return + } + + // load relationships + if err := loadStudioRelationships(ctx, copy, found); err != nil { + t.Errorf("loadStudioRelationships() error = %v", err) + return + } + assert.Equal(copy, *found) + + // ensure custom fields are set + cf, err := qb.GetCustomFields(ctx, p.ID) + if err != nil { + t.Errorf("StudioStore.GetCustomFields() error = %v", err) + return + } + + assert.Equal(tt.newObject.CustomFields, cf) + + return + }) + } +} + +func Test_StudioStore_Update(t *testing.T) { + var ( + name = 
"name" + details = "details" + url = "url" + rating = 3 + aliases = []string{"aliasX", "aliasY"} + ignoreAutoTag = true + organized = true + favorite = true + endpoint1 = "endpoint1" + endpoint2 = "endpoint2" + stashID1 = "stashid1" + stashID2 = "stashid2" + createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + ) + + tests := []struct { + name string + updatedObject models.UpdateStudioInput + wantErr bool + }{ + { + "full", + models.UpdateStudioInput{ + Studio: &models.Studio{ + ID: studioIDs[studioIdxWithGallery], + Name: name, + URLs: models.NewRelatedStrings([]string{url}), + Favorite: favorite, + Rating: &rating, + Details: details, + IgnoreAutoTag: ignoreAutoTag, + Organized: organized, + Aliases: models.NewRelatedStrings(aliases), + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithStudio]}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + UpdatedAt: epochTime, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + UpdatedAt: epochTime, + }, + }), + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + }, + false, + }, + { + "clear nullables", + models.UpdateStudioInput{ + Studio: &models.Studio{ + ID: studioIDs[studioIdxWithGallery], + Name: name, // name is mandatory + URLs: models.NewRelatedStrings([]string{}), + Aliases: models.NewRelatedStrings([]string{}), + TagIDs: models.NewRelatedIDs([]int{}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{}), + }, + }, + false, + }, + { + "clear tag ids", + models.UpdateStudioInput{ + Studio: &models.Studio{ + ID: studioIDs[sceneIdxWithTag], + Name: name, // name is mandatory + TagIDs: models.NewRelatedIDs([]int{}), + }, + }, + false, + }, + { + "set custom fields", + models.UpdateStudioInput{ + Studio: &models.Studio{ + ID: studioIDs[studioIdxWithGallery], + Name: name, // name is mandatory + }, + CustomFields: models.CustomFieldsInput{ + Full: 
testCustomFields, + }, + }, + false, + }, + { + "clear custom fields", + models.UpdateStudioInput{ + Studio: &models.Studio{ + ID: studioIDs[studioIdxWithGallery], + Name: name, // name is mandatory + }, + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{}, + }, + }, + false, + }, + { + "invalid tag id", + models.UpdateStudioInput{ + Studio: &models.Studio{ + ID: studioIDs[sceneIdxWithGallery], + Name: name, // name is mandatory + TagIDs: models.NewRelatedIDs([]int{invalidID}), + }, + }, + true, + }, + } + + qb := db.Studio + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + copy := *tt.updatedObject.Studio + + if err := qb.Update(ctx, &tt.updatedObject); (err != nil) != tt.wantErr { + t.Errorf("StudioStore.Update() error = %v, wantErr %v", err, tt.wantErr) + } + + if tt.wantErr { + return + } + + s, err := qb.Find(ctx, tt.updatedObject.ID) + if err != nil { + t.Errorf("StudioStore.Find() error = %v", err) + } + + // load relationships + if err := loadStudioRelationships(ctx, copy, s); err != nil { + t.Errorf("loadStudioRelationships() error = %v", err) + return + } + + assert.Equal(copy, *s) + + // ensure custom fields are correct + if tt.updatedObject.CustomFields.Full != nil { + cf, err := qb.GetCustomFields(ctx, tt.updatedObject.ID) + if err != nil { + t.Errorf("StudioStore.GetCustomFields() error = %v", err) + return + } + + assert.Equal(tt.updatedObject.CustomFields.Full, cf) + } + }) + } +} + +func clearStudioPartial() models.StudioPartial { + nullString := models.OptionalString{Set: true, Null: true} + nullInt := models.OptionalInt{Set: true, Null: true} + + // leave mandatory fields + return models.StudioPartial{ + URLs: &models.UpdateStrings{Mode: models.RelationshipUpdateModeSet}, + Aliases: &models.UpdateStrings{Mode: models.RelationshipUpdateModeSet}, + Rating: nullInt, + Details: nullString, + TagIDs: &models.UpdateIDs{Mode: 
models.RelationshipUpdateModeSet}, + StashIDs: &models.UpdateStashIDs{Mode: models.RelationshipUpdateModeSet}, + } +} + +func Test_StudioStore_UpdatePartial(t *testing.T) { + var ( + name = "name" + details = "details" + url = "url" + aliases = []string{"aliasX", "aliasY"} + rating = 3 + ignoreAutoTag = true + organized = true + favorite = true + endpoint1 = "endpoint1" + endpoint2 = "endpoint2" + stashID1 = "stashid1" + stashID2 = "stashid2" + createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + ) + + tests := []struct { + name string + id int + partial models.StudioPartial + want models.Studio + wantErr bool + }{ + { + "full", + studioIDs[studioIdxWithDupName], + models.StudioPartial{ + Name: models.NewOptionalString(name), + URLs: &models.UpdateStrings{ + Values: []string{url}, + Mode: models.RelationshipUpdateModeSet, + }, + Aliases: &models.UpdateStrings{ + Values: aliases, + Mode: models.RelationshipUpdateModeSet, + }, + Favorite: models.NewOptionalBool(favorite), + Rating: models.NewOptionalInt(rating), + Details: models.NewOptionalString(details), + IgnoreAutoTag: models.NewOptionalBool(ignoreAutoTag), + Organized: models.NewOptionalBool(organized), + TagIDs: &models.UpdateIDs{ + IDs: []int{tagIDs[tagIdx1WithStudio], tagIDs[tagIdx1WithDupName]}, + Mode: models.RelationshipUpdateModeSet, + }, + StashIDs: &models.UpdateStashIDs{ + StashIDs: []models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + UpdatedAt: epochTime, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + UpdatedAt: epochTime, + }, + }, + Mode: models.RelationshipUpdateModeSet, + }, + CreatedAt: models.NewOptionalTime(createdAt), + UpdatedAt: models.NewOptionalTime(updatedAt), + }, + models.Studio{ + ID: studioIDs[studioIdxWithDupName], + Name: name, + URLs: models.NewRelatedStrings([]string{url}), + Aliases: models.NewRelatedStrings(aliases), + Favorite: favorite, + Rating: &rating, + Details: details, + IgnoreAutoTag: 
ignoreAutoTag, + Organized: organized, + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithStudio]}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + UpdatedAt: epochTime, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + UpdatedAt: epochTime, + }, + }), + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + false, + }, + { + "clear all", + studioIDs[studioIdxWithTwoTags], + clearStudioPartial(), + models.Studio{ + ID: studioIDs[studioIdxWithTwoTags], + Name: getStudioStringValue(studioIdxWithTwoTags, "Name"), + Favorite: getStudioBoolValue(studioIdxWithTwoTags), + Aliases: models.NewRelatedStrings([]string{}), + TagIDs: models.NewRelatedIDs([]int{}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{}), + IgnoreAutoTag: getIgnoreAutoTag(studioIdxWithTwoTags), + }, + false, + }, + { + "invalid id", + invalidID, + models.StudioPartial{Name: models.NewOptionalString(name)}, + models.Studio{}, + true, + }, + } + for _, tt := range tests { + qb := db.Studio + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + tt.partial.ID = tt.id + + got, err := qb.UpdatePartial(ctx, tt.partial) + if (err != nil) != tt.wantErr { + t.Errorf("StudioStore.UpdatePartial() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if tt.wantErr { + return + } + + if err := loadStudioRelationships(ctx, tt.want, got); err != nil { + t.Errorf("loadStudioRelationships() error = %v", err) + return + } + + assert.Equal(tt.want, *got) + + s, err := qb.Find(ctx, tt.id) + if err != nil { + t.Errorf("StudioStore.Find() error = %v", err) + } + + // load relationships + if err := loadStudioRelationships(ctx, tt.want, s); err != nil { + t.Errorf("loadStudioRelationships() error = %v", err) + return + } + + assert.Equal(tt.want, *s) + }) + } +} + +func Test_StudioStore_UpdatePartialCustomFields(t *testing.T) { + tests := []struct { + name string + id 
int + partial models.StudioPartial + expected map[string]interface{} // nil to use the partial + }{ + { + "set custom fields", + studioIDs[studioIdxWithGallery], + models.StudioPartial{ + CustomFields: models.CustomFieldsInput{ + Full: testCustomFields, + }, + }, + nil, + }, + { + "clear custom fields", + studioIDs[studioIdxWithGallery], + models.StudioPartial{ + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{}, + }, + }, + nil, + }, + { + "partial custom fields", + studioIDs[studioIdxWithGallery], + models.StudioPartial{ + CustomFields: models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "string": "bbb", + "new_field": "new", + }, + }, + }, + map[string]interface{}{ + "int": int64(2), + "real": 0.7, + "string": "bbb", + "new_field": "new", + }, + }, + } + for _, tt := range tests { + qb := db.Studio + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + tt.partial.ID = tt.id + + _, err := qb.UpdatePartial(ctx, tt.partial) + if err != nil { + t.Errorf("StudioStore.UpdatePartial() error = %v", err) + return + } + + // ensure custom fields are correct + cf, err := qb.GetCustomFields(ctx, tt.id) + if err != nil { + t.Errorf("StudioStore.GetCustomFields() error = %v", err) + return + } + if tt.expected == nil { + assert.Equal(tt.partial.CustomFields.Full, cf) + } else { + assert.Equal(tt.expected, cf) + } + }) + } +} + func TestStudioQueryNameOr(t *testing.T) { const studio1Idx = 1 const studio2Idx = 2 @@ -82,14 +643,6 @@ func TestStudioQueryNameOr(t *testing.T) { }) } -func loadStudioRelationships(ctx context.Context, t *testing.T, s *models.Studio) error { - if err := s.LoadURLs(ctx, db.Studio); err != nil { - return err - } - - return nil -} - func TestStudioQueryNameAndUrl(t *testing.T) { const studioIdx = 1 studioName := getStudioStringValue(studioIdx, "Name") @@ -311,13 +864,13 @@ func TestStudioDestroyParent(t *testing.T) { // create parent and child studios if err := 
withTxn(func(ctx context.Context) error { - createdParent, err := createStudio(ctx, db.Studio, parentName, nil) + createdParent, err := createStudio(ctx, db.Studio, parentName, nil, nil) if err != nil { return fmt.Errorf("Error creating parent studio: %s", err.Error()) } parentID := createdParent.ID - createdChild, err := createStudio(ctx, db.Studio, childName, &parentID) + createdChild, err := createStudio(ctx, db.Studio, childName, &parentID, nil) if err != nil { return fmt.Errorf("Error creating child studio: %s", err.Error()) } @@ -373,13 +926,13 @@ func TestStudioUpdateClearParent(t *testing.T) { // create parent and child studios if err := withTxn(func(ctx context.Context) error { - createdParent, err := createStudio(ctx, db.Studio, parentName, nil) + createdParent, err := createStudio(ctx, db.Studio, parentName, nil, nil) if err != nil { return fmt.Errorf("Error creating parent studio: %s", err.Error()) } parentID := createdParent.ID - createdChild, err := createStudio(ctx, db.Studio, childName, &parentID) + createdChild, err := createStudio(ctx, db.Studio, childName, &parentID, nil) if err != nil { return fmt.Errorf("Error creating child studio: %s", err.Error()) } @@ -414,7 +967,7 @@ func TestStudioUpdateStudioImage(t *testing.T) { // create studio to test against const name = "TestStudioUpdateStudioImage" - created, err := createStudio(ctx, db.Studio, name, nil) + created, err := createStudio(ctx, db.Studio, name, nil, nil) if err != nil { return fmt.Errorf("Error creating studio: %s", err.Error()) } @@ -578,7 +1131,7 @@ func TestStudioStashIDs(t *testing.T) { // create studio to test against const name = "TestStudioStashIDs" - created, err := createStudio(ctx, db.Studio, name, nil) + created, err := createStudio(ctx, db.Studio, name, nil, nil) if err != nil { return fmt.Errorf("Error creating studio: %s", err.Error()) } @@ -990,7 +1543,7 @@ func TestStudioAlias(t *testing.T) { // create studio to test against const name = "TestStudioAlias" - created, err 
:= createStudio(ctx, db.Studio, name, nil) + created, err := createStudio(ctx, db.Studio, name, nil, nil) if err != nil { return fmt.Errorf("Error creating studio: %s", err.Error()) } @@ -1148,6 +1701,251 @@ func TestStudioQueryFast(t *testing.T) { }) } +func studiesToIDs(i []*models.Studio) []int { + ret := make([]int, len(i)) + for i, v := range i { + ret[i] = v.ID + } + + return ret +} + +func TestStudioQueryCustomFields(t *testing.T) { + tests := []struct { + name string + filter *models.StudioFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "equals", + &models.StudioFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierEquals, + Value: []any{getStudioStringValue(studioIdxWithTwoScenes, "custom")}, + }, + }, + }, + []int{studioIdxWithTwoScenes}, + nil, + false, + }, + { + "not equals", + &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: getStudioStringValue(studioIdxWithTwoScenes, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotEquals, + Value: []any{getStudioStringValue(studioIdxWithTwoScenes, "custom")}, + }, + }, + }, + nil, + []int{studioIdxWithTwoScenes}, + false, + }, + { + "includes", + &models.StudioFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierIncludes, + Value: []any{getStudioStringValue(studioIdxWithTwoScenes, "custom")[9:]}, + }, + }, + }, + []int{studioIdxWithTwoScenes}, + nil, + false, + }, + { + "excludes", + &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: getStudioStringValue(studioIdxWithTwoScenes, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierExcludes, + Value: 
[]any{getStudioStringValue(studioIdxWithTwoScenes, "custom")[9:]}, + }, + }, + }, + nil, + []int{studioIdxWithTwoScenes}, + false, + }, + { + "regex", + &models.StudioFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{".*1_custom"}, + }, + }, + }, + []int{studioIdxWithTwoScenes}, + nil, + false, + }, + { + "invalid regex", + &models.StudioFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "not matches regex", + &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: getStudioStringValue(studioIdxWithTwoScenes, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{".*1_custom"}, + }, + }, + }, + nil, + []int{studioIdxWithTwoScenes}, + false, + }, + { + "invalid not matches regex", + &models.StudioFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "null", + &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: getStudioStringValue(studioIdxWithTwoScenes, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "not existing", + Modifier: models.CriterionModifierIsNull, + }, + }, + }, + []int{studioIdxWithTwoScenes}, + nil, + false, + }, + { + "not null", + &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: getStudioStringValue(studioIdxWithTwoScenes, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: 
models.CriterionModifierNotNull, + }, + }, + }, + []int{studioIdxWithTwoScenes}, + nil, + false, + }, + { + "between", + &models.StudioFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + []int{studioIdxWithGroup}, + nil, + false, + }, + { + "not between", + &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: getStudioStringValue(studioIdxWithGroup, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierNotBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + nil, + []int{studioIdxWithGroup}, + false, + }, + } + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + studios, _, err := db.Studio.Query(ctx, tt.filter, nil) + if (err != nil) != tt.wantErr { + t.Errorf("StudioStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } + + ids := studiesToIDs(studios) + include := indexesToIDs(studioIDs, tt.includeIdxs) + exclude := indexesToIDs(studioIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } +} + // TODO Create // TODO Update // TODO Destroy diff --git a/pkg/sqlite/tables.go b/pkg/sqlite/tables.go index 7cddf25cc..53e62b166 100644 --- a/pkg/sqlite/tables.go +++ b/pkg/sqlite/tables.go @@ -27,6 +27,7 @@ var ( scenesStashIDsJoinTable = goqu.T("scene_stash_ids") scenesGroupsJoinTable = goqu.T(groupsScenesTable) scenesURLsJoinTable = goqu.T(scenesURLsTable) + scenesCustomFieldsTable = goqu.T("scene_custom_fields") sceneMarkersTagsJoinTable = goqu.T(sceneMarkersTagsTable) @@ -40,6 +41,7 @@ var ( studiosURLsJoinTable = goqu.T(studioURLsTable) studiosTagsJoinTable = goqu.T(studiosTagsTable) studiosStashIDsJoinTable = 
goqu.T("studio_stash_ids") + studiosCustomFieldsTable = goqu.T("studio_custom_fields") groupsURLsJoinTable = goqu.T(groupURLsTable) groupsTagsJoinTable = goqu.T(groupsTagsTable) @@ -48,6 +50,7 @@ var ( tagsAliasesJoinTable = goqu.T(tagAliasesTable) tagRelationsJoinTable = goqu.T(tagRelationsTable) tagsStashIDsJoinTable = goqu.T("tag_stash_ids") + tagsCustomFieldsTable = goqu.T("tag_custom_fields") ) var ( diff --git a/pkg/sqlite/tag.go b/pkg/sqlite/tag.go index dd730c62c..a926dd56e 100644 --- a/pkg/sqlite/tag.go +++ b/pkg/sqlite/tag.go @@ -104,9 +104,12 @@ type tagRepositoryType struct { aliases stringRepository stashIDs stashIDRepository - scenes joinRepository - images joinRepository - galleries joinRepository + scenes joinRepository + images joinRepository + galleries joinRepository + groups joinRepository + performers joinRepository + studios joinRepository } var ( @@ -152,11 +155,36 @@ var ( fkColumn: galleryIDColumn, foreignTable: galleryTable, }, + groups: joinRepository{ + repository: repository{ + tableName: groupsTagsTable, + idColumn: tagIDColumn, + }, + fkColumn: groupIDColumn, + foreignTable: groupTable, + }, + performers: joinRepository{ + repository: repository{ + tableName: performersTagsTable, + idColumn: tagIDColumn, + }, + fkColumn: performerIDColumn, + foreignTable: performerTable, + }, + studios: joinRepository{ + repository: repository{ + tableName: studiosTagsTable, + idColumn: tagIDColumn, + }, + fkColumn: studioIDColumn, + foreignTable: studioTable, + }, } ) type TagStore struct { blobJoinQueryBuilder + customFieldsStore tableMgr *table } @@ -167,6 +195,10 @@ func NewTagStore(blobStore *BlobStore) *TagStore { blobStore: blobStore, joinTable: tagTable, }, + customFieldsStore: customFieldsStore{ + table: tagsCustomFieldsTable, + fk: tagsCustomFieldsTable.Col(tagIDColumn), + }, tableMgr: tagTableMgr, } } @@ -179,9 +211,9 @@ func (qb *TagStore) selectDataset() *goqu.SelectDataset { return dialect.From(qb.table()).Select(qb.table().All()) } 
-func (qb *TagStore) Create(ctx context.Context, newObject *models.Tag) error { +func (qb *TagStore) Create(ctx context.Context, newObject *models.CreateTagInput) error { var r tagRow - r.fromTag(*newObject) + r.fromTag(*newObject.Tag) id, err := qb.tableMgr.insertID(ctx, r) if err != nil { @@ -212,12 +244,17 @@ func (qb *TagStore) Create(ctx context.Context, newObject *models.Tag) error { } } + const partial = false + if err := qb.setCustomFields(ctx, id, newObject.CustomFields, partial); err != nil { + return err + } + updated, err := qb.find(ctx, id) if err != nil { return fmt.Errorf("finding after create: %w", err) } - *newObject = *updated + *newObject.Tag = *updated return nil } @@ -261,12 +298,16 @@ func (qb *TagStore) UpdatePartial(ctx context.Context, id int, partial models.Ta } } + if err := qb.SetCustomFields(ctx, id, partial.CustomFields); err != nil { + return nil, err + } + return qb.find(ctx, id) } -func (qb *TagStore) Update(ctx context.Context, updatedObject *models.Tag) error { +func (qb *TagStore) Update(ctx context.Context, updatedObject *models.UpdateTagInput) error { var r tagRow - r.fromTag(*updatedObject) + r.fromTag(*updatedObject.Tag) if err := qb.tableMgr.updateByID(ctx, updatedObject.ID, r); err != nil { return err @@ -296,6 +337,10 @@ func (qb *TagStore) Update(ctx context.Context, updatedObject *models.Tag) error } } + if err := qb.SetCustomFields(ctx, updatedObject.ID, updatedObject.CustomFields); err != nil { + return err + } + return nil } diff --git a/pkg/sqlite/tag_filter.go b/pkg/sqlite/tag_filter.go index 344b7de91..b3a7c1756 100644 --- a/pkg/sqlite/tag_filter.go +++ b/pkg/sqlite/tag_filter.go @@ -101,6 +101,13 @@ func (qb *tagFilterHandler) criterionHandler() criterionHandler { ×tampCriterionHandler{tagFilter.CreatedAt, "tags.created_at", nil}, ×tampCriterionHandler{tagFilter.UpdatedAt, "tags.updated_at", nil}, + &customFieldsFilterHandler{ + table: tagsCustomFieldsTable.GetTable(), + fkCol: tagIDColumn, + c: 
tagFilter.CustomFields, + idCol: "tags.id", + }, + &relatedFilterHandler{ relatedIDCol: "scenes_tags.scene_id", relatedRepo: sceneRepository.repository, @@ -127,6 +134,33 @@ func (qb *tagFilterHandler) criterionHandler() criterionHandler { tagRepository.galleries.innerJoin(f, "", "tags.id") }, }, + + &relatedFilterHandler{ + relatedIDCol: "groups_tags.group_id", + relatedRepo: groupRepository.repository, + relatedHandler: &groupFilterHandler{tagFilter.GroupsFilter}, + joinFn: func(f *filterBuilder) { + tagRepository.groups.innerJoin(f, "", "tags.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "performers_tags.performer_id", + relatedRepo: performerRepository.repository, + relatedHandler: &performerFilterHandler{tagFilter.PerformersFilter}, + joinFn: func(f *filterBuilder) { + tagRepository.performers.innerJoin(f, "", "tags.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "studios_tags.studio_id", + relatedRepo: studioRepository.repository, + relatedHandler: &studioFilterHandler{tagFilter.StudiosFilter}, + joinFn: func(f *filterBuilder) { + tagRepository.studios.innerJoin(f, "", "tags.id") + }, + }, } } diff --git a/pkg/sqlite/tag_test.go b/pkg/sqlite/tag_test.go index f1bac19b2..b673de3f9 100644 --- a/pkg/sqlite/tag_test.go +++ b/pkg/sqlite/tag_test.go @@ -1012,8 +1012,10 @@ func TestTagUpdateTagImage(t *testing.T) { // create tag to test against const name = "TestTagUpdateTagImage" - tag := models.Tag{ - Name: name, + tag := models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + }, } err := qb.Create(ctx, &tag) if err != nil { @@ -1032,15 +1034,17 @@ func TestTagUpdateAlias(t *testing.T) { // create tag to test against const name = "TestTagUpdateAlias" - tag := models.Tag{ - Name: name, + tag := models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + }, } err := qb.Create(ctx, &tag) if err != nil { return fmt.Errorf("Error creating tag: %s", err.Error()) } - aliases := []string{"alias1", "alias2"} + aliases := []string{"updatedAlias1", 
"updatedAlias2"} err = qb.UpdateAliases(ctx, tag.ID, aliases) if err != nil { return fmt.Errorf("Error updating tag aliases: %s", err.Error()) @@ -1065,8 +1069,10 @@ func TestTagStashIDs(t *testing.T) { // create tag to test against const name = "TestTagStashIDs" - tag := models.Tag{ - Name: name, + tag := models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + }, } err := qb.Create(ctx, &tag) if err != nil { @@ -1089,9 +1095,11 @@ func TestTagFindByStashID(t *testing.T) { const name = "TestTagFindByStashID" const stashID = "stashid" const endpoint = "endpoint" - tag := models.Tag{ - Name: name, - StashIDs: models.NewRelatedStashIDs([]models.StashID{{StashID: stashID, Endpoint: endpoint}}), + tag := models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + StashIDs: models.NewRelatedStashIDs([]models.StashID{{StashID: stashID, Endpoint: endpoint}}), + }, } err := qb.Create(ctx, &tag) if err != nil { @@ -1263,8 +1271,626 @@ func TestTagMerge(t *testing.T) { } } -// TODO Create -// TODO Update +func loadTagRelationships(ctx context.Context, expected models.Tag, actual *models.Tag) error { + if expected.Aliases.Loaded() { + if err := actual.LoadAliases(ctx, db.Tag); err != nil { + return err + } + } + if expected.ParentIDs.Loaded() { + if err := actual.LoadParentIDs(ctx, db.Tag); err != nil { + return err + } + } + if expected.ChildIDs.Loaded() { + if err := actual.LoadChildIDs(ctx, db.Tag); err != nil { + return err + } + } + if expected.StashIDs.Loaded() { + if err := actual.LoadStashIDs(ctx, db.Tag); err != nil { + return err + } + } + + return nil +} + +func Test_TagStore_Create(t *testing.T) { + var ( + name = "name" + sortName = "sortName" + description = "description" + favorite = true + ignoreAutoTag = true + aliases = []string{"alias1", "alias2"} + endpoint1 = "endpoint1" + endpoint2 = "endpoint2" + stashID1 = "stashid1" + stashID2 = "stashid2" + createdAt = epochTime + updatedAt = epochTime + ) + + tests := []struct { + name string + newObject 
models.CreateTagInput + wantErr bool + }{ + { + "full", + models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + SortName: sortName, + Description: description, + Favorite: favorite, + IgnoreAutoTag: ignoreAutoTag, + Aliases: models.NewRelatedStrings(aliases), + ParentIDs: models.NewRelatedIDs([]int{tagIDs[tagIdxWithScene]}), + ChildIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene]}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + UpdatedAt: epochTime, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + UpdatedAt: epochTime, + }, + }), + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + CustomFields: testCustomFields, + }, + false, + }, + { + "invalid parent id", + models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + ParentIDs: models.NewRelatedIDs([]int{invalidID}), + }, + }, + true, + }, + { + "invalid child id", + models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + ChildIDs: models.NewRelatedIDs([]int{invalidID}), + }, + }, + true, + }, + } + + qb := db.Tag + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + p := tt.newObject + if err := qb.Create(ctx, &p); (err != nil) != tt.wantErr { + t.Errorf("TagStore.Create() error = %v, wantErr = %v", err, tt.wantErr) + } + + if tt.wantErr { + assert.Zero(p.ID) + return + } + + assert.NotZero(p.ID) + + copy := *tt.newObject.Tag + copy.ID = p.ID + + // load relationships + if err := loadTagRelationships(ctx, copy, p.Tag); err != nil { + t.Errorf("loadTagRelationships() error = %v", err) + return + } + + assert.Equal(copy, *p.Tag) + + // ensure can find the tag + found, err := qb.Find(ctx, p.ID) + if err != nil { + t.Errorf("TagStore.Find() error = %v", err) + } + + if !assert.NotNil(found) { + return + } + + // load relationships + if err := loadTagRelationships(ctx, copy, found); err != nil { + t.Errorf("loadTagRelationships() error = %v", err) + 
return + } + assert.Equal(copy, *found) + + // ensure custom fields are set + cf, err := qb.GetCustomFields(ctx, p.ID) + if err != nil { + t.Errorf("TagStore.GetCustomFields() error = %v", err) + return + } + + assert.Equal(tt.newObject.CustomFields, cf) + + return + }) + } +} + +func Test_TagStore_Update(t *testing.T) { + var ( + name = "name" + sortName = "sortName" + description = "description" + favorite = true + ignoreAutoTag = true + aliases = []string{"alias1", "alias2"} + endpoint1 = "endpoint1" + endpoint2 = "endpoint2" + stashID1 = "stashid1" + stashID2 = "stashid2" + createdAt = epochTime + updatedAt = epochTime + ) + + tests := []struct { + name string + updatedObject models.UpdateTagInput + wantErr bool + }{ + { + "full", + models.UpdateTagInput{ + Tag: &models.Tag{ + ID: tagIDs[tagIdxWithGallery], + Name: name, + SortName: sortName, + Description: description, + Favorite: favorite, + IgnoreAutoTag: ignoreAutoTag, + Aliases: models.NewRelatedStrings(aliases), + ParentIDs: models.NewRelatedIDs([]int{tagIDs[tagIdxWithScene]}), + ChildIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene]}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + UpdatedAt: epochTime, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + UpdatedAt: epochTime, + }, + }), + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{ + "string": "updated", + "int": int64(999), + "real": 9.99, + }, + }, + }, + false, + }, + { + "set custom fields", + models.UpdateTagInput{ + Tag: &models.Tag{ + ID: tagIDs[tagIdxWithGallery], + Name: tagNames[tagIdxWithGallery], + }, + CustomFields: models.CustomFieldsInput{ + Full: testCustomFields, + }, + }, + false, + }, + { + "clear custom fields", + models.UpdateTagInput{ + Tag: &models.Tag{ + ID: tagIDs[tagIdxWithGallery], + Name: tagNames[tagIdxWithGallery], + }, + CustomFields: models.CustomFieldsInput{ + Full: 
map[string]interface{}{}, + }, + }, + false, + }, + { + "invalid parent id", + models.UpdateTagInput{ + Tag: &models.Tag{ + ID: tagIDs[tagIdxWithGallery], + Name: tagNames[tagIdxWithGallery], + ParentIDs: models.NewRelatedIDs([]int{invalidID}), + }, + }, + true, + }, + { + "invalid child id", + models.UpdateTagInput{ + Tag: &models.Tag{ + ID: tagIDs[tagIdxWithGallery], + Name: tagNames[tagIdxWithGallery], + ChildIDs: models.NewRelatedIDs([]int{invalidID}), + }, + }, + true, + }, + } + + qb := db.Tag + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + p := tt.updatedObject + if err := qb.Update(ctx, &p); (err != nil) != tt.wantErr { + t.Errorf("TagStore.Update() error = %v, wantErr = %v", err, tt.wantErr) + } + + if tt.wantErr { + return + } + + s, err := qb.Find(ctx, tt.updatedObject.ID) + if err != nil { + t.Errorf("TagStore.Find() error = %v", err) + return + } + + // load relationships + if err := loadTagRelationships(ctx, *tt.updatedObject.Tag, s); err != nil { + t.Errorf("loadTagRelationships() error = %v", err) + return + } + + assert.Equal(*tt.updatedObject.Tag, *s) + + // ensure custom fields are correct + if tt.updatedObject.CustomFields.Full != nil { + cf, err := qb.GetCustomFields(ctx, tt.updatedObject.ID) + if err != nil { + t.Errorf("TagStore.GetCustomFields() error = %v", err) + return + } + + assert.Equal(tt.updatedObject.CustomFields.Full, cf) + } + }) + } +} + +func Test_TagStore_UpdatePartialCustomFields(t *testing.T) { + tests := []struct { + name string + id int + partial models.TagPartial + expected map[string]interface{} // nil to use the partial + }{ + { + "set custom fields", + tagIDs[tagIdxWithGallery], + models.TagPartial{ + CustomFields: models.CustomFieldsInput{ + Full: testCustomFields, + }, + }, + nil, + }, + { + "clear custom fields", + tagIDs[tagIdxWithGallery], + models.TagPartial{ + CustomFields: models.CustomFieldsInput{ + Full: 
map[string]interface{}{}, + }, + }, + nil, + }, + { + "partial custom fields", + tagIDs[tagIdxWithGallery], + models.TagPartial{ + CustomFields: models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "string": "bbb", + "new_field": "new", + }, + }, + }, + map[string]interface{}{ + "int": int64(2), + "real": float64(1.7), + "string": "bbb", + "new_field": "new", + }, + }, + } + for _, tt := range tests { + qb := db.Tag + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + _, err := qb.UpdatePartial(ctx, tt.id, tt.partial) + if err != nil { + t.Errorf("TagStore.UpdatePartial() error = %v", err) + return + } + + // ensure custom fields are correct + cf, err := qb.GetCustomFields(ctx, tt.id) + if err != nil { + t.Errorf("TagStore.GetCustomFields() error = %v", err) + return + } + if tt.expected == nil { + assert.Equal(tt.partial.CustomFields.Full, cf) + } else { + assert.Equal(tt.expected, cf) + } + }) + } +} + +func TestTagQueryCustomFields(t *testing.T) { + tests := []struct { + name string + filter *models.TagFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "equals", + &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierEquals, + Value: []any{getTagStringValue(tagIdxWithGallery, "custom")}, + }, + }, + }, + []int{tagIdxWithGallery}, + nil, + false, + }, + { + "not equals", + &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: getTagStringValue(tagIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotEquals, + Value: []any{getTagStringValue(tagIdxWithGallery, "custom")}, + }, + }, + }, + nil, + []int{tagIdxWithGallery}, + false, + }, + { + "includes", + &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", 
+ Modifier: models.CriterionModifierIncludes, + Value: []any{getTagStringValue(tagIdxWithGallery, "custom")[9:]}, + }, + }, + }, + []int{tagIdxWithGallery}, + nil, + false, + }, + { + "excludes", + &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: getTagStringValue(tagIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierExcludes, + Value: []any{getTagStringValue(tagIdxWithGallery, "custom")[9:]}, + }, + }, + }, + nil, + []int{tagIdxWithGallery}, + false, + }, + { + "regex", + &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{".*17_custom"}, + }, + }, + }, + []int{tagIdxWithGallery}, + nil, + false, + }, + { + "invalid regex", + &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "not matches regex", + &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: getTagStringValue(tagIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{".*17_custom"}, + }, + }, + }, + nil, + []int{tagIdxWithGallery}, + false, + }, + { + "invalid not matches regex", + &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "null", + &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: getTagStringValue(tagIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: 
[]models.CustomFieldCriterionInput{ + { + Field: "not existing", + Modifier: models.CriterionModifierIsNull, + }, + }, + }, + []int{tagIdxWithGallery}, + nil, + false, + }, + { + "not null", + &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: getTagStringValue(tagIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotNull, + }, + }, + }, + []int{tagIdxWithGallery}, + nil, + false, + }, + { + "between", + &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + []int{tagIdx2WithScene}, + nil, + false, + }, + { + "not between", + &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: getTagStringValue(tagIdx2WithScene, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierNotBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + nil, + []int{tagIdx2WithScene}, + false, + }, + } + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + tags, _, err := db.Tag.Query(ctx, tt.filter, nil) + if (err != nil) != tt.wantErr { + t.Errorf("TagStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } + + ids := tagsToIDs(tags) + include := indexesToIDs(tagIDs, tt.includeIdxs) + exclude := indexesToIDs(tagIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } +} + // TODO Destroy // TODO Find // TODO FindBySceneID diff --git a/pkg/stashbox/graphql/generated_client.go b/pkg/stashbox/graphql/generated_client.go index 640a1c893..29b702a7f 100644 --- a/pkg/stashbox/graphql/generated_client.go +++ 
b/pkg/stashbox/graphql/generated_client.go @@ -9,8 +9,6 @@ import ( ) type StashBoxGraphQLClient interface { - FindSceneByFingerprint(ctx context.Context, fingerprint FingerprintQueryInput, interceptors ...clientv2.RequestInterceptor) (*FindSceneByFingerprint, error) - FindScenesByFullFingerprints(ctx context.Context, fingerprints []*FingerprintQueryInput, interceptors ...clientv2.RequestInterceptor) (*FindScenesByFullFingerprints, error) FindScenesBySceneFingerprints(ctx context.Context, fingerprints [][]*FingerprintQueryInput, interceptors ...clientv2.RequestInterceptor) (*FindScenesBySceneFingerprints, error) SearchScene(ctx context.Context, term string, interceptors ...clientv2.RequestInterceptor) (*SearchScene, error) SearchPerformer(ctx context.Context, term string, interceptors ...clientv2.RequestInterceptor) (*SearchPerformer, error) @@ -536,42 +534,6 @@ func (t *SceneFragment_Studio_StudioFragment_Parent) GetName() string { return t.Name } -type FindSceneByFingerprint_FindSceneByFingerprint_SceneFragment_Studio_StudioFragment_Parent struct { - ID string "json:\"id\" graphql:\"id\"" - Name string "json:\"name\" graphql:\"name\"" -} - -func (t *FindSceneByFingerprint_FindSceneByFingerprint_SceneFragment_Studio_StudioFragment_Parent) GetID() string { - if t == nil { - t = &FindSceneByFingerprint_FindSceneByFingerprint_SceneFragment_Studio_StudioFragment_Parent{} - } - return t.ID -} -func (t *FindSceneByFingerprint_FindSceneByFingerprint_SceneFragment_Studio_StudioFragment_Parent) GetName() string { - if t == nil { - t = &FindSceneByFingerprint_FindSceneByFingerprint_SceneFragment_Studio_StudioFragment_Parent{} - } - return t.Name -} - -type FindScenesByFullFingerprints_FindScenesByFullFingerprints_SceneFragment_Studio_StudioFragment_Parent struct { - ID string "json:\"id\" graphql:\"id\"" - Name string "json:\"name\" graphql:\"name\"" -} - -func (t *FindScenesByFullFingerprints_FindScenesByFullFingerprints_SceneFragment_Studio_StudioFragment_Parent) GetID() 
string { - if t == nil { - t = &FindScenesByFullFingerprints_FindScenesByFullFingerprints_SceneFragment_Studio_StudioFragment_Parent{} - } - return t.ID -} -func (t *FindScenesByFullFingerprints_FindScenesByFullFingerprints_SceneFragment_Studio_StudioFragment_Parent) GetName() string { - if t == nil { - t = &FindScenesByFullFingerprints_FindScenesByFullFingerprints_SceneFragment_Studio_StudioFragment_Parent{} - } - return t.Name -} - type FindScenesBySceneFingerprints_FindScenesBySceneFingerprints_SceneFragment_Studio_StudioFragment_Parent struct { ID string "json:\"id\" graphql:\"id\"" Name string "json:\"name\" graphql:\"name\"" @@ -695,28 +657,6 @@ func (t *SubmitPerformerDraft_SubmitPerformerDraft) GetID() *string { return t.ID } -type FindSceneByFingerprint struct { - FindSceneByFingerprint []*SceneFragment "json:\"findSceneByFingerprint\" graphql:\"findSceneByFingerprint\"" -} - -func (t *FindSceneByFingerprint) GetFindSceneByFingerprint() []*SceneFragment { - if t == nil { - t = &FindSceneByFingerprint{} - } - return t.FindSceneByFingerprint -} - -type FindScenesByFullFingerprints struct { - FindScenesByFullFingerprints []*SceneFragment "json:\"findScenesByFullFingerprints\" graphql:\"findScenesByFullFingerprints\"" -} - -func (t *FindScenesByFullFingerprints) GetFindScenesByFullFingerprints() []*SceneFragment { - if t == nil { - t = &FindScenesByFullFingerprints{} - } - return t.FindScenesByFullFingerprints -} - type FindScenesBySceneFingerprints struct { FindScenesBySceneFingerprints [][]*SceneFragment "json:\"findScenesBySceneFingerprints\" graphql:\"findScenesBySceneFingerprints\"" } @@ -849,278 +789,6 @@ func (t *SubmitPerformerDraft) GetSubmitPerformerDraft() *SubmitPerformerDraft_S return &t.SubmitPerformerDraft } -const FindSceneByFingerprintDocument = `query FindSceneByFingerprint ($fingerprint: FingerprintQueryInput!) { - findSceneByFingerprint(fingerprint: $fingerprint) { - ... 
SceneFragment - } -} -fragment SceneFragment on Scene { - id - title - code - details - director - duration - date - urls { - ... URLFragment - } - images { - ... ImageFragment - } - studio { - ... StudioFragment - } - tags { - ... TagFragment - } - performers { - ... PerformerAppearanceFragment - } - fingerprints { - ... FingerprintFragment - } -} -fragment URLFragment on URL { - url - type -} -fragment ImageFragment on Image { - id - url - width - height -} -fragment StudioFragment on Studio { - name - id - aliases - urls { - ... URLFragment - } - parent { - name - id - } - images { - ... ImageFragment - } -} -fragment TagFragment on Tag { - name - id -} -fragment PerformerAppearanceFragment on PerformerAppearance { - as - performer { - ... PerformerFragment - } -} -fragment PerformerFragment on Performer { - id - name - disambiguation - aliases - gender - merged_ids - deleted - merged_into_id - urls { - ... URLFragment - } - images { - ... ImageFragment - } - birth_date - death_date - ethnicity - country - eye_color - hair_color - height - measurements { - ... MeasurementsFragment - } - breast_type - career_start_year - career_end_year - tattoos { - ... BodyModificationFragment - } - piercings { - ... 
BodyModificationFragment - } -} -fragment MeasurementsFragment on Measurements { - band_size - cup_size - waist - hip -} -fragment BodyModificationFragment on BodyModification { - location - description -} -fragment FingerprintFragment on Fingerprint { - algorithm - hash - duration -} -` - -func (c *Client) FindSceneByFingerprint(ctx context.Context, fingerprint FingerprintQueryInput, interceptors ...clientv2.RequestInterceptor) (*FindSceneByFingerprint, error) { - vars := map[string]any{ - "fingerprint": fingerprint, - } - - var res FindSceneByFingerprint - if err := c.Client.Post(ctx, "FindSceneByFingerprint", FindSceneByFingerprintDocument, &res, vars, interceptors...); err != nil { - if c.Client.ParseDataWhenErrors { - return &res, err - } - - return nil, err - } - - return &res, nil -} - -const FindScenesByFullFingerprintsDocument = `query FindScenesByFullFingerprints ($fingerprints: [FingerprintQueryInput!]!) { - findScenesByFullFingerprints(fingerprints: $fingerprints) { - ... SceneFragment - } -} -fragment SceneFragment on Scene { - id - title - code - details - director - duration - date - urls { - ... URLFragment - } - images { - ... ImageFragment - } - studio { - ... StudioFragment - } - tags { - ... TagFragment - } - performers { - ... PerformerAppearanceFragment - } - fingerprints { - ... FingerprintFragment - } -} -fragment URLFragment on URL { - url - type -} -fragment ImageFragment on Image { - id - url - width - height -} -fragment StudioFragment on Studio { - name - id - aliases - urls { - ... URLFragment - } - parent { - name - id - } - images { - ... ImageFragment - } -} -fragment TagFragment on Tag { - name - id -} -fragment PerformerAppearanceFragment on PerformerAppearance { - as - performer { - ... PerformerFragment - } -} -fragment PerformerFragment on Performer { - id - name - disambiguation - aliases - gender - merged_ids - deleted - merged_into_id - urls { - ... URLFragment - } - images { - ... 
ImageFragment - } - birth_date - death_date - ethnicity - country - eye_color - hair_color - height - measurements { - ... MeasurementsFragment - } - breast_type - career_start_year - career_end_year - tattoos { - ... BodyModificationFragment - } - piercings { - ... BodyModificationFragment - } -} -fragment MeasurementsFragment on Measurements { - band_size - cup_size - waist - hip -} -fragment BodyModificationFragment on BodyModification { - location - description -} -fragment FingerprintFragment on Fingerprint { - algorithm - hash - duration -} -` - -func (c *Client) FindScenesByFullFingerprints(ctx context.Context, fingerprints []*FingerprintQueryInput, interceptors ...clientv2.RequestInterceptor) (*FindScenesByFullFingerprints, error) { - vars := map[string]any{ - "fingerprints": fingerprints, - } - - var res FindScenesByFullFingerprints - if err := c.Client.Post(ctx, "FindScenesByFullFingerprints", FindScenesByFullFingerprintsDocument, &res, vars, interceptors...); err != nil { - if c.Client.ParseDataWhenErrors { - return &res, err - } - - return nil, err - } - - return &res, nil -} - const FindScenesBySceneFingerprintsDocument = `query FindScenesBySceneFingerprints ($fingerprints: [[FingerprintQueryInput!]!]!) { findScenesBySceneFingerprints(fingerprints: $fingerprints) { ... 
SceneFragment @@ -1890,8 +1558,6 @@ func (c *Client) SubmitPerformerDraft(ctx context.Context, input PerformerDraftI } var DocumentOperationNames = map[string]string{ - FindSceneByFingerprintDocument: "FindSceneByFingerprint", - FindScenesByFullFingerprintsDocument: "FindScenesByFullFingerprints", FindScenesBySceneFingerprintsDocument: "FindScenesBySceneFingerprints", SearchSceneDocument: "SearchScene", SearchPerformerDocument: "SearchPerformer", diff --git a/pkg/stashbox/performer.go b/pkg/stashbox/performer.go index 38824eba1..231b936d6 100644 --- a/pkg/stashbox/performer.go +++ b/pkg/stashbox/performer.go @@ -231,6 +231,16 @@ func performerFragmentToScrapedPerformer(p graphql.PerformerFragment) *models.Sc sp.Height = &hs } + if p.CareerStartYear != nil { + cs := *p.CareerStartYear + sp.CareerStart = &cs + } + + if p.CareerEndYear != nil { + ce := *p.CareerEndYear + sp.CareerEnd = &ce + } + if p.BirthDate != nil { sp.Birthdate = padFuzzyDate(p.BirthDate) } @@ -388,16 +398,11 @@ func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Perf aliases := strings.Join(performer.Aliases.List(), ",") draft.Aliases = &aliases } - if performer.CareerLength != "" { - var career = strings.Split(performer.CareerLength, "-") - if i, err := strconv.Atoi(strings.TrimSpace(career[0])); err == nil { - draft.CareerStartYear = &i - } - if len(career) == 2 { - if y, err := strconv.Atoi(strings.TrimSpace(career[1])); err == nil { - draft.CareerEndYear = &y - } - } + if performer.CareerStart != nil { + draft.CareerStartYear = performer.CareerStart + } + if performer.CareerEnd != nil { + draft.CareerEndYear = performer.CareerEnd } if len(performer.URLs.List()) > 0 { diff --git a/pkg/studio/export.go b/pkg/studio/export.go index 1440c3cdd..206791da6 100644 --- a/pkg/studio/export.go +++ b/pkg/studio/export.go @@ -17,6 +17,7 @@ type FinderImageStashIDGetter interface { models.URLLoader models.StashIDLoader GetImage(ctx context.Context, studioID int) ([]byte, error) + 
models.CustomFieldsReader } // ToJSON converts a Studio object into its JSON equivalent. @@ -26,6 +27,7 @@ func ToJSON(ctx context.Context, reader FinderImageStashIDGetter, studio *models Details: studio.Details, Favorite: studio.Favorite, IgnoreAutoTag: studio.IgnoreAutoTag, + Organized: studio.Organized, CreatedAt: json.JSONTime{Time: studio.CreatedAt}, UpdatedAt: json.JSONTime{Time: studio.UpdatedAt}, } @@ -60,6 +62,12 @@ func ToJSON(ctx context.Context, reader FinderImageStashIDGetter, studio *models } newStudioJSON.StashIDs = studio.StashIDs.List() + var err error + newStudioJSON.CustomFields, err = reader.GetCustomFields(ctx, studio.ID) + if err != nil { + return nil, fmt.Errorf("getting studio custom fields: %v", err) + } + image, err := reader.GetImage(ctx, studio.ID) if err != nil { logger.Errorf("Error getting studio image: %v", err) diff --git a/pkg/studio/export_test.go b/pkg/studio/export_test.go index c333c0ad5..dce75ba9a 100644 --- a/pkg/studio/export_test.go +++ b/pkg/studio/export_test.go @@ -18,18 +18,25 @@ const ( errImageID = 3 missingParentStudioID = 4 errStudioID = 5 + customFieldsID = 6 parentStudioID = 10 missingStudioID = 11 errParentStudioID = 12 + errCustomFieldsID = 13 ) var ( - studioName = "testStudio" - url = "url" - details = "details" - parentStudioName = "parentStudio" - autoTagIgnored = true + studioName = "testStudio" + url = "url" + details = "details" + parentStudioName = "parentStudio" + autoTagIgnored = true + studioOrganized = true + emptyCustomFields = make(map[string]interface{}) + customFields = map[string]interface{}{ + "customField1": "customValue1", + } ) var studioID = 1 @@ -67,6 +74,7 @@ func createFullStudio(id int, parentID int) models.Studio { UpdatedAt: updateTime, Rating: &rating, IgnoreAutoTag: autoTagIgnored, + Organized: studioOrganized, Aliases: models.NewRelatedStrings(aliases), TagIDs: models.NewRelatedIDs([]int{}), StashIDs: models.NewRelatedStashIDs(stashIDs), @@ -91,7 +99,7 @@ func createEmptyStudio(id 
int) models.Studio { } } -func createFullJSONStudio(parentStudio, image string, aliases []string) *jsonschema.Studio { +func createFullJSONStudio(parentStudio, image string, aliases []string, customFields map[string]interface{}) *jsonschema.Studio { return &jsonschema.Studio{ Name: studioName, URLs: []string{url}, @@ -109,6 +117,8 @@ func createFullJSONStudio(parentStudio, image string, aliases []string) *jsonsch Aliases: aliases, StashIDs: stashIDs, IgnoreAutoTag: autoTagIgnored, + Organized: studioOrganized, + CustomFields: customFields, } } @@ -120,16 +130,18 @@ func createEmptyJSONStudio() *jsonschema.Studio { UpdatedAt: json.JSONTime{ Time: updateTime, }, - Aliases: []string{}, - URLs: []string{}, - StashIDs: []models.StashID{}, + Aliases: []string{}, + URLs: []string{}, + StashIDs: []models.StashID{}, + CustomFields: emptyCustomFields, } } type testScenario struct { - input models.Studio - expected *jsonschema.Studio - err bool + input models.Studio + customFields map[string]interface{} + expected *jsonschema.Studio + err bool } var scenarios []testScenario @@ -138,30 +150,48 @@ func initTestTable() { scenarios = []testScenario{ { createFullStudio(studioID, parentStudioID), - createFullJSONStudio(parentStudioName, image, []string{"alias"}), + emptyCustomFields, + createFullJSONStudio(parentStudioName, image, []string{"alias"}, emptyCustomFields), + false, + }, + { + createFullStudio(customFieldsID, parentStudioID), + customFields, + createFullJSONStudio(parentStudioName, image, []string{"alias"}, customFields), false, }, { createEmptyStudio(noImageID), + emptyCustomFields, createEmptyJSONStudio(), false, }, { createFullStudio(errImageID, parentStudioID), - createFullJSONStudio(parentStudioName, "", []string{"alias"}), + emptyCustomFields, + createFullJSONStudio(parentStudioName, "", []string{"alias"}, emptyCustomFields), // failure to get image is not an error false, }, { createFullStudio(missingParentStudioID, missingStudioID), - createFullJSONStudio("", 
image, []string{"alias"}), + emptyCustomFields, + createFullJSONStudio("", image, []string{"alias"}, emptyCustomFields), false, }, { createFullStudio(errStudioID, errParentStudioID), + emptyCustomFields, nil, true, }, + { + createFullStudio(errCustomFieldsID, parentStudioID), + customFields, + nil, + // failure to get custom fields should cause an error + true, + }, } } @@ -177,6 +207,7 @@ func TestToJSON(t *testing.T) { db.Studio.On("GetImage", testCtx, errImageID).Return(nil, imageErr).Once() db.Studio.On("GetImage", testCtx, missingParentStudioID).Return(imageBytes, nil).Maybe() db.Studio.On("GetImage", testCtx, errStudioID).Return(imageBytes, nil).Maybe() + db.Studio.On("GetImage", testCtx, customFieldsID).Return(imageBytes, nil).Once() parentStudioErr := errors.New("error getting parent studio") @@ -184,6 +215,15 @@ func TestToJSON(t *testing.T) { db.Studio.On("Find", testCtx, missingStudioID).Return(nil, nil) db.Studio.On("Find", testCtx, errParentStudioID).Return(nil, parentStudioErr) + customFieldsErr := errors.New("error getting custom fields") + + db.Studio.On("GetCustomFields", testCtx, studioID).Return(emptyCustomFields, nil).Once() + db.Studio.On("GetCustomFields", testCtx, customFieldsID).Return(customFields, nil).Once() + db.Studio.On("GetCustomFields", testCtx, missingParentStudioID).Return(emptyCustomFields, nil).Once() + db.Studio.On("GetCustomFields", testCtx, noImageID).Return(emptyCustomFields, nil).Once() + db.Studio.On("GetCustomFields", testCtx, errImageID).Return(emptyCustomFields, nil).Once() + db.Studio.On("GetCustomFields", testCtx, errCustomFieldsID).Return(nil, customFieldsErr).Once() + for i, s := range scenarios { studio := s.input json, err := ToJSON(testCtx, db.Studio, &studio) diff --git a/pkg/studio/import.go b/pkg/studio/import.go index 405852e53..264e2566a 100644 --- a/pkg/studio/import.go +++ b/pkg/studio/import.go @@ -26,13 +26,15 @@ type Importer struct { Input jsonschema.Studio MissingRefBehaviour 
models.ImportMissingRefEnum - ID int - studio models.Studio - imageData []byte + ID int + studio models.Studio + customFields models.CustomFieldMap + imageData []byte } func (i *Importer) PreImport(ctx context.Context) error { i.studio = studioJSONtoStudio(i.Input) + i.customFields = i.Input.CustomFields if err := i.populateParentStudio(ctx); err != nil { return err @@ -110,7 +112,9 @@ func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names [] newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } @@ -153,7 +157,7 @@ func (i *Importer) populateParentStudio(ctx context.Context) error { } func (i *Importer) createParentStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.ReaderWriter.Create(ctx, &newStudio) @@ -194,7 +198,10 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - err := i.ReaderWriter.Create(ctx, &i.studio) + err := i.ReaderWriter.Create(ctx, &models.CreateStudioInput{ + Studio: &i.studio, + CustomFields: i.customFields, + }) if err != nil { return nil, fmt.Errorf("error creating studio: %v", err) } @@ -206,7 +213,12 @@ func (i *Importer) Create(ctx context.Context) (*int, error) { func (i *Importer) Update(ctx context.Context, id int) error { studio := i.studio studio.ID = id - err := i.ReaderWriter.Update(ctx, &studio) + err := i.ReaderWriter.Update(ctx, &models.UpdateStudioInput{ + Studio: &studio, + CustomFields: models.CustomFieldsInput{ + Full: i.customFields, + }, + }) if err != nil { return fmt.Errorf("error updating existing studio: %v", err) } @@ -221,6 +233,7 @@ func studioJSONtoStudio(studioJSON jsonschema.Studio) models.Studio { Details: studioJSON.Details, Favorite: 
studioJSON.Favorite, IgnoreAutoTag: studioJSON.IgnoreAutoTag, + Organized: studioJSON.Organized, CreatedAt: studioJSON.CreatedAt.GetTime(), UpdatedAt: studioJSON.UpdatedAt.GetTime(), diff --git a/pkg/studio/import_test.go b/pkg/studio/import_test.go index 882b8ca56..c2bbd40f5 100644 --- a/pkg/studio/import_test.go +++ b/pkg/studio/import_test.go @@ -49,6 +49,7 @@ func TestImporterPreImport(t *testing.T) { Name: studioName, Image: invalidImage, IgnoreAutoTag: autoTagIgnored, + Organized: studioOrganized, }, } @@ -62,7 +63,7 @@ func TestImporterPreImport(t *testing.T) { assert.Nil(t, err) - i.Input = *createFullJSONStudio(studioName, image, []string{"alias"}) + i.Input = *createFullJSONStudio(studioName, image, []string{"alias"}, customFields) i.Input.ParentStudio = "" err = i.PreImport(testCtx) @@ -71,6 +72,7 @@ func TestImporterPreImport(t *testing.T) { expectedStudio := createFullStudio(0, 0) expectedStudio.ParentID = nil assert.Equal(t, expectedStudio, i.studio) + assert.Equal(t, models.CustomFieldMap(customFields), i.customFields) } func TestImporterPreImportWithTag(t *testing.T) { @@ -121,9 +123,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -156,7 +158,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, 
mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -206,9 +208,9 @@ func TestImporterPreImportWithMissingParent(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingParentStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -240,7 +242,7 @@ func TestImporterPreImportWithMissingParentCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingParentStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -327,11 +329,11 @@ func TestCreate(t *testing.T) { } errCreate := errors.New("Create error") - db.Studio.On("Create", testCtx, &studio).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) + db.Studio.On("Create", testCtx, &models.CreateStudioInput{Studio: &studio}).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) s.ID = studioID }).Return(nil).Once() - db.Studio.On("Create", testCtx, &studioErr).Return(errCreate).Once() + db.Studio.On("Create", testCtx, &models.CreateStudioInput{Studio: &studioErr}).Return(errCreate).Once() id, err := i.Create(testCtx) assert.Equal(t, studioID, *id) @@ -366,7 +368,7 @@ func TestUpdate(t *testing.T) { // id needs to be set for the mock input studio.ID = studioID - db.Studio.On("Update", testCtx, &studio).Return(nil).Once() + 
db.Studio.On("Update", testCtx, &models.UpdateStudioInput{Studio: &studio}).Return(nil).Once() err := i.Update(testCtx, studioID) assert.Nil(t, err) @@ -375,7 +377,7 @@ func TestUpdate(t *testing.T) { // need to set id separately studioErr.ID = errImageID - db.Studio.On("Update", testCtx, &studioErr).Return(errUpdate).Once() + db.Studio.On("Update", testCtx, &models.UpdateStudioInput{Studio: &studioErr}).Return(errUpdate).Once() err = i.Update(testCtx, errImageID) assert.NotNil(t, err) diff --git a/pkg/studio/validate.go b/pkg/studio/validate.go index 4e2f51c84..526400066 100644 --- a/pkg/studio/validate.go +++ b/pkg/studio/validate.go @@ -75,7 +75,7 @@ func ValidateAliases(ctx context.Context, id int, aliases []string, qb models.St return nil } -func ValidateCreate(ctx context.Context, studio models.Studio, qb models.StudioQueryer) error { +func ValidateCreate(ctx context.Context, studio models.CreateStudioInput, qb models.StudioQueryer) error { if err := validateName(ctx, 0, studio.Name, qb); err != nil { return err } @@ -135,6 +135,7 @@ func ValidateModify(ctx context.Context, s models.StudioPartial, qb ValidateModi } effectiveAliases := s.Aliases.Apply(existing.Aliases.List()) + if err := ValidateAliases(ctx, s.ID, effectiveAliases, qb); err != nil { return err } diff --git a/pkg/studio/validate_test.go b/pkg/studio/validate_test.go index 6562dc5ca..b196ba3c3 100644 --- a/pkg/studio/validate_test.go +++ b/pkg/studio/validate_test.go @@ -102,3 +102,72 @@ func TestValidateUpdateName(t *testing.T) { }) } } + +func TestValidateUpdateAliases(t *testing.T) { + db := mocks.NewDatabase() + + const ( + name1 = "name 1" + name2 = "name 2" + alias1 = "alias 1" + newAlias = "new alias" + ) + + existing1 := models.Studio{ + ID: 1, + Name: name1, + } + existing2 := models.Studio{ + ID: 2, + Name: name2, + } + + pp := 1 + findFilter := &models.FindFilterType{ + PerPage: &pp, + } + + aliasFilter := func(n string) *models.StudioFilterType { + return &models.StudioFilterType{ + 
Aliases: &models.StringCriterionInput{ + Value: n, + Modifier: models.CriterionModifierEquals, + }, + } + } + + // name1 matches existing1 name - ok + db.Studio.On("Query", testCtx, nameFilter(alias1), findFilter).Return(nil, 0, nil) + db.Studio.On("Query", testCtx, aliasFilter(alias1), findFilter).Return(nil, 0, nil) + + // name2 matches existing2 name - error + db.Studio.On("Query", testCtx, nameFilter(name2), findFilter).Return([]*models.Studio{&existing2}, 1, nil) + + // alias matches existing alias - error + db.Studio.On("Query", testCtx, nameFilter(newAlias), findFilter).Return(nil, 0, nil) + db.Studio.On("Query", testCtx, aliasFilter(newAlias), findFilter).Return([]*models.Studio{&existing2}, 1, nil) + + // valid alias + db.Studio.On("Query", testCtx, nameFilter("valid"), findFilter).Return(nil, 0, nil) + db.Studio.On("Query", testCtx, aliasFilter("valid"), findFilter).Return(nil, 0, nil) + + tests := []struct { + tName string + studio models.Studio + aliases []string + want error + }{ + {"valid alias", existing1, []string{alias1}, nil}, + {"alias duplicates other name", existing1, []string{name2}, &NameExistsError{name2}}, + {"alias duplicates other alias", existing1, []string{newAlias}, &NameUsedByAliasError{newAlias, existing2.Name}}, + {"valid new alias", existing1, []string{"valid"}, nil}, + {"empty alias", existing1, []string{""}, ErrEmptyAlias}, + } + + for _, tt := range tests { + t.Run(tt.tName, func(t *testing.T) { + got := ValidateAliases(testCtx, tt.studio.ID, tt.aliases, db.Studio) + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/pkg/tag/export.go b/pkg/tag/export.go index b07418667..fc7115209 100644 --- a/pkg/tag/export.go +++ b/pkg/tag/export.go @@ -16,6 +16,7 @@ type FinderAliasImageGetter interface { GetAliases(ctx context.Context, studioID int) ([]string, error) GetImage(ctx context.Context, tagID int) ([]byte, error) FindByChildTagID(ctx context.Context, childID int) ([]*models.Tag, error) + GetCustomFields(ctx context.Context, id 
int) (map[string]interface{}, error) models.StashIDLoader } @@ -63,6 +64,11 @@ func ToJSON(ctx context.Context, reader FinderAliasImageGetter, tag *models.Tag) newTagJSON.Parents = GetNames(parents) + newTagJSON.CustomFields, err = reader.GetCustomFields(ctx, tag.ID) + if err != nil { + return nil, fmt.Errorf("getting tag custom fields: %v", err) + } + return &newTagJSON, nil } diff --git a/pkg/tag/export_test.go b/pkg/tag/export_test.go index 84e082f30..cba2d4ebf 100644 --- a/pkg/tag/export_test.go +++ b/pkg/tag/export_test.go @@ -14,12 +14,14 @@ import ( ) const ( - tagID = 1 - noImageID = 2 - errImageID = 3 - errAliasID = 4 - withParentsID = 5 - errParentsID = 6 + tagID = iota + 1 + customFieldsID + noImageID + errImageID + errAliasID + withParentsID + errParentsID + errCustomFieldsID ) const ( @@ -32,6 +34,11 @@ var ( autoTagIgnored = true createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC) updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC) + + emptyCustomFields = make(map[string]interface{}) + customFields = map[string]interface{}{ + "customField1": "customValue1", + } ) func createTag(id int) models.Tag { @@ -47,8 +54,8 @@ func createTag(id int) models.Tag { } } -func createJSONTag(aliases []string, image string, parents []string) *jsonschema.Tag { - return &jsonschema.Tag{ +func createJSONTag(aliases []string, image string, parents []string, withCustomFields bool) *jsonschema.Tag { + ret := &jsonschema.Tag{ Name: tagName, SortName: sortName, Favorite: true, @@ -61,15 +68,23 @@ func createJSONTag(aliases []string, image string, parents []string) *jsonschema UpdatedAt: json.JSONTime{ Time: updateTime, }, - Image: image, - Parents: parents, + Image: image, + Parents: parents, + CustomFields: emptyCustomFields, } + + if withCustomFields { + ret.CustomFields = customFields + } + + return ret } type testScenario struct { - tag models.Tag - expected *jsonschema.Tag - err bool + tag models.Tag + customFields map[string]interface{} + expected 
*jsonschema.Tag + err bool } var scenarios []testScenario @@ -78,32 +93,50 @@ func initTestTable() { scenarios = []testScenario{ { createTag(tagID), - createJSONTag([]string{"alias"}, image, nil), + emptyCustomFields, + createJSONTag([]string{"alias"}, image, nil, false), + false, + }, + { + createTag(customFieldsID), + customFields, + createJSONTag([]string{"alias"}, image, nil, true), false, }, { createTag(noImageID), - createJSONTag(nil, "", nil), + emptyCustomFields, + createJSONTag(nil, "", nil, false), false, }, { createTag(errImageID), - createJSONTag(nil, "", nil), + emptyCustomFields, + createJSONTag(nil, "", nil, false), // getting the image should not cause an error false, }, { createTag(errAliasID), + emptyCustomFields, nil, true, }, { createTag(withParentsID), - createJSONTag(nil, image, []string{"parent"}), + emptyCustomFields, + createJSONTag(nil, image, []string{"parent"}, false), false, }, { createTag(errParentsID), + emptyCustomFields, + nil, + true, + }, + { + createTag(errCustomFieldsID), + customFields, nil, true, }, @@ -118,32 +151,48 @@ func TestToJSON(t *testing.T) { imageErr := errors.New("error getting image") aliasErr := errors.New("error getting aliases") parentsErr := errors.New("error getting parents") + customFieldsErr := errors.New("error getting custom fields") db.Tag.On("GetAliases", testCtx, tagID).Return([]string{"alias"}, nil).Once() + db.Tag.On("GetAliases", testCtx, customFieldsID).Return([]string{"alias"}, nil).Once() db.Tag.On("GetAliases", testCtx, noImageID).Return(nil, nil).Once() db.Tag.On("GetAliases", testCtx, errImageID).Return(nil, nil).Once() db.Tag.On("GetAliases", testCtx, errAliasID).Return(nil, aliasErr).Once() db.Tag.On("GetAliases", testCtx, withParentsID).Return(nil, nil).Once() db.Tag.On("GetAliases", testCtx, errParentsID).Return(nil, nil).Once() + db.Tag.On("GetAliases", testCtx, errCustomFieldsID).Return(nil, nil).Once() db.Tag.On("GetStashIDs", testCtx, tagID).Return(nil, nil).Once() + 
db.Tag.On("GetStashIDs", testCtx, customFieldsID).Return(nil, nil).Once() db.Tag.On("GetStashIDs", testCtx, noImageID).Return(nil, nil).Once() db.Tag.On("GetStashIDs", testCtx, errImageID).Return(nil, nil).Once() // errAliasID test fails before GetStashIDs is called, so no mock needed db.Tag.On("GetStashIDs", testCtx, withParentsID).Return(nil, nil).Once() db.Tag.On("GetStashIDs", testCtx, errParentsID).Return(nil, nil).Once() + db.Tag.On("GetStashIDs", testCtx, errCustomFieldsID).Return(nil, nil).Once() db.Tag.On("GetImage", testCtx, tagID).Return(imageBytes, nil).Once() + db.Tag.On("GetImage", testCtx, customFieldsID).Return(imageBytes, nil).Once() db.Tag.On("GetImage", testCtx, noImageID).Return(nil, nil).Once() db.Tag.On("GetImage", testCtx, errImageID).Return(nil, imageErr).Once() db.Tag.On("GetImage", testCtx, withParentsID).Return(imageBytes, nil).Once() db.Tag.On("GetImage", testCtx, errParentsID).Return(nil, nil).Once() + db.Tag.On("GetImage", testCtx, errCustomFieldsID).Return(nil, nil).Once() db.Tag.On("FindByChildTagID", testCtx, tagID).Return(nil, nil).Once() + db.Tag.On("FindByChildTagID", testCtx, customFieldsID).Return(nil, nil).Once() db.Tag.On("FindByChildTagID", testCtx, noImageID).Return(nil, nil).Once() db.Tag.On("FindByChildTagID", testCtx, withParentsID).Return([]*models.Tag{{Name: "parent"}}, nil).Once() db.Tag.On("FindByChildTagID", testCtx, errParentsID).Return(nil, parentsErr).Once() db.Tag.On("FindByChildTagID", testCtx, errImageID).Return(nil, nil).Once() + db.Tag.On("FindByChildTagID", testCtx, errCustomFieldsID).Return(nil, nil).Once() + + db.Tag.On("GetCustomFields", testCtx, tagID).Return(emptyCustomFields, nil).Once() + db.Tag.On("GetCustomFields", testCtx, customFieldsID).Return(customFields, nil).Once() + db.Tag.On("GetCustomFields", testCtx, noImageID).Return(emptyCustomFields, nil).Once() + db.Tag.On("GetCustomFields", testCtx, errImageID).Return(emptyCustomFields, nil).Once() + db.Tag.On("GetCustomFields", testCtx, 
withParentsID).Return(emptyCustomFields, nil).Once() + db.Tag.On("GetCustomFields", testCtx, errCustomFieldsID).Return(nil, customFieldsErr).Once() for i, s := range scenarios { tag := s.tag diff --git a/pkg/tag/import.go b/pkg/tag/import.go index 53b741886..501dc6795 100644 --- a/pkg/tag/import.go +++ b/pkg/tag/import.go @@ -31,8 +31,9 @@ type Importer struct { Input jsonschema.Tag MissingRefBehaviour models.ImportMissingRefEnum - tag models.Tag - imageData []byte + tag models.Tag + imageData []byte + customFields map[string]interface{} } func (i *Importer) PreImport(ctx context.Context) error { @@ -55,6 +56,8 @@ func (i *Importer) PreImport(ctx context.Context) error { } } + i.customFields = i.Input.CustomFields + return nil } @@ -78,6 +81,14 @@ func (i *Importer) PostImport(ctx context.Context, id int) error { return fmt.Errorf("error setting parents: %v", err) } + if len(i.customFields) > 0 { + if err := i.ReaderWriter.SetCustomFields(ctx, id, models.CustomFieldsInput{ + Full: i.customFields, + }); err != nil { + return fmt.Errorf("error setting tag custom fields: %v", err) + } + } + return nil } @@ -101,7 +112,10 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - err := i.ReaderWriter.Create(ctx, &i.tag) + err := i.ReaderWriter.Create(ctx, &models.CreateTagInput{ + Tag: &i.tag, + CustomFields: i.customFields, + }) if err != nil { return nil, fmt.Errorf("error creating tag: %v", err) } @@ -113,7 +127,12 @@ func (i *Importer) Create(ctx context.Context) (*int, error) { func (i *Importer) Update(ctx context.Context, id int) error { tag := i.tag tag.ID = id - err := i.ReaderWriter.Update(ctx, &tag) + err := i.ReaderWriter.Update(ctx, &models.UpdateTagInput{ + Tag: &tag, + CustomFields: models.CustomFieldsInput{ + Full: i.customFields, + }, + }) if err != nil { return fmt.Errorf("error updating existing tag: %v", err) } @@ -157,7 +176,9 @@ func (i *Importer) createParent(ctx 
context.Context, name string) (int, error) { newTag := models.NewTag() newTag.Name = name - err := i.ReaderWriter.Create(ctx, &newTag) + err := i.ReaderWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return 0, err } diff --git a/pkg/tag/import_test.go b/pkg/tag/import_test.go index b706c4937..f6eaec88a 100644 --- a/pkg/tag/import_test.go +++ b/pkg/tag/import_test.go @@ -154,14 +154,14 @@ func TestImporterPostImportParentMissing(t *testing.T) { db.Tag.On("UpdateParentTags", testCtx, ignoreID, emptyParents).Return(nil).Once() db.Tag.On("UpdateParentTags", testCtx, ignoreFoundID, []int{103}).Return(nil).Once() - db.Tag.On("Create", testCtx, mock.MatchedBy(func(t *models.Tag) bool { - return t.Name == "Create" + db.Tag.On("Create", testCtx, mock.MatchedBy(func(input *models.CreateTagInput) bool { + return input.Tag.Name == "Create" })).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = 100 + input := args.Get(1).(*models.CreateTagInput) + input.Tag.ID = 100 }).Return(nil).Once() - db.Tag.On("Create", testCtx, mock.MatchedBy(func(t *models.Tag) bool { - return t.Name == "CreateError" + db.Tag.On("Create", testCtx, mock.MatchedBy(func(input *models.CreateTagInput) bool { + return input.Tag.Name == "CreateError" })).Return(errors.New("failed creating parent")).Once() i.MissingRefBehaviour = models.ImportMissingRefEnumCreate @@ -261,11 +261,15 @@ func TestCreate(t *testing.T) { } errCreate := errors.New("Create error") - db.Tag.On("Create", testCtx, &tag).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = tagID + db.Tag.On("Create", testCtx, mock.MatchedBy(func(input *models.CreateTagInput) bool { + return input.Tag.Name == tag.Name + })).Run(func(args mock.Arguments) { + input := args.Get(1).(*models.CreateTagInput) + input.Tag.ID = tagID }).Return(nil).Once() - db.Tag.On("Create", testCtx, &tagErr).Return(errCreate).Once() + db.Tag.On("Create", testCtx, mock.MatchedBy(func(input 
*models.CreateTagInput) bool { + return input.Tag.Name == tagErr.Name + })).Return(errCreate).Once() id, err := i.Create(testCtx) assert.Equal(t, tagID, *id) @@ -299,7 +303,10 @@ func TestUpdate(t *testing.T) { // id needs to be set for the mock input tag.ID = tagID - db.Tag.On("Update", testCtx, &tag).Return(nil).Once() + tagInput := models.UpdateTagInput{ + Tag: &tag, + } + db.Tag.On("Update", testCtx, &tagInput).Return(nil).Once() err := i.Update(testCtx, tagID) assert.Nil(t, err) @@ -308,7 +315,10 @@ func TestUpdate(t *testing.T) { // need to set id separately tagErr.ID = errImageID - db.Tag.On("Update", testCtx, &tagErr).Return(errUpdate).Once() + errInput := models.UpdateTagInput{ + Tag: &tagErr, + } + db.Tag.On("Update", testCtx, &errInput).Return(errUpdate).Once() err = i.Update(testCtx, errImageID) assert.NotNil(t, err) diff --git a/pkg/tag/update.go b/pkg/tag/update.go index 99e9b9165..4a3a2901a 100644 --- a/pkg/tag/update.go +++ b/pkg/tag/update.go @@ -220,49 +220,3 @@ func ValidateHierarchyExisting(ctx context.Context, tag *models.Tag, parentIDs, return nil } - -func MergeHierarchy(ctx context.Context, destination int, sources []int, qb RelationshipFinder) ([]int, []int, error) { - var mergedParents, mergedChildren []int - allIds := append([]int{destination}, sources...) 
- - addTo := func(mergedItems []int, tagIDs []int) []int { - Tags: - for _, tagID := range tagIDs { - // Ignore tags which are already set - for _, existingItem := range mergedItems { - if tagID == existingItem { - continue Tags - } - } - - // Ignore tags which are being merged, as these are rolled up anyway (if A is merged into B any direct link between them can be ignored) - for _, id := range allIds { - if tagID == id { - continue Tags - } - } - - mergedItems = append(mergedItems, tagID) - } - - return mergedItems - } - - for _, id := range allIds { - parents, err := qb.GetParentIDs(ctx, id) - if err != nil { - return nil, nil, err - } - - mergedParents = addTo(mergedParents, parents) - - children, err := qb.GetChildIDs(ctx, id) - if err != nil { - return nil, nil, err - } - - mergedChildren = addTo(mergedChildren, children) - } - - return mergedParents, mergedChildren, nil -} diff --git a/pkg/tag/validate.go b/pkg/tag/validate.go index 966cec945..abc260b5e 100644 --- a/pkg/tag/validate.go +++ b/pkg/tag/validate.go @@ -69,7 +69,9 @@ func ValidateUpdate(ctx context.Context, id int, partial models.TagPartial, qb m return err } - if err := EnsureAliasesUnique(ctx, id, partial.Aliases.Apply(existing.Aliases.List()), qb); err != nil { + newAliases := partial.Aliases.Apply(existing.Aliases.List()) + + if err := EnsureAliasesUnique(ctx, id, newAliases, qb); err != nil { return err } } diff --git a/pkg/tag/validate_test.go b/pkg/tag/validate_test.go new file mode 100644 index 000000000..539086a6d --- /dev/null +++ b/pkg/tag/validate_test.go @@ -0,0 +1,86 @@ +package tag + +import ( + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stretchr/testify/assert" +) + +func nameFilter(n string) *models.TagFilterType { + return &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: n, + Modifier: models.CriterionModifierEquals, + }, + } +} + +func aliasFilter(n string) *models.TagFilterType { + 
return &models.TagFilterType{ + Aliases: &models.StringCriterionInput{ + Value: n, + Modifier: models.CriterionModifierEquals, + }, + } +} + +func TestEnsureAliasesUnique(t *testing.T) { + db := mocks.NewDatabase() + + const ( + name1 = "name 1" + name2 = "name 2" + alias1 = "alias 1" + newAlias = "new alias" + ) + + existing2 := models.Tag{ + ID: 2, + Name: name2, + } + + pp := 1 + findFilter := &models.FindFilterType{ + PerPage: &pp, + } + + // name1 matches existing1 name - ok + // EnsureAliasesUnique calls EnsureTagNameUnique. + // EnsureTagNameUnique calls ByName then ByAlias. + + // Case 1: valid alias + // ByName "alias 1" -> nil + // ByAlias "alias 1" -> nil + db.Tag.On("Query", testCtx, nameFilter(alias1), findFilter).Return(nil, 0, nil) + db.Tag.On("Query", testCtx, aliasFilter(alias1), findFilter).Return(nil, 0, nil) + + // Case 2: alias duplicates existing2 name + // ByName "name 2" -> existing2 + db.Tag.On("Query", testCtx, nameFilter(name2), findFilter).Return([]*models.Tag{&existing2}, 1, nil) + + // Case 3: alias duplicates existing2 alias + // ByName "new alias" -> nil + // ByAlias "new alias" -> existing2 + db.Tag.On("Query", testCtx, nameFilter(newAlias), findFilter).Return(nil, 0, nil) + db.Tag.On("Query", testCtx, aliasFilter(newAlias), findFilter).Return([]*models.Tag{&existing2}, 1, nil) + + tests := []struct { + tName string + id int + aliases []string + want error + }{ + {"valid alias", 1, []string{alias1}, nil}, + {"alias duplicates other name", 1, []string{name2}, &NameExistsError{name2}}, + {"alias duplicates other alias", 1, []string{newAlias}, &NameUsedByAliasError{newAlias, existing2.Name}}, + } + + for _, tt := range tests { + t.Run(tt.tName, func(t *testing.T) { + got := EnsureAliasesUnique(testCtx, tt.id, tt.aliases, db.Tag) + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/pkg/utils/date.go b/pkg/utils/date.go index de5566e4d..4b805862a 100644 --- a/pkg/utils/date.go +++ b/pkg/utils/date.go @@ -2,6 +2,8 @@ package utils 
import ( "fmt" + "strconv" + "strings" "time" ) @@ -25,3 +27,80 @@ func ParseDateStringAsTime(dateString string) (time.Time, error) { return time.Time{}, fmt.Errorf("ParseDateStringAsTime failed: dateString <%s>", dateString) } + +// ParseYearRangeString parses a year range string into start and end year integers. +// Supported formats: "YYYY", "YYYY - YYYY", "YYYY-YYYY", "YYYY -", "- YYYY", "YYYY-present". +// Returns nil for start/end if not present in the string. +func ParseYearRangeString(s string) (start *int, end *int, err error) { + s = strings.TrimSpace(s) + if s == "" { + return nil, nil, fmt.Errorf("empty year range string") + } + + // normalize "present" to empty end + lower := strings.ToLower(s) + lower = strings.ReplaceAll(lower, "present", "") + + // split on "-" if it contains one + var parts []string + if strings.Contains(lower, "-") { + parts = strings.SplitN(lower, "-", 2) + } else { + // single value, treat as start year + year, err := parseYear(lower) + if err != nil { + return nil, nil, fmt.Errorf("invalid year range %q: %w", s, err) + } + return &year, nil, nil + } + + startStr := strings.TrimSpace(parts[0]) + endStr := strings.TrimSpace(parts[1]) + + if startStr != "" { + y, err := parseYear(startStr) + if err != nil { + return nil, nil, fmt.Errorf("invalid start year in %q: %w", s, err) + } + start = &y + } + + if endStr != "" { + y, err := parseYear(endStr) + if err != nil { + return nil, nil, fmt.Errorf("invalid end year in %q: %w", s, err) + } + end = &y + } + + if start == nil && end == nil { + return nil, nil, fmt.Errorf("could not parse year range %q", s) + } + + return start, end, nil +} + +func parseYear(s string) (int, error) { + s = strings.TrimSpace(s) + year, err := strconv.Atoi(s) + if err != nil { + return 0, fmt.Errorf("invalid year %q: %w", s, err) + } + if year < 1900 || year > 2200 { + return 0, fmt.Errorf("year %d out of reasonable range", year) + } + return year, nil +} + +func FormatYearRange(start *int, end *int) string 
{ + switch { + case start == nil && end == nil: + return "" + case end == nil: + return fmt.Sprintf("%d -", *start) + case start == nil: + return fmt.Sprintf("- %d", *end) + default: + return fmt.Sprintf("%d - %d", *start, *end) + } +} diff --git a/pkg/utils/date_test.go b/pkg/utils/date_test.go index ae077c21e..a9e174094 100644 --- a/pkg/utils/date_test.go +++ b/pkg/utils/date_test.go @@ -2,6 +2,8 @@ package utils import ( "testing" + + "github.com/stretchr/testify/assert" ) func TestParseDateStringAsTime(t *testing.T) { @@ -41,3 +43,66 @@ func TestParseDateStringAsTime(t *testing.T) { }) } } + +func TestParseYearRangeString(t *testing.T) { + intPtr := func(v int) *int { return &v } + + tests := []struct { + name string + input string + wantStart *int + wantEnd *int + wantErr bool + }{ + {"single year", "2005", intPtr(2005), nil, false}, + {"year range with spaces", "2005 - 2010", intPtr(2005), intPtr(2010), false}, + {"year range no spaces", "2005-2010", intPtr(2005), intPtr(2010), false}, + {"year dash open", "2005 -", intPtr(2005), nil, false}, + {"year dash open no space", "2005-", intPtr(2005), nil, false}, + {"dash year", "- 2010", nil, intPtr(2010), false}, + {"year present", "2005-present", intPtr(2005), nil, false}, + {"year Present caps", "2005 - Present", intPtr(2005), nil, false}, + {"whitespace padding", " 2005 - 2010 ", intPtr(2005), intPtr(2010), false}, + {"empty string", "", nil, nil, true}, + {"garbage", "not a year", nil, nil, true}, + {"partial garbage start", "abc - 2010", nil, nil, true}, + {"partial garbage end", "2005 - abc", nil, nil, true}, + {"year out of range", "1800", nil, nil, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + start, end, err := ParseYearRangeString(tt.input) + if tt.wantErr { + assert.Error(t, err) + return + } + assert.NoError(t, err) + assert.Equal(t, tt.wantStart, start) + assert.Equal(t, tt.wantEnd, end) + }) + } +} + +func TestFormatYearRange(t *testing.T) { + intPtr := func(v 
int) *int { return &v } + + tests := []struct { + name string + start *int + end *int + want string + }{ + {"both nil", nil, nil, ""}, + {"only start", intPtr(2005), nil, "2005 -"}, + {"only end", nil, intPtr(2010), "- 2010"}, + {"start and end", intPtr(2005), intPtr(2010), "2005 - 2010"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := FormatYearRange(tt.start, tt.end) + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/ui/v2.5/graphql/data/config.graphql b/ui/v2.5/graphql/data/config.graphql index 9f605e862..bc682fa8a 100644 --- a/ui/v2.5/graphql/data/config.graphql +++ b/ui/v2.5/graphql/data/config.graphql @@ -39,6 +39,11 @@ fragment ConfigGeneralData on ConfigGeneralResult { logLevel logAccess logFileMaxSize + useCustomSpriteInterval + spriteInterval + minimumSprites + maximumSprites + spriteScreenshotSize createGalleriesFromFolders galleryCoverRegex videoExtensions @@ -92,6 +97,7 @@ fragment ConfigInterfaceData on ConfigInterfaceResult { javascriptEnabled customLocales customLocalesEnabled + disableCustomizations language imageLightbox { slideshowDelay @@ -143,7 +149,7 @@ fragment IdentifyMetadataOptionsData on IdentifyMetadataOptions { } setCoverImage setOrganized - includeMalePerformers + performerGenders skipMultipleMatches skipMultipleMatchTag skipSingleNamePerformers diff --git a/ui/v2.5/graphql/data/gallery.graphql b/ui/v2.5/graphql/data/gallery.graphql index c41f3e2b2..89f3ed44c 100644 --- a/ui/v2.5/graphql/data/gallery.graphql +++ b/ui/v2.5/graphql/data/gallery.graphql @@ -22,7 +22,7 @@ fragment GalleryData on Gallery { folder { ...FolderData } - + image_count chapters { ...GalleryChapterData } diff --git a/ui/v2.5/graphql/data/performer-slim.graphql b/ui/v2.5/graphql/data/performer-slim.graphql index 56a30842d..9bb628fba 100644 --- a/ui/v2.5/graphql/data/performer-slim.graphql +++ b/ui/v2.5/graphql/data/performer-slim.graphql @@ -16,7 +16,8 @@ fragment SlimPerformerData on Performer { fake_tits penis_length 
circumcised - career_length + career_start + career_end tattoos piercings alias_list diff --git a/ui/v2.5/graphql/data/performer.graphql b/ui/v2.5/graphql/data/performer.graphql index 035c8abc7..2a75fbb95 100644 --- a/ui/v2.5/graphql/data/performer.graphql +++ b/ui/v2.5/graphql/data/performer.graphql @@ -13,7 +13,8 @@ fragment PerformerData on Performer { fake_tits penis_length circumcised - career_length + career_start + career_end tattoos piercings alias_list diff --git a/ui/v2.5/graphql/data/scrapers.graphql b/ui/v2.5/graphql/data/scrapers.graphql index 4a0f588a4..e58c21a20 100644 --- a/ui/v2.5/graphql/data/scrapers.graphql +++ b/ui/v2.5/graphql/data/scrapers.graphql @@ -38,7 +38,8 @@ fragment ScrapedPerformerData on ScrapedPerformer { fake_tits penis_length circumcised - career_length + career_start + career_end tattoos piercings aliases @@ -68,7 +69,8 @@ fragment ScrapedScenePerformerData on ScrapedPerformer { fake_tits penis_length circumcised - career_length + career_start + career_end tattoos piercings aliases diff --git a/ui/v2.5/graphql/data/studio-slim.graphql b/ui/v2.5/graphql/data/studio-slim.graphql index c48f7d93e..4ca3c8b4d 100644 --- a/ui/v2.5/graphql/data/studio-slim.graphql +++ b/ui/v2.5/graphql/data/studio-slim.graphql @@ -17,5 +17,8 @@ fragment SlimStudioData on Studio { id name } + favorite + ignore_auto_tag + organized o_counter } diff --git a/ui/v2.5/graphql/data/studio.graphql b/ui/v2.5/graphql/data/studio.graphql index aabec7a9b..8347b4739 100644 --- a/ui/v2.5/graphql/data/studio.graphql +++ b/ui/v2.5/graphql/data/studio.graphql @@ -16,6 +16,7 @@ fragment StudioData on Studio { image_path } ignore_auto_tag + organized image_path scene_count scene_count_all: scene_count(depth: -1) diff --git a/ui/v2.5/graphql/data/tag.graphql b/ui/v2.5/graphql/data/tag.graphql index e640af0c9..19438e2a4 100644 --- a/ui/v2.5/graphql/data/tag.graphql +++ b/ui/v2.5/graphql/data/tag.graphql @@ -34,6 +34,8 @@ fragment TagData on Tag { children { ...SlimTagData } 
+ + custom_fields } fragment SelectTagData on Tag { diff --git a/ui/v2.5/graphql/mutations/tag.graphql b/ui/v2.5/graphql/mutations/tag.graphql index f2138e057..33c50833a 100644 --- a/ui/v2.5/graphql/mutations/tag.graphql +++ b/ui/v2.5/graphql/mutations/tag.graphql @@ -24,8 +24,14 @@ mutation BulkTagUpdate($input: BulkTagUpdateInput!) { } } -mutation TagsMerge($source: [ID!]!, $destination: ID!) { - tagsMerge(input: { source: $source, destination: $destination }) { +mutation TagsMerge( + $source: [ID!]! + $destination: ID! + $values: TagUpdateInput +) { + tagsMerge( + input: { source: $source, destination: $destination, values: $values } + ) { ...TagData } } diff --git a/ui/v2.5/graphql/queries/tag.graphql b/ui/v2.5/graphql/queries/tag.graphql index e0b20ee02..c91315f99 100644 --- a/ui/v2.5/graphql/queries/tag.graphql +++ b/ui/v2.5/graphql/queries/tag.graphql @@ -1,5 +1,9 @@ -query FindTags($filter: FindFilterType, $tag_filter: TagFilterType) { - findTags(filter: $filter, tag_filter: $tag_filter) { +query FindTags( + $filter: FindFilterType + $tag_filter: TagFilterType + $ids: [ID!] 
+) { + findTags(filter: $filter, tag_filter: $tag_filter, ids: $ids) { count tags { ...TagData diff --git a/ui/v2.5/package.json b/ui/v2.5/package.json index f774aedbd..e024a0053 100644 --- a/ui/v2.5/package.json +++ b/ui/v2.5/package.json @@ -50,7 +50,7 @@ "graphql-ws": "^5.14.3", "i18n-iso-countries": "^7.5.0", "localforage": "^1.10.0", - "lodash-es": "^4.17.21", + "lodash-es": "^4.17.23", "moment": "^2.30.1", "mousetrap": "^1.6.5", "mousetrap-pause": "^1.0.0", diff --git a/ui/v2.5/pnpm-lock.yaml b/ui/v2.5/pnpm-lock.yaml index 27b993864..02033c41f 100644 --- a/ui/v2.5/pnpm-lock.yaml +++ b/ui/v2.5/pnpm-lock.yaml @@ -96,8 +96,8 @@ importers: specifier: ^1.10.0 version: 1.10.0 lodash-es: - specifier: ^4.17.21 - version: 4.17.21 + specifier: ^4.17.23 + version: 4.17.23 moment: specifier: ^2.30.1 version: 2.30.1 @@ -1215,6 +1215,7 @@ packages: '@formatjs/intl-enumerator@1.4.6': resolution: {integrity: sha512-O2YMcE3SuBy4jL8r6YNq/8hvFrQ92QGLawdmzFbOi8D1r3VOfEMr8ifnOMp3zt8XemfTLrma+aF6yRCVeEbVLw==} + deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info. '@formatjs/intl-getcanonicallocales@2.3.0': resolution: {integrity: sha512-BOXbLwqQ7nKua/l7tKqDLRN84WupDXFDhGJQMFvsMVA2dKuOdRaWTxWpL3cJ7qPkoNw11Jf+Xpj4OSPBBvW0eQ==} @@ -2373,6 +2374,7 @@ packages: bootstrap@4.6.2: resolution: {integrity: sha512-51Bbp/Uxr9aTuy6ca/8FbFloBUJZLHwnhTcnjIeRn2suQWsWzcuJhGjKDB5eppVte/8oCdOL3VuwxvZDUggwGQ==} + deprecated: This version of Bootstrap is no longer supported. Please upgrade to the latest version. 
peerDependencies: jquery: 1.9.1 - 3 popper.js: ^1.16.1 @@ -3699,8 +3701,8 @@ packages: resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} engines: {node: '>=10'} - lodash-es@4.17.21: - resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==} + lodash-es@4.17.23: + resolution: {integrity: sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==} lodash.debounce@4.0.8: resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} @@ -8788,7 +8790,7 @@ snapshots: deepmerge: 2.2.1 hoist-non-react-statics: 3.3.2 lodash: 4.17.21 - lodash-es: 4.17.21 + lodash-es: 4.17.23 react: 17.0.2 react-fast-compare: 2.0.4 tiny-warning: 1.0.3 @@ -9443,7 +9445,7 @@ snapshots: dependencies: p-locate: 5.0.0 - lodash-es@4.17.21: {} + lodash-es@4.17.23: {} lodash.debounce@4.0.8: {} diff --git a/ui/v2.5/src/App.tsx b/ui/v2.5/src/App.tsx index 761352373..d08274b18 100644 --- a/ui/v2.5/src/App.tsx +++ b/ui/v2.5/src/App.tsx @@ -49,6 +49,7 @@ import { PluginRoutes, PluginsLoader } from "./plugins"; // import plugin_api to run code import "./pluginApi"; import { ConnectionMonitor } from "./ConnectionMonitor"; +import { TroubleshootingModeOverlay } from "./components/TroubleshootingMode/TroubleshootingModeOverlay"; import { PatchFunction } from "./patch"; import moment from "moment/min/moment-with-locales"; @@ -352,11 +353,17 @@ export const App: React.FC = () => { formats={intlFormats} > - + {maybeRenderReleaseNotes()} + }> diff --git a/ui/v2.5/src/components/Changelog/Changelog.tsx b/ui/v2.5/src/components/Changelog/Changelog.tsx index 97175e1c2..7e4207dce 100644 --- a/ui/v2.5/src/components/Changelog/Changelog.tsx +++ b/ui/v2.5/src/components/Changelog/Changelog.tsx @@ -256,7 +256,9 @@ const Changelog: React.FC = () => { return (
-

Changelog:

+

+ +

{releases.map((r) => ( void; - type: "scene"; // TODO - add image generate + type: "scene" | "image" | "gallery"; } -export const GenerateDialog: React.FC = ({ +export const GenerateDialog: React.FC = ({ selectedIds, onClose, type, }) => { + const sceneIDs = type === "scene" ? selectedIds : undefined; + const imageIDs = type === "image" ? selectedIds : undefined; + const galleryIDs = type === "gallery" ? selectedIds : undefined; + const { configuration } = useConfigurationContext(); function getDefaultOptions(): GQL.GenerateMetadataInput { @@ -89,6 +93,13 @@ export const GenerateDialog: React.FC = ({ }, [configuration, configRead]); const selectionStatus = useMemo(() => { + const countableIds: Record = { + scene: "countables.scenes", + image: "countables.images", + gallery: "countables.galleries", + }; + const countableId = countableIds[type]; + if (selectedIds) { return ( @@ -98,7 +109,7 @@ export const GenerateDialog: React.FC = ({ num: selectedIds.length, scene: intl.formatMessage( { - id: "countables.scenes", + id: countableId, }, { count: selectedIds.length, @@ -118,7 +129,7 @@ export const GenerateDialog: React.FC = ({ num: intl.formatMessage({ id: "all" }), scene: intl.formatMessage( { - id: "countables.scenes", + id: countableId, }, { count: 0, @@ -135,13 +146,15 @@ export const GenerateDialog: React.FC = ({
{message}
); - }, [selectedIds, intl]); + }, [selectedIds, intl, type]); async function onGenerate() { try { await mutateMetadataGenerate({ ...options, - sceneIDs: selectedIds, + sceneIDs, + imageIDs, + galleryIDs, }); Toast.success( intl.formatMessage( diff --git a/ui/v2.5/src/components/Dialogs/IdentifyDialog/IdentifyDialog.tsx b/ui/v2.5/src/components/Dialogs/IdentifyDialog/IdentifyDialog.tsx index 3073a7952..8262de4ec 100644 --- a/ui/v2.5/src/components/Dialogs/IdentifyDialog/IdentifyDialog.tsx +++ b/ui/v2.5/src/components/Dialogs/IdentifyDialog/IdentifyDialog.tsx @@ -62,7 +62,7 @@ export const IdentifyDialog: React.FC = ({ createMissing: true, }, ], - includeMalePerformers: true, + performerGenders: undefined, setCoverImage: true, setOrganized: false, skipMultipleMatches: true, diff --git a/ui/v2.5/src/components/Dialogs/IdentifyDialog/Options.tsx b/ui/v2.5/src/components/Dialogs/IdentifyDialog/Options.tsx index 1362df02a..4987db5f9 100644 --- a/ui/v2.5/src/components/Dialogs/IdentifyDialog/Options.tsx +++ b/ui/v2.5/src/components/Dialogs/IdentifyDialog/Options.tsx @@ -6,6 +6,7 @@ import { IScraperSource } from "./constants"; import { FieldOptionsList } from "./FieldOptions"; import { ThreeStateBoolean } from "./ThreeStateBoolean"; import { TagSelect } from "src/components/Shared/Select"; +import { genderList } from "src/utils/gender"; interface IOptionsEditor { options: GQL.IdentifyMetadataOptionsInput; @@ -124,24 +125,52 @@ export const OptionsEditor: React.FC = ({ )} - - setOptions({ - includeMalePerformers: v, - }) - } - label={intl.formatMessage({ - id: "config.tasks.identify.include_male_performers", - })} - defaultValue={defaultOptions?.includeMalePerformers ?? undefined} - {...checkboxProps} - /> + + + + + {source && ( + ) => { + if (e.currentTarget.checked) { + setOptions({ performerGenders: undefined }); + } else { + setOptions({ + performerGenders: + defaultOptions?.performerGenders ?? 
genderList.slice(), + }); + } + }} + /> + )} + {(options.performerGenders != null || !source) && + genderList.map((gender) => { + const performerGenders = + options.performerGenders ?? genderList.slice(); + return ( + } + checked={performerGenders.includes(gender)} + onChange={(e: React.ChangeEvent) => { + const isChecked = e.currentTarget.checked; + setOptions({ + performerGenders: isChecked + ? [...performerGenders, gender] + : performerGenders.filter((g) => g !== gender), + }); + }} + /> + ); + })} + + + + > = ({ }; const Galleries: React.FC = () => { - return ; + return ; }; const GalleryRoutes: React.FC = () => { diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx index 195766e03..18cbeff96 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx @@ -15,6 +15,11 @@ import { useFindGallery, useGalleryUpdate, } from "src/core/StashService"; +import { lazyComponent } from "src/utils/lazyComponent"; + +const GenerateDialog = lazyComponent( + () => import("../../Dialogs/GenerateDialog") +); import { ErrorMessage } from "src/components/Shared/ErrorMessage"; import { LoadingIndicator } from "src/components/Shared/LoadingIndicator"; import { Icon } from "src/components/Shared/Icon"; @@ -165,6 +170,7 @@ export const GalleryPage: React.FC = ({ gallery, add }) => { } const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState(false); + const [isGenerateDialogOpen, setIsGenerateDialogOpen] = useState(false); function onDeleteDialogClosed(deleted: boolean) { setIsDeleteAlertOpen(false); @@ -184,6 +190,18 @@ export const GalleryPage: React.FC = ({ gallery, add }) => { } } + function maybeRenderGenerateDialog() { + if (isGenerateDialogOpen) { + return ( + setIsGenerateDialogOpen(false)} + type="gallery" + /> + ); + } + } + function renderOperations() { return ( @@ -210,6 +228,12 @@ export const GalleryPage: 
React.FC = ({ gallery, add }) => { > + setIsGenerateDialogOpen(true)} + > + {`${intl.formatMessage({ id: "actions.generate" })}…`} + setIsDeleteAlertOpen(true)} @@ -387,6 +411,7 @@ export const GalleryPage: React.FC = ({ gallery, add }) => { {title} {maybeRenderDeleteDialog()} + {maybeRenderGenerateDialog()}
diff --git a/ui/v2.5/src/components/Galleries/GalleryList.tsx b/ui/v2.5/src/components/Galleries/GalleryList.tsx index 9a4fc5236..d06aaf3a4 100644 --- a/ui/v2.5/src/components/Galleries/GalleryList.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryList.tsx @@ -1,10 +1,10 @@ -import React, { useState } from "react"; -import { useIntl } from "react-intl"; +import React, { useCallback, useEffect } from "react"; +import { FormattedMessage, useIntl } from "react-intl"; import cloneDeep from "lodash-es/cloneDeep"; import { useHistory } from "react-router-dom"; import Mousetrap from "mousetrap"; import * as GQL from "src/core/generated-graphql"; -import { ItemList, ItemListContext, showWhenSelected } from "../List/ItemList"; +import { useFilteredItemList } from "../List/ItemList"; import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; import { queryFindGalleries, useFindGalleries } from "src/core/StashService"; @@ -12,19 +12,174 @@ import GalleryWallCard from "./GalleryWallCard"; import { EditGalleriesDialog } from "./EditGalleriesDialog"; import { DeleteGalleriesDialog } from "./DeleteGalleriesDialog"; import { ExportDialog } from "../Shared/ExportDialog"; +import { GenerateDialog } from "../Dialogs/GenerateDialog"; import { GalleryListTable } from "./GalleryListTable"; import { GalleryCardGrid } from "./GalleryCardGrid"; import { View } from "../List/views"; -import { PatchComponent } from "src/patch"; -import { IItemListOperation } from "../List/FilteredListToolbar"; +import useFocus from "src/utils/focus"; +import { + Sidebar, + SidebarPane, + SidebarPaneContent, + SidebarStateContext, + useSidebarState, +} from "../Shared/Sidebar"; +import { useCloseEditDelete, useFilterOperations } from "../List/util"; +import { + FilteredSidebarHeader, + useFilteredSidebarKeybinds, +} from "../List/Filters/FilterSidebar"; +import cx from "classnames"; +import { LoadedContent } from "../List/PagedList"; +import { 
Pagination, PaginationIndex } from "../List/Pagination"; +import { PatchComponent, PatchContainerComponent } from "src/patch"; +import { SidebarStudiosFilter } from "../List/Filters/StudiosFilter"; +import { SidebarPerformersFilter } from "../List/Filters/PerformersFilter"; +import { SidebarTagsFilter } from "../List/Filters/TagsFilter"; +import { SidebarRatingFilter } from "../List/Filters/RatingFilter"; +import { SidebarBooleanFilter } from "../List/Filters/BooleanFilter"; +import { OrganizedCriterionOption } from "src/models/list-filter/criteria/organized"; +import { Button } from "react-bootstrap"; +import { + IListFilterOperation, + ListOperations, +} from "../List/ListOperationButtons"; +import { + FilteredListToolbar, + IItemListOperation, +} from "../List/FilteredListToolbar"; +import { FilterTags } from "../List/FilterTags"; -function getItems(result: GQL.FindGalleriesQueryResult) { - return result?.data?.findGalleries?.galleries ?? []; -} +const GalleryList: React.FC<{ + galleries: GQL.SlimGalleryDataFragment[]; + filter: ListFilterModel; + selectedIds: Set; + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; +}> = PatchComponent( + "GalleryList", + ({ galleries, filter, selectedIds, onSelectChange }) => { + if (galleries.length === 0) { + return null; + } -function getCount(result: GQL.FindGalleriesQueryResult) { - return result?.data?.findGalleries?.count ?? 0; -} + if (filter.displayMode === DisplayMode.Grid) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.List) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.Wall) { + return ( +
+ {galleries.map((gallery) => ( + + onSelectChange(gallery.id, selected, shiftKey) + } + selecting={selectedIds.size > 0} + /> + ))} +
+ ); + } + + return null; + } +); + +const GalleryFilterSidebarSections = PatchContainerComponent( + "FilteredGalleryList.SidebarSections" +); + +const SidebarContent: React.FC<{ + filter: ListFilterModel; + setFilter: (filter: ListFilterModel) => void; + filterHook?: (filter: ListFilterModel) => ListFilterModel; + view?: View; + sidebarOpen: boolean; + onClose?: () => void; + showEditFilter: (editingCriterion?: string) => void; + count?: number; + focus?: ReturnType; +}> = ({ + filter, + setFilter, + filterHook, + view, + showEditFilter, + sidebarOpen, + onClose, + count, + focus, +}) => { + const showResultsId = + count !== undefined ? "actions.show_count_results" : "actions.show_results"; + + const hideStudios = view === View.StudioScenes; + + return ( + <> + + + + {!hideStudios && ( + + )} + + + + } + data-type={OrganizedCriterionOption.type} + option={OrganizedCriterionOption} + filter={filter} + setFilter={setFilter} + /> + + +
+ +
+ + ); +}; interface IGalleryList { filterHook?: (filter: ListFilterModel) => ListFilterModel; @@ -33,188 +188,323 @@ interface IGalleryList { extraOperations?: IItemListOperation[]; } -export const GalleryList: React.FC = PatchComponent( - "GalleryList", - ({ filterHook, view, alterQuery, extraOperations = [] }) => { - const intl = useIntl(); - const history = useHistory(); - const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); - const [isExportAll, setIsExportAll] = useState(false); +function useViewRandom(filter: ListFilterModel, count: number) { + const history = useHistory(); - const filterMode = GQL.FilterMode.Galleries; + const viewRandom = useCallback(async () => { + // query for a random scene + if (count === 0) { + return; + } + + const index = Math.floor(Math.random() * count); + const filterCopy = cloneDeep(filter); + filterCopy.itemsPerPage = 1; + filterCopy.currentPage = index + 1; + const singleResult = await queryFindGalleries(filterCopy); + if (singleResult.data.findGalleries.galleries.length === 1) { + const { id } = singleResult.data.findGalleries.galleries[0]; + // navigate to the image player page + history.push(`/galleries/${id}`); + } + }, [history, filter, count]); + + return viewRandom; +} + +function useAddKeybinds(filter: ListFilterModel, count: number) { + const viewRandom = useViewRandom(filter, count); + + useEffect(() => { + Mousetrap.bind("p r", () => { + viewRandom(); + }); + + return () => { + Mousetrap.unbind("p r"); + }; + }, [viewRandom]); +} + +export const FilteredGalleryList = PatchComponent( + "FilteredGalleryList", + (props: IGalleryList) => { + const intl = useIntl(); + + const searchFocus = useFocus(); + + const { filterHook, view, alterQuery, extraOperations = [] } = props; + + // States + const { + showSidebar, + setShowSidebar, + sectionOpen, + setSectionOpen, + loading: sidebarStateLoading, + } = useSidebarState(view); + + const { filterState, queryResult, modalState, listSelect, showEditFilter } = + 
useFilteredItemList({ + filterStateProps: { + filterMode: GQL.FilterMode.Galleries, + view, + useURL: alterQuery, + }, + queryResultProps: { + useResult: useFindGalleries, + getCount: (r) => r.data?.findGalleries.count ?? 0, + getItems: (r) => r.data?.findGalleries.galleries ?? [], + filterHook, + }, + }); + + const { filter, setFilter } = filterState; + + const { effectiveFilter, result, cachedResult, items, totalCount } = + queryResult; + + const { + selectedIds, + selectedItems, + onSelectChange, + onSelectAll, + onSelectNone, + onInvertSelection, + hasSelection, + } = listSelect; + + const { modal, showModal, closeModal } = modalState; + + // Utility hooks + const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ + filter, + setFilter, + }); + + useAddKeybinds(filter, totalCount); + useFilteredSidebarKeybinds({ + showSidebar, + setShowSidebar, + }); + + useEffect(() => { + Mousetrap.bind("e", () => { + if (hasSelection) { + onEdit?.(); + } + }); + + Mousetrap.bind("d d", () => { + if (hasSelection) { + onDelete?.(); + } + }); + + return () => { + Mousetrap.unbind("e"); + Mousetrap.unbind("d d"); + }; + }); + + const onCloseEditDelete = useCloseEditDelete({ + closeModal, + onSelectNone, + result, + }); + + const viewRandom = useViewRandom(filter, totalCount); + + function onExport(all: boolean) { + showModal( + closeModal()} + /> + ); + } + + function onEdit() { + showModal( + + ); + } + + function onDelete() { + showModal( + + ); + } + + function onGenerate() { + showModal( + closeModal()} + /> + ); + } + + const convertedExtraOperations: IListFilterOperation[] = + extraOperations.map((o) => ({ + ...o, + isDisplayed: o.isDisplayed + ? 
() => o.isDisplayed!(result, filter, selectedIds) + : undefined, + onClick: () => { + o.onClick(result, filter, selectedIds); + }, + })); const otherOperations = [ - ...extraOperations, + ...convertedExtraOperations, + { + text: intl.formatMessage({ id: "actions.select_all" }), + onClick: () => onSelectAll(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.select_none" }), + onClick: () => onSelectNone(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.invert_selection" }), + onClick: () => onInvertSelection(), + isDisplayed: () => totalCount > 0, + }, { text: intl.formatMessage({ id: "actions.view_random" }), onClick: viewRandom, }, + { + text: `${intl.formatMessage({ id: "actions.generate" })}…`, + onClick: onGenerate, + isDisplayed: () => hasSelection, + }, { text: intl.formatMessage({ id: "actions.export" }), - onClick: onExport, - isDisplayed: showWhenSelected, + onClick: () => onExport(false), + isDisplayed: () => hasSelection, }, { text: intl.formatMessage({ id: "actions.export_all" }), - onClick: onExportAll, + onClick: () => onExport(true), }, ]; - function addKeybinds( - result: GQL.FindGalleriesQueryResult, - filter: ListFilterModel - ) { - Mousetrap.bind("p r", () => { - viewRandom(result, filter); - }); + // render + if (sidebarStateLoading) return null; - return () => { - Mousetrap.unbind("p r"); - }; - } - - async function viewRandom( - result: GQL.FindGalleriesQueryResult, - filter: ListFilterModel - ) { - // query for a random image - if (result.data?.findGalleries) { - const { count } = result.data.findGalleries; - - const index = Math.floor(Math.random() * count); - const filterCopy = cloneDeep(filter); - filterCopy.itemsPerPage = 1; - filterCopy.currentPage = index + 1; - const singleResult = await queryFindGalleries(filterCopy); - if (singleResult.data.findGalleries.galleries.length === 1) { - const { id } = singleResult.data.findGalleries.galleries[0]; - // navigate 
to the image player page - history.push(`/galleries/${id}`); - } - } - } - - async function onExport() { - setIsExportAll(false); - setIsExportDialogOpen(true); - } - - async function onExportAll() { - setIsExportAll(true); - setIsExportDialogOpen(true); - } - - function renderContent( - result: GQL.FindGalleriesQueryResult, - filter: ListFilterModel, - selectedIds: Set, - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void - ) { - function maybeRenderGalleryExportDialog() { - if (isExportDialogOpen) { - return ( - setIsExportDialogOpen(false)} - /> - ); - } - } - - function renderGalleries() { - if (!result.data?.findGalleries) return; - - if (filter.displayMode === DisplayMode.Grid) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.List) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.Wall) { - return ( -
-
- {result.data.findGalleries.galleries.map((gallery) => ( - - onSelectChange(gallery.id, selected, shiftKey) - } - selecting={selectedIds.size > 0} - /> - ))} -
-
- ); - } - } - - return ( - <> - {maybeRenderGalleryExportDialog()} - {renderGalleries()} - - ); - } - - function renderEditDialog( - selectedImages: GQL.SlimGalleryDataFragment[], - onClose: (applied: boolean) => void - ) { - return ( - - ); - } - - function renderDeleteDialog( - selectedImages: GQL.SlimGalleryDataFragment[], - onClose: (confirmed: boolean) => void - ) { - return ( - - ); - } + const operations = ( + + ); return ( - - - + {modal} + + + + setShowSidebar(false)}> + setShowSidebar(false)} + count={cachedResult.loading ? undefined : totalCount} + focus={searchFocus} + /> + + setShowSidebar(!showSidebar)} + > + + + showEditFilter(c.criterionOption.type)} + onRemoveCriterion={removeCriterion} + onRemoveAll={clearAllCriteria} + /> + +
+ setFilter(filter.changePage(page))} + /> + +
+ + + + + + {totalCount > filter.itemsPerPage && ( +
+
+ +
+
+ )} +
+
+
+
); } ); diff --git a/ui/v2.5/src/components/Galleries/styles.scss b/ui/v2.5/src/components/Galleries/styles.scss index 9890e887b..c53175313 100644 --- a/ui/v2.5/src/components/Galleries/styles.scss +++ b/ui/v2.5/src/components/Galleries/styles.scss @@ -229,7 +229,6 @@ div.GalleryWall { display: flex; flex-wrap: wrap; margin: 0 auto; - width: 96vw; /* Prevents last row from consuming all space and stretching images to oblivion */ &::after { diff --git a/ui/v2.5/src/components/Groups/GroupDetails/GroupPerformersPanel.tsx b/ui/v2.5/src/components/Groups/GroupDetails/GroupPerformersPanel.tsx index 057b99f2a..3ec78084a 100644 --- a/ui/v2.5/src/components/Groups/GroupDetails/GroupPerformersPanel.tsx +++ b/ui/v2.5/src/components/Groups/GroupDetails/GroupPerformersPanel.tsx @@ -1,7 +1,7 @@ import React from "react"; import * as GQL from "src/core/generated-graphql"; import { useGroupFilterHook } from "src/core/groups"; -import { PerformerList } from "src/components/Performers/PerformerList"; +import { FilteredPerformerList } from "src/components/Performers/PerformerList"; import { View } from "src/components/List/views"; interface IGroupPerformersPanel { @@ -18,7 +18,7 @@ export const GroupPerformersPanel: React.FC = ({ const filterHook = useGroupFilterHook(group, showChildGroupContent); return ( - , @@ -71,37 +61,6 @@ const useContainingGroupFilterHook = ( }; }; -const Toolbar: React.FC = ({ - onEdit, - onDelete, - operations, -}) => { - const { getSelected, onSelectAll, onSelectNone, onInvertSelection } = - useListContext(); - const { filter, setFilter } = useFilter(); - - return ( - -
- -
- setFilter(filter.setPageSize(size))} - /> - 0} - otherOperations={operations} - onEdit={onEdit} - onDelete={onDelete} - /> -
- ); -}; - interface IGroupSubGroupsPanel { active: boolean; group: GQL.GroupDataFragment; @@ -203,14 +162,14 @@ export const GroupSubGroupsPanel: React.FC = return ( <> {modal} - } + view={View.GroupSubGroups} /> ); diff --git a/ui/v2.5/src/components/Groups/GroupList.tsx b/ui/v2.5/src/components/Groups/GroupList.tsx index a08610569..6ce00831c 100644 --- a/ui/v2.5/src/components/Groups/GroupList.tsx +++ b/ui/v2.5/src/components/Groups/GroupList.tsx @@ -1,5 +1,5 @@ -import React, { PropsWithChildren, useState } from "react"; -import { useIntl } from "react-intl"; +import React, { useCallback, useEffect } from "react"; +import { FormattedMessage, useIntl } from "react-intl"; import cloneDeep from "lodash-es/cloneDeep"; import Mousetrap from "mousetrap"; import { useHistory } from "react-router-dom"; @@ -11,208 +11,321 @@ import { useFindGroups, useGroupsDestroy, } from "src/core/StashService"; -import { ItemList, ItemListContext, showWhenSelected } from "../List/ItemList"; +import { useFilteredItemList } from "../List/ItemList"; import { ExportDialog } from "../Shared/ExportDialog"; import { DeleteEntityDialog } from "../Shared/DeleteEntityDialog"; import { GroupCardGrid } from "./GroupCardGrid"; import { EditGroupsDialog } from "./EditGroupsDialog"; import { View } from "../List/views"; import { - IFilteredListToolbar, + FilteredListToolbar, IItemListOperation, } from "../List/FilteredListToolbar"; -import { PatchComponent } from "src/patch"; +import { PatchComponent, PatchContainerComponent } from "src/patch"; +import useFocus from "src/utils/focus"; +import { + Sidebar, + SidebarPane, + SidebarPaneContent, + SidebarStateContext, + useSidebarState, +} from "../Shared/Sidebar"; +import { useCloseEditDelete, useFilterOperations } from "../List/util"; +import { + FilteredSidebarHeader, + useFilteredSidebarKeybinds, +} from "../List/Filters/FilterSidebar"; +import { + IListFilterOperation, + ListOperations, +} from "../List/ListOperationButtons"; +import cx from 
"classnames"; +import { FilterTags } from "../List/FilterTags"; +import { Pagination, PaginationIndex } from "../List/Pagination"; +import { LoadedContent } from "../List/PagedList"; +import { SidebarStudiosFilter } from "../List/Filters/StudiosFilter"; +import { SidebarTagsFilter } from "../List/Filters/TagsFilter"; +import { SidebarRatingFilter } from "../List/Filters/RatingFilter"; +import { Button } from "react-bootstrap"; -const GroupExportDialog: React.FC<{ - open?: boolean; +const GroupList: React.FC<{ + groups: GQL.ListGroupDataFragment[]; + filter: ListFilterModel; selectedIds: Set; - isExportAll?: boolean; - onClose: () => void; -}> = ({ open = false, selectedIds, isExportAll = false, onClose }) => { - if (!open) { + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; + fromGroupId?: string; + onMove?: (srcIds: string[], targetId: string, after: boolean) => void; +}> = PatchComponent( + "GroupList", + ({ groups, filter, selectedIds, onSelectChange, fromGroupId, onMove }) => { + if (groups.length === 0) { + return null; + } + + if (filter.displayMode === DisplayMode.Grid) { + return ( + + ); + } + return null; } +); + +const GroupFilterSidebarSections = PatchContainerComponent( + "FilteredGroupList.SidebarSections" +); + +const SidebarContent: React.FC<{ + filter: ListFilterModel; + setFilter: (filter: ListFilterModel) => void; + filterHook?: (filter: ListFilterModel) => ListFilterModel; + view?: View; + sidebarOpen: boolean; + onClose?: () => void; + showEditFilter: (editingCriterion?: string) => void; + count?: number; + focus?: ReturnType; +}> = ({ + filter, + setFilter, + filterHook, + view, + showEditFilter, + sidebarOpen, + onClose, + count, + focus, +}) => { + const showResultsId = + count !== undefined ? "actions.show_count_results" : "actions.show_results"; + + const hideStudios = view === View.StudioScenes; return ( - + <> + + + + {!hideStudios && ( + + )} + + + + +
+ +
+ ); }; -const filterMode = GQL.FilterMode.Groups; - -function getItems(result: GQL.FindGroupsQueryResult) { - return result?.data?.findGroups?.groups ?? []; -} - -function getCount(result: GQL.FindGroupsQueryResult) { - return result?.data?.findGroups?.count ?? 0; -} - interface IGroupListContext { filterHook?: (filter: ListFilterModel) => ListFilterModel; defaultFilter?: ListFilterModel; view?: View; alterQuery?: boolean; - selectable?: boolean; } -export const GroupListContext: React.FC< - PropsWithChildren -> = ({ alterQuery, filterHook, defaultFilter, view, selectable, children }) => { - return ( - - {children} - - ); -}; - interface IGroupList extends IGroupListContext { fromGroupId?: string; onMove?: (srcIds: string[], targetId: string, after: boolean) => void; - renderToolbar?: (props: IFilteredListToolbar) => React.ReactNode; otherOperations?: IItemListOperation[]; } -export const GroupList: React.FC = PatchComponent( - "GroupList", - ({ - filterHook, - alterQuery, - defaultFilter, - view, - fromGroupId, - onMove, - selectable, - renderToolbar, - otherOperations: providedOperations = [], - }) => { +function useViewRandom(filter: ListFilterModel, count: number) { + const history = useHistory(); + + const viewRandom = useCallback(async () => { + // query for a random scene + if (count === 0) { + return; + } + + const index = Math.floor(Math.random() * count); + const filterCopy = cloneDeep(filter); + filterCopy.itemsPerPage = 1; + filterCopy.currentPage = index + 1; + const singleResult = await queryFindGroups(filterCopy); + if (singleResult.data.findGroups.groups.length === 1) { + const { id } = singleResult.data.findGroups.groups[0]; + // navigate to the image player page + history.push(`/groups/${id}`); + } + }, [history, filter, count]); + + return viewRandom; +} + +function useAddKeybinds(filter: ListFilterModel, count: number) { + const viewRandom = useViewRandom(filter, count); + + useEffect(() => { + Mousetrap.bind("p r", () => { + viewRandom(); + 
}); + + return () => { + Mousetrap.unbind("p r"); + }; + }, [viewRandom]); +} + +export const FilteredGroupList = PatchComponent( + "FilteredGroupList", + (props: IGroupList) => { const intl = useIntl(); - const history = useHistory(); - const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); - const [isExportAll, setIsExportAll] = useState(false); - const otherOperations = [ - { - text: intl.formatMessage({ id: "actions.view_random" }), - onClick: viewRandom, - }, - { - text: intl.formatMessage({ id: "actions.export" }), - onClick: onExport, - isDisplayed: showWhenSelected, - }, - { - text: intl.formatMessage({ id: "actions.export_all" }), - onClick: onExportAll, - }, - ...providedOperations, - ]; + const searchFocus = useFocus(); - function addKeybinds( - result: GQL.FindGroupsQueryResult, - filter: ListFilterModel - ) { - Mousetrap.bind("p r", () => { - viewRandom(result, filter); + const { + filterHook, + view, + alterQuery, + onMove, + fromGroupId, + otherOperations: providedOperations = [], + defaultFilter, + } = props; + + const withSidebar = view !== View.GroupSubGroups; + const filterable = view !== View.GroupSubGroups; + const sortable = view !== View.GroupSubGroups; + + // States + const { + showSidebar, + setShowSidebar, + sectionOpen, + setSectionOpen, + loading: sidebarStateLoading, + } = useSidebarState(view); + + const { filterState, queryResult, modalState, listSelect, showEditFilter } = + useFilteredItemList({ + filterStateProps: { + filterMode: GQL.FilterMode.Groups, + defaultFilter, + view, + useURL: alterQuery, + }, + queryResultProps: { + useResult: useFindGroups, + getCount: (r) => r.data?.findGroups.count ?? 0, + getItems: (r) => r.data?.findGroups.groups ?? 
[], + filterHook, + }, + }); + + const { filter, setFilter } = filterState; + + const { effectiveFilter, result, cachedResult, items, totalCount } = + queryResult; + + const { + selectedIds, + selectedItems, + onSelectChange, + onSelectAll, + onSelectNone, + onInvertSelection, + hasSelection, + } = listSelect; + + const { modal, showModal, closeModal } = modalState; + + // Utility hooks + const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ + filter, + setFilter, + }); + + useAddKeybinds(filter, totalCount); + useFilteredSidebarKeybinds({ + showSidebar, + setShowSidebar, + }); + + useEffect(() => { + Mousetrap.bind("e", () => { + if (hasSelection) { + onEdit?.(); + } + }); + + Mousetrap.bind("d d", () => { + if (hasSelection) { + onDelete?.(); + } }); return () => { - Mousetrap.unbind("p r"); + Mousetrap.unbind("e"); + Mousetrap.unbind("d d"); }; - } + }); - async function viewRandom( - result: GQL.FindGroupsQueryResult, - filter: ListFilterModel - ) { - // query for a random image - if (result.data?.findGroups) { - const { count } = result.data.findGroups; + const onCloseEditDelete = useCloseEditDelete({ + closeModal, + onSelectNone, + result, + }); - const index = Math.floor(Math.random() * count); - const filterCopy = cloneDeep(filter); - filterCopy.itemsPerPage = 1; - filterCopy.currentPage = index + 1; - const singleResult = await queryFindGroups(filterCopy); - if (singleResult.data.findGroups.groups.length === 1) { - const { id } = singleResult.data.findGroups.groups[0]; - // navigate to the group page - history.push(`/groups/${id}`); - } - } - } + const viewRandom = useViewRandom(filter, totalCount); - async function onExport() { - setIsExportAll(false); - setIsExportDialogOpen(true); - } - - async function onExportAll() { - setIsExportAll(true); - setIsExportDialogOpen(true); - } - - function renderContent( - result: GQL.FindGroupsQueryResult, - filter: ListFilterModel, - selectedIds: Set, - onSelectChange: (id: string, selected: 
boolean, shiftKey: boolean) => void - ) { - return ( - <> - setIsExportDialogOpen(false)} - /> - {filter.displayMode === DisplayMode.Grid && ( - - )} - + function onExport(all: boolean) { + showModal( + closeModal()} + /> ); } - function renderEditDialog( - selectedGroups: GQL.ListGroupDataFragment[], - onClose: (applied: boolean) => void - ) { - return ; + function onEdit() { + showModal( + + ); } - function renderDeleteDialog( - selectedGroups: GQL.SlimGroupDataFragment[], - onClose: (confirmed: boolean) => void - ) { - return ( + function onDelete() { + showModal( = PatchComponent( ); } - return ( - - ({ + ...o, + isDisplayed: o.isDisplayed + ? () => o.isDisplayed!(result, filter, selectedIds) + : undefined, + onClick: () => { + o.onClick(result, filter, selectedIds); + }, + })); + + const otherOperations = [ + ...convertedExtraOperations, + { + text: intl.formatMessage({ id: "actions.select_all" }), + onClick: () => onSelectAll(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.select_none" }), + onClick: () => onSelectNone(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.invert_selection" }), + onClick: () => onInvertSelection(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.view_random" }), + onClick: viewRandom, + }, + { + text: intl.formatMessage({ id: "actions.export" }), + onClick: () => onExport(false), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export_all" }), + onClick: () => onExport(true), + }, + ]; + + // render + if (sidebarStateLoading) return null; + + const operations = ( + + ); + + const content = ( + <> + - + + showEditFilter(c.criterionOption.type)} + onRemoveCriterion={removeCriterion} + onRemoveAll={clearAllCriteria} + /> + +
+ setFilter(filter.changePage(page))} + /> + +
+ + + + + + {totalCount > filter.itemsPerPage && ( +
+
+ +
+
+ )} + + ); + + if (!withSidebar) { + return content; + } + + return ( +
+ {modal} + + + + setShowSidebar(false)}> + setShowSidebar(false)} + count={cachedResult.loading ? undefined : totalCount} + focus={searchFocus} + /> + + setShowSidebar(!showSidebar)} + > + {content} + + + +
); } ); diff --git a/ui/v2.5/src/components/Groups/Groups.tsx b/ui/v2.5/src/components/Groups/Groups.tsx index 5ec7b4eaf..1a89444b0 100644 --- a/ui/v2.5/src/components/Groups/Groups.tsx +++ b/ui/v2.5/src/components/Groups/Groups.tsx @@ -4,11 +4,11 @@ import { Helmet } from "react-helmet"; import { useTitleProps } from "src/hooks/title"; import Group from "./GroupDetails/Group"; import GroupCreate from "./GroupDetails/GroupCreate"; -import { GroupList } from "./GroupList"; +import { FilteredGroupList } from "./GroupList"; import { View } from "../List/views"; const Groups: React.FC = () => { - return ; + return ; }; const GroupRoutes: React.FC = () => { diff --git a/ui/v2.5/src/components/Help/Manual.tsx b/ui/v2.5/src/components/Help/Manual.tsx index d8fc1dbed..e90e2e5ac 100644 --- a/ui/v2.5/src/components/Help/Manual.tsx +++ b/ui/v2.5/src/components/Help/Manual.tsx @@ -23,6 +23,7 @@ import Interactive from "src/docs/en/Manual/Interactive.md"; import Captions from "src/docs/en/Manual/Captions.md"; import Identify from "src/docs/en/Manual/Identify.md"; import Browsing from "src/docs/en/Manual/Browsing.md"; +import TroubleshootingMode from "src/docs/en/Manual/TroubleshootingMode.md"; import { MarkdownPage } from "../Shared/MarkdownPage"; interface IManualProps { @@ -152,6 +153,11 @@ export const Manual: React.FC = ({ title: "Keyboard Shortcuts", content: KeyboardShortcuts, }, + { + key: "TroubleshootingMode.md", + title: "Troubleshooting Mode", + content: TroubleshootingMode, + }, { key: "Contributing.md", title: "Contributing", diff --git a/ui/v2.5/src/components/Images/ImageCard.tsx b/ui/v2.5/src/components/Images/ImageCard.tsx index 0b60a77ff..adaee9923 100644 --- a/ui/v2.5/src/components/Images/ImageCard.tsx +++ b/ui/v2.5/src/components/Images/ImageCard.tsx @@ -30,17 +30,9 @@ interface IImageCardProps { onPreview?: (ev: MouseEvent) => void; } -export const ImageCard: React.FC = PatchComponent( - "ImageCard", +const ImageCardPopovers = PatchComponent( + 
"ImageCard.Popovers", (props: IImageCardProps) => { - const file = useMemo( - () => - props.image.visual_files.length > 0 - ? props.image.visual_files[0] - : undefined, - [props.image] - ); - function maybeRenderTagPopoverButton() { if (props.image.tags.length <= 0) return; @@ -112,29 +104,65 @@ export const ImageCard: React.FC = PatchComponent( } } - function maybeRenderPopoverButtonGroup() { - if ( - props.image.tags.length > 0 || - props.image.performers.length > 0 || - props.image.o_counter || - props.image.galleries.length > 0 || - props.image.organized - ) { - return ( - <> -
- - {maybeRenderTagPopoverButton()} - {maybeRenderPerformerPopoverButton()} - {maybeRenderOCounter()} - {maybeRenderGallery()} - {maybeRenderOrganized()} - - - ); - } + if ( + props.image.tags.length > 0 || + props.image.performers.length > 0 || + props.image.o_counter || + props.image.galleries.length > 0 || + props.image.organized + ) { + return ( + <> +
+ + {maybeRenderTagPopoverButton()} + {maybeRenderPerformerPopoverButton()} + {maybeRenderOCounter()} + {maybeRenderGallery()} + {maybeRenderOrganized()} + + + ); } + return null; + } +); + +const ImageCardDetails = PatchComponent( + "ImageCard.Details", + (props: IImageCardProps) => { + return ( +
+ {props.image.date} + +
+ ); + } +); + +const ImageCardOverlays = PatchComponent( + "ImageCard.Overlays", + (props: IImageCardProps) => { + return ; + } +); + +const ImageCardImage = PatchComponent( + "ImageCard.Image", + (props: IImageCardProps) => { + const file = useMemo( + () => + props.image.visual_files.length > 0 + ? props.image.visual_files[0] + : undefined, + [props.image] + ); + function isPortrait() { const width = file?.width ? file.width : 0; const height = file?.height ? file.height : 0; @@ -148,6 +176,34 @@ export const ImageCard: React.FC = PatchComponent( const video = source.includes("preview"); const ImagePreview = video ? "video" : "img"; + return ( + <> +
+ + {props.onPreview ? ( +
+ +
+ ) : undefined} +
+ + + ); + } +); + +export const ImageCard: React.FC = PatchComponent( + "ImageCard", + (props: IImageCardProps) => { return ( = PatchComponent( width={props.cardWidth} title={imageTitle(props.image)} linkClassName="image-card-link" - image={ - <> -
- - {props.onPreview ? ( -
- -
- ) : undefined} -
- - - } - details={ -
- {props.image.date} - -
- } - overlays={} - popovers={maybeRenderPopoverButtonGroup()} + image={} + details={} + overlays={} + popovers={} selected={props.selected} selecting={props.selecting} onSelectedChanged={props.onSelectedChanged} diff --git a/ui/v2.5/src/components/Images/ImageDetails/Image.tsx b/ui/v2.5/src/components/Images/ImageDetails/Image.tsx index 47de3971e..f79d95fca 100644 --- a/ui/v2.5/src/components/Images/ImageDetails/Image.tsx +++ b/ui/v2.5/src/components/Images/ImageDetails/Image.tsx @@ -36,6 +36,7 @@ import cx from "classnames"; import { TruncatedText } from "src/components/Shared/TruncatedText"; import { goBackOrReplace } from "src/utils/history"; import { FormattedDate } from "src/components/Shared/Date"; +import { GenerateDialog } from "src/components/Dialogs/GenerateDialog"; interface IProps { image: GQL.ImageDataFragment; @@ -62,6 +63,7 @@ const ImagePage: React.FC = ({ image }) => { const [activeTabKey, setActiveTabKey] = useState("image-details-panel"); const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState(false); + const [isGenerateDialogOpen, setIsGenerateDialogOpen] = useState(false); async function onSave(input: GQL.ImageUpdateInput) { await updateImage({ @@ -170,6 +172,20 @@ const ImagePage: React.FC = ({ image }) => { } } + function maybeRenderSceneGenerateDialog() { + if (isGenerateDialogOpen) { + return ( + { + setIsGenerateDialogOpen(false); + }} + type="image" + /> + ); + } + } + function renderOperations() { return ( @@ -189,6 +205,13 @@ const ImagePage: React.FC = ({ image }) => { > + setIsGenerateDialogOpen(true)} + > + … + = ({ image }) => { {maybeRenderDeleteDialog()} + {maybeRenderSceneGenerateDialog()}
diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx index 4e566a626..f247e062b 100644 --- a/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx @@ -9,6 +9,7 @@ import { useToast } from "src/hooks/Toast"; import TextUtils from "src/utils/text"; import { TextField, URLField, URLsField } from "src/utils/field"; import { FileSize } from "src/components/Shared/FileSize"; +import NavUtils from "src/utils/navigation"; interface IFileInfoPanelProps { file: GQL.ImageFileDataFragment | GQL.VideoFileDataFragment; @@ -23,6 +24,7 @@ const FileInfoPanel: React.FC = ( props: IFileInfoPanelProps ) => { const checksum = props.file.fingerprints.find((f) => f.type === "md5"); + const phash = props.file.fingerprints.find((f) => f.type === "phash"); return (
@@ -36,6 +38,15 @@ const FileInfoPanel: React.FC = ( )} + = PatchComponent( const filterMode = GQL.FilterMode.Images; - const otherOperations = [ + const { modal, showModal, closeModal } = useModal(); + + const otherOperations: IItemListOperation[] = [ ...extraOperations, { text: intl.formatMessage({ id: "actions.view_random" }), onClick: viewRandom, }, + { + text: `${intl.formatMessage({ id: "actions.generate" })}…`, + onClick: (result, filter, selectedIds) => { + showModal( + closeModal()} + /> + ); + return Promise.resolve(); + }, + isDisplayed: showWhenSelected, + }, { text: intl.formatMessage({ id: "actions.export" }), onClick: onExport, @@ -497,6 +515,7 @@ export const ImageList: React.FC = PatchComponent( view={view} selectable > + {modal} = ({ ); } + if (criterion instanceof DuplicatedCriterion) { + return ( + + ); + } + if (criterion instanceof CustomFieldsCriterion) { return ( diff --git a/ui/v2.5/src/components/List/FilteredListToolbar.tsx b/ui/v2.5/src/components/List/FilteredListToolbar.tsx index 162b30ff3..a6a983dc4 100644 --- a/ui/v2.5/src/components/List/FilteredListToolbar.tsx +++ b/ui/v2.5/src/components/List/FilteredListToolbar.tsx @@ -80,6 +80,8 @@ export interface IFilteredListToolbar { operations?: IListFilterOperation[]; operationComponent?: React.ReactNode; zoomable?: boolean; + filterable?: boolean; + sortable?: boolean; } export const FilteredListToolbar: React.FC = ({ @@ -93,6 +95,8 @@ export const FilteredListToolbar: React.FC = ({ operations, operationComponent, zoomable = false, + filterable = true, + sortable = true, }) => { const filterOptions = filter.options; const { setDisplayMode, setZoom } = useFilterOperations({ @@ -128,32 +132,40 @@ export const FilteredListToolbar: React.FC = ({ /> ) : ( <> - + {filterable && ( + + )} - - - showEditFilter()} - count={filter.count()} - /> - + {filterable && ( + + + showEditFilter()} + count={filter.count()} + /> + + )} - setFilter(filter.setSortBy(e ?? 
undefined))} - onChangeSortDirection={() => - setFilter(filter.toggleSortDirection()) - } - onReshuffleRandomSort={() => - setFilter(filter.reshuffleRandomSort()) - } - /> + {sortable && ( + + setFilter(filter.setSortBy(e ?? undefined)) + } + onChangeSortDirection={() => + setFilter(filter.toggleSortDirection()) + } + onReshuffleRandomSort={() => + setFilter(filter.reshuffleRandomSort()) + } + /> + )} void; +} + +export const DuplicatedFilter: React.FC = ({ + criterion, + setCriterion, +}) => { + const intl = useIntl(); + + function onFieldChange( + fieldId: DuplicationFieldId, + value: boolean | undefined + ) { + const c = criterion.clone(); + if (value === undefined) { + delete c.value[fieldId]; + } else { + c.value[fieldId] = value; + } + setCriterion(c); + } + + return ( +
+ {DUPLICATION_FIELD_IDS.map((fieldId) => ( + onFieldChange(fieldId, v)} + /> + ))} +
+ ); +}; + +interface ISidebarDuplicateFilterProps { + title?: React.ReactNode; + filter: ListFilterModel; + setFilter: (f: ListFilterModel) => void; + sectionID?: string; +} + +export const SidebarDuplicateFilter: React.FC = ({ + title, + filter, + setFilter, + sectionID, +}) => { + const intl = useIntl(); + const [expandedType, setExpandedType] = useState(null); + + const trueLabel = intl.formatMessage({ id: "true" }); + const falseLabel = intl.formatMessage({ id: "false" }); + + // Get label for a duplicate type + const getLabel = useCallback( + (typeId: DuplicationFieldId) => + intl.formatMessage({ id: DUPLICATION_FIELD_MESSAGE_IDS[typeId] }), + [intl] + ); + + // Get the single duplicated criterion from the filter + const getCriterion = useCallback((): DuplicatedCriterion | null => { + const criteria = filter.criteriaFor( + DuplicatedCriterionOption.type + ) as DuplicatedCriterion[]; + return criteria.length > 0 ? criteria[0] : null; + }, [filter]); + + // Get value for a specific type from the criterion + const getTypeValue = useCallback( + (typeId: DuplicationFieldId): boolean | undefined => { + const criterion = getCriterion(); + if (!criterion) return undefined; + return criterion.value[typeId]; + }, + [getCriterion] + ); + + // Build selected items list + const selected: Option[] = useMemo(() => { + const result: Option[] = []; + const criterion = getCriterion(); + if (!criterion) return result; + + for (const typeId of DUPLICATION_FIELD_IDS) { + const value = criterion.value[typeId]; + if (value !== undefined) { + const valueLabel = value ? 
trueLabel : falseLabel; + result.push({ + id: typeId, + label: `${getLabel(typeId)}: ${valueLabel}`, + }); + } + } + + return result; + }, [getCriterion, trueLabel, falseLabel, getLabel]); + + // Available options - show options that aren't already selected + const options = useMemo(() => { + const result: { id: DuplicationFieldId; label: string }[] = []; + + for (const typeId of DUPLICATION_FIELD_IDS) { + if (getTypeValue(typeId) === undefined) { + result.push({ id: typeId, label: getLabel(typeId) }); + } + } + + return result; + }, [getTypeValue, getLabel]); + + function onToggleExpand(id: string) { + setExpandedType(expandedType === id ? null : id); + } + + function onUnselect(item: Option) { + const typeId = item.id as DuplicationFieldId; + const criterion = getCriterion(); + + if (!criterion) return; + + const newCriterion = criterion.clone(); + delete newCriterion.value[typeId]; + + // If no fields are set, remove the criterion entirely + const hasAnyValue = DUPLICATION_FIELD_IDS.some( + (id) => newCriterion.value[id] !== undefined + ); + + if (!hasAnyValue) { + setFilter(filter.removeCriterion(DuplicatedCriterionOption.type)); + } else { + setFilter( + filter.replaceCriteria(DuplicatedCriterionOption.type, [newCriterion]) + ); + } + setExpandedType(null); + } + + function onSelectValue(typeId: string, value: boolean) { + const criterion = getCriterion(); + const newCriterion = criterion + ? criterion.clone() + : (DuplicatedCriterionOption.makeCriterion() as DuplicatedCriterion); + + newCriterion.value[typeId as DuplicationFieldId] = value; + setFilter( + filter.replaceCriteria(DuplicatedCriterionOption.type, [newCriterion]) + ); + setExpandedType(null); + } + + return ( + onUnselect(i)} /> + } + > +
+ +
+
+ ); +}; diff --git a/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx b/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx index 200c16917..a9163578f 100644 --- a/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx @@ -18,9 +18,13 @@ import { Option } from "./SidebarListFilter"; import { CriterionModifier, FilterMode, + GalleryFilterType, + GroupFilterType, InputMaybe, IntCriterionInput, + PerformerFilterType, SceneFilterType, + StudioFilterType, } from "src/core/generated-graphql"; import { useIntl } from "react-intl"; @@ -82,7 +86,7 @@ export const LabeledIdFilter: React.FC = ({ ); }; -type ModifierValue = "any" | "none" | "any_of" | "only" | "include_subs"; +export type ModifierValue = "any" | "none" | "any_of" | "only" | "include_subs"; export function getModifierCandidates(props: { modifier: CriterionModifier; @@ -515,12 +519,25 @@ export function makeQueryVariables(query: string, extraProps: {}) { interface IFilterType { scenes_filter?: InputMaybe; scene_count?: InputMaybe; + performers_filter?: InputMaybe; + performer_count?: InputMaybe; + galleries_filter?: InputMaybe; + gallery_count?: InputMaybe; + groups_filter?: InputMaybe; + group_count?: InputMaybe; + studios_filter?: InputMaybe; + studio_count?: InputMaybe; } export function setObjectFilter( out: IFilterType, mode: FilterMode, - relatedFilterOutput: SceneFilterType + relatedFilterOutput: + | SceneFilterType + | PerformerFilterType + | GalleryFilterType + | GroupFilterType + | StudioFilterType ) { const empty = Object.keys(relatedFilterOutput).length === 0; @@ -533,7 +550,49 @@ export function setObjectFilter( value: 0, }; } - out.scenes_filter = relatedFilterOutput; + out.scenes_filter = relatedFilterOutput as SceneFilterType; break; + case FilterMode.Performers: + // if empty, only get objects with performers + if (empty) { + out.performer_count = { + modifier: CriterionModifier.GreaterThan, + value: 0, + }; + } + 
out.performers_filter = relatedFilterOutput as PerformerFilterType; + break; + case FilterMode.Galleries: + // if empty, only get objects with galleries + if (empty) { + out.gallery_count = { + modifier: CriterionModifier.GreaterThan, + value: 0, + }; + } + out.galleries_filter = relatedFilterOutput as GalleryFilterType; + break; + case FilterMode.Groups: + // if empty, only get objects with groups + if (empty) { + out.group_count = { + modifier: CriterionModifier.GreaterThan, + value: 0, + }; + } + out.groups_filter = relatedFilterOutput as GroupFilterType; + break; + case FilterMode.Studios: + // if empty, only get objects with studios + if (empty) { + out.studio_count = { + modifier: CriterionModifier.GreaterThan, + value: 0, + }; + } + out.studios_filter = relatedFilterOutput as StudioFilterType; + break; + default: + throw new Error("Invalid filter mode"); } } diff --git a/ui/v2.5/src/components/List/Filters/OptionFilter.tsx b/ui/v2.5/src/components/List/Filters/OptionFilter.tsx index d9cfaf733..6753df09d 100644 --- a/ui/v2.5/src/components/List/Filters/OptionFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/OptionFilter.tsx @@ -1,10 +1,20 @@ import cloneDeep from "lodash-es/cloneDeep"; -import React from "react"; +import React, { useMemo } from "react"; import { Form } from "react-bootstrap"; import { CriterionValue, ModifierCriterion, + ModifierCriterionOption, } from "src/models/list-filter/criteria/criterion"; +import { ListFilterModel } from "src/models/list-filter/filter"; +import { Option, SidebarListFilter } from "./SidebarListFilter"; +import { CriterionModifier } from "src/core/generated-graphql"; +import { + getModifierCandidates, + ModifierValue, + modifierValueToModifier, +} from "./LabeledIdFilter"; +import { useIntl } from "react-intl"; interface IOptionsFilter { criterion: ModifierCriterion; @@ -83,3 +93,142 @@ export const OptionListFilter: React.FC = ({
); }; + +interface ISidebarFilter { + title?: React.ReactNode; + option: ModifierCriterionOption; + filter: ListFilterModel; + setFilter: (f: ListFilterModel) => void; + sectionID?: string; +} + +export const SidebarOptionFilter: React.FC = ({ + title, + option, + filter, + setFilter, + sectionID, +}) => { + const intl = useIntl(); + + const criteria = filter.criteriaFor( + option.type + ) as ModifierCriterion[]; + const criterion = criteria.length > 0 ? criteria[0] : null; + const { options: criterionOptions = [] } = option; + const currentValues = criteria.flatMap((c) => c.value as string[]); + + const hasNullModifiers = + option.modifierOptions.includes(CriterionModifier.IsNull) && + option.modifierOptions.includes(CriterionModifier.NotNull); + + const selected: Option[] = useMemo(() => { + if (!criterion) return []; + + if (criterion.modifier === CriterionModifier.IsNull) { + return [ + { + id: "none", + label: intl.formatMessage({ id: "criterion_modifier_values.none" }), + }, + ]; + } else if (criterion.modifier === CriterionModifier.NotNull) { + return [ + { + id: "any", + label: intl.formatMessage({ id: "criterion_modifier_values.any" }), + }, + ]; + } + + return criterionOptions + .filter((o) => currentValues.includes(o.toString())) + .map((o) => ({ + id: o.toString(), + label: o.toLocaleString(), + })); + }, [criterion, currentValues, criterionOptions, intl]); + + const modifierCandidates: Option[] = useMemo(() => { + if (!hasNullModifiers) return []; + + const c = getModifierCandidates({ + modifier: criterion?.modifier ?? 
option.defaultModifier, + defaultModifier: option.defaultModifier, + hasExcluded: false, + hasSelected: selected.length > 0, + singleValue: true, // so that it doesn't include any_of + }); + + return c.map((v) => { + const messageID = `criterion_modifier_values.${v}`; + + return { + id: v, + label: `(${intl.formatMessage({ + id: messageID, + })})`, + className: "modifier-object", + canExclude: false, + }; + }); + }, [criterion, option, selected, hasNullModifiers, intl]); + + const options = useMemo(() => { + const o = criterionOptions + .filter((oo) => !currentValues.includes(oo.toString())) + .map((oo) => ({ + id: oo.toString(), + label: oo.toString(), + })); + + return [...modifierCandidates, ...o]; + }, [criterionOptions, currentValues, modifierCandidates]); + + function onSelect(item: Option) { + const newCriterion = criterion ? criterion.clone() : option.makeCriterion(); + + if (item.className === "modifier-object") { + newCriterion.modifier = modifierValueToModifier(item.id as ModifierValue); + newCriterion.value = []; + setFilter(filter.replaceCriteria(option.type, [newCriterion])); + return; + } + + const cv = newCriterion.value as string[]; + if (cv.includes(item.id)) { + return; + } else { + newCriterion.value = [...cv, item.id]; + } + + setFilter(filter.replaceCriteria(option.type, [newCriterion])); + } + + function onUnselect(item: Option) { + if (item.className === "modifier-object") { + const newCriterion = criterion + ? 
criterion.clone() + : option.makeCriterion(); + newCriterion.modifier = option.defaultModifier; + setFilter(filter.replaceCriteria(option.type, [newCriterion])); + return; + } + + setFilter(filter.removeCriterion(option.type)); + } + + return ( + <> + + + ); +}; diff --git a/ui/v2.5/src/components/List/Filters/PerformersFilter.tsx b/ui/v2.5/src/components/List/Filters/PerformersFilter.tsx index 3df19593f..7e0dee855 100644 --- a/ui/v2.5/src/components/List/Filters/PerformersFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/PerformersFilter.tsx @@ -1,5 +1,8 @@ import React, { ReactNode, useMemo } from "react"; -import { PerformersCriterion } from "src/models/list-filter/criteria/performers"; +import { + PerformersCriterion, + PerformersCriterionOption, +} from "src/models/list-filter/criteria/performers"; import { CriterionModifier, FindPerformersForSelectQueryVariables, @@ -18,6 +21,7 @@ import { useLabeledIdFilterState, } from "./LabeledIdFilter"; import { SidebarListFilter } from "./SidebarListFilter"; +import { FormattedMessage } from "react-intl"; interface IPerformersFilter { criterion: PerformersCriterion; @@ -106,12 +110,19 @@ const PerformersFilter: React.FC = ({ export const SidebarPerformersFilter: React.FC<{ title?: ReactNode; - option: CriterionOption; + option?: CriterionOption; filter: ListFilterModel; setFilter: (f: ListFilterModel) => void; filterHook?: (f: ListFilterModel) => ListFilterModel; sectionID?: string; -}> = ({ title, option, filter, setFilter, filterHook, sectionID }) => { +}> = ({ + title = , + option = PerformersCriterionOption, + filter, + setFilter, + filterHook, + sectionID = "performers", +}) => { const state = useLabeledIdFilterState({ filter, setFilter, @@ -120,7 +131,14 @@ export const SidebarPerformersFilter: React.FC<{ useQuery: usePerformerQueryFilter, }); - return ; + return ( + + ); }; export default PerformersFilter; diff --git a/ui/v2.5/src/components/List/Filters/RatingFilter.tsx 
b/ui/v2.5/src/components/List/Filters/RatingFilter.tsx index 9f5c8f8c9..8a07d54f9 100644 --- a/ui/v2.5/src/components/List/Filters/RatingFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/RatingFilter.tsx @@ -13,7 +13,10 @@ import { defaultRatingSystemOptions, } from "src/utils/rating"; import { useConfigurationContext } from "src/hooks/Config"; -import { RatingCriterion } from "src/models/list-filter/criteria/rating"; +import { + RatingCriterion, + RatingCriterionOption, +} from "src/models/list-filter/criteria/rating"; import { ListFilterModel } from "src/models/list-filter/filter"; import { Option, SidebarListFilter } from "./SidebarListFilter"; @@ -74,7 +77,7 @@ export const RatingFilter: React.FC = ({ interface ISidebarFilter { title?: React.ReactNode; - option: CriterionOption; + option?: CriterionOption; filter: ListFilterModel; setFilter: (f: ListFilterModel) => void; sectionID?: string; @@ -84,11 +87,11 @@ const any = "any"; const none = "none"; export const SidebarRatingFilter: React.FC = ({ - title, - option, + title = , + option = RatingCriterionOption, filter, setFilter, - sectionID, + sectionID = "rating", }) => { const intl = useIntl(); @@ -193,6 +196,7 @@ export const SidebarRatingFilter: React.FC = ({ return ( <> void; sectionID?: string; @@ -55,11 +57,11 @@ function snapToStep(value: number): number { } export const SidebarDurationFilter: React.FC = ({ - title, - option, + title = , + option = DurationCriterionOption, filter, setFilter, - sectionID, + sectionID = "duration", }) => { const criteria = filter.criteriaFor(option.type) as DurationCriterion[]; const criterion = criteria.length > 0 ? 
criteria[0] : null; diff --git a/ui/v2.5/src/components/List/Filters/StudiosFilter.tsx b/ui/v2.5/src/components/List/Filters/StudiosFilter.tsx index e922e688a..3e28bd927 100644 --- a/ui/v2.5/src/components/List/Filters/StudiosFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/StudiosFilter.tsx @@ -5,7 +5,10 @@ import { useFindStudiosForSelectQuery, } from "src/core/generated-graphql"; import { HierarchicalObjectsFilter } from "./SelectableFilter"; -import { StudiosCriterion } from "src/models/list-filter/criteria/studios"; +import { + StudiosCriterion, + StudiosCriterionOption, +} from "src/models/list-filter/criteria/studios"; import { sortByRelevance } from "src/utils/query"; import { CriterionOption } from "src/models/list-filter/criteria/criterion"; import { ListFilterModel } from "src/models/list-filter/filter"; @@ -16,6 +19,7 @@ import { useLabeledIdFilterState, } from "./LabeledIdFilter"; import { SidebarListFilter } from "./SidebarListFilter"; +import { FormattedMessage } from "react-intl"; interface IStudiosFilter { criterion: StudiosCriterion; @@ -94,12 +98,19 @@ const StudiosFilter: React.FC = ({ export const SidebarStudiosFilter: React.FC<{ title?: ReactNode; - option: CriterionOption; + option?: CriterionOption; filter: ListFilterModel; setFilter: (f: ListFilterModel) => void; filterHook?: (f: ListFilterModel) => ListFilterModel; sectionID?: string; -}> = ({ title, option, filter, setFilter, filterHook, sectionID }) => { +}> = ({ + title = , + option = StudiosCriterionOption, + filter, + setFilter, + filterHook, + sectionID = "studios", +}) => { const state = useLabeledIdFilterState({ filter, setFilter, @@ -111,7 +122,14 @@ export const SidebarStudiosFilter: React.FC<{ includeSubMessageID: "subsidiary_studios", }); - return ; + return ( + + ); }; export default StudiosFilter; diff --git a/ui/v2.5/src/components/List/Filters/TagsFilter.tsx b/ui/v2.5/src/components/List/Filters/TagsFilter.tsx index f4c618ffa..446a90331 100644 --- 
a/ui/v2.5/src/components/List/Filters/TagsFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/TagsFilter.tsx @@ -16,7 +16,11 @@ import { useLabeledIdFilterState, } from "./LabeledIdFilter"; import { SidebarListFilter } from "./SidebarListFilter"; -import { TagsCriterion } from "src/models/list-filter/criteria/tags"; +import { + TagsCriterion, + TagsCriterionOption, +} from "src/models/list-filter/criteria/tags"; +import { FormattedMessage } from "react-intl"; interface ITagsFilter { criterion: TagsCriterion; @@ -99,12 +103,19 @@ const TagsFilter: React.FC = ({ criterion, setCriterion }) => { export const SidebarTagsFilter: React.FC<{ title?: ReactNode; - option: CriterionOption; + option?: CriterionOption; filter: ListFilterModel; setFilter: (f: ListFilterModel) => void; filterHook?: (f: ListFilterModel) => ListFilterModel; sectionID?: string; -}> = ({ title, option, filter, setFilter, filterHook, sectionID }) => { +}> = ({ + title = , + option = TagsCriterionOption, + filter, + setFilter, + filterHook, + sectionID = "tags", +}) => { const state = useLabeledIdFilterState({ filter, setFilter, @@ -115,7 +126,14 @@ export const SidebarTagsFilter: React.FC<{ includeSubMessageID: "sub_tags", }); - return ; + return ( + + ); }; export default TagsFilter; diff --git a/ui/v2.5/src/components/List/ListOperationButtons.tsx b/ui/v2.5/src/components/List/ListOperationButtons.tsx index b377cedba..2a4232fb3 100644 --- a/ui/v2.5/src/components/List/ListOperationButtons.tsx +++ b/ui/v2.5/src/components/List/ListOperationButtons.tsx @@ -6,7 +6,9 @@ import { IconDefinition } from "@fortawesome/fontawesome-svg-core"; import { Icon } from "../Shared/Icon"; import { faEllipsisH, + faPencil, faPencilAlt, + faPlay, faTrash, } from "@fortawesome/free-solid-svg-icons"; import cx from "classnames"; @@ -58,6 +60,7 @@ export interface IListFilterOperation { isDisplayed?: () => boolean; icon?: IconDefinition; buttonVariant?: string; + className?: string; } interface IListOperationButtonsProps 
{ @@ -264,3 +267,148 @@ export const ListOperationButtons: React.FC = ({ ); }; + +export const ListOperations: React.FC<{ + items: number; + hasSelection?: boolean; + operations?: IListFilterOperation[]; + onEdit?: () => void; + onDelete?: () => void; + onPlay?: () => void; + operationsClassName?: string; + operationsMenuClassName?: string; +}> = ({ + items, + hasSelection = false, + operations = [], + onEdit, + onDelete, + onPlay, + operationsClassName = "list-operations", + operationsMenuClassName, +}) => { + const intl = useIntl(); + + const dropdownOperations = useMemo(() => { + return operations.filter((o) => { + if (o.icon) { + return false; + } + + if (!o.isDisplayed) { + return true; + } + + return o.isDisplayed(); + }); + }, [operations]); + + const buttons = useMemo(() => { + const otherButtons = (operations ?? []).filter((o) => { + if (!o.icon) { + return false; + } + + if (!o.isDisplayed) { + return true; + } + + return o.isDisplayed(); + }); + + const ret: React.ReactNode[] = []; + + function addButton(b: React.ReactNode | null) { + if (b) { + ret.push(b); + } + } + + const playButton = + !!items && onPlay ? ( + + ) : null; + + const editButton = + hasSelection && onEdit ? ( + + ) : null; + + const deleteButton = + hasSelection && onDelete ? ( + + ) : null; + + addButton(playButton); + addButton(editButton); + addButton(deleteButton); + + otherButtons.forEach((button) => { + addButton( + + ); + }); + + if (ret.length === 0) { + return null; + } + + return ret; + }, [operations, hasSelection, onDelete, onEdit, onPlay, items, intl]); + + if (dropdownOperations.length === 0 && !buttons) { + return null; + } + + return ( +
+ + {buttons} + + {dropdownOperations.length > 0 && ( + + {dropdownOperations.map((o) => ( + + ))} + + )} + +
+ ); +}; diff --git a/ui/v2.5/src/components/List/styles.scss b/ui/v2.5/src/components/List/styles.scss index 5f1b4da2a..e7a4caf02 100644 --- a/ui/v2.5/src/components/List/styles.scss +++ b/ui/v2.5/src/components/List/styles.scss @@ -726,6 +726,24 @@ input[type="range"].zoom-slider { min-height: 2em; } +.duplicate-sub-options { + margin-left: 2rem; + padding-left: 0.5rem; + + .duplicate-sub-option { + align-items: center; + cursor: pointer; + display: flex; + height: 2em; + opacity: 0.8; + padding-left: 0.5rem; + + &:hover { + background-color: rgba(138, 155, 168, 0.15); + } + } +} + .tilted { transform: rotate(45deg); } @@ -1120,7 +1138,8 @@ input[type="range"].zoom-slider { justify-content: flex-end; } -.scene-list-toolbar .selected-items-info { +.scene-list-toolbar .selected-items-info, +.gallery-list-toolbar .selected-items-info { justify-content: flex-start; } diff --git a/ui/v2.5/src/components/List/util.ts b/ui/v2.5/src/components/List/util.ts index 707346848..d870c631f 100644 --- a/ui/v2.5/src/components/List/util.ts +++ b/ui/v2.5/src/components/List/util.ts @@ -139,6 +139,7 @@ function useEmptyFilter(props: { export interface IFilterStateHook { filterMode: GQL.FilterMode; + defaultFilter?: ListFilterModel; defaultSort?: string; view?: View; useURL?: boolean; @@ -149,7 +150,14 @@ export function useFilterState( config?: GQL.ConfigDataFragment; } ) { - const { filterMode, defaultSort, config, view, useURL } = props; + const { + filterMode, + defaultSort, + config, + view, + useURL, + defaultFilter: propDefaultFilter, + } = props; const [filter, setFilterState] = useState( () => @@ -158,10 +166,13 @@ export function useFilterState( const emptyFilter = useEmptyFilter({ filterMode, defaultSort, config }); - const { defaultFilter } = useDefaultFilter(emptyFilter, view); + const { defaultFilter: defaultFilterFromConfig } = useDefaultFilter( + emptyFilter, + view + ); const { setFilter } = useFilterURL(filter, setFilterState, { - defaultFilter, + defaultFilter: 
propDefaultFilter ?? defaultFilterFromConfig, active: useURL, }); diff --git a/ui/v2.5/src/components/List/views.ts b/ui/v2.5/src/components/List/views.ts index 5b9f9798f..4ea4e46d8 100644 --- a/ui/v2.5/src/components/List/views.ts +++ b/ui/v2.5/src/components/List/views.ts @@ -13,6 +13,7 @@ export enum View { TagScenes = "tag_scenes", TagImages = "tag_images", TagPerformers = "tag_performers", + TagGroups = "tag_groups", PerformerScenes = "performer_scenes", PerformerGalleries = "performer_galleries", diff --git a/ui/v2.5/src/components/MainNavbar.tsx b/ui/v2.5/src/components/MainNavbar.tsx index caee46f0c..c70994476 100644 --- a/ui/v2.5/src/components/MainNavbar.tsx +++ b/ui/v2.5/src/components/MainNavbar.tsx @@ -103,6 +103,7 @@ const allMenuItems: IMenuItem[] = [ href: "/scenes", icon: faPlayCircle, hotkey: "g s", + userCreatable: true, }, { name: "images", diff --git a/ui/v2.5/src/components/Performers/EditPerformersDialog.tsx b/ui/v2.5/src/components/Performers/EditPerformersDialog.tsx index 677ac3aa1..d60118d4b 100644 --- a/ui/v2.5/src/components/Performers/EditPerformersDialog.tsx +++ b/ui/v2.5/src/components/Performers/EditPerformersDialog.tsx @@ -42,7 +42,8 @@ const performerFields = [ "gender", "birthdate", "death_date", - "career_length", + "career_start", + "career_end", "country", "ethnicity", "eye_color", @@ -363,8 +364,15 @@ export const EditPerformersDialog: React.FC = ( {renderTextField("piercings", updateInput.piercings, (v) => setUpdateField({ piercings: v }) )} - {renderTextField("career_length", updateInput.career_length, (v) => - setUpdateField({ career_length: v }) + {renderTextField( + "career_start", + updateInput.career_start?.toString(), + (v) => setUpdateField({ career_start: v ? parseInt(v) : undefined }) + )} + {renderTextField( + "career_end", + updateInput.career_end?.toString(), + (v) => setUpdateField({ career_end: v ? 
parseInt(v) : undefined }) )} diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx index 95e03ff8b..473bbbd47 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx @@ -12,6 +12,7 @@ import { FormatHeight, FormatPenisLength, FormatWeight, + formatYearRange, } from "../PerformerList"; import { PatchComponent } from "src/patch"; import { CustomFields } from "src/components/Shared/CustomFields"; @@ -174,7 +175,10 @@ export const PerformerDetailsPanel: React.FC = /> diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerEditPanel.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerEditPanel.tsx index 7bb8d399a..98871bf9a 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerEditPanel.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerEditPanel.tsx @@ -44,7 +44,7 @@ import { yupInputNumber, yupInputEnum, yupDateString, - yupUniqueAliases, + yupRequiredStringArray, yupUniqueStringList, } from "src/utils/yup"; import { useTagsEdit } from "src/hooks/tagsEdit"; @@ -110,7 +110,7 @@ export const PerformerEditPanel: React.FC = ({ const schema = yup.object({ name: yup.string().required(), disambiguation: yup.string().ensure(), - alias_list: yupUniqueAliases(intl, "name"), + alias_list: yupRequiredStringArray(intl).defined(), gender: yupInputEnum(GQL.GenderEnum).nullable().defined(), birthdate: yupDateString(intl), death_date: yupDateString(intl), @@ -126,7 +126,8 @@ export const PerformerEditPanel: React.FC = ({ circumcised: yupInputEnum(GQL.CircumisedEnum).nullable().defined(), tattoos: yup.string().ensure(), piercings: yup.string().ensure(), - career_length: yup.string().ensure(), + career_start: yupInputNumber().positive().nullable().defined(), + career_end: 
yupInputNumber().positive().nullable().defined(), urls: yupUniqueStringList(intl), details: yup.string().ensure(), tag_ids: yup.array(yup.string().required()).defined(), @@ -155,7 +156,8 @@ export const PerformerEditPanel: React.FC = ({ circumcised: performer.circumcised ?? null, tattoos: performer.tattoos ?? "", piercings: performer.piercings ?? "", - career_length: performer.career_length ?? "", + career_start: performer.career_start ?? null, + career_end: performer.career_end ?? null, urls: performer.urls ?? [], details: performer.details ?? "", tag_ids: (performer.tags ?? []).map((t) => t.id), @@ -256,8 +258,11 @@ export const PerformerEditPanel: React.FC = ({ if (state.fake_tits) { formik.setFieldValue("fake_tits", state.fake_tits); } - if (state.career_length) { - formik.setFieldValue("career_length", state.career_length); + if (state.career_start) { + formik.setFieldValue("career_start", state.career_start); + } + if (state.career_end) { + formik.setFieldValue("career_end", state.career_end); } if (state.tattoos) { formik.setFieldValue("tattoos", state.tattoos); @@ -747,7 +752,8 @@ export const PerformerEditPanel: React.FC = ({ {renderInputField("tattoos", "textarea")} {renderInputField("piercings", "textarea")} - {renderInputField("career_length")} + {renderInputField("career_start", "number")} + {renderInputField("career_end", "number")} {renderURLListField("urls", onScrapePerformerURL, urlScrapable)} diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerGalleriesPanel.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerGalleriesPanel.tsx index 5a9d0b81d..44b0401e9 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerGalleriesPanel.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerGalleriesPanel.tsx @@ -1,6 +1,6 @@ import React from "react"; import * as GQL from "src/core/generated-graphql"; -import { GalleryList } from "src/components/Galleries/GalleryList"; +import { 
FilteredGalleryList } from "src/components/Galleries/GalleryList"; import { usePerformerFilterHook } from "src/core/performers"; import { View } from "src/components/List/views"; import { PatchComponent } from "src/patch"; @@ -14,7 +14,7 @@ export const PerformerGalleriesPanel: React.FC = PatchComponent("PerformerGalleriesPanel", ({ active, performer }) => { const filterHook = usePerformerFilterHook(performer); return ( - = PatchComponent("PerformerGroupsPanel", ({ active, performer }) => { const filterHook = usePerformerFilterHook(performer); return ( - = ( const [fakeTits, setFakeTits] = useState>( new ScrapeResult(props.performer.fake_tits, props.scraped.fake_tits) ); - const [careerLength, setCareerLength] = useState>( - new ScrapeResult( - props.performer.career_length, - props.scraped.career_length + const [careerStart, setCareerStart] = useState>( + new ScrapeResult( + props.performer.career_start, + props.scraped.career_start + ) + ); + const [careerEnd, setCareerEnd] = useState>( + new ScrapeResult( + props.performer.career_end, + props.scraped.career_end ) ); const [tattoos, setTattoos] = useState>( @@ -347,7 +354,8 @@ export const PerformerScrapeDialog: React.FC = ( fakeTits, penisLength, circumcised, - careerLength, + careerStart, + careerEnd, tattoos, piercings, urls, @@ -379,7 +387,8 @@ export const PerformerScrapeDialog: React.FC = ( height: height.getNewValue(), measurements: measurements.getNewValue(), fake_tits: fakeTits.getNewValue(), - career_length: careerLength.getNewValue(), + career_start: careerStart.getNewValue(), + career_end: careerEnd.getNewValue(), tattoos: tattoos.getNewValue(), piercings: piercings.getNewValue(), urls: urls.getNewValue(), @@ -493,11 +502,17 @@ export const PerformerScrapeDialog: React.FC = ( result={fakeTits} onChange={(value) => setFakeTits(value)} /> - setCareerLength(value)} + setCareerStart(value)} + /> + setCareerEnd(value)} /> = const filterHook = usePerformerFilterHook(performer); return ( - { const intl = 
useIntl(); @@ -112,6 +137,14 @@ export const FormatWeight = (weight?: number | null) => { ); }; +export function formatYearRange( + start?: number | null, + end?: number | null +): string | undefined { + if (!start && !end) return undefined; + return `${start ?? ""} - ${end ?? ""}`; +} + export const FormatCircumcised = (circumcised?: GQL.CircumisedEnum | null) => { const intl = useIntl(); if (!circumcised) { @@ -165,193 +198,292 @@ interface IPerformerList { extraOperations?: IItemListOperation[]; } -export const PerformerList: React.FC = PatchComponent( +const PerformerList: React.FC<{ + performers: GQL.PerformerDataFragment[]; + filter: ListFilterModel; + selectedIds: Set; + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; + extraCriteria?: IPerformerCardExtraCriteria; +}> = PatchComponent( "PerformerList", - ({ filterHook, view, alterQuery, extraCriteria, extraOperations = [] }) => { + ({ performers, filter, selectedIds, onSelectChange, extraCriteria }) => { + if (performers.length === 0) { + return null; + } + + if (filter.displayMode === DisplayMode.Grid) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.List) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.Tagger) { + return ; + } + + return null; + } +); + +const PerformerFilterSidebarSections = PatchContainerComponent( + "FilteredPerformerList.SidebarSections" +); + +const SidebarContent: React.FC<{ + filter: ListFilterModel; + setFilter: (filter: ListFilterModel) => void; + filterHook?: (filter: ListFilterModel) => ListFilterModel; + view?: View; + sidebarOpen: boolean; + onClose?: () => void; + showEditFilter: (editingCriterion?: string) => void; + count?: number; + focus?: ReturnType; +}> = ({ + filter, + setFilter, + filterHook, + view, + showEditFilter, + sidebarOpen, + onClose, + count, + focus, +}) => { + const showResultsId = + count !== undefined ? 
"actions.show_count_results" : "actions.show_results"; + + const AgeCriterionOption = PerformerListFilterOptions.criterionOptions.find( + (c) => c.type === "age" + ); + + return ( + <> + + + + + + } + data-type={FavoritePerformerCriterionOption.type} + option={FavoritePerformerCriterionOption} + filter={filter} + setFilter={setFilter} + sectionID="favourite" + /> + } + option={GenderCriterionOption} + filter={filter} + setFilter={setFilter} + sectionID="gender" + /> + } + option={AgeCriterionOption!} + filter={filter} + setFilter={setFilter} + sectionID="age" + /> + + +
+ +
+ + ); +}; + +function useViewRandom(filter: ListFilterModel, count: number) { + const history = useHistory(); + + const viewRandom = useCallback(async () => { + // query for a random performer + if (count === 0) { + return; + } + + const index = Math.floor(Math.random() * count); + const filterCopy = cloneDeep(filter); + filterCopy.itemsPerPage = 1; + filterCopy.currentPage = index + 1; + const singleResult = await queryFindPerformers(filterCopy); + if (singleResult.data.findPerformers.performers.length === 1) { + const { id } = singleResult.data.findPerformers.performers[0]; + // navigate to the image player page + history.push(`/performers/${id}`); + } + }, [history, filter, count]); + + return viewRandom; +} + +function useAddKeybinds(filter: ListFilterModel, count: number) { + const viewRandom = useViewRandom(filter, count); + + useEffect(() => { + Mousetrap.bind("p r", () => { + viewRandom(); + }); + + return () => { + Mousetrap.unbind("p r"); + }; + }, [viewRandom]); +} + +export const FilteredPerformerList = PatchComponent( + "FilteredPerformerList", + (props: IPerformerList) => { const intl = useIntl(); const history = useHistory(); - const [mergePerformers, setMergePerformers] = useState< - GQL.SelectPerformerDataFragment[] | undefined - >(undefined); - const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); - const [isExportAll, setIsExportAll] = useState(false); - const filterMode = GQL.FilterMode.Performers; + const searchFocus = useFocus(); - const otherOperations = [ - ...extraOperations, - { - text: intl.formatMessage({ id: "actions.open_random" }), - onClick: openRandom, - }, - { - text: `${intl.formatMessage({ id: "actions.merge" })}…`, - onClick: merge, - isDisplayed: showWhenSelected, - }, - { - text: intl.formatMessage({ id: "actions.export" }), - onClick: onExport, - isDisplayed: showWhenSelected, - }, - { - text: intl.formatMessage({ id: "actions.export_all" }), - onClick: onExportAll, - }, - ]; + const { + filterHook, + view, + 
alterQuery, + extraCriteria, + extraOperations = [], + } = props; - function addKeybinds( - result: GQL.FindPerformersQueryResult, - filter: ListFilterModel - ) { - Mousetrap.bind("p r", () => { - openRandom(result, filter); + // States + const { + showSidebar, + setShowSidebar, + sectionOpen, + setSectionOpen, + loading: sidebarStateLoading, + } = useSidebarState(view); + + const { filterState, queryResult, modalState, listSelect, showEditFilter } = + useFilteredItemList({ + filterStateProps: { + filterMode: GQL.FilterMode.Performers, + view, + useURL: alterQuery, + }, + queryResultProps: { + useResult: useFindPerformers, + getCount: (r) => r.data?.findPerformers.count ?? 0, + getItems: (r) => r.data?.findPerformers.performers ?? [], + filterHook, + }, + }); + + const { filter, setFilter } = filterState; + + const { effectiveFilter, result, cachedResult, items, totalCount } = + queryResult; + + const { + selectedIds, + selectedItems, + onSelectChange, + onSelectAll, + onSelectNone, + onInvertSelection, + hasSelection, + } = listSelect; + + const { modal, showModal, closeModal } = modalState; + + // Utility hooks + const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ + filter, + setFilter, + }); + + useAddKeybinds(filter, totalCount); + useFilteredSidebarKeybinds({ + showSidebar, + setShowSidebar, + }); + + useEffect(() => { + Mousetrap.bind("e", () => { + if (hasSelection) { + onEdit?.(); + } + }); + + Mousetrap.bind("d d", () => { + if (hasSelection) { + onDelete?.(); + } }); return () => { - Mousetrap.unbind("p r"); + Mousetrap.unbind("e"); + Mousetrap.unbind("d d"); }; - } + }); - async function openRandom( - result: GQL.FindPerformersQueryResult, - filter: ListFilterModel - ) { - if (result.data?.findPerformers) { - const { count } = result.data.findPerformers; - const index = Math.floor(Math.random() * count); - const filterCopy = cloneDeep(filter); - filterCopy.itemsPerPage = 1; - filterCopy.currentPage = index + 1; - const 
singleResult = await queryFindPerformers(filterCopy); - if (singleResult.data.findPerformers.performers.length === 1) { - const { id } = singleResult.data.findPerformers.performers[0]!; - history.push(`/performers/${id}`); - } - } - } + const onCloseEditDelete = useCloseEditDelete({ + closeModal, + onSelectNone, + result, + }); - async function merge( - result: GQL.FindPerformersQueryResult, - filter: ListFilterModel, - selectedIds: Set - ) { - const selected = - result.data?.findPerformers.performers.filter((p) => - selectedIds.has(p.id) - ) ?? []; - setMergePerformers(selected); - } + const viewRandom = useViewRandom(filter, totalCount); - async function onExport() { - setIsExportAll(false); - setIsExportDialogOpen(true); - } - - async function onExportAll() { - setIsExportAll(true); - setIsExportDialogOpen(true); - } - - function renderContent( - result: GQL.FindPerformersQueryResult, - filter: ListFilterModel, - selectedIds: Set, - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void - ) { - function renderMergeDialog() { - if (mergePerformers) { - return ( - { - setMergePerformers(undefined); - if (mergedId) { - history.push(`/performers/${mergedId}`); - } - }} - show - /> - ); - } - } - - function maybeRenderPerformerExportDialog() { - if (isExportDialogOpen) { - return ( - <> - setIsExportDialogOpen(false)} - /> - - ); - } - } - - function renderPerformers() { - if (!result.data?.findPerformers) return; - - if (filter.displayMode === DisplayMode.Grid) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.List) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.Tagger) { - return ( - - ); - } - } - - return ( - <> - {renderMergeDialog()} - {maybeRenderPerformerExportDialog()} - {renderPerformers()} - + function onExport(all: boolean) { + showModal( + closeModal()} + /> ); } - function renderEditDialog( - selectedPerformers: GQL.SlimPerformerDataFragment[], - onClose: (applied: boolean) => void - ) { - return ( - 
+ function onEdit() { + showModal( + ); } - function renderDeleteDialog( - selectedPerformers: GQL.SlimPerformerDataFragment[], - onClose: (confirmed: boolean) => void - ) { - return ( + function onDelete() { + showModal( = PatchComponent( ); } - return ( - - { + closeModal(); + if (mergedId) { + history.push(`/performers/${mergedId}`); + } + }} + show /> - + ); + } + + const convertedExtraOperations: IListFilterOperation[] = + extraOperations.map((o) => ({ + ...o, + isDisplayed: o.isDisplayed + ? () => o.isDisplayed!(result, filter, selectedIds) + : undefined, + onClick: () => { + o.onClick(result, filter, selectedIds); + }, + })); + + const otherOperations: IListFilterOperation[] = [ + ...convertedExtraOperations, + { + text: intl.formatMessage({ id: "actions.select_all" }), + onClick: () => onSelectAll(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.select_none" }), + onClick: () => onSelectNone(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.invert_selection" }), + onClick: () => onInvertSelection(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.open_random" }), + onClick: viewRandom, + }, + { + text: `${intl.formatMessage({ id: "actions.merge" })}…`, + onClick: onMerge, + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export" }), + onClick: () => onExport(false), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export_all" }), + onClick: () => onExport(true), + }, + ]; + + // render + if (sidebarStateLoading) return null; + + const operations = ( + + ); + + return ( +
+ {modal} + + + + setShowSidebar(false)}> + setShowSidebar(false)} + count={cachedResult.loading ? undefined : totalCount} + focus={searchFocus} + /> + + setShowSidebar(!showSidebar)} + > + + + showEditFilter(c.criterionOption.type)} + onRemoveCriterion={removeCriterion} + onRemoveAll={clearAllCriteria} + /> + +
+ setFilter(filter.changePage(page))} + /> + +
+ + + + + + {totalCount > filter.itemsPerPage && ( +
+
+ +
+
+ )} +
+
+
+
); } ); diff --git a/ui/v2.5/src/components/Performers/PerformerListTable.tsx b/ui/v2.5/src/components/Performers/PerformerListTable.tsx index 58538e7e2..3b500cee6 100644 --- a/ui/v2.5/src/components/Performers/PerformerListTable.tsx +++ b/ui/v2.5/src/components/Performers/PerformerListTable.tsx @@ -17,6 +17,7 @@ import { FormatHeight, FormatPenisLength, FormatWeight, + formatYearRange, } from "./PerformerList"; import TextUtils from "src/utils/text"; import { getCountryByISO } from "src/utils/country"; @@ -188,7 +189,7 @@ export const PerformerListTable: React.FC = ( ); const CareerLengthCell = (performer: GQL.PerformerDataFragment) => ( - {performer.career_length} + <>{formatYearRange(performer.career_start, performer.career_end) ?? ""} ); const SceneCountCell = (performer: GQL.PerformerDataFragment) => ( diff --git a/ui/v2.5/src/components/Performers/PerformerMergeDialog.tsx b/ui/v2.5/src/components/Performers/PerformerMergeDialog.tsx index 834d2ac76..efa51f1db 100644 --- a/ui/v2.5/src/components/Performers/PerformerMergeDialog.tsx +++ b/ui/v2.5/src/components/Performers/PerformerMergeDialog.tsx @@ -19,6 +19,7 @@ import { useToast } from "src/hooks/Toast"; import { faExchangeAlt, faSignInAlt } from "@fortawesome/free-solid-svg-icons"; import { ScrapeDialog } from "../Shared/ScrapeDialog/ScrapeDialog"; import { + ScrapedCustomFieldRows, ScrapedImageRow, ScrapedInputGroupRow, ScrapedStringListRow, @@ -27,9 +28,9 @@ import { import { ModalComponent } from "../Shared/Modal"; import { sortStoredIdObjects } from "src/utils/data"; import { + CustomFieldScrapeResults, ObjectListScrapeResult, ScrapeResult, - ZeroableScrapeResult, hasScrapedValues, } from "../Shared/ScrapeDialog/scrapeResult"; import { ScrapedTagsRow } from "../Shared/ScrapeDialog/ScrapedObjectsRow"; @@ -40,39 +41,6 @@ import { import { PerformerSelect } from "./PerformerSelect"; import { uniq } from "lodash-es"; -/* eslint-disable-next-line @typescript-eslint/no-explicit-any */ -type 
CustomFieldScrapeResults = Map>; - -// There are a bunch of similar functions in PerformerScrapeDialog, but since we don't support -// scraping custom fields, this one is only needed here. The `renderScraped` naming is kept the same -// for consistency. -function renderScrapedCustomFieldRows( - results: CustomFieldScrapeResults, - onChange: (newCustomFields: CustomFieldScrapeResults) => void -) { - return ( - <> - {Array.from(results.entries()).map(([field, result]) => { - const fieldName = `custom_${field}`; - return ( - { - const newResults = new Map(results); - newResults.set(field, newResult); - onChange(newResults); - }} - /> - ); - })} - - ); -} - type MergeOptions = { values: GQL.PerformerUpdateInput; }; @@ -134,8 +102,11 @@ const PerformerMergeDetails: React.FC = ({ const [fakeTits, setFakeTits] = useState>( new ScrapeResult(dest.fake_tits) ); - const [careerLength, setCareerLength] = useState>( - new ScrapeResult(dest.career_length) + const [careerStart, setCareerStart] = useState>( + new ScrapeResult(dest.career_start?.toString()) + ); + const [careerEnd, setCareerEnd] = useState>( + new ScrapeResult(dest.career_end?.toString()) ); const [tattoos, setTattoos] = useState>( new ScrapeResult(dest.tattoos) @@ -296,11 +267,18 @@ const PerformerMergeDetails: React.FC = ({ !dest.fake_tits ) ); - setCareerLength( + setCareerStart( new ScrapeResult( - dest.career_length, - sources.find((s) => s.career_length)?.career_length, - !dest.career_length + dest.career_start?.toString(), + sources.find((s) => s.career_start)?.career_start?.toString(), + !dest.career_start + ) + ); + setCareerEnd( + new ScrapeResult( + dest.career_end?.toString(), + sources.find((s) => s.career_end)?.career_end?.toString(), + !dest.career_end ) ); setTattoos( @@ -410,7 +388,8 @@ const PerformerMergeDetails: React.FC = ({ penisLength, measurements, fakeTits, - careerLength, + careerStart, + careerEnd, tattoos, piercings, urls, @@ -436,7 +415,8 @@ const PerformerMergeDetails: React.FC = ({ 
penisLength, measurements, fakeTits, - careerLength, + careerStart, + careerEnd, tattoos, piercings, urls, @@ -552,10 +532,16 @@ const PerformerMergeDetails: React.FC = ({ onChange={(value) => setFakeTits(value)} /> setCareerLength(value)} + field="career_start" + title={intl.formatMessage({ id: "career_start" })} + result={careerStart} + onChange={(value) => setCareerStart(value)} + /> + setCareerEnd(value)} /> = ({ result={image} onChange={(value) => setImage(value)} /> - {hasCustomFieldValues && - renderScrapedCustomFieldRows(customFields, (newCustomFields) => - setCustomFields(newCustomFields) - )} + {hasCustomFieldValues && ( + setCustomFields(newCustomFields)} + /> + )} ); } @@ -642,7 +630,12 @@ const PerformerMergeDetails: React.FC = ({ : undefined, measurements: measurements.getNewValue(), fake_tits: fakeTits.getNewValue(), - career_length: careerLength.getNewValue(), + career_start: careerStart.getNewValue() + ? parseInt(careerStart.getNewValue()!) + : undefined, + career_end: careerEnd.getNewValue() + ? parseInt(careerEnd.getNewValue()!) 
+ : undefined, tattoos: tattoos.getNewValue(), piercings: piercings.getNewValue(), urls: urls.getNewValue(), diff --git a/ui/v2.5/src/components/Performers/Performers.tsx b/ui/v2.5/src/components/Performers/Performers.tsx index d240ce988..7b6e32b8f 100644 --- a/ui/v2.5/src/components/Performers/Performers.tsx +++ b/ui/v2.5/src/components/Performers/Performers.tsx @@ -4,11 +4,11 @@ import { Helmet } from "react-helmet"; import { useTitleProps } from "src/hooks/title"; import Performer from "./PerformerDetails/Performer"; import PerformerCreate from "./PerformerDetails/PerformerCreate"; -import { PerformerList } from "./PerformerList"; +import { FilteredPerformerList } from "./PerformerList"; import { View } from "../List/views"; const Performers: React.FC = () => { - return ; + return ; }; const PerformerRoutes: React.FC = () => { diff --git a/ui/v2.5/src/components/Performers/styles.scss b/ui/v2.5/src/components/Performers/styles.scss index 17ca3a737..54a010e50 100644 --- a/ui/v2.5/src/components/Performers/styles.scss +++ b/ui/v2.5/src/components/Performers/styles.scss @@ -68,7 +68,8 @@ .collapsed { .detail-item.tattoos, .detail-item.piercings, - .detail-item.career_length, + .detail-item.career_start, + .detail-item.career_end, .detail-item.details, .detail-item.tags, .detail-item.stash_ids { diff --git a/ui/v2.5/src/components/ScenePlayer/util.ts b/ui/v2.5/src/components/ScenePlayer/util.ts index 8c6fb8010..21ed99b62 100644 --- a/ui/v2.5/src/components/ScenePlayer/util.ts +++ b/ui/v2.5/src/components/ScenePlayer/util.ts @@ -1,7 +1,27 @@ -import videojs from "video.js"; +import videojs, { VideoJsPlayer } from "video.js"; export const VIDEO_PLAYER_ID = "VideoJsPlayer"; export const getPlayer = () => videojs.getPlayer(VIDEO_PLAYER_ID); export const getPlayerPosition = () => getPlayer()?.currentTime(); + +export type AbLoopOptions = { + start: number; + end: number | false; + enabled?: boolean; +}; + +export type AbLoopPluginApi = { + getOptions: () => 
AbLoopOptions; + setOptions: (options: AbLoopOptions) => void; +}; + +export const getAbLoopPlugin = () => { + const player = getPlayer(); + if (!player) return null; + const { abLoopPlugin } = player as VideoJsPlayer & { + abLoopPlugin?: AbLoopPluginApi; + }; + return abLoopPlugin ?? null; +}; diff --git a/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx b/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx index 143daca4f..8ecb6e557 100644 --- a/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx +++ b/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx @@ -67,6 +67,8 @@ export const PreviewScrubber: React.FC = ({ const clientRect = imageParent.getBoundingClientRect(); const scale = scaleToFit(sprite, clientRect); + const spriteSheet = new Image(); + spriteSheet.src = sprite.url; setStyle({ backgroundPosition: `${-sprite.x}px ${-sprite.y}px`, diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/PrimaryTags.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/PrimaryTags.tsx index 11c805ec6..d5a32fc31 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/PrimaryTags.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/PrimaryTags.tsx @@ -4,18 +4,24 @@ import * as GQL from "src/core/generated-graphql"; import { Button, Badge, Card } from "react-bootstrap"; import TextUtils from "src/utils/text"; import { markerTitle } from "src/core/markers"; +import { useConfigurationContext } from "src/hooks/Config"; interface IPrimaryTags { sceneMarkers: GQL.SceneMarkerDataFragment[]; onClickMarker: (marker: GQL.SceneMarkerDataFragment) => void; + onLoopMarker: (marker: GQL.SceneMarkerDataFragment) => void; onEdit: (marker: GQL.SceneMarkerDataFragment) => void; } export const PrimaryTags: React.FC = ({ sceneMarkers, onClickMarker, + onLoopMarker, onEdit, }) => { + const { configuration } = useConfigurationContext(); + const showAbLoopControls = configuration?.ui?.showAbLoopControls; + if (!sceneMarkers?.length) return
; const primaryTagNames: Record = {}; @@ -52,10 +58,21 @@ export const PrimaryTags: React.FC = ({
-
- {TextUtils.formatTimestampRange( - marker.seconds, - marker.end_seconds ?? undefined +
+
+ {TextUtils.formatTimestampRange( + marker.seconds, + marker.end_seconds ?? undefined + )} +
+ {showAbLoopControls && marker.end_seconds != null && ( + )}
{tags}
diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/Scene.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/Scene.tsx index ee38ebd47..435b9dce2 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/Scene.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/Scene.tsx @@ -32,7 +32,10 @@ import { ListFilterModel } from "src/models/list-filter/filter"; import Mousetrap from "mousetrap"; import { OrganizedButton } from "./OrganizedButton"; import { useConfigurationContext } from "src/hooks/Config"; -import { getPlayerPosition } from "src/components/ScenePlayer/util"; +import { + getAbLoopPlugin, + getPlayerPosition, +} from "src/components/ScenePlayer/util"; import { faEllipsisV, faChevronRight, @@ -311,9 +314,53 @@ const ScenePage: React.FC = PatchComponent("ScenePage", (props) => { }; function onClickMarker(marker: GQL.SceneMarkerDataFragment) { + const abLoopPlugin = getAbLoopPlugin(); + const opts = abLoopPlugin?.getOptions(); + const start = opts?.start; + const end = opts?.end; + + const hasLoopRange = + opts?.enabled && + typeof start === "number" && + typeof end === "number" && + Number.isFinite(start) && + Number.isFinite(end); + + if ( + abLoopPlugin && + opts && + hasLoopRange && + (marker.seconds < Math.min(start as number, end as number) || + marker.seconds > Math.max(start as number, end as number)) + ) { + abLoopPlugin.setOptions({ + ...opts, + enabled: false, + }); + } + setTimestamp(marker.seconds); } + function onLoopMarker(marker: GQL.SceneMarkerDataFragment) { + if (marker.end_seconds == null) return; + + setTimestamp(marker.seconds); + const start = Math.min(marker.seconds, marker.end_seconds); + const end = Math.max(marker.seconds, marker.end_seconds); + const abLoopPlugin = getAbLoopPlugin(); + const opts = abLoopPlugin?.getOptions(); + + if (opts && abLoopPlugin) { + abLoopPlugin.setOptions({ + ...opts, + start, + end, + enabled: true, + }); + } + } + async function onRescan() { await mutateMetadataScan({ paths: [objectPath(scene)], @@ 
-415,7 +462,7 @@ const ScenePage: React.FC = PatchComponent("ScenePage", (props) => { className="bg-secondary text-white" onClick={() => setIsGenerateDialogOpen(true)} > - + = PatchComponent("ScenePage", (props) => { diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkerForm.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkerForm.tsx index ef1a2e7e1..a2bad2f8e 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkerForm.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkerForm.tsx @@ -11,7 +11,10 @@ import { } from "src/core/StashService"; import { DurationInput } from "src/components/Shared/DurationInput"; import { MarkerTitleSuggest } from "src/components/Shared/Select"; -import { getPlayerPosition } from "src/components/ScenePlayer/util"; +import { + getAbLoopPlugin, + getPlayerPosition, +} from "src/components/ScenePlayer/util"; import { useToast } from "src/hooks/Toast"; import isEqual from "lodash-es/isEqual"; import { formikUtils } from "src/utils/form"; @@ -61,16 +64,39 @@ export const SceneMarkerForm: React.FC = ({ }); // useMemo to only run getPlayerPosition when the input marker actually changes - const initialValues = useMemo( - () => ({ + const initialValues = useMemo(() => { + if (!marker) { + const abLoopPlugin = getAbLoopPlugin(); + const opts = abLoopPlugin?.getOptions(); + const start = opts?.start; + const end = opts?.end; + const hasAbLoop = Number.isFinite(start); + + if (opts?.enabled && hasAbLoop) { + const current = Math.round(getPlayerPosition() ?? 0); + const rawEnd = + Number.isFinite(end) && (end as number) > 0 ? (end as number) : null; + const endSeconds = + rawEnd !== null ? rawEnd : Math.max(start as number, current); + + return { + title: "", + seconds: start as number, + end_seconds: endSeconds, + primary_tag_id: "", + tag_ids: [], + }; + } + } + + return { title: marker?.title ?? "", seconds: marker?.seconds ?? Math.round(getPlayerPosition() ?? 0), end_seconds: marker?.end_seconds ?? 
null, primary_tag_id: marker?.primary_tag.id ?? "", tag_ids: marker?.tags.map((tag) => tag.id) ?? [], - }), - [marker] - ); + }; + }, [marker]); type InputValues = yup.InferType; diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkersPanel.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkersPanel.tsx index 331c58c78..28a6e4d98 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkersPanel.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkersPanel.tsx @@ -11,12 +11,14 @@ interface ISceneMarkersPanelProps { sceneId: string; isVisible: boolean; onClickMarker: (marker: GQL.SceneMarkerDataFragment) => void; + onLoopMarker: (marker: GQL.SceneMarkerDataFragment) => void; } export const SceneMarkersPanel: React.FC = ({ sceneId, isVisible, onClickMarker, + onLoopMarker, }) => { const { data, loading } = GQL.useFindSceneMarkerTagsQuery({ variables: { id: sceneId }, @@ -70,6 +72,7 @@ export const SceneMarkersPanel: React.FC = ({
diff --git a/ui/v2.5/src/components/Scenes/SceneList.tsx b/ui/v2.5/src/components/Scenes/SceneList.tsx index ff5237c9f..a0458c5ac 100644 --- a/ui/v2.5/src/components/Scenes/SceneList.tsx +++ b/ui/v2.5/src/components/Scenes/SceneList.tsx @@ -1,7 +1,7 @@ import React, { useCallback, useEffect, useMemo } from "react"; import cloneDeep from "lodash-es/cloneDeep"; import { FormattedMessage, useIntl } from "react-intl"; -import { useHistory } from "react-router-dom"; +import { useHistory, useLocation } from "react-router-dom"; import Mousetrap from "mousetrap"; import * as GQL from "src/core/generated-graphql"; import { queryFindScenes, useFindScenes } from "src/core/StashService"; @@ -19,12 +19,6 @@ import { SceneCardGrid } from "./SceneCardGrid"; import { TaggerContext } from "../Tagger/context"; import { IdentifyDialog } from "../Dialogs/IdentifyDialog/IdentifyDialog"; import { useConfigurationContext } from "src/hooks/Config"; -import { - faPencil, - faPlay, - faPlus, - faTrash, -} from "@fortawesome/free-solid-svg-icons"; import { SceneMergeModal } from "./SceneMergeDialog"; import { objectTitle } from "src/core/files"; import TextUtils from "src/utils/text"; @@ -32,10 +26,7 @@ import { View } from "../List/views"; import { FileSize } from "../Shared/FileSize"; import { LoadedContent } from "../List/PagedList"; import { useCloseEditDelete, useFilterOperations } from "../List/util"; -import { - OperationDropdown, - OperationDropdownItem, -} from "../List/ListOperationButtons"; +import { ListOperations } from "../List/ListOperationButtons"; import { useFilteredItemList } from "../List/ItemList"; import { Sidebar, @@ -46,20 +37,14 @@ import { } from "../Shared/Sidebar"; import { SidebarPerformersFilter } from "../List/Filters/PerformersFilter"; import { SidebarStudiosFilter } from "../List/Filters/StudiosFilter"; -import { PerformersCriterionOption } from "src/models/list-filter/criteria/performers"; -import { StudiosCriterionOption } from 
"src/models/list-filter/criteria/studios"; -import { TagsCriterionOption } from "src/models/list-filter/criteria/tags"; import { SidebarTagsFilter } from "../List/Filters/TagsFilter"; import cx from "classnames"; -import { RatingCriterionOption } from "src/models/list-filter/criteria/rating"; import { SidebarRatingFilter } from "../List/Filters/RatingFilter"; import { OrganizedCriterionOption } from "src/models/list-filter/criteria/organized"; import { HasMarkersCriterionOption } from "src/models/list-filter/criteria/has-markers"; import { SidebarBooleanFilter } from "../List/Filters/BooleanFilter"; -import { - DurationCriterionOption, - PerformerAgeCriterionOption, -} from "src/models/list-filter/scenes"; +import { PerformerAgeCriterionOption } from "src/models/list-filter/scenes"; +import { SidebarDuplicateFilter } from "../List/Filters/DuplicateFilter"; import { SidebarAgeFilter } from "../List/Filters/SidebarAgeFilter"; import { SidebarDurationFilter } from "../List/Filters/SidebarDurationFilter"; import { @@ -68,8 +53,7 @@ import { } from "../List/Filters/FilterSidebar"; import { PatchComponent, PatchContainerComponent } from "src/patch"; import { Pagination, PaginationIndex } from "../List/Pagination"; -import { Button, ButtonGroup } from "react-bootstrap"; -import { Icon } from "../Shared/Icon"; +import { Button } from "react-bootstrap"; import useFocus from "src/utils/focus"; import { useZoomKeybinds } from "../List/ZoomSlider"; import { FilteredListToolbar } from "../List/FilteredListToolbar"; @@ -200,59 +184,65 @@ const SceneList: React.FC<{ selectedIds: Set; onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; fromGroupId?: string; -}> = ({ scenes, filter, selectedIds, onSelectChange, fromGroupId }) => { - const queue = useMemo(() => SceneQueue.fromListFilterModel(filter), [filter]); +}> = PatchComponent( + "SceneList", + ({ scenes, filter, selectedIds, onSelectChange, fromGroupId }) => { + const queue = useMemo( + () => 
SceneQueue.fromListFilterModel(filter), + [filter] + ); + + if (scenes.length === 0 && filter.displayMode !== DisplayMode.Tagger) { + return null; + } + + if (filter.displayMode === DisplayMode.Grid) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.List) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.Wall) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.Tagger) { + return ( + + ); + } - if (scenes.length === 0 && filter.displayMode !== DisplayMode.Tagger) { return null; } - - if (filter.displayMode === DisplayMode.Grid) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.List) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.Wall) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.Tagger) { - return ( - - ); - } - - return null; -}; +); const ScenesFilterSidebarSections = PatchContainerComponent( "FilteredSceneList.SidebarSections" @@ -298,48 +288,23 @@ const SidebarContent: React.FC<{ {!hideStudios && ( } - data-type={StudiosCriterionOption.type} - option={StudiosCriterionOption} filter={filter} setFilter={setFilter} filterHook={filterHook} - sectionID="studios" /> )} } - data-type={PerformersCriterionOption.type} - option={PerformersCriterionOption} filter={filter} setFilter={setFilter} filterHook={filterHook} - sectionID="performers" /> } - data-type={TagsCriterionOption.type} - option={TagsCriterionOption} filter={filter} setFilter={setFilter} filterHook={filterHook} - sectionID="tags" - /> - } - data-type={RatingCriterionOption.type} - option={RatingCriterionOption} - filter={filter} - setFilter={setFilter} - sectionID="rating" - /> - } - option={DurationCriterionOption} - filter={filter} - setFilter={setFilter} - sectionID="duration" /> + + } data-type={HasMarkersCriterionOption.type} @@ -356,6 +321,12 @@ const SidebarContent: React.FC<{ setFilter={setFilter} sectionID="organized" /> + } + filter={filter} + setFilter={setFilter} + sectionID="duplicated" + /> } 
option={PerformerAgeCriterionOption} @@ -374,102 +345,6 @@ const SidebarContent: React.FC<{ ); }; -interface IOperations { - text: string; - onClick: () => void; - isDisplayed?: () => boolean; - className?: string; -} - -const SceneListOperations: React.FC<{ - items: number; - hasSelection: boolean; - operations: IOperations[]; - onEdit: () => void; - onDelete: () => void; - onPlay: () => void; - onCreateNew: () => void; -}> = PatchComponent( - "SceneListOperations", - ({ - items, - hasSelection, - operations, - onEdit, - onDelete, - onPlay, - onCreateNew, - }) => { - const intl = useIntl(); - - return ( -
- - {!!items && ( - - )} - {!hasSelection && ( - - )} - - {hasSelection && ( - <> - - - - )} - - - {operations.map((o) => { - if (o.isDisplayed && !o.isDisplayed()) { - return null; - } - - return ( - - ); - })} - - -
- ); - } -); - interface IFilteredScenes { filterHook?: (filter: ListFilterModel) => ListFilterModel; defaultSort?: string; @@ -478,362 +353,379 @@ interface IFilteredScenes { fromGroupId?: string; } -export const FilteredSceneList = (props: IFilteredScenes) => { - const intl = useIntl(); - const history = useHistory(); +export const FilteredSceneList = PatchComponent( + "FilteredSceneList", + (props: IFilteredScenes) => { + const intl = useIntl(); + const history = useHistory(); + const location = useLocation(); - const searchFocus = useFocus(); + const searchFocus = useFocus(); - const { filterHook, defaultSort, view, alterQuery, fromGroupId } = props; + const { filterHook, defaultSort, view, alterQuery, fromGroupId } = props; - // States - const { - showSidebar, - setShowSidebar, - loading: sidebarStateLoading, - sectionOpen, - setSectionOpen, - } = useSidebarState(view); + // States + const { + showSidebar, + setShowSidebar, + loading: sidebarStateLoading, + sectionOpen, + setSectionOpen, + } = useSidebarState(view); - const { filterState, queryResult, modalState, listSelect, showEditFilter } = - useFilteredItemList({ - filterStateProps: { - filterMode: GQL.FilterMode.Scenes, - defaultSort, - view, - useURL: alterQuery, - }, - queryResultProps: { - useResult: useFindScenes, - getCount: (r) => r.data?.findScenes.count ?? 0, - getItems: (r) => r.data?.findScenes.scenes ?? [], - filterHook, - }, + const { filterState, queryResult, modalState, listSelect, showEditFilter } = + useFilteredItemList({ + filterStateProps: { + filterMode: GQL.FilterMode.Scenes, + defaultSort, + view, + useURL: alterQuery, + }, + queryResultProps: { + useResult: useFindScenes, + getCount: (r) => r.data?.findScenes.count ?? 0, + getItems: (r) => r.data?.findScenes.scenes ?? 
[], + filterHook, + }, + }); + + const { filter, setFilter } = filterState; + + const { effectiveFilter, result, cachedResult, items, totalCount } = + queryResult; + + const { + selectedIds, + selectedItems, + onSelectChange, + onSelectAll, + onSelectNone, + onInvertSelection, + hasSelection, + } = listSelect; + + const { modal, showModal, closeModal } = modalState; + + // Utility hooks + const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ + filter, + setFilter, }); - const { filter, setFilter } = filterState; + useAddKeybinds(filter, totalCount); + useFilteredSidebarKeybinds({ + showSidebar, + setShowSidebar, + }); - const { effectiveFilter, result, cachedResult, items, totalCount } = - queryResult; + const onCloseEditDelete = useCloseEditDelete({ + closeModal, + onSelectNone, + result, + }); - const { - selectedIds, - selectedItems, - onSelectChange, - onSelectAll, - onSelectNone, - onInvertSelection, - hasSelection, - } = listSelect; + const onEdit = useCallback(() => { + showModal( + + ); + }, [showModal, selectedItems, onCloseEditDelete]); - const { modal, showModal, closeModal } = modalState; + const onDelete = useCallback(() => { + showModal( + + ); + }, [showModal, selectedItems, onCloseEditDelete]); - // Utility hooks - const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ - filter, - setFilter, - }); + useEffect(() => { + Mousetrap.bind("e", () => { + if (hasSelection) { + onEdit?.(); + } + }); - useAddKeybinds(filter, totalCount); - useFilteredSidebarKeybinds({ - showSidebar, - setShowSidebar, - }); + Mousetrap.bind("d d", () => { + if (hasSelection) { + onDelete?.(); + } + }); - const onCloseEditDelete = useCloseEditDelete({ - closeModal, - onSelectNone, - result, - }); + return () => { + Mousetrap.unbind("e"); + Mousetrap.unbind("d d"); + }; + }, [onSelectAll, onSelectNone, hasSelection, onEdit, onDelete]); + useZoomKeybinds({ + zoomIndex: filter.zoomIndex, + onChangeZoom: (zoom) => 
setFilter(filter.setZoom(zoom)), + }); - const onEdit = useCallback(() => { - showModal( - + const metadataByline = useMemo(() => { + if (cachedResult.loading) return null; + + return renderMetadataByline(cachedResult) ?? null; + }, [cachedResult]); + + const queue = useMemo( + () => SceneQueue.fromListFilterModel(filter), + [filter] ); - }, [showModal, selectedItems, onCloseEditDelete]); - const onDelete = useCallback(() => { - showModal( - - ); - }, [showModal, selectedItems, onCloseEditDelete]); + const playRandom = usePlayRandom(effectiveFilter, totalCount); + const playSelected = usePlaySelected(selectedIds); + const playFirst = usePlayFirst(); - useEffect(() => { - Mousetrap.bind("e", () => { - if (hasSelection) { - onEdit?.(); + function onCreateNew() { + let queryParam = new URLSearchParams(location.search).get("q"); + let newPath = "/scenes/new"; + if (queryParam) { + newPath += "?q=" + encodeURIComponent(queryParam); } - }); - - Mousetrap.bind("d d", () => { - if (hasSelection) { - onDelete?.(); - } - }); - - return () => { - Mousetrap.unbind("e"); - Mousetrap.unbind("d d"); - }; - }, [onSelectAll, onSelectNone, hasSelection, onEdit, onDelete]); - useZoomKeybinds({ - zoomIndex: filter.zoomIndex, - onChangeZoom: (zoom) => setFilter(filter.setZoom(zoom)), - }); - - const metadataByline = useMemo(() => { - if (cachedResult.loading) return null; - - return renderMetadataByline(cachedResult) ?? 
null; - }, [cachedResult]); - - const queue = useMemo(() => SceneQueue.fromListFilterModel(filter), [filter]); - - const playRandom = usePlayRandom(effectiveFilter, totalCount); - const playSelected = usePlaySelected(selectedIds); - const playFirst = usePlayFirst(); - - function onCreateNew() { - history.push("/scenes/new"); - } - - function onPlay() { - if (items.length === 0) { - return; + history.push(newPath); } - // if there are selected items, play those - if (hasSelection) { - playSelected(); - return; + function onPlay() { + if (items.length === 0) { + return; + } + + // if there are selected items, play those + if (hasSelection) { + playSelected(); + return; + } + + // otherwise, play the first item in the list + const sceneID = items[0].id; + playFirst(queue, sceneID, 0); } - // otherwise, play the first item in the list - const sceneID = items[0].id; - playFirst(queue, sceneID, 0); - } + function onExport(all: boolean) { + showModal( + closeModal()} + /> + ); + } - function onExport(all: boolean) { - showModal( - closeModal()} + function onMerge() { + const selected = + selectedItems.map((s) => { + return { + id: s.id, + title: objectTitle(s), + }; + }) ?? 
[]; + showModal( + { + closeModal(); + if (mergedID) { + history.push(`/scenes/${mergedID}`); + } + }} + show + /> + ); + } + + const otherOperations = [ + { + text: intl.formatMessage({ id: "actions.play" }), + onClick: () => onPlay(), + isDisplayed: () => items.length > 0, + className: "play-item", + }, + { + text: intl.formatMessage( + { id: "actions.create_entity" }, + { entityType: intl.formatMessage({ id: "scene" }) } + ), + onClick: () => onCreateNew(), + isDisplayed: () => !hasSelection, + className: "create-new-item", + }, + { + text: intl.formatMessage({ id: "actions.select_all" }), + onClick: () => onSelectAll(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.select_none" }), + onClick: () => onSelectNone(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.invert_selection" }), + onClick: () => onInvertSelection(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.play_random" }), + onClick: playRandom, + isDisplayed: () => totalCount > 1, + }, + { + text: `${intl.formatMessage({ id: "actions.generate" })}…`, + onClick: () => + showModal( + closeModal()} + /> + ), + isDisplayed: () => hasSelection, + }, + { + text: `${intl.formatMessage({ id: "actions.identify" })}…`, + onClick: () => + showModal( + closeModal()} + /> + ), + isDisplayed: () => hasSelection, + }, + { + text: `${intl.formatMessage({ id: "actions.merge" })}…`, + onClick: () => onMerge(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export" }), + onClick: () => onExport(false), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export_all" }), + onClick: () => onExport(true), + }, + ]; + + // render + if (sidebarStateLoading) return null; + + const operations = ( + ); - } - function onMerge() { - const selected = - selectedItems.map((s) => { - return { - id: s.id, - title: objectTitle(s), - }; - }) ?? 
[]; - showModal( - { - closeModal(); - if (mergedID) { - history.push(`/scenes/${mergedID}`); - } - }} - show - /> - ); - } + return ( + +
+ {modal} - const otherOperations = [ - { - text: intl.formatMessage({ id: "actions.play" }), - onClick: () => onPlay(), - isDisplayed: () => items.length > 0, - className: "play-item", - }, - { - text: intl.formatMessage( - { id: "actions.create_entity" }, - { entityType: intl.formatMessage({ id: "scene" }) } - ), - onClick: () => onCreateNew(), - isDisplayed: () => !hasSelection, - className: "create-new-item", - }, - { - text: intl.formatMessage({ id: "actions.select_all" }), - onClick: () => onSelectAll(), - isDisplayed: () => totalCount > 0, - }, - { - text: intl.formatMessage({ id: "actions.select_none" }), - onClick: () => onSelectNone(), - isDisplayed: () => hasSelection, - }, - { - text: intl.formatMessage({ id: "actions.invert_selection" }), - onClick: () => onInvertSelection(), - isDisplayed: () => totalCount > 0, - }, - { - text: intl.formatMessage({ id: "actions.play_random" }), - onClick: playRandom, - isDisplayed: () => totalCount > 1, - }, - { - text: `${intl.formatMessage({ id: "actions.generate" })}…`, - onClick: () => - showModal( - closeModal()} - /> - ), - isDisplayed: () => hasSelection, - }, - { - text: `${intl.formatMessage({ id: "actions.identify" })}…`, - onClick: () => - showModal( - closeModal()} - /> - ), - isDisplayed: () => hasSelection, - }, - { - text: `${intl.formatMessage({ id: "actions.merge" })}…`, - onClick: () => onMerge(), - isDisplayed: () => hasSelection, - }, - { - text: intl.formatMessage({ id: "actions.export" }), - onClick: () => onExport(false), - isDisplayed: () => hasSelection, - }, - { - text: intl.formatMessage({ id: "actions.export_all" }), - onClick: () => onExport(true), - }, - ]; - - // render - if (sidebarStateLoading) return null; - - const operations = ( - - ); - - return ( - -
- {modal} - - - - setShowSidebar(false)}> - setShowSidebar(false)} - count={cachedResult.loading ? undefined : totalCount} - focus={searchFocus} - /> - - setShowSidebar(!showSidebar)} - > - - - showEditFilter(c.criterionOption.type)} - onRemoveCriterion={removeCriterion} - onRemoveAll={clearAllCriteria} - /> - -
- setFilter(filter.changePage(page))} + + + setShowSidebar(false)}> + setShowSidebar(false)} + count={cachedResult.loading ? undefined : totalCount} + focus={searchFocus} /> - + setShowSidebar(!showSidebar)} + > + -
- - + showEditFilter(c.criterionOption.type) + } + onRemoveCriterion={removeCriterion} + onRemoveAll={clearAllCriteria} /> - - {totalCount > filter.itemsPerPage && ( -
-
- -
+
+ setFilter(filter.changePage(page))} + /> +
- )} - - - -
- - ); -}; + + + + + + {totalCount > filter.itemsPerPage && ( +
+
+ +
+
+ )} +
+
+
+
+
+ ); + } +); export default FilteredSceneList; diff --git a/ui/v2.5/src/components/Settings/Settings.tsx b/ui/v2.5/src/components/Settings/Settings.tsx index 4c2b02455..86a781445 100644 --- a/ui/v2.5/src/components/Settings/Settings.tsx +++ b/ui/v2.5/src/components/Settings/Settings.tsx @@ -18,6 +18,8 @@ import { SettingsContext, useSettings } from "./context"; import { SettingsLibraryPanel } from "./SettingsLibraryPanel"; import { SettingsSecurityPanel } from "./SettingsSecurityPanel"; import Changelog from "../Changelog/Changelog"; +import { TroubleshootingModeButton } from "../TroubleshootingMode/TroubleshootingModeButton"; +import { useTroubleshootingMode } from "../TroubleshootingMode/useTroubleshootingMode"; const validTabs = [ "tasks", @@ -43,6 +45,7 @@ function isTabKey(tab: string | null): tab is TabKey { const SettingTabs: React.FC<{ tab: TabKey }> = ({ tab }) => { const { advancedMode, setAdvancedMode } = useSettings(); + const { isActive: troubleshootingModeActive } = useTroubleshootingMode(); const titleProps = useTitleProps({ id: "settings" }); @@ -148,6 +151,7 @@ const SettingTabs: React.FC<{ tab: TabKey }> = ({ tab }) => { />
+ {!troubleshootingModeActive && }
diff --git a/ui/v2.5/src/components/Settings/SettingsSystemPanel.tsx b/ui/v2.5/src/components/Settings/SettingsSystemPanel.tsx index 34fb634b2..446ad09a1 100644 --- a/ui/v2.5/src/components/Settings/SettingsSystemPanel.tsx +++ b/ui/v2.5/src/components/Settings/SettingsSystemPanel.tsx @@ -427,6 +427,44 @@ export const SettingsConfigurationPanel: React.FC = () => { /> + + saveGeneral({ spriteScreenshotSize: v })} + /> + saveGeneral({ useCustomSpriteInterval: v })} + /> + saveGeneral({ spriteInterval: v })} + /> + saveGeneral({ minimumSprites: v })} + /> + saveGeneral({ maximumSprites: v })} + /> + + = ({ ); }; +const BackupDialog: React.FC<{ + onClose: ( + confirmed?: boolean, + download?: boolean, + includeBlobs?: boolean + ) => void; +}> = ({ onClose }) => { + const intl = useIntl(); + const { configuration } = useConfigurationContext(); + + const includeBlobsDefault = + configuration?.general.blobsStorage === GQL.BlobsStorageType.Filesystem; + const backupDir = + configuration.general.backupDirectoryPath || + `<${intl.formatMessage({ + id: "config.general.backup_directory_path.heading", + })}>`; + + const [download, setDownload] = useState(false); + const [includeBlobs, setIncludeBlobs] = useState(includeBlobsDefault); + + let msg; + if (!includeBlobs) { + msg = intl.formatMessage( + { id: "config.tasks.backup_database.sqlite" }, + { + filename_format: ( + [origFilename].sqlite.[schemaVersion].[YYYYMMDD_HHMMSS] + ), + } + ); + } else { + msg = intl.formatMessage( + { id: "config.tasks.backup_database.zip" }, + { + filename_format: ( + + [origFilename].sqlite.[schemaVersion].[YYYYMMDD_HHMMSS].zip + + ), + } + ); + } + + const warning = + includeBlobs !== includeBlobsDefault ? ( +

+ + +

+ ) : null; + + const acceptID = download + ? "config.tasks.backup_database.download" + : "actions.backup"; + + return ( + onClose(true, download, includeBlobs), + }} + cancel={{ + onClick: () => onClose(), + variant: "secondary", + }} + > +
+ +
+ +
+ setDownload(false)} + label={intl.formatMessage( + { + id: "config.tasks.backup_database.to_directory", + }, + { + directory: {backupDir}, + } + )} + /> + + setDownload(true)} + label={intl.formatMessage({ + id: "config.tasks.backup_database.download", + })} + /> +
+ + + setIncludeBlobs(v)} + // if includeBlobsDefault is false, then blobs are in the database + disabled={!includeBlobsDefault} + /> + + +

{msg}

+ {warning} +
+
+ ); +}; + interface IDataManagementTasks { setIsBackupRunning: (v: boolean) => void; setIsAnonymiseRunning: (v: boolean) => void; @@ -167,6 +288,7 @@ export const DataManagementTasks: React.FC = ({ const [dialogOpen, setDialogOpenState] = useState({ importAlert: false, import: false, + backup: false, clean: false, cleanAlert: false, cleanGenerated: false, @@ -344,11 +466,12 @@ export const DataManagementTasks: React.FC = ({ } } - async function onBackup(download?: boolean) { + async function onBackup(download?: boolean, includeBlobs?: boolean) { try { setIsBackupRunning(true); const ret = await mutateBackupDatabase({ download, + includeBlobs, }); // download the result @@ -439,6 +562,17 @@ export const DataManagementTasks: React.FC = ({ }} /> )} + {dialogOpen.backup && ( + { + if (confirmed) { + onBackup(download, includeBlobs); + } + + setDialogOpen({ backup: false }); + }} + /> + )}
@@ -555,39 +689,25 @@ export const DataManagementTasks: React.FC = ({ - [origFilename].sqlite.[schemaVersion].[YYYYMMDD_HHMMSS] - - ), - } - )} + heading={ + <> + + + + + + } + subHeading={intl.formatMessage({ + id: "config.tasks.backup_database.description", + })} > - - - - diff --git a/ui/v2.5/src/components/Settings/Tasks/GenerateOptions.tsx b/ui/v2.5/src/components/Settings/Tasks/GenerateOptions.tsx index ee126d41e..c68b6d5eb 100644 --- a/ui/v2.5/src/components/Settings/Tasks/GenerateOptions.tsx +++ b/ui/v2.5/src/components/Settings/Tasks/GenerateOptions.tsx @@ -7,7 +7,7 @@ import { } from "../GeneratePreviewOptions"; interface IGenerateOptions { - type?: "scene" | "image"; + type?: "scene" | "image" | "gallery"; selection?: boolean; options: GQL.GenerateMetadataInput; setOptions: (s: GQL.GenerateMetadataInput) => void; @@ -27,7 +27,7 @@ export const GenerateOptions: React.FC = ({ } const showSceneOptions = !type || type === "scene"; - const showImageOptions = !type || type === "image"; + const showImageOptions = !type || type === "image" || type === "gallery"; return ( <> @@ -167,6 +167,13 @@ export const GenerateOptions: React.FC = ({ headingID="dialogs.scene_gen.image_thumbnails" onChange={(v) => setOptions({ imageThumbnails: v })} /> + setOptions({ imagePhashes: v })} + /> )} = ({ scanGenerateSprites, scanGeneratePhashes, scanGenerateThumbnails, + scanGenerateImagePhashes, scanGenerateClipPreviews, rescan, } = options; @@ -72,6 +73,13 @@ export const ScanOptions: React.FC = ({ headingID="config.tasks.generate_thumbnails_during_scan" onChange={(v) => setOptions({ scanGenerateThumbnails: v })} /> + setOptions({ scanGenerateImagePhashes: v })} + /> = ({ return ( onClose()} title=""> - Select Directory + + +
{ + if (ScreenUtils.isMobile()) { + return; + } + if ( - !containerWidth || zoomIndex === undefined || zoomIndex < 0 || - zoomIndex >= zoomWidths.length || - ScreenUtils.isMobile() + zoomIndex >= zoomWidths.length ) return; + // use a default card width if we don't have the container width yet + if (!containerWidth) { + return zoomWidths[zoomIndex]; + } + let zoomValue = zoomIndex; const preferredCardWidth = zoomWidths[zoomValue]; let fittedCardWidth = calculateCardWidth( diff --git a/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapeDialogRow.tsx b/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapeDialogRow.tsx index 88b79d87d..a0fe6489e 100644 --- a/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapeDialogRow.tsx +++ b/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapeDialogRow.tsx @@ -14,7 +14,7 @@ import { getCountryByISO } from "src/utils/country"; import { CountrySelect } from "../CountrySelect"; import { StringListInput } from "../StringListInput"; import { ImageSelector } from "../ImageSelector"; -import { ScrapeResult } from "./scrapeResult"; +import { CustomFieldScrapeResults, ScrapeResult } from "./scrapeResult"; import { ScrapeDialogContext } from "./ScrapeDialog"; function renderButtonIcon(selected: boolean) { @@ -171,6 +171,70 @@ export const ScrapedInputGroupRow: React.FC = ( ); }; +interface IScrapedNumberInputProps { + isNew?: boolean; + placeholder?: string; + locked?: boolean; + result: ScrapeResult; + onChange?: (value: number) => void; +} + +const ScrapedNumberInput: React.FC = (props) => { + return ( + { + if (props.isNew && props.onChange) { + props.onChange(Number(e.target.value)); + } + }} + className="bg-secondary text-white border-secondary" + type="number" + /> + ); +}; + +interface IScrapedNumberRowProps { + title: string; + field: string; + className?: string; + placeholder?: string; + result: ScrapeResult; + locked?: boolean; + onChange: (value: ScrapeResult) => void; +} + +export const ScrapedNumberRow: React.FC = (props) => { + return ( 
+ + } + newField={ + + props.onChange(props.result.cloneWithValue(value)) + } + /> + } + onChange={props.onChange} + /> + ); +}; + interface IScrapedStringListProps { isNew?: boolean; placeholder?: string; @@ -431,3 +495,30 @@ export const ScrapedCountryRow: React.FC = ({ onChange={onChange} /> ); + +export const ScrapedCustomFieldRows: React.FC<{ + results: CustomFieldScrapeResults; + onChange: (newCustomFields: CustomFieldScrapeResults) => void; +}> = ({ results, onChange }) => { + return ( + <> + {Array.from(results.entries()).map(([field, result]) => { + const fieldName = `custom_${field}`; + return ( + { + const newResults = new Map(results); + newResults.set(field, newResult); + onChange(newResults); + }} + /> + ); + })} + + ); +}; diff --git a/ui/v2.5/src/components/Shared/ScrapeDialog/scrapeResult.ts b/ui/v2.5/src/components/Shared/ScrapeDialog/scrapeResult.ts index b9b88cef0..63d1c76c1 100644 --- a/ui/v2.5/src/components/Shared/ScrapeDialog/scrapeResult.ts +++ b/ui/v2.5/src/components/Shared/ScrapeDialog/scrapeResult.ts @@ -2,6 +2,9 @@ import lodashIsEqual from "lodash-es/isEqual"; import clone from "lodash-es/clone"; import { IHasStoredID } from "src/utils/data"; +/* eslint-disable-next-line @typescript-eslint/no-explicit-any */ +export type CustomFieldScrapeResults = Map>; + export class ScrapeResult { public newValue?: T; public originalValue?: T; diff --git a/ui/v2.5/src/components/Studios/EditStudiosDialog.tsx b/ui/v2.5/src/components/Studios/EditStudiosDialog.tsx index 293a8dfb3..1c34dfc36 100644 --- a/ui/v2.5/src/components/Studios/EditStudiosDialog.tsx +++ b/ui/v2.5/src/components/Studios/EditStudiosDialog.tsx @@ -23,7 +23,13 @@ interface IListOperationProps { onClose: (applied: boolean) => void; } -const studioFields = ["favorite", "rating100", "details", "ignore_auto_tag"]; +const studioFields = [ + "favorite", + "rating100", + "details", + "ignore_auto_tag", + "organized", +]; export const EditStudiosDialog: React.FC = ( props: 
IListOperationProps @@ -236,6 +242,14 @@ export const EditStudiosDialog: React.FC = ( checked={updateInput.ignore_auto_tag ?? undefined} /> + + + setUpdateField({ organized: checked })} + checked={updateInput.organized ?? undefined} + /> + ); diff --git a/ui/v2.5/src/components/Studios/StudioCard.tsx b/ui/v2.5/src/components/Studios/StudioCard.tsx index 87c9b9528..839489182 100644 --- a/ui/v2.5/src/components/Studios/StudioCard.tsx +++ b/ui/v2.5/src/components/Studios/StudioCard.tsx @@ -7,13 +7,13 @@ import { PatchComponent } from "src/patch"; import { HoverPopover } from "../Shared/HoverPopover"; import { Icon } from "../Shared/Icon"; import { TagLink } from "../Shared/TagLink"; -import { Button, ButtonGroup } from "react-bootstrap"; +import { Button, ButtonGroup, OverlayTrigger, Tooltip } from "react-bootstrap"; import { FormattedMessage } from "react-intl"; import { PopoverCountButton } from "../Shared/PopoverCountButton"; import { RatingBanner } from "../Shared/RatingBanner"; import { FavoriteIcon } from "../Shared/FavoriteIcon"; import { useStudioUpdate } from "src/core/StashService"; -import { faTag } from "@fortawesome/free-solid-svg-icons"; +import { faTag, faBox } from "@fortawesome/free-solid-svg-icons"; import { OCounterButton } from "../Shared/CountButton"; interface IProps { @@ -185,6 +185,27 @@ export const StudioCard: React.FC = PatchComponent( return ; } + function maybeRenderOrganized() { + if (studio.organized) { + return ( + + + + } + placement="bottom" + > +
+ +
+
+ ); + } + } + function maybeRenderPopoverButtonGroup() { if ( studio.scene_count || @@ -193,7 +214,8 @@ export const StudioCard: React.FC = PatchComponent( studio.group_count || studio.performer_count || studio.o_counter || - studio.tags.length > 0 + studio.tags.length > 0 || + studio.organized ) { return ( <> @@ -206,6 +228,7 @@ export const StudioCard: React.FC = PatchComponent( {maybeRenderPerformersPopoverButton()} {maybeRenderTagPopoverButton()} {maybeRenderOCounter()} + {maybeRenderOrganized()} ); diff --git a/ui/v2.5/src/components/Studios/StudioDetails/Studio.tsx b/ui/v2.5/src/components/Studios/StudioDetails/Studio.tsx index 2edc53fe1..0096851e2 100644 --- a/ui/v2.5/src/components/Studios/StudioDetails/Studio.tsx +++ b/ui/v2.5/src/components/Studios/StudioDetails/Studio.tsx @@ -49,6 +49,7 @@ import { AliasList } from "src/components/Shared/DetailsPage/AliasList"; import { HeaderImage } from "src/components/Shared/DetailsPage/HeaderImage"; import { goBackOrReplace } from "src/utils/history"; import { OCounterButton } from "src/components/Shared/CountButton"; +import { OrganizedButton } from "src/components/Scenes/SceneDetails/OrganizedButton"; interface IProps { studio: GQL.StudioDataFragment; @@ -316,6 +317,28 @@ const StudioPage: React.FC = ({ studio, tabKey }) => { } } + const [organizedLoading, setOrganizedLoading] = useState(false); + + async function onOrganizedClick() { + if (!studio.id) return; + + setOrganizedLoading(true); + try { + await updateStudio({ + variables: { + input: { + id: studio.id, + organized: !studio.organized, + }, + }, + }); + } catch (e) { + Toast.error(e); + } finally { + setOrganizedLoading(false); + } + } + // set up hotkeys useEffect(() => { Mousetrap.bind("e", () => toggleEditing()); @@ -467,6 +490,11 @@ const StudioPage: React.FC = ({ studio, tabKey }) => { favorite={studio.favorite} onToggleFavorite={(v) => setFavorite(v)} /> + diff --git a/ui/v2.5/src/components/Studios/StudioDetails/StudioChildrenPanel.tsx 
b/ui/v2.5/src/components/Studios/StudioDetails/StudioChildrenPanel.tsx index b6cd8b484..a69364a89 100644 --- a/ui/v2.5/src/components/Studios/StudioDetails/StudioChildrenPanel.tsx +++ b/ui/v2.5/src/components/Studios/StudioDetails/StudioChildrenPanel.tsx @@ -2,7 +2,7 @@ import React from "react"; import * as GQL from "src/core/generated-graphql"; import { ParentStudiosCriterion } from "src/models/list-filter/criteria/studios"; import { ListFilterModel } from "src/models/list-filter/filter"; -import { StudioList } from "../StudioList"; +import { FilteredStudioList } from "../StudioList"; import { View } from "src/components/List/views"; function useFilterHook(studio: GQL.StudioDataFragment) { @@ -51,7 +51,7 @@ export const StudioChildrenPanel: React.FC = ({ const filterHook = useFilterHook(studio); return ( - = ({ urls: yup.array(yup.string().required()).defined(), details: yup.string().ensure(), parent_id: yup.string().required().nullable(), - aliases: yupUniqueAliases(intl, "name"), + aliases: yupRequiredStringArray(intl).defined(), tag_ids: yup.array(yup.string().required()).defined(), ignore_auto_tag: yup.boolean().defined(), stash_ids: yup.mixed().defined(), diff --git a/ui/v2.5/src/components/Studios/StudioDetails/StudioGalleriesPanel.tsx b/ui/v2.5/src/components/Studios/StudioDetails/StudioGalleriesPanel.tsx index 340586b94..f5a1aba32 100644 --- a/ui/v2.5/src/components/Studios/StudioDetails/StudioGalleriesPanel.tsx +++ b/ui/v2.5/src/components/Studios/StudioDetails/StudioGalleriesPanel.tsx @@ -1,6 +1,6 @@ import React from "react"; import * as GQL from "src/core/generated-graphql"; -import { GalleryList } from "src/components/Galleries/GalleryList"; +import { FilteredGalleryList } from "src/components/Galleries/GalleryList"; import { useStudioFilterHook } from "src/core/studios"; import { View } from "src/components/List/views"; @@ -17,7 +17,7 @@ export const StudioGalleriesPanel: React.FC = ({ }) => { const filterHook = useStudioFilterHook(studio, 
showChildStudioContent); return ( - = ({ }) => { const filterHook = useStudioFilterHook(studio, showChildStudioContent); return ( - = ({ const filterHook = useStudioFilterHook(studio, showChildStudioContent); return ( - ; + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; + fromParent?: boolean; +}> = PatchComponent( + "StudioList", + ({ studios, filter, selectedIds, onSelectChange, fromParent }) => { + if (studios.length === 0) { + return null; + } -function getCount(result: GQL.FindStudiosQueryResult) { - return result?.data?.findStudios?.count ?? 0; -} + if (filter.displayMode === DisplayMode.Grid) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.List) { + return

TODO

; + } + if (filter.displayMode === DisplayMode.Wall) { + return

TODO

; + } + if (filter.displayMode === DisplayMode.Tagger) { + return ; + } + + return null; + } +); + +const StudioFilterSidebarSections = PatchContainerComponent( + "FilteredStudioList.SidebarSections" +); + +const SidebarContent: React.FC<{ + filter: ListFilterModel; + setFilter: (filter: ListFilterModel) => void; + filterHook?: (filter: ListFilterModel) => ListFilterModel; + view?: View; + sidebarOpen: boolean; + onClose?: () => void; + showEditFilter: (editingCriterion?: string) => void; + count?: number; + focus?: ReturnType; +}> = ({ + filter, + setFilter, + filterHook, + view, + showEditFilter, + sidebarOpen, + onClose, + count, + focus, +}) => { + const showResultsId = + count !== undefined ? "actions.show_count_results" : "actions.show_results"; + + return ( + <> + + + + + + } + filter={filter} + setFilter={setFilter} + option={FavoriteStudioCriterionOption} + sectionID="favourite" + /> + + +
+ +
+ + ); +}; interface IStudioList { fromParent?: boolean; @@ -37,147 +157,161 @@ interface IStudioList { extraOperations?: IItemListOperation[]; } -export const StudioList: React.FC = PatchComponent( - "StudioList", - ({ fromParent, filterHook, view, alterQuery, extraOperations = [] }) => { +function useViewRandom(filter: ListFilterModel, count: number) { + const history = useHistory(); + + const viewRandom = useCallback(async () => { + // query for a random studio + if (count === 0) { + return; + } + + const index = Math.floor(Math.random() * count); + const filterCopy = cloneDeep(filter); + filterCopy.itemsPerPage = 1; + filterCopy.currentPage = index + 1; + const singleResult = await queryFindStudios(filterCopy); + if (singleResult.data.findStudios.studios.length === 1) { + const { id } = singleResult.data.findStudios.studios[0]; + // navigate to the studio page + history.push(`/studios/${id}`); + } + }, [history, filter, count]); + + return viewRandom; +} + +function useAddKeybinds(filter: ListFilterModel, count: number) { + const viewRandom = useViewRandom(filter, count); + + useEffect(() => { + Mousetrap.bind("p r", () => { + viewRandom(); + }); + + return () => { + Mousetrap.unbind("p r"); + }; + }, [viewRandom]); +} + +export const FilteredStudioList = PatchComponent( + "FilteredStudioList", + (props: IStudioList) => { const intl = useIntl(); - const history = useHistory(); - const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); - const [isExportAll, setIsExportAll] = useState(false); - const filterMode = GQL.FilterMode.Studios; + const searchFocus = useFocus(); - const otherOperations = [ - ...extraOperations, - { - text: intl.formatMessage({ id: "actions.view_random" }), - onClick: viewRandom, - }, - { - text: intl.formatMessage({ id: "actions.export" }), - onClick: onExport, - isDisplayed: showWhenSelected, - }, - { - text: intl.formatMessage({ id: "actions.export_all" }), - onClick: onExportAll, - }, - ]; + const { filterHook, view, 
alterQuery, extraOperations = [] } = props; - function addKeybinds( - result: GQL.FindStudiosQueryResult, - filter: ListFilterModel - ) { - Mousetrap.bind("p r", () => { - viewRandom(result, filter); + // States + const { + showSidebar, + setShowSidebar, + sectionOpen, + setSectionOpen, + loading: sidebarStateLoading, + } = useSidebarState(view); + + const { filterState, queryResult, modalState, listSelect, showEditFilter } = + useFilteredItemList({ + filterStateProps: { + filterMode: GQL.FilterMode.Studios, + view, + useURL: alterQuery, + }, + queryResultProps: { + useResult: useFindStudios, + getCount: (r) => r.data?.findStudios.count ?? 0, + getItems: (r) => r.data?.findStudios.studios ?? [], + filterHook, + }, + }); + + const { filter, setFilter } = filterState; + + const { effectiveFilter, result, cachedResult, items, totalCount } = + queryResult; + + const { + selectedIds, + selectedItems, + onSelectChange, + onSelectAll, + onSelectNone, + onInvertSelection, + hasSelection, + } = listSelect; + + const { modal, showModal, closeModal } = modalState; + + // Utility hooks + const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ + filter, + setFilter, + }); + + useAddKeybinds(filter, totalCount); + useFilteredSidebarKeybinds({ + showSidebar, + setShowSidebar, + }); + + useEffect(() => { + Mousetrap.bind("e", () => { + if (hasSelection) { + onEdit?.(); + } + }); + + Mousetrap.bind("d d", () => { + if (hasSelection) { + onDelete?.(); + } }); return () => { - Mousetrap.unbind("p r"); + Mousetrap.unbind("e"); + Mousetrap.unbind("d d"); }; - } + }); - async function viewRandom( - result: GQL.FindStudiosQueryResult, - filter: ListFilterModel - ) { - // query for a random studio - if (result.data?.findStudios) { - const { count } = result.data.findStudios; + const onCloseEditDelete = useCloseEditDelete({ + closeModal, + onSelectNone, + result, + }); - const index = Math.floor(Math.random() * count); - const filterCopy = cloneDeep(filter); - 
filterCopy.itemsPerPage = 1; - filterCopy.currentPage = index + 1; - const singleResult = await queryFindStudios(filterCopy); - if (singleResult.data.findStudios.studios.length === 1) { - const { id } = singleResult.data.findStudios.studios[0]; - // navigate to the studio page - history.push(`/studios/${id}`); - } - } - } + const viewRandom = useViewRandom(filter, totalCount); - async function onExport() { - setIsExportAll(false); - setIsExportDialogOpen(true); - } - - async function onExportAll() { - setIsExportAll(true); - setIsExportDialogOpen(true); - } - - function renderContent( - result: GQL.FindStudiosQueryResult, - filter: ListFilterModel, - selectedIds: Set, - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void - ) { - function maybeRenderExportDialog() { - if (isExportDialogOpen) { - return ( - setIsExportDialogOpen(false)} - /> - ); - } - } - - function renderStudios() { - if (!result.data?.findStudios) return; - - if (filter.displayMode === DisplayMode.Grid) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.List) { - return

TODO

; - } - if (filter.displayMode === DisplayMode.Wall) { - return

TODO

; - } - if (filter.displayMode === DisplayMode.Tagger) { - return ; - } - } - - return ( - <> - {maybeRenderExportDialog()} - {renderStudios()} - + function onExport(all: boolean) { + showModal( + closeModal()} + /> ); } - function renderEditDialog( - selectedStudios: GQL.SlimStudioDataFragment[], - onClose: (applied: boolean) => void - ) { - return ; + function onEdit() { + showModal( + + ); } - function renderDeleteDialog( - selectedStudios: GQL.SlimStudioDataFragment[], - onClose: (confirmed: boolean) => void - ) { - return ( + function onDelete() { + showModal( = PatchComponent( ); } + const convertedExtraOperations = extraOperations.map((op) => ({ + text: op.text, + onClick: () => op.onClick(result, filter, selectedIds), + isDisplayed: () => op.isDisplayed?.(result, filter, selectedIds) ?? true, + })); + + const otherOperations = [ + ...convertedExtraOperations, + { + text: intl.formatMessage({ id: "actions.select_all" }), + onClick: () => onSelectAll(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.select_none" }), + onClick: () => onSelectNone(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.invert_selection" }), + onClick: () => onInvertSelection(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.view_random" }), + onClick: viewRandom, + }, + { + text: intl.formatMessage({ id: "actions.export" }), + onClick: () => onExport(false), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export_all" }), + onClick: () => onExport(true), + }, + ]; + + // render + if (sidebarStateLoading) return null; + + const operations = ( + + ); + return ( - - - + {modal} + + + + setShowSidebar(false)}> + setShowSidebar(false)} + count={cachedResult.loading ? 
undefined : totalCount} + focus={searchFocus} + /> + + setShowSidebar(!showSidebar)} + > + + + showEditFilter(c.criterionOption.type)} + onRemoveCriterion={removeCriterion} + onRemoveAll={clearAllCriteria} + /> + +
+ setFilter(filter.changePage(page))} + /> + +
+ + + + + + {totalCount > filter.itemsPerPage && ( +
+
+ +
+
+ )} +
+
+
+
); } ); diff --git a/ui/v2.5/src/components/Studios/Studios.tsx b/ui/v2.5/src/components/Studios/Studios.tsx index 545de936f..956531fe0 100644 --- a/ui/v2.5/src/components/Studios/Studios.tsx +++ b/ui/v2.5/src/components/Studios/Studios.tsx @@ -4,11 +4,11 @@ import { Helmet } from "react-helmet"; import { useTitleProps } from "src/hooks/title"; import Studio from "./StudioDetails/Studio"; import StudioCreate from "./StudioDetails/StudioCreate"; -import { StudioList } from "./StudioList"; +import { FilteredStudioList } from "./StudioList"; import { View } from "../List/views"; const Studios: React.FC = () => { - return ; + return ; }; const StudioRoutes: React.FC = () => { diff --git a/ui/v2.5/src/components/Tagger/PerformerModal.tsx b/ui/v2.5/src/components/Tagger/PerformerModal.tsx index 79f80708a..ac9444c5b 100755 --- a/ui/v2.5/src/components/Tagger/PerformerModal.tsx +++ b/ui/v2.5/src/components/Tagger/PerformerModal.tsx @@ -240,7 +240,8 @@ const PerformerModal: React.FC = ({ height_cm: Number.parseFloat(performer.height ?? "") ?? 
undefined, measurements: performer.measurements, fake_tits: performer.fake_tits, - career_length: performer.career_length, + career_start: performer.career_start, + career_end: performer.career_end, tattoos: performer.tattoos, piercings: performer.piercings, urls: performer.urls, @@ -326,7 +327,8 @@ const PerformerModal: React.FC = ({ {maybeRenderField("measurements", performer.measurements)} {performer?.gender !== GQL.GenderEnum.Male && maybeRenderField("fake_tits", performer.fake_tits)} - {maybeRenderField("career_length", performer.career_length)} + {maybeRenderField("career_start", performer.career_start?.toString())} + {maybeRenderField("career_end", performer.career_end?.toString())} {maybeRenderField("tattoos", performer.tattoos, false)} {maybeRenderField("piercings", performer.piercings, false)} {maybeRenderField("weight", performer.weight, false)} diff --git a/ui/v2.5/src/components/Tagger/constants.ts b/ui/v2.5/src/components/Tagger/constants.ts index d499062aa..d59a6d3d5 100644 --- a/ui/v2.5/src/components/Tagger/constants.ts +++ b/ui/v2.5/src/components/Tagger/constants.ts @@ -75,7 +75,8 @@ export const PERFORMER_FIELDS = [ "fake_tits", "tattoos", "piercings", - "career_length", + "career_start", + "career_end", "urls", "details", ]; diff --git a/ui/v2.5/src/components/Tags/TagDetails/TagEditPanel.tsx b/ui/v2.5/src/components/Tags/TagDetails/TagEditPanel.tsx index 077300788..22c99b80e 100644 --- a/ui/v2.5/src/components/Tags/TagDetails/TagEditPanel.tsx +++ b/ui/v2.5/src/components/Tags/TagDetails/TagEditPanel.tsx @@ -15,7 +15,7 @@ import { useToast } from "src/hooks/Toast"; import { useConfigurationContext } from "src/hooks/Config"; import { handleUnsavedChanges } from "src/utils/navigation"; import { formikUtils } from "src/utils/form"; -import { yupFormikValidate, yupUniqueAliases } from "src/utils/yup"; +import { yupFormikValidate, yupRequiredStringArray } from "src/utils/yup"; import { addUpdateStashID, getStashIDs } from "src/utils/stashIds"; 
import { Tag, TagSelect } from "../TagSelect"; import { Icon } from "src/components/Shared/Icon"; @@ -56,7 +56,7 @@ export const TagEditPanel: React.FC = ({ const schema = yup.object({ name: yup.string().required(), sort_name: yup.string().ensure(), - aliases: yupUniqueAliases(intl, "name"), + aliases: yupRequiredStringArray(intl).defined(), description: yup.string().ensure(), parent_ids: yup.array(yup.string().required()).defined(), child_ids: yup.array(yup.string().required()).defined(), diff --git a/ui/v2.5/src/components/Tags/TagDetails/TagGalleriesPanel.tsx b/ui/v2.5/src/components/Tags/TagDetails/TagGalleriesPanel.tsx index bb95a7ea1..f5df9946b 100644 --- a/ui/v2.5/src/components/Tags/TagDetails/TagGalleriesPanel.tsx +++ b/ui/v2.5/src/components/Tags/TagDetails/TagGalleriesPanel.tsx @@ -1,7 +1,7 @@ import React from "react"; import * as GQL from "src/core/generated-graphql"; import { useTagFilterHook } from "src/core/tags"; -import { GalleryList } from "src/components/Galleries/GalleryList"; +import { FilteredGalleryList } from "src/components/Galleries/GalleryList"; import { View } from "src/components/List/views"; interface ITagGalleriesPanel { @@ -17,7 +17,7 @@ export const TagGalleriesPanel: React.FC = ({ }) => { const filterHook = useTagFilterHook(tag, showSubTagContent); return ( - = ({ active, tag, showSubTagContent }) => { const filterHook = useTagFilterHook(tag, showSubTagContent); - return ; + return ( + + ); }; diff --git a/ui/v2.5/src/components/Tags/TagDetails/TagPerformersPanel.tsx b/ui/v2.5/src/components/Tags/TagDetails/TagPerformersPanel.tsx index 4891c0daf..a512ef5a3 100644 --- a/ui/v2.5/src/components/Tags/TagDetails/TagPerformersPanel.tsx +++ b/ui/v2.5/src/components/Tags/TagDetails/TagPerformersPanel.tsx @@ -1,7 +1,7 @@ import React from "react"; import * as GQL from "src/core/generated-graphql"; import { useTagFilterHook } from "src/core/tags"; -import { PerformerList } from "src/components/Performers/PerformerList"; +import { 
FilteredPerformerList } from "src/components/Performers/PerformerList"; import { View } from "src/components/List/views"; interface ITagPerformersPanel { @@ -17,7 +17,7 @@ export const TagPerformersPanel: React.FC = ({ }) => { const filterHook = useTagFilterHook(tag, showSubTagContent); return ( - = ({ showSubTagContent, }) => { const filterHook = useTagFilterHook(tag, showSubTagContent); - return ; + return ; }; diff --git a/ui/v2.5/src/components/Tags/TagMergeDialog.tsx b/ui/v2.5/src/components/Tags/TagMergeDialog.tsx index 15b648af5..a66ce5789 100644 --- a/ui/v2.5/src/components/Tags/TagMergeDialog.tsx +++ b/ui/v2.5/src/components/Tags/TagMergeDialog.tsx @@ -1,13 +1,412 @@ import { Button, Form, Col, Row } from "react-bootstrap"; -import React, { useEffect, useState } from "react"; +import * as GQL from "src/core/generated-graphql"; +import React, { useCallback, useEffect, useMemo, useState } from "react"; import { Icon } from "../Shared/Icon"; import { ModalComponent } from "src/components/Shared/Modal"; import * as FormUtils from "src/utils/form"; -import { useTagsMerge } from "src/core/StashService"; -import { useIntl } from "react-intl"; +import { queryFindTagsByID, useTagsMerge } from "src/core/StashService"; +import { FormattedMessage, useIntl } from "react-intl"; import { useToast } from "src/hooks/Toast"; import { faExchangeAlt, faSignInAlt } from "@fortawesome/free-solid-svg-icons"; import { Tag, TagSelect } from "./TagSelect"; +import { + CustomFieldScrapeResults, + hasScrapedValues, + ObjectListScrapeResult, + ScrapeResult, +} from "../Shared/ScrapeDialog/scrapeResult"; +import { sortStoredIdObjects } from "src/utils/data"; +import ImageUtils from "src/utils/image"; +import { uniq } from "lodash-es"; +import { LoadingIndicator } from "../Shared/LoadingIndicator"; +import { + ScrapedCustomFieldRows, + ScrapeDialogRow, + ScrapedImageRow, + ScrapedInputGroupRow, + ScrapedStringListRow, + ScrapedTextAreaRow, +} from 
"../Shared/ScrapeDialog/ScrapeDialogRow"; +import { ScrapedTagsRow } from "../Shared/ScrapeDialog/ScrapedObjectsRow"; +import { StringListSelect } from "../Shared/Select"; +import { ScrapeDialog } from "../Shared/ScrapeDialog/ScrapeDialog"; + +interface IStashIDsField { + values: GQL.StashId[]; +} + +const StashIDsField: React.FC = ({ values }) => { + return v.stash_id)} />; +}; + +interface ITagMergeDetailsProps { + sources: GQL.TagDataFragment[]; + dest: GQL.TagDataFragment; + onClose: (values?: GQL.TagUpdateInput) => void; +} + +const TagMergeDetails: React.FC = ({ + sources, + dest, + onClose, +}) => { + const intl = useIntl(); + + const [loading, setLoading] = useState(true); + + const filterCandidates = useCallback( + (t: { stored_id: string }) => + t.stored_id !== dest.id && sources.every((s) => s.id !== t.stored_id), + [dest.id, sources] + ); + + const [name, setName] = useState>( + new ScrapeResult(dest.name) + ); + const [sortName, setSortName] = useState>( + new ScrapeResult(dest.sort_name) + ); + const [aliases, setAliases] = useState>( + new ScrapeResult(dest.aliases) + ); + const [description, setDescription] = useState>( + new ScrapeResult(dest.description) + ); + const [parentTags, setParentTags] = useState< + ObjectListScrapeResult + >( + new ObjectListScrapeResult( + sortStoredIdObjects( + dest.parents.map(idToStoredID).filter(filterCandidates) + ) + ) + ); + const [childTags, setChildTags] = useState< + ObjectListScrapeResult + >( + new ObjectListScrapeResult( + sortStoredIdObjects( + dest.children.map(idToStoredID).filter(filterCandidates) + ) + ) + ); + + const [stashIDs, setStashIDs] = useState(new ScrapeResult([])); + + const [image, setImage] = useState>( + new ScrapeResult(dest.image_path) + ); + + const [customFields, setCustomFields] = useState( + new Map() + ); + + function idToStoredID(o: { id: string; name: string }) { + return { + stored_id: o.id, + name: o.name, + }; + } + + // calculate the values for everything + // uses the first 
set value for single value fields, and combines all + useEffect(() => { + async function loadImages() { + const src = sources.find((s) => s.image_path); + if (!dest.image_path || !src) return; + + setLoading(true); + + const destData = await ImageUtils.imageToDataURL(dest.image_path); + const srcData = await ImageUtils.imageToDataURL(src.image_path!); + + // keep destination image by default + const useNewValue = false; + setImage(new ScrapeResult(destData, srcData, useNewValue)); + + setLoading(false); + } + + // append dest to all so that if dest has stash_ids with the same + // endpoint, then it will be excluded first + const all = sources.concat(dest); + + setName( + new ScrapeResult(dest.name, sources.find((s) => s.name)?.name, !dest.name) + ); + setSortName( + new ScrapeResult( + dest.sort_name, + sources.find((s) => s.sort_name)?.sort_name, + !dest.sort_name + ) + ); + + setDescription( + new ScrapeResult( + dest.description, + sources.find((s) => s.description)?.description, + !dest.description + ) + ); + + // default alias list should be the existing aliases, plus the names of all sources, + // plus all source aliases, deduplicated + const allAliases = uniq( + dest.aliases.concat( + sources.map((s) => s.name), + sources.flatMap((s) => s.aliases) + ) + ); + setAliases(new ScrapeResult(dest.aliases, allAliases, !!allAliases.length)); + + // default parent/child tags should be the existing tags, plus all source parent/child tags, deduplicated + const allParentTags = uniq(all.flatMap((s) => s.parents)) + .map(idToStoredID) + .filter(filterCandidates); // exclude self and sources + + setParentTags( + new ObjectListScrapeResult( + sortStoredIdObjects(dest.parents.map(idToStoredID)), + sortStoredIdObjects(allParentTags), + !!allParentTags.length + ) + ); + + const allChildTags = uniq(all.flatMap((s) => s.children)) + .map(idToStoredID) + .filter(filterCandidates); // exclude self and sources + + setChildTags( + new ObjectListScrapeResult( + sortStoredIdObjects( + 
dest.children.map(idToStoredID).filter(filterCandidates) + ), + sortStoredIdObjects(allChildTags), + !!allChildTags.length + ) + ); + + setStashIDs( + new ScrapeResult( + dest.stash_ids, + all + .map((s) => s.stash_ids) + .flat() + .filter((s, index, a) => { + // remove entries with duplicate endpoints + return index === a.findIndex((ss) => ss.endpoint === s.endpoint); + }) + ) + ); + + setImage( + new ScrapeResult( + dest.image_path, + sources.find((s) => s.image_path)?.image_path, + !dest.image_path + ) + ); + + const customFieldNames = new Set(Object.keys(dest.custom_fields)); + + for (const s of sources) { + for (const n of Object.keys(s.custom_fields)) { + customFieldNames.add(n); + } + } + + setCustomFields( + new Map( + Array.from(customFieldNames) + .sort() + .map((field) => { + return [ + field, + new ScrapeResult( + dest.custom_fields?.[field], + sources.find((s) => s.custom_fields?.[field])?.custom_fields?.[ + field + ], + dest.custom_fields?.[field] === undefined + ), + ]; + }) + ) + ); + + loadImages(); + }, [sources, dest, filterCandidates]); + + const hasCustomFieldValues = useMemo(() => { + return hasScrapedValues(Array.from(customFields.values())); + }, [customFields]); + + // ensure this is updated if fields are changed + const hasValues = useMemo(() => { + return ( + hasCustomFieldValues || + hasScrapedValues([ + name, + sortName, + aliases, + description, + parentTags, + childTags, + stashIDs, + image, + ]) + ); + }, [ + name, + sortName, + aliases, + description, + parentTags, + childTags, + stashIDs, + image, + hasCustomFieldValues, + ]); + + function renderScrapeRows() { + if (loading) { + return ( +
+ +
+ ); + } + + if (!hasValues) { + return ( +
+ +
+ ); + } + + return ( + <> + setName(value)} + /> + setSortName(value)} + /> + setAliases(value)} + /> + setParentTags(value)} + /> + setChildTags(value)} + /> + setDescription(value)} + /> + + } + newField={} + onChange={(value) => setStashIDs(value)} + /> + setImage(value)} + /> + {hasCustomFieldValues && ( + setCustomFields(newCustomFields)} + /> + )} + + ); + } + + function createValues(): GQL.TagUpdateInput { + // only set the cover image if it's different from the existing cover image + const coverImage = image.useNewValue ? image.getNewValue() : undefined; + + return { + id: dest.id, + name: name.getNewValue(), + sort_name: sortName.getNewValue(), + aliases: aliases + .getNewValue() + ?.map((s) => s.trim()) + .filter((s) => s.length > 0), + parent_ids: parentTags.getNewValue()?.map((t) => t.stored_id!), + child_ids: childTags.getNewValue()?.map((t) => t.stored_id!), + description: description.getNewValue(), + stash_ids: stashIDs.getNewValue(), + image: coverImage, + custom_fields: { + partial: Object.fromEntries( + Array.from(customFields.entries()).flatMap(([field, v]) => + v.useNewValue ? [[field, v.getNewValue()]] : [] + ) + ), + }, + }; + } + + const dialogTitle = intl.formatMessage({ + id: "actions.merge", + }); + + const destinationLabel = !hasValues + ? "" + : intl.formatMessage({ id: "dialogs.merge.destination" }); + const sourceLabel = !hasValues + ? 
"" + : intl.formatMessage({ id: "dialogs.merge.source" }); + + return ( + { + if (!apply) { + onClose(); + } else { + onClose(createValues()); + } + }} + > + {renderScrapeRows()} + + ); +}; interface ITagMergeModalProps { show: boolean; @@ -23,6 +422,11 @@ export const TagMergeModal: React.FC = ({ const [src, setSrc] = useState([]); const [dest, setDest] = useState(null); + const [loadedSources, setLoadedSources] = useState([]); + const [loadedDest, setLoadedDest] = useState(); + + const [secondStep, setSecondStep] = useState(false); + const [running, setRunning] = useState(false); const [mergeTags] = useTagsMerge(); @@ -41,7 +445,23 @@ export const TagMergeModal: React.FC = ({ } }, [tags]); - async function onMerge() { + async function loadTags() { + try { + const tagIDs = src.map((s) => s.id); + tagIDs.push(dest!.id); + const query = await queryFindTagsByID(tagIDs); + const { tags: loadedTags } = query.data.findTags; + + setLoadedDest(loadedTags.find((s) => s.id === dest!.id)); + setLoadedSources(loadedTags.filter((s) => s.id !== dest!.id)); + setSecondStep(true); + } catch (e) { + Toast.error(e); + return; + } + } + + async function onMerge(values: GQL.TagUpdateInput) { if (!dest) return; const source = src.map((s) => s.id); @@ -53,6 +473,7 @@ export const TagMergeModal: React.FC = ({ variables: { source, destination, + values, }, }); if (result.data?.tagsMerge) { @@ -78,6 +499,23 @@ export const TagMergeModal: React.FC = ({ } } + if (secondStep && dest) { + return ( + { + setSecondStep(false); + if (values) { + onMerge(values); + } else { + onClose(); + } + }} + /> + ); + } + return ( = ({ icon={faSignInAlt} accept={{ text: intl.formatMessage({ id: "actions.merge" }), - onClick: () => onMerge(), + onClick: () => loadTags(), }} disabled={!canMerge()} cancel={{ diff --git a/ui/v2.5/src/components/TroubleshootingMode/TroubleshootingModeButton.tsx b/ui/v2.5/src/components/TroubleshootingMode/TroubleshootingModeButton.tsx new file mode 100644 index 
000000000..164774446 --- /dev/null +++ b/ui/v2.5/src/components/TroubleshootingMode/TroubleshootingModeButton.tsx @@ -0,0 +1,67 @@ +import React, { useState } from "react"; +import { Button } from "react-bootstrap"; +import { FormattedMessage, useIntl } from "react-intl"; +import { faBug } from "@fortawesome/free-solid-svg-icons"; +import { ModalComponent } from "src/components/Shared/Modal"; +import { useTroubleshootingMode } from "./useTroubleshootingMode"; + +const DIALOG_ITEMS = [ + "config.ui.troubleshooting_mode.dialog_item_plugins", + "config.ui.troubleshooting_mode.dialog_item_css", + "config.ui.troubleshooting_mode.dialog_item_js", + "config.ui.troubleshooting_mode.dialog_item_locales", +] as const; + +export const TroubleshootingModeButton: React.FC = () => { + const intl = useIntl(); + const [showDialog, setShowDialog] = useState(false); + const { enable, isLoading } = useTroubleshootingMode(); + + return ( + <> +
+ +
+ + setShowDialog(false)} + header={intl.formatMessage({ + id: "config.ui.troubleshooting_mode.dialog_title", + })} + icon={faBug} + accept={{ + text: intl.formatMessage({ + id: "config.ui.troubleshooting_mode.enable", + }), + variant: "primary", + onClick: enable, + }} + cancel={{ + onClick: () => setShowDialog(false), + variant: "secondary", + }} + isRunning={isLoading} + > +

+ +

+
    + {DIALOG_ITEMS.map((id) => ( +
  • + +
  • + ))} +
+

+ +

+

+ +

+
+ + ); +}; diff --git a/ui/v2.5/src/components/TroubleshootingMode/TroubleshootingModeOverlay.tsx b/ui/v2.5/src/components/TroubleshootingMode/TroubleshootingModeOverlay.tsx new file mode 100644 index 000000000..bf2b38f8a --- /dev/null +++ b/ui/v2.5/src/components/TroubleshootingMode/TroubleshootingModeOverlay.tsx @@ -0,0 +1,28 @@ +import React from "react"; +import { Button } from "react-bootstrap"; +import { FormattedMessage } from "react-intl"; +import { faBug } from "@fortawesome/free-solid-svg-icons"; +import { Icon } from "src/components/Shared/Icon"; +import { useTroubleshootingMode } from "./useTroubleshootingMode"; + +export const TroubleshootingModeOverlay: React.FC = () => { + const { isActive, isLoading, disable } = useTroubleshootingMode(); + + if (!isActive) { + return null; + } + + return ( +
+
+ + + + + +
+
+ ); +}; diff --git a/ui/v2.5/src/components/TroubleshootingMode/useTroubleshootingMode.ts b/ui/v2.5/src/components/TroubleshootingMode/useTroubleshootingMode.ts new file mode 100644 index 000000000..63b4edd4f --- /dev/null +++ b/ui/v2.5/src/components/TroubleshootingMode/useTroubleshootingMode.ts @@ -0,0 +1,83 @@ +import { useState, useRef, useEffect } from "react"; +import { + useConfigureInterface, + useConfigureGeneral, + useConfiguration, +} from "src/core/StashService"; + +const ORIGINAL_LOG_LEVEL_KEY = "troubleshootingMode_originalLogLevel"; + +export function useTroubleshootingMode() { + const [isLoading, setIsLoading] = useState(false); + const isMounted = useRef(true); + + const { data: config } = useConfiguration(); + const [configureInterface] = useConfigureInterface(); + const [configureGeneral] = useConfigureGeneral(); + + const isActive = + config?.configuration?.interface?.disableCustomizations ?? false; + const currentLogLevel = config?.configuration?.general?.logLevel || "Info"; + + useEffect(() => { + return () => { + isMounted.current = false; + }; + }, []); + + async function enable() { + setIsLoading(true); + try { + // Store original log level for restoration later + localStorage.setItem(ORIGINAL_LOG_LEVEL_KEY, currentLogLevel); + + // Enable troubleshooting mode and set log level to Debug + await Promise.all([ + configureInterface({ + variables: { input: { disableCustomizations: true } }, + }), + configureGeneral({ + variables: { input: { logLevel: "Debug" } }, + }), + ]); + + window.location.reload(); + } catch (e) { + if (isMounted.current) { + setIsLoading(false); + } + throw e; + } + } + + async function disable() { + setIsLoading(true); + try { + // Restore original log level + const originalLogLevel = + localStorage.getItem(ORIGINAL_LOG_LEVEL_KEY) || "Info"; + + // Disable troubleshooting mode and restore log level + await Promise.all([ + configureInterface({ + variables: { input: { disableCustomizations: false } }, + }), + 
configureGeneral({ + variables: { input: { logLevel: originalLogLevel } }, + }), + ]); + + // Clean up localStorage + localStorage.removeItem(ORIGINAL_LOG_LEVEL_KEY); + + window.location.reload(); + } catch (e) { + if (isMounted.current) { + setIsLoading(false); + } + throw e; + } + } + + return { isActive, isLoading, enable, disable }; +} diff --git a/ui/v2.5/src/core/StashService.ts b/ui/v2.5/src/core/StashService.ts index 6aaf17125..58b1aae42 100644 --- a/ui/v2.5/src/core/StashService.ts +++ b/ui/v2.5/src/core/StashService.ts @@ -472,6 +472,14 @@ export const queryFindTagsForList = (filter: ListFilterModel) => }, }); +export const queryFindTagsByID = (tagIDs: string[]) => + client.query({ + query: GQL.FindTagsDocument, + variables: { + ids: tagIDs, + }, + }); + export const queryFindTagsByIDForSelect = (tagIDs: string[]) => client.query({ query: GQL.FindTagsForSelectDocument, diff --git a/ui/v2.5/src/core/performers.ts b/ui/v2.5/src/core/performers.ts index 9712c9824..016e9e13f 100644 --- a/ui/v2.5/src/core/performers.ts +++ b/ui/v2.5/src/core/performers.ts @@ -104,7 +104,10 @@ export const scrapedPerformerToCreateInput = ( height_cm: toCreate.height ? Number(toCreate.height) : undefined, measurements: toCreate.measurements, fake_tits: toCreate.fake_tits, - career_length: toCreate.career_length, + career_start: toCreate.career_start + ? Number(toCreate.career_start) + : undefined, + career_end: toCreate.career_end ? 
Number(toCreate.career_end) : undefined, tattoos: toCreate.tattoos, piercings: toCreate.piercings, alias_list: aliases, diff --git a/ui/v2.5/src/core/recommendations.ts b/ui/v2.5/src/core/recommendations.ts index b0a1232e4..7c55fed9d 100644 --- a/ui/v2.5/src/core/recommendations.ts +++ b/ui/v2.5/src/core/recommendations.ts @@ -16,7 +16,7 @@ export function getSlickSliderSettings(cardCount: number, isTouch: boolean) { return { dots: !isTouch, arrows: !isTouch, - infinite: !isTouch, + infinite: !isTouch && cardCount > 5, speed: 300, variableWidth: true, swipeToSlide: true, @@ -26,6 +26,7 @@ export function getSlickSliderSettings(cardCount: number, isTouch: boolean) { { breakpoint: 1909, settings: { + infinite: !isTouch && cardCount > 4, slidesToShow: cardCount! > 4 ? 4 : cardCount, slidesToScroll: determineSlidesToScroll(cardCount!, 4, isTouch), }, @@ -33,6 +34,7 @@ export function getSlickSliderSettings(cardCount: number, isTouch: boolean) { { breakpoint: 1542, settings: { + infinite: !isTouch && cardCount > 3, slidesToShow: cardCount! > 3 ? 3 : cardCount, slidesToScroll: determineSlidesToScroll(cardCount!, 3, isTouch), }, @@ -40,6 +42,7 @@ export function getSlickSliderSettings(cardCount: number, isTouch: boolean) { { breakpoint: 1170, settings: { + infinite: !isTouch && cardCount > 2, slidesToShow: cardCount! > 2 ? 
2 : cardCount, slidesToScroll: determineSlidesToScroll(cardCount!, 2, isTouch), }, @@ -47,9 +50,10 @@ export function getSlickSliderSettings(cardCount: number, isTouch: boolean) { { breakpoint: 801, settings: { + infinite: !isTouch && cardCount > 1, slidesToShow: 1, slidesToScroll: 1, - dots: false, + dots: cardCount < 6, }, }, ], diff --git a/ui/v2.5/src/docs/en/Manual/AutoTagging.md b/ui/v2.5/src/docs/en/Manual/AutoTagging.md index 4b1cbb813..c3ef00971 100644 --- a/ui/v2.5/src/docs/en/Manual/AutoTagging.md +++ b/ui/v2.5/src/docs/en/Manual/AutoTagging.md @@ -1,16 +1,16 @@ # Auto Tag -Auto Tag automatically assigns Performers, Studios, and Tags to your media based on their names found in file paths or filenames. This task works for scenes, images, and galleries. +Auto tag automatically assigns Performers, Studios, and Tags to your media based on their names found in file paths or filenames. This task works for scenes, images, and galleries. This task is part of the advanced settings mode. ## Rules -> **Important:** Auto Tag only works for names that already exist in your Stash database. It does not create new Performers, Studios, or Tags. +> **⚠️ Important:** Auto tag only works for names that already exist in your Stash database. It does not create new Performers, Studios, or Tags. - Multi-word names are matched when words appear in order and are separated by any of these characters: `.`, `-`, `_`, or whitespace. These separators are treated as word boundaries. - Matching is case-insensitive but requires complete words within word boundaries. Partial words or misspelled words will not match. - - Auto Tag does not match performer aliases. Aliases will not be considered during matching. + - Auto tag does not match performer aliases. Aliases will not be considered during matching. ### Examples (performer "Jane Doe") @@ -35,14 +35,16 @@ This task is part of the advanced settings mode. 
### Organized flag -Scenes, images, and galleries that have the Organized flag added to them will not be modified by Auto Tag. You can also use Organized flag status as a filter. +Scenes, images, and galleries that have the Organized flag added to them will not be modified by Auto tag. You can also use Organized flag status as a filter. -### Ignore Auto Tag flag +Studios also support the Organized flag, however it is purely informational. It serves as a front-end indicator for the user to mark that a studio's collection is complete and does not affect Auto tag behavior. The Ignore Auto tag flag should be used to exclude a studio from Auto tag. -Performers or Tags that have Ignore Auto Tag flag added to them will be skipped by the Auto Tag task. +### Ignore Auto tag flag + +Performers or Tags that have Ignore Auto tag flag added to them will be skipped by the Auto tag task. ## Running task -- **Auto Tag:** You can run the Auto Tag task on your entire library from the Tasks page. -- **Selective Auto Tag:** You can run the Auto Tag task on specific directories from the Tasks page. -- **Individual pages:** You can run Auto Tag tasks for specific Performers, Studios, and Tags from their respective pages. +- **Auto tag:** You can run the Auto tag task on your entire library from the Tasks page. +- **Selective auto tag:** You can run the Auto tag task on specific directories from the Tasks page. +- **Individual pages:** You can run Auto tag tasks for specific Performers, Studios, and Tags from their respective pages. diff --git a/ui/v2.5/src/docs/en/Manual/Captions.md b/ui/v2.5/src/docs/en/Manual/Captions.md index df2bee8bc..a575f915b 100644 --- a/ui/v2.5/src/docs/en/Manual/Captions.md +++ b/ui/v2.5/src/docs/en/Manual/Captions.md @@ -15,4 +15,4 @@ Where `{language_code}` is defined by the [ISO-6399-1](https://en.wikipedia.org/ Scenes with captions can be filtered with the `captions` criterion. 
-**Note:** If the caption file was added after the scene was initially added during scan, you will need to run a Selective Scan task for it to show up. +> **⚠️ Note:** If the caption file was added after the scene was initially added during scan, you will need to run a Selective scan task for it to show up. diff --git a/ui/v2.5/src/docs/en/Manual/Configuration.md b/ui/v2.5/src/docs/en/Manual/Configuration.md index 76464facf..2d08f9750 100644 --- a/ui/v2.5/src/docs/en/Manual/Configuration.md +++ b/ui/v2.5/src/docs/en/Manual/Configuration.md @@ -31,7 +31,7 @@ Some examples: - `"^/stash/videos/exclude/"` will exclude all directories that match `/stash/videos/exclude/` pattern. - `"\\\\stash\\network\\share\\excl\\"` will exclude specific Windows network path `\\stash\network\share\excl\`. -> **Note:** If a directory is excluded for images and videos, then the directory will be excluded from scans completely. +> **⚠️ Note:** If a directory is excluded for images and videos, then the directory will be excluded from scans completely. _There is a useful [regex101](https://regex101.com/) site that can help test and experiment with regexps._ @@ -87,7 +87,37 @@ This setting can be used to increase/decrease overall CPU utilisation in two sce 1. High performance 4+ core cpus. 2. Media files stored on remote/cloud filesystem. -Note: If this is set too high it will decrease overall performance and causes failures (out of memory). +> **⚠️ Note:** If this is set too high it will decrease overall performance and causes failures (out of memory). + +## Sprite generation + +### Sprite size + +Fixed size of a generated sprite, being the longest dimension in pixels. +Setting this to `0` will fallback to the default of `160`. +Althought it is possible to set this value to anything bigger than `0` it is recommended to set it to `160` at least. 
+ +### Use custom sprite generation + +If this setting is disabled, the settings below will be ignored and the default sprite generation settings are used. + +### Sprite interval + +This represents the time in seconds between each sprite to be generated. This value will be adjusted if necessary to fit within the bounds of the `Minimum Sprites` and `Maximum Sprites` settings. + +Setting this to `0` means that the sprite interval will be calculated based on the value of the `Minimum Sprites` field. + +### Minimum sprites + +The minimal number of distinct sprites that will be generated for a scene. `Sprite interval` will be adjusted if necessary. +Setting this to `0` will fallback to the default of `10` + +### Maximum sprites + +The maximum number of distinct sprites that will be generated for a scene. `Sprite interval` will be adjusted if necessary. +Setting this to `0` indicates there is no maximum. + +> **⚠️ Note:** The number of generated sprites is adjusted upwards to the next perfect square to ensure the sprite image is completely filled (no empty space in the grid) and the grid is as square as possible (minimizing the number of rows/columns). This means that if you set a minimum of 10 sprites, 16 will actually be generated, and if you set a maximum of 15 sprites, 16 will actually be generated. ## Hardware accelerated live transcoding @@ -117,7 +147,7 @@ Some scrapers require a Chrome instance to function correctly. If left empty, st `Chrome CDP path` can be set to a path to the chrome executable, or an http(s) address to remote chrome instance (for example: `http://localhost:9222/json/version`). -> **Important**: As of Chrome 136 you need to specify `--user-data-dir` alongside `--remote-debugging-port`. Read more on their [official post](https://developer.chrome.com/blog/remote-debugging-port). +> **⚠️ Important:** As of Chrome 136 you need to specify `--user-data-dir` alongside `--remote-debugging-port`. 
Read more on their [official post](https://developer.chrome.com/blog/remote-debugging-port). ## Authentication diff --git a/ui/v2.5/src/docs/en/Manual/Deduplication.md b/ui/v2.5/src/docs/en/Manual/Deduplication.md index 24c0fb391..d842fcc68 100644 --- a/ui/v2.5/src/docs/en/Manual/Deduplication.md +++ b/ui/v2.5/src/docs/en/Manual/Deduplication.md @@ -2,8 +2,10 @@ [The dupe checker](/sceneDuplicateChecker) searches your collection for scenes that are perceptually similar. This means that the files don't need to be identical, and will be identified even with different bitrates, resolutions, and intros/outros. -To achieve this stash needs to generate what's called a phash, or perceptual hash. Similar to sprite generation stash will generate a set of 25 images from fixed points in the scene. These images will be stitched together, and then hashed using the phash algorithm. The phash can then be used to find scenes that are the same or similar to others in the database. Phash generation can be run during scan, or as a separate task. Note that generation can take a while due to the work involved with extracting screenshots. +To achieve this stash needs to generate what's called a phash, or perceptual hash. Similar to sprite generation stash will generate a set of 25 images from fixed points in the scene. These images will be stitched together, and then hashed using the phash algorithm. The phash can then be used to find scenes that are the same or similar to others in the database. Phash generation can be run during scan, or as a separate task. + +> **⚠️ Note:** Generation can take a while due to the work involved with extracting screenshots. The dupe checker can be run with four different levels of accuracy. `Exact` looks for scenes that have exactly the same phash. This is a fast and accurate operation that should not yield any false positives except in very rare cases. The other accuracy levels look for duplicate files within a set distance of each other. 
This means the scenes don't have exactly the same phash, but are very similar. `High` and `Medium` should still yield very good results with few or no false positives. `Low` is likely to produce some false positives, but might still be useful for finding dupes. -Note that to generate a phash stash requires an uncorrupted file. If any errors are encountered during sprite generation the phash will not be generated. This is to prevent false positives. +> **⚠️ Note:** To generate a pHash Stash requires an uncorrupted file. If any errors are encountered during sprite generation the pHash will not be generated. This is to prevent false positives. \ No newline at end of file diff --git a/ui/v2.5/src/docs/en/Manual/EmbeddedPlugins.md b/ui/v2.5/src/docs/en/Manual/EmbeddedPlugins.md index 1fc217ffc..9d54010e6 100644 --- a/ui/v2.5/src/docs/en/Manual/EmbeddedPlugins.md +++ b/ui/v2.5/src/docs/en/Manual/EmbeddedPlugins.md @@ -10,7 +10,9 @@ Stash currently supports Javascript embedded plugin tasks using [goja](https://g ### Plugin input -The input is provided to Javascript plugin tasks using the `input` global variable, and is an object based on the structure provided in the `Plugin input` section of the [Plugins](/help/Plugins.md) page. Note that the `server_connection` field should not be necessary in most embedded plugins. +The input is provided to Javascript plugin tasks using the `input` global variable, and is an object based on the structure provided in the `Plugin input` section of the [Plugins](/help/Plugins.md) page. + +> **⚠️ Note:** The `server_connection` field should not be necessary in most embedded plugins. 
### Plugin output diff --git a/ui/v2.5/src/docs/en/Manual/Identify.md b/ui/v2.5/src/docs/en/Manual/Identify.md index 724a392a3..9407ac9d9 100644 --- a/ui/v2.5/src/docs/en/Manual/Identify.md +++ b/ui/v2.5/src/docs/en/Manual/Identify.md @@ -20,7 +20,7 @@ The following options can be configured: | Option | Description | |--------|-------------| -| Include male performers | If false, male performers will not be created or set on scenes. | +| Performer genders | Filter which performer genders are included during identification. If no genders are selected, all performers are included regardless of gender. | | Set cover images | If false, scene cover images will not be modified. | | Set organized flag | If true, the organized flag is set to true when a scene is organized. | | Skip matches that have more than one result | If this is not enabled and more than one result is returned, one will be randomly chosen to match | diff --git a/ui/v2.5/src/docs/en/Manual/Images.md b/ui/v2.5/src/docs/en/Manual/Images.md index ede9b3457..5be7beba5 100644 --- a/ui/v2.5/src/docs/en/Manual/Images.md +++ b/ui/v2.5/src/docs/en/Manual/Images.md @@ -11,7 +11,7 @@ You can add images to every gallery manually in the gallery detail page. Deletin For best results, images in zip file should be stored without compression (copy, store or no compression options depending on the software you use. Eg on linux: `zip -0 -r gallery.zip foldertozip/`). This impacts **heavily** on the zip read performance. -> **:warning: Note:** AVIF files in ZIP archives are currently unsupported. +> **⚠️ Note:** AVIF files in ZIP archives are currently unsupported. If a filename of an image in the gallery zip file ends with `cover.jpg`, it will be treated like a cover and presented first in the gallery view page and as a gallery cover in the gallery list view. If more than one images match the name the first one found in natural sort order is selected. 
@@ -21,11 +21,11 @@ You can also manually select any image from a gallery as its cover. On the galle Images can also be clips/gifs. These are meant to be short video loops. Right now they are not possible in zipfiles. To declare video files to be images, there are two ways: -1. Deactivate video scanning for all libraries that contain clips/gifs, but keep image scanning active. Set the **Scan Video Extensions as Image Clip** option in the library section of your settings. -2. Make sure none of the file endings used by your clips/gifs are present in the **Video Extensions** and add them to the **Image Extensions** in the library section of your settings. +1. Deactivate video scanning for all libraries that contain clips/gifs, but keep image scanning active. Set the **Scan video extensions as image clips** option in the library section of your settings. +2. Make sure none of the file endings used by your clips/gifs are present in the **Video extensions** and add them to the **Image extensions** in the library section of your settings. A clip/gif will be a stillframe in the wall and grid view by default. To view the loop, you can go into the Lightbox Carousel (e.g. by clicking on an image in the wall view) or the image detail page. If you want the loop to be used as a preview on the wall and grid view, you will have to generate them. -You can do this as you scan for the new clip file by activating **Generate previews for image clips** on the scan settings, or do it after by going to the **Generated Content** section in the task section of your settings, activating **Image Clip Previews** and clicking generate. This takes a while, as the files are transcoded. +You can do this as you scan for the new clip file by activating **Generate previews for image clips** on the scan settings, or do it after by going to the **Generated Content** section in the task section of your settings, activating **Image clip previews** and clicking generate. 
This takes a while, as the files are transcoded. diff --git a/ui/v2.5/src/docs/en/Manual/Interactive.md b/ui/v2.5/src/docs/en/Manual/Interactive.md index 831109aab..ab12381dc 100644 --- a/ui/v2.5/src/docs/en/Manual/Interactive.md +++ b/ui/v2.5/src/docs/en/Manual/Interactive.md @@ -1,8 +1,8 @@ # Interactivity -Stash currently supports syncing with Handy devices, using funscript files. +Stash currently supports syncing with The Handy devices, using funscript files. -In order for stash to connect to your Handy device, the Handy Connection Key must be entered in Settings -> Interface. +In order for stash to connect to your Handy device, the Handy connection key must be entered in Settings -> Interface. Funscript files must be in the same directory as the matching video file and must have the same base name. For example, a funscript file for `video.mp4` must be named `video.funscript`. A scan must be run to update scenes with matching funscript files. diff --git a/ui/v2.5/src/docs/en/Manual/Interface.md b/ui/v2.5/src/docs/en/Manual/Interface.md index cf5911405..951fb3323 100644 --- a/ui/v2.5/src/docs/en/Manual/Interface.md +++ b/ui/v2.5/src/docs/en/Manual/Interface.md @@ -4,20 +4,20 @@ Setting the language affects the formatting of numbers and dates. -## SFW Content Mode +## SFW content mode -SFW Content Mode is used to indicate that the content being managed is _not_ adult content. +SFW content mode is used to indicate that the content being managed is _not_ adult content. -When SFW Content Mode is enabled, the following changes are made to the UI: +When SFW content mode is enabled, the following changes are made to the UI: - default performer images are changed to less adult-oriented images - certain adult-specific metadata fields are hidden (e.g. performer genital fields) - `O`-Counter is replaced with `Like`-counter -## Scene/Marker Wall Preview Type +## Scene/Marker Wall Preview type The Scene Wall and Marker pages display scene preview videos (mp4) by default. 
This can be changed to animated image (webp) or static image. -> **⚠️ Note:** scene/marker preview videos must be generated to see them in the applicable wall page if Video preview type is selected. Likewise, if Animated Image is selected, then Image Previews must be generated. +> **⚠️ Note:** scene/marker preview videos must be generated to see them in the applicable wall page if Video preview type is selected. Likewise, if Animated image is selected, then Image Previews must be generated. ## Show Studios as text @@ -33,25 +33,25 @@ The maximum loop duration option allows looping of shorter videos. Set this valu The "Track Activity" option allows tracking of scene play count and duration, and sets the resume point when a scene video is not finished. -The "Minimum Play Percent" gives the minimum proportion of a video that must be played before the play count of the scene is incremented. +The "Minimum play percent" gives the minimum proportion of a video that must be played before the play count of the scene is incremented. By default, when a scene has a resume point, the scene player will automatically seek to this point when the scene is played. Setting "Always start video from beginning" to true disables this behaviour. ## Custom CSS -The stash UI can be customised using custom CSS. See [here](https://docs.stashapp.cc/themes/custom-css-snippets/) for a community-curated set of CSS snippets to customise your UI. +The stash UI can be customised using custom CSS. See [here](https://discourse.stashapp.cc/t/custom-css-snippets/4043) for a community-curated set of CSS snippets to customise your UI. -There is also a [collection of community-created themes](https://docs.stashapp.cc/themes/list/#browse-themes) available. +There is also a [collection of community-created themes](https://discourse.stashapp.cc/tags/c/plugins/18/all/theme) available. 
-## Custom Javascript +## Custom JavaScript -Stash supports the injection of custom javascript to assist with theming or adding additional functionality. Be aware that bad Javascript could break the UI or worse. +Stash supports the injection of custom JavaScript to assist with theming or adding additional functionality. Be aware that bad JavaScript could break the UI or worse. ## Custom Locales The localisation strings can be customised. The master list of default (en-GB) locale strings can be found [here](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/locales/en-GB.json). The custom locale format is the same as this json file. -For example, to override the `actions.add_directory` label (which is `Add Directory` by default), you would have the following in the custom locale: +For example, to override the `actions.add_directory` label (which is `Add directory` by default), you would have the following in the custom locale: ``` { diff --git a/ui/v2.5/src/docs/en/Manual/Introduction.md b/ui/v2.5/src/docs/en/Manual/Introduction.md index 1496ad2b1..f32b84681 100644 --- a/ui/v2.5/src/docs/en/Manual/Introduction.md +++ b/ui/v2.5/src/docs/en/Manual/Introduction.md @@ -2,6 +2,8 @@ Stash works by cataloging your media using the paths that you provide. Once you have [configured](/settings?tab=library) the locations where your media is stored, you can click the Scan button in [`Settings -> Tasks`](/settings?tab=tasks) and stash will begin scanning and importing your media into its library. -For the best experience, it is recommended that after a scan is finished, that video previews and sprites are generated. You can do this in [`Settings -> Tasks`](/settings?tab=tasks). Note that currently it is only possible to perform one task at a time and but there is a task queue, so the generate tasks should be performed after scan is complete. +For the best experience, it is recommended that after a scan is finished, that video previews and sprites are generated. 
You can do this in [`Settings -> Tasks`](/settings?tab=tasks). + +> **⚠️ Note:** Currently it is only possible to perform one task at a time but there is a task queue, so the generate tasks should be performed after scan is complete. Once your media is imported, you are ready to begin creating Performers, Studios and Tags, and curating your content! \ No newline at end of file diff --git a/ui/v2.5/src/docs/en/Manual/JSONSpec.md b/ui/v2.5/src/docs/en/Manual/JSONSpec.md index 0a53d09f2..b071f26cc 100644 --- a/ui/v2.5/src/docs/en/Manual/JSONSpec.md +++ b/ui/v2.5/src/docs/en/Manual/JSONSpec.md @@ -24,7 +24,7 @@ When exported, files are named with different formats depending on the object ty | Studios | `.json` | | Groups | `.json` | -Note that the file naming is not significant when importing. All json files will be read from the subdirectories. +> **⚠️ Note:** The file naming is not significant when importing. All json files will be read from the subdirectories. ## Content of the json files diff --git a/ui/v2.5/src/docs/en/Manual/Plugins.md b/ui/v2.5/src/docs/en/Manual/Plugins.md index cd24e0d4a..5e403af92 100644 --- a/ui/v2.5/src/docs/en/Manual/Plugins.md +++ b/ui/v2.5/src/docs/en/Manual/Plugins.md @@ -240,7 +240,7 @@ hooks: argKey: argValue ``` -**Note:** it is possible for hooks to trigger eachother or themselves if they perform mutations. For safety, hooks will not be triggered if they have already been triggered in the context of the operation. Stash uses cookies to track this context, so it's important for plugins to send cookies when performing operations. +**⚠️ Note:** It is possible for hooks to trigger each other or themselves if they perform mutations. For safety, hooks will not be triggered if they have already been triggered in the context of the operation. Stash uses cookies to track this context, so it's important for plugins to send cookies when performing operations. 
#### Trigger types diff --git a/ui/v2.5/src/docs/en/Manual/ScraperDevelopment.md b/ui/v2.5/src/docs/en/Manual/ScraperDevelopment.md index 1f52028f8..4c97e3fcf 100644 --- a/ui/v2.5/src/docs/en/Manual/ScraperDevelopment.md +++ b/ui/v2.5/src/docs/en/Manual/ScraperDevelopment.md @@ -375,7 +375,7 @@ scene: selector: //div[@data-host="{inputHostname}"]//span[@class="site-name"] ``` -> **Note:** These placeholders represent the actual URL used to fetch the content, after any URL replacements have been applied. +> **⚠️ Note:** These placeholders represent the actual URL used to fetch the content, after any URL replacements have been applied. ### Common fragments @@ -391,6 +391,7 @@ performer: The `Measurements` xpath string will replace `$infoPiece` with `//div[@class="infoPiece"]/span`, resulting in: `//div[@class="infoPiece"]/span[text() = 'Measurements:']/../span[@class="smallInfo"]`. > **⚠️ Note:** Recursive common fragments are **not** supported. + Referencing a common fragment within another common fragment will cause an error. For example: ```yaml common: @@ -881,7 +882,7 @@ Title URLs ``` -> **Important**: `Title` field is required. +> **⚠️ Important:** `Title` field is required. ### Group @@ -900,7 +901,7 @@ Tags (see Tag fields) URLs ``` -> **Important**: `Name` field is required. +> **⚠️ Important:** `Name` field is required. ### Image @@ -944,9 +945,9 @@ URLs Weight ``` -> **Important**: `Name` field is required. +> **⚠️ Important:** `Name` field is required. -> **Note:** - `Gender` must be one of `male`, `female`, `transgender_male`, `transgender_female`, `intersex`, `non_binary` (case insensitive). +> **⚠️ Note:** `Gender` must be one of `male`, `female`, `transgender_male`, `transgender_female`, `intersex`, `non_binary` (case insensitive). ### Scene @@ -964,7 +965,7 @@ Title URLs ``` -> **Important**: `Title` field is required only if fileless. +> **⚠️ Important:** `Title` field is required only if fileless. 
### Studio @@ -976,7 +977,7 @@ Tags (see Tag fields) URL ``` -> **Important**: `Name` field is required. +> **⚠️ Important:** `Name` field is required. ### Tag @@ -984,4 +985,4 @@ URL Name ``` -> **Important**: `Name` field is required. +> **⚠️ Important:** `Name` field is required. diff --git a/ui/v2.5/src/docs/en/Manual/Tagger.md b/ui/v2.5/src/docs/en/Manual/Tagger.md index ba9e5f17a..7c2d12a87 100644 --- a/ui/v2.5/src/docs/en/Manual/Tagger.md +++ b/ui/v2.5/src/docs/en/Manual/Tagger.md @@ -4,9 +4,9 @@ Stash can be integrated with stash-box which acts as a centralized metadata data ## Searching -The fingerprint search matches your current selection of files against the remote stash-box instance. Any scenes with a matching fingerprint will be returned, although there is currently no validation of fingerprints so it’s recommended to double-check the validity before saving. +The fingerprint search matches your current selection of files against the remote stash-box instance. Any scenes with a matching fingerprint will be returned, although there is currently no validation of fingerprints so it's recommended to double-check the validity before saving. -If no fingerprint match is found it’s possible to search by keywords. The search works by matching the query against a scene’s _title_, _release date_, _studio name_, and _performer names_. By default the tagger uses metadata set on the file, or parses the filename, this can be changed in the config. +If no fingerprint match is found it's possible to search by keywords. The search works by matching the query against a scene's _title_, _release date_, _studio name_, and _performer names_. By default the tagger uses metadata set on the file, or parses the filename, this can be changed in the config. An important thing to note is that it only returns a match *if all query terms are a match*. 
As an example, if a scene is titled `"A Trip to the Mall"` with the performer `"Jane Doe"`, a search for `"Trip to the Mall 1080p"` will *not* match, however `"trip mall doe"` would. Usually a few pieces of info is enough, for instance performer name + release date or studio name. To avoid common non-related keywords you can add them to the blacklist in the tagger config. Any items in the blacklist are stripped out of the query. diff --git a/ui/v2.5/src/docs/en/Manual/Tasks.md b/ui/v2.5/src/docs/en/Manual/Tasks.md index aa46f72bb..4191afd24 100644 --- a/ui/v2.5/src/docs/en/Manual/Tasks.md +++ b/ui/v2.5/src/docs/en/Manual/Tasks.md @@ -16,12 +16,13 @@ The scan task accepts the following options: |--------|-------------| | Generate scene covers | Generates scene covers for video files. | | Generate previews | Generates video previews (mp4) which play when hovering over a scene. | -| Generate animated image previews* | *Accessible in Advanced Mode* - Also generate animated (webp) previews, only required when Scene/Marker Wall Preview Type is set to Animated Image. When browsing they use less CPU than the video previews, but are generated in addition to them and are larger files.| +| Generate animated image previews | *Accessible in Advanced mode* - Also generate animated (webp) previews, only required when Scene/Marker Wall Preview type is set to Animated image. When browsing they use less CPU than the video previews, but are generated in addition to them and are larger files.| | Generate scrubber sprites | The set of images displayed below the video player for easy navigation. | -| Generate perceptual hashes | Generates perceptual hashes for scene deduplication and identification. | +| Generate video perceptual hashes | Generates perceptual hashes for scene deduplication and identification. | | Generate thumbnails for images | Generates thumbnails for image files. 
| +| Generate image perceptual hashes | Generates perceptual hashes for image deduplication and identification. | | Generate previews for image clips | Generates a gif/looping video as thumbnail for image clips/gifs. | -| Rescan | By default, Stash will only rescan existing files if the file's modified date has been updated since its previous scan. Stash will rescan files in the path when this option is enabled, regardless of the file modification time. Only required Stash needs to recalculate video/image metadata, or to rescan gallery zips. | +| Rescan | By default, Stash will only rescan existing files if the file's modified date has been updated since its previous scan. Stash will rescan files in the path when this option is enabled, regardless of the file modification time. Only required if Stash needs to recalculate video/image metadata, or to rescan gallery zips. | ## Auto Tagging See the [Auto Tagging](/help/AutoTagging.md) page. @@ -31,14 +32,16 @@ See the [Scene Filename Parser](/help/SceneFilenameParser.md) page. ## Generated Content -The scanning function automatically generates a screenshot of each scene. The generated content provides the following: +The generated content provides the following: +* Scene covers - screenshot of the scene used as the cover image * Video or image previews that are played when mousing over the scene card -* Perceptual hashes - helps match against StashDB, and feeds the duplicate finder +* Video Perceptual hashes - helps match against StashDB, and feeds the duplicate finder * Sprites (scene stills for parts of each scene) that are shown in the scene scrubber * Marker video previews that are shown in the markers page * Transcoded versions of scenes. 
See below * Image thumbnails of galleries +* Image Perceptual hashes - can be used for identification and deduplication The generate task accepts the following options: @@ -46,15 +49,17 @@ The generate task accepts the following options: |--------|-------------| | Scene covers | Generates scene covers for video files. | | Previews | Generates video previews (mp4) which play when hovering over a scene. | -| Animated image previews | *Accessible in Advanced Mode* - Generates animated previews (webp). Only required if the Preview Type is set to Animated Image. Requires Generate previews to be enabled. | -| Scene Scrubber Sprites | The set of images displayed below the video player for easy navigation. | -| Markers Previews | Generates 20 second video previews (mp4) which begin at the marker timecode. | -| Marker Animated Image Previews | *Accessible in Advanced Mode* - Also generate animated (webp) previews, only required when Scene/Marker Wall Preview Type is set to Animated Image. When browsing they use less CPU than the video previews, but are generated in addition to them and are larger files. | -| Marker Screenshots | Generates static JPG images for markers. Only required if Preview Type is set to Static Image. Requires Marker Previews to be enabled. | -| Transcodes | *Accessible in Advanced Mode* - MP4 conversions of unsupported video formats. Allows direct streaming instead of live transcoding. | -| Perceptual hashes (for deduplication) | Generates perceptual hashes for scene deduplication and identification. | +| Animated image previews | *Accessible in Advanced mode* - Generates animated previews (webp). Only required if the Preview type is set to Animated image. Requires Generate previews to be enabled. | +| Scene scrubber sprites | The set of images displayed below the video player for easy navigation. | +| Marker previews | Generates 20 second video previews (mp4) which begin at the marker timecode. 
| +| Marker animated image previews | *Accessible in Advanced mode* - Also generate animated (webp) previews, only required when Scene/Marker Wall Preview type is set to Animated image. When browsing they use less CPU than the video previews, but are generated in addition to them and are larger files. | +| Marker screenshots | Generates static JPG images for markers. Only required if Preview type is set to Static image. Requires marker previews to be enabled. | +| Transcodes | *Accessible in Advanced mode* - MP4 conversions of unsupported video formats. Allows direct streaming instead of live transcoding. | +| Video Perceptual hashes (for deduplication) | Generates perceptual hashes for scene deduplication and identification. | | Generate heatmaps and speeds for interactive scenes | Generates heatmaps and speeds for interactive scenes. | -| Image Clip Previews | Generates a gif/looping video as thumbnail for image clips/gifs. | +| Image clip previews | Generates a gif/looping video as thumbnail for image clips/gifs. | +| Image thumbnails | Generates thumbnails for image files. | +| Image Perceptual hashes (for deduplication) | Generates perceptual hashes for image deduplication and identification. | | Overwrite existing generated files | By default, where a generated file exists, it is not regenerated. When this flag is enabled, then the generated files are regenerated. | ### Transcodes @@ -80,3 +85,19 @@ The import and export tasks read and write JSON files to the configured metadata > **⚠️ Note:** The full import task wipes the current database completely before importing. See the [JSON Specification](/help/JSONSpec.md) page for details on the exported JSON format. + +## Backing up + +The backup task creates a backup of the stash database and (optionally) blob files. The backup can either be downloaded or output into the backup directory (under `Settings > Paths`) or the database directory if the backup directory is not configured. 
+ +For a full backup, the database file and all blob files must be copied. The backup is stored as a zip file, with the database file at the root of the zip and the blob files in a `blobs` directory. + +> **⚠️ Note:** generated files are not included in the backup, so these will need to be regenerated when restoring with an empty system from backup. + +For database-only backups, only the database file is copied into the destination. This is useful for quick backups before performing risky operations, or for users who do not use filesystem blob storage. + +## Restoring from backup + +Restoring from backup is currently a manual process. The database backup zip file must be unzipped, and the database file and blob files (if applicable) copied into the database and blob directories respectively. Stash should then be restarted to load the restored database. + +> **⚠️ Note:** the filename for a database-only backup is not the same as the original database file, so the database file from the backup must be renamed to match the original database filename before copying it into the database directory. The original database filename can be found in `Settings > Paths > Database path`. \ No newline at end of file diff --git a/ui/v2.5/src/docs/en/Manual/TroubleshootingMode.md b/ui/v2.5/src/docs/en/Manual/TroubleshootingMode.md new file mode 100644 index 000000000..9a5ffd215 --- /dev/null +++ b/ui/v2.5/src/docs/en/Manual/TroubleshootingMode.md @@ -0,0 +1,7 @@ +# Troubleshooting Mode + +Troubleshooting mode disables all plugins and all custom CSS, JavaScript, and locales. It also temporarily sets the log level to `DEBUG`. This is useful when you are experiencing issues with your Stash instance to eliminate the possibility that a plugin or custom code is causing the issue. + +Troubleshooting mode is enabled from the Settings page, by clicking the `Troubleshooting mode` button at the bottom left of the page. 
+ +When Troubleshooting mode is enabled, a red border and a banner will be displayed to remind you that you are in Troubleshooting mode. To exit Troubleshooting mode, click the `Exit` button in the banner. \ No newline at end of file diff --git a/ui/v2.5/src/docs/en/Manual/UIPluginApi.md b/ui/v2.5/src/docs/en/Manual/UIPluginApi.md index e1347a46f..54ef3a20f 100644 --- a/ui/v2.5/src/docs/en/Manual/UIPluginApi.md +++ b/ui/v2.5/src/docs/en/Manual/UIPluginApi.md @@ -33,6 +33,7 @@ This namespace contains the generated graphql client interface. This is a low-le - `FontAwesomeBrands` - `Mousetrap` - `MousetrapPause` +- `ReactFontAwesome` - `ReactSelect` ### `register` @@ -228,6 +229,8 @@ Returns `void`. - `DetailImage` - `ExternalLinkButtons` - `ExternalLinksButton` +- `FilteredGalleryList` +- `FilteredSceneList` - `FolderSelect` - `FrontPage` - `GalleryCard` @@ -235,17 +238,31 @@ Returns `void`. - `GalleryCard.Image` - `GalleryCard.Overlays` - `GalleryCard.Popovers` +- `GalleryCardGrid` - `GalleryIDSelect` +- `GalleryList` +- `GalleryRecommendationRow` - `GallerySelect` - `GallerySelect.sort` +- `GridCard` +- `GroupCard` +- `GroupCardGrid` - `GroupIDSelect` +- `GroupRecommendationRow` - `GroupSelect` - `GroupSelect.sort` - `HeaderImage` - `HoverPopover` - `Icon` +- `ImageCard` +- `ImageCard.Details` +- `ImageCard.Image` +- `ImageCard.Overlays` +- `ImageCard.Popovers` - `ImageDetailPanel` +- `ImageGridCard` - `ImageInput` +- `ImageRecommendationRow` - `LightboxLink` - `LoadingIndicator` - `MainNavBar.MenuItems` @@ -261,6 +278,7 @@ Returns `void`. - `PerformerCard.Overlays` - `PerformerCard.Popovers` - `PerformerCard.Title` +- `PerformerCardGrid` - `PerformerDetailsPanel` - `PerformerDetailsPanel.DetailGroup` - `PerformerGalleriesPanel` @@ -269,6 +287,7 @@ Returns `void`. - `PerformerIDSelect` - `PerformerImagesPanel` - `PerformerPage` +- `PerformerRecommendationRow` - `PerformerScenesPanel` - `PerformerSelect` - `PerformerSelect.sort` @@ -277,17 +296,27 @@ Returns `void`. 
- `RatingNumber` - `RatingStars` - `RatingSystem` +- `RecommendationRow` - `SceneCard` - `SceneCard.Details` - `SceneCard.Image` - `SceneCard.Overlays` - `SceneCard.Popovers` +- `SceneCardsGrid` - `SceneFileInfoPanel` - `SceneIDSelect` +- `SceneMarkerCard` +- `SceneMarkerCard.Details` +- `SceneMarkerCard.Image` +- `SceneMarkerCard.Popovers` +- `SceneMarkerCardsGrid` +- `SceneMarkerRecommendationRow` +- `SceneList` - `ScenePage` - `ScenePage.TabContent` - `ScenePage.Tabs` - `ScenePlayer` +- `SceneRecommendationRow` - `SceneSelect` - `SceneSelect.sort` - `SelectSetting` @@ -296,7 +325,11 @@ Returns `void`. - `SettingModal` - `StringListSetting` - `StringSetting` +- `StudioCard` +- `StudioCardGrid` +- `StudioDetailsPanel` - `StudioIDSelect` +- `StudioRecommendationRow` - `StudioSelect` - `StudioSelect.sort` - `SweatDrops` @@ -307,8 +340,10 @@ Returns `void`. - `TagCard.Overlays` - `TagCard.Popovers` - `TagCard.Title` +- `TagCardGrid` - `TagIDSelect` - `TagLink` +- `TagRecommendationRow` - `TagSelect` - `TagSelect.sort` - `TruncatedText` @@ -319,6 +354,4 @@ Allows plugins to listen for Stash's events. 
```js PluginApi.Event.addEventListener("stash:location", (e) => console.log("Page Changed", e.detail.data.location.pathname)) -``` - - +``` \ No newline at end of file diff --git a/ui/v2.5/src/index.scss b/ui/v2.5/src/index.scss index 0c0bffdec..cadd1ad2f 100755 --- a/ui/v2.5/src/index.scss +++ b/ui/v2.5/src/index.scss @@ -526,8 +526,6 @@ textarea.text-input { } .zoom-1 { - width: 320px; - .gallery-card-image, .tag-card-image { height: 240px; @@ -1438,3 +1436,40 @@ select { h3 .TruncatedText { line-height: 1.5; } + +// Troubleshooting Mode overlay banner +.troubleshooting-mode-overlay { + border: 5px solid $danger; + bottom: 0; + left: 0; + opacity: 0.75; + pointer-events: none; + position: fixed; + right: 0; + top: 0; + z-index: 1040; + + .troubleshooting-mode-alert { + align-items: baseline; + border-radius: 0; + bottom: 0.5rem; + display: inline-flex; + margin: 0; + position: fixed; + right: 0.5rem; + + @include media-breakpoint-down(xs) { + @media (orientation: portrait) { + bottom: $navbar-height; + + & > span { + font-size: 0.75rem; + } + } + } + } + + .btn { + pointer-events: auto; + } +} diff --git a/ui/v2.5/src/locales/en-GB.json b/ui/v2.5/src/locales/en-GB.json index 704373fb0..f5451249e 100644 --- a/ui/v2.5/src/locales/en-GB.json +++ b/ui/v2.5/src/locales/en-GB.json @@ -1,7 +1,7 @@ { "actions": { "add": "Add", - "add_directory": "Add Directory", + "add_directory": "Add directory", "add_entity": "Add {entityType}", "add_manual_date": "Add manual date", "add_sub_groups": "Add Sub-Groups", @@ -14,7 +14,7 @@ "anonymise": "Anonymise", "apply": "Apply", "assign_stashid_to_parent_studio": "Assign Stash ID to existing parent studio and update metadata", - "auto_tag": "Auto Tag", + "auto_tag": "Auto tag", "backup": "Backup", "browse_for_image": "Browse for image…", "cancel": "Cancel", @@ -47,7 +47,7 @@ "disallow": "Disallow", "download": "Download", "download_anonymised": "Download anonymised", - "download_backup": "Download Backup", + "download_backup": 
"Download backup", "edit": "Edit", "edit_entity": "Edit {entityType}", "enable": "Enable", @@ -58,8 +58,8 @@ "finish": "Finish", "from_file": "From file…", "from_url": "From URL…", - "full_export": "Full Export", - "full_import": "Full Import", + "full_export": "Full export", + "full_import": "Full import", "generate": "Generate", "generate_thumb_default": "Generate default thumbnail", "generate_thumb_from_current": "Generate thumbnail from current", @@ -75,13 +75,13 @@ "logout": "Log out", "make_primary": "Make Primary", "merge": "Merge", - "migrate_blobs": "Migrate Blobs", - "migrate_scene_screenshots": "Migrate Scene Screenshots", + "migrate_blobs": "Migrate blobs", + "migrate_scene_screenshots": "Migrate scene screenshots", "next_action": "Next", "not_running": "not running", "open_in_external_player": "Open in external player", "open_random": "Open Random", - "optimise_database": "Optimise Database", + "optimise_database": "Optimise database", "overwrite": "Overwrite", "play": "Play", "play_random": "Play Random", @@ -115,13 +115,14 @@ "scrape_with": "Scrape with…", "search": "Search", "select_all": "Select All", + "select_directory": "Select directory", "select_entity": "Select {entityType}", "select_folders": "Select folders", "select_none": "Select None", "invert_selection": "Invert Selection", - "selective_auto_tag": "Selective Auto Tag", - "selective_clean": "Selective Clean", - "selective_scan": "Selective Scan", + "selective_auto_tag": "Selective auto tag", + "selective_clean": "Selective clean", + "selective_scan": "Selective scan", "set_as_default": "Set as default", "set_back_image": "Back image…", "set_cover": "Set as Cover", @@ -174,7 +175,9 @@ "filesystem": "Filesystem" }, "captions": "Captions", + "career_end": "Career End", "career_length": "Career Length", + "career_start": "Career Start", "chapters": "Chapters", "circumcised": "Circumcised", "circumcised_types": { @@ -252,7 +255,7 @@ "stash_wiki": "Stash {url} page", "version": "Version" }, - 
"advanced_mode": "Advanced Mode", + "advanced_mode": "Advanced mode", "application_paths": { "heading": "Application Paths" }, @@ -270,11 +273,14 @@ "tasks": "Tasks", "tools": "Tools" }, + "changelog": { + "header": "Changelog" + }, "dlna": { "allow_temp_ip": "Allow {tempIP}", "allowed_ip_addresses": "Allowed IP addresses", "allowed_ip_temporarily": "Allowed IP temporarily", - "default_ip_whitelist": "Default IP Whitelist", + "default_ip_whitelist": "Default IP whitelist", "default_ip_whitelist_desc": "Default IP addresses allow to access DLNA. Use {wildcard} to allow all IP addresses.", "disabled_dlna_temporarily": "Disabled DLNA temporarily", "disallowed_ip": "Disallowed IP", @@ -283,35 +289,35 @@ "network_interfaces": "Interfaces", "network_interfaces_desc": "Interfaces to expose DLNA server on. An empty list results in running on all interfaces. Requires DLNA restart after changing.", "recent_ip_addresses": "Recent IP addresses", - "server_display_name": "Server Display Name", + "server_display_name": "Server display name", "server_display_name_desc": "Display name for the DLNA server. Defaults to {server_name} if empty.", - "server_port": "Server Port", + "server_port": "Server port", "server_port_desc": "Port to run the DLNA server on. Requires DLNA restart after changing.", "successfully_cancelled_temporary_behaviour": "Successfully cancelled temporary behaviour", "until_restart": "until restart", - "video_sort_order": "Default Video Sort Order", + "video_sort_order": "Default video sort order", "video_sort_order_desc": "Order to sort videos by default." }, "general": { "auth": { - "api_key": "API Key", + "api_key": "API key", "api_key_desc": "API key for external systems. Only required when username/password is configured. 
Username must be saved before generating API key.", "authentication": "Authentication", "clear_api_key": "Clear API key", "credentials": { - "description": "Credentials to restrict access to stash.", + "description": "Credentials to restrict access to Stash.", "heading": "Credentials" }, "generate_api_key": "Generate API key", "log_file": "Log file", "log_file_desc": "Path to the file to output logging to. Blank to disable file logging. Requires restart.", - "log_http": "Log http access", - "log_http_desc": "Logs http access to the terminal. Requires restart.", + "log_http": "Log HTTP access", + "log_http_desc": "Logs HTTP access to the terminal. Requires restart.", "log_to_terminal": "Log to terminal", "log_to_terminal_desc": "Logs to the terminal in addition to a file. Always true if file logging is disabled. Requires restart.", "log_file_max_size": "Maximum log size", "log_file_max_size_desc": "Maximum size in megabytes of the log file before it is compressed. 0MB is disabled. Requires restart.", - "maximum_session_age": "Maximum Session Age", + "maximum_session_age": "Maximum session age", "maximum_session_age_desc": "Maximum idle time before a login session is expired, in seconds. Requires restart.", "password": "Password", "password_desc": "Password to access Stash. Leave blank to disable user authentication", @@ -320,50 +326,50 @@ "username_desc": "Username to access Stash. Leave blank to disable user authentication" }, "backup_directory_path": { - "description": "Directory location for SQLite database file backups", - "heading": "Backup Directory Path" + "description": "Directory location for SQLite database file backups.", + "heading": "Backup directory path" }, "delete_trash_path": { "description": "Path where deleted files will be moved to instead of being permanently deleted. Leave empty to permanently delete files.", - "heading": "Trash Path" + "heading": "Trash path" }, "blobs_path": { "description": "Where in the filesystem to store binary data. 
Applicable only when using the Filesystem blob storage type. WARNING: changing this requires manually moving existing data.", "heading": "Binary data filesystem path" }, "blobs_storage": { - "description": "Where to store binary data such as scene covers, performer, studio and tag images. After changing this value, the existing data must be migrated using the Migrate Blobs tasks. See Tasks page for migration.", + "description": "Where to store binary data such as scene covers, performer, studio and tag images. After changing this value, the existing data must be migrated using the Migrate blobs tasks. See Tasks page for migration.", "heading": "Binary data storage type" }, "cache_location": "Directory location of the cache. Required if streaming using HLS (such as on Apple devices) or DASH.", - "cache_path_head": "Cache Path", + "cache_path_head": "Cache path", "calculate_md5_and_ohash_desc": "Calculate MD5 checksum in addition to oshash. Enabling will cause initial scans to be slower. File naming hash must be set to oshash to disable MD5 calculation.", "calculate_md5_and_ohash_label": "Calculate MD5 for videos", "check_for_insecure_certificates": "Check for insecure certificates", - "check_for_insecure_certificates_desc": "Some sites use insecure ssl certificates. When unticked the scraper skips the insecure certificates check and allows scraping of those sites. If you get a certificate error when scraping untick this.", + "check_for_insecure_certificates_desc": "Some sites use insecure SSL certificates. When unticked the scraper skips the insecure certificates check and allows scraping of those sites. 
If you get a certificate error when scraping untick this.", "chrome_cdp_path": "Chrome CDP path", "chrome_cdp_path_desc": "File path to the Chrome executable, or a remote address (starting with http:// or https://, for example http://localhost:9222/json/version) to a Chrome instance.", - "create_galleries_from_folders_desc": "If true, creates galleries from folders containing images by default. Create a File called .forcegallery or .nogallery in a folder to enforce/prevent this.", + "create_galleries_from_folders_desc": "If true, creates galleries from folders containing images by default. Create a file called .forcegallery or .nogallery in a folder to override this setting.", "create_galleries_from_folders_label": "Create galleries from folders containing images", "database": "Database", - "db_path_head": "Database Path", + "db_path_head": "Database path", "directory_locations_to_your_content": "Directory locations to your content", - "excluded_image_gallery_patterns_desc": "Regexps of image and gallery files/paths to exclude from Scan and add to Clean", - "excluded_image_gallery_patterns_head": "Excluded Image/Gallery Patterns", - "excluded_video_patterns_desc": "Regexps of video files/paths to exclude from Scan and add to Clean", - "excluded_video_patterns_head": "Excluded Video Patterns", + "excluded_image_gallery_patterns_desc": "Regexps of image and gallery files/paths to exclude from Scan and add to Clean tasks.", + "excluded_image_gallery_patterns_head": "Excluded image/gallery patterns", + "excluded_video_patterns_desc": "Regexps of video files/paths to exclude from Scan and add to Clean tasks.", + "excluded_video_patterns_head": "Excluded video patterns", "ffmpeg": { "download_ffmpeg": { "description": "Downloads FFmpeg into the configuration directory and clears the ffmpeg and ffprobe paths to resolve from the configuration directory.", "heading": "Download FFmpeg" }, "ffmpeg_path": { - "description": "Path to the ffmpeg executable (not just the folder). 
If empty, ffmpeg will be resolved from the environment via $PATH, the configuration directory, or from $HOME/.stash", - "heading": "FFmpeg Executable Path" + "description": "Path to the ffmpeg executable (not just the folder). If empty, ffmpeg will be resolved from the environment via $PATH, the configuration directory, or from $HOME/.stash.", + "heading": "FFmpeg executable path" }, "ffprobe_path": { - "description": "Path to the ffprobe executable (not just the folder). If empty, ffprobe will be resolved from the environment via $PATH, the configuration directory, or from $HOME/.stash", - "heading": "FFprobe Executable Path" + "description": "Path to the ffprobe executable (not just the folder). If empty, ffprobe will be resolved from the environment via $PATH, the configuration directory, or from $HOME/.stash.", + "heading": "FFprobe executable path" }, "hardware_acceleration": { "desc": "Uses available hardware to encode video for live transcoding.", @@ -372,80 +378,91 @@ "live_transcode": { "input_args": { "desc": "Advanced: Additional arguments to pass to ffmpeg before the input field when live transcoding video.", - "heading": "FFmpeg Live Transcode Input Args" + "heading": "FFmpeg live transcode input arguments" }, "output_args": { "desc": "Advanced: Additional arguments to pass to ffmpeg before the output field when live transcoding video.", - "heading": "FFmpeg Live Transcode Output Args" + "heading": "FFmpeg live transcode output arguments" } }, "transcode": { "input_args": { "desc": "Advanced: Additional arguments to pass to ffmpeg before the input field when generating video.", - "heading": "FFmpeg Transcode Input Args" + "heading": "FFmpeg transcode input arguments" }, "output_args": { "desc": "Advanced: Additional arguments to pass to ffmpeg before the output field when generating video.", - "heading": "FFmpeg Transcode Output Args" + "heading": "FFmpeg transcode output arguments" } } }, "funscript_heatmap_draw_range": "Include range in generated 
heatmaps", "funscript_heatmap_draw_range_desc": "Draw range of motion on the y-axis of the generated heatmap. Existing heatmaps will need to be regenerated after changing.", - "gallery_cover_regex_desc": "Regexp used to identify an image as gallery cover", + "gallery_cover_regex_desc": "Regexps used to identify an image as gallery cover.", "gallery_cover_regex_label": "Gallery cover pattern", - "gallery_ext_desc": "Comma-delimited list of file extensions that will be identified as gallery zip files.", - "gallery_ext_head": "Gallery zip Extensions", + "gallery_ext_desc": "Comma-delimited list of file extensions that will be identified as gallery ZIP files.", + "gallery_ext_head": "Gallery ZIP extensions", "generated_file_naming_hash_desc": "Use MD5 or oshash for generated file naming. Changing this requires that all scenes have the applicable MD5/oshash value populated. After changing this value, existing generated files will need to be migrated or regenerated. See Tasks page for migration.", "generated_file_naming_hash_head": "Generated file naming hash", - "generated_files_location": "Directory location for the generated files (scene markers, scene previews, sprites, etc)", - "generated_path_head": "Generated Path", + "generated_files_location": "Directory location for the generated files (scene markers, scene previews, sprites, etc).", + "generated_path_head": "Generated path", "hashing": "Hashing", "heatmap_generation": "Funscript Heatmap Generation", "image_ext_desc": "Comma-delimited list of file extensions that will be identified as images.", - "image_ext_head": "Image Extensions", + "image_ext_head": "Image extensions", "include_audio_desc": "Includes audio stream when generating previews.", "include_audio_head": "Include audio", "logging": "Logging", - "maximum_streaming_transcode_size_desc": "Maximum size for transcoded streams", + "maximum_streaming_transcode_size_desc": "Maximum size for transcoded streams.", "maximum_streaming_transcode_size_head": 
"Maximum streaming transcode size", - "maximum_transcode_size_desc": "Maximum size for generated transcodes", + "maximum_transcode_size_desc": "Maximum size for generated transcodes.", "maximum_transcode_size_head": "Maximum transcode size", "metadata_path": { - "description": "Directory location used when performing a full export or import", - "heading": "Metadata Path" + "description": "Directory location used when performing a full export or import.", + "heading": "Metadata path" }, - "number_of_parallel_task_for_scan_generation_desc": "Set to 0 for auto-detection. Warning running more tasks than is required to achieve 100% cpu utilisation will decrease performance and potentially cause other issues.", + "number_of_parallel_task_for_scan_generation_desc": "Set to 0 for auto-detection. Warning running more tasks than is required to achieve 100% CPU utilisation will decrease performance and potentially cause other issues.", "number_of_parallel_task_for_scan_generation_head": "Number of parallel task for scan/generation", "parallel_scan_head": "Parallel Scan/Generation", "plugins_path": { - "description": "Directory location of plugin configuration files", - "heading": "Plugins Path" + "description": "Directory location of plugin configuration files.", + "heading": "Plugins path" }, "preview_generation": "Preview Generation", "python_path": { - "description": "Path to the python executable (not just the folder). Used for script scrapers and plugins. If blank, python will be resolved from the environment", - "heading": "Python Executable Path" + "description": "Path to the python executable (not just the folder). Used for script scrapers and plugins. 
If blank, Python will be resolved from the environment.", + "heading": "Python executable path" }, - "scraper_user_agent": "Scraper User Agent", - "scraper_user_agent_desc": "User-Agent string used during scrape http requests", + "scraper_user_agent": "Scraper User-Agent", + "scraper_user_agent_desc": "User-Agent string used during scrape HTTP requests.", "scrapers_path": { - "description": "Directory location of scraper configuration files", - "heading": "Scrapers Path" + "description": "Directory location of scraper configuration files.", + "heading": "Scrapers path" }, "scraping": "Scraping", + "sprite_generation_head": "Sprite generation", + "sprite_interval_desc": "Time between each generated sprite in seconds.", + "sprite_interval_head": "Sprite interval", + "sprite_maximum_desc": "Maximum number of sprites to be generated for a scene. Set to 0 to disable the limit.", + "sprite_maximum_head": "Maximum sprites", + "sprite_minimum_desc": "Minimum number of sprites to be generated for a scene", + "sprite_minimum_head": "Minimum sprites", + "sprite_screenshot_size_desc": "Desired size of each sprite in pixels.", + "sprite_screenshot_size_head": "Sprite size", "sqlite_location": "File location for the SQLite database (requires restart). WARNING: storing the database on a different system to where the Stash server is run from (i.e. 
over the network) is unsupported!", + "use_custom_sprite_interval_head": "Use custom sprite interval", + "use_custom_sprite_interval_desc": "Enable the custom sprite interval according to the settings below.", "video_ext_desc": "Comma-delimited list of file extensions that will be identified as videos.", - "video_ext_head": "Video Extensions", + "video_ext_head": "Video extensions", "video_head": "Video" }, "library": { "exclusions": "Exclusions", - "gallery_and_image_options": "Gallery and Image options", - "media_content_extensions": "Media content extensions" + "gallery_and_image_options": "Gallery and Image Options", + "media_content_extensions": "Media Content Extensions" }, "logs": { - "log_level": "Log Level" + "log_level": "Log level" }, "plugins": { "available_plugins": "Available Plugins", @@ -457,8 +474,8 @@ "available_scrapers": "Available Scrapers", "entity_metadata": "{entityType} Metadata", "entity_scrapers": "{entityType} scrapers", - "excluded_tag_patterns_desc": "Regexps of tag names to exclude from scraping results", - "excluded_tag_patterns_head": "Excluded Tag Patterns", + "excluded_tag_patterns_desc": "Regexps of tag names to exclude from scraping results.", + "excluded_tag_patterns_head": "Excluded tag patterns", "installed_scrapers": "Installed Scrapers", "scraper": "Scraper", "scrapers": "Scrapers", @@ -486,25 +503,35 @@ "anonymise_database": "Makes a copy of the database to the backups directory, anonymising all sensitive data. This can then be provided to others for troubleshooting and debugging purposes. The original database is not modified. 
Anonymised database uses the filename format {filename_format}.", "anonymising_database": "Anonymising database", "auto_tag": { - "auto_tagging_all_paths": "Auto Tagging all paths", - "auto_tagging_paths": "Auto Tagging the following paths" + "auto_tagging_all_paths": "Auto tagging all paths", + "auto_tagging_paths": "Auto tagging the following paths" }, - "auto_tag_based_on_filenames": "Auto-tag content based on file paths.", - "auto_tagging": "Auto Tagging", + "auto_tag_based_on_filenames": "Auto tag content based on file paths.", + "auto_tagging": "Auto tagging", "backing_up_database": "Backing up database", "backup_and_download": "Performs a backup of the database and downloads the resulting file.", - "backup_database": "Performs a backup of the database to the backups directory, with the filename format {filename_format}", + "backup_database": { + "description": "Performs a backup of the database and blob files.", + "destination": "Destination", + "download": "Download backup", + "include_blobs": "Include blobs in backup", + "include_blobs_desc": "Disable to only backup the SQLite database file.", + "sqlite": "Backup file will be a copy of the SQLite database file, with the filename {filename_format}", + "to_directory": "To {directory}", + "warning_blobs": "Blob files will not be included in the backup. This means that to succesfully restore from the backup, the blob files must be present in the blob storage location.", + "zip": "SQLite database file and blob files will be zipped into a single file, with the filename {filename_format}" + }, "cleanup_desc": "Check for missing files and remove them from the database. 
This is a destructive action.", "clean_generated": { "blob_files": "Blob files", "description": "Removes generated files without a corresponding database entry.", - "image_thumbnails": "Image Thumbnails", + "image_thumbnails": "Image thumbnails", "image_thumbnails_desc": "Image thumbnails and clips", - "markers": "Marker Previews", - "previews": "Scene Previews", + "markers": "Marker previews", + "previews": "Scene previews", "previews_desc": "Scene previews and thumbnails", - "sprites": "Scene Sprites", - "transcodes": "Scene Transcodes" + "sprites": "Scene sprites", + "transcodes": "Scene transcodes" }, "data_management": "Data management", "defaults_set": "Defaults have been set and will be used when clicking the {action} button on the Tasks page.", @@ -517,10 +544,12 @@ }, "generate_clip_previews_during_scan": "Generate previews for image clips", "generate_desc": "Generate supporting image, sprite, video, vtt and other files.", - "generate_phashes_during_scan": "Generate perceptual hashes", + "generate_image_phashes_during_scan": "Generate image perceptual hashes", + "generate_image_phashes_during_scan_tooltip": "For deduplication and identification.", + "generate_phashes_during_scan": "Generate video perceptual hashes", "generate_phashes_during_scan_tooltip": "For deduplication and scene identification.", "generate_previews_during_scan": "Generate animated image previews", - "generate_previews_during_scan_tooltip": "Also generate animated (webp) previews, only required when Scene/Marker Wall Preview Type is set to Animated Image. When browsing they use less CPU than the video previews, but are generated in addition to them and are larger files.", + "generate_previews_during_scan_tooltip": "Also generate animated (webp) previews, only required when Scene/Marker Wall Preview type is set to Animated image. 
When browsing they use less CPU than the video previews, but are generated in addition to them and are larger files.", "generate_sprites_during_scan": "Generate scrubber sprites", "generate_sprites_during_scan_tooltip": "The set of images displayed below the video player for easy navigation.", "generate_thumbnails_during_scan": "Generate thumbnails for images", @@ -541,6 +570,8 @@ "identifying_from_paths": "Identifying scenes from the following paths", "identifying_scenes": "Identifying {num} {scene}", "include_male_performers": "Include male performers", + "performer_genders": "Performer genders", + "performer_genders_desc": "Performers with selected genders will be included during identification.", "set_cover_images": "Set cover images", "set_organized": "Set organised flag", "skip_multiple_matches": "Skip matches that have more than one result", @@ -587,7 +618,7 @@ "tools": { "graphql_playground": "GraphQL playground", "heading": "Tools", - "scene_duplicate_checker": "Scene Duplicate Checker", + "scene_duplicate_checker": "Scene duplicate checker", "scene_filename_parser": { "add_field": "Add Field", "capitalize_title": "Capitalize title", @@ -599,7 +630,7 @@ "ignored_words": "Ignored words", "matches_with": "Matches with {i}", "select_parser_recipe": "Select Parser Recipe", - "title": "Scene Filename Parser", + "title": "Scene filename parser", "whitespace_chars": "Whitespace characters", "whitespace_chars_desc": "These characters will be replaced with whitespace in the title" }, @@ -616,19 +647,33 @@ "heading": "Custom CSS", "option_label": "Custom CSS enabled" }, + "troubleshooting_mode": { + "button": "Troubleshooting mode", + "dialog_title": "Enable troubleshooting mode", + "dialog_description": "This will temporarily disable all customizations to help diagnose issues:", + "dialog_item_plugins": "All plugins", + "dialog_item_css": "Custom CSS", + "dialog_item_js": "Custom JavaScript", + "dialog_item_locales": "Custom locales", + "dialog_log_level": "Log 
level will be set to Debug for detailed diagnostics.", + "dialog_reload_note": "The page will reload automatically.", + "enable": "Enable & Reload", + "overlay_message": "Troubleshooting mode is active - all customizations are disabled", + "exit": "Exit" + }, "custom_javascript": { - "description": "Page must be reloaded for changes to take effect. There is no guarantee of compatibility between custom Javascript and future releases of Stash.", - "heading": "Custom Javascript", - "option_label": "Custom Javascript enabled" + "description": "Page must be reloaded for changes to take effect. There is no guarantee of compatibility between custom JavaScript and future releases of Stash.", + "heading": "Custom JavaScript", + "option_label": "Custom JavaScript enabled" }, "custom_locales": { "description": "Override individual locale strings. See https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/locales/en-GB.json for the master list. Page must be reloaded for changes to take effect.", - "heading": "Custom localisation", + "heading": "Custom Localisation", "option_label": "Custom localisation enabled" }, "custom_title": { "description": "Custom text to append to the page title. 
If empty, defaults to 'Stash'.", - "heading": "Custom Title" + "heading": "Custom title" }, "delete_options": { "description": "Default settings when deleting images, galleries, and scenes.", @@ -640,14 +685,14 @@ }, "desktop_integration": { "desktop_integration": "Desktop Integration", - "notifications_enabled": "Enable Notifications", - "send_desktop_notifications_for_events": "Send desktop notifications for events", - "skip_opening_browser": "Skip Opening Browser", - "skip_opening_browser_on_startup": "Skip auto-opening browser during startup" + "notifications_enabled": "Enable notifications", + "send_desktop_notifications_for_events": "Send desktop notifications for events.", + "skip_opening_browser": "Skip opening browser", + "skip_opening_browser_on_startup": "Skip auto-opening browser during startup." }, "detail": { "compact_expanded_details": { - "description": "When enabled, this option will present expanded details while maintaining a compact presentation", + "description": "When enabled, this option will present expanded details while maintaining a compact presentation.", "heading": "Compact expanded details" }, "enable_background_image": { @@ -656,13 +701,13 @@ }, "heading": "Detail Page", "show_all_details": { - "description": "When enabled, all content details will be shown by default and each detail item will fit under a single column", + "description": "When enabled, all content details will be shown by default and each detail item will fit under a single column.", "heading": "Show all details" } }, "editing": { "disable_dropdown_create": { - "description": "Remove the ability to create new objects from the dropdown selectors", + "description": "Remove the ability to create new objects from the dropdown selectors.", "heading": "Disable dropdown create" }, "heading": "Editing", @@ -680,7 +725,7 @@ } }, "type": { - "label": "Rating System Type", + "label": "Rating system type", "options": { "decimal": "Decimal", "stars": "Stars" @@ -690,7 +735,7 @@ }, 
"funscript_offset": { "description": "Time offset in milliseconds for interactive scripts playback.", - "heading": "Funscript Offset (ms)" + "heading": "Funscript offset (ms)" }, "handy_connection": { "connect": "Connect", @@ -703,8 +748,8 @@ "sync": "Sync" }, "handy_connection_key": { - "description": "Handy connection key to use for interactive scenes. Setting this key will allow Stash to share your current scene information with handyfeeling.com", - "heading": "Handy Connection Key" + "description": "Handy connection key to use for interactive scenes. Setting this key will allow Stash to share your current scene information with handyfeeling.com.", + "heading": "Handy connection key" }, "image_lightbox": { "heading": "Image Lightbox" @@ -718,11 +763,11 @@ "heading": "Images", "options": { "create_image_clips_from_videos": { - "description": "When a library has Videos disabled, Video Files (files ending with Video Extension) will be scanned as Image Clip.", - "heading": "Scan Video Extensions as Image Clip" + "description": "When a library has Videos disabled, video files (see Video extensions) will be scanned as image clips.", + "heading": "Scan video extensions as image clips" }, "write_image_thumbnails": { - "description": "Write image thumbnails to disk when generated on-the-fly", + "description": "Write image thumbnails to disk when generated on-the-fly.", "heading": "Write image thumbnails" } } @@ -732,31 +777,31 @@ "heading": "Language" }, "max_loop_duration": { - "description": "Maximum scene duration where scene player will loop the video - 0 to disable", + "description": "Maximum scene duration where scene player will loop the video. 
Set 0 to disable.", "heading": "Maximum loop duration" }, "menu_items": { - "description": "Show or hide different types of content on the navigation bar", - "heading": "Menu Items" + "description": "Show or hide different types of content on the navigation bar.", + "heading": "Menu items" }, "minimum_play_percent": { "description": "The percentage of time in which a scene must be played before its play count is incremented.", - "heading": "Minimum Play Percent" + "heading": "Minimum play percent" }, "performers": { "options": { "image_location": { - "description": "Custom path for default performer images. Leave empty to use in-built defaults", - "heading": "Custom Performer Image Path" + "description": "Custom path for default performer images. Leave empty to use built-in defaults.", + "heading": "Custom performer image path" } } }, "preview_type": { "description": "The default option is video (mp4) previews. For less CPU usage when browsing, you can use the animated image (webp) previews. 
However they must be generated in addition to the video previews and are larger files.", - "heading": "Preview Type", + "heading": "Preview type", "options": { - "animated": "Animated Image", - "static": "Static Image", + "animated": "Animated image", + "static": "Static image", "video": "Video" } }, @@ -772,14 +817,14 @@ "always_start_from_beginning": "Always start video from beginning", "auto_start_video": "Auto-start video", "auto_start_video_on_play_selected": { - "description": "Auto-start scene videos when playing from queue, or playing selected or random from Scenes page", + "description": "Auto-start scene videos when playing from queue, or playing selected or random from Scenes page.", "heading": "Auto-start video when playing selected" }, "continue_playlist_default": { - "description": "Play next scene in queue when video finishes", + "description": "Play next scene in queue when video finishes.", "heading": "Continue playlist by default" }, - "disable_mobile_media_auto_rotate": "Disable auto-rotate of fullscreen media on Mobile", + "disable_mobile_media_auto_rotate": "Disable auto-rotate of fullscreen media on mobile", "enable_chromecast": "Enable Chromecast", "show_ab_loop_controls": "Show AB Loop plugin controls", "show_open_external": "Show 'Open In External Player' button", @@ -788,7 +833,7 @@ "track_activity": "Enable Scene Play history", "vr_tag": { "description": "The VR button will only be displayed for scenes with this tag.", - "heading": "VR Tag" + "heading": "VR tag" } } }, @@ -805,27 +850,27 @@ }, "sfw_mode": { "description": "Enable if using stash to store SFW content. 
Hides or changes some adult-content-related aspects of the UI.", - "heading": "SFW Content Mode" + "heading": "SFW content mode" }, "show_tag_card_on_hover": { - "description": "Show tag card when hovering tag badges", + "description": "Show tag card when hovering tag badges.", "heading": "Tag card tooltips" }, "slideshow_delay": { - "description": "Slideshow is available in galleries when in wall view mode", - "heading": "Slideshow Delay (seconds)" + "description": "Slideshow is available in galleries when in wall view mode.", + "heading": "Slideshow delay (seconds)" }, "studio_panel": { - "heading": "Studio view", + "heading": "Studio View", "options": { "show_child_studio_content": { - "description": "In the studio view, display content from the sub-studios as well", + "description": "In the studio view, display content from the sub-studios as well.", "heading": "Display sub-studios content" } } }, "performer_list": { - "heading": "Performer list", + "heading": "Performer List", "options": { "show_links_on_grid_card": { "heading": "Display links on performer grid cards" @@ -833,17 +878,17 @@ } }, "tag_panel": { - "heading": "Tag view", + "heading": "Tag View", "options": { "show_child_tagged_content": { - "description": "In the tag view, display content from the subtags as well", - "heading": "Display subtag content" + "description": "In the tag view, display content from the sub-tags as well.", + "heading": "Display sub-tag content" } } }, "title": "User Interface", "use_stash_hosted_funscript": { - "description": "When enabled, funscripts will be served directly from Stash to your Handy device without using the third party Handy server. Requires that Stash be accessible from your Handy device, and that an API key is generated if stash has credentials configured.", + "description": "When enabled, funscripts will be served directly from Stash to your Handy device without using the third party Handy server. 
Requires that Stash be accessible from your Handy device, and that an API key is generated if Stash has credentials configured.", "heading": "Serve funscripts directly" } } @@ -952,19 +997,19 @@ "display_mode": { "fit_horizontally": "Fit horizontally", "fit_to_screen": "Fit to screen", - "label": "Display Mode", + "label": "Display mode", "original": "Original" }, "options": "Options", "page_header": "Page {page} / {total}", "reset_zoom_on_nav": "Reset zoom level when changing image", "scale_up": { - "description": "Scale smaller images up to fill screen", + "description": "Scale smaller images up to fill screen.", "label": "Scale up to fit" }, "scroll_mode": { "description": "Hold shift to temporarily use other mode.", - "label": "Scroll Mode", + "label": "Scroll mode", "pan_y": "Pan Y", "zoom": "Zoom" } @@ -982,30 +1027,32 @@ "destination": "Reassign to" }, "scene_gen": { - "clip_previews": "Image Clip Previews", + "clip_previews": "Image clip previews", "covers": "Scene covers", "force_transcodes": "Force Transcode generation", "force_transcodes_tooltip": "By default, transcodes are only generated when the video file is not supported in the browser. When enabled, transcodes will be generated even when the video file appears to be supported in the browser.", - "image_previews": "Animated Image Previews", - "image_previews_tooltip": "Also generate animated (webp) previews, only required when Scene/Marker Wall Preview Type is set to Animated Image. When browsing they use less CPU than the video previews, but are generated in addition to them and are larger files.", - "image_thumbnails": "Image Thumbnails", + "image_phash": "Image perceptual hashes", + "image_phash_tooltip": "For deduplication and identification", + "image_previews": "Animated image previews", + "image_previews_tooltip": "Also generate animated (webp) previews, only required when Scene/Marker Wall Preview type is set to Animated image. 
When browsing they use less CPU than the video previews, but are generated in addition to them and are larger files.", + "image_thumbnails": "Image thumbnails", "interactive_heatmap_speed": "Generate heatmaps and speeds for interactive scenes", - "marker_image_previews": "Marker Animated Image Previews", - "marker_image_previews_tooltip": "Also generate animated (webp) previews, only required when Scene/Marker Wall Preview Type is set to Animated Image. When browsing they use less CPU than the video previews, but are generated in addition to them and are larger files.", - "marker_screenshots": "Marker Screenshots", + "marker_image_previews": "Marker animated image previews", + "marker_image_previews_tooltip": "Also generate animated (webp) previews, only required when Scene/Marker Wall Preview type is set to Animated image. When browsing they use less CPU than the video previews, but are generated in addition to them and are larger files.", + "marker_screenshots": "Marker screenshots", "marker_screenshots_tooltip": "Marker static JPG images", - "markers": "Marker Previews", + "markers": "Marker previews", "markers_tooltip": "20 second videos which begin at the given timecode.", - "override_preview_generation_options": "Override Preview Generation Options", - "override_preview_generation_options_desc": "Override Preview Generation Options for this operation. Defaults are set in System -> Preview Generation.", + "override_preview_generation_options": "Override preview generation options", + "override_preview_generation_options_desc": "Override preview generation options for this operation. Defaults are set in System -> Preview Generation.", "overwrite": "Overwrite existing files", - "phash": "Perceptual hashes", + "phash": "Video perceptual hashes", "phash_tooltip": "For deduplication and scene identification", "preview_exclude_end_time_desc": "Exclude the last x seconds from scene previews. 
This can be a value in seconds, or a percentage (eg 2%) of the total scene duration.", "preview_exclude_end_time_head": "Exclude end time", "preview_exclude_start_time_desc": "Exclude the first x seconds from scene previews. This can be a value in seconds, or a percentage (eg 2%) of the total scene duration.", "preview_exclude_start_time_head": "Exclude start time", - "preview_generation_options": "Preview Generation Options", + "preview_generation_options": "Preview generation options", "preview_options": "Preview Options", "preview_preset_desc": "The preset regulates size, quality and encoding time of preview generation. Presets beyond “slow” have diminishing returns and are not recommended.", "preview_preset_head": "Preview encoding preset", @@ -1013,7 +1060,7 @@ "preview_seg_count_head": "Number of segments in preview", "preview_seg_duration_desc": "Duration of each preview segment, in seconds.", "preview_seg_duration_head": "Preview segment duration", - "sprites": "Scene Scrubber Sprites", + "sprites": "Scene scrubber sprites", "sprites_tooltip": "The set of images displayed below the video player for easy navigation.", "transcodes": "Transcodes", "transcodes_tooltip": "MP4 transcodes will be pre-generated for all content; useful for slow CPUs but requires much more disk space", @@ -1069,7 +1116,10 @@ "select_youngest": "Select the youngest file in the duplicate group", "title": "Duplicate Scenes" }, + "duplicated": "Duplicated", "duplicated_phash": "Duplicated (pHash)", + "duplicated_stash_id": "Duplicated (Stash ID)", + "duplicated_title": "Duplicated (Title)", "duration": "Duration", "effect_filters": { "aspect": "Aspect", @@ -1166,7 +1216,7 @@ "height_cm": "Height (cm)", "help": "Help", "history": "History", - "ignore_auto_tag": "Ignore Auto Tag", + "ignore_auto_tag": "Ignore auto tag", "image": "Image", "image_count": "Image Count", "image_index": "Image #", @@ -1186,6 +1236,7 @@ "last_o_at": "Last O At", "last_o_at_sfw": "Last Like At", "last_played_at": 
"Last Played At", + "latest_scene": "Latest Scene", "library": "Library", "loading": { "generic": "Loading…", @@ -1234,16 +1285,16 @@ "organized": "Organised", "orientation": "Orientation", "package_manager": { - "add_source": "Add Source", - "check_for_updates": "Check for Updates", + "add_source": "Add source", + "check_for_updates": "Check for updates", "confirm_delete_source": "Are you sure you want to delete source {name} ({url})?", "confirm_uninstall": "Are you sure you want to uninstall {number} packages?", "description": "Description", - "edit_source": "Edit Source", + "edit_source": "Edit source", "hide_unselected": "Hide unselected", "install": "Install", - "installed_version": "Installed Version", - "latest_version": "Latest Version", + "installed_version": "Installed version", + "latest_version": "Latest version", "no_packages": "No packages found", "no_sources": "No sources configured", "no_upgradable": "No upgradable packages found", @@ -1254,7 +1305,7 @@ "source": { "local_path": { "description": "Relative path to store packages for this source. 
Note that changing this requires the packages to be moved manually.", - "heading": "Local Path" + "heading": "Local path" }, "name": "Name", "url": "Source URL" @@ -1463,6 +1514,7 @@ "welcome_to_stash": "Welcome to Stash" }, "stash_id": "Stash ID", + "stash_id_count": "Stash ID Count", "stash_id_endpoint": "Stash ID Endpoint URL", "stash_ids": "Stash IDs", "stashbox_search": { diff --git a/ui/v2.5/src/models/list-filter/criteria/criterion.ts b/ui/v2.5/src/models/list-filter/criteria/criterion.ts index 8f30e5d17..ae23a48d4 100644 --- a/ui/v2.5/src/models/list-filter/criteria/criterion.ts +++ b/ui/v2.5/src/models/list-filter/criteria/criterion.ts @@ -12,6 +12,7 @@ import { import TextUtils from "src/utils/text"; import { CriterionType, + IDuplicationValue, IHierarchicalLabelValue, ILabeledId, INumberValue, @@ -36,7 +37,8 @@ export type CriterionValue = | IStashIDValue | IDateValue | ITimestampValue - | IPhashDistanceValue; + | IPhashDistanceValue + | IDuplicationValue; export interface ISavedCriterion { modifier: CriterionModifier; diff --git a/ui/v2.5/src/models/list-filter/criteria/is-missing.ts b/ui/v2.5/src/models/list-filter/criteria/is-missing.ts index 58e3535a6..512616f3c 100644 --- a/ui/v2.5/src/models/list-filter/criteria/is-missing.ts +++ b/ui/v2.5/src/models/list-filter/criteria/is-missing.ts @@ -58,7 +58,8 @@ export const PerformerIsMissingCriterionOption = new IsMissingCriterionOption( "weight", "measurements", "fake_tits", - "career_length", + "career_start", + "career_end", "tattoos", "piercings", "aliases", diff --git a/ui/v2.5/src/models/list-filter/criteria/phash.ts b/ui/v2.5/src/models/list-filter/criteria/phash.ts index 0cbfa155e..e79b0a447 100644 --- a/ui/v2.5/src/models/list-filter/criteria/phash.ts +++ b/ui/v2.5/src/models/list-filter/criteria/phash.ts @@ -1,15 +1,28 @@ import { CriterionModifier, PhashDistanceCriterionInput, - PHashDuplicationCriterionInput, + DuplicationCriterionInput, } from "src/core/generated-graphql"; -import { 
IPhashDistanceValue } from "../types"; -import { - BooleanCriterionOption, - ModifierCriterion, - ModifierCriterionOption, - StringCriterion, -} from "./criterion"; +import { IDuplicationValue, IPhashDistanceValue } from "../types"; +import { ModifierCriterion, ModifierCriterionOption } from "./criterion"; +import { IntlShape } from "react-intl"; + +// Shared mapping of duplication field IDs to their i18n message IDs +export const DUPLICATION_FIELD_MESSAGE_IDS = { + phash: "media_info.phash", + stash_id: "stash_id", + title: "title", + url: "url", +} as const; + +export type DuplicationFieldId = keyof typeof DUPLICATION_FIELD_MESSAGE_IDS; + +export const DUPLICATION_FIELD_IDS: DuplicationFieldId[] = [ + "phash", + "stash_id", + "title", + "url", +]; export const PhashCriterionOption = new ModifierCriterionOption({ messageID: "media_info.phash", @@ -55,20 +68,97 @@ export class PhashCriterion extends ModifierCriterion { } } -export const DuplicatedCriterionOption = new BooleanCriterionOption( - "duplicated_phash", - "duplicated", - () => new DuplicatedCriterion() -); +export const DuplicatedCriterionOption = new ModifierCriterionOption({ + messageID: "duplicated", + type: "duplicated", + modifierOptions: [], // No modifiers for this filter + defaultModifier: CriterionModifier.Equals, + makeCriterion: () => new DuplicatedCriterion(), +}); -export class DuplicatedCriterion extends StringCriterion { +export class DuplicatedCriterion extends ModifierCriterion { constructor() { - super(DuplicatedCriterionOption); + super(DuplicatedCriterionOption, {}); } - public toCriterionInput(): PHashDuplicationCriterionInput { + public cloneValues() { + this.value = { ...this.value }; + } + + // Override getLabel to provide custom formatting for duplication fields + public getLabel(intl: IntlShape): string { + const parts: string[] = []; + const trueLabel = intl.formatMessage({ id: "true" }); + const falseLabel = intl.formatMessage({ id: "false" }); + + for (const fieldId of 
DUPLICATION_FIELD_IDS) { + const fieldValue = this.value[fieldId]; + if (fieldValue !== undefined) { + const label = intl.formatMessage({ + id: DUPLICATION_FIELD_MESSAGE_IDS[fieldId], + }); + parts.push(`${label}: ${fieldValue ? trueLabel : falseLabel}`); + } + } + + // Handle legacy duplicated field + if (parts.length === 0 && this.value.duplicated !== undefined) { + const label = intl.formatMessage({ id: "duplicated_phash" }); + return `${label}: ${this.value.duplicated ? trueLabel : falseLabel}`; + } + + if (parts.length === 0) { + return intl.formatMessage({ id: "duplicated" }); + } + + return parts.join(", "); + } + + protected getLabelValue(intl: IntlShape): string { + // Required by abstract class - returns basic label when getLabel isn't overridden + return intl.formatMessage({ id: "duplicated" }); + } + + protected toCriterionInput(): DuplicationCriterionInput { return { - duplicated: this.value === "true", + duplicated: this.value.duplicated, + distance: this.value.distance, + phash: this.value.phash, + url: this.value.url, + stash_id: this.value.stash_id, + title: this.value.title, }; } + + // Override to handle legacy saved formats + public setFromSavedCriterion(criterion: unknown): void { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const c = criterion as any; + + // Handle various saved formats + if (c.value !== undefined) { + // New format: { value: { phash: true, ... 
} } + if (typeof c.value === "object") { + this.value = c.value as IDuplicationValue; + } else if (typeof c.value === "string") { + // Legacy format: { value: "true" } - convert to phash + this.value = { phash: c.value === "true" }; + } + } else if (typeof c === "object") { + // Direct value format + this.value = c as IDuplicationValue; + } + + if (c.modifier) { + this.modifier = c.modifier; + } + } + + public isValid(): boolean { + // Check if any duplication field is set + const hasFieldSet = DUPLICATION_FIELD_IDS.some( + (fieldId) => this.value[fieldId] !== undefined + ); + return hasFieldSet || this.value.duplicated !== undefined; + } } diff --git a/ui/v2.5/src/models/list-filter/groups.ts b/ui/v2.5/src/models/list-filter/groups.ts index 5a263b272..ee0c90d73 100644 --- a/ui/v2.5/src/models/list-filter/groups.ts +++ b/ui/v2.5/src/models/list-filter/groups.ts @@ -63,6 +63,7 @@ const criterionOptions = [ createMandatoryNumberCriterionOption("sub_group_count"), TagsCriterionOption, createMandatoryNumberCriterionOption("tag_count"), + createMandatoryNumberCriterionOption("scene_count"), createMandatoryTimestampCriterionOption("created_at"), createMandatoryTimestampCriterionOption("updated_at"), ]; diff --git a/ui/v2.5/src/models/list-filter/images.ts b/ui/v2.5/src/models/list-filter/images.ts index 0b2e06df0..2d3db8265 100644 --- a/ui/v2.5/src/models/list-filter/images.ts +++ b/ui/v2.5/src/models/list-filter/images.ts @@ -22,6 +22,7 @@ import { import { ListFilterOptions, MediaSortByOptions } from "./filter-options"; import { DisplayMode } from "./types"; import { GalleriesCriterionOption } from "./criteria/galleries"; +import { PhashCriterionOption } from "./criteria/phash"; const defaultSortBy = "path"; @@ -47,6 +48,7 @@ const criterionOptions = [ createStringCriterionOption("details"), createStringCriterionOption("photographer"), createMandatoryStringCriterionOption("checksum", "media_info.checksum"), + PhashCriterionOption, PathCriterionOption, 
GalleriesCriterionOption, OrganizedCriterionOption, diff --git a/ui/v2.5/src/models/list-filter/performers.ts b/ui/v2.5/src/models/list-filter/performers.ts index fcc152d01..372dad342 100644 --- a/ui/v2.5/src/models/list-filter/performers.ts +++ b/ui/v2.5/src/models/list-filter/performers.ts @@ -31,7 +31,9 @@ const sortByOptions = [ "penis_length", "play_count", "last_played_at", - "career_length", + "latest_scene", + "career_start", + "career_end", "weight", "measurements", "scenes_duration", @@ -74,6 +76,8 @@ const numberCriteria: CriterionType[] = [ "age", "weight", "penis_length", + "career_start", + "career_end", ]; const stringCriteria: CriterionType[] = [ @@ -85,7 +89,6 @@ const stringCriteria: CriterionType[] = [ "eye_color", "measurements", "fake_tits", - "career_length", "tattoos", "piercings", "aliases", diff --git a/ui/v2.5/src/models/list-filter/scenes.ts b/ui/v2.5/src/models/list-filter/scenes.ts index 5fdb6a770..251e2592d 100644 --- a/ui/v2.5/src/models/list-filter/scenes.ts +++ b/ui/v2.5/src/models/list-filter/scenes.ts @@ -133,6 +133,7 @@ const criterionOptions = [ GalleriesCriterionOption, createStringCriterionOption("url"), StashIDCriterionOption, + createMandatoryNumberCriterionOption("stash_id_count"), InteractiveCriterionOption, CaptionsCriterionOption, createMandatoryNumberCriterionOption("interactive_speed"), diff --git a/ui/v2.5/src/models/list-filter/studios.ts b/ui/v2.5/src/models/list-filter/studios.ts index 02dfae2f6..a38540a47 100644 --- a/ui/v2.5/src/models/list-filter/studios.ts +++ b/ui/v2.5/src/models/list-filter/studios.ts @@ -21,6 +21,7 @@ const sortByOptions = [ "random", "rating", "scenes_duration", + "latest_scene", ] .map(ListFilterOptions.createSortBy) .concat([ @@ -52,6 +53,7 @@ const criterionOptions = [ TagsCriterionOption, RatingCriterionOption, createBooleanCriterionOption("ignore_auto_tag"), + createBooleanCriterionOption("organized"), createMandatoryNumberCriterionOption("tag_count"), 
createMandatoryNumberCriterionOption("scene_count"), createMandatoryNumberCriterionOption("image_count"), diff --git a/ui/v2.5/src/models/list-filter/types.ts b/ui/v2.5/src/models/list-filter/types.ts index 83ebaa010..7fe334c4c 100644 --- a/ui/v2.5/src/models/list-filter/types.ts +++ b/ui/v2.5/src/models/list-filter/types.ts @@ -47,9 +47,15 @@ export interface IRangeValue { export type INumberValue = IRangeValue; export type IDateValue = IRangeValue; export type ITimestampValue = IRangeValue; -export interface IPHashDuplicationValue { - duplicated: boolean; - distance?: number; // currently not implemented +export interface IDuplicationValue { + // Deprecated: Use phash field instead. Kept for backwards compatibility. + duplicated?: boolean; + // Currently not implemented. Intended for phash distance matching. + distance?: number; + phash?: boolean; + url?: boolean; + stash_id?: boolean; + title?: boolean; } export interface IStashIDValue { @@ -160,6 +166,8 @@ export type CriterionType = | "penis_length" | "circumcised" | "career_length" + | "career_start" + | "career_end" | "tattoos" | "piercings" | "aliases" @@ -200,6 +208,7 @@ export type CriterionType = | "ignore_auto_tag" | "file_count" | "stash_id_endpoint" + | "stash_id_count" | "date" | "created_at" | "updated_at" diff --git a/ui/v2.5/src/pluginApi.d.ts b/ui/v2.5/src/pluginApi.d.ts index 1aae25129..dd881c0b4 100644 --- a/ui/v2.5/src/pluginApi.d.ts +++ b/ui/v2.5/src/pluginApi.d.ts @@ -666,6 +666,8 @@ declare namespace PluginApi { DetailImage: React.FC; ExternalLinkButtons: React.FC; ExternalLinksButton: React.FC; + FilteredGalleryList: React.FC; + FilteredSceneList: React.FC; FolderSelect: React.FC; FrontPage: React.FC; GalleryCard: React.FC; diff --git a/ui/v2.5/src/plugins.tsx b/ui/v2.5/src/plugins.tsx index 41577a92c..00ffb9ca4 100644 --- a/ui/v2.5/src/plugins.tsx +++ b/ui/v2.5/src/plugins.tsx @@ -59,7 +59,8 @@ function sortPlugins(plugins: PluginList) { // load all plugins and their dependencies // 
returns true when all plugins are loaded, regardless of success or failure -function useLoadPlugins() { +// if disableCustomizations is true, skip loading plugins entirely +function useLoadPlugins(disableCustomizations?: boolean) { const { data: plugins, loading: pluginsLoading, @@ -74,6 +75,12 @@ function useLoadPlugins() { }, [plugins?.plugins, pluginsLoading, pluginsError]); const pluginJavascripts = useMemoOnce(() => { + // Skip loading plugin JS if customizations are disabled. + // Note: We check inside useMemoOnce rather than early-returning from useLoadPlugins + // to comply with React's rules of hooks - hooks must be called unconditionally. + if (disableCustomizations) { + return [[], true]; + } return [ uniq( sortedPlugins @@ -83,9 +90,12 @@ function useLoadPlugins() { ), !!sortedPlugins && !pluginsLoading && !pluginsError, ]; - }, [sortedPlugins, pluginsLoading, pluginsError]); + }, [sortedPlugins, pluginsLoading, pluginsError, disableCustomizations]); const pluginCSS = useMemoOnce(() => { + if (disableCustomizations) { + return [[], true]; + } return [ uniq( sortedPlugins @@ -95,7 +105,7 @@ function useLoadPlugins() { ), !!sortedPlugins && !pluginsLoading && !pluginsError, ]; - }, [sortedPlugins, pluginsLoading, pluginsError]); + }, [sortedPlugins, pluginsLoading, pluginsError, disableCustomizations]); const pluginJavascriptLoaded = useScript( pluginJavascripts ??
[], @@ -109,11 +119,15 @@ function useLoadPlugins() { }; } -export const PluginsLoader: React.FC> = ({ - children, -}) => { +interface IPluginsLoaderProps { + disableCustomizations?: boolean; +} + +export const PluginsLoader: React.FC< + React.PropsWithChildren +> = ({ disableCustomizations, children }) => { const Toast = useToast(); - const { loading: loaded, error } = useLoadPlugins(); + const { loading: loaded, error } = useLoadPlugins(disableCustomizations); useEffect(() => { if (error) { diff --git a/ui/v2.5/src/utils/navigation.ts b/ui/v2.5/src/utils/navigation.ts index 581d079c7..17d9dfe6b 100644 --- a/ui/v2.5/src/utils/navigation.ts +++ b/ui/v2.5/src/utils/navigation.ts @@ -342,6 +342,15 @@ const makeScenesPHashMatchUrl = (phash: GQL.Maybe | undefined) => { return `/scenes?${filter.makeQueryParameters()}`; }; +const makeImagesPHashMatchUrl = (phash: GQL.Maybe | undefined) => { + if (!phash) return "#"; + const filter = new ListFilterModel(GQL.FilterMode.Images, undefined); + const criterion = new PhashCriterion(); + criterion.value = { value: phash }; + filter.criteria.push(criterion); + return `/images?${filter.makeQueryParameters()}`; +}; + const makeGalleryImagesUrl = ( gallery: Partial, extraCriteria?: ModifierCriterion[] @@ -493,6 +502,7 @@ const NavUtils = { makeTagGroupsUrl, makeScenesPHashMatchUrl, makeSceneMarkerUrl, + makeImagesPHashMatchUrl, makeGroupScenesUrl, makeChildStudiosUrl, makeGalleryImagesUrl, diff --git a/ui/v2.5/src/utils/yup.ts b/ui/v2.5/src/utils/yup.ts index 5ae8123df..a9c4f69e1 100644 --- a/ui/v2.5/src/utils/yup.ts +++ b/ui/v2.5/src/utils/yup.ts @@ -92,45 +92,6 @@ export function yupUniqueStringList(intl: IntlShape) { }); } -export function yupUniqueAliases(intl: IntlShape, nameField: string) { - return yupRequiredStringArray(intl) - .defined() - .test({ - name: "unique", - test(value) { - const aliases = [this.parent[nameField].toLowerCase()]; - const dupes: number[] = []; - for (let i = 0; i < value.length; i++) { - const s = 
value[i].toLowerCase(); - if (aliases.includes(s)) { - dupes.push(i); - } else { - aliases.push(s); - } - } - if (dupes.length === 0) return true; - - const msg = yup.ValidationError.formatError( - intl.formatMessage({ id: "validation.unique" }), - { - label: this.schema.spec.label, - path: this.path, - } - ); - const errors = dupes.map( - (i) => - new yup.ValidationError( - msg, - value[i], - `${this.path}["${i}"]`, - "unique" - ) - ); - return new yup.ValidationError(errors, value, this.path, "unique"); - }, - }); -} - export function yupDateString(intl: IntlShape) { return yup .string()