diff --git a/Makefile b/Makefile index b2630a4f0..79551c2ba 100644 --- a/Makefile +++ b/Makefile @@ -7,21 +7,15 @@ ifeq (${SHELL}, cmd) endif ifdef IS_WIN_SHELL - SEPARATOR := && - SET := set RM := del /s /q RMDIR := rmdir /s /q - PWD := $(shell echo %cd%) else - SEPARATOR := ; - SET := export RM := rm -f RMDIR := rm -rf endif # set LDFLAGS environment variable to any extra ldflags required # set OUTPUT to generate a specific binary name - LDFLAGS := $(LDFLAGS) ifdef OUTPUT OUTPUT := -o $(OUTPUT) @@ -34,10 +28,16 @@ export CGO_ENABLED = 1 GO_BUILD_TAGS_WINDOWS := sqlite_omit_load_extension sqlite_stat4 osusergo GO_BUILD_TAGS_DEFAULT = $(GO_BUILD_TAGS_WINDOWS) netgo -.PHONY: release pre-build +# set STASH_NOLEGACY environment variable or uncomment to disable legacy browser support +# STASH_NOLEGACY := true +# set STASH_SOURCEMAPS environment variable or uncomment to enable UI sourcemaps +# STASH_SOURCEMAPS := true + +.PHONY: release release: pre-ui generate ui build-release +.PHONY: pre-build pre-build: ifndef BUILD_DATE $(eval BUILD_DATE := $(shell go run -mod=vendor scripts/getDate.go)) @@ -55,29 +55,37 @@ ifndef OFFICIAL_BUILD $(eval OFFICIAL_BUILD := false) endif +.PHONY: build-flags +build-flags: pre-build + $(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/internal/api.buildstamp=$(BUILD_DATE)') + $(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/internal/api.githash=$(GITHASH)') + $(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/internal/api.version=$(STASH_VERSION)') + $(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/internal/manager/config.officialBuild=$(OFFICIAL_BUILD)') ifndef GO_BUILD_TAGS $(eval GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT)) endif - + $(eval BUILD_FLAGS := -mod=vendor -v -tags "$(GO_BUILD_TAGS)" $(GO_BUILD_FLAGS) -ldflags "$(LDFLAGS) $(EXTRA_LDFLAGS)") # NOTE: the build target still includes netgo because we cannot detect # Windows easily from the Makefile. 
-build: pre-build +.PHONY: build +build: build-flags build: - $(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/internal/api.version=$(STASH_VERSION)' -X 'github.com/stashapp/stash/internal/api.buildstamp=$(BUILD_DATE)' -X 'github.com/stashapp/stash/internal/api.githash=$(GITHASH)') - $(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/internal/manager/config.officialBuild=$(OFFICIAL_BUILD)') - go build $(OUTPUT) -mod=vendor -v -tags "$(GO_BUILD_TAGS)" $(GO_BUILD_FLAGS) -ldflags "$(LDFLAGS) $(EXTRA_LDFLAGS) $(PLATFORM_SPECIFIC_LDFLAGS)" ./cmd/stash + go build $(OUTPUT) $(BUILD_FLAGS) ./cmd/stash # strips debug symbols from the release build +.PHONY: build-release build-release: EXTRA_LDFLAGS := -s -w build-release: GO_BUILD_FLAGS := -trimpath build-release: build +.PHONY: build-release-static build-release-static: EXTRA_LDFLAGS := -extldflags=-static -s -w build-release-static: GO_BUILD_FLAGS := -trimpath build-release-static: build # cross-compile- targets should be run within the compiler docker container +.PHONY: cross-compile-windows cross-compile-windows: export GOOS := windows cross-compile-windows: export GOARCH := amd64 cross-compile-windows: export CC := x86_64-w64-mingw32-gcc @@ -86,6 +94,7 @@ cross-compile-windows: OUTPUT := -o dist/stash-win.exe cross-compile-windows: GO_BUILD_TAGS := $(GO_BUILD_TAGS_WINDOWS) cross-compile-windows: build-release-static +.PHONY: cross-compile-macos-intel cross-compile-macos-intel: export GOOS := darwin cross-compile-macos-intel: export GOARCH := amd64 cross-compile-macos-intel: export CC := o64-clang @@ -95,6 +104,7 @@ cross-compile-macos-intel: GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT) # can't use static build for OSX cross-compile-macos-intel: build-release +.PHONY: cross-compile-macos-applesilicon cross-compile-macos-applesilicon: export GOOS := darwin cross-compile-macos-applesilicon: export GOARCH := arm64 cross-compile-macos-applesilicon: export CC := oa64e-clang @@ -104,6 +114,7 @@ 
cross-compile-macos-applesilicon: GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT) # can't use static build for OSX cross-compile-macos-applesilicon: build-release +.PHONY: cross-compile-macos cross-compile-macos: rm -rf dist/Stash.app dist/Stash-macos.zip make cross-compile-macos-applesilicon @@ -118,18 +129,21 @@ cross-compile-macos: cd dist && zip -r Stash-macos.zip Stash.app && cd .. rm -rf dist/Stash.app +.PHONY: cross-compile-freebsd cross-compile-freebsd: export GOOS := freebsd cross-compile-freebsd: export GOARCH := amd64 cross-compile-freebsd: OUTPUT := -o dist/stash-freebsd cross-compile-freebsd: GO_BUILD_TAGS += netgo cross-compile-freebsd: build-release-static +.PHONY: cross-compile-linux cross-compile-linux: export GOOS := linux cross-compile-linux: export GOARCH := amd64 cross-compile-linux: OUTPUT := -o dist/stash-linux cross-compile-linux: GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT) cross-compile-linux: build-release-static +.PHONY: cross-compile-linux-arm64v8 cross-compile-linux-arm64v8: export GOOS := linux cross-compile-linux-arm64v8: export GOARCH := arm64 cross-compile-linux-arm64v8: export CC := aarch64-linux-gnu-gcc @@ -137,6 +151,7 @@ cross-compile-linux-arm64v8: OUTPUT := -o dist/stash-linux-arm64v8 cross-compile-linux-arm64v8: GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT) cross-compile-linux-arm64v8: build-release-static +.PHONY: cross-compile-linux-arm32v7 cross-compile-linux-arm32v7: export GOOS := linux cross-compile-linux-arm32v7: export GOARCH := arm cross-compile-linux-arm32v7: export GOARM := 7 @@ -145,6 +160,7 @@ cross-compile-linux-arm32v7: OUTPUT := -o dist/stash-linux-arm32v7 cross-compile-linux-arm32v7: GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT) cross-compile-linux-arm32v7: build-release-static +.PHONY: cross-compile-linux-arm32v6 cross-compile-linux-arm32v6: export GOOS := linux cross-compile-linux-arm32v6: export GOARCH := arm cross-compile-linux-arm32v6: export GOARM := 6 @@ -153,6 +169,7 @@ cross-compile-linux-arm32v6: OUTPUT := -o 
dist/stash-linux-arm32v6 cross-compile-linux-arm32v6: GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT) cross-compile-linux-arm32v6: build-release-static +.PHONY: cross-compile-all cross-compile-all: make cross-compile-windows make cross-compile-macos-intel @@ -164,15 +181,16 @@ cross-compile-all: .PHONY: touch-ui touch-ui: -ifndef IS_WIN_SHELL - @mkdir -p ui/v2.5/build - @touch ui/v2.5/build/index.html -else +ifdef IS_WIN_SHELL @if not exist "ui\\v2.5\\build" mkdir ui\\v2.5\\build @type nul >> ui/v2.5/build/index.html +else + @mkdir -p ui/v2.5/build + @touch ui/v2.5/build/index.html endif # Regenerates GraphQL files +.PHONY: generate generate: generate-backend generate-frontend .PHONY: generate-frontend @@ -219,14 +237,14 @@ generate-test-mocks: # runs server # sets the config file to use the local dev config .PHONY: server-start -server-start: export STASH_CONFIG_FILE=config.yml -server-start: -ifndef IS_WIN_SHELL - @mkdir -p .local -else +server-start: export STASH_CONFIG_FILE := config.yml +server-start: build-flags +ifdef IS_WIN_SHELL @if not exist ".local" mkdir .local +else + @mkdir -p .local endif - cd .local && go run ../cmd/stash + cd .local && go run $(BUILD_FLAGS) ../cmd/stash # removes local dev config files .PHONY: server-clean @@ -239,18 +257,32 @@ server-clean: pre-ui: cd ui/v2.5 && yarn install --frozen-lockfile +.PHONY: ui-env +ui-env: pre-build + $(eval export VITE_APP_DATE := $(BUILD_DATE)) + $(eval export VITE_APP_GITHASH := $(GITHASH)) + $(eval export VITE_APP_STASH_VERSION := $(STASH_VERSION)) +ifdef STASH_NOLEGACY + $(eval export VITE_APP_NOLEGACY := true) +endif +ifdef STASH_SOURCEMAPS + $(eval export VITE_APP_SOURCEMAPS := true) +endif + .PHONY: ui -ui: pre-build - $(SET) VITE_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \ - $(SET) VITE_APP_GITHASH=$(GITHASH) $(SEPARATOR) \ - $(SET) VITE_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \ +ui: ui-env cd ui/v2.5 && yarn build +.PHONY: ui-nolegacy +ui-nolegacy: STASH_NOLEGACY := true +ui-nolegacy: ui + 
+.PHONY: ui-sourcemaps +ui-sourcemaps: STASH_SOURCEMAPS := true +ui-sourcemaps: ui + .PHONY: ui-start -ui-start: pre-build - $(SET) VITE_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \ - $(SET) VITE_APP_GITHASH=$(GITHASH) $(SEPARATOR) \ - $(SET) VITE_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \ +ui-start: ui-env cd ui/v2.5 && yarn start --host .PHONY: fmt-ui diff --git a/README.md b/README.md index 5f2c0fdcd..3840a654e 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,7 @@ https://stashapp.cc [![Docker pulls](https://img.shields.io/docker/pulls/stashapp/stash.svg)](https://hub.docker.com/r/stashapp/stash 'DockerHub') [![Open Collective backers](https://img.shields.io/opencollective/backers/stashapp?logo=opencollective)](https://opencollective.com/stashapp) [![Go Report Card](https://goreportcard.com/badge/github.com/stashapp/stash)](https://goreportcard.com/report/github.com/stashapp/stash) +[![Matrix](https://img.shields.io/matrix/stashapp:unredacted.org?logo=matrix&server_fqdn=matrix.org)](https://matrix.to/#/#stashapp:unredacted.org) [![Discord](https://img.shields.io/discord/559159668438728723.svg?logo=discord)](https://discord.gg/2TsNFKt) [![GitHub release (latest by date)](https://img.shields.io/github/v/release/stashapp/stash?logo=github)](https://github.com/stashapp/stash/releases/latest) [![GitHub issues by-label](https://img.shields.io/github/issues-raw/stashapp/stash/bounty)](https://github.com/stashapp/stash/labels/bounty) @@ -58,6 +59,7 @@ Check out our documentation on [Stash-Docs](https://docs.stashapp.cc) for inform For more help you can: * Check the in-app documentation, in the top right corner of the app (it's also mirrored on [Stash-Docs](https://docs.stashapp.cc/in-app-manual)) +* Join the [Matrix space](https://matrix.to/#/#stashapp:unredacted.org) * Join the [Discord server](https://discord.gg/2TsNFKt), where the community can offer support. 
* Start a [discussion on GitHub](https://github.com/stashapp/stash/discussions) diff --git a/cmd/stash/main.go b/cmd/stash/main.go index 7b1adeb26..4aadf4fb1 100644 --- a/cmd/stash/main.go +++ b/cmd/stash/main.go @@ -34,7 +34,7 @@ func main() { }() go handleSignals() - desktop.Start(manager.GetInstance(), &manager.FaviconProvider{UIBox: ui.UIBox}) + desktop.Start(manager.GetInstance(), &ui.FaviconProvider) blockForever() } diff --git a/go.mod b/go.mod index 1fbf6858a..d39d21b98 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/chromedp/chromedp v0.7.3 github.com/corona10/goimagehash v1.0.3 github.com/disintegration/imaging v1.6.0 - github.com/fvbommel/sortorder v1.0.2 + github.com/fvbommel/sortorder v1.1.0 github.com/go-chi/chi v4.0.2+incompatible github.com/golang-jwt/jwt/v4 v4.0.0 github.com/golang-migrate/migrate/v4 v4.15.0-beta.1 @@ -24,7 +24,6 @@ require ( github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 github.com/remeh/sizedwaitgroup v1.0.0 github.com/robertkrimen/otto v0.0.0-20200922221731-ef014fd054ac - github.com/rs/cors v1.6.0 github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f github.com/sirupsen/logrus v1.8.1 github.com/spf13/afero v1.8.2 // indirect @@ -48,6 +47,7 @@ require ( require ( github.com/asticode/go-astisub v0.20.0 github.com/doug-martin/goqu/v9 v9.18.0 + github.com/go-chi/cors v1.2.1 github.com/go-chi/httplog v0.2.1 github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 github.com/hashicorp/golang-lru v0.5.4 diff --git a/go.sum b/go.sum index 14bf5606a..83456f972 100644 --- a/go.sum +++ b/go.sum @@ -233,8 +233,8 @@ github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMo github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= github.com/fsouza/fake-gcs-server v1.17.0/go.mod h1:D1rTE4YCyHFNa99oyJJ5HyclvN/0uQR+pM/VdlL83bw= -github.com/fvbommel/sortorder 
v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo= -github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= +github.com/fvbommel/sortorder v1.1.0 h1:fUmoe+HLsBTctBDoaBwpQo5N+nrCp8g/BjKb/6ZQmYw= +github.com/fvbommel/sortorder v1.1.0/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE= github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24= @@ -242,6 +242,8 @@ github.com/go-chi/chi v4.0.2+incompatible h1:maB6vn6FqCxrpz4FqWdh4+lwpyZIQS7YEAU github.com/go-chi/chi v4.0.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-chi/chi/v5 v5.0.0 h1:DBPx88FjZJH3FsICfDAfIfnb7XxKIYVGG6lOPlhENAg= github.com/go-chi/chi/v5 v5.0.0/go.mod h1:BBug9lr0cqtdAhsu6R4AAdvufI0/XBzAQSsUqJpoZOs= +github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4= +github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= github.com/go-chi/httplog v0.2.1 h1:KgCtIUkYNlfIsUPzE3utxd1KDKOvCrnAKaqdo0rmrh0= github.com/go-chi/httplog v0.2.1/go.mod h1:JyHOFO9twSfGoTin/RoP25Lx2a9Btq10ug+sgxe0+bo= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= @@ -668,8 +670,6 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rs/cors v1.6.0 h1:G9tHG9lebljV9mfp9SNPDL36nCDxmo3zTlAf1YgvzmI= -github.com/rs/cors v1.6.0/go.mod 
h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/xid v1.3.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= diff --git a/graphql/documents/data/config.graphql b/graphql/documents/data/config.graphql index 173a7948e..2a56e9512 100644 --- a/graphql/documents/data/config.graphql +++ b/graphql/documents/data/config.graphql @@ -25,6 +25,7 @@ fragment ConfigGeneralData on ConfigGeneralResult { maxTranscodeSize maxStreamingTranscodeSize writeImageThumbnails + createImageClipsFromVideos apiKey username password @@ -99,6 +100,7 @@ fragment ConfigDLNAData on ConfigDLNAResult { enabled whitelistedIPs interfaces + videoSortOrder } fragment ConfigScrapingData on ConfigScrapingResult { @@ -139,6 +141,7 @@ fragment ConfigDefaultSettingsData on ConfigDefaultSettingsResult { scanGenerateSprites scanGeneratePhashes scanGenerateThumbnails + scanGenerateClipPreviews } identify { @@ -179,6 +182,7 @@ fragment ConfigDefaultSettingsData on ConfigDefaultSettingsResult { transcodes phashes interactiveHeatmapsSpeeds + clipPreviews } deleteFile diff --git a/graphql/documents/data/file.graphql b/graphql/documents/data/file.graphql index 7acb95feb..52a4c50f8 100644 --- a/graphql/documents/data/file.graphql +++ b/graphql/documents/data/file.graphql @@ -43,4 +43,46 @@ fragment GalleryFileData on GalleryFile { type value } -} \ No newline at end of file +} + +fragment VisualFileData on VisualFile { + ... on BaseFile { + id + path + size + mod_time + fingerprints { + type + value + } + } + ... on ImageFile { + id + path + size + mod_time + width + height + fingerprints { + type + value + } + } + ... 
on VideoFile { + id + path + size + mod_time + duration + video_codec + audio_codec + width + height + frame_rate + bit_rate + fingerprints { + type + value + } + } +} diff --git a/graphql/documents/data/image-slim.graphql b/graphql/documents/data/image-slim.graphql index 4f787d36e..9f84904dc 100644 --- a/graphql/documents/data/image-slim.graphql +++ b/graphql/documents/data/image-slim.graphql @@ -13,6 +13,7 @@ fragment SlimImageData on Image { paths { thumbnail + preview image } @@ -45,4 +46,8 @@ fragment SlimImageData on Image { favorite image_path } + + visual_files { + ...VisualFileData + } } diff --git a/graphql/documents/data/image.graphql b/graphql/documents/data/image.graphql index f9adb5515..155c940e4 100644 --- a/graphql/documents/data/image.graphql +++ b/graphql/documents/data/image.graphql @@ -15,6 +15,7 @@ fragment ImageData on Image { paths { thumbnail + preview image } @@ -33,4 +34,8 @@ fragment ImageData on Image { performers { ...PerformerData } + + visual_files { + ...VisualFileData + } } diff --git a/graphql/documents/data/performer-slim.graphql b/graphql/documents/data/performer-slim.graphql index 4bac5d90b..65019b98b 100644 --- a/graphql/documents/data/performer-slim.graphql +++ b/graphql/documents/data/performer-slim.graphql @@ -16,6 +16,8 @@ fragment SlimPerformerData on Performer { eye_color height_cm fake_tits + penis_length + circumcised career_length tattoos piercings diff --git a/graphql/documents/data/performer.graphql b/graphql/documents/data/performer.graphql index 338ae0e10..c89ce1e13 100644 --- a/graphql/documents/data/performer.graphql +++ b/graphql/documents/data/performer.graphql @@ -14,6 +14,8 @@ fragment PerformerData on Performer { height_cm measurements fake_tits + penis_length + circumcised career_length tattoos piercings @@ -25,6 +27,8 @@ fragment PerformerData on Performer { image_count gallery_count movie_count + performer_count + o_counter tags { ...SlimTagData diff --git a/graphql/documents/data/scrapers.graphql 
b/graphql/documents/data/scrapers.graphql index 8d02b3362..1d4553a97 100644 --- a/graphql/documents/data/scrapers.graphql +++ b/graphql/documents/data/scrapers.graphql @@ -13,6 +13,8 @@ fragment ScrapedPerformerData on ScrapedPerformer { height measurements fake_tits + penis_length + circumcised career_length tattoos piercings @@ -43,6 +45,8 @@ fragment ScrapedScenePerformerData on ScrapedPerformer { height measurements fake_tits + penis_length + circumcised career_length tattoos piercings diff --git a/graphql/documents/queries/scene.graphql b/graphql/documents/queries/scene.graphql index 1f762855a..e62303dc7 100644 --- a/graphql/documents/queries/scene.graphql +++ b/graphql/documents/queries/scene.graphql @@ -20,8 +20,8 @@ query FindScenesByPathRegex($filter: FindFilterType) { } } -query FindDuplicateScenes($distance: Int) { - findDuplicateScenes(distance: $distance) { +query FindDuplicateScenes($distance: Int, $duration_diff: Float) { + findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) { ...SlimSceneData } } diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index 112f8aba9..3a4f6e738 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -14,8 +14,16 @@ type Query { findScenesByPathRegex(filter: FindFilterType): FindScenesResultType! - """ Returns any groups of scenes that are perceptual duplicates within the queried distance """ - findDuplicateScenes(distance: Int): [[Scene!]!]! + """ + Returns any groups of scenes that are perceptual duplicates within the queried distance + and the difference between their duration is smaller than durationDiff + """ + findDuplicateScenes( + distance: Int, + """Max difference in seconds between files in order to be considered for similarity matching. + Fractional seconds are ok: 0.5 will mean only files that have durations within 0.5 seconds between them will be matched based on PHash distance.""" + duration_diff: Float + ): [[Scene!]!]! 
"""Return valid stream paths""" sceneStreams(id: ID): [SceneStreamEndpoint!]! @@ -295,14 +303,14 @@ type Mutation { metadataClean(input: CleanMetadataInput!): ID! """Identifies scenes using scrapers. Returns the job ID""" metadataIdentify(input: IdentifyMetadataInput!): ID! - + """Migrate generated files for the current hash naming""" migrateHashNaming: ID! """Migrates legacy scene screenshot files into the blob storage""" migrateSceneScreenshots(input: MigrateSceneScreenshotsInput!): ID! """Migrates blobs from the old storage system to the current one""" migrateBlobs(input: MigrateBlobsInput!): ID! - + """Anonymise the database in a separate file. Optionally returns a link to download the database file""" anonymiseDatabase(input: AnonymiseDatabaseInput!): String diff --git a/graphql/schema/types/config.graphql b/graphql/schema/types/config.graphql index df0aba092..6c9939385 100644 --- a/graphql/schema/types/config.graphql +++ b/graphql/schema/types/config.graphql @@ -106,6 +106,8 @@ input ConfigGeneralInput { """Write image thumbnails to disk when generating on the fly""" writeImageThumbnails: Boolean + """Create Image Clips from Video extensions when Videos are disabled in Library""" + createImageClipsFromVideos: Boolean """Username""" username: String """Password""" @@ -215,6 +217,8 @@ type ConfigGeneralResult { """Write image thumbnails to disk when generating on the fly""" writeImageThumbnails: Boolean! + """Create Image Clips from Video extensions when Videos are disabled in Library""" + createImageClipsFromVideos: Boolean! """API Key""" apiKey: String! """Username""" @@ -431,6 +435,8 @@ input ConfigDLNAInput { whitelistedIPs: [String!] """List of interfaces to run DLNA on. Empty for all""" interfaces: [String!] + """Order to sort videos""" + videoSortOrder: String } type ConfigDLNAResult { @@ -441,6 +447,8 @@ type ConfigDLNAResult { whitelistedIPs: [String!]! """List of interfaces to run DLNA on. Empty for all""" interfaces: [String!]! 
+ """Order to sort videos""" + videoSortOrder: String! } input ConfigScrapingInput { diff --git a/graphql/schema/types/file.graphql b/graphql/schema/types/file.graphql index 09b733c39..755d63215 100644 --- a/graphql/schema/types/file.graphql +++ b/graphql/schema/types/file.graphql @@ -73,12 +73,14 @@ type ImageFile implements BaseFile { fingerprints: [Fingerprint!]! width: Int! - height: Int! + height: Int! created_at: Time! updated_at: Time! } +union VisualFile = VideoFile | ImageFile + type GalleryFile implements BaseFile { id: ID! path: String! diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index 9e124e49e..55724cc42 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -76,6 +76,10 @@ input PerformerFilterType { measurements: StringCriterionInput """Filter by fake tits value""" fake_tits: StringCriterionInput + """Filter by penis length value""" + penis_length: FloatCriterionInput + """Filter by ciricumcision""" + circumcised: CircumcisionCriterionInput """Filter by career length""" career_length: StringCriterionInput """Filter by tattoos""" @@ -98,6 +102,8 @@ input PerformerFilterType { image_count: IntCriterionInput """Filter by gallery count""" gallery_count: IntCriterionInput + """Filter by o count""" + o_counter: IntCriterionInput """Filter by StashID""" stash_id: StringCriterionInput @deprecated(reason: "Use stash_id_endpoint instead") """Filter by StashID""" @@ -116,6 +122,8 @@ input PerformerFilterType { death_year: IntCriterionInput """Filter by studios where performer appears in scene/image/gallery""" studios: HierarchicalMultiCriterionInput + """Filter by performers where performer appears with another performer in scene/image/gallery""" + performers: MultiCriterionInput """Filter by autotag ignore value""" ignore_auto_tag: Boolean """Filter by birthdate""" @@ -165,7 +173,9 @@ input SceneFilterType { """Filter by file checksum""" checksum: StringCriterionInput 
"""Filter by file phash""" - phash: StringCriterionInput + phash: StringCriterionInput @deprecated(reason: "Use phash_distance instead") + """Filter by file phash distance""" + phash_distance: PhashDistanceCriterionInput """Filter by path""" path: StringCriterionInput """Filter by file count""" @@ -499,20 +509,33 @@ input IntCriterionInput { modifier: CriterionModifier! } +input FloatCriterionInput { + value: Float! + value2: Float + modifier: CriterionModifier! +} + input MultiCriterionInput { value: [ID!] modifier: CriterionModifier! + excludes: [ID!] } input GenderCriterionInput { value: GenderEnum modifier: CriterionModifier! } + +input CircumcisionCriterionInput { + value: [CircumisedEnum!] + modifier: CriterionModifier! +} input HierarchicalMultiCriterionInput { value: [ID!] modifier: CriterionModifier! depth: Int + excludes: [ID!] } input DateCriterionInput { @@ -527,6 +550,12 @@ input TimestampCriterionInput { modifier: CriterionModifier! } +input PhashDistanceCriterionInput { + value: String! + modifier: CriterionModifier! + distance: Int +} + enum FilterMode { SCENES, PERFORMERS, diff --git a/graphql/schema/types/image.graphql b/graphql/schema/types/image.graphql index 6832cab24..c2e34f085 100644 --- a/graphql/schema/types/image.graphql +++ b/graphql/schema/types/image.graphql @@ -16,8 +16,9 @@ type Image { file_mod_time: Time @deprecated(reason: "Use files.mod_time") - file: ImageFileType! @deprecated(reason: "Use files.mod_time") - files: [ImageFile!]! + file: ImageFileType! @deprecated(reason: "Use visual_files") + files: [ImageFile!]! @deprecated(reason: "Use visual_files") + visual_files: [VisualFile!]! paths: ImagePathsType! # Resolver galleries: [Gallery!]! @@ -35,6 +36,7 @@ type ImageFileType { type ImagePathsType { thumbnail: String # Resolver + preview: String # Resolver image: String # Resolver } @@ -95,4 +97,4 @@ type FindImagesResultType { """Total file size in bytes""" filesize: Float! images: [Image!]! 
-} \ No newline at end of file +} diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql index ecde11eac..8e575b3ec 100644 --- a/graphql/schema/types/metadata.graphql +++ b/graphql/schema/types/metadata.graphql @@ -14,6 +14,7 @@ input GenerateMetadataInput { forceTranscodes: Boolean phashes: Boolean interactiveHeatmapsSpeeds: Boolean + clipPreviews: Boolean """scene ids to generate for""" sceneIDs: [ID!] @@ -49,6 +50,7 @@ type GenerateMetadataOptions { transcodes: Boolean phashes: Boolean interactiveHeatmapsSpeeds: Boolean + clipPreviews: Boolean } type GeneratePreviewOptions { @@ -98,6 +100,8 @@ input ScanMetadataInput { scanGeneratePhashes: Boolean """Generate image thumbnails during scan""" scanGenerateThumbnails: Boolean + """Generate image clip previews during scan""" + scanGenerateClipPreviews: Boolean "Filter options for the scan" filter: ScanMetaDataFilterInput @@ -120,6 +124,8 @@ type ScanMetadataOptions { scanGeneratePhashes: Boolean! """Generate image thumbnails during scan""" scanGenerateThumbnails: Boolean! + """Generate image clip previews during scan""" + scanGenerateClipPreviews: Boolean! } input CleanMetadataInput { diff --git a/graphql/schema/types/performer.graphql b/graphql/schema/types/performer.graphql index 235960bfc..6cbe6ed32 100644 --- a/graphql/schema/types/performer.graphql +++ b/graphql/schema/types/performer.graphql @@ -6,6 +6,11 @@ enum GenderEnum { INTERSEX NON_BINARY } + +enum CircumisedEnum { + CUT + UNCUT +} type Performer { id: ID! @@ -24,6 +29,8 @@ type Performer { height_cm: Int measurements: String fake_tits: String + penis_length: Float + circumcised: CircumisedEnum career_length: String tattoos: String piercings: String @@ -37,6 +44,8 @@ type Performer { scene_count: Int # Resolver image_count: Int # Resolver gallery_count: Int # Resolver + performer_count: Int # Resolver + o_counter: Int # Resolver scenes: [Scene!]! stash_ids: [StashID!]! 
# rating expressed as 1-5 @@ -67,6 +76,8 @@ input PerformerCreateInput { height_cm: Int measurements: String fake_tits: String + penis_length: Float + circumcised: CircumisedEnum career_length: String tattoos: String piercings: String @@ -105,6 +116,8 @@ input PerformerUpdateInput { height_cm: Int measurements: String fake_tits: String + penis_length: Float + circumcised: CircumisedEnum career_length: String tattoos: String piercings: String @@ -148,6 +161,8 @@ input BulkPerformerUpdateInput { height_cm: Int measurements: String fake_tits: String + penis_length: Float + circumcised: CircumisedEnum career_length: String tattoos: String piercings: String diff --git a/graphql/schema/types/scraped-performer.graphql b/graphql/schema/types/scraped-performer.graphql index 518e5abca..a23b04fed 100644 --- a/graphql/schema/types/scraped-performer.graphql +++ b/graphql/schema/types/scraped-performer.graphql @@ -15,6 +15,8 @@ type ScrapedPerformer { height: String measurements: String fake_tits: String + penis_length: String + circumcised: String career_length: String tattoos: String piercings: String @@ -48,6 +50,8 @@ input ScrapedPerformerInput { height: String measurements: String fake_tits: String + penis_length: String + circumcised: String career_length: String tattoos: String piercings: String diff --git a/internal/api/authentication.go b/internal/api/authentication.go index d02f98b13..94b5328f5 100644 --- a/internal/api/authentication.go +++ b/internal/api/authentication.go @@ -5,6 +5,7 @@ import ( "net" "net/http" "net/url" + "path" "strings" "github.com/stashapp/stash/internal/manager" @@ -13,11 +14,6 @@ import ( "github.com/stashapp/stash/pkg/session" ) -const ( - loginEndPoint = "/login" - logoutEndPoint = "/logout" -) - const ( tripwireActivatedErrMsg = "Stash is exposed to the public internet without authentication, and is not serving any more content to protect your privacy. 
" + "More information and fixes are available at https://docs.stashapp.cc/networking/authentication-required-when-accessing-stash-from-the-internet" @@ -30,7 +26,7 @@ const ( func allowUnauthenticated(r *http.Request) bool { // #2715 - allow access to UI files - return strings.HasPrefix(r.URL.Path, loginEndPoint) || r.URL.Path == logoutEndPoint || r.URL.Path == "/css" || strings.HasPrefix(r.URL.Path, "/assets") + return strings.HasPrefix(r.URL.Path, loginEndpoint) || r.URL.Path == logoutEndpoint || r.URL.Path == "/css" || strings.HasPrefix(r.URL.Path, "/assets") } func authenticateHandler() func(http.Handler) http.Handler { @@ -38,38 +34,41 @@ func authenticateHandler() func(http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { c := config.GetInstance() - if !checkSecurityTripwireActivated(c, w) { + // error if external access tripwire activated + if accessErr := session.CheckExternalAccessTripwire(c); accessErr != nil { + http.Error(w, tripwireActivatedErrMsg, http.StatusForbidden) return } userID, err := manager.GetInstance().SessionStore.Authenticate(w, r) if err != nil { if errors.Is(err, session.ErrUnauthorized) { - w.WriteHeader(http.StatusInternalServerError) - _, err = w.Write([]byte(err.Error())) - if err != nil { - logger.Error(err) - } + http.Error(w, err.Error(), http.StatusInternalServerError) return } // unauthorized error - w.Header().Add("WWW-Authenticate", `FormBased`) + w.Header().Add("WWW-Authenticate", "FormBased") w.WriteHeader(http.StatusUnauthorized) return } if err := session.CheckAllowPublicWithoutAuth(c, r); err != nil { - var externalAccess session.ExternalAccessError - switch { - case errors.As(err, &externalAccess): - securityActivateTripwireAccessedFromInternetWithoutAuth(c, externalAccess, w) - return - default: + var accessErr session.ExternalAccessError + if errors.As(err, &accessErr) { + session.LogExternalAccessError(accessErr) + + err := 
c.ActivatePublicAccessTripwire(net.IP(accessErr).String()) + if err != nil { + logger.Errorf("Error activating public access tripwire: %v", err) + } + + http.Error(w, externalAccessErrMsg, http.StatusForbidden) + } else { logger.Errorf("Error checking external access security: %v", err) w.WriteHeader(http.StatusInternalServerError) - return } + return } ctx := r.Context() @@ -77,15 +76,15 @@ func authenticateHandler() func(http.Handler) http.Handler { if c.HasCredentials() { // authentication is required if userID == "" && !allowUnauthenticated(r) { - // authentication was not received, redirect - // if graphql was requested, we just return a forbidden error - if r.URL.Path == "/graphql" { - w.Header().Add("WWW-Authenticate", `FormBased`) + // if graphql or a non-webpage was requested, we just return a forbidden error + ext := path.Ext(r.URL.Path) + if r.URL.Path == gqlEndpoint || (ext != "" && ext != ".html") { + w.Header().Add("WWW-Authenticate", "FormBased") w.WriteHeader(http.StatusUnauthorized) return } - prefix := getProxyPrefix(r.Header) + prefix := getProxyPrefix(r) // otherwise redirect to the login page returnURL := url.URL{ @@ -95,7 +94,7 @@ func authenticateHandler() func(http.Handler) http.Handler { q := make(url.Values) q.Set(returnURLParam, returnURL.String()) u := url.URL{ - Path: prefix + "/login", + Path: prefix + loginEndpoint, RawQuery: q.Encode(), } http.Redirect(w, r, u.String(), http.StatusFound) @@ -111,31 +110,3 @@ func authenticateHandler() func(http.Handler) http.Handler { }) } } - -func checkSecurityTripwireActivated(c *config.Instance, w http.ResponseWriter) bool { - if accessErr := session.CheckExternalAccessTripwire(c); accessErr != nil { - w.WriteHeader(http.StatusForbidden) - _, err := w.Write([]byte(tripwireActivatedErrMsg)) - if err != nil { - logger.Error(err) - } - return false - } - - return true -} - -func securityActivateTripwireAccessedFromInternetWithoutAuth(c *config.Instance, accessErr session.ExternalAccessError, w 
http.ResponseWriter) { - session.LogExternalAccessError(accessErr) - - err := c.ActivatePublicAccessTripwire(net.IP(accessErr).String()) - if err != nil { - logger.Error(err) - } - - w.WriteHeader(http.StatusForbidden) - _, err = w.Write([]byte(externalAccessErrMsg)) - if err != nil { - logger.Error(err) - } -} diff --git a/internal/api/check_version.go b/internal/api/check_version.go index bb621cb9d..a2da99c9a 100644 --- a/internal/api/check_version.go +++ b/internal/api/check_version.go @@ -113,7 +113,6 @@ type LatestRelease struct { } func makeGithubRequest(ctx context.Context, url string, output interface{}) error { - transport := &http.Transport{Proxy: http.ProxyFromEnvironment} client := &http.Client{ @@ -124,6 +123,7 @@ func makeGithubRequest(ctx context.Context, url string, output interface{}) erro req, _ := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) req.Header.Add("Accept", apiAcceptHeader) // gh api recommendation , send header with api version + logger.Debugf("Github API request: %s", url) response, err := client.Do(req) if err != nil { @@ -229,19 +229,39 @@ func GetLatestRelease(ctx context.Context) (*LatestRelease, error) { } func getReleaseHash(ctx context.Context, tagName string) (string, error) { - url := apiTags - tags := []githubTagResponse{} - err := makeGithubRequest(ctx, url, &tags) - if err != nil { - return "", err + // Start with a small page size if not searching for latest_develop + perPage := 10 + if tagName == developmentTag { + perPage = 100 } - for _, tag := range tags { - if tag.Name == tagName { - if len(tag.Commit.Sha) != 40 { - return "", errors.New("invalid Github API response") + // Limit to 5 pages, ie 500 tags - should be plenty + for page := 1; page <= 5; { + url := fmt.Sprintf("%s?per_page=%d&page=%d", apiTags, perPage, page) + tags := []githubTagResponse{} + err := makeGithubRequest(ctx, url, &tags) + if err != nil { + return "", err + } + + for _, tag := range tags { + if tag.Name == tagName { + if 
len(tag.Commit.Sha) != 40 { + return "", errors.New("invalid Github API response") + } + return tag.Commit.Sha, nil } - return tag.Commit.Sha, nil + } + + if len(tags) == 0 { + break + } + + // if not found in the first 10, search again on page 1 with the first 100 + if perPage == 10 { + perPage = 100 + } else { + page++ } } diff --git a/internal/api/error.go b/internal/api/error.go new file mode 100644 index 000000000..5b30a8c12 --- /dev/null +++ b/internal/api/error.go @@ -0,0 +1,42 @@ +package api + +import ( + "context" + "encoding/json" + "errors" + + "github.com/99designs/gqlgen/graphql" + "github.com/stashapp/stash/pkg/logger" + "github.com/vektah/gqlparser/v2/gqlerror" +) + +func gqlErrorHandler(ctx context.Context, e error) *gqlerror.Error { + if !errors.Is(ctx.Err(), context.Canceled) { + // log all errors - for now just log the error message + // we can potentially add more context later + fc := graphql.GetFieldContext(ctx) + if fc != nil { + logger.Errorf("%s: %v", fc.Path(), e) + + // log the args in debug level + logger.DebugFunc(func() (string, []interface{}) { + var args interface{} + args = fc.Args + + s, _ := json.Marshal(args) + if len(s) > 0 { + args = string(s) + } + + return "%s: %v", []interface{}{ + fc.Path(), + args, + } + }) + } + } + + // we may also want to transform the error message for the response + // for now just return the original error + return graphql.DefaultErrorPresenter(ctx, e) +} diff --git a/internal/api/images.go b/internal/api/images.go index ddcaee629..95ed4c844 100644 --- a/internal/api/images.go +++ b/internal/api/images.go @@ -87,7 +87,7 @@ func initialiseCustomImages() { } } -func getRandomPerformerImageUsingName(name string, gender models.GenderEnum, customPath string) ([]byte, error) { +func getRandomPerformerImageUsingName(name string, gender *models.GenderEnum, customPath string) ([]byte, error) { var box *imageBox // If we have a custom path, we should return a new box in the given path. 
@@ -95,11 +95,16 @@ func getRandomPerformerImageUsingName(name string, gender models.GenderEnum, cus box = performerBoxCustom } + var g models.GenderEnum + if gender != nil { + g = *gender + } + if box == nil { - switch gender { - case models.GenderEnumFemale: + switch g { + case models.GenderEnumFemale, models.GenderEnumTransgenderFemale: box = performerBox - case models.GenderEnumMale: + case models.GenderEnumMale, models.GenderEnumTransgenderMale: box = performerBoxMale default: box = performerBox diff --git a/internal/api/resolver_model_image.go b/internal/api/resolver_model_image.go index 2a1965c4e..9bfadafc7 100644 --- a/internal/api/resolver_model_image.go +++ b/internal/api/resolver_model_image.go @@ -12,42 +12,55 @@ import ( "github.com/stashapp/stash/pkg/models" ) -func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (*file.ImageFile, error) { +func convertImageFile(f *file.ImageFile) *ImageFile { + ret := &ImageFile{ + ID: strconv.Itoa(int(f.ID)), + Path: f.Path, + Basename: f.Basename, + ParentFolderID: strconv.Itoa(int(f.ParentFolderID)), + ModTime: f.ModTime, + Size: f.Size, + Width: f.Width, + Height: f.Height, + CreatedAt: f.CreatedAt, + UpdatedAt: f.UpdatedAt, + Fingerprints: resolveFingerprints(f.Base()), + } + + if f.ZipFileID != nil { + zipFileID := strconv.Itoa(int(*f.ZipFileID)) + ret.ZipFileID = &zipFileID + } + + return ret +} + +func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (file.VisualFile, error) { if obj.PrimaryFileID != nil { f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID) if err != nil { return nil, err } - ret, ok := f.(*file.ImageFile) + asFrame, ok := f.(file.VisualFile) if !ok { - return nil, fmt.Errorf("file %T is not an image file", f) + return nil, fmt.Errorf("file %T is not an frame", f) } - return ret, nil + return asFrame, nil } return nil, nil } -func (r *imageResolver) getFiles(ctx context.Context, obj *models.Image) ([]*file.ImageFile, error) { +func 
(r *imageResolver) getFiles(ctx context.Context, obj *models.Image) ([]file.File, error) { fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID) if err != nil { return nil, err } files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs) - ret := make([]*file.ImageFile, len(files)) - for i, bf := range files { - f, ok := bf.(*file.ImageFile) - if !ok { - return nil, fmt.Errorf("file %T is not an image file", f) - } - - ret[i] = f - } - - return ret, firstError(errs) + return files, firstError(errs) } func (r *imageResolver) Title(ctx context.Context, obj *models.Image) (*string, error) { @@ -65,9 +78,9 @@ func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFile return nil, nil } - width := f.Width - height := f.Height - size := f.Size + width := f.GetWidth() + height := f.GetHeight() + size := f.Base().Size return &ImageFileType{ Size: int(size), Width: width, @@ -75,6 +88,32 @@ func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFile }, nil } +func convertVisualFile(f file.File) VisualFile { + switch f := f.(type) { + case *file.ImageFile: + return convertImageFile(f) + case *file.VideoFile: + return convertVideoFile(f) + default: + panic(fmt.Sprintf("unknown file type %T", f)) + } +} + +func (r *imageResolver) VisualFiles(ctx context.Context, obj *models.Image) ([]VisualFile, error) { + fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID) + if err != nil { + return nil, err + } + + files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs) + ret := make([]VisualFile, len(files)) + for i, f := range files { + ret[i] = convertVisualFile(f) + } + + return ret, firstError(errs) +} + func (r *imageResolver) Date(ctx context.Context, obj *models.Image) (*string, error) { if obj.Date != nil { result := obj.Date.String() @@ -89,27 +128,18 @@ func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageF return nil, err } - ret := make([]*ImageFile, len(files)) + var ret []*ImageFile - for i, f := 
range files { - ret[i] = &ImageFile{ - ID: strconv.Itoa(int(f.ID)), - Path: f.Path, - Basename: f.Basename, - ParentFolderID: strconv.Itoa(int(f.ParentFolderID)), - ModTime: f.ModTime, - Size: f.Size, - Width: f.Width, - Height: f.Height, - CreatedAt: f.CreatedAt, - UpdatedAt: f.UpdatedAt, - Fingerprints: resolveFingerprints(f.Base()), + for _, f := range files { + // filter out non-image files + imageFile, ok := f.(*file.ImageFile) + if !ok { + continue } - if f.ZipFileID != nil { - zipFileID := strconv.Itoa(int(*f.ZipFileID)) - ret[i].ZipFileID = &zipFileID - } + thisFile := convertImageFile(imageFile) + + ret = append(ret, thisFile) } return ret, nil @@ -121,7 +151,7 @@ func (r *imageResolver) FileModTime(ctx context.Context, obj *models.Image) (*ti return nil, err } if f != nil { - return &f.ModTime, nil + return &f.Base().ModTime, nil } return nil, nil @@ -131,10 +161,12 @@ func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*ImagePat baseURL, _ := ctx.Value(BaseURLCtxKey).(string) builder := urlbuilders.NewImageURLBuilder(baseURL, obj) thumbnailPath := builder.GetThumbnailURL() + previewPath := builder.GetPreviewURL() imagePath := builder.GetImageURL() return &ImagePathsType{ Image: &imagePath, Thumbnail: &thumbnailPath, + Preview: &previewPath, }, nil } diff --git a/internal/api/resolver_model_movie.go b/internal/api/resolver_model_movie.go index 9967ef323..fea2276ea 100644 --- a/internal/api/resolver_model_movie.go +++ b/internal/api/resolver_model_movie.go @@ -86,33 +86,38 @@ func (r *movieResolver) Synopsis(ctx context.Context, obj *models.Movie) (*strin } func (r *movieResolver) FrontImagePath(ctx context.Context, obj *models.Movie) (*string, error) { - baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - frontimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieFrontImageURL() - return &frontimagePath, nil -} - -func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) { - // don't return 
any thing if there is no back image - hasImage := false + var hasImage bool if err := r.withReadTxn(ctx, func(ctx context.Context) error { var err error - hasImage, err = r.repository.Movie.HasBackImage(ctx, obj.ID) - if err != nil { - return err - } - - return nil + hasImage, err = r.repository.Movie.HasFrontImage(ctx, obj.ID) + return err }); err != nil { return nil, err } + baseURL, _ := ctx.Value(BaseURLCtxKey).(string) + imagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieFrontImageURL(hasImage) + return &imagePath, nil +} + +func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) { + var hasImage bool + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + var err error + hasImage, err = r.repository.Movie.HasBackImage(ctx, obj.ID) + return err + }); err != nil { + return nil, err + } + + // don't return anything if there is no back image if !hasImage { return nil, nil } baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - backimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieBackImageURL() - return &backimagePath, nil + imagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieBackImageURL() + return &imagePath, nil } func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (ret *int, err error) { diff --git a/internal/api/resolver_model_performer.go b/internal/api/resolver_model_performer.go index 8aac29022..afdfa6f14 100644 --- a/internal/api/resolver_model_performer.go +++ b/internal/api/resolver_model_performer.go @@ -10,6 +10,7 @@ import ( "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/performer" ) // Checksum is deprecated @@ -19,7 +20,7 @@ func (r *performerResolver) Checksum(ctx context.Context, obj *models.Performer) func (r *performerResolver) Aliases(ctx context.Context, obj *models.Performer) (*string, error) { if !obj.Aliases.Loaded() { - if 
err := r.withTxn(ctx, func(ctx context.Context) error { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { return obj.LoadAliases(ctx, r.repository.Performer) }); err != nil { return nil, err @@ -32,7 +33,7 @@ func (r *performerResolver) Aliases(ctx context.Context, obj *models.Performer) func (r *performerResolver) AliasList(ctx context.Context, obj *models.Performer) ([]string, error) { if !obj.Aliases.Loaded() { - if err := r.withTxn(ctx, func(ctx context.Context) error { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { return obj.LoadAliases(ctx, r.repository.Performer) }); err != nil { return nil, err @@ -63,8 +64,17 @@ func (r *performerResolver) Birthdate(ctx context.Context, obj *models.Performer } func (r *performerResolver) ImagePath(ctx context.Context, obj *models.Performer) (*string, error) { + var hasImage bool + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + var err error + hasImage, err = r.repository.Performer.HasImage(ctx, obj.ID) + return err + }); err != nil { + return nil, err + } + baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - imagePath := urlbuilders.NewPerformerURLBuilder(baseURL, obj).GetPerformerImageURL() + imagePath := urlbuilders.NewPerformerURLBuilder(baseURL, obj).GetPerformerImageURL(hasImage) return &imagePath, nil } @@ -118,6 +128,24 @@ func (r *performerResolver) GalleryCount(ctx context.Context, obj *models.Perfor return &res, nil } +func (r *performerResolver) OCounter(ctx context.Context, obj *models.Performer) (ret *int, err error) { + var res_scene int + var res_image int + var res int + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + res_scene, err = r.repository.Scene.OCountByPerformerID(ctx, obj.ID) + if err != nil { + return err + } + res_image, err = r.repository.Image.OCountByPerformerID(ctx, obj.ID) + return err + }); err != nil { + return nil, err + } + res = res_scene + res_image + return &res, nil +} + func (r *performerResolver) Scenes(ctx 
context.Context, obj *models.Performer) (ret []*models.Scene, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { ret, err = r.repository.Scene.FindByPerformerID(ctx, obj.ID) @@ -181,3 +209,15 @@ func (r *performerResolver) MovieCount(ctx context.Context, obj *models.Performe return &res, nil } + +func (r *performerResolver) PerformerCount(ctx context.Context, obj *models.Performer) (ret *int, err error) { + var res int + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + res, err = performer.CountByAppearsWith(ctx, r.repository.Performer, obj.ID) + return err + }); err != nil { + return nil, err + } + + return &res, nil +} diff --git a/internal/api/resolver_model_scene.go b/internal/api/resolver_model_scene.go index a5c70fadc..cd6f16a57 100644 --- a/internal/api/resolver_model_scene.go +++ b/internal/api/resolver_model_scene.go @@ -14,6 +14,35 @@ import ( "github.com/stashapp/stash/pkg/utils" ) +func convertVideoFile(f *file.VideoFile) *VideoFile { + ret := &VideoFile{ + ID: strconv.Itoa(int(f.ID)), + Path: f.Path, + Basename: f.Basename, + ParentFolderID: strconv.Itoa(int(f.ParentFolderID)), + ModTime: f.ModTime, + Format: f.Format, + Size: f.Size, + Duration: handleFloat64Value(f.Duration), + VideoCodec: f.VideoCodec, + AudioCodec: f.AudioCodec, + Width: f.Width, + Height: f.Height, + FrameRate: handleFloat64Value(f.FrameRate), + BitRate: int(f.BitRate), + CreatedAt: f.CreatedAt, + UpdatedAt: f.UpdatedAt, + Fingerprints: resolveFingerprints(f.Base()), + } + + if f.ZipFileID != nil { + zipFileID := strconv.Itoa(int(*f.ZipFileID)) + ret.ZipFileID = &zipFileID + } + + return ret +} + func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) (*file.VideoFile, error) { if obj.PrimaryFileID != nil { f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID) @@ -112,30 +141,7 @@ func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*VideoF ret := make([]*VideoFile, len(files)) for i, f := 
range files { - ret[i] = &VideoFile{ - ID: strconv.Itoa(int(f.ID)), - Path: f.Path, - Basename: f.Basename, - ParentFolderID: strconv.Itoa(int(f.ParentFolderID)), - ModTime: f.ModTime, - Format: f.Format, - Size: f.Size, - Duration: handleFloat64Value(f.Duration), - VideoCodec: f.VideoCodec, - AudioCodec: f.AudioCodec, - Width: f.Width, - Height: f.Height, - FrameRate: handleFloat64Value(f.FrameRate), - BitRate: int(f.BitRate), - CreatedAt: f.CreatedAt, - UpdatedAt: f.UpdatedAt, - Fingerprints: resolveFingerprints(f.Base()), - } - - if f.ZipFileID != nil { - zipFileID := strconv.Itoa(int(*f.ZipFileID)) - ret[i].ZipFileID = &zipFileID - } + ret[i] = convertVideoFile(f) } return ret, nil @@ -178,8 +184,8 @@ func formatFingerprint(fp interface{}) string { func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*ScenePathsType, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) config := manager.GetInstance().Config - builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID) - screenshotPath := builder.GetScreenshotURL(obj.UpdatedAt) + builder := urlbuilders.NewSceneURLBuilder(baseURL, obj) + screenshotPath := builder.GetScreenshotURL() previewPath := builder.GetStreamPreviewURL() streamPath := builder.GetStreamURL(config.GetAPIKey()).String() webpPath := builder.GetStreamPreviewImageURL() @@ -370,7 +376,7 @@ func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([] config := manager.GetInstance().Config baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID) + builder := urlbuilders.NewSceneURLBuilder(baseURL, obj) apiKey := config.GetAPIKey() return manager.GetSceneStreamPaths(obj, builder.GetStreamURL(apiKey), config.GetMaxStreamingTranscodeSize()) diff --git a/internal/api/resolver_model_scene_marker.go b/internal/api/resolver_model_scene_marker.go index 0057db4e8..3e6ab4030 100644 --- a/internal/api/resolver_model_scene_marker.go +++ 
b/internal/api/resolver_model_scene_marker.go @@ -48,20 +48,17 @@ func (r *sceneMarkerResolver) Tags(ctx context.Context, obj *models.SceneMarker) func (r *sceneMarkerResolver) Stream(ctx context.Context, obj *models.SceneMarker) (string, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - sceneID := int(obj.SceneID.Int64) - return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamURL(obj.ID), nil + return urlbuilders.NewSceneMarkerURLBuilder(baseURL, obj).GetStreamURL(), nil } func (r *sceneMarkerResolver) Preview(ctx context.Context, obj *models.SceneMarker) (string, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - sceneID := int(obj.SceneID.Int64) - return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamPreviewURL(obj.ID), nil + return urlbuilders.NewSceneMarkerURLBuilder(baseURL, obj).GetPreviewURL(), nil } func (r *sceneMarkerResolver) Screenshot(ctx context.Context, obj *models.SceneMarker) (string, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - sceneID := int(obj.SceneID.Int64) - return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamScreenshotURL(obj.ID), nil + return urlbuilders.NewSceneMarkerURLBuilder(baseURL, obj).GetScreenshotURL(), nil } func (r *sceneMarkerResolver) CreatedAt(ctx context.Context, obj *models.SceneMarker) (*time.Time, error) { diff --git a/internal/api/resolver_model_studio.go b/internal/api/resolver_model_studio.go index 4d689df77..10bc577f3 100644 --- a/internal/api/resolver_model_studio.go +++ b/internal/api/resolver_model_studio.go @@ -27,9 +27,6 @@ func (r *studioResolver) URL(ctx context.Context, obj *models.Studio) (*string, } func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*string, error) { - baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj).GetStudioImageURL() - var hasImage bool if err := r.withReadTxn(ctx, func(ctx context.Context) error { var err 
error @@ -39,11 +36,8 @@ func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*st return nil, err } - // indicate that image is missing by setting default query param to true - if !hasImage { - imagePath += "?default=true" - } - + baseURL, _ := ctx.Value(BaseURLCtxKey).(string) + imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj).GetStudioImageURL(hasImage) return &imagePath, nil } diff --git a/internal/api/resolver_model_tag.go b/internal/api/resolver_model_tag.go index 70fee39e0..6f74c8d1b 100644 --- a/internal/api/resolver_model_tag.go +++ b/internal/api/resolver_model_tag.go @@ -111,8 +111,17 @@ func (r *tagResolver) PerformerCount(ctx context.Context, obj *models.Tag) (ret } func (r *tagResolver) ImagePath(ctx context.Context, obj *models.Tag) (*string, error) { + var hasImage bool + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + var err error + hasImage, err = r.repository.Tag.HasImage(ctx, obj.ID) + return err + }); err != nil { + return nil, err + } + baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - imagePath := urlbuilders.NewTagURLBuilder(baseURL, obj).GetTagImageURL() + imagePath := urlbuilders.NewTagURLBuilder(baseURL, obj).GetTagImageURL(hasImage) return &imagePath, nil } diff --git a/internal/api/resolver_mutation_configure.go b/internal/api/resolver_mutation_configure.go index fc46bc323..bdc93137f 100644 --- a/internal/api/resolver_mutation_configure.go +++ b/internal/api/resolver_mutation_configure.go @@ -218,6 +218,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen c.Set(config.WriteImageThumbnails, *input.WriteImageThumbnails) } + if input.CreateImageClipsFromVideos != nil { + c.Set(config.CreateImageClipsFromVideos, *input.CreateImageClipsFromVideos) + } + if input.GalleryCoverRegex != nil { _, err := regexp.Compile(*input.GalleryCoverRegex) @@ -228,8 +232,13 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen 
c.Set(config.GalleryCoverRegex, *input.GalleryCoverRegex) } - if input.Username != nil { + if input.Username != nil && *input.Username != c.GetUsername() { c.Set(config.Username, input.Username) + if *input.Password == "" { + logger.Info("Username cleared") + } else { + logger.Info("Username changed") + } } if input.Password != nil { @@ -238,6 +247,11 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen currentPWHash := c.GetPasswordHash() if *input.Password != currentPWHash { + if *input.Password == "" { + logger.Info("Password cleared") + } else { + logger.Info("Password changed") + } c.SetPassword(*input.Password) } } @@ -483,6 +497,10 @@ func (r *mutationResolver) ConfigureDlna(ctx context.Context, input ConfigDLNAIn c.Set(config.DLNADefaultIPWhitelist, input.WhitelistedIPs) } + if input.VideoSortOrder != nil { + c.Set(config.DLNAVideoSortOrder, input.VideoSortOrder) + } + currentDLNAEnabled := c.GetDLNADefaultEnabled() if input.Enabled != nil && *input.Enabled != currentDLNAEnabled { c.Set(config.DLNADefaultEnabled, *input.Enabled) diff --git a/internal/api/resolver_mutation_image.go b/internal/api/resolver_mutation_image.go index 6a482ff04..24b81967a 100644 --- a/internal/api/resolver_mutation_image.go +++ b/internal/api/resolver_mutation_image.go @@ -10,6 +10,7 @@ import ( "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" + "github.com/stashapp/stash/pkg/sliceutil/intslice" "github.com/stashapp/stash/pkg/sliceutil/stringslice" "github.com/stashapp/stash/pkg/utils" ) @@ -126,9 +127,9 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp } // ensure that new primary file is associated with scene - var f *file.ImageFile + var f file.File for _, ff := range i.Files.List() { - if ff.ID == converted { + if ff.Base().ID == converted { f = ff } } @@ -138,6 +139,8 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input 
ImageUpdateInp } } + var updatedGalleryIDs []int + if translator.hasField("gallery_ids") { updatedImage.GalleryIDs, err = translateUpdateIDs(input.GalleryIds, models.RelationshipUpdateModeSet) if err != nil { @@ -152,6 +155,8 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp if err := r.galleryService.ValidateImageGalleryChange(ctx, i, *updatedImage.GalleryIDs); err != nil { return nil, err } + + updatedGalleryIDs = updatedImage.GalleryIDs.ImpactedIDs(i.GalleryIDs.List()) } if translator.hasField("performer_ids") { @@ -174,6 +179,13 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp return nil, err } + // #3759 - update all impacted galleries + for _, galleryID := range updatedGalleryIDs { + if err := r.galleryService.Updated(ctx, galleryID); err != nil { + return nil, fmt.Errorf("updating gallery %d: %w", galleryID, err) + } + } + return image, nil } @@ -223,6 +235,7 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU // Start the transaction and save the image marker if err := r.withTxn(ctx, func(ctx context.Context) error { + var updatedGalleryIDs []int qb := r.repository.Image for _, imageID := range imageIDs { @@ -244,6 +257,9 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU if err := r.galleryService.ValidateImageGalleryChange(ctx, i, *updatedImage.GalleryIDs); err != nil { return err } + + thisUpdatedGalleryIDs := updatedImage.GalleryIDs.ImpactedIDs(i.GalleryIDs.List()) + updatedGalleryIDs = intslice.IntAppendUniques(updatedGalleryIDs, thisUpdatedGalleryIDs) } image, err := qb.UpdatePartial(ctx, imageID, updatedImage) @@ -254,6 +270,13 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU ret = append(ret, image) } + // #3759 - update all impacted galleries + for _, galleryID := range updatedGalleryIDs { + if err := r.galleryService.Updated(ctx, galleryID); err != nil { + return 
fmt.Errorf("updating gallery %d: %w", galleryID, err) + } + } + return nil }); err != nil { return nil, err diff --git a/internal/api/resolver_mutation_performer.go b/internal/api/resolver_mutation_performer.go index 88aab07d0..2f3e9e01b 100644 --- a/internal/api/resolver_mutation_performer.go +++ b/internal/api/resolver_mutation_performer.go @@ -67,7 +67,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerC newPerformer.URL = *input.URL } if input.Gender != nil { - newPerformer.Gender = *input.Gender + newPerformer.Gender = input.Gender } if input.Birthdate != nil { d := models.NewDate(*input.Birthdate) @@ -98,6 +98,12 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerC if input.FakeTits != nil { newPerformer.FakeTits = *input.FakeTits } + if input.PenisLength != nil { + newPerformer.PenisLength = input.PenisLength + } + if input.Circumcised != nil { + newPerformer.Circumcised = input.Circumcised + } if input.CareerLength != nil { newPerformer.CareerLength = *input.CareerLength } @@ -222,6 +228,16 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerU updatedPerformer.Ethnicity = translator.optionalString(input.Ethnicity, "ethnicity") updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits") + updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length") + + if translator.hasField("circumcised") { + if input.Circumcised != nil { + updatedPerformer.Circumcised = models.NewOptionalString(input.Circumcised.String()) + } else { + updatedPerformer.Circumcised = models.NewOptionalStringPtr(nil) + } + } + updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length") updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos") updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings") @@ -339,6 +355,16 @@ func (r *mutationResolver) 
BulkPerformerUpdate(ctx context.Context, input BulkPe updatedPerformer.Measurements = translator.optionalString(input.Measurements, "measurements") updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits") + updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length") + + if translator.hasField("circumcised") { + if input.Circumcised != nil { + updatedPerformer.Circumcised = models.NewOptionalString(input.Circumcised.String()) + } else { + updatedPerformer.Circumcised = models.NewOptionalStringPtr(nil) + } + } + updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length") updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos") updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings") @@ -418,7 +444,7 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe // execute post hooks outside of txn var newRet []*models.Performer for _, performer := range ret { - r.hookExecutor.ExecutePostHooks(ctx, performer.ID, plugin.ImageUpdatePost, input, translator.getFields()) + r.hookExecutor.ExecutePostHooks(ctx, performer.ID, plugin.PerformerUpdatePost, input, translator.getFields()) performer, err = r.getPerformer(ctx, performer.ID) if err != nil { diff --git a/internal/api/resolver_query_configuration.go b/internal/api/resolver_query_configuration.go index fd598ce92..4c9f00aea 100644 --- a/internal/api/resolver_query_configuration.go +++ b/internal/api/resolver_query_configuration.go @@ -106,6 +106,7 @@ func makeConfigGeneralResult() *ConfigGeneralResult { MaxTranscodeSize: &maxTranscodeSize, MaxStreamingTranscodeSize: &maxStreamingTranscodeSize, WriteImageThumbnails: config.IsWriteImageThumbnails(), + CreateImageClipsFromVideos: config.IsCreateImageClipsFromVideos(), GalleryCoverRegex: config.GetGalleryCoverRegex(), APIKey: config.GetAPIKey(), Username: config.GetUsername(), @@ -202,6 +203,7 @@ func 
makeConfigDLNAResult() *ConfigDLNAResult { Enabled: config.GetDLNADefaultEnabled(), WhitelistedIPs: config.GetDLNADefaultIPWhitelist(), Interfaces: config.GetDLNAInterfaces(), + VideoSortOrder: config.GetVideoSortOrder(), } } diff --git a/internal/api/resolver_query_find_scene.go b/internal/api/resolver_query_find_scene.go index 1eaa2dc03..c60cf88c2 100644 --- a/internal/api/resolver_query_find_scene.go +++ b/internal/api/resolver_query_find_scene.go @@ -220,13 +220,17 @@ func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models. return ret, nil } -func (r *queryResolver) FindDuplicateScenes(ctx context.Context, distance *int) (ret [][]*models.Scene, err error) { +func (r *queryResolver) FindDuplicateScenes(ctx context.Context, distance *int, durationDiff *float64) (ret [][]*models.Scene, err error) { dist := 0 + durDiff := -1. if distance != nil { dist = *distance } + if durationDiff != nil { + durDiff = *durationDiff + } if err := r.withReadTxn(ctx, func(ctx context.Context) error { - ret, err = r.repository.Scene.FindDuplicates(ctx, dist) + ret, err = r.repository.Scene.FindDuplicates(ctx, dist, durDiff) return err }); err != nil { return nil, err diff --git a/internal/api/resolver_query_scene.go b/internal/api/resolver_query_scene.go index 120998d71..e7f16604b 100644 --- a/internal/api/resolver_query_scene.go +++ b/internal/api/resolver_query_scene.go @@ -34,7 +34,7 @@ func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*manage config := manager.GetInstance().Config baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - builder := urlbuilders.NewSceneURLBuilder(baseURL, scene.ID) + builder := urlbuilders.NewSceneURLBuilder(baseURL, scene) apiKey := config.GetAPIKey() return manager.GetSceneStreamPaths(scene, builder.GetStreamURL(apiKey), config.GetMaxStreamingTranscodeSize()) diff --git a/internal/api/routes_image.go b/internal/api/routes_image.go index 7ac8c99ae..4ea612d3b 100644 --- a/internal/api/routes_image.go +++ 
b/internal/api/routes_image.go @@ -8,7 +8,6 @@ import ( "net/http" "os/exec" "strconv" - "syscall" "github.com/go-chi/chi" "github.com/stashapp/stash/internal/manager" @@ -19,6 +18,7 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" + "github.com/stashapp/stash/pkg/utils" ) type ImageFinder interface { @@ -40,6 +40,7 @@ func (rs imageRoutes) Routes() chi.Router { r.Get("/image", rs.Image) r.Get("/thumbnail", rs.Thumbnail) + r.Get("/preview", rs.Preview) }) return r @@ -51,12 +52,10 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) { img := r.Context().Value(imageKey).(*models.Image) filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(img.Checksum, models.DefaultGthumbWidth) - w.Header().Add("Cache-Control", "max-age=604800000") - // if the thumbnail doesn't exist, encode on the fly exists, _ := fsutil.FileExists(filepath) if exists { - http.ServeFile(w, r, filepath) + utils.ServeStaticFile(w, r, filepath) } else { const useDefault = true @@ -66,13 +65,19 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) { return } - encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEG) + clipPreviewOptions := image.ClipPreviewOptions{ + InputArgs: manager.GetInstance().Config.GetTranscodeInputArgs(), + OutputArgs: manager.GetInstance().Config.GetTranscodeOutputArgs(), + Preset: manager.GetInstance().Config.GetPreviewPreset().String(), + } + + encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEG, manager.GetInstance().FFProbe, clipPreviewOptions) data, err := encoder.GetThumbnail(f, models.DefaultGthumbWidth) if err != nil { // don't log for unsupported image format // don't log for file not found - can optionally be logged in serveImage if !errors.Is(err, image.ErrNotSupportedForThumbnail) && !errors.Is(err, fs.ErrNotExist) { - logger.Errorf("error generating thumbnail for %s: %v", f.Path, err) + logger.Errorf("error 
generating thumbnail for %s: %v", f.Base().Path, err) var exitErr *exec.ExitError if errors.As(err, &exitErr) { @@ -88,16 +93,24 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) { // write the generated thumbnail to disk if enabled if manager.GetInstance().Config.IsWriteImageThumbnails() { logger.Debugf("writing thumbnail to disk: %s", img.Path) - if err := fsutil.WriteFile(filepath, data); err != nil { - logger.Errorf("error writing thumbnail for image %s: %v", img.Path, err) + if err := fsutil.WriteFile(filepath, data); err == nil { + utils.ServeStaticFile(w, r, filepath) + return } + logger.Errorf("error writing thumbnail for image %s: %v", img.Path, err) } - if n, err := w.Write(data); err != nil && !errors.Is(err, syscall.EPIPE) { - logger.Errorf("error serving thumbnail (wrote %v bytes out of %v): %v", n, len(data), err) - } + utils.ServeStaticContent(w, r, data) } } +func (rs imageRoutes) Preview(w http.ResponseWriter, r *http.Request) { + img := r.Context().Value(imageKey).(*models.Image) + filepath := manager.GetInstance().Paths.Generated.GetClipPreviewPath(img.Checksum, models.DefaultGthumbWidth) + + // don't check if the preview exists - we'll just return a 404 if it doesn't + utils.ServeStaticFile(w, r, filepath) +} + func (rs imageRoutes) Image(w http.ResponseWriter, r *http.Request) { i := r.Context().Value(imageKey).(*models.Image) @@ -109,7 +122,7 @@ func (rs imageRoutes) serveImage(w http.ResponseWriter, r *http.Request, i *mode const defaultImageImage = "image/image.svg" if i.Files.Primary() != nil { - err := i.Files.Primary().Serve(&file.OsFS{}, w, r) + err := i.Files.Primary().Base().Serve(&file.OsFS{}, w, r) if err == nil { return } @@ -131,8 +144,8 @@ func (rs imageRoutes) serveImage(w http.ResponseWriter, r *http.Request, i *mode // fall back to static image f, _ := static.Image.Open(defaultImageImage) defer f.Close() - stat, _ := f.Stat() - http.ServeContent(w, r, "image.svg", stat.ModTime(), f.(io.ReadSeeker)) + 
image, _ := io.ReadAll(f) + utils.ServeImage(w, r, image) } // endregion diff --git a/internal/api/routes_movie.go b/internal/api/routes_movie.go index 7b77586a6..a64aae76c 100644 --- a/internal/api/routes_movie.go +++ b/internal/api/routes_movie.go @@ -58,9 +58,7 @@ func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) { image, _ = utils.ProcessBase64Image(models.DefaultMovieImage) } - if err := utils.ServeImage(image, w, r); err != nil { - logger.Warnf("error serving movie front image: %v", err) - } + utils.ServeImage(w, r, image) } func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) { @@ -85,9 +83,7 @@ func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) { image, _ = utils.ProcessBase64Image(models.DefaultMovieImage) } - if err := utils.ServeImage(image, w, r); err != nil { - logger.Warnf("error serving movie back image: %v", err) - } + utils.ServeImage(w, r, image) } func (rs movieRoutes) MovieCtx(next http.Handler) http.Handler { diff --git a/internal/api/routes_performer.go b/internal/api/routes_performer.go index 1717e99f9..e7631de5b 100644 --- a/internal/api/routes_performer.go +++ b/internal/api/routes_performer.go @@ -54,13 +54,11 @@ func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) { } } - if len(image) == 0 || defaultParam == "true" { + if len(image) == 0 { image, _ = getRandomPerformerImageUsingName(performer.Name, performer.Gender, config.GetInstance().GetCustomPerformerImageLocation()) } - if err := utils.ServeImage(image, w, r); err != nil { - logger.Warnf("error serving performer image: %v", err) - } + utils.ServeImage(w, r, image) } func (rs performerRoutes) PerformerCtx(next http.Handler) http.Handler { diff --git a/internal/api/routes_scene.go b/internal/api/routes_scene.go index c01c43104..9a5e81496 100644 --- a/internal/api/routes_scene.go +++ b/internal/api/routes_scene.go @@ -88,24 +88,12 @@ func (rs sceneRoutes) Routes() chi.Router { // region Handlers func (rs 
sceneRoutes) StreamDirect(w http.ResponseWriter, r *http.Request) { - scene := r.Context().Value(sceneKey).(*models.Scene) - // #3526 - return 404 if the scene does not have any files - if scene.Path == "" { - w.WriteHeader(http.StatusNotFound) - return + ss := manager.SceneServer{ + TxnManager: rs.txnManager, + SceneCoverGetter: rs.sceneFinder, } - - sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()) - - filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, sceneHash) - streamRequestCtx := ffmpeg.NewStreamRequestContext(w, r) - - // #2579 - hijacking and closing the connection here causes video playback to fail in Safari - // We trust that the request context will be closed, so we don't need to call Cancel on the - // returned context here. - _ = manager.GetInstance().ReadLockManager.ReadLock(streamRequestCtx, filepath) - http.ServeFile(w, r, filepath) + ss.StreamSceneDirect(scene, w, r) } func (rs sceneRoutes) StreamMp4(w http.ResponseWriter, r *http.Request) { @@ -266,22 +254,16 @@ func (rs sceneRoutes) Preview(w http.ResponseWriter, r *http.Request) { scene := r.Context().Value(sceneKey).(*models.Scene) sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()) filepath := manager.GetInstance().Paths.Scene.GetVideoPreviewPath(sceneHash) - serveFileNoCache(w, r, filepath) -} -// serveFileNoCache serves the provided file, ensuring that the response -// contains headers to prevent caching. 
-func serveFileNoCache(w http.ResponseWriter, r *http.Request, filepath string) { - w.Header().Add("Cache-Control", "no-cache") - - http.ServeFile(w, r, filepath) + utils.ServeStaticFile(w, r, filepath) } func (rs sceneRoutes) Webp(w http.ResponseWriter, r *http.Request) { scene := r.Context().Value(sceneKey).(*models.Scene) sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()) filepath := manager.GetInstance().Paths.Scene.GetWebpPreviewPath(sceneHash) - http.ServeFile(w, r, filepath) + + utils.ServeStaticFile(w, r, filepath) } func (rs sceneRoutes) getChapterVttTitle(ctx context.Context, marker *models.SceneMarker) (*string, error) { @@ -355,7 +337,7 @@ func (rs sceneRoutes) VttChapter(w http.ResponseWriter, r *http.Request) { vtt := strings.Join(vttLines, "\n") w.Header().Set("Content-Type", "text/vtt") - _, _ = w.Write([]byte(vtt)) + utils.ServeStaticContent(w, r, []byte(vtt)) } func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) { @@ -366,9 +348,10 @@ func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) { } else { sceneHash = chi.URLParam(r, "sceneHash") } - w.Header().Set("Content-Type", "text/vtt") filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(sceneHash) - http.ServeFile(w, r, filepath) + + w.Header().Set("Content-Type", "text/vtt") + utils.ServeStaticFile(w, r, filepath) } func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) { @@ -379,23 +362,24 @@ func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) { } else { sceneHash = chi.URLParam(r, "sceneHash") } - w.Header().Set("Content-Type", "image/jpeg") filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(sceneHash) - http.ServeFile(w, r, filepath) + + utils.ServeStaticFile(w, r, filepath) } func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) { s := r.Context().Value(sceneKey).(*models.Scene) - funscript := video.GetFunscriptPath(s.Path) - 
serveFileNoCache(w, r, funscript) + filepath := video.GetFunscriptPath(s.Path) + + utils.ServeStaticFile(w, r, filepath) } func (rs sceneRoutes) InteractiveHeatmap(w http.ResponseWriter, r *http.Request) { scene := r.Context().Value(sceneKey).(*models.Scene) sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()) - w.Header().Set("Content-Type", "image/png") filepath := manager.GetInstance().Paths.Scene.GetInteractiveHeatmapPath(sceneHash) - http.ServeFile(w, r, filepath) + + utils.ServeStaticFile(w, r, filepath) } func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang string, ext string) { @@ -434,16 +418,17 @@ func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang strin return } - var b bytes.Buffer - err = sub.WriteToWebVTT(&b) + var buf bytes.Buffer + + err = sub.WriteToWebVTT(&buf) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } w.Header().Set("Content-Type", "text/vtt") - w.Header().Add("Cache-Control", "no-cache") - _, _ = b.WriteTo(w) + utils.ServeStaticContent(w, r, buf.Bytes()) + return } } @@ -483,7 +468,7 @@ func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request) } filepath := manager.GetInstance().Paths.SceneMarkers.GetVideoPreviewPath(sceneHash, int(sceneMarker.Seconds)) - http.ServeFile(w, r, filepath) + utils.ServeStaticFile(w, r, filepath) } func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request) { @@ -516,12 +501,10 @@ func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request) exists, _ := fsutil.FileExists(filepath) if !exists { w.Header().Set("Content-Type", "image/png") - w.Header().Set("Cache-Control", "no-store") - _, _ = w.Write(utils.PendingGenerateResource) - return + utils.ServeStaticContent(w, r, utils.PendingGenerateResource) + } else { + utils.ServeStaticFile(w, r, filepath) } - - http.ServeFile(w, r, filepath) } func (rs sceneRoutes) SceneMarkerScreenshot(w 
http.ResponseWriter, r *http.Request) { @@ -554,12 +537,10 @@ func (rs sceneRoutes) SceneMarkerScreenshot(w http.ResponseWriter, r *http.Reque exists, _ := fsutil.FileExists(filepath) if !exists { w.Header().Set("Content-Type", "image/png") - w.Header().Set("Cache-Control", "no-store") - _, _ = w.Write(utils.PendingGenerateResource) - return + utils.ServeStaticContent(w, r, utils.PendingGenerateResource) + } else { + utils.ServeStaticFile(w, r, filepath) } - - http.ServeFile(w, r, filepath) } // endregion diff --git a/internal/api/routes_studio.go b/internal/api/routes_studio.go index a77763e8d..ca4e580f6 100644 --- a/internal/api/routes_studio.go +++ b/internal/api/routes_studio.go @@ -3,10 +3,12 @@ package api import ( "context" "errors" + "io" "net/http" "strconv" "github.com/go-chi/chi" + "github.com/stashapp/stash/internal/static" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/studio" @@ -55,12 +57,17 @@ func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) { } if len(image) == 0 { - image, _ = utils.ProcessBase64Image(models.DefaultStudioImage) + const defaultStudioImage = "studio/studio.svg" + + // fall back to static image + f, _ := static.Studio.Open(defaultStudioImage) + defer f.Close() + stat, _ := f.Stat() + http.ServeContent(w, r, "studio.svg", stat.ModTime(), f.(io.ReadSeeker)) + return } - if err := utils.ServeImage(image, w, r); err != nil { - logger.Warnf("error serving studio image: %v", err) - } + utils.ServeImage(w, r, image) } func (rs studioRoutes) StudioCtx(next http.Handler) http.Handler { diff --git a/internal/api/routes_tag.go b/internal/api/routes_tag.go index e3ee439e9..d8837da80 100644 --- a/internal/api/routes_tag.go +++ b/internal/api/routes_tag.go @@ -3,10 +3,12 @@ package api import ( "context" "errors" + "io" "net/http" "strconv" "github.com/go-chi/chi" + "github.com/stashapp/stash/internal/static" "github.com/stashapp/stash/pkg/logger" 
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/tag" @@ -55,12 +57,17 @@ func (rs tagRoutes) Image(w http.ResponseWriter, r *http.Request) { } if len(image) == 0 { - image = models.DefaultTagImage + const defaultTagImage = "tag/tag.svg" + + // fall back to static image + f, _ := static.Tag.Open(defaultTagImage) + defer f.Close() + stat, _ := f.Stat() + http.ServeContent(w, r, "tag.svg", stat.ModTime(), f.(io.ReadSeeker)) + return } - if err := utils.ServeImage(image, w, r); err != nil { - logger.Warnf("error serving tag image: %v", err) - } + utils.ServeImage(w, r, image) } func (rs tagRoutes) TagCtx(next http.Handler) http.Handler { diff --git a/internal/api/server.go b/internal/api/server.go index c8e8a7b28..cfc57b3dd 100644 --- a/internal/api/server.go +++ b/internal/api/server.go @@ -27,17 +27,25 @@ import ( "github.com/gorilla/websocket" "github.com/vearutop/statigz" + "github.com/go-chi/cors" "github.com/go-chi/httplog" - "github.com/rs/cors" "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/plugin" + "github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/ui" ) +const ( + loginEndpoint = "/login" + logoutEndpoint = "/logout" + gqlEndpoint = "/graphql" + playgroundEndpoint = "/playground" +) + var version string var buildstamp string var githash string @@ -51,6 +59,7 @@ func Start() error { r := chi.NewRouter() r.Use(middleware.Heartbeat("/healthz")) + r.Use(cors.AllowAll().Handler) r.Use(authenticateHandler()) visitedPluginHandler := manager.GetInstance().SessionStore.VisitedPluginHandler() r.Use(visitedPluginHandler) @@ -67,7 +76,6 @@ func Start() error { r.Use(SecurityHeadersMiddleware) r.Use(middleware.DefaultCompress) r.Use(middleware.StripSlashes) - r.Use(cors.AllowAll().Handler) r.Use(BaseURLMiddleware) recoverFunc := 
func(ctx context.Context, err interface{}) error { @@ -120,7 +128,10 @@ func Start() error { gqlSrv.SetQueryCache(gqlLru.New(1000)) gqlSrv.Use(gqlExtension.Introspection{}) + gqlSrv.SetErrorPresenter(gqlErrorHandler) + gqlHandlerFunc := func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Cache-Control", "no-store") gqlSrv.ServeHTTP(w, r) } @@ -130,14 +141,12 @@ func Start() error { gqlHandler := visitedPluginHandler(dataloaders.Middleware(http.HandlerFunc(gqlHandlerFunc))) manager.GetInstance().PluginCache.RegisterGQLHandler(gqlHandler) - r.HandleFunc("/graphql", gqlHandlerFunc) - r.HandleFunc("/playground", gqlPlayground.Handler("GraphQL playground", "/graphql")) - - // session handlers - r.Post(loginEndPoint, handleLogin(loginUIBox)) - r.Get(logoutEndPoint, handleLogout(loginUIBox)) - - r.Get(loginEndPoint, getLoginHandler(loginUIBox)) + r.HandleFunc(gqlEndpoint, gqlHandlerFunc) + r.HandleFunc(playgroundEndpoint, func(w http.ResponseWriter, r *http.Request) { + setPageSecurityHeaders(w, r) + endpoint := getProxyPrefix(r) + gqlEndpoint + gqlPlayground.Handler("GraphQL playground", endpoint)(w, r) + }) r.Mount("/performer", performerRoutes{ txnManager: txnManager, @@ -172,36 +181,17 @@ func Start() error { r.HandleFunc("/css", cssHandler(c, pluginCache)) r.HandleFunc("/javascript", javascriptHandler(c, pluginCache)) - r.HandleFunc("/customlocales", func(w http.ResponseWriter, r *http.Request) { - w.Header().Set("Content-Type", "application/json") - if c.GetCustomLocalesEnabled() { - // search for custom-locales.json in current directory, then $HOME/.stash - fn := c.GetCustomLocalesPath() - exists, _ := fsutil.FileExists(fn) - if exists { - http.ServeFile(w, r, fn) - return - } - } - _, _ = w.Write([]byte("{}")) - }) + r.HandleFunc("/customlocales", customLocalesHandler(c)) - r.HandleFunc("/login*", func(w http.ResponseWriter, r *http.Request) { - ext := path.Ext(r.URL.Path) - if ext == ".html" || ext == "" { - prefix := getProxyPrefix(r.Header) + 
staticLoginUI := statigz.FileServer(loginUIBox.(fs.ReadDirFS)) - data := getLoginPage(loginUIBox) - baseURLIndex := strings.Replace(string(data), "%BASE_URL%", prefix+"/", 2) - _, _ = w.Write([]byte(baseURLIndex)) - } else { - r.URL.Path = strings.Replace(r.URL.Path, loginEndPoint, "", 1) - loginRoot, err := fs.Sub(loginUIBox, loginRootDir) - if err != nil { - panic(err) - } - http.FileServer(http.FS(loginRoot)).ServeHTTP(w, r) - } + r.Get(loginEndpoint, handleLogin(loginUIBox)) + r.Post(loginEndpoint, handleLoginPost(loginUIBox)) + r.Get(logoutEndpoint, handleLogout()) + r.HandleFunc(loginEndpoint+"/*", func(w http.ResponseWriter, r *http.Request) { + r.URL.Path = strings.TrimPrefix(r.URL.Path, loginEndpoint) + w.Header().Set("Cache-Control", "no-cache") + staticLoginUI.ServeHTTP(w, r) }) // Serve static folders @@ -213,12 +203,10 @@ func Start() error { } customUILocation := c.GetCustomUILocation() - static := statigz.FileServer(uiBox) + staticUI := statigz.FileServer(uiBox.(fs.ReadDirFS)) // Serve the web app r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) { - const uiRootDir = "v2.5/build" - ext := path.Ext(r.URL.Path) if customUILocation != "" { @@ -232,29 +220,29 @@ func Start() error { if ext == ".html" || ext == "" { themeColor := c.GetThemeColor() - data, err := uiBox.ReadFile(uiRootDir + "/index.html") + data, err := fs.ReadFile(uiBox, "index.html") if err != nil { panic(err) } + indexHtml := string(data) - prefix := getProxyPrefix(r.Header) - baseURLIndex := strings.ReplaceAll(string(data), "%COLOR%", themeColor) - baseURLIndex = strings.ReplaceAll(baseURLIndex, "/%BASE_URL%", prefix) - baseURLIndex = strings.Replace(baseURLIndex, "base href=\"/\"", fmt.Sprintf("base href=\"%s\"", prefix+"/"), 1) - _, _ = w.Write([]byte(baseURLIndex)) + prefix := getProxyPrefix(r) + indexHtml = strings.ReplaceAll(indexHtml, "%COLOR%", themeColor) + indexHtml = strings.Replace(indexHtml, `= logrus.TraceLevel { + msg, args := fn() + log.Tracef(msg, args...) 
+ } +} + func (log *Logger) Debug(args ...interface{}) { log.logger.Debug(args...) l := &LogItem{ @@ -253,6 +260,17 @@ func (log *Logger) Debugf(format string, args ...interface{}) { log.addLogItem(l) } +func (log *Logger) logFunc(level logrus.Level, logFn func(format string, args ...interface{}), fn func() (string, []interface{})) { + if log.logger.Level >= level { + msg, args := fn() + logFn(msg, args...) + } +} + +func (log *Logger) DebugFunc(fn func() (string, []interface{})) { + log.logFunc(logrus.DebugLevel, log.logger.Debugf, fn) +} + func (log *Logger) Info(args ...interface{}) { log.logger.Info(args...) l := &LogItem{ @@ -271,6 +289,10 @@ func (log *Logger) Infof(format string, args ...interface{}) { log.addLogItem(l) } +func (log *Logger) InfoFunc(fn func() (string, []interface{})) { + log.logFunc(logrus.InfoLevel, log.logger.Infof, fn) +} + func (log *Logger) Warn(args ...interface{}) { log.logger.Warn(args...) l := &LogItem{ @@ -289,6 +311,10 @@ func (log *Logger) Warnf(format string, args ...interface{}) { log.addLogItem(l) } +func (log *Logger) WarnFunc(fn func() (string, []interface{})) { + log.logFunc(logrus.WarnLevel, log.logger.Warnf, fn) +} + func (log *Logger) Error(args ...interface{}) { log.logger.Error(args...) l := &LogItem{ @@ -307,6 +333,10 @@ func (log *Logger) Errorf(format string, args ...interface{}) { log.addLogItem(l) } +func (log *Logger) ErrorFunc(fn func() (string, []interface{})) { + log.logFunc(logrus.ErrorLevel, log.logger.Errorf, fn) +} + func (log *Logger) Fatal(args ...interface{}) { log.logger.Fatal(args...) 
} diff --git a/internal/manager/config/config.go b/internal/manager/config/config.go index 4b2ba7921..44c643925 100644 --- a/internal/manager/config/config.go +++ b/internal/manager/config/config.go @@ -96,6 +96,9 @@ const ( WriteImageThumbnails = "write_image_thumbnails" writeImageThumbnailsDefault = true + CreateImageClipsFromVideos = "create_image_clip_from_videos" + createImageClipsFromVideosDefault = false + Host = "host" hostDefault = "0.0.0.0" @@ -210,6 +213,9 @@ const ( DLNADefaultIPWhitelist = "dlna.default_whitelist" DLNAInterfaces = "dlna.interfaces" + DLNAVideoSortOrder = "dlna.video_sort_order" + dlnaVideoSortOrderDefault = "title" + // Logging options LogFile = "logFile" LogOut = "logOut" @@ -862,6 +868,10 @@ func (i *Instance) IsWriteImageThumbnails() bool { return i.getBool(WriteImageThumbnails) } +func (i *Instance) IsCreateImageClipsFromVideos() bool { + return i.getBool(CreateImageClipsFromVideos) +} + func (i *Instance) GetAPIKey() string { return i.getString(ApiKey) } @@ -1370,6 +1380,17 @@ func (i *Instance) GetDLNAInterfaces() []string { return i.getStringSlice(DLNAInterfaces) } +// GetVideoSortOrder returns the sort order to display videos. If +// empty, videos will be sorted by titles. +func (i *Instance) GetVideoSortOrder() string { + ret := i.getString(DLNAVideoSortOrder) + if ret == "" { + ret = dlnaVideoSortOrderDefault + } + + return ret +} + // GetLogFile returns the filename of the file to output logs to. // An empty string means that file logging will be disabled. 
func (i *Instance) GetLogFile() string { @@ -1499,6 +1520,7 @@ func (i *Instance) setDefaultValues(write bool) error { i.main.SetDefault(ThemeColor, DefaultThemeColor) i.main.SetDefault(WriteImageThumbnails, writeImageThumbnailsDefault) + i.main.SetDefault(CreateImageClipsFromVideos, createImageClipsFromVideosDefault) i.main.SetDefault(Database, defaultDatabaseFilePath) diff --git a/internal/manager/config/init.go b/internal/manager/config/init.go index f512999a6..37a191436 100644 --- a/internal/manager/config/init.go +++ b/internal/manager/config/init.go @@ -24,6 +24,7 @@ type flagStruct struct { configFilePath string cpuProfilePath string nobrowser bool + helpFlag bool } func GetInstance() *Instance { @@ -40,6 +41,12 @@ func Initialize() (*Instance, error) { var err error initOnce.Do(func() { flags := initFlags() + + if flags.helpFlag { + pflag.Usage() + os.Exit(0) + } + overrides := makeOverrideConfig() _ = GetInstance() @@ -126,6 +133,7 @@ func initFlags() flagStruct { pflag.StringVarP(&flags.configFilePath, "config", "c", "", "config file to use") pflag.StringVar(&flags.cpuProfilePath, "cpuprofile", "", "write cpu profile to file") pflag.BoolVar(&flags.nobrowser, "nobrowser", false, "Don't open a browser window after launch") + pflag.BoolVarP(&flags.helpFlag, "help", "h", false, "show this help text and exit") pflag.Parse() diff --git a/internal/manager/config/tasks.go b/internal/manager/config/tasks.go index 1e541fcc5..b87a1d23a 100644 --- a/internal/manager/config/tasks.go +++ b/internal/manager/config/tasks.go @@ -19,6 +19,8 @@ type ScanMetadataOptions struct { ScanGeneratePhashes bool `json:"scanGeneratePhashes"` // Generate image thumbnails during scan ScanGenerateThumbnails bool `json:"scanGenerateThumbnails"` + // Generate image thumbnails during scan + ScanGenerateClipPreviews bool `json:"scanGenerateClipPreviews"` } type AutoTagMetadataOptions struct { diff --git a/internal/manager/downloads.go b/internal/manager/downloads.go index 
274b717b7..a1bc89264 100644 --- a/internal/manager/downloads.go +++ b/internal/manager/downloads.go @@ -80,6 +80,7 @@ func (s *DownloadStore) Serve(hash string, w http.ResponseWriter, r *http.Reques if f.contentType != "" { w.Header().Add("Content-Type", f.contentType) } + w.Header().Set("Cache-Control", "no-store") http.ServeFile(w, r, f.path) } diff --git a/internal/manager/favicon.go b/internal/manager/favicon.go deleted file mode 100644 index 6267cfec0..000000000 --- a/internal/manager/favicon.go +++ /dev/null @@ -1,28 +0,0 @@ -package manager - -import ( - "embed" - "runtime" -) - -const faviconDir = "v2.5/build/" - -type FaviconProvider struct { - UIBox embed.FS -} - -func (p *FaviconProvider) GetFavicon() []byte { - if runtime.GOOS == "windows" { - faviconPath := faviconDir + "favicon.ico" - ret, _ := p.UIBox.ReadFile(faviconPath) - return ret - } - - return p.GetFaviconPng() -} - -func (p *FaviconProvider) GetFaviconPng() []byte { - faviconPath := faviconDir + "favicon.png" - ret, _ := p.UIBox.ReadFile(faviconPath) - return ret -} diff --git a/internal/manager/fingerprint.go b/internal/manager/fingerprint.go index 5c2c66352..fc183cc6a 100644 --- a/internal/manager/fingerprint.go +++ b/internal/manager/fingerprint.go @@ -63,7 +63,7 @@ func (c *fingerprintCalculator) CalculateFingerprints(f *file.BaseFile, o file.O var ret []file.Fingerprint calculateMD5 := true - if isVideo(f.Basename) { + if useAsVideo(f.Path) { var ( fp *file.Fingerprint err error diff --git a/internal/manager/generator_interactive_heatmap_speed.go b/internal/manager/generator_interactive_heatmap_speed.go index 3b3b98bf4..3cae5f562 100644 --- a/internal/manager/generator_interactive_heatmap_speed.go +++ b/internal/manager/generator_interactive_heatmap_speed.go @@ -73,10 +73,11 @@ func (g *InteractiveHeatmapSpeedGenerator) Generate(funscriptPath string, heatma return fmt.Errorf("no valid actions in funscript") } + sceneDurationMilli := int64(sceneDuration * 1000) g.Funscript = funscript 
g.Funscript.UpdateIntensityAndSpeed() - err = g.RenderHeatmap(heatmapPath) + err = g.RenderHeatmap(heatmapPath, sceneDurationMilli) if err != nil { return err @@ -155,8 +156,8 @@ func (funscript *Script) UpdateIntensityAndSpeed() { } // funscript needs to have intensity updated first -func (g *InteractiveHeatmapSpeedGenerator) RenderHeatmap(heatmapPath string) error { - gradient := g.Funscript.getGradientTable(g.NumSegments) +func (g *InteractiveHeatmapSpeedGenerator) RenderHeatmap(heatmapPath string, sceneDurationMilli int64) error { + gradient := g.Funscript.getGradientTable(g.NumSegments, sceneDurationMilli) img := image.NewRGBA(image.Rect(0, 0, g.Width, g.Height)) for x := 0; x < g.Width; x++ { @@ -179,7 +180,7 @@ func (g *InteractiveHeatmapSpeedGenerator) RenderHeatmap(heatmapPath string) err } // add 10 minute marks - maxts := g.Funscript.Actions[len(g.Funscript.Actions)-1].At + maxts := sceneDurationMilli const tick = 600000 var ts int64 = tick c, _ := colorful.Hex("#000000") @@ -242,7 +243,7 @@ func (gt GradientTable) GetYRange(t float64) [2]float64 { return gt[len(gt)-1].YRange } -func (funscript Script) getGradientTable(numSegments int) GradientTable { +func (funscript Script) getGradientTable(numSegments int, sceneDurationMilli int64) GradientTable { const windowSize = 15 const backfillThreshold = 500 @@ -255,7 +256,7 @@ func (funscript Script) getGradientTable(numSegments int) GradientTable { gradient := make(GradientTable, numSegments) posList := []int{} - maxts := funscript.Actions[len(funscript.Actions)-1].At + maxts := sceneDurationMilli for _, a := range funscript.Actions { posList = append(posList, a.Pos) diff --git a/internal/manager/manager.go b/internal/manager/manager.go index a591e98ab..6d776fcf7 100644 --- a/internal/manager/manager.go +++ b/internal/manager/manager.go @@ -279,11 +279,11 @@ func initialize() error { } func videoFileFilter(ctx context.Context, f file.File) bool { - return isVideo(f.Base().Basename) + return 
useAsVideo(f.Base().Path) } func imageFileFilter(ctx context.Context, f file.File) bool { - return isImage(f.Base().Basename) + return useAsImage(f.Base().Path) } func galleryFileFilter(ctx context.Context, f file.File) bool { @@ -306,8 +306,10 @@ func makeScanner(db *sqlite.Database, pluginCache *plugin.Cache) *file.Scanner { Filter: file.FilterFunc(videoFileFilter), }, &file.FilteredDecorator{ - Decorator: &file_image.Decorator{}, - Filter: file.FilterFunc(imageFileFilter), + Decorator: &file_image.Decorator{ + FFProbe: instance.FFProbe, + }, + Filter: file.FilterFunc(imageFileFilter), }, }, FingerprintCalculator: &fingerprintCalculator{instance.Config}, @@ -509,12 +511,8 @@ func (s *Manager) SetBlobStoreOptions() { } func writeStashIcon() { - p := FaviconProvider{ - UIBox: ui.UIBox, - } - iconPath := filepath.Join(instance.Config.GetConfigPath(), "icon.png") - err := os.WriteFile(iconPath, p.GetFaviconPng(), 0644) + err := os.WriteFile(iconPath, ui.FaviconProvider.GetFaviconPng(), 0644) if err != nil { logger.Errorf("Couldn't write icon file: %s", err.Error()) } diff --git a/internal/manager/manager_tasks.go b/internal/manager/manager_tasks.go index 10bcacab0..3987fb9ba 100644 --- a/internal/manager/manager_tasks.go +++ b/internal/manager/manager_tasks.go @@ -15,6 +15,20 @@ import ( "github.com/stashapp/stash/pkg/models" ) +func useAsVideo(pathname string) bool { + if instance.Config.IsCreateImageClipsFromVideos() && config.StashConfigs.GetStashFromDirPath(instance.Config.GetStashPaths(), pathname).ExcludeVideo { + return false + } + return isVideo(pathname) +} + +func useAsImage(pathname string) bool { + if instance.Config.IsCreateImageClipsFromVideos() && config.StashConfigs.GetStashFromDirPath(instance.Config.GetStashPaths(), pathname).ExcludeVideo { + return isImage(pathname) || isVideo(pathname) + } + return isImage(pathname) +} + func isZip(pathname string) bool { gExt := config.GetInstance().GetGalleryExtensions() return fsutil.MatchExtension(pathname, 
gExt) diff --git a/internal/manager/repository.go b/internal/manager/repository.go index 41ac5f12e..55fea1672 100644 --- a/internal/manager/repository.go +++ b/internal/manager/repository.go @@ -15,7 +15,6 @@ import ( type ImageReaderWriter interface { models.ImageReaderWriter image.FinderCreatorUpdater - models.ImageFileLoader GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) } @@ -114,4 +113,6 @@ type GalleryService interface { Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) ValidateImageGalleryChange(ctx context.Context, i *models.Image, updateIDs models.UpdateIDs) error + + Updated(ctx context.Context, galleryID int) error } diff --git a/internal/manager/running_streams.go b/internal/manager/running_streams.go index 8fa397640..788b30f1c 100644 --- a/internal/manager/running_streams.go +++ b/internal/manager/running_streams.go @@ -39,9 +39,15 @@ type SceneServer struct { } func (s *SceneServer) StreamSceneDirect(scene *models.Scene, w http.ResponseWriter, r *http.Request) { - fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm() + // #3526 - return 404 if the scene does not have any files + if scene.Path == "" { + http.Error(w, http.StatusText(404), 404) + return + } - filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.GetHash(fileNamingAlgo)) + sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()) + + filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path, sceneHash) streamRequestCtx := ffmpeg.NewStreamRequestContext(w, r) // #2579 - hijacking and closing the connection here causes video playback to fail in Safari @@ -69,11 +75,17 @@ func (s *SceneServer) ServeScreenshot(scene *models.Scene, w http.ResponseWriter if cover == nil { // fallback to legacy image if present if scene.Path != "" { - filepath := 
GetInstance().Paths.Scene.GetLegacyScreenshotPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())) + sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()) + filepath := GetInstance().Paths.Scene.GetLegacyScreenshotPath(sceneHash) // fall back to the scene image blob if the file isn't present screenshotExists, _ := fsutil.FileExists(filepath) if screenshotExists { + if r.URL.Query().Has("t") { + w.Header().Set("Cache-Control", "private, max-age=31536000, immutable") + } else { + w.Header().Set("Cache-Control", "no-cache") + } http.ServeFile(w, r, filepath) return } @@ -83,11 +95,8 @@ func (s *SceneServer) ServeScreenshot(scene *models.Scene, w http.ResponseWriter // should always be there f, _ := static.Scene.Open(defaultSceneImage) defer f.Close() - stat, _ := f.Stat() - http.ServeContent(w, r, "scene.svg", stat.ModTime(), f.(io.ReadSeeker)) + cover, _ = io.ReadAll(f) } - if err := utils.ServeImage(cover, w, r); err != nil { - logger.Warnf("error serving screenshot image: %v", err) - } + utils.ServeImage(w, r, cover) } diff --git a/internal/manager/scene.go b/internal/manager/scene.go index a653cb632..39b96fec7 100644 --- a/internal/manager/scene.go +++ b/internal/manager/scene.go @@ -88,7 +88,7 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL *url.URL, maxStrea // convert StreamingResolutionEnum to ResolutionEnum maxStreamingResolution := models.ResolutionEnum(maxStreamingTranscodeSize) - sceneResolution := pf.GetMinResolution() + sceneResolution := file.GetMinResolution(pf) includeSceneStreamPath := func(streamingResolution models.StreamingResolutionEnum) bool { var minResolution int if streamingResolution == models.StreamingResolutionEnumOriginal { diff --git a/internal/manager/task_autotag.go b/internal/manager/task_autotag.go index 0dfe59dd3..273e65f28 100644 --- a/internal/manager/task_autotag.go +++ b/internal/manager/task_autotag.go @@ -37,7 +37,7 @@ func (j *autoTagJob) Execute(ctx context.Context, 
progress *job.Progress) { j.autoTagSpecific(ctx, progress) } - logger.Infof("Finished autotag after %s", time.Since(begin).String()) + logger.Infof("Finished auto-tag after %s", time.Since(begin).String()) } func (j *autoTagJob) isFileBasedAutoTag(input AutoTagMetadataInput) bool { @@ -84,32 +84,34 @@ func (j *autoTagJob) autoTagSpecific(ctx context.Context, progress *job.Progress if performerCount == 1 && performerIds[0] == wildcard { performerCount, err = performerQuery.Count(ctx) if err != nil { - return fmt.Errorf("error getting performer count: %v", err) + return fmt.Errorf("getting performer count: %v", err) } } if studioCount == 1 && studioIds[0] == wildcard { studioCount, err = studioQuery.Count(ctx) if err != nil { - return fmt.Errorf("error getting studio count: %v", err) + return fmt.Errorf("getting studio count: %v", err) } } if tagCount == 1 && tagIds[0] == wildcard { tagCount, err = tagQuery.Count(ctx) if err != nil { - return fmt.Errorf("error getting tag count: %v", err) + return fmt.Errorf("getting tag count: %v", err) } } return nil }); err != nil { - logger.Error(err.Error()) + if !job.IsCancelled(ctx) { + logger.Errorf("auto-tag error: %v", err) + } return } total := performerCount + studioCount + tagCount progress.SetTotal(total) - logger.Infof("Starting autotag of %d performers, %d studios, %d tags", performerCount, studioCount, tagCount) + logger.Infof("Starting auto-tag of %d performers, %d studios, %d tags", performerCount, studioCount, tagCount) j.autoTagPerformers(ctx, progress, input.Paths, performerIds) j.autoTagStudios(ctx, progress, input.Paths, studioIds) @@ -142,7 +144,7 @@ func (j *autoTagJob) autoTagPerformers(ctx context.Context, progress *job.Progre PerPage: &perPage, }) if err != nil { - return fmt.Errorf("error querying performers: %w", err) + return fmt.Errorf("querying performers: %w", err) } } else { performerIdInt, err := strconv.Atoi(performerId) @@ -167,11 +169,10 @@ func (j *autoTagJob) autoTagPerformers(ctx 
context.Context, progress *job.Progre for _, performer := range performers { if job.IsCancelled(ctx) { - logger.Info("Stopping due to user request") return nil } - if err := func() error { + err := func() error { r := j.txnManager if err := tagger.PerformerScenes(ctx, performer, paths, r.Scene); err != nil { return fmt.Errorf("processing scenes: %w", err) @@ -184,8 +185,14 @@ func (j *autoTagJob) autoTagPerformers(ctx context.Context, progress *job.Progre } return nil - }(); err != nil { - return fmt.Errorf("error auto-tagging performer '%s': %s", performer.Name, err.Error()) + }() + + if job.IsCancelled(ctx) { + return nil + } + + if err != nil { + return fmt.Errorf("tagging performer '%s': %s", performer.Name, err.Error()) } progress.Increment() @@ -193,8 +200,12 @@ func (j *autoTagJob) autoTagPerformers(ctx context.Context, progress *job.Progre return nil }); err != nil { - logger.Error(err.Error()) - continue + logger.Errorf("auto-tag error: %v", err) + } + + if job.IsCancelled(ctx) { + logger.Info("Stopping performer auto-tag due to user request") + return } } } @@ -225,17 +236,17 @@ func (j *autoTagJob) autoTagStudios(ctx context.Context, progress *job.Progress, PerPage: &perPage, }) if err != nil { - return fmt.Errorf("error querying studios: %v", err) + return fmt.Errorf("querying studios: %v", err) } } else { studioIdInt, err := strconv.Atoi(studioId) if err != nil { - return fmt.Errorf("error parsing studio id %s: %s", studioId, err.Error()) + return fmt.Errorf("parsing studio id %s: %s", studioId, err.Error()) } studio, err := studioQuery.Find(ctx, studioIdInt) if err != nil { - return fmt.Errorf("error finding studio id %s: %s", studioId, err.Error()) + return fmt.Errorf("finding studio id %s: %s", studioId, err.Error()) } if studio == nil { @@ -247,11 +258,10 @@ func (j *autoTagJob) autoTagStudios(ctx context.Context, progress *job.Progress, for _, studio := range studios { if job.IsCancelled(ctx) { - logger.Info("Stopping due to user request") return 
nil } - if err := func() error { + err := func() error { aliases, err := r.Studio.GetAliases(ctx, studio.ID) if err != nil { return fmt.Errorf("getting studio aliases: %w", err) @@ -268,8 +278,14 @@ func (j *autoTagJob) autoTagStudios(ctx context.Context, progress *job.Progress, } return nil - }(); err != nil { - return fmt.Errorf("error auto-tagging studio '%s': %s", studio.Name.String, err.Error()) + }() + + if job.IsCancelled(ctx) { + return nil + } + + if err != nil { + return fmt.Errorf("tagging studio '%s': %s", studio.Name.String, err.Error()) } progress.Increment() @@ -277,8 +293,12 @@ func (j *autoTagJob) autoTagStudios(ctx context.Context, progress *job.Progress, return nil }); err != nil { - logger.Error(err.Error()) - continue + logger.Errorf("auto-tag error: %v", err) + } + + if job.IsCancelled(ctx) { + logger.Info("Stopping studio auto-tag due to user request") + return } } } @@ -308,28 +328,27 @@ func (j *autoTagJob) autoTagTags(ctx context.Context, progress *job.Progress, pa PerPage: &perPage, }) if err != nil { - return fmt.Errorf("error querying tags: %v", err) + return fmt.Errorf("querying tags: %v", err) } } else { tagIdInt, err := strconv.Atoi(tagId) if err != nil { - return fmt.Errorf("error parsing tag id %s: %s", tagId, err.Error()) + return fmt.Errorf("parsing tag id %s: %s", tagId, err.Error()) } tag, err := tagQuery.Find(ctx, tagIdInt) if err != nil { - return fmt.Errorf("error finding tag id %s: %s", tagId, err.Error()) + return fmt.Errorf("finding tag id %s: %s", tagId, err.Error()) } tags = append(tags, tag) } for _, tag := range tags { if job.IsCancelled(ctx) { - logger.Info("Stopping due to user request") return nil } - if err := func() error { + err := func() error { aliases, err := r.Tag.GetAliases(ctx, tag.ID) if err != nil { return fmt.Errorf("getting tag aliases: %w", err) @@ -346,8 +365,14 @@ func (j *autoTagJob) autoTagTags(ctx context.Context, progress *job.Progress, pa } return nil - }(); err != nil { - return 
fmt.Errorf("error auto-tagging tag '%s': %s", tag.Name, err.Error()) + }() + + if job.IsCancelled(ctx) { + return nil + } + + if err != nil { + return fmt.Errorf("tagging tag '%s': %s", tag.Name, err.Error()) } progress.Increment() @@ -355,8 +380,12 @@ func (j *autoTagJob) autoTagTags(ctx context.Context, progress *job.Progress, pa return nil }); err != nil { - logger.Error(err.Error()) - continue + logger.Errorf("auto-tag error: %v", err) + } + + if job.IsCancelled(ctx) { + logger.Info("Stopping tag auto-tag due to user request") + return } } } @@ -488,11 +517,13 @@ func (t *autoTagFilesTask) getCount(ctx context.Context, r Repository) (int, err return sceneCount + imageCount + galleryCount, nil } -func (t *autoTagFilesTask) processScenes(ctx context.Context, r Repository) error { +func (t *autoTagFilesTask) processScenes(ctx context.Context, r Repository) { if job.IsCancelled(ctx) { - return nil + return } + logger.Info("Auto-tagging scenes...") + batchSize := 1000 findFilter := models.BatchFindFilter(batchSize) @@ -506,12 +537,16 @@ func (t *autoTagFilesTask) processScenes(ctx context.Context, r Repository) erro scenes, err = scene.Query(ctx, r.Scene, sceneFilter, findFilter) return err }); err != nil { - return fmt.Errorf("querying scenes: %w", err) + if !job.IsCancelled(ctx) { + logger.Errorf("error querying scenes for auto-tag: %w", err) + } + return } for _, ss := range scenes { if job.IsCancelled(ctx) { - return nil + logger.Info("Stopping auto-tag due to user request") + return } tt := autoTagSceneTask{ @@ -541,15 +576,15 @@ func (t *autoTagFilesTask) processScenes(ctx context.Context, r Repository) erro } } } - - return nil } -func (t *autoTagFilesTask) processImages(ctx context.Context, r Repository) error { +func (t *autoTagFilesTask) processImages(ctx context.Context, r Repository) { if job.IsCancelled(ctx) { - return nil + return } + logger.Info("Auto-tagging images...") + batchSize := 1000 findFilter := models.BatchFindFilter(batchSize) @@ -563,12 
+598,16 @@ func (t *autoTagFilesTask) processImages(ctx context.Context, r Repository) erro images, err = image.Query(ctx, r.Image, imageFilter, findFilter) return err }); err != nil { - return fmt.Errorf("querying images: %w", err) + if !job.IsCancelled(ctx) { + logger.Errorf("error querying images for auto-tag: %w", err) + } + return } for _, ss := range images { if job.IsCancelled(ctx) { - return nil + logger.Info("Stopping auto-tag due to user request") + return } tt := autoTagImageTask{ @@ -598,15 +637,15 @@ func (t *autoTagFilesTask) processImages(ctx context.Context, r Repository) erro } } } - - return nil } -func (t *autoTagFilesTask) processGalleries(ctx context.Context, r Repository) error { +func (t *autoTagFilesTask) processGalleries(ctx context.Context, r Repository) { if job.IsCancelled(ctx) { - return nil + return } + logger.Info("Auto-tagging galleries...") + batchSize := 1000 findFilter := models.BatchFindFilter(batchSize) @@ -620,12 +659,16 @@ func (t *autoTagFilesTask) processGalleries(ctx context.Context, r Repository) e galleries, _, err = r.Gallery.Query(ctx, galleryFilter, findFilter) return err }); err != nil { - return fmt.Errorf("querying galleries: %w", err) + if !job.IsCancelled(ctx) { + logger.Errorf("error querying galleries for auto-tag: %w", err) + } + return } for _, ss := range galleries { if job.IsCancelled(ctx) { - return nil + logger.Info("Stopping auto-tag due to user request") + return } tt := autoTagGalleryTask{ @@ -655,8 +698,6 @@ func (t *autoTagFilesTask) processGalleries(ctx context.Context, r Repository) e } } } - - return nil } func (t *autoTagFilesTask) process(ctx context.Context) { @@ -668,35 +709,19 @@ func (t *autoTagFilesTask) process(ctx context.Context) { } t.progress.SetTotal(total) - logger.Infof("Starting autotag of %d files", total) + logger.Infof("Starting auto-tag of %d files", total) return nil }); err != nil { - logger.Errorf("error getting count for autotag task: %v", err) + if !job.IsCancelled(ctx) { + 
logger.Errorf("error getting file count for auto-tag task: %v", err) + } return } - logger.Info("Autotagging scenes...") - if err := t.processScenes(ctx, r); err != nil { - logger.Errorf("error processing scenes: %w", err) - return - } - - logger.Info("Autotagging images...") - if err := t.processImages(ctx, r); err != nil { - logger.Errorf("error processing images: %w", err) - return - } - - logger.Info("Autotagging galleries...") - if err := t.processGalleries(ctx, r); err != nil { - logger.Errorf("error processing galleries: %w", err) - return - } - - if job.IsCancelled(ctx) { - logger.Info("Stopping due to user request") - } + t.processScenes(ctx, r) + t.processImages(ctx, r) + t.processGalleries(ctx, r) } type autoTagSceneTask struct { @@ -721,23 +746,25 @@ func (t *autoTagSceneTask) Start(ctx context.Context, wg *sync.WaitGroup) { if t.performers { if err := autotag.ScenePerformers(ctx, t.scene, r.Scene, r.Performer, t.cache); err != nil { - return fmt.Errorf("error tagging scene performers for %s: %v", t.scene.DisplayName(), err) + return fmt.Errorf("tagging scene performers for %s: %v", t.scene.DisplayName(), err) } } if t.studios { if err := autotag.SceneStudios(ctx, t.scene, r.Scene, r.Studio, t.cache); err != nil { - return fmt.Errorf("error tagging scene studio for %s: %v", t.scene.DisplayName(), err) + return fmt.Errorf("tagging scene studio for %s: %v", t.scene.DisplayName(), err) } } if t.tags { if err := autotag.SceneTags(ctx, t.scene, r.Scene, r.Tag, t.cache); err != nil { - return fmt.Errorf("error tagging scene tags for %s: %v", t.scene.DisplayName(), err) + return fmt.Errorf("tagging scene tags for %s: %v", t.scene.DisplayName(), err) } } return nil }); err != nil { - logger.Error(err.Error()) + if !job.IsCancelled(ctx) { + logger.Errorf("auto-tag error: %v", err) + } } } @@ -758,23 +785,25 @@ func (t *autoTagImageTask) Start(ctx context.Context, wg *sync.WaitGroup) { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { if 
t.performers { if err := autotag.ImagePerformers(ctx, t.image, r.Image, r.Performer, t.cache); err != nil { - return fmt.Errorf("error tagging image performers for %s: %v", t.image.DisplayName(), err) + return fmt.Errorf("tagging image performers for %s: %v", t.image.DisplayName(), err) } } if t.studios { if err := autotag.ImageStudios(ctx, t.image, r.Image, r.Studio, t.cache); err != nil { - return fmt.Errorf("error tagging image studio for %s: %v", t.image.DisplayName(), err) + return fmt.Errorf("tagging image studio for %s: %v", t.image.DisplayName(), err) } } if t.tags { if err := autotag.ImageTags(ctx, t.image, r.Image, r.Tag, t.cache); err != nil { - return fmt.Errorf("error tagging image tags for %s: %v", t.image.DisplayName(), err) + return fmt.Errorf("tagging image tags for %s: %v", t.image.DisplayName(), err) } } return nil }); err != nil { - logger.Error(err.Error()) + if !job.IsCancelled(ctx) { + logger.Errorf("auto-tag error: %v", err) + } } } @@ -795,22 +824,24 @@ func (t *autoTagGalleryTask) Start(ctx context.Context, wg *sync.WaitGroup) { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { if t.performers { if err := autotag.GalleryPerformers(ctx, t.gallery, r.Gallery, r.Performer, t.cache); err != nil { - return fmt.Errorf("error tagging gallery performers for %s: %v", t.gallery.DisplayName(), err) + return fmt.Errorf("tagging gallery performers for %s: %v", t.gallery.DisplayName(), err) } } if t.studios { if err := autotag.GalleryStudios(ctx, t.gallery, r.Gallery, r.Studio, t.cache); err != nil { - return fmt.Errorf("error tagging gallery studio for %s: %v", t.gallery.DisplayName(), err) + return fmt.Errorf("tagging gallery studio for %s: %v", t.gallery.DisplayName(), err) } } if t.tags { if err := autotag.GalleryTags(ctx, t.gallery, r.Gallery, r.Tag, t.cache); err != nil { - return fmt.Errorf("error tagging gallery tags for %s: %v", t.gallery.DisplayName(), err) + return fmt.Errorf("tagging gallery tags for %s: %v", 
t.gallery.DisplayName(), err) } } return nil }); err != nil { - logger.Error(err.Error()) + if !job.IsCancelled(ctx) { + logger.Errorf("auto-tag error: %v", err) + } } } diff --git a/internal/manager/task_clean.go b/internal/manager/task_clean.go index b90f11be8..5eb4d20a9 100644 --- a/internal/manager/task_clean.go +++ b/internal/manager/task_clean.go @@ -201,9 +201,9 @@ func (f *cleanFilter) shouldCleanFile(path string, info fs.FileInfo, stash *conf switch { case info.IsDir() || fsutil.MatchExtension(path, f.zipExt): return f.shouldCleanGallery(path, stash) - case fsutil.MatchExtension(path, f.vidExt): + case useAsVideo(path): return f.shouldCleanVideoFile(path, stash) - case fsutil.MatchExtension(path, f.imgExt): + case useAsImage(path): return f.shouldCleanImage(path, stash) default: logger.Infof("File extension does not match any media extensions. Marking to clean: \"%s\"", path) diff --git a/internal/manager/task_generate.go b/internal/manager/task_generate.go index c3b4f16f7..ce3d71000 100644 --- a/internal/manager/task_generate.go +++ b/internal/manager/task_generate.go @@ -7,6 +7,7 @@ import ( "github.com/remeh/sizedwaitgroup" "github.com/stashapp/stash/internal/manager/config" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -29,6 +30,7 @@ type GenerateMetadataInput struct { ForceTranscodes bool `json:"forceTranscodes"` Phashes bool `json:"phashes"` InteractiveHeatmapsSpeeds bool `json:"interactiveHeatmapsSpeeds"` + ClipPreviews bool `json:"clipPreviews"` // scene ids to generate for SceneIDs []string `json:"sceneIDs"` // marker ids to generate for @@ -69,6 +71,7 @@ type totalsGenerate struct { transcodes int64 phashes int64 interactiveHeatmapSpeeds int64 + clipPreviews int64 tasks int } @@ -142,7 +145,38 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) { return } - logger.Infof("Generating %d covers %d sprites %d 
previews %d image previews %d markers %d transcodes %d phashes %d heatmaps & speeds", totals.covers, totals.sprites, totals.previews, totals.imagePreviews, totals.markers, totals.transcodes, totals.phashes, totals.interactiveHeatmapSpeeds) + logMsg := "Generating" + if j.input.Covers { + logMsg += fmt.Sprintf(" %d covers", totals.covers) + } + if j.input.Sprites { + logMsg += fmt.Sprintf(" %d sprites", totals.sprites) + } + if j.input.Previews { + logMsg += fmt.Sprintf(" %d previews", totals.previews) + } + if j.input.ImagePreviews { + logMsg += fmt.Sprintf(" %d image previews", totals.imagePreviews) + } + if j.input.Markers { + logMsg += fmt.Sprintf(" %d markers", totals.markers) + } + if j.input.Transcodes { + logMsg += fmt.Sprintf(" %d transcodes", totals.transcodes) + } + if j.input.Phashes { + logMsg += fmt.Sprintf(" %d phashes", totals.phashes) + } + if j.input.InteractiveHeatmapsSpeeds { + logMsg += fmt.Sprintf(" %d heatmaps & speeds", totals.interactiveHeatmapSpeeds) + } + if j.input.ClipPreviews { + logMsg += fmt.Sprintf(" %d Image Clip Previews", totals.clipPreviews) + } + if logMsg == "Generating" { + logMsg = "Nothing selected to generate" + } + logger.Infof(logMsg) progress.SetTotal(int(totals.tasks)) }() @@ -226,6 +260,38 @@ func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, que } } + *findFilter.Page = 1 + for more := j.input.ClipPreviews; more; { + if job.IsCancelled(ctx) { + return totals + } + + images, err := image.Query(ctx, j.txnManager.Image, nil, findFilter) + if err != nil { + logger.Errorf("Error encountered queuing files to scan: %s", err.Error()) + return totals + } + + for _, ss := range images { + if job.IsCancelled(ctx) { + return totals + } + + if err := ss.LoadFiles(ctx, j.txnManager.Image); err != nil { + logger.Errorf("Error encountered queuing files to scan: %s", err.Error()) + return totals + } + + j.queueImageJob(g, ss, queue, &totals) + } + + if len(images) != batchSize { + more = false + } else { + 
*findFilter.Page++ + } + } + return totals } @@ -269,9 +335,10 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator, task := &GenerateCoverTask{ txnManager: j.txnManager, Scene: *scene, + Overwrite: j.overwrite, } - if j.overwrite || task.required(ctx) { + if task.required(ctx) { totals.covers++ totals.tasks++ queue <- task @@ -285,7 +352,7 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator, fileNamingAlgorithm: j.fileNamingAlgo, } - if j.overwrite || task.required() { + if task.required() { totals.sprites++ totals.tasks++ queue <- task @@ -309,21 +376,15 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator, } if task.required() { - addTask := false - if j.overwrite || !task.doesVideoPreviewExist() { + if task.videoPreviewRequired() { totals.previews++ - addTask = true } - - if j.input.ImagePreviews && (j.overwrite || !task.doesImagePreviewExist()) { + if task.imagePreviewRequired() { totals.imagePreviews++ - addTask = true } - if addTask { - totals.tasks++ - queue <- task - } + totals.tasks++ + queue <- task } } @@ -357,7 +418,7 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator, fileNamingAlgorithm: j.fileNamingAlgo, g: g, } - if task.isTranscodeNeeded() { + if task.required() { totals.transcodes++ totals.tasks++ queue <- task @@ -375,7 +436,7 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator, Overwrite: j.overwrite, } - if task.shouldGenerate() { + if task.required() { totals.phashes++ totals.tasks++ queue <- task @@ -391,7 +452,7 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator, TxnManager: j.txnManager, } - if task.shouldGenerate() { + if task.required() { totals.interactiveHeatmapSpeeds++ totals.tasks++ queue <- task @@ -411,3 +472,16 @@ func (j *GenerateJob) queueMarkerJob(g *generate.Generator, marker *models.Scene totals.tasks++ queue <- task } + +func (j *GenerateJob) 
queueImageJob(g *generate.Generator, image *models.Image, queue chan<- Task, totals *totalsGenerate) { + task := &GenerateClipPreviewTask{ + Image: *image, + Overwrite: j.overwrite, + } + + if task.required() { + totals.clipPreviews++ + totals.tasks++ + queue <- task + } +} diff --git a/internal/manager/task_generate_clip_preview.go b/internal/manager/task_generate_clip_preview.go new file mode 100644 index 000000000..c0ecfeedf --- /dev/null +++ b/internal/manager/task_generate_clip_preview.go @@ -0,0 +1,62 @@ +package manager + +import ( + "context" + "fmt" + + "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/image" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +type GenerateClipPreviewTask struct { + Image models.Image + Overwrite bool +} + +func (t *GenerateClipPreviewTask) GetDescription() string { + return fmt.Sprintf("Generating Preview for image Clip %s", t.Image.Path) +} + +func (t *GenerateClipPreviewTask) Start(ctx context.Context) { + if !t.required() { + return + } + + prevPath := GetInstance().Paths.Generated.GetClipPreviewPath(t.Image.Checksum, models.DefaultGthumbWidth) + filePath := t.Image.Files.Primary().Base().Path + + clipPreviewOptions := image.ClipPreviewOptions{ + InputArgs: GetInstance().Config.GetTranscodeInputArgs(), + OutputArgs: GetInstance().Config.GetTranscodeOutputArgs(), + Preset: GetInstance().Config.GetPreviewPreset().String(), + } + + encoder := image.NewThumbnailEncoder(GetInstance().FFMPEG, GetInstance().FFProbe, clipPreviewOptions) + err := encoder.GetPreview(filePath, prevPath, models.DefaultGthumbWidth) + if err != nil { + logger.Errorf("getting preview for image %s: %w", filePath, err) + return + } + +} + +func (t *GenerateClipPreviewTask) required() bool { + _, ok := t.Image.Files.Primary().(*file.VideoFile) + if !ok { + return false + } + + if t.Overwrite { + return true + } + + prevPath := 
GetInstance().Paths.Generated.GetClipPreviewPath(t.Image.Checksum, models.DefaultGthumbWidth) + if exists, _ := fsutil.FileExists(prevPath); exists { + return false + } + + return true +} diff --git a/internal/manager/task_generate_interactive_heatmap_speed.go b/internal/manager/task_generate_interactive_heatmap_speed.go index 564004b8e..4f91bd023 100644 --- a/internal/manager/task_generate_interactive_heatmap_speed.go +++ b/internal/manager/task_generate_interactive_heatmap_speed.go @@ -22,7 +22,7 @@ func (t *GenerateInteractiveHeatmapSpeedTask) GetDescription() string { } func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) { - if !t.shouldGenerate() { + if !t.required() { return } @@ -52,13 +52,18 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) { } } -func (t *GenerateInteractiveHeatmapSpeedTask) shouldGenerate() bool { +func (t *GenerateInteractiveHeatmapSpeedTask) required() bool { primaryFile := t.Scene.Files.Primary() if primaryFile == nil || !primaryFile.Interactive { return false } + + if t.Overwrite { + return true + } + sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) - return !t.doesHeatmapExist(sceneHash) || primaryFile.InteractiveSpeed == nil || t.Overwrite + return !t.doesHeatmapExist(sceneHash) || primaryFile.InteractiveSpeed == nil } func (t *GenerateInteractiveHeatmapSpeedTask) doesHeatmapExist(sceneChecksum string) bool { diff --git a/internal/manager/task_generate_phash.go b/internal/manager/task_generate_phash.go index 6ba840694..8ae84b02e 100644 --- a/internal/manager/task_generate_phash.go +++ b/internal/manager/task_generate_phash.go @@ -24,7 +24,7 @@ func (t *GeneratePhashTask) GetDescription() string { } func (t *GeneratePhashTask) Start(ctx context.Context) { - if !t.shouldGenerate() { + if !t.required() { return } @@ -49,6 +49,10 @@ func (t *GeneratePhashTask) Start(ctx context.Context) { } } -func (t *GeneratePhashTask) shouldGenerate() bool { - return t.Overwrite || 
t.File.Fingerprints.Get(file.FingerprintTypePhash) == nil +func (t *GeneratePhashTask) required() bool { + if t.Overwrite { + return true + } + + return t.File.Fingerprints.Get(file.FingerprintTypePhash) == nil } diff --git a/internal/manager/task_generate_preview.go b/internal/manager/task_generate_preview.go index c81909417..df2a69ee5 100644 --- a/internal/manager/task_generate_preview.go +++ b/internal/manager/task_generate_preview.go @@ -30,13 +30,9 @@ func (t *GeneratePreviewTask) GetDescription() string { } func (t *GeneratePreviewTask) Start(ctx context.Context) { - if !t.Overwrite && !t.required() { - return - } - videoChecksum := t.Scene.GetHash(t.fileNamingAlgorithm) - if t.Overwrite || !t.doesVideoPreviewExist() { + if t.videoPreviewRequired() { ffprobe := instance.FFProbe videoFile, err := ffprobe.NewVideoFile(t.Scene.Path) if err != nil { @@ -51,7 +47,7 @@ func (t *GeneratePreviewTask) Start(ctx context.Context) { } } - if t.ImagePreview && (t.Overwrite || !t.doesImagePreviewExist()) { + if t.imagePreviewRequired() { if err := t.generateWebp(videoChecksum); err != nil { logger.Errorf("error generating preview webp: %v", err) logErrorOutput(err) @@ -59,7 +55,7 @@ func (t *GeneratePreviewTask) Start(ctx context.Context) { } } -func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration float64, videoFrameRate float64) error { +func (t *GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration float64, videoFrameRate float64) error { videoFilename := t.Scene.Path useVsync2 := false @@ -78,12 +74,16 @@ func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration f return nil } -func (t GeneratePreviewTask) generateWebp(videoChecksum string) error { +func (t *GeneratePreviewTask) generateWebp(videoChecksum string) error { videoFilename := t.Scene.Path return t.generator.PreviewWebp(context.TODO(), videoFilename, videoChecksum) } -func (t GeneratePreviewTask) required() bool { +func (t *GeneratePreviewTask) 
required() bool { + return t.videoPreviewRequired() || t.imagePreviewRequired() +} + +func (t *GeneratePreviewTask) videoPreviewRequired() bool { if t.Scene.Path == "" { return false } @@ -92,12 +92,6 @@ func (t GeneratePreviewTask) required() bool { return true } - videoExists := t.doesVideoPreviewExist() - imageExists := !t.ImagePreview || t.doesImagePreviewExist() - return !imageExists || !videoExists -} - -func (t *GeneratePreviewTask) doesVideoPreviewExist() bool { sceneChecksum := t.Scene.GetHash(t.fileNamingAlgorithm) if sceneChecksum == "" { return false @@ -108,10 +102,22 @@ func (t *GeneratePreviewTask) doesVideoPreviewExist() bool { t.videoPreviewExists = &videoExists } - return *t.videoPreviewExists + return !*t.videoPreviewExists } -func (t *GeneratePreviewTask) doesImagePreviewExist() bool { +func (t *GeneratePreviewTask) imagePreviewRequired() bool { + if !t.ImagePreview { + return false + } + + if t.Scene.Path == "" { + return false + } + + if t.Overwrite { + return true + } + sceneChecksum := t.Scene.GetHash(t.fileNamingAlgorithm) if sceneChecksum == "" { return false @@ -122,5 +128,5 @@ func (t *GeneratePreviewTask) doesImagePreviewExist() bool { t.imagePreviewExists = &imageExists } - return *t.imagePreviewExists + return !*t.imagePreviewExists } diff --git a/internal/manager/task_generate_screenshot.go b/internal/manager/task_generate_screenshot.go index 5d32f2762..384d8740c 100644 --- a/internal/manager/task_generate_screenshot.go +++ b/internal/manager/task_generate_screenshot.go @@ -25,8 +25,8 @@ func (t *GenerateCoverTask) Start(ctx context.Context) { var required bool if err := t.txnManager.WithReadTxn(ctx, func(ctx context.Context) error { - // don't generate the screenshot if it already exists required = t.required(ctx) + return t.Scene.LoadPrimaryFile(ctx, t.txnManager.File) }); err != nil { logger.Error(err) @@ -92,7 +92,12 @@ func (t *GenerateCoverTask) Start(ctx context.Context) { } // required returns true if the sprite needs to be 
generated -func (t GenerateCoverTask) required(ctx context.Context) bool { +// assumes in a transaction +func (t *GenerateCoverTask) required(ctx context.Context) bool { + if t.Scene.Path == "" { + return false + } + if t.Overwrite { return true } diff --git a/internal/manager/task_generate_sprite.go b/internal/manager/task_generate_sprite.go index eb96d8f4c..0275830ab 100644 --- a/internal/manager/task_generate_sprite.go +++ b/internal/manager/task_generate_sprite.go @@ -20,7 +20,7 @@ func (t *GenerateSpriteTask) GetDescription() string { } func (t *GenerateSpriteTask) Start(ctx context.Context) { - if !t.Overwrite && !t.required() { + if !t.required() { return } @@ -54,6 +54,11 @@ func (t GenerateSpriteTask) required() bool { if t.Scene.Path == "" { return false } + + if t.Overwrite { + return true + } + sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) return !t.doesSpriteExist(sceneHash) } diff --git a/internal/manager/task_scan.go b/internal/manager/task_scan.go index fa31af610..7c5e20156 100644 --- a/internal/manager/task_scan.go +++ b/internal/manager/task_scan.go @@ -141,8 +141,8 @@ func newHandlerRequiredFilter(c *config.Instance) *handlerRequiredFilter { func (f *handlerRequiredFilter) Accept(ctx context.Context, ff file.File) bool { path := ff.Base().Path - isVideoFile := fsutil.MatchExtension(path, f.vidExt) - isImageFile := fsutil.MatchExtension(path, f.imgExt) + isVideoFile := useAsVideo(path) + isImageFile := useAsImage(path) isZipFile := fsutil.MatchExtension(path, f.zipExt) var counter fileCounter @@ -246,6 +246,7 @@ func newScanFilter(c *config.Instance, minModTime time.Time) *scanFilter { func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool { if fsutil.IsPathInDir(f.generatedPath, path) { + logger.Warnf("Skipping %q as it overlaps with the generated folder", path) return false } @@ -254,8 +255,8 @@ func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) return false } - isVideoFile := 
fsutil.MatchExtension(path, f.vidExt) - isImageFile := fsutil.MatchExtension(path, f.imgExt) + isVideoFile := useAsVideo(path) + isImageFile := useAsImage(path) isZipFile := fsutil.MatchExtension(path, f.zipExt) // handle caption files @@ -288,7 +289,7 @@ func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) // shortcut: skip the directory entirely if it matches both exclusion patterns // add a trailing separator so that it correctly matches against patterns like path/.* pathExcludeTest := path + string(filepath.Separator) - if (s.ExcludeVideo || matchFileRegex(pathExcludeTest, f.videoExcludeRegex)) && (s.ExcludeImage || matchFileRegex(pathExcludeTest, f.imageExcludeRegex)) { + if (matchFileRegex(pathExcludeTest, f.videoExcludeRegex)) && (s.ExcludeImage || matchFileRegex(pathExcludeTest, f.imageExcludeRegex)) { logger.Debugf("Skipping directory %s as it matches video and image exclusion patterns", path) return false } @@ -305,17 +306,14 @@ func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) } type scanConfig struct { - isGenerateThumbnails bool + isGenerateThumbnails bool + isGenerateClipPreviews bool } func (c *scanConfig) GetCreateGalleriesFromFolders() bool { return instance.Config.GetCreateGalleriesFromFolders() } -func (c *scanConfig) IsGenerateThumbnails() bool { - return c.isGenerateThumbnails -} - func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progress *job.Progress) []file.Handler { db := instance.Database pluginCache := instance.PluginCache @@ -324,11 +322,16 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre &file.FilteredHandler{ Filter: file.FilterFunc(imageFileFilter), Handler: &image.ScanHandler{ - CreatorUpdater: db.Image, - GalleryFinder: db.Gallery, - ThumbnailGenerator: &imageThumbnailGenerator{}, + CreatorUpdater: db.Image, + GalleryFinder: db.Gallery, + ScanGenerator: &imageGenerators{ + input: options, + taskQueue: taskQueue, + 
progress: progress, + }, ScanConfig: &scanConfig{ - isGenerateThumbnails: options.ScanGenerateThumbnails, + isGenerateThumbnails: options.ScanGenerateThumbnails, + isGenerateClipPreviews: options.ScanGenerateClipPreviews, }, PluginCache: pluginCache, Paths: instance.Paths, @@ -361,35 +364,97 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre } } -type imageThumbnailGenerator struct{} +type imageGenerators struct { + input ScanMetadataInput + taskQueue *job.TaskQueue + progress *job.Progress +} -func (g *imageThumbnailGenerator) GenerateThumbnail(ctx context.Context, i *models.Image, f *file.ImageFile) error { +func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f file.File) error { + const overwrite = false + + progress := g.progress + t := g.input + path := f.Base().Path + config := instance.Config + sequentialScanning := config.GetSequentialScanning() + + if t.ScanGenerateThumbnails { + // this should be quick, so always generate sequentially + if err := g.generateThumbnail(ctx, i, f); err != nil { + logger.Errorf("Error generating thumbnail for %s: %v", path, err) + } + } + + // avoid adding a task if the file isn't a video file + _, isVideo := f.(*file.VideoFile) + if isVideo && t.ScanGenerateClipPreviews { + // this is a bit of a hack: the task requires files to be loaded, but + // we don't really need to since we already have the file + ii := *i + ii.Files = models.NewRelatedFiles([]file.File{f}) + + progress.AddTotal(1) + previewsFn := func(ctx context.Context) { + taskPreview := GenerateClipPreviewTask{ + Image: ii, + Overwrite: overwrite, + } + + taskPreview.Start(ctx) + progress.Increment() + } + + if sequentialScanning { + previewsFn(ctx) + } else { + g.taskQueue.Add(fmt.Sprintf("Generating preview for %s", path), previewsFn) + } + } + + return nil +} + +func (g *imageGenerators) generateThumbnail(ctx context.Context, i *models.Image, f file.File) error { thumbPath := 
GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth) exists, _ := fsutil.FileExists(thumbPath) if exists { return nil } - if f.Height <= models.DefaultGthumbWidth && f.Width <= models.DefaultGthumbWidth { + path := f.Base().Path + + asFrame, ok := f.(file.VisualFile) + if !ok { + return fmt.Errorf("file %s does not implement Frame", path) + } + + if asFrame.GetHeight() <= models.DefaultGthumbWidth && asFrame.GetWidth() <= models.DefaultGthumbWidth { return nil } - logger.Debugf("Generating thumbnail for %s", f.Path) + logger.Debugf("Generating thumbnail for %s", path) - encoder := image.NewThumbnailEncoder(instance.FFMPEG) + clipPreviewOptions := image.ClipPreviewOptions{ + InputArgs: instance.Config.GetTranscodeInputArgs(), + OutputArgs: instance.Config.GetTranscodeOutputArgs(), + Preset: instance.Config.GetPreviewPreset().String(), + } + + encoder := image.NewThumbnailEncoder(instance.FFMPEG, instance.FFProbe, clipPreviewOptions) data, err := encoder.GetThumbnail(f, models.DefaultGthumbWidth) if err != nil { // don't log for animated images if !errors.Is(err, image.ErrNotSupportedForThumbnail) { - return fmt.Errorf("getting thumbnail for image %s: %w", f.Path, err) + return fmt.Errorf("getting thumbnail for image %s: %w", path, err) } return nil } err = fsutil.WriteFile(thumbPath, data) if err != nil { - return fmt.Errorf("writing thumbnail for image %s: %w", f.Path, err) + return fmt.Errorf("writing thumbnail for image %s: %w", path, err) } return nil @@ -490,6 +555,7 @@ func (g *sceneGenerators) Generate(ctx context.Context, s *models.Scene, f *file taskCover := GenerateCoverTask{ Scene: *s, txnManager: instance.Repository, + Overwrite: overwrite, } taskCover.Start(ctx) progress.Increment() diff --git a/internal/manager/task_stash_box_tag.go b/internal/manager/task_stash_box_tag.go index 886da242f..dd31b4899 100644 --- a/internal/manager/task_stash_box_tag.go +++ b/internal/manager/task_stash_box_tag.go @@ -119,24 +119,27 @@ 
func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) { aliases = []string{} } newPerformer := models.Performer{ - Aliases: models.NewRelatedStrings(aliases), - Birthdate: getDate(performer.Birthdate), - CareerLength: getString(performer.CareerLength), - Country: getString(performer.Country), - CreatedAt: currentTime, - Ethnicity: getString(performer.Ethnicity), - EyeColor: getString(performer.EyeColor), - FakeTits: getString(performer.FakeTits), - Gender: models.GenderEnum(getString(performer.Gender)), - Height: getIntPtr(performer.Height), - Weight: getIntPtr(performer.Weight), - Instagram: getString(performer.Instagram), - Measurements: getString(performer.Measurements), - Name: *performer.Name, - Piercings: getString(performer.Piercings), - Tattoos: getString(performer.Tattoos), - Twitter: getString(performer.Twitter), - URL: getString(performer.URL), + Aliases: models.NewRelatedStrings(aliases), + Disambiguation: getString(performer.Disambiguation), + Details: getString(performer.Details), + Birthdate: getDate(performer.Birthdate), + DeathDate: getDate(performer.DeathDate), + CareerLength: getString(performer.CareerLength), + Country: getString(performer.Country), + CreatedAt: currentTime, + Ethnicity: getString(performer.Ethnicity), + EyeColor: getString(performer.EyeColor), + HairColor: getString(performer.HairColor), + FakeTits: getString(performer.FakeTits), + Height: getIntPtr(performer.Height), + Weight: getIntPtr(performer.Weight), + Instagram: getString(performer.Instagram), + Measurements: getString(performer.Measurements), + Name: *performer.Name, + Piercings: getString(performer.Piercings), + Tattoos: getString(performer.Tattoos), + Twitter: getString(performer.Twitter), + URL: getString(performer.URL), StashIDs: models.NewRelatedStashIDs([]models.StashID{ { Endpoint: t.box.Endpoint, @@ -146,6 +149,11 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) { UpdatedAt: currentTime, } + if 
performer.Gender != nil { + v := models.GenderEnum(getString(performer.Gender)) + newPerformer.Gender = &v + } + err := txn.WithTxn(ctx, instance.Repository, func(ctx context.Context) error { r := instance.Repository err := r.Performer.Create(ctx, &newPerformer) @@ -192,6 +200,10 @@ func (t *StashBoxPerformerTagTask) getPartial(performer *models.ScrapedPerformer value := getDate(performer.Birthdate) partial.Birthdate = models.NewOptionalDate(*value) } + if performer.DeathDate != nil && *performer.DeathDate != "" && !excluded["deathdate"] { + value := getDate(performer.DeathDate) + partial.DeathDate = models.NewOptionalDate(*value) + } if performer.CareerLength != nil && !excluded["career_length"] { partial.CareerLength = models.NewOptionalString(*performer.CareerLength) } @@ -204,6 +216,9 @@ func (t *StashBoxPerformerTagTask) getPartial(performer *models.ScrapedPerformer if performer.EyeColor != nil && !excluded["eye_color"] { partial.EyeColor = models.NewOptionalString(*performer.EyeColor) } + if performer.HairColor != nil && !excluded["hair_color"] { + partial.HairColor = models.NewOptionalString(*performer.HairColor) + } if performer.FakeTits != nil && !excluded["fake_tits"] { partial.FakeTits = models.NewOptionalString(*performer.FakeTits) } @@ -231,6 +246,9 @@ func (t *StashBoxPerformerTagTask) getPartial(performer *models.ScrapedPerformer if excluded["name"] && performer.Name != nil { partial.Name = models.NewOptionalString(*performer.Name) } + if performer.Disambiguation != nil && !excluded["disambiguation"] { + partial.Disambiguation = models.NewOptionalString(*performer.Disambiguation) + } if performer.Piercings != nil && !excluded["piercings"] { partial.Piercings = models.NewOptionalString(*performer.Piercings) } diff --git a/internal/manager/task_transcode.go b/internal/manager/task_transcode.go index 296042bdd..edda08fbb 100644 --- a/internal/manager/task_transcode.go +++ b/internal/manager/task_transcode.go @@ -101,7 +101,7 @@ func (t 
*GenerateTranscodeTask) Start(ctc context.Context) { // return true if transcode is needed // used only when counting files to generate, doesn't affect the actual transcode generation // if container is missing from DB it is treated as non supported in order not to delay the user -func (t *GenerateTranscodeTask) isTranscodeNeeded() bool { +func (t *GenerateTranscodeTask) required() bool { f := t.Scene.Files.Primary() if f == nil { return false diff --git a/internal/static/embed.go b/internal/static/embed.go index bc49aec12..9be76afa4 100644 --- a/internal/static/embed.go +++ b/internal/static/embed.go @@ -13,3 +13,9 @@ var Scene embed.FS //go:embed image var Image embed.FS + +//go:embed tag +var Tag embed.FS + +//go:embed studio +var Studio embed.FS diff --git a/internal/static/performer_male/Male01.png b/internal/static/performer_male/Male01.png new file mode 100644 index 000000000..8a486299a Binary files /dev/null and b/internal/static/performer_male/Male01.png differ diff --git a/internal/static/performer_male/Male02.png b/internal/static/performer_male/Male02.png new file mode 100644 index 000000000..673b120eb Binary files /dev/null and b/internal/static/performer_male/Male02.png differ diff --git a/internal/static/performer_male/Male03.png b/internal/static/performer_male/Male03.png new file mode 100644 index 000000000..1814d05bb Binary files /dev/null and b/internal/static/performer_male/Male03.png differ diff --git a/internal/static/performer_male/Male04.png b/internal/static/performer_male/Male04.png new file mode 100644 index 000000000..9dd1f0bcc Binary files /dev/null and b/internal/static/performer_male/Male04.png differ diff --git a/internal/static/performer_male/Male05.png b/internal/static/performer_male/Male05.png new file mode 100644 index 000000000..35231f914 Binary files /dev/null and b/internal/static/performer_male/Male05.png differ diff --git a/internal/static/performer_male/Male06.png b/internal/static/performer_male/Male06.png new file mode 
100644 index 000000000..9530d274a Binary files /dev/null and b/internal/static/performer_male/Male06.png differ diff --git a/internal/static/performer_male/noname_male_01.jpg b/internal/static/performer_male/noname_male_01.jpg deleted file mode 100644 index f2c6fe51d..000000000 Binary files a/internal/static/performer_male/noname_male_01.jpg and /dev/null differ diff --git a/internal/static/performer_male/noname_male_02.jpg b/internal/static/performer_male/noname_male_02.jpg deleted file mode 100644 index 93ad7ec9d..000000000 Binary files a/internal/static/performer_male/noname_male_02.jpg and /dev/null differ diff --git a/internal/static/studio/studio.svg b/internal/static/studio/studio.svg new file mode 100644 index 000000000..bea97aa9f --- /dev/null +++ b/internal/static/studio/studio.svg @@ -0,0 +1,7 @@ + + + \ No newline at end of file diff --git a/internal/static/tag/tag.svg b/internal/static/tag/tag.svg new file mode 100644 index 000000000..3e5eb4999 --- /dev/null +++ b/internal/static/tag/tag.svg @@ -0,0 +1,67 @@ + + + + + + + + image/svg+xml + + + + + + + + + \ No newline at end of file diff --git a/pkg/ffmpeg/stream_segmented.go b/pkg/ffmpeg/stream_segmented.go index b12283fc9..fa7347582 100644 --- a/pkg/ffmpeg/stream_segmented.go +++ b/pkg/ffmpeg/stream_segmented.go @@ -20,6 +20,7 @@ import ( "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" "github.com/zencoder/go-dash/v3/mpd" ) @@ -455,7 +456,7 @@ func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, fmt.Fprint(&buf, "#EXT-X-ENDLIST\n") w.Header().Set("Content-Type", MimeHLS) - http.ServeContent(w, r, "", time.Time{}, bytes.NewReader(buf.Bytes())) + utils.ServeStaticContent(w, r, buf.Bytes()) } // serveDASHManifest serves a generated DASH manifest. 
@@ -546,7 +547,7 @@ func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request _ = m.Write(&buf) w.Header().Set("Content-Type", MimeDASH) - http.ServeContent(w, r, "", time.Time{}, bytes.NewReader(buf.Bytes())) + utils.ServeStaticContent(w, r, buf.Bytes()) } func (sm *StreamManager) ServeManifest(w http.ResponseWriter, r *http.Request, streamType *StreamType, vf *file.VideoFile, resolution string) { @@ -561,9 +562,7 @@ func (sm *StreamManager) serveWaitingSegment(w http.ResponseWriter, r *http.Requ if err == nil { logger.Tracef("[transcode] streaming segment file %s", segment.file) w.Header().Set("Content-Type", segment.segmentType.MimeType) - // Prevent caching as segments are generated on the fly - w.Header().Add("Cache-Control", "no-cache") - http.ServeFile(w, r, segment.path) + utils.ServeStaticFile(w, r, segment.path) } else if !errors.Is(err, context.Canceled) { http.Error(w, err.Error(), http.StatusInternalServerError) } diff --git a/pkg/ffmpeg/stream_transcode.go b/pkg/ffmpeg/stream_transcode.go index 7fbfc08a8..cd123183f 100644 --- a/pkg/ffmpeg/stream_transcode.go +++ b/pkg/ffmpeg/stream_transcode.go @@ -260,6 +260,7 @@ func (sm *StreamManager) getTranscodeStream(ctx *fsutil.LockContext, options Tra mimeType := options.StreamType.MimeType handler := func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Cache-Control", "no-store") w.Header().Set("Content-Type", mimeType) w.WriteHeader(http.StatusOK) diff --git a/pkg/ffmpeg/transcoder/image.go b/pkg/ffmpeg/transcoder/image.go index a476dff42..4221a9a54 100644 --- a/pkg/ffmpeg/transcoder/image.go +++ b/pkg/ffmpeg/transcoder/image.go @@ -10,6 +10,7 @@ var ErrUnsupportedFormat = errors.New("unsupported image format") type ImageThumbnailOptions struct { InputFormat ffmpeg.ImageFormat + OutputFormat ffmpeg.ImageFormat OutputPath string MaxDimensions int Quality int @@ -29,12 +30,15 @@ func ImageThumbnail(input string, options ImageThumbnailOptions) ffmpeg.Args { 
VideoFilter(videoFilter). VideoCodec(ffmpeg.VideoCodecMJpeg) + args = append(args, "-frames:v", "1") + if options.Quality > 0 { args = args.FixedQualityScaleVideo(options.Quality) } args = args.ImageFormat(ffmpeg.ImageFormatImage2Pipe). - Output(options.OutputPath) + Output(options.OutputPath). + ImageFormat(options.OutputFormat) return args } diff --git a/pkg/file/file.go b/pkg/file/file.go index 445edba9e..5b6f8d447 100644 --- a/pkg/file/file.go +++ b/pkg/file/file.go @@ -1,16 +1,13 @@ package file import ( + "bytes" "context" - "errors" "io" "io/fs" "net/http" "strconv" - "syscall" "time" - - "github.com/stashapp/stash/pkg/logger" ) // ID represents an ID of a file. @@ -119,8 +116,6 @@ func (f *BaseFile) Info(fs FS) (fs.FileInfo, error) { } func (f *BaseFile) Serve(fs FS, w http.ResponseWriter, r *http.Request) error { - w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week - reader, err := f.Open(fs) if err != nil { return err @@ -128,23 +123,22 @@ func (f *BaseFile) Serve(fs FS, w http.ResponseWriter, r *http.Request) error { defer reader.Close() - rsc, ok := reader.(io.ReadSeeker) + content, ok := reader.(io.ReadSeeker) if !ok { - // fallback to direct copy data, err := io.ReadAll(reader) if err != nil { return err } - - k, err := w.Write(data) - if err != nil && !errors.Is(err, syscall.EPIPE) { - logger.Warnf("error serving file (wrote %v bytes out of %v): %v", k, len(data), err) - } - - return nil + content = bytes.NewReader(data) } - http.ServeContent(w, r, f.Basename, f.ModTime, rsc) + if r.URL.Query().Has("t") { + w.Header().Set("Cache-Control", "private, max-age=31536000, immutable") + } else { + w.Header().Set("Cache-Control", "no-cache") + } + http.ServeContent(w, r, f.Basename, f.ModTime, content) + return nil } diff --git a/pkg/file/folder.go b/pkg/file/folder.go index 719d1a1f9..5ffd7f2b5 100644 --- a/pkg/file/folder.go +++ b/pkg/file/folder.go @@ -6,6 +6,7 @@ import ( "io/fs" "path/filepath" "strconv" + "strings" "time" ) @@ -122,3 
+123,46 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc FolderFinderCreator, pat return folder, nil } + +// TransferZipFolderHierarchy creates the folder hierarchy for zipFileID under newPath, and removes +// ZipFileID from folders under oldPath. +func TransferZipFolderHierarchy(ctx context.Context, folderStore FolderStore, zipFileID ID, oldPath string, newPath string) error { + zipFolders, err := folderStore.FindByZipFileID(ctx, zipFileID) + if err != nil { + return err + } + + for _, oldFolder := range zipFolders { + oldZfPath := oldFolder.Path + + // sanity check - ignore folders which aren't under oldPath + if !strings.HasPrefix(oldZfPath, oldPath) { + continue + } + + relZfPath, err := filepath.Rel(oldPath, oldZfPath) + if err != nil { + return err + } + newZfPath := filepath.Join(newPath, relZfPath) + + newFolder, err := GetOrCreateFolderHierarchy(ctx, folderStore, newZfPath) + if err != nil { + return err + } + + // add ZipFileID to new folder + newFolder.ZipFileID = &zipFileID + if err = folderStore.Update(ctx, newFolder); err != nil { + return err + } + + // remove ZipFileID from old folder + oldFolder.ZipFileID = nil + if err = folderStore.Update(ctx, oldFolder); err != nil { + return err + } + } + + return nil +} diff --git a/pkg/file/frame.go b/pkg/file/frame.go new file mode 100644 index 000000000..de9f74662 --- /dev/null +++ b/pkg/file/frame.go @@ -0,0 +1,20 @@ +package file + +// VisualFile is an interface for files that have a width and height. 
+type VisualFile interface { + File + GetWidth() int + GetHeight() int + GetFormat() string +} + +func GetMinResolution(f VisualFile) int { + w := f.GetWidth() + h := f.GetHeight() + + if w < h { + return w + } + + return h +} diff --git a/pkg/file/image/scan.go b/pkg/file/image/scan.go index a029f5cce..ec4ce542b 100644 --- a/pkg/file/image/scan.go +++ b/pkg/file/image/scan.go @@ -9,12 +9,15 @@ import ( _ "image/jpeg" _ "image/png" + "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/file/video" _ "golang.org/x/image/webp" ) // Decorator adds image specific fields to a File. type Decorator struct { + FFProbe ffmpeg.FFProbe } func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file.File, error) { @@ -25,16 +28,38 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file } defer r.Close() - c, format, err := image.DecodeConfig(r) + probe, err := d.FFProbe.NewVideoFile(base.Path) if err != nil { - return f, fmt.Errorf("decoding image file %q: %w", base.Path, err) + fmt.Printf("Warning: File %q could not be read with ffprobe: %s, assuming ImageFile", base.Path, err) + c, format, err := image.DecodeConfig(r) + if err != nil { + return f, fmt.Errorf("decoding image file %q: %w", base.Path, err) + } + return &file.ImageFile{ + BaseFile: base, + Format: format, + Width: c.Width, + Height: c.Height, + }, nil + } + + isClip := true + // This list is derived from ffmpegImageThumbnail in pkg/image/thumbnail. 
If one gets updated, the other should be as well + for _, item := range []string{"png", "mjpeg", "webp"} { + if item == probe.VideoCodec { + isClip = false + } + } + if isClip { + videoFileDecorator := video.Decorator{FFProbe: d.FFProbe} + return videoFileDecorator.Decorate(ctx, fs, f) } return &file.ImageFile{ BaseFile: base, - Format: format, - Width: c.Width, - Height: c.Height, + Format: probe.VideoCodec, + Width: probe.Width, + Height: probe.Height, }, nil } @@ -44,10 +69,16 @@ func (d *Decorator) IsMissingMetadata(ctx context.Context, fs file.FS, f file.Fi unsetNumber = -1 ) - imf, ok := f.(*file.ImageFile) - if !ok { + imf, isImage := f.(*file.ImageFile) + vf, isVideo := f.(*file.VideoFile) + + switch { + case isImage: + return imf.Format == unsetString || imf.Width == unsetNumber || imf.Height == unsetNumber + case isVideo: + videoFileDecorator := video.Decorator{FFProbe: d.FFProbe} + return videoFileDecorator.IsMissingMetadata(ctx, fs, vf) + default: return true } - - return imf.Format == unsetString || imf.Width == unsetNumber || imf.Height == unsetNumber } diff --git a/pkg/file/image_file.go b/pkg/file/image_file.go index 4e1f5690a..0de2d9b98 100644 --- a/pkg/file/image_file.go +++ b/pkg/file/image_file.go @@ -7,3 +7,15 @@ type ImageFile struct { Width int `json:"width"` Height int `json:"height"` } + +func (f ImageFile) GetWidth() int { + return f.Width +} + +func (f ImageFile) GetHeight() int { + return f.Height +} + +func (f ImageFile) GetFormat() string { + return f.Format +} diff --git a/pkg/file/move.go b/pkg/file/move.go index 3e29e328c..3b3c66ec5 100644 --- a/pkg/file/move.go +++ b/pkg/file/move.go @@ -87,7 +87,7 @@ func (m *Mover) Move(ctx context.Context, f File, folder *Folder, basename strin return fmt.Errorf("file %s already exists", newPath) } - if err := m.transferZipFolderHierarchy(ctx, fBase.ID, oldPath, newPath); err != nil { + if err := TransferZipFolderHierarchy(ctx, m.Folders, fBase.ID, oldPath, newPath); err != nil { return 
fmt.Errorf("moving folder hierarchy for file %s: %w", fBase.Path, err) } @@ -166,49 +166,6 @@ func (m *Mover) CreateFolderHierarchy(path string) error { return nil } -// transferZipFolderHierarchy creates the folder hierarchy for zipFileID under newPath, and removes -// ZipFileID from folders under oldPath. -func (m *Mover) transferZipFolderHierarchy(ctx context.Context, zipFileID ID, oldPath string, newPath string) error { - zipFolders, err := m.Folders.FindByZipFileID(ctx, zipFileID) - if err != nil { - return err - } - - for _, oldFolder := range zipFolders { - oldZfPath := oldFolder.Path - - // sanity check - ignore folders which aren't under oldPath - if !strings.HasPrefix(oldZfPath, oldPath) { - continue - } - - relZfPath, err := filepath.Rel(oldPath, oldZfPath) - if err != nil { - return err - } - newZfPath := filepath.Join(newPath, relZfPath) - - newFolder, err := GetOrCreateFolderHierarchy(ctx, m.Folders, newZfPath) - if err != nil { - return err - } - - // add ZipFileID to new folder - newFolder.ZipFileID = &zipFileID - if err = m.Folders.Update(ctx, newFolder); err != nil { - return err - } - - // remove ZipFileID from old folder - oldFolder.ZipFileID = nil - if err = m.Folders.Update(ctx, oldFolder); err != nil { - return err - } - } - - return nil -} - func (m *Mover) moveFile(oldPath, newPath string) error { if err := m.Renamer.Rename(oldPath, newPath); err != nil { return fmt.Errorf("renaming file %s to %s: %w", oldPath, newPath, err) diff --git a/pkg/file/scan.go b/pkg/file/scan.go index 148f18691..dcd625ff6 100644 --- a/pkg/file/scan.go +++ b/pkg/file/scan.go @@ -523,13 +523,29 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, erro } func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *Folder) (*Folder, error) { - // check if the mod time is changed + update := false + + // update if mod time is changed entryModTime := f.ModTime - if !entryModTime.Equal(existing.ModTime) { - // update entry in 
store existing.ModTime = entryModTime + update = true + } + // update if zip file ID has changed + fZfID := f.ZipFileID + existingZfID := existing.ZipFileID + if fZfID != existingZfID { + if fZfID == nil { + existing.ZipFileID = nil + update = true + } else if existingZfID == nil || *fZfID != *existingZfID { + existing.ZipFileID = fZfID + update = true + } + } + + if update { var err error if err = s.Repository.FolderStore.Update(ctx, existing); err != nil { return nil, fmt.Errorf("updating folder %q: %w", f.Path, err) @@ -753,7 +769,14 @@ func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (F var missing []File + fZipID := f.Base().ZipFileID for _, other := range others { + // if file is from a zip file, then only rename if both files are from the same zip file + otherZipID := other.Base().ZipFileID + if otherZipID != nil && (fZipID == nil || *otherZipID != *fZipID) { + continue + } + // if file does not exist, then update it to the new path fs, err := s.getFileFS(other.Base()) if err != nil { @@ -798,6 +821,12 @@ func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (F return fmt.Errorf("updating file for rename %q: %w", fBase.Path, err) } + if s.isZipFile(fBase.Basename) { + if err := TransferZipFolderHierarchy(ctx, s.Repository.FolderStore, fBase.ID, otherBase.Path, fBase.Path); err != nil { + return fmt.Errorf("moving folder hierarchy for renamed zip file %q: %w", fBase.Path, err) + } + } + if err := s.fireHandlers(ctx, f, other); err != nil { return err } diff --git a/pkg/file/video_file.go b/pkg/file/video_file.go index ec08aad87..382c81e19 100644 --- a/pkg/file/video_file.go +++ b/pkg/file/video_file.go @@ -16,13 +16,14 @@ type VideoFile struct { InteractiveSpeed *int `json:"interactive_speed"` } -func (f VideoFile) GetMinResolution() int { - w := f.Width - h := f.Height - - if w < h { - return w - } - - return h +func (f VideoFile) GetWidth() int { + return f.Width +} + +func (f VideoFile) GetHeight() int { 
+ return f.Height +} + +func (f VideoFile) GetFormat() string { + return f.Format } diff --git a/pkg/fsutil/file.go b/pkg/fsutil/file.go index 7d91679fe..1bf982666 100644 --- a/pkg/fsutil/file.go +++ b/pkg/fsutil/file.go @@ -11,29 +11,55 @@ import ( "github.com/stashapp/stash/pkg/logger" ) +// CopyFile copies the contents of the file at srcpath to a regular file at dstpath. +// It will copy the last modified timestamp +// If dstpath already exists the function will fail. +func CopyFile(srcpath, dstpath string) (err error) { + r, err := os.Open(srcpath) + if err != nil { + return err + } + + w, err := os.OpenFile(dstpath, os.O_CREATE|os.O_EXCL, 0666) + if err != nil { + r.Close() // We need to close the input file as the defer below would not be called. + return err + } + + defer func() { + r.Close() // ok to ignore error: file was opened read-only. + e := w.Close() + // Report the error from w.Close, if any. + // But do so only if there isn't already an outgoing error. + if e != nil && err == nil { + err = e + } + // Copy modified time + if err == nil { + // io.Copy succeeded, we should fix the dstpath timestamp + srcFileInfo, e := os.Stat(srcpath) + if e != nil { + err = e + return + } + + e = os.Chtimes(dstpath, srcFileInfo.ModTime(), srcFileInfo.ModTime()) + if e != nil { + err = e + } + } + }() + + _, err = io.Copy(w, r) + return err +} + // SafeMove attempts to move the file with path src to dest using os.Rename. If this fails, then it copies src to dest, then deletes src. 
func SafeMove(src, dst string) error { err := os.Rename(src, dst) if err != nil { - in, err := os.Open(src) - if err != nil { - return err - } - defer in.Close() - - out, err := os.Create(dst) - if err != nil { - return err - } - defer out.Close() - - _, err = io.Copy(out, in) - if err != nil { - return err - } - - err = out.Close() + err = CopyFile(src, dst) if err != nil { return err } diff --git a/pkg/gallery/service.go b/pkg/gallery/service.go index acf70763f..7dfc3857f 100644 --- a/pkg/gallery/service.go +++ b/pkg/gallery/service.go @@ -18,6 +18,11 @@ type Repository interface { Destroy(ctx context.Context, id int) error models.FileLoader ImageUpdater + PartialUpdater +} + +type PartialUpdater interface { + UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) } type ImageFinder interface { diff --git a/pkg/gallery/update.go b/pkg/gallery/update.go index 5350499ac..72f479bea 100644 --- a/pkg/gallery/update.go +++ b/pkg/gallery/update.go @@ -2,20 +2,25 @@ package gallery import ( "context" + "fmt" + "time" "github.com/stashapp/stash/pkg/models" ) -type PartialUpdater interface { - UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) -} - type ImageUpdater interface { GetImageIDs(ctx context.Context, galleryID int) ([]int, error) AddImages(ctx context.Context, galleryID int, imageIDs ...int) error RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error } +func (s *Service) Updated(ctx context.Context, galleryID int) error { + _, err := s.Repository.UpdatePartial(ctx, galleryID, models.GalleryPartial{ + UpdatedAt: models.NewOptionalTime(time.Now()), + }) + return err +} + // AddImages adds images to the provided gallery. // It returns an error if the gallery does not support adding images, or if // the operation fails. 
@@ -24,7 +29,12 @@ func (s *Service) AddImages(ctx context.Context, g *models.Gallery, toAdd ...int return err } - return s.Repository.AddImages(ctx, g.ID, toAdd...) + if err := s.Repository.AddImages(ctx, g.ID, toAdd...); err != nil { + return fmt.Errorf("failed to add images to gallery: %w", err) + } + + // #3759 - update the gallery's UpdatedAt timestamp + return s.Updated(ctx, g.ID) } // RemoveImages removes images from the provided gallery. @@ -36,7 +46,12 @@ func (s *Service) RemoveImages(ctx context.Context, g *models.Gallery, toRemove return err } - return s.Repository.RemoveImages(ctx, g.ID, toRemove...) + if err := s.Repository.RemoveImages(ctx, g.ID, toRemove...); err != nil { + return fmt.Errorf("failed to remove images from gallery: %w", err) + } + + // #3759 - update the gallery's UpdatedAt timestamp + return s.Updated(ctx, g.ID) } func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Gallery, performerID int) error { diff --git a/pkg/image/delete.go b/pkg/image/delete.go index b61e77045..dba0fd587 100644 --- a/pkg/image/delete.go +++ b/pkg/image/delete.go @@ -22,13 +22,19 @@ type FileDeleter struct { // MarkGeneratedFiles marks for deletion the generated files for the provided image. func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error { + var files []string thumbPath := d.Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth) exists, _ := fsutil.FileExists(thumbPath) if exists { - return d.Files([]string{thumbPath}) + files = append(files, thumbPath) + } + prevPath := d.Paths.Generated.GetClipPreviewPath(image.Checksum, models.DefaultGthumbWidth) + exists, _ = fsutil.FileExists(prevPath) + if exists { + files = append(files, prevPath) } - return nil + return d.Files(files) } // Destroy destroys an image, optionally marking the file and generated files for deletion. 
@@ -87,7 +93,7 @@ func (s *Service) deleteFiles(ctx context.Context, i *models.Image, fileDeleter for _, f := range i.Files.List() { // only delete files where there is no other associated image - otherImages, err := s.Repository.FindByFileID(ctx, f.ID) + otherImages, err := s.Repository.FindByFileID(ctx, f.Base().ID) if err != nil { return err } @@ -99,7 +105,7 @@ func (s *Service) deleteFiles(ctx context.Context, i *models.Image, fileDeleter // don't delete files in zip archives const deleteFile = true - if f.ZipFileID == nil { + if f.Base().ZipFileID == nil { if err := file.Destroy(ctx, s.File, f, fileDeleter.Deleter, deleteFile); err != nil { return err } diff --git a/pkg/image/export_test.go b/pkg/image/export_test.go index 7f3393d6f..64a0ebb28 100644 --- a/pkg/image/export_test.go +++ b/pkg/image/export_test.go @@ -45,11 +45,9 @@ var ( func createFullImage(id int) models.Image { return models.Image{ ID: id, - Files: models.NewRelatedImageFiles([]*file.ImageFile{ - { - BaseFile: &file.BaseFile{ - Path: path, - }, + Files: models.NewRelatedFiles([]file.File{ + &file.BaseFile{ + Path: path, }, }), Title: title, diff --git a/pkg/image/import.go b/pkg/image/import.go index b5e54e594..6dfc0bde8 100644 --- a/pkg/image/import.go +++ b/pkg/image/import.go @@ -97,7 +97,7 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image { } func (i *Importer) populateFiles(ctx context.Context) error { - files := make([]*file.ImageFile, 0) + files := make([]file.File, 0) for _, ref := range i.Input.Files { path := ref @@ -109,11 +109,11 @@ func (i *Importer) populateFiles(ctx context.Context) error { if f == nil { return fmt.Errorf("image file '%s' not found", path) } else { - files = append(files, f.(*file.ImageFile)) + files = append(files, f) } } - i.image.Files = models.NewRelatedImageFiles(files) + i.image.Files = models.NewRelatedFiles(files) return nil } @@ -311,7 +311,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { var 
err error for _, f := range i.image.Files.List() { - existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID) + existing, err = i.ReaderWriter.FindByFileID(ctx, f.Base().ID) if err != nil { return nil, err } diff --git a/pkg/image/scan.go b/pkg/image/scan.go index 4c5280f6b..d28d94a86 100644 --- a/pkg/image/scan.go +++ b/pkg/image/scan.go @@ -29,25 +29,29 @@ type FinderCreatorUpdater interface { UpdatePartial(ctx context.Context, id int, updatedImage models.ImagePartial) (*models.Image, error) AddFileID(ctx context.Context, id int, fileID file.ID) error models.GalleryIDLoader - models.ImageFileLoader + models.FileLoader } type GalleryFinderCreator interface { FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) Create(ctx context.Context, newObject *models.Gallery, fileIDs []file.ID) error + UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) } type ScanConfig interface { GetCreateGalleriesFromFolders() bool - IsGenerateThumbnails() bool +} + +type ScanGenerator interface { + Generate(ctx context.Context, i *models.Image, f file.File) error } type ScanHandler struct { CreatorUpdater FinderCreatorUpdater GalleryFinder GalleryFinderCreator - ThumbnailGenerator ThumbnailGenerator + ScanGenerator ScanGenerator ScanConfig ScanConfig @@ -60,6 +64,9 @@ func (h *ScanHandler) validate() error { if h.CreatorUpdater == nil { return errors.New("CreatorUpdater is required") } + if h.ScanGenerator == nil { + return errors.New("ScanGenerator is required") + } if h.GalleryFinder == nil { return errors.New("GalleryFinder is required") } @@ -78,10 +85,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File return err } - imageFile, ok := f.(*file.ImageFile) - if !ok { - return ErrNotImageFile - } + imageFile := f.Base() // try to match the file to an image existing, err := 
h.CreatorUpdater.FindByFileID(ctx, imageFile.ID) @@ -114,10 +118,16 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File logger.Infof("%s doesn't exist. Creating new image...", f.Base().Path) - if _, err := h.associateGallery(ctx, newImage, imageFile); err != nil { + g, err := h.getGalleryToAssociate(ctx, newImage, f) + if err != nil { return err } + if g != nil { + newImage.GalleryIDs.Add(g.ID) + logger.Infof("Adding %s to gallery %s", f.Base().Path, g.Path) + } + if err := h.CreatorUpdater.Create(ctx, &models.ImageCreateInput{ Image: newImage, FileIDs: []file.ID{imageFile.ID}, @@ -125,6 +135,15 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File return fmt.Errorf("creating new image: %w", err) } + // update the gallery updated at timestamp if applicable + if g != nil { + if _, err := h.GalleryFinder.UpdatePartial(ctx, g.ID, models.GalleryPartial{ + UpdatedAt: models.NewOptionalTime(time.Now()), + }); err != nil { + return fmt.Errorf("updating gallery updated at timestamp: %w", err) + } + } + h.PluginCache.RegisterPostHooks(ctx, newImage.ID, plugin.ImageCreatePost, nil, nil) existing = []*models.Image{newImage} @@ -141,22 +160,20 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File } } - if h.ScanConfig.IsGenerateThumbnails() { - // do this after the commit so that the transaction isn't held up - txn.AddPostCommitHook(ctx, func(ctx context.Context) { - for _, s := range existing { - if err := h.ThumbnailGenerator.GenerateThumbnail(ctx, s, imageFile); err != nil { - // just log if cover generation fails. 
We can try again on rescan - logger.Errorf("Error generating thumbnail for %s: %v", imageFile.Path, err) - } + // do this after the commit so that generation doesn't hold up the transaction + txn.AddPostCommitHook(ctx, func(ctx context.Context) { + for _, s := range existing { + if err := h.ScanGenerator.Generate(ctx, s, f); err != nil { + // just log if cover generation fails. We can try again on rescan + logger.Errorf("Error generating content for %s: %v", imageFile.Path, err) } - }) - } + } + }) return nil } -func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Image, f *file.ImageFile, updateExisting bool) error { +func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Image, f *file.BaseFile, updateExisting bool) error { for _, i := range existing { if err := i.LoadFiles(ctx, h.CreatorUpdater); err != nil { return err @@ -164,23 +181,25 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. found := false for _, sf := range i.Files.List() { - if sf.ID == f.Base().ID { + if sf.Base().ID == f.Base().ID { found = true break } } // associate with gallery if applicable - changed, err := h.associateGallery(ctx, i, f) + g, err := h.getGalleryToAssociate(ctx, i, f) if err != nil { return err } var galleryIDs *models.UpdateIDs - if changed { + changed := false + if g != nil { + changed = true galleryIDs = &models.UpdateIDs{ - IDs: i.GalleryIDs.List(), - Mode: models.RelationshipUpdateModeSet, + IDs: []int{g.ID}, + Mode: models.RelationshipUpdateModeAdd, } } @@ -202,6 +221,14 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. 
}); err != nil { return fmt.Errorf("updating image: %w", err) } + + if g != nil { + if _, err := h.GalleryFinder.UpdatePartial(ctx, g.ID, models.GalleryPartial{ + UpdatedAt: models.NewOptionalTime(time.Now()), + }); err != nil { + return fmt.Errorf("updating gallery updated at timestamp: %w", err) + } + } } if changed || updateExisting { @@ -307,29 +334,42 @@ func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f file.File) (*mod return h.getOrCreateZipBasedGallery(ctx, f.Base().ZipFile) } - if h.ScanConfig.GetCreateGalleriesFromFolders() { + // Look for specific filename in Folder to find out if the Folder is marked to be handled differently as the setting + folderPath := filepath.Dir(f.Base().Path) + + forceGallery := false + if _, err := os.Stat(filepath.Join(folderPath, ".forcegallery")); err == nil { + forceGallery = true + } else if !errors.Is(err, os.ErrNotExist) { + return nil, fmt.Errorf("Could not test Path %s: %w", folderPath, err) + } + exemptGallery := false + if _, err := os.Stat(filepath.Join(folderPath, ".nogallery")); err == nil { + exemptGallery = true + } else if !errors.Is(err, os.ErrNotExist) { + return nil, fmt.Errorf("Could not test Path %s: %w", folderPath, err) + } + + if forceGallery || (h.ScanConfig.GetCreateGalleriesFromFolders() && !exemptGallery) { return h.getOrCreateFolderBasedGallery(ctx, f) } return nil, nil } -func (h *ScanHandler) associateGallery(ctx context.Context, newImage *models.Image, f file.File) (bool, error) { +func (h *ScanHandler) getGalleryToAssociate(ctx context.Context, newImage *models.Image, f file.File) (*models.Gallery, error) { g, err := h.getOrCreateGallery(ctx, f) if err != nil { - return false, err + return nil, err } if err := newImage.LoadGalleryIDs(ctx, h.CreatorUpdater); err != nil { - return false, err + return nil, err } - ret := false if g != nil && !intslice.IntInclude(newImage.GalleryIDs.List(), g.ID) { - ret = true - newImage.GalleryIDs.Add(g.ID) - logger.Infof("Adding %s to gallery %s", 
f.Base().Path, g.Path) + return g, nil } - return ret, nil + return nil, nil } diff --git a/pkg/image/service.go b/pkg/image/service.go index 667317735..5aacc4e59 100644 --- a/pkg/image/service.go +++ b/pkg/image/service.go @@ -15,7 +15,7 @@ type FinderByFile interface { type Repository interface { FinderByFile Destroyer - models.ImageFileLoader + models.FileLoader } type Service struct { diff --git a/pkg/image/thumbnail.go b/pkg/image/thumbnail.go index 80c2139cc..dc07b0f55 100644 --- a/pkg/image/thumbnail.go +++ b/pkg/image/thumbnail.go @@ -6,13 +6,14 @@ import ( "errors" "fmt" "os/exec" + "path/filepath" "runtime" "sync" "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/ffmpeg/transcoder" "github.com/stashapp/stash/pkg/file" - "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/fsutil" ) const ffmpegImageQuality = 5 @@ -27,13 +28,17 @@ var ( ErrNotSupportedForThumbnail = errors.New("unsupported image format for thumbnail") ) -type ThumbnailGenerator interface { - GenerateThumbnail(ctx context.Context, i *models.Image, f *file.ImageFile) error +type ThumbnailEncoder struct { + FFMpeg *ffmpeg.FFMpeg + FFProbe ffmpeg.FFProbe + ClipPreviewOptions ClipPreviewOptions + vips *vipsEncoder } -type ThumbnailEncoder struct { - ffmpeg *ffmpeg.FFMpeg - vips *vipsEncoder +type ClipPreviewOptions struct { + InputArgs []string + OutputArgs []string + Preset string } func GetVipsPath() string { @@ -43,9 +48,11 @@ func GetVipsPath() string { return vipsPath } -func NewThumbnailEncoder(ffmpegEncoder *ffmpeg.FFMpeg) ThumbnailEncoder { +func NewThumbnailEncoder(ffmpegEncoder *ffmpeg.FFMpeg, ffProbe ffmpeg.FFProbe, clipPreviewOptions ClipPreviewOptions) ThumbnailEncoder { ret := ThumbnailEncoder{ - ffmpeg: ffmpegEncoder, + FFMpeg: ffmpegEncoder, + FFProbe: ffProbe, + ClipPreviewOptions: clipPreviewOptions, } vipsPath := GetVipsPath() @@ -61,7 +68,7 @@ func NewThumbnailEncoder(ffmpegEncoder *ffmpeg.FFMpeg) ThumbnailEncoder { // the provided max 
size. It resizes based on the largest X/Y direction. // It returns nil and an error if an error occurs reading, decoding or encoding // the image, or if the image is not suitable for thumbnails. -func (e *ThumbnailEncoder) GetThumbnail(f *file.ImageFile, maxSize int) ([]byte, error) { +func (e *ThumbnailEncoder) GetThumbnail(f file.File, maxSize int) ([]byte, error) { reader, err := f.Open(&file.OsFS{}) if err != nil { return nil, err @@ -75,47 +82,103 @@ func (e *ThumbnailEncoder) GetThumbnail(f *file.ImageFile, maxSize int) ([]byte, data := buf.Bytes() - format := f.Format - animated := f.Format == formatGif + if imageFile, ok := f.(*file.ImageFile); ok { + format := imageFile.Format + animated := imageFile.Format == formatGif - // #2266 - if image is webp, then determine if it is animated - if format == formatWebP { - animated = isWebPAnimated(data) + // #2266 - if image is webp, then determine if it is animated + if format == formatWebP { + animated = isWebPAnimated(data) + } + + // #2266 - don't generate a thumbnail for animated images + if animated { + return nil, fmt.Errorf("%w: %s", ErrNotSupportedForThumbnail, format) + } } - // #2266 - don't generate a thumbnail for animated images - if animated { - return nil, fmt.Errorf("%w: %s", ErrNotSupportedForThumbnail, format) + // Videofiles can only be thumbnailed with ffmpeg + if _, ok := f.(*file.VideoFile); ok { + return e.ffmpegImageThumbnail(buf, maxSize) } // vips has issues loading files from stdin on Windows if e.vips != nil && runtime.GOOS != "windows" { return e.vips.ImageThumbnail(buf, maxSize) } else { - return e.ffmpegImageThumbnail(buf, format, maxSize) + return e.ffmpegImageThumbnail(buf, maxSize) } } -func (e *ThumbnailEncoder) ffmpegImageThumbnail(image *bytes.Buffer, format string, maxSize int) ([]byte, error) { - var ffmpegFormat ffmpeg.ImageFormat - - switch format { - case "jpeg": - ffmpegFormat = ffmpeg.ImageFormatJpeg - case "png": - ffmpegFormat = ffmpeg.ImageFormatPng - case "webp": - 
ffmpegFormat = ffmpeg.ImageFormatWebp - default: - return nil, ErrUnsupportedImageFormat +// GetPreview returns the preview clip of the provided image clip resized to +// the provided max size. It resizes based on the largest X/Y direction. +// It is hardcoded to 30 seconds maximum right now +func (e *ThumbnailEncoder) GetPreview(inPath string, outPath string, maxSize int) error { + fileData, err := e.FFProbe.NewVideoFile(inPath) + if err != nil { + return err } + if fileData.Width <= maxSize { + maxSize = fileData.Width + } + clipDuration := fileData.VideoStreamDuration + if clipDuration > 30.0 { + clipDuration = 30.0 + } + return e.getClipPreview(inPath, outPath, maxSize, clipDuration, fileData.FrameRate) +} +func (e *ThumbnailEncoder) ffmpegImageThumbnail(image *bytes.Buffer, maxSize int) ([]byte, error) { args := transcoder.ImageThumbnail("-", transcoder.ImageThumbnailOptions{ - InputFormat: ffmpegFormat, + OutputFormat: ffmpeg.ImageFormatJpeg, OutputPath: "-", MaxDimensions: maxSize, Quality: ffmpegImageQuality, }) - return e.ffmpeg.GenerateOutput(context.TODO(), args, image) + return e.FFMpeg.GenerateOutput(context.TODO(), args, image) +} + +func (e *ThumbnailEncoder) getClipPreview(inPath string, outPath string, maxSize int, clipDuration float64, frameRate float64) error { + var thumbFilter ffmpeg.VideoFilter + thumbFilter = thumbFilter.ScaleMaxSize(maxSize) + + var thumbArgs ffmpeg.Args + thumbArgs = thumbArgs.VideoFilter(thumbFilter) + + o := e.ClipPreviewOptions + + thumbArgs = append(thumbArgs, + "-pix_fmt", "yuv420p", + "-preset", o.Preset, + "-crf", "25", + "-threads", "4", + "-strict", "-2", + "-f", "webm", + ) + + if frameRate <= 0.01 { + thumbArgs = append(thumbArgs, "-vsync", "2") + } + + thumbOptions := transcoder.TranscodeOptions{ + OutputPath: outPath, + StartTime: 0, + Duration: clipDuration, + + XError: true, + SlowSeek: false, + + VideoCodec: ffmpeg.VideoCodecVP9, + VideoArgs: thumbArgs, + + ExtraInputArgs: o.InputArgs, + ExtraOutputArgs: 
o.OutputArgs, + } + + if err := fsutil.EnsureDirAll(filepath.Dir(outPath)); err != nil { + return err + } + args := transcoder.Transcode(inPath, thumbOptions) + return e.FFMpeg.Generate(context.TODO(), args) } diff --git a/pkg/logger/basic.go b/pkg/logger/basic.go index d872777d5..3995eddfe 100644 --- a/pkg/logger/basic.go +++ b/pkg/logger/basic.go @@ -31,6 +31,11 @@ func (log *BasicLogger) Tracef(format string, args ...interface{}) { log.printf("Trace", format, args...) } +func (log *BasicLogger) TraceFunc(fn func() (string, []interface{})) { + format, args := fn() + log.printf("Trace", format, args...) +} + func (log *BasicLogger) Debug(args ...interface{}) { log.print("Debug", args...) } @@ -39,6 +44,11 @@ func (log *BasicLogger) Debugf(format string, args ...interface{}) { log.printf("Debug", format, args...) } +func (log *BasicLogger) DebugFunc(fn func() (string, []interface{})) { + format, args := fn() + log.printf("Debug", format, args...) +} + func (log *BasicLogger) Info(args ...interface{}) { log.print("Info", args...) } @@ -47,6 +57,11 @@ func (log *BasicLogger) Infof(format string, args ...interface{}) { log.printf("Info", format, args...) } +func (log *BasicLogger) InfoFunc(fn func() (string, []interface{})) { + format, args := fn() + log.printf("Info", format, args...) +} + func (log *BasicLogger) Warn(args ...interface{}) { log.print("Warn", args...) } @@ -55,6 +70,11 @@ func (log *BasicLogger) Warnf(format string, args ...interface{}) { log.printf("Warn", format, args...) } +func (log *BasicLogger) WarnFunc(fn func() (string, []interface{})) { + format, args := fn() + log.printf("Warn", format, args...) +} + func (log *BasicLogger) Error(args ...interface{}) { log.print("Error", args...) } @@ -63,6 +83,11 @@ func (log *BasicLogger) Errorf(format string, args ...interface{}) { log.printf("Error", format, args...) } +func (log *BasicLogger) ErrorFunc(fn func() (string, []interface{})) { + format, args := fn() + log.printf("Error", format, args...) 
+} + func (log *BasicLogger) Fatal(args ...interface{}) { log.print("Fatal", args...) os.Exit(1) diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go index f97faf9f8..12ddd5053 100644 --- a/pkg/logger/logger.go +++ b/pkg/logger/logger.go @@ -16,18 +16,23 @@ type LoggerImpl interface { Trace(args ...interface{}) Tracef(format string, args ...interface{}) + TraceFunc(fn func() (string, []interface{})) Debug(args ...interface{}) Debugf(format string, args ...interface{}) + DebugFunc(fn func() (string, []interface{})) Info(args ...interface{}) Infof(format string, args ...interface{}) + InfoFunc(fn func() (string, []interface{})) Warn(args ...interface{}) Warnf(format string, args ...interface{}) + WarnFunc(fn func() (string, []interface{})) Error(args ...interface{}) Errorf(format string, args ...interface{}) + ErrorFunc(fn func() (string, []interface{})) Fatal(args ...interface{}) Fatalf(format string, args ...interface{}) @@ -61,6 +66,14 @@ func Tracef(format string, args ...interface{}) { } } +// TraceFunc calls TraceFunc with the Logger registered using RegisterLogger. +// If no logger has been registered, then this function is a no-op. +func TraceFunc(fn func() (string, []interface{})) { + if Logger != nil { + Logger.TraceFunc(fn) + } +} + // Debug calls Debug with the Logger registered using RegisterLogger. // If no logger has been registered, then this function is a no-op. func Debug(args ...interface{}) { @@ -77,6 +90,14 @@ func Debugf(format string, args ...interface{}) { } } +// DebugFunc calls DebugFunc with the Logger registered using RegisterLogger. +// If no logger has been registered, then this function is a no-op. +func DebugFunc(fn func() (string, []interface{})) { + if Logger != nil { + Logger.DebugFunc(fn) + } +} + // Info calls Info with the Logger registered using RegisterLogger. // If no logger has been registered, then this function is a no-op. 
func Info(args ...interface{}) { @@ -93,6 +114,14 @@ func Infof(format string, args ...interface{}) { } } +// InfoFunc calls InfoFunc with the Logger registered using RegisterLogger. +// If no logger has been registered, then this function is a no-op. +func InfoFunc(fn func() (string, []interface{})) { + if Logger != nil { + Logger.InfoFunc(fn) + } +} + // Warn calls Warn with the Logger registered using RegisterLogger. // If no logger has been registered, then this function is a no-op. func Warn(args ...interface{}) { @@ -109,6 +138,14 @@ func Warnf(format string, args ...interface{}) { } } +// WarnFunc calls WarnFunc with the Logger registered using RegisterLogger. +// If no logger has been registered, then this function is a no-op. +func WarnFunc(fn func() (string, []interface{})) { + if Logger != nil { + Logger.WarnFunc(fn) + } +} + // Error calls Error with the Logger registered using RegisterLogger. // If no logger has been registered, then this function is a no-op. func Error(args ...interface{}) { @@ -125,6 +162,14 @@ func Errorf(format string, args ...interface{}) { } } +// ErrorFunc calls ErrorFunc with the Logger registered using RegisterLogger. +// If no logger has been registered, then this function is a no-op. +func ErrorFunc(fn func() (string, []interface{})) { + if Logger != nil { + Logger.ErrorFunc(fn) + } +} + // Fatal calls Fatal with the Logger registered using RegisterLogger. // If no logger has been registered, then this function is a no-op. 
func Fatal(args ...interface{}) { diff --git a/pkg/models/filter.go b/pkg/models/filter.go index d614f262e..e9ddf7ab3 100644 --- a/pkg/models/filter.go +++ b/pkg/models/filter.go @@ -109,6 +109,20 @@ func (i IntCriterionInput) ValidModifier() bool { return false } +type FloatCriterionInput struct { + Value float64 `json:"value"` + Value2 *float64 `json:"value2"` + Modifier CriterionModifier `json:"modifier"` +} + +func (i FloatCriterionInput) ValidModifier() bool { + switch i.Modifier { + case CriterionModifierEquals, CriterionModifierNotEquals, CriterionModifierGreaterThan, CriterionModifierLessThan, CriterionModifierIsNull, CriterionModifierNotNull, CriterionModifierBetween, CriterionModifierNotBetween: + return true + } + return false +} + type ResolutionCriterionInput struct { Value ResolutionEnum `json:"value"` Modifier CriterionModifier `json:"modifier"` @@ -118,11 +132,24 @@ type HierarchicalMultiCriterionInput struct { Value []string `json:"value"` Modifier CriterionModifier `json:"modifier"` Depth *int `json:"depth"` + Excludes []string `json:"excludes"` +} + +func (i HierarchicalMultiCriterionInput) CombineExcludes() HierarchicalMultiCriterionInput { + ii := i + if ii.Modifier == CriterionModifierExcludes { + ii.Modifier = CriterionModifierIncludesAll + ii.Excludes = append(ii.Excludes, ii.Value...) 
+ ii.Value = nil + } + + return ii } type MultiCriterionInput struct { Value []string `json:"value"` Modifier CriterionModifier `json:"modifier"` + Excludes []string `json:"excludes"` } type DateCriterionInput struct { @@ -136,3 +163,9 @@ type TimestampCriterionInput struct { Value2 *string `json:"value2"` Modifier CriterionModifier `json:"modifier"` } + +type PhashDistanceCriterionInput struct { + Value string `json:"value"` + Modifier CriterionModifier `json:"modifier"` + Distance *int `json:"distance"` +} diff --git a/pkg/models/generate.go b/pkg/models/generate.go index 2fc66248c..c8fa9785c 100644 --- a/pkg/models/generate.go +++ b/pkg/models/generate.go @@ -18,6 +18,7 @@ type GenerateMetadataOptions struct { Transcodes bool `json:"transcodes"` Phashes bool `json:"phashes"` InteractiveHeatmapsSpeeds bool `json:"interactiveHeatmapsSpeeds"` + ClipPreviews bool `json:"clipPreviews"` } type GeneratePreviewOptions struct { diff --git a/pkg/models/image.go b/pkg/models/image.go index 774e0536a..288f69976 100644 --- a/pkg/models/image.go +++ b/pkg/models/image.go @@ -108,6 +108,7 @@ type ImageReader interface { FindByChecksum(ctx context.Context, checksum string) ([]*Image, error) FindByGalleryID(ctx context.Context, galleryID int) ([]*Image, error) CountByGalleryID(ctx context.Context, galleryID int) (int, error) + OCountByPerformerID(ctx context.Context, performerID int) (int, error) Count(ctx context.Context) (int, error) Size(ctx context.Context) (float64, error) All(ctx context.Context) ([]*Image, error) diff --git a/pkg/models/jsonschema/performer.go b/pkg/models/jsonschema/performer.go index c0996a1a5..248cf9557 100644 --- a/pkg/models/jsonschema/performer.go +++ b/pkg/models/jsonschema/performer.go @@ -48,6 +48,8 @@ type Performer struct { Height string `json:"height,omitempty"` Measurements string `json:"measurements,omitempty"` FakeTits string `json:"fake_tits,omitempty"` + PenisLength float64 `json:"penis_length,omitempty"` + Circumcised string 
`json:"circumcised,omitempty"` CareerLength string `json:"career_length,omitempty"` Tattoos string `json:"tattoos,omitempty"` Piercings string `json:"piercings,omitempty"` diff --git a/pkg/models/mocks/ImageReaderWriter.go b/pkg/models/mocks/ImageReaderWriter.go index 41468ceb2..67a9d318e 100644 --- a/pkg/models/mocks/ImageReaderWriter.go +++ b/pkg/models/mocks/ImageReaderWriter.go @@ -79,6 +79,27 @@ func (_m *ImageReaderWriter) CountByGalleryID(ctx context.Context, galleryID int return r0, r1 } +// OCountByPerformerID provides a mock function with given fields: ctx, performerID +func (_m *ImageReaderWriter) OCountByPerformerID(ctx context.Context, performerID int) (int, error) { + ret := _m.Called(ctx, performerID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { + r0 = rf(ctx, performerID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, performerID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Create provides a mock function with given fields: ctx, newImage func (_m *ImageReaderWriter) Create(ctx context.Context, newImage *models.ImageCreateInput) error { ret := _m.Called(ctx, newImage) diff --git a/pkg/models/mocks/MovieReaderWriter.go b/pkg/models/mocks/MovieReaderWriter.go index 3131d31d6..2ec62f26c 100644 --- a/pkg/models/mocks/MovieReaderWriter.go +++ b/pkg/models/mocks/MovieReaderWriter.go @@ -342,6 +342,27 @@ func (_m *MovieReaderWriter) HasBackImage(ctx context.Context, movieID int) (boo return r0, r1 } +// HasFrontImage provides a mock function with given fields: ctx, movieID +func (_m *MovieReaderWriter) HasFrontImage(ctx context.Context, movieID int) (bool, error) { + ret := _m.Called(ctx, movieID) + + var r0 bool + if rf, ok := ret.Get(0).(func(context.Context, int) bool); ok { + r0 = rf(ctx, movieID) + } else { + r0 = ret.Get(0).(bool) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) 
error); ok { + r1 = rf(ctx, movieID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Query provides a mock function with given fields: ctx, movieFilter, findFilter func (_m *MovieReaderWriter) Query(ctx context.Context, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) ([]*models.Movie, int, error) { ret := _m.Called(ctx, movieFilter, findFilter) diff --git a/pkg/models/mocks/PerformerReaderWriter.go b/pkg/models/mocks/PerformerReaderWriter.go index b579ab562..3f3b3c5ac 100644 --- a/pkg/models/mocks/PerformerReaderWriter.go +++ b/pkg/models/mocks/PerformerReaderWriter.go @@ -397,6 +397,27 @@ func (_m *PerformerReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ( return r0, r1 } +// HasImage provides a mock function with given fields: ctx, performerID +func (_m *PerformerReaderWriter) HasImage(ctx context.Context, performerID int) (bool, error) { + ret := _m.Called(ctx, performerID) + + var r0 bool + if rf, ok := ret.Get(0).(func(context.Context, int) bool); ok { + r0 = rf(ctx, performerID) + } else { + r0 = ret.Get(0).(bool) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, performerID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Query provides a mock function with given fields: ctx, performerFilter, findFilter func (_m *PerformerReaderWriter) Query(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) { ret := _m.Called(ctx, performerFilter, findFilter) diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index 5f7191827..7ee47e906 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -102,6 +102,27 @@ func (_m *SceneReaderWriter) CountByPerformerID(ctx context.Context, performerID return r0, r1 } +// OCountByPerformerID provides a mock function with given fields: ctx, performerID +func (_m 
*SceneReaderWriter) OCountByPerformerID(ctx context.Context, performerID int) (int, error) { + ret := _m.Called(ctx, performerID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { + r0 = rf(ctx, performerID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, performerID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // CountByStudioID provides a mock function with given fields: ctx, studioID func (_m *SceneReaderWriter) CountByStudioID(ctx context.Context, studioID int) (int, error) { ret := _m.Called(ctx, studioID) @@ -418,7 +439,7 @@ func (_m *SceneReaderWriter) FindByPerformerID(ctx context.Context, performerID } // FindDuplicates provides a mock function with given fields: ctx, distance -func (_m *SceneReaderWriter) FindDuplicates(ctx context.Context, distance int) ([][]*models.Scene, error) { +func (_m *SceneReaderWriter) FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*models.Scene, error) { ret := _m.Called(ctx, distance) var r0 [][]*models.Scene diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index d67f4f00e..14e084515 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -440,6 +440,27 @@ func (_m *TagReaderWriter) GetImage(ctx context.Context, tagID int) ([]byte, err return r0, r1 } +// HasImage provides a mock function with given fields: ctx, tagID +func (_m *TagReaderWriter) HasImage(ctx context.Context, tagID int) (bool, error) { + ret := _m.Called(ctx, tagID) + + var r0 bool + if rf, ok := ret.Get(0).(func(context.Context, int) bool); ok { + r0 = rf(ctx, tagID) + } else { + r0 = ret.Get(0).(bool) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, tagID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Merge provides a mock function with given fields: ctx, 
source, destination func (_m *TagReaderWriter) Merge(ctx context.Context, source []int, destination int) error { ret := _m.Called(ctx, source, destination) diff --git a/pkg/models/model_image.go b/pkg/models/model_image.go index 42425c455..e025ba0b1 100644 --- a/pkg/models/model_image.go +++ b/pkg/models/model_image.go @@ -2,7 +2,6 @@ package models import ( "context" - "errors" "path/filepath" "strconv" "time" @@ -24,7 +23,7 @@ type Image struct { Date *Date `json:"date"` // transient - not persisted - Files RelatedImageFiles + Files RelatedFiles PrimaryFileID *file.ID // transient - path of primary file - empty if no files Path string @@ -39,14 +38,14 @@ type Image struct { PerformerIDs RelatedIDs `json:"performer_ids"` } -func (i *Image) LoadFiles(ctx context.Context, l ImageFileLoader) error { - return i.Files.load(func() ([]*file.ImageFile, error) { +func (i *Image) LoadFiles(ctx context.Context, l FileLoader) error { + return i.Files.load(func() ([]file.File, error) { return l.GetFiles(ctx, i.ID) }) } func (i *Image) LoadPrimaryFile(ctx context.Context, l file.Finder) error { - return i.Files.loadPrimary(func() (*file.ImageFile, error) { + return i.Files.loadPrimary(func() (file.File, error) { if i.PrimaryFileID == nil { return nil, nil } @@ -56,15 +55,11 @@ func (i *Image) LoadPrimaryFile(ctx context.Context, l file.Finder) error { return nil, err } - var vf *file.ImageFile if len(f) > 0 { - var ok bool - vf, ok = f[0].(*file.ImageFile) - if !ok { - return nil, errors.New("not an image file") - } + return f[0], nil } - return vf, nil + + return nil, nil }) } diff --git a/pkg/models/model_performer.go b/pkg/models/model_performer.go index fd52a7674..134d46783 100644 --- a/pkg/models/model_performer.go +++ b/pkg/models/model_performer.go @@ -6,26 +6,28 @@ import ( ) type Performer struct { - ID int `json:"id"` - Name string `json:"name"` - Disambiguation string `json:"disambiguation"` - Gender GenderEnum `json:"gender"` - URL string `json:"url"` - Twitter 
string `json:"twitter"` - Instagram string `json:"instagram"` - Birthdate *Date `json:"birthdate"` - Ethnicity string `json:"ethnicity"` - Country string `json:"country"` - EyeColor string `json:"eye_color"` - Height *int `json:"height"` - Measurements string `json:"measurements"` - FakeTits string `json:"fake_tits"` - CareerLength string `json:"career_length"` - Tattoos string `json:"tattoos"` - Piercings string `json:"piercings"` - Favorite bool `json:"favorite"` - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` + ID int `json:"id"` + Name string `json:"name"` + Disambiguation string `json:"disambiguation"` + Gender *GenderEnum `json:"gender"` + URL string `json:"url"` + Twitter string `json:"twitter"` + Instagram string `json:"instagram"` + Birthdate *Date `json:"birthdate"` + Ethnicity string `json:"ethnicity"` + Country string `json:"country"` + EyeColor string `json:"eye_color"` + Height *int `json:"height"` + Measurements string `json:"measurements"` + FakeTits string `json:"fake_tits"` + PenisLength *float64 `json:"penis_length"` + Circumcised *CircumisedEnum `json:"circumcised"` + CareerLength string `json:"career_length"` + Tattoos string `json:"tattoos"` + Piercings string `json:"piercings"` + Favorite bool `json:"favorite"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` // Rating expressed in 1-100 scale Rating *int `json:"rating"` Details string `json:"details"` @@ -90,6 +92,8 @@ type PerformerPartial struct { Height OptionalInt Measurements OptionalString FakeTits OptionalString + PenisLength OptionalFloat64 + Circumcised OptionalString CareerLength OptionalString Tattoos OptionalString Piercings OptionalString diff --git a/pkg/models/model_scraped_item.go b/pkg/models/model_scraped_item.go index fa25bcb7e..9d497b043 100644 --- a/pkg/models/model_scraped_item.go +++ b/pkg/models/model_scraped_item.go @@ -32,6 +32,8 @@ type ScrapedPerformer struct { Height *string `json:"height"` 
Measurements *string `json:"measurements"` FakeTits *string `json:"fake_tits"` + PenisLength *string `json:"penis_length"` + Circumcised *string `json:"circumcised"` CareerLength *string `json:"career_length"` Tattoos *string `json:"tattoos"` Piercings *string `json:"piercings"` diff --git a/pkg/models/model_studio.go b/pkg/models/model_studio.go index 989415293..fed4fafa3 100644 --- a/pkg/models/model_studio.go +++ b/pkg/models/model_studio.go @@ -37,8 +37,6 @@ type StudioPartial struct { IgnoreAutoTag *bool `db:"ignore_auto_tag" json:"ignore_auto_tag"` } -var DefaultStudioImage = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAA3XAAAN1wFCKJt4AAAAB3RJTUUH4wgVBQsJl1CMZAAAASJJREFUeNrt3N0JwyAYhlEj3cj9R3Cm5rbkqtAP+qrnGaCYHPwJpLlaa++mmLpbAERAgAgIEAEBIiBABERAgAgIEAEBIiBABERAgAgIEAHZuVflj40x4i94zhk9vqsVvEq6AsQqMP1EjORx20OACAgQRRx7T+zzcFBxcjNDfoB4ntQqTm5Awo7MlqywZxcgYQ+RlqywJ3ozJAQCSBiEJSsQA0gYBpDAgAARECACAkRAgAgIEAERECACAmSjUv6eAOSB8m8YIGGzBUjYbAESBgMkbBkDEjZbgITBAClcxiqQvEoatreYIWEBASIgJ4Gkf11ntXH3nS9uxfGWfJ5J9hAgAgJEQAQEiIAAERAgAgJEQAQEiIAAERAgAgJEQAQEiL7qBuc6RKLHxr0CAAAAAElFTkSuQmCC" - func NewStudio(name string) *Studio { currentTime := time.Now() return &Studio{ diff --git a/pkg/models/model_tag.go b/pkg/models/model_tag.go index b12574155..f57bf199e 100644 --- a/pkg/models/model_tag.go +++ b/pkg/models/model_tag.go @@ -58,132 +58,3 @@ func (t *TagPaths) Append(o interface{}) { func (t *TagPaths) New() interface{} { return &TagPath{} } - -// Original Tag image from: https://fontawesome.com/icons/tag?style=solid -// Modified to change color and rotate -// Licensed under CC Attribution 4.0: https://fontawesome.com/license -var DefaultTagImage = []byte(` - - - - - - image/svg+xml - - - - - - - - -`) - -// var DefaultTagImage = []byte(` -// -// -// -// -// -// image/svg+xml -// -// -// -// -// -// -// -// -// `) diff --git a/pkg/models/movie.go b/pkg/models/movie.go index aac1aa759..f4d5bce1e 100644 --- 
a/pkg/models/movie.go +++ b/pkg/models/movie.go @@ -38,6 +38,7 @@ type MovieReader interface { Count(ctx context.Context) (int, error) Query(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int, error) GetFrontImage(ctx context.Context, movieID int) ([]byte, error) + HasFrontImage(ctx context.Context, movieID int) (bool, error) GetBackImage(ctx context.Context, movieID int) ([]byte, error) HasBackImage(ctx context.Context, movieID int) (bool, error) FindByPerformerID(ctx context.Context, performerID int) ([]*Movie, error) diff --git a/pkg/models/paths/paths_generated.go b/pkg/models/paths/paths_generated.go index aa65ea918..d87e1eed6 100644 --- a/pkg/models/paths/paths_generated.go +++ b/pkg/models/paths/paths_generated.go @@ -78,3 +78,8 @@ func (gp *generatedPaths) GetThumbnailPath(checksum string, width int) string { fname := fmt.Sprintf("%s_%d.jpg", checksum, width) return filepath.Join(gp.Thumbnails, fsutil.GetIntraDir(checksum, thumbDirDepth, thumbDirLength), fname) } + +func (gp *generatedPaths) GetClipPreviewPath(checksum string, width int) string { + fname := fmt.Sprintf("%s_%d.webm", checksum, width) + return filepath.Join(gp.Thumbnails, fsutil.GetIntraDir(checksum, thumbDirDepth, thumbDirLength), fname) +} diff --git a/pkg/models/performer.go b/pkg/models/performer.go index de0f278e1..23b70b0da 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -61,6 +61,52 @@ type GenderCriterionInput struct { Modifier CriterionModifier `json:"modifier"` } +type CircumisedEnum string + +const ( + CircumisedEnumCut CircumisedEnum = "CUT" + CircumisedEnumUncut CircumisedEnum = "UNCUT" +) + +var AllCircumcisionEnum = []CircumisedEnum{ + CircumisedEnumCut, + CircumisedEnumUncut, +} + +func (e CircumisedEnum) IsValid() bool { + switch e { + case CircumisedEnumCut, CircumisedEnumUncut: + return true + } + return false +} + +func (e CircumisedEnum) String() string { + return string(e) +} + +func (e *CircumisedEnum) 
UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = CircumisedEnum(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid CircumisedEnum", str) + } + return nil +} + +func (e CircumisedEnum) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type CircumcisionCriterionInput struct { + Value []CircumisedEnum `json:"value"` + Modifier CriterionModifier `json:"modifier"` +} + type PerformerFilterType struct { And *PerformerFilterType `json:"AND"` Or *PerformerFilterType `json:"OR"` @@ -88,6 +134,10 @@ type PerformerFilterType struct { Measurements *StringCriterionInput `json:"measurements"` // Filter by fake tits value FakeTits *StringCriterionInput `json:"fake_tits"` + // Filter by penis length value + PenisLength *FloatCriterionInput `json:"penis_length"` + // Filter by circumcision + Circumcised *CircumcisionCriterionInput `json:"circumcised"` // Filter by career length CareerLength *StringCriterionInput `json:"career_length"` // Filter by tattoos @@ -110,6 +160,8 @@ type PerformerFilterType struct { ImageCount *IntCriterionInput `json:"image_count"` // Filter by gallery count GalleryCount *IntCriterionInput `json:"gallery_count"` + // Filter by O count + OCounter *IntCriterionInput `json:"o_counter"` // Filter by StashID StashID *StringCriterionInput `json:"stash_id"` // Filter by StashID Endpoint @@ -128,6 +180,8 @@ type PerformerFilterType struct { DeathYear *IntCriterionInput `json:"death_year"` // Filter by studios where performer appears in scene/image/gallery Studios *HierarchicalMultiCriterionInput `json:"studios"` + // Filter by performers where performer appears with another performer in scene/image/gallery + Performers *MultiCriterionInput `json:"performers"` // Filter by autotag ignore value IgnoreAutoTag *bool `json:"ignore_auto_tag"` // Filter by birthdate @@ -163,6 +217,7 @@ type PerformerReader interface { QueryCount(ctx context.Context, 
galleryFilter *PerformerFilterType, findFilter *FindFilterType) (int, error) AliasLoader GetImage(ctx context.Context, performerID int) ([]byte, error) + HasImage(ctx context.Context, performerID int) (bool, error) StashIDLoader TagIDLoader } diff --git a/pkg/models/relationships.go b/pkg/models/relationships.go index b3afcad9e..3975bffc3 100644 --- a/pkg/models/relationships.go +++ b/pkg/models/relationships.go @@ -34,10 +34,6 @@ type VideoFileLoader interface { GetFiles(ctx context.Context, relatedID int) ([]*file.VideoFile, error) } -type ImageFileLoader interface { - GetFiles(ctx context.Context, relatedID int) ([]*file.ImageFile, error) -} - type FileLoader interface { GetFiles(ctx context.Context, relatedID int) ([]file.File, error) } @@ -320,89 +316,6 @@ func (r *RelatedVideoFiles) loadPrimary(fn func() (*file.VideoFile, error)) erro return nil } -type RelatedImageFiles struct { - primaryFile *file.ImageFile - files []*file.ImageFile - primaryLoaded bool -} - -func NewRelatedImageFiles(files []*file.ImageFile) RelatedImageFiles { - ret := RelatedImageFiles{ - files: files, - primaryLoaded: true, - } - - if len(files) > 0 { - ret.primaryFile = files[0] - } - - return ret -} - -// Loaded returns true if the relationship has been loaded. -func (r RelatedImageFiles) Loaded() bool { - return r.files != nil -} - -// Loaded returns true if the primary file relationship has been loaded. -func (r RelatedImageFiles) PrimaryLoaded() bool { - return r.primaryLoaded -} - -// List returns the related files. Panics if the relationship has not been loaded. -func (r RelatedImageFiles) List() []*file.ImageFile { - if !r.Loaded() { - panic("relationship has not been loaded") - } - - return r.files -} - -// Primary returns the primary file. Panics if the relationship has not been loaded. 
-func (r RelatedImageFiles) Primary() *file.ImageFile { - if !r.PrimaryLoaded() { - panic("relationship has not been loaded") - } - - return r.primaryFile -} - -func (r *RelatedImageFiles) load(fn func() ([]*file.ImageFile, error)) error { - if r.Loaded() { - return nil - } - - var err error - r.files, err = fn() - if err != nil { - return err - } - - if len(r.files) > 0 { - r.primaryFile = r.files[0] - } - - r.primaryLoaded = true - - return nil -} - -func (r *RelatedImageFiles) loadPrimary(fn func() (*file.ImageFile, error)) error { - if r.PrimaryLoaded() { - return nil - } - - var err error - r.primaryFile, err = fn() - if err != nil { - return err - } - - r.primaryLoaded = true - - return nil -} - type RelatedFiles struct { primaryFile file.File files []file.File diff --git a/pkg/models/scene.go b/pkg/models/scene.go index 55a27606a..90655ff5e 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -27,6 +27,8 @@ type SceneFilterType struct { Checksum *StringCriterionInput `json:"checksum"` // Filter by file phash Phash *StringCriterionInput `json:"phash"` + // Filter by phash distance + PhashDistance *PhashDistanceCriterionInput `json:"phash_distance"` // Filter by path Path *StringCriterionInput `json:"path"` // Filter by file count @@ -151,7 +153,7 @@ type SceneReader interface { FindByPath(ctx context.Context, path string) ([]*Scene, error) FindByPerformerID(ctx context.Context, performerID int) ([]*Scene, error) FindByGalleryID(ctx context.Context, performerID int) ([]*Scene, error) - FindDuplicates(ctx context.Context, distance int) ([][]*Scene, error) + FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*Scene, error) GalleryIDLoader PerformerIDLoader @@ -161,6 +163,7 @@ type SceneReader interface { VideoFileLoader CountByPerformerID(ctx context.Context, performerID int) (int, error) + OCountByPerformerID(ctx context.Context, performerID int) (int, error) // FindByStudioID(studioID int) ([]*Scene, error) FindByMovieID(ctx 
context.Context, movieID int) ([]*Scene, error) CountByMovieID(ctx context.Context, movieID int) (int, error) diff --git a/pkg/models/tag.go b/pkg/models/tag.go index 601dfcc16..2bbdeca39 100644 --- a/pkg/models/tag.go +++ b/pkg/models/tag.go @@ -63,6 +63,7 @@ type TagReader interface { QueryForAutoTag(ctx context.Context, words []string) ([]*Tag, error) Query(ctx context.Context, tagFilter *TagFilterType, findFilter *FindFilterType) ([]*Tag, int, error) GetImage(ctx context.Context, tagID int) ([]byte, error) + HasImage(ctx context.Context, tagID int) (bool, error) GetAliases(ctx context.Context, tagID int) ([]string, error) FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) diff --git a/pkg/models/update.go b/pkg/models/update.go index fbfab3d30..ffa793bda 100644 --- a/pkg/models/update.go +++ b/pkg/models/update.go @@ -64,6 +64,48 @@ func (u *UpdateIDs) IDStrings() []string { return intslice.IntSliceToStringSlice(u.IDs) } +// GetImpactedIDs returns the IDs that will be impacted by the update. +// If the update is to add IDs, then the impacted IDs are the IDs being added. +// If the update is to remove IDs, then the impacted IDs are the IDs being removed. +// If the update is to set IDs, then the impacted IDs are the IDs being removed and the IDs being added. +// Any IDs that are already present and are being added are not returned. +// Likewise, any IDs that are not present that are being removed are not returned. 
+func (u *UpdateIDs) ImpactedIDs(existing []int) []int { + if u == nil { + return nil + } + + switch u.Mode { + case RelationshipUpdateModeAdd: + return intslice.IntExclude(u.IDs, existing) + case RelationshipUpdateModeRemove: + return intslice.IntIntercect(existing, u.IDs) + case RelationshipUpdateModeSet: + // get the difference between the two lists + return intslice.IntNotIntersect(existing, u.IDs) + } + + return nil +} + +// GetEffectiveIDs returns the new IDs that will be effective after the update. +func (u *UpdateIDs) EffectiveIDs(existing []int) []int { + if u == nil { + return nil + } + + switch u.Mode { + case RelationshipUpdateModeAdd: + return intslice.IntAppendUniques(existing, u.IDs) + case RelationshipUpdateModeRemove: + return intslice.IntExclude(existing, u.IDs) + case RelationshipUpdateModeSet: + return u.IDs + } + + return nil +} + type UpdateStrings struct { Values []string `json:"values"` Mode RelationshipUpdateMode `json:"mode"` diff --git a/pkg/models/update_test.go b/pkg/models/update_test.go new file mode 100644 index 000000000..0baf7926f --- /dev/null +++ b/pkg/models/update_test.go @@ -0,0 +1,92 @@ +package models + +import ( + "reflect" + "testing" +) + +func TestUpdateIDs_ImpactedIDs(t *testing.T) { + tests := []struct { + name string + IDs []int + Mode RelationshipUpdateMode + existing []int + want []int + }{ + { + name: "add", + IDs: []int{1, 2, 3}, + Mode: RelationshipUpdateModeAdd, + existing: []int{1, 2}, + want: []int{3}, + }, + { + name: "remove", + IDs: []int{1, 2, 3}, + Mode: RelationshipUpdateModeRemove, + existing: []int{1, 2}, + want: []int{1, 2}, + }, + { + name: "set", + IDs: []int{1, 2, 3}, + Mode: RelationshipUpdateModeSet, + existing: []int{1, 2}, + want: []int{3}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + u := &UpdateIDs{ + IDs: tt.IDs, + Mode: tt.Mode, + } + if got := u.ImpactedIDs(tt.existing); !reflect.DeepEqual(got, tt.want) { + t.Errorf("UpdateIDs.ImpactedIDs() = %v, want %v", 
got, tt.want) + } + }) + } +} + +func TestUpdateIDs_EffectiveIDs(t *testing.T) { + tests := []struct { + name string + IDs []int + Mode RelationshipUpdateMode + existing []int + want []int + }{ + { + name: "add", + IDs: []int{2, 3}, + Mode: RelationshipUpdateModeAdd, + existing: []int{1, 2}, + want: []int{1, 2, 3}, + }, + { + name: "remove", + IDs: []int{2, 3}, + Mode: RelationshipUpdateModeRemove, + existing: []int{1, 2}, + want: []int{1}, + }, + { + name: "set", + IDs: []int{1, 2, 3}, + Mode: RelationshipUpdateModeSet, + existing: []int{1, 2}, + want: []int{1, 2, 3}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + u := &UpdateIDs{ + IDs: tt.IDs, + Mode: tt.Mode, + } + if got := u.EffectiveIDs(tt.existing); !reflect.DeepEqual(got, tt.want) { + t.Errorf("UpdateIDs.EffectiveIDs() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/performer/export.go b/pkg/performer/export.go index 4b46fd901..9aec8b34e 100644 --- a/pkg/performer/export.go +++ b/pkg/performer/export.go @@ -23,7 +23,6 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode newPerformerJSON := jsonschema.Performer{ Name: performer.Name, Disambiguation: performer.Disambiguation, - Gender: performer.Gender.String(), URL: performer.URL, Ethnicity: performer.Ethnicity, Country: performer.Country, @@ -43,6 +42,14 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode UpdatedAt: json.JSONTime{Time: performer.UpdatedAt}, } + if performer.Gender != nil { + newPerformerJSON.Gender = performer.Gender.String() + } + + if performer.Circumcised != nil { + newPerformerJSON.Circumcised = performer.Circumcised.String() + } + if performer.Birthdate != nil { newPerformerJSON.Birthdate = performer.Birthdate.String() } @@ -61,6 +68,10 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode newPerformerJSON.Weight = *performer.Weight } + if performer.PenisLength != nil { + 
newPerformerJSON.PenisLength = *performer.PenisLength + } + if err := performer.LoadAliases(ctx, reader); err != nil { return nil, fmt.Errorf("loading performer aliases: %w", err) } diff --git a/pkg/performer/export_test.go b/pkg/performer/export_test.go index f65693e3f..c5965404a 100644 --- a/pkg/performer/export_test.go +++ b/pkg/performer/export_test.go @@ -29,7 +29,6 @@ const ( ethnicity = "ethnicity" eyeColor = "eyeColor" fakeTits = "fakeTits" - gender = "gender" instagram = "instagram" measurements = "measurements" piercings = "piercings" @@ -42,10 +41,15 @@ const ( ) var ( - aliases = []string{"alias1", "alias2"} - rating = 5 - height = 123 - weight = 60 + genderEnum = models.GenderEnumFemale + gender = genderEnum.String() + aliases = []string{"alias1", "alias2"} + rating = 5 + height = 123 + weight = 60 + penisLength = 1.23 + circumcisedEnum = models.CircumisedEnumCut + circumcised = circumcisedEnum.String() ) var imageBytes = []byte("imageBytes") @@ -81,8 +85,10 @@ func createFullPerformer(id int, name string) *models.Performer { Ethnicity: ethnicity, EyeColor: eyeColor, FakeTits: fakeTits, + PenisLength: &penisLength, + Circumcised: &circumcisedEnum, Favorite: true, - Gender: gender, + Gender: &genderEnum, Height: &height, Instagram: instagram, Measurements: measurements, @@ -125,6 +131,8 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer { Ethnicity: ethnicity, EyeColor: eyeColor, FakeTits: fakeTits, + PenisLength: penisLength, + Circumcised: circumcised, Favorite: true, Gender: gender, Height: strconv.Itoa(height), diff --git a/pkg/performer/import.go b/pkg/performer/import.go index beebab35d..4ca27ce55 100644 --- a/pkg/performer/import.go +++ b/pkg/performer/import.go @@ -189,7 +189,6 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform newPerformer := models.Performer{ Name: performerJSON.Name, Disambiguation: performerJSON.Disambiguation, - Gender: models.GenderEnum(performerJSON.Gender), 
URL: performerJSON.URL, Ethnicity: performerJSON.Ethnicity, Country: performerJSON.Country, @@ -213,6 +212,16 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform StashIDs: models.NewRelatedStashIDs(performerJSON.StashIDs), } + if performerJSON.Gender != "" { + v := models.GenderEnum(performerJSON.Gender) + newPerformer.Gender = &v + } + + if performerJSON.Circumcised != "" { + v := models.CircumisedEnum(performerJSON.Circumcised) + newPerformer.Circumcised = &v + } + if performerJSON.Birthdate != "" { d, err := utils.ParseDateStringAsTime(performerJSON.Birthdate) if err == nil { @@ -237,6 +246,10 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform newPerformer.Weight = &performerJSON.Weight } + if performerJSON.PenisLength != 0 { + newPerformer.PenisLength = &performerJSON.PenisLength + } + if performerJSON.Height != "" { h, err := strconv.Atoi(performerJSON.Height) if err == nil { diff --git a/pkg/performer/query.go b/pkg/performer/query.go index d790c6d52..a3045ef67 100644 --- a/pkg/performer/query.go +++ b/pkg/performer/query.go @@ -25,3 +25,14 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int) (int, error) { return r.QueryCount(ctx, filter, nil) } + +func CountByAppearsWith(ctx context.Context, r CountQueryer, id int) (int, error) { + filter := &models.PerformerFilterType{ + Performers: &models.MultiCriterionInput{ + Value: []string{strconv.Itoa(id)}, + Modifier: models.CriterionModifierIncludes, + }, + } + + return r.QueryCount(ctx, filter, nil) +} diff --git a/pkg/scraper/autotag.go b/pkg/scraper/autotag.go index 53aedc749..786cd024d 100644 --- a/pkg/scraper/autotag.go +++ b/pkg/scraper/autotag.go @@ -41,7 +41,7 @@ func autotagMatchPerformers(ctx context.Context, path string, performerReader ma Name: &pp.Name, StoredID: &id, } - if pp.Gender.IsValid() { + if pp.Gender != nil && pp.Gender.IsValid() { v := pp.Gender.String() sp.Gender = &v } diff --git a/pkg/scraper/cache.go 
b/pkg/scraper/cache.go index 3b5391994..5a15239db 100644 --- a/pkg/scraper/cache.go +++ b/pkg/scraper/cache.go @@ -150,7 +150,6 @@ func (c *Cache) loadScrapers() (map[string]scraper, error) { logger.Debugf("Reading scraper configs from %s", path) - scraperFiles := []string{} err := fsutil.SymWalk(path, func(fp string, f os.FileInfo, err error) error { if filepath.Ext(fp) == ".yml" { conf, err := loadConfigFromYAMLFile(fp) @@ -160,7 +159,6 @@ func (c *Cache) loadScrapers() (map[string]scraper, error) { scraper := newGroupScraper(*conf, c.globalConfig) scrapers[scraper.spec().ID] = scraper } - scraperFiles = append(scraperFiles, fp) } return nil }) @@ -187,7 +185,7 @@ func (c *Cache) ReloadScrapers() error { } // ListScrapers lists scrapers matching one of the given types. -// Returns a list of scrapers, sorted by their ID. +// Returns a list of scrapers, sorted by their name. func (c Cache) ListScrapers(tys []ScrapeContentType) []*Scraper { var ret []*Scraper for _, s := range c.scrapers { @@ -201,7 +199,7 @@ func (c Cache) ListScrapers(tys []ScrapeContentType) []*Scraper { } sort.Slice(ret, func(i, j int) bool { - return ret[i].ID < ret[j].ID + return strings.ToLower(ret[i].Name) < strings.ToLower(ret[j].Name) }) return ret diff --git a/pkg/scraper/performer.go b/pkg/scraper/performer.go index 48f6ce318..269368823 100644 --- a/pkg/scraper/performer.go +++ b/pkg/scraper/performer.go @@ -16,6 +16,8 @@ type ScrapedPerformerInput struct { Height *string `json:"height"` Measurements *string `json:"measurements"` FakeTits *string `json:"fake_tits"` + PenisLength *string `json:"penis_length"` + Circumcised *string `json:"circumcised"` CareerLength *string `json:"career_length"` Tattoos *string `json:"tattoos"` Piercings *string `json:"piercings"` diff --git a/pkg/scraper/stash.go b/pkg/scraper/stash.go index 9267bad0c..652a9de0a 100644 --- a/pkg/scraper/stash.go +++ b/pkg/scraper/stash.go @@ -69,6 +69,8 @@ type scrapedPerformerStash struct { Height *string 
`graphql:"height" json:"height"` Measurements *string `graphql:"measurements" json:"measurements"` FakeTits *string `graphql:"fake_tits" json:"fake_tits"` + PenisLength *string `graphql:"penis_length" json:"penis_length"` + Circumcised *string `graphql:"circumcised" json:"circumcised"` CareerLength *string `graphql:"career_length" json:"career_length"` Tattoos *string `graphql:"tattoos" json:"tattoos"` Piercings *string `graphql:"piercings" json:"piercings"` diff --git a/pkg/scraper/stashbox/graphql/generated_models.go b/pkg/scraper/stashbox/graphql/generated_models.go index 6b3e09565..0dfb4bf57 100644 --- a/pkg/scraper/stashbox/graphql/generated_models.go +++ b/pkg/scraper/stashbox/graphql/generated_models.go @@ -332,6 +332,7 @@ type Performer struct { Deleted bool `json:"deleted"` Edits []*Edit `json:"edits,omitempty"` SceneCount int `json:"scene_count"` + OCounter int `json:"o_counter"` MergedIds []string `json:"merged_ids,omitempty"` Studios []*PerformerStudio `json:"studios,omitempty"` IsFavorite bool `json:"is_favorite"` @@ -1771,6 +1772,7 @@ const ( PerformerSortEnumName PerformerSortEnum = "NAME" PerformerSortEnumBirthdate PerformerSortEnum = "BIRTHDATE" PerformerSortEnumSceneCount PerformerSortEnum = "SCENE_COUNT" + PerformerSortEnumOCounter PerformerSortEnum = "O_COUNTER" PerformerSortEnumCareerStartYear PerformerSortEnum = "CAREER_START_YEAR" PerformerSortEnumDebut PerformerSortEnum = "DEBUT" PerformerSortEnumCreatedAt PerformerSortEnum = "CREATED_AT" @@ -1781,6 +1783,7 @@ var AllPerformerSortEnum = []PerformerSortEnum{ PerformerSortEnumName, PerformerSortEnumBirthdate, PerformerSortEnumSceneCount, + PerformerSortEnumOCounter, PerformerSortEnumCareerStartYear, PerformerSortEnumDebut, PerformerSortEnumCreatedAt, @@ -1789,7 +1792,7 @@ var AllPerformerSortEnum = []PerformerSortEnum{ func (e PerformerSortEnum) IsValid() bool { switch e { - case PerformerSortEnumName, PerformerSortEnumBirthdate, PerformerSortEnumSceneCount, PerformerSortEnumCareerStartYear, 
PerformerSortEnumDebut, PerformerSortEnumCreatedAt, PerformerSortEnumUpdatedAt: + case PerformerSortEnumName, PerformerSortEnumBirthdate, PerformerSortEnumSceneCount, PerformerSortEnumOCounter, PerformerSortEnumCareerStartYear, PerformerSortEnumDebut, PerformerSortEnumCreatedAt, PerformerSortEnumUpdatedAt: return true } return false diff --git a/pkg/scraper/stashbox/stash_box.go b/pkg/scraper/stashbox/stash_box.go index 9d8e65d0c..1a83c1ab6 100644 --- a/pkg/scraper/stashbox/stash_box.go +++ b/pkg/scraper/stashbox/stash_box.go @@ -10,6 +10,7 @@ import ( "io" "mime/multipart" "net/http" + "regexp" "strconv" "strings" @@ -663,7 +664,7 @@ func performerFragmentToScrapedScenePerformer(p graphql.PerformerFragment) *mode func getFirstImage(ctx context.Context, client *http.Client, images []*graphql.ImageFragment) *string { ret, err := fetchImage(ctx, client, images[0].URL) - if err != nil { + if err != nil && !errors.Is(err, context.Canceled) { logger.Warnf("Error fetching image %s: %s", images[0].URL, err.Error()) } @@ -1008,7 +1009,7 @@ func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Perf if performer.FakeTits != "" { draft.BreastType = &performer.FakeTits } - if performer.Gender.IsValid() { + if performer.Gender != nil && performer.Gender.IsValid() { v := performer.Gender.String() draft.Gender = &v } @@ -1046,10 +1047,20 @@ func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Perf var urls []string if len(strings.TrimSpace(performer.Twitter)) > 0 { - urls = append(urls, "https://twitter.com/"+strings.TrimSpace(performer.Twitter)) + reg := regexp.MustCompile(`https?:\/\/(?:www\.)?twitter\.com`) + if reg.MatchString(performer.Twitter) { + urls = append(urls, strings.TrimSpace(performer.Twitter)) + } else { + urls = append(urls, "https://twitter.com/"+strings.TrimSpace(performer.Twitter)) + } } if len(strings.TrimSpace(performer.Instagram)) > 0 { - urls = append(urls, 
"https://instagram.com/"+strings.TrimSpace(performer.Instagram)) + reg := regexp.MustCompile(`https?:\/\/(?:www\.)?instagram\.com`) + if reg.MatchString(performer.Instagram) { + urls = append(urls, strings.TrimSpace(performer.Instagram)) + } else { + urls = append(urls, "https://instagram.com/"+strings.TrimSpace(performer.Instagram)) + } } if len(strings.TrimSpace(performer.URL)) > 0 { urls = append(urls, strings.TrimSpace(performer.URL)) diff --git a/pkg/session/session.go b/pkg/session/session.go index 76a0c0520..9fcb87549 100644 --- a/pkg/session/session.go +++ b/pkg/session/session.go @@ -34,7 +34,15 @@ const ( passwordFormKey = "password" ) -var ErrInvalidCredentials = errors.New("invalid username or password") +type InvalidCredentialsError struct { + Username string +} + +func (e InvalidCredentialsError) Error() string { + // don't leak the username + return "invalid credentials" +} + var ErrUnauthorized = errors.New("unauthorized") type Store struct { @@ -63,9 +71,12 @@ func (s *Store) Login(w http.ResponseWriter, r *http.Request) error { // authenticate the user if !s.config.ValidateCredentials(username, password) { - return ErrInvalidCredentials + return &InvalidCredentialsError{Username: username} } + // since we only have one user, don't leak the name + logger.Info("User logged in") + newSession.Values[userIDKey] = username err := newSession.Save(r, w) @@ -90,6 +101,9 @@ func (s *Store) Logout(w http.ResponseWriter, r *http.Request) error { return err } + // since we only have one user, don't leak the name + logger.Infof("User logged out") + return nil } diff --git a/pkg/sqlite/database.go b/pkg/sqlite/database.go index d8e8b5e0d..c18b323ee 100644 --- a/pkg/sqlite/database.go +++ b/pkg/sqlite/database.go @@ -32,7 +32,7 @@ const ( dbConnTimeout = 30 ) -var appSchemaVersion uint = 45 +var appSchemaVersion uint = 46 //go:embed migrations/*.sql var migrationsBox embed.FS diff --git a/pkg/sqlite/driver.go b/pkg/sqlite/driver.go index 5712c77c7..d70676813 
100644 --- a/pkg/sqlite/driver.go +++ b/pkg/sqlite/driver.go @@ -5,7 +5,7 @@ import ( "database/sql/driver" "fmt" - "github.com/fvbommel/sortorder" + "github.com/fvbommel/sortorder/casefolded" sqlite3 "github.com/mattn/go-sqlite3" ) @@ -29,6 +29,7 @@ func (d *CustomSQLiteDriver) Open(dsn string) (driver.Conn, error) { "regexp": regexFn, "durationToTinyInt": durationToTinyIntFn, "basename": basenameFn, + "phash_distance": phashDistanceFn, } for name, fn := range funcs { @@ -37,9 +38,9 @@ func (d *CustomSQLiteDriver) Open(dsn string) (driver.Conn, error) { } } - // COLLATE NATURAL_CS - Case sensitive natural sort - err := conn.RegisterCollation("NATURAL_CS", func(s string, s2 string) int { - if sortorder.NaturalLess(s, s2) { + // COLLATE NATURAL_CI - Case insensitive natural sort + err := conn.RegisterCollation("NATURAL_CI", func(s string, s2 string) int { + if casefolded.NaturalLess(s, s2) { return -1 } else { return 1 diff --git a/pkg/sqlite/filter.go b/pkg/sqlite/filter.go index d75012b4e..5934b2c99 100644 --- a/pkg/sqlite/filter.go +++ b/pkg/sqlite/filter.go @@ -9,7 +9,6 @@ import ( "strconv" "strings" - "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/models" @@ -426,6 +425,29 @@ func stringCriterionHandler(c *models.StringCriterionInput, column string) crite } } +func enumCriterionHandler(modifier models.CriterionModifier, values []string, column string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if modifier.IsValid() { + switch modifier { + case models.CriterionModifierIncludes, models.CriterionModifierEquals: + if len(values) > 0 { + f.whereClauses = append(f.whereClauses, getEnumSearchClause(column, values, false)) + } + case models.CriterionModifierExcludes, models.CriterionModifierNotEquals: + if len(values) > 0 { + f.whereClauses = append(f.whereClauses, getEnumSearchClause(column, values, true)) + } + case models.CriterionModifierIsNull: + f.addWhere("(" + 
column + " IS NULL OR TRIM(" + column + ") = '')") + case models.CriterionModifierNotNull: + f.addWhere("(" + column + " IS NOT NULL AND TRIM(" + column + ") != '')") + default: + panic("unsupported string filter modifier") + } + } + } +} + func pathCriterionHandler(c *models.StringCriterionInput, pathColumn string, basenameColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { if c != nil { @@ -525,6 +547,15 @@ func intCriterionHandler(c *models.IntCriterionInput, column string, addJoinFn f } } +func floatCriterionHandler(c *models.FloatCriterionInput, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if c != nil { + clause, args := getFloatCriterionWhereClause(column, *c) + f.addWhere(clause, args...) + } + } +} + func boolCriterionHandler(c *bool, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { if c != nil { @@ -597,9 +628,12 @@ type joinedMultiCriterionHandlerBuilder struct { addJoinTable func(f *filterBuilder) } -func (m *joinedMultiCriterionHandlerBuilder) handler(criterion *models.MultiCriterionInput) criterionHandlerFunc { +func (m *joinedMultiCriterionHandlerBuilder) handler(c *models.MultiCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { - if criterion != nil { + if c != nil { + // make local copy so we can modify it + criterion := *c + joinAlias := m.joinAs if joinAlias == "" { joinAlias = m.joinTable @@ -621,37 +655,70 @@ func (m *joinedMultiCriterionHandlerBuilder) handler(criterion *models.MultiCrit return } - if len(criterion.Value) == 0 { + if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { return } - var args []interface{} - for _, tagID := range criterion.Value { - args = append(args, tagID) + // combine excludes if excludes modifier is selected + if criterion.Modifier == 
models.CriterionModifierExcludes { + criterion.Modifier = models.CriterionModifierIncludesAll + criterion.Excludes = append(criterion.Excludes, criterion.Value...) + criterion.Value = nil } - whereClause := "" - havingClause := "" + if len(criterion.Value) > 0 { + whereClause := "" + havingClause := "" + + var args []interface{} + for _, tagID := range criterion.Value { + args = append(args, tagID) + } + + switch criterion.Modifier { + case models.CriterionModifierIncludes: + // includes any of the provided ids + m.addJoinTable(f) + whereClause = fmt.Sprintf("%s.%s IN %s", joinAlias, m.foreignFK, getInBinding(len(criterion.Value))) + case models.CriterionModifierEquals: + // includes only the provided ids + m.addJoinTable(f) + whereClause = utils.StrFormat("{joinAlias}.{foreignFK} IN {inBinding} AND (SELECT COUNT(*) FROM {joinTable} s WHERE s.{primaryFK} = {primaryTable}.id) = ?", utils.StrFormatMap{ + "joinAlias": joinAlias, + "foreignFK": m.foreignFK, + "inBinding": getInBinding(len(criterion.Value)), + "joinTable": m.joinTable, + "primaryFK": m.primaryFK, + "primaryTable": m.primaryTable, + }) + havingClause = fmt.Sprintf("count(distinct %s.%s) IS %d", joinAlias, m.foreignFK, len(criterion.Value)) + args = append(args, len(criterion.Value)) + case models.CriterionModifierNotEquals: + f.setError(fmt.Errorf("not equals modifier is not supported for multi criterion input")) + case models.CriterionModifierIncludesAll: + // includes all of the provided ids + m.addJoinTable(f) + whereClause = fmt.Sprintf("%s.%s IN %s", joinAlias, m.foreignFK, getInBinding(len(criterion.Value))) + havingClause = fmt.Sprintf("count(distinct %s.%s) IS %d", joinAlias, m.foreignFK, len(criterion.Value)) + } + + f.addWhere(whereClause, args...) 
+ f.addHaving(havingClause) + } + + if len(criterion.Excludes) > 0 { + var args []interface{} + for _, tagID := range criterion.Excludes { + args = append(args, tagID) + } - switch criterion.Modifier { - case models.CriterionModifierIncludes: - // includes any of the provided ids - m.addJoinTable(f) - whereClause = fmt.Sprintf("%s.%s IN %s", joinAlias, m.foreignFK, getInBinding(len(criterion.Value))) - case models.CriterionModifierIncludesAll: - // includes all of the provided ids - m.addJoinTable(f) - whereClause = fmt.Sprintf("%s.%s IN %s", joinAlias, m.foreignFK, getInBinding(len(criterion.Value))) - havingClause = fmt.Sprintf("count(distinct %s.%s) IS %d", joinAlias, m.foreignFK, len(criterion.Value)) - case models.CriterionModifierExcludes: // excludes all of the provided ids // need to use actual join table name for this // .id NOT IN (select . from where . in ) - whereClause = fmt.Sprintf("%[1]s.id NOT IN (SELECT %[3]s.%[2]s from %[3]s where %[3]s.%[4]s in %[5]s)", m.primaryTable, m.primaryFK, m.joinTable, m.foreignFK, getInBinding(len(criterion.Value))) - } + whereClause := fmt.Sprintf("%[1]s.id NOT IN (SELECT %[3]s.%[2]s from %[3]s where %[3]s.%[4]s in %[5]s)", m.primaryTable, m.primaryFK, m.joinTable, m.foreignFK, getInBinding(len(criterion.Excludes))) - f.addWhere(whereClause, args...) - f.addHaving(havingClause) + f.addWhere(whereClause, args...) 
+ } } } } @@ -722,6 +789,28 @@ func (m *countCriterionHandlerBuilder) handler(criterion *models.IntCriterionInp } } +type joinedMultiSumCriterionHandlerBuilder struct { + primaryTable string + foreignTable1 string + joinTable1 string + foreignTable2 string + joinTable2 string + primaryFK string + foreignFK1 string + foreignFK2 string + sum string +} + +func (m *joinedMultiSumCriterionHandlerBuilder) handler(criterion *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if criterion != nil { + clause, args := getJoinedMultiSumCriterionClause(m.primaryTable, m.foreignTable1, m.joinTable1, m.foreignTable2, m.joinTable2, m.primaryFK, m.foreignFK1, m.foreignFK2, m.sum, *criterion) + + f.addWhere(clause, args...) + } + } +} + // handler for StringCriterion for string list fields type stringListCriterionHandlerBuilder struct { // table joining primary and foreign objects @@ -742,6 +831,33 @@ func (m *stringListCriterionHandlerBuilder) handler(criterion *models.StringCrit } } +func studioCriterionHandler(primaryTable string, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if studios == nil { + return + } + + studiosCopy := *studios + switch studiosCopy.Modifier { + case models.CriterionModifierEquals: + studiosCopy.Modifier = models.CriterionModifierIncludesAll + case models.CriterionModifierNotEquals: + studiosCopy.Modifier = models.CriterionModifierExcludes + } + + hh := hierarchicalMultiCriterionHandlerBuilder{ + tx: dbWrapper{}, + + primaryTable: primaryTable, + foreignTable: studioTable, + foreignFK: studioIDColumn, + parentFK: "parent_id", + } + + hh.handler(&studiosCopy)(ctx, f) + } +} + type hierarchicalMultiCriterionHandlerBuilder struct { tx dbWrapper @@ -750,12 +866,20 @@ type hierarchicalMultiCriterionHandlerBuilder struct { foreignFK string parentFK string + childFK string relationsTable string } -func getHierarchicalValues(ctx 
context.Context, tx dbWrapper, values []string, table, relationsTable, parentFK string, depth *int) string { +func getHierarchicalValues(ctx context.Context, tx dbWrapper, values []string, table, relationsTable, parentFK string, childFK string, depth *int) (string, error) { var args []interface{} + if parentFK == "" { + parentFK = "parent_id" + } + if childFK == "" { + childFK = "child_id" + } + depthVal := 0 if depth != nil { depthVal = *depth @@ -777,7 +901,7 @@ func getHierarchicalValues(ctx context.Context, tx dbWrapper, values []string, t } if valid { - return "VALUES" + strings.Join(valuesClauses, ",") + return "VALUES" + strings.Join(valuesClauses, ","), nil } } @@ -797,13 +921,14 @@ func getHierarchicalValues(ctx context.Context, tx dbWrapper, values []string, t "inBinding": getInBinding(inCount), "recursiveSelect": "", "parentFK": parentFK, + "childFK": childFK, "depthCondition": depthCondition, "unionClause": "", } if relationsTable != "" { - withClauseMap["recursiveSelect"] = utils.StrFormat(`SELECT p.root_id, c.child_id, depth + 1 FROM {relationsTable} AS c -INNER JOIN items as p ON c.parent_id = p.item_id + withClauseMap["recursiveSelect"] = utils.StrFormat(`SELECT p.root_id, c.{childFK}, depth + 1 FROM {relationsTable} AS c +INNER JOIN items as p ON c.{parentFK} = p.item_id `, withClauseMap) } else { withClauseMap["recursiveSelect"] = utils.StrFormat(`SELECT p.root_id, c.id, depth + 1 FROM {table} as c @@ -828,15 +953,13 @@ WHERE id in {inBinding} var valuesClause string err := tx.Get(ctx, &valuesClause, query, args...) 
if err != nil { - logger.Error(err) - // return record which never matches so we don't have to handle error here - return "VALUES(NULL, NULL)" + return "", fmt.Errorf("failed to get hierarchical values: %w", err) } - return valuesClause + return valuesClause, nil } -func addHierarchicalConditionClauses(f *filterBuilder, criterion *models.HierarchicalMultiCriterionInput, table, idColumn string) { +func addHierarchicalConditionClauses(f *filterBuilder, criterion models.HierarchicalMultiCriterionInput, table, idColumn string) { switch criterion.Modifier { case models.CriterionModifierIncludes: f.addWhere(fmt.Sprintf("%s.%s IS NOT NULL", table, idColumn)) @@ -848,9 +971,18 @@ func addHierarchicalConditionClauses(f *filterBuilder, criterion *models.Hierarc } } -func (m *hierarchicalMultiCriterionHandlerBuilder) handler(criterion *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { +func (m *hierarchicalMultiCriterionHandlerBuilder) handler(c *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { - if criterion != nil { + if c != nil { + // make a copy so we don't modify the original + criterion := *c + + // don't support equals/not equals + if criterion.Modifier == models.CriterionModifierEquals || criterion.Modifier == models.CriterionModifierNotEquals { + f.setError(fmt.Errorf("modifier %s is not supported for hierarchical multi criterion", criterion.Modifier)) + return + } + if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { var notClause string if criterion.Modifier == models.CriterionModifierNotNull { @@ -865,19 +997,40 @@ func (m *hierarchicalMultiCriterionHandlerBuilder) handler(criterion *models.Hie return } - if len(criterion.Value) == 0 { + if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { return } - valuesClause := getHierarchicalValues(ctx, m.tx, criterion.Value, m.foreignTable, m.relationsTable, m.parentFK, 
criterion.Depth) + // combine excludes if excludes modifier is selected + if criterion.Modifier == models.CriterionModifierExcludes { + criterion.Modifier = models.CriterionModifierIncludesAll + criterion.Excludes = append(criterion.Excludes, criterion.Value...) + criterion.Value = nil + } + + if len(criterion.Value) > 0 { + valuesClause, err := getHierarchicalValues(ctx, m.tx, criterion.Value, m.foreignTable, m.relationsTable, m.parentFK, m.childFK, criterion.Depth) + if err != nil { + f.setError(err) + return + } + + switch criterion.Modifier { + case models.CriterionModifierIncludes: + f.addWhere(fmt.Sprintf("%s.%s IN (SELECT column2 FROM (%s))", m.primaryTable, m.foreignFK, valuesClause)) + case models.CriterionModifierIncludesAll: + f.addWhere(fmt.Sprintf("%s.%s IN (SELECT column2 FROM (%s))", m.primaryTable, m.foreignFK, valuesClause)) + f.addHaving(fmt.Sprintf("count(distinct %s.%s) IS %d", m.primaryTable, m.foreignFK, len(criterion.Value))) + } + } + + if len(criterion.Excludes) > 0 { + valuesClause, err := getHierarchicalValues(ctx, m.tx, criterion.Excludes, m.foreignTable, m.relationsTable, m.parentFK, m.childFK, criterion.Depth) + if err != nil { + f.setError(err) + return + } - switch criterion.Modifier { - case models.CriterionModifierIncludes: - f.addWhere(fmt.Sprintf("%s.%s IN (SELECT column2 FROM (%s))", m.primaryTable, m.foreignFK, valuesClause)) - case models.CriterionModifierIncludesAll: - f.addWhere(fmt.Sprintf("%s.%s IN (SELECT column2 FROM (%s))", m.primaryTable, m.foreignFK, valuesClause)) - f.addHaving(fmt.Sprintf("count(distinct %s.%s) IS %d", m.primaryTable, m.foreignFK, len(criterion.Value))) - case models.CriterionModifierExcludes: f.addWhere(fmt.Sprintf("%s.%s NOT IN (SELECT column2 FROM (%s)) OR %[1]s.%[2]s IS NULL", m.primaryTable, m.foreignFK, valuesClause)) } } @@ -888,10 +1041,12 @@ type joinedHierarchicalMultiCriterionHandlerBuilder struct { tx dbWrapper primaryTable string + primaryKey string foreignTable string foreignFK string 
parentFK string + childFK string relationsTable string joinAs string @@ -899,10 +1054,45 @@ type joinedHierarchicalMultiCriterionHandlerBuilder struct { primaryFK string } -func (m *joinedHierarchicalMultiCriterionHandlerBuilder) handler(criterion *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { +func (m *joinedHierarchicalMultiCriterionHandlerBuilder) addHierarchicalConditionClauses(f *filterBuilder, criterion models.HierarchicalMultiCriterionInput, table, idColumn string) { + primaryKey := m.primaryKey + if primaryKey == "" { + primaryKey = "id" + } + + switch criterion.Modifier { + case models.CriterionModifierEquals: + // includes only the provided ids + f.addWhere(fmt.Sprintf("%s.%s IS NOT NULL", table, idColumn)) + f.addHaving(fmt.Sprintf("count(distinct %s.%s) IS %d", table, idColumn, len(criterion.Value))) + f.addWhere(utils.StrFormat("(SELECT COUNT(*) FROM {joinTable} s WHERE s.{primaryFK} = {primaryTable}.{primaryKey}) = ?", utils.StrFormatMap{ + "joinTable": m.joinTable, + "primaryFK": m.primaryFK, + "primaryTable": m.primaryTable, + "primaryKey": primaryKey, + }), len(criterion.Value)) + case models.CriterionModifierNotEquals: + f.setError(fmt.Errorf("not equals modifier is not supported for hierarchical multi criterion input")) + default: + addHierarchicalConditionClauses(f, criterion, table, idColumn) + } +} + +func (m *joinedHierarchicalMultiCriterionHandlerBuilder) handler(c *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { - if criterion != nil { + if c != nil { + // make a copy so we don't modify the original + criterion := *c joinAlias := m.joinAs + primaryKey := m.primaryKey + if primaryKey == "" { + primaryKey = "id" + } + + if criterion.Modifier == models.CriterionModifierEquals && criterion.Depth != nil && *criterion.Depth != 0 { + f.setError(fmt.Errorf("depth is not supported for equals modifier in hierarchical multi criterion input")) + return + } if 
criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { var notClause string @@ -910,7 +1100,7 @@ func (m *joinedHierarchicalMultiCriterionHandlerBuilder) handler(criterion *mode notClause = "NOT" } - f.addLeftJoin(m.joinTable, joinAlias, fmt.Sprintf("%s.%s = %s.id", joinAlias, m.primaryFK, m.primaryTable)) + f.addLeftJoin(m.joinTable, joinAlias, fmt.Sprintf("%s.%s = %s.%s", joinAlias, m.primaryFK, m.primaryTable, primaryKey)) f.addWhere(utils.StrFormat("{table}.{column} IS {not} NULL", utils.StrFormatMap{ "table": joinAlias, @@ -920,25 +1110,144 @@ func (m *joinedHierarchicalMultiCriterionHandlerBuilder) handler(criterion *mode return } - if len(criterion.Value) == 0 { + // combine excludes if excludes modifier is selected + if criterion.Modifier == models.CriterionModifierExcludes { + criterion.Modifier = models.CriterionModifierIncludesAll + criterion.Excludes = append(criterion.Excludes, criterion.Value...) + criterion.Value = nil + } + + if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { return } - valuesClause := getHierarchicalValues(ctx, m.tx, criterion.Value, m.foreignTable, m.relationsTable, m.parentFK, criterion.Depth) + if len(criterion.Value) > 0 { + valuesClause, err := getHierarchicalValues(ctx, m.tx, criterion.Value, m.foreignTable, m.relationsTable, m.parentFK, m.childFK, criterion.Depth) + if err != nil { + f.setError(err) + return + } - joinTable := utils.StrFormat(`( - SELECT j.*, d.column1 AS root_id, d.column2 AS item_id FROM {joinTable} AS j - INNER JOIN ({valuesClause}) AS d ON j.{foreignFK} = d.column2 -) -`, utils.StrFormatMap{ - "joinTable": m.joinTable, - "foreignFK": m.foreignFK, - "valuesClause": valuesClause, - }) + joinTable := utils.StrFormat(`( + SELECT j.*, d.column1 AS root_id, d.column2 AS item_id FROM {joinTable} AS j + INNER JOIN ({valuesClause}) AS d ON j.{foreignFK} = d.column2 + ) + `, utils.StrFormatMap{ + "joinTable": m.joinTable, + "foreignFK": 
m.foreignFK, + "valuesClause": valuesClause, + }) - f.addLeftJoin(joinTable, joinAlias, fmt.Sprintf("%s.%s = %s.id", joinAlias, m.primaryFK, m.primaryTable)) + f.addLeftJoin(joinTable, joinAlias, fmt.Sprintf("%s.%s = %s.%s", joinAlias, m.primaryFK, m.primaryTable, primaryKey)) - addHierarchicalConditionClauses(f, criterion, joinAlias, "root_id") + m.addHierarchicalConditionClauses(f, criterion, joinAlias, "root_id") + } + + if len(criterion.Excludes) > 0 { + valuesClause, err := getHierarchicalValues(ctx, m.tx, criterion.Excludes, m.foreignTable, m.relationsTable, m.parentFK, m.childFK, criterion.Depth) + if err != nil { + f.setError(err) + return + } + + joinTable := utils.StrFormat(`( + SELECT j2.*, e.column1 AS root_id, e.column2 AS item_id FROM {joinTable} AS j2 + INNER JOIN ({valuesClause}) AS e ON j2.{foreignFK} = e.column2 + ) + `, utils.StrFormatMap{ + "joinTable": m.joinTable, + "foreignFK": m.foreignFK, + "valuesClause": valuesClause, + }) + + joinAlias2 := joinAlias + "2" + + f.addLeftJoin(joinTable, joinAlias2, fmt.Sprintf("%s.%s = %s.%s", joinAlias2, m.primaryFK, m.primaryTable, primaryKey)) + + // modify for exclusion + criterionCopy := criterion + criterionCopy.Modifier = models.CriterionModifierExcludes + criterionCopy.Value = c.Excludes + + m.addHierarchicalConditionClauses(f, criterionCopy, joinAlias2, "root_id") + } + } + } +} + +type joinedPerformerTagsHandler struct { + criterion *models.HierarchicalMultiCriterionInput + + primaryTable string // eg scenes + joinTable string // eg performers_scenes + joinPrimaryKey string // eg scene_id +} + +func (h *joinedPerformerTagsHandler) handle(ctx context.Context, f *filterBuilder) { + tags := h.criterion + + if tags != nil { + criterion := tags.CombineExcludes() + + // validate the modifier + switch criterion.Modifier { + case models.CriterionModifierIncludesAll, models.CriterionModifierIncludes, models.CriterionModifierExcludes, models.CriterionModifierIsNull, models.CriterionModifierNotNull: + // 
valid + default: + f.setError(fmt.Errorf("invalid modifier %s for performer tags", criterion.Modifier)) + } + + strFormatMap := utils.StrFormatMap{ + "primaryTable": h.primaryTable, + "joinTable": h.joinTable, + "joinPrimaryKey": h.joinPrimaryKey, + "inBinding": getInBinding(len(criterion.Value)), + } + + if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { + var notClause string + if criterion.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addLeftJoin(h.joinTable, "", utils.StrFormat("{primaryTable}.id = {joinTable}.{joinPrimaryKey}", strFormatMap)) + f.addLeftJoin("performers_tags", "", utils.StrFormat("{joinTable}.performer_id = performers_tags.performer_id", strFormatMap)) + + f.addWhere(fmt.Sprintf("performers_tags.tag_id IS %s NULL", notClause)) + return + } + + if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { + return + } + + if len(criterion.Value) > 0 { + valuesClause, err := getHierarchicalValues(ctx, dbWrapper{}, criterion.Value, tagTable, "tags_relations", "", "", criterion.Depth) + if err != nil { + f.setError(err) + return + } + + f.addWith(utils.StrFormat(`performer_tags AS ( +SELECT ps.{joinPrimaryKey} as primaryID, t.column1 AS root_tag_id FROM {joinTable} ps +INNER JOIN performers_tags pt ON pt.performer_id = ps.performer_id +INNER JOIN (`+valuesClause+`) t ON t.column2 = pt.tag_id +)`, strFormatMap)) + + f.addLeftJoin("performer_tags", "", utils.StrFormat("performer_tags.primaryID = {primaryTable}.id", strFormatMap)) + + addHierarchicalConditionClauses(f, criterion, "performer_tags", "root_tag_id") + } + + if len(criterion.Excludes) > 0 { + valuesClause, err := getHierarchicalValues(ctx, dbWrapper{}, criterion.Excludes, tagTable, "tags_relations", "", "", criterion.Depth) + if err != nil { + f.setError(err) + return + } + + clause := utils.StrFormat("{primaryTable}.id NOT IN (SELECT {joinTable}.{joinPrimaryKey} FROM {joinTable} INNER JOIN 
performers_tags ON {joinTable}.performer_id = performers_tags.performer_id WHERE performers_tags.tag_id IN (SELECT column2 FROM (%s)))", strFormatMap) + f.addWhere(fmt.Sprintf(clause, valuesClause)) } } } diff --git a/pkg/sqlite/gallery.go b/pkg/sqlite/gallery.go index 590586b94..2e857cc34 100644 --- a/pkg/sqlite/gallery.go +++ b/pkg/sqlite/gallery.go @@ -670,7 +670,7 @@ func (qb *GalleryStore) makeFilter(ctx context.Context, galleryFilter *models.Ga query.handleCriterion(ctx, galleryPerformersCriterionHandler(qb, galleryFilter.Performers)) query.handleCriterion(ctx, galleryPerformerCountCriterionHandler(qb, galleryFilter.PerformerCount)) query.handleCriterion(ctx, hasChaptersCriterionHandler(galleryFilter.HasChapters)) - query.handleCriterion(ctx, galleryStudioCriterionHandler(qb, galleryFilter.Studios)) + query.handleCriterion(ctx, studioCriterionHandler(galleryTable, galleryFilter.Studios)) query.handleCriterion(ctx, galleryPerformerTagsCriterionHandler(qb, galleryFilter.PerformerTags)) query.handleCriterion(ctx, galleryAverageResolutionCriterionHandler(qb, galleryFilter.AverageResolution)) query.handleCriterion(ctx, galleryImageCountCriterionHandler(qb, galleryFilter.ImageCount)) @@ -968,51 +968,12 @@ func hasChaptersCriterionHandler(hasChapters *string) criterionHandlerFunc { } } -func galleryStudioCriterionHandler(qb *GalleryStore, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - h := hierarchicalMultiCriterionHandlerBuilder{ - tx: qb.tx, - - primaryTable: galleryTable, - foreignTable: studioTable, - foreignFK: studioIDColumn, - parentFK: "parent_id", - } - - return h.handler(studios) -} - -func galleryPerformerTagsCriterionHandler(qb *GalleryStore, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if tags != nil { - if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { - var notClause string - if 
tags.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } - - f.addLeftJoin("performers_galleries", "", "galleries.id = performers_galleries.gallery_id") - f.addLeftJoin("performers_tags", "", "performers_galleries.performer_id = performers_tags.performer_id") - - f.addWhere(fmt.Sprintf("performers_tags.tag_id IS %s NULL", notClause)) - return - } - - if len(tags.Value) == 0 { - return - } - - valuesClause := getHierarchicalValues(ctx, qb.tx, tags.Value, tagTable, "tags_relations", "", tags.Depth) - - f.addWith(`performer_tags AS ( -SELECT pg.gallery_id, t.column1 AS root_tag_id FROM performers_galleries pg -INNER JOIN performers_tags pt ON pt.performer_id = pg.performer_id -INNER JOIN (` + valuesClause + `) t ON t.column2 = pt.tag_id -)`) - - f.addLeftJoin("performer_tags", "", "performer_tags.gallery_id = galleries.id") - - addHierarchicalConditionClauses(f, tags, "performer_tags", "root_tag_id") - } +func galleryPerformerTagsCriterionHandler(qb *GalleryStore, tags *models.HierarchicalMultiCriterionInput) criterionHandler { + return &joinedPerformerTagsHandler{ + criterion: tags, + primaryTable: galleryTable, + joinTable: performersGalleriesTable, + joinPrimaryKey: galleryIDColumn, } } @@ -1128,7 +1089,7 @@ func (qb *GalleryStore) setGallerySort(query *queryBuilder, findFilter *models.F // special handling for path addFileTable() addFolderTable() - query.sortAndPagination += fmt.Sprintf(" ORDER BY folders.path %s, file_folder.path %[1]s, files.basename %[1]s", direction) + query.sortAndPagination += fmt.Sprintf(" ORDER BY COALESCE(folders.path, '') || COALESCE(file_folder.path, '') || COALESCE(files.basename, '') COLLATE NATURAL_CI %s", direction) case "file_mod_time": sort = "mod_time" addFileTable() @@ -1136,10 +1097,13 @@ func (qb *GalleryStore) setGallerySort(query *queryBuilder, findFilter *models.F case "title": addFileTable() addFolderTable() - query.sortAndPagination += " ORDER BY COALESCE(galleries.title, files.basename, 
basename(COALESCE(folders.path, ''))) COLLATE NATURAL_CS " + direction + ", file_folder.path " + direction + query.sortAndPagination += " ORDER BY COALESCE(galleries.title, files.basename, basename(COALESCE(folders.path, ''))) COLLATE NATURAL_CI " + direction + ", file_folder.path COLLATE NATURAL_CI " + direction default: query.sortAndPagination += getSort(sort, direction, "galleries") } + + // Whatever the sorting, always use title/id as a final sort + query.sortAndPagination += ", COALESCE(galleries.title, galleries.id) COLLATE NATURAL_CI ASC" } func (qb *GalleryStore) filesRepository() *filesRepository { diff --git a/pkg/sqlite/gallery_test.go b/pkg/sqlite/gallery_test.go index 6d145cb1b..bad75d035 100644 --- a/pkg/sqlite/gallery_test.go +++ b/pkg/sqlite/gallery_test.go @@ -1945,154 +1945,369 @@ func TestGalleryQueryIsMissingDate(t *testing.T) { } func TestGalleryQueryPerformers(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Gallery - performerCriterion := models.MultiCriterionInput{ - Value: []string{ - strconv.Itoa(performerIDs[performerIdxWithGallery]), - strconv.Itoa(performerIDs[performerIdx1WithGallery]), + tests := []struct { + name string + filter models.MultiCriterionInput + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "includes", + models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(performerIDs[performerIdxWithGallery]), + strconv.Itoa(performerIDs[performerIdx1WithGallery]), + }, + Modifier: models.CriterionModifierIncludes, }, - Modifier: models.CriterionModifierIncludes, - } - - galleryFilter := models.GalleryFilterType{ - Performers: &performerCriterion, - } - - galleries := queryGallery(ctx, t, sqb, &galleryFilter, nil) - - assert.Len(t, galleries, 2) - - // ensure ids are correct - for _, gallery := range galleries { - assert.True(t, gallery.ID == galleryIDs[galleryIdxWithPerformer] || gallery.ID == galleryIDs[galleryIdxWithTwoPerformers]) - } - - performerCriterion = 
models.MultiCriterionInput{ - Value: []string{ - strconv.Itoa(performerIDs[performerIdx1WithGallery]), - strconv.Itoa(performerIDs[performerIdx2WithGallery]), + []int{ + galleryIdxWithPerformer, + galleryIdxWithTwoPerformers, }, - Modifier: models.CriterionModifierIncludesAll, - } - - galleries = queryGallery(ctx, t, sqb, &galleryFilter, nil) - - assert.Len(t, galleries, 1) - assert.Equal(t, galleryIDs[galleryIdxWithTwoPerformers], galleries[0].ID) - - performerCriterion = models.MultiCriterionInput{ - Value: []string{ - strconv.Itoa(performerIDs[performerIdx1WithGallery]), + []int{ + galleryIdxWithImage, }, - Modifier: models.CriterionModifierExcludes, - } + false, + }, + { + "includes all", + models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(performerIDs[performerIdx1WithGallery]), + strconv.Itoa(performerIDs[performerIdx2WithGallery]), + }, + Modifier: models.CriterionModifierIncludesAll, + }, + []int{ + galleryIdxWithTwoPerformers, + }, + []int{ + galleryIdxWithPerformer, + }, + false, + }, + { + "excludes", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierExcludes, + Value: []string{strconv.Itoa(tagIDs[performerIdx1WithGallery])}, + }, + nil, + []int{galleryIdxWithTwoPerformers}, + false, + }, + { + "is null", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + []int{galleryIdxWithTag}, + []int{ + galleryIdxWithPerformer, + galleryIdxWithTwoPerformers, + galleryIdxWithPerformerTwoTags, + }, + false, + }, + { + "not null", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + []int{ + galleryIdxWithPerformer, + galleryIdxWithTwoPerformers, + galleryIdxWithPerformerTwoTags, + }, + []int{galleryIdxWithTag}, + false, + }, + { + "equals", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: []string{ + strconv.Itoa(tagIDs[performerIdx1WithGallery]), + strconv.Itoa(tagIDs[performerIdx2WithGallery]), + }, + }, + []int{galleryIdxWithTwoPerformers}, + 
[]int{ + galleryIdxWithThreePerformers, + }, + false, + }, + { + "not equals", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierNotEquals, + Value: []string{ + strconv.Itoa(tagIDs[performerIdx1WithGallery]), + strconv.Itoa(tagIDs[performerIdx2WithGallery]), + }, + }, + nil, + nil, + true, + }, + } - q := getGalleryStringValue(galleryIdxWithTwoPerformers, titleField) - findFilter := models.FindFilterType{ - Q: &q, - } + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) - galleries = queryGallery(ctx, t, sqb, &galleryFilter, &findFilter) - assert.Len(t, galleries, 0) + results, _, err := db.Gallery.Query(ctx, &models.GalleryFilterType{ + Performers: &tt.filter, + }, nil) + if (err != nil) != tt.wantErr { + t.Errorf("GalleryStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } - return nil - }) + ids := galleriesToIDs(results) + + include := indexesToIDs(galleryIDs, tt.includeIdxs) + exclude := indexesToIDs(galleryIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } } func TestGalleryQueryTags(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Gallery - tagCriterion := models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdxWithGallery]), - strconv.Itoa(tagIDs[tagIdx1WithGallery]), + tests := []struct { + name string + filter models.HierarchicalMultiCriterionInput + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "includes", + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithGallery]), + strconv.Itoa(tagIDs[tagIdx1WithGallery]), + }, + Modifier: models.CriterionModifierIncludes, }, - Modifier: models.CriterionModifierIncludes, - } - - galleryFilter := models.GalleryFilterType{ - Tags: &tagCriterion, - } - - galleries := queryGallery(ctx, t, sqb, 
&galleryFilter, nil) - assert.Len(t, galleries, 2) - - // ensure ids are correct - for _, gallery := range galleries { - assert.True(t, gallery.ID == galleryIDs[galleryIdxWithTag] || gallery.ID == galleryIDs[galleryIdxWithTwoTags]) - } - - tagCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdx1WithGallery]), - strconv.Itoa(tagIDs[tagIdx2WithGallery]), + []int{ + galleryIdxWithTag, + galleryIdxWithTwoTags, }, - Modifier: models.CriterionModifierIncludesAll, - } - - galleries = queryGallery(ctx, t, sqb, &galleryFilter, nil) - - assert.Len(t, galleries, 1) - assert.Equal(t, galleryIDs[galleryIdxWithTwoTags], galleries[0].ID) - - tagCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdx1WithGallery]), + []int{ + galleryIdxWithImage, }, - Modifier: models.CriterionModifierExcludes, - } + false, + }, + { + "includes all", + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithGallery]), + strconv.Itoa(tagIDs[tagIdx2WithGallery]), + }, + Modifier: models.CriterionModifierIncludesAll, + }, + []int{ + galleryIdxWithTwoTags, + }, + []int{ + galleryIdxWithTag, + }, + false, + }, + { + "excludes", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierExcludes, + Value: []string{strconv.Itoa(tagIDs[tagIdx1WithGallery])}, + }, + nil, + []int{galleryIdxWithTwoTags}, + false, + }, + { + "is null", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + []int{galleryIdx1WithPerformer}, + []int{ + galleryIdxWithTag, + galleryIdxWithTwoTags, + galleryIdxWithThreeTags, + }, + false, + }, + { + "not null", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + []int{ + galleryIdxWithTag, + galleryIdxWithTwoTags, + galleryIdxWithThreeTags, + }, + []int{galleryIdx1WithPerformer}, + false, + }, + { + "equals", + models.HierarchicalMultiCriterionInput{ + 
Modifier: models.CriterionModifierEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithGallery]), + strconv.Itoa(tagIDs[tagIdx2WithGallery]), + }, + }, + []int{galleryIdxWithTwoTags}, + []int{ + galleryIdxWithThreeTags, + }, + false, + }, + { + "not equals", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithGallery]), + strconv.Itoa(tagIDs[tagIdx2WithGallery]), + }, + }, + nil, + nil, + true, + }, + } - q := getGalleryStringValue(galleryIdxWithTwoTags, titleField) - findFilter := models.FindFilterType{ - Q: &q, - } + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) - galleries = queryGallery(ctx, t, sqb, &galleryFilter, &findFilter) - assert.Len(t, galleries, 0) + results, _, err := db.Gallery.Query(ctx, &models.GalleryFilterType{ + Tags: &tt.filter, + }, nil) + if (err != nil) != tt.wantErr { + t.Errorf("GalleryStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } - return nil - }) + ids := galleriesToIDs(results) + + include := indexesToIDs(imageIDs, tt.includeIdxs) + exclude := indexesToIDs(imageIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } } func TestGalleryQueryStudio(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Gallery - studioCriterion := models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(studioIDs[studioIdxWithGallery]), + tests := []struct { + name string + q string + studioCriterion models.HierarchicalMultiCriterionInput + expectedIDs []int + wantErr bool + }{ + { + "includes", + "", + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithGallery]), + }, + Modifier: models.CriterionModifierIncludes, }, - Modifier: models.CriterionModifierIncludes, - } - - galleryFilter 
:= models.GalleryFilterType{ - Studios: &studioCriterion, - } - - galleries := queryGallery(ctx, t, sqb, &galleryFilter, nil) - - assert.Len(t, galleries, 1) - - // ensure id is correct - assert.Equal(t, galleryIDs[galleryIdxWithStudio], galleries[0].ID) - - studioCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(studioIDs[studioIdxWithGallery]), + []int{galleryIDs[galleryIdxWithStudio]}, + false, + }, + { + "excludes", + getGalleryStringValue(galleryIdxWithStudio, titleField), + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithGallery]), + }, + Modifier: models.CriterionModifierExcludes, }, - Modifier: models.CriterionModifierExcludes, - } + []int{}, + false, + }, + { + "excludes includes null", + getGalleryStringValue(galleryIdxWithImage, titleField), + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithGallery]), + }, + Modifier: models.CriterionModifierExcludes, + }, + []int{galleryIDs[galleryIdxWithImage]}, + false, + }, + { + "equals", + "", + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithGallery]), + }, + Modifier: models.CriterionModifierEquals, + }, + []int{galleryIDs[galleryIdxWithStudio]}, + false, + }, + { + "not equals", + getGalleryStringValue(galleryIdxWithStudio, titleField), + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithGallery]), + }, + Modifier: models.CriterionModifierNotEquals, + }, + []int{}, + false, + }, + } - q := getGalleryStringValue(galleryIdxWithStudio, titleField) - findFilter := models.FindFilterType{ - Q: &q, - } + qb := db.Gallery - galleries = queryGallery(ctx, t, sqb, &galleryFilter, &findFilter) - assert.Len(t, galleries, 0) + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + studioCriterion := tt.studioCriterion - return nil - }) + galleryFilter 
:= models.GalleryFilterType{ + Studios: &studioCriterion, + } + + var findFilter *models.FindFilterType + if tt.q != "" { + findFilter = &models.FindFilterType{ + Q: &tt.q, + } + } + + gallerys := queryGallery(ctx, t, qb, &galleryFilter, findFilter) + + assert.ElementsMatch(t, galleriesToIDs(gallerys), tt.expectedIDs) + }) + } } func TestGalleryQueryStudioDepth(t *testing.T) { @@ -2157,81 +2372,198 @@ func TestGalleryQueryStudioDepth(t *testing.T) { } func TestGalleryQueryPerformerTags(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Gallery - tagCriterion := models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdxWithPerformer]), - strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + allDepth := -1 + + tests := []struct { + name string + findFilter *models.FindFilterType + filter *models.GalleryFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "includes", + nil, + &models.GalleryFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithPerformer]), + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + }, + Modifier: models.CriterionModifierIncludes, + }, }, - Modifier: models.CriterionModifierIncludes, - } - - galleryFilter := models.GalleryFilterType{ - PerformerTags: &tagCriterion, - } - - galleries := queryGallery(ctx, t, sqb, &galleryFilter, nil) - assert.Len(t, galleries, 2) - - // ensure ids are correct - for _, gallery := range galleries { - assert.True(t, gallery.ID == galleryIDs[galleryIdxWithPerformerTag] || gallery.ID == galleryIDs[galleryIdxWithPerformerTwoTags]) - } - - tagCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdx1WithPerformer]), - strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + []int{ + galleryIdxWithPerformerTag, + galleryIdxWithPerformerTwoTags, + galleryIdxWithTwoPerformerTag, }, - Modifier: models.CriterionModifierIncludesAll, - } - - galleries = 
queryGallery(ctx, t, sqb, &galleryFilter, nil) - - assert.Len(t, galleries, 1) - assert.Equal(t, galleryIDs[galleryIdxWithPerformerTwoTags], galleries[0].ID) - - tagCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + []int{ + galleryIdxWithPerformer, }, - Modifier: models.CriterionModifierExcludes, - } + false, + }, + { + "includes sub-tags", + nil, + &models.GalleryFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithParentAndChild]), + }, + Depth: &allDepth, + Modifier: models.CriterionModifierIncludes, + }, + }, + []int{ + galleryIdxWithPerformerParentTag, + }, + []int{ + galleryIdxWithPerformer, + galleryIdxWithPerformerTag, + galleryIdxWithPerformerTwoTags, + galleryIdxWithTwoPerformerTag, + }, + false, + }, + { + "includes all", + nil, + &models.GalleryFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + Modifier: models.CriterionModifierIncludesAll, + }, + }, + []int{ + galleryIdxWithPerformerTwoTags, + }, + []int{ + galleryIdxWithPerformer, + galleryIdxWithPerformerTag, + galleryIdxWithTwoPerformerTag, + }, + false, + }, + { + "excludes performer tag tagIdx2WithPerformer", + nil, + &models.GalleryFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierExcludes, + Value: []string{strconv.Itoa(tagIDs[tagIdx2WithPerformer])}, + }, + }, + nil, + []int{galleryIdxWithTwoPerformerTag}, + false, + }, + { + "excludes sub-tags", + nil, + &models.GalleryFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithParentAndChild]), + }, + Depth: &allDepth, + Modifier: models.CriterionModifierExcludes, + }, + }, + []int{ + galleryIdxWithPerformer, + galleryIdxWithPerformerTag, + 
galleryIdxWithPerformerTwoTags, + galleryIdxWithTwoPerformerTag, + }, + []int{ + galleryIdxWithPerformerParentTag, + }, + false, + }, + { + "is null", + nil, + &models.GalleryFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + }, + []int{galleryIdx1WithImage}, + []int{galleryIdxWithPerformerTag}, + false, + }, + { + "not null", + nil, + &models.GalleryFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + }, + []int{galleryIdxWithPerformerTag}, + []int{galleryIdx1WithImage}, + false, + }, + { + "equals", + nil, + &models.GalleryFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + }, + }, + nil, + nil, + true, + }, + { + "not equals", + nil, + &models.GalleryFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + }, + }, + nil, + nil, + true, + }, + } - q := getGalleryStringValue(galleryIdxWithPerformerTwoTags, titleField) - findFilter := models.FindFilterType{ - Q: &q, - } + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) - galleries = queryGallery(ctx, t, sqb, &galleryFilter, &findFilter) - assert.Len(t, galleries, 0) + results, _, err := db.Gallery.Query(ctx, tt.filter, tt.findFilter) + if (err != nil) != tt.wantErr { + t.Errorf("ImageStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } - tagCriterion = models.HierarchicalMultiCriterionInput{ - Modifier: models.CriterionModifierIsNull, - } - q = getGalleryStringValue(galleryIdx1WithImage, titleField) + ids := galleriesToIDs(results) - galleries = queryGallery(ctx, t, sqb, &galleryFilter, &findFilter) - assert.Len(t, 
galleries, 1) - assert.Equal(t, galleryIDs[galleryIdx1WithImage], galleries[0].ID) + include := indexesToIDs(galleryIDs, tt.includeIdxs) + exclude := indexesToIDs(galleryIDs, tt.excludeIdxs) - q = getGalleryStringValue(galleryIdxWithPerformerTag, titleField) - galleries = queryGallery(ctx, t, sqb, &galleryFilter, &findFilter) - assert.Len(t, galleries, 0) - - tagCriterion.Modifier = models.CriterionModifierNotNull - - galleries = queryGallery(ctx, t, sqb, &galleryFilter, &findFilter) - assert.Len(t, galleries, 1) - assert.Equal(t, galleryIDs[galleryIdxWithPerformerTag], galleries[0].ID) - - q = getGalleryStringValue(galleryIdx1WithImage, titleField) - galleries = queryGallery(ctx, t, sqb, &galleryFilter, &findFilter) - assert.Len(t, galleries, 0) - - return nil - }) + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } } func TestGalleryQueryTagCount(t *testing.T) { diff --git a/pkg/sqlite/image.go b/pkg/sqlite/image.go index d5bb4e852..9dee5ed28 100644 --- a/pkg/sqlite/image.go +++ b/pkg/sqlite/image.go @@ -241,7 +241,7 @@ func (qb *ImageStore) Update(ctx context.Context, updatedObject *models.Image) e if updatedObject.Files.Loaded() { fileIDs := make([]file.ID, len(updatedObject.Files.List())) for i, f := range updatedObject.Files.List() { - fileIDs[i] = f.ID + fileIDs[i] = f.Base().ID } if err := imagesFilesTableMgr.replaceJoins(ctx, updatedObject.ID, fileIDs); err != nil { @@ -360,7 +360,7 @@ func (qb *ImageStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo return ret, nil } -func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]*file.ImageFile, error) { +func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]file.File, error) { fileIDs, err := qb.filesRepository().get(ctx, id) if err != nil { return nil, err @@ -372,16 +372,7 @@ func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]*file.ImageFile, return nil, err } - ret := 
make([]*file.ImageFile, len(files)) - for i, f := range files { - var ok bool - ret[i], ok = f.(*file.ImageFile) - if !ok { - return nil, fmt.Errorf("expected file to be *file.ImageFile not %T", f) - } - } - - return ret, nil + return files, nil } func (qb *ImageStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) { @@ -513,6 +504,19 @@ func (qb *ImageStore) CountByGalleryID(ctx context.Context, galleryID int) (int, return count(ctx, q) } +func (qb *ImageStore) OCountByPerformerID(ctx context.Context, performerID int) (int, error) { + table := qb.table() + joinTable := performersImagesJoinTable + q := dialect.Select(goqu.COALESCE(goqu.SUM("o_counter"), 0)).From(table).InnerJoin(joinTable, goqu.On(table.Col(idColumn).Eq(joinTable.Col(imageIDColumn)))).Where(joinTable.Col(performerIDColumn).Eq(performerID)) + + var ret int + if err := querySimple(ctx, q, &ret); err != nil { + return 0, err + } + + return ret, nil +} + func (qb *ImageStore) FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Image, error) { table := qb.table() fileTable := goqu.T(fileTable) @@ -665,7 +669,7 @@ func (qb *ImageStore) makeFilter(ctx context.Context, imageFilter *models.ImageF query.handleCriterion(ctx, imageGalleriesCriterionHandler(qb, imageFilter.Galleries)) query.handleCriterion(ctx, imagePerformersCriterionHandler(qb, imageFilter.Performers)) query.handleCriterion(ctx, imagePerformerCountCriterionHandler(qb, imageFilter.PerformerCount)) - query.handleCriterion(ctx, imageStudioCriterionHandler(qb, imageFilter.Studios)) + query.handleCriterion(ctx, studioCriterionHandler(imageTable, imageFilter.Studios)) query.handleCriterion(ctx, imagePerformerTagsCriterionHandler(qb, imageFilter.PerformerTags)) query.handleCriterion(ctx, imagePerformerFavoriteCriterionHandler(imageFilter.PerformerFavorite)) query.handleCriterion(ctx, timestampCriterionHandler(imageFilter.CreatedAt, "images.created_at")) @@ -942,51 +946,12 @@ GROUP BY performers_images.image_id 
HAVING SUM(performers.favorite) = 0)`, "nofa } } -func imageStudioCriterionHandler(qb *ImageStore, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - h := hierarchicalMultiCriterionHandlerBuilder{ - tx: qb.tx, - - primaryTable: imageTable, - foreignTable: studioTable, - foreignFK: studioIDColumn, - parentFK: "parent_id", - } - - return h.handler(studios) -} - -func imagePerformerTagsCriterionHandler(qb *ImageStore, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if tags != nil { - if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { - var notClause string - if tags.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } - - f.addLeftJoin("performers_images", "", "images.id = performers_images.image_id") - f.addLeftJoin("performers_tags", "", "performers_images.performer_id = performers_tags.performer_id") - - f.addWhere(fmt.Sprintf("performers_tags.tag_id IS %s NULL", notClause)) - return - } - - if len(tags.Value) == 0 { - return - } - - valuesClause := getHierarchicalValues(ctx, qb.tx, tags.Value, tagTable, "tags_relations", "", tags.Depth) - - f.addWith(`performer_tags AS ( -SELECT pi.image_id, t.column1 AS root_tag_id FROM performers_images pi -INNER JOIN performers_tags pt ON pt.performer_id = pi.performer_id -INNER JOIN (` + valuesClause + `) t ON t.column2 = pt.tag_id -)`) - - f.addLeftJoin("performer_tags", "", "performer_tags.image_id = images.id") - - addHierarchicalConditionClauses(f, tags, "performer_tags", "root_tag_id") - } +func imagePerformerTagsCriterionHandler(qb *ImageStore, tags *models.HierarchicalMultiCriterionInput) criterionHandler { + return &joinedPerformerTagsHandler{ + criterion: tags, + primaryTable: imageTable, + joinTable: performersImagesTable, + joinPrimaryKey: imageIDColumn, } } @@ -1026,7 +991,7 @@ func (qb *ImageStore) setImageSortAndPagination(q *queryBuilder, 
findFilter *mod case "path": addFilesJoin() addFolderJoin() - sortClause = " ORDER BY folders.path " + direction + ", files.basename " + direction + sortClause = " ORDER BY COALESCE(folders.path, '') || COALESCE(files.basename, '') COLLATE NATURAL_CI " + direction case "file_count": sortClause = getCountSort(imageTable, imagesFilesTable, imageIDColumn, direction) case "tag_count": @@ -1039,10 +1004,13 @@ func (qb *ImageStore) setImageSortAndPagination(q *queryBuilder, findFilter *mod case "title": addFilesJoin() addFolderJoin() - sortClause = " ORDER BY COALESCE(images.title, files.basename) COLLATE NATURAL_CS " + direction + ", folders.path " + direction + sortClause = " ORDER BY COALESCE(images.title, files.basename) COLLATE NATURAL_CI " + direction + ", folders.path COLLATE NATURAL_CI " + direction default: sortClause = getSort(sort, direction, "images") } + + // Whatever the sorting, always use title/id as a final sort + sortClause += ", COALESCE(images.title, images.id) COLLATE NATURAL_CI ASC" } q.sortAndPagination = sortClause + getPagination(findFilter) diff --git a/pkg/sqlite/image_test.go b/pkg/sqlite/image_test.go index 31f6d4876..3ec159877 100644 --- a/pkg/sqlite/image_test.go +++ b/pkg/sqlite/image_test.go @@ -97,7 +97,7 @@ func Test_imageQueryBuilder_Create(t *testing.T) { Organized: true, OCounter: ocounter, StudioID: &studioIDs[studioIdxWithImage], - Files: models.NewRelatedImageFiles([]*file.ImageFile{ + Files: models.NewRelatedFiles([]file.File{ imageFile.(*file.ImageFile), }), PrimaryFileID: &imageFile.Base().ID, @@ -149,7 +149,7 @@ func Test_imageQueryBuilder_Create(t *testing.T) { var fileIDs []file.ID if tt.newObject.Files.Loaded() { for _, f := range tt.newObject.Files.List() { - fileIDs = append(fileIDs, f.ID) + fileIDs = append(fileIDs, f.Base().ID) } } s := tt.newObject @@ -444,7 +444,7 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) { Organized: true, OCounter: ocounter, StudioID: &studioIDs[studioIdxWithImage], - Files: 
models.NewRelatedImageFiles([]*file.ImageFile{ + Files: models.NewRelatedFiles([]file.File{ makeImageFile(imageIdx1WithGallery), }), CreatedAt: createdAt, @@ -462,7 +462,7 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) { models.Image{ ID: imageIDs[imageIdx1WithGallery], OCounter: getOCounter(imageIdx1WithGallery), - Files: models.NewRelatedImageFiles([]*file.ImageFile{ + Files: models.NewRelatedFiles([]file.File{ makeImageFile(imageIdx1WithGallery), }), GalleryIDs: models.NewRelatedIDs([]int{}), @@ -965,7 +965,7 @@ func makeImageWithID(index int) *models.Image { ret := makeImage(index, true) ret.ID = imageIDs[index] - ret.Files = models.NewRelatedImageFiles([]*file.ImageFile{makeImageFile(index)}) + ret.Files = models.NewRelatedFiles([]file.File{makeImageFile(index)}) return ret } @@ -1868,8 +1868,11 @@ func verifyImagesResolution(t *testing.T, resolution models.ResolutionEnum) { t.Errorf("Error loading primary file: %s", err.Error()) return nil } - - verifyImageResolution(t, image.Files.Primary().Height, resolution) + asFrame, ok := image.Files.Primary().(file.VisualFile) + if !ok { + t.Errorf("Error: Associated primary file of image is not of type VisualFile") + } + verifyImageResolution(t, asFrame.GetHeight(), resolution) } return nil @@ -2121,203 +2124,369 @@ func TestImageQueryGallery(t *testing.T) { } func TestImageQueryPerformers(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Image - performerCriterion := models.MultiCriterionInput{ - Value: []string{ - strconv.Itoa(performerIDs[performerIdxWithImage]), - strconv.Itoa(performerIDs[performerIdx1WithImage]), + tests := []struct { + name string + filter models.MultiCriterionInput + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "includes", + models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(performerIDs[performerIdxWithImage]), + strconv.Itoa(performerIDs[performerIdx1WithImage]), + }, + Modifier: models.CriterionModifierIncludes, }, - Modifier: 
models.CriterionModifierIncludes, - } - - imageFilter := models.ImageFilterType{ - Performers: &performerCriterion, - } - - images := queryImages(ctx, t, sqb, &imageFilter, nil) - assert.Len(t, images, 2) - - // ensure ids are correct - for _, image := range images { - assert.True(t, image.ID == imageIDs[imageIdxWithPerformer] || image.ID == imageIDs[imageIdxWithTwoPerformers]) - } - - performerCriterion = models.MultiCriterionInput{ - Value: []string{ - strconv.Itoa(performerIDs[performerIdx1WithImage]), - strconv.Itoa(performerIDs[performerIdx2WithImage]), + []int{ + imageIdxWithPerformer, + imageIdxWithTwoPerformers, }, - Modifier: models.CriterionModifierIncludesAll, - } - - images = queryImages(ctx, t, sqb, &imageFilter, nil) - assert.Len(t, images, 1) - assert.Equal(t, imageIDs[imageIdxWithTwoPerformers], images[0].ID) - - performerCriterion = models.MultiCriterionInput{ - Value: []string{ - strconv.Itoa(performerIDs[performerIdx1WithImage]), + []int{ + imageIdxWithGallery, }, - Modifier: models.CriterionModifierExcludes, - } + false, + }, + { + "includes all", + models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(performerIDs[performerIdx1WithImage]), + strconv.Itoa(performerIDs[performerIdx2WithImage]), + }, + Modifier: models.CriterionModifierIncludesAll, + }, + []int{ + imageIdxWithTwoPerformers, + }, + []int{ + imageIdxWithPerformer, + }, + false, + }, + { + "excludes", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierExcludes, + Value: []string{strconv.Itoa(tagIDs[performerIdx1WithImage])}, + }, + nil, + []int{imageIdxWithTwoPerformers}, + false, + }, + { + "is null", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + []int{imageIdxWithTag}, + []int{ + imageIdxWithPerformer, + imageIdxWithTwoPerformers, + imageIdxWithPerformerTwoTags, + }, + false, + }, + { + "not null", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + []int{ + imageIdxWithPerformer, + 
imageIdxWithTwoPerformers, + imageIdxWithPerformerTwoTags, + }, + []int{imageIdxWithTag}, + false, + }, + { + "equals", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: []string{ + strconv.Itoa(tagIDs[performerIdx1WithImage]), + strconv.Itoa(tagIDs[performerIdx2WithImage]), + }, + }, + []int{imageIdxWithTwoPerformers}, + []int{ + imageIdxWithThreePerformers, + }, + false, + }, + { + "not equals", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierNotEquals, + Value: []string{ + strconv.Itoa(tagIDs[performerIdx1WithImage]), + strconv.Itoa(tagIDs[performerIdx2WithImage]), + }, + }, + nil, + nil, + true, + }, + } - q := getImageStringValue(imageIdxWithTwoPerformers, titleField) - findFilter := models.FindFilterType{ - Q: &q, - } + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 0) + results, err := db.Image.Query(ctx, models.ImageQueryOptions{ + ImageFilter: &models.ImageFilterType{ + Performers: &tt.filter, + }, + }) + if (err != nil) != tt.wantErr { + t.Errorf("ImageStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } - performerCriterion = models.MultiCriterionInput{ - Modifier: models.CriterionModifierIsNull, - } - q = getImageStringValue(imageIdxWithGallery, titleField) + include := indexesToIDs(imageIDs, tt.includeIdxs) + exclude := indexesToIDs(imageIDs, tt.excludeIdxs) - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 1) - assert.Equal(t, imageIDs[imageIdxWithGallery], images[0].ID) - - q = getImageStringValue(imageIdxWithPerformerTag, titleField) - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 0) - - performerCriterion.Modifier = models.CriterionModifierNotNull - - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 1) - 
assert.Equal(t, imageIDs[imageIdxWithPerformerTag], images[0].ID) - - q = getImageStringValue(imageIdxWithGallery, titleField) - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 0) - - return nil - }) + for _, i := range include { + assert.Contains(results.IDs, i) + } + for _, e := range exclude { + assert.NotContains(results.IDs, e) + } + }) + } } func TestImageQueryTags(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Image - tagCriterion := models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdxWithImage]), - strconv.Itoa(tagIDs[tagIdx1WithImage]), + tests := []struct { + name string + filter models.HierarchicalMultiCriterionInput + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "includes", + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithImage]), + strconv.Itoa(tagIDs[tagIdx1WithImage]), + }, + Modifier: models.CriterionModifierIncludes, }, - Modifier: models.CriterionModifierIncludes, - } - - imageFilter := models.ImageFilterType{ - Tags: &tagCriterion, - } - - images := queryImages(ctx, t, sqb, &imageFilter, nil) - assert.Len(t, images, 2) - - // ensure ids are correct - for _, image := range images { - assert.True(t, image.ID == imageIDs[imageIdxWithTag] || image.ID == imageIDs[imageIdxWithTwoTags]) - } - - tagCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdx1WithImage]), - strconv.Itoa(tagIDs[tagIdx2WithImage]), + []int{ + imageIdxWithTag, + imageIdxWithTwoTags, }, - Modifier: models.CriterionModifierIncludesAll, - } - - images = queryImages(ctx, t, sqb, &imageFilter, nil) - assert.Len(t, images, 1) - assert.Equal(t, imageIDs[imageIdxWithTwoTags], images[0].ID) - - tagCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdx1WithImage]), + []int{ + imageIdxWithGallery, }, - Modifier: models.CriterionModifierExcludes, 
- } + false, + }, + { + "includes all", + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithImage]), + strconv.Itoa(tagIDs[tagIdx2WithImage]), + }, + Modifier: models.CriterionModifierIncludesAll, + }, + []int{ + imageIdxWithTwoTags, + }, + []int{ + imageIdxWithTag, + }, + false, + }, + { + "excludes", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierExcludes, + Value: []string{strconv.Itoa(tagIDs[tagIdx1WithImage])}, + }, + nil, + []int{imageIdxWithTwoTags}, + false, + }, + { + "is null", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + []int{imageIdx1WithPerformer}, + []int{ + imageIdxWithTag, + imageIdxWithTwoTags, + imageIdxWithThreeTags, + }, + false, + }, + { + "not null", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + []int{ + imageIdxWithTag, + imageIdxWithTwoTags, + imageIdxWithThreeTags, + }, + []int{imageIdx1WithPerformer}, + false, + }, + { + "equals", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithImage]), + strconv.Itoa(tagIDs[tagIdx2WithImage]), + }, + }, + []int{imageIdxWithTwoTags}, + []int{ + imageIdxWithThreeTags, + }, + false, + }, + { + "not equals", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithImage]), + strconv.Itoa(tagIDs[tagIdx2WithImage]), + }, + }, + nil, + nil, + true, + }, + } - q := getImageStringValue(imageIdxWithTwoTags, titleField) - findFilter := models.FindFilterType{ - Q: &q, - } + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 0) + results, err := db.Image.Query(ctx, models.ImageQueryOptions{ + ImageFilter: 
&models.ImageFilterType{ + Tags: &tt.filter, + }, + }) + if (err != nil) != tt.wantErr { + t.Errorf("ImageStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } - tagCriterion = models.HierarchicalMultiCriterionInput{ - Modifier: models.CriterionModifierIsNull, - } - q = getImageStringValue(imageIdxWithGallery, titleField) + include := indexesToIDs(imageIDs, tt.includeIdxs) + exclude := indexesToIDs(imageIDs, tt.excludeIdxs) - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 1) - assert.Equal(t, imageIDs[imageIdxWithGallery], images[0].ID) - - q = getImageStringValue(imageIdxWithTag, titleField) - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 0) - - tagCriterion.Modifier = models.CriterionModifierNotNull - - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 1) - assert.Equal(t, imageIDs[imageIdxWithTag], images[0].ID) - - q = getImageStringValue(imageIdxWithGallery, titleField) - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 0) - - return nil - }) + for _, i := range include { + assert.Contains(results.IDs, i) + } + for _, e := range exclude { + assert.NotContains(results.IDs, e) + } + }) + } } func TestImageQueryStudio(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Image - studioCriterion := models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(studioIDs[studioIdxWithImage]), + tests := []struct { + name string + q string + studioCriterion models.HierarchicalMultiCriterionInput + expectedIDs []int + wantErr bool + }{ + { + "includes", + "", + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithImage]), + }, + Modifier: models.CriterionModifierIncludes, }, - Modifier: models.CriterionModifierIncludes, - } - - imageFilter := models.ImageFilterType{ - Studios: &studioCriterion, - } - - images, _, err := 
queryImagesWithCount(ctx, sqb, &imageFilter, nil) - if err != nil { - t.Errorf("Error querying image: %s", err.Error()) - } - - assert.Len(t, images, 1) - - // ensure id is correct - assert.Equal(t, imageIDs[imageIdxWithStudio], images[0].ID) - - studioCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(studioIDs[studioIdxWithImage]), + []int{imageIDs[imageIdxWithStudio]}, + false, + }, + { + "excludes", + getImageStringValue(imageIdxWithStudio, titleField), + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithImage]), + }, + Modifier: models.CriterionModifierExcludes, }, - Modifier: models.CriterionModifierExcludes, - } + []int{}, + false, + }, + { + "excludes includes null", + getImageStringValue(imageIdxWithGallery, titleField), + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithImage]), + }, + Modifier: models.CriterionModifierExcludes, + }, + []int{imageIDs[imageIdxWithGallery]}, + false, + }, + { + "equals", + "", + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithImage]), + }, + Modifier: models.CriterionModifierEquals, + }, + []int{imageIDs[imageIdxWithStudio]}, + false, + }, + { + "not equals", + getImageStringValue(imageIdxWithStudio, titleField), + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithImage]), + }, + Modifier: models.CriterionModifierNotEquals, + }, + []int{}, + false, + }, + } - q := getImageStringValue(imageIdxWithStudio, titleField) - findFilter := models.FindFilterType{ - Q: &q, - } + qb := db.Image - images, _, err = queryImagesWithCount(ctx, sqb, &imageFilter, &findFilter) - if err != nil { - t.Errorf("Error querying image: %s", err.Error()) - } - assert.Len(t, images, 0) + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + studioCriterion := tt.studioCriterion - 
return nil - }) + imageFilter := models.ImageFilterType{ + Studios: &studioCriterion, + } + + var findFilter *models.FindFilterType + if tt.q != "" { + findFilter = &models.FindFilterType{ + Q: &tt.q, + } + } + + images := queryImages(ctx, t, qb, &imageFilter, findFilter) + + assert.ElementsMatch(t, imagesToIDs(images), tt.expectedIDs) + }) + } } func TestImageQueryStudioDepth(t *testing.T) { @@ -2391,81 +2560,201 @@ func queryImages(ctx context.Context, t *testing.T, sqb models.ImageReader, imag } func TestImageQueryPerformerTags(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Image - tagCriterion := models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdxWithPerformer]), - strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + allDepth := -1 + + tests := []struct { + name string + findFilter *models.FindFilterType + filter *models.ImageFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "includes", + nil, + &models.ImageFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithPerformer]), + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + }, + Modifier: models.CriterionModifierIncludes, + }, }, - Modifier: models.CriterionModifierIncludes, - } - - imageFilter := models.ImageFilterType{ - PerformerTags: &tagCriterion, - } - - images := queryImages(ctx, t, sqb, &imageFilter, nil) - assert.Len(t, images, 2) - - // ensure ids are correct - for _, image := range images { - assert.True(t, image.ID == imageIDs[imageIdxWithPerformerTag] || image.ID == imageIDs[imageIdxWithPerformerTwoTags]) - } - - tagCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdx1WithPerformer]), - strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + []int{ + imageIdxWithPerformerTag, + imageIdxWithPerformerTwoTags, + imageIdxWithTwoPerformerTag, }, - Modifier: models.CriterionModifierIncludesAll, - } - - images = 
queryImages(ctx, t, sqb, &imageFilter, nil) - - assert.Len(t, images, 1) - assert.Equal(t, imageIDs[imageIdxWithPerformerTwoTags], images[0].ID) - - tagCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + []int{ + imageIdxWithPerformer, }, - Modifier: models.CriterionModifierExcludes, - } + false, + }, + { + "includes sub-tags", + nil, + &models.ImageFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithParentAndChild]), + }, + Depth: &allDepth, + Modifier: models.CriterionModifierIncludes, + }, + }, + []int{ + imageIdxWithPerformerParentTag, + }, + []int{ + imageIdxWithPerformer, + imageIdxWithPerformerTag, + imageIdxWithPerformerTwoTags, + imageIdxWithTwoPerformerTag, + }, + false, + }, + { + "includes all", + nil, + &models.ImageFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + Modifier: models.CriterionModifierIncludesAll, + }, + }, + []int{ + imageIdxWithPerformerTwoTags, + }, + []int{ + imageIdxWithPerformer, + imageIdxWithPerformerTag, + imageIdxWithTwoPerformerTag, + }, + false, + }, + { + "excludes performer tag tagIdx2WithPerformer", + nil, + &models.ImageFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierExcludes, + Value: []string{strconv.Itoa(tagIDs[tagIdx2WithPerformer])}, + }, + }, + nil, + []int{imageIdxWithTwoPerformerTag}, + false, + }, + { + "excludes sub-tags", + nil, + &models.ImageFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithParentAndChild]), + }, + Depth: &allDepth, + Modifier: models.CriterionModifierExcludes, + }, + }, + []int{ + imageIdxWithPerformer, + imageIdxWithPerformerTag, + imageIdxWithPerformerTwoTags, + imageIdxWithTwoPerformerTag, + }, 
+ []int{ + imageIdxWithPerformerParentTag, + }, + false, + }, + { + "is null", + nil, + &models.ImageFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + }, + []int{imageIdxWithGallery}, + []int{imageIdxWithPerformerTag}, + false, + }, + { + "not null", + nil, + &models.ImageFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + }, + []int{imageIdxWithPerformerTag}, + []int{imageIdxWithGallery}, + false, + }, + { + "equals", + nil, + &models.ImageFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + }, + }, + nil, + nil, + true, + }, + { + "not equals", + nil, + &models.ImageFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + }, + }, + nil, + nil, + true, + }, + } - q := getImageStringValue(imageIdxWithPerformerTwoTags, titleField) - findFilter := models.FindFilterType{ - Q: &q, - } + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 0) + results, err := db.Image.Query(ctx, models.ImageQueryOptions{ + ImageFilter: tt.filter, + QueryOptions: models.QueryOptions{ + FindFilter: tt.findFilter, + }, + }) + if (err != nil) != tt.wantErr { + t.Errorf("ImageStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } - tagCriterion = models.HierarchicalMultiCriterionInput{ - Modifier: models.CriterionModifierIsNull, - } - q = getImageStringValue(imageIdxWithGallery, titleField) + include := indexesToIDs(imageIDs, tt.includeIdxs) + exclude := indexesToIDs(imageIDs, tt.excludeIdxs) - images = queryImages(ctx, 
t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 1) - assert.Equal(t, imageIDs[imageIdxWithGallery], images[0].ID) - - q = getImageStringValue(imageIdxWithPerformerTag, titleField) - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 0) - - tagCriterion.Modifier = models.CriterionModifierNotNull - - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 1) - assert.Equal(t, imageIDs[imageIdxWithPerformerTag], images[0].ID) - - q = getImageStringValue(imageIdxWithGallery, titleField) - images = queryImages(ctx, t, sqb, &imageFilter, &findFilter) - assert.Len(t, images, 0) - - return nil - }) + for _, i := range include { + assert.Contains(results.IDs, i) + } + for _, e := range exclude { + assert.NotContains(results.IDs, e) + } + }) + } } func TestImageQueryTagCount(t *testing.T) { @@ -2584,7 +2873,7 @@ func TestImageQuerySorting(t *testing.T) { "date", models.SortDirectionEnumDesc, imageIdxWithTwoGalleries, - imageIdxWithGrandChildStudio, + imageIdxWithPerformerParentTag, }, } diff --git a/pkg/sqlite/migrations/46_penis_stats.up.sql b/pkg/sqlite/migrations/46_penis_stats.up.sql new file mode 100644 index 000000000..2e9e31654 --- /dev/null +++ b/pkg/sqlite/migrations/46_penis_stats.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE `performers` ADD COLUMN `penis_length` float; +ALTER TABLE `performers` ADD COLUMN `circumcised` varchar[10]; \ No newline at end of file diff --git a/pkg/sqlite/movies.go b/pkg/sqlite/movies.go index 1c591614d..3bc273cbf 100644 --- a/pkg/sqlite/movies.go +++ b/pkg/sqlite/movies.go @@ -176,7 +176,7 @@ func (qb *movieQueryBuilder) makeFilter(ctx context.Context, movieFilter *models query.handleCriterion(ctx, floatIntCriterionHandler(movieFilter.Duration, "movies.duration", nil)) query.handleCriterion(ctx, movieIsMissingCriterionHandler(qb, movieFilter.IsMissing)) query.handleCriterion(ctx, stringCriterionHandler(movieFilter.URL, "movies.url")) - query.handleCriterion(ctx, 
movieStudioCriterionHandler(qb, movieFilter.Studios)) + query.handleCriterion(ctx, studioCriterionHandler(movieTable, movieFilter.Studios)) query.handleCriterion(ctx, moviePerformersCriterionHandler(qb, movieFilter.Performers)) query.handleCriterion(ctx, dateCriterionHandler(movieFilter.Date, "movies.date")) query.handleCriterion(ctx, timestampCriterionHandler(movieFilter.CreatedAt, "movies.created_at")) @@ -239,19 +239,6 @@ func movieIsMissingCriterionHandler(qb *movieQueryBuilder, isMissing *string) cr } } -func movieStudioCriterionHandler(qb *movieQueryBuilder, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - h := hierarchicalMultiCriterionHandlerBuilder{ - tx: qb.tx, - - primaryTable: movieTable, - foreignTable: studioTable, - foreignFK: studioIDColumn, - parentFK: "parent_id", - } - - return h.handler(studios) -} - func moviePerformersCriterionHandler(qb *movieQueryBuilder, performers *models.MultiCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { if performers != nil { @@ -310,14 +297,17 @@ func (qb *movieQueryBuilder) getMovieSort(findFilter *models.FindFilterType) str direction = findFilter.GetDirection() } + sortQuery := "" switch sort { - case "name": // #943 - override name sorting to use natural sort - return " ORDER BY " + getColumn("movies", sort) + " COLLATE NATURAL_CS " + direction case "scenes_count": // generic getSort won't work for this - return getCountSort(movieTable, moviesScenesTable, movieIDColumn, direction) + sortQuery += getCountSort(movieTable, moviesScenesTable, movieIDColumn, direction) default: - return getSort(sort, direction, "movies") + sortQuery += getSort(sort, direction, "movies") } + + // Whatever the sorting, always use name/id as a final sort + sortQuery += ", COALESCE(movies.name, movies.id) COLLATE NATURAL_CI ASC" + return sortQuery } func (qb *movieQueryBuilder) queryMovie(ctx context.Context, query string, args []interface{}) (*models.Movie, error) { @@ 
-360,6 +350,10 @@ func (qb *movieQueryBuilder) GetFrontImage(ctx context.Context, movieID int) ([] return qb.GetImage(ctx, movieID, movieFrontImageBlobColumn) } +func (qb *movieQueryBuilder) HasFrontImage(ctx context.Context, movieID int) (bool, error) { + return qb.HasImage(ctx, movieID, movieFrontImageBlobColumn) +} + func (qb *movieQueryBuilder) GetBackImage(ctx context.Context, movieID int) ([]byte, error) { return qb.GetImage(ctx, movieID, movieBackImageBlobColumn) } diff --git a/pkg/sqlite/performer.go b/pkg/sqlite/performer.go index f288401d3..f4f11e684 100644 --- a/pkg/sqlite/performer.go +++ b/pkg/sqlite/performer.go @@ -42,6 +42,8 @@ type performerRow struct { Height null.Int `db:"height"` Measurements zero.String `db:"measurements"` FakeTits zero.String `db:"fake_tits"` + PenisLength null.Float `db:"penis_length"` + Circumcised zero.String `db:"circumcised"` CareerLength zero.String `db:"career_length"` Tattoos zero.String `db:"tattoos"` Piercings zero.String `db:"piercings"` @@ -64,7 +66,7 @@ func (r *performerRow) fromPerformer(o models.Performer) { r.ID = o.ID r.Name = o.Name r.Disambigation = zero.StringFrom(o.Disambiguation) - if o.Gender.IsValid() { + if o.Gender != nil && o.Gender.IsValid() { r.Gender = zero.StringFrom(o.Gender.String()) } r.URL = zero.StringFrom(o.URL) @@ -79,6 +81,10 @@ func (r *performerRow) fromPerformer(o models.Performer) { r.Height = intFromPtr(o.Height) r.Measurements = zero.StringFrom(o.Measurements) r.FakeTits = zero.StringFrom(o.FakeTits) + r.PenisLength = null.FloatFromPtr(o.PenisLength) + if o.Circumcised != nil && o.Circumcised.IsValid() { + r.Circumcised = zero.StringFrom(o.Circumcised.String()) + } r.CareerLength = zero.StringFrom(o.CareerLength) r.Tattoos = zero.StringFrom(o.Tattoos) r.Piercings = zero.StringFrom(o.Piercings) @@ -100,7 +106,6 @@ func (r *performerRow) resolve() *models.Performer { ID: r.ID, Name: r.Name, Disambiguation: r.Disambigation.String, - Gender: models.GenderEnum(r.Gender.String), URL: 
r.URL.String, Twitter: r.Twitter.String, Instagram: r.Instagram.String, @@ -111,6 +116,7 @@ func (r *performerRow) resolve() *models.Performer { Height: nullIntPtr(r.Height), Measurements: r.Measurements.String, FakeTits: r.FakeTits.String, + PenisLength: nullFloatPtr(r.PenisLength), CareerLength: r.CareerLength.String, Tattoos: r.Tattoos.String, Piercings: r.Piercings.String, @@ -126,6 +132,16 @@ func (r *performerRow) resolve() *models.Performer { IgnoreAutoTag: r.IgnoreAutoTag, } + if r.Gender.ValueOrZero() != "" { + v := models.GenderEnum(r.Gender.String) + ret.Gender = &v + } + + if r.Circumcised.ValueOrZero() != "" { + v := models.CircumisedEnum(r.Circumcised.String) + ret.Circumcised = &v + } + return ret } @@ -147,6 +163,8 @@ func (r *performerRowRecord) fromPartial(o models.PerformerPartial) { r.setNullInt("height", o.Height) r.setNullString("measurements", o.Measurements) r.setNullString("fake_tits", o.FakeTits) + r.setNullFloat64("penis_length", o.PenisLength) + r.setNullString("circumcised", o.Circumcised) r.setNullString("career_length", o.CareerLength) r.setNullString("tattoos", o.Tattoos) r.setNullString("piercings", o.Piercings) @@ -597,6 +615,15 @@ func (qb *PerformerStore) makeFilter(ctx context.Context, filter *models.Perform query.handleCriterion(ctx, stringCriterionHandler(filter.Measurements, tableName+".measurements")) query.handleCriterion(ctx, stringCriterionHandler(filter.FakeTits, tableName+".fake_tits")) + query.handleCriterion(ctx, floatCriterionHandler(filter.PenisLength, tableName+".penis_length", nil)) + + query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if circumcised := filter.Circumcised; circumcised != nil { + v := utils.StringerSliceToStringSlice(circumcised.Value) + enumCriterionHandler(circumcised.Modifier, v, tableName+".circumcised")(ctx, f) + } + })) + query.handleCriterion(ctx, stringCriterionHandler(filter.CareerLength, tableName+".career_length")) query.handleCriterion(ctx, 
stringCriterionHandler(filter.Tattoos, tableName+".tattoos")) query.handleCriterion(ctx, stringCriterionHandler(filter.Piercings, tableName+".piercings")) @@ -625,10 +652,13 @@ func (qb *PerformerStore) makeFilter(ctx context.Context, filter *models.Perform query.handleCriterion(ctx, performerStudiosCriterionHandler(qb, filter.Studios)) + query.handleCriterion(ctx, performerAppearsWithCriterionHandler(qb, filter.Performers)) + query.handleCriterion(ctx, performerTagCountCriterionHandler(qb, filter.TagCount)) query.handleCriterion(ctx, performerSceneCountCriterionHandler(qb, filter.SceneCount)) query.handleCriterion(ctx, performerImageCountCriterionHandler(qb, filter.ImageCount)) query.handleCriterion(ctx, performerGalleryCountCriterionHandler(qb, filter.GalleryCount)) + query.handleCriterion(ctx, performerOCounterCriterionHandler(qb, filter.OCounter)) query.handleCriterion(ctx, dateCriterionHandler(filter.Birthdate, tableName+".birthdate")) query.handleCriterion(ctx, dateCriterionHandler(filter.DeathDate, tableName+".death_date")) query.handleCriterion(ctx, timestampCriterionHandler(filter.CreatedAt, tableName+".created_at")) @@ -728,7 +758,7 @@ func performerAgeFilterCriterionHandler(age *models.IntCriterionInput) criterion return func(ctx context.Context, f *filterBuilder) { if age != nil && age.Modifier.IsValid() { clause, args := getIntCriterionWhereClause( - "cast(IFNULL(strftime('%Y.%m%d', performers.death_date), strftime('%Y.%m%d', 'now')) - strftime('%Y.%m%d', performers.birthdate) as int)", + "cast(strftime('%Y.%m%d',CASE WHEN performers.death_date IS NULL OR performers.death_date = '0001-01-01' OR performers.death_date = '' THEN 'now' ELSE performers.death_date END) - strftime('%Y.%m%d', performers.birthdate) as int)", *age, ) f.addWhere(clause, args...) 
@@ -805,6 +835,22 @@ func performerGalleryCountCriterionHandler(qb *PerformerStore, count *models.Int return h.handler(count) } +func performerOCounterCriterionHandler(qb *PerformerStore, count *models.IntCriterionInput) criterionHandlerFunc { + h := joinedMultiSumCriterionHandlerBuilder{ + primaryTable: performerTable, + foreignTable1: sceneTable, + joinTable1: performersScenesTable, + foreignTable2: imageTable, + joinTable2: performersImagesTable, + primaryFK: performerIDColumn, + foreignFK1: sceneIDColumn, + foreignFK2: imageIDColumn, + sum: "o_counter", + } + + return h.handler(count) +} + func performerStudiosCriterionHandler(qb *PerformerStore, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { if studios != nil { @@ -862,7 +908,11 @@ func performerStudiosCriterionHandler(qb *PerformerStore, studios *models.Hierar } const derivedPerformerStudioTable = "performer_studio" - valuesClause := getHierarchicalValues(ctx, qb.tx, studios.Value, studioTable, "", "parent_id", studios.Depth) + valuesClause, err := getHierarchicalValues(ctx, qb.tx, studios.Value, studioTable, "", "parent_id", "child_id", studios.Depth) + if err != nil { + f.setError(err) + return + } f.addWith("studio(root_id, item_id) AS (" + valuesClause + ")") templStr := `SELECT performer_id FROM {primaryTable} @@ -882,6 +932,60 @@ func performerStudiosCriterionHandler(qb *PerformerStore, studios *models.Hierar } } +func performerAppearsWithCriterionHandler(qb *PerformerStore, performers *models.MultiCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if performers != nil { + formatMaps := []utils.StrFormatMap{ + { + "primaryTable": performersScenesTable, + "joinTable": performersScenesTable, + "primaryFK": sceneIDColumn, + }, + { + "primaryTable": performersImagesTable, + "joinTable": performersImagesTable, + "primaryFK": imageIDColumn, + }, + { + "primaryTable": 
performersGalleriesTable, + "joinTable": performersGalleriesTable, + "primaryFK": galleryIDColumn, + }, + } + + if len(performers.Value) == 0 { + return + } + + const derivedPerformerPerformersTable = "performer_performers" + + valuesClause := strings.Join(performers.Value, "),(") + + f.addWith("performer(id) AS (VALUES(" + valuesClause + "))") + + templStr := `SELECT {primaryTable}2.performer_id FROM {primaryTable} + INNER JOIN {primaryTable} AS {primaryTable}2 ON {primaryTable}.{primaryFK} = {primaryTable}2.{primaryFK} + INNER JOIN performer ON {primaryTable}.performer_id = performer.id + WHERE {primaryTable}2.performer_id != performer.id` + + if performers.Modifier == models.CriterionModifierIncludesAll && len(performers.Value) > 1 { + templStr += ` + GROUP BY {primaryTable}2.performer_id + HAVING(count(distinct {primaryTable}.performer_id) IS ` + strconv.Itoa(len(performers.Value)) + `)` + } + + var unions []string + for _, c := range formatMaps { + unions = append(unions, utils.StrFormat(templStr, c)) + } + + f.addWith(fmt.Sprintf("%s AS (%s)", derivedPerformerPerformersTable, strings.Join(unions, " UNION "))) + + f.addInnerJoin(derivedPerformerPerformersTable, "", fmt.Sprintf("performers.id = %s.performer_id", derivedPerformerPerformersTable)) + } + } +} + func (qb *PerformerStore) getPerformerSort(findFilter *models.FindFilterType) string { var sort string var direction string @@ -893,20 +997,26 @@ func (qb *PerformerStore) getPerformerSort(findFilter *models.FindFilterType) st direction = findFilter.GetDirection() } - if sort == "tag_count" { - return getCountSort(performerTable, performersTagsTable, performerIDColumn, direction) + sortQuery := "" + switch sort { + case "tag_count": + sortQuery += getCountSort(performerTable, performersTagsTable, performerIDColumn, direction) + case "scenes_count": + sortQuery += getCountSort(performerTable, performersScenesTable, performerIDColumn, direction) + case "images_count": + sortQuery += 
getCountSort(performerTable, performersImagesTable, performerIDColumn, direction) + case "galleries_count": + sortQuery += getCountSort(performerTable, performersGalleriesTable, performerIDColumn, direction) + default: + sortQuery += getSort(sort, direction, "performers") } - if sort == "scenes_count" { - return getCountSort(performerTable, performersScenesTable, performerIDColumn, direction) - } - if sort == "images_count" { - return getCountSort(performerTable, performersImagesTable, performerIDColumn, direction) - } - if sort == "galleries_count" { - return getCountSort(performerTable, performersGalleriesTable, performerIDColumn, direction) + if sort == "o_counter" { + return getMultiSumSort("o_counter", performerTable, sceneTable, performersScenesTable, imageTable, performersImagesTable, performerIDColumn, sceneIDColumn, imageIDColumn, direction) } - return getSort(sort, direction, "performers") + // Whatever the sorting, always use name/id as a final sort + sortQuery += ", COALESCE(performers.name, performers.id) COLLATE NATURAL_CI ASC" + return sortQuery } func (qb *PerformerStore) tagsRepository() *joinRepository { @@ -928,6 +1038,10 @@ func (qb *PerformerStore) GetImage(ctx context.Context, performerID int) ([]byte return qb.blobJoinQueryBuilder.GetImage(ctx, performerID, performerImageBlobColumn) } +func (qb *PerformerStore) HasImage(ctx context.Context, performerID int) (bool, error) { + return qb.blobJoinQueryBuilder.HasImage(ctx, performerID, performerImageBlobColumn) +} + func (qb *PerformerStore) UpdateImage(ctx context.Context, performerID int, image []byte) error { return qb.blobJoinQueryBuilder.UpdateImage(ctx, performerID, performerImageBlobColumn, image) } diff --git a/pkg/sqlite/performer_test.go b/pkg/sqlite/performer_test.go index 2b24d6455..89605ac89 100644 --- a/pkg/sqlite/performer_test.go +++ b/pkg/sqlite/performer_test.go @@ -52,6 +52,8 @@ func Test_PerformerStore_Create(t *testing.T) { height = 134 measurements = "measurements" fakeTits 
= "fakeTits" + penisLength = 1.23 + circumcised = models.CircumisedEnumCut careerLength = "careerLength" tattoos = "tattoos" piercings = "piercings" @@ -81,7 +83,7 @@ func Test_PerformerStore_Create(t *testing.T) { models.Performer{ Name: name, Disambiguation: disambiguation, - Gender: gender, + Gender: &gender, URL: url, Twitter: twitter, Instagram: instagram, @@ -92,6 +94,8 @@ func Test_PerformerStore_Create(t *testing.T) { Height: &height, Measurements: measurements, FakeTits: fakeTits, + PenisLength: &penisLength, + Circumcised: &circumcised, CareerLength: careerLength, Tattoos: tattoos, Piercings: piercings, @@ -196,6 +200,8 @@ func Test_PerformerStore_Update(t *testing.T) { height = 134 measurements = "measurements" fakeTits = "fakeTits" + penisLength = 1.23 + circumcised = models.CircumisedEnumCut careerLength = "careerLength" tattoos = "tattoos" piercings = "piercings" @@ -226,7 +232,7 @@ func Test_PerformerStore_Update(t *testing.T) { ID: performerIDs[performerIdxWithGallery], Name: name, Disambiguation: disambiguation, - Gender: gender, + Gender: &gender, URL: url, Twitter: twitter, Instagram: instagram, @@ -237,6 +243,8 @@ func Test_PerformerStore_Update(t *testing.T) { Height: &height, Measurements: measurements, FakeTits: fakeTits, + PenisLength: &penisLength, + Circumcised: &circumcised, CareerLength: careerLength, Tattoos: tattoos, Piercings: piercings, @@ -327,6 +335,7 @@ func clearPerformerPartial() models.PerformerPartial { nullString := models.OptionalString{Set: true, Null: true} nullDate := models.OptionalDate{Set: true, Null: true} nullInt := models.OptionalInt{Set: true, Null: true} + nullFloat := models.OptionalFloat64{Set: true, Null: true} // leave mandatory fields return models.PerformerPartial{ @@ -342,6 +351,8 @@ func clearPerformerPartial() models.PerformerPartial { Height: nullInt, Measurements: nullString, FakeTits: nullString, + PenisLength: nullFloat, + Circumcised: nullString, CareerLength: nullString, Tattoos: nullString, 
Piercings: nullString, @@ -372,6 +383,8 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { height = 143 measurements = "measurements" fakeTits = "fakeTits" + penisLength = 1.23 + circumcised = models.CircumisedEnumCut careerLength = "careerLength" tattoos = "tattoos" piercings = "piercings" @@ -415,6 +428,8 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { Height: models.NewOptionalInt(height), Measurements: models.NewOptionalString(measurements), FakeTits: models.NewOptionalString(fakeTits), + PenisLength: models.NewOptionalFloat64(penisLength), + Circumcised: models.NewOptionalString(circumcised.String()), CareerLength: models.NewOptionalString(careerLength), Tattoos: models.NewOptionalString(tattoos), Piercings: models.NewOptionalString(piercings), @@ -453,7 +468,7 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { ID: performerIDs[performerIdxWithDupName], Name: name, Disambiguation: disambiguation, - Gender: gender, + Gender: &gender, URL: url, Twitter: twitter, Instagram: instagram, @@ -464,6 +479,8 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { Height: &height, Measurements: measurements, FakeTits: fakeTits, + PenisLength: &penisLength, + Circumcised: &circumcised, CareerLength: careerLength, Tattoos: tattoos, Piercings: piercings, @@ -496,12 +513,13 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { performerIDs[performerIdxWithTwoTags], clearPerformerPartial(), models.Performer{ - ID: performerIDs[performerIdxWithTwoTags], - Name: getPerformerStringValue(performerIdxWithTwoTags, "Name"), - Favorite: true, - Aliases: models.NewRelatedStrings([]string{}), - TagIDs: models.NewRelatedIDs([]int{}), - StashIDs: models.NewRelatedStashIDs([]models.StashID{}), + ID: performerIDs[performerIdxWithTwoTags], + Name: getPerformerStringValue(performerIdxWithTwoTags, "Name"), + Favorite: getPerformerBoolValue(performerIdxWithTwoTags), + Aliases: models.NewRelatedStrings([]string{}), + TagIDs: models.NewRelatedIDs([]int{}), + 
StashIDs: models.NewRelatedStashIDs([]models.StashID{}), + IgnoreAutoTag: getIgnoreAutoTag(performerIdxWithTwoTags), }, false, }, @@ -957,16 +975,30 @@ func TestPerformerQuery(t *testing.T) { false, }, { - "alias", + "circumcised (cut)", nil, &models.PerformerFilterType{ - Aliases: &models.StringCriterionInput{ - Value: getPerformerStringValue(performerIdxWithGallery, "alias"), - Modifier: models.CriterionModifierEquals, + Circumcised: &models.CircumcisionCriterionInput{ + Value: []models.CircumisedEnum{models.CircumisedEnumCut}, + Modifier: models.CriterionModifierIncludes, }, }, - []int{performerIdxWithGallery}, - []int{performerIdxWithScene}, + []int{performerIdx1WithScene}, + []int{performerIdxWithScene, performerIdx2WithScene}, + false, + }, + { + "circumcised (excludes cut)", + nil, + &models.PerformerFilterType{ + Circumcised: &models.CircumcisionCriterionInput{ + Value: []models.CircumisedEnum{models.CircumisedEnumCut}, + Modifier: models.CriterionModifierExcludes, + }, + }, + []int{performerIdx2WithScene}, + // performerIdxWithScene has null value + []int{performerIdx1WithScene, performerIdxWithScene}, false, }, } @@ -995,6 +1027,107 @@ func TestPerformerQuery(t *testing.T) { } } +func TestPerformerQueryPenisLength(t *testing.T) { + var upper = 4.0 + + tests := []struct { + name string + modifier models.CriterionModifier + value float64 + value2 *float64 + }{ + { + "equals", + models.CriterionModifierEquals, + 1, + nil, + }, + { + "not equals", + models.CriterionModifierNotEquals, + 1, + nil, + }, + { + "greater than", + models.CriterionModifierGreaterThan, + 1, + nil, + }, + { + "between", + models.CriterionModifierBetween, + 2, + &upper, + }, + { + "greater than", + models.CriterionModifierNotBetween, + 2, + &upper, + }, + { + "null", + models.CriterionModifierIsNull, + 0, + nil, + }, + { + "not null", + models.CriterionModifierNotNull, + 0, + nil, + }, + } + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx 
context.Context) { + filter := &models.PerformerFilterType{ + PenisLength: &models.FloatCriterionInput{ + Modifier: tt.modifier, + Value: tt.value, + Value2: tt.value2, + }, + } + + performers, _, err := db.Performer.Query(ctx, filter, nil) + if err != nil { + t.Errorf("PerformerStore.Query() error = %v", err) + return + } + + for _, p := range performers { + verifyFloat(t, p.PenisLength, *filter.PenisLength) + } + }) + } +} + +func verifyFloat(t *testing.T, value *float64, criterion models.FloatCriterionInput) bool { + t.Helper() + assert := assert.New(t) + switch criterion.Modifier { + case models.CriterionModifierEquals: + return assert.NotNil(value) && assert.Equal(criterion.Value, *value) + case models.CriterionModifierNotEquals: + return assert.NotNil(value) && assert.NotEqual(criterion.Value, *value) + case models.CriterionModifierGreaterThan: + return assert.NotNil(value) && assert.Greater(*value, criterion.Value) + case models.CriterionModifierLessThan: + return assert.NotNil(value) && assert.Less(*value, criterion.Value) + case models.CriterionModifierBetween: + return assert.NotNil(value) && assert.GreaterOrEqual(*value, criterion.Value) && assert.LessOrEqual(*value, *criterion.Value2) + case models.CriterionModifierNotBetween: + return assert.NotNil(value) && assert.True(*value < criterion.Value || *value > *criterion.Value2) + case models.CriterionModifierIsNull: + return assert.Nil(value) + case models.CriterionModifierNotNull: + return assert.NotNil(value) + } + + return false +} + func TestPerformerQueryForAutoTag(t *testing.T) { withTxn(func(ctx context.Context) error { tqb := db.Performer @@ -1772,10 +1905,10 @@ func TestPerformerQuerySortScenesCount(t *testing.T) { assert.True(t, len(performers) > 0) - // first performer should be performerIdxWithTwoScenes + // first performer should be performerIdx1WithScene firstPerformer := performers[0] - assert.Equal(t, performerIDs[performerIdxWithTwoScenes], firstPerformer.ID) + assert.Equal(t, 
performerIDs[performerIdx1WithScene], firstPerformer.ID) // sort in ascending order direction = models.SortDirectionEnumAsc @@ -1788,7 +1921,7 @@ func TestPerformerQuerySortScenesCount(t *testing.T) { assert.True(t, len(performers) > 0) lastPerformer := performers[len(performers)-1] - assert.Equal(t, performerIDs[performerIdxWithTwoScenes], lastPerformer.ID) + assert.Equal(t, performerIDs[performerIdxWithTag], lastPerformer.ID) return nil }) @@ -1928,7 +2061,7 @@ func TestPerformerStore_FindByStashIDStatus(t *testing.T) { name: "!hasStashID", hasStashID: false, stashboxEndpoint: getPerformerStringValue(performerIdxWithScene, "endpoint"), - include: []int{performerIdxWithImage}, + include: []int{performerIdxWithTwoScenes}, exclude: []int{performerIdx2WithScene}, wantErr: false, }, diff --git a/pkg/sqlite/phash.go b/pkg/sqlite/phash.go new file mode 100644 index 000000000..ceda69bd4 --- /dev/null +++ b/pkg/sqlite/phash.go @@ -0,0 +1,10 @@ +package sqlite + +import "github.com/corona10/goimagehash" + +func phashDistanceFn(phash1 int64, phash2 int64) (int64, error) { + hash1 := goimagehash.NewImageHash(uint64(phash1), goimagehash.PHash) + hash2 := goimagehash.NewImageHash(uint64(phash2), goimagehash.PHash) + distance, _ := hash1.Distance(hash2) + return int64(distance), nil +} diff --git a/pkg/sqlite/record.go b/pkg/sqlite/record.go index fbee73e86..5f4d31b55 100644 --- a/pkg/sqlite/record.go +++ b/pkg/sqlite/record.go @@ -3,6 +3,7 @@ package sqlite import ( "github.com/doug-martin/goqu/v9/exp" "github.com/stashapp/stash/pkg/models" + "gopkg.in/guregu/null.v4" "gopkg.in/guregu/null.v4/zero" ) @@ -77,11 +78,11 @@ func (r *updateRecord) setFloat64(destField string, v models.OptionalFloat64) { } } -// func (r *updateRecord) setNullFloat64(destField string, v models.OptionalFloat64) { -// if v.Set { -// r.set(destField, null.FloatFromPtr(v.Ptr())) -// } -// } +func (r *updateRecord) setNullFloat64(destField string, v models.OptionalFloat64) { + if v.Set { + 
r.set(destField, null.FloatFromPtr(v.Ptr())) + } +} func (r *updateRecord) setSQLiteTimestamp(destField string, v models.OptionalTime) { if v.Set { diff --git a/pkg/sqlite/scene.go b/pkg/sqlite/scene.go index a5e903653..1fe5bcdb0 100644 --- a/pkg/sqlite/scene.go +++ b/pkg/sqlite/scene.go @@ -36,23 +36,38 @@ const ( ) var findExactDuplicateQuery = ` -SELECT GROUP_CONCAT(scenes.id) as ids -FROM scenes -INNER JOIN scenes_files ON (scenes.id = scenes_files.scene_id) -INNER JOIN files ON (scenes_files.file_id = files.id) -INNER JOIN files_fingerprints ON (scenes_files.file_id = files_fingerprints.file_id AND files_fingerprints.type = 'phash') -GROUP BY files_fingerprints.fingerprint -HAVING COUNT(files_fingerprints.fingerprint) > 1 AND COUNT(DISTINCT scenes.id) > 1 -ORDER BY SUM(files.size) DESC; +SELECT GROUP_CONCAT(DISTINCT scene_id) as ids +FROM ( + SELECT scenes.id as scene_id + , video_files.duration as file_duration + , files.size as file_size + , files_fingerprints.fingerprint as phash + , abs(max(video_files.duration) OVER (PARTITION by files_fingerprints.fingerprint) - video_files.duration) as durationDiff + FROM scenes + INNER JOIN scenes_files ON (scenes.id = scenes_files.scene_id) + INNER JOIN files ON (scenes_files.file_id = files.id) + INNER JOIN files_fingerprints ON (scenes_files.file_id = files_fingerprints.file_id AND files_fingerprints.type = 'phash') + INNER JOIN video_files ON (files.id == video_files.file_id) +) +WHERE durationDiff <= ?1 + OR ?1 < 0 -- Always TRUE if the parameter is negative. + -- That will disable the durationDiff checking. 
+GROUP BY phash +HAVING COUNT(phash) > 1 + AND COUNT(DISTINCT scene_id) > 1 +ORDER BY SUM(file_size) DESC; ` var findAllPhashesQuery = ` -SELECT scenes.id as id, files_fingerprints.fingerprint as phash +SELECT scenes.id as id + , files_fingerprints.fingerprint as phash + , video_files.duration as duration FROM scenes -INNER JOIN scenes_files ON (scenes.id = scenes_files.scene_id) -INNER JOIN files ON (scenes_files.file_id = files.id) +INNER JOIN scenes_files ON (scenes.id = scenes_files.scene_id) +INNER JOIN files ON (scenes_files.file_id = files.id) INNER JOIN files_fingerprints ON (scenes_files.file_id = files_fingerprints.file_id AND files_fingerprints.type = 'phash') -ORDER BY files.size DESC +INNER JOIN video_files ON (files.id == video_files.file_id) +ORDER BY files.size DESC; ` type sceneRow struct { @@ -680,6 +695,19 @@ func (qb *SceneStore) CountByPerformerID(ctx context.Context, performerID int) ( return count(ctx, q) } +func (qb *SceneStore) OCountByPerformerID(ctx context.Context, performerID int) (int, error) { + table := qb.table() + joinTable := scenesPerformersJoinTable + + q := dialect.Select(goqu.COALESCE(goqu.SUM("o_counter"), 0)).From(table).InnerJoin(joinTable, goqu.On(table.Col(idColumn).Eq(joinTable.Col(sceneIDColumn)))).Where(joinTable.Col(performerIDColumn).Eq(performerID)) + var ret int + if err := querySimple(ctx, q, &ret); err != nil { + return 0, err + } + + return ret, nil +} + func (qb *SceneStore) FindByMovieID(ctx context.Context, movieID int) ([]*models.Scene, error) { sq := dialect.From(scenesMoviesJoinTable).Select(scenesMoviesJoinTable.Col(sceneIDColumn)).Where( scenesMoviesJoinTable.Col(movieIDColumn).Eq(movieID), @@ -882,17 +910,16 @@ func (qb *SceneStore) makeFilter(ctx context.Context, sceneFilter *models.SceneF query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { if sceneFilter.Phash != nil { - qb.addSceneFilesTable(f) - f.addLeftJoin(fingerprintTable, "fingerprints_phash", 
"scenes_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") - - value, _ := utils.StringToPhash(sceneFilter.Phash.Value) - intCriterionHandler(&models.IntCriterionInput{ - Value: int(value), + // backwards compatibility + scenePhashDistanceCriterionHandler(qb, &models.PhashDistanceCriterionInput{ + Value: sceneFilter.Phash.Value, Modifier: sceneFilter.Phash.Modifier, - }, "fingerprints_phash.fingerprint", nil)(ctx, f) + })(ctx, f) } })) + query.handleCriterion(ctx, scenePhashDistanceCriterionHandler(qb, sceneFilter.PhashDistance)) + query.handleCriterion(ctx, intCriterionHandler(sceneFilter.Rating100, "scenes.rating", nil)) // legacy rating handler query.handleCriterion(ctx, rating5CriterionHandler(sceneFilter.Rating, "scenes.rating", nil)) @@ -932,7 +959,7 @@ func (qb *SceneStore) makeFilter(ctx context.Context, sceneFilter *models.SceneF query.handleCriterion(ctx, sceneTagCountCriterionHandler(qb, sceneFilter.TagCount)) query.handleCriterion(ctx, scenePerformersCriterionHandler(qb, sceneFilter.Performers)) query.handleCriterion(ctx, scenePerformerCountCriterionHandler(qb, sceneFilter.PerformerCount)) - query.handleCriterion(ctx, sceneStudioCriterionHandler(qb, sceneFilter.Studios)) + query.handleCriterion(ctx, studioCriterionHandler(sceneTable, sceneFilter.Studios)) query.handleCriterion(ctx, sceneMoviesCriterionHandler(qb, sceneFilter.Movies)) query.handleCriterion(ctx, scenePerformerTagsCriterionHandler(qb, sceneFilter.PerformerTags)) query.handleCriterion(ctx, scenePerformerFavoriteCriterionHandler(sceneFilter.PerformerFavorite)) @@ -1325,19 +1352,6 @@ func scenePerformerAgeCriterionHandler(performerAge *models.IntCriterionInput) c } } -func sceneStudioCriterionHandler(qb *SceneStore, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - h := hierarchicalMultiCriterionHandlerBuilder{ - tx: qb.tx, - - primaryTable: sceneTable, - foreignTable: studioTable, - foreignFK: studioIDColumn, - parentFK: "parent_id", - 
} - - return h.handler(studios) -} - func sceneMoviesCriterionHandler(qb *SceneStore, movies *models.MultiCriterionInput) criterionHandlerFunc { addJoinsFunc := func(f *filterBuilder) { qb.moviesRepository().join(f, "", "scenes.id") @@ -1347,37 +1361,50 @@ func sceneMoviesCriterionHandler(qb *SceneStore, movies *models.MultiCriterionIn return h.handler(movies) } -func scenePerformerTagsCriterionHandler(qb *SceneStore, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { +func scenePerformerTagsCriterionHandler(qb *SceneStore, tags *models.HierarchicalMultiCriterionInput) criterionHandler { + return &joinedPerformerTagsHandler{ + criterion: tags, + primaryTable: sceneTable, + joinTable: performersScenesTable, + joinPrimaryKey: sceneIDColumn, + } +} + +func scenePhashDistanceCriterionHandler(qb *SceneStore, phashDistance *models.PhashDistanceCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { - if tags != nil { - if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { - var notClause string - if tags.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } + if phashDistance != nil { + qb.addSceneFilesTable(f) + f.addLeftJoin(fingerprintTable, "fingerprints_phash", "scenes_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") - f.addLeftJoin("performers_scenes", "", "scenes.id = performers_scenes.scene_id") - f.addLeftJoin("performers_tags", "", "performers_scenes.performer_id = performers_tags.performer_id") - - f.addWhere(fmt.Sprintf("performers_tags.tag_id IS %s NULL", notClause)) - return + value, _ := utils.StringToPhash(phashDistance.Value) + distance := 0 + if phashDistance.Distance != nil { + distance = *phashDistance.Distance } - if len(tags.Value) == 0 { - return + if distance == 0 { + // use the default handler + intCriterionHandler(&models.IntCriterionInput{ + Value: int(value), + Modifier: 
phashDistance.Modifier, + }, "fingerprints_phash.fingerprint", nil)(ctx, f) } - valuesClause := getHierarchicalValues(ctx, qb.tx, tags.Value, tagTable, "tags_relations", "", tags.Depth) - - f.addWith(`performer_tags AS ( -SELECT ps.scene_id, t.column1 AS root_tag_id FROM performers_scenes ps -INNER JOIN performers_tags pt ON pt.performer_id = ps.performer_id -INNER JOIN (` + valuesClause + `) t ON t.column2 = pt.tag_id -)`) - - f.addLeftJoin("performer_tags", "", "performer_tags.scene_id = scenes.id") - - addHierarchicalConditionClauses(f, tags, "performer_tags", "root_tag_id") + switch { + case phashDistance.Modifier == models.CriterionModifierEquals && distance > 0: + // needed to avoid a type mismatch + f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") + f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) < ?", value, distance) + case phashDistance.Modifier == models.CriterionModifierNotEquals && distance > 0: + // needed to avoid a type mismatch + f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") + f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) 
> ?", value, distance) + default: + intCriterionHandler(&models.IntCriterionInput{ + Value: int(value), + Modifier: phashDistance.Modifier, + }, "fingerprints_phash.fingerprint", nil)(ctx, f) + } } } } @@ -1435,7 +1462,7 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF // special handling for path addFileTable() addFolderTable() - query.sortAndPagination += fmt.Sprintf(" ORDER BY folders.path %s, files.basename %[1]s", direction) + query.sortAndPagination += fmt.Sprintf(" ORDER BY COALESCE(folders.path, '') || COALESCE(files.basename, '') COLLATE NATURAL_CI %s", direction) case "perceptual_similarity": // special handling for phash addFileTable() @@ -1472,13 +1499,16 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF case "title": addFileTable() addFolderTable() - query.sortAndPagination += " ORDER BY COALESCE(scenes.title, files.basename) COLLATE NATURAL_CS " + direction + ", folders.path " + direction + query.sortAndPagination += " ORDER BY COALESCE(scenes.title, files.basename) COLLATE NATURAL_CI " + direction + ", folders.path COLLATE NATURAL_CI " + direction case "play_count": // handle here since getSort has special handling for _count suffix query.sortAndPagination += " ORDER BY scenes.play_count " + direction default: query.sortAndPagination += getSort(sort, direction, "scenes") } + + // Whatever the sorting, always use title/id as a final sort + query.sortAndPagination += ", COALESCE(scenes.title, scenes.id) COLLATE NATURAL_CI ASC" } func (qb *SceneStore) getPlayCount(ctx context.Context, id int) (int, error) { @@ -1675,11 +1705,11 @@ func (qb *SceneStore) GetStashIDs(ctx context.Context, sceneID int) ([]models.St return qb.stashIDRepository().get(ctx, sceneID) } -func (qb *SceneStore) FindDuplicates(ctx context.Context, distance int) ([][]*models.Scene, error) { +func (qb *SceneStore) FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*models.Scene, error) { var 
dupeIds [][]int if distance == 0 { var ids []string - if err := qb.tx.Select(ctx, &ids, findExactDuplicateQuery); err != nil { + if err := qb.tx.Select(ctx, &ids, findExactDuplicateQuery, durationDiff); err != nil { return nil, err } @@ -1701,7 +1731,8 @@ func (qb *SceneStore) FindDuplicates(ctx context.Context, distance int) ([][]*mo if err := qb.queryFunc(ctx, findAllPhashesQuery, nil, false, func(rows *sqlx.Rows) error { phash := utils.Phash{ - Bucket: -1, + Bucket: -1, + Duration: -1, } if err := rows.StructScan(&phash); err != nil { return err @@ -1713,7 +1744,7 @@ func (qb *SceneStore) FindDuplicates(ctx context.Context, distance int) ([][]*mo return nil, err } - dupeIds = utils.FindDuplicates(hashes, distance) + dupeIds = utils.FindDuplicates(hashes, distance, durationDiff) } var duplicates [][]*models.Scene diff --git a/pkg/sqlite/scene_marker.go b/pkg/sqlite/scene_marker.go index df3c73030..04eeb1e3a 100644 --- a/pkg/sqlite/scene_marker.go +++ b/pkg/sqlite/scene_marker.go @@ -209,7 +209,11 @@ func sceneMarkerTagsCriterionHandler(qb *sceneMarkerQueryBuilder, tags *models.H if len(tags.Value) == 0 { return } - valuesClause := getHierarchicalValues(ctx, qb.tx, tags.Value, tagTable, "tags_relations", "", tags.Depth) + valuesClause, err := getHierarchicalValues(ctx, qb.tx, tags.Value, tagTable, "tags_relations", "parent_id", "child_id", tags.Depth) + if err != nil { + f.setError(err) + return + } f.addWith(`marker_tags AS ( SELECT mt.scene_marker_id, t.column1 AS root_tag_id FROM scene_markers_tags mt @@ -221,7 +225,7 @@ INNER JOIN (` + valuesClause + `) t ON t.column2 = m.primary_tag_id f.addLeftJoin("marker_tags", "", "marker_tags.scene_marker_id = scene_markers.id") - addHierarchicalConditionClauses(f, tags, "marker_tags", "root_tag_id") + addHierarchicalConditionClauses(f, *tags, "marker_tags", "root_tag_id") } } } @@ -229,32 +233,23 @@ INNER JOIN (` + valuesClause + `) t ON t.column2 = m.primary_tag_id func sceneMarkerSceneTagsCriterionHandler(qb 
*sceneMarkerQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { if tags != nil { - if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { - var notClause string - if tags.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } + f.addLeftJoin("scenes_tags", "", "scene_markers.scene_id = scenes_tags.scene_id") - f.addLeftJoin("scenes_tags", "", "scene_markers.scene_id = scenes_tags.scene_id") + h := joinedHierarchicalMultiCriterionHandlerBuilder{ + tx: qb.tx, - f.addWhere(fmt.Sprintf("scenes_tags.tag_id IS %s NULL", notClause)) - return + primaryTable: "scene_markers", + primaryKey: sceneIDColumn, + foreignTable: tagTable, + foreignFK: tagIDColumn, + + relationsTable: "tags_relations", + joinTable: "scenes_tags", + joinAs: "marker_scenes_tags", + primaryFK: sceneIDColumn, } - if len(tags.Value) == 0 { - return - } - - valuesClause := getHierarchicalValues(ctx, qb.tx, tags.Value, tagTable, "tags_relations", "", tags.Depth) - - f.addWith(`scene_tags AS ( -SELECT st.scene_id, t.column1 AS root_tag_id FROM scenes_tags st -INNER JOIN (` + valuesClause + `) t ON t.column2 = st.tag_id -)`) - - f.addLeftJoin("scene_tags", "", "scene_tags.scene_id = scene_markers.scene_id") - - addHierarchicalConditionClauses(f, tags, "scene_tags", "root_tag_id") + h.handler(tags).handle(ctx, f) } } } diff --git a/pkg/sqlite/scene_marker_test.go b/pkg/sqlite/scene_marker_test.go index 9c5ae866f..b2f7b2ee6 100644 --- a/pkg/sqlite/scene_marker_test.go +++ b/pkg/sqlite/scene_marker_test.go @@ -5,9 +5,12 @@ package sqlite_test import ( "context" + "strconv" "testing" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/sliceutil/intslice" + "github.com/stashapp/stash/pkg/sliceutil/stringslice" "github.com/stashapp/stash/pkg/sqlite" "github.com/stretchr/testify/assert" ) @@ -50,7 +53,7 @@ func TestMarkerCountByTagID(t *testing.T) { 
t.Errorf("error calling CountByTagID: %s", err.Error()) } - assert.Equal(t, 3, markerCount) + assert.Equal(t, 4, markerCount) markerCount, err = mqb.CountByTagID(ctx, tagIDs[tagIdxWithMarkers]) @@ -151,7 +154,7 @@ func TestMarkerQuerySceneTags(t *testing.T) { } withTxn(func(ctx context.Context) error { - testTags := func(m *models.SceneMarker, markerFilter *models.SceneMarkerFilterType) { + testTags := func(t *testing.T, m *models.SceneMarker, markerFilter *models.SceneMarkerFilterType) { s, err := db.Scene.Find(ctx, int(m.SceneID.Int64)) if err != nil { t.Errorf("error getting marker tag ids: %v", err) @@ -164,11 +167,40 @@ func TestMarkerQuerySceneTags(t *testing.T) { } tagIDs := s.TagIDs.List() - if markerFilter.SceneTags.Modifier == models.CriterionModifierIsNull && len(tagIDs) > 0 { - t.Errorf("expected marker %d to have no scene tags - found %d", m.ID, len(tagIDs)) - } - if markerFilter.SceneTags.Modifier == models.CriterionModifierNotNull && len(tagIDs) == 0 { - t.Errorf("expected marker %d to have scene tags - found 0", m.ID) + values, _ := stringslice.StringSliceToIntSlice(markerFilter.SceneTags.Value) + switch markerFilter.SceneTags.Modifier { + case models.CriterionModifierIsNull: + if len(tagIDs) > 0 { + t.Errorf("expected marker %d to have no scene tags - found %d", m.ID, len(tagIDs)) + } + case models.CriterionModifierNotNull: + if len(tagIDs) == 0 { + t.Errorf("expected marker %d to have scene tags - found 0", m.ID) + } + case models.CriterionModifierIncludes: + for _, v := range values { + assert.Contains(t, tagIDs, v) + } + case models.CriterionModifierExcludes: + for _, v := range values { + assert.NotContains(t, tagIDs, v) + } + case models.CriterionModifierEquals: + for _, v := range values { + assert.Contains(t, tagIDs, v) + } + assert.Len(t, tagIDs, len(values)) + case models.CriterionModifierNotEquals: + foundAll := true + for _, v := range values { + if !intslice.IntInclude(tagIDs, v) { + foundAll = false + break + } + } + if foundAll && 
len(tagIDs) == len(values) { + t.Errorf("expected marker %d to have scene tags not equal to %v - found %v", m.ID, values, tagIDs) + } } } @@ -191,6 +223,70 @@ func TestMarkerQuerySceneTags(t *testing.T) { }, nil, }, + { + "includes", + &models.SceneMarkerFilterType{ + SceneTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIncludes, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx3WithScene]), + }, + }, + }, + nil, + }, + { + "includes all", + &models.SceneMarkerFilterType{ + SceneTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIncludesAll, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx2WithScene]), + strconv.Itoa(tagIDs[tagIdx3WithScene]), + }, + }, + }, + nil, + }, + { + "equals", + &models.SceneMarkerFilterType{ + SceneTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx2WithScene]), + strconv.Itoa(tagIDs[tagIdx3WithScene]), + }, + }, + }, + nil, + }, + // not equals not supported + // { + // "not equals", + // &models.SceneMarkerFilterType{ + // SceneTags: &models.HierarchicalMultiCriterionInput{ + // Modifier: models.CriterionModifierNotEquals, + // Value: []string{ + // strconv.Itoa(tagIDs[tagIdx2WithScene]), + // strconv.Itoa(tagIDs[tagIdx3WithScene]), + // }, + // }, + // }, + // nil, + // }, + { + "excludes", + &models.SceneMarkerFilterType{ + SceneTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierExcludes, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx2WithScene]), + }, + }, + }, + nil, + }, + } for _, tc := range cases { @@ -198,7 +294,7 @@ func TestMarkerQuerySceneTags(t *testing.T) { markers := queryMarkers(ctx, t, sqlite.SceneMarkerReaderWriter, tc.markerFilter, tc.findFilter) assert.Greater(t, len(markers), 0) for _, m := range markers { - testTags(m, tc.markerFilter) + testTags(t, m, tc.markerFilter) } }) } diff --git a/pkg/sqlite/scene_test.go
b/pkg/sqlite/scene_test.go index 560d3fcfc..7b676fe76 100644 --- a/pkg/sqlite/scene_test.go +++ b/pkg/sqlite/scene_test.go @@ -668,7 +668,8 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) { sceneIDs[sceneIdxWithSpacedName], clearScenePartial(), models.Scene{ - ID: sceneIDs[sceneIdxWithSpacedName], + ID: sceneIDs[sceneIdxWithSpacedName], + OCounter: getOCounter(sceneIdxWithSpacedName), Files: models.NewRelatedVideoFiles([]*file.VideoFile{ makeSceneFile(sceneIdxWithSpacedName), }), @@ -677,6 +678,10 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) { PerformerIDs: models.NewRelatedIDs([]int{}), Movies: models.NewRelatedMovies([]models.MoviesScenes{}), StashIDs: models.NewRelatedStashIDs([]models.StashID{}), + PlayCount: getScenePlayCount(sceneIdxWithSpacedName), + PlayDuration: getScenePlayDuration(sceneIdxWithSpacedName), + LastPlayedAt: getSceneLastPlayed(sceneIdxWithSpacedName), + ResumeTime: getSceneResumeTime(sceneIdxWithSpacedName), }, false, }, @@ -2101,6 +2106,8 @@ func sceneQueryQ(ctx context.Context, t *testing.T, sqb models.SceneReader, q st // no Q should return all results filter.Q = nil + pp := totalScenes + filter.PerPage = &pp scenes = queryScene(ctx, t, sqb, nil, &filter) assert.Len(t, scenes, totalScenes) @@ -2230,8 +2237,8 @@ func TestSceneQuery(t *testing.T) { return } - include := indexesToIDs(performerIDs, tt.includeIdxs) - exclude := indexesToIDs(performerIDs, tt.excludeIdxs) + include := indexesToIDs(sceneIDs, tt.includeIdxs) + exclude := indexesToIDs(sceneIDs, tt.excludeIdxs) for _, i := range include { assert.Contains(results.IDs, i) @@ -3057,7 +3064,13 @@ func queryScenes(ctx context.Context, t *testing.T, queryBuilder models.SceneRea }, } - return queryScene(ctx, t, queryBuilder, &sceneFilter, nil) + // needed so that we don't hit the default limit of 25 scenes + pp := 1000 + findFilter := &models.FindFilterType{ + PerPage: &pp, + } + + return queryScene(ctx, t, queryBuilder, &sceneFilter, findFilter) } func 
createScene(ctx context.Context, width int, height int) (*models.Scene, error) { @@ -3329,192 +3342,473 @@ func TestSceneQueryIsMissingPhash(t *testing.T) { } func TestSceneQueryPerformers(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Scene - performerCriterion := models.MultiCriterionInput{ - Value: []string{ - strconv.Itoa(performerIDs[performerIdxWithScene]), - strconv.Itoa(performerIDs[performerIdx1WithScene]), + tests := []struct { + name string + filter models.MultiCriterionInput + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "includes", + models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(performerIDs[performerIdxWithScene]), + strconv.Itoa(performerIDs[performerIdx1WithScene]), + }, + Modifier: models.CriterionModifierIncludes, }, - Modifier: models.CriterionModifierIncludes, - } - - sceneFilter := models.SceneFilterType{ - Performers: &performerCriterion, - } - - scenes := queryScene(ctx, t, sqb, &sceneFilter, nil) - - assert.Len(t, scenes, 2) - - // ensure ids are correct - for _, scene := range scenes { - assert.True(t, scene.ID == sceneIDs[sceneIdxWithPerformer] || scene.ID == sceneIDs[sceneIdxWithTwoPerformers]) - } - - performerCriterion = models.MultiCriterionInput{ - Value: []string{ - strconv.Itoa(performerIDs[performerIdx1WithScene]), - strconv.Itoa(performerIDs[performerIdx2WithScene]), + []int{ + sceneIdxWithPerformer, + sceneIdxWithTwoPerformers, }, - Modifier: models.CriterionModifierIncludesAll, - } - - scenes = queryScene(ctx, t, sqb, &sceneFilter, nil) - - assert.Len(t, scenes, 1) - assert.Equal(t, sceneIDs[sceneIdxWithTwoPerformers], scenes[0].ID) - - performerCriterion = models.MultiCriterionInput{ - Value: []string{ - strconv.Itoa(performerIDs[performerIdx1WithScene]), + []int{ + sceneIdxWithGallery, }, - Modifier: models.CriterionModifierExcludes, - } + false, + }, + { + "includes all", + models.MultiCriterionInput{ + Value: []string{ + 
strconv.Itoa(performerIDs[performerIdx1WithScene]), + strconv.Itoa(performerIDs[performerIdx2WithScene]), + }, + Modifier: models.CriterionModifierIncludesAll, + }, + []int{ + sceneIdxWithTwoPerformers, + }, + []int{ + sceneIdxWithPerformer, + }, + false, + }, + { + "excludes", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierExcludes, + Value: []string{strconv.Itoa(performerIDs[performerIdx1WithScene])}, + }, + nil, + []int{sceneIdxWithTwoPerformers}, + false, + }, + { + "is null", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + []int{sceneIdxWithTag}, + []int{ + sceneIdxWithPerformer, + sceneIdxWithTwoPerformers, + sceneIdxWithPerformerTwoTags, + }, + false, + }, + { + "not null", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + []int{ + sceneIdxWithPerformer, + sceneIdxWithTwoPerformers, + sceneIdxWithPerformerTwoTags, + }, + []int{sceneIdxWithTag}, + false, + }, + { + "equals", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: []string{ + strconv.Itoa(performerIDs[performerIdx1WithScene]), + strconv.Itoa(performerIDs[performerIdx2WithScene]), + }, + }, + []int{sceneIdxWithTwoPerformers}, + []int{ + sceneIdxWithThreePerformers, + }, + false, + }, + { + "not equals", + models.MultiCriterionInput{ + Modifier: models.CriterionModifierNotEquals, + Value: []string{ + strconv.Itoa(performerIDs[performerIdx1WithScene]), + strconv.Itoa(performerIDs[performerIdx2WithScene]), + }, + }, + nil, + nil, + true, + }, + } - q := getSceneStringValue(sceneIdxWithTwoPerformers, titleField) - findFilter := models.FindFilterType{ - Q: &q, - } + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) - scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter) - assert.Len(t, scenes, 0) + results, err := db.Scene.Query(ctx, models.SceneQueryOptions{ + SceneFilter: &models.SceneFilterType{ + Performers: &tt.filter, + }, + }) +
if (err != nil) != tt.wantErr { + t.Errorf("SceneStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } - return nil - }) + include := indexesToIDs(sceneIDs, tt.includeIdxs) + exclude := indexesToIDs(sceneIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(results.IDs, i) + } + for _, e := range exclude { + assert.NotContains(results.IDs, e) + } + }) + } } func TestSceneQueryTags(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Scene - tagCriterion := models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdxWithScene]), - strconv.Itoa(tagIDs[tagIdx1WithScene]), + tests := []struct { + name string + filter models.HierarchicalMultiCriterionInput + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "includes", + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithScene]), + strconv.Itoa(tagIDs[tagIdx1WithScene]), + }, + Modifier: models.CriterionModifierIncludes, }, - Modifier: models.CriterionModifierIncludes, - } - - sceneFilter := models.SceneFilterType{ - Tags: &tagCriterion, - } - - scenes := queryScene(ctx, t, sqb, &sceneFilter, nil) - assert.Len(t, scenes, 2) - - // ensure ids are correct - for _, scene := range scenes { - assert.True(t, scene.ID == sceneIDs[sceneIdxWithTag] || scene.ID == sceneIDs[sceneIdxWithTwoTags]) - } - - tagCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdx1WithScene]), - strconv.Itoa(tagIDs[tagIdx2WithScene]), + []int{ + sceneIdxWithTag, + sceneIdxWithTwoTags, }, - Modifier: models.CriterionModifierIncludesAll, - } - - scenes = queryScene(ctx, t, sqb, &sceneFilter, nil) - - assert.Len(t, scenes, 1) - assert.Equal(t, sceneIDs[sceneIdxWithTwoTags], scenes[0].ID) - - tagCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdx1WithScene]), + []int{ + sceneIdxWithGallery, }, - Modifier: 
models.CriterionModifierExcludes, - } + false, + }, + { + "includes all", + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithScene]), + strconv.Itoa(tagIDs[tagIdx2WithScene]), + }, + Modifier: models.CriterionModifierIncludesAll, + }, + []int{ + sceneIdxWithTwoTags, + }, + []int{ + sceneIdxWithTag, + }, + false, + }, + { + "excludes", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierExcludes, + Value: []string{strconv.Itoa(tagIDs[tagIdx1WithScene])}, + }, + nil, + []int{sceneIdxWithTwoTags}, + false, + }, + { + "is null", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + []int{sceneIdx1WithPerformer}, + []int{ + sceneIdxWithTag, + sceneIdxWithTwoTags, + sceneIdxWithMarkerAndTag, + }, + false, + }, + { + "not null", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + []int{ + sceneIdxWithTag, + sceneIdxWithTwoTags, + sceneIdxWithMarkerAndTag, + }, + []int{sceneIdx1WithPerformer}, + false, + }, + { + "equals", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithScene]), + strconv.Itoa(tagIDs[tagIdx2WithScene]), + }, + }, + []int{sceneIdxWithTwoTags}, + []int{ + sceneIdxWithThreeTags, + }, + false, + }, + { + "not equals", + models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithScene]), + strconv.Itoa(tagIDs[tagIdx2WithScene]), + }, + }, + nil, + nil, + true, + }, + } - q := getSceneStringValue(sceneIdxWithTwoTags, titleField) - findFilter := models.FindFilterType{ - Q: &q, - } + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) - scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter) - assert.Len(t, scenes, 0) + results, err := db.Scene.Query(ctx, 
models.SceneQueryOptions{ + SceneFilter: &models.SceneFilterType{ + Tags: &tt.filter, + }, + }) + if (err != nil) != tt.wantErr { + t.Errorf("SceneStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } - return nil - }) + include := indexesToIDs(sceneIDs, tt.includeIdxs) + exclude := indexesToIDs(sceneIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(results.IDs, i) + } + for _, e := range exclude { + assert.NotContains(results.IDs, e) + } + }) + } } func TestSceneQueryPerformerTags(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Scene - tagCriterion := models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdxWithPerformer]), - strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + allDepth := -1 + + tests := []struct { + name string + findFilter *models.FindFilterType + filter *models.SceneFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "includes", + nil, + &models.SceneFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithPerformer]), + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + }, + Modifier: models.CriterionModifierIncludes, + }, }, - Modifier: models.CriterionModifierIncludes, - } - - sceneFilter := models.SceneFilterType{ - PerformerTags: &tagCriterion, - } - - scenes := queryScene(ctx, t, sqb, &sceneFilter, nil) - assert.Len(t, scenes, 2) - - // ensure ids are correct - for _, scene := range scenes { - assert.True(t, scene.ID == sceneIDs[sceneIdxWithPerformerTag] || scene.ID == sceneIDs[sceneIdxWithPerformerTwoTags]) - } - - tagCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdx1WithPerformer]), - strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + []int{ + sceneIdxWithPerformerTag, + sceneIdxWithPerformerTwoTags, + sceneIdxWithTwoPerformerTag, }, - Modifier: models.CriterionModifierIncludesAll, - } - - scenes = queryScene(ctx, t, sqb, 
&sceneFilter, nil) - - assert.Len(t, scenes, 1) - assert.Equal(t, sceneIDs[sceneIdxWithPerformerTwoTags], scenes[0].ID) - - tagCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + []int{ + sceneIdxWithPerformer, }, - Modifier: models.CriterionModifierExcludes, - } + false, + }, + { + "includes sub-tags", + nil, + &models.SceneFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithParentAndChild]), + }, + Depth: &allDepth, + Modifier: models.CriterionModifierIncludes, + }, + }, + []int{ + sceneIdxWithPerformerParentTag, + }, + []int{ + sceneIdxWithPerformer, + sceneIdxWithPerformerTag, + sceneIdxWithPerformerTwoTags, + sceneIdxWithTwoPerformerTag, + }, + false, + }, + { + "includes all", + nil, + &models.SceneFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + Modifier: models.CriterionModifierIncludesAll, + }, + }, + []int{ + sceneIdxWithPerformerTwoTags, + }, + []int{ + sceneIdxWithPerformer, + sceneIdxWithPerformerTag, + sceneIdxWithTwoPerformerTag, + }, + false, + }, + { + "excludes performer tag tagIdx2WithPerformer", + nil, + &models.SceneFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierExcludes, + Value: []string{strconv.Itoa(tagIDs[tagIdx2WithPerformer])}, + }, + }, + nil, + []int{sceneIdxWithTwoPerformerTag}, + false, + }, + { + "excludes sub-tags", + nil, + &models.SceneFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithParentAndChild]), + }, + Depth: &allDepth, + Modifier: models.CriterionModifierExcludes, + }, + }, + []int{ + sceneIdxWithPerformer, + sceneIdxWithPerformerTag, + sceneIdxWithPerformerTwoTags, + sceneIdxWithTwoPerformerTag, + }, + []int{ + 
sceneIdxWithPerformerParentTag, + }, + false, + }, + { + "is null", + nil, + &models.SceneFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + }, + []int{sceneIdx1WithPerformer}, + []int{sceneIdxWithPerformerTag}, + false, + }, + { + "not null", + nil, + &models.SceneFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + }, + []int{sceneIdxWithPerformerTag}, + []int{sceneIdx1WithPerformer}, + false, + }, + { + "equals", + nil, + &models.SceneFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + }, + }, + nil, + nil, + true, + }, + { + "not equals", + nil, + &models.SceneFilterType{ + PerformerTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotEquals, + Value: []string{ + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + }, + }, + nil, + nil, + true, + }, + } - q := getSceneStringValue(sceneIdxWithPerformerTwoTags, titleField) - findFilter := models.FindFilterType{ - Q: &q, - } + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) - scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter) - assert.Len(t, scenes, 0) + results, err := db.Scene.Query(ctx, models.SceneQueryOptions{ + SceneFilter: tt.filter, + QueryOptions: models.QueryOptions{ + FindFilter: tt.findFilter, + }, + }) + if (err != nil) != tt.wantErr { + t.Errorf("SceneStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } - tagCriterion = models.HierarchicalMultiCriterionInput{ - Modifier: models.CriterionModifierIsNull, - } - q = getSceneStringValue(sceneIdx1WithPerformer, titleField) + include := indexesToIDs(sceneIDs, tt.includeIdxs) + exclude := indexesToIDs(sceneIDs, tt.excludeIdxs) - scenes = queryScene(ctx, t, 
sqb, &sceneFilter, &findFilter) - assert.Len(t, scenes, 1) - assert.Equal(t, sceneIDs[sceneIdx1WithPerformer], scenes[0].ID) - - q = getSceneStringValue(sceneIdxWithPerformerTag, titleField) - scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter) - assert.Len(t, scenes, 0) - - tagCriterion.Modifier = models.CriterionModifierNotNull - - scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter) - assert.Len(t, scenes, 1) - assert.Equal(t, sceneIDs[sceneIdxWithPerformerTag], scenes[0].ID) - - q = getSceneStringValue(sceneIdx1WithPerformer, titleField) - scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter) - assert.Len(t, scenes, 0) - - return nil - }) + for _, i := range include { + assert.Contains(results.IDs, i) + } + for _, e := range exclude { + assert.NotContains(results.IDs, e) + } + }) + } } func TestSceneQueryStudio(t *testing.T) { @@ -3561,6 +3855,30 @@ func TestSceneQueryStudio(t *testing.T) { []int{sceneIDs[sceneIdxWithGallery]}, false, }, + { + "equals", + "", + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithScene]), + }, + Modifier: models.CriterionModifierEquals, + }, + []int{sceneIDs[sceneIdxWithStudio]}, + false, + }, + { + "not equals", + getSceneStringValue(sceneIdxWithStudio, titleField), + models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithScene]), + }, + Modifier: models.CriterionModifierNotEquals, + }, + []int{}, + false, + }, } qb := db.Scene @@ -4237,7 +4555,8 @@ func TestSceneStore_FindDuplicates(t *testing.T) { withRollbackTxn(func(ctx context.Context) error { distance := 0 - got, err := qb.FindDuplicates(ctx, distance) + durationDiff := -1. 
+ got, err := qb.FindDuplicates(ctx, distance, durationDiff) if err != nil { t.Errorf("SceneStore.FindDuplicates() error = %v", err) return nil @@ -4246,7 +4565,8 @@ func TestSceneStore_FindDuplicates(t *testing.T) { assert.Len(t, got, dupeScenePhashes) distance = 1 - got, err = qb.FindDuplicates(ctx, distance) + durationDiff = -1. + got, err = qb.FindDuplicates(ctx, distance, durationDiff) if err != nil { t.Errorf("SceneStore.FindDuplicates() error = %v", err) return nil diff --git a/pkg/sqlite/setup_test.go b/pkg/sqlite/setup_test.go index 4300111cf..12a56947b 100644 --- a/pkg/sqlite/setup_test.go +++ b/pkg/sqlite/setup_test.go @@ -60,19 +60,24 @@ const ( sceneIdx1WithPerformer sceneIdx2WithPerformer sceneIdxWithTwoPerformers + sceneIdxWithThreePerformers sceneIdxWithTag sceneIdxWithTwoTags + sceneIdxWithThreeTags sceneIdxWithMarkerAndTag + sceneIdxWithMarkerTwoTags sceneIdxWithStudio sceneIdx1WithStudio sceneIdx2WithStudio sceneIdxWithMarkers sceneIdxWithPerformerTag + sceneIdxWithTwoPerformerTag sceneIdxWithPerformerTwoTags sceneIdxWithSpacedName sceneIdxWithStudioPerformer sceneIdxWithGrandChildStudio sceneIdxMissingPhash + sceneIdxWithPerformerParentTag // new indexes above lastSceneIdx @@ -90,16 +95,20 @@ const ( imageIdx1WithPerformer imageIdx2WithPerformer imageIdxWithTwoPerformers + imageIdxWithThreePerformers imageIdxWithTag imageIdxWithTwoTags + imageIdxWithThreeTags imageIdxWithStudio imageIdx1WithStudio imageIdx2WithStudio imageIdxWithStudioPerformer imageIdxInZip imageIdxWithPerformerTag + imageIdxWithTwoPerformerTag imageIdxWithPerformerTwoTags imageIdxWithGrandChildStudio + imageIdxWithPerformerParentTag // new indexes above totalImages ) @@ -108,20 +117,25 @@ const ( performerIdxWithScene = iota performerIdx1WithScene performerIdx2WithScene + performerIdx3WithScene performerIdxWithTwoScenes performerIdxWithImage performerIdxWithTwoImages performerIdx1WithImage performerIdx2WithImage + performerIdx3WithImage performerIdxWithTag + 
performerIdx2WithTag performerIdxWithTwoTags performerIdxWithGallery performerIdxWithTwoGalleries performerIdx1WithGallery performerIdx2WithGallery + performerIdx3WithGallery performerIdxWithSceneStudio performerIdxWithImageStudio performerIdxWithGalleryStudio + performerIdxWithParentTag // new indexes above // performers with dup names start from the end performerIdx1WithDupName @@ -155,16 +169,20 @@ const ( galleryIdx1WithPerformer galleryIdx2WithPerformer galleryIdxWithTwoPerformers + galleryIdxWithThreePerformers galleryIdxWithTag galleryIdxWithTwoTags + galleryIdxWithThreeTags galleryIdxWithStudio galleryIdx1WithStudio galleryIdx2WithStudio galleryIdxWithPerformerTag + galleryIdxWithTwoPerformerTag galleryIdxWithPerformerTwoTags galleryIdxWithStudioPerformer galleryIdxWithGrandChildStudio galleryIdxWithoutFile + galleryIdxWithPerformerParentTag // new indexes above lastGalleryIdx @@ -182,12 +200,14 @@ const ( tagIdxWithImage tagIdx1WithImage tagIdx2WithImage + tagIdx3WithImage tagIdxWithPerformer tagIdx1WithPerformer tagIdx2WithPerformer tagIdxWithGallery tagIdx1WithGallery tagIdx2WithGallery + tagIdx3WithGallery tagIdxWithChildTag tagIdxWithParentTag tagIdxWithGrandChild @@ -332,19 +352,24 @@ var ( var ( sceneTags = linkMap{ - sceneIdxWithTag: {tagIdxWithScene}, - sceneIdxWithTwoTags: {tagIdx1WithScene, tagIdx2WithScene}, - sceneIdxWithMarkerAndTag: {tagIdx3WithScene}, + sceneIdxWithTag: {tagIdxWithScene}, + sceneIdxWithTwoTags: {tagIdx1WithScene, tagIdx2WithScene}, + sceneIdxWithThreeTags: {tagIdx1WithScene, tagIdx2WithScene, tagIdx3WithScene}, + sceneIdxWithMarkerAndTag: {tagIdx3WithScene}, + sceneIdxWithMarkerTwoTags: {tagIdx2WithScene, tagIdx3WithScene}, } scenePerformers = linkMap{ - sceneIdxWithPerformer: {performerIdxWithScene}, - sceneIdxWithTwoPerformers: {performerIdx1WithScene, performerIdx2WithScene}, - sceneIdxWithPerformerTag: {performerIdxWithTag}, - sceneIdxWithPerformerTwoTags: {performerIdxWithTwoTags}, - sceneIdx1WithPerformer: 
{performerIdxWithTwoScenes}, - sceneIdx2WithPerformer: {performerIdxWithTwoScenes}, - sceneIdxWithStudioPerformer: {performerIdxWithSceneStudio}, + sceneIdxWithPerformer: {performerIdxWithScene}, + sceneIdxWithTwoPerformers: {performerIdx1WithScene, performerIdx2WithScene}, + sceneIdxWithThreePerformers: {performerIdx1WithScene, performerIdx2WithScene, performerIdx3WithScene}, + sceneIdxWithPerformerTag: {performerIdxWithTag}, + sceneIdxWithTwoPerformerTag: {performerIdxWithTag, performerIdx2WithTag}, + sceneIdxWithPerformerTwoTags: {performerIdxWithTwoTags}, + sceneIdx1WithPerformer: {performerIdxWithTwoScenes}, + sceneIdx2WithPerformer: {performerIdxWithTwoScenes}, + sceneIdxWithStudioPerformer: {performerIdxWithSceneStudio}, + sceneIdxWithPerformerParentTag: {performerIdxWithParentTag}, } sceneGalleries = linkMap{ @@ -376,6 +401,7 @@ var ( {sceneIdxWithMarkers, tagIdxWithPrimaryMarkers, nil}, {sceneIdxWithMarkers, tagIdxWithPrimaryMarkers, []int{tagIdxWithMarkers}}, {sceneIdxWithMarkerAndTag, tagIdxWithPrimaryMarkers, nil}, + {sceneIdxWithMarkerTwoTags, tagIdxWithPrimaryMarkers, nil}, } ) @@ -407,29 +433,36 @@ var ( imageIdxWithGrandChildStudio: studioIdxWithGrandParent, } imageTags = linkMap{ - imageIdxWithTag: {tagIdxWithImage}, - imageIdxWithTwoTags: {tagIdx1WithImage, tagIdx2WithImage}, + imageIdxWithTag: {tagIdxWithImage}, + imageIdxWithTwoTags: {tagIdx1WithImage, tagIdx2WithImage}, + imageIdxWithThreeTags: {tagIdx1WithImage, tagIdx2WithImage, tagIdx3WithImage}, } imagePerformers = linkMap{ - imageIdxWithPerformer: {performerIdxWithImage}, - imageIdxWithTwoPerformers: {performerIdx1WithImage, performerIdx2WithImage}, - imageIdxWithPerformerTag: {performerIdxWithTag}, - imageIdxWithPerformerTwoTags: {performerIdxWithTwoTags}, - imageIdx1WithPerformer: {performerIdxWithTwoImages}, - imageIdx2WithPerformer: {performerIdxWithTwoImages}, - imageIdxWithStudioPerformer: {performerIdxWithImageStudio}, + imageIdxWithPerformer: {performerIdxWithImage}, + 
imageIdxWithTwoPerformers: {performerIdx1WithImage, performerIdx2WithImage}, + imageIdxWithThreePerformers: {performerIdx1WithImage, performerIdx2WithImage, performerIdx3WithImage}, + imageIdxWithPerformerTag: {performerIdxWithTag}, + imageIdxWithTwoPerformerTag: {performerIdxWithTag, performerIdx2WithTag}, + imageIdxWithPerformerTwoTags: {performerIdxWithTwoTags}, + imageIdx1WithPerformer: {performerIdxWithTwoImages}, + imageIdx2WithPerformer: {performerIdxWithTwoImages}, + imageIdxWithStudioPerformer: {performerIdxWithImageStudio}, + imageIdxWithPerformerParentTag: {performerIdxWithParentTag}, } ) var ( galleryPerformers = linkMap{ - galleryIdxWithPerformer: {performerIdxWithGallery}, - galleryIdxWithTwoPerformers: {performerIdx1WithGallery, performerIdx2WithGallery}, - galleryIdxWithPerformerTag: {performerIdxWithTag}, - galleryIdxWithPerformerTwoTags: {performerIdxWithTwoTags}, - galleryIdx1WithPerformer: {performerIdxWithTwoGalleries}, - galleryIdx2WithPerformer: {performerIdxWithTwoGalleries}, - galleryIdxWithStudioPerformer: {performerIdxWithGalleryStudio}, + galleryIdxWithPerformer: {performerIdxWithGallery}, + galleryIdxWithTwoPerformers: {performerIdx1WithGallery, performerIdx2WithGallery}, + galleryIdxWithThreePerformers: {performerIdx1WithGallery, performerIdx2WithGallery, performerIdx3WithGallery}, + galleryIdxWithPerformerTag: {performerIdxWithTag}, + galleryIdxWithTwoPerformerTag: {performerIdxWithTag, performerIdx2WithTag}, + galleryIdxWithPerformerTwoTags: {performerIdxWithTwoTags}, + galleryIdx1WithPerformer: {performerIdxWithTwoGalleries}, + galleryIdx2WithPerformer: {performerIdxWithTwoGalleries}, + galleryIdxWithStudioPerformer: {performerIdxWithGalleryStudio}, + galleryIdxWithPerformerParentTag: {performerIdxWithParentTag}, } galleryStudios = map[int]int{ @@ -441,8 +474,9 @@ var ( } galleryTags = linkMap{ - galleryIdxWithTag: {tagIdxWithGallery}, - galleryIdxWithTwoTags: {tagIdx1WithGallery, tagIdx2WithGallery}, + galleryIdxWithTag: 
{tagIdxWithGallery}, + galleryIdxWithTwoTags: {tagIdx1WithGallery, tagIdx2WithGallery}, + galleryIdxWithThreeTags: {tagIdx1WithGallery, tagIdx2WithGallery, tagIdx3WithGallery}, } ) @@ -462,8 +496,10 @@ var ( var ( performerTags = linkMap{ - performerIdxWithTag: {tagIdxWithPerformer}, - performerIdxWithTwoTags: {tagIdx1WithPerformer, tagIdx2WithPerformer}, + performerIdxWithTag: {tagIdxWithPerformer}, + performerIdx2WithTag: {tagIdx2WithPerformer}, + performerIdxWithTwoTags: {tagIdx1WithPerformer, tagIdx2WithPerformer}, + performerIdxWithParentTag: {tagIdxWithParentAndChild}, } ) @@ -484,6 +520,16 @@ func indexesToIDs(ids []int, indexes []int) []int { return ret } +func indexFromID(ids []int, id int) int { + for i, v := range ids { + if v == id { + return i + } + } + + return -1 +} + var db *sqlite.Database func TestMain(m *testing.M) { @@ -1331,6 +1377,29 @@ func getPerformerCareerLength(index int) *string { return &ret } +func getPerformerPenisLength(index int) *float64 { + if index%5 == 0 { + return nil + } + + ret := float64(index) + return &ret +} + +func getPerformerCircumcised(index int) *models.CircumisedEnum { + var ret models.CircumisedEnum + switch { + case index%3 == 0: + return nil + case index%3 == 1: + ret = models.CircumisedEnumCut + default: + ret = models.CircumisedEnumUncut + } + + return &ret +} + func getIgnoreAutoTag(index int) bool { return index%5 == 0 } @@ -1372,6 +1441,8 @@ func createPerformers(ctx context.Context, n int, o int) error { DeathDate: getPerformerDeathDate(i), Details: getPerformerStringValue(i, "Details"), Ethnicity: getPerformerStringValue(i, "Ethnicity"), + PenisLength: getPerformerPenisLength(i), + Circumcised: getPerformerCircumcised(i), Rating: getIntPtr(getRating(i)), IgnoreAutoTag: getIgnoreAutoTag(i), TagIDs: models.NewRelatedIDs(tids), @@ -1406,11 +1477,8 @@ func getTagStringValue(index int, field string) string { } func getTagSceneCount(id int) int { - if id == tagIDs[tagIdx1WithScene] || id == 
tagIDs[tagIdx2WithScene] || id == tagIDs[tagIdxWithScene] || id == tagIDs[tagIdx3WithScene] { - return 1 - } - - return 0 + idx := indexFromID(tagIDs, id) + return len(sceneTags.reverseLookup(idx)) } func getTagMarkerCount(id int) int { @@ -1426,27 +1494,18 @@ func getTagMarkerCount(id int) int { } func getTagImageCount(id int) int { - if id == tagIDs[tagIdx1WithImage] || id == tagIDs[tagIdx2WithImage] || id == tagIDs[tagIdxWithImage] { - return 1 - } - - return 0 + idx := indexFromID(tagIDs, id) + return len(imageTags.reverseLookup(idx)) } func getTagGalleryCount(id int) int { - if id == tagIDs[tagIdx1WithGallery] || id == tagIDs[tagIdx2WithGallery] || id == tagIDs[tagIdxWithGallery] { - return 1 - } - - return 0 + idx := indexFromID(tagIDs, id) + return len(galleryTags.reverseLookup(idx)) } func getTagPerformerCount(id int) int { - if id == tagIDs[tagIdx1WithPerformer] || id == tagIDs[tagIdx2WithPerformer] || id == tagIDs[tagIdxWithPerformer] { - return 1 - } - - return 0 + idx := indexFromID(tagIDs, id) + return len(performerTags.reverseLookup(idx)) } func getTagParentCount(id int) int { @@ -1726,5 +1785,5 @@ func linkTagsParent(ctx context.Context, qb models.TagReaderWriter) error { } func addTagImage(ctx context.Context, qb models.TagWriter, tagIndex int) error { - return qb.UpdateImage(ctx, tagIDs[tagIndex], models.DefaultTagImage) + return qb.UpdateImage(ctx, tagIDs[tagIndex], []byte("image")) } diff --git a/pkg/sqlite/sql.go b/pkg/sqlite/sql.go index af864df01..90b922520 100644 --- a/pkg/sqlite/sql.go +++ b/pkg/sqlite/sql.go @@ -82,10 +82,10 @@ func getSort(sort string, direction string, tableName string) string { colName = sort } if strings.Compare(sort, "name") == 0 { - return " ORDER BY " + colName + " COLLATE NOCASE " + direction + return " ORDER BY " + colName + " COLLATE NATURAL_CI " + direction } if strings.Compare(sort, "title") == 0 { - return " ORDER BY " + colName + " COLLATE NATURAL_CS " + direction + return " ORDER BY " + colName + " COLLATE 
NATURAL_CI " + direction } return " ORDER BY " + colName + " " + direction @@ -103,6 +103,27 @@ func getCountSort(primaryTable, joinTable, primaryFK, direction string) string { return fmt.Sprintf(" ORDER BY (SELECT COUNT(*) FROM %s WHERE %s = %s.id) %s", joinTable, primaryFK, primaryTable, getSortDirection(direction)) } +func getMultiSumSort(sum string, primaryTable, foreignTable1, joinTable1, foreignTable2, joinTable2, primaryFK, foreignFK1, foreignFK2, direction string) string { + return fmt.Sprintf(" ORDER BY (SELECT SUM(%s) "+ + "FROM ("+ + "SELECT SUM(%s) as %s from %s s "+ + "LEFT JOIN %s ON %s.id = s.%s "+ + "WHERE s.%s = %s.id "+ + "UNION ALL "+ + "SELECT SUM(%s) as %s from %s s "+ + "LEFT JOIN %s ON %s.id = s.%s "+ + "WHERE s.%s = %s.id "+ + ")) %s", + sum, + sum, sum, joinTable1, + foreignTable1, foreignTable1, foreignFK1, + primaryFK, primaryTable, + sum, sum, joinTable2, + foreignTable2, foreignTable2, foreignFK2, + primaryFK, primaryTable, + getSortDirection(direction)) +} + func getStringSearchClause(columns []string, q string, not bool) sqlClause { var likeClauses []string var args []interface{} @@ -138,6 +159,22 @@ func getStringSearchClause(columns []string, q string, not bool) sqlClause { return makeClause("("+likes+")", args...) } +func getEnumSearchClause(column string, enumVals []string, not bool) sqlClause { + var args []interface{} + + notStr := "" + if not { + notStr = " NOT" + } + + clause := fmt.Sprintf("(%s%s IN %s)", column, notStr, getInBinding(len(enumVals))) + for _, enumVal := range enumVals { + args = append(args, enumVal) + } + + return makeClause(clause, args...) 
+} + func getInBinding(length int) string { bindings := strings.Repeat("?, ", length) bindings = strings.TrimRight(bindings, ", ") @@ -154,8 +191,26 @@ func getIntWhereClause(column string, modifier models.CriterionModifier, value i upper = &u } - args := []interface{}{value} - betweenArgs := []interface{}{value, *upper} + args := []interface{}{value, *upper} + return getNumericWhereClause(column, modifier, args) +} + +func getFloatCriterionWhereClause(column string, input models.FloatCriterionInput) (string, []interface{}) { + return getFloatWhereClause(column, input.Modifier, input.Value, input.Value2) +} + +func getFloatWhereClause(column string, modifier models.CriterionModifier, value float64, upper *float64) (string, []interface{}) { + if upper == nil { + u := 0.0 + upper = &u + } + + args := []interface{}{value, *upper} + return getNumericWhereClause(column, modifier, args) +} + +func getNumericWhereClause(column string, modifier models.CriterionModifier, args []interface{}) (string, []interface{}) { + singleArgs := args[0:1] switch modifier { case models.CriterionModifierIsNull: @@ -163,20 +218,20 @@ func getIntWhereClause(column string, modifier models.CriterionModifier, value i case models.CriterionModifierNotNull: return fmt.Sprintf("%s IS NOT NULL", column), nil case models.CriterionModifierEquals: - return fmt.Sprintf("%s = ?", column), args + return fmt.Sprintf("%s = ?", column), singleArgs case models.CriterionModifierNotEquals: - return fmt.Sprintf("%s != ?", column), args + return fmt.Sprintf("%s != ?", column), singleArgs case models.CriterionModifierBetween: - return fmt.Sprintf("%s BETWEEN ? AND ?", column), betweenArgs + return fmt.Sprintf("%s BETWEEN ? AND ?", column), args case models.CriterionModifierNotBetween: - return fmt.Sprintf("%s NOT BETWEEN ? AND ?", column), betweenArgs + return fmt.Sprintf("%s NOT BETWEEN ? 
AND ?", column), args case models.CriterionModifierLessThan: - return fmt.Sprintf("%s < ?", column), args + return fmt.Sprintf("%s < ?", column), singleArgs case models.CriterionModifierGreaterThan: - return fmt.Sprintf("%s > ?", column), args + return fmt.Sprintf("%s > ?", column), singleArgs } - panic("unsupported int modifier type " + modifier) + panic("unsupported numeric modifier type " + modifier) } func getDateCriterionWhereClause(column string, input models.DateCriterionInput) (string, []interface{}) { @@ -287,6 +342,28 @@ func getCountCriterionClause(primaryTable, joinTable, primaryFK string, criterio return getIntCriterionWhereClause(lhs, criterion) } +func getJoinedMultiSumCriterionClause(primaryTable, foreignTable1, joinTable1, foreignTable2, joinTable2, primaryFK string, foreignFK1 string, foreignFK2 string, sum string, criterion models.IntCriterionInput) (string, []interface{}) { + lhs := fmt.Sprintf("(SELECT SUM(%s) "+ + "FROM ("+ + "SELECT SUM(%s) as %s from %s s "+ + "LEFT JOIN %s ON %s.id = s.%s "+ + "WHERE s.%s = %s.id "+ + "UNION ALL "+ + "SELECT SUM(%s) as %s from %s s "+ + "LEFT JOIN %s ON %s.id = s.%s "+ + "WHERE s.%s = %s.id "+ + "))", + sum, + sum, sum, joinTable1, + foreignTable1, foreignTable1, foreignFK1, + primaryFK, primaryTable, + sum, sum, joinTable2, + foreignTable2, foreignTable2, foreignFK2, + primaryFK, primaryTable, + ) + return getIntCriterionWhereClause(lhs, criterion) +} + func coalesce(column string) string { return fmt.Sprintf("COALESCE(%s, '')", column) } diff --git a/pkg/sqlite/studio.go b/pkg/sqlite/studio.go index b8f783de1..0b5ed7f2f 100644 --- a/pkg/sqlite/studio.go +++ b/pkg/sqlite/studio.go @@ -414,16 +414,21 @@ func (qb *studioQueryBuilder) getStudioSort(findFilter *models.FindFilterType) s direction = findFilter.GetDirection() } + sortQuery := "" switch sort { case "scenes_count": - return getCountSort(studioTable, sceneTable, studioIDColumn, direction) + sortQuery += getCountSort(studioTable, sceneTable, 
studioIDColumn, direction) case "images_count": - return getCountSort(studioTable, imageTable, studioIDColumn, direction) + sortQuery += getCountSort(studioTable, imageTable, studioIDColumn, direction) case "galleries_count": - return getCountSort(studioTable, galleryTable, studioIDColumn, direction) + sortQuery += getCountSort(studioTable, galleryTable, studioIDColumn, direction) default: - return getSort(sort, direction, "studios") + sortQuery += getSort(sort, direction, "studios") } + + // Whatever the sorting, always use name/id as a final sort + sortQuery += ", COALESCE(studios.name, studios.id) COLLATE NATURAL_CI ASC" + return sortQuery } func (qb *studioQueryBuilder) queryStudio(ctx context.Context, query string, args []interface{}) (*models.Studio, error) { diff --git a/pkg/sqlite/tag.go b/pkg/sqlite/tag.go index 9ad4abcaf..0c9f7422e 100644 --- a/pkg/sqlite/tag.go +++ b/pkg/sqlite/tag.go @@ -474,9 +474,19 @@ func tagMarkerCountCriterionHandler(qb *tagQueryBuilder, markerCount *models.Int } } -func tagParentsCriterionHandler(qb *tagQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { +func tagParentsCriterionHandler(qb *tagQueryBuilder, criterion *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { - if tags != nil { + if criterion != nil { + tags := criterion.CombineExcludes() + + // validate the modifier + switch tags.Modifier { + case models.CriterionModifierIncludesAll, models.CriterionModifierIncludes, models.CriterionModifierExcludes, models.CriterionModifierIsNull, models.CriterionModifierNotNull: + // valid + default: + f.setError(fmt.Errorf("invalid modifier %s for tag parent/children", criterion.Modifier)) + } + if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { var notClause string if tags.Modifier == models.CriterionModifierNotNull { @@ -489,43 +499,88 @@ func tagParentsCriterionHandler(qb 
*tagQueryBuilder, tags *models.HierarchicalMu return } - if len(tags.Value) == 0 { + if len(tags.Value) == 0 && len(tags.Excludes) == 0 { return } - var args []interface{} - for _, val := range tags.Value { - args = append(args, val) + if len(tags.Value) > 0 { + var args []interface{} + for _, val := range tags.Value { + args = append(args, val) + } + + depthVal := 0 + if tags.Depth != nil { + depthVal = *tags.Depth + } + + var depthCondition string + if depthVal != -1 { + depthCondition = fmt.Sprintf("WHERE depth < %d", depthVal) + } + + query := `parents AS ( + SELECT parent_id AS root_id, child_id AS item_id, 0 AS depth FROM tags_relations WHERE parent_id IN` + getInBinding(len(tags.Value)) + ` + UNION + SELECT root_id, child_id, depth + 1 FROM tags_relations INNER JOIN parents ON item_id = parent_id ` + depthCondition + ` + )` + + f.addRecursiveWith(query, args...) + + f.addLeftJoin("parents", "", "parents.item_id = tags.id") + + addHierarchicalConditionClauses(f, tags, "parents", "root_id") } - depthVal := 0 - if tags.Depth != nil { - depthVal = *tags.Depth + if len(tags.Excludes) > 0 { + var args []interface{} + for _, val := range tags.Excludes { + args = append(args, val) + } + + depthVal := 0 + if tags.Depth != nil { + depthVal = *tags.Depth + } + + var depthCondition string + if depthVal != -1 { + depthCondition = fmt.Sprintf("WHERE depth < %d", depthVal) + } + + query := `parents2 AS ( + SELECT parent_id AS root_id, child_id AS item_id, 0 AS depth FROM tags_relations WHERE parent_id IN` + getInBinding(len(tags.Excludes)) + ` + UNION + SELECT root_id, child_id, depth + 1 FROM tags_relations INNER JOIN parents2 ON item_id = parent_id ` + depthCondition + ` + )` + + f.addRecursiveWith(query, args...) 
+ + f.addLeftJoin("parents2", "", "parents2.item_id = tags.id") + + addHierarchicalConditionClauses(f, models.HierarchicalMultiCriterionInput{ + Value: tags.Excludes, + Depth: tags.Depth, + Modifier: models.CriterionModifierExcludes, + }, "parents2", "root_id") } - - var depthCondition string - if depthVal != -1 { - depthCondition = fmt.Sprintf("WHERE depth < %d", depthVal) - } - - query := `parents AS ( - SELECT parent_id AS root_id, child_id AS item_id, 0 AS depth FROM tags_relations WHERE parent_id IN` + getInBinding(len(tags.Value)) + ` - UNION - SELECT root_id, child_id, depth + 1 FROM tags_relations INNER JOIN parents ON item_id = parent_id ` + depthCondition + ` -)` - - f.addRecursiveWith(query, args...) - - f.addLeftJoin("parents", "", "parents.item_id = tags.id") - - addHierarchicalConditionClauses(f, tags, "parents", "root_id") } } } -func tagChildrenCriterionHandler(qb *tagQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { +func tagChildrenCriterionHandler(qb *tagQueryBuilder, criterion *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { - if tags != nil { + if criterion != nil { + tags := criterion.CombineExcludes() + + // validate the modifier + switch tags.Modifier { + case models.CriterionModifierIncludesAll, models.CriterionModifierIncludes, models.CriterionModifierExcludes, models.CriterionModifierIsNull, models.CriterionModifierNotNull: + // valid + default: + f.setError(fmt.Errorf("invalid modifier %s for tag parent/children", criterion.Modifier)) + } + if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { var notClause string if tags.Modifier == models.CriterionModifierNotNull { @@ -538,36 +593,71 @@ func tagChildrenCriterionHandler(qb *tagQueryBuilder, tags *models.HierarchicalM return } - if len(tags.Value) == 0 { + if len(tags.Value) == 0 && len(tags.Excludes) == 0 { return } - var args 
[]interface{} - for _, val := range tags.Value { - args = append(args, val) + if len(tags.Value) > 0 { + var args []interface{} + for _, val := range tags.Value { + args = append(args, val) + } + + depthVal := 0 + if tags.Depth != nil { + depthVal = *tags.Depth + } + + var depthCondition string + if depthVal != -1 { + depthCondition = fmt.Sprintf("WHERE depth < %d", depthVal) + } + + query := `children AS ( + SELECT child_id AS root_id, parent_id AS item_id, 0 AS depth FROM tags_relations WHERE child_id IN` + getInBinding(len(tags.Value)) + ` + UNION + SELECT root_id, parent_id, depth + 1 FROM tags_relations INNER JOIN children ON item_id = child_id ` + depthCondition + ` + )` + + f.addRecursiveWith(query, args...) + + f.addLeftJoin("children", "", "children.item_id = tags.id") + + addHierarchicalConditionClauses(f, tags, "children", "root_id") } - depthVal := 0 - if tags.Depth != nil { - depthVal = *tags.Depth + if len(tags.Excludes) > 0 { + var args []interface{} + for _, val := range tags.Excludes { + args = append(args, val) + } + + depthVal := 0 + if tags.Depth != nil { + depthVal = *tags.Depth + } + + var depthCondition string + if depthVal != -1 { + depthCondition = fmt.Sprintf("WHERE depth < %d", depthVal) + } + + query := `children2 AS ( + SELECT child_id AS root_id, parent_id AS item_id, 0 AS depth FROM tags_relations WHERE child_id IN` + getInBinding(len(tags.Excludes)) + ` + UNION + SELECT root_id, parent_id, depth + 1 FROM tags_relations INNER JOIN children2 ON item_id = child_id ` + depthCondition + ` + )` + + f.addRecursiveWith(query, args...) 
+ + f.addLeftJoin("children2", "", "children2.item_id = tags.id") + + addHierarchicalConditionClauses(f, models.HierarchicalMultiCriterionInput{ + Value: tags.Excludes, + Depth: tags.Depth, + Modifier: models.CriterionModifierExcludes, + }, "children2", "root_id") } - - var depthCondition string - if depthVal != -1 { - depthCondition = fmt.Sprintf("WHERE depth < %d", depthVal) - } - - query := `children AS ( - SELECT child_id AS root_id, parent_id AS item_id, 0 AS depth FROM tags_relations WHERE child_id IN` + getInBinding(len(tags.Value)) + ` - UNION - SELECT root_id, parent_id, depth + 1 FROM tags_relations INNER JOIN children ON item_id = child_id ` + depthCondition + ` -)` - - f.addRecursiveWith(query, args...) - - f.addLeftJoin("children", "", "children.item_id = tags.id") - - addHierarchicalConditionClauses(f, tags, "children", "root_id") } } } @@ -609,22 +699,25 @@ func (qb *tagQueryBuilder) getTagSort(query *queryBuilder, findFilter *models.Fi direction = findFilter.GetDirection() } - if findFilter.Sort != nil { - switch *findFilter.Sort { - case "scenes_count": - return getCountSort(tagTable, scenesTagsTable, tagIDColumn, direction) - case "scene_markers_count": - return fmt.Sprintf(" ORDER BY (SELECT COUNT(*) FROM scene_markers_tags WHERE tags.id = scene_markers_tags.tag_id)+(SELECT COUNT(*) FROM scene_markers WHERE tags.id = scene_markers.primary_tag_id) %s", getSortDirection(direction)) - case "images_count": - return getCountSort(tagTable, imagesTagsTable, tagIDColumn, direction) - case "galleries_count": - return getCountSort(tagTable, galleriesTagsTable, tagIDColumn, direction) - case "performers_count": - return getCountSort(tagTable, performersTagsTable, tagIDColumn, direction) - } + sortQuery := "" + switch sort { + case "scenes_count": + sortQuery += getCountSort(tagTable, scenesTagsTable, tagIDColumn, direction) + case "scene_markers_count": + sortQuery += fmt.Sprintf(" ORDER BY (SELECT COUNT(*) FROM scene_markers_tags WHERE tags.id = 
scene_markers_tags.tag_id)+(SELECT COUNT(*) FROM scene_markers WHERE tags.id = scene_markers.primary_tag_id) %s", getSortDirection(direction)) + case "images_count": + sortQuery += getCountSort(tagTable, imagesTagsTable, tagIDColumn, direction) + case "galleries_count": + sortQuery += getCountSort(tagTable, galleriesTagsTable, tagIDColumn, direction) + case "performers_count": + sortQuery += getCountSort(tagTable, performersTagsTable, tagIDColumn, direction) + default: + sortQuery += getSort(sort, direction, "tags") } - return getSort(sort, direction, "tags") + // Whatever the sorting, always use name/id as a final sort + sortQuery += ", COALESCE(tags.name, tags.id) COLLATE NATURAL_CI ASC" + return sortQuery } func (qb *tagQueryBuilder) queryTag(ctx context.Context, query string, args []interface{}) (*models.Tag, error) { @@ -648,6 +741,10 @@ func (qb *tagQueryBuilder) GetImage(ctx context.Context, tagID int) ([]byte, err return qb.blobJoinQueryBuilder.GetImage(ctx, tagID, tagImageBlobColumn) } +func (qb *tagQueryBuilder) HasImage(ctx context.Context, tagID int) (bool, error) { + return qb.blobJoinQueryBuilder.HasImage(ctx, tagID, tagImageBlobColumn) +} + func (qb *tagQueryBuilder) UpdateImage(ctx context.Context, tagID int, image []byte) error { return qb.blobJoinQueryBuilder.UpdateImage(ctx, tagID, tagImageBlobColumn, image) } diff --git a/pkg/sqlite/tag_test.go b/pkg/sqlite/tag_test.go index d3ff5459f..5c601ca80 100644 --- a/pkg/sqlite/tag_test.go +++ b/pkg/sqlite/tag_test.go @@ -187,7 +187,7 @@ func TestTagQuerySort(t *testing.T) { tags := queryTags(ctx, t, sqb, nil, findFilter) assert := assert.New(t) - assert.Equal(tagIDs[tagIdxWithScene], tags[0].ID) + assert.Equal(tagIDs[tagIdx2WithScene], tags[0].ID) sortBy = "scene_markers_count" tags = queryTags(ctx, t, sqb, nil, findFilter) @@ -195,15 +195,15 @@ func TestTagQuerySort(t *testing.T) { sortBy = "images_count" tags = queryTags(ctx, t, sqb, nil, findFilter) - assert.Equal(tagIDs[tagIdxWithImage], tags[0].ID) 
+ assert.Equal(tagIDs[tagIdx1WithImage], tags[0].ID) sortBy = "galleries_count" tags = queryTags(ctx, t, sqb, nil, findFilter) - assert.Equal(tagIDs[tagIdxWithGallery], tags[0].ID) + assert.Equal(tagIDs[tagIdx1WithGallery], tags[0].ID) sortBy = "performers_count" tags = queryTags(ctx, t, sqb, nil, findFilter) - assert.Equal(tagIDs[tagIdxWithPerformer], tags[0].ID) + assert.Equal(tagIDs[tagIdx2WithPerformer], tags[0].ID) return nil }) diff --git a/pkg/sqlite/values.go b/pkg/sqlite/values.go index eafb8e462..be812275f 100644 --- a/pkg/sqlite/values.go +++ b/pkg/sqlite/values.go @@ -24,6 +24,15 @@ func nullIntPtr(i null.Int) *int { return &v } +func nullFloatPtr(i null.Float) *float64 { + if !i.Valid { + return nil + } + + v := float64(i.Float64) + return &v +} + func nullIntFolderIDPtr(i null.Int) *file.FolderID { if !i.Valid { return nil diff --git a/pkg/utils/http.go b/pkg/utils/http.go new file mode 100644 index 000000000..d2b40af99 --- /dev/null +++ b/pkg/utils/http.go @@ -0,0 +1,41 @@ +package utils + +import ( + "bytes" + "net/http" + "time" + + "github.com/stashapp/stash/pkg/hash/md5" +) + +// Returns an MD5 hash of data, formatted for use as an HTTP ETag header. +// Intended for use with `http.ServeContent`, to respond to conditional requests. +func GenerateETag(data []byte) string { + hash := md5.FromBytes(data) + return `"` + hash + `"` +} + +// Serves static content, adding Cache-Control: no-cache and a generated ETag header. +// Responds to conditional requests using the ETag. +func ServeStaticContent(w http.ResponseWriter, r *http.Request, data []byte) { + if r.URL.Query().Has("t") { + w.Header().Set("Cache-Control", "private, max-age=31536000, immutable") + } else { + w.Header().Set("Cache-Control", "no-cache") + } + w.Header().Set("ETag", GenerateETag(data)) + + http.ServeContent(w, r, "", time.Time{}, bytes.NewReader(data)) +} + +// Serves static content at filepath, adding Cache-Control: no-cache. 
+// Responds to conditional requests using the file modtime. +func ServeStaticFile(w http.ResponseWriter, r *http.Request, filepath string) { + if r.URL.Query().Has("t") { + w.Header().Set("Cache-Control", "private, max-age=31536000, immutable") + } else { + w.Header().Set("Cache-Control", "no-cache") + } + + http.ServeFile(w, r, filepath) +} diff --git a/pkg/utils/image.go b/pkg/utils/image.go index df73bec82..20435e7eb 100644 --- a/pkg/utils/image.go +++ b/pkg/utils/image.go @@ -2,16 +2,12 @@ package utils import ( "context" - "crypto/md5" "crypto/tls" "encoding/base64" - "errors" "fmt" "io" "net/http" "regexp" - "strings" - "syscall" "time" ) @@ -110,30 +106,12 @@ func GetBase64StringFromData(data []byte) string { return base64.StdEncoding.EncodeToString(data) } -func ServeImage(image []byte, w http.ResponseWriter, r *http.Request) error { - etag := fmt.Sprintf("%x", md5.Sum(image)) - - if match := r.Header.Get("If-None-Match"); match != "" { - if strings.Contains(match, etag) { - w.WriteHeader(http.StatusNotModified) - return nil - } - } - +func ServeImage(w http.ResponseWriter, r *http.Request, image []byte) { contentType := http.DetectContentType(image) if contentType == "text/xml; charset=utf-8" || contentType == "text/plain; charset=utf-8" { contentType = "image/svg+xml" } w.Header().Set("Content-Type", contentType) - w.Header().Add("Etag", etag) - w.Header().Set("Cache-Control", "public, max-age=604800, immutable") - _, err := w.Write(image) - // Broken pipe errors are common when serving images and the remote - // connection closes the connection. Filter them out of the error - // messages, as they are benign. 
- if errors.Is(err, syscall.EPIPE) { - return nil - } - return err + ServeStaticContent(w, r, image) } diff --git a/pkg/utils/phash.go b/pkg/utils/phash.go index 7b15ec5e0..395d86f93 100644 --- a/pkg/utils/phash.go +++ b/pkg/utils/phash.go @@ -1,6 +1,7 @@ package utils import ( + "math" "strconv" "github.com/corona10/goimagehash" @@ -8,21 +9,28 @@ import ( ) type Phash struct { - SceneID int `db:"id"` - Hash int64 `db:"phash"` + SceneID int `db:"id"` + Hash int64 `db:"phash"` + Duration float64 `db:"duration"` Neighbors []int Bucket int } -func FindDuplicates(hashes []*Phash, distance int) [][]int { +func FindDuplicates(hashes []*Phash, distance int, durationDiff float64) [][]int { for i, scene := range hashes { sceneHash := goimagehash.NewImageHash(uint64(scene.Hash), goimagehash.PHash) for j, neighbor := range hashes { if i != j && scene.SceneID != neighbor.SceneID { - neighborHash := goimagehash.NewImageHash(uint64(neighbor.Hash), goimagehash.PHash) - neighborDistance, _ := sceneHash.Distance(neighborHash) - if neighborDistance <= distance { - scene.Neighbors = append(scene.Neighbors, j) + neighbourDurationDistance := 0. + if scene.Duration > 0 && neighbor.Duration > 0 { + neighbourDurationDistance = math.Abs(scene.Duration - neighbor.Duration) + } + if (neighbourDurationDistance <= durationDiff) || (durationDiff < 0) { + neighborHash := goimagehash.NewImageHash(uint64(neighbor.Hash), goimagehash.PHash) + neighborDistance, _ := sceneHash.Distance(neighborHash) + if neighborDistance <= distance { + scene.Neighbors = append(scene.Neighbors, j) + } } } } diff --git a/pkg/utils/strings.go b/pkg/utils/strings.go index 02e1fe67b..0b57f5f6e 100644 --- a/pkg/utils/strings.go +++ b/pkg/utils/strings.go @@ -31,3 +31,13 @@ func StrFormat(format string, m StrFormatMap) string { return strings.NewReplacer(args...).Replace(format) } + +// StringerSliceToStringSlice converts a slice of fmt.Stringers to a slice of strings. 
+func StringerSliceToStringSlice[V fmt.Stringer](v []V) []string { + ret := make([]string, len(v)) + for i, vv := range v { + ret[i] = vv.String() + } + + return ret +} diff --git a/scripts/test_db_generator/makeTestDB.go b/scripts/test_db_generator/makeTestDB.go index bfdb042df..a54e07a87 100644 --- a/scripts/test_db_generator/makeTestDB.go +++ b/scripts/test_db_generator/makeTestDB.go @@ -347,6 +347,10 @@ func getResolution() (int, int) { return w, h } +func getBool() { + return rand.Intn(2) == 0 +} + func getDate() time.Time { s := rand.Int63n(time.Now().Unix()) @@ -371,6 +375,7 @@ func generateImageFile(parentFolderID file.FolderID, path string) file.File { BaseFile: generateBaseFile(parentFolderID, path), Height: h, Width: w, + Clip: getBool(), } } diff --git a/ui/login/login.html b/ui/login/login.html index 48ee0cc50..54dee83f7 100644 --- a/ui/login/login.html +++ b/ui/login/login.html @@ -1,7 +1,7 @@ - + Login diff --git a/ui/ui.go b/ui/ui.go index 87d9e7f0e..89ea75a44 100644 --- a/ui/ui.go +++ b/ui/ui.go @@ -1,9 +1,46 @@ package ui -import "embed" +import ( + "embed" + "io/fs" + "runtime" +) //go:embed v2.5/build -var UIBox embed.FS +var uiBox embed.FS +var UIBox fs.FS //go:embed login -var LoginUIBox embed.FS +var loginUIBox embed.FS +var LoginUIBox fs.FS + +func init() { + var err error + UIBox, err = fs.Sub(uiBox, "v2.5/build") + if err != nil { + panic(err) + } + + LoginUIBox, err = fs.Sub(loginUIBox, "login") + if err != nil { + panic(err) + } +} + +type faviconProvider struct{} + +var FaviconProvider = faviconProvider{} + +func (p *faviconProvider) GetFavicon() []byte { + if runtime.GOOS == "windows" { + ret, _ := fs.ReadFile(UIBox, "favicon.ico") + return ret + } + + return p.GetFaviconPng() +} + +func (p *faviconProvider) GetFaviconPng() []byte { + ret, _ := fs.ReadFile(UIBox, "favicon.png") + return ret +} diff --git a/ui/v2.5/index.html b/ui/v2.5/index.html index 11bbb270d..4134a27f0 100755 --- a/ui/v2.5/index.html +++ b/ui/v2.5/index.html @@ 
-12,9 +12,6 @@ Stash - diff --git a/ui/v2.5/package.json b/ui/v2.5/package.json index 7236dda18..47356a9d6 100644 --- a/ui/v2.5/package.json +++ b/ui/v2.5/package.json @@ -64,11 +64,13 @@ "slick-carousel": "^1.8.1", "string.prototype.replaceall": "^1.0.7", "thehandy": "^1.0.3", + "ua-parser-js": "^1.0.34", "universal-cookie": "^4.0.4", "video.js": "^7.21.3", "videojs-contrib-dash": "^5.1.1", "videojs-mobile-ui": "^0.8.0", "videojs-seek-buttons": "^3.0.1", + "videojs-vr": "^2.0.0", "videojs-vtt.js": "^0.15.4", "yup": "^1.0.0" }, @@ -89,8 +91,9 @@ "@types/react-helmet": "^6.1.6", "@types/react-router-bootstrap": "^0.24.5", "@types/react-router-hash-link": "^2.4.5", + "@types/ua-parser-js": "^0.7.36", "@types/video.js": "^7.3.51", - "@types/videojs-mobile-ui": "^0.5.0", + "@types/videojs-mobile-ui": "^0.8.0", "@types/videojs-seek-buttons": "^2.1.0", "@typescript-eslint/eslint-plugin": "^5.52.0", "@typescript-eslint/parser": "^5.52.0", diff --git a/ui/v2.5/public/vr.svg b/ui/v2.5/public/vr.svg new file mode 100644 index 000000000..2c6c29773 --- /dev/null +++ b/ui/v2.5/public/vr.svg @@ -0,0 +1,5 @@ + + diff --git a/ui/v2.5/src/@types/videojs-vr.d.ts b/ui/v2.5/src/@types/videojs-vr.d.ts new file mode 100644 index 000000000..54111718f --- /dev/null +++ b/ui/v2.5/src/@types/videojs-vr.d.ts @@ -0,0 +1,116 @@ +/* eslint-disable @typescript-eslint/naming-convention */ + +declare module "videojs-vr" { + import videojs from "video.js"; + + declare function videojsVR(options?: videojsVR.Options): videojsVR.Plugin; + + declare namespace videojsVR { + const VERSION: typeof videojs.VERSION; + + type ProjectionType = + // The video is half sphere and the user should not be able to look behind themselves + | "180" + // Used for side-by-side 180 videos The video is half sphere and the user should not be able to look behind themselves + | "180_LR" + // Used for monoscopic 180 videos The video is half sphere and the user should not be able to look behind themselves + | "180_MONO" + // 
The video is a sphere + | "360" + | "Sphere" + | "equirectangular" + // The video is a cube + | "360_CUBE" + | "Cube" + // This video is not a 360 video + | "NONE" + // Check player.mediainfo.projection to see if the current video is a 360 video. + | "AUTO" + // Used for side-by-side 360 videos + | "360_LR" + // Used for top-to-bottom 360 videos + | "360_TB" + // Used for Equi-Angular Cubemap videos + | "EAC" + // Used for side-by-side Equi-Angular Cubemap videos + | "EAC_LR"; + + interface Options { + /** + * Force the cardboard button to display on all devices even if we don't think they support it. + * + * @default false + */ + forceCardboard?: boolean; + + /** + * Whether motion/gyro controls should be enabled. + * + * @default true on iOS and Android + */ + motionControls?: boolean; + + /** + * Defines the projection type. + * + * @default "AUTO" + */ + projection?: ProjectionType; + + /** + * This alters the number of segments in the spherical mesh onto which equirectangular videos are projected. + * The default is 32 but in some circumstances you may notice artifacts and need to increase this number. + * + * @default 32 + */ + sphereDetail?: number; + + /** + * Enable debug logging for this plugin + * + * @default false + */ + debug?: boolean; + + /** + * Use this property to pass the Omnitone library object to the plugin. Please be aware of, the Omnitone library is not included in the build files. + */ + omnitone?: object; + + /** + * Default options for the Omnitone library. Please check available options on https://github.com/GoogleChrome/omnitone + */ + omnitoneOptions?: object; + + /** + * Feature to disable the togglePlay manually. This functionality is useful in live events so that users cannot stop the live, but still have a controlBar available. + * + * @default false + */ + disableTogglePlay?: boolean; + } + + interface PlayerMediaInfo { + /** + * This should be set on a source-by-source basis to turn 360 videos on an off depending upon the video. 
+ * Note that AUTO is the same as NONE for player.mediainfo.projection. + */ + projection?: ProjectionType; + } + + class Plugin extends videojs.Plugin { + setProjection(projection: ProjectionType): void; + init(): void; + reset(): void; + } + } + + export = videojsVR; + + declare module "video.js" { + interface VideoJsPlayer { + vr: typeof videojsVR; + mediainfo?: videojsVR.PlayerMediaInfo; + } + } +} diff --git a/ui/v2.5/src/App.tsx b/ui/v2.5/src/App.tsx index 4a374065c..a522f3624 100644 --- a/ui/v2.5/src/App.tsx +++ b/ui/v2.5/src/App.tsx @@ -94,6 +94,20 @@ export const App: React.FC = () => { // use en-GB as default messages if any messages aren't found in the chosen language const [messages, setMessages] = useState<{}>(); + const [customMessages, setCustomMessages] = useState<{}>(); + + useEffect(() => { + (async () => { + try { + const res = await fetch(getPlatformURL() + "customlocales"); + if (res.ok) { + setCustomMessages(await res.json()); + } + } catch (err) { + console.log(err); + } + })(); + }, []); useEffect(() => { const setLocale = async () => { @@ -106,15 +120,6 @@ export const App: React.FC = () => { const defaultMessages = (await locales[defaultMessageLanguage]()).default; const mergedMessages = cloneDeep(Object.assign({}, defaultMessages)); const chosenMessages = (await locales[messageLanguage]()).default; - let customMessages = {}; - try { - const res = await fetch(getPlatformURL() + "customlocales"); - if (res.ok) { - customMessages = await res.json(); - } - } catch (err) { - console.log(err); - } mergeWith( mergedMessages, @@ -142,7 +147,7 @@ export const App: React.FC = () => { }; setLocale(); - }, [language]); + }, [customMessages, language]); const location = useLocation(); const history = useHistory(); diff --git a/ui/v2.5/src/components/Changelog/Changelog.tsx b/ui/v2.5/src/components/Changelog/Changelog.tsx index d959019a7..865fc4acd 100644 --- a/ui/v2.5/src/components/Changelog/Changelog.tsx +++ 
b/ui/v2.5/src/components/Changelog/Changelog.tsx @@ -25,6 +25,7 @@ import V0170 from "src/docs/en/Changelog/v0170.md"; import V0180 from "src/docs/en/Changelog/v0180.md"; import V0190 from "src/docs/en/Changelog/v0190.md"; import V0200 from "src/docs/en/Changelog/v0200.md"; +import V0210 from "src/docs/en/Changelog/v0210.md"; import { MarkdownPage } from "../Shared/MarkdownPage"; const Changelog: React.FC = () => { @@ -60,9 +61,9 @@ const Changelog: React.FC = () => { // after new release: // add entry to releases, using the current* fields // then update the current fields. - const currentVersion = stashVersion || "v0.20.0"; + const currentVersion = stashVersion || "v0.21.0"; const currentDate = buildDate; - const currentPage = V0200; + const currentPage = V0210; const releases: IStashRelease[] = [ { @@ -71,6 +72,11 @@ const Changelog: React.FC = () => { page: currentPage, defaultOpen: true, }, + { + version: "v0.20.2", + date: "2023-04-08", + page: V0200, + }, { version: "v0.19.1", date: "2023-02-21", diff --git a/ui/v2.5/src/components/Dialogs/IdentifyDialog/FieldOptions.tsx b/ui/v2.5/src/components/Dialogs/IdentifyDialog/FieldOptions.tsx index ba027cd5c..68a31fe6b 100644 --- a/ui/v2.5/src/components/Dialogs/IdentifyDialog/FieldOptions.tsx +++ b/ui/v2.5/src/components/Dialogs/IdentifyDialog/FieldOptions.tsx @@ -96,17 +96,13 @@ const FieldOptionsEditor: React.FC = ({ }); } - if (!localOptions) { - return <>; - } - return ( {allowSetDefault ? 
( setLocalOptions({ ...localOptions, @@ -122,7 +118,7 @@ const FieldOptionsEditor: React.FC = ({ type="radio" key={f[0]} id={`${field}-strategy-${f[0]}`} - checked={localOptions.strategy === f[1]} + checked={strategy === f[1]} onChange={() => setLocalOptions({ ...localOptions, @@ -168,7 +164,9 @@ const FieldOptionsEditor: React.FC = ({ (f) => f.field === localOptions.field )?.createMissing; - if (localOptions.strategy === undefined) { + // if allowSetDefault is false, then strategy is considered merge + // if its true, then its using the default value and should not be shown here + if (localOptions.strategy === undefined && allowSetDefault) { return; } diff --git a/ui/v2.5/src/components/FrontPage/styles.scss b/ui/v2.5/src/components/FrontPage/styles.scss index a1661d032..e4049b5aa 100644 --- a/ui/v2.5/src/components/FrontPage/styles.scss +++ b/ui/v2.5/src/components/FrontPage/styles.scss @@ -306,17 +306,17 @@ } @media (max-width: 576px) { - .slick-list .scene-card, - .slick-list .studio-card, - .slick-list .gallery-card { + .slick-list .scene-card.card, + .slick-list .studio-card.card, + .slick-list .gallery-card.card { width: 20rem; } - .slick-list .movie-card { + .slick-list .movie-card.card { width: 16rem; } - .slick-list .performer-card { + .slick-list .performer-card.card { width: 16rem; } diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx index 1adedc799..f7d50da29 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx @@ -24,7 +24,11 @@ import { GalleryImagesPanel } from "./GalleryImagesPanel"; import { GalleryAddPanel } from "./GalleryAddPanel"; import { GalleryFileInfoPanel } from "./GalleryFileInfoPanel"; import { GalleryScenesPanel } from "./GalleryScenesPanel"; -import { faEllipsisV } from "@fortawesome/free-solid-svg-icons"; +import { + faEllipsisV, + faChevronRight, + faChevronLeft, +} 
from "@fortawesome/free-solid-svg-icons"; import { galleryPath, galleryTitle } from "src/core/galleries"; import { GalleryChapterPanel } from "./GalleryChaptersPanel"; @@ -60,6 +64,23 @@ export const GalleryPage: React.FC = ({ gallery }) => { const [organizedLoading, setOrganizedLoading] = useState(false); + async function onSave(input: GQL.GalleryCreateInput) { + await updateGallery({ + variables: { + input: { + id: gallery.id, + ...input, + }, + }, + }); + Toast.success({ + content: intl.formatMessage( + { id: "toast.updated_entity" }, + { entity: intl.formatMessage({ id: "gallery" }).toLocaleLowerCase() } + ), + }); + } + const onOrganizedClick = async () => { try { setOrganizedLoading(true); @@ -78,8 +99,8 @@ export const GalleryPage: React.FC = ({ gallery }) => { } }; - function getCollapseButtonText() { - return collapsed ? ">" : "<"; + function getCollapseButtonIcon() { + return collapsed ? faChevronRight : faChevronLeft; } async function onRescan() { @@ -238,6 +259,7 @@ export const GalleryPage: React.FC = ({ gallery }) => { setIsDeleteAlertOpen(true)} /> @@ -339,7 +361,7 @@ export const GalleryPage: React.FC = ({ gallery }) => {
diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryCreate.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryCreate.tsx index 62e80e23e..d6519bcd2 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryCreate.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryCreate.tsx @@ -1,16 +1,39 @@ import React, { useMemo } from "react"; import { FormattedMessage, useIntl } from "react-intl"; -import { useLocation } from "react-router-dom"; +import { useHistory, useLocation } from "react-router-dom"; +import * as GQL from "src/core/generated-graphql"; +import { useGalleryCreate } from "src/core/StashService"; +import { useToast } from "src/hooks/Toast"; import { GalleryEditPanel } from "./GalleryEditPanel"; const GalleryCreate: React.FC = () => { + const history = useHistory(); const intl = useIntl(); + const Toast = useToast(); + const location = useLocation(); const query = useMemo(() => new URLSearchParams(location.search), [location]); const gallery = { title: query.get("q") ?? undefined, }; + const [createGallery] = useGalleryCreate(); + + async function onSave(input: GQL.GalleryCreateInput) { + const result = await createGallery({ + variables: { input }, + }); + if (result.data?.galleryCreate) { + history.push(`/galleries/${result.data.galleryCreate.id}`); + Toast.success({ + content: intl.formatMessage( + { id: "toast.created_entity" }, + { entity: intl.formatMessage({ id: "gallery" }).toLocaleLowerCase() } + ), + }); + } + } + return (
@@ -20,7 +43,12 @@ const GalleryCreate: React.FC = () => { values={{ entityType: intl.formatMessage({ id: "gallery" }) }} /> - {}} /> + {}} + />
); diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx index d560127dc..7208971f0 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx @@ -1,6 +1,6 @@ import React, { useEffect, useState } from "react"; import { FormattedMessage, useIntl } from "react-intl"; -import { useHistory, Prompt } from "react-router-dom"; +import { Prompt } from "react-router-dom"; import { Button, Dropdown, @@ -15,8 +15,6 @@ import * as yup from "yup"; import { queryScrapeGallery, queryScrapeGalleryURL, - useGalleryCreate, - useGalleryUpdate, useListGalleryScrapers, mutateReloadScrapers, } from "src/core/StashService"; @@ -40,21 +38,23 @@ import { useRatingKeybinds } from "src/hooks/keybinds"; import { ConfigurationContext } from "src/hooks/Config"; import isEqual from "lodash-es/isEqual"; import { DateInput } from "src/components/Shared/DateInput"; +import { handleUnsavedChanges } from "src/utils/navigation"; interface IProps { gallery: Partial; isVisible: boolean; + onSubmit: (input: GQL.GalleryCreateInput) => Promise; onDelete: () => void; } export const GalleryEditPanel: React.FC = ({ gallery, isVisible, + onSubmit, onDelete, }) => { const intl = useIntl(); const Toast = useToast(); - const history = useHistory(); const [scenes, setScenes] = useState<{ id: string; title: string }[]>( (gallery?.scenes ?? 
[]).map((s) => ({ id: s.id, @@ -74,9 +74,6 @@ export const GalleryEditPanel: React.FC = ({ // Network state const [isLoading, setIsLoading] = useState(false); - const [createGallery] = useGalleryCreate(); - const [updateGallery] = useGalleryUpdate(); - const titleRequired = isNew || (gallery?.files?.length === 0 && !gallery?.folder); @@ -151,7 +148,9 @@ export const GalleryEditPanel: React.FC = ({ useEffect(() => { if (isVisible) { Mousetrap.bind("s s", () => { - formik.handleSubmit(); + if (formik.dirty) { + formik.submitForm(); + } }); Mousetrap.bind("d d", () => { onDelete(); @@ -174,51 +173,11 @@ export const GalleryEditPanel: React.FC = ({ setQueryableScrapers(newQueryableScrapers); }, [Scrapers]); - async function onSave(input: GQL.GalleryCreateInput) { + async function onSave(input: InputValues) { setIsLoading(true); try { - if (isNew) { - const result = await createGallery({ - variables: { - input, - }, - }); - if (result.data?.galleryCreate) { - history.push(`/galleries/${result.data.galleryCreate.id}`); - Toast.success({ - content: intl.formatMessage( - { id: "toast.created_entity" }, - { - entity: intl - .formatMessage({ id: "gallery" }) - .toLocaleLowerCase(), - } - ), - }); - } - } else { - const result = await updateGallery({ - variables: { - input: { - id: gallery.id!, - ...input, - }, - }, - }); - if (result.data?.galleryUpdate) { - Toast.success({ - content: intl.formatMessage( - { id: "toast.updated_entity" }, - { - entity: intl - .formatMessage({ id: "gallery" }) - .toLocaleLowerCase(), - } - ), - }); - formik.resetForm(); - } - } + await onSubmit(input); + formik.resetForm(); } catch (e) { Toast.error(e); } @@ -412,7 +371,7 @@ export const GalleryEditPanel: React.FC = ({